Exemplo n.º 1
0
class TestHTTPBasicAuth(TestCase):
    """Exercise the scheduler REST API behind HTTP basic authentication."""

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.auth = HTTPBasicAuth()
        self.scheduler.api_enabled = True
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.scheduler.authenticate(self._authenticate)
        self.client = self.app.test_client()

    def _authenticate(self, auth):
        # Only the test/test credential pair is accepted.
        return (auth['username'], auth['password']) == ('test', 'test')

    @staticmethod
    def _basic_header(credentials):
        # Build a basic-auth header from b"user:password" bytes.
        return {'Authorization': 'Basic ' + base64.b64encode(credentials).decode('ascii')}

    def test_valid_credentials(self):
        response = self.client.get(self.scheduler.api_prefix + '',
                                   headers=self._basic_header(b'test:test'))
        self.assertEqual(200, response.status_code)

    def test_invalid_credentials(self):
        response = self.client.get(self.scheduler.api_prefix + '',
                                   headers=self._basic_header(b'guest:guest'))
        self.assertEqual(401, response.status_code)
        self.assertEqual('Basic realm="Authentication Required"',
                         response.headers['WWW-Authenticate'])

    def test_invalid_header_format(self):
        # Payload is not base64 of "user:password", so it must be rejected.
        response = self.client.get(self.scheduler.api_prefix + '',
                                   headers={'Authorization': 'Basic 1231234'})
        self.assertEqual(401, response.status_code)
        self.assertEqual('Basic realm="Authentication Required"',
                         response.headers['WWW-Authenticate'])

    def test_missing_credentials(self):
        response = self.client.get(self.scheduler.api_prefix + '')
        self.assertEqual(401, response.status_code)
        self.assertEqual('Basic realm="Authentication Required"',
                         response.headers['WWW-Authenticate'])
Exemplo n.º 2
0
def create_app():
  """Create and configure the Flask application.

  Configuration is loaded from the file named by the SITTER_SETTINGS
  environment variable when set; otherwise the first ``settings.cfg``
  found in the package's parent directory or the package directory
  itself is used.  A filesystem-backed server-side session store is
  attached, and a background job purges expired sessions every 10s.

  Returns:
    The configured Flask application instance.
  """
  app = Flask(__name__)

  if os.environ.get('SITTER_SETTINGS'):
    app.config.from_envvar('SITTER_SETTINGS')
  else:
    # Search the parent directory first, then the package directory.
    for path in (os.path.dirname(os.path.dirname(__file__)), os.path.dirname(__file__)):
      settings = os.path.join(path, 'settings.cfg')
      if os.path.isfile(settings):
        app.config.from_pyfile(settings)
        break

  # exist_ok avoids the check-then-create race of the original
  # os.path.exists()/os.makedirs() pair.
  os.makedirs(app.config['SESSION_STORAGE_DIR'], exist_ok=True)

  kv = KVSessionExtension(FilesystemStore(app.config['SESSION_STORAGE_DIR']), app)

  # Remove extra white space emitted by Jinja block tags.
  app.jinja_env.trim_blocks = True
  app.jinja_env.lstrip_blocks = True

  def cleanup_sessions():
    # Purge expired entries from the server-side session store.
    kv.cleanup_sessions(app)

  # Add some scheduled jobs.
  scheduler = APScheduler()
  scheduler.init_app(app)
  scheduler.add_job(id="kv-session-cleanup", func=cleanup_sessions, seconds=10, trigger='interval')
  scheduler.start()

  return app
Exemplo n.º 3
0
class TestAPIPrefix(TestCase):
    """Verify that the REST API is mounted under a custom URL prefix."""

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.api_enabled = True
        self.scheduler.api_prefix = '/api'
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def test_api_prefix(self):
        # The jobs endpoint must be reachable under the configured prefix.
        url = self.scheduler.api_prefix + '/jobs'
        self.assertEqual(200, self.client.get(url).status_code)

    def test_invalid_api_prefix(self):
        # Any other prefix must not expose the API.
        self.assertEqual(404, self.client.get('/invalidapi/jobs').status_code)
Exemplo n.º 4
0
def stacosys_server(config_pathname):
    """Bootstrap and run the Stacosys Flask application.

    Loads configuration from *config_pathname*, configures logging and
    the database, starts the scheduled mail/comment jobs, regenerates
    the RSS feeds, then blocks in Flask's built-in server.
    """
    app = Flask(__name__)
    config.initialize(config_pathname, app)

    # configure logging; quieten the chatty werkzeug/apscheduler loggers
    logger = logging.getLogger(__name__)
    configure_logging(logging.INFO)
    logging.getLogger("werkzeug").level = logging.WARNING
    logging.getLogger("apscheduler.executors").level = logging.WARNING

    # initialize database (import deferred until config is ready)
    from core import database

    database.setup()

    # cron email fetcher: polling intervals come from the config file
    app.config.from_object(
        JobConfig(
            config.getInt(config.MAIL_POLLING), config.getInt(config.COMMENT_POLLING)
        )
    )
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.start()

    logger.info("Start Stacosys application")

    # generate RSS for all sites
    from core import rss

    rss.generate_all()

    # start Flask — importing the interface modules presumably registers
    # their routes as a side effect (TODO confirm in interface package)
    from interface import api
    from interface import form

    logger.debug("Load interface %s" % api)
    logger.debug("Load interface %s" % form)

    # use_reloader=False: the reloader would re-run this bootstrap twice
    app.run(
        host=config.get(config.HTTP_HOST),
        port=config.get(config.HTTP_PORT),
        debug=False,
        use_reloader=False,
    )
class TestScheduler(TestCase):
    """Lifecycle and configuration tests for the APScheduler extension."""

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()

    def test_running(self):
        # The scheduler only reports running after an explicit start().
        self.assertFalse(self.scheduler.running)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)

    def test_start_with_allowed_hosts(self):
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = ['any_server_name']
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        # start() is expected to be a no-op under this host configuration.
        self.assertFalse(self.scheduler.running)

    def test_start_without_allowed_hosts(self):
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = []
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertFalse(self.scheduler.running)

    def test_shutdown(self):
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)

    def test_load_jobs_from_config(self):
        # Jobs declared in app config are registered during init_app().
        self.app.config['JOBS'] = [dict(
            id='job1',
            func='tests.test_api:job1',
            trigger='interval',
            seconds=10,
        )]

        self.scheduler.init_app(self.app)
        self.assertIsNotNone(self.scheduler.get_job('job1'))

    def test_task_decorator(self):
        @self.scheduler.task('interval', seconds=10, id='job1')
        def scheduled_noop():
            pass

        self.assertIsNotNone(self.scheduler.get_job('job1'))
Exemplo n.º 6
0
class TestEndpointPrefix(TestCase):
    """Verify that generated endpoint names honour the configured prefix."""

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.api_enabled = True
        self.scheduler.endpoint_prefix = 'api.'
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def test_endpoint_prefix(self):
        with self.scheduler.app.test_request_context():
            # url_for raises BuildError for unknown endpoints, so reaching
            # the assertion proves the prefixed endpoint is registered.
            valid_url = bool(url_for(self.scheduler.endpoint_prefix + 'get_scheduler_info'))
            self.assertTrue(valid_url)

    def test_invalid_endpoint_prefix(self):
        with self.scheduler.app.test_request_context():
            # The unprefixed endpoint name must no longer resolve.
            try:
                valid_url = url_for('get_scheduler_info')
            except BuildError:
                valid_url = False
            self.assertFalse(valid_url)
Exemplo n.º 7
0
Arquivo: run.py Projeto: kianby/srmail
def srmail_server(config_pathname):
    """Bootstrap and run the SRMAIL Flask application.

    Loads configuration from *config_pathname*, configures logging and
    the database, starts the scheduled IMAP fetch job, then blocks in
    Flask's built-in server until shutdown.
    """
    app = Flask(__name__)
    config.initialize(config_pathname, app)

    # configure logging
    logger = logging.getLogger(__name__)
    configure_logging(logging.INFO)

    # initialize database (import deferred until config is ready)
    from core import database

    database.setup()

    # cron email fetcher: polling interval comes from the config file
    app.config.from_object(JobConfig(config.getInt(config.IMAP_POLLING)))
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.start()

    logger.info("Starting SRMAIL application")

    # start Flask — importing the interface module presumably registers
    # its routes as a side effect (TODO confirm in interface package)
    from interface import api

    logger.debug("Load interface %s" % api)

    # use_reloader=False: the reloader would re-run this bootstrap twice
    app.run(
        host=config.get(config.HTTP_HOST),
        port=config.get(config.HTTP_PORT),
        debug=False,
        use_reloader=False,
    )

    # Exit application (reached once app.run() returns)
    logger.info("Stopping SRMAIL application")
Exemplo n.º 8
0
def scheduled_init(app):
    """Attach the global job list to *app* and start an API-enabled scheduler."""
    app.config['JOBS'] = jobs
    scheduler = APScheduler()
    scheduler.api_enabled = True
    scheduler.init_app(app)
    scheduler.start()
Exemplo n.º 9
0
        'args': (1, 2),
        'trigger': 'interval',
        'seconds': 10
    }]

    SCHEDULER_AUTH_ENABLED = True
    SCHEDULER_API_ENABLED = True


def job1(a, b):
    """Print the string forms of *a* and *b* separated by a single space."""
    print(' '.join((str(a), str(b))))


app = Flask(__name__)
app.config.from_object(Config())

# Scheduler is wired to the app and started before any request is served.
scheduler = APScheduler()
scheduler.init_app(app)
scheduler.start()


# Registered as the API authentication callback (the Config above sets
# SCHEDULER_AUTH_ENABLED = True).
@scheduler.authenticate
def authenticate(auth):
    # Only HTTP basic authentication is handled here.
    if auth.type != 'basic':
        return None

    # Accept only the guest/guest credential pair.
    return auth.username == 'guest' and auth.password == 'guest'


app.run()
Exemplo n.º 10
0
from flask import Flask, redirect, url_for, session
from flask import render_template
from flask import request
from flask_apscheduler import APScheduler
from datetime import timedelta
from mysql_db import *
import json
from config import *
from ip_address import *
app = Flask(__name__)
app.config.from_object('flask_config')
# NOTE(review): a fresh random SECRET_KEY per process invalidates every
# existing session on restart — confirm this is intended.
app.config['SECRET_KEY'] = os.urandom(24)
# Permanent sessions survive the browser closing for up to 7 days.
app.config['PERMANENT_SESSION_LIFETIME'] = timedelta(days=7)
sched = APScheduler()
sched.init_app(app)
sched.start()


@app.route('/')
def index():
    """Serve the login page, marking the session permanent."""
    session.permanent = True  # honour PERMANENT_SESSION_LIFETIME
    return render_template("login.html")


@app.route('/api/', methods=['GET'])
def api():
    try:
        # num = request.args.get('num') if request.args.get('num') != None else 1
        # ip_type = request.args.get('type') if request.args.get('type') != None else 'http'
        page = request.args.get(
            'page') if request.args.get('page') != None else 0
Exemplo n.º 11
0
def initSchedule():
    """Load the job configuration into the global app and start a scheduler."""
    app.config.from_object(Config())
    sched = APScheduler()
    sched.init_app(app)
    sched.start()
    print("Scheduler Initialized\n")
class ApplicationWindow(QtWidgets.QMainWindow):
    """Qt main window that mirrors a hardware prop over I2C (SMBus).

    UI widgets (keypad, switches, photo-resistors, dial, wires, reset,
    ultrasonic slider) forward their events as I2C writes to the matching
    peripheral addresses, and ``poll_sensors`` reads peripheral state back
    into the UI on a background scheduler.
    """

    def __init__(self):
        super(ApplicationWindow, self).__init__()
        # SMBus 1 — assumed to be the bus the prop hardware is attached to
        # (TODO confirm for the target board).
        self.bus_num = 1
        self.bus = SMBus(self.bus_num)
        self.ui = Ui_MainWindow()
        self.ui.setupUi(self)

        # Get easy access to UI elements
        self.rgb = self.ui.lid_display.layout()
        self.minutes = self.ui.lcdMinutes
        self.seconds = self.ui.lcdSeconds
        self.potentiometer = self.ui.dial

        # TODO Make connections to functions

        # TODO Listen on the SMBus for messages and then call the right functions

        # Keypad: send the pressed hex digit to the Arduino.  partial()
        # binds h immediately, avoiding the late-binding lambda trap.
        for h, button in self.keypad.items():
            button.clicked.connect(
                partial(
                    self.bus.write_byte, I2C.ARDUINO, h
                )
            )

        # Photo-resistor checkboxes: resend the full laser bitmask on change.
        for _, checkbox in self.photo_resistor.items():
            checkbox.clicked.connect(
                lambda: self.bus.write_byte(I2C.LASERS, self.laser_mask)
            )

        self.potentiometer.valueChanged.connect(
            lambda: self.bus.write_byte(I2C.ROTARY, self.potentiometer.value()))

        # Sliders: resend the full switch bitmask whenever any slider moves.
        for _, switch in self.switches.items():
            switch.valueChanged.connect(
                lambda: self.bus.write_byte(I2C.SWITCHES, self.switch_mask)
            )

        # Colored disconnectable wires
        self.ui.wire_red.clicked.connect(lambda: self.bus.write_byte(I2C.WIRE, 0xd))
        self.ui.wire_blue.clicked.connect(lambda: self.bus.write_byte(I2C.WIRE, 0xb))
        self.ui.wire_green.clicked.connect(lambda: self.bus.write_byte(I2C.WIRE, 0xe))

        # Reset button
        self.ui.start_reset.clicked.connect(lambda: self.bus.write_byte(I2C.RESET, 0x1))

        self.ui.ultrasonicSlider.valueChanged.connect(
            lambda: self.bus.write_byte(I2C.ULTRASONIC, self.ui.ultrasonicSlider.value())
        )

        # Background polling; each run re-schedules itself (see poll_sensors).
        # NOTE(review): add_job is called again with the same id and
        # replace_existing=False — confirm APScheduler accepts that here.
        self.scheduler = APScheduler(scheduler=BackgroundScheduler())
        self.scheduler.add_job("poll", self.poll_sensors, max_instances=2,
                               replace_existing=False)
        self.scheduler.start()

    def poll_sensors(self):
        """Read each I2C peripheral once, update the UI, then re-schedule."""
        for i in I2C:
            word = self.bus.read_i2c_block_data(i, 0, 10)
            if i is I2C.SEVENSEG:
                # Assumed layout: minute digits at offsets 0/2, second digits
                # at 6/8 — TODO confirm against the seven-segment firmware.
                self.ui.lcdMinutes.display("0x{}{}".format(SevenSeg.inv_map.get(word[0], 0), SevenSeg.inv_map.get(word[2], 0)))
                self.ui.lcdSeconds.display("0x{}{}".format(SevenSeg.inv_map.get(word[6], 0), SevenSeg.inv_map.get(word[8], 0)))
            elif i is I2C.ARDUINO:
                # Reset the RGB indicator, then light one channel based on
                # the reported byte's magnitude.
                self.ui.RGB_red.setChecked(False)
                self.ui.RGB_blue.setChecked(False)
                self.ui.RGB_green.setChecked(False)
                self.ui.RGB_red.setCheckable(bool(word[0]))
                self.ui.RGB_blue.setCheckable(bool(word[0]))
                self.ui.RGB_green.setCheckable(bool(word[0]))
                if not word[0]:
                    pass
                elif word[0] < 20:
                    self.ui.RGB_blue.setChecked(True)
                elif word[0] < 200:
                    self.ui.RGB_green.setChecked(True)
                else:
                    self.ui.RGB_red.setChecked(True)
            elif i is I2C.SOLENOID:
                if word[0] == 255:
                    self.ui.solenoid.setChecked(True)
                else:
                    # TODO lock the solenoid after 10 seconds like the real one
                    self.ui.solenoid.setChecked(False)

        # sleep for a frame, then queue the next poll
        sleep(1/60)
        self.scheduler.add_job("poll", self.poll_sensors, max_instances=2,
                               replace_existing=False)

    @property
    def time(self) -> str:
        """
        :return: The current time shown on the lcd

        NOTE(review): body is unimplemented — always returns None despite
        the ``str`` annotation.
        """

    @time.setter
    def time(self, value):
        """
        TODO set the value on the timer
        :param value:
        :return:
        """

    @property
    def switches(self) -> Dict[int, QSlider]:
        # Bit offset -> slider widget, MSB first.
        return {
            4: self.ui.verticalSlider_1,
            3: self.ui.verticalSlider_2,
            2: self.ui.verticalSlider_3,
            1: self.ui.verticalSlider_4,
            0: self.ui.verticalSlider_5,
        }

    @property
    def switch_mask(self) -> bin:
        # Pack each slider's on/off state into a 5-bit integer.
        result = 0b00000
        for offset, sw in self.switches.items():
            result ^= ((1 if sw.value() else 0) << offset)
        return result

    @property
    def laser_mask(self) -> bin:
        """
        :return: An integer that represents all the photo resistors that have a laser shining on them
        """
        result = 0b000000
        for offset, box in self.photo_resistor.items():
            result ^= ((1 if box.isChecked() else 0) << offset)
        return result

    @property
    def laser(self) -> Dict[int, QCheckBox]:
        # Bit offset -> laser checkbox (not referenced by laser_mask above).
        return {
            5: self.ui.laser_0,
            4: self.ui.laser_1,
            3: self.ui.laser_2,
            2: self.ui.laser_3,
            1: self.ui.laser_4,
            0: self.ui.laser_5,
        }

    @property
    def photo_resistor(self) -> Dict[int, QCheckBox]:
        # Bit offset -> photodiode checkbox, used to build laser_mask.
        return {
            5: self.ui.photodiode_0,
            4: self.ui.photodiode_1,
            3: self.ui.photodiode_2,
            2: self.ui.photodiode_3,
            1: self.ui.photodiode_4,
            0: self.ui.photodiode_5,
        }

    @property
    def led(self) -> Dict[int, QCheckBox]:
        # Bit offset -> LED indicator checkbox.
        return {
            0: self.ui.led_0,
            1: self.ui.led_1,
            2: self.ui.led_2,
            3: self.ui.led_3,
            4: self.ui.led_4,
            5: self.ui.led_5,
            6: self.ui.led_6,
            7: self.ui.led_7,
        }

    @property
    def keypad(self) -> Dict[hex, QPushButton]:
        # Hex digit -> keypad button widget.
        return {
            0x0: self.ui.pushButton0,
            0x1: self.ui.pushButton1,
            0x2: self.ui.pushButton2,
            0x3: self.ui.pushButton3,
            0x4: self.ui.pushButton4,
            0x5: self.ui.pushButton5,
            0x6: self.ui.pushButton6,
            0x7: self.ui.pushButton7,
            0x8: self.ui.pushButton8,
            0x9: self.ui.pushButton9,
            0xa: self.ui.pushButtona,
            0xb: self.ui.pushButtonb,
            0xc: self.ui.pushButtonc,
            0xd: self.ui.pushButtond,
            0xe: self.ui.pushButtone,
            0xf: self.ui.pushButtonf,
        }

    @staticmethod
    def run():
        """Create the Qt application, show the window, and enter the event loop."""
        app = QtWidgets.QApplication(sys.argv)
        application = ApplicationWindow()
        application.show()
        return app.exec_()
Exemplo n.º 13
0
def schedulerStart():
    """Bind the scheduler to the global Flask app and start it."""
    job_scheduler = APScheduler()
    job_scheduler.init_app(app)
    job_scheduler.start()
Exemplo n.º 14
0
def load(app):
    """CTFd plugin entry point for ctfd-whale.

    Registers the dynamic-docker challenge type, mounts admin and user
    container-management routes on a blueprint, and (in exactly one
    process, guarded by a file lock) starts a scheduler job that destroys
    expired containers every 10 seconds.
    """
    # upgrade()
    app.db.create_all()
    CHALLENGE_CLASSES["dynamic_docker"] = DynamicValueDockerChallenge
    register_plugin_assets_directory(
        app, base_path="/plugins/ctfd-whale/assets/"
    )

    page_blueprint = Blueprint(
        "ctfd-whale",
        __name__,
        template_folder="templates",
        static_folder="assets",
        url_prefix="/plugins/ctfd-whale"
    )

    # ---- admin endpoints -------------------------------------------------

    @page_blueprint.route('/admin/settings', methods=['GET'])
    @admins_only
    def admin_list_configs():
        # Render the plugin settings page with all stored config values.
        configs = DBUtils.get_all_configs()
        return render_template('config.html', configs=configs)

    @page_blueprint.route('/admin/settings', methods=['PATCH'])
    @admins_only
    def admin_save_configs():
        # Persist the submitted settings wholesale.
        req = request.get_json()
        DBUtils.save_all_configs(req.items())
        return json.dumps({'success': True})

    @page_blueprint.route("/admin/containers", methods=['GET'])
    @admins_only
    def admin_list_containers():
        # Paginated list of live containers, 50 per page.
        configs = DBUtils.get_all_configs()
        page = abs(request.args.get("page", 1, type=int))
        results_per_page = 50
        page_start = results_per_page * (page - 1)
        page_end = results_per_page * (page - 1) + results_per_page

        count = DBUtils.get_all_alive_container_count()
        containers = DBUtils.get_all_alive_container_page(page_start, page_end)

        # Ceiling division: total pages needed for `count` rows.
        pages = int(count / results_per_page) + (count % results_per_page > 0)
        return render_template("containers.html", containers=containers, pages=pages, curr_page=page,
                               curr_page_start=page_start, configs=configs)

    @page_blueprint.route("/admin/containers", methods=['DELETE'])
    @admins_only
    def admin_delete_container():
        # Force-remove a user's container.
        user_id = request.args.get('user_id')
        ControlUtil.remove_container(user_id)
        return json.dumps({'success': True})

    @page_blueprint.route("/admin/containers", methods=['PATCH'])
    @admins_only
    def admin_renew_container():
        # Extend a user's container lifetime without renewal-count limits.
        user_id = request.args.get('user_id')
        challenge_id = request.args.get('challenge_id')
        DBUtils.renew_current_container(user_id=user_id, challenge_id=challenge_id)
        return json.dumps({'success': True})

    # ---- user endpoints --------------------------------------------------

    @page_blueprint.route('/container', methods=['POST'])
    @authed_only
    def add_container():
        # Create a fresh challenge container for the current user.
        user_id = current_user.get_current_user().id
        lock_util = LockUtils(app=app, user_id=user_id)

        if not lock_util.acquire():
            return json.dumps({'success': False, 'msg': 'Request Too Fast!'})

        # NOTE(review): the early returns below do not call
        # lock_util.release() — confirm LockUtils self-expires.
        if ControlUtil.frequency_limit():
            return json.dumps({'success': False, 'msg': 'Frequency limit, You should wait at least 1 min.'})

        # Any previous container for this user is replaced.
        ControlUtil.remove_container(user_id)
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id, user_id)

        configs = DBUtils.get_all_configs()
        current_count = DBUtils.get_all_alive_container_count()
        if int(configs.get("docker_max_container_count")) <= int(current_count):
            return json.dumps({'success': False, 'msg': 'Max container count exceed.'})

        dynamic_docker_challenge = DynamicDockerChallenge.query \
            .filter(DynamicDockerChallenge.id == challenge_id) \
            .first_or_404()
        # Per-instance random flag.
        flag = "flag{" + str(uuid.uuid4()) + "}"
        if dynamic_docker_challenge.redirect_type == "http":
            ControlUtil.add_container(user_id=user_id, challenge_id=challenge_id, flag=flag)
        else:
            # Direct-redirect challenges: pick a random frp port that is
            # not already taken by another container.
            port = random.randint(int(configs.get("frp_direct_port_minimum")),
                                  int(configs.get("frp_direct_port_maximum")))
            while True:
                if DBUtils.get_container_by_port(port) is None:
                    break
                port = random.randint(int(configs.get("frp_direct_port_minimum")),
                                      int(configs.get("frp_direct_port_maximum")))
            ControlUtil.add_container(user_id=user_id, challenge_id=challenge_id, flag=flag, port=port)

        lock_util.release()
        return json.dumps({'success': True})

    @page_blueprint.route('/container', methods=['GET'])
    @authed_only
    def list_container():
        # Report the user's current container (connection info + TTL).
        user_id = current_user.get_current_user().id
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id, user_id)
        data = DBUtils.get_current_containers(user_id=user_id)
        configs = DBUtils.get_all_configs()
        domain = configs.get('frp_http_domain_suffix', "")
        if data is not None:
            # A container for a different challenge is reported as none.
            if int(data.challenge_id) != int(challenge_id):
                return json.dumps({})
            dynamic_docker_challenge = DynamicDockerChallenge.query \
                .filter(DynamicDockerChallenge.id == data.challenge_id) \
                .first_or_404()
            lan_domain = str(user_id) + "-" + data.uuid
            if dynamic_docker_challenge.redirect_type == "http":
                # Omit the port suffix when frp serves on the default port 80.
                if int(configs.get('frp_http_port', "80")) == 80:
                    return json.dumps({'success': True, 'type': 'http', 'domain': data.uuid + domain,
                                       'remaining_time': 3600 - (datetime.now() - data.start_time).seconds,
                                       'lan_domain': lan_domain})
                else:
                    return json.dumps({'success': True, 'type': 'http',
                                       'domain': data.uuid + domain + ":" + configs.get('frp_http_port', "80"),
                                       'remaining_time': 3600 - (datetime.now() - data.start_time).seconds,
                                       'lan_domain': lan_domain})
            else:
                return json.dumps({'success': True, 'type': 'redirect', 'ip': configs.get('frp_direct_ip_address', ""),
                                   'port': data.port,
                                   'remaining_time': 3600 - (datetime.now() - data.start_time).seconds,
                                   'lan_domain': lan_domain})
        else:
            return json.dumps({'success': True})

    @page_blueprint.route('/container', methods=['DELETE'])
    @authed_only
    def remove_container():
        # Destroy the user's current container.
        user_id = current_user.get_current_user().id
        lock_util = LockUtils(app=app, user_id=user_id)
        if not lock_util.acquire():
            return json.dumps({'success': False, 'msg': 'Request Too Fast!'})

        if ControlUtil.frequency_limit():
            return json.dumps({'success': False, 'msg': 'Frequency limit, You should wait at least 1 min.'})

        if ControlUtil.remove_container(user_id):
            lock_util.release()

            return json.dumps({'success': True})
        else:
            # NOTE(review): lock is not released on this failure path.
            return json.dumps({'success': False, 'msg': 'Failed when destroy instance, please contact admin!'})

    @page_blueprint.route('/container', methods=['PATCH'])
    @authed_only
    def renew_container():
        # Extend the user's container lifetime, bounded by the renew limit.
        user_id = current_user.get_current_user().id
        lock_util = LockUtils(app=app, user_id=user_id)
        if not lock_util.acquire():
            return json.dumps({'success': False, 'msg': 'Request Too Fast!'})

        if ControlUtil.frequency_limit():
            return json.dumps({'success': False, 'msg': 'Frequency limit, You should wait at least 1 min.'})

        configs = DBUtils.get_all_configs()
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id, user_id)
        docker_max_renew_count = int(configs.get("docker_max_renew_count"))
        container = DBUtils.get_current_containers(user_id)
        if container is None:
            return json.dumps({'success': False, 'msg': 'Instance not found.'})
        if container.renew_count >= docker_max_renew_count:
            return json.dumps({'success': False, 'msg': 'Max renewal times exceed.'})
        DBUtils.renew_current_container(user_id=user_id, challenge_id=challenge_id)
        lock_util.release()
        return json.dumps({'success': True})

    def auto_clean_container():
        # Scheduler job: destroy expired containers and refresh frp routes.
        with app.app_context():
            results = DBUtils.get_all_expired_container()
            for r in results:
                ControlUtil.remove_container(r.user_id)

            FrpUtils.update_frp_redirect()

    app.register_blueprint(page_blueprint)

    try:
        # Exclusive, non-blocking file lock so only one worker process
        # runs the cleanup scheduler; losers get IOError and skip it.
        lock_file = open("/tmp/ctfd_whale.lock", "w")
        lock_fd = lock_file.fileno()
        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)

        scheduler = APScheduler()
        scheduler.init_app(app)
        scheduler.start()
        scheduler.add_job(id='whale-auto-clean', func=auto_clean_container, trigger="interval", seconds=10)
        print("[CTFd Whale]Started successfully")
    except IOError:
        pass
Exemplo n.º 15
0
def handleAPISchedule():
    """Schedule the church-music API poller to run every 30 minutes."""
    api_scheduler = APScheduler()
    api_scheduler.add_job(func=callChurchMusicAPI, args=[], trigger='interval', id='callApiJob', minutes=30)
    api_scheduler.start()
    print("Music API Scheduler start!")
Exemplo n.º 16
0
@app.route('/streams', methods=['GET'])
def get_streams():
    """Return the collector's current stream information as JSON."""
    resp = jsonify(data_collector.stream_infos)
    resp.status_code = 200
    print(resp)  # kept from the original: logs the response object
    return resp


@app.route('/audio_devices')
def get_audio_devices():
    """List input-capable audio devices as JSON.

    Excludes "Sound Mapper" virtual devices and any device reporting
    zero input channels.
    """
    p = pyaudio.PyAudio()
    try:
        devices = [p.get_device_info_by_index(i) for i in range(p.get_device_count())]
    finally:
        # The original leaked the PortAudio session; always release it.
        p.terminate()
    devices = [
        device for device in devices
        if 'Sound Mapper' not in device['name'] and device['maxInputChannels']
    ]
    resp = jsonify(devices)
    resp.status_code = 200
    return resp


if __name__ == '__main__':
    # Start the background cron scheduler and ensure it is shut down at
    # process exit (wait=False: do not block on in-flight jobs).
    cron.start()
    atexit.register(lambda: cron.shutdown(wait=False))
    # use_reloader=False keeps the reloader from starting cron twice.
    app.run(debug=True, threaded=True, use_reloader=False, port=5010)
Exemplo n.º 17
0
    ping_timeout=10,
    ping_interval=5,
    cors_allowed_origins=flask_app.config['SOCKETIO_CORS_ALLOWED_ORIGIN'])

# Allow <int:...> URL parameters via the custom converter.
flask_app.url_map.converters['int'] = IntegerConverter

login_manager.init_app(flask_app)
login_manager.login_message = 'Je moet aangemeld zijn om deze pagina te zien!'
login_manager.login_view = 'auth.login'

migrate = Migrate(flask_app, db)

# NOTE(review): this sets a module-level global, not
# flask_app.config['SCHEDULER_API_ENABLED'] — it likely has no effect
# on the scheduler; confirm intent.
SCHEDULER_API_ENABLED = True
message_scheduler = APScheduler()
message_scheduler.init_app(flask_app)
message_scheduler.start()

# SOAP client for the Smartschool endpoint from app config.
soap = Client(flask_app.config['SMARTSCHOOL_URL'])


def create_admin():
    """Create the default 'admin' user if it does not exist yet."""
    from app.data.models import User

    if User.query.filter(User.username == 'admin').first():
        return  # already provisioned

    admin = User(username='******',
                 password='******',
                 level=User.LEVEL.ADMIN,
                 user_type=User.USER_TYPE.LOCAL)
    db.session.add(admin)
    db.session.commit()
Exemplo n.º 18
0
        if (sender['card'] == ""):
            #没设置群名片
            user_info = dbconn.get_friend_info(user_id)
            if (user_info):
                #好友库中有此人
                user_name = user_info['mark_name']
            else:
                user_name = sender['nickname']
        else:
            user_name = sender['card']
        goapi_recv.sendMsg(
            TARGET_USER_ID,
            f"[{user_name}]-{group_info['group_name']}\n{message}")

        #群聊消息,仅当成功转发时增加好友计数
        dbconn.count_plus(user_id)
    else:
        #暂时不转发的消息逻辑
        pass


if __name__ == "__main__":
    # Refresh the friend and group databases before every run.
    dbconn.update_friends_info(goapi_recv.get_friends_list())
    dbconn.update_group_info(goapi_recv.get_group_list())

    scheduler = APScheduler()  # instantiate APScheduler
    scheduler.init_app(app)  # load the task list into the Flask instance
    scheduler.start()  # start the scheduled tasks
    app.run(host=flask_addr, port=flask_port, debug=False)
Exemplo n.º 19
0
class TestAPI(TestCase):
    """End-to-end tests for the scheduler's REST API.

    Each test drives the HTTP endpoints through the Flask test client
    and pins the expected status codes and JSON payloads.
    """

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.api_enabled = True
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def test_scheduler_info(self):
        # GET on the bare prefix returns scheduler metadata.
        response = self.client.get(self.scheduler.api_prefix)
        self.assertEqual(response.status_code, 200)
        info = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(info['current_host'])
        self.assertEqual(info['allowed_hosts'], ['*'])
        self.assertTrue(info['running'])

    def test_add_job(self):
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }

        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 200)

        # The response echoes the stored job definition.
        job2 = json.loads(response.get_data(as_text=True))

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('run_date'), job2.get('run_date'))

    def test_add_conflicted_job(self):
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }

        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 200)

        # Re-posting the same job id must yield 409 Conflict.
        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 409)

    def test_add_invalid_job(self):
        # A job without a usable id is rejected with a server error.
        job = {
            'id': None,
        }

        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 500)

    def test_delete_job(self):
        self.__add_job()

        response = self.client.delete(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 204)

        # The deleted job is no longer retrievable.
        response = self.client.get(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_delete_job_not_found(self):
        response = self.client.delete(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_get_job(self):
        job = self.__add_job()

        response = self.client.get(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 200)

        job2 = json.loads(response.get_data(as_text=True))

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('minutes'), job2.get('minutes'))

    def test_get_job_not_found(self):
        response = self.client.get(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_get_all_jobs(self):
        job = self.__add_job()

        response = self.client.get(self.scheduler.api_prefix + '/jobs')
        self.assertEqual(response.status_code, 200)

        jobs = json.loads(response.get_data(as_text=True))

        self.assertEqual(len(jobs), 1)

        job2 = jobs[0]

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('minutes'), job2.get('minutes'))

    def test_update_job(self):
        job = self.__add_job()

        # Switch the job from interval to cron and set a start date.
        data_to_update = {
            'args': [1],
            'trigger': 'cron',
            'minute': '*/1',
            'start_date': '2021-01-01'
        }

        response = self.client.patch(self.scheduler.api_prefix + '/jobs/job1', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 200)

        job2 = json.loads(response.get_data(as_text=True))

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(data_to_update.get('args'), job2.get('args'))
        self.assertEqual(data_to_update.get('trigger'), job2.get('trigger'))
        self.assertEqual('2021-01-01T00:00:00+00:00', job2.get('start_date'))
        self.assertEqual('2021-01-01T00:00:00+00:00', job2.get('next_run_time'))

    def test_update_job_not_found(self):
        data_to_update = {
            'args': [1],
            'trigger': 'cron',
            'minute': '*/1',
            'start_date': '2021-01-01'
        }

        response = self.client.patch(self.scheduler.api_prefix + '/jobs/job1', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 404)

    def test_update_invalid_job(self):
        self.__add_job()

        # An unknown trigger type is rejected with a server error.
        data_to_update = {
            'trigger': 'invalid_trigger',
        }

        response = self.client.patch(self.scheduler.api_prefix + '/jobs/job1', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 500)

    def test_pause_and_resume_job(self):
        self.__add_job()

        # Pausing clears next_run_time; resuming restores it.
        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/pause')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNone(job.get('next_run_time'))

        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/resume')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('next_run_time'))

    def test_pause_and_resume_job_not_found(self):
        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/pause')
        self.assertEqual(response.status_code, 404)

        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/resume')
        self.assertEqual(response.status_code, 404)

    def test_run_job(self):
        self.__add_job()

        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/run')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('next_run_time'))

    def test_run_job_not_found(self):
        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/run')
        self.assertEqual(response.status_code, 404)

    def __add_job(self):
        # Helper: POST a canonical interval job and return the API's echo.
        # (Name-mangled to keep unittest from collecting it as a test.)
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'interval',
            'minutes': 10,
        }

        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        return json.loads(response.get_data(as_text=True))
Exemplo n.º 20
0
def get_app():
    """Create (or return the cached) Flask app wired to Docker worker pools.

    Loads tracker-client plugins from ``tracker_client_plugins.yaml``,
    configures Redis + in-memory APScheduler job stores, spawns the worker
    pool, starts the scheduler and re-attaches workers for any cron jobs
    persisted in Redis from a previous run.
    """
    import atexit
    import os

    import yaml
    from flask import Flask
    from flask_apscheduler import APScheduler
    from apscheduler.jobstores.redis import RedisJobStore
    from apscheduler.jobstores.memory import MemoryJobStore

    from tracker_client_plugins import tracker_clients
    import docker_worker_pool
    from docker_worker_pool import get_cron_workers, DockerWorker

    global _app
    if _app is not None:
        return _app

    _app = Flask(__name__)

    # Load tracker plugins. A missing config file now leaves tracker_dict
    # empty; previously the name was left undefined on FileNotFoundError and
    # the job-store setup below crashed with NameError.
    tracker_dict = {}
    try:
        with open('tracker_client_plugins.yaml', 'r') as f:
            # NOTE(review): yaml.load with FullLoader can construct some
            # Python objects; prefer yaml.safe_load if the file is plain data.
            tracker_dict = yaml.load(f, Loader=yaml.FullLoader) or {}

        for plugin_name, kwargs in tracker_dict.items():
            tracker_clients.add(plugin_name, **kwargs)
    except FileNotFoundError:
        pass

    # Redis connection settings come from the plugin config when present.
    redis_conf = tracker_dict.get('redis_tracker_client', {})
    job_stores = {
        'redis': RedisJobStore(host=redis_conf.get('host', 'localhost'),
                               port=redis_conf.get('port', 6379)),
        'default': MemoryJobStore()
    }

    _app.config['SCHEDULER_JOBSTORES'] = job_stores

    scheduler = APScheduler()

    # it is also possible to enable the API directly
    # scheduler.api_enabled = True
    scheduler.init_app(_app)
    atexit.register(lambda: scheduler.shutdown(wait=False))

    num_workers = int(os.environ.get("NUM_WORKERS", 1))
    for _ in range(num_workers):
        docker_worker_pool.add()

    scheduler.start()

    # Re-attach cron workers for jobs that survived a restart in Redis.
    loaded_scheduled_jobs = scheduler.get_jobs(jobstore='redis')
    _cron_workers = get_cron_workers()
    if loaded_scheduled_jobs:
        for cron_job in loaded_scheduled_jobs:
            cron_worker_index = docker_worker_pool.get_cron_worker_index(
                cron_job.id)
            _cron_workers[cron_worker_index] = DockerWorker(
                cron_job.id, cron_job)

    return _app
Exemplo n.º 21
0
def index():
    """Landing page: plain-text greeting."""
    greeting = "Welcome to the scheduler!"
    return greeting


def scheduled_task():
    """Pipeline job: fetch data from Drive, process it, build a spreadsheet
    and email it (attachment path: utils/)."""
    # fetch the raw comma-separated payload from Google Drive
    raw = drive.get_data_file()
    # one record per line, fields split on ','
    rows = [line.strip().split(',') for line in raw.split('\n')]
    # run the processing step, skipping the header row
    processed = scheduler.data_processing(rows[1:])
    # materialise the result as a spreadsheet
    spreadsheet.create(processed)
    # send the spreadsheet as an email attachment
    gmail.send()


def counter():
    """Placeholder job: emit the string "1" to stdout."""
    message = "1"
    print(message)


if __name__ == '__main__':
    # Schedule the pipeline to run every 60 seconds and start the dev server.
    # (The commented-out variant ran it weekly via a cron trigger instead.)
    # apsscheduler.add_job(id='Scheduled task', func=scheduled_task, trigger='cron', day_of_week=0)
    apsscheduler.add_job(id='Scheduled task',
                         func=scheduled_task,
                         trigger='interval',
                         seconds=60)
    apsscheduler.start()
    # NOTE(review): debug=True enables the Werkzeug reloader, which imports
    # the module twice and can start the scheduler twice — confirm intended.
    app.run(host='0.0.0.0', port=8080, debug=True)
    # app.run(host='0.0.0.0', port=8080, debug=False)
Exemplo n.º 22
0
                cache_json.pop(key)
    with open(cache_json_path, "w") as f:
        json.dump(cache_json, f, indent=2)


app = Flask(__name__)
# Concurrent in-memory LRU caches, entries expire after one hour:
# `cache` (100 entries) and `files` (5000 entries, used for file names).
cache = lru.LRUCacheDict(max_size=100, expiration=60 * 60, concurrent=True)
files = lru.LRUCacheDict(max_size=5000, expiration=60 * 60, concurrent=True)

buffer_size = 1024 * 1024  # 1MB
# prefix for building full watch URLs from bare video ids
youtube_url = "https://www.youtube.com/watch?v="

app.config.from_object(Config())
Scheduler.init_app(app)

Scheduler.start()


class VideoInfo:
    """Lightweight record describing a downloadable video variant.

    Attributes mirror the constructor arguments: video id, human-readable
    title, direct video/audio URLs, container extension, and size.
    """

    def __init__(self, id, title, url, audio_url, extension, size):
        self.id = id
        self.title = title
        self.url = url
        self.audio_url = audio_url
        self.extension = extension
        self.size = size

    def __repr__(self):
        # Added for debuggability; no caller relied on the default repr.
        return (f"{type(self).__name__}(id={self.id!r}, title={self.title!r}, "
                f"extension={self.extension!r}, size={self.size!r})")


def get_file_by_id(video_id):
    """Record the mp4 file name for *video_id* in the module-level `files` cache.

    NOTE(review): despite the name this only writes the mapping and returns
    nothing — confirm callers expect no return value.
    """
    key = '%s.mp4' % (video_id,)
    files[key] = video_id
Exemplo n.º 23
0
class App(FlaskApp):
    """Connexion/Flask application wrapper that optionally bolts on common
    microservice features: tracing, metrics, CORS, response optimization,
    a default JSON error handler and a background scheduler."""

    def __init__(
        self,
        name,
        use_tracer=None,
        use_metric=False,
        use_logging_level=logging.DEBUG,
        use_optimizer=None,
        use_cors=None,
        use_default_error=None,
        use_scheduler=None,
        all=None,
        flaskName=None,
        *args,
        **kwargs,
    ):
        # TODO: Add more text here for current situation
        """
        Initialize Flask App with multiple microservice-related and usability features.

        None is for deactivating, so it equals to False.

        *use_tracer* must be of type: bool (True for defaults, False for deactivating),
        opentracing.Tracer (use it for configuration), dict (use default opentracing.Tracer and the given dict as config) or defaults: None

        *use_metric* must be of type: bool (True for defaults, False for deactivating) or defaults: None

        *use_logging_level* must be of type: logging.{INFO, WARNING, ERROR, DEBUG}, defaults: DEBUG
        *use_optimizer* must be of type: bool (True for defaults, False for deactivating) or defaults: None
        *use_cors* must be of type: bool (True for defaults, False for deactivating) or defaults: None
        *use_default_error* must be of type: bool (True for defaults, False for deactivating) or defaults: None
        *use_scheduler* must be of type: bool (True for defaults, False for deactivating) or defaults: None
        *all* must be of type: bool (True for use all functions with defaults, False for deactivating all functions) or defaults: None
        """
        if flaskName is None:
            flaskName = __name__

        super().__init__(flaskName, *args, **kwargs)
        logger = logging.getLogger("")

        self.name = name
        # Feature handles; populated below only when the feature is enabled.
        self.metrics = None
        self.tracing = None
        self.optimize = None
        self.cors = None
        self.default_errorhandler = None
        self.scheduler = None

        # `all` switches every feature on with its default configuration.
        if all is not None and all is not False:
            use_tracer = True
            use_metric = True
            use_optimizer = True
            use_cors = True
            use_default_error = True
            use_scheduler = True

        logger.info("--- Start Connexion-Plus ---")

        if not isinstance(self.app, (Flask, FlaskApp)):
            logger.warning(
                "Given App is not flask, so it cannot get any functionality added from this lib currently."
            )
            return

        # add default error
        if use_default_error is not None and use_default_error is not False:
            from werkzeug.exceptions import HTTPException
            from werkzeug.exceptions import default_exceptions

            logger.info("Add default error handler to Flask...")

            if callable(use_default_error):
                # a caller-supplied handler wins over the built-in one
                self.default_errorhandler = use_default_error

                logger.info("use given handler.")

            else:

                def handle_error(e):
                    # Map any exception to a JSON error payload; HTTP
                    # exceptions keep their status code, everything else
                    # becomes a 500.
                    code = 500
                    if isinstance(e, HTTPException):
                        code = e.code

                    error = {
                        "error": e.__class__.__name__,
                        "http_code": code,
                        "description": str(e),
                    }
                    logger.exception(error)
                    return jsonify(error), code

                self.default_errorhandler = handle_error

                logger.info("use default one")

            # register for all json exceptions
            self.app.register_error_handler(Exception,
                                            self.default_errorhandler)

            # register handler for all http exceptions
            for ex in default_exceptions:
                self.app.register_error_handler(ex, self.default_errorhandler)

        if use_scheduler is not None and use_scheduler is not False:
            logger.info("Add background scheduler to Flask")
            from flask_apscheduler import APScheduler

            self.scheduler = APScheduler()
            self.scheduler.init_app(self.app)
            self.scheduler.start()

        # add optimizer
        if use_optimizer is not None and use_optimizer is not False:
            logger.info("Add optimizer to Flask...")
            from .Optimizer import FlaskOptimize

            # dict -> explicit settings; bare True -> compress + minify
            config = {"compress": False, "minify": False}
            if isinstance(use_optimizer, dict):
                config.update(use_optimizer)

            if isinstance(use_optimizer, bool) and use_optimizer:
                config.update({"compress": True, "minify": True})

            logger.info("use config {}.".format(config))

            self.optimize = FlaskOptimize(self.app, config)

        # add CORS
        if use_cors is not None and use_cors is not False:
            logger.info("Add cors to Flask...")
            from flask_cors import CORS

            if isinstance(use_cors, dict):
                logger.info("use given settings.")
                self.cors = CORS(self.app, resources=use_cors)
            else:
                logger.info("use default ones.")
                self.cors = CORS(self.app)

            logger.info("CORS added.")

        # add prometheus
        if use_metric is not None and use_metric is not False:
            # TODO: add configuration https://github.com/rycus86/prometheus_flask_exporter#configuration

            from prometheus_flask_exporter import PrometheusMetrics

            self.metrics = PrometheusMetrics(self.app)
            logger.info("Add prometheus to Flask")

        # add tracing
        if use_tracer is not None and use_tracer is not False:
            logger.info("Add opentracing to Flask...")
            # add tracing to all routes in flaskApp
            from flask_opentracing import FlaskTracing
            import opentracing
            from functools import wraps
            from flask import request

            def wrapper(fn):
                # Skip tracing for /metrics so Prometheus scrapes do not
                # generate spans.
                @wraps(fn)
                def request_func(*args, **kwargs):
                    if request.path != "/metrics":
                        return fn(*args, **kwargs)

                return request_func

            FlaskTracing._before_request_fn = wrapper(
                FlaskTracing._before_request_fn)
            FlaskTracing._after_request_fn = wrapper(
                FlaskTracing._after_request_fn)

            config = None
            if not isinstance(use_tracer, opentracing.Tracer):
                logger.info("use default one.")
                from jaeger_client import Config as jConfig

                # default Jaeger config: const sampler, local agent
                tracer_config = {
                    "sampler": {
                        "type": "const",
                        "param": 1,
                    },
                    "local_agent": {
                        "reporting_host": "jaeger-agent",
                        "reporting_port": 5775,
                    },
                    "logging": True,
                }

                if isinstance(use_tracer, dict):
                    tracer_config = use_tracer

                if isinstance(use_metric, bool) and use_metric is True:
                    logger.info("Use metrics for tracer.")
                    from jaeger_client.metrics.prometheus import (
                        PrometheusMetricsFactory, )

                    config = jConfig(
                        config=tracer_config,
                        service_name=f"{name}ConnexionPlus",
                        metrics_factory=PrometheusMetricsFactory(
                            namespace=f"{name}ConnexionPlus"),
                    )
                else:
                    logger.info("no metrics for tracer configured.")
                    config = jConfig(
                        config=tracer_config,
                        service_name=f"{name}ConnexionPlus",
                    )
            else:
                logger.info("use given tracer config.")

            tracer_obj = use_tracer if config is None else config.initialize_tracer(
            )
            self.tracing = FlaskTracing(tracer_obj, True, self.app)

            # add tracer to everything to support spans through multiple microservices via rpc-calls
            from opentracing_instrumentation.client_hooks import install_all_patches

            install_all_patches()
            logger.info("All tracing relevant libs patched.")

            # add a TracingHandler for Logging
            from .TracingHandler import TracingHandler

            th = TracingHandler(use_tracer)
            th.setLevel(use_logging_level)

            logging.getLogger("").addHandler(th)
            logger.info("Finished Tracer adding.")

        logger.info("--- Finished Connexion-Plus ---")
Exemplo n.º 24
0
def load(app):
    """CTFd plugin entry point for ctfd-whale.

    Registers the dynamic Docker challenge type, admin/user REST namespaces
    and admin pages, then (in the single worker that wins the /tmp file lock)
    starts the APScheduler job that cleans up expired containers and keeps
    the frpc proxy configuration in sync.
    """
    # upgrade()
    plugin_name = __name__.split('.')[-1]
    app.db.create_all()
    if not DBConfig.get_config("setup"):
        setup_default_configs()
    CHALLENGE_CLASSES["dynamic_docker"] = DynamicValueDockerChallenge
    register_plugin_assets_directory(
        app, base_path="/plugins/" + plugin_name + "/assets/"
    )

    page_blueprint = Blueprint(
        "ctfd-whale",
        __name__,
        template_folder="templates",
        static_folder="assets",
        url_prefix="/plugins/ctfd-whale"
    )
    register_admin_plugin_menu_bar(
        'Whale', '/plugins/ctfd-whale/admin/settings'
    )
    CTFd_API_v1.add_namespace(admin_namespace, path="/plugins/ctfd-whale/admin")
    CTFd_API_v1.add_namespace(user_namespace, path="/plugins/ctfd-whale")

    @page_blueprint.route('/admin/settings', methods=['GET', 'POST'])
    @admins_only
    def admin_list_configs():
        """Render (and on POST update) the plugin settings page."""
        if request.method == 'POST':
            data = request.form.to_dict()
            data.pop('nonce')
            DBConfig.set_all_configs(data)
            RedisUtils(app=current_app).init_redis_port_sets()
        session["nonce"] = generate_nonce()
        configs = DBConfig.get_all_configs()
        return render_template('config.html', configs=configs)

    @page_blueprint.route("/admin/containers")
    @admins_only
    def admin_list_containers():
        """Paginated admin view of running challenge containers."""
        result = AdminContainers.get()
        return render_template("containers.html",
                               plugin_name=plugin_name,
                               containers=result['data']['containers'],
                               pages=result['data']['pages'],
                               curr_page=abs(request.args.get("page", 1, type=int)),
                               curr_page_start=result['data']['page_start'])

    def auto_clean_container():
        """Scheduled job: remove expired containers, then push the frp config
        for all still-alive containers to the frpc admin API."""
        with app.app_context():
            results = DBContainer.get_all_expired_container()
            for r in results:
                ControlUtil.try_remove_container(r.user_id)

            configs = DBConfig.get_all_configs()
            containers = DBContainer.get_all_alive_container()

            config = ''.join([c.frp_config for c in containers])

            try:
                # you can authorize a connection by setting
                # frp_url = http://user:pass@ip:port
                frp_addr = configs.get("frp_api_url")
                if not frp_addr:
                    frp_addr = f'http://{configs.get("frp_api_ip", "frpc")}:{configs.get("frp_api_port", "7400")}'
                    # backward compatibility
                # BUGFIX: was lstrip("/"), which strips *leading* slashes and
                # therefore never removed a trailing slash from the URL;
                # rstrip is what is needed before appending /api/... paths.
                frp_addr = frp_addr.rstrip("/")
                common = configs.get("frp_config_template")
                # guard: template may be unset (None); fall back to fetching
                # the current config from frpc instead of raising TypeError
                if common and '[common]' in common:
                    output = common + config
                else:
                    remote = requests.get(f'{frp_addr}/api/config')
                    assert remote.status_code == 200
                    configs["frp_config_template"] = remote.text
                    output = remote.text + config
                assert requests.put(
                    f'{frp_addr}/api/config', output, timeout=5
                ).status_code == 200
                assert requests.get(
                    f'{frp_addr}/api/reload', timeout=5
                ).status_code == 200
            except (requests.RequestException, AssertionError) as e:
                # chain the original failure for easier debugging
                raise WhaleError(
                    'frpc request failed\n' +
                    'please check the frp related configs'
                ) from e

    app.register_blueprint(page_blueprint)

    try:
        # File lock: only one process per host runs the cleanup scheduler,
        # even when the app is served by multiple workers.
        lock_file = open("/tmp/ctfd_whale.lock", "w")
        lock_fd = lock_file.fileno()
        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)

        scheduler = APScheduler()
        scheduler.init_app(app)
        scheduler.start()
        scheduler.add_job(
            id='whale-auto-clean', func=auto_clean_container,
            trigger="interval", seconds=10
        )

        redis_util = RedisUtils(app=app)
        redis_util.init_redis_port_sets()

        print("[CTFd Whale]Started successfully")
    except IOError:
        # another worker already holds the lock; skip scheduler startup
        pass
Exemplo n.º 25
0
# manage  startup script
# app     application entry point
#   __init__.py
#   models.py   data models
#   static      static assets
#   home        front-office site
#       __init__.py
#       views.py    front-office views
#       forms.py    forms
#   admin
#       __init__.py
#       views.py    admin views
#       forms.py    admin forms
#   templates
#       home        front-office templates
#       admin       admin templates

from flask_apscheduler import APScheduler
from app import app

if __name__ == "__main__":
    scheduler = APScheduler()  # instantiate APScheduler
    scheduler.init_app(app)  # hand the job list over to the Flask app
    scheduler.start()  # start running the scheduled jobs
    app.run(host='0.0.0.0', port=5000, debug=True)
Exemplo n.º 26
0
from flask_apscheduler import APScheduler
from resource.stockDB import start2
from flask_restful import Api

app = Flask(__name__)  # create the application object
api = Api(app)  # Flask-RESTful API wrapper
aps = APScheduler()  # scheduler instance; configured and started in __main__


class schedulerConfig(object):
    """Flask-APScheduler config: one 5-second interval job + REST API enabled."""
    # Job table consumed by APScheduler on init_app(): run
    # resource.stockDB:start2 every 5 seconds with no arguments.
    JOBS = [{
        'id': 'job1',
        'func': 'resource.stockDB:start2',
        'args': (),
        'trigger': 'interval',
        'seconds': 5
    }]
    SCHEDULER_API_ENABLED = True  # expose the scheduler REST endpoints


@app.route("/")
def index():
    """Health-check root route."""
    body = "hello world"
    return body


if __name__ == "__main__":
    app.debug = True
    # Load the job table, attach/start the scheduler, then serve on port 80.
    app.config.from_object(schedulerConfig())
    aps.init_app(app)
    aps.start()
    app.run(host='0.0.0.0', port=80)
Exemplo n.º 27
0
    def run(self, isDaemon=False):
        """Configure logging/config, start the scheduler, and run the web UI.

        *isDaemon*: when True, the Flask server runs with debug disabled.
        """
        ####################################################
        #  Setup Logging
        ####################################################
        self.setup_logging()

        ####################################################
        #  Setup Configuration
        ####################################################
        # NOTE(review): SafeConfigParser is deprecated (removed in Py3.12);
        # ConfigParser is the modern equivalent — confirm target version.
        parser = SafeConfigParser()
        parser.read('RF.conf')
        # expose each config section as a feature named 'conf_<section>'
        for section_name in parser.sections():
            featureBroker.features.Provide(
                'conf_{name}'.format(name=section_name),
                dict(parser.items(section_name)))

        ####################################################
        #  Start Flask
        ####################################################

        app.web = Flask("__main__")

        # Create Scheduler
        # (the WERKZEUG_RUN_MAIN check avoids double-starting it when the
        # Werkzeug reloader imports the app a second time)
        if not app.web.debug or os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
            app.web.config['SCHEDULER_VIEWS_ENABLED'] = True
            app.web.config['SCHEDULER_JOBSTORES'] = {
                'default':
                MemoryJobStore(),
                'db':
                SQLAlchemyJobStore(
                    url='sqlite:///' +
                    featureBroker.RequiredFeature('conf_DB').result['jobs'])
            }
            scheduler = APScheduler()
            # every provided 'job' feature registers itself with the scheduler
            jobs = featureBroker.RequiredFeatures(
                'job', featureBroker.HasMethods('register')).result
            for job in jobs:
                job.register(scheduler.scheduler)
            scheduler.init_app(app.web)
            scheduler.start()
            featureBroker.features.Provide('scheduler', scheduler.scheduler)

        # Register Controllers
        controllers = featureBroker.RequiredFeatures(
            'controller', featureBroker.HasMethods('register')).result
        for controller in controllers:
            controller.register(app.web)

        # Set secret Key
        app.web.secret_key = 'A0ew:DE~7/T6yA^8vqNgjVB5tZr98j/3yX R~XHH!jmew:DE~7/T6yA^8vqNgjVB5tN]LWX/,?RT'

        # Flask-SQLAlchemy
        app.web.config[
            'SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + featureBroker.RequiredFeature(
                'conf_DB').result['rf']
        app.db = SQLAlchemy(app.web)
        entities.Base.metadata.create_all(app.db.engine, checkfirst=True)

        # RF Sniffer
        if not app.web.debug or os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
            sniffer = rfSniffer()

        app.web.run(host='0.0.0.0',
                    port=parser.getint("Web", "port"),
                    debug=(not isDaemon))
Exemplo n.º 28
0
class Server(object):
    """Flask + SocketIO server hosting an mTree experiment: admin dashboard,
    subject-facing pages, and the socket event handlers that drive it."""

    app = None

    def __init__(self):
        #  print("initializing " * 20)
        self.async_mode = 'eventlet'  # None
        self.app = Flask(__name__)
        self.app.config['SECRET_KEY'] = 'secret!'
        thread = None
        self.socketio = SocketIO(self.app, async_mode=self.async_mode)
        # resolve templates from the app first, then the packaged admin/user sets
        template_loader = jinja2.ChoiceLoader([
            self.app.jinja_loader,
            jinja2.PackageLoader('mTree', 'base/admin_templates'),
            jinja2.PackageLoader('mTree', 'base/user_templates')
        ])
        self.app.jinja_loader = template_loader

        self.app.config['BASIC_AUTH_USERNAME'] = '******'
        self.app.config['BASIC_AUTH_PASSWORD'] = '******'

        self.basic_auth = BasicAuth(self.app)

        self.add_routes()
        self.scheduler = APScheduler()
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        #self.scheduler.add_listener(self.my_listener, events.EVENT_ALL)

    def my_listener(self, event):
        """Debug hook: print every APScheduler event (currently unregistered)."""
        print("APSCHEDULER EVENT " + str(event))

    def run_server(self):
        """Start serving on all interfaces via SocketIO (blocking)."""
        print("RUNNING " * 20)
        self.socketio.run(self.app, host='0.0.0.0', debug=True)

    def attach_experiment(self, experiment):
        """Instantiate *experiment* (a class) and wire it to the emitter,
        socketio, app, db placeholder and scheduler."""
        self.experiment = experiment()
        self.experiment.attach_emitter(emit)
        self.experiment.attach_socketio(self.socketio)

        self.experiment.attach_app(self.app)
        self.experiment.attach_db(None)
        self.experiment.attach_scheduler(self.scheduler)

    def get_response(self, emit):
        """Build a Response wrapper around the given emitter."""
        return Response(emit, self.app, self.db)

    def add_scheduler(self, sched_function):
        """Schedule *sched_function* to run every 5 seconds."""
        self.scheduler.add_job(func=sched_function,
                               trigger=IntervalTrigger(seconds=5),
                               id="print_test",
                               name="print something",
                               replace_existing=True)

    def add_routes(self):
        """Register all HTTP routes and SocketIO event handlers."""
        @self.app.route('/admin_dashboard')  # URL path for the admin screen
        @self.basic_auth.required
        def index():
            return render_template('admin_base.html')

        @self.app.route('/static_content/<string:path>')
        def static_hosting(path):
            # serve experiment-provided static files
            static_content_location = self.experiment.get_static_content_location(
            )
            return send_from_directory(static_content_location, path)

        @self.app.route('/subject')  # URL path for the subject screen
        def not_search():
            # MTurk passes these identifiers as query parameters
            assignment_id = request.args.get('assignmentId')
            hit_id = request.args.get('hitId')
            turk_submit_to = request.args.get('turkSubmitTo')
            worker_id = request.args.get('workerId')

            if assignment_id == "ASSIGNMENT_ID_NOT_AVAILABLE":
                # display the preview screen... presumably
                context = {}
                template = Environment(loader=FileSystemLoader(
                    self.experiment.get_template_location()
                    or './')).get_template(
                        self.experiment.get_task_preview()).render(context)
                print("PREPARING FOR A PREVIEW...")
                return template
            else:
                return render_template('subject_base.html',
                                       async_mode=self.socketio.async_mode)

        @self.app.route(
            '/<string:experiment_id>/<request_page>'
        )  # TODO(@skunath) This is where it's failing. What's happening?
        def pageHandler(template):
            return render_template(template)

        @self.socketio.on('admin_control', namespace='/admin')
        def admin_control_message(message):
            #self.experiment.admin_event_handler(message)
            self.experiment.start_experiment()

        @self.socketio.on('user_configuration', namespace='/subject')
        def receive_user_configuration(message):
            # need to send user id information
            event = json.loads(message["data"])
            print(event)
            user_id = event["user_id"]
            assignment_id = event["assignmentId"]
            hit_id = event["hitId"]
            worker_id = event["workerId"]
            self.experiment.add_user_property(user_id, "assignment_id",
                                              assignment_id)
            self.experiment.add_user_property(user_id, "hit_id", hit_id)
            #self.experiment.add_user_property(user_id, "turk_submit_to", turk_submit_to)
            self.experiment.add_user_property(user_id, "worker_id", worker_id)
            # print("PUT OCCUR -- " + str(event))

        @self.socketio.on('put', namespace='/subject')
        def receive_put(message):
            # need to send user id information
            event = json.loads(message["data"])
            #print("PUT OCCUR -- " + str(event))
            self.experiment.event_handler(event)

        @self.socketio.on('join', namespace='/subject')
        def subjectJoin(message):
            print("\n\nSUBJECT JUST JOINED\n\n")

            join_room(message['room'])

        @self.socketio.on('connect', namespace='/subject')
        def subject_connect():
            # need to send user id information
            assignment_id = request.args.get('assignmentId')
            hit_id = request.args.get('hitId')
            turk_submit_to = request.args.get('turkSubmitTo')
            worker_id = request.args.get('workerId')

            user_id = self.experiment.create_user(request.sid)

            join_room(user_id)
            print("\nCONNECTED\nUser: {}\n\n".format(user_id))

            self.experiment.user_objects[user_id].display_welcome_screen(
            )  # display the welcome screen to the connected user

            self.experiment.check_experiment_state_to_run(
                user_id)  # Auto start when subjects connect

        @self.socketio.on('disconnect', namespace='/subject')
        def subject_disconnect():
            print("CLIENT DISCONNECTED")
            self.experiment.remove_user(
                request.sid
            )  # TODO(@messiest) Think of a better way to remove users
Exemplo n.º 29
0
class TestAPI(TestCase):
    """End-to-end tests for the flask_apscheduler REST API using a fresh
    scheduler and Flask test client per test."""

    def setUp(self):
        # fresh app + running scheduler with the REST API enabled
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.api_enabled = True
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def test_scheduler_info(self):
        """GET on the API root reports host, allowed hosts and run state."""
        response = self.client.get(self.scheduler.api_prefix)
        self.assertEqual(response.status_code, 200)
        info = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(info['current_host'])
        self.assertEqual(info['allowed_hosts'], ['*'])
        self.assertTrue(info['running'])

    def test_add_job(self):
        """POST /jobs creates a date-triggered job and echoes it back."""
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }

        response = self.client.post(self.scheduler.api_prefix + '/jobs',
                                    data=json.dumps(job))
        self.assertEqual(response.status_code, 200)

        job2 = json.loads(response.get_data(as_text=True))

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('run_date'), job2.get('run_date'))

    def test_add_conflicted_job(self):
        """Adding the same job id twice yields HTTP 409 (conflict)."""
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }

        response = self.client.post(self.scheduler.api_prefix + '/jobs',
                                    data=json.dumps(job))
        self.assertEqual(response.status_code, 200)

        response = self.client.post(self.scheduler.api_prefix + '/jobs',
                                    data=json.dumps(job))
        self.assertEqual(response.status_code, 409)

    def test_add_invalid_job(self):
        """A job payload without required fields yields HTTP 500."""
        job = {
            'id': None,
        }

        response = self.client.post(self.scheduler.api_prefix + '/jobs',
                                    data=json.dumps(job))
        self.assertEqual(response.status_code, 500)

    def test_delete_job(self):
        """DELETE removes the job (204) and subsequent GET returns 404."""
        self.__add_job()

        response = self.client.delete(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 204)

        response = self.client.get(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_delete_job_not_found(self):
        """DELETE on a missing job id returns 404."""
        response = self.client.delete(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_get_job(self):
        """GET /jobs/<id> returns the job as it was added."""
        job = self.__add_job()

        response = self.client.get(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 200)

        job2 = json.loads(response.get_data(as_text=True))

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('minutes'), job2.get('minutes'))

    def test_get_job_not_found(self):
        """GET on a missing job id returns 404."""
        response = self.client.get(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_get_all_jobs(self):
        """GET /jobs lists every scheduled job."""
        job = self.__add_job()

        response = self.client.get(self.scheduler.api_prefix + '/jobs')
        self.assertEqual(response.status_code, 200)

        jobs = json.loads(response.get_data(as_text=True))

        self.assertEqual(len(jobs), 1)

        job2 = jobs[0]

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('minutes'), job2.get('minutes'))

    def test_update_job(self):
        """PATCH replaces trigger/args and reschedules accordingly."""
        job = self.__add_job()

        data_to_update = {
            'args': [1],
            'trigger': 'cron',
            'minute': '*/1',
            'start_date': '2021-01-01'
        }

        response = self.client.patch(self.scheduler.api_prefix + '/jobs/job1',
                                     data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 200)

        job2 = json.loads(response.get_data(as_text=True))

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(data_to_update.get('args'), job2.get('args'))
        self.assertEqual(data_to_update.get('trigger'), job2.get('trigger'))
        self.assertEqual('2021-01-01T00:00:00+00:00', job2.get('start_date'))
        self.assertEqual('2021-01-01T00:00:00+00:00',
                         job2.get('next_run_time'))

    def test_update_job_not_found(self):
        """PATCH on a missing job id returns 404."""
        data_to_update = {
            'args': [1],
            'trigger': 'cron',
            'minute': '*/1',
            'start_date': '2021-01-01'
        }

        response = self.client.patch(self.scheduler.api_prefix + '/jobs/job1',
                                     data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 404)

    def test_update_invalid_job(self):
        """PATCHing with an unknown trigger type yields HTTP 500."""
        self.__add_job()

        data_to_update = {
            'trigger': 'invalid_trigger',
        }

        response = self.client.patch(self.scheduler.api_prefix + '/jobs/job1',
                                     data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 500)

    def test_pause_and_resume_job(self):
        """Pausing clears next_run_time; resuming restores it."""
        self.__add_job()

        response = self.client.post(self.scheduler.api_prefix +
                                    '/jobs/job1/pause')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNone(job.get('next_run_time'))

        response = self.client.post(self.scheduler.api_prefix +
                                    '/jobs/job1/resume')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('next_run_time'))

    def test_pause_and_resume_job_not_found(self):
        """Pause/resume on a missing job id both return 404."""
        response = self.client.post(self.scheduler.api_prefix +
                                    '/jobs/job1/pause')
        self.assertEqual(response.status_code, 404)

        response = self.client.post(self.scheduler.api_prefix +
                                    '/jobs/job1/resume')
        self.assertEqual(response.status_code, 404)

    def test_run_job(self):
        """Manually running an existing job succeeds and keeps it scheduled."""
        self.__add_job()

        response = self.client.post(self.scheduler.api_prefix +
                                    '/jobs/job1/run')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('next_run_time'))

    def test_run_job_not_found(self):
        """Running a missing job id returns 404."""
        response = self.client.post(self.scheduler.api_prefix +
                                    '/jobs/job1/run')
        self.assertEqual(response.status_code, 404)

    def __add_job(self):
        """Helper: add a fixed interval job and return the decoded response."""
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'interval',
            'minutes': 10,
        }

        response = self.client.post(self.scheduler.api_prefix + '/jobs',
                                    data=json.dumps(job))
        return json.loads(response.get_data(as_text=True))
Exemplo n.º 30
0
from flask import Flask
from config import Config
import os
from flask_apscheduler import APScheduler

import time
from google.cloud import language_v1


# Application module: builds the Flask app, starts the background scheduler,
# and creates the shared Google Natural Language client used by the jobs.
app = Flask(__name__)
app.config.from_object(Config)

# NOTE(review): "schedular" is a misspelling of "scheduler"; left unchanged in
# case the routes/jobs modules imported below reference it by this name —
# verify before renaming.
schedular = APScheduler()
schedular.init_app(app)
schedular.start()

# Shared NLP client; constructing it at import time requires Google
# credentials to be available in the environment.
client = language_v1.LanguageServiceClient()




# Imported last so the `app` and scheduler objects above already exist when
# the routes and job modules register themselves (circular-import workaround).
from . import routes
from .jobs.webScraper import webScrapeToJSONAndPush
Exemplo n.º 31
0
# Validation patterns for comma-separated id lists.
# CID: 3-20 word characters per entry (ASCII-only via re.ASCII below).
REGEXCIDSTRING = r"^[\w]{3,20}(?:,[\w]{3,20})*$"
# MGS: codes like "ABC-123", optionally prefixed with three digits.
REGEXMGSSTRING = r"^(?:\d{3})?[a-zA-Z]{2,6}-\d{3,5}(?:,(?:\d{3})?[a-zA-Z]{2,6}-\d{3,5})*$"
# PID: 3-25 word characters per entry.
REGEXPIDSTRING = r"^[\w]{3,25}(?:,[\w]{3,25})*$"
# REGEXNUMSTRING is defined earlier in the file (outside this view).
REGEXNUM = re.compile(REGEXNUMSTRING)
REGEXCID = re.compile(REGEXCIDSTRING, flags=re.ASCII)
REGEXMGS = re.compile(REGEXMGSSTRING)
REGEXPID = re.compile(REGEXPIDSTRING)


# Flask-Login: unauthenticated users are redirected to the "login" view.
LOGIN_MANAGER = LoginManager()
LOGIN_MANAGER.login_view = "login"
LOGIN_MANAGER.init_app(APP)

# Background job scheduler bound to the global APP.
SCHEDULER = APScheduler()
SCHEDULER.init_app(APP)
SCHEDULER.start()

# Socket.IO over eventlet.
# NOTE(review): cors_allowed_origins="*" accepts any origin — confirm this is
# intended for the deployment environment.
ASYNC_MODE = 'eventlet'
THREAD_LOCK = Lock()
SOCKETIO = SocketIO(async_mode=ASYNC_MODE)
SOCKETIO.init_app(APP,cors_allowed_origins="*")

class Task:
    """Mutable holder for background-task bookkeeping state.

    Attributes start in their "idle" configuration: zeroed counters, an
    empty work queue, no worker thread, and the push-log not yet finished.
    """

    def __init__(self):
        # Numeric bookkeeping: current id, position, and status flag.
        self.task_id = 0
        self.task_index = 0
        self.task_status = 0
        # Pending work items (FIFO).
        self.task_queue = deque()
        # Set once the push log has been fully processed.
        self.pushlog_finished = False
        # Lazily-created worker thread handle.
        self.background_thread = None
Exemplo n.º 32
0
from flask import Flask
from flask_apscheduler import APScheduler


class Config(object):
    """Flask-APScheduler configuration for the demo app."""

    # Job table consumed by APScheduler on init_app(): job1 is resolved from
    # this same module ('__main__:job1') and fires every 10 seconds with
    # positional args (1, 2).
    JOBS = [
        {
            'id': 'job1',
            'func': '__main__:job1',
            'args': (1, 2),
            'trigger': 'interval',
            'seconds': 10
        }
    ]

    # Enables the scheduler's built-in HTTP views.
    # NOTE(review): other snippets in this file use SCHEDULER_API_ENABLED —
    # confirm which flag the installed flask_apscheduler version honors.
    SCHEDULER_VIEWS_ENABLED = True


def job1(a, b):
    """Print the two job arguments separated by a single space."""
    print(' '.join((str(a), str(b))))

# Build the demo app, load the job table from Config, and start the scheduler
# before handing control to the development server.
app = Flask(__name__)
app.config.from_object(Config())
app.debug = True

scheduler = APScheduler()
scheduler.init_app(app)
scheduler.start()

# Blocking development server; job1 keeps firing while it runs.
app.run()
Exemplo n.º 33
0
            # 'args': (1, 2),
            'trigger': 'interval',
            'seconds': 5
        }
    ]

    SCHEDULER_API_ENABLED = True

app = Flask(__name__)
# Scheduler instance; it is initialized and started under the __main__ guard.
sche = APScheduler()
@app.route('/')
def hello():
    """Root endpoint: fixed greeting, doubles as a liveness check."""
    greeting = "hello world"
    return greeting

@app.route('/add')
def add_myjob():
    """Register job2 (interval, every 2s) on the app's scheduler."""
    job_spec = dict(func='my_jobs:job2', id='job2', trigger='interval', seconds=2)
    app.apscheduler.add_job(**job_spec)
    return 'add success'

@app.route('/add-cron')
def add_cron_job():
    """Register job3 (cron, every 3rd minute) on the app's scheduler."""
    job_spec = dict(func='my_jobs:job3', id='job3', trigger='cron', minute='*/3')
    app.apscheduler.add_job(**job_spec)
    return 'add success'

if __name__ == '__main__':
    print('Let us run out of the loop')
    app.config.from_object(Config())
    sche.init_app(app)
    sche.start()

    # NOTE(review): debug=True enables the reloader, which can start the
    # scheduler twice (once per reloader process) — verify before deploying.
    app.run(host='0.0.0.0', debug=True)
Exemplo n.º 34
0
def load(app):
    """CTFd plugin entry point for ctfd-owl.

    Registers the dynamic-docker challenge type and its assets, sets up a
    dedicated "owl" logger, wires the admin/user HTTP routes onto a
    blueprint, and — guarded by an exclusive file lock so only one worker
    does it — starts a background job that destroys expired containers.
    """
    # upgrade()
    app.db.create_all()
    CHALLENGE_CLASSES["dynamic_check_docker"] = DynamicCheckValueChallenge
    register_plugin_assets_directory(app,
                                     base_path="/plugins/ctfd-owl/assets/")
    owl_blueprint = Blueprint("ctfd-owl",
                              __name__,
                              template_folder="templates",
                              static_folder="assets",
                              url_prefix="/plugins/ctfd-owl")

    # Dedicated "owl" logger: rotating file in LOG_FOLDER plus stdout.
    log_dir = app.config["LOG_FOLDER"]
    logger_owl = logging.getLogger("owl")
    logger_owl.setLevel(logging.INFO)
    logs = {
        "owl": os.path.join(log_dir, "owl.log"),
    }
    try:
        # Touch the log files so the rotating handler can open them.
        for log in logs.values():
            if not os.path.exists(log):
                open(log, "a").close()
        container_log = logging.handlers.RotatingFileHandler(logs["owl"],
                                                             maxBytes=10000)
        logger_owl.addHandler(container_log)
    except IOError:
        # Best-effort: the plugin still works without file logging.
        pass

    stdout = logging.StreamHandler(stream=sys.stdout)
    logger_owl.addHandler(stdout)
    logger_owl.propagate = 0

    @owl_blueprint.route('/admin/settings', methods=['GET'])
    @admins_only
    # list plugin settings
    def admin_list_configs():
        configs = DBUtils.get_all_configs()
        return render_template('configs.html', configs=configs)

    @owl_blueprint.route('/admin/settings', methods=['PATCH'])
    @admins_only
    # modify plugin settings
    def admin_save_configs():
        req = request.get_json()
        DBUtils.save_all_configs(req.items())
        return jsonify({'success': True})

    @owl_blueprint.route("/admin/containers", methods=['GET'])
    @admins_only
    # list alive containers, 50 per page
    def admin_list_containers():
        configs = DBUtils.get_all_configs()
        page = abs(request.args.get("page", 1, type=int))
        results_per_page = 50
        page_start = results_per_page * (page - 1)
        page_end = results_per_page * (page - 1) + results_per_page

        count = DBUtils.get_all_alive_container_count()
        containers = DBUtils.get_all_alive_container_page(page_start, page_end)

        # Ceiling division expressed as int-div plus a remainder flag.
        pages = int(count / results_per_page) + (count % results_per_page > 0)
        return render_template("containers.html",
                               containers=containers,
                               pages=pages,
                               curr_page=page,
                               curr_page_start=page_start,
                               configs=configs)

    @owl_blueprint.route("/admin/containers", methods=['PATCH'])
    @admins_only
    # admin: force-expire one user's container for a challenge
    def admin_expired_container():
        user_id = request.args.get('user_id')
        challenge_id = request.args.get('challenge_id')
        ControlUtil.expired_container(user_id=user_id,
                                      challenge_id=challenge_id)
        return jsonify({'success': True})

    @owl_blueprint.route("/admin/containers", methods=['DELETE'])
    @admins_only
    # admin: destroy one user's container
    def admin_delete_container():
        user_id = request.args.get('user_id')
        ControlUtil.destroy_container(user_id)
        return jsonify({'success': True})

    # instances
    @owl_blueprint.route('/container', methods=['GET'])
    @authed_only
    # user: describe the caller's running container (connection info + TTL)
    def list_container():
        user_id = get_mode()
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id, user_id)
        data = ControlUtil.get_container(user_id=user_id)
        configs = DBUtils.get_all_configs()
        # NOTE(review): remaining_time is computed from docker_max_renew_count;
        # the name suggests it should come from a max-lifetime setting — verify.
        remain_time = int(configs.get("docker_max_renew_count"))
        domain = configs.get('frp_http_domain_suffix', "")
        if data is not None:
            # A container for a different challenge is treated as "none".
            if int(data.challenge_id) != int(challenge_id):
                return jsonify({})
            dynamic_docker_challenge = DynamicCheckChallenge.query \
                .filter(DynamicCheckChallenge.id == data.challenge_id) \
                .first_or_404()
            lan_domain = str(user_id) + "-" + data.docker_id

            if dynamic_docker_challenge.deployment == "single":
                # Single-deploy: direct ip:port redirect.
                return jsonify({
                    'success':
                    True,
                    'type':
                    'redirect',
                    'ip':
                    configs.get('frp_direct_ip_address', ""),
                    'port':
                    data.port,
                    'remaining_time':
                    remain_time -
                    (datetime.datetime.utcnow() - data.start_time).seconds,
                    'lan_domain':
                    lan_domain
                })
            else:
                if dynamic_docker_challenge.redirect_type == "http":
                    # HTTP redirect: omit the port suffix when frp serves on 80.
                    if int(configs.get('frp_http_port', "80")) == 80:
                        return jsonify({
                            'success':
                            True,
                            'type':
                            'http',
                            'domain':
                            data.docker_id + "." + domain,
                            'remaining_time':
                            remain_time - (datetime.datetime.utcnow() -
                                           data.start_time).seconds,
                            'lan_domain':
                            lan_domain
                        })
                    else:
                        return jsonify({
                            'success':
                            True,
                            'type':
                            'http',
                            'domain':
                            data.docker_id + "." + domain + ":" +
                            configs.get('frp_http_port', "80"),
                            'remaining_time':
                            remain_time - (datetime.datetime.utcnow() -
                                           data.start_time).seconds,
                            'lan_domain':
                            lan_domain
                        })
                else:
                    # Non-HTTP multi-deploy: direct ip:port redirect.
                    return jsonify({
                        'success':
                        True,
                        'type':
                        'redirect',
                        'ip':
                        configs.get('frp_direct_ip_address', ""),
                        'port':
                        data.port,
                        'remaining_time':
                        remain_time -
                        (datetime.datetime.utcnow() - data.start_time).seconds,
                        'lan_domain':
                        lan_domain
                    })
        else:
            # No container running for this user.
            return jsonify({'success': True})

    @owl_blueprint.route('/container', methods=['POST'])
    @authed_only
    # user: boot a new container for a challenge
    def new_container():
        user_id = get_mode()

        if ControlUtil.frequency_limit():
            return jsonify({
                'success':
                False,
                'msg':
                'Frequency limit, You should wait at least 1 min.'
            })
        # check whether exist container before
        existContainer = ControlUtil.get_container(user_id)
        if existContainer:
            return jsonify({
                'success':
                False,
                'msg':
                'You have boot {} before.'.format(
                    existContainer.challenge.name)
            })
        else:
            challenge_id = request.args.get('challenge_id')
            ControlUtil.check_challenge(challenge_id, user_id)
            configs = DBUtils.get_all_configs()
            current_count = DBUtils.get_all_alive_container_count()
            # print(configs.get("docker_max_container_count"))
            # Global cap on concurrently running containers ("None" disables it).
            if configs.get("docker_max_container_count") != "None":
                if int(configs.get("docker_max_container_count")) <= int(
                        current_count):
                    return jsonify({
                        'success': False,
                        'msg': 'Max container count exceed.'
                    })

            dynamic_docker_challenge = DynamicCheckChallenge.query \
                .filter(DynamicCheckChallenge.id == challenge_id) \
                .first_or_404()

            # NOTE(review): both branches below make the identical call — the
            # redirect_type distinction appears to be dead here; confirm and
            # collapse if intentional.
            if dynamic_docker_challenge.redirect_type == "http":
                ControlUtil.new_container(user_id=user_id,
                                          challenge_id=challenge_id)
            else:
                ControlUtil.new_container(user_id=user_id,
                                          challenge_id=challenge_id)
            return jsonify({'success': True})

    @owl_blueprint.route('/container', methods=['DELETE'])
    @authed_only
    # user: destroy the caller's container
    def destroy_container():
        user_id = get_mode()

        if ControlUtil.frequency_limit():
            return jsonify({
                'success':
                False,
                'msg':
                'Frequency limit, You should wait at least 1 min.'
            })

        if ControlUtil.destroy_container(user_id):
            return jsonify({'success': True})
        else:
            return jsonify({
                'success':
                False,
                'msg':
                'Failed when destroy instance, please contact admin!'
            })

    @owl_blueprint.route('/container', methods=['PATCH'])
    @authed_only
    # user: renew (extend) the caller's container
    def renew_container():
        # NOTE(review): other routes resolve the id via get_mode(); this one
        # uses current_user directly — verify both yield the same id in team
        # mode.
        user_id = current_user.get_current_user().id

        if ControlUtil.frequency_limit():
            return jsonify({
                'success':
                False,
                'msg':
                'Frequency limit, You should wait at least 1 min.'
            })

        configs = DBUtils.get_all_configs()
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id, user_id)
        docker_max_renew_count = int(configs.get("docker_max_renew_count"))
        container = ControlUtil.get_container(user_id)
        if container is None:
            return jsonify({'success': False, 'msg': 'Instance not found.'})
        if container.renew_count >= docker_max_renew_count:
            return jsonify({
                'success': False,
                'msg': 'Max renewal times exceed.'
            })

        # NOTE(review): renewal is implemented via expired_container — confirm
        # this is the intended renewal mechanism in ControlUtil.
        ControlUtil.expired_container(user_id=user_id,
                                      challenge_id=challenge_id)

        return jsonify({'success': True})

    def auto_clean_container():
        # Periodic job: tear down every expired container, then refresh the
        # frp reverse-proxy routing table.
        with app.app_context():
            results = DBUtils.get_all_expired_container()
            for r in results:
                ControlUtil.destroy_container(r.user_id)

            FrpUtils.update_frp_redirect()

    app.register_blueprint(owl_blueprint)

    try:
        # Exclusive, non-blocking file lock: in multi-worker deployments only
        # the first worker acquires it and runs the cleanup scheduler. The
        # file handle is intentionally kept open so the lock lives as long as
        # the process.
        lock_file = open("/tmp/ctfd_owl.lock", "w")
        lock_fd = lock_file.fileno()
        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)

        scheduler = APScheduler()
        scheduler.init_app(app)
        scheduler.start()
        scheduler.add_job(id='owl-auto-clean',
                          func=auto_clean_container,
                          trigger="interval",
                          seconds=5)

        print("[CTFd Owl]Started successfully")
    except IOError:
        # Another worker holds the lock (or the lock file is unwritable);
        # skip starting a duplicate scheduler.
        pass
Exemplo n.º 35
0
def load(app):
    """CTFd plugin entry point for ctfd-whale.

    Registers the dynamic-docker challenge type, plugin assets, admin menu,
    API namespaces and admin pages, then initializes the Docker/port-cache
    backends and (under an exclusive file lock) a scheduler intended for
    periodic container cleanup.
    """
    # upgrade()
    plugin_name = __name__.split('.')[-1]
    set_config('whale:plugin_name', plugin_name)
    app.db.create_all()
    # First-run bootstrap: seed default configs exactly once.
    if not get_config("whale:setup"):
        setup_default_configs()

    register_plugin_assets_directory(
        app,
        base_path=f"/plugins/{plugin_name}/assets",
        endpoint='plugins.ctfd-whale.assets')
    register_admin_plugin_menu_bar(title='Whale',
                                   route='/plugins/ctfd-whale/admin/settings')

    DynamicValueDockerChallenge.templates = {
        "create": f"/plugins/{plugin_name}/assets/create.html",
        "update": f"/plugins/{plugin_name}/assets/update.html",
        "view": f"/plugins/{plugin_name}/assets/view.html",
    }
    # NOTE(review): templates use the derived plugin_name while scripts
    # hard-code 'ctfd-whale' — confirm these stay in sync if the plugin
    # directory is renamed.
    DynamicValueDockerChallenge.scripts = {
        "create": "/plugins/ctfd-whale/assets/create.js",
        "update": "/plugins/ctfd-whale/assets/update.js",
        "view": "/plugins/ctfd-whale/assets/view.js",
    }
    CHALLENGE_CLASSES["dynamic_docker"] = DynamicValueDockerChallenge

    page_blueprint = Blueprint("ctfd-whale",
                               __name__,
                               template_folder="templates",
                               static_folder="assets",
                               url_prefix="/plugins/ctfd-whale")
    CTFd_API_v1.add_namespace(admin_namespace,
                              path="/plugins/ctfd-whale/admin")
    CTFd_API_v1.add_namespace(user_namespace, path="/plugins/ctfd-whale")

    @page_blueprint.route('/admin/settings')
    @admins_only
    # Settings page: runs config checks and re-inits Docker/ports on demand.
    def admin_list_configs():
        errors = WhaleChecks.perform()
        if not errors and get_config("whale:refresh", "false"):
            DockerUtils.init()
            CacheProvider(app=current_app).init_port_sets()
            set_config("whale:refresh", "false")
        return render_template('whale_config.html', errors=errors)

    @page_blueprint.route("/admin/containers")
    @admins_only
    # Containers page: delegates listing/paging to the admin API resource.
    def admin_list_containers():
        result = AdminContainers.get()
        # Remember the admin's list/grid preference in the session.
        view_mode = request.args.get('mode', session.get('view_mode', 'list'))
        session['view_mode'] = view_mode
        return render_template("whale_containers.html",
                               plugin_name=plugin_name,
                               containers=result['data']['containers'],
                               pages=result['data']['pages'],
                               curr_page=abs(
                                   request.args.get("page", 1, type=int)),
                               curr_page_start=result['data']['page_start'])

    def auto_clean_container():
        # Rebuild the frpc config from all alive containers and push/reload
        # it through the frp admin API.
        # NOTE(review): `results` (expired containers) is fetched but never
        # used here, and the job registration below is commented out — confirm
        # whether expired-container destruction is handled elsewhere.
        with app.app_context():
            results = DBContainer.get_all_expired_container()
            containers = DBContainer.get_all_alive_container()

            config = ''.join([c.frp_config for c in containers])

            try:
                # you can authorize a connection by setting
                # frp_url = http://user:pass@ip:port
                frp_addr = get_config("whale:frp_api_url")
                if not frp_addr:
                    frp_addr = f'http://{get_config("whale:frp_api_ip", "frpc")}:{get_config("whale:frp_api_port", "7400")}'
                    # backward compatibility
                common = get_config("whale:frp_config_template", '')
                if '[common]' in common:
                    output = common + config
                else:
                    # No cached template yet: fetch it from frpc and store it.
                    remote = requests.get(f'{frp_addr.rstrip("/")}/api/config')
                    assert remote.status_code == 200
                    set_config("whale:frp_config_template", remote.text)
                    output = remote.text + config
                assert requests.put(f'{frp_addr.rstrip("/")}/api/config',
                                    output,
                                    timeout=5).status_code == 200
                assert requests.get(f'{frp_addr.rstrip("/")}/api/reload',
                                    timeout=5).status_code == 200
            except (requests.RequestException, AssertionError) as e:
                raise WhaleError(
                    '\nfrpc request failed\n' + (f'{e}\n' if str(e) else '') +
                    'please check the frp related configs') from None

    app.register_blueprint(page_blueprint)

    try:
        CacheProvider(app=app).init_port_sets()
        DockerUtils.init()
    except Exception:
        # Misconfiguration is non-fatal at load time; surface it as a warning.
        warnings.warn("Initialization Failed. Please check your configs.",
                      WhaleWarning)

    try:
        # Exclusive, non-blocking file lock: only one worker starts the
        # scheduler. The handle is intentionally kept open so the lock lives
        # for the process lifetime.
        lock_file = open("/tmp/ctfd_whale.lock", "w")
        lock_fd = lock_file.fileno()
        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)

        scheduler = APScheduler()
        scheduler.init_app(app)
        scheduler.start()
        # scheduler.add_job(
        #     id='whale-auto-clean', func=auto_clean_container,
        #     trigger="interval", seconds=10
        # )

        print("[CTFd Whale] Started successfully")
    except IOError:
        # Another worker holds the lock; skip starting a duplicate scheduler.
        pass
            "func": "timedTask:job_1",
            "args": (3, 4),
            "trigger": "interval",
            "seconds": 5  # 不指定时间每5S执行一次
        }
    ]

    SCHEDULER_API_ENABLED = True


def get_user_table():
    """Refresh the user table by running the legacy Python 2 exporter script.

    Fix: the original built a shell command by string concatenation and ran
    it through os.system; use subprocess.run with an argument list so no
    shell is involved. The call stays best-effort (return code ignored),
    matching the original behavior.
    """
    import subprocess  # local import keeps this snippet self-contained

    filename = "./getUserInfo.py"
    subprocess.run(["python2", filename])


def job_1(a, b):
    """Scheduled demo job: print the sum of its two arguments.

    Fix: the original used the Python 2 print statement (`print a + b`),
    which is a SyntaxError under Python 3; converted to the print() function
    with identical output.
    """
    print(a + b)


if __name__ == '__main__':
    app = Flask(__name__)
    app.config.from_object(Config())

    scheduler = APScheduler()  # create the scheduled-task manager

    # scheduler.api_enabled = True
    scheduler.init_app(app)  # bind the scheduler to the Flask app
    scheduler.start()  # start the jobs; timers restart on every app restart

    app.run(host="0.0.0.0", port=8081, debug=True, threaded=True)
Exemplo n.º 37
0
class TestViews(TestCase):
    """End-to-end tests for the legacy /scheduler web views."""

    def setUp(self):
        self.app = Flask(__name__)
        self.app.config['SCHEDULER_VIEWS_ENABLED'] = True
        self.scheduler = APScheduler(app=self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def _assert_same_job(self, expected, actual):
        """Compare the job fields the views are expected to round-trip."""
        for field in ('id', 'func', 'trigger', 'minutes'):
            self.assertEqual(expected.get(field), actual.get(field))

    def test_add_job(self):
        """POSTing a date job echoes the submitted fields back."""
        submitted = {
            'id': 'job1',
            'func': 'test_views:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }

        response = self.client.post('/scheduler/jobs', data=json.dumps(submitted))
        self.assertEqual(response.status_code, 200)

        echoed = json.loads(response.get_data(as_text=True))
        for field in ('id', 'func', 'trigger', 'run_date'):
            self.assertEqual(submitted.get(field), echoed.get(field))

    def test_delete_job(self):
        """DELETE removes the job; a follow-up GET 404s."""
        self.__add_job()

        self.assertEqual(self.client.delete('/scheduler/jobs/job1').status_code, 204)
        self.assertEqual(self.client.get('/scheduler/jobs/job1').status_code, 404)

    def test_get_job(self):
        """GET on a single job returns the stored definition."""
        submitted = self.__add_job()

        response = self.client.get('/scheduler/jobs/job1')
        self.assertEqual(response.status_code, 200)

        echoed = json.loads(response.get_data(as_text=True))
        self._assert_same_job(submitted, echoed)

    def test_get_all_jobs(self):
        """GET on the collection lists the single stored job."""
        submitted = self.__add_job()

        response = self.client.get('/scheduler/jobs')
        self.assertEqual(response.status_code, 200)

        listed = json.loads(response.get_data(as_text=True))
        self.assertEqual(len(listed), 1)
        self._assert_same_job(submitted, listed[0])

    def test_update_job(self):
        """PATCH updates args while preserving the other fields."""
        submitted = self.__add_job()

        patch = {
            'args': [1]
        }

        response = self.client.patch('/scheduler/jobs/job1', data=json.dumps(patch))
        self.assertEqual(response.status_code, 200)

        echoed = json.loads(response.get_data(as_text=True))
        self.assertEqual(patch.get('args'), echoed.get('args'))
        self._assert_same_job(submitted, echoed)

    def test_pause_and_resume_job(self):
        """Pausing clears next_run_time; resuming restores it."""
        self.__add_job()

        for action, check in (('pause', self.assertIsNone),
                              ('resume', self.assertIsNotNone)):
            response = self.client.post('/scheduler/jobs/job1/' + action)
            self.assertEqual(response.status_code, 200)
            payload = json.loads(response.get_data(as_text=True))
            check(payload.get('next_run_time'))

    def __add_job(self):
        """Create the canonical 10-minute interval job and return the echo."""
        job = {
            'id': 'job1',
            'func': 'test_views:job1',
            'trigger': 'interval',
            'minutes': 10,
        }

        response = self.client.post('/scheduler/jobs', data=json.dumps(job))
        return json.loads(response.get_data(as_text=True))
Exemplo n.º 38
0
class TestScheduler(TestCase):
    """Covers APScheduler lifecycle, config loading, and job management."""

    def setUp(self):
        self.app = Flask(__name__)
        self.sched = APScheduler()
        self.sched_two = APScheduler(app=self.app)

    def test_running(self):
        """The running flag flips only after start()."""
        self.assertFalse(self.sched.running)
        self.sched.start()
        self.assertTrue(self.sched.running)

    def test_start_with_allowed_hosts(self):
        """A host whitelist excluding this host keeps the scheduler stopped."""
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = ['any_server_name']
        self.sched.init_app(self.app)
        self.sched.start()
        self.assertFalse(self.sched.running)

    def test_start_without_allowed_hosts(self):
        """An empty host whitelist also prevents startup."""
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = []
        self.sched.init_app(self.app)
        self.sched.start()
        self.assertFalse(self.sched.running)

    def test_shutdown(self):
        """shutdown() returns a running scheduler to the stopped state."""
        self.sched.init_app(self.app)
        self.sched.start()
        self.assertTrue(self.sched.running)
        self.sched.shutdown()
        self.assertFalse(self.sched.running)

    def test_load_jobs_from_config(self):
        """Jobs, stores, executors, defaults and timezone load from config."""
        self.app.config['JOBS'] = [{
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'interval',
            'seconds': 10,
        }]
        self.app.config['SCHEDULER_JOBSTORES'] = {
            "default": apscheduler.jobstores.memory.MemoryJobStore()
        }
        self.app.config['SCHEDULER_EXECUTORS'] = {
            "default": {
                "type": "threadpool"
            }
        }
        self.app.config['SCHEDULER_JOB_DEFAULTS'] = {"coalesce": True}
        self.app.config['SCHEDULER_TIMEZONE'] = utc

        self.sched.init_app(app=self.app)
        self.assertIsNotNone(self.sched.get_job('job1'))

    def test_task_decorator(self):
        """@task registers the job even before init_app/start."""
        @self.sched.task('interval', seconds=10, id='job1')
        def decorated_job():
            pass

        self.assertIsNotNone(self.sched.get_job('job1'))

    def test_state_prop(self):
        """state is truthy while running, falsy after shutdown."""
        self.sched.init_app(self.app)
        self.sched.start()
        self.assertTrue(self.sched.state)
        self.sched.shutdown()
        self.assertFalse(self.sched.state)

    def test_scheduler_prop(self):
        """The wrapped apscheduler instance is reachable via .scheduler."""
        self.sched.init_app(self.app)
        self.sched.start()
        self.assertIsNotNone(self.sched.scheduler)
        self.sched.shutdown()
        self.assertFalse(self.sched.running)

    def test_pause_resume(self):
        """pause()/resume() toggle the state between 2 (paused) and 1 (running)."""
        self.sched.init_app(self.app)
        self.sched.start()
        self.assertTrue(self.sched.running)
        self.sched.pause()
        self.assertTrue(self.sched.state == 2)
        self.sched.resume()
        self.assertTrue(self.sched.state == 1)
        self.sched.shutdown()
        self.assertFalse(self.sched.running)

    def test_add_listener(self):
        """Listeners can be added and removed on a running scheduler."""
        self.sched.init_app(self.app)
        self.sched.start()
        self.assertTrue(self.sched.running)
        self.sched.add_listener(None)
        self.sched.remove_listener(None)
        self.sched.shutdown()
        self.assertFalse(self.sched.running)

    def test_add_remove_job(self):
        """remove_job() deletes a decorator-registered job."""
        @self.sched.task('interval', seconds=10, id='job1')
        def decorated_job():
            pass

        self.sched.init_app(self.app)
        self.sched.start()
        self.assertIsNotNone(self.sched.get_job('job1'))

        self.sched.remove_job('job1')
        self.assertFalse(self.sched.get_job('job1'))
        self.sched.shutdown()
        self.assertFalse(self.sched.running)

    def test_add_delete_job(self):
        """delete_job() behaves like remove_job() for a registered job."""
        @self.sched.task('interval', seconds=10, id='job1')
        def decorated_job():
            pass

        self.sched.init_app(self.app)
        self.sched.start()
        self.assertIsNotNone(self.sched.get_job('job1'))

        self.sched.delete_job('job1')
        self.assertFalse(self.sched.get_job('job1'))
        self.sched.shutdown()
        self.assertFalse(self.sched.running)

    def test_add_remove_all_jobs(self):
        """remove_all_jobs() clears every registered job at once."""
        @self.sched.task('interval', hours=1, id='job1')
        def decorated_job():
            pass

        @self.sched.task('interval', hours=1, id='job2')
        def decorated_job2():
            pass

        self.sched.init_app(self.app)
        self.sched.start()
        self.assertTrue(len(self.sched.get_jobs()) == 2)
        self.sched.remove_all_jobs()

        self.assertFalse(self.sched.get_job('job1'))
        self.assertFalse(self.sched.get_job('job2'))

        self.sched.shutdown()
        self.assertFalse(self.sched.running)

    def test_add_delete_all_jobs(self):
        """delete_all_jobs() clears every registered job at once."""
        @self.sched.task('interval', hours=1, id='job1')
        def decorated_job():
            pass

        @self.sched.task('interval', hours=1, id='job2')
        def decorated_job2():
            pass

        self.sched.init_app(self.app)
        self.sched.start()
        self.assertTrue(len(self.sched.get_jobs()) == 2)
        self.sched.delete_all_jobs()

        self.assertFalse(self.sched.get_job('job1'))
        self.assertFalse(self.sched.get_job('job2'))

        self.sched.shutdown()
        self.assertFalse(self.sched.running)

    def test_job_to_dict(self):
        """utils.job_to_dict() serializes a job into a non-empty mapping."""
        @self.sched.task('interval',
                         hours=1,
                         id='job1',
                         end_date=datetime.datetime.now(),
                         weeks=1,
                         days=1,
                         seconds=99)
        def decorated_job():
            pass

        self.sched.init_app(self.app)
        self.sched.start()
        job = self.sched.get_job('job1')
        self.assertIsNotNone(job)

        self.assertTrue(len(utils.job_to_dict(job)))
        self.sched.delete_job('job1')
        self.assertFalse(self.sched.get_job('job1'))
        self.sched.shutdown()
        self.assertFalse(self.sched.running)
Exemplo n.º 39
0
    try:
        handler.handle(body, signature)
    except InvalidSignatureError:
        print(
            "Invalid signature. Please check your channel access token/channel secret."
        )
        abort(400)

    return 'OK'


# linebot text-message handler
@handler.add(MessageEvent, message=TextMessage)
def handle_message(event):
    """Reply when the incoming text mentions the bot keyword '小幫手'."""

    # Keyword reply: only messages containing the keyword get an answer.
    if '小幫手' in event.message.text:
        replyMsg = FindKeyWordInText(text=event.message.text,
                                     userId=event.source.user_id)
        line_bot_api.reply_message(event.reply_token,
                                   TextSendMessage(text=replyMsg))


# Start the Flask app
if __name__ == "__main__":
    scheduler = APScheduler(
        BackgroundScheduler(timezone="Asia/Taipei"))  # wrap a Taipei-timezone background scheduler
    scheduler.init_app(app)  # attach the job list to the Flask app
    scheduler.start()  # start running the job list
    app.run(host='0.0.0.0')
Exemplo n.º 40
0
def load(app):
    """CTFd plugin entry point.

    Registers the dynamic-docker challenge type, an admin blueprint
    (settings + container management), a user-facing blueprint
    (start/list/renew/stop a per-user container), and a background
    job that reaps expired containers every 10 seconds.
    """
    # upgrade()
    app.db.create_all()
    # Make the custom challenge type available to CTFd's challenge registry.
    CHALLENGE_CLASSES["dynamic_docker"] = DynamicValueDockerChallenge
    register_plugin_assets_directory(app,
                                     base_path="/plugins/ctfd-docker/assets/")

    # Admin-only pages, mounted under /admin/plugins/ctfd-docker.
    page_admin_blueprint = Blueprint("ctfd-docker-admin-page",
                                     __name__,
                                     template_folder="templates",
                                     static_folder="assets",
                                     url_prefix="/admin/plugins/ctfd-docker")

    @page_admin_blueprint.route('/settings', methods=['GET'])
    @admins_only
    def admin_list_configs():
        # Render the plugin configuration page.
        configs = DBUtils.get_all_configs()
        return render_template('config.html', configs=configs)

    @page_admin_blueprint.route('/settings', methods=['PATCH'])
    @admins_only
    def admin_save_configs():
        # Persist the JSON request body as key/value config entries.
        req = request.get_json()
        DBUtils.save_all_configs(req.items())
        return json.dumps({'success': True})

    @page_admin_blueprint.route("/containers", methods=['GET'])
    @admins_only
    def admin_list_containers():
        # Paginated listing of alive containers, 50 per page.
        configs = DBUtils.get_all_configs()
        # NOTE(review): abs() guards negative pages, but page=0 still yields a
        # negative page_start — confirm callers always send page >= 1.
        page = abs(request.args.get("page", 1, type=int))
        results_per_page = 50
        page_start = results_per_page * (page - 1)
        page_end = results_per_page * (page - 1) + results_per_page

        count = DBUtils.get_all_alive_container_count()
        containers = DBUtils.get_all_alive_container_page(page_start, page_end)

        # Ceiling division: one extra page when there is a remainder.
        pages = int(count / results_per_page) + (count % results_per_page > 0)
        return render_template("containers.html",
                               containers=containers,
                               pages=pages,
                               curr_page=page,
                               curr_page_start=page_start,
                               configs=configs)

    @page_admin_blueprint.route("/containers", methods=['DELETE'])
    @admins_only
    def admin_delete_container():
        # Force-remove a specific user's container.
        user_id = request.args.get('user_id')
        ControlUtil.remove_container(user_id)
        return json.dumps({'success': True})

    @page_admin_blueprint.route("/containers", methods=['PATCH'])
    @admins_only
    def admin_renew_container():
        # Extend a user's container lifetime (admin path: no renew limit check).
        user_id = request.args.get('user_id')
        challenge_id = request.args.get('challenge_id')
        DBUtils.renew_current_container(user_id=user_id,
                                        challenge_id=challenge_id)
        return json.dumps({'success': True})

    app.register_blueprint(page_admin_blueprint)

    # User-facing pages, mounted under /plugins/ctfd-docker.
    page_blueprint = Blueprint("ctfd-docker-page",
                               __name__,
                               template_folder="templates",
                               static_folder="assets",
                               url_prefix="/plugins/ctfd-docker")

    @page_blueprint.route('/container', methods=['POST'])
    @authed_only
    def add_container():
        """Start a fresh container for the current user's challenge."""
        if ControlUtil.frequency_limit():
            return json.dumps({
                'success':
                False,
                'msg':
                'Frequency limit, You should wait at least 1 min.'
            })

        # Any previous container for this user is replaced, not reused.
        user_id = current_user.get_current_user().id
        ControlUtil.remove_container(user_id)
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id)

        # Enforce the global cap on simultaneously running containers.
        configs = DBUtils.get_all_configs()
        current_count = DBUtils.get_all_alive_container_count()
        if int(configs.get("docker_max_container_count")) <= int(
                current_count):
            return json.dumps({
                'success': False,
                'msg': 'Max container count exceed.'
            })

        dynamic_docker_challenge = DynamicDockerChallenge.query \
            .filter(DynamicDockerChallenge.id == challenge_id) \
            .first_or_404()
        # Per-container random flag.
        flag = "flag{" + str(uuid.uuid4()) + "}"
        host = configs.get("docker_client_ip")
        # Pick a random host port until one is acceptable.
        # NOTE(review): breaking when the port is "invalid" reads backwards —
        # presumably "invalid" here means "not yet in use"; verify the helper.
        while True:
            port = random.randint(10000, 50000)
            if ControlUtil.is_container_port_invalid(str(host), int(port)):
                break
        if ControlUtil.add_container(user_id=user_id,
                                     challenge_id=challenge_id,
                                     flag=flag,
                                     port=port):
            return json.dumps({'success': True})
        else:
            return json.dumps({
                'success': False,
                'msg': 'ERROR: container start failed'
            })

    @page_blueprint.route('/container', methods=['GET'])
    @authed_only
    def list_container():
        """Report the user's running container for this challenge, if any."""
        user_id = current_user.get_current_user().id
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id)
        data = DBUtils.get_current_containers(user_id=user_id)
        configs = DBUtils.get_all_configs()
        if data is not None:
            # A container exists but for a different challenge: report nothing.
            if int(data.challenge_id) != int(challenge_id):
                return json.dumps({})
            return json.dumps({
                'success':
                True,
                'type':
                'redirect',
                'ip':
                configs.get('docker_client_ip'),
                'port':
                data.port,
                'remaining_time':
                3600 - (datetime.now() - data.start_time).seconds
            })
        else:
            return json.dumps({'success': True})

    @page_blueprint.route('/container', methods=['DELETE'])
    @authed_only
    def remove_container():
        """Stop and remove the current user's container."""
        if ControlUtil.frequency_limit():
            return json.dumps({
                'success':
                False,
                'msg':
                'Frequency limit, You should wait at least 1 min.'
            })

        user_id = current_user.get_current_user().id
        ControlUtil.remove_container(user_id)
        return json.dumps({'success': True})

    @page_blueprint.route('/container', methods=['PATCH'])
    @authed_only
    def renew_container():
        """Extend the user's container lifetime, up to a configured limit."""
        if ControlUtil.frequency_limit():
            return json.dumps({
                'success':
                False,
                'msg':
                'Frequency limit, You should wait at least 1 min.'
            })

        configs = DBUtils.get_all_configs()
        user_id = current_user.get_current_user().id
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id)
        docker_max_renew_count = int(configs.get("docker_max_renew_count"))
        container = DBUtils.get_current_containers(user_id)
        # NOTE(review): if the user has no container this is None and the
        # attribute access below raises AttributeError (HTTP 500) — confirm
        # whether an explicit error response is wanted here.
        if container.renew_count >= docker_max_renew_count:
            return json.dumps({
                'success': False,
                'msg': 'Max renewal times exceed.'
            })
        DBUtils.renew_current_container(user_id=user_id,
                                        challenge_id=challenge_id)
        return json.dumps({'success': True})

    def auto_clean_container():
        # Background job: remove every container whose lifetime has expired.
        with app.app_context():
            results = DBUtils.get_all_expired_container()
            for r in results:
                ControlUtil.remove_container(r.user_id)

    app.register_blueprint(page_blueprint)

    # Run the reaper every 10 seconds for the lifetime of the app.
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.start()
    scheduler.add_job(id='whale-auto-clean',
                      func=auto_clean_container,
                      trigger="interval",
                      seconds=10)
Exemplo n.º 41
0
def task_init():
    """Attach a new APScheduler to the global Flask app, start it running,
    then perform one initial refresh pass."""
    job_scheduler = APScheduler()
    job_scheduler.init_app(app)
    job_scheduler.start()
    refresh()
Exemplo n.º 42
0
def create_app(testing=False):
    """Application factory: build and configure the Flask app.

    Args:
        testing: when True, load the test configuration and skip the
            Kubernetes client and background-scheduler setup.

    Returns:
        The fully configured Flask application.

    Raises:
        ValueError: if the app mode is none of dev/prod/test.
    """
    # create and configure the app
    app = Flask(__name__, instance_relative_config=True)

    # Load conf depending on the mode
    if testing:
        app.config.from_pyfile("test.conf.py", silent=False)
    elif app.env == "development":
        app.config.from_pyfile("back.conf.py", silent=False)
    elif app.env == "production":
        app.config.from_pyfile("/etc/config/back.conf.py", silent=False)
    else:
        raise ValueError("App mode unknown: not in dev|prod|test")

    # K8sManager configuration changes with app mode
    app.k8s = None
    if not testing:
        if app.env == "development":
            # Out-of-cluster access: talk to the API server named in config.
            k8s_configuration = kubernetes.client.Configuration()
            k8s_configuration.verify_ssl = False  # dev clusters commonly use self-signed certs
            k8s_configuration.debug = False
            k8s_configuration.host = app.config["K8S_HOST"]
            k8s_configuration.api_key["authorization"] = app.config[
                "K8S_API_KEY"]
            k8s_configuration.api_key_prefix["authorization"] = "Bearer"

            app.k8s = kubernetes.client.ApiClient(k8s_configuration)
        else:
            # In-cluster: credentials come from the pod's service account.
            kubernetes.config.load_incluster_config()
            app.k8s = kubernetes.client.ApiClient()

    app.mongo = PyMongo(app)

    if not testing:
        #  Scheduler to kill old challenges
        if (not app.debug or os.environ.get("WERKZEUG_RUN_MAIN")
                == "true"):  # avoid double task exec in dev
            scheduler = APScheduler()
            scheduler.init_app(app)
            scheduler.start()

            # BUG FIX: this registration previously sat OUTSIDE the guard
            # above, so when the Werkzeug-reloader condition was false,
            # `scheduler` was unbound and the decorator raised NameError.
            # Register the cron job only when a scheduler actually exists.
            @scheduler.task(
                "cron",
                id="do_stop_old_participation",
                hour="*/{}".format(
                    app.config["CHECK_OLD_CHALLENGES_INTERVAL_HOURS"]),
            )
            def stop_old():
                with app.app_context():
                    stop_old_participations()

    login_manager = LoginManager()
    login_manager.init_app(app)
    app.logger = create_logger(app)

    @login_manager.user_loader
    def load_user(user_id):
        # Resolve the session's stored id to a user object for flask-login.
        return UserManager().get(user_id)

    @app.errorhandler(EmmentalException)
    def handle_emmental_exception(e):
        # Domain errors: log internal details, expose only the external
        # message and the exception's own status code.
        app.logger.error(traceback.format_exc())
        app.logger.error(e.internal_message)
        response = jsonify({
            "error_code": e.error_code,
            "error_message": e.external_message,
        })
        return response, e.status_code

    @app.errorhandler(Exception)
    def handle_exception(e):
        # Catch-all: log the traceback, never leak internals to the client.
        app.logger.error(traceback.format_exc())
        response = jsonify({
            "error_code": -1,
            "error_message": "Unknown Error",
        })
        return response, 500

    app.register_blueprint(users)
    app.register_blueprint(challenges)
    app.register_blueprint(challenge_categories)
    app.register_blueprint(challenge_participations)

    @app.route("/config")
    def config():
        # Lightweight endpoint exposing version/auth state to the frontend.
        res = {
            "version": "0.0.1",
            "isAuthenticated": current_user.is_authenticated,
            "env": app.env,
        }
        if current_user.is_authenticated:
            res.update({"currentUser": current_user.to_dict()})
        return jsonify(res)

    return app