Exemplo n.º 1
0
def create_app():
  """Build and configure the Flask application.

  Configuration comes from the SITTER_SETTINGS environment variable when
  set, otherwise from the first ``settings.cfg`` found next to this
  package or one level above it.
  """
  app = Flask(__name__)

  if os.environ.get('SITTER_SETTINGS'):
    app.config.from_envvar('SITTER_SETTINGS')
  else:
    # Fall back to a settings.cfg located beside (or one level above) this module.
    candidate_dirs = (
        os.path.dirname(os.path.dirname(__file__)),
        os.path.dirname(__file__),
    )
    for candidate_dir in candidate_dirs:
      cfg = os.path.join(candidate_dir, 'settings.cfg')
      if os.path.isfile(cfg):
        app.config.from_pyfile(cfg)
        break

  # Make sure the on-disk session store exists before wiring it up.
  session_dir = app.config['SESSION_STORAGE_DIR']
  if not os.path.exists(session_dir):
    os.makedirs(session_dir)

  kv = KVSessionExtension(FilesystemStore(session_dir), app)

  # Remove extra white space from rendered templates.
  app.jinja_env.trim_blocks = True
  app.jinja_env.lstrip_blocks = True

  def cleanup_sessions():
    kv.cleanup_sessions(app)

  # Periodically purge expired sessions from the filesystem store.
  scheduler = APScheduler()
  scheduler.init_app(app)
  scheduler.add_job(id="kv-session-cleanup", func=cleanup_sessions, seconds=10, trigger='interval')
  scheduler.start()

  return app
Exemplo n.º 2
0
class TestHTTPBasicAuth(TestCase):
    """Exercise the scheduler REST API behind HTTP Basic authentication."""

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.auth = HTTPBasicAuth()
        self.scheduler.api_enabled = True
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.scheduler.authenticate(self._authenticate)
        self.client = self.app.test_client()

    def _authenticate(self, auth):
        # Only the test/test credential pair is accepted.
        return auth['username'] == 'test' and auth['password'] == 'test'

    @staticmethod
    def _basic_auth_header(user, password):
        # Build an RFC 7617 Basic Authorization header.
        token = base64.b64encode(('%s:%s' % (user, password)).encode()).decode('ascii')
        return {'Authorization': 'Basic ' + token}

    def test_valid_credentials(self):
        response = self.client.get(self.scheduler.api_prefix + '',
                                   headers=self._basic_auth_header('test', 'test'))
        self.assertEqual(response.status_code, 200)

    def test_invalid_credentials(self):
        response = self.client.get(self.scheduler.api_prefix + '',
                                   headers=self._basic_auth_header('guest', 'guest'))
        self.assertEqual(response.status_code, 401)
        self.assertEqual(response.headers['WWW-Authenticate'], 'Basic realm="Authentication Required"')

    def test_invalid_header_format(self):
        # A credential blob that is not valid base64-encoded user:pass.
        response = self.client.get(self.scheduler.api_prefix + '',
                                   headers={'Authorization': 'Basic 1231234'})
        self.assertEqual(response.status_code, 401)
        self.assertEqual(response.headers['WWW-Authenticate'], 'Basic realm="Authentication Required"')

    def test_missing_credentials(self):
        response = self.client.get(self.scheduler.api_prefix + '')
        self.assertEqual(response.status_code, 401)
        self.assertEqual(response.headers['WWW-Authenticate'], 'Basic realm="Authentication Required"')
Exemplo n.º 3
0
Arquivo: axe.py Projeto: soasme/axe
def create_app():
    """Create the axe Flask app with a Prometheus endpoint and scheduled jobs."""
    app = Flask(__name__)

    # External configuration is optional but its absence is logged.
    if 'AXE_CONFIG' in os.environ:
        app.config.from_envvar('AXE_CONFIG')
    else:
        logger.warning('Missing config: AXE_CONFIG.')

    job_definitions = [
        {
            'func': count,
            'trigger': 'interval',
            'seconds': 3,
            'id': 'count',
        }
    ]
    app.config.update({'SCHEDULER_API_ENABLED': True, 'JOBS': job_definitions})

    scheduler = APScheduler()
    scheduler.init_app(app)

    # Expose Prometheus metrics under /metrics via WSGI dispatch.
    app.wsgi_app = DispatcherMiddleware(app.wsgi_app, {'/metrics': make_wsgi_app()})

    return app
Exemplo n.º 4
0
class TestAPIPrefix(TestCase):
    """Verify that the scheduler REST API honours a custom URL prefix."""

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.api_enabled = True
        self.scheduler.api_prefix = '/api'
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def test_api_prefix(self):
        # Routes are reachable under the configured prefix.
        status = self.client.get(self.scheduler.api_prefix + '/jobs').status_code
        self.assertEqual(status, 200)

    def test_invalid_api_prefix(self):
        # Any other prefix must not resolve.
        status = self.client.get('/invalidapi/jobs').status_code
        self.assertEqual(status, 404)
Exemplo n.º 5
0
def stacosys_server(config_pathname):
    """Configure and run the Stacosys application (blocking call).

    Boot order matters here: configuration, logging, database, the
    e-mail/comment polling scheduler, RSS generation, then the Flask
    HTTP server.

    :param config_pathname: path to the Stacosys configuration file.
    """
    app = Flask(__name__)
    config.initialize(config_pathname, app)

    # configure logging
    logger = logging.getLogger(__name__)
    configure_logging(logging.INFO)
    # Quieten noisy third-party loggers.
    logging.getLogger("werkzeug").level = logging.WARNING
    logging.getLogger("apscheduler.executors").level = logging.WARNING

    # initialize database
    from core import database

    database.setup()

    # cron email fetcher
    # Polling intervals are read from the Stacosys configuration.
    app.config.from_object(
        JobConfig(
            config.getInt(config.MAIL_POLLING), config.getInt(config.COMMENT_POLLING)
        )
    )
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.start()

    logger.info("Start Stacosys application")

    # generate RSS for all sites
    from core import rss

    rss.generate_all()

    # start Flask
    # Imported for their side effects: api and form register routes on the app.
    from interface import api
    from interface import form

    logger.debug("Load interface %s" % api)
    logger.debug("Load interface %s" % form)

    # Blocking call; never returns under normal operation.
    app.run(
        host=config.get(config.HTTP_HOST),
        port=config.get(config.HTTP_PORT),
        debug=False,
        use_reloader=False,
    )
class TestScheduler(TestCase):
    """Lifecycle and configuration tests for Flask-APScheduler."""

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()

    def test_running(self):
        # Not running until explicitly started.
        self.assertFalse(self.scheduler.running)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)

    def test_start_with_allowed_hosts(self):
        # A whitelist that excludes this host keeps the scheduler off.
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = ['any_server_name']
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertFalse(self.scheduler.running)

    def test_start_without_allowed_hosts(self):
        # An empty whitelist disables the scheduler entirely.
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = []
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertFalse(self.scheduler.running)

    def test_shutdown(self):
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)

    def test_load_jobs_from_config(self):
        job_spec = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'interval',
            'seconds': 10
        }
        self.app.config['JOBS'] = [job_spec]

        self.scheduler.init_app(self.app)
        self.assertIsNotNone(self.scheduler.get_job('job1'))

    def test_task_decorator(self):
        @self.scheduler.task('interval', seconds=10, id='job1')
        def decorated_job():
            pass

        self.assertIsNotNone(self.scheduler.get_job('job1'))
Exemplo n.º 7
0
class TestEndpointPrefix(TestCase):
    """Check that generated endpoint names carry the configured prefix."""

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.api_enabled = True
        self.scheduler.endpoint_prefix = 'api.'
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def test_endpoint_prefix(self):
        with self.scheduler.app.test_request_context():
            endpoint = self.scheduler.endpoint_prefix + 'get_scheduler_info'
            # url_for raises BuildError for unknown endpoints, so a truthy
            # result means the prefixed endpoint was registered.
            self.assertTrue(bool(url_for(endpoint)))

    def test_invalid_endpoint_prefix(self):
        with self.scheduler.app.test_request_context():
            # The unprefixed endpoint name must not resolve.
            try:
                built = url_for('get_scheduler_info')
            except BuildError:
                built = False
            self.assertFalse(built)
Exemplo n.º 8
0
Arquivo: run.py Projeto: kianby/srmail
def srmail_server(config_pathname):
    """Configure and run the SRMAIL application (blocking call).

    Boot order matters: configuration, logging, database, the IMAP
    polling scheduler, then the Flask HTTP server.

    :param config_pathname: path to the SRMAIL configuration file.
    """
    app = Flask(__name__)
    config.initialize(config_pathname, app)

    # configure logging
    logger = logging.getLogger(__name__)
    configure_logging(logging.INFO)

    # initialize database
    from core import database

    database.setup()

    # cron email fetcher
    # IMAP polling interval comes from the configuration file.
    app.config.from_object(JobConfig(config.getInt(config.IMAP_POLLING)))
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.start()

    logger.info("Starting SRMAIL application")

    # start Flask
    # Imported for its side effects: api registers routes on the app.
    from interface import api

    logger.debug("Load interface %s" % api)

    # Blocking call; returns only once the server stops.
    app.run(
        host=config.get(config.HTTP_HOST),
        port=config.get(config.HTTP_PORT),
        debug=False,
        use_reloader=False,
    )

    # Exit application
    logger.info("Stopping SRMAIL application")
Exemplo n.º 9
0
from flask import Flask
from config import Config
import os
from flask_apscheduler import APScheduler

import time
from google.cloud import language_v1


# Module-level application setup: app, scheduler, and NLP client are
# created at import time.
app = Flask(__name__)
app.config.from_object(Config)

# NOTE(review): "schedular" is a misspelling of "scheduler"; renaming it
# would change this module's public attribute, so it is left as-is.
schedular = APScheduler()
schedular.init_app(app)
schedular.start()

# Google Cloud Natural Language client (requires application credentials
# in the environment).
client = language_v1.LanguageServiceClient()


# Imported last so the route/job modules can use the objects defined above.
from . import routes
from .jobs.webScraper import webScrapeToJSONAndPush
Exemplo n.º 10
0
# -*- coding: utf-8 -*-
from flask import Flask
from Config.scheduler_job import Config
from flask_apscheduler import APScheduler
from apscheduler.events import EVENT_JOB_EXECUTED, EVENT_JOB_ERROR

flask_app = Flask(__name__)

# Load the scheduled-job configuration into the Flask app.
flask_app.config.from_object(Config())

# Initialise the scheduler against this app.
scheduler = APScheduler()
scheduler.init_app(flask_app)

# Attach a listener that fires on successful and failed job executions.
scheduler.add_listener(Config.my_listener,
                       EVENT_JOB_EXECUTED | EVENT_JOB_ERROR)

# Start running the scheduled jobs.
scheduler.start()
Exemplo n.º 11
0
from flask import Flask

from flask_mongoengine import MongoEngine
from flask_apscheduler import APScheduler

from app_apscheduler.apscheduler_config import Config

app = Flask(__name__)

# MongoDB connection settings for the flask_wechat database.
app.config['MONGODB_SETTINGS'] = {
    'db': 'flask_wechat',
    'host': '127.0.0.1',
    'port': 27017
}
db = MongoEngine(app)

# Scheduled-job configuration
app.config.from_object(Config())
scheduler = APScheduler()  # instantiate APScheduler
scheduler.init_app(app)  # attach the job list to the Flask app
scheduler.start()  # start the scheduler

# Imported for their side effects: these modules register API routes.
import app_resource.views
import app_movie.views
Exemplo n.º 12
0
def initSchedule(app):
    """Attach a configured APScheduler instance to *app* and return it."""
    app.config.from_object(Config())
    job_scheduler = APScheduler()
    job_scheduler.init_app(app)
    return job_scheduler
Exemplo n.º 13
0
class TestAPI(TestCase):
    """End-to-end tests for the Flask-APScheduler REST API.

    Each test drives the API through a Flask test client; jobs reference
    ``tests.test_api:job1`` by import string rather than a callable.
    """

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.api_enabled = True
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def test_scheduler_info(self):
        # The API root reports scheduler status and host configuration.
        response = self.client.get(self.scheduler.api_prefix)
        self.assertEqual(response.status_code, 200)
        info = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(info['current_host'])
        self.assertEqual(info['allowed_hosts'], ['*'])
        self.assertTrue(info['running'])

    def test_add_job(self):
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }

        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 200)

        # The response echoes back the created job's fields.
        job2 = json.loads(response.get_data(as_text=True))

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('run_date'), job2.get('run_date'))

    def test_add_conflicted_job(self):
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }

        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 200)

        # Re-posting the same job id yields 409 Conflict.
        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 409)

    def test_add_invalid_job(self):
        # A job without a usable id is rejected with a server error.
        job = {
            'id': None,
        }

        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 500)

    def test_delete_job(self):
        self.__add_job()

        response = self.client.delete(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 204)

        # Once deleted, the job can no longer be fetched.
        response = self.client.get(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_delete_job_not_found(self):
        response = self.client.delete(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_get_job(self):
        job = self.__add_job()

        response = self.client.get(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 200)

        job2 = json.loads(response.get_data(as_text=True))

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('minutes'), job2.get('minutes'))

    def test_get_job_not_found(self):
        response = self.client.get(self.scheduler.api_prefix + '/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_get_all_jobs(self):
        job = self.__add_job()

        response = self.client.get(self.scheduler.api_prefix + '/jobs')
        self.assertEqual(response.status_code, 200)

        jobs = json.loads(response.get_data(as_text=True))

        self.assertEqual(len(jobs), 1)

        job2 = jobs[0]

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('minutes'), job2.get('minutes'))

    def test_update_job(self):
        job = self.__add_job()

        # Switch the job from an interval trigger to a cron trigger.
        data_to_update = {
            'args': [1],
            'trigger': 'cron',
            'minute': '*/1',
            'start_date': '2021-01-01'
        }

        response = self.client.patch(self.scheduler.api_prefix + '/jobs/job1', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 200)

        job2 = json.loads(response.get_data(as_text=True))

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(data_to_update.get('args'), job2.get('args'))
        self.assertEqual(data_to_update.get('trigger'), job2.get('trigger'))
        self.assertEqual('2021-01-01T00:00:00+00:00', job2.get('start_date'))
        self.assertEqual('2021-01-01T00:00:00+00:00', job2.get('next_run_time'))

    def test_update_job_not_found(self):
        data_to_update = {
            'args': [1],
            'trigger': 'cron',
            'minute': '*/1',
            'start_date': '2021-01-01'
        }

        response = self.client.patch(self.scheduler.api_prefix + '/jobs/job1', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 404)

    def test_update_invalid_job(self):
        self.__add_job()

        data_to_update = {
            'trigger': 'invalid_trigger',
        }

        response = self.client.patch(self.scheduler.api_prefix + '/jobs/job1', data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 500)

    def test_pause_and_resume_job(self):
        self.__add_job()

        # A paused job has no next_run_time; resuming restores it.
        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/pause')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNone(job.get('next_run_time'))

        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/resume')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('next_run_time'))

    def test_pause_and_resume_job_not_found(self):
        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/pause')
        self.assertEqual(response.status_code, 404)

        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/resume')
        self.assertEqual(response.status_code, 404)

    def test_run_job(self):
        self.__add_job()

        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/run')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('next_run_time'))

    def test_run_job_not_found(self):
        response = self.client.post(self.scheduler.api_prefix + '/jobs/job1/run')
        self.assertEqual(response.status_code, 404)

    def __add_job(self):
        """Create a standard interval job via the API and return the response body."""
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'interval',
            'minutes': 10,
        }

        response = self.client.post(self.scheduler.api_prefix + '/jobs', data=json.dumps(job))
        return json.loads(response.get_data(as_text=True))
Exemplo n.º 14
0
import os
from flask import Flask
from flask_apscheduler import APScheduler


class Config(object):
    """Flask / Flask-APScheduler configuration.

    Twilio credentials are read from the environment at class-creation
    time and may be None when the variables are unset.
    """
    SCHEDULER_API_ENABLED = True
    SCHEDULER_TIMEZONE = 'UTC'
    TWILIO_ACCOUNT_SID = os.environ.get('TWILIO_ACCOUNT_SID')
    TWILIO_AUTH_TOKEN = os.environ.get('TWILIO_AUTH_TOKEN')


application = Flask(__name__)
application.config.from_object(Config())

# The scheduler starts as soon as this module is imported.
scheduler = APScheduler()
scheduler.init_app(application)
scheduler.start()


@application.route('/', methods=['GET'])
def hello_world():
    """Trivial health-check endpoint."""
    return "Hello World!"


if __name__ == '__main__':
    application.run()
Exemplo n.º 15
0
def load(app):
    """Register the Aliyun-instance CTFd plugin on *app*.

    Wires up the challenge class, plugin assets, admin/user blueprints, a
    dedicated rotating log, and a periodic APScheduler job that destroys
    expired ECS instances. An exclusive file lock ensures only one worker
    process runs the cleanup scheduler.
    """
    # upgrade()
    app.db.create_all()
    CHALLENGE_CLASSES["Aliyun_Instance_Challenge"] = AliyunInstanceChallenge
    register_plugin_assets_directory(
        app, base_path="/plugins/aliyun-instance/assets/")
    ali_blueprint = Blueprint("aliyun-instance",
                              __name__,
                              template_folder="templates",
                              static_folder="assets",
                              url_prefix="/plugins/aliyun-instance")

    # Dedicated log file with rotation; failure to create it is non-fatal.
    log_dir = app.config["LOG_FOLDER"]
    logger_ali = logging.getLogger("aliyun-instance")
    logger_ali.setLevel(logging.INFO)
    logs = {
        "aliyun-instance": os.path.join(log_dir, "aliyun-instance.log"),
    }
    try:
        for log in logs.values():
            if not os.path.exists(log):
                open(log, "a").close()
        container_log = logging.handlers.RotatingFileHandler(
            logs["aliyun-instance"], maxBytes=10000)
        logger_ali.addHandler(container_log)
    except IOError:
        pass

    stdout = logging.StreamHandler(stream=sys.stdout)
    logger_ali.addHandler(stdout)
    logger_ali.propagate = False  # don't duplicate records to the root logger

    @ali_blueprint.route('/admin/settings', methods=['GET'])
    @admins_only
    def admin_list_configs():
        """List plugin settings."""
        configs = DBUtils.get_all_configs()
        return render_template('aliyun_configs.html', configs=configs)

    @ali_blueprint.route('/admin/settings', methods=['PATCH'])
    @admins_only
    def admin_save_configs():
        """Save plugin settings posted as a JSON object."""
        req = request.get_json()
        DBUtils.save_all_configs(req.items())
        return jsonify({'success': True})

    @ali_blueprint.route("/admin/containers", methods=['GET'])
    @admins_only
    def admin_list_containers():
        """List alive instances, 50 per page."""
        mode = utils.get_config("user_mode")
        configs = DBUtils.get_all_configs()
        page = abs(request.args.get("page", 1, type=int))
        results_per_page = 50
        page_start = results_per_page * (page - 1)
        page_end = page_start + results_per_page

        count = DBUtils.get_all_alive_instance_count()
        containers = DBUtils.get_all_alive_instance_page(page_start, page_end)

        # Ceiling division avoids float rounding for large counts.
        pages = (count + results_per_page - 1) // results_per_page

        return render_template("aliyun_containers.html",
                               containers=containers,
                               pages=pages,
                               curr_page=page,
                               curr_page_start=page_start,
                               configs=configs,
                               mode=mode)

    @ali_blueprint.route("/admin/containers", methods=['PATCH'])
    @admins_only
    def admin_expired_instance():
        """Force-expire a user's instance for a given challenge."""
        user_id = request.args.get('user_id')
        challenge_id = request.args.get('challenge_id')
        ControlUtil.expired_instance(user_id=user_id,
                                     challenge_id=challenge_id)
        return jsonify({'success': True})

    @ali_blueprint.route("/admin/containers", methods=['DELETE'])
    @admins_only
    def admin_delete_container():
        """Destroy a user's instance immediately."""
        user_id = request.args.get('user_id')
        ControlUtil.destroy_instance(user_id)
        return jsonify({'success': True})

    # instances
    @ali_blueprint.route('/container', methods=['GET'])
    @authed_only
    def list_container():
        """Return the caller's running instance for a challenge, if any."""
        try:
            user_id = get_mode()
            challenge_id = request.args.get('challenge_id')
            ControlUtil.check_challenge(challenge_id, user_id)
            data = ControlUtil.get_instance(user_id=user_id)
            if data is not None:
                # An instance for a different challenge is reported as none.
                if int(data.challenge_id) != int(challenge_id):
                    return jsonify({})

                dynamic_aliyun_challenge = AliyunChallenge.query \
                    .filter(AliyunChallenge.id == data.challenge_id) \
                    .first_or_404()

                # NOTE(review): ".seconds" ignores whole days of elapsed
                # time; ".total_seconds()" is probably intended -- confirm
                # instances never live longer than a day.
                return jsonify({
                    'success': True,
                    'type': 'redirect',
                    'ip': data.ip,
                    'remaining_time':
                        3600 -
                        (datetime.datetime.utcnow() - data.start_time).seconds
                })
            else:
                return jsonify({'success': True})
        except Exception as e:
            logging.exception(e)
            return jsonify({'success': False, 'msg': str(e)})

    @ali_blueprint.route('/container', methods=['POST'])
    @authed_only
    def new_instance():
        """Boot a new instance for the caller, subject to rate and count limits."""
        try:
            user_id = get_mode()

            if ControlUtil.frequency_limit():
                return jsonify({
                    'success': False,
                    'msg': 'Frequency limit, You should wait at least 1 min.'
                })
            # check whether exist container before
            existContainer = ControlUtil.get_instance(user_id)
            if existContainer:
                return jsonify({
                    'success': False,
                    'msg': 'You have boot {} before.'.format(
                        existContainer.challenge.name)
                })
            else:
                challenge_id = request.args.get('challenge_id')
                ControlUtil.check_challenge(challenge_id, user_id)
                configs = DBUtils.get_all_configs()
                current_count = DBUtils.get_all_alive_instance_count()
                # Global cap on concurrent ECS instances, if configured.
                if configs.get("aliyun_max_ECS_count") != "None":
                    if int(configs.get("aliyun_max_ECS_count")) <= int(
                            current_count):
                        return jsonify({
                            'success': False,
                            'msg': 'Max container count exceed.'
                        })

                dynamic_aliyun_challenge = AliyunChallenge.query \
                    .filter(AliyunChallenge.id == challenge_id) \
                    .first_or_404()
                try:
                    result = ControlUtil.new_instance(
                        user_id=user_id, challenge_id=challenge_id)
                    if isinstance(result, bool):
                        return jsonify({'success': True})
                    else:
                        return jsonify({'success': False, 'msg': str(result)})
                except Exception as e:
                    # BUG FIX: concatenating the exception object itself
                    # raised TypeError (str + Exception); use str(e).
                    # NOTE(review): 'success': True on a failure path looks
                    # wrong but is preserved -- confirm against the frontend.
                    return jsonify({
                        'success': True,
                        'msg':
                        'Failed when launch instance, please contact with the admin.'
                        + str(e)
                    })
        except Exception as e:
            return jsonify({'success': False, 'msg': str(e)})

    @ali_blueprint.route('/container', methods=['DELETE'])
    @authed_only
    def destroy_instance():
        """Destroy the caller's instance, subject to the rate limit."""
        user_id = get_mode()

        if ControlUtil.frequency_limit():
            return jsonify({
                'success': False,
                'msg': 'Frequency limit, You should wait at least 1 min.'
            })

        if ControlUtil.destroy_instance(user_id):
            return jsonify({'success': True})
        else:
            return jsonify({
                'success': False,
                'msg':
                'Failed when destroy instance, please contact with the admin!'
            })

    @ali_blueprint.route('/container', methods=['PATCH'])
    @authed_only
    def renew_instance():
        """Extend the caller's instance lifetime, up to the renewal cap."""
        user_id = get_mode()
        if ControlUtil.frequency_limit():
            return jsonify({
                'success': False,
                'msg': 'Frequency limit, You should wait at least 1 min.'
            })

        configs = DBUtils.get_all_configs()
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id, user_id)
        aliyun_max_renew_count = int(configs.get("aliyun_max_renew_count"))
        container = ControlUtil.get_instance(user_id)
        if container is None:
            return jsonify({'success': False, 'msg': 'Instance not found.'})
        if container.renew_count >= aliyun_max_renew_count:
            return jsonify({
                'success': False,
                'msg': 'Max renewal times exceed.'
            })

        ControlUtil.expired_instance(user_id=user_id,
                                     challenge_id=challenge_id)

        return jsonify({'success': True})

    def auto_clean_container():
        # Runs every 5 minutes; destroys instances whose lifetime expired.
        with app.app_context():
            results = DBUtils.get_all_expired_instance()
            for r in results:
                ControlUtil.destroy_instance(r.user_id)

            FrpUtils.update_frp_redirect()

    app.register_blueprint(ali_blueprint)

    try:
        # Exclusive, non-blocking lock: only the first worker to grab it
        # starts the cleanup scheduler; the others hit IOError and skip.
        lock_file = open("/tmp/aliyun-instance.lock", "w")
        lock_fd = lock_file.fileno()
        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)

        scheduler = APScheduler()
        scheduler.init_app(app)
        scheduler.start()
        scheduler.add_job(id='aliyun-instance-auto-clean',
                          func=auto_clean_container,
                          trigger="interval",
                          seconds=300)

        print("[CTFd Ali-ECS]Started successfully")
    except IOError:
        pass
Exemplo n.º 16
0
# Validators for comma-separated identifier lists supplied by clients.
# Semantics of each code format are inferred from the patterns only --
# confirm against the routes that use them.
REGEXNUMSTRING = r"^[a-zA-Z]{1,10}\-[0-9]{1,10}(?:,[a-zA-Z]{1,10}\-[0-9]{1,10})*$"
REGEXCIDSTRING = r"^[\w]{3,20}(?:,[\w]{3,20})*$"
REGEXMGSSTRING = r"^(?:\d{3})?[a-zA-Z]{2,6}-\d{3,5}(?:,(?:\d{3})?[a-zA-Z]{2,6}-\d{3,5})*$"
REGEXPIDSTRING = r"^[\w]{3,25}(?:,[\w]{3,25})*$"
# Pre-compiled once at import time; CID restricts \w to ASCII.
REGEXNUM = re.compile(REGEXNUMSTRING)
REGEXCID = re.compile(REGEXCIDSTRING, flags=re.ASCII)
REGEXMGS = re.compile(REGEXMGSSTRING)
REGEXPID = re.compile(REGEXPIDSTRING)


# Flask-Login: unauthenticated users are redirected to the "login" view.
LOGIN_MANAGER = LoginManager()
LOGIN_MANAGER.login_view = "login"
LOGIN_MANAGER.init_app(APP)

# Background job scheduler, started at import time.
SCHEDULER = APScheduler()
SCHEDULER.init_app(APP)
SCHEDULER.start()

# Socket.IO with eventlet workers; CORS open to all origins.
ASYNC_MODE = 'eventlet'
THREAD_LOCK = Lock()
SOCKETIO = SocketIO(async_mode=ASYNC_MODE)
SOCKETIO.init_app(APP,cors_allowed_origins="*")


class Task:
    """Mutable holder for the state of a background scraping/push task."""

    def __init__(self):
        # Identifier of the current task.
        self.task_id = 0
        # Position within the queue being processed.
        self.task_index = 0
        # Pending work items (FIFO).
        self.task_queue = deque()
        # Set once the push-log phase has completed.
        self.pushlog_finished = False
        # Handle to the worker thread, when one is running.
        self.background_thread = None
Exemplo n.º 17
0
def initSchedule():
    """Configure the global ``app`` for scheduled jobs and start APScheduler.

    Returns the started scheduler so callers can add or inspect jobs later
    (previously the instance was discarded, making any subsequent job
    management impossible). Returning it is backward-compatible: existing
    callers that ignore the return value are unaffected.
    """
    app.config.from_object(Config())
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.start()
    print("Scheduler Initialized\n")
    return scheduler
Exemplo n.º 18
0
    def run(self, isDaemon=False):
        """Configure and launch the RF web application (blocking call).

        Sets up logging, publishes RF.conf sections to the feature broker,
        then builds the Flask app with scheduler, controllers, database and
        the RF sniffer before serving HTTP until exit.

        :param isDaemon: when True, Flask runs with debug mode disabled.
        """
        ####################################################
        #  Setup Logging
        ####################################################
        self.setup_logging()

        ####################################################
        #  Setup Configuration
        ####################################################
        # NOTE(review): SafeConfigParser is deprecated (removed in Python
        # 3.12) in favour of ConfigParser -- confirm the target runtime.
        parser = SafeConfigParser()
        parser.read('RF.conf')
        # Publish each config section to the feature broker as conf_<name>.
        for section_name in parser.sections():
            featureBroker.features.Provide(
                'conf_{name}'.format(name=section_name),
                dict(parser.items(section_name)))

        ####################################################
        #  Start Flask
        ####################################################

        app.web = Flask("__main__")

        # Create Scheduler
        # Guard avoids double-starting the scheduler in the Werkzeug
        # reloader's parent process.
        if not app.web.debug or os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
            app.web.config['SCHEDULER_VIEWS_ENABLED'] = True
            # Two job stores: in-memory default plus a persistent SQLite store.
            app.web.config['SCHEDULER_JOBSTORES'] = {
                'default':
                MemoryJobStore(),
                'db':
                SQLAlchemyJobStore(
                    url='sqlite:///' +
                    featureBroker.RequiredFeature('conf_DB').result['jobs'])
            }
            scheduler = APScheduler()
            # Every provided 'job' feature registers itself on the scheduler.
            jobs = featureBroker.RequiredFeatures(
                'job', featureBroker.HasMethods('register')).result
            for job in jobs:
                job.register(scheduler.scheduler)
            scheduler.init_app(app.web)
            scheduler.start()
            featureBroker.features.Provide('scheduler', scheduler.scheduler)

        # Register Controllers
        controllers = featureBroker.RequiredFeatures(
            'controller', featureBroker.HasMethods('register')).result
        for controller in controllers:
            controller.register(app.web)

        # Set secret Key
        app.web.secret_key = 'A0ew:DE~7/T6yA^8vqNgjVB5tZr98j/3yX R~XHH!jmew:DE~7/T6yA^8vqNgjVB5tN]LWX/,?RT'

        # Flask-SQLAlchemy
        app.web.config[
            'SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + featureBroker.RequiredFeature(
                'conf_DB').result['rf']
        app.db = SQLAlchemy(app.web)
        entities.Base.metadata.create_all(app.db.engine, checkfirst=True)

        # RF Sniffer
        # Same reloader guard as above so only one sniffer exists.
        if not app.web.debug or os.environ.get('WERKZEUG_RUN_MAIN') == 'true':
            sniffer = rfSniffer()

        # Blocking call; serves on all interfaces at the configured port.
        app.web.run(host='0.0.0.0',
                    port=parser.getint("Web", "port"),
                    debug=(not isDaemon))
Exemplo n.º 19
0
def schedulerStart():
    """Create an APScheduler bound to the global ``app`` and start it."""
    job_scheduler = APScheduler()
    job_scheduler.init_app(app)
    job_scheduler.start()
Exemplo n.º 20
0
class TestScheduler(TestCase):
    """Unit tests for the flask_apscheduler ``APScheduler`` wrapper.

    Each test gets a fresh Flask app plus an uninitialised scheduler from
    setUp; ``scheduler_two`` only verifies that ``APScheduler(app=...)``
    is accepted by the constructor.
    """

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler_two = APScheduler(app=self.app)

    def test_running(self):
        # The underlying scheduler can start even without init_app().
        self.assertFalse(self.scheduler.running)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)

    def test_start_with_allowed_hosts(self):
        # Current hostname is not in the allowed list, so start is refused.
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = ['any_server_name']
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertFalse(self.scheduler.running)

    def test_start_without_allowed_hosts(self):
        # An empty allowed-hosts list also prevents the scheduler starting.
        self.app.config['SCHEDULER_ALLOWED_HOSTS'] = []
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertFalse(self.scheduler.running)

    def test_shutdown(self):
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)

    def test_load_jobs_from_config(self):
        # Jobs declared in app.config['JOBS'] are loaded during init_app;
        # no explicit start() is needed for get_job() to see them.
        self.app.config['JOBS'] = [{
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'interval',
            'seconds': 10,
        }]
        self.app.config['SCHEDULER_JOBSTORES'] = {
            "default": apscheduler.jobstores.memory.MemoryJobStore()
        }
        self.app.config['SCHEDULER_EXECUTORS'] = {
            "default": {
                "type": "threadpool"
            }
        }
        self.app.config['SCHEDULER_JOB_DEFAULTS'] = {"coalesce": True}
        self.app.config['SCHEDULER_TIMEZONE'] = utc

        self.scheduler.init_app(app=self.app)
        job = self.scheduler.get_job('job1')
        self.assertIsNotNone(job)

    def test_task_decorator(self):
        # The task decorator registers the job on the scheduler directly,
        # before (and without) init_app.
        @self.scheduler.task('interval', seconds=10, id='job1')
        def decorated_job():
            pass

        job = self.scheduler.get_job('job1')
        self.assertIsNotNone(job)

    def test_state_prop(self):
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertTrue(self.scheduler.state)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.state)

    def test_scheduler_prop(self):
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertIsNotNone(self.scheduler.scheduler)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)

    def test_pause_resume(self):
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)
        self.scheduler.pause()
        # NOTE(review): 2/1 appear to correspond to apscheduler's
        # STATE_PAUSED/STATE_RUNNING constants — confirm against the
        # installed apscheduler version.
        self.assertTrue(self.scheduler.state == 2)
        self.scheduler.resume()
        self.assertTrue(self.scheduler.state == 1)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)

    def test_add_listener(self):
        # Only exercises that add/remove accept a (None) callback without
        # raising; no events are asserted.
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.assertTrue(self.scheduler.running)
        self.scheduler.add_listener(None)
        self.scheduler.remove_listener(None)
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)

    def test_add_remove_job(self):
        @self.scheduler.task('interval', seconds=10, id='job1')
        def decorated_job():
            pass

        self.scheduler.init_app(self.app)
        self.scheduler.start()
        job = self.scheduler.get_job('job1')
        self.assertIsNotNone(job)

        self.scheduler.remove_job('job1')
        self.assertFalse(self.scheduler.get_job('job1'))
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)

    def test_add_delete_job(self):
        # delete_job is exercised as an alias of remove_job.
        @self.scheduler.task('interval', seconds=10, id='job1')
        def decorated_job():
            pass

        self.scheduler.init_app(self.app)
        self.scheduler.start()
        job = self.scheduler.get_job('job1')
        self.assertIsNotNone(job)

        self.scheduler.delete_job('job1')
        self.assertFalse(self.scheduler.get_job('job1'))
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)

    def test_add_remove_all_jobs(self):
        @self.scheduler.task('interval', hours=1, id='job1')
        def decorated_job():
            pass

        @self.scheduler.task('interval', hours=1, id='job2')
        def decorated_job2():
            pass

        self.scheduler.init_app(self.app)
        self.scheduler.start()
        jobs = self.scheduler.get_jobs()
        self.assertTrue(len(jobs) == 2)
        self.scheduler.remove_all_jobs()

        self.assertFalse(self.scheduler.get_job('job1'))
        self.assertFalse(self.scheduler.get_job('job2'))

        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)

    def test_add_delete_all_jobs(self):
        @self.scheduler.task('interval', hours=1, id='job1')
        def decorated_job():
            pass

        @self.scheduler.task('interval', hours=1, id='job2')
        def decorated_job2():
            pass

        self.scheduler.init_app(self.app)
        self.scheduler.start()
        jobs = self.scheduler.get_jobs()
        self.assertTrue(len(jobs) == 2)
        self.scheduler.delete_all_jobs()

        self.assertFalse(self.scheduler.get_job('job1'))
        self.assertFalse(self.scheduler.get_job('job2'))

        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)

    def test_job_to_dict(self):
        # A job with several trigger fields set should serialise to a
        # non-empty dict via utils.job_to_dict.
        @self.scheduler.task('interval',
                             hours=1,
                             id='job1',
                             end_date=datetime.datetime.now(),
                             weeks=1,
                             days=1,
                             seconds=99)
        def decorated_job():
            pass

        self.scheduler.init_app(self.app)
        self.scheduler.start()
        job = self.scheduler.get_job('job1')
        self.assertIsNotNone(job)

        self.assertTrue(len(utils.job_to_dict(job)))
        self.scheduler.delete_job('job1')
        self.assertFalse(self.scheduler.get_job('job1'))
        self.scheduler.shutdown()
        self.assertFalse(self.scheduler.running)
            "func": "timedTask:job_1",
            "args": (3, 4),
            "trigger": "interval",
            "seconds": 5  # 不指定时间每5S执行一次
        }
    ]

    SCHEDULER_API_ENABLED = True


def get_user_table():
    """Regenerate the user table by running the getUserInfo.py helper script.

    Side-effect only: spawns a Python 2 interpreter on the script and
    returns nothing; the child's exit status is intentionally ignored
    (best-effort, matching the previous os.system behaviour).
    """
    import subprocess  # local import: keeps this fix self-contained

    filename = "./getUserInfo.py"
    # An argument list with shell=False (the default) avoids shell
    # interpretation of the path, unlike os.system("python2 " + filename).
    subprocess.run(["python2", filename])


def job_1(a, b):
    """Scheduled job: print the sum of *a* and *b*.

    Fixed: the original used a Python 2 ``print`` statement
    (``print a + b``), which is a SyntaxError under Python 3.
    """
    print(a + b)


if __name__ == '__main__':
    # Stand-alone demo: build the app, load scheduler config, run forever.
    app = Flask(__name__)
    app.config.from_object(Config())

    scheduler = APScheduler()  # create the scheduled-task manager

    # scheduler.api_enabled = True
    scheduler.init_app(app)  # bind the scheduler to the Flask instance
    scheduler.start()  # start scheduling; on every restart each job runs once, then the interval timer begins

    app.run(host="0.0.0.0", port=8081, debug=True, threaded=True)
Exemplo n.º 22
0
def create_app(testing=False):
    """Create and fully configure the Flask application.

    Args:
        testing: when True, loads ``test.conf.py`` and skips the Kubernetes
            client, the scheduler and any external side effects.

    Returns:
        The configured :class:`flask.Flask` instance.

    Raises:
        ValueError: when the app is neither in testing, development nor
            production mode.
    """
    # create and configure the app
    app = Flask(__name__, instance_relative_config=True)

    # Load conf depending on the mode
    if testing:
        app.config.from_pyfile("test.conf.py", silent=False)
    elif app.env == "development":
        app.config.from_pyfile("back.conf.py", silent=False)
    elif app.env == "production":
        app.config.from_pyfile("/etc/config/back.conf.py", silent=False)
    else:
        raise ValueError("App mode unknow: not in dev|prod|test")

    # K8sManager configuration changes with app mode
    app.k8s = None
    if not testing:
        if app.env == "development":
            # Out-of-cluster access: explicit host + bearer token; TLS
            # verification disabled for local development only.
            k8s_configuration = kubernetes.client.Configuration()
            k8s_configuration.verify_ssl = False
            k8s_configuration.debug = False
            k8s_configuration.host = app.config["K8S_HOST"]
            k8s_configuration.api_key["authorization"] = app.config[
                "K8S_API_KEY"]
            k8s_configuration.api_key_prefix["authorization"] = "Bearer"

            app.k8s = kubernetes.client.ApiClient(k8s_configuration)
        else:
            kubernetes.config.load_incluster_config()
            app.k8s = kubernetes.client.ApiClient()

    app.mongo = PyMongo(app)

    if not testing:
        #  Scheduler to kill old challenges
        if (not app.debug or os.environ.get("WERKZEUG_RUN_MAIN")
                == "true"):  # avoid double task exec in dev
            scheduler = APScheduler()
            scheduler.init_app(app)
            scheduler.start()

            # BUGFIX: this task registration previously sat OUTSIDE the
            # conditional above, so `scheduler` was unbound (NameError)
            # whenever the Werkzeug-reloader guard skipped scheduler
            # creation. It must only run when the scheduler exists.
            @scheduler.task(
                "cron",
                id="do_stop_old_participation",
                hour="*/{}".format(
                    app.config["CHECK_OLD_CHALLENGES_INTERVAL_HOURS"]),
            )
            def stop_old():
                with app.app_context():
                    stop_old_participations()

    login_manager = LoginManager()
    login_manager.init_app(app)
    app.logger = create_logger(app)

    @login_manager.user_loader
    def load_user(user_id):
        # Resolve the session's user id back to a user object.
        return UserManager().get(user_id)

    @app.errorhandler(EmmentalException)
    def handle_emmental_exception(e):
        # Domain errors: log internals, expose only the external message.
        app.logger.error(traceback.format_exc())
        app.logger.error(e.internal_message)
        response = jsonify({
            "error_code": e.error_code,
            "error_message": e.external_message,
        })
        return response, e.status_code

    @app.errorhandler(Exception)
    def handle_exception(e):
        # Catch-all boundary: never leak internals to the client.
        app.logger.error(traceback.format_exc())
        response = jsonify({
            "error_code": -1,
            "error_message": "Unknown Error",
        })
        return response, 500

    app.register_blueprint(users)
    app.register_blueprint(challenges)
    app.register_blueprint(challenge_categories)
    app.register_blueprint(challenge_participations)

    @app.route("/config")
    def config():
        # Lightweight bootstrap endpoint for the frontend.
        res = {
            "version": "0.0.1",
            "isAuthenticated": current_user.is_authenticated,
            "env": app.env,
        }
        if current_user.is_authenticated:
            res.update({"currentUser": current_user.to_dict()})
        return jsonify(res)

    return app
Exemplo n.º 23
0
    message = json.loads(body)
    user = message['events'][0]['source']['userId']
    collect_user(user)
    message = message['events'][0]['message']['text']
    if message == '報時':
        time_report(True, request)
    elif message == '輿情':
        public_opinion(True, request)
    elif '關鍵字 ' in message:
        new = message.split(' ')[1]
        new_word(request, new)
    else:
        reply(request, '沒有這個指令喔~')
    return 'OK'


@app.route("/test")
def test():
    """Simple liveness-probe endpoint."""
    body = 'test ok~~~'
    return body


if __name__ == "__main__":
    # Stand-alone entry point: wire up the job list, then serve in debug mode.
    scheduler=APScheduler()  # instantiate APScheduler
    scheduler.init_app(app)  # load the job list into the flask app
    scheduler.start() # start the job list

    app.debug = True
    app.run()


Exemplo n.º 24
0
            # 'args': (1, 2),
            'trigger': 'interval',
            'seconds': 5
        }
    ]

    SCHEDULER_API_ENABLED = True

# Flask application and a not-yet-initialised scheduler; sche.init_app()
# is deferred to the __main__ guard at the bottom of this example.
app = Flask(__name__)
sche = APScheduler()
@app.route('/')
def hello():
    """Root endpoint: static greeting."""
    greeting = "hello world"
    return greeting

@app.route('/add')
def add_myjob():
    """Register my_jobs:job2 to run every 2 seconds on the live scheduler."""
    # app.apscheduler is attached by flask_apscheduler's init_app().
    app.apscheduler.add_job(func='my_jobs:job2', id='job2', trigger='interval', seconds=2)
    return 'add success'

@app.route('/add-cron')
def add_cron_job():
    """Register my_jobs:job3 on a cron trigger firing every 3 minutes."""
    app.apscheduler.add_job(func='my_jobs:job3', id='job3', trigger='cron', minute='*/3')
    return 'add success'

if __name__ == '__main__':
    print('Let us run out of the loop')
    # Pull JOBS / SCHEDULER_* settings from the Config object, then start.
    app.config.from_object(Config())
    sche.init_app(app)
    sche.start()

    app.run(host='0.0.0.0', debug=True)
Exemplo n.º 25
0
# NOTE(review): `flask` here appears to be a Flask *instance* named after
# the module (shadowing the flask package) — confirm against the lines
# above this chunk.
# Hourly rebuilds of the RSS/Atom feeds, plus two one-shot jobs (no
# trigger) that run once at startup.
flask.config['JOBS'] = [
        {
            'id': 'buildNewlyAddedRSSFeed',
            'func': 'server.views:buildNewlyAddedRSSFeed',
            'trigger': 'interval',
            'seconds': (60*60)
        },
        {
            'id': 'buildNewlyAddedAtomFeed',
            'func': 'server.views:buildNewlyAddedAtomFeed',
            'trigger': 'interval',
            'seconds': (60*60)
        },
        {
            'id': 'buildNewlyAddedRSSFeedAtStartup',
            'func': 'server.views:buildNewlyAddedRSSFeed'
        },
        {
            'id': 'buildNewlyAddedAtomFeedAtStartup',
            'func': 'server.views:buildNewlyAddedAtomFeed'
        }
    ]
flask.config['SCHEDULER_VIEWS_ENABLED'] = True
flask.debug = True

scheduler = APScheduler()
scheduler.init_app(flask)
scheduler.start()

if __name__ == '__main__':
    flask.run(host = '0.0.0.0', port = 8000)
Exemplo n.º 26
0
def scheduled_init(app):
    """Install the module-level ``jobs`` list on *app* and start a scheduler."""
    sched = APScheduler()
    # Jobs are read from app.config when init_app() runs.
    app.config['JOBS'] = jobs
    sched.api_enabled = True  # expose the scheduler's REST endpoints
    sched.init_app(app)
    sched.start()
Exemplo n.º 27
0
                if is_finished: t.status = 'COMPLETED'
        else:
            t.status = 'KILLED'

        if t.status != 'RUNNING':
            sub = 'Training {} COMPLETED'.format(t.name)
            msg = 'as title'
            send_mail(sub, t.email, msg)
            t.stop_at = datetime.datetime.now()
            db_session.commit()


# Module-level app + scheduler: jobs come from the Config object.
app = Flask(__name__)
app.config.from_object(Config())
cron = APScheduler()
cron.init_app(app)
cron.start()


@app.teardown_appcontext
def shutdown_session(exception=None):
    """Remove the scoped db_session when each app context tears down."""
    db_session.remove()


@app.route('/')
def index():
    """Render the landing page."""
    return render_template('index.html')


@app.route('/trainings/new/', methods=['GET', 'POST'])
def trainings_new():
Exemplo n.º 28
0
    print(filename)
    root_dir = os.path.dirname(os.getcwd())
    return send_from_directory(os.path.join(root_dir, 'static', ''), filename)


# @app.route('/topwords')
# def topwords():

# @app.route('/top_yaks')
# def topwords():

if __name__ == '__main__':

    # One-time bootstrap: database, seed data, initial scrape.
    init_db()
    db = connect_db()
    db.text_factory = str
    #generates the colleges to pull yaks from
    print("Generating colleges")
    generateColleges(db)
    print("Generated colleges")
    #pulls initial set of yaks from each college
    print("Updating Yaks")
    updateYaks()
    print("Updated Yaks")
    getJson()

    # Periodic refresh jobs come from the app's scheduler config.
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.start()
    app.run(debug=False)
Exemplo n.º 29
0
class TestAPI(TestCase):
    """Integration tests for flask_apscheduler's REST API endpoints.

    setUp builds a running scheduler with the API enabled and a Flask test
    client; ``__add_job`` is the shared fixture for tests that need an
    existing job.
    """

    def setUp(self):
        self.app = Flask(__name__)
        self.scheduler = APScheduler()
        self.scheduler.api_enabled = True
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        self.client = self.app.test_client()

    def test_scheduler_info(self):
        response = self.client.get('/scheduler')
        self.assertEqual(response.status_code, 200)
        info = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(info['current_host'])
        self.assertEqual(info['allowed_hosts'], ['*'])
        self.assertTrue(info['running'])

    def test_add_job(self):
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }

        response = self.client.post('/scheduler/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 200)

        # The API echoes the created job back; it should round-trip.
        job2 = json.loads(response.get_data(as_text=True))

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('run_date'), job2.get('run_date'))

    def test_add_conflicted_job(self):
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'date',
            'run_date': '2020-12-01T12:30:01+00:00',
        }

        response = self.client.post('/scheduler/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 200)

        # Same id again -> 409 Conflict.
        response = self.client.post('/scheduler/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 409)

    def test_add_invalid_job(self):
        job = {
            'id': None,
        }

        response = self.client.post('/scheduler/jobs', data=json.dumps(job))
        self.assertEqual(response.status_code, 500)

    def test_delete_job(self):
        self.__add_job()

        response = self.client.delete('/scheduler/jobs/job1')
        self.assertEqual(response.status_code, 204)

        response = self.client.get('/scheduler/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_delete_job_not_found(self):
        response = self.client.delete('/scheduler/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_get_job(self):
        job = self.__add_job()

        response = self.client.get('/scheduler/jobs/job1')
        self.assertEqual(response.status_code, 200)

        job2 = json.loads(response.get_data(as_text=True))

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('minutes'), job2.get('minutes'))

    def test_get_job_not_found(self):
        response = self.client.get('/scheduler/jobs/job1')
        self.assertEqual(response.status_code, 404)

    def test_get_all_jobs(self):
        job = self.__add_job()

        response = self.client.get('/scheduler/jobs')
        self.assertEqual(response.status_code, 200)

        jobs = json.loads(response.get_data(as_text=True))

        self.assertEqual(len(jobs), 1)

        job2 = jobs[0]

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(job.get('trigger'), job2.get('trigger'))
        self.assertEqual(job.get('minutes'), job2.get('minutes'))

    def test_update_job(self):
        job = self.__add_job()

        # Switch the interval job to a cron trigger with a start date.
        data_to_update = {
            'args': [1],
            'trigger': 'cron',
            'minute': '*/1',
            'start_date': '2021-01-01'
        }

        response = self.client.patch('/scheduler/jobs/job1',
                                     data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 200)

        job2 = json.loads(response.get_data(as_text=True))

        self.assertEqual(job.get('id'), job2.get('id'))
        self.assertEqual(job.get('func'), job2.get('func'))
        self.assertEqual(data_to_update.get('args'), job2.get('args'))
        self.assertEqual(data_to_update.get('trigger'), job2.get('trigger'))
        self.assertEqual('2021-01-01T00:00:00+00:00', job2.get('start_date'))
        self.assertEqual('2021-01-01T00:00:00+00:00',
                         job2.get('next_run_time'))

    def test_update_job_not_found(self):
        data_to_update = {
            'args': [1],
            'trigger': 'cron',
            'minute': '*/1',
            'start_date': '2021-01-01'
        }

        response = self.client.patch('/scheduler/jobs/job1',
                                     data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 404)

    def test_update_invalid_job(self):
        self.__add_job()

        data_to_update = {
            'trigger': 'invalid_trigger',
        }

        response = self.client.patch('/scheduler/jobs/job1',
                                     data=json.dumps(data_to_update))
        self.assertEqual(response.status_code, 500)

    def test_pause_and_resume_job(self):
        self.__add_job()

        # A paused job has no next_run_time; resuming restores it.
        response = self.client.post('/scheduler/jobs/job1/pause')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNone(job.get('next_run_time'))

        response = self.client.post('/scheduler/jobs/job1/resume')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('next_run_time'))

    def test_pause_and_resume_job_not_found(self):
        response = self.client.post('/scheduler/jobs/job1/pause')
        self.assertEqual(response.status_code, 404)

        response = self.client.post('/scheduler/jobs/job1/resume')
        self.assertEqual(response.status_code, 404)

    def test_run_job(self):
        self.__add_job()

        response = self.client.post('/scheduler/jobs/job1/run')
        self.assertEqual(response.status_code, 200)
        job = json.loads(response.get_data(as_text=True))
        self.assertIsNotNone(job.get('next_run_time'))

    def test_run_job_not_found(self):
        response = self.client.post('/scheduler/jobs/job1/run')
        self.assertEqual(response.status_code, 404)

    def __add_job(self):
        # Shared fixture: create a 10-minute interval job via the API and
        # return the API's JSON view of it.
        job = {
            'id': 'job1',
            'func': 'tests.test_api:job1',
            'trigger': 'interval',
            'minutes': 10,
        }

        response = self.client.post('/scheduler/jobs', data=json.dumps(job))
        return json.loads(response.get_data(as_text=True))
Exemplo n.º 30
0
from flask import Flask
from flask_apscheduler import APScheduler


class Config(object):
    """Flask config: one interval job printing via __main__:job1 every 10s.

    NOTE(review): SCHEDULER_VIEWS_ENABLED is the legacy flask-apscheduler
    key; newer releases use SCHEDULER_API_ENABLED — confirm the installed
    version.
    """

    JOBS = [
        {
            'id': 'job1',
            'func': '__main__:job1',
            'args': (1, 2),
            'trigger': 'interval',
            'seconds': 10
        }
    ]

    SCHEDULER_VIEWS_ENABLED = True


def job1(a, b):
    """Demo job: print *a* and *b* separated by a single space."""
    # print() str()-ifies each argument and joins with sep=' ', which
    # matches the original manual str(a) + ' ' + str(b) concatenation.
    print(a, b)

# Script body: build the app, load the Config above, start the scheduler,
# then block in app.run().
app = Flask(__name__)
app.config.from_object(Config())
app.debug = True

scheduler = APScheduler()
scheduler.init_app(app)
scheduler.start()

app.run()
Exemplo n.º 31
0
@app.route('/help')
def renderHelp():
    """Render the static help page."""
    return render_template('help.html')


@app.route('/archive')
def renderArchive():
    """Render the archive page listing files under static/archive."""
    archived = os.listdir('static/archive')
    count = len(archived)
    # The template expects the list and its length under the original
    # keyword names ('stonklist' and 'len').
    return render_template('archive.html', len=count, stonklist=archived)


@app.route('/old')
def renderStatsOld():
    """Render the previous day's stats table with download links.

    NOTE(review): relies on module-level globals — ``statsOld`` (presumably
    a pandas DataFrame, given to_html) and ``czas`` (a timestamp) — defined
    outside this chunk; confirm.
    """
    return render_template('stats.html',
                           data=statsOld.to_html(
                               classes="table table-hover table-striped",
                               justify='center'),
                           time=czas,
                           simple='stonksSimpleOld.csv',
                           full='stonksFullOld.csv')


if __name__ == '__main__':
    # `sched` is a module-level scheduler created outside this chunk.
    app.config.from_object(Config())
    sched.init_app(app)
    sched.start()
    app.run()
Exemplo n.º 32
0
from flask_apscheduler import APScheduler
from resource.stockDB import start2
from flask_restful import Api

app = Flask(__name__)  # create the application object
api = Api(app)  # flask_restful API wrapper
aps = APScheduler()  # initialised later, in the __main__ guard


class schedulerConfig(object):
    """Scheduler config: run resource.stockDB:start2 every 5 seconds."""

    JOBS = [{
        'id': 'job1',
        'func': 'resource.stockDB:start2',
        'args': (),
        'trigger': 'interval',
        'seconds': 5
    }]
    # Expose flask-apscheduler's REST API endpoints.
    SCHEDULER_API_ENABLED = True


@app.route("/")
def index():
    """Root endpoint: static greeting."""
    return "hello world"


if __name__ == "__main__":
    # Load the job definitions, start the scheduler, then serve on port 80.
    app.debug = True
    app.config.from_object(schedulerConfig())
    aps.init_app(app)
    aps.start()
    app.run(host='0.0.0.0', port=80)
Exemplo n.º 33
0
def task():
    """Schedule the periodic auto_remove_ban job on the global ``app``."""
    sched = APScheduler()
    sched.init_app(app)
    # Run auto_remove_ban every 10 seconds; the job is registered before
    # the scheduler is started, exactly as before.
    sched.add_job(func=auto_remove_ban, trigger='interval', seconds=10, id='my_job_id')
    sched.start()
Exemplo n.º 34
0
def load(app):
    """CTFd plugin entry point for ctfd-whale.

    Registers the dynamic-docker challenge type, the admin/user container
    management routes, and a 10-second cleanup job that destroys expired
    containers.
    """
    # upgrade()
    app.db.create_all()
    CHALLENGE_CLASSES["dynamic_docker"] = DynamicValueDockerChallenge
    register_plugin_assets_directory(app,
                                     base_path="/plugins/ctfd-whale/assets/")

    page_blueprint = Blueprint("ctfd-whale",
                               __name__,
                               template_folder="templates",
                               static_folder="assets",
                               url_prefix="/plugins/ctfd-whale")

    # ---- admin routes ----

    @page_blueprint.route('/admin/settings', methods=['GET'])
    @admins_only
    def admin_list_configs():
        configs = DBUtils.get_all_configs()
        return render_template('config.html', configs=configs)

    @page_blueprint.route('/admin/settings', methods=['PATCH'])
    @admins_only
    def admin_save_configs():
        req = request.get_json()
        DBUtils.save_all_configs(req.items())
        return json.dumps({'success': True})

    @page_blueprint.route("/admin/containers", methods=['GET'])
    @admins_only
    def admin_list_containers():
        # Paginated listing of alive containers, 50 per page.
        configs = DBUtils.get_all_configs()
        page = abs(request.args.get("page", 1, type=int))
        results_per_page = 50
        page_start = results_per_page * (page - 1)
        page_end = results_per_page * (page - 1) + results_per_page

        count = DBUtils.get_all_alive_container_count()
        containers = DBUtils.get_all_alive_container_page(page_start, page_end)

        # Ceiling division written out: full pages plus a partial one.
        pages = int(count / results_per_page) + (count % results_per_page > 0)
        return render_template("containers.html",
                               containers=containers,
                               pages=pages,
                               curr_page=page,
                               curr_page_start=page_start,
                               configs=configs)

    @page_blueprint.route("/admin/containers", methods=['DELETE'])
    @admins_only
    def admin_delete_container():
        user_id = request.args.get('user_id')
        ControlUtil.remove_container(user_id)
        return json.dumps({'success': True})

    @page_blueprint.route("/admin/containers", methods=['PATCH'])
    @admins_only
    def admin_renew_container():
        user_id = request.args.get('user_id')
        challenge_id = request.args.get('challenge_id')
        DBUtils.renew_current_container(user_id=user_id,
                                        challenge_id=challenge_id)
        return json.dumps({'success': True})

    # ---- user routes ----

    @page_blueprint.route('/container', methods=['POST'])
    @authed_only
    def add_container():
        if ControlUtil.frequency_limit():
            return json.dumps({
                'success':
                False,
                'msg':
                'Frequency limit, You should wait at least 1 min.'
            })

        # A user has at most one container: drop any existing one first.
        user_id = current_user.get_current_user().id
        ControlUtil.remove_container(user_id)
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id, user_id)

        configs = DBUtils.get_all_configs()
        current_count = DBUtils.get_all_alive_container_count()
        if int(configs.get("docker_max_container_count")) <= int(
                current_count):
            return json.dumps({
                'success': False,
                'msg': 'Max container count exceed.'
            })

        dynamic_docker_challenge = DynamicDockerChallenge.query \
            .filter(DynamicDockerChallenge.id == challenge_id) \
            .first_or_404()
        flag = "flag{" + str(uuid.uuid4()) + "}"
        if dynamic_docker_challenge.redirect_type == "http":
            ControlUtil.add_container(user_id=user_id,
                                      challenge_id=challenge_id,
                                      flag=flag)
        else:
            # Direct-redirect challenges need a free frp port: retry random
            # picks in the configured range until an unused one is found.
            port = random.randint(int(configs.get("frp_direct_port_minimum")),
                                  int(configs.get("frp_direct_port_maximum")))
            while True:
                if DBUtils.get_container_by_port(port) is None:
                    break
                port = random.randint(
                    int(configs.get("frp_direct_port_minimum")),
                    int(configs.get("frp_direct_port_maximum")))
            ControlUtil.add_container(user_id=user_id,
                                      challenge_id=challenge_id,
                                      flag=flag,
                                      port=port)

        return json.dumps({'success': True})

    @page_blueprint.route('/container', methods=['GET'])
    @authed_only
    def list_container():
        user_id = current_user.get_current_user().id
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id, user_id)
        data = DBUtils.get_current_containers(user_id=user_id)
        configs = DBUtils.get_all_configs()
        domain = configs.get('frp_http_domain_suffix', "")
        if data is not None:
            if int(data.challenge_id) != int(challenge_id):
                return json.dumps({})
            dynamic_docker_challenge = DynamicDockerChallenge.query \
                .filter(DynamicDockerChallenge.id == data.challenge_id) \
                .first_or_404()
            # NOTE(review): container lifetime is hard-coded to 3600s here;
            # `.seconds` (not total_seconds()) wraps after 24h — acceptable
            # only because containers expire well before that. Confirm.
            if dynamic_docker_challenge.redirect_type == "http":
                return json.dumps({
                    'success':
                    True,
                    'type':
                    'http',
                    'domain':
                    data.uuid + domain,
                    'remaining_time':
                    3600 - (datetime.now() - data.start_time).seconds
                })
            else:
                return json.dumps({
                    'success':
                    True,
                    'type':
                    'redirect',
                    'ip':
                    configs.get('frp_direct_ip_address', ""),
                    'port':
                    data.port,
                    'remaining_time':
                    3600 - (datetime.now() - data.start_time).seconds
                })
        else:
            return json.dumps({'success': True})

    @page_blueprint.route('/container', methods=['DELETE'])
    @authed_only
    def remove_container():
        if ControlUtil.frequency_limit():
            return json.dumps({
                'success':
                False,
                'msg':
                'Frequency limit, You should wait at least 1 min.'
            })

        user_id = current_user.get_current_user().id
        if ControlUtil.remove_container(user_id):
            return json.dumps({'success': True})
        else:
            return json.dumps({
                'success':
                False,
                'msg':
                'Failed when destroy instance, please contact admin!'
            })

    @page_blueprint.route('/container', methods=['PATCH'])
    @authed_only
    def renew_container():
        if ControlUtil.frequency_limit():
            return json.dumps({
                'success':
                False,
                'msg':
                'Frequency limit, You should wait at least 1 min.'
            })

        configs = DBUtils.get_all_configs()
        user_id = current_user.get_current_user().id
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id, user_id)
        docker_max_renew_count = int(configs.get("docker_max_renew_count"))
        container = DBUtils.get_current_containers(user_id)
        if container is None:
            return json.dumps({'success': False, 'msg': 'Instance not found.'})
        if container.renew_count >= docker_max_renew_count:
            return json.dumps({
                'success': False,
                'msg': 'Max renewal times exceed.'
            })
        DBUtils.renew_current_container(user_id=user_id,
                                        challenge_id=challenge_id)
        return json.dumps({'success': True})

    def auto_clean_container():
        # Scheduled job: destroy expired containers, then refresh the frp
        # redirect rules to match.
        with app.app_context():
            results = DBUtils.get_all_expired_container()
            for r in results:
                ControlUtil.remove_container(r.user_id)

            FrpUtils.update_frp_redirect()

    app.register_blueprint(page_blueprint)

    # Cleanup job runs every 10 seconds.
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.start()
    scheduler.add_job(id='whale-auto-clean',
                      func=auto_clean_container,
                      trigger="interval",
                      seconds=10)
Exemplo n.º 35
0
            'trigger': CronTrigger.from_crontab("*/30 * * * *"),
            # 'jitter': 120,
        },
        # job2 is run every 6 hours and 15 minutes (00:15, 06:15, 12:15, 18:15)
        {
            'id': 'job-2',
            'func': 'job2:job2',
            'trigger': CronTrigger.from_crontab("15 */6 * * *"),
            # 'jitter': 120,
        }
    ]
    SCHEDULER_API_ENABLED = True


# Add scheduling to server
# NOTE(review): `app` exposes .server and .run_server, so it is presumably
# a Dash app; the scheduler attaches to the underlying Flask server.
server = app.server
server.config.from_object(Config())

scheduler = APScheduler()
scheduler.init_app(server)
scheduler.start()


# ####################### #
# ## Layout definition ## #
# ####################### #
app.layout = main_layout.layout

if __name__ == '__main__':
    app.run_server(host='0.0.0.0', port=8050, debug=True)
Exemplo n.º 36
0
def startJobs(app):
    """Create a background APScheduler pinned to the Asia/Shanghai timezone,
    bind it to the given Flask app, and start it."""
    background = APScheduler(BackgroundScheduler(timezone="Asia/Shanghai"))
    background.init_app(app)
    background.start()
    currentLogger.info("调度任务已开启")
Exemplo n.º 37
0
def load(app):
    """CTFd plugin entry point: register the ctfd-whale dynamic docker
    challenge type, admin pages, API namespaces, and (in exactly one worker
    process) a background APScheduler instance."""
    # upgrade()
    plugin_name = __name__.split('.')[-1]
    set_config('whale:plugin_name', plugin_name)
    app.db.create_all()
    if not get_config("whale:setup"):
        setup_default_configs()

    register_plugin_assets_directory(
        app,
        base_path=f"/plugins/{plugin_name}/assets",
        endpoint='plugins.ctfd-whale.assets')
    register_admin_plugin_menu_bar(title='Whale',
                                   route='/plugins/ctfd-whale/admin/settings')

    # Templates resolve under the installed plugin name; scripts use the
    # fixed "ctfd-whale" path.
    DynamicValueDockerChallenge.templates = {
        "create": f"/plugins/{plugin_name}/assets/create.html",
        "update": f"/plugins/{plugin_name}/assets/update.html",
        "view": f"/plugins/{plugin_name}/assets/view.html",
    }
    DynamicValueDockerChallenge.scripts = {
        "create": "/plugins/ctfd-whale/assets/create.js",
        "update": "/plugins/ctfd-whale/assets/update.js",
        "view": "/plugins/ctfd-whale/assets/view.js",
    }
    CHALLENGE_CLASSES["dynamic_docker"] = DynamicValueDockerChallenge

    page_blueprint = Blueprint("ctfd-whale",
                               __name__,
                               template_folder="templates",
                               static_folder="assets",
                               url_prefix="/plugins/ctfd-whale")
    CTFd_API_v1.add_namespace(admin_namespace,
                              path="/plugins/ctfd-whale/admin")
    CTFd_API_v1.add_namespace(user_namespace, path="/plugins/ctfd-whale")

    @page_blueprint.route('/admin/settings')
    @admins_only
    def admin_list_configs():
        # Re-initialise Docker and the port sets only when the one-shot
        # "refresh" flag is set and the config checks report no errors.
        errors = WhaleChecks.perform()
        if not errors and get_config("whale:refresh", "false"):
            DockerUtils.init()
            CacheProvider(app=current_app).init_port_sets()
            set_config("whale:refresh", "false")
        return render_template('whale_config.html', errors=errors)

    @page_blueprint.route("/admin/containers")
    @admins_only
    def admin_list_containers():
        # Remember the admin's preferred view mode across requests.
        result = AdminContainers.get()
        view_mode = request.args.get('mode', session.get('view_mode', 'list'))
        session['view_mode'] = view_mode
        return render_template("whale_containers.html",
                               plugin_name=plugin_name,
                               containers=result['data']['containers'],
                               pages=result['data']['pages'],
                               curr_page=abs(
                                   request.args.get("page", 1, type=int)),
                               curr_page_start=result['data']['page_start'])

    def auto_clean_container():
        # Rebuild the frpc configuration from all live containers and push
        # it to the frp admin API. Only referenced by the commented-out
        # scheduler job below.
        # NOTE(review): expired containers are fetched into `results` but
        # never removed here — verify whether removal happens elsewhere.
        with app.app_context():
            results = DBContainer.get_all_expired_container()
            containers = DBContainer.get_all_alive_container()

            config = ''.join([c.frp_config for c in containers])

            try:
                # you can authorize a connection by setting
                # frp_url = http://user:pass@ip:port
                frp_addr = get_config("whale:frp_api_url")
                if not frp_addr:
                    frp_addr = f'http://{get_config("whale:frp_api_ip", "frpc")}:{get_config("whale:frp_api_port", "7400")}'
                    # backward compatibility
                common = get_config("whale:frp_config_template", '')
                if '[common]' in common:
                    output = common + config
                else:
                    # No cached template yet: pull the current config from
                    # frpc and store it as the template for next time.
                    remote = requests.get(f'{frp_addr.rstrip("/")}/api/config')
                    assert remote.status_code == 200
                    set_config("whale:frp_config_template", remote.text)
                    output = remote.text + config
                assert requests.put(f'{frp_addr.rstrip("/")}/api/config',
                                    output,
                                    timeout=5).status_code == 200
                assert requests.get(f'{frp_addr.rstrip("/")}/api/reload',
                                    timeout=5).status_code == 200
            except (requests.RequestException, AssertionError) as e:
                raise WhaleError(
                    '\nfrpc request failed\n' + (f'{e}\n' if str(e) else '') +
                    'please check the frp related configs') from None

    app.register_blueprint(page_blueprint)

    try:
        CacheProvider(app=app).init_port_sets()
        DockerUtils.init()
    except Exception:
        warnings.warn("Initialization Failed. Please check your configs.",
                      WhaleWarning)

    try:
        # Exclusive, non-blocking file lock: only the first worker process
        # to acquire it starts the scheduler; all others hit IOError below.
        lock_file = open("/tmp/ctfd_whale.lock", "w")
        lock_fd = lock_file.fileno()
        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)

        scheduler = APScheduler()
        scheduler.init_app(app)
        scheduler.start()
        # scheduler.add_job(
        #     id='whale-auto-clean', func=auto_clean_container,
        #     trigger="interval", seconds=10
        # )

        print("[CTFd Whale] Started successfully")
    except IOError:
        pass
Exemplo n.º 38
0
class Server(object):
    """Flask + SocketIO server hosting an mTree experiment.

    Serves a basic-auth-protected admin dashboard, subject-facing pages
    (including MTurk preview handling), and wires SocketIO events through
    to the attached experiment object. A Flask-APScheduler instance is
    started for experiment-scheduled jobs.
    """

    # Class-level default; each instance assigns its own Flask app.
    app = None

    def __init__(self):
        #  print("initializing " * 20)
        self.async_mode = 'eventlet'  # None
        self.app = Flask(__name__)
        self.app.config['SECRET_KEY'] = 'secret!'
        self.socketio = SocketIO(self.app, async_mode=self.async_mode)
        # Resolve templates from the app's own folder first, then fall back
        # to the templates packaged with mTree.
        template_loader = jinja2.ChoiceLoader([
            self.app.jinja_loader,
            jinja2.PackageLoader('mTree', 'base/admin_templates'),
            jinja2.PackageLoader('mTree', 'base/user_templates')
        ])
        self.app.jinja_loader = template_loader

        self.app.config['BASIC_AUTH_USERNAME'] = '******'
        self.app.config['BASIC_AUTH_PASSWORD'] = '******'

        self.basic_auth = BasicAuth(self.app)

        self.add_routes()
        self.scheduler = APScheduler()
        self.scheduler.init_app(self.app)
        self.scheduler.start()
        #self.scheduler.add_listener(self.my_listener, events.EVENT_ALL)

    def my_listener(self, event):
        """Debug listener for APScheduler events (see the commented-out
        add_listener call in __init__)."""
        print("APSCHEDULER EVENT " + str(event))

    def run_server(self):
        """Start the SocketIO development server on all interfaces."""
        print("RUNNING " * 20)
        self.socketio.run(self.app, host='0.0.0.0', debug=True)

    def attach_experiment(self, experiment):
        """Instantiate *experiment* (a class) and wire it to this server's
        emitter, socketio, app and scheduler."""
        self.experiment = experiment()
        self.experiment.attach_emitter(emit)
        self.experiment.attach_socketio(self.socketio)

        self.experiment.attach_app(self.app)
        self.experiment.attach_db(None)
        self.experiment.attach_scheduler(self.scheduler)

    def get_response(self, emit):
        """Build a Response wrapper around *emit* and this server's app.

        NOTE(review): ``self.db`` is never assigned anywhere in this class —
        calling this method raises AttributeError; verify intent.
        """
        return Response(emit, self.app, self.db)

    def add_scheduler(self, sched_function):
        """Register (or replace) a 5-second interval job running
        *sched_function*."""
        self.scheduler.add_job(func=sched_function,
                               trigger=IntervalTrigger(seconds=5),
                               id="print_test",
                               name="print something",
                               replace_existing=True)

    def add_routes(self):
        """Register all HTTP routes and SocketIO event handlers."""

        @self.app.route('/admin_dashboard')  # URL path for the admin screen
        @self.basic_auth.required
        def index():
            return render_template('admin_base.html')

        @self.app.route('/static_content/<string:path>')
        def static_hosting(path):
            static_content_location = self.experiment.get_static_content_location(
            )
            return send_from_directory(static_content_location, path)

        @self.app.route('/subject')  # URL path for the subject screen
        def not_search():
            assignment_id = request.args.get('assignmentId')
            hit_id = request.args.get('hitId')
            turk_submit_to = request.args.get('turkSubmitTo')
            worker_id = request.args.get('workerId')

            if assignment_id == "ASSIGNMENT_ID_NOT_AVAILABLE":
                # MTurk preview mode: render the experiment's task-preview
                # template instead of the live subject page.
                context = {}
                template = Environment(loader=FileSystemLoader(
                    self.experiment.get_template_location()
                    or './')).get_template(
                        self.experiment.get_task_preview()).render(context)
                print("PREPARING FOR A PREVIEW...")
                return template
            else:
                return render_template('subject_base.html',
                                       async_mode=self.socketio.async_mode)

        @self.app.route(
            '/<string:experiment_id>/<request_page>'
        )
        def pageHandler(experiment_id, request_page):
            # Flask passes each URL converter as a keyword argument named
            # after it; the previous signature (a single `template` param)
            # raised TypeError on every request to this route. Render the
            # page named in the URL.
            return render_template(request_page)

        @self.socketio.on('admin_control', namespace='/admin')
        def admin_control_message(message):
            #self.experiment.admin_event_handler(message)
            self.experiment.start_experiment()

        @self.socketio.on('user_configuration', namespace='/subject')
        def receive_user_configuration(message):
            # Record the subject's MTurk identifiers against their user id.
            event = json.loads(message["data"])
            print(event)
            user_id = event["user_id"]
            assignment_id = event["assignmentId"]
            hit_id = event["hitId"]
            worker_id = event["workerId"]
            self.experiment.add_user_property(user_id, "assignment_id",
                                              assignment_id)
            self.experiment.add_user_property(user_id, "hit_id", hit_id)
            #self.experiment.add_user_property(user_id, "turk_submit_to", turk_submit_to)
            self.experiment.add_user_property(user_id, "worker_id", worker_id)
            # print("PUT OCCUR -- " + str(event))

        @self.socketio.on('put', namespace='/subject')
        def receive_put(message):
            # Forward subject events straight to the experiment.
            event = json.loads(message["data"])
            #print("PUT OCCUR -- " + str(event))
            self.experiment.event_handler(event)

        @self.socketio.on('join', namespace='/subject')
        def subjectJoin(message):
            print("\n\nSUBJECT JUST JOINED\n\n")

            join_room(message['room'])

        @self.socketio.on('connect', namespace='/subject')
        def subject_connect():
            # Create a user for this socket, put them in their own room,
            # show the welcome screen and maybe auto-start the experiment.
            assignment_id = request.args.get('assignmentId')
            hit_id = request.args.get('hitId')
            turk_submit_to = request.args.get('turkSubmitTo')
            worker_id = request.args.get('workerId')

            user_id = self.experiment.create_user(request.sid)

            join_room(user_id)
            print("\nCONNECTED\nUser: {}\n\n".format(user_id))

            self.experiment.user_objects[user_id].display_welcome_screen(
            )  # display the welcome screen to the connected user

            self.experiment.check_experiment_state_to_run(
                user_id)  # Auto start when subjects connect

        @self.socketio.on('disconnect', namespace='/subject')
        def subject_disconnect():
            print("CLIENT DISCONNECTED")
            self.experiment.remove_user(
                request.sid
            )  # TODO(@messiest) Think of a better way to remove users
Exemplo n.º 39
0
import os
from flask import Flask, flash, request, redirect, url_for, render_template, current_app
from werkzeug.utils import secure_filename
import pandas as pd
import csv
import logging
from app.main.model.code_of_stock import code_stocks
from app.main.model.make_database_of_code import csv_file_to_sql
from app.main.service.time import run_on_time
from threading import Thread
import threading
from flask_apscheduler import APScheduler

ALLOWED_EXTENSIONS = {'csv'}
# NOTE(review): `bp` is not defined anywhere in this module's visible scope,
# so this raises NameError at import time. If `bp` is a Blueprint, note that
# APScheduler.init_app expects a Flask application object — verify the
# intended target (likely the app, not a blueprint).
scheduler = APScheduler()
scheduler.init_app(bp)
scheduler.start()


def read_csv_file(url_file_csv):
    """Read a CSV file and return its rows as a pandas DataFrame.

    Every row (including any header line) becomes one DataFrame row with
    default integer column labels, matching ``pd.DataFrame(list_of_rows)``.

    :param url_file_csv: path to the CSV file to read
    :return: DataFrame with one row per CSV record (empty for an empty file)
    """
    # newline='' is what the csv module documentation requires so that
    # embedded newlines inside quoted fields are parsed correctly.
    with open(url_file_csv, mode='r', newline='') as csv_file:
        rows = list(csv.reader(csv_file))
    return pd.DataFrame(rows)
Exemplo n.º 40
0
def load(app):
    """CTFd plugin entry point (older ctfd-whale variant): registers the
    dynamic docker challenge type, admin pages, per-user container API
    routes, and a 10-second cleanup job guarded by a file lock."""
    # upgrade()
    app.db.create_all()
    CHALLENGE_CLASSES["dynamic_docker"] = DynamicValueDockerChallenge
    register_plugin_assets_directory(app,
                                     base_path="/plugins/ctfd-whale/assets/")
    register_admin_plugin_menu_bar('Whale',
                                   '/plugins/ctfd-whale/admin/settings')

    page_blueprint = Blueprint("ctfd-whale",
                               __name__,
                               template_folder="templates",
                               static_folder="assets",
                               url_prefix="/plugins/ctfd-whale")

    @page_blueprint.route('/admin/settings', methods=['GET'])
    @admins_only
    def admin_list_configs():
        configs = DBUtils.get_all_configs()
        return render_template('config.html', configs=configs)

    @page_blueprint.route('/admin/settings', methods=['PATCH'])
    @admins_only
    def admin_save_configs():
        # Persist the posted config dict, then rebuild the Redis port pools
        # since port-range settings may have changed.
        req = request.get_json()
        DBUtils.save_all_configs(req.items())
        redis_util = RedisUtils(app=app)
        redis_util.init_redis_port_sets()
        return json.dumps({'success': True})

    @page_blueprint.route("/admin/containers", methods=['GET'])
    @admins_only
    def admin_list_containers():
        # Paginated admin listing, 50 containers per page.
        configs = DBUtils.get_all_configs()
        page = abs(request.args.get("page", 1, type=int))
        results_per_page = 50
        page_start = results_per_page * (page - 1)
        page_end = results_per_page * (page - 1) + results_per_page

        count = DBUtils.get_all_alive_container_count()
        containers = DBUtils.get_all_alive_container_page(page_start, page_end)

        # Ceiling division: add one page when there is a partial last page.
        pages = int(count / results_per_page) + (count % results_per_page > 0)
        return render_template("containers.html",
                               containers=containers,
                               pages=pages,
                               curr_page=page,
                               curr_page_start=page_start,
                               configs=configs)

    @page_blueprint.route("/admin/containers", methods=['DELETE'])
    @admins_only
    def admin_delete_container():
        user_id = request.args.get('user_id')
        ControlUtil.remove_container(app, user_id)
        return json.dumps({'success': True})

    @page_blueprint.route("/admin/containers", methods=['PATCH'])
    @admins_only
    def admin_renew_container():
        # Admin renewal bypasses the per-user renew-count limit enforced in
        # the user-facing renew_container route below.
        user_id = request.args.get('user_id')
        challenge_id = request.args.get('challenge_id')
        DBUtils.renew_current_container(user_id=user_id,
                                        challenge_id=challenge_id)
        return json.dumps({'success': True})

    @app.route('/api/v1/container', methods=['POST'])
    @authed_only
    def add_container():
        """Spin up a fresh container for the current user's challenge,
        replacing any container they already have."""
        user_id = current_user.get_current_user().id
        redis_util = RedisUtils(app=app, user_id=user_id)

        # Per-user Redis lock plus a global frequency limit guard against
        # rapid repeated requests.
        if not redis_util.acquire_lock():
            return json.dumps({'success': False, 'msg': REQUEST_TOO_FAST})

        if ControlUtil.frequency_limit():
            return json.dumps({'success': False, 'msg': REQUEST_INTERVAL})

        ControlUtil.remove_container(app, user_id)
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id, user_id)

        configs = DBUtils.get_all_configs()
        current_count = DBUtils.get_all_alive_container_count()
        if int(configs.get("docker_max_container_count")) <= int(
                current_count):
            return json.dumps({'success': False, 'msg': TOO_MANY_INSTANCES})

        dynamic_docker_challenge = DynamicDockerChallenge.query \
            .filter(DynamicDockerChallenge.id == challenge_id) \
            .first_or_404()
        flag = "flag{" + str(uuid.uuid4()) + "}"
        # "http" challenges are reached through an frp subdomain; any other
        # redirect type gets a dedicated forwarded port.
        if dynamic_docker_challenge.redirect_type == "http":
            ControlUtil.add_container(app=app,
                                      user_id=user_id,
                                      challenge_id=challenge_id,
                                      flag=flag)
        else:
            port = redis_util.get_available_port()
            ControlUtil.add_container(app=app,
                                      user_id=user_id,
                                      challenge_id=challenge_id,
                                      flag=flag,
                                      port=port)

        redis_util.release_lock()
        return json.dumps({'success': True})

    @app.route('/api/v1/container', methods=['GET'])
    @authed_only
    def list_container():
        """Return connection details (domain or ip:port) and remaining
        lifetime for the current user's container, if any."""
        user_id = current_user.get_current_user().id
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id, user_id)
        data = ControlUtil.get_container(user_id=user_id)
        configs = DBUtils.get_all_configs()
        domain = configs.get('frp_http_domain_suffix', "")
        timeout = int(configs.get("docker_timeout", "3600"))
        if data is not None:
            # The user's live container belongs to a different challenge.
            if int(data.challenge_id) != int(challenge_id):
                return json.dumps({'success': False})
            dynamic_docker_challenge = DynamicDockerChallenge.query \
                .filter(DynamicDockerChallenge.id == data.challenge_id) \
                .first_or_404()
            # Per-container subdomain label derived from user id + uuid.
            lan_domain = hashlib.md5(
                (str(user_id) + "-" + data.uuid).encode()).hexdigest()
            if dynamic_docker_challenge.redirect_type == "http":
                # Omit the port from the URL when frp serves on port 80.
                if int(configs.get('frp_http_port', "80")) == 80:
                    return json.dumps({
                        'success':
                        True,
                        'type':
                        'http',
                        'domain':
                        lan_domain + domain,
                        'remaining_time':
                        timeout - (datetime.now() - data.start_time).seconds,
                        'lan_domain':
                        lan_domain
                    })
                else:
                    return json.dumps({
                        'success':
                        True,
                        'type':
                        'http',
                        'domain':
                        lan_domain + domain + ":" +
                        configs.get('frp_http_port', "80"),
                        'remaining_time':
                        timeout - (datetime.now() - data.start_time).seconds,
                        'lan_domain':
                        lan_domain
                    })
            else:
                return json.dumps({
                    'success':
                    True,
                    'type':
                    'redirect',
                    'ip':
                    configs.get('frp_direct_ip_address', ""),
                    'port':
                    data.port,
                    'remaining_time':
                    timeout - (datetime.now() - data.start_time).seconds,
                    'lan_domain':
                    lan_domain
                })
        else:
            # No container: success with no connection info.
            return json.dumps({'success': True})

    @app.route('/api/v1/container', methods=['DELETE'])
    @authed_only
    def remove_container():
        """Destroy the current user's container, rate-limited like POST."""
        user_id = current_user.get_current_user().id
        redis_util = RedisUtils(app=app, user_id=user_id)
        if not redis_util.acquire_lock():
            return json.dumps({'success': False, 'msg': REQUEST_TOO_FAST})

        if ControlUtil.frequency_limit():
            return json.dumps({'success': False, 'msg': REQUEST_INTERVAL})

        if ControlUtil.remove_container(app, user_id):
            redis_util.release_lock()

            return json.dumps({'success': True})
        else:
            return json.dumps({'success': False, 'msg': DESTROY_FAILED})

    @app.route('/api/v1/container', methods=['PATCH'])
    @authed_only
    def renew_container():
        """Extend the current user's container lifetime, up to the
        configured maximum renewal count."""
        user_id = current_user.get_current_user().id
        redis_util = RedisUtils(app=app, user_id=user_id)
        if not redis_util.acquire_lock():
            return json.dumps({'success': False, 'msg': REQUEST_TOO_FAST})

        if ControlUtil.frequency_limit():
            return json.dumps({'success': False, 'msg': REQUEST_INTERVAL})

        configs = DBUtils.get_all_configs()
        challenge_id = request.args.get('challenge_id')
        ControlUtil.check_challenge(challenge_id, user_id)
        docker_max_renew_count = int(configs.get("docker_max_renew_count"))
        container = ControlUtil.get_container(user_id)
        if container is None:
            return json.dumps({'success': False, 'msg': INSTANCE_NOT_FOUND})
        if container.renew_count >= docker_max_renew_count:
            return json.dumps({'success': False, 'msg': RENEW_EXCEEDED})
        ControlUtil.renew_container(user_id=user_id, challenge_id=challenge_id)
        redis_util.release_lock()
        return json.dumps({'success': True})

    def auto_clean_container():
        # Scheduled task: remove every expired container, then push the
        # updated redirect rules to frp.
        with app.app_context():
            results = DBUtils.get_all_expired_container()
            for r in results:
                ControlUtil.remove_container(app, r.user_id)

            FrpUtils.update_frp_redirect()

    app.register_blueprint(page_blueprint)

    try:
        # Exclusive, non-blocking file lock: only the first worker process
        # to acquire it starts the scheduler; all others hit IOError below.
        lock_file = open("/tmp/ctfd_whale.lock", "w")
        lock_fd = lock_file.fileno()
        fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)

        scheduler = APScheduler()
        scheduler.init_app(app)
        scheduler.start()
        scheduler.add_job(id='whale-auto-clean',
                          func=auto_clean_container,
                          trigger="interval",
                          seconds=10)

        redis_util = RedisUtils(app=app)
        redis_util.init_redis_port_sets()

        print("[CTFd Whale]Started successfully")
    except IOError:
        pass
Exemplo n.º 41
0
def task_init():
    """Bind a new background scheduler to the module-level Flask app, start
    it, then run an immediate data refresh."""
    job_scheduler = APScheduler()
    job_scheduler.init_app(app)
    job_scheduler.start()
    refresh()
Exemplo n.º 42
0
def configure_scheduler(app):
    """Configure Scheduler

    Bind a new APScheduler instance to the given Flask *app* and start it.
    The scheduler picks up job definitions from the app's configuration.
    """
    scheduler = APScheduler()
    scheduler.init_app(app)
    scheduler.start()