Example 1
ZOOM_RECORDINGS_URI = 'https://api.zoom.us/v2/users/me/recordings'

# Local cache directory for moving files between Zoom and Google Drive
CACHE_DIRECTORY = os.environ.get("ZOOMTODRIVE_CACHE_DIR") or \
    '/var/cache/zoomtodrive'

# Google settings
DRIVE_FOLDER_NAME = os.environ.get('DRIVE_FOLDER_NAME') or "Zoom to Drive"

app = flask.Flask(__name__)

# Setup redis
REDIS_URL = os.environ.get('REDIS_URL') or 'redis://'
REDIS_QUEUE = os.environ.get('REDIS_QUEUE') or 'zoomtodrive'
app.redis = Redis.from_url(REDIS_URL)
app.task_queue = rq.Queue(REDIS_QUEUE, connection=app.redis)

app.secret_key = os.environ.get('FLASK_SECRET_KEY')


## Cache ##
def get_cache_directory():
    # FIXME: use zoom's email
    email = flask.session['google-email']
    directory = os.path.join(CACHE_DIRECTORY, email)
    os.makedirs(directory, exist_ok=True)
    return directory


def get_cache_files():
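    # (The snippet is cut off here. A minimal sketch of a plausible body,
    # an assumption rather than the original code: return the files
    # currently sitting in this user's cache directory.)
    directory = get_cache_directory()
    return [os.path.join(directory, name) for name in os.listdir(directory)]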
Example 2
def create_app(config_class=Config):
    app = Flask(__name__)
    app.config.from_object(config_class)

    db.init_app(app)
    migrate.init_app(app, db)
    login.init_app(app)
    configure_uploads(app, images)
    mail.init_app(app)
    bootstrap.init_app(app)
    moment.init_app(app)
    babel.init_app(app)
    #metrics.init_app(app)
    scheduler.init_app(app)

    PrometheusMetrics(app, registry=CollectorRegistry())

    @app.before_first_request
    def init_scheduler():
        scheduler.start()
        from app import models, tasks
        # Shut down the scheduler when exiting the app
        atexit.register(lambda: scheduler.shutdown())

    app.redis = Redis.from_url(app.config['REDIS_URL'])
    app.task_queue = rq.Queue('expenseapp-tasks', connection=app.redis)

    from app.errors import bp as errors_bp
    app.register_blueprint(errors_bp, url_prefix='/error')

    from app.auth import bp as auth_bp
    app.register_blueprint(auth_bp, url_prefix='/auth')

    from app.apis import bp as apis_bp
    app.register_blueprint(apis_bp, url_prefix='/apis')

    from app.event import bp as event_bp
    app.register_blueprint(event_bp, url_prefix='/event')

    from app.main import bp as main_bp
    app.register_blueprint(main_bp)

    if not app.debug and not app.testing:
        if app.config['MAIL_SERVER']:
            auth = None
            if app.config['MAIL_USERNAME'] or app.config['MAIL_PASSWORD']:
                auth = (app.config['MAIL_USERNAME'],
                        app.config['MAIL_PASSWORD'])
            secure = None
            if app.config['MAIL_USE_TLS'] or app.config['MAIL_USE_SSL']:
                secure = ()
            mail_handler = SMTPHandler(
                mailhost=(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),
                fromaddr=app.config['ADMIN_NOREPLY_SENDER'],
                toaddrs=[app.config['ADMIN_EMAIL']],
                subject='Failure',
                credentials=auth,
                secure=secure)
            mail_handler.setLevel(logging.ERROR)
            app.logger.addHandler(mail_handler)

            if not os.path.exists('logs'):
                os.mkdir('logs')
            file_handler = RotatingFileHandler('logs/errorlog.log',
                                               maxBytes=10240,
                                               backupCount=10)
            file_handler.setFormatter(
                logging.Formatter(
                    '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'
                ))
            file_handler.setLevel(logging.INFO)
            app.logger.addHandler(file_handler)

            app.logger.setLevel(logging.INFO)
            app.logger.info('App startup')

    return app
Example 3
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
# ******************************************************************************

from flask import Flask
from app.config import Config
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from redis import Redis
import rq
import logging

app = Flask(__name__)
app.config.from_object(Config)
db = SQLAlchemy(app)
migrate = Migrate(app, db)

from app import routes, result_model, errors

app.redis = Redis.from_url(app.config['REDIS_URL'], port=5040)
app.execute_queue = rq.Queue('pytket-service_execute', connection=app.redis, default_timeout=3600)
app.logger.setLevel(logging.INFO)
Example 4
def create_app(config_class=Config, keeplog=True):
    app = Flask(__name__)
    app.config.from_object(config_class)
    db.init_app(app)
    migrate.init_app(app, db)
    bootstrap.init_app(app)
    login.init_app(app)
    mail.init_app(app)
    mongo.init_app(app)

    app.redis = Redis.from_url(app.config['REDIS_URL'])
    app.task_queue = rq.Queue('ngs-server-tasks', connection=app.redis)

    app.fetch_job_result = fetch_job_from_queue(app.redis)

    app.elasticsearch = Elasticsearch([app.config['ELASTICSEARCH_URL']]) \
        if app.config['ELASTICSEARCH_URL'] else None

    from app.auth import bp as auth_bp
    app.register_blueprint(auth_bp, url_prefix='/auth')

    from app.errors import bp as errors_bp
    app.register_blueprint(errors_bp)

    from app.APPS import bp as apps_bp
    app.register_blueprint(apps_bp, url_prefix='/apps')

    from app.main import bp as main_bp
    app.register_blueprint(main_bp)

    from app.admin import bp as admin_bp
    app.register_blueprint(admin_bp, url_prefix='/admin')

    from app.ngs import bp as ngs_bp
    app.register_blueprint(ngs_bp, url_prefix='/ngs')

    from app.ppt import bp as ppt_bp
    app.register_blueprint(ppt_bp, url_prefix='/ppt')

    from app.upload import bp as upload_bp
    app.register_blueprint(upload_bp, url_prefix='/upload')

    from app.API import bp as api_bp
    app.register_blueprint(api_bp, url_prefix='/api')

    if (not app.debug) and (not app.testing) and keeplog:
        if not os.path.exists('logs'):
            os.mkdir('logs')

        file_handler = RotatingFileHandler('logs/ngs_server.log',
                                           maxBytes=1024000,
                                           backupCount=10)
        file_handler.setFormatter(
            logging.Formatter(
                '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'
            ))
        file_handler.setLevel(logging.ERROR)
        app.logger.addHandler(file_handler)
        app.logger.setLevel(logging.ERROR)
        app.logger.info('ngs_server startup')

    return app
Example 5
File: web.py Project: wcmckee/coil
def configure_site():
    """Configure the Nikola site for Coil CMS."""
    global _site, site, db, q

    nikola.__main__._RETURN_DOITNIKOLA = True
    _dn = nikola.__main__.main([])
    _dn.sub_cmds = _dn.get_commands()
    _site = _dn.nikola
    app.config['NIKOLA_ROOT'] = os.getcwd()
    app.config['DEBUG'] = False

    # Logging configuration

    logf = (u'[{record.time:%Y-%m-%dT%H:%M:%SZ}] {record.level_name}: '
            u'{record.channel}: {record.message}')
    logh = (u'[{record.time:%Y-%m-%dT%H:%M:%SZ}] {record.channel} '
            u'{record.message}')

    loghandlers = [
        ColorfulStderrHandler(level=logbook.DEBUG,
                              format_string=logf,
                              bubble=True),
        logbook.FileHandler('coil.log',
                            'a',
                            'utf-8',
                            logbook.DEBUG,
                            logf,
                            bubble=True)
    ]

    hloghandlers = [
        ColorfulStderrHandler(level=logbook.DEBUG,
                              format_string=logh,
                              bubble=True),
        logbook.FileHandler('coil.log',
                            'a',
                            'utf-8',
                            logbook.DEBUG,
                            logh,
                            bubble=True)
    ]

    _site.loghandlers = loghandlers
    nikola.utils.LOGGER.handlers = loghandlers

    nikola.plugins.command.new_post.POSTLOGGER.handlers = loghandlers
    nikola.plugins.command.new_post.PAGELOGGER.handlers = loghandlers

    app.config['LOGGER_NAME'] = 'Coil'
    app._logger = get_logger('Coil', loghandlers)
    app.http_logger = get_logger('CoilHTTP', hloghandlers)

    if not _site.configured:
        app.logger("Not a Nikola site.")
        return

    app.secret_key = _site.config.get('COIL_SECRET_KEY')
    app.config['COIL_URL'] = _site.config.get('COIL_URL')
    app.config['REDIS_URL'] = _site.config.get('COIL_REDIS_URL',
                                               'redis://localhost:6379/0')
    db = redis.StrictRedis.from_url(app.config['REDIS_URL'])
    q = rq.Queue(name='coil', connection=db)

    _site.template_hooks['menu'].append(generate_menu)
    _site.template_hooks['menu_alt'].append(generate_menu_alt)

    app.config['NIKOLA_URL'] = _site.config['SITE_URL']
    _site.config['NAVIGATION_LINKS'] = {
        'en': (
            (app.config['NIKOLA_URL'], '<i class="fa fa-globe"></i>'),
            ('http://coil.readthedocs.org/en/latest/user/',
             '<i class="fa fa-question-circle"></i>'),
        )
    }
    _site.GLOBAL_CONTEXT['navigation_links'] = _site.config['NAVIGATION_LINKS']
    TITLE = _site.GLOBAL_CONTEXT['blog_title']('en') + ' Administration'
    _site.config['BLOG_TITLE'] = TranslatableSetting(
        'BLOG_TITLE', TITLE, _site.config['TRANSLATIONS'])
    _site.GLOBAL_CONTEXT['blog_title'] = _site.config['BLOG_TITLE']
    _site.GLOBAL_CONTEXT['lang'] = 'en'
    _site.GLOBAL_CONTEXT['extra_head_data'] = TranslatableSetting(
        'EXTRA_HEAD_DATA',
        """<link href="//maxcdn.bootstrapcdn.com/font-awesome/4.2.0/css/"""
        """font-awesome.min.css" rel="stylesheet">\n"""
        """<link href="/coil_assets/css/coil.css" rel="stylesheet">""",
        _site.config['TRANSLATIONS'])
    # HACK: body_end appears after extra_js from templates, so we must use
    #       social_buttons_code instead
    _site.GLOBAL_CONTEXT['social_buttons_code'] = TranslatableSetting(
        'SOCIAL_BUTTONS_CODE',
        """<script src="/coil_assets/js/coil.js"></script>""",
        _site.config['TRANSLATIONS'])

    # Theme must inherit from bootstrap3, because we have hardcoded HTML.
    bs3 = (('bootstrap3' in _site.THEMES)
           or ('bootstrap3-jinja' in _site.THEMES))
    if not bs3:
        app.logger.notice("THEME does not inherit from 'bootstrap3' or "
                          "'bootstrap3-jinja', using 'bootstrap3' instead.")
        _site.config['THEME'] = 'bootstrap3'
        # Reloading some things
        _site._THEMES = None
        _site._get_themes()
        _site._template_system = None
        _site._get_template_system()
        if 'has_custom_css' in _site._GLOBAL_CONTEXT:
            del _site._GLOBAL_CONTEXT['has_custom_css']
        _site._get_global_context()

    tmpl_dir = pkg_resources.resource_filename(
        'coil', os.path.join('data', 'templates', _site.template_system.name))
    if os.path.isdir(tmpl_dir):
        # Inject tmpl_dir low in the theme chain
        _site.template_system.inject_directory(tmpl_dir)

    # Site proxy
    site = SiteProxy(db, _site, app.logger)
    configure_url(app.config['COIL_URL'])
Example 6
def create_app(test_config=None):
    # Create Flask app with a default config
    app = Flask(__name__, instance_relative_config=True)

    # Load test config if we are in testing mode
    if test_config is None:
        app.config.from_pyfile('config.py', silent=True)
    else:
        app.config.from_mapping(test_config)

    # ensure the instance folder exists
    try:
        os.makedirs(app.instance_path)
    except OSError:
        pass

    from lidarts.models import User, Role
    from lidarts.auth.forms import ExtendedLoginForm, ExtendedRegisterForm, \
        ExtendedChangePasswordForm, ExtendedResetPasswordForm

    # Initialize Flask extensions
    db.init_app(app)
    cdn.init_app(app)
    migrate.init_app(app, db)
    mail.init_app(app)
    user_datastore = SQLAlchemyUserDatastore(db, User, Role)
    security.init_app(app,
                      user_datastore,
                      login_form=ExtendedLoginForm,
                      register_form=ExtendedRegisterForm,
                      change_password_form=ExtendedChangePasswordForm,
                      reset_password_form=ExtendedResetPasswordForm)

    origins = app.config.get('CORS_ALLOWED_ORIGINS', '*')

    if 'ENGINEIO_MAX_DECODE_PACKETS' in app.config:
        Payload.max_decode_packets = app.config['ENGINEIO_MAX_DECODE_PACKETS']

    message_queue = app.config.get('SOCKETIO_MESSAGE_QUEUE', 'redis://')
    socketio.init_app(
        app,
        message_queue=message_queue,
        async_mode='gevent',
        cors_allowed_origins=origins,
        # logger=True, engineio_logger=True,
    )
    babelobject.init_app(app)
    moment.init_app(app)
    configure_uploads(app, avatars)
    patch_request_class(app, 2 * 1024 * 1024)

    app.json_encoder = JSONEncoder
    # Fixes bug: url_for generates http endpoints instead of https which causes mixed-content-errors
    app.wsgi_app = ReverseProxied(app.wsgi_app)

    # filter for jinja
    app.jinja_env.filters['datetime'] = format_datetime
    app.jinja_env.globals['get_locale'] = get_locale

    if 'REDIS_URL' in app.config:
        # app.redis = Redis.from_url('redis://')
        # app.redis_client = StrictRedis()
        app.redis = StrictRedis(
            host=app.config['REDIS_URL'],
            password=app.config['REDIS_PASSWORD'],
        )
    else:
        app.redis = StrictRedis.from_url('redis://')
    app.task_queue = rq.Queue('lidarts-tasks', connection=app.redis)

    # Flask-Security mails need to be sent in background
    @security.send_mail_task
    def delay_flask_security_mail(msg):
        app.task_queue.enqueue('lidarts.tasks.send_mail', msg)

    if 'DASHBOARD_ENABLED' in app.config and app.config['DASHBOARD_ENABLED']:
        dashboard.config.init_from(
            file=os.path.join(app.instance_path, 'dashboard.cfg'))

        def get_user_id():
            return current_user.id

        dashboard.config.group_by = get_user_id
        dashboard.bind(app)

    # Load all blueprints
    from lidarts.admin import bp as admin_bp
    app.register_blueprint(admin_bp)

    from lidarts.api import bp as api_bp
    app.register_blueprint(api_bp)

    from lidarts.generic import bp as generic_bp
    app.register_blueprint(generic_bp)

    from lidarts.game import bp as game_bp
    app.register_blueprint(game_bp)

    from lidarts.profile import bp as profile_bp
    app.register_blueprint(profile_bp)

    from lidarts.legal import bp as legal_bp
    app.register_blueprint(legal_bp)

    from lidarts.auth import bp as auth_bp
    app.register_blueprint(auth_bp)

    from lidarts.tools import bp as tools_bp
    app.register_blueprint(tools_bp)

    from lidarts.statistics import bp as statistics_bp
    app.register_blueprint(statistics_bp)

    from lidarts.tournament import bp as tournament_bp
    app.register_blueprint(tournament_bp)

    from lidarts.generic.errors import not_found_error, internal_error
    app.register_error_handler(404, not_found_error)
    app.register_error_handler(500, internal_error)

    import lidarts.models
    import lidarts.socket.base_handler
    import lidarts.socket.chat_handler
    import lidarts.socket.X01_game_handler
    import lidarts.socket.game.cricket.cricket_game_handler
    import lidarts.socket.public_challenge_handler
    import lidarts.socket.tournament_handler
    import lidarts.socket.webcam_follow_handler

    return app
Example 7

            'inscription_id': inscription_id,
            'status_summary': status,
            'status_detail': status_detail
        }
        log.debug('payload, ```{}```'.format(pprint.pformat(payload)))
        r = requests.post(self.PROCESS_STATUS_UPDATER_URL,
                          data=json.dumps(payload))
        log.debug('post-result, ```{}```'.format(r.status_code))
        return

    ## end class ProcessStatusUpdater()


## runners ##

q = rq.Queue(u'iip_prc', connection=redis.Redis())
puller = Puller()
backupper = StatusBackupper()
prepper = Prepper()
indexer = Indexer()
process_status_updater = ProcessStatusUpdater()


def run_call_git_pull(to_process_dct):
    """ Initiates a git pull update.
            Eventually spawns a call to indexer.run_update_index() which handles each result found.
        Called by github_helper.GHHelper.handle_inscription_update(). """
    log.debug('to_process_dct, ```{}```'.format(
        pprint.pformat(to_process_dct)))  # keys: ['files_removed', 'files_updated', 'timestamp']
    time.sleep(2)  # let any existing in-process pull finish
    puller.call_git_pull()
Example 8
def create_app(config_class=Config):
    app = Flask(__name__)
    app.config.from_object(config_class)

    app.redis = Redis.from_url(app.config['REDIS_URL'])
    app.task_queue = rq.Queue('myblog-tasks', connection=app.redis)

    db.init_app(app)
    migrate.init_app(app, db)
    login.init_app(app)
    mail.init_app(app)
    bootstrap.init_app(app)
    moment.init_app(app)
    babel.init_app(app)
    admin.init_app(app)

    from app.admin import bp as admin_bp
    app.register_blueprint(admin_bp)

    from app.errors import bp as errors_bp
    app.register_blueprint(errors_bp)

    from app.auth import bp as auth_bp
    app.register_blueprint(auth_bp, url_prefix='/auth')

    from app.main import bp as main_bp
    app.register_blueprint(main_bp)

    from app.api import bp as api_bp
    app.register_blueprint(api_bp, url_prefix='/api')

    @babel.localeselector
    def get_locale():
        return request.accept_languages.best_match(app.config['LANGUAGES'])

    if not app.debug and not app.testing:
        if app.config['MAIL_SERVER']:
            auth = None
            if app.config['MAIL_USERNAME'] or app.config['MAIL_PASSWORD']:
                auth = (app.config['MAIL_USERNAME'],
                        app.config['MAIL_PASSWORD'])
            secure = None
            if app.config['MAIL_USE_TLS']:
                secure = ()
            mail_handler = SMTPHandler(
                mailhost=(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),
                fromaddr=f'no-reply@{app.config["MAIL_SERVER"]}',
                toaddrs=app.config['ADMINS'],
                subject='My Blog Failure',
                credentials=auth,
                secure=secure)
            mail_handler.setLevel(logging.ERROR)
            app.logger.addHandler(mail_handler)

        if app.config['LOG_TO_STDOUT']:
            stream_handler = logging.StreamHandler()
            stream_handler.setLevel(logging.INFO)
            app.logger.addHandler(stream_handler)
        else:
            if not os.path.exists('logs'):
                os.mkdir('logs')
            file_handler = RotatingFileHandler('logs/myblog.log',
                                               maxBytes=10240,
                                               backupCount=10)
            file_handler.setFormatter(
                logging.Formatter(
                    '%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'
                ))
            file_handler.setLevel(logging.INFO)
            app.logger.addHandler(file_handler)

            app.logger.setLevel(logging.INFO)
            app.logger.info('My Blog startup')

    return app
Example 9
    headers_enabled=True,  # Send headers with info about how much time is left until unlocking
)
# Premium users who can spam as much as they want
# That is - me :)
premium_passwords = []
_p_pass_file = pathlib.Path('premium_passwords.txt')
if _p_pass_file.exists():
    with open(_p_pass_file, 'r') as f:
        premium_passwords = f.read().split()

# Main API endpoint - change this if anything big changes, or features/endpoints are removed
API1 = '/api/'

_redis = redis.Redis()
queue_images = rq.Queue('images', connection=_redis)
queue_vision = rq.Queue('vision', connection=_redis)


@app.route(API1)  # TODO: Maybe add some documentation there
def hello():
    return "Hello there!"


@limiter.request_filter
def password_whitelist():
    return request.headers.get('pwd') in premium_passwords


def _roll_rate_limit():
    """A helper function that if-else-es what rate limit to set right now
Example 10
def create_queue(name, **kwargs):
    """Create an rq.Queue using QUEUE_OPTS merged with kwargs."""
    opts = QUEUE_OPTS.copy()
    opts.update(**kwargs)
    return rq.Queue(name, **opts)
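QUEUE_OPTS itself is not part of the snippet. A minimal sketch of how it might be defined and used, assuming it bundles a shared Redis connection with a default timeout (the names and values here are assumptions, not the project's actual settings):

import redis

# Assumed shape of the missing QUEUE_OPTS: a shared connection plus
# defaults that every queue created by create_queue() starts from.
QUEUE_OPTS = {
    'connection': redis.Redis(),
    'default_timeout': 600,
}

# Per-call kwargs override the defaults:
slow_queue = create_queue('slow-tasks', default_timeout=3600)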
Example 11
    RequestMetricsInput,
    ServerJob,
    ServerJobStatus,
)

from .schema import schema

# Flask Setup
app = Flask(__name__)
sockets = Sockets(app)
CORS(app)
app.debug = True

# Job queue setup, the worker runs in the `rq_worker` container
REDIS_CONNECTION = redis.Redis("redis", 6379)
JOB_QUEUE = rq.Queue(connection=REDIS_CONNECTION, default_timeout=30)

# {{{ ServerJobManager


class ServerJobManager:
    """A wrapper that holds all information for a server job including shared state.

    Parameters
    ----------
    server_job: ServerJob
        Contains information required to run the job, this state is shared
        between the client and the server and the `ServerJobManager` attempts
        to sync any changes to `server_job`

    websocket
Example 12

def register_shellcontext(app):
    """Register shell context objects."""
    def shell_context():
        """Shell context objects."""
        return {'db': db, 'User': user.models.User}

    app.shell_context_processor(shell_context)


def register_blueprints(app):
    """Register Flask blueprints."""
    app.register_blueprint(public.routes.blueprint)
    app.register_blueprint(auth.routes.blueprint, url_prefix='/auth')
    app.register_blueprint(webhooks.routes.blueprint, url_prefix='/webhooks')
    app.register_blueprint(user.routes.blueprint, url_prefix='/user')
    return None


config_class = Config
app = Flask(__name__.split('.')[0])
app.config.from_object(config_class)
app.redis = Redis.from_url(app.config['REDIS_URL'])
app.task_queue = rq.Queue('swt-tasks', connection=app.redis)
register_extensions(app)
register_shellcontext(app)
register_blueprints(app)

hourly(app)
Example 13
# -*- coding: utf-8 -*-

import sys
import rq
from rq import Queue, Connection, Worker


# Why this? It lets workers reuse SQLAlchemy's connection pool
# (see rq's performance notes).
# Added by notedit, 2013-01-24

with Connection():

    # list() is needed: in Python 3, map() is lazy and always truthy,
    # so the [rq.Queue()] fallback would never trigger otherwise.
    qs = list(map(rq.Queue, sys.argv[1:])) or [rq.Queue()]
    
    w = rq.Worker(qs)
    w.work()
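Run as, say, python thisworker.py high default low to listen on those queues in that priority order (the script name is a stand-in; the snippet does not give one). With no arguments, the list(...) or [rq.Queue()] fallback makes the worker listen on rq's default queue.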


Example 14
import os

import rq
from flask import Flask, Response, request
from flask_cors import CORS

import task_handler
from enums.type_data import DataType
from worker import conn

FLASK_APP = Flask(__name__)
CORS(FLASK_APP)  # allow requests from other origins (e.g. localhost on another port)

# just to avoid a windows error...
os.environ.setdefault('FORKED_BY_MULTIPROCESSING', '1')

tasksQueue = rq.Queue(connection=conn, default_timeout=3600)

# TODO: review this for possible errors


@FLASK_APP.route("/task/<task_id>", methods=["GET"])
def get_task_status(task_id):
    task = tasksQueue.fetch_job(task_id)

    if task:
        response_object = {
            "status": "success",
            "data": {
                "task_id": task.get_id(),
                "task_status": task.get_status(),
                "task_result": task.result,
Example 15
from flask import Flask
from config import Config
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_login import LoginManager
from flask_bootstrap import Bootstrap
from flask_moment import Moment
from redis import Redis
import rq

app = Flask(__name__)
app.config['TEMPLATES_AUTO_RELOAD'] = True
app.config.from_object(Config)
db = SQLAlchemy(app)
migrate = Migrate(app, db)
login = LoginManager(app)
login.login_view = 'login'
bootstrap = Bootstrap(app)
moment = Moment(app)
redis = Redis.from_url(app.config['REDIS_URL'])
task_queue = rq.Queue('microblog-tasks', connection=redis)
app.jinja_env.auto_reload = True

from app import routes, models, errors
Example 16
def get_q():
    try:
        return g.q
    except AttributeError:
        q = g.q = rq.Queue(connection=get_redis())
        return q
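get_redis is not shown in the snippet. A sketch of a matching helper using the same cache-on-g pattern, assuming the connection URL lives in the app config under a REDIS_URL key (an assumption):

import redis
from flask import current_app, g

def get_redis():
    # Cache the client on flask.g, mirroring get_q(), so each request
    # opens at most one Redis connection.
    try:
        return g.redis
    except AttributeError:
        r = g.redis = redis.Redis.from_url(
            current_app.config.get('REDIS_URL', 'redis://'))
        return r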
Example 17
        BROCAPI_PROCESSING_DIR)
    try:
        os.makedirs(BROCAPI_PROCESSING_DIR)
    except OSError:
        logger.error("Could not create Brocapi tmp dirs.")
        sys.exit(1)
    logger.info("Successfully created the processing directory %s",
                BROCAPI_PROCESSING_DIR)

# Create a connection to our rq worker queue
logger.info("Connecting to worker queue..")
try:
    rs = redis.Redis()
    # Test if the redis server is up
    rs.ping()
    brocapi_queue = rq.Queue(connection=rs)
except Exception as e:
    logger.error("Error attempting to connect to worker queue!")
    logger.error(e)
    sys.exit(1)
logger.info("Successfully connected to worker queue")

# Set up our Flask app
app = flask.Flask(__name__)


@app.route('/submit/pcap', methods=['POST'])
def api_submit_pcap():
    """API Endpoint for Bro pcap processing"""
    # Create a unique job uuid and folders
    job_uuid = str(uuid.uuid4())
Example 18
# -*- coding: utf-8 -*-

import os, pprint
import redis, rq

QUEUE_NAME = os.environ['IIP_PRC__QUEUE_NAME']

q = rq.Queue(QUEUE_NAME, connection=redis.Redis())

print(f'- initial number of jobs in queue ``{QUEUE_NAME}``: ``{len(q.jobs)}``')

for job in q.jobs:
    job_d = {
        'args': job._args,
        'kwargs': job._kwargs,
        'function': job._func_name,
        'description': job.description,
        'dt_created': job.created_at,
        'dt_enqueued': job.enqueued_at,
        'dt_ended': job.ended_at,
        'origin': job.origin,
        'id': job._id,
        'traceback': job.exc_info
    }
    print('- job info...')
    pprint.pprint(job_d)
    job.delete()
    print('- deleted.')
    print('---')

print(f'- current number of jobs in queue ``{QUEUE_NAME}``: ``{len(q.jobs)}``')
Example 19
def create_app(config_class=Config):
    app = Flask(__name__)
    app.config.from_object(config_class)

    db.init_app(app)
    migrate.init_app(app, db)
    login.init_app(app)
    app.elasticsearch = Elasticsearch(app.config['ELASTICSEARCH_URL']) if app.config['ELASTICSEARCH_URL'] else None
    app.redis = Redis.from_url(app.config['REDIS_URL'])
    app.task_queue = rq.Queue('microblog-task', connection=app.redis)
    mail.init_app(app)
    bootstrap.init_app(app)
    moment.init_app(app)
    babel.init_app(app)

    from app.errors import bp as error_bp
    app.register_blueprint(error_bp)

    from app.auth import bp as auth_bp
    app.register_blueprint(auth_bp, url_prefix="/auth")

    from app.main import bp as main_bp
    app.register_blueprint(main_bp)
    
    from app.api import bp as api_bp
    app.register_blueprint(api_bp, url_prefix="/api")
    

    """Logging Info Errors """
    if not app.debug:
        if app.config["MAIL_SERVER"]:
            auth = None
            if app.config["MAIL_USERNAME"] or app.config["MAIL_PASSWORD"]:
                auth = (app.config["MAIL_USERNAME"], app.config["MAIL_PASSWORD"])
                secure = None
            if app.config["MAIL_USE_TLS"]:
                secure = ()
            mail_handler = SMTPHandler(
                mailhost=(app.config["MAIL_SERVER"], app.config["MAIL_PORT"]),
                fromaddr=f'no-reply@{app.config["MAIL_SERVER"]}',
                toaddrs=app.config["ADMINS"],
                subject="Microblog Failure",
                credentials=auth,
                secure=secure,
            )
            mail_handler.setLevel(logging.ERROR)
            app.logger.addHandler(mail_handler)

        if not os.path.exists("logs"):
            os.mkdir("logs")
        file_handler = RotatingFileHandler(
            "logs/microblog.log", maxBytes=1024, backupCount=10
        )
        file_handler.setFormatter(
            logging.Formatter(
                "%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]"
            )
        )
        file_handler.setLevel(logging.INFO)
        app.logger.addHandler(file_handler)
        app.logger.setLevel(logging.INFO)
        app.logger.info("Microblog Startup")

    return app
Example 20
                                headers=self.config["headers"], allow_redirects=True)
        data = session.cookies['ASP.NET_SessionId']
        session.close()
        if response.url == self.config['url']+'/CoreAccount/Portal':
            return data
        else:
            raise Exception("Can't login into IOP", data)


r_number2group = redis.Redis(host='redis', port=6379, decode_responses=True, db=1)
r_group2id = redis.Redis(host='redis', port=6379, decode_responses=True, db=2)
r_id2timetable = redis.Redis(host='redis', port=6379, decode_responses=True, db=4)
r_stats = redis.Redis(host='redis', port=6379, decode_responses=True, db=5)

# Connection used to create the Redis queue via python-rq
queue = rq.Queue('sender-tasks', connection=redis.Redis.from_url('redis://redis:6379/3'))

s_obj = GetSettingsClass()
uconfig = s_obj.config
sender_config = {
    "gsm_url" : uconfig["gsm_url"],
    "gsm_password" : uconfig["gsm_password"],
    "SMS_TIME_SLEEP" : uconfig["SMS_TIME_SLEEP"],
}

def get_date_now():
    """
    Return the current date.

    Used for statistics table no. 5 in async check_send.
    """
Example 21
# python
import collections

# redis
from redis import Redis
import rq
from rq.job import Job

# app source
from . import database
from .database import redis
from . import nlp

# initialize redis connections
queue = rq.Queue(connection=redis)


def init():
    nlp.init()


#### DATABASE ACCESS ###########################################################

## Word Index ------------------------------------------------------------------


def rec_add_title(book_title, book_id):
    # split title into words
    words = nlp.stem_title(book_title)

    # for each word in title, store book_id in word lookup table
Example 22
            auth = (app.config['MAIL_USERNAME'], app.config['MAIL_PASSWORD'])
        secure = None
        if app.config['MAIL_USE_TLS']:
            secure = ()
        mail_handler = SMTPHandler(
            mailhost=(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),
            fromaddr=app.config['MAIL_DEFAULT_SENDER'],
            toaddrs=app.config['ADMINS'], subject='Stadsgids error',
            credentials=auth, secure=secure)
        mail_handler.setLevel(logging.DEBUG)
        logger.addHandler(mail_handler)

# check if running on the production server
if os.getenv("PRODUCTION_SERVER") == "True":
    # set session cookie secure
    app.config["SESSION_COOKIE_SECURE"] = True

    # import worker
    from runworker import conn

    # set worker Queue
    queue = rq.Queue('default', connection=conn)

    # set redirect to https
    @app.before_request
    def before_request():
        if request.url.startswith('http://'):
            url = request.url.replace('http://', 'https://', 1)
            code = 301
            return redirect(url, code=code)
Example 23
import traceback
import redis
import rq

rq.use_connection(redis.Redis())
q = rq.Queue("lifeboxQueue")
print(q.count)
workers = rq.Worker.all(queue=q)
for w in workers:
    print(f"{w.name} - {w.get_state()} - {w.get_current_job()}")



Example 24
from flask import Flask
from config import Config
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_mail import Mail
from redis import Redis
import rq

app = Flask(__name__)
app.config.from_object(Config)

db = SQLAlchemy(app)
migrate = Migrate(app, db)
mail = Mail(app)

app.redis = Redis.from_url(app.config['REDIS_URL'])
app.task_queue = rq.Queue('ticker-tasks', connection=app.redis)

from app import routes, models
Example 25

app = Flask(__name__)
app.config.from_pyfile('config.py')

# mongo
client = MongoClient(app.config['MONGO_URL'])
db = client.images_info

# azure files
file_service = FileService(
    account_name=app.config['AZURE_ACCOUNT'],
    account_key=app.config['AZURE_KEY'],
)

# async
queue = rq.Queue(connection=Redis.from_url(app.config['REDIS_URL']))

# cache
cache = Cache(app, config={
    'CACHE_TYPE': 'redis',
    'CACHE_REDIS_URL': app.config['REDIS_URL'],
})

# logging
es = Elasticsearch(app.config['ELASTICSEARCH_CONFIG'])


def log(log_body):
    es.index(index='logs', doc_type='log1', body=log_body)

Example 26
    def partial_update(self, request, pk=None, **kwargs):

        # Any updates to this dictionary should also be updated in console/django_scantron/models.py
        scan_status_allowed_state_update_dict = {
            "pending": ["started", "error"],
            "started": ["pause", "cancel", "completed", "error"],
            "pause": ["paused", "error"],
            "paused": ["pending", "cancel", "error"],
            "cancel": ["cancelled", "error"],
            "cancelled": ["error"],
            "completed": ["error"],
            "error": ["pending"],
        }

        try:

            # Extract the json payload.
            body = self.request.data
            new_scan_status = body["scan_status"]

            if new_scan_status in [
                    "started", "pause", "paused", "cancel", "cancelled",
                    "completed", "error"
            ]:

                # Filter only the applicable ScheduledScans for the engine.  Prevents an engine modifying another engine's
                # ScheduledScan information.
                scheduled_scan_dict = ScheduledScan.objects.filter(
                    scan_engine=request.user).filter(pk=pk).values()[0]

                current_scan_status = scheduled_scan_dict["scan_status"]

                # Based off the current scan status, ensure the updated scan status is valid.
                if new_scan_status not in scan_status_allowed_state_update_dict[
                        current_scan_status]:

                    # Convert list to a string.
                    valid_scan_states = ", ".join(
                        scan_status_allowed_state_update_dict[
                            current_scan_status])

                    response_dict = {
                        "detail":
                        f"Invalid scan status change requested.  Scan status state '{current_scan_status}' "
                        f"can only transition to: {valid_scan_states}"
                    }

                    return JsonResponse(response_dict)

                # Update the scheduled_scan_dict with the most recent scan_status state from the PUT request.  When
                # originally querying above, the old state is passed to utility.py unless it is updated.
                scheduled_scan_dict["scan_status"] = new_scan_status

                # Create a redis connection object.
                redis_conn = redis.Redis(host="127.0.0.1", port=6379, db=0)

                # Create a redis queue object.
                q = rq.Queue(connection=redis_conn)

                # Queue up the scheduled_scan_dict to be processed by utility.py.
                job = q.enqueue(utility.process_scan_status_change,
                                scheduled_scan_dict)  # noqa

            else:
                raise Http404

        except ScheduledScan.DoesNotExist:
            raise Http404

        kwargs["partial"] = True

        return self.update(request, pk, **kwargs)
Example 27
import shlex
import tempfile
from subprocess32 import check_output, CalledProcessError
import rq
from redis import StrictRedis
import json

redis_conn = StrictRedis(host='django_rq', port=6379, db=0)
q = rq.Queue('nlp_importer', connection=redis_conn)

run_cmd = shlex.split('mvn -q exec:java -Dexec.mainClass="annotator.Annotator" -Dexec.args=in.json')

def nlp_hints(hint_source):
    with tempfile.NamedTemporaryFile(delete=True) as f:
        f.write(hint_source)
        f.flush()
        run_cmd[-1] = '-Dexec.args=%s' % f.name
        annotations = check_output(run_cmd, universal_newlines=True)
        # NLP-hints currently drops article_id from result records, so
        # convert source and dest from JSON so we can copy article_id over
        taskList = json.loads(hint_source)
        resultList = json.loads(annotations)
        for i in range(len(taskList)):
            resultList[i]['article_id'] = taskList[i].get('article_id', None)
        annotations = json.dumps(resultList)
        # send annotations back to Django for importing
        job = q.enqueue('data.nlp_importer.nlp_load', annotations,
                        timeout=60, result_ttl=24*3600)
        return annotations
Example 28
from redis import Redis
import rq

queue = rq.Queue(
    'microblog-tasks',
    connection=Redis.from_url(
        'redis://:[email protected]:6379/0'))
job = queue.enqueue('app.ffmpegtasks.ffmpegtask',
                    job_timeout='1h',
                    description='test stream')
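Because the job is enqueued by dotted path, the enqueuing process never imports the task; only the worker must be able to resolve app.ffmpegtasks.ffmpegtask. A sketch of a compatible task module, with an assumed placeholder body (the real one is not shown):

# app/ffmpegtasks.py -- must be importable by the rq worker process
import subprocess

def ffmpegtask():
    # Placeholder body, an assumption; the real ffmpeg invocation is
    # not in the snippet. job_timeout='1h' above caps its runtime.
    subprocess.run(['ffmpeg', '-version'], check=True)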
Example 29
def create_app(config_class=Config):
    app = Flask(__name__)
    app.config.from_object(config_class)

    db.init_app(app)
    migrate.init_app(app, db)
    login.init_app(app)
    mail.init_app(app)
    bootstrap.init_app(app)
    moment.init_app(app)
    babel.init_app(app)

    from app.errors import bp as errors_bp
    app.register_blueprint(errors_bp)

    from app.auth import bp as auth_bp
    app.register_blueprint(auth_bp, url_prefix='/auth')

    from app.main import bp as main_bp
    app.register_blueprint(main_bp)

    app.elasticsearch = Elasticsearch([app.config['ELASTICSEARCH_URL']]) \
        if app.config['ELASTICSEARCH_URL'] else None

    app.redis = Redis.from_url(app.config['REDIS_URL'])
    app.task_queue = rq.Queue('microblog-tasks', connection=app.redis)

    from app.api import bp as api_bp
    app.register_blueprint(api_bp, url_prefix='/api')

    if not app.debug and not app.testing:
        if app.config['MAIL_SERVER']:
            auth = None
            if app.config['MAIL_USERNAME'] or app.config['MAIL_PASSWORD']:
                auth = (app.config['MAIL_USERNAME'],
                        app.config['MAIL_PASSWORD'])
            secure = None
            if app.config['MAIL_USE_TLS']:
                secure = ()
            mail_handler = SMTPHandler(
                mailhost=(app.config['MAIL_SERVER'], app.config['MAIL_PORT']),
                fromaddr='no-reply@' + app.config['MAIL_SERVER'],
                toaddrs=app.config['ADMINS'],
                subject='Microblog Failure',
                credentials=auth,
                secure=secure)
            mail_handler.setLevel(logging.ERROR)
            app.logger.addHandler(mail_handler)

        if not os.path.exists('logs'):
            os.mkdir('logs')
        file_handler = RotatingFileHandler('logs/microblog.log',
                                           maxBytes=10240,
                                           backupCount=10)
        file_handler.setFormatter(
            logging.Formatter('%(asctime)s %(levelname)s: %(message)s '
                              '[in %(pathname)s:%(lineno)d]'))
        file_handler.setLevel(logging.INFO)
        app.logger.addHandler(file_handler)

        app.logger.setLevel(logging.INFO)
        app.logger.info('Microblog startup')

    return app
Example 30
import datetime
import os

import requests
import rq
from flask import (Flask, render_template, url_for, request, redirect,
                   send_from_directory, jsonify, session, make_response)
from PIL import Image
from redis import Redis

from flaskr.db import get_db

os.environ['NO_PROXY'] = '127.0.0.1'
queue = rq.Queue('save-image', connection=Redis.from_url('redis://'))
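
# Hypothetical usage of the queue above; 'flaskr.tasks.save_image' is an
# assumed dotted path that does not appear in this snippet:
#
#   job = queue.enqueue('flaskr.tasks.save_image', image_url)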


def create_app(test_config=None):
    # create and configure the app
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_mapping(
        SECRET_KEY='dev',
        DATABASE=os.path.join(app.instance_path, 'flaskr.sqlite'),
    )

    if test_config is None:
        # load the instance config, if it exists, when not testing
        app.config.from_pyfile('config.py', silent=True)
    else:
        # load the test config if passed in
        app.config.from_mapping(test_config)