Example #1
def init_test_db(force=False):
    """Same as `init_db` command, but creates a database that will be used to
    run tests and doesn't import data (no need to do that).

    `SQLALCHEMY_TEST_URI` must be defined in the config file.
    """
    if force:
        exit_code = _run_psql('drop_test_db.sql')
        if exit_code != 0:
            raise Exception('Failed to drop existing database and user! Exit code: %i' % exit_code)

    print('Creating database and user for testing...')
    exit_code = _run_psql('create_test_db.sql')
    if exit_code != 0:
        raise Exception('Failed to create new database and user! Exit code: %i' % exit_code)

    exit_code = _run_psql('create_extensions.sql', 'ab_test')
    if exit_code != 0:
        raise Exception('Failed to create database extensions! Exit code: %i' % exit_code)

    db.init_db_engine(config.SQLALCHEMY_TEST_URI)

    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_types.sql'))
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_tables.sql'))
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_primary_keys.sql'))
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_foreign_keys.sql'))
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_indexes.sql'))

    print("Done!")
Example #2
def init_test_db(force=False):
    """Same as `init_db` command, but creates a database that will be used to
    run tests and doesn't import data (no need to do that).

    `SQLALCHEMY_TEST_URI` must be defined in the config file.
    """
    if force:
        exit_code = _run_psql('drop_test_db.sql')
        if exit_code != 0:
            raise Exception(
                'Failed to drop existing database and user! Exit code: %i' %
                exit_code)

    print('Creating database and user for testing...')
    exit_code = _run_psql('create_test_db.sql')
    if exit_code != 0:
        raise Exception(
            'Failed to create new database and user! Exit code: %i' %
            exit_code)

    exit_code = _run_psql('create_extensions.sql', 'ab_test')
    if exit_code != 0:
        raise Exception('Failed to create database extensions! Exit code: %i' %
                        exit_code)

    db.init_db_engine(config.SQLALCHEMY_TEST_URI)

    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_types.sql'))
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_tables.sql'))
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_primary_keys.sql'))
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_foreign_keys.sql'))
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_indexes.sql'))

    print("Done!")
Example #3
def incremental(location, id, threads):
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    dump_id, start_t, end_t = dump.prepare_incremental_dump(int(id) if id else None)
    print("Creating incremental dumps with data between %s and %s:\n" % (start_t, end_t))
    _incremental_db(location, dump_id, threads)
    _incremental_json_lowlevel(location, dump_id)
    _incremental_json_highlevel(location, dump_id)
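Every example on this page starts by calling db.init_db_engine with a connection URI before touching the database. A minimal sketch of such a function, assuming SQLAlchemy; the body is an assumption, and only the name, the URI argument, and the fact that Example #23 below uses its return value come from the examples:

from sqlalchemy import create_engine

engine = None

def init_db_engine(connect_str):
    # Hypothetical sketch: create the shared engine and keep it as a module global.
    global engine
    engine = create_engine(connect_str)
    return engine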
Example #4
def generate_snapshots():
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    with db.engine.connect() as connection:
        for job in get_all_jobs(connection):
            set_snapshot_id(connection=connection,
                            job_id=job["job_id"],
                            snapshot_id=db.dataset.create_snapshot(
                                job["dataset_id"]))
Example #5
def add_admin(username, force=False):
    """Make user an admin."""
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    try:
        db.user.set_admin(username, admin=True, force=force)
    except db.exceptions.DatabaseException as e:
        click.echo("Error: %s" % e, err=True)
        return
    click.echo("Made %s an admin." % username)
Example #6
def remove_admin(username):
    """Remove admin privileges from a user."""
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    try:
        db.user.set_admin(username, admin=False)
    except db.exceptions.DatabaseException as e:
        click.echo("Error: %s" % e, err=True)
        return
    click.echo("Removed admin privileges from %s." % username)
Example #7
def generate_snapshots():
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    with db.engine.connect() as connection:
        for job in get_all_jobs(connection):
            set_snapshot_id(
                connection=connection,
                job_id=job["job_id"],
                snapshot_id=db.dataset.create_snapshot(job["dataset_id"])
            )
Example #8
def create_app():
    app = Flask(__name__)

    # Configuration
    sys.path.append(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), ".."))
    import default_config
    app.config.from_object(default_config)
    import config
    app.config.from_object(config)

    # Logging
    from webserver.loggers import init_loggers
    init_loggers(app)

    # Database connection
    from db import init_db_engine
    init_db_engine(app.config['SQLALCHEMY_DATABASE_URI'])

    # Extensions
    from flask_uuid import FlaskUUID
    FlaskUUID(app)

    # MusicBrainz
    import musicbrainzngs
    from db import SCHEMA_VERSION
    musicbrainzngs.set_useragent(app.config['MUSICBRAINZ_USERAGENT'],
                                 SCHEMA_VERSION)
    if app.config['MUSICBRAINZ_HOSTNAME']:
        musicbrainzngs.set_hostname(app.config['MUSICBRAINZ_HOSTNAME'])

    # OAuth
    from webserver.login import login_manager, provider
    login_manager.init_app(app)
    provider.init(app.config['MUSICBRAINZ_CLIENT_ID'],
                  app.config['MUSICBRAINZ_CLIENT_SECRET'])

    # Error handling
    from webserver.errors import init_error_handlers
    init_error_handlers(app)

    # Static files
    import static_manager
    static_manager.read_manifest()

    # Template utilities
    app.jinja_env.add_extension('jinja2.ext.do')
    from webserver import utils
    app.jinja_env.filters['date'] = utils.reformat_date
    app.jinja_env.filters['datetime'] = utils.reformat_datetime
    app.context_processor(
        lambda: dict(get_static_path=static_manager.get_static_path))

    _register_blueprints(app)

    return app
Example #9
def init_db(archive, force):
    """Initializes database and imports data if needed.

    This process involves several steps:
    1. Table structure is created.
    2. Data is imported from the archive if it is specified.
    3. Primary keys and foreign keys are created.
    4. Indexes are created.

    Data dump needs to be a .tar.xz archive produced by export command.

    More information about populating a PostgreSQL database efficiently can be
    found at http://www.postgresql.org/docs/current/static/populate.html.
    """
    if force:
        exit_code = _run_psql('drop_db.sql')
        if exit_code != 0:
            raise Exception(
                'Failed to drop existing database and user! Exit code: %i' %
                exit_code)

    print('Creating user and a database...')
    exit_code = _run_psql('create_db.sql')
    if exit_code != 0:
        raise Exception(
            'Failed to create new database and user! Exit code: %i' %
            exit_code)

    print('Creating database extensions...')
    exit_code = _run_psql('create_extensions.sql', 'acousticbrainz')
    if exit_code != 0:
        raise Exception('Failed to create database extensions! Exit code: %i' %
                        exit_code)

    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)

    print('Creating types...')
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_types.sql'))

    print('Creating tables...')
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_tables.sql'))

    if archive:
        print('Importing data...')
        db.dump.import_db_dump(archive)
    else:
        print('Skipping data importing.')

    print('Creating primary and foreign keys...')
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_primary_keys.sql'))
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_foreign_keys.sql'))

    print('Creating indexes...')
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_indexes.sql'))

    print("Done!")
Example #10
def main(args):
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    if args.source == "musicbrainz":
        import_musicbrainz(args.filename)
    elif args.source == "discogs":
        import_discogs(args.filename)
    elif args.source == "msdartist":
        import_msd_artists(args.filename)
    elif args.source == "msdsong":
        import_msd_songs(args.filename)
Example #11
def full_db(location, threads, rotate):
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    print("Creating full database dump...")
    path = dump.dump_db(location, threads)
    print("Done! Created:", path)

    if rotate:
        print("Removing old dumps (except two latest)...")
        remove_old_archives(location, "acousticbrainz-dump-[0-9]+-[0-9]+.tar.xz",
                            is_dir=False, sort_key=lambda x: os.path.getmtime(x))
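The rotation step relies on a remove_old_archives helper that is not shown. A sketch of the behaviour implied by the call site, deleting every entry that matches the pattern except the two newest; the body is an assumption:

import os
import re
import shutil

def remove_old_archives(location, pattern, is_dir=False, sort_key=None):
    # Hypothetical sketch: keep the two latest matching entries, delete the rest.
    entries = [os.path.join(location, name) for name in os.listdir(location)
               if re.match(pattern, name)]
    for entry in sorted(entries, key=sort_key)[:-2]:
        if is_dir:
            shutil.rmtree(entry)
        else:
            os.remove(entry)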
Example #12
def json(location, rotate, no_lowlevel, no_highlevel):
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    if no_lowlevel and no_highlevel:
        print("wut? check your options, mate!")

    if not no_lowlevel:
        _json_lowlevel(location, rotate)

    if not no_highlevel:
        _json_highlevel(location, rotate)
Example #13
def main():
    logging.info("Starting dataset evaluator...")
    while True:
        db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
        pending_job = db.dataset_eval.get_next_pending_job()
        if pending_job:
            logging.info("Processing job %s..." % pending_job["id"])
            evaluate_dataset(pending_job)
        else:
            logging.info("No pending datasets. Sleeping %s seconds." % SLEEP_DURATION)
            time.sleep(SLEEP_DURATION)
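Note that this worker re-initializes the database engine on every loop iteration. Unless that is deliberate (for example, to recover from dropped connections between jobs), the call could be hoisted out of the loop; a sketch:

def main():
    logging.info("Starting dataset evaluator...")
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)  # initialize once, up front
    while True:
        pending_job = db.dataset_eval.get_next_pending_job()
        # ... rest unchanged from the example above ...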
Example #14
def main():
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    total = db.data.get_count_pending_songs()
    done = 0
    starttime = time.time()
    thisdone, rem = lookup()
    done += thisdone
    while rem > 0:
        thisdone, rem = lookup()
        done += thisdone
        durdelta, remdelta = util.stats(done, total, starttime)
        log.info("Done %s/%s in %s; %s remaining", done, total, str(durdelta), str(remdelta))
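The progress line relies on util.stats, which is not shown. A sketch of what it plausibly returns, an (elapsed, estimated remaining) pair of timedeltas; the body is an assumption:

import datetime
import time

def stats(done, total, starttime):
    # Hypothetical sketch: estimate remaining time from the average rate so far.
    elapsed = time.time() - starttime
    rate = done / elapsed if elapsed else 0.0
    remaining = (total - done) / rate if rate else 0.0
    return (datetime.timedelta(seconds=int(elapsed)),
            datetime.timedelta(seconds=int(remaining)))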
Example #15
def init_db(archive, force):
    """Initializes database and imports data if needed.

    This process involves several steps:
    1. Table structure is created.
    2. Data is imported from the archive if it is specified.
    3. Primary keys and foreign keys are created.
    4. Indexes are created.

    Data dump needs to be a .tar.xz archive produced by export command.

    More information about populating a PostgreSQL database efficiently can be
    found at http://www.postgresql.org/docs/current/static/populate.html.
    """
    if force:
        exit_code = _run_psql('drop_db.sql')
        if exit_code != 0:
            raise Exception('Failed to drop existing database and user! Exit code: %i' % exit_code)

    print('Creating user and a database...')
    exit_code = _run_psql('create_db.sql')
    if exit_code != 0:
        raise Exception('Failed to create new database and user! Exit code: %i' % exit_code)

    print('Creating database extensions...')
    exit_code = _run_psql('create_extensions.sql', 'acousticbrainz')
    if exit_code != 0:
        raise Exception('Failed to create database extensions! Exit code: %i' % exit_code)

    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)

    print('Creating types...')
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_types.sql'))

    print('Creating tables...')
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_tables.sql'))

    if archive:
        print('Importing data...')
        db.dump.import_db_dump(archive)
    else:
        print('Skipping data importing.')

    print('Creating primary and foreign keys...')
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_primary_keys.sql'))
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_foreign_keys.sql'))

    print('Creating indexes...')
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_indexes.sql'))

    print("Done!")
Example #16
def incremental_info(all=False):
    """Prints information about incremental dumps: id, timestamp.

    By default outputs information for the latest dump.
    """
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    info = dump.list_incremental_dumps()
    if info:
        if all:
            print('Incremental dumps:')
            for current in info:
                print(' - %s at %s' % current)
        else:
            print('Last dump ID: %s\nTimestamp: %s' % info[0])
    else:
        print('No incremental dumps yet.')
Example #17
def full(ctx, location, threads, rotate):
    """This command creates:
    1. New incremental dump record (+ incremental dumps unless it's the first record)
    2. Full database dump
    3. Full JSON dump

    Archive rotation is enabled by default for full dumps.
    """
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    try:
        start_t = dump.prepare_incremental_dump()[1]
        if start_t:  # not the first incremental dump
            incremental(location, id=None, threads=None)
        else:
            print("Skipping incremental dump creation since it's the first one.\n")
    except dump.NoNewData:
        print("Skipping incremental dump creation. No new data.\n")

    ctx.invoke(full_db, location=location, threads=threads, rotate=rotate)
    ctx.invoke(json, location=location, rotate=rotate)
Example #18
def rewrite_lowlevel():
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    oldengine = create_engine(config.SQLALCHEMY_DATABASE_URI_OLD,
                              poolclass=NullPool)

    status = read_migrate_status()

    connection = oldengine.connect()
    res1 = connection.execute(
        """SELECT id FROM lowlevel ll WHERE id > %s ORDER BY id""", (status, ))
    total = 0
    newconn = db.engine.connect()
    while True:
        id_list = res1.fetchmany(size=DUMP_CHUNK_SIZE)
        if not id_list:
            break

        id_list = tuple([i[0] for i in id_list])

        # make new transaction
        newtrans = newconn.begin()
        try:
            migrate_rows(oldengine, newconn, id_list)
            newtrans.commit()
        except sqlalchemy.exc.DataError:  # Database error
            newtrans.rollback()
            # If there was an error, split the rows into one transaction per item
            # and find the error, and just ignore it
            # print "*** Error inserting rows, doing them one at a time"
            for i in id_list:
                newtrans = newconn.begin()
                try:
                    migrate_rows(oldengine, newconn, (i, ))
                    newtrans.commit()
                except sqlalchemy.exc.DataError:  # Database error
                    print "  *** Exception processing row %s, skipping" % i
                    newtrans.rollback()
                    # rollback, ignore just this one
                    pass
        logging.info("processed %s, now at %s" %
                     (DUMP_CHUNK_SIZE, max(id_list)))
Example #19
def rewrite_lowlevel():
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    oldengine = create_engine(config.SQLALCHEMY_DATABASE_URI_OLD, poolclass=NullPool)

    status = read_migrate_status()

    connection = oldengine.connect()
    res1 = connection.execute(
        """SELECT id FROM lowlevel ll WHERE id > %s ORDER BY id""", (status, ))
    total = 0
    newconn = db.engine.connect()
    while True:
        id_list = res1.fetchmany(size=DUMP_CHUNK_SIZE)
        if not id_list:
            break

        id_list = tuple([i[0] for i in id_list])

        # make new transaction
        newtrans = newconn.begin()
        try:
            migrate_rows(oldengine, newconn, id_list)
            newtrans.commit()
        except sqlalchemy.exc.DataError: # Database error
            newtrans.rollback()
            # If there was an error, split the rows into one transaction per item
            # and find the error, and just ignore it
            # print "*** Error inserting rows, doing them one at a time"
            for i in id_list:
                newtrans = newconn.begin()
                try:
                    migrate_rows(oldengine, newconn, (i, ))
                    newtrans.commit()
                except sqlalchemy.exc.DataError: # Database error
                    print "  *** Exception processing row %s, skipping" % i
                    newtrans.rollback()
                    # rollback, ignore just this one
                    pass
        logging.info("processed %s, now at %s" % (DUMP_CHUNK_SIZE, max(id_list)))
Example #20
def init_db(archive, force, skip_create_db=False):
    """Initialize database and import data.

    This process involves several steps:
    1. Table structure is created.
    2. Data is imported from the archive if it is specified.
    3. Primary keys and foreign keys are created.
    4. Indexes are created.

    Data dump needs to be a .tar.xz archive produced by export command.

    More information about populating a PostgreSQL database efficiently can be
    found at http://www.postgresql.org/docs/current/static/populate.html.
    """

    db.init_db_engine(current_app.config['POSTGRES_ADMIN_URI'])
    if force:
        res = db.run_sql_script_without_transaction(os.path.join(ADMIN_SQL_DIR, 'drop_db.sql'))
        if not res:
            raise Exception('Failed to drop existing database and user!')

    if not skip_create_db:
        current_app.logger.info('Creating user and a database...')
        res = db.run_sql_script_without_transaction(os.path.join(ADMIN_SQL_DIR, 'create_db.sql'))
        if not res:
            raise Exception('Failed to create new database and user!')

    current_app.logger.info('Creating database extensions...')
    db.init_db_engine(current_app.config['POSTGRES_ADMIN_AB_URI'])
    res = db.run_sql_script_without_transaction(os.path.join(ADMIN_SQL_DIR, 'create_extensions.sql'))

    db.init_db_engine(current_app.config['SQLALCHEMY_DATABASE_URI'])

    current_app.logger.info('Creating types...')
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_types.sql'))

    current_app.logger.info('Creating tables...')
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_tables.sql'))

    if archive:
        current_app.logger.info('Importing data...')
        db.dump.import_dump(archive)
    else:
        current_app.logger.info('Skipping data importing.')
        current_app.logger.info('Loading fixtures...')
        current_app.logger.info('Models...')
        db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_models.sql'))

    current_app.logger.info('Creating primary and foreign keys...')
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_primary_keys.sql'))
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_foreign_keys.sql'))

    current_app.logger.info('Creating indexes...')
    db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'create_indexes.sql'))

    current_app.logger.info("Done!")
Example #21
    def setUp(self):
        db.init_db_engine(config.SQLALCHEMY_TEST_URI)
        self.reset_db()
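This setUp comes from a test case base class. A sketch of the surrounding class, assuming the test database itself was created beforehand with init_test_db; the reset_db body and the truncate script are hypothetical:

import unittest

class DatabaseTestCase(unittest.TestCase):
    # Hypothetical base class for tests that need a clean database.

    def setUp(self):
        db.init_db_engine(config.SQLALCHEMY_TEST_URI)
        self.reset_db()

    def reset_db(self):
        # Hypothetical: empty all tables so each test starts from a known state.
        db.run_sql_script(os.path.join(ADMIN_SQL_DIR, 'truncate_tables.sql'))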
Example #22
def create_app(debug=None, config_path=None):
    app = CustomFlask(import_name=__name__,
                      use_flask_uuid=True,
                      use_debug_toolbar=True)

    # Configuration
    root_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")
    app.config.from_pyfile(os.path.join(root_path, 'default_config.py'))
    app.config.from_pyfile(os.path.join(root_path, 'custom_config.py'),
                           silent=True)

    if config_path:
        app.config.from_pyfile(config_path)

    if debug is not None:
        app.debug = debug

    # Logging
    from webserver.loggers import init_loggers
    init_loggers(app)

    # Database connection
    from db import init_db_engine
    init_db_engine(app.config['SQLALCHEMY_DATABASE_URI'])

    # Memcached
    if 'MEMCACHED_SERVERS' in app.config:
        from db import cache
        cache.init(app.config['MEMCACHED_SERVERS'],
                   app.config['MEMCACHED_NAMESPACE'],
                   debug=1 if app.debug else 0)

    # Extensions
    from flask_uuid import FlaskUUID
    FlaskUUID(app)

    # MusicBrainz
    import musicbrainzngs
    from db import SCHEMA_VERSION
    musicbrainzngs.set_useragent(app.config['MUSICBRAINZ_USERAGENT'],
                                 SCHEMA_VERSION)
    if app.config['MUSICBRAINZ_HOSTNAME']:
        musicbrainzngs.set_hostname(app.config['MUSICBRAINZ_HOSTNAME'])

    # OAuth
    from webserver.login import login_manager, provider
    login_manager.init_app(app)
    provider.init(app.config['MUSICBRAINZ_CLIENT_ID'],
                  app.config['MUSICBRAINZ_CLIENT_SECRET'])

    # Error handling
    from webserver.errors import init_error_handlers
    init_error_handlers(app)

    # Static files
    import static_manager
    static_manager.read_manifest()

    # Template utilities
    app.jinja_env.add_extension('jinja2.ext.do')
    from webserver import utils
    app.jinja_env.filters['date'] = utils.reformat_date
    app.jinja_env.filters['datetime'] = utils.reformat_datetime
    app.context_processor(
        lambda: dict(get_static_path=static_manager.get_static_path))

    _register_blueprints(app)

    # Admin section
    from flask_admin import Admin
    from webserver.admin import views as admin_views
    admin = Admin(app, index_view=admin_views.HomeView(name='Admin'))
    admin.add_view(admin_views.AdminsView(name='Admins'))

    return app
Example #23
from flask import Flask, jsonify, redirect
import sqlalchemy
from db import init_db_engine
import json
from collections import OrderedDict

connect_str = "postgres://*****:*****@localhost:5432/codeai"
engine = init_db_engine(connect_str)

MY_API = '/api/1.0/'
app = Flask(__name__)


@app.route(MY_API + 'submission/<handle>/<type>/<verdict>')
def user_submission(handle, type, verdict):
    with engine.connect() as connection:
        results = connection.execute(
            sqlalchemy.text("""
            SELECT q.id,
                   u.handle,
                   verdict.name,
                   language.name as language,
                   luq.timestamp as timestamp,
                   luq.relative_time,
                   luq.participant_type
              FROM l_user_question luq
              JOIN "user" u
                ON u.id = luq.user_id
              JOIN question q
                ON q.id = luq.question_id
Example #24
def import_data(archive):
    """Imports data dump into the database."""
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    print('Importing data...')
    db.dump.import_db_dump(archive)
Example #25
def compute_stats():
    """Compute any outstanding hourly stats and add to the database."""
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    import datetime
    import pytz
    db.stats.compute_stats(datetime.datetime.now(pytz.utc))
Example #26
from gevent import monkey
monkey.patch_all()

import flask
from flask import Flask, redirect
from flask_restful import Resource, Api, abort, reqparse, request
from engine import Logger, EngineRunner
from db import init_db_engine, create_db, upgrade
from plugin_managers import load_plugins, get_all_plugins, upgrades, TrackersManager, ClientsManager
from flask_socketio import SocketIO, emit

init_db_engine("sqlite:///monitorrent.db", True)
load_plugins()
upgrade(get_all_plugins(), upgrades)
create_db()

tracker_manager = TrackersManager()
clients_manager = ClientsManager()

static_folder = "webapp"
app = Flask(__name__, static_folder=static_folder, static_url_path='')
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app)


class EngineWebSocketLogger(Logger):
    def started(self):
        socketio.emit('started', namespace='/execute')

    def finished(self, finish_time, exception):
        args = {
Example #27
    def setUp(self):
        db.init_db_engine(config.SQLALCHEMY_TEST_URI)
        self.reset_db()
Example #28
def cache_stats():
    """Compute recent stats and add to memcache."""
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    db.cache.init(config.MEMCACHED_SERVERS)
    db.stats.add_stats_to_cache()
Example #29
def create_app(debug=None, config_path=None):
    app = create_app_with_configuration(config_path)

    if debug is not None:
        app.debug = debug

    if app.debug and app.config['SECRET_KEY']:
        app.init_debug_toolbar()

    # Logging
    from webserver.loggers import init_loggers
    init_loggers(app)

    # Database connection
    from db import init_db_engine
    init_db_engine(app.config['SQLALCHEMY_DATABASE_URI'])

    # Cache
    if 'REDIS_HOST' in app.config and\
       'REDIS_PORT' in app.config and\
       'REDIS_NAMESPACE' in app.config and\
       'REDIS_NS_VERSIONS_LOCATION' in app.config:
        if not os.path.exists(app.config['REDIS_NS_VERSIONS_LOCATION']):
            os.makedirs(app.config['REDIS_NS_VERSIONS_LOCATION'])

        from brainzutils import cache
        cache.init(host=app.config['REDIS_HOST'],
                   port=app.config['REDIS_PORT'],
                   namespace=app.config['REDIS_NAMESPACE'],
                   ns_versions_loc=app.config['REDIS_NS_VERSIONS_LOCATION'])
    else:
        raise Exception(
            'One or more redis cache configuration options are missing from custom_config.py'
        )

    # Extensions
    from flask_uuid import FlaskUUID
    FlaskUUID(app)

    # MusicBrainz
    import musicbrainzngs
    from db import SCHEMA_VERSION
    musicbrainzngs.set_useragent(app.config['MUSICBRAINZ_USERAGENT'],
                                 SCHEMA_VERSION)
    if app.config['MUSICBRAINZ_HOSTNAME']:
        musicbrainzngs.set_hostname(app.config['MUSICBRAINZ_HOSTNAME'])

    # OAuth
    from webserver.login import login_manager, provider
    login_manager.init_app(app)
    provider.init(app.config['MUSICBRAINZ_CLIENT_ID'],
                  app.config['MUSICBRAINZ_CLIENT_SECRET'])

    # Error handling
    from webserver.errors import init_error_handlers
    init_error_handlers(app)

    # Static files
    import static_manager
    static_manager.read_manifest()

    # Template utilities
    app.jinja_env.add_extension('jinja2.ext.do')
    from webserver import utils
    app.jinja_env.filters['date'] = utils.reformat_date
    app.jinja_env.filters['datetime'] = utils.reformat_datetime
    app.context_processor(
        lambda: dict(get_static_path=static_manager.get_static_path))

    _register_blueprints(app)

    # Admin section
    from flask_admin import Admin
    from webserver.admin import views as admin_views
    admin = Admin(app, index_view=admin_views.HomeView(name='Admin'))
    admin.add_view(admin_views.AdminsView(name='Admins'))

    return app
Example #30
def create_app(debug=None):
    app = CustomFlask(
        import_name=__name__,
        use_flask_uuid=True,
    )

    # Configuration
    load_config(app)

    if debug is not None:
        app.debug = debug

    if app.debug and app.config['SECRET_KEY']:
        app.init_debug_toolbar()

    # Logging
    app.init_loggers(file_config=app.config.get('LOG_FILE'),
                     email_config=app.config.get('LOG_EMAIL'),
                     sentry_config=app.config.get('LOG_SENTRY')
                     )

    # Database connection
    from db import init_db_engine
    init_db_engine(app.config['SQLALCHEMY_DATABASE_URI'])

    # Cache
    if 'REDIS_HOST' in app.config and\
       'REDIS_PORT' in app.config and\
       'REDIS_NAMESPACE' in app.config and\
       'REDIS_NS_VERSIONS_LOCATION' in app.config:
        if not os.path.exists(app.config['REDIS_NS_VERSIONS_LOCATION']):
            os.makedirs(app.config['REDIS_NS_VERSIONS_LOCATION'])

        from brainzutils import cache
        cache.init(
            host=app.config['REDIS_HOST'],
            port=app.config['REDIS_PORT'],
            namespace=app.config['REDIS_NAMESPACE'],
            ns_versions_loc=app.config['REDIS_NS_VERSIONS_LOCATION'])
    else:
        raise Exception('One or more redis cache configuration options are missing from config.py')

    # Extensions
    from flask_uuid import FlaskUUID
    FlaskUUID(app)

    # MusicBrainz
    import musicbrainzngs
    from db import SCHEMA_VERSION
    musicbrainzngs.set_useragent(app.config['MUSICBRAINZ_USERAGENT'], SCHEMA_VERSION)
    if app.config['MUSICBRAINZ_HOSTNAME']:
        musicbrainzngs.set_hostname(app.config['MUSICBRAINZ_HOSTNAME'])

    # OAuth
    from webserver.login import login_manager, provider
    login_manager.init_app(app)
    provider.init(app.config['MUSICBRAINZ_CLIENT_ID'],
                  app.config['MUSICBRAINZ_CLIENT_SECRET'])

    # Error handling
    from webserver.errors import init_error_handlers
    init_error_handlers(app)

    # Static files
    import static_manager
    static_manager.read_manifest()

    # Template utilities
    app.jinja_env.add_extension('jinja2.ext.do')
    from webserver import utils
    app.jinja_env.filters['date'] = utils.reformat_date
    app.jinja_env.filters['datetime'] = utils.reformat_datetime
    app.context_processor(lambda: dict(get_static_path=static_manager.get_static_path))

    _register_blueprints(app)

    # Admin section
    from flask_admin import Admin
    from webserver.admin import views as admin_views
    admin = Admin(app, index_view=admin_views.HomeView(name='Admin'))
    admin.add_view(admin_views.AdminsView(name='Admins'))

    @app.before_request
    def before_request_gdpr_check():
        # skip certain pages, static content and the API
        if request.path == url_for('index.gdpr_notice') \
          or request.path == url_for('login.logout') \
          or request.path.startswith('/_debug') \
          or request.path.startswith('/static') \
          or request.path.startswith(API_PREFIX):
            return
        # otherwise if user is logged in and hasn't agreed to gdpr,
        # redirect them to agree to terms page.
        elif current_user.is_authenticated and current_user.gdpr_agreed is None:
            return redirect(url_for('index.gdpr_notice', next=request.full_path))

    return app
Example #31
def cache_stats():
    """Compute recent stats and add to memcache."""
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    db.cache.init(config.MEMCACHED_SERVERS)
    db.stats.add_stats_to_cache()
Example #32
def compute_stats():
    """Compute any outstanding hourly stats and add to the database."""
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    import datetime
    import pytz
    db.stats.compute_stats(datetime.datetime.now(pytz.utc))
Example #33
def import_data(archive):
    """Imports data dump into the database."""
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)
    print('Importing data...')
    db.dump.import_db_dump(archive)
Example #34
def create_app():
    app = Flask(__name__)

    # Configuration
    sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), ".."))
    import config
    app.config.from_object(config)

    # Logging
    from webserver.loggers import init_loggers
    init_loggers(app)

    # Database connection
    from db import init_db_engine
    init_db_engine(app.config['SQLALCHEMY_DATABASE_URI'])

    # Memcached
    if 'MEMCACHED_SERVERS' in app.config:
        from db import cache
        cache.init(app.config['MEMCACHED_SERVERS'],
                   app.config['MEMCACHED_NAMESPACE'],
                   debug=1 if app.debug else 0)

    # Extensions
    from flask_uuid import FlaskUUID
    FlaskUUID(app)

    # MusicBrainz
    import musicbrainzngs
    from db import SCHEMA_VERSION
    musicbrainzngs.set_useragent(app.config['MUSICBRAINZ_USERAGENT'], SCHEMA_VERSION)
    if app.config['MUSICBRAINZ_HOSTNAME']:
        musicbrainzngs.set_hostname(app.config['MUSICBRAINZ_HOSTNAME'])

    # OAuth
    from webserver.login import login_manager, provider
    login_manager.init_app(app)
    provider.init(app.config['MUSICBRAINZ_CLIENT_ID'],
                  app.config['MUSICBRAINZ_CLIENT_SECRET'])

    # Error handling
    from webserver.errors import init_error_handlers
    init_error_handlers(app)

    # Template utilities
    app.jinja_env.add_extension('jinja2.ext.do')
    from webserver import utils
    app.jinja_env.filters['date'] = utils.reformat_date
    app.jinja_env.filters['datetime'] = utils.reformat_datetime

    # Blueprints
    from webserver.views.index import index_bp
    from webserver.views.data import data_bp
    from webserver.views.api import api_bp
    from webserver.views.stats import stats_bp
    from webserver.views.login import login_bp
    from webserver.views.user import user_bp
    from webserver.views.datasets import datasets_bp
    app.register_blueprint(index_bp)
    app.register_blueprint(data_bp)
    app.register_blueprint(api_bp)
    app.register_blueprint(stats_bp)
    app.register_blueprint(login_bp, url_prefix='/login')
    app.register_blueprint(user_bp, url_prefix='/user')
    app.register_blueprint(datasets_bp, url_prefix='/datasets')

    return app
Example #35
    def context(self):
        with self.create_app().app_context():
            db.init_db_engine(config.SQLALCHEMY_TEST_URI)
            flask.session["user_id"] = self.test_user_id
            flask.session["_fresh"] = True
            yield
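A generator method like this is presumably wrapped as a context manager somewhere; a sketch of how it might be declared and used in a test. The class names, the contextmanager decorator, and the request in the test method are assumptions:

from contextlib import contextmanager

class ViewTestCase(DatabaseTestCase):
    # Hypothetical test class around the method above.

    @contextmanager
    def context(self):
        with self.create_app().app_context():
            db.init_db_engine(config.SQLALCHEMY_TEST_URI)
            flask.session["user_id"] = self.test_user_id
            flask.session["_fresh"] = True
            yield

    def test_profile_page(self):
        # Hypothetical usage: everything inside the block runs as the logged-in user.
        with self.context():
            response = self.client.get('/some/protected/page')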
Example #36
def main(num_threads):
    print("High-level extractor daemon starting with %d threads" % num_threads)
    sys.stdout.flush()
    build_sha1 = get_build_sha1(HIGH_LEVEL_EXTRACTOR_BINARY)
    create_profile(PROFILE_CONF_TEMPLATE, PROFILE_CONF, build_sha1)
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)

    num_processed = 0

    pool = {}
    docs = []
    while True:
        # Check to see if we need more database rows
        if len(docs) == 0:
            # Fetch more rows from the DB
            docs = db.data.get_unprocessed_highlevel_documents()

            # We will fetch some rows that are already in progress. Remove those.
            in_progress = pool.keys()
            filtered = []
            for mbid, doc, id in docs:
                if mbid not in in_progress:
                    filtered.append((mbid, doc, id))
            docs = filtered

        if len(docs):
            # Start one document
            mbid, doc, id = docs.pop()
            th = HighLevel(mbid, doc, id)
            th.start()
            print("start %s" % mbid)
            sys.stdout.flush()
            pool[mbid] = th

        # If we're at max threads, wait for one to complete
        while True:
            if len(pool) == 0 and len(docs) == 0:
                if num_processed > 0:
                    print("processed %s documents, none remain. Sleeping." %
                          num_processed)
                    sys.stdout.flush()
                num_processed = 0
                # Let's be nice and not keep any connections to the DB open while we nap
                # TODO: Close connections when we're sleeping
                sleep(SLEEP_DURATION)

            for mbid in list(pool.keys()):  # copy the keys; entries are deleted below
                if not pool[mbid].is_alive():

                    # Fetch the data and clean up the thread object
                    hl_data = pool[mbid].get_data()
                    ll_id = pool[mbid].get_ll_id()
                    pool[mbid].join()
                    del pool[mbid]

                    try:
                        jdata = json.loads(hl_data)
                    except ValueError:
                        print("error %s: Cannot parse result document" % mbid)
                        print(hl_data)
                        sys.stdout.flush()
                        jdata = {}

                    db.data.write_high_level(mbid, ll_id, jdata, build_sha1)

                    print("done  %s" % mbid)
                    sys.stdout.flush()
                    num_processed += 1

            if len(pool) == num_threads:
                # tranquilo!
                sleep(.1)
            else:
                break
Example #37
def create_app():
    app = Flask(__name__)

    # Configuration
    sys.path.append(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), ".."))
    import config
    app.config.from_object(config)

    # Logging
    from webserver.loggers import init_loggers
    init_loggers(app)

    # Database connection
    from db import init_db_engine
    init_db_engine(app.config['SQLALCHEMY_DATABASE_URI'])

    # Memcached
    if 'MEMCACHED_SERVERS' in app.config:
        from db import cache
        cache.init(app.config['MEMCACHED_SERVERS'],
                   app.config['MEMCACHED_NAMESPACE'],
                   debug=1 if app.debug else 0)

    # Extensions
    from flask_uuid import FlaskUUID
    FlaskUUID(app)

    # MusicBrainz
    import musicbrainzngs
    from db import SCHEMA_VERSION
    musicbrainzngs.set_useragent(app.config['MUSICBRAINZ_USERAGENT'],
                                 SCHEMA_VERSION)
    if app.config['MUSICBRAINZ_HOSTNAME']:
        musicbrainzngs.set_hostname(app.config['MUSICBRAINZ_HOSTNAME'])

    # OAuth
    from webserver.login import login_manager, provider
    login_manager.init_app(app)
    provider.init(app.config['MUSICBRAINZ_CLIENT_ID'],
                  app.config['MUSICBRAINZ_CLIENT_SECRET'])

    # Error handling
    from webserver.errors import init_error_handlers
    init_error_handlers(app)

    # Static files
    import static_manager
    static_manager.read_manifest()

    # Template utilities
    app.jinja_env.add_extension('jinja2.ext.do')
    from webserver import utils
    app.jinja_env.filters['date'] = utils.reformat_date
    app.jinja_env.filters['datetime'] = utils.reformat_datetime
    app.context_processor(
        lambda: dict(get_static_path=static_manager.get_static_path))

    # Blueprints
    from webserver.views.index import index_bp
    from webserver.views.data import data_bp
    from webserver.views.api import api_bp
    from webserver.views.stats import stats_bp
    from webserver.views.login import login_bp
    from webserver.views.user import user_bp
    from webserver.views.datasets import datasets_bp
    app.register_blueprint(index_bp)
    app.register_blueprint(data_bp)
    app.register_blueprint(api_bp)
    app.register_blueprint(stats_bp)
    app.register_blueprint(login_bp, url_prefix='/login')
    app.register_blueprint(user_bp, url_prefix='/user')
    app.register_blueprint(datasets_bp, url_prefix='/datasets')

    return app
Example #38
def main(num_threads):
    print("High-level extractor daemon starting with %d threads" % num_threads)
    sys.stdout.flush()
    build_sha1 = get_build_sha1(HIGH_LEVEL_EXTRACTOR_BINARY)
    create_profile(PROFILE_CONF_TEMPLATE, PROFILE_CONF, build_sha1)
    db.init_db_engine(config.SQLALCHEMY_DATABASE_URI)

    num_processed = 0

    pool = {}
    docs = []
    while True:
        # Check to see if we need more database rows
        if len(docs) == 0:
            # Fetch more rows from the DB
            docs = db.data.get_unprocessed_highlevel_documents()

            # We will fetch some rows that are already in progress. Remove those.
            in_progress = pool.keys()
            filtered = []
            for mbid, doc, id in docs:
                if mbid not in in_progress:
                    filtered.append((mbid, doc, id))
            docs = filtered

        if len(docs):
            # Start one document
            mbid, doc, id = docs.pop()
            th = HighLevel(mbid, doc, id)
            th.start()
            print("start %s" % mbid)
            sys.stdout.flush()
            pool[mbid] = th

        # If we're at max threads, wait for one to complete
        while True:
            if len(pool) == 0 and len(docs) == 0:
                if num_processed > 0:
                    print("processed %s documents, none remain. Sleeping." % num_processed)
                    sys.stdout.flush()
                num_processed = 0
                # Let's be nice and not keep any connections to the DB open while we nap
                # TODO: Close connections when we're sleeping
                sleep(SLEEP_DURATION)

            for mbid in list(pool.keys()):  # copy the keys; entries are deleted below
                if not pool[mbid].is_alive():

                    # Fetch the data and clean up the thread object
                    hl_data = pool[mbid].get_data()
                    ll_id = pool[mbid].get_ll_id()
                    pool[mbid].join()
                    del pool[mbid]

                    try:
                        jdata = json.loads(hl_data)
                    except ValueError:
                        print("error %s: Cannot parse result document" % mbid)
                        print(hl_data)
                        sys.stdout.flush()
                        jdata = {}

                    db.data.write_high_level(mbid, ll_id, jdata, build_sha1)

                    print("done  %s" % mbid)
                    sys.stdout.flush()
                    num_processed += 1

            if len(pool) == num_threads:
                # tranquilo!
                sleep(.1)
            else:
                break
Example #39
def create_app(debug=None):
    app = CustomFlask(
        import_name=__name__,
        use_flask_uuid=True,
    )

    # Configuration
    load_config(app)

    if debug is not None:
        app.debug = debug

    if app.debug and app.config['SECRET_KEY']:
        app.init_debug_toolbar()

    # Logging
    app.init_loggers(file_config=app.config.get('LOG_FILE'),
                     email_config=app.config.get('LOG_EMAIL'),
                     sentry_config=app.config.get('LOG_SENTRY')
                     )

    # Database connection
    from db import init_db_engine
    init_db_engine(app.config['SQLALCHEMY_DATABASE_URI'])

    # Cache
    if 'REDIS_HOST' in app.config and\
       'REDIS_PORT' in app.config and\
       'REDIS_NAMESPACE' in app.config and\
       'REDIS_NS_VERSIONS_LOCATION' in app.config:
        if not os.path.exists(app.config['REDIS_NS_VERSIONS_LOCATION']):
            os.makedirs(app.config['REDIS_NS_VERSIONS_LOCATION'])

        from brainzutils import cache
        cache.init(
            host=app.config['REDIS_HOST'],
            port=app.config['REDIS_PORT'],
            namespace=app.config['REDIS_NAMESPACE'],
            ns_versions_loc=app.config['REDIS_NS_VERSIONS_LOCATION'])
    else:
        raise Exception('One or more redis cache configuration options are missing from config.py')

    # Add rate limiting support
    @app.after_request
    def after_request_callbacks(response):
        return inject_x_rate_headers(response)

    # check for ratelimit config values and set them if present
    if 'RATELIMIT_PER_IP' in app.config and 'RATELIMIT_WINDOW' in app.config:
        set_rate_limits(app.config['RATELIMIT_PER_IP'], app.config['RATELIMIT_PER_IP'], app.config['RATELIMIT_WINDOW'])

    # MusicBrainz
    import musicbrainzngs
    from db import SCHEMA_VERSION
    musicbrainzngs.set_useragent(app.config['MUSICBRAINZ_USERAGENT'], SCHEMA_VERSION)
    if app.config['MUSICBRAINZ_HOSTNAME']:
        musicbrainzngs.set_hostname(app.config['MUSICBRAINZ_HOSTNAME'])

    # OAuth
    from webserver.login import login_manager, provider
    login_manager.init_app(app)
    provider.init(app.config['MUSICBRAINZ_CLIENT_ID'],
                  app.config['MUSICBRAINZ_CLIENT_SECRET'])

    # Error handling
    from webserver.errors import init_error_handlers
    init_error_handlers(app)

    # Static files
    import static_manager

    # Template utilities
    app.jinja_env.add_extension('jinja2.ext.do')
    from webserver import utils
    app.jinja_env.filters['date'] = utils.reformat_date
    app.jinja_env.filters['datetime'] = utils.reformat_datetime
    # During development, built js and css assets don't have a hash, but in production we use
    # a manifest to map a name to name.hash.extension for caching/cache busting
    if app.debug:
        app.context_processor(lambda: dict(get_static_path=static_manager.development_get_static_path))
    else:
        static_manager.read_manifest()
        app.context_processor(lambda: dict(get_static_path=static_manager.manifest_get_static_path))

    _register_blueprints(app)

    # Admin section
    from flask_admin import Admin
    from webserver.admin import views as admin_views
    admin = Admin(app, index_view=admin_views.HomeView(name='Admin'))
    admin.add_view(admin_views.AdminsView(name='Admins'))

    @app.before_request
    def prod_https_login_redirect():
        """ Redirect to HTTPS in production except for the API endpoints
        """
        if urlparse.urlsplit(request.url).scheme == 'http' \
                and not app.config['DEBUG'] \
                and not app.config['TESTING'] \
                and request.blueprint not in ('api', 'api_v1_core', 'api_v1_datasets', 'api_v1_dataset_eval'):
            url = request.url[7:] # remove http:// from url
            return redirect('https://{}'.format(url), 301)


    @app.before_request
    def before_request_gdpr_check():
        # skip certain pages, static content and the API
        if request.path == url_for('index.gdpr_notice') \
          or request.path == url_for('login.logout') \
          or request.path.startswith('/_debug') \
          or request.path.startswith('/static') \
          or request.path.startswith(API_PREFIX):
            return
        # otherwise if user is logged in and hasn't agreed to gdpr,
        # redirect them to agree to terms page.
        elif current_user.is_authenticated and current_user.gdpr_agreed is None:
            return redirect(url_for('index.gdpr_notice', next=request.full_path))

    return app
Example #40
def create_app():
    app = Flask(__name__)

    # Configuration
    sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), ".."))
    import default_config
    app.config.from_object(default_config)
    import config
    app.config.from_object(config)

    # Logging
    from webserver.loggers import init_loggers
    init_loggers(app)

    # Database connection
    from db import init_db_engine
    init_db_engine(app.config['SQLALCHEMY_DATABASE_URI'])

    # Memcached
    if 'MEMCACHED_SERVERS' in app.config:
        from db import cache
        cache.init(app.config['MEMCACHED_SERVERS'],
                   app.config['MEMCACHED_NAMESPACE'],
                   debug=1 if app.debug else 0)

    # Extensions
    from flask_uuid import FlaskUUID
    FlaskUUID(app)

    # MusicBrainz
    import musicbrainzngs
    from db import SCHEMA_VERSION
    musicbrainzngs.set_useragent(app.config['MUSICBRAINZ_USERAGENT'], SCHEMA_VERSION)
    if app.config['MUSICBRAINZ_HOSTNAME']:
        musicbrainzngs.set_hostname(app.config['MUSICBRAINZ_HOSTNAME'])

    # OAuth
    from webserver.login import login_manager, provider
    login_manager.init_app(app)
    provider.init(app.config['MUSICBRAINZ_CLIENT_ID'],
                  app.config['MUSICBRAINZ_CLIENT_SECRET'])

    # Error handling
    from webserver.errors import init_error_handlers
    init_error_handlers(app)

    # Static files
    import static_manager
    static_manager.read_manifest()

    # Template utilities
    app.jinja_env.add_extension('jinja2.ext.do')
    from webserver import utils
    app.jinja_env.filters['date'] = utils.reformat_date
    app.jinja_env.filters['datetime'] = utils.reformat_datetime
    app.context_processor(lambda: dict(get_static_path=static_manager.get_static_path))

    _register_blueprints(app)

    # Admin section
    from flask_admin import Admin
    from webserver.admin import views as admin_views
    admin = Admin(app, index_view=admin_views.HomeView(name='Admin'))
    admin.add_view(admin_views.AdminsView(name='Admins'))

    return app
Example #41
from gevent import monkey
monkey.patch_all()

import flask
from flask import Flask, redirect
from flask_restful import Resource, Api, abort, reqparse, request
from engine import Logger, EngineRunner
from db import init_db_engine, create_db, upgrade
from plugin_managers import load_plugins, get_all_plugins, upgrades, TrackersManager, ClientsManager
from flask_socketio import SocketIO, emit

init_db_engine("sqlite:///monitorrent.db", True)
load_plugins()
upgrade(get_all_plugins(), upgrades)
create_db()

tracker_manager = TrackersManager()
clients_manager = ClientsManager()

static_folder = "webapp"
app = Flask(__name__, static_folder=static_folder, static_url_path='')
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app)

class EngineWebSocketLogger(Logger):
    def started(self):
        socketio.emit('started', namespace='/execute')

    def finished(self, finish_time, exception):
        args = {
            'finish_time': finish_time.isoformat(),