def app(request):
    """Flask application fixture.

    Creates a Flask application with a simple testing configuration,
    then creates an application context and inside of it recreates
    all databases and indices from the fixtures. Finally it yields,
    so that all tests that explicitly use the ``app`` fixture have
    access to an application context.

    See: http://flask.pocoo.org/docs/0.12/appcontext/.
    """
    app = create_app()
    # app.config.update({'DEBUG': True})

    with app.app_context():
        # Celery task imports must be local, otherwise their
        # configuration would use the default pickle serializer.
        from inspirehep.modules.migrator.tasks import migrate_from_file

        # Order matters here: drop everything first, then rebuild the
        # schema strictly through the Alembic migration history.
        db.drop_all()
        drop_alembic_version_table()
        alembic = Alembic(app=app)
        alembic.upgrade()

        # Recreate every search index from scratch; the ignore lists
        # make both calls idempotent (missing index / already exists).
        _es = app.extensions['invenio-search']
        list(_es.delete(ignore=[404]))
        list(_es.create(ignore=[400]))

        init_all_storage_paths()
        init_users_and_permissions()

        # Load demo records synchronously so they are fully indexed
        # before any test using this fixture runs.
        migrate_from_file('./inspirehep/demosite/data/demo-records-acceptance.xml.gz', wait_for_results=True)
        es.indices.refresh('records-hep')

        yield app
def create_all(app):
    """Create the database schema and search indices for ``app``.

    Replays all Alembic migrations, (re)creates the Elasticsearch
    indices, and initializes storage paths and default users and
    permissions. Assumes an application context is active.
    """
    alembic = Alembic(app=app)
    alembic.upgrade()

    _es = app.extensions['invenio-search']
    # ignore=[400] makes index creation idempotent (index may exist).
    list(_es.create(ignore=[400]))

    init_all_storage_paths()
    init_users_and_permissions()
def app():
    """Deprecated: do not use this fixtures for new tests, unless for very
    specific use cases. Use `isolated_app` instead.

    Flask application with demosite data and without any database isolation:
    any db transaction performed during the tests are persisted into the db.

    Creates a Flask application with a simple testing configuration, then
    creates an application context and inside of it recreates all databases
    and indices from the fixtures. Finally it yields, so that all tests
    that explicitly use the ``app`` fixtures have access to an application
    context.

    See: http://flask.pocoo.org/docs/0.12/appcontext/.
    """
    app = create_app(
        DEBUG=False,  # Tests may fail when turned on because of Flask bug (A setup function was called after the first request was handled. when initializing - when Alembic initialization)
        WTF_CSRF_ENABLED=False,
        CELERY_TASK_ALWAYS_EAGER=True,
        CELERY_RESULT_BACKEND='cache',
        CELERY_CACHE_BACKEND='memory',
        CELERY_TASK_EAGER_PROPAGATES=True,
        SECRET_KEY='secret!',
        RECORD_EDITOR_FILE_UPLOAD_FOLDER='tests/integration/editor/temp',
        TESTING=True,
    )

    # Patch away citation-index updates so migrating the demo records
    # does not enqueue a task per record.
    with app.app_context(), mock.patch(
        'inspirehep.modules.records.receivers.index_modified_citations_from_record.delay'
    ):
        # Celery task imports must be local, otherwise their
        # configuration would use the default pickle serializer.
        from inspirehep.modules.migrator.tasks import migrate_from_file

        # Close the session before dropping so no open transaction
        # blocks the DROPs, then rebuild the schema via Alembic.
        db.session.close()
        db.drop_all()
        drop_alembic_version_table()
        alembic = Alembic(app=current_app)
        alembic.upgrade()

        # Recreate the search indices from scratch (idempotent calls).
        _es = app.extensions['invenio-search']
        list(_es.delete(ignore=[404]))
        list(_es.create(ignore=[400]))

        init_all_storage_paths()
        init_users_and_permissions()
        init_authentication_token()

        migrate_from_file('./inspirehep/demosite/data/demo-records.xml.gz', wait_for_results=True)
        es.indices.refresh('records-hep')  # Makes sure that all HEP records were migrated.

        yield app
class SQLAlchemy(BaseSQLAlchemy):
    """Flask-SQLAlchemy subclass wired up with Flask-Alembic.

    Exposes a pre-configured :class:`Alembic` instance as ``self.alembic``
    and registers ``db`` in the Flask shell context on init.
    """

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.alembic = Alembic()
        # Use the project-wide revision-id generator for new migrations.
        self.alembic.rev_id = rev_id

    def init_app(self, app):
        # Consistency fix: use the same zero-argument super() form as
        # __init__ (the old super(SQLAlchemy, self) spelling was mixed in).
        super().init_app(app)
        self.alembic.init_app(app)
        # Make `db` available automatically in `flask shell`.
        app.shell_context_processor(lambda: {'db': self})
def init_db():
    """Drop and recreate the whole database schema via Alembic.

    Destroys all data -- intended for test/dev environments only.

    Returns:
        A JSON response confirming the recreation.
    """
    LOGGER.info('Recreating the DB')
    # Close the session first so the DROPs are not blocked by an open
    # transaction holding locks.
    db.session.close()
    db.drop_all()
    drop_alembic_version_table()
    alembic = Alembic(app=current_app)
    alembic.upgrade()
    db.session.commit()
    LOGGER.info('Recreating the DB: done')
    return jsonify("Db recreated")
def clear_environment(app):
    """Reset the database and search indices of ``app`` to a pristine state.

    Drops every table, replays all Alembic migrations, recreates the
    Elasticsearch indices, and re-initializes storage paths, users and
    permissions.
    """
    with app.app_context():
        # Close the session so dropping tables is not blocked by an
        # open transaction.
        db.session.close()
        db.drop_all()
        drop_alembic_version_table()
        alembic = Alembic(app=app)
        alembic.upgrade()

        # Delete and recreate the indices; the ignore lists make both
        # calls idempotent.
        _es = app.extensions['invenio-search']
        list(_es.delete(ignore=[404]))
        list(_es.create(ignore=[400]))
        es.indices.refresh('records-hep')

        init_all_storage_paths()
        init_users_and_permissions()
from zeus.utils.celery import Celery from zeus.utils.nplusone import NPlusOne from zeus.utils.redis import Redis from zeus.utils.ssl import SSL ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir)) WORKSPACE_ROOT = os.path.expanduser( os.environ.get("WORKSPACE_ROOT", "~/.zeus/")) # Sigh. If only developers would stop having feelings, and use more facts. # HTTP (non-SSL) is a valid callback URL for many OAuth2 providers, especially # when you're running on localhost. os.environ["OAUTHLIB_INSECURE_TRANSPORT"] = "1" alembic = Alembic() celery = Celery() db = SQLAlchemy() mail = Mail() nplusone = NPlusOne() redis = Redis() sentry = Sentry(logging=True, level=logging.ERROR, wrap_wsgi=True) ssl = SSL() def with_health_check(app): def middleware(environ, start_response): if environ.get("PATH_INFO", "") == "/healthz": start_response("200 OK", [("Content-Type", "application/json")]) return [b'{"ok": true}']
from raven.contrib.flask import Sentry import flask_restplus as restful from .conf import configure app = Flask('sigil') configure(app) if app.config['DEBUG']: logging.basicConfig(stream=sys.stdout, level=logging.DEBUG) logger = logging.getLogger(__name__) db = SQLAlchemy(app) mail = Mail(app) alembic = Alembic(app) EXTRA_FIELDS = [v for v in app.config["EXTRA_FIELDS"].split(",") if v] sentry = None if app.config['SENTRY_DSN']: logger.info('Sentry is active') sentry = Sentry(app) else: logger.info('Sentry is inactive') if app.config['SERVE_STATIC']: # static files @app.route('{}/'.format(app.config['UI_URL_PREFIX'])) @app.route('{}/<path:path>'.format(app.config['UI_URL_PREFIX'])) def serve(path=""): d = os.path.dirname
def __init__(self, app=None, **kwargs):
    """Initialize the extension, optionally binding it to *app* right away."""
    # Defer migration-directory creation and expose the CLI group under
    # the name 'alembic'.
    migration_ext = Alembic(run_mkdir=False, command_name='alembic')
    self.alembic = migration_ext
    if app:
        self.init_app(app, **kwargs)
#!/usr/bin/env python # -*- encoding:utf-8 -*- """ 数据迁移工具 """ from app import app, db from flask_alembic import Alembic with app.app_context(): alembic = Alembic(app) alembic.revision('add column desc') alembic.upgrade() # create: 15/11/30 # End
class InvenioDB(object):
    """Invenio database extension.

    Wires Flask-SQLAlchemy, Flask-Alembic and (optionally)
    SQLAlchemy-Continuum versioning into a Flask application.
    """

    def __init__(self, app=None, **kwargs):
        """Extension initialization."""
        self.alembic = Alembic(run_mkdir=False, command_name='alembic')
        if app:
            self.init_app(app, **kwargs)

    def init_app(self, app, **kwargs):
        """Initialize application object."""
        self.init_db(app, **kwargs)
        # Core migrations live in invenio_db; other packages contribute
        # versioned branches via the 'invenio_db.alembic' entry point.
        app.config.setdefault('ALEMBIC', {
            'script_location': pkg_resources.resource_filename(
                'invenio_db', 'alembic'
            ),
            'version_locations': [
                (base_entry.name, pkg_resources.resource_filename(
                    base_entry.module_name,
                    os.path.join(*base_entry.attrs)
                )) for base_entry in pkg_resources.iter_entry_points(
                    'invenio_db.alembic'
                )
            ],
        })
        self.alembic.init_app(app)
        app.extensions['invenio-db'] = self
        app.cli.add_command(db_cmd)

    def init_db(self, app, entry_point_group='invenio_db.models', **kwargs):
        """Initialize Flask-SQLAlchemy extension."""
        # Setup SQLAlchemy
        app.config.setdefault(
            'SQLALCHEMY_DATABASE_URI',
            'sqlite:///' + os.path.join(app.instance_path, app.name + '.db')
        )
        app.config.setdefault('SQLALCHEMY_ECHO', app.debug)

        # Initialize Flask-SQLAlchemy extension.
        database = kwargs.get('db', db)
        database.init_app(app)

        # Initialize versioning support (must happen before models are
        # loaded so make_versioned() can instrument them).
        self.init_versioning(app, database, kwargs.get('versioning_manager'))

        # Initialize model bases declared via the entry point group.
        if entry_point_group:
            for base_entry in pkg_resources.iter_entry_points(
                    entry_point_group):
                base_entry.load()

        # All models should be loaded by now.
        sa.orm.configure_mappers()
        # Ensure that versioning classes have been built.
        if app.config['DB_VERSIONING']:
            manager = self.versioning_manager
            if manager.pending_classes:
                manager.builder.configure_versioned_classes()
            elif 'transaction' not in database.metadata.tables:
                # No versioned classes pending but the transaction table
                # is missing: build it explicitly.
                manager.declarative_base = database.Model
                manager.create_transaction_model()
                manager.plugins.after_build_tx_class(manager)

    def init_versioning(self, app, database, versioning_manager=None):
        """Initialize the versioning support using SQLAlchemy-Continuum."""
        # Default DB_VERSIONING to whether sqlalchemy_continuum is installed.
        try:
            pkg_resources.get_distribution('sqlalchemy_continuum')
        except pkg_resources.DistributionNotFound:  # pragma: no cover
            default_versioning = False
        else:
            default_versioning = True

        app.config.setdefault('DB_VERSIONING', default_versioning)

        if not app.config['DB_VERSIONING']:
            return

        if not default_versioning:  # pragma: no cover
            raise RuntimeError(
                'Please install extra versioning support first by running '
                'pip install invenio-db[versioning].'
            )

        # Now we can import SQLAlchemy-Continuum.
        from sqlalchemy_continuum import make_versioned
        from sqlalchemy_continuum import versioning_manager as default_vm
        from sqlalchemy_continuum.plugins import FlaskPlugin

        # Try to guess user model class:
        if 'DB_VERSIONING_USER_MODEL' not in app.config:  # pragma: no cover
            try:
                pkg_resources.get_distribution('invenio_accounts')
            except pkg_resources.DistributionNotFound:
                user_cls = None
            else:
                user_cls = 'User'
        else:
            user_cls = app.config.get('DB_VERSIONING_USER_MODEL')

        plugins = [FlaskPlugin()] if user_cls else []

        # Call make_versioned() before your models are defined.
        self.versioning_manager = versioning_manager or default_vm
        make_versioned(
            user_cls=user_cls,
            manager=self.versioning_manager,
            plugins=plugins,
        )

        # Register models that have been loaded beforehand.
        builder = self.versioning_manager.builder

        for tbl in database.metadata.tables.values():
            builder.instrument_versioned_classes(
                database.mapper, get_class_by_table(database.Model, tbl)
            )
def test_downgrade(inspire_app):
    """Walk the Alembic history downwards, revision by revision.

    After each ``downgrade`` the assertions check that the tables,
    indexes and enums introduced by the reverted revision are gone
    (or that the artifacts of the previous revision are back).
    """
    alembic = Alembic(current_app)

    # 020b99d0beb7: experiment_literature table and indexes removed.
    alembic.downgrade(target="020b99d0beb7")

    assert "ix_experiment_literature_literature_uuid" not in _get_indexes(
        "experiment_literature"
    )
    assert "ix_experiment_literature_experiment_uuid" not in _get_indexes(
        "experiment_literature"
    )
    assert "experiment_literature" not in _get_table_names()

    # 8ba47044154a: composite index on records_authors removed.
    alembic.downgrade(target="8ba47044154a")
    assert "ix_records_authors_id_type_record_id" not in _get_indexes("records_authors")

    # 5a0e2405b624: citation-type indexes removed.
    alembic.downgrade(target="5a0e2405b624")
    assert "ix_records_citations_cited_id_citation_type" not in _get_indexes(
        "records_citations"
    )
    assert "ix_records_citations_citer_id_citation_type" not in _get_indexes(
        "records_citations"
    )

    # 595c36d68964: is_self_citation column removed.
    alembic.downgrade(target="595c36d68964")
    assert _check_column_in_table("records_citations", "is_self_citation") is False

    # cea5fa2e5d2c: records_authors gone, old cited_id index restored.
    alembic.downgrade(target="cea5fa2e5d2c")
    assert "records_authors" not in _get_table_names()
    assert "ix_records_citations_cited_id" in _get_indexes("records_citations")
    assert "ix_records_citations_cited_id_citer_id" not in _get_indexes(
        "records_citations"
    )

    # b0cdab232269: institution_literature table and indexes removed.
    alembic.downgrade("b0cdab232269")
    assert "institution_literature" not in _get_table_names()
    assert "ix_institution_literature_literature_uuid" not in _get_indexes(
        "institution_literature"
    )
    assert "ix_institution_literature_institution_uuid" not in _get_indexes(
        "institution_literature"
    )

    # e5e43ad8f861: pid_value index removed.
    alembic.downgrade("e5e43ad8f861")
    assert "idx_pidstore_pid_pid_value" not in _get_indexes("pidstore_pid")

    # f563233434cd: conference_literature table, enum and indexes removed.
    alembic.downgrade(target="f563233434cd")
    assert "enum_conference_to_literature_relationship_type" not in _get_custom_enums()
    assert "conference_literature" not in _get_table_names()
    assert "ix_conference_literature_literature_uuid" not in _get_indexes(
        "conference_literature"
    )
    assert "ix_conference_literature_conference_uuid" not in _get_indexes(
        "conference_literature"
    )

    # 788a3a61a635: idx_pid_provider removed.
    alembic.downgrade(target="788a3a61a635")
    assert "idx_pid_provider" not in _get_indexes("pidstore_pid")

    # dc1ae5abe9d6: idx_pid_provider restored.
    alembic.downgrade(target="dc1ae5abe9d6")
    assert "idx_pid_provider" in _get_indexes("pidstore_pid")

    # c6570e49b7b2: records_citations still present here.
    alembic.downgrade(target="c6570e49b7b2")
    assert "records_citations" in _get_table_names()
    assert "ix_records_citations_cited_id" in _get_indexes("records_citations")

    # 5ce9ef759ace: table renamed back to record_citations.
    alembic.downgrade(target="5ce9ef759ace")
    assert "record_citations" in _get_table_names()
    assert "records_citations" not in _get_table_names()
    assert "ix_records_citations_cited_id" not in _get_indexes("record_citations")
    assert "idx_citations_cited" in _get_indexes("record_citations")

    # b646d3592dd5: legacy_records_mirror table and indexes removed.
    alembic.downgrade(target="b646d3592dd5")
    assert "ix_legacy_records_mirror_last_updated" not in _get_indexes(
        "legacy_records_mirror"
    )
    assert "ix_legacy_records_mirror_valid_collection" not in _get_indexes(
        "legacy_records_mirror"
    )
    assert "legacy_records_mirror" not in _get_table_names()

    # 7be4c8b5c5e8: record_citations removed entirely.
    alembic.downgrade(target="7be4c8b5c5e8")
    assert "idx_citations_cited" not in _get_indexes("record_citations")
    assert "record_citations" not in _get_table_names()
import os import joblib import pandas as pd import numpy as np from sklearn.feature_extraction.text import TfidfVectorizer from sklearn import decomposition import nltk from nltk import word_tokenize from nltk.corpus import stopwords import itertools from flask_cors import CORS from flask_alembic import Alembic import enum alembic = Alembic() app = Flask(__name__) alembic.init_app(app) CORS(app) print( "Database URL: ", os.environ.get('DATABASE_URL', 'postgresql://hunterowens:@localhost/frankenstein')) app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get( 'DATABASE_URL', 'postgresql://hunterowens:@localhost/frankenstein') db = SQLAlchemy(app) class StatusEnum(enum.Enum): preshow = 'preshow'
class BrazilDataCubeDB:
    """Database management extension for Brazil Data Cube applications and services.

    Attributes:
        alembic: A Flask-Alembic instance used to prepare migration environment.
    """

    # Registered trigger SQL files, keyed module name -> {trigger name: path}.
    triggers: Dict[str, Dict[str, str]] = None
    # Registered generic SQL scripts, same layout as ``triggers``.
    scripts: Dict[str, Dict[str, str]] = None
    # Database namespaces (schemas) loaded from entry points.
    # NOTE(review): class-level mutable list -- shared across instances.
    namespaces: List[str] = []

    def __init__(self, app=None, **kwargs):
        """Initialize the database management extension.

        Args:
            app: Flask application
            kwargs: Optional arguments to Flask-SQLAlchemy.
        """
        self.triggers = dict()
        self.scripts = dict()
        self.alembic = Alembic(run_mkdir=False, command_name='alembic')

        if app:
            self.init_app(app, **kwargs)

    def init_app(self, app, **kwargs):
        """Initialize Flask application instance.

        This method prepares the Alembic configuration for multiple named
        branches according to each package entry point.

        Args:
            app: Flask application
            kwargs: Optional arguments to Flask-SQLAlchemy.
        """
        self.init_db(app, **kwargs)

        # Load package namespaces
        self.load_namespaces()

        # Load package triggers
        self.load_triggers(**kwargs)

        # Load package SQL scripts
        self.load_scripts(**kwargs)

        # prepare the configuration for multiple named branches
        # according to each package entry point
        script_location = pkg_resources.resource_filename('bdc_db', 'alembic')

        version_locations = [
            (base_entry.name,
             pkg_resources.resource_filename(base_entry.module_name, os.path.join(
                 *base_entry.attrs,
             )))
            for base_entry in pkg_resources.iter_entry_points('bdc_db.alembic')
        ]

        # bdc_db's own location is already the script_location; avoid
        # listing it twice.
        if ('bdc_db', script_location) in version_locations:
            version_locations.remove(('bdc_db', script_location))

        app.config.setdefault(
            'ALEMBIC', {
                'script_location': script_location,
                'version_locations': version_locations,
            })

        # Exclude PostGIS tables from migration
        exclude_tables = [
            'spatial_ref_sys',
        ]
        app.config.setdefault('ALEMBIC_EXCLUDE_TABLES', exclude_tables)

        # Use a default callable function or one provided
        # in kwargs in order to give the chance to consider
        # an object in the autogenerate sweep.
        handler_include_table = kwargs.get('include_object', alembic_include_object)

        # Set the Alembic environment context
        app.config.setdefault(
            'ALEMBIC_CONTEXT', {
                'compare_type': True,
                'include_schemas': True,
                'include_object': handler_include_table,
            })

        # Initialize Flask-Alembic extension
        self.alembic.init_app(app)

        # Add BDC-DB extension to Flask extension list
        app.extensions['bdc-db'] = self

    def init_db(self, app, entry_point_group='bdc_db.models', **kwargs):
        """Initialize Flask-SQLAlchemy extension.

        Args:
            app: Flask application
            entry_point_group: Entrypoint definition to load models
            kwargs: optional Arguments to Flask-SQLAlchemy.
        """
        # Setup SQLAlchemy
        app.config.setdefault('SQLALCHEMY_DATABASE_URI',
                              _config.SQLALCHEMY_DATABASE_URI)
        app.config.setdefault('SQLALCHEMY_TRACK_MODIFICATIONS',
                              _config.SQLALCHEMY_TRACK_MODIFICATIONS)
        app.config.setdefault('SQLALCHEMY_ECHO', _config.SQLALCHEMY_ECHO)

        # Initialize Flask-SQLAlchemy extension.
        database = kwargs.get('db', _db)
        database.init_app(app)

        # Loads all models
        if entry_point_group:
            for base_entry in pkg_resources.iter_entry_points(
                    entry_point_group):
                base_entry.load()

        # All models should be loaded by now.
        # Initialize the inter-mapper relationships of all loaded mappers.
        configure_mappers()

    def load_namespaces(self, entry_point: str = 'bdc_db.namespaces'):
        """Load application namespaces dynamically using entry points.

        Args:
            entry_point - Pattern to search in the setup.py entry points.
        """
        for base_entry in pkg_resources.iter_entry_points(entry_point):
            namespace = base_entry.load()

            if not namespace:
                raise RuntimeError(
                    f'Invalid namespace {namespace} in {base_entry.module_name}'
                )

            # Duplicate namespaces are skipped with a warning, not an error.
            if namespace in self.namespaces:
                current_app.logger.warning(
                    f'Namespace {namespace} already loaded. Skipping')
                continue

            self.namespaces.append(namespace)

    def load_triggers(self, entry_point_group: str = 'bdc_db.triggers', **kwargs):
        """Load trigger files from packages to BDC-DB context.

        Seeks for .sql files in packages which set `bdc_db.triggers`
        entry point.

        Notes:
            It may throw exception when module is set, but does not exists in disk.

        Args:
            entry_point_group - Pattern to search in the setup.py entry points.
        """
        if entry_point_group:
            triggers = self._load_module(entry_point_group)

            for module, script in triggers.items():
                for trigger in script:
                    self.register_trigger(module, Path(trigger).stem, trigger)

    def _load_module(self, entry_point) -> Dict[str, Iterable[str]]:
        """Seek for files inside Python entry point."""
        modules = dict()

        if entry_point:
            for base_entry in pkg_resources.iter_entry_points(entry_point):
                package = base_entry.load()

                directory = package.__path__

                for path in directory._path:
                    modules.setdefault(package.__name__, list())
                    modules[package.__name__].extend(self._get_scripts(path))

        return modules

    def load_scripts(self, entry_point_group: str = 'bdc_db.scripts', **kwargs):
        """Load SQL files from packages to BDC-DB context."""
        scripts = self._load_module(entry_point_group)

        for module, script in scripts.items():
            for trigger in script:
                self.register_scripts(module, Path(trigger).stem, trigger)

    @staticmethod
    def _get_scripts(path: str) -> Iterable[str]:
        # Collect the paths of all .sql files directly inside ``path``.
        _path = Path(path)

        found_scripts = []

        for entry in _path.iterdir():
            if entry.is_file() and entry.suffix == '.sql':
                found_scripts.append(str(entry))

        return found_scripts

    def register_directory(self, module_name: str, path: str):
        """Register a package script directory to the BDC-DB."""
        _path = Path(path)

        for entry in _path.iterdir():
            if entry.is_file() and entry.suffix == '.sql':
                self.register_trigger(module_name, entry.stem, str(entry))

    def register_trigger(self, module_name: str, trigger_name: str, path: str):
        """Register trigger command to BDC-DB."""
        self.triggers.setdefault(module_name, dict())
        self.triggers[module_name][trigger_name] = path

    def register_scripts(self, module_name: str, trigger_name: str, path: str):
        """Register trigger command to BDC-DB."""
        self.scripts.setdefault(module_name, dict())
        self.scripts[module_name][trigger_name] = path
def test_downgrade(isolated_app):
    """Walk the Alembic history downwards from head, revision by revision.

    After each ``downgrade`` the assertions check that the artifacts of
    the reverted revision (indexes, tables, sequences) are gone.
    """
    alembic = Alembic(isolated_app)
    alembic.upgrade()

    # downgrade 0bc0a6ee1bc0 == downgrade to 2f5368ff6d20
    alembic.downgrade(target='0bc0a6ee1bc0')

    assert 'ix_records_metadata_json_referenced_records_2_0' not in _get_indexes(
        'records_metadata')
    assert 'ix_records_metadata_json_referenced_records' in _get_indexes(
        'records_metadata')

    alembic.downgrade(target='2f5368ff6d20')

    assert 'ix_records_metadata_json_referenced_records' not in _get_indexes(
        'records_metadata')
    # 2f5368ff6d20

    # TODO |Create proper tests for 2f5368ff6d20, eaab22c59b89, f9ea5752e7a5
    # TODO | and 17ff155db70d
    alembic.downgrade(target="eaab22c59b89")
    # eaab22c59b89

    alembic.downgrade(target="f9ea5752e7a5")
    # f9ea5752e7a5

    alembic.downgrade(target="17ff155db70d")
    # 17ff155db70d

    alembic.downgrade(target="402af3fbf68b")
    # 402af3fbf68b

    alembic.downgrade(target='53e8594bc789')
    # 53e8594bc789

    # d99c70308006: legacy_records_mirror reverts to inspire_prod_records.
    alembic.downgrade(target='d99c70308006')

    assert 'inspire_prod_records' in _get_table_names()
    assert 'inspire_prod_records_recid_seq' in _get_sequences()
    assert 'legacy_records_mirror' not in _get_table_names()
    assert 'legacy_records_mirror_recid_seq' not in _get_sequences()
    # d99c70308006

    alembic.downgrade(target='cb9f81e8251c')
    alembic.downgrade(target='cb5153afd839')
    # cb9f81e8251c & cb5153afd839

    # fddb3cfe7a9c: GIN indexes and workflows_record_sources removed.
    alembic.downgrade(target='fddb3cfe7a9c')

    assert 'idxgindoctype' not in _get_indexes('records_metadata')
    assert 'idxgintitles' not in _get_indexes('records_metadata')
    assert 'idxginjournaltitle' not in _get_indexes('records_metadata')
    assert 'idxgincollections' not in _get_indexes('records_metadata')

    assert 'workflows_record_sources' not in _get_table_names()
    # fddb3cfe7a9c

    # a82a46d12408: workflow tables and sequences removed.
    alembic.downgrade(target='a82a46d12408')

    assert 'inspire_prod_records' not in _get_table_names()
    assert 'inspire_prod_records_recid_seq' not in _get_sequences()
    assert 'workflows_audit_logging' not in _get_table_names()
    assert 'workflows_audit_logging_id_seq' not in _get_sequences()
    assert 'workflows_pending_record' not in _get_table_names()
def test_upgrade(app):
    """Walk the Alembic history upwards, revision by revision.

    Starts from an old revision (a82a46d12408) and upgrades step by
    step, asserting after each step that the revision's artifacts
    (tables, sequences, indexes) exist.
    """
    alembic = Alembic(app)
    alembic.upgrade()
    # Reset to the baseline revision before walking up.
    alembic.downgrade(target='a82a46d12408')

    # fddb3cfe7a9c
    alembic.upgrade(target='fddb3cfe7a9c')

    assert 'inspire_prod_records' in _get_table_names()
    assert 'inspire_prod_records_recid_seq' in _get_sequences()
    assert 'workflows_audit_logging' in _get_table_names()
    assert 'workflows_audit_logging_id_seq' in _get_sequences()
    assert 'workflows_pending_record' in _get_table_names()

    # cb9f81e8251c
    alembic.upgrade(target='cb9f81e8251c')

    assert 'idxgindoctype' in _get_indexes('records_metadata')
    assert 'idxgintitles' in _get_indexes('records_metadata')
    assert 'idxginjournaltitle' in _get_indexes('records_metadata')
    assert 'idxgincollections' in _get_indexes('records_metadata')

    # cb5153afd839
    # cb5153afd839 branches off fddb3cfe7a9c, so step back down first.
    alembic.downgrade(target='fddb3cfe7a9c')
    alembic.upgrade(target='cb5153afd839')

    assert 'workflows_record_sources' in _get_table_names()

    # d99c70308006
    alembic.upgrade(target='d99c70308006')

    # 53e8594bc789
    alembic.upgrade(target='53e8594bc789')

    # 402af3fbf68b: inspire_prod_records renamed to legacy_records_mirror.
    alembic.upgrade(target='402af3fbf68b')

    assert 'inspire_prod_records' not in _get_table_names()
    assert 'inspire_prod_records_recid_seq' not in _get_sequences()
    assert 'legacy_records_mirror' in _get_table_names()
    assert 'legacy_records_mirror_recid_seq' in _get_sequences()

    # 17ff155db70d
    alembic.upgrade(target="17ff155db70d")
    # Not checking as it only adds or modifies columns

    # f9ea5752e7a5
    alembic.upgrade(target="f9ea5752e7a5")
    # Not checking as it only adds or modifies columns

    # eaab22c59b89
    alembic.upgrade(target="eaab22c59b89")

    # 2f5368ff6d20
    alembic.upgrade(target="2f5368ff6d20")

    # TODO Create proper tests for 2f5368ff6d20, eaab22c59b89, f9ea5752e7a5,
    # 17ff155db70d

    # 0bc0a6ee1bc0
    alembic.upgrade(target='0bc0a6ee1bc0')

    assert 'ix_records_metadata_json_referenced_records' in _get_indexes(
        'records_metadata')

    # 2dd443feeb63: replaces the referenced_records index with the v2 one.
    alembic.upgrade(target='2dd443feeb63')

    assert 'ix_records_metadata_json_referenced_records_2_0' in _get_indexes(
        'records_metadata')
    assert 'ix_records_metadata_json_referenced_records' not in _get_indexes(
        'records_metadata')
def __init__(self, **kwargs):
    """Initialize the base class and attach a Flask-Alembic instance."""
    super().__init__(**kwargs)
    migrations = Alembic()
    # New migration revisions get their ids from the project-wide
    # generator instead of Alembic's default.
    migrations.rev_id = rev_id
    self.alembic = migrations
def create_app(app_name, config_obj, with_api=True):
    """Generates and configures the main shop application. All additional

    Args:
        app_name: Name used as the Flask application/root package name.
        config_obj: Configuration object loaded via ``config.from_object``.
        with_api: When True, attach a Flask-RESTful ``Api`` under ``/v1``.

    Returns:
        The fully configured Flask application; extension instances are
        attached as attributes on the app (``app.db``, ``app.mail``, ...).
    """
    # Launching application
    app = Flask(app_name)  # So the engine would recognize the root package

    # Load Configuration
    app.config.from_object(config_obj)

    # Initializing Database
    db = SQLAlchemy(app)
    app.db = db
    # migrate = Migrate(app, db)
    alembic = Alembic()
    alembic.init_app(app)
    app.alembic = alembic

    # Loading assets
    assets = Environment(app)
    assets.from_yaml('assets.yaml')
    app.assets = assets

    # Initialize Mail
    app.mail = Mail(app)

    # Initializing login manager
    login_manager = LoginManager()
    login_manager.login_view = app.config.get('LOGIN_VIEW', '.login')
    # login_manager.login_message = 'You need to be logged in to access this page'
    login_manager.session_protection = 'strong'
    login_manager.setup_app(app)
    app.login_manager = login_manager

    # Initializing principal manager
    app.principal = Principal(app)

    # Initializing bcrypt password encryption
    bcrypt = Bcrypt(app)
    app.bcrypt = bcrypt

    app.cloudinary = cloudinary
    app.cloudinary_upload = cloudinary_upload

    photos = UploadSet('photos', IMAGES)
    archives = UploadSet('archives', ARCHIVES)

    configure_uploads(app, (photos, archives))
    patch_request_class(app, 16 * 1024 * 1024)  # Patches to 16MB file uploads max.

    app.photos = photos
    app.archives = archives

    moment = Moment(app)
    app.moment = moment

    # Redis store for session management
    # The process running Flask needs write access to this directory:
    # store = RedisStore(redis.StrictRedis())
    #
    # this will replace the app'cs session handling
    # KVSessionExtension(store, app)
    #
    # configure sentry
    # if not app.config.get("DEBUG", False):
    #     sentry = Sentry(app)
    #     app.sentry = sentry

    # Integrate Elasticsearch
    # es_config = app.config.get("ES_CONFIG", [])
    # app.es = Elasticsearch(es_config)

    # Caching
    app.cache = Cache(app)

    # Initializing the restful API
    if with_api:
        api = Api(app, prefix='/v1')
        app.api = api

    # Initialize Logging
    if not app.debug:
        import logging
        from logging.handlers import RotatingFileHandler
        file_handler = RotatingFileHandler(
            "/var/log/kx/%s.log" % app.config.get("LOGFILE_NAME", app_name),
            maxBytes=500 * 1024)
        file_handler.setLevel(logging.INFO)
        from logging import Formatter
        file_handler.setFormatter(
            Formatter('%(asctime)s %(levelname)s: %(message)s '
                      '[in %(pathname)s:%(lineno)d]'))
        app.logger.addHandler(file_handler)

    # include an api_registry to the application
    app.api_registry = []  # a simple list holding the values to be registered

    return app
def test_upgrade(base_app, db, es):
    """Walk the Alembic history upwards from the first migration.

    Starts by downgrading to the initial revision, then upgrades step
    by step, asserting after each step that the revision's tables and
    indexes exist (or were removed, for revisions that drop them).
    """
    alembic = Alembic(base_app)
    # go down to first migration
    alembic.downgrade(target="b5be5fda2ee7")

    # 7be4c8b5c5e8: creates the workflow/crawler tables and indexes.
    alembic.upgrade(target="7be4c8b5c5e8")
    assert "workflows_record_sources" in _get_table_names(db)
    assert "workflows_pending_record" in _get_table_names(db)
    assert "crawler_workflows_object" in _get_table_names(db)
    assert "crawler_job" in _get_table_names(db)
    assert "workflows_audit_logging" in _get_table_names(db)
    assert "workflows_buckets" in _get_table_names(db)
    assert "workflows_object" in _get_table_names(db)
    assert "workflows_workflow" in _get_table_names(db)
    assert "ix_crawler_job_job_id" in _get_indexes("crawler_job", db)
    assert "ix_crawler_job_scheduled" in _get_indexes("crawler_job", db)
    assert "ix_crawler_job_spider" in _get_indexes("crawler_job", db)
    assert "ix_crawler_job_workflow" in _get_indexes("crawler_job", db)
    assert "ix_workflows_audit_logging_object_id" in _get_indexes(
        "workflows_audit_logging", db
    )
    assert "ix_workflows_audit_logging_user_id" in _get_indexes(
        "workflows_audit_logging", db
    )
    assert "ix_workflows_object_data_type" in _get_indexes("workflows_object", db)
    assert "ix_workflows_object_id_parent" in _get_indexes("workflows_object", db)
    assert "ix_workflows_object_id_workflow" in _get_indexes("workflows_object", db)
    assert "ix_workflows_object_status" in _get_indexes("workflows_object", db)
    assert "ix_records_metadata_json_referenced_records_2_0" in _get_indexes(
        "records_metadata", db
    )

    # b646d3592dd5: creates record_citations with its citations index.
    alembic.upgrade(target="b646d3592dd5")
    assert "idx_citations_cited" in _get_indexes("record_citations", db)
    assert "record_citations" in _get_table_names(db)

    # 5ce9ef759ace: creates legacy_records_mirror and its indexes.
    alembic.upgrade(target="5ce9ef759ace")
    assert "ix_legacy_records_mirror_last_updated" in _get_indexes(
        "legacy_records_mirror", db
    )
    assert "ix_legacy_records_mirror_valid_collection" in _get_indexes(
        "legacy_records_mirror", db
    )
    assert "legacy_records_mirror" in _get_table_names(db)

    # c6570e49b7b2: renames record_citations to records_citations.
    alembic.upgrade(target="c6570e49b7b2")
    assert "records_citations" in _get_table_names(db)
    assert "record_citations" not in _get_table_names(db)
    assert "ix_records_citations_cited_id" in _get_indexes("records_citations", db)
    assert "idx_citations_cited" not in _get_indexes("records_citations", db)

    # dc1ae5abe9d6: adds idx_pid_provider.
    alembic.upgrade(target="dc1ae5abe9d6")
    assert "idx_pid_provider" in _get_indexes("pidstore_pid", db)

    # 788a3a61a635: drops idx_pid_provider again.
    alembic.upgrade(target="788a3a61a635")
    assert "idx_pid_provider" not in _get_indexes("pidstore_pid", db)
from .secretly.db import db
from .secretly.feed import feed
from .secretly.auth import auth

app = Flask(__name__)

# NOTE(review): DEBUG=True and a hard-coded SECRET_KEY are acceptable for
# local development only; both must come from configuration/environment
# before any production deployment.
app.config['DEBUG'] = True
app.config['FLASK_LOG_LEVEL'] = 'DEBUG'
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/test.db'
app.config['SQLALCHEMY_ECHO'] = True
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
app.config['SECRET_KEY'] = 'eps-Oyb-Toj-fI'

FlaskCLI(app)
flask_log = Logging(app)

# Database plus migrations: Flask-SQLAlchemy session + Alembic CLI ('db').
db.init_app(app)
app.cli.add_command(alembic_cli, 'db')
alembic = Alembic()
alembic.init_app(app)

# Blueprints: authentication endpoints under /auth, feed at the root.
app.register_blueprint(auth, url_prefix='/auth')
app.register_blueprint(feed)


@app.before_request
def before_request():
    # Resolve the logged-in user from the session (if any) and attach it
    # to the request for downstream handlers.
    from .secretly.models.user import User
    if session.get('user_id'):
        request.user = User.query.get(session['user_id'])
class InvenioDB(object):
    """Invenio database extension.

    Wires Flask-SQLAlchemy, Flask-Alembic and (optionally)
    SQLAlchemy-Continuum versioning into a Flask application.
    """

    def __init__(self, app=None, **kwargs):
        """Extension initialization."""
        self.alembic = Alembic(run_mkdir=False, command_name='alembic')
        if app:
            self.init_app(app, **kwargs)

    def init_app(self, app, **kwargs):
        """Initialize application object."""
        self.init_db(app, **kwargs)
        # Core migrations live in invenio_db; other packages contribute
        # versioned branches via the 'invenio_db.alembic' entry point.
        app.config.setdefault(
            'ALEMBIC', {
                'script_location':
                    pkg_resources.resource_filename('invenio_db', 'alembic'),
                'version_locations':
                    [(base_entry.name,
                      pkg_resources.resource_filename(
                          base_entry.module_name,
                          os.path.join(*base_entry.attrs)))
                     for base_entry in pkg_resources.iter_entry_points(
                         'invenio_db.alembic')],
            })
        self.alembic.init_app(app)
        app.extensions['invenio-db'] = self
        app.cli.add_command(db_cmd)

    def init_db(self, app, entry_point_group='invenio_db.models', **kwargs):
        """Initialize Flask-SQLAlchemy extension."""
        # Setup SQLAlchemy
        app.config.setdefault(
            'SQLALCHEMY_DATABASE_URI',
            'sqlite:///' + os.path.join(app.instance_path, app.name + '.db'))
        app.config.setdefault('SQLALCHEMY_ECHO', False)

        # Initialize Flask-SQLAlchemy extension.
        database = kwargs.get('db', db)
        database.init_app(app)

        # Initialize versioning support (before models are loaded, so
        # make_versioned() can instrument them).
        self.init_versioning(app, database, kwargs.get('versioning_manager'))

        # Initialize model bases declared via the entry point group.
        if entry_point_group:
            for base_entry in pkg_resources.iter_entry_points(
                    entry_point_group):
                base_entry.load()

        # All models should be loaded by now.
        sa.orm.configure_mappers()
        # Ensure that versioning classes have been built.
        if app.config['DB_VERSIONING']:
            manager = self.versioning_manager
            if manager.pending_classes:
                # Only configure classes that are not registered yet.
                if not versioning_models_registered(manager, database.Model):
                    manager.builder.configure_versioned_classes()
            elif 'transaction' not in database.metadata.tables:
                # No pending classes but the transaction table is
                # missing: build it explicitly.
                manager.declarative_base = database.Model
                manager.create_transaction_model()
                manager.plugins.after_build_tx_class(manager)

    def init_versioning(self, app, database, versioning_manager=None):
        """Initialize the versioning support using SQLAlchemy-Continuum."""
        # Default DB_VERSIONING to whether sqlalchemy_continuum is installed.
        try:
            pkg_resources.get_distribution('sqlalchemy_continuum')
        except pkg_resources.DistributionNotFound:  # pragma: no cover
            default_versioning = False
        else:
            default_versioning = True

        app.config.setdefault('DB_VERSIONING', default_versioning)

        if not app.config['DB_VERSIONING']:
            return

        if not default_versioning:  # pragma: no cover
            raise RuntimeError(
                'Please install extra versioning support first by running '
                'pip install invenio-db[versioning].')

        # Now we can import SQLAlchemy-Continuum.
        from sqlalchemy_continuum import make_versioned
        from sqlalchemy_continuum import versioning_manager as default_vm
        from sqlalchemy_continuum.plugins import FlaskPlugin

        # Try to guess user model class:
        if 'DB_VERSIONING_USER_MODEL' not in app.config:  # pragma: no cover
            try:
                pkg_resources.get_distribution('invenio_accounts')
            except pkg_resources.DistributionNotFound:
                user_cls = None
            else:
                user_cls = 'User'
        else:
            user_cls = app.config.get('DB_VERSIONING_USER_MODEL')

        plugins = [FlaskPlugin()] if user_cls else []

        # Call make_versioned() before your models are defined.
        self.versioning_manager = versioning_manager or default_vm
        make_versioned(
            user_cls=user_cls,
            manager=self.versioning_manager,
            plugins=plugins,
        )

        # Register models that have been loaded beforehand.
        builder = self.versioning_manager.builder

        for tbl in database.metadata.tables.values():
            builder.instrument_versioned_classes(
                database.mapper, get_class_by_table(database.Model, tbl))
#!/usr/bin/env python # -*- encoding:utf-8 -*- """ 数据迁移工具 """ from app import app, db from flask_alembic import Alembic with app.app_context(): alembic = Alembic(app) alembic.revision('some comments') alembic.upgrade() # create: 15/11/30 # End
def _create_flask_app(_):
    """Build a fresh application instance with the Alembic extension attached."""
    flask_app = create_app()
    # Instantiating Alembic with the app registers the extension on it;
    # the instance itself is not needed afterwards.
    Alembic(flask_app)
    return flask_app