Example #1
def test_donor_and_receiver_result(mocker, method, config, interface):
    mocker.patch.object(Introspector, 'from_database')
    model = type('model', (object, ), {})
    Introspector.from_database().generate_models.return_value = {'m': model}
    result = getattr(interface, method)()
    assert result == {'m': model}
    assert model.__str__ == Interface.string_special
Example #2
    def __init__(self, url, include_views=False, **kwargs):
        if isinstance(url, Database):
            self._url = None
            self._database = url
            self._database_path = self._database.database
        else:
            self._url = url
            parse_result = urlparse(url)
            self._database_path = parse_result.path[1:]

            # Connect to the database.
            self._database = connect(url)

        # Open a connection if one does not already exist.
        self._database.connect(reuse_if_open=True)

        # Introspect the database and generate models.
        self._introspector = Introspector.from_database(self._database)
        self._include_views = include_views
        self._models = self._introspector.generate_models(
            skip_invalid=True,
            literal_column_names=True,
            include_views=self._include_views,
            **kwargs)
        self._migrator = SchemaMigrator.from_database(self._database)

        class BaseModel(Model):
            class Meta:
                database = self._database

        self._base_model = BaseModel
        self._export_formats = self.get_export_formats()
        self._import_formats = self.get_import_formats()
Example #3
 async def test_connection(self):
     instance = Controller.prepare(name='default').instance
     await instance.db.connect()
     with instance.db.allow_sync():
         introspector = Introspector.from_database(instance.db.engine)
         db_name = introspector.get_database_name()
         assert db_name and len(db_name) > 0
Example #4
 def python(self, forwards_func, inject_models=False):
     if inject_models:
         models = Introspector.from_database(
             self.database).generate_models()
         forwards_func(models)
     else:
         forwards_func()
Example #5
    def __init__(self, url, bare_fields=False):
        if isinstance(url, Database):
            self._url = None
            self._database = url
            self._database_path = self._database.database
        else:
            self._url = url
            parse_result = urlparse(url)
            self._database_path = parse_result.path[1:]

            # Connect to the database.
            self._database = connect(url)

        self._database.connect()

        # Introspect the database and generate models.
        self._introspector = Introspector.from_database(self._database)
        self._models = self._introspector.generate_models(
            skip_invalid=True,
            literal_column_names=True,
            bare_fields=bare_fields)
        self._migrator = SchemaMigrator.from_database(self._database)

        class BaseModel(Model):
            class Meta:
                database = self._database
        self._base_model = BaseModel
        self._export_formats = self.get_export_formats()
        self._import_formats = self.get_import_formats()
Example #7
 def introspect_models(self):
     try:
         introspect = Introspector.from_database(self.db)
         models = introspect.generate_models()
         self.models = Munch.fromDict(models)
     except Exception as e:
         log.error(f'Failed to introspect database: {e}')
         raise
Example #8
 def _generate_models(self, db_config):
     name = db_config['name']
     self.create_database(name, db_type=db_config['type'],
                          auth=db_config['auth'])
     introspector = Introspector.from_database(self.databases[name])
     models = introspector.generate_models()
     for model in models.values():
         model.__str__ = Interface.string_special
     return models
Example #9
    async def test_creation(self):
        instance = Controller.prepare(name='default').instance
        await instance.db.connect()
        await instance.apps.discover()
        await instance.db.initiate()

        with instance.db.allow_sync():
            introspector = Introspector.from_database(instance.db.engine)
            metadata = introspector.introspect()
            assert len(metadata.model_names) > 0
Example #10
File: connection.py  Project: sdss/sdssdb
    def get_introspector(self, schema=None):
        """Gets a Peewee database :class:`peewee:Introspector`."""

        schema_key = schema or ''

        if schema_key not in self.introspector:
            self.introspector[schema_key] = Introspector.from_database(
                self, schema=schema)

        return self.introspector[schema_key]
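A short usage sketch (not from the source) for the cached introspector above, assuming database is an instance of this connection class and targetdb is a hypothetical schema name:

introspector = database.get_introspector(schema='targetdb')  # cached per schema
models = introspector.generate_models()
print(sorted(models))  # reflected table names in that schema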
Example #11
File: cuckoo.py  Project: opfront/cuckoo
    def __init__(self, db, path):
        self.db = db
        self._path = path

        self._introspector = Introspector.from_database(self.db)
        self.models = self._introspector.generate_models()
        self.migration_ids = [
            filename.split('.')[0] for filename in filter(
                lambda f: f.endswith('.py') and '__init__' not in f,
                sorted(os.listdir(self._path)))
        ]
Example #12
    def database_needs_migrations(self, db: _DatabaseSub) -> bool:
        models_in_db = Introspector.from_database(db).generate_models()
        metrics_found = "metrics" in models_in_db
        classifiermetrics_not_found = "classifiermetrics" not in models_in_db

        if metrics_found != classifiermetrics_not_found:
            raise RuntimeError(
                f"Inconsistent status:"
                f" classifiermetrics_not_found={classifiermetrics_not_found} and metrics_found={metrics_found}"
            )
        return metrics_found
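A hedged sketch of how a check like database_needs_migrations might gate an actual schema change; the metrics table and classifier_id column names below are hypothetical, and the real operations would depend on the migration being applied:

from peewee import IntegerField
from playhouse.migrate import SchemaMigrator, migrate

def upgrade_if_needed(checker, db):
    # Only touch the schema when the check above reports it is out of date.
    if not checker.database_needs_migrations(db):
        return
    migrator = SchemaMigrator.from_database(db)
    # Hypothetical operation: add a nullable integer column to the old table.
    migrate(migrator.add_column('metrics', 'classifier_id',
                                IntegerField(null=True)))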
Example #13
    def database_needs_migrations(self, db: pw.Database) -> bool:
        models_in_db = Introspector.from_database(db).generate_models()
        if "ldaset" not in models_in_db:
            raise RuntimeError(
                "Did not find models named  'ldaset' in models_in_db, but found"
                + str(models_in_db))
        print("Found models in db:", models_in_db)
        topicmodelmetrics_found = "topicmodelmetrics" in models_in_db
        metrics_id_found = "metrics_id" in models_in_db["ldaset"]._meta.columns

        if topicmodelmetrics_found != metrics_id_found:
            raise RuntimeError(
                f"Inconsistent status:"
                f" topicmodelmetrics_found={topicmodelmetrics_found} and metrics_col_found={metrics_id_found}"
            )
        return not topicmodelmetrics_found
Example #14
File: dataset.py  Project: cadim/peewee
    def __init__(self, url):
        self._url = url
        parse_result = urlparse(url)
        self._database_path = parse_result.path[1:]

        # Connect to the database.
        self._database = connect(url)
        self._database.connect()

        # Introspect the database and generate models.
        self._introspector = Introspector.from_database(self._database)
        self._models = self._introspector.generate_models()
        self._migrator = SchemaMigrator.from_database(self._database)

        class BaseModel(Model):
            class Meta:
                database = self._database
        self._base_model = BaseModel
        self._export_formats = self.get_export_formats()
Example #15
    def gen_tables(self):
        result = {}
        introspector = Introspector.from_database(self.db)
        database = introspector.introspect()
        # Iterate table/class-name pairs together so they stay aligned.
        for table, class_name in sorted(database.model_names.items()):
            item = {}
            item['class_name'] = class_name
            columns = database.columns[table]

            foreign_keys = database.foreign_keys[table]
            foreign_key_item = []
            for foreign_key in foreign_keys:
                dest_table = foreign_key.dest_table
                foreign_key_item.append({foreign_key: dest_table})
            item['foreign_keys'] = foreign_key_item

            cursor.execute("show full fields from %s" % table)
            sql_res = cursor.fetchall()
            field_item = {}
            for field_name, column in columns.items():
                field = {}
                field['field_name'] = field_name
                field['raw_column_type'] = column.raw_column_type
                field['nullable'] = column.nullable
                field['is_primary_key'] = column.primary_key
                for res in sql_res:
                    if res[0] == field_name:
                        field['comment'] = res[8] or re.sub(
                            '_+', ' ', field_name).title()
                        field['default'] = res[5]
                        field['raw_types'] = res[1]
                field_item.update({column.name: field})
            item['fields'] = field_item
            result.update({table: item})
        return result
Example #16
    def __init__(self, url):
        self._url = url
        parse_result = urlparse(url)
        self._database_path = parse_result.path[1:]

        # Connect to the database.
        self._database = connect(url)
        self._database.connect()

        # Introspect the database and generate models.
        self._introspector = Introspector.from_database(self._database)
        self._models = self._introspector.generate_models(skip_invalid=True)
        self._migrator = SchemaMigrator.from_database(self._database)

        class BaseModel(Model):
            class Meta:
                database = self._database
        self._base_model = BaseModel
        self._export_formats = self.get_export_formats()
        self._import_formats = self.get_import_formats()
Example #17
def migrate(*operations, **kwargs):
    '''
    A wrapper around :func:`playhouse.migrate.migrate`.

    This function ensures that the same migrations are performed on nested
    :class:`peewee_versioned.VersionedModel` subclasses.
    '''
    
    # Collect nested classes
    for operation in operations:
        migrator = operation.migrator
        database = operation.migrator.database
        method = operation.method
        args = list(copy(operation.args))
        kwargs = operation.kwargs.copy()
        
        # Exit early for NOOP methods
        if method in NOOP_OPERATIONS:
            operation.run()
            continue
        
        # potential arguments to be used with the nested class
        version_args = copy(args)
        version_kwargs = kwargs.copy()
        
        # potential operation to run on the nested class
        version_operation = None
        
        # Get the table name of the operation
        # Update version args/kwargs
        if method == 'rename_table':
            table = kwargs.get('old_name', None)
            if table is not None:
                version_kwargs['old_name'] = table + 'version'
        else:
            table = kwargs.get('table', None)
            if table is not None:
                version_kwargs['table'] = table + 'version'
        if table is None:
            table = args[0]
            version_args[0] = table + 'version'
        
        # Read models from the database and cache
        introspector = Introspector.from_database(database)
        models = introspector.generate_models(skip_invalid=True)
        
        # Test if the model has a version model associated with it
        version_name = table + 'version'
        if version_name in models:
            version_model = models[version_name]
            version_fields = version_model._meta.fields
            
            # Handle special cases first
            if method == 'add_column':
                # Don't add foreign keys
                field = kwargs.get('field', None)
                if field is None:
                    field = args[2]
                if isinstance(field, ForeignKeyField):
                    operation.run()
                    continue
            elif method == 'drop_column':
                column_name = kwargs.get('column_name', None)
                if column_name is None:
                    column_name = args[1]
                if column_name not in version_fields:
                    operation.run()
                    continue
            elif method == 'rename_column':
                old_name = kwargs.get('old_name', None)
                if old_name is None:
                    old_name = args[1]
                if old_name not in version_fields:
                    operation.run()
                    continue
            elif method in ('add_not_null', 'drop_not_null'):
                column = kwargs.get('column', None)
                if column is None:
                    column = args[1]
                if column not in version_fields:
                    operation.run()
                    continue
            elif method == 'rename_table':
                old_name = kwargs.get('old_name', None)
                if old_name is None:
                    old_name = args[0]
                new_name = kwargs.get('new_name', None)
                if new_name is None:
                    new_name = version_args[1]
                
                _rename_table(operation, migrator, introspector, old_name, new_name)
                continue

            # We have a valid operation, so create and run it for the nested version model as well.
            version_operation = Operation(migrator, method, *version_args, **version_kwargs)

        # Run the operations
        operation.run()
        if version_operation is not None:
            version_operation.run()
Example #18
# Setup Database
database_url = os.environ.get('DATABASE', None)
if database_url:
    database = connect(database_url)
    if database_url.startswith('mysql'):
        migrator = MySQLMigrator.from_database(database)
    elif database_url.startswith('postgres'):
        migrator = PostgresqlMigrator.from_database(database)
    elif database_url.startswith('sqlite'):
        migrator = SqliteMigrator.from_database(database)

else:
    database = SqliteDatabase(':memory:')
    migrator = SqliteMigrator.from_database(database)

introspector = Introspector.from_database(database)

# Basic example class


class BaseClass(VersionedModel):

    class Meta:
        database = database


class Food(BaseClass):
    name = CharField(null=True)
    is_tasty = BooleanField()

Example #19
 def generate_models(self, *args, **kwargs):
     return Introspector.from_database(self.db).generate_models(
         *args, **kwargs)
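A minimal end-to-end sketch of the pattern these thin wrappers expose; the app.db file and the users table are hypothetical placeholders:

from peewee import SqliteDatabase
from playhouse.reflection import Introspector

db = SqliteDatabase('app.db')                      # hypothetical SQLite file
models = Introspector.from_database(db).generate_models()
User = models['users']                             # keys are table names
for row in User.select().limit(5):
    print(row.__data__)                            # raw column values per row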
Example #20
    raise ValueError('Provide a URL for redirects!')

COOKIE_NAME = os.environ.get('GATEKEEPER_COOKIE_NAME', 'GATEKEEPER')
COOKIE_DOMAIN = os.environ.get('GATEKEEPER_COOKIE_DOMAIN')

TOKEN_SECRET = os.environ.get('GATEKEEPER_TOKEN_SECRET', app.config.get('SECRET_KEY'))
TOKEN_EXPIRATION_TIME = os.environ.get('GATEKEEPER_COOKIE_EXPIRATION_TIME', 60*60*24)  # Default to 24 hours

HEADER_KEY = os.environ.get('GATEKEEPER_HEADER_KEY', 'GATEKEEPER')

DB_URL = os.environ.get('GATEKEEPER_DB_URL')
DB_TABLE = os.environ.get('GATEKEEPER_DB_TABLE', 'USERS')

try:
    db_wrapper = FlaskDB(app, DB_URL)
    introspector = Introspector.from_database(db_wrapper.database)
    USER = introspector.generate_models(table_names=[DB_TABLE])[DB_TABLE]
except ValueError:
    raise ValueError('Provide a valid DB_URL!')


@app.route('/login', methods=['GET', 'POST'])
def login():

    if request.method == 'POST':
        # Extract request data
        username = request.form['username']
        password = request.form['pass']
        remember_me = request.form.get('remember-me') == 'on'

        next_page = request.args.get('next')
Example #21
from peewee import MySQLDatabase, SqliteDatabase
from playhouse.reflection import Introspector, print_model, print_table_sql

# step 1: generate a Model class for each table found in the source SQLite database
sqlite_db = SqliteDatabase("prodigy.db")
introspector = Introspector.from_database(sqlite_db)
models = introspector.generate_models()

# print a user-friendly description of the generated models and their SQL
for model in models.keys():
    print_model(models[model])
    print_table_sql(models[model])

# step 2: create the tables using their model classes
mysql_db = MySQLDatabase(
    user="******",
    password="******",
    host="host name",
    port=3306,
    database="database name",
    ssl={"ssl": {
        "ssl_ca": "certificate.crt.pem"
    }},
)

mysql_db.connect()
mysql_db.create_tables(list(models.values()))
mysql_db.get_tables()
mysql_db.close()
Example #22
    def scan(self):
        self.statements = list()

        models = p.sort_models_topologically(self.manager.models)
        self.order_of_models = [m._meta.db_table for m in models]
        self.local_models = {m._meta.db_table: m for m in models}

        with self.manager.using(self.database):
            self.local = Topology(self.connection, self.local_models)

            introspector = Introspector.from_database(self.connection)
            self.online_models = introspector.generate_models()
            self.online = Topology(self.connection, self.online_models)

        # first missing tables to be created
        for db_table in self.order_of_models:
            if db_table not in self.online.models:
                local_model = self.local.models[db_table]
                self.state('create_table', local_model['instance'])

        # second missing tables to be dropped
        for db_table, online_model in iteritems(self.online.models):
            if db_table not in self.local.models:
                self.state('drop_table', online_model['instance'])

        # third scan fields to be created, dropped or mutate
        for db_table, online_model in iteritems(self.online.models):
            if db_table not in self.local.models:
                continue

            local_model = self.local.models[db_table]

            online_instance = online_model['instance']
            local_instance = local_model['instance']

            online_fields = online_model['fields']
            local_fields = local_model['fields']

            online_indexes = online_model['indexes']
            local_indexes = local_model['indexes']

            # scan indexes to be dropped
            for online_index in online_indexes:
                if online_index not in local_indexes:
                    self.state('drop_index', online_instance, online_index)

            # fields to be dropped
            for field_name, online_field in iteritems(online_fields):
                if field_name not in local_fields:
                    self.state('drop_column', local_instance, online_field)

            # fields to be added
            for field_name, local_field in iteritems(local_fields):
                if field_name not in online_fields:
                    self.state('add_column', local_instance, local_field)

            # fields to be mutated
            for field_name, local_field in iteritems(local_fields):
                if field_name not in online_fields:
                    continue

                online_field = online_fields[field_name]

                if local_field == online_field:
                    continue

                if local_field.test_modifiers_changed(online_field):
                    # peewee does not currently reflect column modifiers, so a
                    # modifier change always shows up here; skip it rather than
                    # emitting a spurious mutation.
                    pass
                elif local_field.test_null_changed(online_field):
                    if online_field.field.null:
                        self.state('add_not_null', local_instance, local_field)
                    else:
                        self.state('drop_not_null', local_instance,
                                   local_field)
                else:
                    skip = False

                    if local_field.sql != online_field.sql:
                        try:
                            from playhouse.postgres_ext import ArrayField
                            if isinstance(local_field, ArrayField):
                                skip = True
                        except ImportError:
                            pass

                    if skip:
                        self.state('drop_column', online_instance,
                                   online_field)
                        self.state('add_column', local_instance,
                                   local_field)

            # scan indexes to be created
            for local_index in local_indexes:
                if local_index not in online_indexes:
                    self.state('add_index', local_instance, local_index)
Example #23
 def test_monkey_patches(self):
     """Test patch to peewee.Introspector"""
     db = MssqlDatabase("", host="", user="", password="")
     intro = Introspector.from_database(db, 'dbo')
     self.assertIsInstance(intro, Introspector)
     self.assertIsInstance(intro.metadata, MssqlMetadata)
Example #24
	def generate_models(self):
		return Introspector.from_database(self.db).generate_models()
Example #25
 def get_database_models(self, db=None):
     i = Introspector.from_database(db)
     return i.generate_models().values()
Example #26
    def select_on(self, items, selector):
        for s in selector:
            key, operator, values = s.key, s.__class__, s.value
            field = self.fields[key]
            if operator == dom.Operator.Equals:
                items = items.where(field == values)
            elif operator == dom.Operator.NotEquals:
                items = items.where(field != values)
            else:
                raise Exception('unsupported')
        return items

if __name__ == '__main__':
    Model.make_trpc_endpoint = PeeweeEndpoint

    url = os.environ.get("DATABASE_URL", "sqlite:///trpc.db")

    db = db_connect(url)
    db.connect()

    introspector = Introspector.from_database(db)
    endpoints = introspector.generate_models()
    database = introspector.introspect()


    app = App('Database', endpoints)
    app.main()


Example #27
File: models.py  Project: abyth/coldsweat
def migrate_database_schema():
    '''
    Migrate database schema from previous versions (0.9.4 and up)
    '''

    introspector = Introspector.from_database(_db)
    models = introspector.generate_models()
    Feed_ = models['feeds']
    Entry_ = models['entries']

    drop_table_migrations, column_migrations = [], []
    
    # --------------------------------------------------------------------------
    # Schema changes introduced in version 0.9.4
    # --------------------------------------------------------------------------
    
    # Change columns

    if hasattr(Feed_, 'icon_id'):
        column_migrations.append(migrator.drop_column('feeds', 'icon_id'))

    if not hasattr(Feed_, 'icon'):
        column_migrations.append(migrator.add_column('feeds', 'icon', Feed.icon))

    if not hasattr(Feed_, 'icon_last_updated_on'):
        column_migrations.append(migrator.add_column('feeds', 'icon_last_updated_on', Feed.icon_last_updated_on))
        
    if not hasattr(Entry_, 'content_type'):
        column_migrations.append(migrator.add_column('entries', 'content_type', Entry.content_type))

    # Drop tables

    if Icon.table_exists():
        drop_table_migrations.append(Icon.drop_table)

    # --------------------------------------------------------------------------
    # Schema changes introduced in version 0.9.5
    # --------------------------------------------------------------------------
    
    # Change columns

    class UpdateFeedSelfLinkHashOperation(object):
        # Fakes the migrate.Operation protocol; re-saving each feed populates its self_link_hash field
        def run(self):        
            for feed in Feed.select():
                feed.save()

    class UpdateEntryGuidHashOperation(object):
        def run(self):        
            for entry in Entry.select():
                entry.save()

    class UpdateUserApiKeyOperation(object):
        def run(self):        
            for user in User.select():
                user.save()
                
    if not hasattr(Feed_, 'self_link_hash'):
        # Start relaxing index constraints to cope with existing data...
        self_link_hash = CharField(null=True, max_length=40)
        column_migrations.append(migrator.add_column('feeds', 'self_link_hash', self_link_hash))
        column_migrations.append(UpdateFeedSelfLinkHashOperation())
        # ...and make them strict again
        column_migrations.append(migrator.add_index('feeds', ('self_link_hash',), True))
        
    if not hasattr(Entry_, 'guid_hash'):
        # Start relaxing index constraints to cope with existing data...
        guid_hash = CharField(null=True, max_length=40)
        column_migrations.append(migrator.add_column('entries', 'guid_hash', guid_hash))
        column_migrations.append(UpdateEntryGuidHashOperation())
        # ...and make them strict again
        column_migrations.append(migrator.add_index('entries', ('guid_hash',), True))

    # Drop obsolete indices
    
    if Feed_.self_link.unique:
        column_migrations.append(migrator.drop_index('feeds', 'feeds_self_link'))
    
    if Entry_.link.index:
        column_migrations.append(migrator.drop_index('entries', 'entries_link'))

    if Entry_.guid.index:
        column_migrations.append(migrator.drop_index('entries', 'entries_guid'))        

    # Misc.
        
    column_migrations.append(UpdateUserApiKeyOperation())
        
    # --------------------------------------------------------------------------
    
    # Run all table and column migrations

    if column_migrations:
        # Let the caller catch any OperationalError
        migrate(*column_migrations)        

    for drop in drop_table_migrations:
        drop()

    # Truthy if at least one migration list is non-empty
    return drop_table_migrations or column_migrations
Example #28
def migrate_database_schema():
    '''
    Migrate database schema from previous versions (0.9.4 and up)
    '''

    introspector = Introspector.from_database(_db)
    models = introspector.generate_models()
    Feed_ = models['feeds']
    Entry_ = models['entries']

    drop_table_migrations, column_migrations = [], []

    # --------------------------------------------------------------------------
    # Schema changes introduced in version 0.9.4
    # --------------------------------------------------------------------------

    # Change columns

    if hasattr(Feed_, 'icon_id'):
        column_migrations.append(migrator.drop_column('feeds', 'icon_id'))

    if not hasattr(Feed_, 'icon'):
        column_migrations.append(
            migrator.add_column('feeds', 'icon', Feed.icon))

    if not hasattr(Feed_, 'icon_last_updated_on'):
        column_migrations.append(
            migrator.add_column('feeds', 'icon_last_updated_on',
                                Feed.icon_last_updated_on))

    if not hasattr(Entry_, 'content_type'):
        column_migrations.append(
            migrator.add_column('entries', 'content_type', Entry.content_type))

    # Drop tables

    if Icon.table_exists():
        drop_table_migrations.append(Icon.drop_table)

    # --------------------------------------------------------------------------
    # Schema changes introduced in version 0.9.5
    # --------------------------------------------------------------------------

    # Change columns

    class UpdateFeedSelfLinkHashOperation(object):
        # Fakes the migrate.Operation protocol; re-saving each feed populates its self_link_hash field
        def run(self):
            for feed in Feed.select():
                feed.save()

    class UpdateEntryGuidHashOperation(object):
        def run(self):
            for entry in Entry.select():
                entry.save()

    class UpdateUserApiKeyOperation(object):
        def run(self):
            for user in User.select():
                user.save()

    if not hasattr(Feed_, 'self_link_hash'):
        # Start relaxing index constraints to cope with existing data...
        self_link_hash = CharField(null=True, max_length=40)
        column_migrations.append(
            migrator.add_column('feeds', 'self_link_hash', self_link_hash))
        column_migrations.append(UpdateFeedSelfLinkHashOperation())
        # ...and make them strict again
        column_migrations.append(
            migrator.add_index('feeds', ('self_link_hash', ), True))

    if not hasattr(Entry_, 'guid_hash'):
        # Start relaxing index constraints to cope with existing data...
        guid_hash = CharField(null=True, max_length=40)
        column_migrations.append(
            migrator.add_column('entries', 'guid_hash', guid_hash))
        column_migrations.append(UpdateEntryGuidHashOperation())
        # ...and make them strict again
        column_migrations.append(
            migrator.add_index('entries', ('guid_hash', ), True))

    # Drop obsolete indices

    if Feed_.self_link.unique:
        column_migrations.append(
            migrator.drop_index('feeds', 'feeds_self_link'))

    if Entry_.link.index:
        column_migrations.append(migrator.drop_index('entries',
                                                     'entries_link'))

    if Entry_.guid.index:
        column_migrations.append(migrator.drop_index('entries',
                                                     'entries_guid'))

    # Misc.

    column_migrations.append(UpdateUserApiKeyOperation())

    # --------------------------------------------------------------------------

    # Run all table and column migrations

    if column_migrations:
        # Let the caller catch any OperationalError
        migrate(*column_migrations)

    for drop in drop_table_migrations:
        drop()

    # Truthy if at least one migration list is non-empty
    return drop_table_migrations or column_migrations
Example #29
def migrate(*operations, **kwargs):
    '''
    A wrapper around :func:`playhouse.migrate.migrate`.

    This function ensures that the same migrations are performed on nested
    :class:`peewee_versioned.VersionedModel` subclasses.
    '''

    # Collect nested classes
    for operation in operations:
        migrator = operation.migrator
        database = operation.migrator.database
        method = operation.method
        args = list(copy(operation.args))
        kwargs = operation.kwargs.copy()

        # Exit early for NOOP methods
        if method in NOOP_OPERATIONS:
            operation.run()
            continue

        # potential arguments to be used with the nested class
        version_args = copy(args)
        version_kwargs = kwargs.copy()

        # potential operation to run on the nested class
        version_operation = None

        # Get the table name of the operation
        # Update version args/kwargs
        if method == 'rename_table':
            table = kwargs.get('old_name', None)
            if table is not None:
                version_kwargs['old_name'] = table + 'version'
        else:
            table = kwargs.get('table', None)
            if table is not None:
                version_kwargs['table'] = table + 'version'
        if table is None:
            table = args[0]
            version_args[0] = table + 'version'

        # Read models from the database and cache
        introspector = Introspector.from_database(database)
        models = introspector.generate_models(skip_invalid=True)

        # Test if the model has a version model associated with it
        version_name = table + 'version'
        if version_name in models:
            version_model = models[version_name]
            version_fields = version_model._meta.fields

            # Handle special cases first
            if method == 'add_column':
                # Don't add foreign keys
                field = kwargs.get('field', None)
                if field is None:
                    field = args[2]
                if isinstance(field, ForeignKeyField):
                    operation.run()
                    continue
            elif method == 'drop_column':
                column_name = kwargs.get('column_name', None)
                if column_name is None:
                    column_name = args[1]
                if column_name not in version_fields:
                    operation.run()
                    continue
            elif method == 'rename_column':
                old_name = kwargs.get('old_name', None)
                if old_name is None:
                    old_name = args[1]
                if old_name not in version_fields:
                    operation.run()
                    continue
            elif method in ('add_not_null', 'drop_not_null'):
                column = kwargs.get('column', None)
                if column is None:
                    column = args[1]
                if column not in version_fields:
                    operation.run()
                    continue
            elif method == 'rename_table':
                old_name = kwargs.get('old_name', None)
                if old_name is None:
                    old_name = args[0]
                new_name = kwargs.get('new_name', None)
                if new_name is None:
                    new_name = version_args[1]

                _rename_table(operation, migrator, introspector, old_name,
                              new_name)
                continue

            # We have a valid operation, so create and run it for the nested version model as well.
            version_operation = Operation(migrator, method, *version_args,
                                          **version_kwargs)

        # Run the operations
        operation.run()
        if version_operation is not None:
            version_operation.run()
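A hedged usage sketch for the versioned migrate() wrapper above, assuming database is the peewee database the operations target; the person table and its columns are hypothetical:

from peewee import CharField
from playhouse.migrate import SqliteMigrator

migrator = SqliteMigrator(database)
migrate(
    # Each operation is mirrored onto the nested version table
    # (here 'personversion') if one exists.
    migrator.add_column('person', 'nickname', CharField(null=True)),
    migrator.rename_column('person', 'fullname', 'name'),
)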