Example #1
def forward():
    comment_author = peewee.CharField(max_length=60, default='')
    comment_email = peewee.CharField(max_length=60, default='')

    migrator = PostgresqlMigrator(models.DB)
    migrate(
        migrator.add_column('comment', 'comment_author', comment_author),
        migrator.add_column('comment', 'comment_email', comment_email),
    )
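Example #1 adds two columns but, like the other forward() helpers in this listing, shows no rollback. A minimal sketch of a matching backward(), assuming the same models module and imports as above; migrator.drop_column is the playhouse.migrate call used for this in later examples:

def backward():
    # Reverse of forward(): drop the two columns added above.
    migrator = PostgresqlMigrator(models.DB)
    migrate(
        migrator.drop_column('comment', 'comment_author'),
        migrator.drop_column('comment', 'comment_email'),
    )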
Example #2
def forward():
    models.DB.create_tables([models.Comment])
    comment = peewee.ForeignKeyField(models.Comment,
                                     null=True,
                                     to_field=models.Comment.id)
    migrator = PostgresqlMigrator(models.DB)
    migrate(migrator.add_column('post', 'comment_id', comment), )
Example #3
def forward():
    models.DB.create_tables([models.Author])
    author = peewee.ForeignKeyField(models.Author,
                                    null=True,
                                    to_field=models.Author.id)
    migrator = PostgresqlMigrator(models.DB)
    migrate(migrator.add_column('post', 'author_id', author), )
Example #4
def migration_002():
    """Add path length property to instance, so we do not "need" to calculate it on the fly."""
    migrator = PostgresqlMigrator(db)
    with db.transaction():
        migrate(
            migrator.add_column(Instance._meta.db_table,
                                Instance.path_length.db_column,
                                Instance.path_length))
Example #5
def migration_001():
    """Add time property to instance, so we do not "need" to calculate median in database."""
    migrator = PostgresqlMigrator(db)
    with db.transaction():
        migrate(
            migrator.add_column(Instance._meta.db_table,
                                Instance.median_time.db_column,
                                Instance.median_time))
Example #6
def forward():
    models.DB.create_tables([models.Comments])

    comments = peewee.ForeignKeyField(
        models.Comments, null=True, to_field=models.Comments.id)

    migrator = PostgresqlMigrator(models.DB)
    migrate(
        migrator.add_column('blogpost', 'comments', comments),
    )
Example #7
def forward():
    models.DB.create_tables([models.Author])
    # Alternatively, create all tables at once:
    # models.DB.create_tables([models.Author, models.BlogPost, models.Comment])

    author = peewee.ForeignKeyField(models.Author,
                                    null=True,
                                    to_field=models.Author.id)

    migrator = PostgresqlMigrator(models.DB)
    migrate(migrator.add_column('blogpost', 'author_id', author), )
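Examples #2, #3, #6 and #7 assume a models module that defines peewee model classes such as models.Author, models.Comment and models.BlogPost, which the snippets never show. A minimal sketch of what such a module might look like; the field names and database settings here are assumptions for illustration, not taken from the original project:

import peewee

DB = peewee.PostgresqlDatabase('blog')  # assumed connection settings

class Author(peewee.Model):
    # Hypothetical field; the real model is not shown in the examples.
    name = peewee.CharField(max_length=60, default='')

    class Meta:
        database = DB
        table_name = 'author'  # 'db_table' in older peewee releases

class BlogPost(peewee.Model):
    title = peewee.CharField(max_length=120, default='')

    class Meta:
        database = DB
        table_name = 'blogpost'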
Example #8
from playhouse.migrate import migrate, PostgresqlMigrator
from app.models import DATABASE
from app.notes.models import Note

Note.description.default = ''

migrator = PostgresqlMigrator(DATABASE)

migrate(
    migrator.add_column('TBL_NOTE', Note.description.db_column,
                        Note.description))
Example #9
if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        Organization.create_table()

        default_org = Organization.create(
            name="Default",
            slug='default',
            settings={
                Organization.SETTING_GOOGLE_APPS_DOMAINS:
                list(GOOGLE_APPS_DOMAIN)
            })

        column = Group.org
        column.default = default_org

        migrate(
            migrator.add_column('groups', 'org_id', column),
            migrator.add_column('events', 'org_id', column),
            migrator.add_column('data_sources', 'org_id', column),
            migrator.add_column('users', 'org_id', column),
            migrator.add_column('dashboards', 'org_id', column),
            migrator.add_column('queries', 'org_id', column),
            migrator.add_column('query_results', 'org_id', column),
        )

        # Change the uniqueness constraint on user email to be (org, email):
        migrate(migrator.drop_index('users', 'users_email'),
                migrator.add_index('users', ('org_id', 'email'), unique=True))

    db.close_db(None)
Example #10
from redash.models import db, Query
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'options', Query.options),
        )
Example #11
from redash.models import db, Change, AccessPermission, Query, Dashboard
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':

    if not Change.table_exists():
        Change.create_table()

    if not AccessPermission.table_exists():
        AccessPermission.create_table()

    migrator = PostgresqlMigrator(db.database)

    try:
        migrate(
            migrator.add_column('queries', 'version', Query.version),
            migrator.add_column('dashboards', 'version', Dashboard.version))
    except Exception as ex:
        print("Error while adding version column to queries/dashboards. Maybe it already exists?")
        print(ex)
Example #12
from playhouse.migrate import migrate, PostgresqlMigrator
from app.models import DATABASE
from app.qa.models import Question

migrator = PostgresqlMigrator(DATABASE)

migrate(
    migrator.add_column('TBL_QUESTION', Question.datetime.db_column,
                        Question.datetime))
Example #13
from playhouse.migrate import migrate, PostgresqlMigrator
from app.models import DATABASE
from app.notes.models import Note

migrator = PostgresqlMigrator(DATABASE)

migrate(migrator.add_column('TBL_NOTE', Note.datetime.db_column,
                            Note.datetime))
Example #14
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    cursor = db.database.execute_sql(
        "SELECT column_name FROM information_schema.columns WHERE table_name='alerts' and column_name='rearm';"
    )
    if cursor.rowcount > 0:
        print("Column exists. Skipping.")
        exit()

    with db.database.transaction():
        migrate(migrator.add_column('alerts', 'rearm', models.Alert.rearm), )

    db.close_db(None)
Example #15
import peewee
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    cursor = db.database.execute_sql("SELECT column_name FROM information_schema.columns WHERE table_name='alerts' and column_name='rearm';")
    if cursor.rowcount > 0:
        print("Column exists. Skipping.")
        exit()

    with db.database.transaction():
        migrate(
            migrator.add_column('alerts', 'rearm', models.Alert.rearm),
        )

    db.close_db(None)
Example #16
from peewee import ProgrammingError, BooleanField
from playhouse.migrate import migrate, PostgresqlMigrator

from application.models import database
from application.utils import print_tb

migrator = PostgresqlMigrator(database)

try:
    sex = BooleanField(default=True)
    migrate(migrator.add_column('bot_person', 'sex', sex))
except ProgrammingError as pe:
    print_tb(pe)
    database.rollback()
except Exception as e:
    database.rollback()
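Example #16 guards add_column by catching ProgrammingError and rolling back. An alternative, used in Examples #14 and #15 above, is to query information_schema first and skip the migration when the column already exists. A short sketch against the same database, migrator and sex objects defined above:

# Sketch: pre-check for the column instead of catching ProgrammingError.
cursor = database.execute_sql(
    "SELECT column_name FROM information_schema.columns "
    "WHERE table_name = 'bot_person' AND column_name = 'sex';"
)
if cursor.fetchone() is None:
    migrate(migrator.add_column('bot_person', 'sex', sex))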
Example #17
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == "__main__":
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(migrator.add_column("queries", "last_modified_by_id", models.Query.last_modified_by))

        db.database.execute_sql("UPDATE queries SET last_modified_by_id = user_id;")

    db.close_db(None)
Example #18
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'schedule', models.Query.schedule),
        )

        db.database.execute_sql("UPDATE queries SET schedule = ttl WHERE ttl > 0;")

        migrate(
            migrator.drop_column('queries', 'ttl')
        )

    db.close_db(None)


Example #19
from peewee import PostgresqlDatabase
from playhouse.migrate import PostgresqlMigrator, migrate
from playhouse.postgres_ext import JSONField

db = PostgresqlDatabase('burnthrough',
                        user="******",
                        password="******",
                        host="127.0.0.1",
                        port=5432)
migrator = PostgresqlMigrator(db)

migrate(
    migrator.add_column('usertasktree', 'expanded_nodes',
                        JSONField(default=[])), )
Example #20
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'last_modified_by_id', models.Query.last_modified_by)
        )

        db.database.execute_sql("UPDATE queries SET last_modified_by_id = user_id;")

    db.close_db(None)
Example #21
from redash.models import db, Query
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(migrator.add_column('queries', 'options', Query.options), )
Example #22
import os

import peewee
from playhouse.migrate import PostgresqlMigrator, migrate
from redash.models import db, NotificationDestination, AlertSubscription, Organization, User
from redash.destinations import get_configuration_schema_for_destination_type

HIPCHAT_ROOM_ID = os.environ.get('REDASH_HIPCHAT_ROOM_ID', None)

WEBHOOK_ENDPOINT = os.environ.get('REDASH_WEBHOOK_ENDPOINT', None)
WEBHOOK_USERNAME = os.environ.get('REDASH_WEBHOOK_USERNAME', None)
WEBHOOK_PASSWORD = os.environ.get('REDASH_WEBHOOK_PASSWORD', None)

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():

        if not NotificationDestination.table_exists():
            NotificationDestination.create_table()
            
            # Update alert subscription fields
            migrate(
                migrator.add_column('alert_subscriptions', 'destination_id', AlertSubscription.destination)
            )

            try:
                org = Organization.get_by_slug('default')
                user = User.select().where(User.org==org, peewee.SQL("%s = ANY(groups)", org.admin_group.id)).get()
            except Exception:
                print("!!! Warning: failed finding default organization or admin user, won't migrate Webhook/HipChat alert subscriptions.")
                exit()

            if WEBHOOK_ENDPOINT:
                # Have all existing alerts send to webhook if already configured
                schema = get_configuration_schema_for_destination_type('webhook')
                conf = {'url': WEBHOOK_ENDPOINT}
                if WEBHOOK_USERNAME:
                    conf['username'] = WEBHOOK_USERNAME
Example #23
import peewee
from redash import settings
from redash.models import db, NotificationDestination, AlertSubscription, Alert, Organization, User
from redash.destinations import get_configuration_schema_for_destination_type
from redash.utils.configuration import ConfigurationContainer
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)
    with db.database.transaction():

        if not NotificationDestination.table_exists():
            NotificationDestination.create_table()
            
            # Update alert subscription fields
            migrate(
                migrator.add_column('alert_subscriptions', 'destination_id', AlertSubscription.destination)
            )

            try:
                org = Organization.get_by_slug('default')
                user = User.select().where(User.org==org, peewee.SQL("%s = ANY(groups)", org.admin_group.id)).get()
            except Exception:
                print("!!! Warning: failed finding default organization or admin user, won't migrate Webhook/HipChat alert subscriptions.")
                exit()

            if settings.WEBHOOK_ENDPOINT:
                # Have all existing alerts send to webhook if already configured
                schema = get_configuration_schema_for_destination_type('webhook')
                conf = {'url': settings.WEBHOOK_ENDPOINT}
                if settings.WEBHOOK_USERNAME:
                    conf['username'] = settings.WEBHOOK_USERNAME
Example #24
def forward():
    category = peewee.CharField(max_length=20, default='')

    migrator = PostgresqlMigrator(models.DB)
    migrate(migrator.add_column('blogpost', 'category', category), )
Example #25
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'is_archived', models.Query.is_archived)
        )

    db.close_db(None)
Example #26
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == "__main__":
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():

        column = models.Group.countries
        column.null = True

        migrate(migrator.add_column("users", "countries", models.Group.countries))

        # for group in models.Group.select():
        #     group.save()
        migrate(migrator.drop_not_null("users", "countries"))

    db.close_db(None)
Example #27
from playhouse.migrate import migrate, PostgresqlMigrator
from app.models import DATABASE
from app.lesson.models import LessonStudent

migrator = PostgresqlMigrator(DATABASE)

LessonStudent.semester.default = 1
LessonStudent.year.default = 2015

migrate(
    migrator.add_column('TBL_LESSON_STUDENT', LessonStudent.semester.db_column,
                        LessonStudent.semester),
    migrator.add_column('TBL_LESSON_STUDENT', LessonStudent.year.db_column,
                        LessonStudent.year),
)
Example #28
from playhouse.migrate import migrate, PostgresqlMigrator
from app.models import DATABASE
from app.qa.models import Question, Reply
migrator = PostgresqlMigrator(DATABASE)

migrate(
    migrator.add_column('TBL_QUESTION', Question.votes.db_column, Question.votes),
    migrator.add_column('TBL_REPLY', Reply.votes.db_column, Reply.votes),
)
Example #29
class Migrate(object):
    def __init__(self, rules, func):
        self.rules = rules
        self.func = func
        self.actions = []
        self.raw_actions = []
        self.m = PostgresqlMigrator(database)

    def run(self):
        conn = database.obj.connection()

        for rule in self.rules:
            with conn.cursor() as cur:
                if not rule(cur):
                    return

        self.func(self)
        self.apply()

    def apply(self):
        print('Applying {} actions'.format(len(self.actions)))
        migrate(*self.actions)

        print('Executing {} raw queries'.format(len(self.raw_actions)))
        conn = database.obj.connection()
        for query, args in self.raw_actions:
            with conn.cursor() as cur:
                cur.execute(query, args)
            conn.commit()

    def add_columns(self, table, *fields):
        for field in fields:
            self.actions.append(
                self.m.add_column(table._meta.db_table, field.name, field))

    def rename_column(self, table, field, new_name):
        self.actions.append(
            self.m.rename_column(table._meta.db_table, field.name, new_name))

    def drop_not_nulls(self, table, *fields):
        for field in fields:
            self.actions.append(
                self.m.drop_not_null(table._meta.db_table, field.name))

    def add_not_nulls(self, table, *fields):
        for field in fields:
            self.actions.append(
                self.m.add_not_null(table._meta.db_table, field.name))

    def execute(self, query, params=None):
        self.raw_actions.append((query, params or []))

    def backfill_column(self,
                        table,
                        old_columns,
                        new_columns,
                        pkeys=None,
                        cast_funcs=None):
        total = table.select().count()

        if not pkeys:
            pkeys = [table._meta.primary_key]

        q = table.select(*(pkeys + old_columns)).tuples()

        idx = 0
        modified = 0

        start = time.time()
        with database.transaction() as txn:
            for values in q:
                idx += 1

                if idx % 10000 == 0:
                    print('[%ss] Backfilling %s %s/%s (wrote %s)' %
                          (time.time() - start, str(table), idx, total,
                           modified))

                # Commit periodically, in batches of 1000 modified rows.
                if modified and modified % 1000 == 0:
                    txn.commit()

                # Candidate new values for this row; the selected tuple starts
                # with the pkey values, followed by the old column values.
                obj = {
                    new_column.name:
                        cast_funcs[new_column](values[i + len(pkeys)])
                        if cast_funcs and new_column in cast_funcs
                        else values[i + len(pkeys)]
                    for i, new_column in enumerate(new_columns)
                }
                if not any(obj.values()):
                    continue

                modified += 1
                table.update(
                    **{
                        new_column.name: values[i + len(pkeys)]
                        for i, new_column in enumerate(new_columns)
                    }).where(
                        reduce(operator.and_,
                               [(iz == values[i])
                                for i, iz in enumerate(pkeys)])).execute()

        txn.commit()
        print('DONE, %s scanned %s written' % (idx, modified))

    @staticmethod
    def missing(table, field):
        def rule(cursor):
            cursor.execute(COLUMN_EXISTS_SQL, (table._meta.db_table, field))
            if len(cursor.fetchall()) == 0:
                return True
            return False

        return rule

    @staticmethod
    def nullable(table, field):
        def rule(cursor):
            cursor.execute(GET_NULLABLE_SQL, (table._meta.db_table, field))
            return cursor.fetchone()[0] == 'YES'

        return rule

    @staticmethod
    def non_nullable(table, field):
        def rule(cursor):
            cursor.execute(GET_NULLABLE_SQL, (table._meta.db_table, field))
            return cursor.fetchone()[0] == 'NO'

        return rule

    @classmethod
    def only_if(cls, check, table, *fields):
        def deco(func):
            rules = [check(table, i) for i in fields]
            cls(rules, func).run()

        return deco

    @classmethod
    def always(cls):
        def deco(func):
            cls([lambda c: True], func).run()

        return deco
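The Migrate helper in Example #29 is driven by its classmethod decorators: only_if builds one rule per field name and runs the whole migration immediately at decoration time, passing the Migrate instance to the decorated function. A short usage sketch; SomeModel and its new_field are hypothetical names:

# Hypothetical usage: add SomeModel.new_field only when the column is missing.
@Migrate.only_if(Migrate.missing, SomeModel, 'new_field')
def add_new_field(m):
    m.add_columns(SomeModel, SomeModel.new_field)
    # Raw queries queued with m.execute() run after the schema actions in apply().
    m.execute('UPDATE some_model SET new_field = %s', ['default value'])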
Example #30
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == "__main__":
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(migrator.add_column("alerts", "rearm", models.Alert.rearm))
    db.close_db(None)
Example #31
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        column = models.User.api_key
        column.null = True
        migrate(
            migrator.add_column('users', 'api_key', models.User.api_key),
        )

        for user in models.User.select(models.User.id, models.User.api_key):
            user.save(only=user.dirty_fields)

        migrate(
            migrator.add_not_null('users', 'api_key')
        )

    db.close_db(None)



Example #32
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'schedule', models.Query.schedule),
        )

        db.database.execute_sql("UPDATE queries SET schedule = ttl WHERE ttl > 0;")

        migrate(
            migrator.drop_column('queries', 'ttl')
        )

    db.close_db(None)
Example #33
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'is_draft', models.Query.is_draft))
        migrate(
            migrator.add_column('dashboards', 'is_draft',
                                models.Query.is_draft))
        db.database.execute_sql(
            "UPDATE queries SET is_draft = (name = 'New Query')")
    db.close_db(None)
Example #34
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'updated_at',
                                models.Query.updated_at),
            migrator.add_column('dashboards', 'updated_at',
                                models.Dashboard.updated_at),
            migrator.add_column('widgets', 'updated_at',
                                models.Widget.updated_at),
            migrator.add_column('users', 'created_at', models.User.created_at),
            migrator.add_column('users', 'updated_at', models.User.updated_at),
            migrator.add_column('visualizations', 'created_at',
                                models.Visualization.created_at),
            migrator.add_column('visualizations', 'updated_at',
                                models.Visualization.updated_at))

        db.database.execute_sql("UPDATE queries SET updated_at = created_at;")
        db.database.execute_sql(
            "UPDATE dashboards SET updated_at = created_at;")
        db.database.execute_sql("UPDATE widgets SET updated_at = created_at;")

    db.close_db(None)
Example #35
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'updated_at', models.Query.updated_at),
            migrator.add_column('dashboards', 'updated_at', models.Dashboard.updated_at),
            migrator.add_column('widgets', 'updated_at', models.Widget.updated_at),
            migrator.add_column('users', 'created_at', models.User.created_at),
            migrator.add_column('users', 'updated_at', models.User.updated_at),
            migrator.add_column('visualizations', 'created_at', models.Visualization.created_at),
            migrator.add_column('visualizations', 'updated_at', models.Visualization.updated_at)
        )

        db.database.execute_sql("UPDATE queries SET updated_at = created_at;")
        db.database.execute_sql("UPDATE dashboards SET updated_at = created_at;")
        db.database.execute_sql("UPDATE widgets SET updated_at = created_at;")

    db.close_db(None)
Example #36
from playhouse.migrate import migrate, PostgresqlMigrator
from app.models import DATABASE
from app.notes.models import Note

Note.original_filename.default = ''

migrator = PostgresqlMigrator(DATABASE)

migrate(
    migrator.add_column('TBL_NOTE', Note.original_filename.db_column,
                        Note.original_filename))

for note in Note.select():
    note.original_filename = note.filename
    note.save()
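Example #36 backfills original_filename by loading and saving every Note. For larger tables, the redash examples in this listing do the same kind of backfill with a single SQL statement; a sketch of that variant, assuming the underlying column names are original_filename and filename:

# Sketch: single-statement backfill instead of the per-row loop above.
DATABASE.execute_sql('UPDATE "TBL_NOTE" SET original_filename = filename;')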
Example #37
from __future__ import print_function
from redash.models import db, Change, AccessPermission, Query, Dashboard
from playhouse.migrate import PostgresqlMigrator, migrate

if __name__ == '__main__':

    if not Change.table_exists():
        Change.create_table()

    if not AccessPermission.table_exists():
        AccessPermission.create_table()

    migrator = PostgresqlMigrator(db.database)

    try:
        migrate(
            migrator.add_column('queries', 'version', Query.version),
            migrator.add_column('dashboards', 'version', Dashboard.version)
        )
    except Exception as ex:
        print("Error while adding version column to queries/dashboards. Maybe it already exists?")
        print(ex)

Example #38
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('queries', 'is_draft', models.Query.is_draft)
        )
        migrate(
            migrator.add_column('dashboards', 'is_draft', models.Query.is_draft)
        )
        db.database.execute_sql("UPDATE queries SET is_draft = (name = 'New Query')")
    db.close_db(None)
Example #39
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        column = models.User.api_key
        column.null = True
        migrate(migrator.add_column('users', 'api_key', models.User.api_key), )

        for user in models.User.select(models.User.id, models.User.api_key):
            user.save(only=user.dirty_fields)

        migrate(migrator.add_not_null('users', 'api_key'))

    db.close_db(None)
Example #40
from collections import defaultdict
from redash.models import db, DataSourceGroup, DataSource, Group, Organization, User
from playhouse.migrate import PostgresqlMigrator, migrate
import peewee

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        # Add type to groups
        migrate(migrator.add_column('groups', 'type', Group.type))

        for name in ['default', 'admin']:
            group = Group.get(Group.name == name)
            group.type = Group.BUILTIN_GROUP
            group.save()

        # Create association table between data sources and groups
        DataSourceGroup.create_table()

        # add default to existing data source:
        default_org = Organization.get_by_id(1)
        default_group = Group.get(Group.name == "default")
        for ds in DataSource.all(default_org):
            DataSourceGroup.create(data_source=ds, group=default_group)

        # change the groups list on a user object to be an ids list
        migrate(migrator.rename_column('users', 'groups', 'old_groups'), )

        migrate(migrator.add_column('users', 'groups', User.groups))
Example #41
def upgrade_db_structure():
    """Upgrade the tables version by version"""
    from playhouse.migrate import PostgresqlMigrator, migrate
    migrator = PostgresqlMigrator(wapt_db)
    logger.info('Current DB: %s version: %s' %
                (wapt_db.connect_kwargs, get_db_version()))

    # from 1.4.1 to 1.4.2
    if get_db_version() < '1.4.2':
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' %
                        (get_db_version(), '1.4.2'))
            migrate(
                migrator.rename_column(Hosts._meta.name, 'host', 'host_info'),
                migrator.rename_column(Hosts._meta.name, 'wapt',
                                       'wapt_status'),
                migrator.rename_column(Hosts._meta.name, 'update_status',
                                       'last_update_status'),
                migrator.rename_column(Hosts._meta.name, 'softwares',
                                       'installed_softwares'),
                migrator.rename_column(Hosts._meta.name, 'packages',
                                       'installed_packages'),
            )
            HostGroups.create_table(fail_silently=True)
            HostJsonRaw.create_table(fail_silently=True)
            HostWsus.create_table(fail_silently=True)

            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = '1.4.2'
            v.save()

    next_version = '1.4.3'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' %
                        (get_db_version(), next_version))
            if not [
                    c.name for c in wapt_db.get_columns('hosts')
                    if c.name == 'host_certificate'
            ]:
                migrate(
                    migrator.add_column(Hosts._meta.name, 'host_certificate',
                                        Hosts.host_certificate), )

            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.4.3.1'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' %
                        (get_db_version(), next_version))
            columns = [c.name for c in wapt_db.get_columns('hosts')]
            opes = []
            if not 'last_logged_on_user' in columns:
                opes.append(
                    migrator.add_column(Hosts._meta.name,
                                        'last_logged_on_user',
                                        Hosts.last_logged_on_user))
            if 'installed_sofwares' in columns:
                opes.append(
                    migrator.drop_column(Hosts._meta.name,
                                         'installed_sofwares'))
            if 'installed_packages' in columns:
                opes.append(
                    migrator.drop_column(Hosts._meta.name,
                                         'installed_packages'))
            migrate(*opes)

            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.4.3.2'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' %
                        (get_db_version(), next_version))
            wapt_db.execute_sql('''\
                ALTER TABLE hostsoftwares
                    ALTER COLUMN publisher TYPE character varying(2000),
                    ALTER COLUMN version TYPE character varying(1000);''')
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.5.0.4'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' %
                        (get_db_version(), next_version))
            columns = [c.name for c in wapt_db.get_columns('hosts')]
            opes = []
            if not 'server_uuid' in columns:
                opes.append(
                    migrator.add_column(Hosts._meta.name, 'server_uuid',
                                        Hosts.server_uuid))
            migrate(*opes)
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.5.0.11'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' %
                        (get_db_version(), next_version))
            HostGroups.create_table(fail_silently=True)
            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.5.1.1'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' %
                        (get_db_version(), next_version))
            columns = [c.name for c in wapt_db.get_columns('hosts')]
            opes = []
            if not 'computer_ad_site' in columns:
                opes.append(
                    migrator.add_column(Hosts._meta.name, 'computer_ad_site',
                                        Hosts.computer_ad_site))
            if not 'computer_ad_ou' in columns:
                opes.append(
                    migrator.add_column(Hosts._meta.name, 'computer_ad_ou',
                                        Hosts.computer_ad_ou))
            if not 'computer_ad_groups' in columns:
                opes.append(
                    migrator.add_column(Hosts._meta.name, 'computer_ad_groups',
                                        Hosts.computer_ad_groups))
            migrate(*opes)

            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.5.1.3'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' %
                        (get_db_version(), next_version))
            columns = [c.name for c in wapt_db.get_columns('hosts')]
            opes = []
            if not 'registration_auth_user' in columns:
                opes.append(
                    migrator.add_column(Hosts._meta.name,
                                        'registration_auth_user',
                                        Hosts.registration_auth_user))
            migrate(*opes)

            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.5.1.14'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' %
                        (get_db_version(), next_version))
            columns = [
                c.name for c in wapt_db.get_columns('hostpackagesstatus')
            ]
            opes = []
            if not 'depends' in columns:
                opes.append(
                    migrator.add_column(HostPackagesStatus._meta.name,
                                        'depends', HostPackagesStatus.depends))
            if not 'conflicts' in columns:
                opes.append(
                    migrator.add_column(HostPackagesStatus._meta.name,
                                        'conflicts',
                                        HostPackagesStatus.conflicts))
            migrate(*opes)

            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()

    next_version = '1.5.1.17'
    if get_db_version() < next_version:
        with wapt_db.atomic():
            logger.info('Migrating from %s to %s' %
                        (get_db_version(), next_version))
            opes = []
            ##
            migrate(*opes)

            WsusScan2History.create_table(fail_silently=True)

            (v, created) = ServerAttribs.get_or_create(key='db_version')
            v.value = next_version
            v.save()
Example #42
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        migrate(
            migrator.add_column('dashboards', 'groups', models.Dashboard.groups)
        )

    db.close_db(None)
Example #43
if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        Organization.create_table()

        default_org = Organization.create(name="Default", slug='default', settings={
            Organization.SETTING_GOOGLE_APPS_DOMAINS: list(settings.GOOGLE_APPS_DOMAIN)
        })

        column = Group.org
        column.default = default_org

        migrate(
            migrator.add_column('groups', 'org_id', column),
            migrator.add_column('events', 'org_id', column),
            migrator.add_column('data_sources', 'org_id', column),
            migrator.add_column('users', 'org_id', column),
            migrator.add_column('dashboards', 'org_id', column),
            migrator.add_column('queries', 'org_id', column),
            migrator.add_column('query_results', 'org_id', column),
        )

        # Change the uniqueness constraint on user email to be (org, email):
        migrate(
            migrator.drop_index('users', 'users_email'),
            migrator.add_index('users', ('org_id', 'email'), unique=True)
        )

    db.close_db(None)
Example #44
from playhouse.migrate import PostgresqlMigrator, migrate

from redash.models import db
from redash import models

if __name__ == '__main__':
    db.connect_db()
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():

        column = models.User.parent_user_id
        column.null = True

        migrate(migrator.add_column('users', 'parent_user_id', models.User.parent_user_id))

        # for group in models.Group.select():
        #     group.save()
        migrate(migrator.drop_not_null('users', 'parent_user_id'))

    db.close_db(None)
Example #45
from playhouse.migrate import migrate, PostgresqlMigrator
from app.models import DATABASE
from app.qa.models import Reply
migrator = PostgresqlMigrator(DATABASE)

Reply.content.default = ''

migrate(
    migrator.drop_column('TBL_REPLY', Reply.content.db_column),
    migrator.add_column('TBL_REPLY', Reply.content.db_column, Reply.content))
Example #46
from collections import defaultdict
from redash.models import db, DataSourceGroup, DataSource, Group, Organization, User
from playhouse.migrate import PostgresqlMigrator, migrate
import peewee

if __name__ == '__main__':
    migrator = PostgresqlMigrator(db.database)

    with db.database.transaction():
        # Add type to groups
        migrate(
            migrator.add_column('groups', 'type', Group.type)
        )

        for name in ['default', 'admin']:
            group = Group.get(Group.name==name)
            group.type = Group.BUILTIN_GROUP
            group.save()

        # Create association table between data sources and groups
        DataSourceGroup.create_table()

        # add default to existing data source:
        default_org = Organization.get_by_id(1)
        default_group = Group.get(Group.name=="default")
        for ds in DataSource.all(default_org):
            DataSourceGroup.create(data_source=ds, group=default_group)

        # change the groups list on a user object to be an ids list
        migrate(
            migrator.rename_column('users', 'groups', 'old_groups'),