Example #1
def migrate(store=avatar_store, config=config, dryRun=False):
    schema = makeSchema(store, dryRun)
    if schema is None:
        print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
        print "Migrations not supported for", getConnectionClassName(store)
        return

    schema.ensureSchemaTable()

    # Make sure the real schema is what schemup_tables says it is
    print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
    print "Checking schema integrity..."
    mismatches = validator.findSchemaMismatches(schema)
    # NTA TODO: Pretty print
    if mismatches:
        print "Real schema & 'schemup_tables' are out of sync (did you change the schema outside of schemup?):"
        for mismatch in mismatches:
            print mismatch, "\n"
        raise Exception("Schema mismatches")

    loadMigrations(store, config)

    print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
    print "Upgrading..."
    sqls = commands.upgrade(schema, stormSchema)

    # Sanity checking
    if not dryRun:
        commands.validate(schema, stormSchema)

    # This rollback is needed because several schemup operations touch the
    # DB through queries, but only "ensureSchemaTable" and "upgrade" end
    # the transaction (they commit when they need to persist data).
    # TODO: The correct fix would be to put transaction start/end handling
    # in a single place (either here or inside schemup, both of which
    # require changing schemup). Preferably we would separate the actual
    # querying from the transaction start/end management.
    store.rollback()

    if not sqls:
        print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
        print "Schema up-to-date"
    elif dryRun:
        print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
        for sql in sqls:
            print ""
            print sql
    else:
        print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
        print "Migrated successfully"

    print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
Example #2
def setup():
    # Populate config
    app.config.from_envvar('GTOKEN_COMMON_SETTINGS')
    app.config.from_envvar('GTOKEN_LOCAL_SETTINGS')

    app.wsgi_app = ProxyFix(app.wsgi_app)


    if not app.debug:
        # Set up email alerts for errors
        from logging.handlers import SMTPHandler
        mail_handler = SMTPHandler('127.0.0.1',
                                   '*****@*****.**',
                                   app.config['ADMINS'], 'GToken server had an error')
        mail_handler.setLevel(logging.ERROR)
        app.logger.addHandler(mail_handler)

    # Log to a file (currently not working)
    from logging.handlers import RotatingFileHandler
    file_handler = RotatingFileHandler(app.config['LOG_DIR'], maxBytes=10, backupCount=5)
    file_handler.setLevel(logging.DEBUG)
    app.logger.addHandler(file_handler)


    # Init Storm store
    database = create_database(app.config['DB'])
    store.__init__(database)

    pgConn = store._connection._raw_connection
    pgSchema = postgres.PostgresSchema(pgConn, dryRun=False)

    import os
    app.logger.debug(os.getcwd())

    commands.load('migrations')
    commands.upgrade(pgSchema, models.stormSchema)
    commands.validate(pgSchema, models.stormSchema)
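The rotating file handler above is pointed at LOG_DIR and rotates after only 10 bytes, which may be related to the "not working" note. A sketch of a more conventional setup, assuming LOG_DIR names a directory and using a hypothetical gtoken.log filename:

import os
import logging
from logging.handlers import RotatingFileHandler

# Sketch only: write to a file inside LOG_DIR (hypothetical filename)
# and rotate at 10 MB instead of 10 bytes.
log_path = os.path.join(app.config['LOG_DIR'], 'gtoken.log')
file_handler = RotatingFileHandler(log_path, maxBytes=10 * 1024 * 1024, backupCount=5)
file_handler.setLevel(logging.DEBUG)
app.logger.addHandler(file_handler)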
Example #3
def run(action='nothing'):
    dryRun = action != 'commit'

    dbConfig = config.POSTGRES
    dbConfigParams = {
        "database": dbConfig["database"],
        "host": dbConfig["host"],
        "user": dbConfig["username"],
        "password": dbConfig["password"],
        "port": dbConfig["port"]
    }

    pgConn = psycopg2.connect(**dbConfigParams)

    pgSchema = postgres.PostgresSchema(pgConn, dryRun=dryRun)

    dictSchema = DictSchema("schema/versions.json")

    pgSchema.ensureSchemaTable()

    # Ensure current DB's integrity
    schemaMismatches = findSchemaMismatches(pgSchema)
    if schemaMismatches:
        print "Real schema & 'schemup_tables' are out of sync"
        for mismatch in schemaMismatches:
            print mismatch, "\n"
        sys.exit(1)

    commands.load('schema/migrations')
    sqls = commands.upgrade(pgSchema, dictSchema)

    if dryRun and sqls:
        print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
        for sql in sqls:
            print sql
        print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
        sys.exit(1)

    commands.validate(pgSchema, dictSchema)
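The connection settings are read from config.POSTGRES; the keys accessed above imply a mapping like the following (values are hypothetical), with "username" renamed to "user" before being unpacked into psycopg2.connect:

# Hypothetical config.POSTGRES, inferred from the keys read above.
POSTGRES = {
    "database": "myapp",
    "host": "localhost",
    "username": "myapp_user",   # passed to psycopg2 as "user"
    "password": "secret",
    "port": 5432,
}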
Example #4
class DictSchema(object):
    def __init__(self, path):
        self.versions = json.load(open(path, "r"))

    def getExpectedTableVersions(self):
        return sorted(self.versions.iteritems())

dbConfig = json.load(open("../config/db.json", "r"))

pgConn = psycopg2.connect(**dbConfig)

pgSchema = postgres.PostgresSchema(pgConn, dryRun=dryRun)

dictSchema = DictSchema("versions.json")

pgSchema.ensureSchemaTable()

# Ensure current DB's integrity
schemaMismatches = findSchemaMismatches(pgSchema)
if schemaMismatches:
    print "Real schema & 'schemup_tables' are out of sync"
    for mismatch in schemaMismatches:
        print mismatch, "\n"
    sys.exit(1)

commands.load('migrations')
sqls = commands.upgrade(pgSchema, dictSchema)

if dryRun and sqls:
    print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
    for sql in sqls: print sql
    print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
    sys.exit(1)

commands.validate(pgSchema, dictSchema)
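Here the JSON config is unpacked straight into psycopg2.connect(**dbConfig), so its keys must already be valid connect() keyword arguments. A sketch of hypothetical ../config/db.json contents, written from Python:

import json

# Hypothetical contents for ../config/db.json; every key must be a
# keyword argument accepted by psycopg2.connect().
sample_db_config = {
    "database": "myapp",
    "host": "localhost",
    "user": "myapp_user",
    "password": "secret",
    "port": 5432,
}
with open("../config/db.json", "w") as f:
    json.dump(sample_db_config, f, indent=2)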
Example #5
dryRun = not (len(sys.argv) > 1 and sys.argv[1] == 'commit')


class DictSchema(object):
    def __init__(self, path):
        self.versions = json.load(open(path, "r"))

    def getExpectedTableVersions(self):
        return sorted(self.versions.iteritems())


dbConfig = json.load(open("../config/db.json", "r"))

pgConn = psycopg2.connect(**dbConfig)

pgSchema = postgres.PostgresSchema(pgConn, dryRun=dryRun)

dictSchema = DictSchema("versions.json")

commands.load('migrations')
sqls = commands.upgrade(pgSchema, dictSchema)

if dryRun and sqls:
    print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
    for sql in sqls:
        print sql
    print "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~"
    sys.exit(1)

commands.validate(pgSchema, dictSchema)
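DictSchema treats versions.json as a flat mapping from table name to the version the database is expected to be at, and getExpectedTableVersions simply returns the sorted pairs. A small sketch with hypothetical contents (the version strings follow the naming used in the next example):

# Hypothetical versions.json contents:
#   {"quick": "bgh_3", "new_table": "bgh_1"}
schema = DictSchema("versions.json")

# getExpectedTableVersions() then yields sorted (table, version) pairs:
#   [("new_table", "bgh_1"), ("quick", "bgh_3")]
print(schema.getExpectedTableVersions())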
Example #6
@upgrader('quick', 'bgh_2', 'bgh_3')
def quick_bgh2to3(dbSchema):
    dbSchema.execute("ALTER TABLE quick ADD onemore INTEGER")


@stormSchema.versioned
class NewTable(object):
    __storm_table__ = "new_table"
    __version__ = "bgh_1"

@upgrader('new_table', None, 'bgh_1')
def new_table_create(dbSchema):
    dbSchema.execute("CREATE TABLE new_table ("
                     " id SERIAL NOT NULL PRIMARY KEY,"
                     " name VARCHAR)")


commands.upgrade(postgresSchema, stormSchema)

validationError = commands.validate(postgresSchema, stormSchema)
if validationError is not None:
    errorType, errors = validationError
    print("Validation failed (%s)" % errorType)
    for (tableName, actual, expected) in errors:
        print("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~")
        print("Table: %s" % tableName)
        print("- Actual: %s" % actual)
        print("- Expected: %s" % expected)
    raise SystemExit
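Later revisions follow the same pattern: bump __version__ on the versioned class and register another @upgrader(table, fromVersion, toVersion) step. A sketch of a hypothetical next step for new_table (the bgh_2 version string and the added column are made up):

# Hypothetical bgh_1 -> bgh_2 step for new_table; NewTable.__version__
# would be bumped to "bgh_2" at the same time.
@upgrader('new_table', 'bgh_1', 'bgh_2')
def new_table_bgh1to2(dbSchema):
    dbSchema.execute("ALTER TABLE new_table ADD description VARCHAR")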