def connect_db(init_schema=True):
    """
    Open (and optionally initialise) the local sync database.

    @param init_schema: when truthy, create/verify the schema via
                        init_db_schema() after connecting

    @return: an open DAL connection to <BASE_FOLDER>/sync.db

    Relies on module-level globals: BASE_FOLDER, lazy_tables,
    fake_migrate, fake_migrate_all, migrate.
    """
    db_file = os.path.join(BASE_FOLDER, "sync.db")
    con = DAL('sqlite://' + db_file,
              pool_size=10,
              check_reserved=['all'],
              lazy_tables=lazy_tables,
              fake_migrate=fake_migrate,
              fake_migrate_all=fake_migrate_all,
              migrate=migrate)
    # WAL journal mode allows concurrent readers while a writer is active
    con.executesql('PRAGMA journal_mode=WAL')
    # Fix: truthiness test instead of the brittle "is True" identity check
    if init_schema:
        init_db_schema(con)
    return con
class S3Migration(object):
    """
        Database Migration Toolkit
        - used to help migrate both a production database on a server
          and also an offline client

        Normally run from a script in web2py context, but without models loaded:
            cd web2py
            python web2py.py -S eden -R <script.py>

        Where script looks like:
            m = local_import("s3migration")
            migrate = m.S3Migration()
            migrate.prep(foreigns=[],
                         ondeletes=[],
                         strbools=[],
                         strints=[],
                         uniques=[],
                         )
            #migrate.migrate()
            migrate.post(strbools=[],
                         strints=[],
                         )

        FYI: If you need to access a filename in eden/databases/ then here is how:
            import hashlib
            (db_string, pool_size) = settings.get_database_string()
            prefix = hashlib.md5(db_string).hexdigest()
            filename = "%s_%s.table" % (prefix, tablename)
    """

    def __init__(self):
        """ Read deployment settings and open a handle to the database """

        request = current.request

        # Load s3cfg by dotted path (avoids needing models loaded)
        name = "applications.%s.modules.s3cfg" % request.application
        s3cfg = __import__(name)
        for item in name.split(".")[1:]:
            # Walk down from the top-level package to the s3cfg module
            s3cfg = getattr(s3cfg, item)
        settings = s3cfg.S3Config()
        # Pass into template
        current.deployment_settings = settings

        # Execute 000_config.py to populate the settings
        model = "%s/models/000_config.py" % request.folder
        code = getcfs(model, model, None)
        environment = build_environment(request, current.response,
                                        current.session)
        environment["settings"] = settings

        def template_path():
            " Return the path of the Template config.py to load "
            path = os.path.join(request.folder,
                                "private", "templates",
                                settings.get_template(),
                                "config.py")
            return path

        environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage
        restricted(code, environment, layer=model)

        self.db_engine = settings.get_database_type()
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      # @ToDo: Set to False until we migrate
                      migrate_enabled=True,
                      )

    # -------------------------------------------------------------------------
    def prep(self, foreigns=None, ondeletes=None, strbools=None,
             strints=None, uniques=None):
        """
        Preparation before migration

        @param foreigns : List of tuples (tablename, fieldname) to have the
                          foreign keys removed
                          - if tablename == "all" then all tables are checked
        @param ondeletes : List of tuples (tablename, fieldname, reftable,
                           ondelete) to have the ondelete modified to
        @param strbools : List of tuples (tablename, fieldname) to convert
                          from string/integer to bools
        @param strints : List of tuples (tablename, fieldname) to convert
                         from string to integer
        @param uniques : List of tuples (tablename, fieldname) to have the
                         unique indices removed
        """
        # Fix: defaults changed from mutable [] literals to None
        foreigns = foreigns or []
        ondeletes = ondeletes or []
        strbools = strbools or []
        strints = strints or []
        uniques = uniques or []

        # Backup current database
        self.backup()

        # Remove Foreign Key constraints which need to go in next code
        for tablename, fieldname in foreigns:
            self.remove_foreign(tablename, fieldname)

        # Remove Unique indices which need to go in next code
        for tablename, fieldname in uniques:
            self.remove_unique(tablename, fieldname)

        # Modify ondeletes
        # Fix: this loop previously iterated "uniques" (2-tuples), which
        # cannot unpack into 4 names - it must iterate "ondeletes"
        for tablename, fieldname, reftable, ondelete in ondeletes:
            self.ondelete(tablename, fieldname, reftable, ondelete)

        # Remove fields which need to be altered in next code
        for tablename, fieldname in strbools:
            self.drop(tablename, fieldname)
        for tablename, fieldname in strints:
            self.drop(tablename, fieldname)

        self.db.commit()

    # -------------------------------------------------------------------------
    def migrate(self):
        """
        Perform the migration
        @ToDo
        """
        # Update code: git pull
        # run_models_in(environment)
        # or
        # Set migrate=True in models/000_config.py
        # current.s3db.load_all_models() via applications/eden/static/scripts/tools/noop.py
        # Set migrate=False in models/000_config.py
        pass

    # -------------------------------------------------------------------------
    def post(self, strbools=None, strints=None):
        """
        Cleanup after migration: restore the converted columns from the
        SQLite backup created by prep()/backup().

        @param strbools : List of tuples (tablename, fieldname) to convert
                          from string/integer to bools
        @param strints : List of tuples (tablename, fieldname) to convert
                         from string to integer
        """
        # Fix: defaults changed from mutable [] literals to None
        strbools = strbools or []
        strints = strints or []

        db = self.db

        # @ToDo: Do prepops of new tables

        # Restore data from backup
        folder = "%s/databases/backup" % current.request.folder
        db_bak = DAL("sqlite://backup.db",
                     folder=folder,
                     auto_import=True,
                     migrate=False)

        for tablename, fieldname in strints:
            newtable = db[tablename]
            newrows = db(newtable.id > 0).select(newtable.id)
            oldtable = db_bak[tablename]
            oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                     oldtable[fieldname])
            oldvals = oldrows.as_dict()
            for row in newrows:
                row_id = row.id
                val = oldvals[row_id][fieldname]
                if not val:
                    continue
                try:
                    # int() may raise on non-numeric legacy values
                    update_vars = {fieldname: int(val)}
                except ValueError:
                    current.log.warning("S3Migrate: Unable to convert %s to an integer - skipping" % val)
                else:
                    db(newtable.id == row_id).update(**update_vars)

        for tablename, fieldname in strbools:
            to_bool = self.to_bool
            newtable = db[tablename]
            newrows = db(newtable.id > 0).select(newtable.id)
            oldtable = db_bak[tablename]
            oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                     oldtable[fieldname])
            oldvals = oldrows.as_dict()
            for row in newrows:
                row_id = row.id
                val = oldvals[row_id][fieldname]
                if not val:
                    continue
                val = to_bool(val)
                # NOTE(review): values that convert to False are skipped
                # here (only truthy results are written back) - confirm
                # this is intended
                if val:
                    update_vars = {fieldname: val}
                    db(newtable.id == row_id).update(**update_vars)

        db.commit()

    # -------------------------------------------------------------------------
    @staticmethod
    def to_bool(value):
        """
        Converts 'something' to boolean. Returns None for invalid formats.
            Possible True  values: 1, True, "1", "TRue", "yes", "y", "t"
            Possible False values: 0, False, "0", "faLse", "no", "n", "f", 0.0
        """
        val = str(value).lower()
        if val in ("yes", "y", "true", "t", "1"):
            return True
        elif val in ("no", "n", "false", "f", "0", "0.0"):
            return False
        else:
            return None

    # -------------------------------------------------------------------------
    def backup(self):
        """
        Backup the database to a local SQLite database

        @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
               too long for a large DB
        """
        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            # Give the filesystem a moment to release the old folder
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder)

        # Copy Table structure
        for tablename in db.tables:
            if tablename == "gis_location":
                table = db[tablename]
                # the_geom is a PostGIS geometry column: not portable to SQLite
                fields = [table[field] for field in table.fields
                                       if field != "the_geom"]
                db_bak.define_table(tablename, *fields)
            else:
                db_bak.define_table(tablename, db[tablename])

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        filename = "%s/data.csv" % folder
        f = open(filename, "w")
        db.export_to_csv_file(f)
        f.close()
        f = open(filename, "r")
        # unique="uuid2": a non-existent field, so no de-duplication occurs
        db_bak.import_from_csv_file(f, unique="uuid2")
        f.close()
        db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak

    # -------------------------------------------------------------------------
    def drop(self, tablename, fieldname):
        """
        Drop a field from a table, e.g. for when changing type
        """
        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # Not Supported: http://www.sqlite.org/lang_altertable.html
            # But also not required (for strints anyway)
            sql = ""
        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \
                dict(tablename=tablename, fieldname=fieldname)
        elif db_engine == "postgres":
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \
                dict(tablename=tablename, fieldname=fieldname)
        # Fix: skip the deliberate sqlite no-op instead of executing ""
        if sql:
            try:
                db.executesql(sql)
            except Exception:
                import sys
                sys.stderr.write("%s\n" % sys.exc_info()[1])

        # Modify the .table file
        table = db[tablename]
        fields = []
        for fn in table.fields:
            if fn == fieldname:
                continue
            fields.append(table[fn])
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # -------------------------------------------------------------------------
    def ondelete(self, tablename, fieldname, reftable, ondelete):
        """
        Modify the ondelete constraint for a foreign key
        """
        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue
            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError
            elif db_engine == "mysql":
                # @ToDo: http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                raise NotImplementedError
            elif db_engine == "postgres":
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                # Fix: multiple actions in one ALTER TABLE are comma-separated;
                # the original embedded a second "ALTER TABLE ..." mid-statement
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey, ADD CONSTRAINT %(tablename)s_%(fieldname)s_fkey FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s ON DELETE %(ondelete)s;" % \
                    dict(tablename=tablename,
                         fieldname=fieldname,
                         reftable=reftable,
                         ondelete=ondelete)
            try:
                executesql(sql)
            except Exception:
                # Fix: the original printed an undefined name "fk" here,
                # raising NameError instead of the real error
                print("Error: Table %s with FK %s" % (tablename, fieldname))
                import sys
                sys.stderr.write("%s\n" % sys.exc_info()[1])

    # -------------------------------------------------------------------------
    def remove_foreign(self, tablename, fieldname):
        """
        Remove a Foreign Key constraint from a table
        """
        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue
            # Fix: ensure fk is always bound for the error message below
            # (the postgres branch never assigned it -> NameError on failure)
            fk = fieldname
            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError
            elif db_engine == "mysql":
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                # Extract the auto-generated FK name from the CREATE statement
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \
                    dict(tablename=tablename, fk=fk)
            elif db_engine == "postgres":
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey;" % \
                    dict(tablename=tablename, fieldname=fieldname)
            try:
                executesql(sql)
            except Exception:
                print("Error: Table %s with FK %s" % (tablename, fk))
                import sys
                sys.stderr.write("%s\n" % sys.exc_info()[1])

    # -------------------------------------------------------------------------
    def remove_unique(self, tablename, fieldname):
        """
        Remove a Unique Index from a table
        """
        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # @ToDo: http://www.sqlite.org/lang_altertable.html
            raise NotImplementedError
        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE `%(tablename)s` DROP INDEX `%(fieldname)s`;" % \
                dict(tablename=tablename, fieldname=fieldname)
        elif db_engine == "postgres":
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_key;" % \
                dict(tablename=tablename, fieldname=fieldname)
        try:
            db.executesql(sql)
        except Exception:
            import sys
            sys.stderr.write("%s\n" % sys.exc_info()[1])

        # Modify the .table file
        table = db[tablename]
        fields = []
        for fn in table.fields:
            field = table[fn]
            if fn == fieldname:
                field.unique = False
            fields.append(field)
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # =========================================================================
    # OLD CODE below here
    # - There are tests for these in /tests/dbmigration
    # -------------------------------------------------------------------------
    def rename_field(self,
                     tablename,
                     fieldname_old,
                     fieldname_new,
                     attributes_to_copy=None):
        """
        Rename a field, while keeping the other properties of the field the
        same. If there are some indexes on that table, these will be
        recreated and other constraints will remain unchanged too.

        @param tablename : name of the table in which the field is renamed
        @param fieldname_old : name of the original field before renaming
        @param fieldname_new : name of the field after renaming
        @param attributes_to_copy : list of attributes which need to be
                                    copied from the old_field to the new_field
                                    (needed only in sqlite)
        """
        db = self.db
        db_engine = self.db_engine

        if db_engine == "sqlite":
            # Helpers defined elsewhere in this module
            self._add_renamed_fields(db, tablename, fieldname_old,
                                     fieldname_new, attributes_to_copy)
            self._copy_field(db, tablename, fieldname_old, fieldname_new)
            sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \
                tablename
            list_index = db.executesql(sql)
            # Loop-invariant: hoisted out of the loop
            search_str = "%s(%s)" % (tablename, fieldname_old)
            for element in list_index:
                if element[0] is not None and search_str in element[0]:
                    sql = "CREATE INDEX %s__idx on %s(%s);" % \
                        (fieldname_new, tablename, fieldname_new)
                    try:
                        db.executesql(sql)
                    except Exception:
                        # Index may already exist: best-effort
                        pass

        elif db_engine == "mysql":
            field = db[tablename][fieldname_old]
            sql_type = map_type_web2py_to_sql(field.type)
            sql = "ALTER TABLE %s CHANGE %s %s %s(%s)" % (tablename,
                                                          fieldname_old,
                                                          fieldname_new,
                                                          sql_type,
                                                          field.length)
            db.executesql(sql)

        elif db_engine == "postgres":
            sql = "ALTER TABLE %s RENAME COLUMN %s TO %s" % \
                (tablename, fieldname_old, fieldname_new)
            db.executesql(sql)

    # -------------------------------------------------------------------------
    def rename_table(self, tablename_old, tablename_new):
        """
        Rename a table. If any fields reference that table, they will be
        handled too.

        @param tablename_old : name of the original table before renaming
        @param tablename_new : name of the table after renaming
        """
        try:
            sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old,
                                                    tablename_new)
            self.db.executesql(sql)
        except Exception as e:
            print(e)
class S3Migration(object):
    """
        Database Migration Toolkit
        - used to help migrate both a production database on a server
          and also an offline client

        Normally run from a script in web2py context, but without models loaded:
            cd web2py
            python web2py.py -S eden -R <script.py>

        Where script looks like:
            m = local_import("s3migration")
            migrate = m.S3Migration()
            migrate.prep(foreigns=[],
                         moves=[],
                         news=[],
                         ondeletes=[],
                         strbools=[],
                         strints=[],
                         uniques=[],
                         )
            #migrate.migrate()
            migrate.post(moves=[],
                         news=[],
                         strbools=[],
                         strints=[],
                         )

        FYI: If you need to access a filename in eden/databases/ then here is how:
            import hashlib
            (db_string, pool_size) = settings.get_database_string()
            prefix = hashlib.md5(db_string).hexdigest()
            filename = "%s_%s.table" % (prefix, tablename)

        FYI: To view all constraints on a table in MySQL:
            SHOW CREATE TABLE tablename;
        or
            select COLUMN_NAME, CONSTRAINT_NAME, REFERENCED_COLUMN_NAME, REFERENCED_TABLE_NAME
            from information_schema.KEY_COLUMN_USAGE
            where TABLE_NAME = 'module_resourcename';

        @ToDo: Function to ensure that roles match those in prepop
        @ToDo: Function to do selective additional prepop
    """

    def __init__(self):
        """ Read deployment settings and open a handle to the database """

        request = current.request

        # Can use normal import here since executed in web2py environment
        # (the old dotted __import__ dance is no longer needed):
        import s3cfg
        settings = s3cfg.S3Config()
        # Pass into template
        current.deployment_settings = settings

        # Read settings
        model = "%s/models/000_config.py" % request.folder
        code = getcfs(model, model, None)
        response = current.response

        # Needed as some Templates look at this & we don't wish to crash:
        response.s3 = Storage()

        # Global variables for 000_config.py
        environment = build_environment(request, response,
                                        current.session)
        environment["settings"] = settings
        # Some (older) 000_config.py also use "deployment_settings":
        environment["deployment_settings"] = settings
        environment["os"] = os
        environment["Storage"] = Storage

        # Execute 000_config.py
        restricted(code, environment, layer=model)

        self.db_engine = settings.get_database_type()
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      # @ToDo: Set to False until we migrate
                      migrate_enabled=True,
                      )

    # -------------------------------------------------------------------------
    def prep(self, foreigns=None,
                   moves=None,
                   news=None,
                   ondeletes=None,
                   strbools=None,
                   strints=None,
                   uniques=None,
                   ):
        """
        Preparation before migration

        @param foreigns : List of tuples (tablename, fieldname) to have the
                          foreign keys removed
                          - if tablename == "all" then all tables are checked
        @param moves : List of dicts {tablename: [(fieldname, new_tablename,
                       link_fieldname)]} to move a field from 1 table to
                       another
                       - fieldname can be a tuple if the fieldname changes:
                         (fieldname, new_fieldname)
        @param news : List of dicts {new_tablename: {'lookup_field': '',
                                                     'tables': [tablename: [fieldname]],
                                                     'supers': [tablename: [fieldname]],
                                                     } to create new records
                      from 1 or more old tables (inc all instances of an SE)
                      - fieldname can be a tuple if the fieldname changes:
                        (fieldname, new_fieldname)
        @param ondeletes : List of tuples [(tablename, fieldname, reftable,
                           ondelete)] to have the ondelete modified to
        @param strbools : List of tuples [(tablename, fieldname)] to convert
                          from string/integer to bools
        @param strints : List of tuples [(tablename, fieldname)] to convert
                         from string to integer
        @param uniques : List of tuples [(tablename, fieldname)] to have the
                         unique indices removed
        """
        # Remember what needs backing up, then backup current database
        self.moves = moves
        self.news = news
        self.strbools = strbools
        self.strints = strints
        self.backup()

        if foreigns:
            # Remove Foreign Key constraints which need to go in next code
            for tablename, fieldname in foreigns:
                self.remove_foreign(tablename, fieldname)

        if uniques:
            # Remove Unique indices which need to go in next code
            for tablename, fieldname in uniques:
                self.remove_unique(tablename, fieldname)

        if ondeletes:
            # Modify ondeletes
            for tablename, fieldname, reftable, ondelete in ondeletes:
                self.ondelete(tablename, fieldname, reftable, ondelete)

        # Remove fields which need to be altered in next code
        if strbools:
            for tablename, fieldname in strbools:
                self.drop(tablename, fieldname)
        if strints:
            for tablename, fieldname in strints:
                self.drop(tablename, fieldname)

        self.db.commit()

    # -------------------------------------------------------------------------
    def backup(self):
        """
        Backup the database to a local SQLite database

        @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
               too long for a large DB
        """
        moves = self.moves
        news = self.news
        strints = self.strints
        strbools = self.strbools
        if not moves and not news and not strbools and not strints:
            # Nothing to backup
            return

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            # Give the filesystem a moment to release the old folder
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder,
                     adapter_args={"foreign_keys": False})

        def clone_table(tablename, warn):
            """
            Define tablename in db_bak from its definition in db.
            Returns False when the definition must be deferred because a
            referenced table is not defined yet; True otherwise.
            When warn=True, non-KeyError failures are reported & the table
            is skipped instead of raising.
            """
            if tablename == "gis_location":
                table = db[tablename]
                # the_geom is a PostGIS geometry column: not portable to SQLite
                fields = [table[field] for field in table.fields
                                       if field != "the_geom"]
            else:
                fields = [db[tablename]]
            try:
                db_bak.define_table(tablename, *fields)
            except KeyError:
                # Can't resolve reference yet => cleanup & try later
                del db_bak[tablename]
                return False
            except Exception:
                if not warn:
                    raise
                import sys
                print("Skipping %s: %s" % (tablename, sys.exc_info()[1]))
            return True

        # Copy Table structure (deferring tables whose references aren't
        # resolvable yet, then retrying until all are defined)
        skip = []
        for tablename in db.tables:
            if not clone_table(tablename, warn=False):
                skip.append(tablename)
        while skip:
            # NOTE(review): a genuinely unresolvable reference would loop
            # forever here (same as the original code) - confirm acceptable
            _skip = []
            for tablename in skip:
                if not clone_table(tablename, warn=True):
                    _skip.append(tablename)
            skip = _skip

        # Which tables do we need to backup?
        tables = []
        if moves:
            for tablename in moves:
                tables.append(tablename)
        if news:
            for tablename in news:
                new = news[tablename]
                for t in new["tables"]:
                    tables.append(t)
                for s in new["supers"]:
                    tables.append(s)
                    stable = db[s]
                    rows = db(stable._id > 0).select(stable.instance_type)
                    instance_types = set([r.instance_type for r in rows])
                    for t in instance_types:
                        tables.append(t)
        if strbools:
            # Fix: this loop previously iterated strints, so the tables
            # needed for the strbools conversion were never backed up
            for tablename, fieldname in strbools:
                tables.append(tablename)
        if strints:
            for tablename, fieldname in strints:
                tables.append(tablename)

        # Remove duplicates
        tables = set(tables)

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        for tablename in tables:
            filename = "%s/%s.csv" % (folder, tablename)
            f = open(filename, "w")
            rows = db(db[tablename].id > 0).select()
            rows.export_to_csv_file(f)
            f.close()
            f = open(filename, "r")
            # unique="uuid2": a non-existent field, so no de-duplication occurs
            db_bak[tablename].import_from_csv_file(f, unique="uuid2")
            f.close()
        db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak

    # -------------------------------------------------------------------------
    def migrate(self):
        """
        Perform the migration
        @ToDo
        """
        # Update code: git pull
        # run_models_in(environment)
        # or
        # Set migrate=True in models/000_config.py
        # current.s3db.load_all_models() via applications/eden/static/scripts/tools/noop.py
        # Set migrate=False in models/000_config.py
        pass

    # -------------------------------------------------------------------------
    def post(self, moves=None,
                   news=None,
                   strbools=None,
                   strints=None,
                   ):
        """
        Cleanup after migration: restore moved/new/converted data from the
        SQLite backup created by prep()/backup().

        @param moves : List of dicts {tablename: [(fieldname, new_tablename,
                       link_fieldname)]} to move a field from 1 table to
                       another
                       - fieldname can be a tuple if the fieldname changes:
                         (fieldname, new_fieldname)
        @param news : List of dicts {new_tablename: {'lookup_field': '',
                                                     'tables': [tablename: [fieldname]],
                                                     'supers': [tablename: [fieldname]],
                                                     } to create new records
                      from 1 or more old tables (inc all instances of an SE)
                      - fieldname can be a tuple if the fieldname changes:
                        (fieldname, new_fieldname)
        @param strbools : List of tuples [(tablename, fieldname)] to convert
                          from string/integer to bools
        @param strints : List of tuples [(tablename, fieldname)] to convert
                         from string to integer
        """
        db = self.db

        # @ToDo: Do prepops of new tables

        # Restore data from backup
        folder = "%s/databases/backup" % current.request.folder
        db_bak = DAL("sqlite://backup.db",
                     folder=folder,
                     auto_import=True,
                     migrate=False)

        if moves:
            for tablename in moves:
                table = db_bak[tablename]
                # NOTE(review): the docstring describes a *list* of
                # (fieldname, new_tablename, link_fieldname) tuples per
                # table, but this unpacks the value directly as one
                # 3-tuple - confirm the expected structure
                fieldname, new_tablename, link_fieldname = moves[tablename]
                if isinstance(fieldname, (tuple, list)):
                    fieldname, new_fieldname = fieldname
                else:
                    new_fieldname = fieldname
                old_field = table[fieldname]
                new_linkfield = db[new_tablename][link_fieldname]
                rows = db_bak(table._id > 0).select(old_field, link_fieldname)
                for row in rows:
                    update_vars = {}
                    update_vars[new_fieldname] = row[old_field]
                    db(new_linkfield == row[link_fieldname]).update(**update_vars)

        if news:
            for tablename in news:
                # Read Data
                data = {}
                new = news[tablename]
                lookup_field = new["lookup_field"]
                _tables = new["tables"]
                for t in _tables:
                    fields = _tables[t]
                    # @ToDo: Support tuples
                    #for f in fields:
                    #    if isinstance(f, (tuple, list)):
                    table = db_bak[t]
                    table_fields = [table[f] for f in fields]
                    rows = db_bak(table.deleted == False).select(table[lookup_field],
                                                                 *table_fields)
                    for row in rows:
                        record_id = row[lookup_field]
                        if record_id in data:
                            _new = False
                            _data = data[record_id]
                        else:
                            _new = True
                            _data = {}
                        for f in fields:
                            if f in row:
                                if row[f] not in ("", None):
                                    # JSON type doesn't like ""
                                    _data[f] = row[f]
                        if _new:
                            data[record_id] = _data

                for s in new["supers"]:
                    fields = new["supers"][s]
                    # @ToDo: Support tuples
                    #for f in fields:
                    #    if isinstance(f, (tuple, list)):
                    stable = db_bak[s]
                    superkey = stable._id.name
                    rows = db_bak(stable.deleted == False).select(stable._id,
                                                                  stable.instance_type)
                    for row in rows:
                        # Resolve the instance table of this super-entity row
                        etable = db_bak[row["instance_type"]]
                        _fields = [f for f in fields if f in etable.fields]
                        table_fields = [etable[f] for f in _fields]
                        record = db_bak(etable[superkey] == row[superkey]).select(etable[lookup_field],
                                                                                  *table_fields
                                                                                  ).first()
                        if record:
                            record_id = record[lookup_field]
                            if record_id in data:
                                _new = False
                                _data = data[record_id]
                            else:
                                _new = True
                                _data = {}
                            for f in _fields:
                                if f in record:
                                    if record[f] not in ("", None):
                                        # JSON type doesn't like ""
                                        _data[f] = record[f]
                            if _new:
                                data[record_id] = _data

                # Create Records
                table = db[tablename]
                for record_id in data:
                    update_vars = data[record_id]
                    if update_vars:
                        update_vars[lookup_field] = record_id
                        # Can't rely on the defaults as auto_import doesn't
                        # see DAL defaults
                        update_vars["created_on"] = datetime.datetime.utcnow()
                        update_vars["deleted"] = False
                        update_vars["mci"] = 0
                        update_vars["modified_on"] = datetime.datetime.utcnow()
                        # Would always be identical otherwise:
                        update_vars["uuid"] = uuid4().urn
                        table.insert(**update_vars)

        if strints:
            for tablename, fieldname in strints:
                newtable = db[tablename]
                newrows = db(newtable.id > 0).select(newtable.id)
                oldtable = db_bak[tablename]
                oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                         oldtable[fieldname])
                oldvals = oldrows.as_dict()
                for row in newrows:
                    _id = row.id
                    val = oldvals[_id][fieldname]
                    if not val:
                        continue
                    try:
                        # int() may raise on non-numeric legacy values
                        update_vars = {fieldname: int(val)}
                    except ValueError:
                        current.log.warning("S3Migrate: Unable to convert %s to an integer - skipping" % val)
                    else:
                        db(newtable.id == _id).update(**update_vars)

        if strbools:
            for tablename, fieldname in strbools:
                to_bool = self.to_bool
                newtable = db[tablename]
                newrows = db(newtable.id > 0).select(newtable.id)
                oldtable = db_bak[tablename]
                oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                         oldtable[fieldname])
                oldvals = oldrows.as_dict()
                for row in newrows:
                    _id = row.id
                    val = oldvals[_id][fieldname]
                    if not val:
                        continue
                    val = to_bool(val)
                    # NOTE(review): values converting to False are skipped
                    # (only truthy results are written back) - confirm intended
                    if val:
                        update_vars = {fieldname: val}
                        db(newtable.id == _id).update(**update_vars)

        db.commit()

    # -------------------------------------------------------------------------
    @staticmethod
    def to_bool(value):
        """
        Converts 'something' to boolean. Returns None for invalid formats.
            Possible True  values: 1, True, "1", "TRue", "yes", "y", "t"
            Possible False values: 0, False, "0", "faLse", "no", "n", "f", 0.0
        """
        val = str(value).lower()
        if val in ("yes", "y", "true", "t", "1"):
            return True
        elif val in ("no", "n", "false", "f", "0", "0.0"):
            return False
        else:
            return None

    # -------------------------------------------------------------------------
    def drop(self, tablename, fieldname):
        """
        Drop a field from a table, e.g. for when changing type
        """
        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # Not Supported: http://www.sqlite.org/lang_altertable.html
            # But also not required (for strints anyway)
            sql = ""
        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \
                dict(tablename=tablename, fieldname=fieldname)
        elif db_engine == "postgres":
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \
                dict(tablename=tablename, fieldname=fieldname)
        # Fix: skip the deliberate sqlite no-op instead of executing ""
        if sql:
            try:
                db.executesql(sql)
            except Exception:
                import sys
                sys.stderr.write("%s\n" % sys.exc_info()[1])

        # Modify the .table file
        table = db[tablename]
        fields = []
        for fn in table.fields:
            if fn == fieldname:
                continue
            fields.append(table[fn])
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # -------------------------------------------------------------------------
    def ondelete(self, tablename, fieldname, reftable, ondelete):
        """
        Modify the ondelete constraint for a foreign key
        """
        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue
            # Fix: ensure fk is always bound for the error message below
            # (the postgres branch never assigned it -> NameError on failure)
            fk = fieldname
            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError
            elif db_engine == "mysql":
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                # Extract the auto-generated FK name from the CREATE statement
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                # Fix: multiple actions in one ALTER TABLE are comma-separated;
                # the original embedded a second "ALTER TABLE ..." mid-statement
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`, ADD CONSTRAINT %(fk)s FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s(id) ON DELETE %(ondelete)s;" % \
                    dict(tablename=tablename,
                         fk=fk,
                         fieldname=fieldname,
                         reftable=reftable,
                         ondelete=ondelete)
            elif db_engine == "postgres":
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                # Fix: same comma-separated multi-action correction as above
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey, ADD CONSTRAINT %(tablename)s_%(fieldname)s_fkey FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s ON DELETE %(ondelete)s;" % \
                    dict(tablename=tablename,
                         fieldname=fieldname,
                         reftable=reftable,
                         ondelete=ondelete)
            try:
                executesql(sql)
            except Exception:
                print("Error: Table %s with FK %s" % (tablename, fk))
                import sys
                sys.stderr.write("%s\n" % sys.exc_info()[1])

    # -------------------------------------------------------------------------
    def remove_foreign(self, tablename, fieldname):
        """
        Remove a Foreign Key constraint from a table
        """
        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue
            # Fix: ensure fk is always bound for the error message below
            # (the postgres branch never assigned it -> NameError on failure)
            fk = fieldname
            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError
            elif db_engine == "mysql":
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                # Extract the auto-generated FK name from the CREATE statement
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \
                    dict(tablename=tablename, fk=fk)
            elif db_engine == "postgres":
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey;" % \
                    dict(tablename=tablename, fieldname=fieldname)
            try:
                executesql(sql)
            except Exception:
                print("Error: Table %s with FK %s" % (tablename, fk))
                import sys
                sys.stderr.write("%s\n" % sys.exc_info()[1])

    # -------------------------------------------------------------------------
    def remove_unique(self, tablename, fieldname):
        """
        Remove a Unique Index from a table
        """
        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # @ToDo: http://www.sqlite.org/lang_altertable.html
            raise NotImplementedError
        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE `%(tablename)s` DROP INDEX `%(fieldname)s`;" % \
                dict(tablename=tablename, fieldname=fieldname)
        elif db_engine == "postgres":
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_key;" % \
                dict(tablename=tablename, fieldname=fieldname)
        try:
            db.executesql(sql)
        except Exception:
            import sys
            sys.stderr.write("%s\n" % sys.exc_info()[1])

        # Modify the .table file
        table = db[tablename]
        fields = []
        for fn in table.fields:
            field = table[fn]
            if fn == fieldname:
                field.unique = False
            fields.append(field)
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # =========================================================================
    # OLD CODE below here
    # - There are tests for these in /tests/dbmigration
    # -------------------------------------------------------------------------
    def rename_field(self,
                     tablename,
                     fieldname_old,
                     fieldname_new,
                     attributes_to_copy=None):
        """
        Rename a field, while keeping the other properties of the field the
        same. If there are some indexes on that table, these will be
        recreated and other constraints will remain unchanged too.

        @param tablename : name of the table in which the field is renamed
        @param fieldname_old : name of the original field before renaming
        @param fieldname_new : name of the field after renaming
        @param attributes_to_copy : list of attributes which need to be
                                    copied from the old_field to the new_field
                                    (needed only in sqlite)
        """
        db = self.db
        db_engine = self.db_engine

        if db_engine == "sqlite":
            # Helpers defined elsewhere in this module
            self._add_renamed_fields(db, tablename, fieldname_old,
                                     fieldname_new, attributes_to_copy)
            self._copy_field(db, tablename, fieldname_old, fieldname_new)
            sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \
                tablename
            list_index = db.executesql(sql)
            # Loop-invariant: hoisted out of the loop
            search_str = "%s(%s)" % (tablename, fieldname_old)
            for element in list_index:
                if element[0] is not None and search_str in element[0]:
                    sql = "CREATE INDEX %s__idx on %s(%s);" % \
                        (fieldname_new, tablename, fieldname_new)
                    try:
                        db.executesql(sql)
                    except Exception:
                        # Index may already exist: best-effort
                        pass

        elif db_engine == "mysql":
            field = db[tablename][fieldname_old]
            sql_type = map_type_web2py_to_sql(field.type)
            sql = "ALTER TABLE %s CHANGE %s %s %s(%s)" % (tablename,
                                                          fieldname_old,
                                                          fieldname_new,
                                                          sql_type,
                                                          field.length)
            db.executesql(sql)

        elif db_engine == "postgres":
            sql = "ALTER TABLE %s RENAME COLUMN %s TO %s" % \
                (tablename, fieldname_old, fieldname_new)
            db.executesql(sql)

    # -------------------------------------------------------------------------
    def rename_table(self, tablename_old, tablename_new):
        """
        Rename a table. If any fields reference that table, they will be
        handled too.

        @param tablename_old : name of the original table before renaming
        @param tablename_new : name of the table after renaming
        """
        try:
            sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old,
                                                    tablename_new)
            self.db.executesql(sql)
        except Exception as e:
            print(e)
db.define_table('MoneyType',Field('CreationTime'),Field('IsDelete'),Field('Name')) db.define_table('ProjectCode',Field('CreationTime'),Field('EmployeeId'),Field('IsDelete'),Field('Option1'),Field('Option2'),Field('Option3'),Field('ProjectNumber'),Field('ProjectTypeId'),Field('ProtocolId')) db.define_table('ProjectResource',Field('CreationTime'),Field('IsDelete'),Field('Name')) db.define_table('Project',Field('Assistant'),Field('BuyerId'),Field('ChargeRate'),Field('CreationDate'),Field('EmployeeId'),Field('EntrustMoney'),Field('IsDelete'),Field('MakeOutDate'),Field('ManagementStyleId'),Field('Note'),Field('Package'),Field('ProjectCodeId'),Field('ProjectName'),Field('ProjectSourceId'),Field('ProjectTypeId'),Field('ProtocolCodeId'),Field('SigningDate'),Field('SourcesOfFundingId'),Field('StateId'),Field('WinningCompany'),Field('WinningMoney')) db.define_table('ProjectStatus',Field('CreationTime'),Field('IsDelete'),Field('Name')) db.define_table('ProtocolCode',Field('CreationTime'),Field('EmployeeId'),Field('IsDelete'),Field('ProtocolNumber'),Field('TypeId')) db.define_table('ProtocolCodeType',Field('TypeCode'),Field('TypeId'),Field('TypeName')) db.define_table('Suggest',Field('Content'),Field('CreationTime'),Field('IsDelete'),Field('UserId')) db.define_table('Task',Field('CreationDate'),Field('Deadline'),Field('EmployeeId'),Field('IsDelete'),Field('Note'),Field('PlaceId'),Field('ProjectId'),Field('StateId'),Field('TitleId')) db.define_table('TaskLocation',Field('CreationTime'),Field('IsDelete'),Field('Name')) db.define_table('TaskStatus',Field('CreationTime'),Field('IsDelete'),Field('Name')) db.define_table('TypeOfTask',Field('CreationDate'),Field('IsDelete'),Field('Name'),Field('OrderId')) 
db.define_table('ProjectPackage',Field('ChargeRate'),Field('CreationDate'),Field('EntrustMoney'),Field('IsDelete'),Field('MakeOutDate'),Field('Note'),Field('PackageName'),Field('PackageNumber'),Field('ProjectId'),Field('SigningDate'),Field('StateId'),Field('WinningCompany'),Field('WinningMoney')) print db.executesql(u'select * from [zhaobiao].[dbo].[Zbgg]', as_dict=True) def update(): table_name = 'ProtocolCode' row = {'TypeId': 111121, 'EmployeeId': 700, 'CreationTime': '01/01/16 20:08', 'ProtocolNumber': '11111111111111', 'IsDelete': True, 'id': 6L} if table_name == 'ProtocolCode': id = '6' db(db['ProtocolCode']._id == id).update(**{'TypeId':row['TypeId']}) db(db['ProtocolCode']._id == id).update(**{'EmployeeId':row['EmployeeId']}) db(db['ProtocolCode']._id == id).update(**{'CreationTime':row['CreationTime']}) db(db['ProtocolCode']._id == id).update(**{'ProtocolNumber':row['ProtocolNumber']}) db(db['ProtocolCode']._id == id).update(**{'IsDelete':row['IsDelete']}) row = db(db['ProtocolCode']._id ==id).select().first() print row db.commit()
class S3Migration(object):
    """
        Database Migration Toolkit
        - used to help migrate both a production database on a server
          and also an offline client

        Normally run from a script in web2py context, but without models loaded:
            cd web2py
            python web2py.py -S eden -R <script.py>

        Where script looks like:
            m = local_import("s3migration")
            migrate = m.S3Migration()
            #migrate.pull()
            migrate.prep(foreigns=[],
                         moves=[],
                         news=[],
                         ondeletes=[],
                         strbools=[],
                         strints=[],
                         uniques=[],
                         )
            migrate.migrate()
            migrate.compile()
            migrate.post(moves=[],
                         news=[],
                         strbools=[],
                         strints=[],
                         )

        FYI: If you need to access a filename in eden/databases/ then here is how:
            import hashlib
            (db_string, pool_size) = settings.get_database_string()
            prefix = hashlib.md5(db_string).hexdigest()
            filename = "%s_%s.table" % (prefix, tablename)

        FYI: To view all constraints on a table in MySQL:
            SHOW CREATE TABLE tablename;
        or
            select COLUMN_NAME, CONSTRAINT_NAME, REFERENCED_COLUMN_NAME, REFERENCED_TABLE_NAME
            from information_schema.KEY_COLUMN_USAGE
            where TABLE_NAME = 'module_resourcename';

        @ToDo: Function to ensure that roles match those in prepop
        @ToDo: Function to do selective additional prepop
    """

    def __init__(self):
        """
            Bootstrap enough of the web2py/Eden environment to obtain a DAL
            handle and deployment settings, without loading the models.
        """

        # Load s3cfg
        import s3cfg
        settings = s3cfg.S3Config()
        # Pass into template
        current.deployment_settings = settings
        # Read settings
        request = current.request
        model = "%s/models/000_config.py" % request.folder
        code = getcfs(model, model, None)
        response = current.response
        # Needed as some Templates look at this & we don't wish to crash:
        response.s3 = Storage()
        # Global variables for 000_config.py
        environment = build_environment(request, response,
                                        current.session)
        environment["settings"] = settings
        # Some (older) 000_config.py also use "deployment_settings":
        environment["deployment_settings"] = settings
        # For backwards-compatibility with older 000_config.py:
        #def template_path():
        #    # When you see this warning, you should update 000_config.py
        #    # See: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Templates/Migration#Changesin000_config.py
        #    print "template_path() is deprecated, please update 000_config.py"
        #    # Return just any valid path to make sure the path-check succeeds,
        #    # => modern S3Config will find the template itself
        #    return request.folder
        #environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage
        # Execute 000_config.py
        restricted(code, environment, layer=model)
        # Kept so run_model() can execute the models in the same environment
        self.environment = environment

        self.db_engine = settings.get_database_type()
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      # @ToDo: Set to False until we migrate
                      migrate_enabled=True,
                      )

    # -------------------------------------------------------------------------
    def prep(self, foreigns=None,
                   moves=None,
                   news=None,
                   ondeletes=None,
                   strbools=None,
                   strints=None,
                   uniques=None,
                   ):
        """
            Preparation before migration

            @param foreigns : List of tuples (tablename, fieldname) to have the
                              foreign keys removed
                              - if tablename == "all" then all tables are checked
            @param moves : List of dicts {tablename: [(fieldname, new_tablename, link_fieldname)]}
                           to move a field from 1 table to another
                           - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname)
            @param news : List of dicts {new_tablename: {'lookup_field': '',
                                                         'tables': [tablename: [fieldname]],
                                                         'supers': [tablename: [fieldname]],
                                                         } to create new records from 1 or more old tables (inc all instances of an SE)
                          - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname)
            @param ondeletes : List of tuples [(tablename, fieldname, reftable, ondelete)]
                               to have the ondelete modified to
            @param strbools : List of tuples [(tablename, fieldname)] to convert from string/integer to bools
            @param strints : List of tuples [(tablename, fieldname)] to convert from string to integer
            @param uniques : List of tuples
                             [(tablename, fieldname)] to have the unique indices removed,
        """

        # Stash the conversion/move specs: backup() decides what to copy from
        # them, and post() reads them back to restore the data afterwards
        self.moves = moves
        self.news = news
        self.strbools = strbools
        self.strints = strints
        # Backup current database
        self.backup()

        if foreigns:
            # Remove Foreign Key constraints which need to go in next code
            for tablename, fieldname in foreigns:
                self.remove_foreign(tablename, fieldname)

        if uniques:
            # Remove Unique indices which need to go in next code
            for tablename, fieldname in uniques:
                self.remove_unique(tablename, fieldname)

        if ondeletes:
            # Modify ondeletes
            for tablename, fieldname, reftable, ondelete in ondeletes:
                self.ondelete(tablename, fieldname, reftable, ondelete)

        # Remove fields which need to be altered in next code
        if strbools:
            for tablename, fieldname in strbools:
                self.drop(tablename, fieldname)
        if strints:
            for tablename, fieldname in strints:
                self.drop(tablename, fieldname)

        self.db.commit()

    # -------------------------------------------------------------------------
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
                   too long for a large DB
        """

        moves = self.moves
        news = self.news
        strints = self.strints
        strbools = self.strbools
        if not moves and not news and not strbools and not strints:
            # Nothing to backup
            return

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            shutil.rmtree(folder)
            import time
            # Give the filesystem a moment before recreating the folder
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        # foreign_keys off so tables can be created/loaded in any order
        db_bak = DAL("sqlite://backup.db", folder=folder,
                     adapter_args={"foreign_keys": False})

        # Copy Table structure
        # Tables whose references can't be resolved yet get retried below
        skip = []
        for tablename in db.tables:
            if tablename == "gis_location":
                # the_geom (PostGIS geometry) can't be represented in SQLite
                table = db[tablename]
                fields = [table[field] for field in table.fields
                                       if field != "the_geom"]
                try:
                    db_bak.define_table(tablename, *fields)
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
            else:
                try:
                    db_bak.define_table(tablename,
db[tablename]) except KeyError: # Can't resolve reference yet # Cleanup del db_bak[tablename] # Try later skip.append(tablename) while skip: _skip = [] for tablename in skip: if tablename == "gis_location": table = db[tablename] fields = [table[field] for field in table.fields if field != "the_geom"] try: db_bak.define_table(tablename, *fields) except KeyError: # Can't resolve reference yet # Cleanup del db_bak[tablename] # Try later _skip.append(tablename) except: import sys print "Skipping %s: %s" % (tablename, sys.exc_info()[1]) else: try: db_bak.define_table(tablename, db[tablename]) except KeyError: # Can't resolve reference yet # Cleanup del db_bak[tablename] # Try later _skip.append(tablename) except: import sys print "Skipping %s: %s" % (tablename, sys.exc_info()[1]) skip = _skip # Which tables do we need to backup? tables = [] if moves: for tablename in moves: tables.append(tablename) if news: for tablename in news: new = news[tablename] for t in new["tables"]: tables.append(t) for s in new["supers"]: tables.append(s) stable = db[s] rows = db(stable._id > 0).select(stable.instance_type) instance_types = set([r.instance_type for r in rows]) for t in instance_types: tables.append(t) if strbools: for tablename, fieldname in strints: tables.append(tablename) if strints: for tablename, fieldname in strints: tables.append(tablename) # Remove duplicates tables = set(tables) # Copy Data import csv csv.field_size_limit(2**20 * 100) # 100 megs for tablename in tables: filename = "%s/%s.csv" % (folder, tablename) file = open(filename, "w") rows = db(db[tablename].id > 0).select() rows.export_to_csv_file(file) file.close() file = open(filename, "r") db_bak[tablename].import_from_csv_file(file, unique="uuid2") # uuid2 designed to not hit! 
file.close() db_bak.commit() # Pass handle back to other functions self.db_bak = db_bak # ------------------------------------------------------------------------- def pull(self, version=None): """ Update the Eden code """ #if GITPYTHON: #else: #import s3log #s3log.S3Log.setup() #current.log.warning("GitPython not installed, will need to call out to Git via CLI") # Copy the current working directory to revert back to later cwd = os.getcwd() # Change to the Eden folder folder = current.request.folder os.chdir(os.path.join(cwd, folder)) # Remove old compiled code remove_compiled_application(folder) # Reset to remove any hotfixes subprocess.call(["git", "reset", "--hard", "HEAD"]) # Store the current version old_version = subprocess.check_output(["git", "describe", "--always", "HEAD"]) self.old_version = old_version.strip() # Pull subprocess.call(["git", "pull"]) if version: # Checkout this version subprocess.call(["git", "checkout", version]) # Change back os.chdir(cwd) # ------------------------------------------------------------------------- def find_script(self): """ Find the upgrade script(s) to run """ old_version = self.old_version if not old_version: # Nothing we can do return # Find the current version new_version = subprocess.check_output(["git", "describe", "--always", "HEAD"]) new_version = new_version.strip() # Look for a script to the current version path = os.path.join(request.folder, "static", "scripts", "upgrade") # ------------------------------------------------------------------------- def run_model(self): """ Execute all the models/ """ if not hasattr(current, "db"): run_models_in(self.environment) # ------------------------------------------------------------------------- def compile(self): """ Compile the Eden code """ # Load the base Model self.run_model() from gluon.fileutils import up request = current.request os_path = os.path join = os_path.join # Pass View Templates to Compiler settings = current.deployment_settings s3 = 
current.response.s3 s3.views = views = {} s3.theme = theme = settings.get_theme() if theme != "default": folder = request.folder location = settings.get_template_location() exists = os_path.exists for view in ["create.html", #"delete.html", "display.html", "iframe.html", "list.html", "list_filter.html", "map.html", #"merge.html", "plain.html", "popup.html", "profile.html", "report.html", #"review.html", "summary.html", #"timeplot.html", "update.html", ]: if exists(join(folder, location, "templates", theme, "views", "_%s" % view)): views[view] = "../%s/templates/%s/views/_%s" % (location, theme, view) def apath(path="", r=None): """ Builds a path inside an application folder Parameters ---------- path: path within the application folder r: the global request object """ opath = up(r.folder) while path[:3] == "../": (opath, path) = (up(opath), path[3:]) return join(opath, path).replace("\\", "/") folder = apath(request.application, request) compile_application(folder) # ------------------------------------------------------------------------- def migrate(self): """ Perform an automatic database migration """ # Load the base model self.run_model() # Load all conditional models current.s3db.load_all_models() # ------------------------------------------------------------------------- def post(self, moves=None, news=None, strbools=None, strints=None, ): """ Cleanup after migration @param moves : List of dicts {tablename: [(fieldname, new_tablename, link_fieldname)]} to move a field from 1 table to another - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname) @param news : List of dicts {new_tablename: {'lookup_field': '', 'tables': [tablename: [fieldname]], 'supers': [tablename: [fieldname]], } to create new records from 1 or more old tables (inc all instances of an SE) - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname) @param strbools : List of tuples [(tablename, fieldname)] to convert from string/integer to bools 
@param strints : List of tuples [(tablename, fieldname)] to convert from string to integer """ db = self.db # @ToDo: Do prepops of new tables # Restore data from backup folder = "%s/databases/backup" % current.request.folder db_bak = DAL("sqlite://backup.db", folder=folder, auto_import=True, migrate=False) if moves: for tablename in moves: table = db_bak[tablename] fieldname, new_tablename, link_fieldname = moves[tablename] if isinstance(fieldname, (tuple, list)): fieldname, new_fieldname = fieldname else: new_fieldname = fieldname old_field = table[fieldname] new_linkfield = db[new_tablename][link_fieldname] rows = db_bak(table._id > 0).select(old_field, link_fieldname) for row in rows: update_vars = {} update_vars[new_fieldname] = row[old_field] db(new_linkfield == row[link_fieldname]).update(**update_vars) if news: for tablename in news: # Read Data data = {} new = news[tablename] lookup_field = new["lookup_field"] _tables = new["tables"] for t in _tables: fields = _tables[t] # @ToDo: Support tuples #for f in fields: # if isinstance(f, (tuple, list)): table = db_bak[t] table_fields = [table[f] for f in fields] rows = db_bak(table.deleted == False).select(table[lookup_field], *table_fields) for row in rows: record_id = row[lookup_field] if record_id in data: _new = False _data = data[record_id] else: _new = True _data = {} for f in fields: if f in row: if row[f] not in ("", None): # JSON type doesn't like "" _data[f] = row[f] if _new: data[record_id] = _data for s in new["supers"]: fields = new["supers"][s] # @ToDo: Support tuples #for f in fields: # if isinstance(f, (tuple, list)): stable = db_bak[s] superkey = stable._id.name rows = db_bak(stable.deleted == False).select(stable._id, stable.instance_type) for row in rows: etable = db_bak[row["instance_type"]] _fields = [f for f in fields if f in etable.fields] table_fields = [etable[f] for f in _fields] record = db_bak(etable[superkey] == row[superkey]).select(etable[lookup_field], *table_fields ).first() if 
record: record_id = record[lookup_field] if record_id in data: _new = False _data = data[record_id] else: _new = True _data = {} for f in _fields: if f in record: if record[f] not in ("", None): # JSON type doesn't like "" _data[f] = record[f] if _new: data[record_id] = _data # Create Records table = db[tablename] for record_id in data: update_vars = data[record_id] if update_vars: update_vars[lookup_field] = record_id # Can't rely on the defaults as auto_import doesn't see DAL defaults update_vars["created_on"] = datetime.datetime.utcnow() update_vars["deleted"] = False update_vars["mci"] = 0 update_vars["modified_on"] = datetime.datetime.utcnow() update_vars["uuid"] = uuid4().urn # Would always be identical otherwise table.insert(**update_vars) if strints: for tablename, fieldname in strints: newtable = db[tablename] newrows = db(newtable.id > 0).select(newtable.id) oldtable = db_bak[tablename] oldrows = db_bak(oldtable.id > 0).select(oldtable.id, oldtable[fieldname]) oldvals = oldrows.as_dict() for row in newrows: _id = row.id val = oldvals[_id][fieldname] if not val: continue try: update_vars = {fieldname : int(val)} except: current.log.warning("S3Migrate: Unable to convert %s to an integer - skipping" % val) else: db(newtable.id == _id).update(**update_vars) if strbools: for tablename, fieldname in strbools: to_bool = self.to_bool newtable = db[tablename] newrows = db(newtable.id > 0).select(newtable.id) oldtable = db_bak[tablename] oldrows = db_bak(oldtable.id > 0).select(oldtable.id, oldtable[fieldname]) oldvals = oldrows.as_dict() for row in newrows: _id = row.id val = oldvals[_id][fieldname] if not val: continue val = to_bool(val) if val: update_vars = {fieldname : val} db(newtable.id == _id).update(**update_vars) db.commit() # ------------------------------------------------------------------------- @staticmethod def to_bool(value): """ Converts 'something' to boolean. 
Raises exception for invalid formats Possible True values: 1, True, "1", "TRue", "yes", "y", "t" Possible False values: 0, False, "0", "faLse", "no", "n", "f", 0.0 """ val = str(value).lower() if val in ("yes", "y", "true", "t", "1"): return True elif val in ("no", "n", "false", "f", "0", "0.0"): return False else: return None # ------------------------------------------------------------------------- def drop(self, tablename, fieldname): """ Drop a field from a table e.g. for when changing type """ db = self.db db_engine = self.db_engine # Modify the database if db_engine == "sqlite": # Not Supported: http://www.sqlite.org/lang_altertable.html # But also not required (for strints anyway) sql = "" elif db_engine == "mysql": # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \ dict(tablename=tablename, fieldname=fieldname) elif db_engine == "postgres": # http://www.postgresql.org/docs/9.3/static/sql-altertable.html sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \ dict(tablename=tablename, fieldname=fieldname) try: db.executesql(sql) except: import sys e = sys.exc_info()[1] print >> sys.stderr, e # Modify the .table file table = db[tablename] fields = [] for fn in table.fields: if fn == fieldname: continue fields.append(table[fn]) db.__delattr__(tablename) db.tables.remove(tablename) db.define_table(tablename, *fields, # Rebuild the .table file from this definition fake_migrate=True) # ------------------------------------------------------------------------- def ondelete(self, tablename, fieldname, reftable, ondelete): """ Modify the ondelete constraint for a foreign key """ db = self.db db_engine = self.db_engine executesql = db.executesql if tablename == "all": tables = db.tables else: tables = [tablename] for tablename in tables: if fieldname not in db[tablename].fields: continue # Modify the database if db_engine == "sqlite": # @ToDo: http://www.sqlite.org/lang_altertable.html 
raise NotImplementedError elif db_engine == "mysql": # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1] fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop() if "`" in fk: fk = fk.split("`")[0] sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`, ALTER TABLE %(tablename)s ADD CONSTRAINT %(fk)s FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s(id) ON DELETE %(ondelete)s;" % \ dict(tablename=tablename, fk=fk, fieldname=fieldname, reftable=reftable, ondelete=ondelete) elif db_engine == "postgres": # http://www.postgresql.org/docs/9.3/static/sql-altertable.html sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey, ALTER TABLE %(tablename)s ADD CONSTRAINT %(tablename)s_%(fieldname)s_fkey FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s ON DELETE %(ondelete)s;" % \ dict(tablename=tablename, fieldname=fieldname, reftable=reftable, ondelete=ondelete) try: executesql(sql) except: print "Error: Table %s with FK %s" % (tablename, fk) import sys e = sys.exc_info()[1] print >> sys.stderr, e # ------------------------------------------------------------------------- def remove_foreign(self, tablename, fieldname): """ Remove a Foreign Key constraint from a table """ db = self.db db_engine = self.db_engine executesql = db.executesql if tablename == "all": tables = db.tables else: tables = [tablename] for tablename in tables: if fieldname not in db[tablename].fields: continue # Modify the database if db_engine == "sqlite": # @ToDo: http://www.sqlite.org/lang_altertable.html raise NotImplementedError elif db_engine == "mysql": # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1] fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop() if "`" in fk: fk = fk.split("`")[0] sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \ 
dict(tablename=tablename, fk=fk) elif db_engine == "postgres": # http://www.postgresql.org/docs/9.3/static/sql-altertable.html sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey;" % \ dict(tablename=tablename, fieldname=fieldname) try: executesql(sql) except: print "Error: Table %s with FK %s" % (tablename, fk) import sys e = sys.exc_info()[1] print >> sys.stderr, e # ------------------------------------------------------------------------- def remove_unique(self, tablename, fieldname): """ Remove a Unique Index from a table """ db = self.db db_engine = self.db_engine # Modify the database if db_engine == "sqlite": # @ToDo: http://www.sqlite.org/lang_altertable.html raise NotImplementedError elif db_engine == "mysql": # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html sql = "ALTER TABLE `%(tablename)s` DROP INDEX `%(fieldname)s`;" % \ dict(tablename=tablename, fieldname=fieldname) elif db_engine == "postgres": # http://www.postgresql.org/docs/9.3/static/sql-altertable.html sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_key;" % \ dict(tablename=tablename, fieldname=fieldname) try: db.executesql(sql) except: import sys e = sys.exc_info()[1] print >> sys.stderr, e # Modify the .table file table = db[tablename] fields = [] for fn in table.fields: field = table[fn] if fn == fieldname: field.unique = False fields.append(field) db.__delattr__(tablename) db.tables.remove(tablename) db.define_table(tablename, *fields, # Rebuild the .table file from this definition fake_migrate=True) # ========================================================================= # OLD CODE below here # - There are tests for these in /tests/dbmigration # ------------------------------------------------------------------------- def rename_field(self, tablename, fieldname_old, fieldname_new, attributes_to_copy=None): """ Rename a field, while keeping the other properties of the field the same. 
If there are some indexes on that table, these will be recreated and other constraints will remain unchanged too. @param tablename : name of the table in which the field is renamed @param fieldname_old : name of the original field before renaming @param fieldname_new : name of the field after renaming @param attributes_to_copy : list of attributes which need to be copied from the old_field to the new_field (needed only in sqlite) """ db = self.db db_engine = self.db_engine if db_engine == "sqlite": self._add_renamed_fields(db, tablename, fieldname_old, fieldname_new, attributes_to_copy) self._copy_field(db, tablename, fieldname_old, fieldname_new) sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \ tablename list_index = db.executesql(sql) for element in list_index: search_str = "%s(%s)" % (tablename, fieldname_old) if element[0] is not None and search_str in element[0]: sql = "CREATE INDEX %s__idx on %s(%s);" % \ (fieldname_new, tablename, fieldname_new) try: db.executesql(sql) except: pass elif db_engine == "mysql": field = db[tablename][fieldname_old] sql_type = map_type_web2py_to_sql(field.type) sql = "ALTER TABLE %s CHANGE %s %s %s(%s)" % (tablename, fieldname_old, fieldname_new, sql_type, field.length) db.executesql(sql) elif db_engine == "postgres": sql = "ALTER TABLE %s RENAME COLUMN %s TO %s" % \ (tablename, fieldname_old, fieldname_new) db.executesql(sql) # ------------------------------------------------------------------------- def rename_table(self, tablename_old, tablename_new): """ Rename a table. If any fields reference that table, they will be handled too. @param tablename_old : name of the original table before renaming @param tablename_new : name of the table after renaming """ try: sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old, tablename_new) self.db.executesql(sql) except Exception, e: print e
import sys sys.path.append('/home/www-data/web2py') from gluon import DAL, Field db4=DAL('postgres://*****:*****@localhost:5433/vpak',migrate_enabled=False, pool_size=20, lazy_tables=True, fake_migrate_all=False) db4.define_table('geocode', Field('zip', 'string'), Field('city', 'string'), Field('state','string'), Field('longitude','string'), Field('latitude','string')) flat = '88.10' flon = '47.1' fradius = 50 rows = db4.executesql("select city,longitude,latitude, radians(cast(latitude as float)),radians({0}), radians({1}) from geocode where city like 'Bolingbrook'".format(flat, flon)) print rows[1]
else ("Open" if not row.project.isclosed() else("Funded" if row.project.isfunded() else "Not funded")))), Field.Method('hascontributed', lambda row: db((db.booting.openprojectid == row.project.idproject) & ( db.booting.userid == auth.user_id)).count() > 0), Field.Method('pledges', lambda row: db((db.pledge.projectid == row.project.idproject) & joinpledgestats).select( db.pledge.ALL, db.pledgestat.ALL, orderby=db.pledge.value)), Field.Method('expectedrewards', lambda row: groupby(db( (db.expectedrewards.projectid == row.project.idproject) & ( db.user.id == db.expectedrewards.userid)).select(db.expectedrewards.ALL, db.user.ALL), key=lambda item: item.user.username)), Field.Method('rewards', lambda row: db(db.reward.projectid == row.project.idproject).select()), Field.Method('rawdelete', lambda row: db.executesql('DELETE FROM project WHERE idproject={0}'.format(row.project.idproject, ))) ) """ Reward Fields: IdReward (integer) PRIMARY KEY AUTO INCREMENT, Description (text), ProjectId (project) Nothing interesting here, project ID is set to reference the project. added a validator to ensure that the project exists, but this is already handled through the FK constraint in the DB. FK constraint is to cascade on delete as projects can be deleted, all rewards/pledges should be deleted too CREATE TABLE Reward ( IdReward INTEGER PRIMARY KEY AUTOINCREMENT
class S3Migration(object): """ Database Migration Toolkit - will be useful to migrate both a production database on a server and also an offline client Normally run from a script in web2py context, but without models loaded: cd web2py python web2py.py -S eden -R <script.py> Where script looks like: m = local_import("s3migration") migrate = m.S3Migration() migrate.prep() migrate.migrate() migrate.post() """ def __init__(self): request = current.request # Load s3cfg name = "applications.%s.modules.s3cfg" % request.application s3cfg = __import__(name) for item in name.split(".")[1:]: # Remove the dot s3cfg = getattr(s3cfg, item) settings = s3cfg.S3Config() # Pass into template current.deployment_settings = settings # Read settings model = "%s/models/000_config.py" % request.folder code = getcfs(model, model, None) environment = build_environment(request, current.response, current.session) environment["settings"] = settings def template_path(): " Return the path of the Template config.py to load " path = os.path.join(request.folder, "private", "templates", settings.get_template(), "config.py") return path environment["template_path"] = template_path environment["os"] = os environment["Storage"] = Storage restricted(code, environment, layer=model) self.db_engine = settings.get_database_type() (db_string, pool_size) = settings.get_database_string() # Get a handle to the database self.db = DAL( db_string, #folder="%s/databases" % request.folder, auto_import=True, # @ToDo: Set to False until we migrate migrate_enabled=True, ) # ------------------------------------------------------------------------- def prep(self, foreigns=[], uniques=[]): """ Preparation before migration @param foreigns : List of tuples (tablename, fieldname) to have the foreign keys removed - if tablename == "all" then all tables are checked @param uniques : List of tuples (tablename, fieldname) to have the unique indices removed """ # Backup current database self.backup() # Remove Foreign Key 
constraints which need to go in next code for tablename, fieldname in foreigns: self.remove_foreign(tablename, fieldname) # Remove Unique indices which need to go in next code for tablename, fieldname in uniques: self.remove_unique(tablename, fieldname) self.db.commit() # ------------------------------------------------------------------------- def post(self): """ Cleanup after migration @ToDo """ # Do prepops of new tables # Copy data pass # ------------------------------------------------------------------------- def migrate(self): """ Perform the migration @ToDo """ # Update code: git pull # run_models_in(environment) # or # Set migrate=True in models/000_config.py # current.s3db.load_all_models() via applications/eden/static/scripts/tools/noop.py # Set migrate=False in models/000_config.py pass # ------------------------------------------------------------------------- def backup(self): """ Backup the database to a local SQLite database """ import os db = self.db folder = "%s/databases/backup" % current.request.folder # Create clean folder for the backup if os.path.exists(folder): import shutil shutil.rmtree(folder) import time time.sleep(1) os.mkdir(folder) # Setup backup database db_bak = DAL("sqlite://backup.db", folder=folder) # Copy Table structure for tablename in db.tables: if tablename == "gis_location": table = db[tablename] fields = [ table[field] for field in table.fields if field != "the_geom" ] db_bak.define_table(tablename, *fields) else: db_bak.define_table(tablename, db[tablename]) # Copy Data import csv csv.field_size_limit(2**20 * 100) # 100 megs filename = "%s/data.csv" % folder file = open(filename, "w") db.export_to_csv_file(file) file.close() file = open(filename, "r") db_bak.import_from_csv_file(file, unique="uuid2") # designed to fail file.close() db_bak.commit() # Pass handle back to other functions self.db_bak = db_bak # ------------------------------------------------------------------------- def remove_foreign(self, tablename, 
fieldname): """ Remove a Foreign Key constraint from a table """ db = self.db db_engine = self.db_engine executesql = db.executesql if tablename == "all": tables = db.tables else: tables = [tablename] for tablename in tables: if fieldname not in db[tablename].fields: continue # Modify the database if db_engine == "sqlite": # @ToDo: raise NotImplementedError elif db_engine == "mysql": # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1] fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop() if "`" in fk: fk = fk.split("`")[0] sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \ dict(tablename=tablename, fk=fk) elif db_engine == "postgres": # @ToDo: raise NotImplementedError try: executesql(sql) except: print "Error: Table %s with FK %s" % (tablename, fk) import sys e = sys.exc_info()[1] print >> sys.stderr, e # ------------------------------------------------------------------------- def remove_unique(self, tablename, fieldname): """ Remove a Unique Index from a table """ db = self.db db_engine = self.db_engine # Modify the database if db_engine == "sqlite": # @ToDo: raise NotImplementedError elif db_engine == "mysql": # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html sql = "ALTER TABLE `%(tablename)s` DROP INDEX `%(fieldname)s`;" % \ dict(tablename=tablename, fieldname=fieldname) elif db_engine == "postgres": sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_key;" % \ dict(tablename=tablename, fieldname=fieldname) try: db.executesql(sql) except: import sys e = sys.exc_info()[1] print >> sys.stderr, e # Modify the .table file table = db[tablename] fields = [] for fn in table.fields: field = table[fn] if fn == fieldname: field.unique = False fields.append(field) db.__delattr__(tablename) db.tables.remove(tablename) db.define_table( tablename, *fields, # Rebuild the .table file from this definition fake_migrate=True) # 
========================================================================= # OLD CODE below here # - There are tests for this in /tests/dbmigration # ------------------------------------------------------------------------- def rename_field(self, tablename, fieldname_old, fieldname_new, attributes_to_copy=None): """ Rename a field, while keeping the other properties of the field the same. If there are some indexes on that table, these will be recreated and other constraints will remain unchanged too. @param tablename : name of the table in which the field is renamed @param fieldname_old : name of the original field before renaming @param fieldname_new : name of the field after renaming @param attributes_to_copy : list of attributes which need to be copied from the old_field to the new_field (needed only in sqlite) """ db = self.db db_engine = self.db_engine if db_engine == "sqlite": self._add_renamed_fields(db, tablename, fieldname_old, fieldname_new, attributes_to_copy) self._copy_field(db, tablename, fieldname_old, fieldname_new) sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \ tablename list_index = db.executesql(sql) for element in list_index: search_str = "%s(%s)" % (tablename, fieldname_old) if element[0] is not None and search_str in element[0]: sql = "CREATE INDEX %s__idx on %s(%s);" % \ (fieldname_new, tablename, fieldname_new) try: db.executesql(sql) except: pass elif db_engine == "mysql": field = db[tablename][fieldname_old] sql_type = map_type_web2py_to_sql(field.type) sql = "ALTER TABLE %s CHANGE %s %s %s(%s)" % ( tablename, fieldname_old, fieldname_new, sql_type, field.length) db.executesql(sql) elif db_engine == "postgres": sql = "ALTER TABLE %s RENAME COLUMN %s TO %s" % \ (tablename, fieldname_old, fieldname_new) db.executesql(sql) # ------------------------------------------------------------------------- def rename_table(self, tablename_old, tablename_new): """ Rename a table. 
If any fields reference that table, they will be handled too. @param tablename_old : name of the original table before renaming @param tablename_new : name of the table after renaming """ try: sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old, tablename_new) self.db.executesql(sql) except Exception, e: print e
class S3Migration(object): """ Database Migration Toolkit - will be useful to migrate both a production database on a server and also an offline client Normally run from a script in web2py context, but without models loaded: cd web2py python web2py.py -S eden -R <script.py> Where script looks like: m = local_import("s3migration") migrate = m.S3Migration() migrate.prep() migrate.migrate() migrate.post() """ def __init__(self): request= current.request # Load s3cfg name = "applications.%s.modules.s3cfg" % request.application s3cfg = __import__(name) for item in name.split(".")[1:]: # Remove the dot s3cfg = getattr(s3cfg, item) settings = s3cfg.S3Config() # Pass into template current.deployment_settings = settings # Read settings model = "%s/models/000_config.py" % request.folder code = getcfs(model, model, None) environment = build_environment(request, current.response, current.session) environment["settings"] = settings def template_path(): " Return the path of the Template config.py to load " path = os.path.join(request.folder, "private", "templates", settings.get_template(), "config.py") return path environment["template_path"] = template_path environment["os"] = os environment["Storage"] = Storage restricted(code, environment, layer=model) self.db_engine = settings.get_database_type() (db_string, pool_size) = settings.get_database_string() # Get a handle to the database self.db = DAL(db_string, #folder="%s/databases" % request.folder, auto_import=True, # @ToDo: Set to False until we migrate migrate_enabled=True, ) # ------------------------------------------------------------------------- def prep(self, foreigns=[], uniques=[]): """ Preparation before migration @param foreigns : List of tuples (tablename, fieldname) to have the foreign keys removed - if tablename == "all" then all tables are checked @param uniques : List of tuples (tablename, fieldname) to have the unique indices removed """ # Backup current database self.backup() # Remove Foreign Key constraints 
which need to go in next code for tablename, fieldname in foreigns: self.remove_foreign(tablename, fieldname) # Remove Unique indices which need to go in next code for tablename, fieldname in uniques: self.remove_unique(tablename, fieldname) self.db.commit() # ------------------------------------------------------------------------- def post(self): """ Cleanup after migration @ToDo """ # Do prepops of new tables # Copy data pass # ------------------------------------------------------------------------- def migrate(self): """ Perform the migration @ToDo """ # Update code: git pull # run_models_in(environment) # or # Set migrate=True in models/000_config.py # current.s3db.load_all_models() via applications/eden/static/scripts/tools/noop.py # Set migrate=False in models/000_config.py pass # ------------------------------------------------------------------------- def backup(self): """ Backup the database to a local SQLite database """ import os db = self.db folder = "%s/databases/backup" % current.request.folder # Create clean folder for the backup if os.path.exists(folder): import shutil shutil.rmtree(folder) import time time.sleep(1) os.mkdir(folder) # Setup backup database db_bak = DAL("sqlite://backup.db", folder=folder) # Copy Table structure for tablename in db.tables: if tablename == "gis_location": table = db[tablename] fields = [table[field] for field in table.fields if field != "the_geom"] db_bak.define_table(tablename, *fields) else: db_bak.define_table(tablename, db[tablename]) # Copy Data import csv csv.field_size_limit(2**20 * 100) # 100 megs filename = "%s/data.csv" % folder file = open(filename, "w") db.export_to_csv_file(file) file.close() file = open(filename, "r") db_bak.import_from_csv_file(file, unique="uuid2") # designed to fail file.close() db_bak.commit() # Pass handle back to other functions self.db_bak = db_bak # ------------------------------------------------------------------------- def remove_foreign(self, tablename, fieldname): """ 
Remove a Foreign Key constraint from a table """ db = self.db db_engine = self.db_engine executesql = db.executesql if tablename == "all": tables = db.tables else: tables = [tablename] for tablename in tables: if fieldname not in db[tablename].fields: continue # Modify the database if db_engine == "sqlite": # @ToDo: raise NotImplementedError elif db_engine == "mysql": # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1] fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop() if "`" in fk: fk = fk.split("`")[0] sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \ dict(tablename=tablename, fk=fk) elif db_engine == "postgres": # @ToDo: raise NotImplementedError try: executesql(sql) except: print "Error: Table %s with FK %s" % (tablename, fk) import sys e = sys.exc_info()[1] print >> sys.stderr, e # ------------------------------------------------------------------------- def remove_unique(self, tablename, fieldname): """ Remove a Unique Index from a table """ db = self.db db_engine = self.db_engine # Modify the database if db_engine == "sqlite": # @ToDo: raise NotImplementedError elif db_engine == "mysql": # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html sql = "ALTER TABLE `%(tablename)s` DROP INDEX `%(fieldname)s`;" % \ dict(tablename=tablename, fieldname=fieldname) elif db_engine == "postgres": sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_key;" % \ dict(tablename=tablename, fieldname=fieldname) try: db.executesql(sql) except: import sys e = sys.exc_info()[1] print >> sys.stderr, e # Modify the .table file table = db[tablename] fields = [] for fn in table.fields: field = table[fn] if fn == fieldname: field.unique = False fields.append(field) db.__delattr__(tablename) db.tables.remove(tablename) db.define_table(tablename, *fields, # Rebuild the .table file from this definition fake_migrate=True) # 
========================================================================= # OLD CODE below here # - There are tests for this in /tests/dbmigration # ------------------------------------------------------------------------- def rename_field(self, tablename, fieldname_old, fieldname_new, attributes_to_copy=None): """ Rename a field, while keeping the other properties of the field the same. If there are some indexes on that table, these will be recreated and other constraints will remain unchanged too. @param tablename : name of the table in which the field is renamed @param fieldname_old : name of the original field before renaming @param fieldname_new : name of the field after renaming @param attributes_to_copy : list of attributes which need to be copied from the old_field to the new_field (needed only in sqlite) """ db = self.db db_engine = self.db_engine if db_engine == "sqlite": self._add_renamed_fields(db, tablename, fieldname_old, fieldname_new, attributes_to_copy) self._copy_field(db, tablename, fieldname_old, fieldname_new) sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \ tablename list_index = db.executesql(sql) for element in list_index: search_str = "%s(%s)" % (tablename, fieldname_old) if element[0] is not None and search_str in element[0]: sql = "CREATE INDEX %s__idx on %s(%s);" % \ (fieldname_new, tablename, fieldname_new) try: db.executesql(sql) except: pass elif db_engine == "mysql": field = db[tablename][fieldname_old] sql_type = map_type_web2py_to_sql(field.type) sql = "ALTER TABLE %s CHANGE %s %s %s(%s)" % (tablename, fieldname_old, fieldname_new, sql_type, field.length) db.executesql(sql) elif db_engine == "postgres": sql = "ALTER TABLE %s RENAME COLUMN %s TO %s" % \ (tablename, fieldname_old, fieldname_new) db.executesql(sql) # ------------------------------------------------------------------------- def rename_table(self, tablename_old, tablename_new): """ Rename a table. 
If any fields reference that table, they will be handled too. @param tablename_old : name of the original table before renaming @param tablename_new : name of the table after renaming """ try: sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old, tablename_new) self.db.executesql(sql) except Exception, e: print e
class S3Migration(object): """ Database Migration Helper Class """ def __init__(self): # Read settings name = "applications.%s.modules.s3cfg" % current.request.application s3cfg = __import__(name) for item in name.split(".")[1:]: # Remove the dot s3cfg = getattr(s3cfg, item) settings = s3cfg.S3Config() self.db_engine = settings.get_database_type() (db_string, pool_size) = settings.get_database_string() # Get a handle to the database self.db = DAL(db_string, #folder="%s/databases" % request.folder, auto_import=True, migrate_enabled=True, migrate=True ) # ------------------------------------------------------------------------- def rename_field(self, tablename, fieldname_old, fieldname_new, attributes_to_copy=None): """ Rename a field, while keeping the other properties of the field the same. If there are some indexes on that table, these will be recreated and other constraints will remain unchanged too. @param tablename : name of the table in which the field is renamed @param fieldname_old : name of the original field before renaming @param fieldname_new : name of the field after renaming @param attributes_to_copy : list of attributes which need to be copied from the old_field to the new_field (needed only in sqlite) """ db = self.db db_engine = self.db_engine if db_engine == "sqlite": self._add_renamed_fields(db, tablename, fieldname_old, fieldname_new, attributes_to_copy) self._copy_field(db, tablename, fieldname_old, fieldname_new) sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \ tablename list_index = db.executesql(sql) for element in list_index: search_str = "%s(%s)" % (tablename, fieldname_old) if element[0] is not None and search_str in element[0]: sql = "CREATE INDEX %s__idx on %s(%s);" % \ (fieldname_new, tablename, fieldname_new) try: db.executesql(sql) except: pass elif db_engine == "mysql": field = db[tablename][fieldname_old] sql_type = map_type_web2py_to_sql(field.type) sql = "ALTER TABLE %s CHANGE %s %s 
%s(%s)" % (tablename, fieldname_old, fieldname_new, sql_type, field.length) db.executesql(sql) elif db_engine == "postgres": sql = "ALTER TABLE %s RENAME COLUMN %s TO %s" % \ (tablename, fieldname_old, fieldname_new) db.executesql(sql) # ------------------------------------------------------------------------- def rename_table(self, tablename_old, tablename_new): """ Rename a table. If any fields reference that table, they will be handled too. @param tablename_old : name of the original table before renaming @param tablename_new : name of the table after renaming """ try: sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old, tablename_new) self.db.executesql(sql) except Exception, e: print e
db = DAL('mysql://*****:*****@localhost/test',migrate_enabled=True) db.define_table('mytable', Field('temperature','double'),Field('pressure','double'),Field('dates','date'),\ Field('time','time'),format='%(content)s',migrate=True) print db._uri print db._dbname print db.mytable.fields #print db.field print db.tables # insert data json_string = "{ \ 'first': 1,\ 'second': 2,\ 'date': '2016-2-6',\ 'temperature': '37.5',\ 'unit': 'km'\ }"; #auth.define_tables(migrate=False) #db.mytable.insert(temperature = 36.5,pressure = 90) db.mytable.insert(temperature = 36.5,pressure = 90,dates = datetime.datetime.now().date(), \ time= datetime.datetime.now().time()) #db.mytable.insert() db.commit() #select rows = db.executesql('SELECT * FROM mytable ORDER BY id DESC LIMIT 5 ;') print rows rows = db().select(db.mytable.ALL) #print rows #print rows = db().select(db.mytable.ALL)