def copyDB(self):
    """
    Copy every table of the source database (self.db) into a freshly-built
    target database described by self.targetdbType / self.targetdbName /
    self.targetFolder, using a temporary CSV dump as the transport.

    Side effects: creates 'tmp.sql' in the working directory; commits the
    target DB. Running twice duplicates records (no uniqueness check).
    """
    other_db = DAL("%s://%s" % (self.targetdbType, self.targetdbName),
                   folder=self.targetFolder)
    print('creating tables...')
    for table in self.db:
        # Recreate each source table in the target with the same fields
        other_db.define_table(table._tablename,
                              *[field for field in table])
        # Possible enhancement: optionally truncate the target table here
        # (other_db[table._tablename].truncate()) if a self.truncate flag
        # were added to the constructor.
    print('exporting data...')
    # BUG FIX: the original left both tmp.sql handles open; context
    # managers guarantee they are closed even if export/import raises.
    with open('tmp.sql', 'wb') as dump:
        self.db.export_to_csv_file(dump)
    print('importing data...')
    with open('tmp.sql', 'rb') as dump:
        other_db.import_from_csv_file(dump)
    other_db.commit()
    print('done!')
    print('Attention: do not run this program again or you end up with duplicate records')
def dbf2sqlite():
    """
    Import every table of the FoxPro DBF data set (DATA_FOX) into a fresh
    SQLite database (upload.sqlite in DATA_SQLITE), then run optional
    post-import conversion from a local `myconversion` module if present.

    Table definitions in applications/viewer/modules/db_model.py and in
    fox/src/myconversion/myconversion.prg must be the same.
    """
    import sys
    db = DAL('sqlite://upload.sqlite', pool_size=1,
             check_reserved=['sqlite'], folder=DATA_SQLITE)
    db = get_model(db)
    for table in db.tables:
        sys.stdout.write(' table %s' % table)
        for row_upper_names in DBF(os.path.join(DATA_FOX, '%s.dbf' % table)):
            something = False
            row_lower_names = {}
            # .items() works on Python 2 and 3 (original used .iteritems())
            for k, v in row_upper_names.items():
                # FoxPro turns field names to uppercase; we prefer lowercase
                k = k.lower()
                if v is not None:  # k != 'id':
                    something = True
                    # BUG FIX: k was lower-cased twice; once is enough
                    row_lower_names[k] = v
            if something:
                db[table].insert(**row_lower_names)
        db.commit()
        sys.stdout.write(' - done\n')
    try:
        # are some import-post-actions defined?
        # ./myconversion.py : def myconversion(db):
        from myconversion import myconversion
        sys.stdout.write(' additional data conversion')
        myconversion(db)  # see www.web2py.com/book, chapter 6 - DAL
        db.commit()  # auto commit if you miss commit in myconversion()
        sys.stdout.write(' - done\n')
    except ImportError:
        pass
    db.close()
def copyDB(self):
    """
    Duplicate the source database (self.db) into a new target database
    built from self.targetdbType / self.targetdbName / self.targetFolder.

    The data travels through a temporary CSV dump ('tmp.sql'). Running
    this twice produces duplicate records — there is no de-duplication.
    """
    other_db = DAL("{0}://{1}".format(self.targetdbType, self.targetdbName),
                   folder=self.targetFolder)
    print('creating tables...')
    for table in self.db:
        # Clone each table definition into the target database
        other_db.define_table(table._tablename,
                              *[field for field in table])
        # Possible enhancement: truncate the target table here
        # (other_db[table._tablename].truncate()) behind a self.truncate
        # option, if destructive copies are ever wanted.
    tmp_path = 'tmp.sql'
    print('exporting data...')
    # BUG FIX: close the dump file handles deterministically (the
    # original opened them inline and never closed either one).
    with open(tmp_path, 'wb') as out_handle:
        self.db.export_to_csv_file(out_handle)
    print('importing data...')
    with open(tmp_path, 'rb') as in_handle:
        other_db.import_from_csv_file(in_handle)
    other_db.commit()
    print('done!')
    print('Attention: do not run this program again or you end up with duplicate records')
def append():
    """
    Open (or create) the tgmonitor SQLite database, ensure the tg_load
    table exists, and insert one sample row for trunk group 2.
    """
    db = DAL('sqlite://storage.sqlite',
             folder='/home/www-data/web2py/applications/tgmonitor/databases')
    db.define_table('tg_load',
                    Field('check_date', 'datetime'),
                    Field('tg_number', 'integer', notnull=True),
                    Field('busy', 'integer'),
                    Field('installed', 'integer'))
    # BUG FIX: check_date was inserted as '' which is not a valid datetime
    # value; stamp the row with the actual insertion time instead.
    # NOTE(review): confirm callers did not rely on the empty marker.
    db.tg_load.insert(check_date=datetime.datetime.now(),
                      tg_number=2, busy=45, installed=60)
    db.commit()
def backup(self):
    """
    Backup the database to a local SQLite database under
    <app>/databases/backup and keep the handle on self.db_bak.

    @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
           too long for a large DB
    """
    import os
    db = self.db
    folder = "%s/databases/backup" % current.request.folder
    # Create clean folder for the backup
    if os.path.exists(folder):
        import shutil
        shutil.rmtree(folder)
        import time
        # Give the filesystem a moment before re-creating the folder
        time.sleep(1)
    os.mkdir(folder)
    # Setup backup database
    db_bak = DAL("sqlite://backup.db", folder=folder)
    # Copy Table structure
    for tablename in db.tables:
        if tablename == "gis_location":
            # Drop the geometry column, which SQLite can't represent
            table = db[tablename]
            fields = [table[field] for field in table.fields
                      if field != "the_geom"]
            db_bak.define_table(tablename, *fields)
        else:
            db_bak.define_table(tablename, db[tablename])
    # Copy Data
    import csv
    csv.field_size_limit(2**20 * 100)  # 100 megs
    filename = "%s/data.csv" % folder
    # BUG FIX: use context managers (and don't shadow the builtin `file`)
    # so the handles are closed even when export/import raises.
    with open(filename, "w") as outfile:
        db.export_to_csv_file(outfile)
    with open(filename, "r") as infile:
        db_bak.import_from_csv_file(infile, unique="uuid2")  # designed to fail
    db_bak.commit()
    # Pass handle back to other functions
    self.db_bak = db_bak
class Config(object):
    """Build and read config on GAE or config.cfg file."""

    def __init__(self, autogetconfig=True):
        """Open the config DB (GAE datastore or SQLite) and define tables."""
        from gluon import current
        self.request = current.request
        self.ongae = self.request.env.web2py_runtime_gae
        self.T = current.T
        self.cache = current.cache
        from gluon import DAL
        if self.ongae:
            self._db = DAL("google:datastore")
        else:
            self._db = DAL("sqlite://config_movuca.sqlite")
        self.define_tables()
        if autogetconfig:
            self.get_config()

    def define_tables(self):
        """Define one config table per public attribute of datamodel Setup."""
        from datamodel.setup import Setup
        setup = Setup()
        # BUG FIX: materialize as a list — on Python 3 filter() returns a
        # one-shot iterator, but self.tables is iterated by several methods.
        self.tables = list(filter(lambda item: not item.startswith("_"),
                                  dir(setup)))
        for table in self.tables:
            entity = self._db.define_table(
                table,
                *setup.__getattribute__(table),
                **dict(migrate="config_%s.table" % table)
            )
            self.__setattr__(table, entity)

    def set_default_config(self):
        """Insert a default (timestamped) row into every config table."""
        now = self.request.now
        for table in self.tables:
            self.__getattribute__(table).insert(setuptime=now)
        self._db.commit()

    def get_config(self, expire=300):
        """Cache-read the latest row of each config table onto self."""
        for table in self.tables:
            result = self._db(self._db[table]).select(
                cache=(self.cache.ram, expire)).last()
            # e.g. table "auth_settings" becomes attribute "auth"
            self.__setattr__(table.split("_")[0], result)

    def get_list(self, table, option):
        """
        Return a config option (a list of "key:label" strings) as a list
        of (key, label) pairs; labels are translated unless the option is
        a mailhide one.
        """
        options = self.__getattribute__(table)[option]
        assert isinstance(options, list)
        # Renamed the loop variable (was `option`, shadowing the parameter)
        if "mailhide" not in option:
            return [(opt.split(":")[0], str(self.T(opt.split(":")[1])))
                    for opt in options]
        else:
            return [(opt.split(":")[0], str(opt.split(":")[1]))
                    for opt in options]
def backup(self):
    """
    Dump the current database into a throwaway SQLite copy located in
    <app>/databases/backup and expose the handle as self.db_bak.

    @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
           too long for a large DB
    """
    import os
    source = self.db
    backup_folder = "%s/databases/backup" % current.request.folder
    # Always start from an empty backup folder
    if os.path.exists(backup_folder):
        import shutil
        shutil.rmtree(backup_folder)
        import time
        time.sleep(1)
    os.mkdir(backup_folder)
    # Target database
    db_bak = DAL("sqlite://backup.db", folder=backup_folder)
    # Clone the schema; gis_location loses its geometry column
    for tablename in source.tables:
        if tablename != "gis_location":
            db_bak.define_table(tablename, source[tablename])
        else:
            location = source[tablename]
            db_bak.define_table(tablename,
                                *[location[fn] for fn in location.fields
                                  if fn != "the_geom"])
    # Clone the data via a CSV round-trip
    import csv
    csv.field_size_limit(2**20 * 100)  # 100 megs
    csv_path = "%s/data.csv" % backup_folder
    dump = open(csv_path, "w")
    source.export_to_csv_file(dump)
    dump.close()
    load = open(csv_path, "r")
    db_bak.import_from_csv_file(load, unique="uuid2")  # designed to fail
    load.close()
    db_bak.commit()
    # Pass handle back to other functions
    self.db_bak = db_bak
# Find the most recent sample for trunk group 44 and carry its 'busy'
# value forward into a new row stamped with the current time.
last = db.tg_load.check_date.max()
last_date = db().select(last).first()[last]
my_result = db((db.tg_load.check_date == last_date) &
               (db.tg_load.tg_number == '44')).select()
last_busy = None
for row in my_result:
    last_busy = row.busy  # keeps the value of the last matching row
# BUG FIX: the insert referenced the undefined name `_busy`; the value
# read above is `last_busy`.
# NOTE(review): `_ins_num` is still a free name — presumably defined
# earlier in this cron script; confirm.
db.tg_load.insert(check_date=datetime.datetime.now(), tg_number=44,
                  busy=last_busy, installed=_ins_num)
#db.tg_load.insert(check_date='', tg_number=3, busy=45, installed=60)
db.commit()

# Draw diagram: create a blank white canvas on first run, then reuse it
import PIL, Image, ImageDraw, ImageFont
width = 800
height = 420
png_path = '/home/www-data/web2py/applications/tgmonitor/cron/44.png'
if not os.path.isfile(png_path):
    im = Image.new('RGB', (width, height), (255, 255, 255))
    im.save(png_path)
im = Image.open(png_path)
draw = ImageDraw.Draw(im)
# Re-home filesystem error tickets into the ticket database, then sleep
# and repeat forever.
db_path = os.path.join(request.folder, 'databases')
tk_db = DAL(db_string, folder=db_path, auto_import=True)
ts = TicketStorage(db=tk_db)
tk_table = ts._get_table(db=tk_db, tablename=ts.tablename,
                         app=request.application)
hashes = {}
while 1:
    if request.tickets_db:
        # Tickets already go straight to the DB; nothing for us to do
        print("You're storing tickets yet in database")
        sys.exit(1)
    for entry in os.listdir(errors_path):
        filename = os.path.join(errors_path, entry)
        modified_time = os.stat(filename)[stat.ST_MTIME]
        modified_time = datetime.datetime.fromtimestamp(modified_time)
        ticket_id = entry
        # BUG FIX: close the ticket file after reading (the original
        # leaked the handle on every iteration); also renamed the loop
        # variable `file`, which shadowed the builtin.
        with open(filename, encoding='UTF-8') as ticket_file:
            ticket_data = ticket_file.read()
        tk_table.insert(ticket_id=ticket_id,
                        ticket_data=ticket_data,
                        created_datetime=modified_time)
        tk_db.commit()
        os.unlink(filename)
    time.sleep(SLEEP_MINUTES * 60)
# Define the changed table (plus its extra fields) and the new integer
# lookup table in the temporary database, then seed the lookup table from
# the distinct values of the old column and point type_id at the new rows.
list_of_new_table_fields = [Field(new_table_field, "integer")]
try:
    db[changed_table]._primarykey
except KeyError:
    db[changed_table]._primarykey = None
temp_db.define_table(changed_table,
                     db[changed_table],
                     *list_of_fields,
                     primarykey=db[changed_table]._primarykey)
temp_db.define_table(new_table, *list_of_new_table_fields)
temp_db.commit()

# Add a new field of int type in a class
changed = temp_db[changed_table]
lookup = temp_db[new_table]
for old_row in temp_db().select(changed[old_field]):
    value = old_row[old_field]
    # Only create a lookup row the first time we see this value
    if len(temp_db(lookup[new_table_field] == value).select()) == 0:
        inserted = lookup.insert(name=value)
        new_id = int(inserted["id"])
        temp_db(changed[old_field] == value).update(type_id=new_id)
temp_db.commit()
# END =========================================================================
# Open the application's databases folder with migrations enabled, then
# run the migration: add an integer column to the changed table and build
# a lookup table keyed by the old column's values.
old_database_folder = "%s/applications/%s/databases" % (WEB2PY_PATH, APP)
temp_db = DAL(database_string,
              folder=old_database_folder,
              migrate_enabled=True,
              migrate=True)

#MIGRATION SCRIPT
list_of_fields = [Field(new_field, "integer")]
list_of_new_table_fields = [Field(new_table_field, "integer")]
try:
    db[changed_table]._primarykey
except KeyError:
    db[changed_table]._primarykey = None
temp_db.define_table(changed_table,
                     db[changed_table],
                     *list_of_fields,
                     primarykey=db[changed_table]._primarykey)
temp_db.define_table(new_table, *list_of_new_table_fields)
temp_db.commit()

#Adding a new field of int type in a class
for old_row in temp_db().select(temp_db[changed_table][old_field]):
    matches = temp_db(
        temp_db[new_table][new_table_field] == old_row[old_field]).select()
    if not len(matches):
        row = temp_db[new_table].insert(name=old_row[old_field])
        new_id = int(row["id"])
        temp_db(
            temp_db[changed_table][old_field] == old_row[old_field]
        ).update(type_id=new_id)
temp_db.commit()
def backup(self):
    """
    Backup (parts of) the database to a local SQLite database under
    <app>/databases/backup, so that post() can restore moved/converted
    data. Only runs when there is something to back up (moves/news/
    strbools/strints). Leaves the handle on self.db_bak.

    @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
           too long for a large DB
    """
    moves = self.moves
    news = self.news
    strints = self.strints
    strbools = self.strbools
    if not moves and not news and not strbools and not strints:
        # Nothing to backup
        return

    import os
    db = self.db
    folder = "%s/databases/backup" % current.request.folder

    # Create clean folder for the backup
    if os.path.exists(folder):
        import shutil
        shutil.rmtree(folder)
        import time
        time.sleep(1)
    os.mkdir(folder)

    # Setup backup database (FK checks off: tables arrive out of order)
    db_bak = DAL("sqlite://backup.db", folder=folder,
                 adapter_args={"foreign_keys": False})

    # Copy Table structure; tables whose references can't be resolved yet
    # are retried until the skip list stops shrinking
    skip = []
    for tablename in db.tables:
        if tablename == "gis_location":
            # Drop the geometry column, which SQLite can't represent
            table = db[tablename]
            fields = [table[field] for field in table.fields
                      if field != "the_geom"]
            try:
                db_bak.define_table(tablename, *fields)
            except KeyError:
                # Can't resolve reference yet: cleanup & try later
                del db_bak[tablename]
                skip.append(tablename)
        else:
            try:
                db_bak.define_table(tablename, db[tablename])
            except KeyError:
                # Can't resolve reference yet: cleanup & try later
                del db_bak[tablename]
                skip.append(tablename)
    while skip:
        _skip = []
        for tablename in skip:
            if tablename == "gis_location":
                table = db[tablename]
                fields = [table[field] for field in table.fields
                          if field != "the_geom"]
                try:
                    db_bak.define_table(tablename, *fields)
                except KeyError:
                    # Can't resolve reference yet: cleanup & try later
                    del db_bak[tablename]
                    _skip.append(tablename)
                except Exception:
                    # Diagnostics to stderr (was a Py2 print statement)
                    import sys
                    sys.stderr.write("Skipping %s: %s\n"
                                     % (tablename, sys.exc_info()[1]))
            else:
                try:
                    db_bak.define_table(tablename, db[tablename])
                except KeyError:
                    # Can't resolve reference yet: cleanup & try later
                    del db_bak[tablename]
                    _skip.append(tablename)
                except Exception:
                    import sys
                    sys.stderr.write("Skipping %s: %s\n"
                                     % (tablename, sys.exc_info()[1]))
        skip = _skip

    # Which tables do we need to backup?
    tables = []
    if moves:
        for tablename in moves:
            tables.append(tablename)
    if news:
        for tablename in news:
            new = news[tablename]
            for t in new["tables"]:
                tables.append(t)
            for s in new["supers"]:
                tables.append(s)
                stable = db[s]
                rows = db(stable._id > 0).select(stable.instance_type)
                instance_types = set([r.instance_type for r in rows])
                for t in instance_types:
                    tables.append(t)
    if strbools:
        # BUG FIX: this loop previously iterated over strints (copy-paste),
        # so strbools tables were never backed up
        for tablename, fieldname in strbools:
            tables.append(tablename)
    if strints:
        for tablename, fieldname in strints:
            tables.append(tablename)
    # Remove duplicates
    tables = set(tables)

    # Copy Data
    import csv
    csv.field_size_limit(2**20 * 100)  # 100 megs
    for tablename in tables:
        filename = "%s/%s.csv" % (folder, tablename)
        # BUG FIX: use context managers so the CSV handles are closed
        # even when export/import raises
        with open(filename, "w") as outfile:
            rows = db(db[tablename].id > 0).select()
            rows.export_to_csv_file(outfile)
        with open(filename, "r") as infile:
            # uuid2 designed to not hit!
            db_bak[tablename].import_from_csv_file(infile, unique="uuid2")
    db_bak.commit()

    # Pass handle back to other functions
    self.db_bak = db_bak
class S3Migration(object):
    """
        Database Migration Toolkit
        - used to help migrate both a production database on a server
          and also an offline client

        Normally run from a script in web2py context, but without models loaded:
            cd web2py
            python web2py.py -S eden -R <script.py>

        Where script looks like:
            m = local_import("s3migration")
            migrate = m.S3Migration()
            migrate.prep(foreigns=[],
                         moves=[],
                         news=[],
                         ondeletes=[],
                         strbools=[],
                         strints=[],
                         uniques=[],
                         )
            #migrate.migrate()
            migrate.post(moves=[],
                         news=[],
                         strbools=[],
                         strints=[],
                         )

        FYI: If you need to access a filename in eden/databases/ then here is how:
            import hashlib
            (db_string, pool_size) = settings.get_database_string()
            prefix = hashlib.md5(db_string).hexdigest()
            filename = "%s_%s.table" % (prefix, tablename)

        FYI: To view all constraints on a table in MySQL:
            SHOW CREATE TABLE tablename;
        or
            select COLUMN_NAME, CONSTRAINT_NAME, REFERENCED_COLUMN_NAME, REFERENCED_TABLE_NAME
            from information_schema.KEY_COLUMN_USAGE
            where TABLE_NAME = 'module_resourcename';

        @ToDo: Function to ensure that roles match those in prepop
        @ToDo: Function to do selective additional prepop

        NOTE(review): this class uses Python 2-only syntax (print statements,
        ``print >> sys.stderr``, ``except Exception, e``) and cannot run
        unmodified on Python 3.
    """

    def __init__(self):
        # Bootstrap a web2py/eden environment by hand (models not loaded):
        # read 000_config.py into a restricted environment, then open the DB.
        request = current.request
        # Load s3cfg => but why do this so complicated?
        #name = "applications.%s.modules.s3cfg" % request.application
        #s3cfg = __import__(name)
        #for item in name.split(".")[1:]:
        #    # Remove the dot
        #    s3cfg = getattr(s3cfg, item)
        #settings = s3cfg.S3Config()
        # Can use normal import here since executed in web2py environment:
        import s3cfg
        settings = s3cfg.S3Config()
        # Pass into template
        current.deployment_settings = settings
        # Read settings
        model = "%s/models/000_config.py" % request.folder
        code = getcfs(model, model, None)
        response = current.response
        # Needed as some Templates look at this & we don't wish to crash:
        response.s3 = Storage()
        # Global variables for 000_config.py
        environment = build_environment(request, response, current.session)
        environment["settings"] = settings
        # Some (older) 000_config.py also use "deployment_settings":
        environment["deployment_settings"] = settings
        # For backwards-compatibility with older 000_config.py:
        #def template_path():
        #    # When you see this warning, you should update 000_config.py
        #    # See: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Templates/Migration#Changesin000_config.py
        #    print "template_path() is deprecated, please update 000_config.py"
        #    # Return just any valid path to make sure the path-check succeeds,
        #    # => modern S3Config will find the template itself
        #    return request.folder
        #environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage
        # Execute 000_config.py
        restricted(code, environment, layer=model)
        self.db_engine = settings.get_database_type()
        (db_string, pool_size) = settings.get_database_string()
        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      # @ToDo: Set to False until we migrate
                      migrate_enabled=True,
                      )

    # -------------------------------------------------------------------------
    def prep(self, foreigns=None,
                   moves=None,
                   news=None,
                   ondeletes=None,
                   strbools=None,
                   strints=None,
                   uniques=None,
                   ):
        """
            Preparation before migration

            @param foreigns : List of tuples (tablename, fieldname) to have
                              the foreign keys removed
                              - if tablename == "all" then all tables are checked
            @param moves    : List of dicts {tablename: [(fieldname, new_tablename, link_fieldname)]}
                              to move a field from 1 table to another
                              - fieldname can be a tuple if the fieldname changes:
                                (fieldname, new_fieldname)
            @param news     : List of dicts {new_tablename: {'lookup_field': '',
                                                             'tables': [tablename: [fieldname]],
                                                             'supers': [tablename: [fieldname]],
                                                             }
                              to create new records from 1 or more old tables
                              (inc all instances of an SE)
                              - fieldname can be a tuple if the fieldname changes:
                                (fieldname, new_fieldname)
            @param ondeletes : List of tuples [(tablename, fieldname, reftable, ondelete)]
                               to have the ondelete modified to
            @param strbools : List of tuples [(tablename, fieldname)] to convert
                              from string/integer to bools
            @param strints  : List of tuples [(tablename, fieldname)] to convert
                              from string to integer
            @param uniques  : List of tuples [(tablename, fieldname)] to have
                              the unique indices removed,
        """
        # Backup current database
        self.moves = moves
        self.news = news
        self.strbools = strbools
        self.strints = strints
        self.backup()
        if foreigns:
            # Remove Foreign Key constraints which need to go in next code
            for tablename, fieldname in foreigns:
                self.remove_foreign(tablename, fieldname)
        if uniques:
            # Remove Unique indices which need to go in next code
            for tablename, fieldname in uniques:
                self.remove_unique(tablename, fieldname)
        if ondeletes:
            # Modify ondeletes
            for tablename, fieldname, reftable, ondelete in ondeletes:
                self.ondelete(tablename, fieldname, reftable, ondelete)
        # Remove fields which need to be altered in next code
        if strbools:
            for tablename, fieldname in strbools:
                self.drop(tablename, fieldname)
        if strints:
            for tablename, fieldname in strints:
                self.drop(tablename, fieldname)
        self.db.commit()

    # -------------------------------------------------------------------------
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this
                   takes too long for a large DB
        """
        moves = self.moves
        news = self.news
        strints = self.strints
        strbools = self.strbools
        if not moves and not news and not strbools and not strints:
            # Nothing to backup
            return

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        # (foreign_keys off: tables are defined out of dependency order)
        db_bak = DAL("sqlite://backup.db", folder=folder,
                     adapter_args={"foreign_keys": False})

        # Copy Table structure
        skip = []
        for tablename in db.tables:
            if tablename == "gis_location":
                # Skip the geometry column (not portable to SQLite)
                table = db[tablename]
                fields = [table[field] for field in table.fields
                          if field != "the_geom"]
                try:
                    db_bak.define_table(tablename, *fields)
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
            else:
                try:
                    db_bak.define_table(tablename, db[tablename])
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
        while skip:
            _skip = []
            for tablename in skip:
                if tablename == "gis_location":
                    table = db[tablename]
                    fields = [table[field] for field in table.fields
                              if field != "the_geom"]
                    try:
                        db_bak.define_table(tablename, *fields)
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
                else:
                    try:
                        db_bak.define_table(tablename, db[tablename])
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
            skip = _skip

        # Which tables do we need to backup?
        tables = []
        if moves:
            for tablename in moves:
                tables.append(tablename)
        if news:
            for tablename in news:
                new = news[tablename]
                for t in new["tables"]:
                    tables.append(t)
                for s in new["supers"]:
                    tables.append(s)
                    stable = db[s]
                    rows = db(stable._id > 0).select(stable.instance_type)
                    instance_types = set([r.instance_type for r in rows])
                    for t in instance_types:
                        tables.append(t)
        if strbools:
            # NOTE(review): this iterates over strints, not strbools — looks
            # like a copy/paste bug (strbools tables never get backed up);
            # confirm the intended source list before relying on this.
            for tablename, fieldname in strints:
                tables.append(tablename)
        if strints:
            for tablename, fieldname in strints:
                tables.append(tablename)
        # Remove duplicates
        tables = set(tables)

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        for tablename in tables:
            filename = "%s/%s.csv" % (folder, tablename)
            file = open(filename, "w")
            rows = db(db[tablename].id > 0).select()
            rows.export_to_csv_file(file)
            file.close()
            file = open(filename, "r")
            db_bak[tablename].import_from_csv_file(file, unique="uuid2") # uuid2 designed to not hit!
            file.close()
        db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak

    # -------------------------------------------------------------------------
    def migrate(self):
        """
            Perform the migration

            @ToDo
        """
        # Update code: git pull
        # run_models_in(environment)
        # or
        # Set migrate=True in models/000_config.py
        # current.s3db.load_all_models() via applications/eden/static/scripts/tools/noop.py
        # Set migrate=False in models/000_config.py
        pass

    # -------------------------------------------------------------------------
    def post(self, moves=None,
                  news=None,
                  strbools=None,
                  strints=None,
                  ):
        """
            Cleanup after migration

            @param moves    : List of dicts {tablename: [(fieldname, new_tablename, link_fieldname)]}
                              to move a field from 1 table to another
                              - fieldname can be a tuple if the fieldname changes:
                                (fieldname, new_fieldname)
            @param news     : List of dicts {new_tablename: {'lookup_field': '',
                                                             'tables': [tablename: [fieldname]],
                                                             'supers': [tablename: [fieldname]],
                                                             }
                              to create new records from 1 or more old tables
                              (inc all instances of an SE)
                              - fieldname can be a tuple if the fieldname changes:
                                (fieldname, new_fieldname)
            @param strbools : List of tuples [(tablename, fieldname)] to convert
                              from string/integer to bools
            @param strints  : List of tuples [(tablename, fieldname)] to convert
                              from string to integer
        """
        db = self.db

        # @ToDo: Do prepops of new tables

        # Restore data from backup
        folder = "%s/databases/backup" % current.request.folder
        db_bak = DAL("sqlite://backup.db", folder=folder,
                     auto_import=True, migrate=False)

        if moves:
            # Move fields from their old table to the new one, matched via
            # the link field
            for tablename in moves:
                table = db_bak[tablename]
                fieldname, new_tablename, link_fieldname = moves[tablename]
                if isinstance(fieldname, (tuple, list)):
                    fieldname, new_fieldname = fieldname
                else:
                    new_fieldname = fieldname
                old_field = table[fieldname]
                new_linkfield = db[new_tablename][link_fieldname]
                rows = db_bak(table._id > 0).select(old_field, link_fieldname)
                for row in rows:
                    update_vars = {}
                    update_vars[new_fieldname] = row[old_field]
                    db(new_linkfield == row[link_fieldname]).update(**update_vars)

        if news:
            # Build new records by merging fields from old tables and all
            # instance tables of the listed super-entities
            for tablename in news:
                # Read Data
                data = {}
                new = news[tablename]
                lookup_field = new["lookup_field"]
                _tables = new["tables"]
                for t in _tables:
                    fields = _tables[t]
                    # @ToDo: Support tuples
                    #for f in fields:
                    #    if isinstance(f, (tuple, list)):
                    table = db_bak[t]
                    table_fields = [table[f] for f in fields]
                    rows = db_bak(table.deleted == False).select(table[lookup_field],
                                                                 *table_fields)
                    for row in rows:
                        record_id = row[lookup_field]
                        if record_id in data:
                            _new = False
                            _data = data[record_id]
                        else:
                            _new = True
                            _data = {}
                        for f in fields:
                            if f in row:
                                if row[f] not in ("", None):
                                    # JSON type doesn't like ""
                                    _data[f] = row[f]
                        if _new:
                            data[record_id] = _data
                for s in new["supers"]:
                    fields = new["supers"][s]
                    # @ToDo: Support tuples
                    #for f in fields:
                    #    if isinstance(f, (tuple, list)):
                    stable = db_bak[s]
                    superkey = stable._id.name
                    rows = db_bak(stable.deleted == False).select(stable._id,
                                                                  stable.instance_type)
                    for row in rows:
                        etable = db_bak[row["instance_type"]]
                        _fields = [f for f in fields if f in etable.fields]
                        table_fields = [etable[f] for f in _fields]
                        record = db_bak(etable[superkey] == row[superkey]).select(etable[lookup_field],
                                                                                  *table_fields
                                                                                  ).first()
                        if record:
                            record_id = record[lookup_field]
                            if record_id in data:
                                _new = False
                                _data = data[record_id]
                            else:
                                _new = True
                                _data = {}
                            for f in _fields:
                                if f in record:
                                    if record[f] not in ("", None):
                                        # JSON type doesn't like ""
                                        _data[f] = record[f]
                            if _new:
                                data[record_id] = _data
                # Create Records
                table = db[tablename]
                for record_id in data:
                    update_vars = data[record_id]
                    if update_vars:
                        update_vars[lookup_field] = record_id
                        # Can't rely on the defaults as auto_import doesn't
                        # see DAL defaults
                        update_vars["created_on"] = datetime.datetime.utcnow()
                        update_vars["deleted"] = False
                        update_vars["mci"] = 0
                        update_vars["modified_on"] = datetime.datetime.utcnow()
                        # Would always be identical otherwise
                        update_vars["uuid"] = uuid4().urn
                        table.insert(**update_vars)

        if strints:
            # Restore values converted from string to integer
            for tablename, fieldname in strints:
                newtable = db[tablename]
                newrows = db(newtable.id > 0).select(newtable.id)
                oldtable = db_bak[tablename]
                oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                         oldtable[fieldname])
                oldvals = oldrows.as_dict()
                for row in newrows:
                    _id = row.id
                    val = oldvals[_id][fieldname]
                    if not val:
                        continue
                    try:
                        update_vars = {fieldname : int(val)}
                    except:
                        current.log.warning("S3Migrate: Unable to convert %s to an integer - skipping" % val)
                    else:
                        db(newtable.id == _id).update(**update_vars)

        if strbools:
            # Restore values converted from string/integer to boolean
            for tablename, fieldname in strbools:
                to_bool = self.to_bool
                newtable = db[tablename]
                newrows = db(newtable.id > 0).select(newtable.id)
                oldtable = db_bak[tablename]
                oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                         oldtable[fieldname])
                oldvals = oldrows.as_dict()
                for row in newrows:
                    _id = row.id
                    val = oldvals[_id][fieldname]
                    if not val:
                        continue
                    val = to_bool(val)
                    if val:
                        update_vars = {fieldname : val}
                        db(newtable.id == _id).update(**update_vars)

        db.commit()

    # -------------------------------------------------------------------------
    @staticmethod
    def to_bool(value):
        """
            Converts 'something' to boolean. Raises exception for invalid formats
            Possible True  values: 1, True, "1", "TRue", "yes", "y", "t"
            Possible False values: 0, False, "0", "faLse", "no", "n", "f", 0.0

            NOTE(review): despite the docstring, unknown values return None
            rather than raising.
        """
        val = str(value).lower()
        if val in ("yes", "y", "true", "t", "1"):
            return True
        elif val in ("no", "n", "false", "f", "0", "0.0"):
            return False
        else:
            return None

    # -------------------------------------------------------------------------
    def drop(self, tablename, fieldname):
        """
            Drop a field from a table
            e.g. for when changing type
        """
        db = self.db
        db_engine = self.db_engine
        # Modify the database
        if db_engine == "sqlite":
            # Not Supported: http://www.sqlite.org/lang_altertable.html
            # But also not required (for strints anyway)
            sql = ""
        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \
                dict(tablename=tablename, fieldname=fieldname)
        elif db_engine == "postgres":
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \
                dict(tablename=tablename, fieldname=fieldname)
        try:
            db.executesql(sql)
        except:
            # Best-effort: log the DB error and continue to fix the .table file
            import sys
            e = sys.exc_info()[1]
            print >> sys.stderr, e
        # Modify the .table file
        table = db[tablename]
        fields = []
        for fn in table.fields:
            if fn == fieldname:
                continue
            fields.append(table[fn])
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # -------------------------------------------------------------------------
    def ondelete(self, tablename, fieldname, reftable, ondelete):
        """
            Modify the ondelete constraint for a foreign key
        """
        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql
        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]
        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue
            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError
            elif db_engine == "mysql":
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                # Parse the FK constraint name out of SHOW CREATE TABLE
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`, ALTER TABLE %(tablename)s ADD CONSTRAINT %(fk)s FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s(id) ON DELETE %(ondelete)s;" % \
                    dict(tablename=tablename, fk=fk, fieldname=fieldname, reftable=reftable, ondelete=ondelete)
            elif db_engine == "postgres":
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey, ALTER TABLE %(tablename)s ADD CONSTRAINT %(tablename)s_%(fieldname)s_fkey FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s ON DELETE %(ondelete)s;" % \
                    dict(tablename=tablename, fieldname=fieldname, reftable=reftable, ondelete=ondelete)
            try:
                executesql(sql)
            except:
                # NOTE(review): `fk` is unbound here on the postgres path
                print "Error: Table %s with FK %s" % (tablename, fk)
                import sys
                e = sys.exc_info()[1]
                print >> sys.stderr, e

    # -------------------------------------------------------------------------
    def remove_foreign(self, tablename, fieldname):
        """
            Remove a Foreign Key constraint from a table
        """
        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql
        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]
        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue
            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError
            elif db_engine == "mysql":
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                # Parse the FK constraint name out of SHOW CREATE TABLE
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \
                    dict(tablename=tablename, fk=fk)
            elif db_engine == "postgres":
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey;" % \
                    dict(tablename=tablename, fieldname=fieldname)
            try:
                executesql(sql)
            except:
                # NOTE(review): `fk` is unbound here on the postgres path
                print "Error: Table %s with FK %s" % (tablename, fk)
                import sys
                e = sys.exc_info()[1]
                print >> sys.stderr, e

    # -------------------------------------------------------------------------
    def remove_unique(self, tablename, fieldname):
        """
            Remove a Unique Index from a table
        """
        db = self.db
        db_engine = self.db_engine
        # Modify the database
        if db_engine == "sqlite":
            # @ToDo: http://www.sqlite.org/lang_altertable.html
            raise NotImplementedError
        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE `%(tablename)s` DROP INDEX `%(fieldname)s`;" % \
                dict(tablename=tablename, fieldname=fieldname)
        elif db_engine == "postgres":
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_key;" % \
                dict(tablename=tablename, fieldname=fieldname)
        try:
            db.executesql(sql)
        except:
            # Best-effort: log and continue to fix the .table file
            import sys
            e = sys.exc_info()[1]
            print >> sys.stderr, e
        # Modify the .table file
        table = db[tablename]
        fields = []
        for fn in table.fields:
            field = table[fn]
            if fn == fieldname:
                field.unique = False
            fields.append(field)
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # =========================================================================
    # OLD CODE below here
    # - There are tests for these in /tests/dbmigration
    # -------------------------------------------------------------------------
    def rename_field(self, tablename, fieldname_old, fieldname_new,
                     attributes_to_copy=None):
        """
            Rename a field, while keeping the other properties of the field
            the same. If there are some indexes on that table, these will be
            recreated and other constraints will remain unchanged too.

            @param tablename          : name of the table in which the field
                                        is renamed
            @param fieldname_old      : name of the original field before
                                        renaming
            @param fieldname_new      : name of the field after renaming
            @param attributes_to_copy : list of attributes which need to be
                                        copied from the old_field to the
                                        new_field (needed only in sqlite)
        """
        db = self.db
        db_engine = self.db_engine
        if db_engine == "sqlite":
            self._add_renamed_fields(db, tablename, fieldname_old,
                                     fieldname_new, attributes_to_copy)
            self._copy_field(db, tablename, fieldname_old, fieldname_new)
            # Recreate any indexes which referenced the old field name
            sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \
                tablename
            list_index = db.executesql(sql)
            for element in list_index:
                search_str = "%s(%s)" % (tablename, fieldname_old)
                if element[0] is not None and search_str in element[0]:
                    sql = "CREATE INDEX %s__idx on %s(%s);" % \
                        (fieldname_new, tablename, fieldname_new)
                    try:
                        db.executesql(sql)
                    except:
                        pass
        elif db_engine == "mysql":
            field = db[tablename][fieldname_old]
            sql_type = map_type_web2py_to_sql(field.type)
            sql = "ALTER TABLE %s CHANGE %s %s %s(%s)" % (tablename,
                                                          fieldname_old,
                                                          fieldname_new,
                                                          sql_type,
                                                          field.length)
            db.executesql(sql)
        elif db_engine == "postgres":
            sql = "ALTER TABLE %s RENAME COLUMN %s TO %s" % \
                (tablename, fieldname_old, fieldname_new)
            db.executesql(sql)

    # -------------------------------------------------------------------------
    def rename_table(self, tablename_old, tablename_new):
        """
            Rename a table. If any fields reference that table, they will be
            handled too.

            @param tablename_old : name of the original table before renaming
            @param tablename_new : name of the table after renaming
        """
        try:
            sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old,
                                                    tablename_new)
            self.db.executesql(sql)
        except Exception, e:
            # Best-effort: report the error and carry on (Py2 syntax)
            print e
def backup(self): """ Backup the database to a local SQLite database @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes too long for a large DB """ moves = self.moves news = self.news strints = self.strints strbools = self.strbools if not moves and not news and not strbools and not strints: # Nothing to backup return import os db = self.db folder = "%s/databases/backup" % current.request.folder # Create clean folder for the backup if os.path.exists(folder): shutil.rmtree(folder) import time time.sleep(1) os.mkdir(folder) # Setup backup database db_bak = DAL("sqlite://backup.db", folder=folder, adapter_args={"foreign_keys": False}) # Copy Table structure skip = [] for tablename in db.tables: if tablename == "gis_location": table = db[tablename] fields = [table[field] for field in table.fields if field != "the_geom"] try: db_bak.define_table(tablename, *fields) except KeyError: # Can't resolve reference yet # Cleanup del db_bak[tablename] # Try later skip.append(tablename) else: try: db_bak.define_table(tablename, db[tablename]) except KeyError: # Can't resolve reference yet # Cleanup del db_bak[tablename] # Try later skip.append(tablename) while skip: _skip = [] for tablename in skip: if tablename == "gis_location": table = db[tablename] fields = [table[field] for field in table.fields if field != "the_geom"] try: db_bak.define_table(tablename, *fields) except KeyError: # Can't resolve reference yet # Cleanup del db_bak[tablename] # Try later _skip.append(tablename) except: import sys print "Skipping %s: %s" % (tablename, sys.exc_info()[1]) else: try: db_bak.define_table(tablename, db[tablename]) except KeyError: # Can't resolve reference yet # Cleanup del db_bak[tablename] # Try later _skip.append(tablename) except: import sys print "Skipping %s: %s" % (tablename, sys.exc_info()[1]) skip = _skip # Which tables do we need to backup? 
tables = [] if moves: for tablename in moves: tables.append(tablename) if news: for tablename in news: new = news[tablename] for t in new["tables"]: tables.append(t) for s in new["supers"]: tables.append(s) stable = db[s] rows = db(stable._id > 0).select(stable.instance_type) instance_types = set([r.instance_type for r in rows]) for t in instance_types: tables.append(t) if strbools: for tablename, fieldname in strints: tables.append(tablename) if strints: for tablename, fieldname in strints: tables.append(tablename) # Remove duplicates tables = set(tables) # Copy Data import csv csv.field_size_limit(2**20 * 100) # 100 megs for tablename in tables: filename = "%s/%s.csv" % (folder, tablename) file = open(filename, "w") rows = db(db[tablename].id > 0).select() rows.export_to_csv_file(file) file.close() file = open(filename, "r") db_bak[tablename].import_from_csv_file(file, unique="uuid2") # uuid2 designed to not hit! file.close() db_bak.commit() # Pass handle back to other functions self.db_bak = db_bak
class S3Migration(object):
    """ Database Migration Toolkit
        - used to help migrate both a production database on a server
          and also an offline client

        Normally run from a script in web2py context, but without models loaded:
            cd web2py
            python web2py.py -S eden -R <script.py>

        Where script looks like:
            m = local_import("s3migration")
            migrate = m.S3Migration()
            #migrate.pull()
            migrate.prep(foreigns=[],
                         moves=[],
                         news=[],
                         ondeletes=[],
                         strbools=[],
                         strints=[],
                         uniques=[],
                         )
            migrate.migrate()
            migrate.compile()
            migrate.post(moves=[],
                         news=[],
                         strbools=[],
                         strints=[],
                         )

        FYI: If you need to access a filename in eden/databases/ then here is how:
            import hashlib
            (db_string, pool_size) = settings.get_database_string()
            prefix = hashlib.md5(db_string).hexdigest()
            filename = "%s_%s.table" % (prefix, tablename)

        FYI: To view all constraints on a table in MySQL:
            SHOW CREATE TABLE tablename;
        or
            select COLUMN_NAME, CONSTRAINT_NAME, REFERENCED_COLUMN_NAME,
                   REFERENCED_TABLE_NAME
            from information_schema.KEY_COLUMN_USAGE
            where TABLE_NAME = 'module_resourcename';

        @ToDo: Function to ensure that roles match those in prepop
        @ToDo: Function to do selective additional prepop
    """

    def __init__(self):
        # Load s3cfg
        import s3cfg
        settings = s3cfg.S3Config()
        # Pass into template
        current.deployment_settings = settings
        # Read settings
        request = current.request
        model = "%s/models/000_config.py" % request.folder
        code = getcfs(model, model, None)
        response = current.response
        # Needed as some Templates look at this & we don't wish to crash:
        response.s3 = Storage()
        # Global variables for 000_config.py
        environment = build_environment(request, response,
                                        current.session)
        environment["settings"] = settings
        # Some (older) 000_config.py also use "deployment_settings":
        environment["deployment_settings"] = settings
        # For backwards-compatibility with older 000_config.py:
        #def template_path():
        #    # When you see this warning, you should update 000_config.py
        #    # See: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Templates/Migration#Changesin000_config.py
        #    print "template_path() is deprecated, please update 000_config.py"
        #    # Return just any valid path to make sure the path-check succeeds,
        #    # => modern S3Config will find the template itself
        #    return request.folder
        #environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage
        # Execute 000_config.py
        restricted(code, environment, layer=model)
        self.environment = environment
        self.db_engine = settings.get_database_type()
        (db_string, pool_size) = settings.get_database_string()
        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      # @ToDo: Set to False until we migrate
                      migrate_enabled=True,
                      )

    # -------------------------------------------------------------------------
    def prep(self, foreigns=None,
             moves=None,
             news=None,
             ondeletes=None,
             strbools=None,
             strints=None,
             uniques=None,
             ):
        """
            Preparation before migration

            @param foreigns : List of tuples (tablename, fieldname) to have
                              the foreign keys removed
                              - if tablename == "all" then all tables are checked
            @param moves : List of dicts
                           {tablename: [(fieldname, new_tablename, link_fieldname)]}
                           to move a field from 1 table to another
                           - fieldname can be a tuple if the fieldname changes:
                             (fieldname, new_fieldname)
            @param news : List of dicts
                          {new_tablename: {'lookup_field': '',
                                           'tables': [tablename: [fieldname]],
                                           'supers': [tablename: [fieldname]],
                                           }
                          to create new records from 1 or more old tables
                          (inc all instances of an SE)
                          - fieldname can be a tuple if the fieldname changes:
                            (fieldname, new_fieldname)
            @param ondeletes : List of tuples
                               [(tablename, fieldname, reftable, ondelete)]
                               to have the ondelete modified to
            @param strbools : List of tuples [(tablename, fieldname)] to
                              convert from string/integer to bools
            @param strints : List of tuples [(tablename, fieldname)] to
                             convert from string to integer
            @param uniques : List of tuples [(tablename, fieldname)] to have
                             the unique indices removed,
        """
        # Backup current database
        # (stored on self so backup()/post() can see what to copy)
        self.moves = moves
        self.news = news
        self.strbools = strbools
        self.strints = strints
        self.backup()

        if foreigns:
            # Remove Foreign Key constraints which need to go in next code
            for tablename, fieldname in foreigns:
                self.remove_foreign(tablename, fieldname)

        if uniques:
            # Remove Unique indices which need to go in next code
            for tablename, fieldname in uniques:
                self.remove_unique(tablename, fieldname)

        if ondeletes:
            # Modify ondeletes
            for tablename, fieldname, reftable, ondelete in ondeletes:
                self.ondelete(tablename, fieldname, reftable, ondelete)

        # Remove fields which need to be altered in next code
        if strbools:
            for tablename, fieldname in strbools:
                self.drop(tablename, fieldname)
        if strints:
            for tablename, fieldname in strints:
                self.drop(tablename, fieldname)

        self.db.commit()

    # -------------------------------------------------------------------------
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this
                   takes too long for a large DB
        """
        moves = self.moves
        news = self.news
        strints = self.strints
        strbools = self.strbools
        if not moves and not news and not strbools and not strints:
            # Nothing to backup
            return

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            shutil.rmtree(folder)
            import time
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder,
                     adapter_args={"foreign_keys": False})

        # Copy Table structure
        skip = []
        for tablename in db.tables:
            if tablename == "gis_location":
                table = db[tablename]
                fields = [table[field] for field in table.fields
                          if field != "the_geom"]
                try:
                    db_bak.define_table(tablename, *fields)
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
            else:
                try:
                    db_bak.define_table(tablename,
                                        db[tablename])
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
        while skip:
            _skip = []
            for tablename in skip:
                if tablename == "gis_location":
                    table = db[tablename]
                    fields = [table[field] for field in table.fields
                              if field != "the_geom"]
                    try:
                        db_bak.define_table(tablename, *fields)
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
                else:
                    try:
                        db_bak.define_table(tablename,
                                            db[tablename])
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
            skip = _skip

        # Which tables do we need to backup?
        tables = []
        if moves:
            for tablename in moves:
                tables.append(tablename)
        if news:
            for tablename in news:
                new = news[tablename]
                for t in new["tables"]:
                    tables.append(t)
                for s in new["supers"]:
                    tables.append(s)
                    stable = db[s]
                    rows = db(stable._id > 0).select(stable.instance_type)
                    instance_types = set([r.instance_type for r in rows])
                    for t in instance_types:
                        tables.append(t)
        if strbools:
            # NOTE(review): this iterates strints under the strbools flag —
            # looks like a copy/paste bug (strbool tables are never backed
            # up, and this raises TypeError if strints is None); confirm
            for tablename, fieldname in strints:
                tables.append(tablename)
        if strints:
            for tablename, fieldname in strints:
                tables.append(tablename)

        # Remove duplicates
        tables = set(tables)

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        for tablename in tables:
            filename = "%s/%s.csv" % (folder, tablename)
            file = open(filename, "w")
            rows = db(db[tablename].id > 0).select()
            rows.export_to_csv_file(file)
            file.close()
            file = open(filename, "r")
            db_bak[tablename].import_from_csv_file(file, unique="uuid2") # uuid2 designed to not hit!
            file.close()
            db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak

    # -------------------------------------------------------------------------
    def pull(self, version=None):
        """ Update the Eden code via git """
        #if GITPYTHON:
        #else:
        #import s3log
        #s3log.S3Log.setup()
        #current.log.warning("GitPython not installed, will need to call out to Git via CLI")

        # Copy the current working directory to revert back to later
        cwd = os.getcwd()
        # Change to the Eden folder
        folder = current.request.folder
        os.chdir(os.path.join(cwd, folder))
        # Remove old compiled code
        remove_compiled_application(folder)
        # Reset to remove any hotfixes
        subprocess.call(["git", "reset", "--hard", "HEAD"])
        # Store the current version
        old_version = subprocess.check_output(["git", "describe", "--always", "HEAD"])
        self.old_version = old_version.strip()
        # Pull
        subprocess.call(["git", "pull"])
        if version:
            # Checkout this version
            subprocess.call(["git", "checkout", version])
        # Change back
        os.chdir(cwd)

    # -------------------------------------------------------------------------
    def find_script(self):
        """ Find the upgrade script(s) to run """
        old_version = self.old_version
        if not old_version:
            # Nothing we can do
            return
        # Find the current version
        new_version = subprocess.check_output(["git", "describe", "--always", "HEAD"])
        new_version = new_version.strip()
        # Look for a script to the current version
        # NOTE(review): 'request' is not defined in this scope — probably
        # meant current.request; also 'path' is computed but never used,
        # this method looks unfinished
        path = os.path.join(request.folder, "static", "scripts", "upgrade")

    # -------------------------------------------------------------------------
    def run_model(self):
        """ Execute all the models/ (idempotent: skips if db already loaded) """
        if not hasattr(current, "db"):
            run_models_in(self.environment)

    # -------------------------------------------------------------------------
    def compile(self):
        """ Compile the Eden code """
        # Load the base Model
        self.run_model()

        from gluon.fileutils import up

        request = current.request
        os_path = os.path
        join = os_path.join

        # Pass View Templates to Compiler
        settings = current.deployment_settings
        s3 = current.response.s3
        s3.views = views = {}
        s3.theme = theme = settings.get_theme()
        if theme != "default":
            folder = request.folder
            location = settings.get_template_location()
            exists = os_path.exists
            for view in ["create.html",
                         #"delete.html",
                         "display.html",
                         "iframe.html",
                         "list.html",
                         "list_filter.html",
                         "map.html",
                         #"merge.html",
                         "plain.html",
                         "popup.html",
                         "profile.html",
                         "report.html",
                         #"review.html",
                         "summary.html",
                         #"timeplot.html",
                         "update.html",
                         ]:
                if exists(join(folder, location, "templates", theme, "views", "_%s" % view)):
                    views[view] = "../%s/templates/%s/views/_%s" % (location, theme, view)

        def apath(path="", r=None):
            """ Builds a path inside an application folder

                Parameters
                ----------
                path:
                    path within the application folder
                r:
                    the global request object
            """
            opath = up(r.folder)
            while path[:3] == "../":
                (opath, path) = (up(opath), path[3:])
            return join(opath, path).replace("\\", "/")

        folder = apath(request.application, request)
        compile_application(folder)

    # -------------------------------------------------------------------------
    def migrate(self):
        """ Perform an automatic database migration """
        # Load the base model
        self.run_model()
        # Load all conditional models
        current.s3db.load_all_models()

    # -------------------------------------------------------------------------
    def post(self, moves=None,
             news=None,
             strbools=None,
             strints=None,
             ):
        """
            Cleanup after migration

            @param moves : List of dicts
                           {tablename: [(fieldname, new_tablename, link_fieldname)]}
                           to move a field from 1 table to another
                           - fieldname can be a tuple if the fieldname changes:
                             (fieldname, new_fieldname)
            @param news : List of dicts
                          {new_tablename: {'lookup_field': '',
                                           'tables': [tablename: [fieldname]],
                                           'supers': [tablename: [fieldname]],
                                           }
                          to create new records from 1 or more old tables
                          (inc all instances of an SE)
                          - fieldname can be a tuple if the fieldname changes:
                            (fieldname, new_fieldname)
            @param strbools : List of tuples [(tablename, fieldname)] to
                              convert from string/integer to bools
            @param strints : List of tuples [(tablename, fieldname)] to
                             convert from string to integer
        """
        db = self.db

        # @ToDo: Do prepops of new tables

        # Restore data from backup
        folder = "%s/databases/backup" % current.request.folder
        db_bak = DAL("sqlite://backup.db",
                     folder=folder,
                     auto_import=True,
                     migrate=False)

        if moves:
            for tablename in moves:
                table = db_bak[tablename]
                fieldname, new_tablename, link_fieldname = moves[tablename]
                if isinstance(fieldname, (tuple, list)):
                    fieldname, new_fieldname = fieldname
                else:
                    new_fieldname = fieldname
                old_field = table[fieldname]
                new_linkfield = db[new_tablename][link_fieldname]
                rows = db_bak(table._id > 0).select(old_field, link_fieldname)
                for row in rows:
                    update_vars = {}
                    update_vars[new_fieldname] = row[old_field]
                    db(new_linkfield == row[link_fieldname]).update(**update_vars)

        if news:
            for tablename in news:
                # Read Data
                data = {}
                new = news[tablename]
                lookup_field = new["lookup_field"]
                _tables = new["tables"]
                for t in _tables:
                    fields = _tables[t]
                    # @ToDo: Support tuples
                    #for f in fields:
                    #    if isinstance(f, (tuple, list)):
                    table = db_bak[t]
                    table_fields = [table[f] for f in fields]
                    rows = db_bak(table.deleted == False).select(table[lookup_field],
                                                                 *table_fields)
                    for row in rows:
                        record_id = row[lookup_field]
                        if record_id in data:
                            _new = False
                            _data = data[record_id]
                        else:
                            _new = True
                            _data = {}
                        for f in fields:
                            if f in row:
                                if row[f] not in ("", None):
                                    # JSON type doesn't like ""
                                    _data[f] = row[f]
                        if _new:
                            data[record_id] = _data
                for s in new["supers"]:
                    fields = new["supers"][s]
                    # @ToDo: Support tuples
                    #for f in fields:
                    #    if isinstance(f, (tuple, list)):
                    stable = db_bak[s]
                    superkey = stable._id.name
                    rows = db_bak(stable.deleted == False).select(stable._id,
                                                                  stable.instance_type)
                    for row in rows:
                        etable = db_bak[row["instance_type"]]
                        _fields = [f for f in fields if f in etable.fields]
                        table_fields = [etable[f] for f in _fields]
                        record = db_bak(etable[superkey] == row[superkey]).select(etable[lookup_field],
                                                                                  *table_fields
                                                                                  ).first()
                        if record:
                            record_id = record[lookup_field]
                            if record_id in data:
                                _new = False
                                _data = data[record_id]
                            else:
                                _new = True
                                _data = {}
                            for f in _fields:
                                if f in record:
                                    if record[f] not in ("", None):
                                        # JSON type doesn't like ""
                                        _data[f] = record[f]
                            if _new:
                                data[record_id] = _data
                # Create Records
                table = db[tablename]
                for record_id in data:
                    update_vars = data[record_id]
                    if update_vars:
                        update_vars[lookup_field] = record_id
                        # Can't rely on the defaults as auto_import doesn't see DAL defaults
                        update_vars["created_on"] = datetime.datetime.utcnow()
                        update_vars["deleted"] = False
                        update_vars["mci"] = 0
                        update_vars["modified_on"] = datetime.datetime.utcnow()
                        update_vars["uuid"] = uuid4().urn # Would always be identical otherwise
                        table.insert(**update_vars)

        if strints:
            for tablename, fieldname in strints:
                newtable = db[tablename]
                newrows = db(newtable.id > 0).select(newtable.id)
                oldtable = db_bak[tablename]
                oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                         oldtable[fieldname])
                oldvals = oldrows.as_dict()
                for row in newrows:
                    _id = row.id
                    val = oldvals[_id][fieldname]
                    if not val:
                        continue
                    try:
                        update_vars = {fieldname : int(val)}
                    except:
                        current.log.warning("S3Migrate: Unable to convert %s to an integer - skipping" % val)
                    else:
                        db(newtable.id == _id).update(**update_vars)

        if strbools:
            for tablename, fieldname in strbools:
                to_bool = self.to_bool
                newtable = db[tablename]
                newrows = db(newtable.id > 0).select(newtable.id)
                oldtable = db_bak[tablename]
                oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                         oldtable[fieldname])
                oldvals = oldrows.as_dict()
                for row in newrows:
                    _id = row.id
                    val = oldvals[_id][fieldname]
                    if not val:
                        continue
                    val = to_bool(val)
                    # NOTE(review): only truthy results are written back, so
                    # an explicit False conversion is silently dropped — confirm
                    if val:
                        update_vars = {fieldname : val}
                        db(newtable.id == _id).update(**update_vars)

        db.commit()

    # -------------------------------------------------------------------------
    @staticmethod
    def to_bool(value):
        """
            Converts 'something' to boolean. Returns None for invalid formats.
            Possible True values: 1, True, "1", "TRue", "yes", "y", "t"
            Possible False values: 0, False, "0", "faLse", "no", "n", "f", 0.0
        """
        val = str(value).lower()
        if val in ("yes", "y", "true", "t", "1"):
            return True
        elif val in ("no", "n", "false", "f", "0", "0.0"):
            return False
        else:
            return None

    # -------------------------------------------------------------------------
    def drop(self, tablename, fieldname):
        """
            Drop a field from a table
            e.g. for when changing type
        """
        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # Not Supported: http://www.sqlite.org/lang_altertable.html
            # But also not required (for strints anyway)
            sql = ""
        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \
                dict(tablename=tablename, fieldname=fieldname)
        elif db_engine == "postgres":
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \
                dict(tablename=tablename, fieldname=fieldname)
        try:
            db.executesql(sql)
        except:
            import sys
            e = sys.exc_info()[1]
            print >> sys.stderr, e

        # Modify the .table file
        table = db[tablename]
        fields = []
        for fn in table.fields:
            if fn == fieldname:
                continue
            fields.append(table[fn])
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # -------------------------------------------------------------------------
    def ondelete(self, tablename, fieldname, reftable, ondelete):
        """ Modify the ondelete constraint for a foreign key """
        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue
            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError
            elif db_engine == "mysql":
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`, ALTER TABLE %(tablename)s ADD CONSTRAINT %(fk)s FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s(id) ON DELETE %(ondelete)s;" % \
                    dict(tablename=tablename, fk=fk, fieldname=fieldname,
                         reftable=reftable, ondelete=ondelete)
            elif db_engine == "postgres":
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey, ALTER TABLE %(tablename)s ADD CONSTRAINT %(tablename)s_%(fieldname)s_fkey FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s ON DELETE %(ondelete)s;" % \
                    dict(tablename=tablename, fieldname=fieldname,
                         reftable=reftable, ondelete=ondelete)
            try:
                executesql(sql)
            except:
                print "Error: Table %s with FK %s" % (tablename, fk)
                import sys
                e = sys.exc_info()[1]
                print >> sys.stderr, e

    # -------------------------------------------------------------------------
    def remove_foreign(self, tablename, fieldname):
        """
            Remove a Foreign Key constraint from a table
        """
        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue
            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError
            elif db_engine == "mysql":
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \
                    dict(tablename=tablename, fk=fk)
            elif db_engine == "postgres":
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey;" % \
                    dict(tablename=tablename, fieldname=fieldname)
            try:
                executesql(sql)
            except:
                print "Error: Table %s with FK %s" % (tablename, fk)
                import sys
                e = sys.exc_info()[1]
                print >> sys.stderr, e

    # -------------------------------------------------------------------------
    def remove_unique(self, tablename, fieldname):
        """
            Remove a Unique Index from a table
        """
        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # @ToDo: http://www.sqlite.org/lang_altertable.html
            raise NotImplementedError
        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE `%(tablename)s` DROP INDEX `%(fieldname)s`;" % \
                dict(tablename=tablename, fieldname=fieldname)
        elif db_engine == "postgres":
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_key;" % \
                dict(tablename=tablename, fieldname=fieldname)
        try:
            db.executesql(sql)
        except:
            import sys
            e = sys.exc_info()[1]
            print >> sys.stderr, e

        # Modify the .table file
        table = db[tablename]
        fields = []
        for fn in table.fields:
            field = table[fn]
            if fn == fieldname:
                field.unique = False
            fields.append(field)
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # =========================================================================
    # OLD CODE below here
    # - There are tests for these in /tests/dbmigration
    # -------------------------------------------------------------------------
    def rename_field(self, tablename, fieldname_old,
                     fieldname_new, attributes_to_copy=None):
        """
            Rename a field, while keeping the other properties of the field
            the same. If there are some indexes on that table, these will be
            recreated and other constraints will remain unchanged too.

            @param tablename : name of the table in which the field is renamed
            @param fieldname_old : name of the original field before renaming
            @param fieldname_new : name of the field after renaming
            @param attributes_to_copy : list of attributes which need to be
                                        copied from the old_field to the
                                        new_field (needed only in sqlite)
        """
        db = self.db
        db_engine = self.db_engine

        if db_engine == "sqlite":
            self._add_renamed_fields(db, tablename, fieldname_old, fieldname_new,
                                     attributes_to_copy)
            self._copy_field(db, tablename, fieldname_old, fieldname_new)
            sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \
                tablename
            list_index = db.executesql(sql)
            for element in list_index:
                search_str = "%s(%s)" % (tablename, fieldname_old)
                if element[0] is not None and search_str in element[0]:
                    sql = "CREATE INDEX %s__idx on %s(%s);" % \
                        (fieldname_new, tablename, fieldname_new)
                    try:
                        db.executesql(sql)
                    except:
                        pass
        elif db_engine == "mysql":
            field = db[tablename][fieldname_old]
            sql_type = map_type_web2py_to_sql(field.type)
            sql = "ALTER TABLE %s CHANGE %s %s %s(%s)" % (tablename,
                                                          fieldname_old,
                                                          fieldname_new,
                                                          sql_type,
                                                          field.length)
            db.executesql(sql)
        elif db_engine == "postgres":
            sql = "ALTER TABLE %s RENAME COLUMN %s TO %s" % \
                (tablename, fieldname_old, fieldname_new)
            db.executesql(sql)

    # -------------------------------------------------------------------------
    def rename_table(self, tablename_old, tablename_new):
        """
            Rename a table.
            If any fields reference that table, they will be handled too.

            @param tablename_old : name of the original table before renaming
            @param tablename_new : name of the table after renaming
        """
        try:
            sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old, tablename_new)
            self.db.executesql(sql)
        except Exception, e:
            print e
class Application(Tkinter.Frame):
    """
        Small Tkinter demo app: a CRUD form over a single web2py-DAL
        'produto' table stored in a local SQLite file.  Labels and
        messages are in Portuguese (kept as-is: they are runtime strings).
    """

    def __init__(self, master=None):
        Tkinter.Frame.__init__(self, master)
        self.grid()
        # Local SQLite database with a single 'produto' table;
        # every field is required (IS_NOT_EMPTY validator)
        self.db = DAL("sqlite://teste.sqlite")
        ne = IS_NOT_EMPTY()
        self.table = self.db.define_table("produto",
                                          Field("nome", requires=ne),
                                          Field("marca", requires=ne),
                                          Field("peso", "double", requires=ne),
                                          Field("valor", "double", requires=ne),
                                          Field("quantidade", "integer", requires=ne),
                                          )
        self.create_form()
        self.show_result()

    def create_form(self):
        """ Build the entry form (one input per table field, auto-generated). """
        self.form = Tkinter.LabelFrame(self, text="Cadastro", padx="30", pady="15")
        self.form.pack(side=Tkinter.TOP)
        self.vars = {}  # maps field name -> Entry.get callable (see create_input)
        # self.nome_label = Tkinter.Label(self.form, text="nome do produto")
        # self.nome_label.pack()
        # self.nome = Tkinter.Entry(self.form)
        # self.nome.pack()
        self.SQLTKFORM(self.table)

    def show_result(self):
        """ Render all stored records as CSV-ish lines in a Text widget. """
        self.result = Tkinter.LabelFrame(self, text="Results", padx="30", pady="15")
        self.result.pack(side=Tkinter.BOTTOM)
        # query db for records
        records = self.db(self.table).select()
        if records:
            self.records = Tkinter.Text(self.result)
            lines = [",".join(self.table.fields), "_" * 80]
            for record in records:
                lines.append(",".join([str(record[field]) for field in self.table.fields]))
            self.records.insert(1.0, "\n".join(lines))
        else:
            self.records = Tkinter.Label(self.result, text="Nothing found")
        self.records.pack()

    def SQLTKFORM(self, table, hidden="id"):
        """
            Generate one labelled Entry per table field, skipping hidden ones.

            NOTE(review): 'field not in hidden' is a substring test on the
            string "id", so fields named "i" or "d" would also be skipped —
            probably intended as a list/tuple of names; confirm.
        """
        fields = table.fields
        for field in fields:
            if field not in hidden:
                self.create_input(field, table[field].label)
        # NOTE(review): .pack() returns None, so save_button is always None
        self.save_button = Tkinter.Button(self.form, text="Salvar", command=self.save_to_db).pack()

    def save_to_db(self):
        """
            Validate and insert the form values; on validation errors,
            roll back and mark the offending inputs red.
        """
        try:
            # self.vars holds Entry.get callables — call each to read the value
            values = [(name, value()) for name, value in self.vars.items()]
            record = self.table.validate_and_insert(**dict(values))
            if record.errors:
                self.db.rollback()
                for error, error_message in record.errors.items():
                    label_text = getattr(self, error + "_label_text")
                    actual_text = label_text.get()
                    # only append the message once (guard on the ":" marker)
                    if not ":" in actual_text:
                        label_text.set(actual_text + ": " + error_message)
                    getattr(self, error).configure(bg="red")
                return
        except Exception:
            raise Exception("something wrong....")
        else:
            self.db.commit()
            self.reset_form()

    def reset_form(self):
        """ Destroy and rebuild both the form and the result pane. """
        self.form.destroy()
        self.create_form()
        self.result.destroy()
        self.show_result()

    def create_input(self, name, text):
        """ Create a label + entry pair for one field and register its getter. """
        # LABEL TEXT IS VARIABLE
        setattr(self, name + "_label_text", Tkinter.StringVar(self.form))
        label_text = getattr(self, name + "_label_text")
        label_text.set(text)
        # LABEL
        setattr(self, name + "_label", Tkinter.Label(self.form, textvariable=label_text))
        # INPUT
        setattr(self, name, Tkinter.Entry(self.form))
        # PACK
        getattr(self, name + "_label").pack()
        getattr(self, name).pack()
        # VARS
        self.vars[name] = getattr(self, name).get
class S3Migration(object): """ Database Migration Toolkit - used to help migrate both a production database on a server and also an offline client Normally run from a script in web2py context, but without models loaded: cd web2py python web2py.py -S eden -R <script.py> Where script looks like: m = local_import("s3migration") migrate = m.S3Migration() migrate.prep(foreigns=[], ondeletes=[], strbools=[], strints=[], uniques=[], ) #migrate.migrate() migrate.post(strbools=[], strints=[], ) FYI: If you need to access a filename in eden/databases/ then here is how: import hashlib (db_string, pool_size) = settings.get_database_string() prefix = hashlib.md5(db_string).hexdigest() filename = "%s_%s.table" % (prefix, tablename) """ def __init__(self): request= current.request # Load s3cfg name = "applications.%s.modules.s3cfg" % request.application s3cfg = __import__(name) for item in name.split(".")[1:]: # Remove the dot s3cfg = getattr(s3cfg, item) settings = s3cfg.S3Config() # Pass into template current.deployment_settings = settings # Read settings model = "%s/models/000_config.py" % request.folder code = getcfs(model, model, None) environment = build_environment(request, current.response, current.session) environment["settings"] = settings def template_path(): " Return the path of the Template config.py to load " path = os.path.join(request.folder, "private", "templates", settings.get_template(), "config.py") return path environment["template_path"] = template_path environment["os"] = os environment["Storage"] = Storage restricted(code, environment, layer=model) self.db_engine = settings.get_database_type() (db_string, pool_size) = settings.get_database_string() # Get a handle to the database self.db = DAL(db_string, #folder="%s/databases" % request.folder, auto_import=True, # @ToDo: Set to False until we migrate migrate_enabled=True, ) # ------------------------------------------------------------------------- def prep(self, foreigns=[], ondeletes=[], strbools=[], 
strints=[], uniques=[], ): """ Preparation before migration @param foreigns : List of tuples (tablename, fieldname) to have the foreign keys removed - if tablename == "all" then all tables are checked @param ondeletes : List of tuples (tablename, fieldname, reftable, ondelete) to have the ondelete modified to @param strbools : List of tuples (tablename, fieldname) to convert from string/integer to bools @param strints : List of tuples (tablename, fieldname) to convert from string to integer @param uniques : List of tuples (tablename, fieldname) to have the unique indices removed, """ # Backup current database self.backup() # Remove Foreign Key constraints which need to go in next code for tablename, fieldname in foreigns: self.remove_foreign(tablename, fieldname) # Remove Unique indices which need to go in next code for tablename, fieldname in uniques: self.remove_unique(tablename, fieldname) # Modify ondeletes for tablename, fieldname, reftable, ondelete in uniques: self.ondelete(tablename, fieldname, reftable, ondelete) # Remove fields which need to be altered in next code for tablename, fieldname in strbools: self.drop(tablename, fieldname) for tablename, fieldname in strints: self.drop(tablename, fieldname) self.db.commit() # ------------------------------------------------------------------------- def migrate(self): """ Perform the migration @ToDo """ # Update code: git pull # run_models_in(environment) # or # Set migrate=True in models/000_config.py # current.s3db.load_all_models() via applications/eden/static/scripts/tools/noop.py # Set migrate=False in models/000_config.py pass # ------------------------------------------------------------------------- def post(self, strbools=[], strints=[], ): """ Cleanup after migration @param strbools : List of tuples (tablename, fieldname) to convert from string/integer to bools @param strints : List of tuples (tablename, fieldname) to convert from string to integer """ db = self.db # @ToDo: Do prepops of new tables # 
Restore data from backup folder = "%s/databases/backup" % current.request.folder db_bak = DAL("sqlite://backup.db", folder=folder, auto_import=True, migrate=False) for tablename, fieldname in strints: newtable = db[tablename] newrows = db(newtable.id > 0).select(newtable.id) oldtable = db_bak[tablename] oldrows = db_bak(oldtable.id > 0).select(oldtable.id, oldtable[fieldname]) oldvals = oldrows.as_dict() for row in newrows: id = row.id val = oldvals[id][fieldname] if not val: continue try: vars = {fieldname : int(val)} except: current.log.warning("S3Migrate: Unable to convert %s to an integer - skipping" % val) else: db(newtable.id == id).update(**vars) for tablename, fieldname in strbools: to_bool = self.to_bool newtable = db[tablename] newrows = db(newtable.id > 0).select(newtable.id) oldtable = db_bak[tablename] oldrows = db_bak(oldtable.id > 0).select(oldtable.id, oldtable[fieldname]) oldvals = oldrows.as_dict() for row in newrows: id = row.id val = oldvals[id][fieldname] if not val: continue val = to_bool(val) if val: vars = {fieldname : val} db(newtable.id == id).update(**vars) db.commit() # ------------------------------------------------------------------------- @staticmethod def to_bool(value): """ Converts 'something' to boolean. 
Raises exception for invalid formats Possible True values: 1, True, "1", "TRue", "yes", "y", "t" Possible False values: 0, False, "0", "faLse", "no", "n", "f", 0.0 """ val = str(value).lower() if val in ("yes", "y", "true", "t", "1"): return True elif val in ("no", "n", "false", "f", "0", "0.0"): return False else: return None # ------------------------------------------------------------------------- def backup(self): """ Backup the database to a local SQLite database @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes too long for a large DB """ import os db = self.db folder = "%s/databases/backup" % current.request.folder # Create clean folder for the backup if os.path.exists(folder): import shutil shutil.rmtree(folder) import time time.sleep(1) os.mkdir(folder) # Setup backup database db_bak = DAL("sqlite://backup.db", folder=folder) # Copy Table structure for tablename in db.tables: if tablename == "gis_location": table = db[tablename] fields = [table[field] for field in table.fields if field != "the_geom"] db_bak.define_table(tablename, *fields) else: db_bak.define_table(tablename, db[tablename]) # Copy Data import csv csv.field_size_limit(2**20 * 100) # 100 megs filename = "%s/data.csv" % folder file = open(filename, "w") db.export_to_csv_file(file) file.close() file = open(filename, "r") db_bak.import_from_csv_file(file, unique="uuid2") # designed to fail file.close() db_bak.commit() # Pass handle back to other functions self.db_bak = db_bak # ------------------------------------------------------------------------- def drop(self, tablename, fieldname): """ Drop a field from a table e.g. 
for when changing type """ db = self.db db_engine = self.db_engine # Modify the database if db_engine == "sqlite": # Not Supported: http://www.sqlite.org/lang_altertable.html # But also not required (for strints anyway) sql = "" elif db_engine == "mysql": # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \ dict(tablename=tablename, fieldname=fieldname) elif db_engine == "postgres": # http://www.postgresql.org/docs/9.3/static/sql-altertable.html sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \ dict(tablename=tablename, fieldname=fieldname) try: db.executesql(sql) except: import sys e = sys.exc_info()[1] print >> sys.stderr, e # Modify the .table file table = db[tablename] fields = [] for fn in table.fields: if fn == fieldname: continue fields.append(table[fn]) db.__delattr__(tablename) db.tables.remove(tablename) db.define_table(tablename, *fields, # Rebuild the .table file from this definition fake_migrate=True) # ------------------------------------------------------------------------- def ondelete(self, tablename, fieldname, reftable, ondelete): """ Modify the ondelete constraint for a foreign key """ db = self.db db_engine = self.db_engine executesql = db.executesql if tablename == "all": tables = db.tables else: tables = [tablename] for tablename in tables: if fieldname not in db[tablename].fields: continue # Modify the database if db_engine == "sqlite": # @ToDo: http://www.sqlite.org/lang_altertable.html raise NotImplementedError elif db_engine == "mysql": # @ToDo: http://dev.mysql.com/doc/refman/5.1/en/alter-table.html raise NotImplementedError elif db_engine == "postgres": # http://www.postgresql.org/docs/9.3/static/sql-altertable.html sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey, ALTER TABLE %(tablename)s ADD CONSTRAINT %(tablename)s_%(fieldname)s_fkey FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s ON DELETE %(ondelete)s;" % \ 
dict(tablename=tablename, fieldname=fieldname, reftable=reftable, ondelete=ondelete) try: executesql(sql) except: print "Error: Table %s with FK %s" % (tablename, fk) import sys e = sys.exc_info()[1] print >> sys.stderr, e # ------------------------------------------------------------------------- def remove_foreign(self, tablename, fieldname): """ Remove a Foreign Key constraint from a table """ db = self.db db_engine = self.db_engine executesql = db.executesql if tablename == "all": tables = db.tables else: tables = [tablename] for tablename in tables: if fieldname not in db[tablename].fields: continue # Modify the database if db_engine == "sqlite": # @ToDo: http://www.sqlite.org/lang_altertable.html raise NotImplementedError elif db_engine == "mysql": # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1] fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop() if "`" in fk: fk = fk.split("`")[0] sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \ dict(tablename=tablename, fk=fk) elif db_engine == "postgres": # http://www.postgresql.org/docs/9.3/static/sql-altertable.html sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey;" % \ dict(tablename=tablename, fieldname=fieldname) try: executesql(sql) except: print "Error: Table %s with FK %s" % (tablename, fk) import sys e = sys.exc_info()[1] print >> sys.stderr, e # ------------------------------------------------------------------------- def remove_unique(self, tablename, fieldname): """ Remove a Unique Index from a table """ db = self.db db_engine = self.db_engine # Modify the database if db_engine == "sqlite": # @ToDo: http://www.sqlite.org/lang_altertable.html raise NotImplementedError elif db_engine == "mysql": # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html sql = "ALTER TABLE `%(tablename)s` DROP INDEX `%(fieldname)s`;" % \ dict(tablename=tablename, 
fieldname=fieldname) elif db_engine == "postgres": # http://www.postgresql.org/docs/9.3/static/sql-altertable.html sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_key;" % \ dict(tablename=tablename, fieldname=fieldname) try: db.executesql(sql) except: import sys e = sys.exc_info()[1] print >> sys.stderr, e # Modify the .table file table = db[tablename] fields = [] for fn in table.fields: field = table[fn] if fn == fieldname: field.unique = False fields.append(field) db.__delattr__(tablename) db.tables.remove(tablename) db.define_table(tablename, *fields, # Rebuild the .table file from this definition fake_migrate=True) # ========================================================================= # OLD CODE below here # - There are tests for these in /tests/dbmigration # ------------------------------------------------------------------------- def rename_field(self, tablename, fieldname_old, fieldname_new, attributes_to_copy=None): """ Rename a field, while keeping the other properties of the field the same. If there are some indexes on that table, these will be recreated and other constraints will remain unchanged too. 
@param tablename : name of the table in which the field is renamed @param fieldname_old : name of the original field before renaming @param fieldname_new : name of the field after renaming @param attributes_to_copy : list of attributes which need to be copied from the old_field to the new_field (needed only in sqlite) """ db = self.db db_engine = self.db_engine if db_engine == "sqlite": self._add_renamed_fields(db, tablename, fieldname_old, fieldname_new, attributes_to_copy) self._copy_field(db, tablename, fieldname_old, fieldname_new) sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \ tablename list_index = db.executesql(sql) for element in list_index: search_str = "%s(%s)" % (tablename, fieldname_old) if element[0] is not None and search_str in element[0]: sql = "CREATE INDEX %s__idx on %s(%s);" % \ (fieldname_new, tablename, fieldname_new) try: db.executesql(sql) except: pass elif db_engine == "mysql": field = db[tablename][fieldname_old] sql_type = map_type_web2py_to_sql(field.type) sql = "ALTER TABLE %s CHANGE %s %s %s(%s)" % (tablename, fieldname_old, fieldname_new, sql_type, field.length) db.executesql(sql) elif db_engine == "postgres": sql = "ALTER TABLE %s RENAME COLUMN %s TO %s" % \ (tablename, fieldname_old, fieldname_new) db.executesql(sql) # ------------------------------------------------------------------------- def rename_table(self, tablename_old, tablename_new): """ Rename a table. If any fields reference that table, they will be handled too. @param tablename_old : name of the original table before renaming @param tablename_new : name of the table after renaming """ try: sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old, tablename_new) self.db.executesql(sql) except Exception, e: print e
class S3Migration(object):
    """
    Database Migration Toolkit
    - will be useful to migrate both a production database on a server
      and also an offline client

    Normally run from a script in web2py context, but without models loaded:
        cd web2py
        python web2py.py -S eden -R <script.py>

    Where script looks like:
        m = local_import("s3migration")
        migrate = m.S3Migration()
        migrate.prep()
        migrate.migrate()
        migrate.post()
    """

    def __init__(self):
        # Bootstrap just enough of the web2py/Eden environment (S3Config +
        # 000_config.py) to obtain a DAL handle without loading any models
        request = current.request

        # Load s3cfg
        name = "applications.%s.modules.s3cfg" % request.application
        s3cfg = __import__(name)
        for item in name.split(".")[1:]:
            # Remove the dot
            s3cfg = getattr(s3cfg, item)
        settings = s3cfg.S3Config()
        # Pass into template
        current.deployment_settings = settings

        # Read settings
        model = "%s/models/000_config.py" % request.folder
        code = getcfs(model, model, None)
        environment = build_environment(request, current.response,
                                        current.session)
        environment["settings"] = settings

        def template_path():
            " Return the path of the Template config.py to load "
            path = os.path.join(request.folder,
                                "private", "templates",
                                settings.get_template(),
                                "config.py")
            return path

        environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage
        restricted(code, environment, layer=model)

        self.db_engine = settings.get_database_type()
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      # @ToDo: Set to False until we migrate
                      migrate_enabled=True,
                      )

    # -------------------------------------------------------------------------
    def prep(self, foreigns=[], uniques=[]):
        """
        Preparation before migration

        @param foreigns : List of tuples (tablename, fieldname) to have the
                          foreign keys removed
                          - if tablename == "all" then all tables are checked
        @param uniques : List of tuples (tablename, fieldname) to have the
                         unique indices removed
        """
        # Backup current database
        self.backup()

        # Remove Foreign Key constraints which need to go in next code
        for tablename, fieldname in foreigns:
            self.remove_foreign(tablename, fieldname)

        # Remove Unique indices which need to go in next code
        for tablename, fieldname in uniques:
            self.remove_unique(tablename, fieldname)

        self.db.commit()

    # -------------------------------------------------------------------------
    def post(self):
        """
        Cleanup after migration
        @ToDo
        """
        # Do prepops of new tables
        # Copy data
        pass

    # -------------------------------------------------------------------------
    def migrate(self):
        """
        Perform the migration
        @ToDo
        """
        # Update code: git pull
        # run_models_in(environment)
        # or
        # Set migrate=True in models/000_config.py
        # current.s3db.load_all_models() via applications/eden/static/scripts/tools/noop.py
        # Set migrate=False in models/000_config.py
        pass

    # -------------------------------------------------------------------------
    def backup(self):
        """
        Backup the database to a local SQLite database
        """
        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder)

        # Copy Table structure
        for tablename in db.tables:
            if tablename == "gis_location":
                # Skip the PostGIS geometry column - plain SQLite can't hold it
                table = db[tablename]
                fields = [table[field] for field in table.fields
                                       if field != "the_geom"]
                db_bak.define_table(tablename, *fields)
            else:
                db_bak.define_table(tablename, db[tablename])

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        filename = "%s/data.csv" % folder
        file = open(filename, "w")
        db.export_to_csv_file(file)
        file.close()
        file = open(filename, "r")
        db_bak.import_from_csv_file(file, unique="uuid2")  # designed to fail
        file.close()
        db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak

    # -------------------------------------------------------------------------
    def remove_foreign(self, tablename, fieldname):
        """
        Remove a Foreign Key constraint from a table
        """
        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue

            # Modify the database
            if db_engine == "sqlite":
                # @ToDo:
                raise NotImplementedError
            elif db_engine == "mysql":
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \
                    dict(tablename=tablename, fk=fk)
            elif db_engine == "postgres":
                # @ToDo:
                raise NotImplementedError
            try:
                executesql(sql)
            except:
                # NOTE(review): "sql" and "fk" are only bound on the mysql
                # path; with an unrecognised engine this handler would itself
                # raise NameError - confirm before relying on it
                print "Error: Table %s with FK %s" % (tablename, fk)
                import sys
                e = sys.exc_info()[1]
                print >> sys.stderr, e

    # -------------------------------------------------------------------------
    def remove_unique(self, tablename, fieldname):
        """
        Remove a Unique Index from a table
        """
        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # @ToDo:
            raise NotImplementedError
        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE `%(tablename)s` DROP INDEX `%(fieldname)s`;" % \
                dict(tablename=tablename, fieldname=fieldname)
        elif db_engine == "postgres":
            sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_key;" % \
                dict(tablename=tablename, fieldname=fieldname)
        try:
            db.executesql(sql)
        except:
            import sys
            e = sys.exc_info()[1]
            print >> sys.stderr, e

        # Modify the .table file
        table = db[tablename]
        fields = []
        for fn in table.fields:
            field = table[fn]
            if fn == fieldname:
                field.unique = False
            fields.append(field)
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # =========================================================================
    # OLD CODE below here
    # - There are tests for this in /tests/dbmigration
    # -------------------------------------------------------------------------
    def rename_field(self, tablename,
                     fieldname_old, fieldname_new,
                     attributes_to_copy=None):
        """
        Rename a field, while keeping the other properties of the field the
        same. If there are some indexes on that table, these will be
        recreated and other constraints will remain unchanged too.

        @param tablename          : name of the table in which the field is renamed
        @param fieldname_old      : name of the original field before renaming
        @param fieldname_new      : name of the field after renaming
        @param attributes_to_copy : list of attributes which need to be copied
                                    from the old_field to the new_field
                                    (needed only in sqlite)
        """
        db = self.db
        db_engine = self.db_engine
        if db_engine == "sqlite":
            self._add_renamed_fields(db, tablename, fieldname_old,
                                     fieldname_new, attributes_to_copy)
            self._copy_field(db, tablename, fieldname_old, fieldname_new)
            sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \
                tablename
            list_index = db.executesql(sql)
            for element in list_index:
                search_str = "%s(%s)" % (tablename, fieldname_old)
                if element[0] is not None and search_str in element[0]:
                    sql = "CREATE INDEX %s__idx on %s(%s);" % \
                        (fieldname_new, tablename, fieldname_new)
                    try:
                        db.executesql(sql)
                    except:
                        pass
        elif db_engine == "mysql":
            field = db[tablename][fieldname_old]
            sql_type = map_type_web2py_to_sql(field.type)
            sql = "ALTER TABLE %s CHANGE %s %s %s(%s)" % (tablename,
                                                          fieldname_old,
                                                          fieldname_new,
                                                          sql_type,
                                                          field.length)
            db.executesql(sql)
        elif db_engine == "postgres":
            sql = "ALTER TABLE %s RENAME COLUMN %s TO %s" % \
                (tablename, fieldname_old, fieldname_new)
            db.executesql(sql)

    # -------------------------------------------------------------------------
    def rename_table(self, tablename_old, tablename_new):
        """
        Rename a table.
        If any fields reference that table, they will be handled too.

        @param tablename_old : name of the original table before renaming
        @param tablename_new : name of the table after renaming
        """
        try:
            sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old,
                                                    tablename_new)
            self.db.executesql(sql)
        except Exception, e:
            print e
db_path = os.path.join(request.folder, 'databases') tk_db = DAL(db_string, folder=db_path, auto_import=True) ts = TicketStorage(db=tk_db) tk_table = ts._get_table( db=tk_db, tablename=ts.tablename, app=request.application) hashes = {} while 1: if request.tickets_db: print "You're storing tickets yet in database" sys.exit(1) for file in os.listdir(errors_path): filename = os.path.join(errors_path, file) modified_time = os.stat(filename)[stat.ST_MTIME] modified_time = datetime.datetime.fromtimestamp(modified_time) ticket_id = file ticket_data = open(filename).read() tk_table.insert(ticket_id=ticket_id, ticket_data=ticket_data, created_datetime=modified_time ) tk_db.commit() os.unlink(filename) time.sleep(SLEEP_MINUTES * 60)
class S3Migration(object):
    """
    Database Migration Toolkit
    - will be useful to migrate both a production database on a server
      and also an offline client

    Normally run from a script in web2py context, but without models loaded:
        cd web2py
        python web2py.py -S eden -R <script.py>

    Where script looks like:
        m = local_import("s3migration")
        migrate = m.S3Migration()
        migrate.prep()
        migrate.migrate()
        migrate.post()
    """

    def __init__(self):
        # Bootstrap just enough of the web2py/Eden environment (S3Config +
        # 000_config.py) to obtain a DAL handle without loading any models
        request = current.request

        # Load s3cfg
        name = "applications.%s.modules.s3cfg" % request.application
        s3cfg = __import__(name)
        for item in name.split(".")[1:]:
            # Remove the dot
            s3cfg = getattr(s3cfg, item)
        settings = s3cfg.S3Config()
        # Pass into template
        current.deployment_settings = settings

        # Read settings
        model = "%s/models/000_config.py" % request.folder
        code = getcfs(model, model, None)
        environment = build_environment(request, current.response,
                                        current.session)
        environment["settings"] = settings

        def template_path():
            " Return the path of the Template config.py to load "
            path = os.path.join(request.folder,
                                "private", "templates",
                                settings.get_template(),
                                "config.py")
            return path

        environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage
        restricted(code, environment, layer=model)

        self.db_engine = settings.get_database_type()
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      # @ToDo: Set to False until we migrate
                      migrate_enabled=True,
                      )

    # -------------------------------------------------------------------------
    def prep(self, foreigns=[], uniques=[]):
        """
        Preparation before migration

        @param foreigns : List of tuples (tablename, fieldname) to have the
                          foreign keys removed
                          - if tablename == "all" then all tables are checked
        @param uniques : List of tuples (tablename, fieldname) to have the
                         unique indices removed
        """
        # Backup current database
        self.backup()

        # Remove Foreign Key constraints which need to go in next code
        for tablename, fieldname in foreigns:
            self.remove_foreign(tablename, fieldname)

        # Remove Unique indices which need to go in next code
        for tablename, fieldname in uniques:
            self.remove_unique(tablename, fieldname)

        self.db.commit()

    # -------------------------------------------------------------------------
    def post(self):
        """
        Cleanup after migration
        @ToDo
        """
        # Do prepops of new tables
        # Copy data
        pass

    # -------------------------------------------------------------------------
    def migrate(self):
        """
        Perform the migration
        @ToDo
        """
        # Update code: git pull
        # run_models_in(environment)
        # or
        # Set migrate=True in models/000_config.py
        # current.s3db.load_all_models() via applications/eden/static/scripts/tools/noop.py
        # Set migrate=False in models/000_config.py
        pass

    # -------------------------------------------------------------------------
    def backup(self):
        """
        Backup the database to a local SQLite database
        """
        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder)

        # Copy Table structure
        for tablename in db.tables:
            if tablename == "gis_location":
                # Skip the PostGIS geometry column - plain SQLite can't hold it
                table = db[tablename]
                fields = [table[field] for field in table.fields
                                       if field != "the_geom"]
                db_bak.define_table(tablename, *fields)
            else:
                db_bak.define_table(tablename, db[tablename])

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        filename = "%s/data.csv" % folder
        file = open(filename, "w")
        db.export_to_csv_file(file)
        file.close()
        file = open(filename, "r")
        db_bak.import_from_csv_file(file, unique="uuid2")  # designed to fail
        file.close()
        db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak

    # -------------------------------------------------------------------------
    def remove_foreign(self, tablename, fieldname):
        """
        Remove a Foreign Key constraint from a table
        """
        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue

            # Modify the database
            if db_engine == "sqlite":
                # @ToDo:
                raise NotImplementedError
            elif db_engine == "mysql":
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \
                    dict(tablename=tablename, fk=fk)
            elif db_engine == "postgres":
                # @ToDo:
                raise NotImplementedError
            try:
                executesql(sql)
            except:
                # NOTE(review): "sql" and "fk" are only bound on the mysql
                # path; with an unrecognised engine this handler would itself
                # raise NameError - confirm before relying on it
                print "Error: Table %s with FK %s" % (tablename, fk)
                import sys
                e = sys.exc_info()[1]
                print >> sys.stderr, e

    # -------------------------------------------------------------------------
    def remove_unique(self, tablename, fieldname):
        """
        Remove a Unique Index from a table
        """
        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # @ToDo:
            raise NotImplementedError
        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE `%(tablename)s` DROP INDEX `%(fieldname)s`;" % \
                dict(tablename=tablename, fieldname=fieldname)
        elif db_engine == "postgres":
            sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_key;" % \
                dict(tablename=tablename, fieldname=fieldname)
        try:
            db.executesql(sql)
        except:
            import sys
            e = sys.exc_info()[1]
            print >> sys.stderr, e

        # Modify the .table file
        table = db[tablename]
        fields = []
        for fn in table.fields:
            field = table[fn]
            if fn == fieldname:
                field.unique = False
            fields.append(field)
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # =========================================================================
    # OLD CODE below here
    # - There are tests for this in /tests/dbmigration
    # -------------------------------------------------------------------------
    def rename_field(self, tablename,
                     fieldname_old, fieldname_new,
                     attributes_to_copy=None):
        """
        Rename a field, while keeping the other properties of the field the
        same. If there are some indexes on that table, these will be
        recreated and other constraints will remain unchanged too.

        @param tablename          : name of the table in which the field is renamed
        @param fieldname_old      : name of the original field before renaming
        @param fieldname_new      : name of the field after renaming
        @param attributes_to_copy : list of attributes which need to be copied
                                    from the old_field to the new_field
                                    (needed only in sqlite)
        """
        db = self.db
        db_engine = self.db_engine
        if db_engine == "sqlite":
            self._add_renamed_fields(db, tablename, fieldname_old,
                                     fieldname_new, attributes_to_copy)
            self._copy_field(db, tablename, fieldname_old, fieldname_new)
            sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \
                tablename
            list_index = db.executesql(sql)
            for element in list_index:
                search_str = "%s(%s)" % (tablename, fieldname_old)
                if element[0] is not None and search_str in element[0]:
                    sql = "CREATE INDEX %s__idx on %s(%s);" % \
                        (fieldname_new, tablename, fieldname_new)
                    try:
                        db.executesql(sql)
                    except:
                        pass
        elif db_engine == "mysql":
            field = db[tablename][fieldname_old]
            sql_type = map_type_web2py_to_sql(field.type)
            sql = "ALTER TABLE %s CHANGE %s %s %s(%s)" % (tablename,
                                                          fieldname_old,
                                                          fieldname_new,
                                                          sql_type,
                                                          field.length)
            db.executesql(sql)
        elif db_engine == "postgres":
            sql = "ALTER TABLE %s RENAME COLUMN %s TO %s" % \
                (tablename, fieldname_old, fieldname_new)
            db.executesql(sql)

    # -------------------------------------------------------------------------
    def rename_table(self, tablename_old, tablename_new):
        """
        Rename a table.
        If any fields reference that table, they will be handled too.

        @param tablename_old : name of the original table before renaming
        @param tablename_new : name of the table after renaming
        """
        try:
            sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old,
                                                    tablename_new)
            self.db.executesql(sql)
        except Exception, e:
            print e
# Record the current occupancy of trunk group 44 in tg_load and prepare
# the PNG canvas on which the load diagram is drawn.
_ins_num = get_circuits(ans, 'Installation number')

# Busy count recorded at the most recent check for TG 44
last = db.tg_load.check_date.max()
last_date = db().select(last).first()[last]
my_result = db((db.tg_load.check_date == last_date) &
               (db.tg_load.tg_number == '44')).select()
for row in my_result:
    last_busy = row.busy

# FIX: was busy=_busy, a name never assigned in this script (NameError at
# runtime), while last_busy - computed just above - went unused; presumably
# last_busy was the intended value (confirm against the full cron script)
db.tg_load.insert(check_date=datetime.datetime.now(),
                  tg_number=44,
                  busy=last_busy,
                  installed=_ins_num)
#db.tg_load.insert(check_date='',tg_number=3, busy=45, installed=60)
db.commit()

#draw diagramm
import PIL, Image, ImageDraw, ImageFont

width = 800
height = 420
png_path = '/home/www-data/web2py/applications/tgmonitor/cron/44.png'
if not os.path.isfile(png_path):
    # First run: seed an empty white canvas
    im = Image.new('RGB', (width, height), (255, 255, 255))
    im.save(png_path)
im = Image.open(png_path)
draw = ImageDraw.Draw(im)
class Application(Tkinter.Frame):
    """
    Minimal Tkinter entry form for the "produto" table, backed by web2py's
    DAL on the SQLite file produtos.sqlite.
    """

    def __init__(self, master=None):
        Tkinter.Frame.__init__(self, master)
        self.grid()
        self.define_table()
        self.create_form()

    def define_table(self):
        """Open (or create) the database and declare the table schema."""
        self.db = DAL("sqlite://produtos.sqlite")
        self.table = self.db.define_table("produto",
                                          Field("nome"),
                                          Field("marca"),
                                          Field("peso", "double",
                                                label="Peso do produto"),
                                          Field("valor", "double"),
                                          Field("quant", "integer"),
                                          )

    def create_form(self):
        """Build the entry form frame and its widgets."""
        # Maps field name -> Entry.get callable; read back in save_to_db()
        self.vars = {}
        self.form = Tkinter.LabelFrame(self, text="Cadastro de produtos",
                                       padx="30", pady="15")
        self.form.pack(side=Tkinter.TOP)
        self.SQLTKFORM(self.table, self.form)

    def SQLTKFORM(self, table, master, hidden=['id']):
        """
        Build one labelled Entry per table field (skipping those listed in
        hidden) plus a save button.

        NOTE(review): mutable default argument; harmless here since the
        list is never mutated, but a tuple would be safer.
        """
        fields = table.fields
        for field in fields:
            if field not in hidden:
                self.create_input(field, table[field].label, master)
        # Button(...).pack() returns None, so save_button is always None
        master.save_button = Tkinter.Button(master, text="Salvar",
                                            command=self.save_to_db).pack()

    def save_to_db(self):
        """Collect the current Entry values and insert one record."""
        try:
            values = [(field, value()) for field, value in self.vars.items()]
            self.table.insert(**dict(values))
        except Exception:
            # NOTE(review): the failure is swallowed silently - the user
            # gets no feedback on a failed insert; consider surfacing it
            self.db.rollback()
        else:
            self.db.commit()
            self.reset_form()

    def reset_form(self):
        """Rebuild the form from scratch, clearing all entries."""
        self.form.destroy()
        self.create_form()

    def create_input(self, field, label, master):
        """Create a Label + Entry pair for one field and register its getter."""
        # self.label = Tkinter.Label(master, text=label)
        # label.pack()
        setattr(self, field + "_label", Tkinter.Label(master, text=label))
        getattr(self, field + "_label").pack()
        setattr(self, field, Tkinter.Entry(master))
        getattr(self, field).pack()
        # Store the bound getter so save_to_db can read the value later
        self.vars[field] = getattr(self, field).get