def copyDB(self): other_db = DAL("{0}://{1}".format(self.targetdbType, self.targetdbName), folder=self.targetFolder) print 'creating tables...' for table in self.db: other_db.define_table(table._tablename, *[field for field in table]) ''' should there be an option to truncAte target DB? if yes, then change args to allow for choice and set self.trancate to the art value if self.truncate==True: other_db[table._tablename].truncate() ''' print 'exporting data...' self.db.export_to_csv_file(open('tmp.sql', 'wb')) print 'importing data...' other_db.import_from_csv_file(open('tmp.sql', 'rb')) other_db.commit() print 'done!' print 'Attention: do not run this program again or you end up with duplicate records'
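The pattern above, re-declaring each table on the target connection and then round-tripping all data through CSV with export_to_csv_file/import_from_csv_file, is the portable way to clone a database across engines with the web2py/pyDAL DAL. Below is a minimal standalone sketch of the same round trip; the URIs, folders, dump filename, and the person table are hypothetical, and field.clone() is used because recent pydal versions refuse Field objects already bound to a source table:

import os
from pydal import DAL, Field

# Hypothetical source and target databases; any supported URIs work
os.makedirs("/tmp/src", exist_ok=True)
os.makedirs("/tmp/dst", exist_ok=True)
src = DAL("sqlite://source.db", folder="/tmp/src")
src.define_table("person", Field("name"))
src.person.insert(name="Alice")
src.commit()

dst = DAL("sqlite://target.db", folder="/tmp/dst")
# Re-declare the source schema on the target, as copyDB() does;
# clone() yields unbound copies of the fields
for table in src:
    dst.define_table(table._tablename,
                     *[field.clone() for field in table])

# Dump everything to CSV, then load it into the target
with open("/tmp/dump.csv", "w", newline="") as f:
    src.export_to_csv_file(f)
with open("/tmp/dump.csv", "r") as f:
    dst.import_from_csv_file(f)
dst.commit()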
def copyDB(self): other_db = DAL("%s://%s" % ( self.targetdbType, self.targetdbName), folder=self.targetFolder) print 'creating tables...' for table in self.db: other_db.define_table( table._tablename, *[field for field in table]) ''' should there be an option to truncAte target DB? if yes, then change args to allow for choice and set self.trancate to the art value if self.truncate==True: other_db[table._tablename].truncate() ''' print 'exporting data...' self.db.export_to_csv_file(open('tmp.sql', 'wb')) print 'importing data...' other_db.import_from_csv_file(open('tmp.sql', 'rb')) other_db.commit() print 'done!' print 'Attention: do not run this program again or you end up with duplicate records'
def backup(self):
    """
        Backup the database to a local SQLite database

        @ToDo: Option to use a temporary DB in Postgres/MySQL as this
               takes too long for a large DB
    """

    import os

    db = self.db
    folder = "%s/databases/backup" % current.request.folder

    # Create clean folder for the backup
    if os.path.exists(folder):
        import shutil
        shutil.rmtree(folder)
        import time
        time.sleep(1)
    os.mkdir(folder)

    # Setup backup database
    db_bak = DAL("sqlite://backup.db", folder=folder)

    # Copy Table structure
    for tablename in db.tables:
        if tablename == "gis_location":
            table = db[tablename]
            fields = [table[field] for field in table.fields
                      if field != "the_geom"]
            db_bak.define_table(tablename, *fields)
        else:
            db_bak.define_table(tablename, db[tablename])

    # Copy Data
    import csv
    csv.field_size_limit(2 ** 20 * 100)  # 100 megs
    filename = "%s/data.csv" % folder
    file = open(filename, "w")
    db.export_to_csv_file(file)
    file.close()
    file = open(filename, "r")
    db_bak.import_from_csv_file(file, unique="uuid2")  # designed to fail
    file.close()
    db_bak.commit()

    # Pass handle back to other functions
    self.db_bak = db_bak
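The unique="uuid2" argument with its "designed to fail" comment is deliberate. import_from_csv_file defaults to unique="uuid": when the named column exists in the CSV, each incoming row is looked up by that value and any match is updated instead of inserted. Naming a column that does not exist disables that per-row lookup, so every row becomes a plain (and much faster) insert, which is safe here because the backup database starts empty. A small sketch of the difference, assuming pydal and hypothetical data:

import io
from pydal import DAL, Field

db = DAL("sqlite:memory")
db.define_table("item", Field("name"), Field("uuid"))
db.item.insert(name="old", uuid="u-1")
db.commit()

csv_data = "name,uuid\r\nnew,u-1\r\n"

# unique names a real column: the row with uuid "u-1" is updated in place
db.item.import_from_csv_file(io.StringIO(csv_data), unique="uuid")
print(db(db.item).count())  # 1

# unique names a missing column ("designed to fail"): plain insert,
# so the same logical record now exists twice
db.item.import_from_csv_file(io.StringIO(csv_data), unique="uuid2")
print(db(db.item).count())  # 2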