Example #1
0
class DBDataMixinBase(object):
    """Mixin providing DAL-backed table management helpers.

    Wraps a DAL connection and keeps a registry of DalObj wrappers,
    keyed by table name, in ``self.dalobjs``.
    """

    def __init__(self, dbconn):
        # dbconn: dict of keyword arguments forwarded to the DAL constructor
        self.db = DAL(**dbconn)
        self.dalobjs = {}

    def loadAllTables(self, objdefs):
        """Create a DalObj for every definition in objdefs.

        Each entry is a sequence whose first element is the table name
        and whose remaining elements form the table definition.
        """
        for objdef in objdefs:
            name = objdef[0]
            self.dalobjs[name] = DalObj(self.db, name, objdef[1:])

    def loadSomeTables(self, objdefs, tablelist):
        """Like loadAllTables, but only for names present in tablelist."""
        for objdef in objdefs:
            name = objdef[0]
            if name in tablelist:
                self.dalobjs[name] = DalObj(self.db, name, objdef[1:])

    def makeIndex(self, indexDef):
        """Create indexes; indexDef maps table name -> index definition."""
        for name in indexDef:
            self.dalobjs[name].makeIndex(indexDef[name])

    def removeIndex(self, indexDef):
        """Drop indexes; indexDef maps table name -> index definition."""
        for name in indexDef:
            self.dalobjs[name].removeIndex(indexDef[name])

    def truncateTables(self, tablelist):
        """Truncate every table named in tablelist."""
        for name in tablelist:
            self.dalobjs[name].truncate()

    def closeDB(self):
        """Close the underlying DAL connection."""
        self.db.close()
Example #2
0
    def __init__(self):
        """Load deployment settings from 000_config.py and connect the DB.

        Executes applications/<app>/models/000_config.py in a restricted
        environment (with ``settings`` pre-bound), then opens a DAL
        connection using the configured database string.
        """

        request = current.request

        # Load s3cfg => but why do this so complicated?
        #name = "applications.%s.modules.s3cfg" % request.application
        #s3cfg = __import__(name)
        #for item in name.split(".")[1:]:
        ## Remove the dot
        #s3cfg = getattr(s3cfg, item)
        #settings = s3cfg.S3Config()

        # Can use normal import here since executed in web2py environment:
        import s3cfg
        settings = s3cfg.S3Config()

        # Pass into template
        current.deployment_settings = settings

        # Read settings
        model = "%s/models/000_config.py" % request.folder
        # getcfs caches the compiled model code across requests
        code = getcfs(model, model, None)
        response = current.response

        # Needed as some Templates look at this & we don't wish to crash:
        response.s3 = Storage()

        # Global variables for 000_config.py
        environment = build_environment(request, response, current.session)
        environment["settings"] = settings
        # Some (older) 000_config.py also use "deployment_settings":
        environment["deployment_settings"] = settings
        # For backwards-compatibility with older 000_config.py:
        #def template_path():
        #    # When you see this warning, you should update 000_config.py
        #    # See: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Templates/Migration#Changesin000_config.py
        #    print "template_path() is deprecated, please update 000_config.py"
        #    # Return just any valid path to make sure the path-check succeeds,
        #    # => modern S3Config will find the template itself
        #    return request.folder
        #environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage

        # Execute 000_config.py
        restricted(code, environment, layer=model)

        self.db_engine = settings.get_database_type()
        # pool_size is unpacked but unused here — TODO confirm intended
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(
            db_string,
            #folder="%s/databases" % request.folder,
            auto_import=True,
            # @ToDo: Set to False until we migrate
            migrate_enabled=True,
        )
Example #3
0
	def GetDocMeta( self, params = None ):
		"""Benchmark DocMeta retrieval: DAL query vs exhaustive JSON scan.

		Runs the same substring search twice (via the web2py DAL and via
		a linear scan of doc-meta.json), printing elapsed times for each,
		and returns only the DAL results.
		"""
		if params is None:
			params = self.params
		searchText = params["searchText"]
		searchLimit = params["searchLimit"]
		searchOffset = params["searchOffset"]
		
		print "web2py"
		start = time.clock()
		
		# get data using web2py application layer
		db = DAL('sqlite://doc-meta.sqlite', migrate_enabled=False)
		db.define_table('DocMeta', Field('Key'), Field('DocContent'), Field('DocID'), primarykey=['Key'])
		
		# build query using params
		q = db.DocMeta.DocContent.like('%' + searchText + '%')
		matches = db(q).count();
		limitb = searchOffset + searchLimit
		
		count = 0
		pyResults = {}
		# NOTE(review): orderby_on_limitby=False suppresses the implicit
		# ordering normally added for limitby — confirm this is intended
		queryResults = db(q).select(orderby=db.DocMeta.DocID, orderby_on_limitby = False, limitby=(searchOffset, limitb));
		for row in queryResults:
		    count += 1
		    pyResults[row.DocID] = dict(row)
		
		elapsed = (time.clock() -start)
		print "elapsed = ", elapsed
		
		
		print "exhaustive"
		start = time.clock()
		# get data using exhaustive search of JSON
		filename = os.path.join( self.request.folder, 'data/corpus', 'doc-meta.json' )
		with open( filename ) as f:
			content = json.load( f, encoding = 'utf-8' )
			results = {}
			matchCount = 0
			discardCount = 0
			keys = sorted(content.keys())
			for index in range(len(keys)):
			    obj = content[keys[index]]
			    docContent = obj["DocContent"]
			    if searchText in docContent:
			        matchCount += 1
			        if len(results.keys()) < searchLimit and discardCount >= searchOffset:
			            results[obj["DocID"]] = obj
			        elif discardCount < searchOffset:
			            discardCount += 1
		elapsed = (time.clock() - start)
		print "elapsed = ", elapsed
		
		# Only the DAL results are returned; the JSON-scan results are
		# computed for timing comparison and then discarded.
		return {
			"Documents" : pyResults,
			"docCount" : len(pyResults),
			"docMaxCount" : matches
		}
Example #4
0
    def copyDB(self):
        """Copy this object's database (schema + data) into the target DB.

        The target connection is built from self.targetdbType and
        self.targetdbName under self.targetFolder; data is transferred
        through a temporary CSV dump (tmp.sql).
        """
        other_db = DAL("{0}://{1}".format(self.targetdbType,
                                          self.targetdbName),
                       folder=self.targetFolder)

        print 'creating tables...'

        # Re-define every source table on the target connection
        for table in self.db:
            other_db.define_table(table._tablename,
                                  *[field for field in table])
            '''
            should there be an option to truncAte target DB?
            if yes, then change args to allow for choice
            and set self.trancate to the art value

            if self.truncate==True:
                other_db[table._tablename].truncate()
            '''

        print 'exporting data...'
        self.db.export_to_csv_file(open('tmp.sql', 'wb'))

        print 'importing data...'
        other_db.import_from_csv_file(open('tmp.sql', 'rb'))
        other_db.commit()
        print 'done!'
        print 'Attention: do not run this program again or you end up with duplicate records'
Example #5
0
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
                   too long for a large DB
        """

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            # Give the OS a moment to release the removed directory
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder)

        # Copy Table structure
        for tablename in db.tables:
            if tablename == "gis_location":
                # Skip the_geom: the geometry column cannot go into SQLite
                table = db[tablename]
                fields = [
                    table[field] for field in table.fields
                    if field != "the_geom"
                ]
                db_bak.define_table(tablename, *fields)
            else:
                db_bak.define_table(tablename, db[tablename])

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        filename = "%s/data.csv" % folder
        # Use context managers so the handles are closed even on error
        # (previous version leaked them on exceptions and shadowed the
        # builtin name `file`)
        with open(filename, "w") as outfile:
            db.export_to_csv_file(outfile)
        with open(filename, "r") as infile:
            db_bak.import_from_csv_file(infile, unique="uuid2")  # designed to fail
        db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak
Example #6
0
    def __init__(self, autogetconfig=True):
        """Bind the web2py request context and open the config database.

        On Google App Engine the datastore backend is used; otherwise a
        local SQLite file. When autogetconfig is true, the stored
        configuration is loaded immediately.
        """
        from gluon import current
        from gluon import DAL
        req = current.request
        self.request = req
        self.ongae = req.env.web2py_runtime_gae
        self.T = current.T
        self.cache = current.cache
        uri = "google:datastore" if self.ongae else "sqlite://config_movuca.sqlite"
        self._db = DAL(uri)

        self.define_tables()
        if autogetconfig:
            self.get_config()
Example #7
0
    def __init__(self, autogetconfig=True):
        """Bind the web2py request context and open the config database."""
        from gluon import current
        self.request = current.request
        # Truthy when running on Google App Engine
        self.ongae = self.request.env.web2py_runtime_gae
        self.T = current.T
        self.cache = current.cache
        from gluon import DAL
        if self.ongae:
            # GAE: use the datastore backend
            self._db = DAL("google:datastore")
        else:
            # Local/default: SQLite config database
            self._db = DAL("sqlite://config_movuca.sqlite")

        self.define_tables()
        if autogetconfig:
            self.get_config()
Example #8
0
def connect_db(init_schema=True):
    """Open (and optionally initialize) the sync.db SQLite database.

    Returns the connected DAL instance with WAL journaling enabled.
    """
    con = DAL(
        'sqlite://' + os.path.join(BASE_FOLDER, "sync.db"),
        pool_size=10,
        check_reserved=['all'],
        lazy_tables=lazy_tables,
        fake_migrate=fake_migrate,
        fake_migrate_all=fake_migrate_all,
        migrate=migrate,
    )
    # WAL mode allows concurrent readers while a writer is active
    con.executesql('PRAGMA journal_mode=WAL')

    if init_schema is True:
        init_db_schema(con)

    return con
Example #9
0
def adding_new_fields(new_unique_field,changed_table):
    """
    This function adds a new_unique_field into the changed_table, while keeping all the rest of
    the properties of the table unchanged
    """
    database_string = "sqlite://storage.db"
    old_database_folder = "%s/applications/%s/databases" % (WEB2PY_PATH, APP)
    temp_db = DAL( database_string, folder = old_database_folder, migrate_enabled=True ,migrate = True)
    # The new field is added as an integer column
    new_field = Field(new_unique_field,"integer")
    try:
        # Preserve the original table's primary key, if it has one
        changed_table_primary_key = db[changed_table]._primarykey
    except KeyError:
        changed_table_primary_key = None
    # NOTE(review): the table definition is read from the global `db` but
    # defined on temp_db — presumably db is the pre-migration database;
    # confirm against the caller
    temp_db.define_table(changed_table ,db[changed_table],new_field,primarykey = changed_table_primary_key)
    return temp_db
Example #10
0
class Config(object):
    """Build and read config on GAE or config.cfg file"""

    def __init__(self, autogetconfig=True):
        """Bind the request context, open the config DB, define tables."""
        from gluon import current
        self.request = current.request
        # Truthy when running on Google App Engine
        self.ongae = self.request.env.web2py_runtime_gae
        self.T = current.T
        self.cache = current.cache
        from gluon import DAL
        if self.ongae:
            self._db = DAL("google:datastore")
        else:
            self._db = DAL("sqlite://config_movuca.sqlite")

        self.define_tables()
        if autogetconfig:
            self.get_config()

    def define_tables(self):
        """Define one table per public attribute of datamodel.setup.Setup."""
        from datamodel.setup import Setup
        setup = Setup()
        # Materialize as a list: self.tables is iterated again by
        # set_default_config() and get_config(); a bare filter() object
        # would be a single-pass iterator under Python 3 and be exhausted
        # after the first loop.
        self.tables = list(filter(lambda item: not item.startswith("_"),
                                  dir(setup)))
        for table in self.tables:
            entity = self._db.define_table(table,
                *setup.__getattribute__(table),
                **dict(migrate="config_%s.table" % table)
            )
            self.__setattr__(table, entity)

    def set_default_config(self):
        """Insert a default (timestamped) row into every config table."""
        now = self.request.now
        for table in self.tables:
            self.__getattribute__(table).insert(setuptime=now)
            self._db.commit()

    def get_config(self, expire=300):
        """Load the latest row of each config table (cached for expire seconds)."""
        for table in self.tables:
            result = self._db(self._db[table]).select(cache=(self.cache.ram, expire)).last()
            self.__setattr__(table.split("_")[0],
                             result)

    def get_list(self, table, option):
        """Split "value:label" option strings into (value, label) pairs.

        Labels are passed through T() for translation, except for
        mailhide options which must stay verbatim.
        """
        options = self.__getattribute__(table)[option]
        assert isinstance(options, list)
        if "mailhide" not in option:
            return [(option.split(":")[0], str(self.T(option.split(":")[1]))) for option in options]
        else:
            return [(option.split(":")[0], str(option.split(":")[1])) for option in options]
Example #11
0
 def send_heartbeat(self,counter):
     """Record this worker's heartbeat and periodically requeue dead work.

     Uses a thread-private DAL connection, built lazily on first call.
     Every 10th call, tasks assigned to workers whose heartbeat has
     expired are requeued and those workers removed. Always sleeps
     self.heartbeat seconds before returning.
     """
     if not self.db_thread:
         logging.debug('thread building own DAL object')
         # DAL connections are not shared across threads, so this thread
         # builds its own connection to the same URI
         self.db_thread = DAL(self.db._uri,folder = self.db._adapter.folder)
         self.define_tables(self.db_thread,migrate=False)
     try:
         db = self.db_thread
         sw, st = db.scheduler_worker, db.scheduler_task
         now = datetime.datetime.now()
         # A worker is considered dead after 3 missed heartbeats
         expiration = now-datetime.timedelta(seconds=self.heartbeat*3)
         # record heartbeat
         logging.debug('........recording heartbeat')
         if not db(sw.worker_name==self.worker_name)\
                 .update(last_heartbeat = now, status = ACTIVE):
             sw.insert(status = ACTIVE,worker_name = self.worker_name,
                       first_heartbeat = now,last_heartbeat = now)
         if counter % 10 == 0:
             # deallocate jobs assigned to inactive workers and requeue them
             logging.debug('    freeing workers that have not sent heartbeat')
             inactive_workers = db(sw.last_heartbeat<expiration)
             db(st.assigned_worker_name.belongs(
                     inactive_workers._select(sw.worker_name)))\
                     (st.status.belongs((RUNNING,ASSIGNED,QUEUED)))\
                     .update(assigned_worker_name='',status=QUEUED)
             inactive_workers.delete()
         db.commit()
     except:
         # NOTE(review): bare except rolls back on any failure; if the
         # first statement of the try block ever raised, `db` would be
         # unbound here — consider narrowing
         db.rollback()
     time.sleep(self.heartbeat)
Example #12
0
def get_ticket_storage(app):
    """Return (db, table) for the error-ticket storage of the given app.

    The connection string is read from the app's private/ticket_storage.txt;
    the per-app ticket table is created on first use.
    """
    private_folder = apath('%s/private' % app, r=request)
    storage_file = os.path.join(private_folder, 'ticket_storage.txt')
    # Strip CR/LF so trailing newlines in the file cannot break the URI
    db_string = open(storage_file).read().replace('\r','').replace('\n','').strip()
    tablename = 'web2py_ticket' + '_' + app
    db_path = apath('%s/databases' % app, r=request)
    from gluon import DAL
    ticketsdb = DAL(db_string, folder=db_path, auto_import=True)
    if not ticketsdb.get(tablename):
        ticketsdb.define_table(
            tablename,
            Field('ticket_id', length=100),
            Field('ticket_data', 'text'),
            Field('created_datetime', 'datetime'),
        )
    return ticketsdb , ticketsdb.get(tablename)
Example #13
0
def get_ticket_storage(app):
    """Return (db, table) holding stored error tickets for the given app.

    The DB connection string is read from the app's
    private/ticket_storage.txt; the per-app ticket table is created on
    first access.
    """
    private_folder = apath('%s/private' % app, r=request)
    # Strip CR/LF so a trailing newline in the file cannot break the URI
    db_string = open(os.path.join(private_folder, 'ticket_storage.txt')).read().replace('\r','').replace('\n','').strip()
    tickets_table = 'web2py_ticket'
    tablename = tickets_table + '_' + app
    db_path = apath('%s/databases' % app, r=request)
    from gluon import DAL
    ticketsdb = DAL(db_string, folder=db_path, auto_import=True)
    if not ticketsdb.get(tablename):
        table = ticketsdb.define_table(
                tablename,
                Field('ticket_id', length=100),
                Field('ticket_data', 'text'),
                Field('created_datetime', 'datetime'),
                )
    return ticketsdb , ticketsdb.get(tablename)
Example #14
0
def opendb():
    """Open the gate application's SQLite database once (module-global db).

    Subsequent calls are no-ops while db is already connected; the model
    file is exec'd so its table definitions run against this DAL handle.
    """
    global db
    if db is None:  # `is None` identity check instead of `== None`
        db = DAL('sqlite://storage.sqlite',
                 folder="%s/web2py/applications/gate/databases" % cwd)
        # Python 2 execfile: run the model definitions in this namespace
        execfile("%s/web2py/applications/gate/models/db_gate.py" % cwd)
Example #15
0
    def post(self, strbools=(),
                   strints=(),
                   ):
        """
            Cleanup after migration

            @param strbools : List of tuples (tablename, fieldname) to convert from string/integer to bools
            @param strints : List of tuples (tablename, fieldname) to convert from string to integer

            Defaults are empty tuples rather than mutable list literals.
        """

        db = self.db

        # @ToDo: Do prepops of new tables

        # Restore data from backup
        folder = "%s/databases/backup" % current.request.folder
        db_bak = DAL("sqlite://backup.db",
                     folder=folder,
                     auto_import=True,
                     migrate=False)

        # Convert string columns back to integer
        for tablename, fieldname in strints:
            newtable = db[tablename]
            newrows = db(newtable.id > 0).select(newtable.id)
            oldtable = db_bak[tablename]
            oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                     oldtable[fieldname])
            oldvals = oldrows.as_dict()
            for row in newrows:
                row_id = row.id
                val = oldvals[row_id][fieldname]
                if not val:
                    continue
                try:
                    update_vars = {fieldname : int(val)}
                except (ValueError, TypeError):
                    # Narrowed from a bare except: only conversion
                    # failures are expected here
                    current.log.warning("S3Migrate: Unable to convert %s to an integer - skipping" % val)
                else:
                    db(newtable.id == row_id).update(**update_vars)

        # Convert string/integer columns back to boolean
        for tablename, fieldname in strbools:
            to_bool = self.to_bool
            newtable = db[tablename]
            newrows = db(newtable.id > 0).select(newtable.id)
            oldtable = db_bak[tablename]
            oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                     oldtable[fieldname])
            oldvals = oldrows.as_dict()
            for row in newrows:
                row_id = row.id
                val = oldvals[row_id][fieldname]
                if not val:
                    continue
                val = to_bool(val)
                if val:
                    update_vars = {fieldname : val}
                    db(newtable.id == row_id).update(**update_vars)

        db.commit()
Example #16
0
def dbf2sqlite():
    """Import every modeled table from FoxPro DBF files into upload.sqlite.

    Field names are lowercased (FoxPro uppercases them); rows whose
    values are all None are skipped. Optional post-import hooks run via
    a local myconversion module, if present.
    """
    db = DAL('sqlite://upload.sqlite',
            pool_size=1, check_reserved=['sqlite'], folder=DATA_SQLITE)
    db = get_model(db)
    for table in db.tables:
        print '  table', table,
        for row_upper_names in DBF(os.path.join(DATA_FOX, '%s.dbf' % table)):
            something = False
            row_lower_names = {}     # table definitions in applications/viewer/modules/db_model.py and in fox/src/myconversion/myconversion.prg must be the same
            for k, v in row_upper_names.iteritems():
                k = k.lower()
                if v is not None: # k != 'id':
                    something = True
                    row_lower_names[k.lower()] = v   # FoxPro uppercases field names; we prefer lowercase
            if something:
                db[table].insert(**row_lower_names)
        db.commit()
        print ' - done'

    try:                                       # are some import-post-actions defined?
        from myconversion import myconversion  # ./myconversion.py : def myconversion(db):
        print '  additional data conversion',
        myconversion(db)                       # see www.web2py.com/book, chapter 6 - DAL
        db.commit()                            # auto commit if you miss commit in myconversion()
        print ' - done'
    except ImportError:
        pass
    db.close()
Example #17
0
    def __init__(self):
        """Load deployment settings from 000_config.py and open the DB.

        Imports s3cfg via __import__, executes the app's 000_config.py in
        a restricted environment, then connects via DAL with the
        configured database string.
        """

        request = current.request

        # Load s3cfg
        name = "applications.%s.modules.s3cfg" % request.application
        s3cfg = __import__(name)
        for item in name.split(".")[1:]:
            # Remove the dot
            s3cfg = getattr(s3cfg, item)
        settings = s3cfg.S3Config()
        # Pass into template
        current.deployment_settings = settings

        # Read settings
        model = "%s/models/000_config.py" % request.folder
        # getcfs caches the compiled model code across requests
        code = getcfs(model, model, None)
        response = current.response
        response.s3 = Storage(
        )  # Needed as some Templates look at this & we don't wish to crash
        environment = build_environment(request, response, current.session)
        environment["settings"] = settings

        def template_path():
            " Return the path of the Template config.py to load "
            path = os.path.join(request.folder, "private", "templates",
                                settings.get_template(), "config.py")
            return path

        environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage
        # Execute 000_config.py in the restricted environment
        restricted(code, environment, layer=model)

        self.db_engine = settings.get_database_type()
        # pool_size is unpacked but unused here — TODO confirm intended
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(
            db_string,
            #folder="%s/databases" % request.folder,
            auto_import=True,
            # @ToDo: Set to False until we migrate
            migrate_enabled=True,
        )
Example #18
0
    def copyDB(self):
        """Copy this object's database (schema + data) into the target DB.

        The target connection is built from self.targetdbType and
        self.targetdbName under self.targetFolder; data is transferred
        through a temporary CSV dump (tmp.sql).
        """
        other_db = DAL("%s://%s" % (
            self.targetdbType, self.targetdbName), folder=self.targetFolder)

        print 'creating tables...'

        # Re-define every source table on the target connection
        for table in self.db:
            other_db.define_table(
                table._tablename, *[field for field in table])
            '''
            should there be an option to truncAte target DB?
            if yes, then change args to allow for choice
            and set self.trancate to the art value

            if self.truncate==True:
                other_db[table._tablename].truncate()
            '''

        print 'exporting data...'
        self.db.export_to_csv_file(open('tmp.sql', 'wb'))

        print 'importing data...'
        other_db.import_from_csv_file(open('tmp.sql', 'rb'))
        other_db.commit()
        print 'done!'
        print 'Attention: do not run this program again or you end up with duplicate records'
Example #19
0
 def define_table(self):
     """Connect to the produtos SQLite DB and define the produto table."""
     self.db = DAL("sqlite://produtos.sqlite")
     columns = [
         Field("nome"),
         Field("marca"),
         Field("peso", "double", label="Peso do produto"),
         Field("valor", "double"),
         Field("quant", "integer"),
     ]
     self.table = self.db.define_table("produto", *columns)
Example #20
0
    def __init__(self):
        """Load deployment settings from 000_config.py and connect the DB.

        Executes applications/<app>/models/000_config.py in a restricted
        environment (with ``settings`` pre-bound), then opens a DAL
        connection using the configured database string.
        """

        request = current.request

        # Load s3cfg => but why do this so complicated?
        #name = "applications.%s.modules.s3cfg" % request.application
        #s3cfg = __import__(name)
        #for item in name.split(".")[1:]:
            ## Remove the dot
            #s3cfg = getattr(s3cfg, item)
        #settings = s3cfg.S3Config()

        # Can use normal import here since executed in web2py environment:
        import s3cfg
        settings = s3cfg.S3Config()

        # Pass into template
        current.deployment_settings = settings

        # Read settings
        model = "%s/models/000_config.py" % request.folder
        # getcfs caches the compiled model code across requests
        code = getcfs(model, model, None)
        response = current.response

        # Needed as some Templates look at this & we don't wish to crash:
        response.s3 = Storage()

        # Global variables for 000_config.py
        environment = build_environment(request, response, current.session)
        environment["settings"] = settings
        # Some (older) 000_config.py also use "deployment_settings":
        environment["deployment_settings"] = settings
        # For backwards-compatibility with older 000_config.py:
        #def template_path():
        #    # When you see this warning, you should update 000_config.py
        #    # See: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Templates/Migration#Changesin000_config.py
        #    print "template_path() is deprecated, please update 000_config.py"
        #    # Return just any valid path to make sure the path-check succeeds,
        #    # => modern S3Config will find the template itself
        #    return request.folder
        #environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage

        # Execute 000_config.py
        restricted(code, environment, layer=model)

        self.db_engine = settings.get_database_type()
        # pool_size is unpacked but unused here — TODO confirm intended
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      # @ToDo: Set to False until we migrate
                      migrate_enabled=True,
                      )
Example #21
0
def adding_renamed_fields(table_name,old_field_name,new_field_name,attributes_to_copy):
    """
    Add a field to the given table as part of renaming a field.

    The renamed field is added separately to the table with the same
    properties (those listed in attributes_to_copy) as the original field.
    """
    database_string = "sqlite://storage.db"
    old_database_folder = "%s/applications/%s/databases" % (WEB2PY_PATH, APP)
    temp_db = DAL( database_string, folder = old_database_folder, migrate_enabled=True ,migrate = True)
    new_field = Field(new_field_name)
    try:
        # Preserve the original table's primary key, if it has one
        table_primary_key = db[table_name]._primarykey
    except KeyError:
        table_primary_key = None
    # Copy the requested attributes directly with getattr/setattr; this
    # replaces the original exec-based copying, which was both unsafe and
    # a syntax error under Python 3 (exec statement).
    old_field = db[table_name][old_field_name]
    for attribute in attributes_to_copy:
        setattr(new_field, attribute, getattr(old_field, attribute))
    temp_db.define_table(table_name ,db[table_name],new_field,primarykey = table_primary_key)
    return temp_db
Example #22
0
def get_ticket_storage(app):
    """Return (db, table) holding stored error tickets for the given app.

    The connection string comes from the app's private/ticket_storage.txt;
    the per-app ticket table is created on first access.
    """
    private_folder = apath("%s/private" % app, r=request)
    # Strip CR/LF so a trailing newline in the file cannot break the URI
    db_string = (
        open(os.path.join(private_folder, "ticket_storage.txt")).read().replace("\r", "").replace("\n", "").strip()
    )
    tickets_table = "web2py_ticket"
    tablename = tickets_table + "_" + app
    db_path = apath("%s/databases" % app, r=request)
    from gluon import DAL

    ticketsdb = DAL(db_string, folder=db_path, auto_import=True)
    if not ticketsdb.get(tablename):
        table = ticketsdb.define_table(
            tablename,
            Field("ticket_id", length=100),
            Field("ticket_data", "text"),
            Field("created_datetime", "datetime"),
        )
    return ticketsdb, ticketsdb.get(tablename)
Example #23
0
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
                   too long for a large DB
        """

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            # Give the OS a moment to release the removed directory
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder)

        # Copy Table structure
        for tablename in db.tables:
            if tablename == "gis_location":
                # Skip the_geom: the geometry column cannot go into SQLite
                table = db[tablename]
                fields = [table[field] for field in table.fields if field != "the_geom"]
                db_bak.define_table(tablename, *fields)
            else:
                db_bak.define_table(tablename, db[tablename])

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        filename = "%s/data.csv" % folder
        # NOTE(review): handles are not closed if export/import raises and
        # `file` shadows the builtin — consider `with open(...)` here
        file = open(filename, "w")
        db.export_to_csv_file(file)
        file.close()
        file = open(filename, "r")
        db_bak.import_from_csv_file(file, unique="uuid2") # designed to fail
        file.close()
        db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak
Example #24
0
 def get_db(self):
     """
         Return the connected db

         Ensures the metadata folder exists, connects, and defines the
         'storages' and 'files' tables.
     """
     if not os.path.exists(self.get_option('dbmetadata')):
         os.makedirs(self.get_option('dbmetadata'))
     db=DAL(self.get_option('database'),lazy_tables=True,folder=self.get_option("dbmetadata"))
     # Pass the callable (not its result) so each insert/update gets a
     # fresh timestamp; calling now() here would freeze the time at
     # table-definition time for every row.
     db.define_table('storages',
                 Field('storagename','string'),
                 Field('creation_ts','datetime',
                       default=datetime.datetime.now),
                 Field('modified_ts','datetime',
                       default=datetime.datetime.now,
                       update=datetime.datetime.now),
                 )
     db.define_table('files',
                 Field('storages_id',db.storages),
                 Field('parent_id','reference files'),
                 Field('filename','string'),
                 Field('description','string'),
                 Field('mime','string'),
                 Field('ftype','string'),
                 Field('mode','integer'),
                 Field('inode','integer'),
                 Field('dev','string'),
                 Field('nlink','integer'),
                 Field('uid','integer'),
                 Field('gid','integer'),
                 Field('size','double'),
                 Field('ctime','datetime'),
                 Field('mtime','datetime'),
                 Field('atime','datetime'),
                 )
     return db
Example #25
0
    def __init__(self, master=None):
        """Set up the frame, the produto table, and the UI widgets."""
        Tkinter.Frame.__init__(self, master)
        self.grid()

        self.db = DAL("sqlite://teste.sqlite")
        required = IS_NOT_EMPTY()
        columns = [
            Field("nome", requires=required),
            Field("marca", requires=required),
            Field("peso", "double", requires=required),
            Field("valor", "double", requires=required),
            Field("quantidade", "integer", requires=required),
        ]
        self.table = self.db.define_table("produto", *columns)

        self.create_form()
        self.show_result()
Example #26
0
def append():
    """Insert one tg_load sample row into the tgmonitor database."""
    db=DAL('sqlite://storage.sqlite', folder='/home/www-data/web2py/applications/tgmonitor/databases')
    db.define_table('tg_load',
        Field('check_date','datetime'),
        Field('tg_number','integer', notnull=True),
        Field('busy', 'integer'),
        Field('installed', 'integer')
        )

    # NOTE(review): check_date is inserted as '' for a datetime field —
    # presumably a placeholder; confirm whether a real timestamp was intended
    db.tg_load.insert(check_date='',tg_number=2, busy=45, installed=60)
    db.commit()
Example #27
0
def get_migrated_db():
    """
    Return a DAL handle onto the application database *after* the
    migration scripts have run, so it can be compared against the
    snapshot taken before the migrations.
    """
    os.chdir(WEB2PY_PATH)
    sys.path.append(WEB2PY_PATH)
    from gluon import DAL, Field
    # auto_import reads the existing .table metadata files
    db_folder = "%s/applications/%s/databases" % (WEB2PY_PATH, APP)
    return DAL("sqlite://storage.db",
               folder=db_folder,
               auto_import=True,
               migrate_enabled=True,
               migrate=True)
Example #28
0
    def __init__(self):
        """
            Bootstrap outside the normal request cycle: load the s3cfg
            module, execute models/000_config.py to populate the
            deployment settings, then open a handle to the application
            database.
        """

        request = current.request

        # Load s3cfg
        # __import__ returns the top-level package, so walk the dotted
        # path down to the actual module
        name = "applications.%s.modules.s3cfg" % request.application
        s3cfg = __import__(name)
        for item in name.split(".")[1:]:
            # Remove the dot
            s3cfg = getattr(s3cfg, item)
        settings = s3cfg.S3Config()
        # Pass into template
        current.deployment_settings = settings

        # Read settings
        model = "%s/models/000_config.py" % request.folder
        code = getcfs(model, model, None)
        response = current.response
        response.s3 = Storage() # Needed as some Templates look at this & we don't wish to crash
        # Build a web2py-like global namespace for 000_config.py
        environment = build_environment(request, response,
                                        current.session)
        environment["settings"] = settings
        def template_path():
            " Return the path of the Template config.py to load "
            path = os.path.join(request.folder,
                                "private", "templates",
                                settings.get_template(),
                                "config.py")
            return path
        # Names that 000_config.py expects to find in its namespace
        environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage
        # Execute 000_config.py sandboxed, with tickets on error
        restricted(code, environment, layer=model)

        self.db_engine = settings.get_database_type()
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      # @ToDo: Set to False until we migrate
                      migrate_enabled=True,
                      )
Example #29
0
    def __init__(self):
        """Open the cognito database and configure web2py Auth on it."""

        cfg = current.config.get

        # check_reserved=["all"]: reject identifiers that are reserved
        # words on any supported backend
        self.db = DAL(cfg("cognito_db.uri"),
                      pool_size=cfg("cognito_db.pool_size"),
                      migrate_enabled=cfg("cognito_db.migrate"),
                      check_reserved=["all"])

        self.auth = Auth(db=self.db, host_names=cfg("host.names"))

        # Do not create a per-user group on registration
        self.auth.settings.create_user_groups = None

        # TODO: extend this during implementation
        self.auth.settings.extra_fields["auth_user"] = [
            Field("user_attributes", type="json")
        ]

        self.auth.define_tables(username=True, signature=True)
Example #30
0
    def __init__(self):
        """Read the deployment settings and open a DAL handle onto the
        application database."""

        # Import applications.<app>.modules.s3cfg; __import__ returns
        # the top-level package, so descend to the leaf module
        name = "applications.%s.modules.s3cfg" % current.request.application
        module = __import__(name)
        for part in name.split(".")[1:]:
            module = getattr(module, part)
        settings = module.S3Config()

        self.db_engine = settings.get_database_type()
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      migrate_enabled=True,
                      migrate=True
                      )
Example #31
0
    def check_status(user_id, log_name, task_id, scheduler, task_name, folder):
        # NOTE(review): defined without "self" - presumably invoked as an
        # unbound callable/staticmethod; user_id and scheduler are unused
        # here. Polls the scheduler_task table for task_id's status and
        # appends a timestamped line to <folder>/logs/tasks/<log_name>.
        import os
        log_path = os.path.join(folder, "logs", "tasks")
        from gluon import DAL, Field
        '''
        If we use current.db here instead of getting a
        new handle to the db, the task that we
        previously queued won't get inserted into the db
        so every call we make in this method to check
        on the task's status will always result in the task being in
        the 'QUEUED' state.
        '''
        # Separate DAL handle (see note above); auto_import reads the
        # existing .table files, so no table definitions are needed
        db = DAL('sqlite://storage.db',
                 folder='applications/eden/databases',
                 auto_import=True)
        table = db.scheduler_task
        query = (table.id == task_id)
        task_status = None
        try:
            task_status = db(query).select(table.status).first().status
        except AttributeError:
            # .first() returned None: the task row does not exist yet
            task_status = 'Unknown (task not yet in db)'
        '''
        This is the preferred way to check a task's status since
        it's using the web2py API, but we can't use this
        because the scheduler is pointing to
        current.db (see above comment):
        task_status = scheduler.task_status(task_id, output=True)
        print task_status.scheduler_task.status
        print task_status.result
        print task_status.scheduler_run.run_output
        '''

        if not os.path.exists(log_path):
            os.makedirs(log_path)

        # "a+" appends to the log; assumes "datetime" is imported at
        # module level - TODO confirm against the enclosing module
        with open(os.path.join(log_path, log_name), "a+") as log:
            log.write('<%s>: %s is currently in the %s state\n' %
                      (datetime.datetime.now(), task_name, task_status))
Example #32
0
from itertools import groupby
from gluon import current, DAL, Field, DIV,SELECT
from gluon.validators import *
from gluon.custom_import import track_changes

from bootupauth import BootUpAuth

import math
from datetime import datetime
track_changes(True)


# Application database: pooled SQLite file, migrations disabled,
# identifiers checked against SQLite reserved words
db = DAL('sqlite://bootup.db', pool_size=1, check_reserved=['sqlite'], migrate=False)
# Publish the handle for code that reaches it via gluon.current
current.db = db

auth = BootUpAuth()


def date_mmyy_widget(field, value):
    """Render a date as two dropdowns: month (MM) and 2-digit year (YY).

    @param field: the Field being rendered; names the selects
                  <field>_month / <field>_year
    @param value: current value as a 'YYYY-MM-DD' string, or None

    Returns a DIV of two SELECTs; when a value is given, its month and
    year are prepended so they render as the selected options.
    """
    # List comprehensions instead of map(): on Python 3 map() returns an
    # iterator without .insert(), which made the branch below crash;
    # behaviour on Python 2 is identical.
    monthvals = [str(val).zfill(2) for val in range(1, 12 + 1)]
    yearvals = [str(val)[2:] for val in range(current.request.now.year, current.request.now.year + 7)]

    if value is not None:
        value = datetime.strptime(value, '%Y-%m-%d').date()
        monthvals.insert(0, str(value.month).zfill(2))
        yearvals.insert(0, str(value.year)[2:])

    return DIV(SELECT(monthvals, _name=field.name + "_month"), SELECT(yearvals, _name=field.name + "_year"))


def date_yyyymmdd_widget (field, value):
Example #33
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sys, os, telnetlib, time, datetime


def get_circuits(s='', find_par=''):
    """Extract the 6-character counter that follows *find_par* in the
    switch's fixed-column DSP OFTK output.

    The value starts 35 characters after the start of the label and is
    at most 6 characters wide.

    @param s       : raw response text
    @param find_par: label to search for, e.g. 'Busy'
    @return: the stripped counter, or '' when the label is absent
             (str.find returns -1 on a miss, so the original silently
             sliced garbage from offset 34)
    """
    pos = s.find(find_par)
    if pos == -1:
        return ''
    pos += 35
    return s[pos:pos + 6].strip()


# Make the web2py installation importable so gluon can be found
sys.path.append('/home/www-data/web2py')
from gluon import DAL, Field
db = DAL('sqlite://storage.sqlite',
         folder='/home/www-data/web2py/applications/tgmonitor/databases')
db.define_table('tg_load', Field('check_date', 'datetime'),
                Field('tg_number', length=17), Field('busy', 'integer'),
                Field('installed', 'integer'))

# Switch O&M interface endpoint
host = '10.200.66.70'
port = '6000'
tn = telnetlib.Telnet(host, port)
# Log in, then query trunk-group 44 circuit occupancy
tn.write('LGI:op="monitor",PWD ="dspoftk",SER="10.100.100.104---O&M System";')
ans = tn.read_until('END')

tn.write('DSP OFTK: LT=TG, TG=44, DT=AT;')
ans = tn.read_until('END')

# Parse the fixed-layout counters out of the response
_busy = get_circuits(ans, 'Busy')
_ins_num = get_circuits(ans, 'Installation number')
# Copyright 2011 - Thomas Bellembois [email protected]
# Cecill licence, see LICENSE
# $Id: chimitheque_ide_autocomplete.py 194 2015-02-23 16:27:16Z tbellemb $
# -*- coding: utf-8 -*-
# Standard web2py trick: this block is never executed (guarded by
# "if False") - it only gives IDEs concrete symbols for autocompletion
# of the names that web2py injects into the model/controller namespace.
if False:
    #
    # never imported - just for IDE autocompletion
    #
    from gluon import DAL
    db = DAL()
    from gluon import settings
    from gluon.cache import Cache
    from gluon.dal import Field
    from gluon.globals import Request
    request = Request()
    from gluon.globals import Response
    response = Response()
    from gluon.globals import Session
    session = Session()
    from gluon.html import *
    from gluon.http import HTTP
    from gluon.http import redirect
    from gluon.languages import translator
    T = translator(request)
    from gluon.sqlhtml import SQLFORM
    from gluon.tools import Auth
    auth = Auth()
    from gluon.tools import Crud, Mail
    from gluon.validators import *
    import sys
    mail = Mail()
Example #35
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import sys, os, telnetlib, time, datetime


def get_circuits(s='', find_par=''):
    """Return the 6-character counter located 35 columns after
    *find_par* in the switch output, stripped of padding.

    Returns '' when *find_par* is not found, instead of slicing from
    the bogus offset 34 that str.find's -1 produced in the original.
    """
    pos = s.find(find_par)
    if pos == -1:
        return ''
    return s[pos + 35:pos + 41].strip()



# Make the web2py installation importable so gluon can be found
sys.path.append('/home/www-data/web2py')
from gluon import DAL, Field
db=DAL('sqlite://storage.sqlite', folder='/home/www-data/web2py/applications/tgmonitor/databases')
db.define_table('tg_load',
    Field('check_date','datetime'),
    Field('tg_number',length=17),
    Field('busy', 'integer'),
    Field('installed', 'integer')
    )


# Switch O&M interface endpoint
host = '10.200.66.70'
port = '6000'
tn = telnetlib.Telnet(host,port)
# Log in before issuing queries
tn.write('LGI:op="monitor",PWD ="dspoftk",SER="10.100.100.104---O&M System";')
ans = tn.read_until('END')


# Query trunk-group 44 circuit occupancy
tn.write('DSP OFTK: LT=TG, TG=44, DT=AT;')
Example #36
0
class S3Migration(object):
    """
        Database Migration Toolkit
        - used to help migrate both a production database on a server
          and also an offline client

        Normally run from a script in web2py context, but without models loaded:
        cd web2py
        python web2py.py -S eden -R <script.py>

        Where script looks like:
        m = local_import("s3migration")
        migrate = m.S3Migration()
        migrate.prep(foreigns=[],
                     ondeletes=[],
                     strbools=[],
                     strints=[],
                     uniques=[],
                     )
        #migrate.migrate()
        migrate.post(strbools=[],
                     strints=[],
                     )

        FYI: If you need to access a filename in eden/databases/ then here is how:
        import hashlib
        (db_string, pool_size) = settings.get_database_string()
        prefix = hashlib.md5(db_string).hexdigest()
        filename = "%s_%s.table" % (prefix, tablename)
    """

    def __init__(self):
        """
            Read the deployment settings (models/000_config.py) and open
            a handle to the application database.
        """

        request = current.request

        # Load s3cfg: __import__ returns the top-level package, so walk
        # the dotted path down to the actual module
        name = "applications.%s.modules.s3cfg" % request.application
        s3cfg = __import__(name)
        for item in name.split(".")[1:]:
            # Remove the dot
            s3cfg = getattr(s3cfg, item)
        settings = s3cfg.S3Config()
        # Pass into template
        current.deployment_settings = settings

        # Execute 000_config.py sandboxed so the settings get populated
        model = "%s/models/000_config.py" % request.folder
        code = getcfs(model, model, None)
        environment = build_environment(request, current.response,
                                        current.session)
        environment["settings"] = settings

        def template_path():
            " Return the path of the Template config.py to load "
            path = os.path.join(request.folder,
                                "private", "templates",
                                settings.get_template(),
                                "config.py")
            return path

        # Names that 000_config.py expects in its namespace
        environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage
        restricted(code, environment, layer=model)

        self.db_engine = settings.get_database_type()
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      # @ToDo: Set to False until we migrate
                      migrate_enabled=True,
                      )

    # -------------------------------------------------------------------------
    def prep(self, foreigns=None,
                   ondeletes=None,
                   strbools=None,
                   strints=None,
                   uniques=None,
                   ):
        """
            Preparation before migration

            @param foreigns  : List of tuples (tablename, fieldname) to have the foreign keys removed
                              - if tablename == "all" then all tables are checked
            @param ondeletes : List of tuples (tablename, fieldname, reftable, ondelete) to have the ondelete modified to
            @param strbools  : List of tuples (tablename, fieldname) to convert from string/integer to bools
            @param strints   : List of tuples (tablename, fieldname) to convert from string to integer
            @param uniques   : List of tuples (tablename, fieldname) to have the unique indices removed

            NB: Defaults are None rather than [] to avoid the shared
                mutable-default pitfall; passing [] still works.
        """

        foreigns = foreigns or []
        ondeletes = ondeletes or []
        strbools = strbools or []
        strints = strints or []
        uniques = uniques or []

        # Backup current database
        self.backup()

        # Remove Foreign Key constraints which need to go in next code
        for tablename, fieldname in foreigns:
            self.remove_foreign(tablename, fieldname)

        # Remove Unique indices which need to go in next code
        for tablename, fieldname in uniques:
            self.remove_unique(tablename, fieldname)

        # Modify ondeletes
        # (bugfix: previously iterated "uniques", which both ignored the
        # ondeletes argument and unpacked the wrong tuple arity)
        for tablename, fieldname, reftable, ondelete in ondeletes:
            self.ondelete(tablename, fieldname, reftable, ondelete)

        # Remove fields which need to be altered in next code
        for tablename, fieldname in strbools:
            self.drop(tablename, fieldname)
        for tablename, fieldname in strints:
            self.drop(tablename, fieldname)

        self.db.commit()

    # -------------------------------------------------------------------------
    def migrate(self):
        """
            Perform the migration
            @ToDo
        """

        # Update code: git pull
        # run_models_in(environment)
        # or
        # Set migrate=True in models/000_config.py
        # current.s3db.load_all_models() via applications/eden/static/scripts/tools/noop.py
        # Set migrate=False in models/000_config.py
        pass

    # -------------------------------------------------------------------------
    def post(self, strbools=None,
                   strints=None,
                   ):
        """
            Cleanup after migration: restore the values of converted
            columns from the backup database made by prep()/backup()

            @param strbools : List of tuples (tablename, fieldname) to convert from string/integer to bools
            @param strints : List of tuples (tablename, fieldname) to convert from string to integer
        """

        strbools = strbools or []
        strints = strints or []

        db = self.db

        # @ToDo: Do prepops of new tables

        # Restore data from backup
        folder = "%s/databases/backup" % current.request.folder
        db_bak = DAL("sqlite://backup.db",
                     folder=folder,
                     auto_import=True,
                     migrate=False)

        for tablename, fieldname in strints:
            newtable = db[tablename]
            newrows = db(newtable.id > 0).select(newtable.id)
            oldtable = db_bak[tablename]
            oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                     oldtable[fieldname])
            oldvals = oldrows.as_dict()
            for row in newrows:
                row_id = row.id
                val = oldvals[row_id][fieldname]
                if not val:
                    continue
                try:
                    update_vals = {fieldname: int(val)}
                except (ValueError, TypeError):
                    # Narrowed from a bare except: only conversion
                    # failures are expected here
                    current.log.warning("S3Migrate: Unable to convert %s to an integer - skipping" % val)
                else:
                    db(newtable.id == row_id).update(**update_vals)

        for tablename, fieldname in strbools:
            to_bool = self.to_bool
            newtable = db[tablename]
            newrows = db(newtable.id > 0).select(newtable.id)
            oldtable = db_bak[tablename]
            oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                     oldtable[fieldname])
            oldvals = oldrows.as_dict()
            for row in newrows:
                row_id = row.id
                val = oldvals[row_id][fieldname]
                if not val:
                    continue
                val = to_bool(val)
                if val:
                    db(newtable.id == row_id).update(**{fieldname: val})

        db.commit()

    # -------------------------------------------------------------------------
    @staticmethod
    def to_bool(value):
        """
           Converts 'something' to boolean. Returns None for invalid formats
            Possible True  values: 1, True, "1", "TRue", "yes", "y", "t"
            Possible False values: 0, False, "0", "faLse", "no", "n", "f", 0.0
        """

        val = str(value).lower()
        if val in ("yes", "y", "true", "t", "1"):
            return True
        elif val in ("no", "n", "false", "f", "0", "0.0"):
            return False
        else:
            return None

    # -------------------------------------------------------------------------
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
                   too long for a large DB
        """

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            # Give the OS a moment to release the removed tree
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder)

        # Copy Table structure
        for tablename in db.tables:
            if tablename == "gis_location":
                # Skip the geometry column, which the backup cannot hold
                table = db[tablename]
                fields = [table[field] for field in table.fields if field != "the_geom"]
                db_bak.define_table(tablename, *fields)
            else:
                db_bak.define_table(tablename, db[tablename])

        # Copy Data via a CSV roundtrip
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        filename = "%s/data.csv" % folder
        with open(filename, "w") as backup_file:
            db.export_to_csv_file(backup_file)
        with open(filename, "r") as backup_file:
            db_bak.import_from_csv_file(backup_file, unique="uuid2") # designed to fail
        db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak

    # -------------------------------------------------------------------------
    def drop(self, tablename, fieldname):
        """
            Drop a field from a table
            e.g. for when changing type

            Alters the live database (where supported) and rewrites the
            .table metadata file via a fake migration.
        """

        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # Not Supported: http://www.sqlite.org/lang_altertable.html
            # But also not required (for strints anyway)
            sql = ""

        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \
                dict(tablename=tablename, fieldname=fieldname)

        elif db_engine == "postgres":
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \
                dict(tablename=tablename, fieldname=fieldname)

        if sql:
            # Best-effort: log & continue (previously executed "" for
            # sqlite, which always raised and was then swallowed)
            try:
                db.executesql(sql)
            except Exception:
                import sys
                sys.stderr.write("%s\n" % sys.exc_info()[1])

        # Modify the .table file: redefine the table without the field
        table = db[tablename]
        fields = [table[fn] for fn in table.fields if fn != fieldname]
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # -------------------------------------------------------------------------
    def ondelete(self, tablename, fieldname, reftable, ondelete):
        """
            Modify the ondelete constraint for a foreign key

            @param tablename: table to modify; "all" = every table which
                              has the field
        """

        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue

            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError

            elif db_engine == "mysql":
                # @ToDo: http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                raise NotImplementedError

            elif db_engine == "postgres":
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey, ALTER TABLE %(tablename)s ADD CONSTRAINT %(tablename)s_%(fieldname)s_fkey FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s ON DELETE %(ondelete)s;" % \
                    dict(tablename=tablename, fieldname=fieldname, reftable=reftable, ondelete=ondelete)

            try:
                executesql(sql)
            except Exception:
                # Bugfix: the handler previously printed the undefined
                # name "fk", raising a NameError that masked the error
                import sys
                sys.stderr.write("Error: Table %s with FK %s\n" % (tablename, fieldname))
                sys.stderr.write("%s\n" % sys.exc_info()[1])

    # -------------------------------------------------------------------------
    def remove_foreign(self, tablename, fieldname):
        """
            Remove a Foreign Key constraint from a table

            @param tablename: table to modify; "all" = every table which
                              has the field
        """

        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue

            # Diagnostic fallback: "fk" was unbound in the postgres
            # branch's error path; the mysql branch overwrites it below
            fk = fieldname

            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError

            elif db_engine == "mysql":
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                # Parse the constraint name out of SHOW CREATE TABLE
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \
                    dict(tablename=tablename, fk=fk)

            elif db_engine == "postgres":
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey;" % \
                    dict(tablename=tablename, fieldname=fieldname)

            try:
                executesql(sql)
            except Exception:
                import sys
                sys.stderr.write("Error: Table %s with FK %s\n" % (tablename, fk))
                sys.stderr.write("%s\n" % sys.exc_info()[1])

    # -------------------------------------------------------------------------
    def remove_unique(self, tablename, fieldname):
        """
            Remove a Unique Index from a table
        """

        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # @ToDo: http://www.sqlite.org/lang_altertable.html
            raise NotImplementedError

        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE `%(tablename)s` DROP INDEX `%(fieldname)s`;" % \
                dict(tablename=tablename, fieldname=fieldname)

        elif db_engine == "postgres":
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_key;" % \
                dict(tablename=tablename, fieldname=fieldname)

        try:
            db.executesql(sql)
        except Exception:
            import sys
            sys.stderr.write("%s\n" % sys.exc_info()[1])

        # Modify the .table file: clear the unique flag & fake-migrate
        table = db[tablename]
        fields = []
        for fn in table.fields:
            field = table[fn]
            if fn == fieldname:
                field.unique = False
            fields.append(field)
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # =========================================================================
    # OLD CODE below here
    # - There are tests for these in /tests/dbmigration
    # -------------------------------------------------------------------------
    def rename_field(self,
                     tablename,
                     fieldname_old,
                     fieldname_new,
                     attributes_to_copy=None):
        """
            Rename a field, while keeping the other properties of the field the same.
            If there are some indexes on that table, these will be recreated and other constraints will remain unchanged too.

            @param tablename          : name of the table in which the field is renamed
            @param fieldname_old      : name of the original field before renaming
            @param fieldname_new      : name of the field after renaming
            @param attributes_to_copy : list of attributes which need to be copied from the old_field to the new_field (needed only in sqlite)
        """

        db = self.db
        db_engine = self.db_engine

        if db_engine == "sqlite":
            # SQLite cannot rename columns: add the new column, copy the
            # data across, then recreate any index that used the old name
            self._add_renamed_fields(db, tablename, fieldname_old, fieldname_new, attributes_to_copy)
            self._copy_field(db, tablename, fieldname_old, fieldname_new)
            sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \
                tablename
            list_index = db.executesql(sql)
            for element in list_index:
                search_str = "%s(%s)" % (tablename, fieldname_old)
                if element[0] is not None and search_str in element[0]:
                    sql = "CREATE INDEX %s__idx on %s(%s);" % \
                        (fieldname_new, tablename, fieldname_new)
                    try:
                        db.executesql(sql)
                    except Exception:
                        # Best-effort: the index may already exist
                        pass

        elif db_engine == "mysql":
            field = db[tablename][fieldname_old]
            sql_type = map_type_web2py_to_sql(field.type)
            sql = "ALTER TABLE %s CHANGE %s %s %s(%s)" % (tablename,
                                                          fieldname_old,
                                                          fieldname_new,
                                                          sql_type,
                                                          field.length)
            db.executesql(sql)

        elif db_engine == "postgres":
            sql = "ALTER TABLE %s RENAME COLUMN %s TO %s" % \
                (tablename, fieldname_old, fieldname_new)
            db.executesql(sql)

    # -------------------------------------------------------------------------
    def rename_table(self,
                     tablename_old,
                     tablename_new):
        """
            Rename a table in the database.
            NB: Fields referencing the old name are NOT updated here.

            @param tablename_old : name of the original table before renaming
            @param tablename_new : name of the table after renaming
        """

        try:
            sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old,
                                                    tablename_new)
            self.db.executesql(sql)
        except Exception as e:
            # "except Exception, e" was Python-2-only syntax
            print(e)
import logging
from datetime import datetime, timedelta
from multiprocessing.util import Finalize
from time import time
from anyjson import deserialize, serialize
from celery import schedules
from celery.beat import Scheduler, ScheduleEntry
from celery.utils.timeutils import timedelta_seconds
from celeryconfig import CELERY_RESULT_DBURI

import sys, os
sys.path.append(os.environ['WEB2PY_PATH'])

from gluon import DAL
# CELERY_RESULT_DBURI looks like "<scheme>:///<path>/<dbname>":
# split it into the DAL folder and the connection uri
folder, uri = os.path.split(CELERY_RESULT_DBURI.split(':///')[1])
db=DAL(CELERY_RESULT_DBURI.split(':///')[0]+'://'+uri,folder=folder,
       migrate_enabled=False,auto_import=True)
print 'I found these table: ' +', '.join(db.tables())
# celery_periodictasks must contain exactly one singleton row:
# wipe duplicates, then (re)create it if missing
if db(db.celery_periodictasks).count()>0:
    logging.error('found too many db.celery_periodictasks, deleting them all')
    db(db.celery_periodictasks).delete()
if db(db.celery_periodictasks).count()<1:
    logging.error('found no db.celery_periodictasks, making a singleton')
    db.celery_periodictasks(last_update=datetime.now())

def get_or_make_unique(table,**fields):
    query = reduce(lambda a,b:a&b,[table[key]==value for key,value in fields.items()])
    rows = table._db(query).select(limitby=(0,2))
    if len(rows)>1:
        table._db(query).delete()
        rows=[]
    if len(rows)==1:
Example #38
0
# -*- coding: utf-8 -*-
#01.当py文件中中文时一定要使用上面的一个编码注释
from gluon import DAL, Field
import pyodbc
#02.设置字符串缺省的编码,如果不这样的话str无法使用
import sys
import json
from datetime import datetime

# Python-2-only hack: make utf-8 the process-wide default str encoding
reload(sys)
sys.setdefaultencoding('utf-8')
str_db = u'mssql4://sa:1@localhost/BIDDING'
#03.连接需要用utf8字符集,这样返回的中文结果可直接解码
# (i.e. connect with the utf-8 charset so Chinese results decode directly)
db = DAL(str_db,migrate_enabled=False)
print db._uri
print db._dbname
# Mirror the existing MSSQL schema; all columns declared as plain
# (string) Fields since no migration is performed
db.define_table('BankRecord',Field('Account'),Field('CompanyName'),Field('CreateDate'),Field('Money'),Field('Note'),Field('TradingTime'))
db.define_table('Customer',Field('Address1'),Field('Address2'),Field('CompanyName'),Field('CompanyPhone'),Field('ContactName'),Field('CreationDate'),Field('Email'),Field('EmployeeId'),Field('Fax'),Field('IsDelete'),Field('MobilePhone'),Field('Note'),Field('PassWord'),Field('Position'),Field('Type'),Field('UserName'),Field('ZipCode'))
db.define_table('Employee',Field('Address'),Field('Age'),Field('Code'),Field('CompanyPhone'),Field('CreationDate'),Field('DateOfBirth'),Field('Department'),Field('Email'),Field('EmergencyContactName'),Field('EmergencyContactPhone'),Field('EmergencyContactRelationship'),Field('HomePhone'),Field('IsDelete'),Field('MobilePhone'),Field('Name'),Field('Note'),Field('PassWord'),Field('Position'),Field('SexId'),Field('Type'),Field('UserName'))
db.define_table('Finance',Field('Activity'),Field('CreationDate'),Field('EmployeeId'),Field('Income'),Field('IsDelete'),Field('Note'),Field('ProjectCodeId'),Field('ProtocolCodeId'),Field('Spending'),Field('TargetId'),Field('TitleId'))
db.define_table('Log',Field('Agent'),Field('CreationDate'),Field('Ip'),Field('Kind'),Field('Note'),Field('UserId'))
db.define_table('Management',Field('Code'),Field('CreationDate'),Field('IsDelete'),Field('Name'))
db.define_table('MoneyType',Field('CreationTime'),Field('IsDelete'),Field('Name'))
db.define_table('ProjectCode',Field('CreationTime'),Field('EmployeeId'),Field('IsDelete'),Field('Option1'),Field('Option2'),Field('Option3'),Field('ProjectNumber'),Field('ProjectTypeId'),Field('ProtocolId'))
db.define_table('ProjectResource',Field('CreationTime'),Field('IsDelete'),Field('Name'))
db.define_table('Project',Field('Assistant'),Field('BuyerId'),Field('ChargeRate'),Field('CreationDate'),Field('EmployeeId'),Field('EntrustMoney'),Field('IsDelete'),Field('MakeOutDate'),Field('ManagementStyleId'),Field('Note'),Field('Package'),Field('ProjectCodeId'),Field('ProjectName'),Field('ProjectSourceId'),Field('ProjectTypeId'),Field('ProtocolCodeId'),Field('SigningDate'),Field('SourcesOfFundingId'),Field('StateId'),Field('WinningCompany'),Field('WinningMoney'))
db.define_table('ProjectStatus',Field('CreationTime'),Field('IsDelete'),Field('Name'))
db.define_table('ProtocolCode',Field('CreationTime'),Field('EmployeeId'),Field('IsDelete'),Field('ProtocolNumber'),Field('TypeId'))
db.define_table('ProtocolCodeType',Field('TypeCode'),Field('TypeId'),Field('TypeName'))
db.define_table('Suggest',Field('Content'),Field('CreationTime'),Field('IsDelete'),Field('UserId'))
db.define_table('Task',Field('CreationDate'),Field('Deadline'),Field('EmployeeId'),Field('IsDelete'),Field('Note'),Field('PlaceId'),Field('ProjectId'),Field('StateId'),Field('TitleId'))
Example #39
0
from gluon.utils import md5_hash
from gluon.restricted import RestrictedError, TicketStorage
from gluon import DAL

SLEEP_MINUTES = 5

# Folder where web2py writes on-disk error tickets for this application
errors_path = os.path.join(request.folder, 'errors')

# Optional override of the ticket-storage connection string:
# private/ticket_storage.txt may contain a DAL URI (newlines stripped).
# FIX: use a context manager so the handle is closed, and catch only the
# expected "file missing/unreadable" error instead of a bare except.
try:
    with open(os.path.join(request.folder, 'private', 'ticket_storage.txt')) as _cfg:
        db_string = _cfg.read().replace('\r', '').replace('\n', '').strip()
except IOError:
    # No override configured -> fall back to a local SQLite database
    db_string = 'sqlite://storage.db'

db_path = os.path.join(request.folder, 'databases')

# Open the ticket database and resolve the per-application ticket table
tk_db = DAL(db_string, folder=db_path, auto_import=True)
ts = TicketStorage(db=tk_db)
tk_table = ts._get_table(
    db=tk_db, tablename=ts.tablename, app=request.application)


# Cache of ticket-content hashes (used for de-duplication further below)
hashes = {}

while 1:
    if request.tickets_db:
        print "You're storing tickets yet in database"
        sys.exit(1)

    for file in os.listdir(errors_path):
        filename = os.path.join(errors_path, file)
    def send_heartbeat(self, counter):
        """
            Record this worker's heartbeat in scheduler_worker and act on the
            status stored there (DISABLED / TERMINATE / KILL / ACTIVE).
            Every 5th beat additionally frees dead workers, requeues their
            RUNNING tasks, and re-elects the ticker.

            @param counter: beat counter from the worker's main loop, used
                            for the every-5th-beat housekeeping
        """
        if not self.db_thread:
            logging.debug('thread building own DAL object')
            # The heartbeat runs in its own thread; DAL connections are not
            # shared across threads, so build a private one on first use
            self.db_thread = DAL(self.db._uri, folder=self.db._adapter.folder)
            self.define_tables(self.db_thread, migrate=False)
        try:
            db = self.db_thread
            sw, st = db.scheduler_worker, db.scheduler_task
            now = self.now()
            # "expired": no beat for 3 intervals while ACTIVE
            # "departed": no beat for 3 * MAXHIBERNATION intervals otherwise
            # (hibernating workers beat at a slower rate)
            expiration = now - datetime.timedelta(seconds=self.heartbeat * 3)
            departure = now - datetime.timedelta(seconds=self.heartbeat * 3 *
                                                 MAXHIBERNATION)
            # record heartbeat
            mybackedstatus = db(
                sw.worker_name == self.worker_name).select().first()
            if not mybackedstatus:
                # Not registered yet: insert our worker row
                sw.insert(status=ACTIVE,
                          worker_name=self.worker_name,
                          first_heartbeat=now,
                          last_heartbeat=now,
                          group_names=self.group_names)
                self.worker_status = ACTIVE, 1  #activating the process
            else:
                if mybackedstatus.status == DISABLED:
                    self.worker_status = DISABLED, self.worker_status[
                        1]  #keep sleeping
                    # While disabled, only beat at the maximum hibernation
                    # interval so the row does not get reaped as dead
                    if self.worker_status[1] == MAXHIBERNATION:
                        logging.debug('........recording heartbeat')
                        db(sw.worker_name == self.worker_name).update(
                            last_heartbeat=now)

                elif mybackedstatus.status == TERMINATE:
                    # Finish the current task, then stop
                    self.worker_status = TERMINATE, self.worker_status[1]
                    logging.debug("Waiting to terminate the current task")
                    self.give_up()
                    return
                elif mybackedstatus.status == KILL:
                    # Stop immediately
                    self.worker_status = KILL, self.worker_status[1]
                    self.die()

                else:
                    logging.debug('........recording heartbeat')
                    db(sw.worker_name == self.worker_name).update(
                        last_heartbeat=now, status=ACTIVE)
                    self.worker_status = ACTIVE, 1  #re-activating the process

            self.do_assign_tasks = False
            if counter % 5 == 0:
                # Housekeeping pass, every 5th beat
                try:
                    # delete inactive workers
                    logging.debug(
                        '    freeing workers that have not sent heartbeat')
                    inactive_workers = db(((sw.last_heartbeat < expiration)
                                           & (sw.status == ACTIVE))
                                          | ((sw.last_heartbeat < departure)
                                             & (sw.status != ACTIVE)))
                    # Requeue tasks still assigned to those dead workers
                    db(st.assigned_worker_name.belongs(
                        inactive_workers._select(sw.worker_name)))\
                        (st.status == RUNNING)\
                        .update(assigned_worker_name='',status=QUEUED)
                    inactive_workers.delete()
                    # Re-elect the ticker (the worker that assigns tasks)
                    self.is_a_ticker = self.being_a_ticker()
                    if self.worker_status[0] == ACTIVE:
                        self.do_assign_tasks = True
                except:
                    # Best-effort housekeeping: ignore & retry on a later beat
                    pass
            db.commit()
        except:
            # Any DB error: undo partial writes; the next beat retries
            db.rollback()
        self.adj_hibernation()
        self.sleep()
# Example #41
class S3Migration(object):
    """
        Database Migration Toolkit
        - used to help migrate both a production database on a server
          and also an offline client

        Normally run from a script in web2py context, but without models loaded:
        cd web2py
        python web2py.py -S eden -R <script.py>

        Where script looks like:
        m = local_import("s3migration")
        migrate = m.S3Migration()
        #migrate.pull()
        migrate.prep(foreigns=[],
                     moves=[],
                     news=[],
                     ondeletes=[],
                     strbools=[],
                     strints=[],
                     uniques=[],
                     )
        migrate.migrate()
        migrate.compile()
        migrate.post(moves=[],
                     news=[],
                     strbools=[],
                     strints=[],
                     )

        FYI: If you need to access a filename in eden/databases/ then here is how:
        import hashlib
        (db_string, pool_size) = settings.get_database_string()
        prefix = hashlib.md5(db_string).hexdigest()
        filename = "%s_%s.table" % (prefix, tablename)

        FYI: To view all constraints on a table in MySQL:
        SHOW CREATE TABLE tablename;
        or
        select COLUMN_NAME, CONSTRAINT_NAME, REFERENCED_COLUMN_NAME, REFERENCED_TABLE_NAME
        from information_schema.KEY_COLUMN_USAGE
        where TABLE_NAME = 'module_resourcename';

        @ToDo: Function to ensure that roles match those in prepop
        @ToDo: Function to do selective additional prepop
    """

    def __init__(self):
        """
            Set up the migration environment:
            - load the deployment settings (s3cfg + models/000_config.py)
            - execute 000_config.py inside a web2py restricted environment
            - open a DAL handle to the configured database
        """

        # Load s3cfg
        import s3cfg
        settings = s3cfg.S3Config()

        # Pass into template
        current.deployment_settings = settings

        # Read settings
        request = current.request
        model = "%s/models/000_config.py" % request.folder
        # getcfs returns a cached, compiled code object for 000_config.py
        code = getcfs(model, model, None)
        response = current.response

        # Needed as some Templates look at this & we don't wish to crash:
        response.s3 = Storage()

        # Global variables for 000_config.py
        environment = build_environment(request, response, current.session)
        environment["settings"] = settings
        # Some (older) 000_config.py also use "deployment_settings":
        environment["deployment_settings"] = settings
        # For backwards-compatibility with older 000_config.py:
        #def template_path():
        #    # When you see this warning, you should update 000_config.py
        #    # See: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Templates/Migration#Changesin000_config.py
        #    print "template_path() is deprecated, please update 000_config.py"
        #    # Return just any valid path to make sure the path-check succeeds,
        #    # => modern S3Config will find the template itself
        #    return request.folder
        #environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage

        # Execute 000_config.py
        restricted(code, environment, layer=model)

        # Keep the populated environment for run_model()
        self.environment = environment

        # Remember engine & URI: drop()/ondelete()/remove_* build raw SQL
        self.db_engine = settings.get_database_type()
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      # @ToDo: Set to False until we migrate
                      migrate_enabled=True,
                      )

    # -------------------------------------------------------------------------
    def prep(self, foreigns=None,
                   moves=None,
                   news=None,
                   ondeletes=None,
                   strbools=None,
                   strints=None,
                   uniques=None,
                   ):
        """
            Preparation before migration

            @param foreigns  : List of tuples (tablename, fieldname) to have the foreign keys removed
                              - if tablename == "all" then all tables are checked
            @param moves     : List of dicts {tablename: [(fieldname, new_tablename, link_fieldname)]} to move a field from 1 table to another
                              - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname)
            @param news      : List of dicts {new_tablename: {'lookup_field': '',
                                                              'tables': [tablename: [fieldname]],
                                                              'supers': [tablename: [fieldname]],
                                                              } to create new records from 1 or more old tables (inc all instances of an SE)
                              - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname)
            @param ondeletes : List of tuples [(tablename, fieldname, reftable, ondelete)] to have the ondelete modified
            @param strbools  : List of tuples [(tablename, fieldname)] to convert from string/integer to bools
            @param strints   : List of tuples [(tablename, fieldname)] to convert from string to integer
            @param uniques   : List of tuples [(tablename, fieldname)] to have the unique indices removed
        """

        # Stash the conversion specs for backup() (now) and post() (later),
        # then snapshot the current data
        self.moves = moves
        self.news = news
        self.strbools = strbools
        self.strints = strints
        self.backup()

        # Foreign Key constraints which would block the migration
        for tablename, fieldname in (foreigns or ()):
            self.remove_foreign(tablename, fieldname)

        # Unique indices which would block the migration
        for tablename, fieldname in (uniques or ()):
            self.remove_unique(tablename, fieldname)

        # Adjust ondelete behaviour of foreign keys
        for tablename, fieldname, reftable, ondelete in (ondeletes or ()):
            self.ondelete(tablename, fieldname, reftable, ondelete)

        # Fields which change type are dropped now & recreated by the new code
        for tablename, fieldname in (strbools or ()):
            self.drop(tablename, fieldname)
        for tablename, fieldname in (strints or ()):
            self.drop(tablename, fieldname)

        self.db.commit()

    # -------------------------------------------------------------------------
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
                   too long for a large DB
        """

        moves = self.moves
        news = self.news
        strints = self.strints
        strbools = self.strbools
        if not moves and not news and not strbools and not strints:
            # Nothing to backup
            return

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            shutil.rmtree(folder)
            import time
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder, adapter_args={"foreign_keys": False})

        # Copy Table structure
        skip = []
        for tablename in db.tables:
            if tablename == "gis_location":
                table = db[tablename]
                fields = [table[field] for field in table.fields if field != "the_geom"]
                try:
                    db_bak.define_table(tablename, *fields)
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
            else:
                try:
                    db_bak.define_table(tablename, db[tablename])
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
        while skip:
            _skip = []
            for tablename in skip:
                if tablename == "gis_location":
                    table = db[tablename]
                    fields = [table[field] for field in table.fields if field != "the_geom"]
                    try:
                        db_bak.define_table(tablename, *fields)
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
                else:
                    try:
                        db_bak.define_table(tablename, db[tablename])
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
            skip = _skip

        # Which tables do we need to backup?
        tables = []
        if moves:
            for tablename in moves:
                tables.append(tablename)
        if news:
            for tablename in news:
                new = news[tablename]
                for t in new["tables"]:
                    tables.append(t)
                for s in new["supers"]:
                    tables.append(s)
                    stable = db[s]
                    rows = db(stable._id > 0).select(stable.instance_type)
                    instance_types = set([r.instance_type for r in rows])
                    for t in instance_types:
                        tables.append(t)
        if strbools:
            for tablename, fieldname in strints:
                tables.append(tablename)
        if strints:
            for tablename, fieldname in strints:
                tables.append(tablename)

        # Remove duplicates
        tables = set(tables)

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        for tablename in tables:
            filename = "%s/%s.csv" % (folder, tablename)
            file = open(filename, "w")
            rows = db(db[tablename].id > 0).select()
            rows.export_to_csv_file(file)
            file.close()
            file = open(filename, "r")
            db_bak[tablename].import_from_csv_file(file, unique="uuid2") # uuid2 designed to not hit!
            file.close()
            db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak

    # -------------------------------------------------------------------------
    def pull(self, version=None):
        """
            Update the Eden code from Git

            @param version: optional commit/tag to check out after pulling
        """

        # NOTE: could use GitPython when available; for now we shell out
        # to the git CLI

        # Remember where we are so we can return afterwards
        cwd = os.getcwd()

        # Work inside the Eden application folder
        app_folder = current.request.folder
        os.chdir(os.path.join(cwd, app_folder))

        # Stale bytecode would mask the updated sources
        remove_compiled_application(app_folder)

        # Throw away any local hotfixes
        subprocess.call(["git", "reset", "--hard", "HEAD"])

        # Remember the version we are upgrading from (used by find_script)
        self.old_version = subprocess.check_output(
            ["git", "describe", "--always", "HEAD"]).strip()

        # Fetch & merge the latest code
        subprocess.call(["git", "pull"])

        if version:
            # Pin to the requested version
            subprocess.call(["git", "checkout", version])

        # Restore the original working directory
        os.chdir(cwd)

    # -------------------------------------------------------------------------
    def find_script(self):
        """
            Find the upgrade script(s) to run
        """

        old_version = self.old_version
        if not old_version:
            # Nothing we can do
            return

        # Find the current version
        new_version = subprocess.check_output(["git", "describe", "--always", "HEAD"])
        new_version = new_version.strip()

        # Look for a script to the current version
        path = os.path.join(request.folder, "static", "scripts", "upgrade")

    # -------------------------------------------------------------------------
    def run_model(self):
        """
            Execute all the models/ (unless already done)
        """

        # current.db exists once the models have been run in this interpreter
        if hasattr(current, "db"):
            return
        run_models_in(self.environment)

    # -------------------------------------------------------------------------
    def compile(self):
        """
            Compile the Eden code
        """

        # The compiler needs the base model to have been executed
        self.run_model()

        from gluon.fileutils import up

        request = current.request
        join = os.path.join
        exists = os.path.exists

        # Register themed view templates with the Compiler
        settings = current.deployment_settings
        s3 = current.response.s3
        s3.views = views = {}
        s3.theme = theme = settings.get_theme()
        if theme != "default":
            location = settings.get_template_location()
            views_folder = join(request.folder, location,
                                "templates", theme, "views")
            # delete/merge/review/timeplot currently have no themed variants
            for view in ("create.html",
                         "display.html",
                         "iframe.html",
                         "list.html",
                         "list_filter.html",
                         "map.html",
                         "plain.html",
                         "popup.html",
                         "profile.html",
                         "report.html",
                         "summary.html",
                         "update.html",
                         ):
                if exists(join(views_folder, "_%s" % view)):
                    views[view] = "../%s/templates/%s/views/_%s" % (location, theme, view)

        def apath(path="", r=None):
            """
                Resolve *path* relative to the application folder of
                request *r* (each leading "../" climbs one level higher)
            """

            base = up(r.folder)
            while path[:3] == "../":
                (base, path) = (up(base), path[3:])
            return join(base, path).replace("\\", "/")

        compile_application(apath(request.application, request))

    # -------------------------------------------------------------------------
    def migrate(self):
        """
            Perform an automatic database migration
        """

        # Base model first...
        self.run_model()

        # ...then every conditional model, so that all tables get defined
        # (and hence migrated)
        current.s3db.load_all_models()

    # -------------------------------------------------------------------------
    def post(self, moves=None,
                   news=None,
                   strbools=None,
                   strints=None,
                   ):
        """
            Cleanup after migration: restore/convert data from the SQLite
            backup written by backup() into the (migrated) live database

            @param moves     : List of dicts {tablename: [(fieldname, new_tablename, link_fieldname)]} to move a field from 1 table to another
                              - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname)
            @param news      : List of dicts {new_tablename: {'lookup_field': '',
                                                              'tables': [tablename: [fieldname]],
                                                              'supers': [tablename: [fieldname]],
                                                              } to create new records from 1 or more old tables (inc all instances of an SE)
                              - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname)
            @param strbools : List of tuples [(tablename, fieldname)] to convert from string/integer to bools
            @param strints  : List of tuples [(tablename, fieldname)] to convert from string to integer
        """

        db = self.db

        # @ToDo: Do prepops of new tables

        # Restore data from backup
        folder = "%s/databases/backup" % current.request.folder
        db_bak = DAL("sqlite://backup.db",
                     folder=folder,
                     auto_import=True,
                     migrate=False)

        if moves:
            # Copy field values across to the new table, matching rows
            # through the link field
            for tablename in moves:
                table = db_bak[tablename]
                # NOTE(review): the docstring documents the value as a *list*
                # of 3-tuples, but this unpacks a single 3-tuple - confirm
                # the expected structure
                fieldname, new_tablename, link_fieldname = moves[tablename]
                if isinstance(fieldname, (tuple, list)):
                    # Field is renamed as part of the move
                    fieldname, new_fieldname = fieldname
                else:
                    new_fieldname = fieldname
                old_field = table[fieldname]
                new_linkfield = db[new_tablename][link_fieldname]
                # NOTE(review): link_fieldname is passed to select() as a
                # plain string - confirm pyDAL resolves bare fieldnames here
                rows = db_bak(table._id > 0).select(old_field, link_fieldname)
                for row in rows:
                    update_vars = {}
                    update_vars[new_fieldname] = row[old_field]
                    db(new_linkfield == row[link_fieldname]).update(**update_vars)

        if news:
            # Build new records by merging fields from old tables (and from
            # all instance tables of listed super-entities)
            for tablename in news:
                # Read Data: accumulate per lookup-value dicts of field values
                data = {}
                new = news[tablename]
                lookup_field = new["lookup_field"]
                _tables = new["tables"]
                for t in _tables:
                    fields = _tables[t]
                    # @ToDo: Support tuples
                    #for f in fields:
                    #    if isinstance(f, (tuple, list)):
                    table = db_bak[t]
                    table_fields = [table[f] for f in fields]
                    rows = db_bak(table.deleted == False).select(table[lookup_field],
                                                                 *table_fields)
                    for row in rows:
                        record_id = row[lookup_field]
                        if record_id in data:
                            # Merge into the already-collected record
                            _new = False
                            _data = data[record_id]
                        else:
                            _new = True
                            _data = {}
                        for f in fields:
                            if f in row:
                                if row[f] not in ("", None):
                                    # JSON type doesn't like ""
                                    _data[f] = row[f]
                        if _new:
                            data[record_id] = _data

                for s in new["supers"]:
                    # Collect from the instance records of each super-entity
                    fields = new["supers"][s]
                    # @ToDo: Support tuples
                    #for f in fields:
                    #    if isinstance(f, (tuple, list)):
                    stable = db_bak[s]
                    superkey = stable._id.name
                    rows = db_bak(stable.deleted == False).select(stable._id,
                                                                  stable.instance_type)
                    for row in rows:
                        # Resolve the concrete instance table & record
                        etable = db_bak[row["instance_type"]]
                        _fields = [f for f in fields if f in etable.fields]
                        table_fields = [etable[f] for f in _fields]
                        record = db_bak(etable[superkey] == row[superkey]).select(etable[lookup_field],
                                                                                  *table_fields
                                                                                  ).first()
                        if record:
                            record_id = record[lookup_field]
                            if record_id in data:
                                _new = False
                                _data = data[record_id]
                            else:
                                _new = True
                                _data = {}
                            for f in _fields:
                                if f in record:
                                    if record[f] not in ("", None):
                                        # JSON type doesn't like ""
                                        _data[f] = record[f]
                            if _new:
                                data[record_id] = _data

                # Create Records
                table = db[tablename]
                for record_id in data:
                    update_vars = data[record_id]
                    if update_vars:
                        update_vars[lookup_field] = record_id
                        # Can't rely on the defaults as auto_import doesn't see DAL defaults
                        update_vars["created_on"] = datetime.datetime.utcnow()
                        update_vars["deleted"] = False
                        update_vars["mci"] = 0
                        update_vars["modified_on"] = datetime.datetime.utcnow()
                        update_vars["uuid"] = uuid4().urn # Would always be identical otherwise
                        table.insert(**update_vars)

        if strints:
            # Re-populate dropped/recreated integer fields from the old
            # string values
            for tablename, fieldname in strints:
                newtable = db[tablename]
                newrows = db(newtable.id > 0).select(newtable.id)
                oldtable = db_bak[tablename]
                oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                         oldtable[fieldname])
                oldvals = oldrows.as_dict()
                for row in newrows:
                    _id = row.id
                    val = oldvals[_id][fieldname]
                    if not val:
                        continue
                    try:
                        update_vars = {fieldname : int(val)}
                    except:
                        current.log.warning("S3Migrate: Unable to convert %s to an integer - skipping" % val)
                    else:
                        db(newtable.id == _id).update(**update_vars)

        if strbools:
            # Re-populate dropped/recreated boolean fields from the old
            # string/integer values
            for tablename, fieldname in strbools:
                to_bool = self.to_bool
                newtable = db[tablename]
                newrows = db(newtable.id > 0).select(newtable.id)
                oldtable = db_bak[tablename]
                oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                         oldtable[fieldname])
                oldvals = oldrows.as_dict()
                for row in newrows:
                    _id = row.id
                    val = oldvals[_id][fieldname]
                    if not val:
                        continue
                    val = to_bool(val)
                    if val:
                        # NOTE(review): False results are never written back
                        # (falsy check) - presumably the field default covers
                        # them; confirm
                        update_vars = {fieldname : val}
                        db(newtable.id == _id).update(**update_vars)

        db.commit()

    # -------------------------------------------------------------------------
    @staticmethod
    def to_bool(value):
        """
           Converts 'something' to boolean. Raises exception for invalid formats
            Possible True  values: 1, True, "1", "TRue", "yes", "y", "t"
            Possible False values: 0, False, "0", "faLse", "no", "n", "f", 0.0
        """

        val = str(value).lower()
        if val in ("yes", "y", "true",  "t", "1"):
            return True
        elif val in ("no",  "n", "false", "f", "0", "0.0"):
            return False
        else:
            return None

    # -------------------------------------------------------------------------
    def drop(self, tablename, fieldname):
        """
            Drop a field from a table
            e.g. for when changing type

            @param tablename: table which holds the field
            @param fieldname: field to drop
        """

        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # Not Supported: http://www.sqlite.org/lang_altertable.html
            # But also not required (for strints anyway)
            # FIX: skip the DB call entirely instead of executing "" and
            # relying on the exception handler
            sql = None
        else:
            # Same statement works for both supported engines (and is the
            # standard form for any other):
            # MySQL: http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            # PostgreSQL: http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            # FIX: previously an unknown engine left "sql" unbound (NameError)
            sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \
                dict(tablename=tablename, fieldname=fieldname)

        if sql:
            try:
                db.executesql(sql)
            except:
                # Log & continue: the .table file rebuild below still matters
                import sys
                e = sys.exc_info()[1]
                print >> sys.stderr, e

        # Modify the .table file: rebuild the definition without the field
        table = db[tablename]
        fields = [table[fn] for fn in table.fields if fn != fieldname]
        delattr(db, tablename)  # idiomatic form of db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # -------------------------------------------------------------------------
    def ondelete(self, tablename, fieldname, reftable, ondelete):
        """
            Modify the ondelete constraint for a foreign key

            @param tablename: table with the FK ("all" to check every table)
            @param fieldname: FK field
            @param reftable: table the FK references
            @param ondelete: new ON DELETE behaviour (e.g. "CASCADE")
        """

        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue

            # BUGFIX: fk must be defined before the except-handler below uses
            # it - previously the postgres path raised NameError there,
            # masking the real error
            fk = None

            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError

            elif db_engine == "mysql":
                # MySQL names FK constraints itself, so parse them out of
                # SHOW CREATE TABLE
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`, ALTER TABLE %(tablename)s ADD CONSTRAINT %(fk)s FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s(id) ON DELETE %(ondelete)s;" % \
                    dict(tablename=tablename, fk=fk, fieldname=fieldname, reftable=reftable, ondelete=ondelete)

            elif db_engine == "postgres":
                # PostgreSQL uses the default <table>_<field>_fkey name
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                fk = "%s_%s_fkey" % (tablename, fieldname)
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_fkey, ALTER TABLE %(tablename)s ADD CONSTRAINT %(tablename)s_%(fieldname)s_fkey FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s ON DELETE %(ondelete)s;" % \
                    dict(tablename=tablename, fieldname=fieldname, reftable=reftable, ondelete=ondelete)

            else:
                # FIX: unknown engine previously left "sql" unbound (NameError)
                continue

            try:
                executesql(sql)
            except:
                print "Error: Table %s with FK %s" % (tablename, fk)
                import sys
                e = sys.exc_info()[1]
                print >> sys.stderr, e

    # -------------------------------------------------------------------------
    def remove_foreign(self, tablename, fieldname):
        """
            Remove a Foreign Key constraint from a table

            @param tablename : table holding the FK ("all" to scan every table)
            @param fieldname : name of the foreign key field
        """

        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue

            # Initialised here so the error handler can't hit a NameError
            # (fk was previously unbound in the postgres branch)
            sql = fk = None

            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError

            elif db_engine == "mysql":
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                # Find the auto-generated name of the constraint
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \
                    dict(tablename=tablename, fk=fk)

            elif db_engine == "postgres":
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                # Postgres auto-names FK constraints <table>_<field>_fkey
                fk = "%s_%s_fkey" % (tablename, fieldname)
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(fk)s;" % \
                    dict(tablename=tablename, fk=fk)

            if not sql:
                # Unsupported engine (previously a NameError below)
                continue
            try:
                executesql(sql)
            except Exception:
                import sys
                print("Error: Table %s with FK %s" % (tablename, fk))
                sys.stderr.write("%s\n" % sys.exc_info()[1])

    # -------------------------------------------------------------------------
    def remove_unique(self, tablename, fieldname):
        """
            Remove a Unique Index from a table

            @param tablename : name of the table
            @param fieldname : name of the field with the unique constraint
        """

        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # @ToDo: http://www.sqlite.org/lang_altertable.html
            raise NotImplementedError

        elif db_engine == "mysql":
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE `%(tablename)s` DROP INDEX `%(fieldname)s`;" % \
                dict(tablename=tablename, fieldname=fieldname)

        elif db_engine == "postgres":
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(tablename)s_%(fieldname)s_key;" % \
                dict(tablename=tablename, fieldname=fieldname)

        else:
            # Unsupported engine: previously fell through to a NameError on
            # the unbound sql below
            raise NotImplementedError

        try:
            db.executesql(sql)
        except Exception:
            import sys
            sys.stderr.write("%s\n" % sys.exc_info()[1])

        # Modify the .table file:
        # redefine the table with unique=False on the field
        table = db[tablename]
        fields = []
        for fn in table.fields:
            field = table[fn]
            if fn == fieldname:
                field.unique = False
            fields.append(field)
        delattr(db, tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # =========================================================================
    # OLD CODE below here
    # - There are tests for these in /tests/dbmigration
    # -------------------------------------------------------------------------
    def rename_field(self,
                     tablename,
                     fieldname_old,
                     fieldname_new,
                     attributes_to_copy=None):
        """
            Rename a field while keeping all its other properties unchanged.
            Any index on the field is recreated; other constraints are left
            alone.

            @param tablename          : name of the table in which the field is renamed
            @param fieldname_old      : name of the original field before renaming
            @param fieldname_new      : name of the field after renaming
            @param attributes_to_copy : list of attributes to copy from the old field
                                        to the new one (needed only in sqlite)
        """

        db = self.db
        engine = self.db_engine

        if engine == "sqlite":
            # SQLite can't ALTER a column: add a renamed field, copy the
            # data across, then recreate any index on the old field
            self._add_renamed_fields(db, tablename, fieldname_old, fieldname_new, attributes_to_copy)
            self._copy_field(db, tablename, fieldname_old, fieldname_new)
            indexes = db.executesql(
                "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" %
                tablename)
            needle = "%s(%s)" % (tablename, fieldname_old)
            for (index_sql,) in indexes:
                if index_sql is not None and needle in index_sql:
                    try:
                        db.executesql("CREATE INDEX %s__idx on %s(%s);" %
                                      (fieldname_new, tablename, fieldname_new))
                    except:
                        # Best-effort: index recreation failures are ignored
                        pass

        elif engine == "mysql":
            field = db[tablename][fieldname_old]
            sql_type = map_type_web2py_to_sql(field.type)
            db.executesql("ALTER TABLE %s CHANGE %s %s %s(%s)" % (tablename,
                                                                  fieldname_old,
                                                                  fieldname_new,
                                                                  sql_type,
                                                                  field.length))

        elif engine == "postgres":
            db.executesql("ALTER TABLE %s RENAME COLUMN %s TO %s" %
                          (tablename, fieldname_old, fieldname_new))

    # -------------------------------------------------------------------------
    def rename_table(self,
                     tablename_old,
                     tablename_new):
        """
            Rename a table.
            If any fields reference that table, they will be handled too.

            @param tablename_old : name of the original table before renaming
            @param tablename_new : name of the table after renaming
        """

        try:
            sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old,
                                                    tablename_new)
            self.db.executesql(sql)
        except Exception as e:
            # Best-effort: report & swallow, as the original did
            # (was "except Exception, e" / "print e": Python-2-only syntax)
            print(e)
Example #42
0
from datetime import datetime, timedelta
from multiprocessing.util import Finalize
from time import time
from anyjson import deserialize, serialize
from celery import schedules
from celery.beat import Scheduler, ScheduleEntry
from celery.utils.timeutils import timedelta_seconds
from celeryconfig import CELERY_RESULT_DBURI

import sys, os
sys.path.append(os.environ['WEB2PY_PATH'])

from gluon import DAL
import logging  # was missing: logging.error() below raised a NameError

# CELERY_RESULT_DBURI looks like "sqlite:///path/to/databases/file.db":
# split off the folder for the DAL and keep the bare filename as the URI
folder, uri = os.path.split(CELERY_RESULT_DBURI.split(':///')[1])
db = DAL(CELERY_RESULT_DBURI.split(':///')[0] + '://' + uri,
         folder=folder,
         migrate_enabled=False,
         auto_import=True)
# NOTE(review): db.tables is a list in some DAL versions - confirm it is
# callable in the version in use here
print('I found these table: ' + ', '.join(db.tables()))
# Ensure celery_periodictasks holds exactly one (fresh) singleton record
if db(db.celery_periodictasks).count() > 0:
    logging.error('found too many db.celery_periodictasks, deleting them all')
    db(db.celery_periodictasks).delete()
if db(db.celery_periodictasks).count() < 1:
    logging.error('found no db.celery_periodictasks, making a singleton')
    db.celery_periodictasks(last_update=datetime.now())


def get_or_make_unique(table, **fields):
    query = reduce(lambda a, b: a & b,
                   [table[key] == value for key, value in fields.items()])
    rows = table._db(query).select(limitby=(0, 2))
    if len(rows) > 1:
Example #43
0
class S3Migration(object):
    """
        Database Migration Toolkit
        - used to help migrate both a production database on a server
          and also an offline client

        Normally run from a script in web2py context, but without models loaded:
        cd web2py
        python web2py.py -S eden -R <script.py>

        Where script looks like:
        m = local_import("s3migration")
        migrate = m.S3Migration()
        migrate.prep(foreigns=[],
                     moves=[],
                     news=[],
                     ondeletes=[],
                     strbools=[],
                     strints=[],
                     uniques=[],
                     )
        #migrate.migrate()
        migrate.post(moves=[],
                     news=[],
                     strbools=[],
                     strints=[],
                     )

        FYI: If you need to access a filename in eden/databases/ then here is how:
        import hashlib
        (db_string, pool_size) = settings.get_database_string()
        prefix = hashlib.md5(db_string).hexdigest()
        filename = "%s_%s.table" % (prefix, tablename)

        FYI: To view all constraints on a table in MySQL:
        SHOW CREATE TABLE tablename;
        or
        select COLUMN_NAME, CONSTRAINT_NAME, REFERENCED_COLUMN_NAME, REFERENCED_TABLE_NAME
        from information_schema.KEY_COLUMN_USAGE
        where TABLE_NAME = 'module_resourcename';

        @ToDo: Function to ensure that roles match those in prepop
        @ToDo: Function to do selective additional prepop
    """

    def __init__(self):
        """
            Read the deployment settings (models/000_config.py) and open a
            handle to the database - runs without loading the full model
        """

        request = current.request

        # Load s3cfg => but why do this so complicated?
        #name = "applications.%s.modules.s3cfg" % request.application
        #s3cfg = __import__(name)
        #for item in name.split(".")[1:]:
            ## Remove the dot
            #s3cfg = getattr(s3cfg, item)
        #settings = s3cfg.S3Config()

        # Can use normal import here since executed in web2py environment:
        import s3cfg
        settings = s3cfg.S3Config()

        # Pass into template
        current.deployment_settings = settings

        # Read settings
        model = "%s/models/000_config.py" % request.folder
        # getcfs presumably returns the (cached) compiled code of the model
        # file - TODO confirm against gluon.compileapp
        code = getcfs(model, model, None)
        response = current.response

        # Needed as some Templates look at this & we don't wish to crash:
        response.s3 = Storage()

        # Global variables for 000_config.py
        environment = build_environment(request, response, current.session)
        environment["settings"] = settings
        # Some (older) 000_config.py also use "deployment_settings":
        environment["deployment_settings"] = settings
        # For backwards-compatibility with older 000_config.py:
        #def template_path():
        #    # When you see this warning, you should update 000_config.py
        #    # See: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Templates/Migration#Changesin000_config.py
        #    print "template_path() is deprecated, please update 000_config.py"
        #    # Return just any valid path to make sure the path-check succeeds,
        #    # => modern S3Config will find the template itself
        #    return request.folder
        #environment["template_path"] = template_path
        environment["os"] = os
        environment["Storage"] = Storage

        # Execute 000_config.py
        restricted(code, environment, layer=model)

        # Engine name ("sqlite" / "mysql" / "postgres") used by the other
        # methods to pick the right SQL dialect
        self.db_engine = settings.get_database_type()
        (db_string, pool_size) = settings.get_database_string()

        # Get a handle to the database
        self.db = DAL(db_string,
                      #folder="%s/databases" % request.folder,
                      auto_import=True,
                      # @ToDo: Set to False until we migrate
                      migrate_enabled=True,
                      )

    # -------------------------------------------------------------------------
    def prep(self, foreigns=None,
                   moves=None,
                   news=None,
                   ondeletes=None,
                   strbools=None,
                   strints=None,
                   ):
        """
            Preparation before migration

            @param foreigns  : List of tuples (tablename, fieldname) to have the foreign keys removed
                              - if tablename == "all" then all tables are checked
            @param moves     : List of dicts {tablename: [(fieldname, new_tablename, link_fieldname)]} to move a field from 1 table to another
                              - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname)
            @param news      : List of dicts {new_tablename: {'lookup_field': '',
                                                              'tables': [tablename: [fieldname]],
                                                              'supers': [tablename: [fieldname]],
                                                              } to create new records from 1 or more old tables (inc all instances of an SE)
                              - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname)
            @param ondeletes : List of tuples [(tablename, fieldname, reftable, ondelete)] to have the ondelete modified to
            @param strbools  : List of tuples [(tablename, fieldname)] to convert from string/integer to bools
            @param strints   : List of tuples [(tablename, fieldname)] to convert from string to integer
        """

        # Remember what post() will need to restore, then take a backup
        self.moves = moves
        self.news = news
        self.strbools = strbools
        self.strints = strints
        self.backup()

        # Remove Foreign Key constraints which need to go in next code
        for tablename, fieldname in foreigns or []:
            self.remove_foreign(tablename, fieldname)

        # Remove Unique indices which need to go in next code
        for tablename, fieldname in uniques or []:
            self.remove_unique(tablename, fieldname)

        # Modify ondelete constraints
        for tablename, fieldname, reftable, ondelete in ondeletes or []:
            self.ondelete(tablename, fieldname, reftable, ondelete)

        # Remove fields which need to be altered in next code
        # (their data has been backed up above)
        for tablename, fieldname in strbools or []:
            self.drop(tablename, fieldname)
        for tablename, fieldname in strints or []:
            self.drop(tablename, fieldname)

        self.db.commit()

    # -------------------------------------------------------------------------
    def backup(self):
        """
            Backup the database to a local SQLite database

            - clones the structure of every table, then copies the data
              (via CSV) for those tables which post() will restore from

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
                   too long for a large DB
        """

        moves = self.moves
        news = self.news
        strints = self.strints
        strbools = self.strbools
        if not moves and not news and not strbools and not strints:
            # Nothing to backup
            return

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder, adapter_args={"foreign_keys": False})

        def clone_table(tablename, catch_all):
            """
                Define tablename in db_bak from its definition in db.
                Returns False if a referenced table isn't defined yet
                (caller should retry later), True otherwise.
            """
            if tablename == "gis_location":
                # Omit the_geom: not supported in the SQLite backup
                table = db[tablename]
                fields = [table[field] for field in table.fields
                                       if field != "the_geom"]
            else:
                fields = [db[tablename]]
            try:
                db_bak.define_table(tablename, *fields)
            except KeyError:
                # Can't resolve reference yet
                # Cleanup & try later
                del db_bak[tablename]
                return False
            except Exception:
                if not catch_all:
                    raise
                import sys
                print("Skipping %s: %s" % (tablename, sys.exc_info()[1]))
            return True

        # Copy Table structure, deferring tables whose references
        # can't be resolved yet; retries tolerate other errors too
        skip = [tn for tn in db.tables if not clone_table(tn, False)]
        while skip:
            skip = [tn for tn in skip if not clone_table(tn, True)]

        # Which tables do we need to backup data for?
        tables = []
        if moves:
            for tablename in moves:
                tables.append(tablename)
        if news:
            for tablename in news:
                new = news[tablename]
                for t in new["tables"]:
                    tables.append(t)
                for s in new["supers"]:
                    tables.append(s)
                    # Include every instance table of the super-entity
                    stable = db[s]
                    rows = db(stable._id > 0).select(stable.instance_type)
                    instance_types = set([r.instance_type for r in rows])
                    for t in instance_types:
                        tables.append(t)
        if strbools:
            # Bugfix: this previously looped over strints, so strbool
            # tables were never backed up
            for tablename, fieldname in strbools:
                tables.append(tablename)
        if strints:
            for tablename, fieldname in strints:
                tables.append(tablename)

        # Remove duplicates
        tables = set(tables)

        # Copy Data (CSV round-trip per table)
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        for tablename in tables:
            filename = "%s/%s.csv" % (folder, tablename)
            fout = open(filename, "w")
            rows = db(db[tablename].id > 0).select()
            rows.export_to_csv_file(fout)
            fout.close()
            fin = open(filename, "r")
            db_bak[tablename].import_from_csv_file(fin, unique="uuid2") # uuid2 designed to not hit!
            fin.close()
            db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak

    # -------------------------------------------------------------------------
    def migrate(self):
        """
            Perform the migration

            @ToDo: implement - currently a placeholder which does nothing
        """

        # Intended procedure:
        # - update the code: git pull
        # - run_models_in(environment), or:
        #   - set migrate=True in models/000_config.py
        #   - current.s3db.load_all_models() via
        #     applications/eden/static/scripts/tools/noop.py
        #   - set migrate=False in models/000_config.py
        pass

    # -------------------------------------------------------------------------
    def post(self, moves=None,
                   news=None,
                   strbools=None,
                   strints=None,
                   ):
        """
            Cleanup after migration: restore data saved by backup() into the
            migrated schema

            @param moves     : List of dicts {tablename: [(fieldname, new_tablename, link_fieldname)]} to move a field from 1 table to another
                              - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname)
            @param news      : List of dicts {new_tablename: {'lookup_field': '',
                                                              'tables': [tablename: [fieldname]],
                                                              'supers': [tablename: [fieldname]],
                                                              } to create new records from 1 or more old tables (inc all instances of an SE)
                              - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname)
            @param strbools : List of tuples [(tablename, fieldname)] to convert from string/integer to bools
            @param strints  : List of tuples [(tablename, fieldname)] to convert from string to integer
        """

        db = self.db

        # @ToDo: Do prepops of new tables

        # Restore data from backup
        folder = "%s/databases/backup" % current.request.folder
        db_bak = DAL("sqlite://backup.db",
                     folder=folder,
                     auto_import=True,
                     migrate=False)

        if moves:
            for tablename in moves:
                table = db_bak[tablename]
                # NOTE(review): the docstring describes moves[tablename] as a
                # *list* of 3-tuples, but this unpacks it as a single 3-tuple
                # - confirm which shape callers actually pass
                fieldname, new_tablename, link_fieldname = moves[tablename]
                if isinstance(fieldname, (tuple, list)):
                    # Field is renamed as part of the move
                    fieldname, new_fieldname = fieldname
                else:
                    new_fieldname = fieldname
                old_field = table[fieldname]
                new_linkfield = db[new_tablename][link_fieldname]
                # Copy each backed-up value to the row in the new table which
                # shares the same link-field value
                rows = db_bak(table._id > 0).select(old_field, link_fieldname)
                for row in rows:
                    update_vars = {}
                    update_vars[new_fieldname] = row[old_field]
                    db(new_linkfield == row[link_fieldname]).update(**update_vars)

        if news:
            for tablename in news:
                # Read Data
                # data maps lookup-field value -> merged field values from
                # all source tables
                data = {}
                new = news[tablename]
                lookup_field = new["lookup_field"]
                _tables = new["tables"]
                for t in _tables:
                    fields = _tables[t]
                    # @ToDo: Support tuples
                    #for f in fields:
                    #    if isinstance(f, (tuple, list)):
                    table = db_bak[t]
                    table_fields = [table[f] for f in fields]
                    rows = db_bak(table.deleted == False).select(table[lookup_field],
                                                                 *table_fields)
                    for row in rows:
                        record_id = row[lookup_field]
                        if record_id in data:
                            _new = False
                            _data = data[record_id]
                        else:
                            _new = True
                            _data = {}
                        for f in fields:
                            if f in row:
                                if row[f] not in ("", None):
                                    # JSON type doesn't like ""
                                    _data[f] = row[f]
                        if _new:
                            data[record_id] = _data

                # Merge in values from super-entity instance tables
                for s in new["supers"]:
                    fields = new["supers"][s]
                    # @ToDo: Support tuples
                    #for f in fields:
                    #    if isinstance(f, (tuple, list)):
                    stable = db_bak[s]
                    superkey = stable._id.name
                    rows = db_bak(stable.deleted == False).select(stable._id,
                                                                  stable.instance_type)
                    for row in rows:
                        # Resolve the super-record to its instance record
                        etable = db_bak[row["instance_type"]]
                        _fields = [f for f in fields if f in etable.fields]
                        table_fields = [etable[f] for f in _fields]
                        record = db_bak(etable[superkey] == row[superkey]).select(etable[lookup_field],
                                                                                  *table_fields
                                                                                  ).first()
                        if record:
                            record_id = record[lookup_field]
                            if record_id in data:
                                _new = False
                                _data = data[record_id]
                            else:
                                _new = True
                                _data = {}
                            for f in _fields:
                                if f in record:
                                    if record[f] not in ("", None):
                                        # JSON type doesn't like ""
                                        _data[f] = record[f]
                            if _new:
                                data[record_id] = _data

                # Create Records
                table = db[tablename]
                for record_id in data:
                    update_vars = data[record_id]
                    if update_vars:
                        update_vars[lookup_field] = record_id
                        # Can't rely on the defaults as auto_import doesn't see DAL defaults
                        # NOTE(review): assumes module-level "import datetime"
                        # and "from uuid import uuid4" - not visible in this
                        # chunk, confirm
                        update_vars["created_on"] = datetime.datetime.utcnow()
                        update_vars["deleted"] = False
                        update_vars["mci"] = 0
                        update_vars["modified_on"] = datetime.datetime.utcnow()
                        update_vars["uuid"] = uuid4().urn # Would always be identical otherwise
                        table.insert(**update_vars)

        if strints:
            # Re-type backed-up string values as integers in the new schema
            for tablename, fieldname in strints:
                newtable = db[tablename]
                newrows = db(newtable.id > 0).select(newtable.id)
                oldtable = db_bak[tablename]
                oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                         oldtable[fieldname])
                oldvals = oldrows.as_dict()
                for row in newrows:
                    _id = row.id
                    val = oldvals[_id][fieldname]
                    if not val:
                        continue
                    try:
                        update_vars = {fieldname : int(val)}
                    except:
                        current.log.warning("S3Migrate: Unable to convert %s to an integer - skipping" % val)
                    else:
                        db(newtable.id == _id).update(**update_vars)

        if strbools:
            # Re-type backed-up string/int values as booleans in the new schema
            for tablename, fieldname in strbools:
                to_bool = self.to_bool
                newtable = db[tablename]
                newrows = db(newtable.id > 0).select(newtable.id)
                oldtable = db_bak[tablename]
                oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                         oldtable[fieldname])
                oldvals = oldrows.as_dict()
                for row in newrows:
                    _id = row.id
                    val = oldvals[_id][fieldname]
                    if not val:
                        continue
                    val = to_bool(val)
                    if val:
                        update_vars = {fieldname : val}
                        db(newtable.id == _id).update(**update_vars)

        db.commit()

    # -------------------------------------------------------------------------
    @staticmethod
    def to_bool(value):
        """
           Converts 'something' to boolean. Raises exception for invalid formats
            Possible True  values: 1, True, "1", "TRue", "yes", "y", "t"
            Possible False values: 0, False, "0", "faLse", "no", "n", "f", 0.0
        """

        val = str(value).lower()
        if val in ("yes", "y", "true",  "t", "1"):
            return True
        elif val in ("no",  "n", "false", "f", "0", "0.0"):
            return False
        else:
            return None

    # -------------------------------------------------------------------------
    def drop(self, tablename, fieldname):
        """
            Drop a field from a table
            e.g. for when changing type

            @param tablename : name of the table
            @param fieldname : name of the field to drop
        """

        db = self.db
        db_engine = self.db_engine

        # Modify the database
        if db_engine == "sqlite":
            # Not Supported: http://www.sqlite.org/lang_altertable.html
            # But also not required (for strints anyway)
            sql = None

        elif db_engine in ("mysql", "postgres"):
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %(tablename)s DROP COLUMN %(fieldname)s;" % \
                dict(tablename=tablename, fieldname=fieldname)

        else:
            # Unsupported engine (previously a NameError below)
            sql = None

        if sql:
            try:
                db.executesql(sql)
            except Exception:
                import sys
                sys.stderr.write("%s\n" % sys.exc_info()[1])

        # Modify the .table file:
        # redefine the table without the dropped field
        table = db[tablename]
        fields = [table[fn] for fn in table.fields if fn != fieldname]
        delattr(db, tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *fields,
                        # Rebuild the .table file from this definition
                        fake_migrate=True)

    # -------------------------------------------------------------------------
    def ondelete(self, tablename, fieldname, reftable, ondelete):
        """
            Modify the ondelete constraint for a foreign key

            @param tablename : table holding the FK ("all" to scan every table)
            @param fieldname : name of the foreign key field
            @param reftable  : table referenced by the foreign key
            @param ondelete  : new ON DELETE action (e.g. "CASCADE", "SET NULL")
        """

        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue

            # fk initialised here so the error handler below can't hit a
            # NameError (previously unbound in the postgres branch)
            fk = None
            statements = []

            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError

            elif db_engine == "mysql":
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                # Find the auto-generated name of the constraint
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                # Drop & re-add as 2 separate statements: the previous single
                # string ("ALTER TABLE ... , ALTER TABLE ...") was invalid SQL
                statements = [
                    "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" %
                        dict(tablename=tablename, fk=fk),
                    "ALTER TABLE %(tablename)s ADD CONSTRAINT %(fk)s FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s(id) ON DELETE %(ondelete)s;" %
                        dict(tablename=tablename, fk=fk, fieldname=fieldname, reftable=reftable, ondelete=ondelete),
                    ]

            elif db_engine == "postgres":
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                fk = "%s_%s_fkey" % (tablename, fieldname)
                statements = [
                    "ALTER TABLE %(tablename)s DROP CONSTRAINT %(fk)s;" %
                        dict(tablename=tablename, fk=fk),
                    "ALTER TABLE %(tablename)s ADD CONSTRAINT %(fk)s FOREIGN KEY (%(fieldname)s) REFERENCES %(reftable)s ON DELETE %(ondelete)s;" %
                        dict(tablename=tablename, fk=fk, fieldname=fieldname, reftable=reftable, ondelete=ondelete),
                    ]

            for sql in statements:
                try:
                    executesql(sql)
                except Exception:
                    import sys
                    print("Error: Table %s with FK %s" % (tablename, fk))
                    sys.stderr.write("%s\n" % sys.exc_info()[1])
                    # Don't attempt the ADD if the DROP failed
                    break

    # -------------------------------------------------------------------------
    def remove_foreign(self, tablename, fieldname):
        """
            Remove a Foreign Key constraint from a table

            @param tablename : table to modify ("all" to apply to every
                               table which has the field)
            @param fieldname : name of the foreign key field
        """

        db = self.db
        db_engine = self.db_engine
        executesql = db.executesql

        if tablename == "all":
            tables = db.tables
        else:
            tables = [tablename]

        for tablename in tables:
            if fieldname not in db[tablename].fields:
                continue

            # Modify the database
            if db_engine == "sqlite":
                # @ToDo: http://www.sqlite.org/lang_altertable.html
                raise NotImplementedError

            elif db_engine == "mysql":
                # Look up the auto-generated FK constraint name from the DDL
                # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
                create = executesql("SHOW CREATE TABLE `%s`;" % tablename)[0][1]
                fk = create.split("` FOREIGN KEY (`%s" % fieldname)[0].split("CONSTRAINT `").pop()
                if "`" in fk:
                    fk = fk.split("`")[0]
                sql = "ALTER TABLE `%(tablename)s` DROP FOREIGN KEY `%(fk)s`;" % \
                    dict(tablename=tablename, fk=fk)

            elif db_engine == "postgres":
                # Postgres names FK constraints <table>_<field>_fkey by default
                # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
                fk = "%s_%s_fkey" % (tablename, fieldname)
                sql = "ALTER TABLE %(tablename)s DROP CONSTRAINT %(fk)s;" % \
                    dict(tablename=tablename, fk=fk)

            else:
                # Unsupported engine: neither sql nor fk would be defined below
                raise NotImplementedError

            try:
                executesql(sql)
            except:
                # Bugfix: fk was previously unbound here on the postgres path,
                # turning any SQL error into a NameError
                print "Error: Table %s with FK %s" % (tablename, fk)
                import sys
                e = sys.exc_info()[1]
                print >> sys.stderr, e

    # -------------------------------------------------------------------------
    def remove_unique(self, tablename, fieldname):
        """
            Remove a Unique Index from a table, then rewrite the .table
            migration metadata so DAL no longer expects the constraint

            @param tablename : table holding the unique field
            @param fieldname : field whose unique index is dropped
        """

        db = self.db
        db_engine = self.db_engine

        # Build the engine-specific DDL
        if db_engine == "sqlite":
            # @ToDo: http://www.sqlite.org/lang_altertable.html
            raise NotImplementedError

        elif db_engine == "mysql":
            # MySQL stores UNIQUE as an index named after the field
            # http://dev.mysql.com/doc/refman/5.1/en/alter-table.html
            sql = "ALTER TABLE `%s` DROP INDEX `%s`;" % (tablename, fieldname)

        elif db_engine == "postgres":
            # Postgres names the constraint <table>_<field>_key by default
            # http://www.postgresql.org/docs/9.3/static/sql-altertable.html
            sql = "ALTER TABLE %s DROP CONSTRAINT %s_%s_key;" % \
                (tablename, tablename, fieldname)

        try:
            db.executesql(sql)
        except:
            import sys
            print >> sys.stderr, sys.exc_info()[1]

        # Rebuild the .table file with the unique flag cleared
        table = db[tablename]
        redefined = []
        for name in table.fields:
            column = table[name]
            if name == fieldname:
                column.unique = False
            redefined.append(column)
        db.__delattr__(tablename)
        db.tables.remove(tablename)
        db.define_table(tablename, *redefined,
                        # fake_migrate only rewrites the .table file
                        fake_migrate=True)

    # =========================================================================
    # OLD CODE below here
    # - There are tests for these in /tests/dbmigration
    # -------------------------------------------------------------------------
    def rename_field(self,
                     tablename,
                     fieldname_old,
                     fieldname_new,
                     attributes_to_copy=None):
        """
            Rename a field, while keeping the other properties of the field the same.
            If there are some indexes on that table, these will be recreated and other constraints will remain unchanged too.

            @param tablename          : name of the table in which the field is renamed
            @param fieldname_old      : name of the original field before renaming
            @param fieldname_new      : name of the field after renaming
            @param attributes_to_copy : list of attributes which need to be copied from the old_field to the new_field (needed only in sqlite)
        """

        db = self.db
        db_engine = self.db_engine

        if db_engine == "sqlite":
            # sqlite cannot rename a column in place: add the new column,
            # copy the data across, then recreate any index on the old name
            self._add_renamed_fields(db, tablename, fieldname_old, fieldname_new, attributes_to_copy)
            self._copy_field(db, tablename, fieldname_old, fieldname_new)
            sql = "SELECT sql FROM sqlite_master WHERE type='index' AND tbl_name='%s' ORDER BY name;" % \
                tablename
            needle = "%s(%s)" % (tablename, fieldname_old)
            for row in db.executesql(sql):
                ddl = row[0]
                if ddl is not None and needle in ddl:
                    recreate = "CREATE INDEX %s__idx on %s(%s);" % \
                        (fieldname_new, tablename, fieldname_new)
                    try:
                        db.executesql(recreate)
                    except:
                        pass

        elif db_engine == "mysql":
            # MySQL's CHANGE clause requires the full column type
            field = db[tablename][fieldname_old]
            sql_type = map_type_web2py_to_sql(field.type)
            db.executesql("ALTER TABLE %s CHANGE %s %s %s(%s)" % (tablename,
                                                                  fieldname_old,
                                                                  fieldname_new,
                                                                  sql_type,
                                                                  field.length))

        elif db_engine == "postgres":
            db.executesql("ALTER TABLE %s RENAME COLUMN %s TO %s" % \
                (tablename, fieldname_old, fieldname_new))

    # -------------------------------------------------------------------------
    def rename_table(self,
                     tablename_old,
                     tablename_new):
        """
            Rename a table.
            If any fields reference that table, they will be handled too.

            @param tablename_old : name of the original table before renaming
            @param tablename_new : name of the table after renaming
        """

        try:
            sql = "ALTER TABLE %s RENAME TO %s;" % (tablename_old,
                                                    tablename_new)
            self.db.executesql(sql)
        except Exception, e:
            print e
Example #44
0
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
                   too long for a large DB
        """

        moves = self.moves
        news = self.news
        strints = self.strints
        strbools = self.strbools
        if not moves and not news and not strbools and not strints:
            # Nothing to backup
            return

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            import shutil
            shutil.rmtree(folder)
            import time
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder, adapter_args={"foreign_keys": False})

        # Copy Table structure
        skip = []
        for tablename in db.tables:
            if tablename == "gis_location":
                table = db[tablename]
                fields = [table[field] for field in table.fields if field != "the_geom"]
                try:
                    db_bak.define_table(tablename, *fields)
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
            else:
                try:
                    db_bak.define_table(tablename, db[tablename])
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
        while skip:
            _skip = []
            for tablename in skip:
                if tablename == "gis_location":
                    table = db[tablename]
                    fields = [table[field] for field in table.fields if field != "the_geom"]
                    try:
                        db_bak.define_table(tablename, *fields)
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
                else:
                    try:
                        db_bak.define_table(tablename, db[tablename])
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
            skip = _skip

        # Which tables do we need to backup?
        tables = []
        if moves:
            for tablename in moves:
                tables.append(tablename)
        if news:
            for tablename in news:
                new = news[tablename]
                for t in new["tables"]:
                    tables.append(t)
                for s in new["supers"]:
                    tables.append(s)
                    stable = db[s]
                    rows = db(stable._id > 0).select(stable.instance_type)
                    instance_types = set([r.instance_type for r in rows])
                    for t in instance_types:
                        tables.append(t)
        if strbools:
            for tablename, fieldname in strints:
                tables.append(tablename)
        if strints:
            for tablename, fieldname in strints:
                tables.append(tablename)

        # Remove duplicates
        tables = set(tables)

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        for tablename in tables:
            filename = "%s/%s.csv" % (folder, tablename)
            file = open(filename, "w")
            rows = db(db[tablename].id > 0).select()
            rows.export_to_csv_file(file)
            file.close()
            file = open(filename, "r")
            db_bak[tablename].import_from_csv_file(file, unique="uuid2") # uuid2 designed to not hit!
            file.close()
            db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak
Example #45
0
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Standalone script: query the gate application's database outside web2py.
import sys

# Make the web2py installation importable
sys.path.insert(1, '/home/iotuser/gate/web2py')

from gluon import DAL, Field
from gluon.validators import IS_NOT_EMPTY, IS_EMAIL, IS_NOT_IN_DB, IS_INT_IN_RANGE

# Connect directly to the application's SQLite database
db = DAL('sqlite://storage.sqlite',
         folder='/home/iotuser/gate/web2py/applications/gate/databases')
# Execute the app's model file so its table definitions exist on db
execfile('/home/iotuser/gate/web2py/applications/gate/models/db_gate.py')

# First trusted device joined with its neighborhood entry
tn = db(db.trusted.id == db.neighborhood.neighbor_id).select().first()
print tn

print db.tables
print db().select(db.oui.ALL)
#print db().select(db.security.ALL)
Example #46
0
File: uodb.py Project: BrenCam/uodb
# Standalone script: use web2py's DAL directly for the uodb application.
import os, sys

# Make the web2py installation importable
sdir = '/home/brencam/web2py'
if sdir not in sys.path:
    sys.path.append(sdir)

# ... and the application's own modules
sdir = '/home/brencam/web2py/applications/uodb/modules'
if sdir not in sys.path:
    sys.path.append(sdir)

try:    
    from gluon import DAL, Field
except ImportError as err:
    # NOTE(review): execution continues after this message, so the DAL()
    # call below would then fail with a NameError
    print('gluon path not found') 

#db = DAL('sqlite://storage.sqlite', folder='../databases')
db = DAL('sqlite://storage.sqlite')

migrate=True

db.define_table('patient',
    #Field('mrn', type='string', length=10, required=True, unique=True, label=T('Medical Rec #')),
    Field('mrn', type='string', length=10, required=True, unique=True),
    Field('date_of_birth', type='datetime'),
    Field('fname', type='string', length=25),
    Field('mname', type='string', length=10),
    Field('lname', type='string', length=25),
    Field('ethnicity', type='string', length=8),
    Field('primary_tx', type='string', length=8),
    Field('marital_status', type='string', length=8),
    Field('review_status', type='string', length=8),
    Field('review_date', type='datetime'),
Example #47
0
# -*- coding: utf-8 -*-
#########################################################################
## Define your tables below (or better in another model file) for example
from gluon import DAL, Field
import pyodbc
#02. Set the default string encoding, otherwise str cannot be used
import sys
import json
from gluon.tools import *

# reload(sys)
# sys.setdefaultencoding('utf-8')
# NOTE(review): credentials are hard-coded in the connection string
str_db = u'mssql4://sa:sohunj123@localhost/BIDDING'
#03. The connection needs the utf8 charset, so returned Chinese results can
#    be decoded directly

# Two connections to the same MSSQL database: db for the application
# tables, dbu for the Auth/Crud tables (migrations disabled for both)
db = DAL(str_db,migrate_enabled=False)
dbu = DAL(str_db,migrate_enabled=False)
auth = Auth(dbu)
auth.settings.extra_fields['auth_user']= [Field('chinesename'), Field('code')]
auth.define_tables(username=True)
crud = Crud(dbu)
print db._uri

# Declare the existing MSSQL tables field-by-field so DAL can query them
db.define_table('BankRecord',Field('Account'),Field('CompanyName'),Field('CreateDate'),Field('Money'),Field('Note'),Field('TradingTime'))
db.define_table('BiddingCountType',Field('BiddingCountTypeCode'),Field('BiddingCountTypeId'),Field('BiddingCountTypeName'))
db.define_table('BiddingSiteStatisticType',Field('BiddingSiteStatisticTypeCode'),Field('BiddingSiteStatisticTypeId'),Field('BiddingSiteStatisticTypeName'))
db.define_table('Columns_ProtocolCode',Field('columnlabel'),Field('columnname'),Field('typename'))
db.define_table('Customer',Field('Address1'),Field('Address2'),Field('CompanyName'),Field('CompanyPhone'),Field('ContactName'),Field('CreationDate'),Field('Email'),Field('EmployeeId'),Field('Fax'),Field('IsDelete'),Field('MobilePhone'),Field('Note'),Field('PassWord'),Field('Position'),Field('Type'),Field('UserName'),Field('ZipCode'))
db.define_table('Employee',Field('Address'),Field('Age'),Field('Code'),Field('CompanyPhone'),Field('CreationDate'),Field('DateOfBirth'),Field('Department'),Field('Email'),Field('EmergencyContactName'),Field('EmergencyContactPhone'),Field('EmergencyContactRelationship'),Field('HomePhone'),Field('IsDelete'),Field('MobilePhone'),Field('Name'),Field('Note'),Field('PassWord'),Field('Position'),Field('SexId'),Field('Type'),Field('UserName'))
db.define_table('Finance',Field('Activity'),Field('CreationDate'),Field('EmployeeId'),Field('Income'),Field('IsDelete'),Field('Note'),Field('ProjectCodeId'),Field('ProtocolCodeId'),Field('Spending'),Field('TargetId'),Field('TitleId'))
db.define_table('Log',Field('Agent'),Field('CreationDate'),Field('Ip'),Field('Kind'),Field('Note'),Field('UserId'))
Example #48
0
    def send_heartbeat(self, counter):
        """
            Record this worker's heartbeat in scheduler_worker and react to
            the status stored there (DISABLED/TERMINATE/KILL/STOP_TASK);
            every 5th call also frees dead workers and re-elects the ticker.

            @param counter: loop iteration count, used to throttle cleanup
        """
        if not self.db_thread:
            # Heartbeats run in their own thread: build a dedicated DAL
            # connection rather than sharing self.db across threads
            logger.debug('thread building own DAL object')
            self.db_thread = DAL(self.db._uri, folder=self.db._adapter.folder)
            self.define_tables(self.db_thread, migrate=False)
        try:
            db = self.db_thread
            sw, st = db.scheduler_worker, db.scheduler_task
            now = self.now()
            # record heartbeat
            mybackedstatus = db(
                sw.worker_name == self.worker_name).select().first()
            if not mybackedstatus:
                # First heartbeat: register this worker as ACTIVE
                sw.insert(status=ACTIVE,
                          worker_name=self.worker_name,
                          first_heartbeat=now,
                          last_heartbeat=now,
                          group_names=self.group_names)
                self.worker_status = [ACTIVE, 1]  # activating the process
                mybackedstatus = ACTIVE
            else:
                mybackedstatus = mybackedstatus.status
                if mybackedstatus == DISABLED:
                    # keep sleeping
                    self.worker_status[0] = DISABLED
                    # Only touch the DB once hibernation has maxed out
                    if self.worker_status[1] == MAXHIBERNATION:
                        logger.debug('........recording heartbeat (%s)',
                                     self.worker_status[0])
                        db(sw.worker_name == self.worker_name).update(
                            last_heartbeat=now)
                elif mybackedstatus == TERMINATE:
                    self.worker_status[0] = TERMINATE
                    logger.debug("Waiting to terminate the current task")
                    self.give_up()
                    return
                elif mybackedstatus == KILL:
                    self.worker_status[0] = KILL
                    self.die()
                else:
                    if mybackedstatus == STOP_TASK:
                        logger.info('Asked to kill the current task')
                        self.terminate_process()
                    logger.debug('........recording heartbeat (%s)',
                                 self.worker_status[0])
                    db(sw.worker_name == self.worker_name).update(
                        last_heartbeat=now, status=ACTIVE)
                    self.worker_status[1] = 1  # re-activating the process
                    if self.worker_status[0] != RUNNING:
                        self.worker_status[0] = ACTIVE

            self.do_assign_tasks = False
            if counter % 5 == 0 or mybackedstatus == PICK:
                try:
                    # delete inactive workers
                    expiration = now - datetime.timedelta(
                        seconds=self.heartbeat * 3)
                    departure = now - datetime.timedelta(
                        seconds=self.heartbeat * 3 * MAXHIBERNATION)
                    logger.debug(
                        '    freeing workers that have not sent heartbeat')
                    inactive_workers = db(((sw.last_heartbeat < expiration)
                                           & (sw.status == ACTIVE))
                                          | ((sw.last_heartbeat < departure)
                                             & (sw.status != ACTIVE)))
                    # Requeue tasks still assigned to the dead workers
                    db(st.assigned_worker_name.belongs(
                        inactive_workers._select(sw.worker_name)))(st.status == RUNNING)\
                        .update(assigned_worker_name='', status=QUEUED)
                    inactive_workers.delete()
                    try:
                        self.is_a_ticker = self.being_a_ticker()
                    except:
                        logger.error('Error coordinating TICKER')
                    if self.worker_status[0] == ACTIVE:
                        self.do_assign_tasks = True
                except:
                    logger.error('Error cleaning up')
            db.commit()
        except:
            # Any DB error: roll back and retry on the next beat
            logger.error('Error retrieving status')
            db.rollback()
        self.adj_hibernation()
        self.sleep()
Example #49
0
wa_img_dir = abspath(join(dirname(__file__), pardir, 'private', 'images'))
try:
    makedirs(wa_img_dir)
except os.error:
    pass   # directory already created
for img in listdir(repo_img_dir):
    if splitext(img)[1] == '.jpg':
        copy(join(repo_img_dir, img), wa_img_dir)

# Connect to database
db_dir = abspath(join(dirname(__file__), pardir, 'databases'))
try:
    makedirs(db_dir)
except os.error:
    pass
db = DAL('sqlite://../databases/storage.db', folder=db_dir)

# Create images table.
db.define_table('images',
    Field('represent', type='string', length=100, required=True),
    Field('file_name', type='string', length=100, required=True))

# Insert image file names and meanings.
img_pattern = re.compile(r'(\D+?)\d*\.')
for img in listdir(wa_img_dir):
    match = img_pattern.match(img)
    if match is not None:
        db.images.insert(
            represent=match.group(1).capitalize(),
            file_name=img)
Example #50
0
def main():
    """
    allows to run worker without python web2py.py .... by simply python this.py
    """
    parser = optparse.OptionParser()
    parser.add_option("-w",
                      "--worker_name",
                      dest="worker_name",
                      default=None,
                      help="start a worker with name")
    parser.add_option("-b",
                      "--heartbeat",
                      dest="heartbeat",
                      default=10,
                      type='int',
                      help="heartbeat time in seconds (default 10)")
    parser.add_option(
        "-L",
        "--logger_level",
        dest="logger_level",
        default=30,
        type='int',
        help=
        "set debug output level (0-100, 0 means all, 100 means none;default is 30)"
    )
    parser.add_option(
        "-E",
        "--empty-runs",
        dest="max_empty_runs",
        type='int',
        default=0,
        help="max loops with no grabbed tasks permitted (0 for never check)")
    parser.add_option(
        "-g",
        "--group_names",
        dest="group_names",
        default='main',
        help="comma separated list of groups to be picked by the worker")
    parser.add_option(
        "-f",
        "--db_folder",
        dest="db_folder",
        default='/Users/mdipierro/web2py/applications/scheduler/databases',
        help="location of the dal database folder")
    parser.add_option("-u",
                      "--db_uri",
                      dest="db_uri",
                      default='sqlite://storage.sqlite',
                      help="database URI string (web2py DAL syntax)")
    parser.add_option(
        "-t", "--tasks",dest="tasks",default=None,
        help="file containing task files, must define" + \
            "tasks = {'task_name':(lambda: 'output')} or similar set of tasks")
    (options, args) = parser.parse_args()
    if not options.tasks or not options.db_uri:
        print USAGE
    if options.tasks:
        path, filename = os.path.split(options.tasks)
        if filename.endswith('.py'):
            filename = filename[:-3]
        sys.path.append(path)
        print 'importing tasks...'
        tasks = __import__(filename, globals(), locals(), [], -1).tasks
        print 'tasks found: ' + ', '.join(tasks.keys())
    else:
        tasks = {}
    group_names = [x.strip() for x in options.group_names.split(',')]

    logging.getLogger().setLevel(options.logger_level)

    print 'groups for this worker: ' + ', '.join(group_names)
    print 'connecting to database in folder: ' + options.db_folder or './'
    print 'using URI: ' + options.db_uri
    db = DAL(options.db_uri, folder=options.db_folder)
    print 'instantiating scheduler...'
    scheduler = Scheduler(db=db,
                          worker_name=options.worker_name,
                          tasks=tasks,
                          migrate=True,
                          group_names=group_names,
                          heartbeat=options.heartbeat,
                          max_empty_runs=options.max_empty_runs)
    print 'starting main worker loop...'
    scheduler.loop()
Example #51
0
 def instDB(self, storageFolder, storageConnectionString, autoImport):
     """
         Open the DAL connection for this instance and return it.

         @param storageFolder           : folder holding the database files
         @param storageConnectionString : DAL connection URI
         @param autoImport              : passed through as DAL's auto_import
     """
     folder_path = os.path.abspath(storageFolder)
     self.db = DAL(storageConnectionString,
                   folder=folder_path,
                   auto_import=autoImport)
     return self.db
Example #52
0
    def post(self, moves=None,
                   news=None,
                   strbools=None,
                   strints=None,
                   ):
        """
            Cleanup after migration: restore data from the SQLite backup
            created by backup() into the (re-defined) live database

            @param moves     : List of dicts {tablename: [(fieldname, new_tablename, link_fieldname)]} to move a field from 1 table to another
                              - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname)
            @param news      : List of dicts {new_tablename: {'lookup_field': '',
                                                              'tables': [tablename: [fieldname]],
                                                              'supers': [tablename: [fieldname]],
                                                              } to create new records from 1 or more old tables (inc all instances of an SE)
                              - fieldname can be a tuple if the fieldname changes: (fieldname, new_fieldname)
            @param strbools : List of tuples [(tablename, fieldname)] to convert from string/integer to bools
            @param strints  : List of tuples [(tablename, fieldname)] to convert from string to integer
        """

        db = self.db

        # @ToDo: Do prepops of new tables

        # Restore data from backup
        folder = "%s/databases/backup" % current.request.folder
        db_bak = DAL("sqlite://backup.db",
                     folder=folder,
                     auto_import=True,
                     migrate=False)

        if moves:
            # Copy field values from a backed-up table into their new table,
            # matching rows via the link field
            for tablename in moves:
                table = db_bak[tablename]
                # NOTE(review): unpacks a single (fieldname, new_tablename,
                # link_fieldname) triple, although the docstring describes a
                # list of such tuples -- confirm the expected structure
                fieldname, new_tablename, link_fieldname = moves[tablename]
                if isinstance(fieldname, (tuple, list)):
                    fieldname, new_fieldname = fieldname
                else:
                    new_fieldname = fieldname
                old_field = table[fieldname]
                new_linkfield = db[new_tablename][link_fieldname]
                rows = db_bak(table._id > 0).select(old_field, link_fieldname)
                for row in rows:
                    update_vars = {}
                    update_vars[new_fieldname] = row[old_field]
                    db(new_linkfield == row[link_fieldname]).update(**update_vars)

        if news:
            for tablename in news:
                # Read Data: merge field values from several old tables into
                # one dict per lookup_field value
                data = {}
                new = news[tablename]
                lookup_field = new["lookup_field"]
                _tables = new["tables"]
                for t in _tables:
                    fields = _tables[t]
                    # @ToDo: Support tuples
                    #for f in fields:
                    #    if isinstance(f, (tuple, list)):
                    table = db_bak[t]
                    table_fields = [table[f] for f in fields]
                    rows = db_bak(table.deleted == False).select(table[lookup_field],
                                                                 *table_fields)
                    for row in rows:
                        record_id = row[lookup_field]
                        if record_id in data:
                            _new = False
                            _data = data[record_id]
                        else:
                            _new = True
                            _data = {}
                        for f in fields:
                            if f in row:
                                if row[f] not in ("", None):
                                    # JSON type doesn't like ""
                                    _data[f] = row[f]
                        if _new:
                            data[record_id] = _data

                # Same again for super-entities: resolve each super record to
                # its instance table before reading the fields
                for s in new["supers"]:
                    fields = new["supers"][s]
                    # @ToDo: Support tuples
                    #for f in fields:
                    #    if isinstance(f, (tuple, list)):
                    stable = db_bak[s]
                    superkey = stable._id.name
                    rows = db_bak(stable.deleted == False).select(stable._id,
                                                                  stable.instance_type)
                    for row in rows:
                        etable = db_bak[row["instance_type"]]
                        # Only fields which exist on this instance table
                        _fields = [f for f in fields if f in etable.fields]
                        table_fields = [etable[f] for f in _fields]
                        record = db_bak(etable[superkey] == row[superkey]).select(etable[lookup_field],
                                                                                  *table_fields
                                                                                  ).first()
                        if record:
                            record_id = record[lookup_field]
                            if record_id in data:
                                _new = False
                                _data = data[record_id]
                            else:
                                _new = True
                                _data = {}
                            for f in _fields:
                                if f in record:
                                    if record[f] not in ("", None):
                                        # JSON type doesn't like ""
                                        _data[f] = record[f]
                            if _new:
                                data[record_id] = _data

                # Create Records
                table = db[tablename]
                for record_id in data:
                    update_vars = data[record_id]
                    if update_vars:
                        update_vars[lookup_field] = record_id
                        # Can't rely on the defaults as auto_import doesn't see DAL defaults
                        update_vars["created_on"] = datetime.datetime.utcnow()
                        update_vars["deleted"] = False
                        update_vars["mci"] = 0
                        update_vars["modified_on"] = datetime.datetime.utcnow()
                        update_vars["uuid"] = uuid4().urn # Would always be identical otherwise
                        table.insert(**update_vars)

        if strints:
            # Re-apply backed-up string values as integers
            for tablename, fieldname in strints:
                newtable = db[tablename]
                newrows = db(newtable.id > 0).select(newtable.id)
                oldtable = db_bak[tablename]
                oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                         oldtable[fieldname])
                oldvals = oldrows.as_dict()
                for row in newrows:
                    _id = row.id
                    val = oldvals[_id][fieldname]
                    if not val:
                        continue
                    try:
                        update_vars = {fieldname : int(val)}
                    except:
                        current.log.warning("S3Migrate: Unable to convert %s to an integer - skipping" % val)
                    else:
                        db(newtable.id == _id).update(**update_vars)

        if strbools:
            # Re-apply backed-up string/integer values as booleans
            for tablename, fieldname in strbools:
                to_bool = self.to_bool
                newtable = db[tablename]
                newrows = db(newtable.id > 0).select(newtable.id)
                oldtable = db_bak[tablename]
                oldrows = db_bak(oldtable.id > 0).select(oldtable.id,
                                                         oldtable[fieldname])
                oldvals = oldrows.as_dict()
                for row in newrows:
                    _id = row.id
                    val = oldvals[_id][fieldname]
                    if not val:
                        continue
                    val = to_bool(val)
                    if val:
                        update_vars = {fieldname : val}
                        db(newtable.id == _id).update(**update_vars)

        db.commit()
Example #53
0
    def backup(self):
        """
            Backup the database to a local SQLite database

            @ToDo: Option to use a temporary DB in Postgres/MySQL as this takes
                   too long for a large DB
        """

        moves = self.moves
        news = self.news
        strints = self.strints
        strbools = self.strbools
        if not moves and not news and not strbools and not strints:
            # Nothing to backup
            return

        import os

        db = self.db
        folder = "%s/databases/backup" % current.request.folder

        # Create clean folder for the backup
        if os.path.exists(folder):
            shutil.rmtree(folder)
            import time
            time.sleep(1)
        os.mkdir(folder)

        # Setup backup database
        db_bak = DAL("sqlite://backup.db", folder=folder, adapter_args={"foreign_keys": False})

        # Copy Table structure
        skip = []
        for tablename in db.tables:
            if tablename == "gis_location":
                table = db[tablename]
                fields = [table[field] for field in table.fields if field != "the_geom"]
                try:
                    db_bak.define_table(tablename, *fields)
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
            else:
                try:
                    db_bak.define_table(tablename, db[tablename])
                except KeyError:
                    # Can't resolve reference yet
                    # Cleanup
                    del db_bak[tablename]
                    # Try later
                    skip.append(tablename)
        while skip:
            _skip = []
            for tablename in skip:
                if tablename == "gis_location":
                    table = db[tablename]
                    fields = [table[field] for field in table.fields if field != "the_geom"]
                    try:
                        db_bak.define_table(tablename, *fields)
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
                else:
                    try:
                        db_bak.define_table(tablename, db[tablename])
                    except KeyError:
                        # Can't resolve reference yet
                        # Cleanup
                        del db_bak[tablename]
                        # Try later
                        _skip.append(tablename)
                    except:
                        import sys
                        print "Skipping %s: %s" % (tablename, sys.exc_info()[1])
            skip = _skip

        # Which tables do we need to backup?
        tables = []
        if moves:
            for tablename in moves:
                tables.append(tablename)
        if news:
            for tablename in news:
                new = news[tablename]
                for t in new["tables"]:
                    tables.append(t)
                for s in new["supers"]:
                    tables.append(s)
                    stable = db[s]
                    rows = db(stable._id > 0).select(stable.instance_type)
                    instance_types = set([r.instance_type for r in rows])
                    for t in instance_types:
                        tables.append(t)
        if strbools:
            for tablename, fieldname in strints:
                tables.append(tablename)
        if strints:
            for tablename, fieldname in strints:
                tables.append(tablename)

        # Remove duplicates
        tables = set(tables)

        # Copy Data
        import csv
        csv.field_size_limit(2**20 * 100)  # 100 megs
        for tablename in tables:
            filename = "%s/%s.csv" % (folder, tablename)
            file = open(filename, "w")
            rows = db(db[tablename].id > 0).select()
            rows.export_to_csv_file(file)
            file.close()
            file = open(filename, "r")
            db_bak[tablename].import_from_csv_file(file, unique="uuid2") # uuid2 designed to not hit!
            file.close()
            db_bak.commit()

        # Pass handle back to other functions
        self.db_bak = db_bak
Example #54
0
import sys 
sys.path.append('/home/www-data/web2py')
from gluon import DAL, Field

db4=DAL('postgres://*****:*****@localhost:5433/vpak',migrate_enabled=False, pool_size=20, lazy_tables=True, fake_migrate_all=False)

db4.define_table('geocode',
                 Field('zip', 'string'),
                 Field('city', 'string'),
                 Field('state','string'),
                 Field('longitude','string'),
                 Field('latitude','string'))

flat = '88.10'
flon = '47.1'
fradius = 50

rows = db4.executesql("select city,longitude,latitude, radians(cast(latitude as float)),radians({0}), radians({1}) from geocode where city like 'Bolingbrook'".format(flat, flon))

print rows[1]