Code Example #1
def getReadyPostgres(connstr):
    pool = ConnectionPool('psycopg2', connstr)
    def i1(c):
        try:
            c.execute('''create table sticky (
                    id serial primary key,
                    board_id text,
                    updated timestamp default current_timestamp,
                    note text,
                    x integer,
                    y integer)''')
        except Exception as e:
            log.err(e)
    def i2(c):
        try:
            c.execute('''create table image (
                id serial primary key,
                board_id text,
                updated timestamp default current_timestamp,
                data bytea
            )''')
            c.execute('''create unique index image_board_id on image(board_id)''')
        except Exception as e:
            log.err(e)
    d = pool.runInteraction(i1)
    d.addCallback(lambda x: pool.runInteraction(i2))
    return d.addCallbacks((lambda x:pool), log.err)
Code Example #2
File: toradbapi.py Project: geerk/toradbapi
class ConnectionPool(object):
    """
    Wrapper for twisted.enterprise.adbapi.ConnectionPool to use with tornado.
    """

    def __init__(self, *args, **kwargs):
        self._pool = TxConnectionPool(*args, **kwargs)

    def run_query(self, *args, **kwargs):
        return self._defer_to_future(self._pool.runQuery(*args, **kwargs))

    def run_operation(self, *args, **kwargs):
        return self._defer_to_future(self._pool.runOperation(*args, **kwargs))

    def run_interaction(self, *args, **kwargs):
        return self._defer_to_future(self._pool.runInteraction(*args, **kwargs))

    def close(self):
        self._pool.close()

    @staticmethod
    def _defer_to_future(defer):
        future = TracebackFuture()
        defer.addCallbacks(
            future.set_result,
            lambda failure: future.set_exc_info(
                (failure.type, failure.value, failure.tb)))
        return future
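Because each run_* method returns a Tornado future rather than a Deferred, the pool can be driven straight from a coroutine. A minimal usage sketch, assuming a Tornado-compatible Twisted reactor is already running (e.g. via tornado.platform.twisted.install()); the table name is illustrative:

from tornado import gen

@gen.coroutine
def count_users(pool):
    # run_query resolves to the query's result rows
    rows = yield pool.run_query('SELECT COUNT(*) FROM users')
    raise gen.Return(rows[0][0])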
Code Example #3
def getReadySqlite(connstr):
    pool = ConnectionPool('pysqlite2.dbapi2', connstr,
                          cp_min=1, cp_max=1)
    def interaction(c):
        try:
            c.execute('''create table sticky (
                    id integer primary key,
                    board_id text,
                    updated timestamp default current_timestamp,
                    note text,
                    x integer,
                    y integer)''')
        except Exception as e:
            log.err(e)
        try:
            c.execute('''create table image (
                id integer primary key,
                board_id text,
                updated timestamp default current_timestamp,
                data blob
            )''')
            c.execute('''create unique index image_board_id on image(board_id)''')
        except Exception as e:
            log.err(e)
    return pool.runInteraction(interaction).addCallbacks((lambda x:pool), log.err)
Code Example #4
class ChannelLogger(object):
    implements(IDBLogger)

    def __init__(self, dbfile, **kw):
        # XXX Ignore thread warnings from sqlite3.  Should be OK.
        # http://twistedmatrix.com/trac/ticket/3629
        kw.setdefault("check_same_thread", False)

        from twisted.enterprise.adbapi import ConnectionPool
        type = 'sqlite3'
        self.dbfile = dbfile
        self.dbconn = ConnectionPool(type, dbfile, **kw)
        self.table = 'channels'
        self.initialize_db()

    def initialize_db(self):
        return self.dbconn.runInteraction(self._initialize_db, self.table)

    @staticmethod
    def _initialize_db(tx, table):
        tx.execute('CREATE TABLE IF NOT EXISTS {0} ('
                   'id INTEGER PRIMARY KEY AUTOINCREMENT,'
                   'timestamp INTEGER,'
                   'channel TEXT,'
                   'nick TEXT,'
                   'msg TEXT )'.format(table))

    def log(self, who, chan, msg):
        return self.dbconn.runInteraction(self._log, who, chan, msg, self.table)

    @staticmethod
    def _log(tx, who, chan, msg, table):
        now = int(time.time())
        stmt = 'INSERT INTO {0}(timestamp,nick,channel,msg) VALUES(?,?,?,?)'
        tx.execute(stmt.format(table), (now, who, chan, msg) )
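A brief usage sketch for the logger above (the file name and message are hypothetical; log is twisted.python.log, as elsewhere on this page):

logger = ChannelLogger('channels.db')
d = logger.log('alice', '#example', 'hello world')
d.addErrback(log.err)  # log() returns the Deferred from runInteraction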
Code Example #5
 def _runInteraction(self, interaction, *args, **kw):
     try:
         return ConnectionPool._runInteraction(self, interaction, *args,
                                               **kw)
     except MySQLdb.OperationalError as e:
         if e.args[0] not in (2006, 2013, 1213):
             raise
         # 2006 MySQL server has gone away
         # 2013 Lost connection to MySQL server
         # 1213 Deadlock found when trying to get lock; try restarting transaction
         log.msg("%s got error %s, retrying operation" %
                 (self.__class__.__name__, e))
         conn = self.connections.get(self.threadID())
         self.disconnect(conn)
         # try the interaction again
         return ConnectionPool._runInteraction(self, interaction, *args,
                                               **kw)
     except MySQLdb.InterfaceError as e:
         if e.args[0] not in (0, ):
             raise
         # 0 Interface error (conn gone away or closed)
         log.msg("%s got error %s, retrying operation" %
                 (self.__class__.__name__, e))
         conn = self.connections.get(self.threadID())
         self.disconnect(conn)
         # try the interaction again
         return ConnectionPool._runInteraction(self, interaction, *args,
                                               **kw)
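The method above overrides ConnectionPool._runInteraction, so in its original context it sits on a subclass. A condensed sketch of that enclosing class; the class name is an assumption (the shape matches a widely copied MySQL reconnect recipe):

from twisted.enterprise.adbapi import ConnectionPool
import MySQLdb

class ReconnectingConnectionPool(ConnectionPool):
    """Pool that drops a dead MySQL connection and retries the interaction once."""

    def _runInteraction(self, interaction, *args, **kw):
        try:
            return ConnectionPool._runInteraction(self, interaction, *args, **kw)
        except MySQLdb.OperationalError as e:
            if e.args[0] not in (2006, 2013, 1213):
                raise
            # Discard this thread's broken connection, then retry once.
            conn = self.connections.get(self.threadID())
            self.disconnect(conn)
            return ConnectionPool._runInteraction(self, interaction, *args, **kw)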
Code Example #6
    def open(self):
        """
        Access the underlying database.
        @return: a db2 connection object for this index's underlying data store.
        """
        if not self.initialized:

            self.pool = ConnectionPool(self.dbapiName, *self.dbapiArgs, **self.dbapikwargs)

            # sqlite3 is not thread safe which means we have to close the sqlite3 connections in the same thread that
            # opened them. We need a special thread pool class that has a thread worker function that does a close
            # when a thread is closed.
            if self.dbapiName == "sqlite3":
                self.pool.threadpool.stop()
                self.pool.threadpool = ConnectionClosingThreadPool(1, 1)
                self.pool.threadpool.start()
                self.pool.threadpool.pool = self.pool

            #
            # Set up the schema
            #
            # Create CALDAV table if needed

            try:
                test = (yield self._test_schema_table())
                if test:
                    version = (yield self._db_value_for_sql("select VALUE from CALDAV where KEY = 'SCHEMA_VERSION'"))
                    dbtype = (yield self._db_value_for_sql("select VALUE from CALDAV where KEY = 'TYPE'"))

                    if (version != self._db_version()) or (dbtype != self._db_type()):

                        if dbtype != self._db_type():
                            log.error(
                                "Database {db} has different type ({t1} vs. {t2})",
                                db=self.dbID, t1=dbtype, t2=self._db_type()
                            )

                            # Delete this index and start over
                            yield self._db_remove()
                            yield self._db_init()

                        elif version != self._db_version():
                            log.error(
                                "Database {db} has different schema (v.{v1} vs. v.{v2})",
                                db=self.dbID, v1=version, v2=self._db_version()
                            )

                            # Upgrade the DB
                            yield self._db_upgrade(version)

                else:
                    yield self._db_init()
                self.initialized = True
            except:
                # Clean up upon error so we don't end up leaking threads
                self.pool.close()
                self.pool = None
                raise
Code Example #7
    def __init__(self, dbfile, **kw):
        # XXX Ignore thread warnings from sqlite3.  Should be OK.
        # http://twistedmatrix.com/trac/ticket/3629
        kw.setdefault("check_same_thread", False)

        from twisted.enterprise.adbapi import ConnectionPool
        type = 'sqlite3'
        self.dbfile = dbfile
        self.dbconn = ConnectionPool(type, dbfile, **kw)
        self.table = 'channels'
        self.initialize_db()
Code Example #8
 def test_startedClose(self):
     """
     If L{ConnectionPool.close} is called after it has been started, but
     not by its shutdown trigger, the shutdown trigger is cancelled.
     """
     reactor = EventReactor(True)
     pool = ConnectionPool('twisted.test.test_adbapi', cp_reactor=reactor)
     # There should be a shutdown trigger waiting.
     self.assertEqual(reactor.triggers, [('during', 'shutdown', pool.finalClose)])
     pool.close()
     # But not anymore.
     self.assertFalse(reactor.triggers)
Code Example #9
 def test_unstartedClose(self):
     """
     If L{ConnectionPool.close} is called without L{ConnectionPool.start}
     having been called, the pool's startup event is cancelled.
     """
     reactor = EventReactor(False)
     pool = ConnectionPool("twisted.test.test_adbapi", cp_reactor=reactor)
     # There should be a startup trigger waiting.
     self.assertEqual(reactor.triggers, [("after", "startup", pool._start)])
     pool.close()
     # But not anymore.
     self.assertFalse(reactor.triggers)
Code Example #10
File: test_adbapi.py Project: michaelnt/twisted
 def test_unstartedClose(self):
     """
     If L{ConnectionPool.close} is called without L{ConnectionPool.start}
     having been called, the pool's startup event is cancelled.
     """
     reactor = EventReactor(False)
     pool = ConnectionPool('twisted.test.test_adbapi', cp_reactor=reactor)
     # There should be a startup trigger waiting.
     self.assertEquals(reactor.triggers, [('after', 'startup', pool._start)])
     pool.close()
     # But not anymore.
     self.assertFalse(reactor.triggers)
Code Example #11
File: test_adbapi.py Project: michaelnt/twisted
 def test_startedClose(self):
     """
     If L{ConnectionPool.close} is called after it has been started, but
     not by its shutdown trigger, the shutdown trigger is cancelled.
     """
     reactor = EventReactor(True)
     pool = ConnectionPool('twisted.test.test_adbapi', cp_reactor=reactor)
     # There should be a shutdown trigger waiting.
     self.assertEquals(reactor.triggers, [('during', 'shutdown', pool.finalClose)])
     pool.close()
     # But not anymore.
     self.assertFalse(reactor.triggers)
Code Example #12
 def __init__(self, stats):
     #Instantiate DB
     self.dbpool = ConnectionPool('MySQLdb',
                                  host=SETTINGS['DB_HOST'],
                                  user=SETTINGS['DB_USER'],
                                  passwd=SETTINGS['DB_PASSWD'],
                                  port=SETTINGS['DB_PORT'],
                                  db=SETTINGS['DB_DB'],
                                  charset='utf8',
                                  use_unicode=True,
                                  cursorclass=MySQLdb.cursors.DictCursor)
     self.stats = stats
     dispatcher.connect(self.spider_closed, signals.spider_closed)
Code Example #13
    def connect(self):
        """
        Setup our database connection. Throws if cannot connect.
        """
        print("[DatabaseQuery] Connecting to database"
              "\n    -> database:'%s' user:'******' host:'%s'" %
              (self.postgres_database, self.postgres_user, self.postgres_host))

        # get a connection, if a connect cannot be made an exception will be raised here
        self.dbConnection = ConnectionPool('psycopg2',
                                           database=self.postgres_database,
                                           user=self.postgres_user,
                                           host=self.postgres_host,
                                           password=self.postgres_password)

        print("[DatabaseQuery] Database connection sucsessful")
Code Example #14
    def __init__(self, log, db_location):
        self.log = log

        type = "sqlite"

        self.coordinator = None
        self.histcollector = None
        self._db_location = db_location

        # Note: cp_max=1 is required; otherwise undefined behaviour could occur when using
        # yield in combination with subsequent runQuery or runOperation statements
        if type == "sqlite":
            self.dbpool = ConnectionPool("sqlite3", db_location, check_same_thread=False, cp_max=1)
       
        # Check database schema version and upgrade when required
        self.updatedb('0.2')
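Note that updatedb() returns a Deferred which the constructor discards; the full implementation it invokes is shown in Code Example #40 below.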
Code Example #15
    def test_real_database(self):
        """ Test using a real connection to a database """
        from os import path
        import sqlite3

        db_name = 'UNITTEST.sqlite'
        db_path = path.realpath('UNITTEST.sqlite')

        dbpool = ConnectionPool('sqlite3', db_path, check_same_thread=False)
        db = Database(dbpool)
        table_name = 'test_table'

        # check if the db exists, create it if it doesn't
        if not path.exists(db_path):
            create_table_stmt = 'create table %s (key int primary key, name text)' % (
                table_name)
            yield db.execute(create_table_stmt)

        # remove any unnecessary files after this test case runs
        self.addCleanup(self.remove_test_files, db_path)

        # insert a record into the table
        key = 1
        name = 'test'
        insert_stmt = "insert into %s (key, name) values (%d, '%s')" % (
            table_name, key, name)
        yield db.execute(insert_stmt)

        # use the DBAPI 2.0 module to verify results
        connection = sqlite3.connect(db_name)
        cursor = connection.cursor()
        query_stmt = 'select key, name from %s where key=%d' % (table_name,
                                                                key)
        cursor.execute(query_stmt)
        assert cursor.fetchone() == (key, name)
Code Example #16
    def setup(cls, settings, sync=False):
        cls.redis_cache = None
        conf = settings.get("redis_settings")

        conf = settings.get("postgresql_settings")
        if conf:
            postgres_connection_settings = dict(
                host=conf.host,
                port=conf.port,
                database=conf.database,
                user=conf.username,
                password=conf.password,
                cp_min=1,
                cp_max=conf.poolsize,
                cp_reconnect=True,
                cp_noisy=settings['debug'],
                connection_factory=NamedTupleConnection)
            pg_cpool = ConnectionPool("psycopg2",
                                      **postgres_connection_settings)
            cls.postgresql = pg_cpool
            print pg_cpool

        conf = settings.get("search_engine")

        if conf:
            cls.search_engine = SearchEngine(conf)
            print cls.search_engine

        cls.preferred_db_class = DB_WRAPPER_MAP['postgresql'][0]
        cls._connect = getattr(cls, DB_WRAPPER_MAP['postgresql'][1])
Code Example #17
class MySQLPipeline(object):
    @classmethod
    def from_crawler(cls, crawler):
        return cls(crawler.stats)

    def __init__(self, stats):
        #Instantiate DB
        self.dbpool = ConnectionPool('MySQLdb',
                                     host=SETTINGS['DB_HOST'],
                                     user=SETTINGS['DB_USER'],
                                     passwd=SETTINGS['DB_PASSWD'],
                                     port=SETTINGS['DB_PORT'],
                                     db=SETTINGS['DB_DB'],
                                     charset='utf8',
                                     use_unicode=True,
                                     cursorclass=MySQLdb.cursors.DictCursor)
        self.stats = stats
        dispatcher.connect(self.spider_closed, signals.spider_closed)

    def spider_closed(self, spider):
        """ Cleanup function, called after crawing has finished to close open
            objects.
            Close ConnectionPool. """
        self.dbpool.close()

    def process_item(self, item, spider):
        query = self.dbpool.runInteraction(self._insert_record, item)
        query.addErrback(self._handle_error)
        return item

    def _insert_record(self, tx, item):
        result = tx.execute(
            """INSERT INTO agregator_results (task_id, direct_link, source_link, rank, site, `date`) 
                VALUES (%s, %s, %s, %s, %s, %s)""", (
                item["django_task_id"],
                item["direct_link"],
                item["source_link"],
                item["rank"],
                item["site"],
                item["date"],
            ))
        if result > 0:
            self.stats.inc_value('database/items_added')

    def _handle_error(self, e):
        log.err(e)
Code Example #18
def create_database(dbpath):
    # (Creating the pool here would create the database file and defeat the
    # existence check below, so the pool is only created after the check.)

    if path.exists(dbpath):
        answer = input('%s already exists. Delete? [yes/no]: ' % (dbpath))
        if answer.lower() in ['yes', 'y']:
            remove(dbpath)  # delete old database
        else:
            print('Database will not be created')
            sys.exit()  # don't delete

    # Create tables then exit
    dbpool = ConnectionPool('sqlite3', dbpath, check_same_thread=False)
    db = Database(dbpool)
    candidates = Candidates(db)
    votes = Votes(db, candidates)
    task.react(create_tables, (candidates, votes))
    sys.exit()
Code Example #19
def create_pool(size, filename):
    """ migratins/mydatabase.sqlite """
    pool = ConnectionPool('sqlite3',
                          filename,
                          cp_max=size,
                          check_same_thread=False,
                          cp_noisy=True,
                          cp_reconnect=True)
    return pool
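A runnable usage sketch for the factory above (the filename and schema are illustrative):

from twisted.internet import defer, reactor

@defer.inlineCallbacks
def demo():
    pool = create_pool(5, 'example.sqlite')
    yield pool.runOperation('CREATE TABLE IF NOT EXISTS kv (k TEXT, v TEXT)')
    yield pool.runOperation('INSERT INTO kv VALUES (?, ?)', ('answer', '42'))
    rows = yield pool.runQuery('SELECT v FROM kv WHERE k = ?', ('answer',))
    print(rows)  # [('42',)]
    pool.close()
    reactor.stop()

reactor.callWhenRunning(demo)
reactor.run()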
Code Example #20
File: history.py Project: wiseteck/HouseAgent
 def check_archive_db(self):
     """
     create non-existent archive_YYYY_MM db file
     """
     if os.path.exists(self.db_path):
         self.dbpool = ConnectionPool(self.type,
                                      self.db_path,
                                      check_same_thread=False,
                                      cp_max=1)
     else:
         self.create_archive_db()
         try:
             self.dbpool = ConnectionPool(self.type,
                                          self.db_path,
                                          check_same_thread=False,
                                          cp_max=1)
         except Exception as err:
             self.log.debug("dbpool exc: %s" % err)
             os._exit(1)
         self.prepare_archive_db()
Code Example #21
    def __init__(self, dbfile, **kw):
        # XXX Ignore thread warnings from sqlite3.  Should be OK.
        # http://twistedmatrix.com/trac/ticket/3629
        kw.setdefault("check_same_thread", False)

        from twisted.enterprise.adbapi import ConnectionPool
        type = 'sqlite3'
        self.dbfile = dbfile
        self.dbconn = ConnectionPool(type, dbfile, **kw)
        self.table = 'channels'
        self.initialize_db()
Code Example #22
File: database.py Project: eventable/CalendarServer
    def open(self):
        """
        Access the underlying database.
        @return: a db2 connection object for this index's underlying data store.
        """
        if not self.initialized:

            self.pool = ConnectionPool(self.dbapiName, *self.dbapiArgs, **self.dbapikwargs)

            # sqlite3 is not thread safe which means we have to close the sqlite3 connections in the same thread that
            # opened them. We need a special thread pool class that has a thread worker function that does a close
            # when a thread is closed.
            if self.dbapiName == "sqlite3":
                self.pool.threadpool.stop()
                self.pool.threadpool = ConnectionClosingThreadPool(1, 1)
                self.pool.threadpool.start()
                self.pool.threadpool.pool = self.pool

            #
            # Set up the schema
            #
            # Create CALDAV table if needed

            try:
                test = (yield self._test_schema_table())
                if test:
                    version = (yield self._db_value_for_sql("select VALUE from CALDAV where KEY = 'SCHEMA_VERSION'"))
                    dbtype = (yield self._db_value_for_sql("select VALUE from CALDAV where KEY = 'TYPE'"))

                    if (version != self._db_version()) or (dbtype != self._db_type()):

                        if dbtype != self._db_type():
                            log.error("Database %s has different type (%s vs. %s)"
                                      % (self.dbID, dbtype, self._db_type()))

                            # Delete this index and start over
                            yield self._db_remove()
                            yield self._db_init()

                        elif version != self._db_version():
                            log.error("Database %s has different schema (v.%s vs. v.%s)"
                                      % (self.dbID, version, self._db_version()))

                            # Upgrade the DB
                            yield self._db_upgrade(version)

                else:
                    yield self._db_init()
                self.initialized = True
            except:
                # Clean up upon error so we don't end up leaking threads
                self.pool.close()
                self.pool = None
                raise
Code Example #23
File: database.py Project: kronat/Marnatarlo
 def __init__(self, dbname):
     self.dbname = dbname
     try:
         fh = open(dbname)
         fh.close()
     except IOError as e:
         conn = sqlite3.connect(dbname)
         curs = conn.cursor()
         curs.execute("Create table users (name text unique, password text)")
         curs.execute("Create table stats(name text, played INTEGER, won INTEGER, FOREIGN KEY(name) REFERENCES users(name))")
         conn.commit()
         curs.close()
     self.__dbpool = ConnectionPool('sqlite3', self.dbname)
Code Example #24
def main():
    # SQLite "connection"
    db_pool = ConnectionPool('sqlite3',
                             'sportsteam.sqlite',
                             check_same_thread=False)

    # web template engine
    jinja_env = Environment(loader=FileSystemLoader('./templates'),
                            autoescape=select_autoescape(['html']))

    app = Root(jinja_env, db_pool)
    app.router.run('0.0.0.0', 8888)
Code Example #25
def runserver(dbpath, host, port, logpath):
    dbpool = ConnectionPool('sqlite3', dbpath, check_same_thread=False)
    app = Application(dbpool)
    print('Database: %s' % (dbpath))

    if logpath:
        logfile = open(logpath, 'a')
        print('Log File: %s' % (logpath))
    else:
        logfile = None

    print('Host: %s\nPort: %d\n' % (host, port))
    app.run(host, port, logfile)
Code Example #26
File: sqlmagic.py Project: nyov/scrapyext
	def _runInteraction(self, interaction, *args, **kw):
		try:
			return ConnectionPool._runInteraction(self, interaction, *args, **kw)
		except MySQLdb.OperationalError as e:
			if e.args[0] not in (2006, 2013, 1213):
				raise
			# 2006 MySQL server has gone away
			# 2013 Lost connection to MySQL server
			# 1213 Deadlock found when trying to get lock; try restarting transaction
			log.msg("%s got error %s, retrying operation" % (self.__class__.__name__, e))
			conn = self.connections.get(self.threadID())
			self.disconnect(conn)
			# try the interaction again
			return ConnectionPool._runInteraction(self, interaction, *args, **kw)
		except MySQLdb.InterfaceError as e:
			if e.args[0] not in (0,):
				raise
			# 0 Interface error (conn gone away or closed)
			log.msg("%s got error %s, retrying operation" % (self.__class__.__name__, e))
			conn = self.connections.get(self.threadID())
			self.disconnect(conn)
			# try the interaction again
			return ConnectionPool._runInteraction(self, interaction, *args, **kw)
Code Example #27
File: database.py Project: rrada/HouseAgent
    def __init__(self, log, db_location):
        self.log = log

        type = "sqlite"

        self.coordinator = None

        # Note: cp_max=1 is required; otherwise undefined behaviour could occur when using
        # yield in combination with subsequent runQuery or runOperation statements
        if type == "sqlite":
            self.dbpool = ConnectionPool("sqlite3", db_location, check_same_thread=False, cp_max=1)
       
        # Check database schema version and upgrade when required
        self.updatedb('0.1')
Code Example #28
File: ircbot.py Project: koikonom/ircbot
 def init_db(self):
     self.db = ConnectionPool('sqlite3', db_file)
     yield self.db.runQuery('''CREATE TABLE IF NOT EXISTS QUOTES (ID INTEGER PRIMARY KEY,
                                                 NICK TEXT,
                                                 QUOTE TEXT collate nocase,
                                                 QUOTE_DT NUMERIC,
                                                 ADDED_BY TEXT,
                                                 CHANNEL TEXT
                                                 )''')
     yield self.db.runQuery('''CREATE TABLE IF NOT EXISTS URLS (ID INTEGER PRIMARY KEY,
                                                 URL TEXT collate nocase,
                                                 URL_DT NUMERIC,
                                                 ADDED_BY TEXT,
                                                 CHANNEL TEXT
                                                 )''')
Code Example #29
class ChannelLogger(object):
    implements(IDBLogger)

    def __init__(self, dbfile, **kw):
        # XXX Ignore thread warnings from sqlite3.  Should be OK.
        # http://twistedmatrix.com/trac/ticket/3629
        kw.setdefault("check_same_thread", False)

        from twisted.enterprise.adbapi import ConnectionPool
        type = 'sqlite3'
        self.dbfile = dbfile
        self.dbconn = ConnectionPool(type, dbfile, **kw)
        self.table = 'channels'
        self.initialize_db()

    def initialize_db(self):
        return self.dbconn.runInteraction(self._initialize_db, self.table)

    @staticmethod
    def _initialize_db(tx, table):
        tx.execute('CREATE TABLE IF NOT EXISTS {0} ('
                   'id INTEGER PRIMARY KEY AUTOINCREMENT,'
                   'timestamp INTEGER,'
                   'channel TEXT,'
                   'nick TEXT,'
                   'msg TEXT )'.format(table))

    def log(self, who, chan, msg):
        return self.dbconn.runInteraction(self._log, who, chan, msg,
                                          self.table)

    @staticmethod
    def _log(tx, who, chan, msg, table):
        now = int(time.time())
        stmt = 'INSERT INTO {0}(timestamp,nick,channel,msg) VALUES(?,?,?,?)'
        tx.execute(stmt.format(table), (now, who, chan, msg))
Code Example #30
File: _store.py Project: varenc/ranger-ims-server
    def _db(self) -> ConnectionPool:
        if self._state.db is None:
            db = ConnectionPool(
                "pymysql",
                host=self.hostName,
                port=self.hostPort,
                database=self.database,
                user=self.username,
                password=self.password,
                cursorclass=Cursor,
                cp_reconnect=True,
            )

            # self._upgradeSchema(db)

            self._state.db = db

        return self._state.db
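Callers then use the lazily built pool like any other adbapi pool; a hypothetical query against it (store, event_id, and the table are invented for illustration; pymysql uses %s-style placeholders):

d = store._db.runQuery('SELECT NAME FROM EVENT WHERE ID = %s', (event_id,))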
Code Example #31
 def setup(cls, settings):
     conf = settings.get("postgresql_settings")
     if conf:
         postgres_connection_settings = dict(
             host=conf.host,
             port=conf.port,
             database=conf.database,
             user=conf.username,
             password=conf.password,
             cp_min=1,
             cp_max=conf.poolsize,
             cp_reconnect=True,
             cp_noisy=settings['debug'],
             connection_factory=NamedTupleConnection)
         pg_cpool = ConnectionPool("psycopg2",
                                   **postgres_connection_settings)
         cls.postgresql = pg_cpool
         print pg_cpool
     cls.preferred_db_class = PostgresDatabase
Code Example #32
File: dbpool.py Project: xdrew/twoost
 def connect(self):
     new_connection = self.threadID() not in self.connections
     conn = ConnectionPool.connect(self)
     if new_connection:
         self.prepare_connection(conn)
     return conn
Code Example #33
File: dbpool.py Project: xdrew/twoost
 def __init__(self, *args, **kwargs):
     # Pop cp_init_conn before calling ConnectionPool.__init__, so the extra
     # option is never passed through to the underlying DB-API driver.
     self.cp_init_conn = kwargs.pop('cp_init_conn', None)
     self._database = kwargs.get('database') or kwargs.get('db')
     if isinstance(self.cp_init_conn, basestring):
         self.cp_init_conn = reflect.namedAny(self.cp_init_conn)
     ConnectionPool.__init__(self, *args, **kwargs)
Code Example #34
 def connect(self) -> Connection:
     connection = ConnectionPool.connect(self)
     connection.ping(reconnect=True)
     return connection
Code Example #35
File: dbpool.py Project: wgnet/twoost
 def __init__(self, *args, **kwargs):
     # Pop cp_init_conn before calling ConnectionPool.__init__, so the extra
     # option is never passed through to the underlying DB-API driver.
     self.cp_init_conn = kwargs.pop('cp_init_conn', None)
     self._database = kwargs.get('database') or kwargs.get('db')
     if isinstance(self.cp_init_conn, basestring):
         self.cp_init_conn = reflect.namedAny(self.cp_init_conn)
     ConnectionPool.__init__(self, *args, **kwargs)
Code Example #36
 def makePool(self, **newkw):
     """Create a connection pool with additional keyword arguments."""
     args, kw = self.getPoolArgs()
     kw = kw.copy()
     kw.update(newkw)
     return ConnectionPool(*args, **kw)
Code Example #37
class AbstractADBAPIDatabase(object):
    """
    A generic SQL database.
    """

    def __init__(self, dbID, dbapiName, dbapiArgs, persistent, **kwargs):
        """
        
        @param persistent: C{True} if the data in the DB must be preserved during upgrades,
            C{False} if the DB data can be re-created from an external source.
        @type persistent: bool
        """
        self.dbID = dbID
        self.dbapiName = dbapiName
        self.dbapiArgs = dbapiArgs
        self.dbapikwargs = kwargs

        self.persistent = persistent
        
        self.initialized = False

    def __repr__(self):
        return "<%s %r>" % (self.__class__.__name__, self.pool)

    @inlineCallbacks
    def open(self):
        """
        Access the underlying database.
        @return: a db2 connection object for this index's underlying data store.
        """
        if not self.initialized:

            self.pool = ConnectionPool(self.dbapiName, *self.dbapiArgs, **self.dbapikwargs)
            
            # sqlite3 is not thread safe which means we have to close the sqlite3 connections in the same thread that
            # opened them. We need a special thread pool class that has a thread worker function that does a close
            # when a thread is closed.
            if self.dbapiName == "sqlite3":
                self.pool.threadpool.stop()
                self.pool.threadpool = ConnectionClosingThreadPool(1, 1)
                self.pool.threadpool.start()
                self.pool.threadpool.pool = self.pool

            #
            # Set up the schema
            #
            # Create CALDAV table if needed

            test = (yield self._test_schema_table())
            if test:
                version = (yield self._db_value_for_sql("select VALUE from CALDAV where KEY = 'SCHEMA_VERSION'"))
                dbtype = (yield self._db_value_for_sql("select VALUE from CALDAV where KEY = 'TYPE'"))

                if (version != self._db_version()) or (dbtype != self._db_type()):

                    if dbtype != self._db_type():
                        log.err("Database %s has different type (%s vs. %s)"
                                % (self.dbID, dbtype, self._db_type()))

                        # Delete this index and start over
                        yield self._db_remove()
                        yield self._db_init()

                    elif version != self._db_version():
                        log.err("Database %s has different schema (v.%s vs. v.%s)"
                                % (self.dbID, version, self._db_version()))
                        
                        # Upgrade the DB
                        yield self._db_upgrade(version)

            else:
                yield self._db_init()
            self.initialized = True

    def close(self):
        
        if self.initialized:
            self.pool.close()
            self.pool = None
            self.initialized = False

    @inlineCallbacks
    def clean(self):
        
        if not self.initialized:
            yield self.open()

        yield self._db_empty_data_tables()

    @inlineCallbacks
    def execute(self, sql, *query_params):
        
        if not self.initialized:
            yield self.open()

        yield self._db_execute(sql, *query_params)

    @inlineCallbacks
    def executescript(self, script):
        
        if not self.initialized:
            yield self.open()

        yield self._db_execute_script(script)

    @inlineCallbacks
    def query(self, sql, *query_params):
        
        if not self.initialized:
            yield self.open()

        result = (yield self._db_all_values_for_sql(sql, *query_params))
        returnValue(result)

    @inlineCallbacks
    def queryList(self, sql, *query_params):
        
        if not self.initialized:
            yield self.open()

        result = (yield self._db_values_for_sql(sql, *query_params))
        returnValue(result)

    @inlineCallbacks
    def queryOne(self, sql, *query_params):
        
        if not self.initialized:
            yield self.open()

        result = (yield self._db_value_for_sql(sql, *query_params))
        returnValue(result)

    def _db_version(self):
        """
        @return: the schema version assigned to this DB.
        """
        raise NotImplementedError
        
    def _db_type(self):
        """
        @return: the collection type assigned to this DB.
        """
        raise NotImplementedError
        
    def _test_schema_table(self):
        return self._test_table("CALDAV")

    @inlineCallbacks
    def _db_init(self):
        """
        Initialise the underlying database tables.
        """
        log.msg("Initializing database %s" % (self.dbID,))

        # TODO we need an exclusive lock of some kind here to prevent a race condition
        # in which multiple processes try to create the tables.
        

        yield self._db_init_schema_table()
        yield self._db_init_data_tables()
        yield self._db_recreate()

    @inlineCallbacks
    def _db_init_schema_table(self):
        """
        Initialise the underlying database tables.
        @param db_filename: the file name of the index database.
        @param q:           a database cursor to use.
        """

        #
        # CALDAV table keeps track of our schema version and type
        #
        yield self._create_table("CALDAV", (
            ("KEY", "text unique"),
            ("VALUE", "text unique"),
        ), True)

        yield self._db_execute(
            """
            insert or ignore into CALDAV (KEY, VALUE)
            values ('SCHEMA_VERSION', :1)
            """, (self._db_version(),)
        )
        yield self._db_execute(
            """
            insert or ignore into CALDAV (KEY, VALUE)
            values ('TYPE', :1)
            """, (self._db_type(),)
        )

    def _db_init_data_tables(self):
        """
        Initialise the underlying database tables.
        """
        raise NotImplementedError

    def _db_empty_data_tables(self):
        """
        Delete the database tables.
        """

        # Implementations can override this to re-create data
        pass
        
    def _db_recreate(self):
        """
        Recreate the database tables.
        """

        # Implementations can override this to re-create data
        pass

    @inlineCallbacks
    def _db_upgrade(self, old_version):
        """
        Upgrade the database tables.
        """
        
        if self.persistent:
            yield self._db_upgrade_data_tables(old_version)
            yield self._db_upgrade_schema()
        else:
            # Non-persistent DB's by default can be removed and re-created. However, for simple
            # DB upgrades they SHOULD override this method and handle those for better performance.
            yield self._db_remove()
            yield self._db_init()
    
    def _db_upgrade_data_tables(self, old_version):
        """
        Upgrade the data from an older version of the DB.
        """
        # Persistent DB's MUST override this method and do a proper upgrade. Their data
        # cannot be thrown away.
        raise NotImplementedError("Persistent databases MUST support an upgrade method.")

    @inlineCallbacks
    def _db_upgrade_schema(self):
        """
        Upgrade the stored schema version to the current one.
        """
        yield self._db_execute("insert or replace into CALDAV (KEY, VALUE) values ('SCHEMA_VERSION', :1)", (self._db_version(),))

    @inlineCallbacks
    def _db_remove(self):
        """
        Remove all database information (all the tables)
        """
        yield self._db_remove_data_tables()
        yield self._db_remove_schema()

    def _db_remove_data_tables(self):
        """
        Remove all the data from an older version of the DB.
        """
        raise NotImplementedError("Each database must remove its own tables.")

    @inlineCallbacks
    def _db_remove_schema(self):
        """
        Remove the stored schema version table.
        """
        yield self._db_execute("drop table if exists CALDAV")

    @inlineCallbacks
    def _db_all_values_for_sql(self, sql, *query_params):
        """
        Execute an SQL query and obtain the resulting values.
        @param sql: the SQL query to execute.
        @param query_params: parameters to C{sql}.
        @return: an iterable of tuples, one for each row resulting from
            executing C{sql} with C{query_params}.
        """
        
        sql = self._prepare_statement(sql)
        results = (yield self.pool.runQuery(sql, *query_params))
        returnValue(tuple(results))

    @inlineCallbacks
    def _db_values_for_sql(self, sql, *query_params):
        """
        Execute an SQL query and obtain the resulting values.

        @param sql: the SQL query to execute.
        @param query_params: parameters to C{sql}.
        @return: an iterable of values in the first column of each row
            resulting from executing C{sql} with C{query_params}.
        @raise AssertionError: if the query yields multiple columns.
        """
        
        sql = self._prepare_statement(sql)
        results = (yield self.pool.runQuery(sql, *query_params))
        returnValue(tuple([row[0] for row in results]))

    @inlineCallbacks
    def _db_value_for_sql(self, sql, *query_params):
        """
        Execute an SQL query and obtain a single value.

        @param sql: the SQL query to execute.
        @param query_params: parameters to C{sql}.
        @return: the value resulting from executing C{sql} with
            C{query_params}.
        @raise AssertionError: if the query yields multiple rows or columns.
        """
        value = None
        for row in (yield self._db_values_for_sql(sql, *query_params)):
            assert value is None, "Multiple values in DB for %s %s" % (sql, query_params)
            value = row
        returnValue(value)

    def _db_execute(self, sql, *query_params):
        """
        Execute an SQL operation that returns no result rows.

        @param sql: the SQL statement to execute.
        @param query_params: parameters to C{sql}.
        @return: a Deferred that fires when the operation completes.
        """
        
        sql = self._prepare_statement(sql)
        return self.pool.runOperation(sql, *query_params)

    """
    Since different databases support different column types and modifiers, we need an
    "abstract" way of specifying columns in our code, mapping the abstract specifiers to
    the underlying DB's allowed types.
    
    Types we can use are:
    
    integer
    text
    text(n)
    date
    serial
    
    The " unique" modifier can be appended to any of those.
    """
    def _map_column_types(self, type):
        raise NotImplementedError
        
    def _create_table(self, name, columns, ifnotexists=False):
        raise NotImplementedError

    def _test_table(self, name):
        raise NotImplementedError

    def _create_index(self, name, ontable, columns, ifnotexists=False):
        raise NotImplementedError

    def _prepare_statement(self, sql):
        raise NotImplementedError
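A hypothetical sqlite3 implementation of the abstract column-type mapping described in the docstring above; the exact mappings are an assumption, since the concrete subclasses are not shown here:

def _map_column_types(self, coltype):
    # Split off the optional " unique" modifier first.
    unique = coltype.endswith(" unique")
    base = coltype[:-len(" unique")] if unique else coltype
    if base.startswith("text("):
        base = "text"  # sqlite ignores declared text lengths
    elif base == "serial":
        base = "integer primary key autoincrement"
    return base + (" unique" if unique else "")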
Code Example #38
File: toradbapi.py Project: geerk/toradbapi
 def __init__(self, *args, **kwargs):
     self._pool = TxConnectionPool(*args, **kwargs)
Code Example #39
class AbstractADBAPIDatabase(object):
    """
    A generic SQL database.
    """

    def __init__(self, dbID, dbapiName, dbapiArgs, persistent, **kwargs):
        """

        @param persistent: C{True} if the data in the DB must be preserved during upgrades,
            C{False} if the DB data can be re-created from an external source.
        @type persistent: bool
        """
        self.dbID = dbID
        self.dbapiName = dbapiName
        self.dbapiArgs = dbapiArgs
        self.dbapikwargs = kwargs

        self.persistent = persistent

        self.initialized = False

    def __repr__(self):
        return "<%s %r>" % (self.__class__.__name__, self.pool)

    @inlineCallbacks
    def open(self):
        """
        Access the underlying database.
        @return: a db2 connection object for this index's underlying data store.
        """
        if not self.initialized:

            self.pool = ConnectionPool(self.dbapiName, *self.dbapiArgs, **self.dbapikwargs)

            # sqlite3 is not thread safe which means we have to close the sqlite3 connections in the same thread that
            # opened them. We need a special thread pool class that has a thread worker function that does a close
            # when a thread is closed.
            if self.dbapiName == "sqlite3":
                self.pool.threadpool.stop()
                self.pool.threadpool = ConnectionClosingThreadPool(1, 1)
                self.pool.threadpool.start()
                self.pool.threadpool.pool = self.pool

            #
            # Set up the schema
            #
            # Create CALDAV table if needed

            try:
                test = (yield self._test_schema_table())
                if test:
                    version = (yield self._db_value_for_sql("select VALUE from CALDAV where KEY = 'SCHEMA_VERSION'"))
                    dbtype = (yield self._db_value_for_sql("select VALUE from CALDAV where KEY = 'TYPE'"))

                    if (version != self._db_version()) or (dbtype != self._db_type()):

                        if dbtype != self._db_type():
                            log.error(
                                "Database {db} has different type ({t1} vs. {t2})",
                                db=self.dbID, t1=dbtype, t2=self._db_type()
                            )

                            # Delete this index and start over
                            yield self._db_remove()
                            yield self._db_init()

                        elif version != self._db_version():
                            log.error(
                                "Database {db} has different schema (v.{v1} vs. v.{v2})",
                                db=self.dbID, v1=version, v2=self._db_version()
                            )

                            # Upgrade the DB
                            yield self._db_upgrade(version)

                else:
                    yield self._db_init()
                self.initialized = True
            except:
                # Clean up upon error so we don't end up leaking threads
                self.pool.close()
                self.pool = None
                raise

    def close(self):

        if self.initialized:
            try:
                self.pool.close()
            except Exception as e:
                log.error("Error whilst closing connection pool: {ex}", ex=e)
            self.pool = None
            self.initialized = False
Code Example #40
class Database():
    """
    HouseAgent database interaction.
    """
    def __init__(self, log, db_location):
        self.log = log

        type = "sqlite"

        self.coordinator = None
        self.histcollector = None
        self._db_location = db_location

        # Note: cp_max=1 is required; otherwise undefined behaviour could occur when using
        # yield in combination with subsequent runQuery or runOperation statements
        if type == "sqlite":
            self.dbpool = ConnectionPool("sqlite3", db_location, check_same_thread=False, cp_max=1)
       
        # Check database schema version and upgrade when required
        self.updatedb('0.2')
             
    def updatedb(self, dbversion):
        '''
        Perform a database schema update when required. 
        '''
        # Note: runInteraction runs all queries defined within the specified function as part of a transaction.
        return self.dbpool.runInteraction(self._updatedb, dbversion)

    def _updatedb(self, txn, dbversion):
        '''
        Check whether a database schema update is required and act accordingly.
        '''
        # Note: Although all queries are run as part of a transaction, a create or drop table statement results in an implicit commit

        # Query the version of the current schema
        try:
            result = txn.execute("SELECT parm_value FROM common WHERE parm = 'schema_version'").fetchall()
        except:
            result = None
            
        if result:
            version = result[0][0]
        else:
            version = '0.0'

        if float(version) > float(dbversion):
            self.log.error("ERROR: The current database schema (%s) is not supported by this version of HouseAgent" % version)
            # Exit HouseAgent
            sys.exit(1)
        
        elif float(version) == float(dbversion):
            self.log.debug("Database schema is up to date")
            return
        
        else:
            self.log.info("Database schema will be updated from %s to %s:" % (version, dbversion))

            # Before we start manipulating the database schema, first make a backup copy of the database
            try:
                shutil.copy(self._db_location, self._db_location + datetime.datetime.strftime(datetime.datetime.now(), ".%y%m%d-%H%M%S"))
            except:
                self.log.error("Cannot make a backup copy of the database (%s)", sys.exc_info()[1])
                return

            if version == '0.0':
                try:
                    # Create common table
                    txn.execute("CREATE TABLE IF NOT EXISTS common (parm VARCHAR(16) PRIMARY KEY, parm_value VARCHAR(24) NOT NULL)")
            
                    # Add schema version to database
                    txn.execute("INSERT INTO common (parm, parm_value) VALUES ('schema_version', ?)", [dbversion])

                    # Set primary key of the devices table on address + plugin_id to prevent adding duplicate devices
                    txn.execute("CREATE TEMPORARY TABLE devices_backup(id INTEGER PRIMARY KEY, name VARCHAR(45), address VARCHAR(45) NOT NULL, plugin_id INTEGER NOT NULL, location_id INTEGER)")
                    txn.execute("INSERT INTO devices_backup SELECT id, name, address, plugin_id, location_id FROM devices")
                    txn.execute("DROP TABLE devices")
                    txn.execute("CREATE TABLE devices(id INTEGER PRIMARY KEY, name VARCHAR(45), address VARCHAR(45) NOT NULL, plugin_id INTEGER, location_id INTEGER)")
                    txn.execute("CREATE UNIQUE INDEX device_address ON devices (address, plugin_id)")
                    txn.execute("INSERT INTO devices SELECT id, name, address, plugin_id, location_id FROM devices_backup")
                    txn.execute("DROP TABLE devices_backup")

                    self.log.info("Successfully upgraded database schema")
                except:
                    self.log.error("Database schema upgrade failed (%s)" % sys.exc_info()[1])

            elif version == '0.1':
                # update DB schema version to '0.2'
                try:
                    # update common table
                    txn.execute("UPDATE common SET parm_value=0.2 WHERE parm='schema_version';")

                    # history_periods table
                    txn.execute("CREATE TABLE history_periods(id integer PRIMARY KEY AUTOINCREMENT NOT NULL,\
                                name varchar(20), secs integer NOT NULL, sysflag CHAR(1) NOT NULL DEFAULT '0');")
                    
                    # default values for history_periods table
                    txn.execute("INSERT INTO history_periods VALUES(1,'Disabled',0,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(2,'5 min',300,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(3,'15 min',900,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(4,'30 min',1800,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(5,'1 hour',3600,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(6,'2 hours',7200,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(7,'8 hours',28800,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(8,'12 hours',43200,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(9,'1 day',86400,'1');")

                    # history_types table
                    txn.execute("CREATE TABLE history_types (id integer PRIMARY KEY AUTOINCREMENT NOT NULL, \
                                name  varchar(50));")
                    
                    # default values for history_types table
                    txn.execute("INSERT INTO history_types VALUES (NULL, 'GAUGE');")
                    txn.execute("INSERT INTO history_types VALUES (NULL, 'COUNTER');")

                    txn.execute("CREATE TEMPORARY TABLE current_values_tmp( \
                                id integer PRIMARY KEY AUTOINCREMENT NOT NULL, \
                                name varchar(45), value varchar(45), device_id integer NOT NULL, \
                                lastupdate datetime, history bool DEFAULT 0, \
                                history_type_id integer, control_type_id integer DEFAULT 0);")
                    txn.execute("INSERT INTO current_values_tmp \
                                SELECT id, name, value, device_id, lastupdate, history, \
                                history_type_id, control_type_id FROM current_values;")
                    
                    # create new current_values scheme (old data are purged)
                    txn.execute("DROP TABLE current_values;")
                    txn.execute("CREATE TABLE current_values(id integer PRIMARY KEY AUTOINCREMENT NOT NULL, \
                                name varchar(45), value varchar(45), device_id integer NOT NULL, \
                                lastupdate datetime, history_period_id  int DEFAULT 1, \
                                history_type_id int DEFAULT 1, control_type_id  integer DEFAULT 0, \
                                FOREIGN KEY (history_period_id) REFERENCES history_periods(id), \
                                FOREIGN KEY (history_type_id) REFERENCES history_types(id), \
                                FOREIGN KEY (device_id) REFERENCES devices(id));")
                    
                    # current_values indexes
                    txn.execute("CREATE INDEX 'current_values.fk_current_values_control_types1' \
                                    ON current_values (control_type_id);")
                    txn.execute("CREATE INDEX 'current_values.fk_current_values_history_periods1' \
                                    ON current_values (history_period_id);")
                    txn.execute("CREATE INDEX 'current_values.fk_current_values_history_types1' \
                                    ON current_values (history_type_id);")
                    txn.execute("CREATE INDEX 'current_values.fk_values_devices1' \
                                    ON current_values (device_id);")
                    
                    # fill new current_values table
                    txn.execute("INSERT INTO current_values \
                                SELECT id, name, value, device_id, lastupdate, 1, 1, control_type_id \
                                FROM current_values_tmp;")
                    txn.execute("DROP TABLE current_values_tmp;")

                    # history_values table
                    txn.execute("CREATE TABLE history_values (value_id integer,\
                                value real, created_at datetime, \
                                FOREIGN KEY (value_id) REFERENCES current_values(id));")

                    txn.execute("CREATE INDEX 'history_values.idx_history_values_created_at1' \
                                    ON history_values (created_at);")
                    txn.execute("CREATE INDEX 'history_values.idx_history_values_value_id1' \
                                    ON history_values (value_id);")
                    
                    # Control types fix
                    txn.execute("INSERT into control_types VALUES(0, 'Not controllable');")
                    txn.execute("UPDATE control_types SET name='Switch (On/off)' WHERE id=1;")
                    txn.execute("UPDATE control_types SET name='Thermostat (Setpoint)' WHERE id=2;")

                    self.log.info("Successfully upgraded database schema")
                except:
                    self.log.error("Database schema upgrade failed (%s)" % sys.exc_info()[1])

    def query_plugin_auth(self, authcode):
        return self.dbpool.runQuery("SELECT authcode, id from plugins WHERE authcode = '%s'" % authcode)

    def check_plugin_auth(self, result):
        if len(result) >= 1:
            return {'registered': True}
        else:
            return {'registered': False}

    def insert_result(self, result):
        return {'received': True}

    def add_event(self, name, enabled, triggers):
        """
        This function adds an event to the database.
        """
        d = self.dbpool.runQuery("INSERT INTO events (name, enabled) VALUES (?, ?)", (name, enabled) )
        def event_added(result):
            print "added event"
            return self.dbpool.runQuery("select id from events order by id desc limit 1")      
        
        d.addCallback(event_added)
        def got_id(result):
            event_id = result[0][0]
            
            print "got event_id", result[0][0]
            print "triggers=",triggers
            
            # Add triggers
            deferredlist = []
            
            for trigger in triggers:
                trigger_type_id = trigger["trigger_type"]
                print "trigger", trigger
                
                def got_triggerid(result):
                    trigger_id = result[0][0]
                    
                    print "parameters", trigger["parameters"]
                    for name, value in trigger["parameters"].iteritems():
                        print name, value
                        deferredlist.append(self.dbpool.runQuery("INSERT INTO trigger_parameters (name, value, " +
                                                                 "triggers_id) VALUES (?, ?, ?)", (name, value, trigger_id)))
                
                def trigger_added(result):
                    self.dbpool.runQuery("select id from triggers order by id desc limit 1").addCallback(got_triggerid)     
                
                # Triggers
                deferredlist.append(self.dbpool.runQuery("INSERT INTO triggers (trigger_types_id, events_id)" +
                                                         " VALUES (?, ?)", (trigger_type_id, event_id)).addCallback(trigger_added) )        
                    
            d = defer.gatherResults(deferredlist)
            return d
            
        d.addCallback(got_id)
        
        def added_triggers(result):
            print "triggers added"
            
        d.addCallback(added_triggers)
        return d
    
    def add_location(self, name, parent):
        if parent:
            return self.dbpool.runQuery("INSERT INTO locations (name, parent) VALUES (?, ?)", [name, parent])
        else:
            return self.dbpool.runQuery("INSERT INTO locations (name) VALUES (?)", [name])
    
    @inlineCallbacks
    def add_event2(self, name, enabled, conditions, actions, trigger):
        '''
        This adds an event to the database.
        '''
        # Add event, and get event id
        yield self.dbpool.runQuery("INSERT INTO events (name, enabled) VALUES (?, ?)", [name, enabled])
        eventid = yield self.dbpool.runQuery("select id from events order by id desc limit 1")
        eventid = eventid[0][0]
        
        # Add conditions
        for condition in conditions:
            condition_type_id = condition["condition_type"]
            
            yield self.dbpool.runQuery("INSERT INTO conditions (condition_types_id, events_id)" +
                                       " VALUES (?, ?)", [condition_type_id, eventid])
            
            condition_id = yield self.dbpool.runQuery("select id from conditions order by id desc limit 1")
            condition_id = condition_id[0][0]
            
            for name, value in condition["parameters"].iteritems():
                yield self.dbpool.runQuery("INSERT INTO condition_parameters (name, value, " +
                                           "conditions_id) VALUES (?, ?, ?)", [name, value, condition_id])
        
        # Add actions
        for action in actions:
            action_type_id = action["action_type"]
            
            yield self.dbpool.runQuery("INSERT INTO actions (action_types_id, events_id)" +
                                       " VALUES (?, ?)", [action_type_id, eventid])
            
            action_id = yield self.dbpool.runQuery("select id from actions order by id desc limit 1")
            action_id = action_id[0][0]
            
            for name, value in action["parameters"].iteritems():
                yield self.dbpool.runQuery("INSERT INTO action_parameters (name, value, " +
                                           "actions_id) VALUES (?, ?, ?)", [name, value, action_id])
                
            
        # Insert trigger
        yield self.dbpool.runQuery("INSERT INTO triggers (trigger_types_id, events_id, conditions)" +
                                   " VALUES (?,?,?)", [trigger["trigger_type"], eventid, trigger["conditions"]])
 
        trigger_id = yield self.dbpool.runQuery("select id from triggers order by id desc limit 1")
        trigger_id = trigger_id[0][0]
       
        for name, value in trigger["parameters"].iteritems():
            yield self.dbpool.runQuery("INSERT INTO trigger_parameters (name, value, " +
                                       "triggers_id) VALUES (?, ?, ?)", [name, value, trigger_id])
               
    
    def add_trigger(self, trigger_type_id, event_id, value_id, parameters):
        print "INSERT INTO triggers (trigger_types_id, events_id, current_values_id) VALUES (%d, %d, %d)" % (int(trigger_type_id),
                                                                                                             int(event_id),
                                                                                                             int(value_id))
        # sqlite3 uses the qmark ("?") parameter style, and SQLite's function is
        # last_insert_rowid(); "%s" placeholders and MySQL's last_insert_id()
        # do not work here.  cp_max=1 keeps these queries on one connection.
        d = self.dbpool.runQuery("INSERT INTO triggers (trigger_types_id, events_id" +
                                 ", current_values_id) VALUES (?, ?, ?)", (int(trigger_type_id),
                                                                           int(event_id),
                                                                           int(value_id)))
        for name, value in parameters.iteritems():
            self.dbpool.runQuery("INSERT INTO trigger_parameters (name, value, triggers_id) VALUES (?, ?, last_insert_rowid())", (name, value))

        return d
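
    # A hedged alternative sketch (not in the original source): doing the
    # trigger insert and its parameter inserts inside a single runInteraction
    # removes both the cross-query last_insert_rowid() dependency above and the
    # racy "select id ... order by id desc limit 1" pattern used elsewhere.
    # adbapi's Transaction forwards unknown attributes to the underlying DB-API
    # cursor, so txn.lastrowid is available with sqlite3.
    def add_trigger_atomic(self, trigger_type_id, event_id, value_id, parameters):
        def interaction(txn):
            txn.execute("INSERT INTO triggers (trigger_types_id, events_id, current_values_id) " +
                        "VALUES (?, ?, ?)", (int(trigger_type_id), int(event_id), int(value_id)))
            trigger_id = txn.lastrowid
            for name, value in parameters.iteritems():
                txn.execute("INSERT INTO trigger_parameters (name, value, triggers_id) " +
                            "VALUES (?, ?, ?)", (name, value, trigger_id))
            return trigger_id
        return self.dbpool.runInteraction(interaction)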
    
    #def add_action(self, action_type_id, event_id):
    
    def query_latest_device_id(self):
        '''
        This function queries the latest device id.
        '''
        # ORDER BY id DESC so this actually returns the most recent device id.
        return self.dbpool.runQuery('select id from devices ORDER BY id DESC LIMIT 1')
         
    def query_triggers(self):
        return self.dbpool.runQuery("SELECT triggers.id, trigger_types.name, triggers.events_id, triggers.conditions " + 
                                    "FROM triggers INNER JOIN trigger_types ON (triggers.trigger_types_id = trigger_types.id)")

    def query_trigger(self, event_id):
        return self.dbpool.runQuery("SELECT triggers.id, trigger_types.name, triggers.events_id, triggers.conditions " + 
                                    "FROM triggers INNER JOIN trigger_types ON (triggers.trigger_types_id = trigger_types.id) " +
                                    "WHERE triggers.events_id = ? LIMIT 1", [event_id])
        
    def query_conditions(self):
        return self.dbpool.runQuery("SELECT conditions.id, condition_types.name, conditions.events_id " + 
                                    "FROM conditions INNER JOIN condition_types ON (conditions.condition_types_id = condition_types.id)")

    def query_actions(self):
        return self.dbpool.runQuery("SELECT actions.id, action_types.name, actions.events_id " + 
                                    "FROM actions INNER JOIN action_types ON (actions.action_types_id = action_types.id)")

    def query_trigger_parameters(self, trigger_id):
        return self.dbpool.runQuery("SELECT name, value from trigger_parameters WHERE triggers_id = ?", [trigger_id])
    
    def query_condition_parameters(self, condition_id):
        return self.dbpool.runQuery("SELECT name, value from condition_parameters WHERE conditions_id = ?", [condition_id])        

    def query_action_parameters(self, action_id):
        return self.dbpool.runQuery("SELECT name, value from action_parameters WHERE actions_id = ?", [action_id])
    
    def query_device_routing_by_id(self, device_id):
        return self.dbpool.runQuery("SELECT devices.address, plugins.authcode FROM devices " +  
                                    "INNER JOIN plugins ON (devices.plugin_id = plugins.id) "
                                    "WHERE devices.id = ?", [device_id])

    def query_value_properties(self, value_id):
        return self.dbpool.runQuery("SELECT current_values.name, devices.address, devices.plugin_id from current_values " + 
                                    "INNER JOIN devices ON (current_values.device_id = devices.id) " + 
                                    "WHERE current_values.id = ?", [value_id])

    def query_plugin_devices(self, plugin_id):
        return self.dbpool.runQuery("SELECT devices.id, devices.name, devices.address, locations.name from devices " +
                                    "LEFT OUTER JOIN locations ON (devices.location_id = locations.id) " +
                                    "WHERE plugin_id=? ", [plugin_id])

    @inlineCallbacks
    def update_or_add_value(self, name, value, pluginid, address, time=None):
        '''
        This function updates or adds values in the HouseAgent database.
        @param name: the name of the value
        @param value: the actual value
        @param pluginid: the plugin which holds the device information
        @param address: the address of the device being handled
        @param time: the time at which the update was received; defaults to now()
        '''
        if not time:
            updatetime = datetime.datetime.now().isoformat(' ').split('.')[0]
        else:
            updatetime = datetime.datetime.fromtimestamp(time).isoformat(' ').split('.')[0]
        
        # Query device first
        device_id = yield self.dbpool.runQuery('select id from devices WHERE plugin_id = ? and address = ? LIMIT 1', (pluginid, address) )

        try:
            device_id = device_id[0][0]
        except:
            returnValue('') # device does not exist
        
        current_value = yield self.dbpool.runQuery("SELECT id, name, history_type_id, history_period_id FROM current_values WHERE name=? AND device_id=? LIMIT 1", (name, device_id))
    
        try:
            value_id = current_value[0][0]
        except:
            value_id = None
    
        if value_id:
            value_id = current_value[0][0]
            
            history_type = current_value[0][2]
            history_period = current_value[0][3]
            
            yield self.dbpool.runQuery("UPDATE current_values SET value=?, lastupdate=? WHERE id=?", (value, updatetime, value_id))
        else:
            yield self.dbpool.runQuery("INSERT INTO current_values (name, value, device_id, lastupdate) VALUES (?, ?, (SELECT id FROM devices WHERE address=? AND plugin_id=?),  ?)", (name, value, address, pluginid, updatetime))
            current_value = yield self.dbpool.runQuery("SELECT id FROM current_values WHERE name=? AND device_id=?", (name, device_id))
            value_id = current_value[0][0]
                        
        returnValue(value_id)
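
    # Usage sketch (assumed caller, not in the original source):
    # update_or_add_value is an inlineCallbacks generator, so callers yield it
    # from another inlineCallbacks function, e.g.:
    #
    #   @inlineCallbacks
    #   def handle_plugin_update(self, pluginid, address, name, value):
    #       value_id = yield self.update_or_add_value(name, value, pluginid, address)
    #       if value_id == '':
    #           self.log.debug("ignoring update for unknown device")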

    def register_plugin(self, name, uuid, location):
        return self.dbpool.runQuery("INSERT INTO plugins (name, authcode, location_id) VALUES (?, ?, ?)", [str(name), str(uuid), location])

    def query_plugins(self):
        return self.dbpool.runQuery("SELECT plugins.name, plugins.authcode, plugins.id, locations.name, plugins.location_id from plugins " +
                                    "LEFT OUTER JOIN locations ON (plugins.location_id = locations.id)")
    
    def query_plugin_by_type_name(self, type_name):
        return self.dbpool.runQuery("SELECT plugins.id, plugins.authcode from plugins " +
                                    "INNER JOIN plugin_types ON (plugins.plugin_type_id = plugin_types.id)" +
                                    "WHERE plugin_types.name = ? LIMIT 1", [type_name])

    def query_device_classes(self):
        return self.dbpool.runQuery("SELECT * from device_class order by name ASC")
    
    def query_device_types(self):
        return self.dbpool.runQuery("SELECT * from device_types order by name ASC")
       
    @inlineCallbacks
    def cb_device_crud(self, result, action, id=None, plugin=None, address=None, name=None, location=None):
        '''
        Callback function that gets called when a device has been created, updated or deleted in the database.
        @param result: the result of the action
        @param action: the action initiating the callback; one of create, update or delete
        @param id: the id of the device (for updates)
        @param plugin: the uuid of the plugin owning the device
        @param address: the address of the device
        @param name: the name of the device
        @param location: the name of the location associated with the device
        '''
        if action == "create":
            parms = yield self.dbpool.runQuery("SELECT plugins.authcode, devices.address, devices.name, locations.name FROM devices, plugins, locations WHERE devices.plugin_id = plugins.id AND devices.location_id = locations.id ORDER BY devices.id DESC LIMIT 1")
            
        if action == "update":
            parms = yield self.dbpool.runQuery("SELECT plugins.authcode, devices.address, devices.name, locations.name FROM devices, plugins, locations WHERE devices.plugin_id = plugins.id AND devices.location_id = locations.id AND devices.id=?", [id])

        if action != "delete":
            plugin = parms[0][0]
            address = parms[0][1]
            name = parms[0][2]
            location = parms[0][3]
            
        parameters = {"plugin": plugin, 
                      "address": address,
                      "name": name,
                      "location": location}

        if self.coordinator:
            self.coordinator.send_crud_update("device", action, parameters)    

    def save_device(self, name, address, plugin_id, location_id, id=None):
        '''
        This function saves a device in the HouseAgent database.
        @param name: the name of the device
        @param address: the address of the device
        @param plugin_id: the plugin_id of the associated plugin
        @param location_id: the location_id of the associated location
        @param id: the id of the device (in case this is an update)
        '''
        
        if not id:
            return self.dbpool.runQuery("INSERT INTO devices (name, address, plugin_id, location_id) VALUES (?, ?, ?, ?)", \
                                        (name, address, plugin_id, location_id)).addCallback(self.cb_device_crud, "create")
        else:
            return self.dbpool.runQuery("UPDATE devices SET name=?, address=?, plugin_id=?, location_id=? WHERE id=?", \
                                        (name, address, plugin_id, location_id, id)).addCallback(self.cb_device_crud, "update", id)

    def del_device(self, id):
        
        def delete(result, id):
            self.dbpool.runQuery("DELETE FROM devices WHERE id=?", [id]).addCallback(self.cb_device_crud, "delete", id, result[0][0], result[0][1], result[0][2], result[0][3])
        
        return self.dbpool.runQuery("SELECT plugins.authcode, devices.address, devices.name, locations.name FROM plugins, devices, locations " +
                                    "WHERE devices.plugin_id = plugins.id AND devices.location_id = locations.id AND devices.id=?", [id]).addCallback(delete, id)

    def del_location(self, id):
        return self.dbpool.runQuery("DELETE FROM locations WHERE id=?", [id])

    @inlineCallbacks
    def del_event(self, id):
        # Delete all parameters for this event id
        yield self.dbpool.runQuery("DELETE FROM trigger_parameters where triggers_id=" +
                                   " (select id from triggers where events_id=?)", [id])
        
        yield self.dbpool.runQuery("DELETE FROM condition_parameters where conditions_id=" +
                                   " (select id from conditions where events_id=?)" , [id])
    
        yield self.dbpool.runQuery("DELETE FROM action_parameters where actions_id=" +
                                   " (select id from actions where events_id=?)", [id])
        
        yield self.dbpool.runQuery("DELETE FROM triggers where events_id=?", [id])
        yield self.dbpool.runQuery("DELETE FROM actions where events_id=?", [id])
        yield self.dbpool.runQuery("DELETE FROM conditions where events_id=?", [id])
        
        yield self.dbpool.runQuery("DELETE FROM events where id=?", [id])

    def del_plugin(self, id):
        return self.dbpool.runQuery("DELETE FROM plugins WHERE id=?", [id])

    def query_locations(self):
        return self.dbpool.runQuery("select locations.id, locations.name, l2.name from locations " +  
                                    "left join locations as l2 on locations.parent=l2.id")

    def query_values(self):
        return self.dbpool.runQuery("SELECT current_values.name, current_values.value, devices.name, " + 
                               "current_values.lastupdate, plugins.name, devices.address, locations.name, current_values.id" + 
                               ", control_types.name, control_types.id, history_types.name, history_periods.name, plugins.id FROM current_values INNER " +
                               "JOIN devices ON (current_values.device_id = devices.id) INNER JOIN plugins ON (devices.plugin_id = plugins.id) " + 
                               "LEFT OUTER JOIN locations ON (devices.location_id = locations.id) " + 
                               "LEFT OUTER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                               "LEFT OUTER JOIN history_types ON (current_values.history_type_id = history_types.id) " +
                               "LEFT OUTER JOIN history_periods ON (current_values.history_period_id = history_periods.id)")

    def query_values_light(self):
        return self.dbpool.runQuery("SELECT id, name, history_period_id, history_type_id FROM current_values;")

    def query_devices(self):      
        return self.dbpool.runQuery("SELECT devices.id, devices.name, devices.address, plugins.name, locations.name from devices " +
                                    "INNER JOIN plugins ON (devices.plugin_id = plugins.id) " +
                                    "LEFT OUTER JOIN locations ON (devices.location_id = locations.id)")

    def query_location(self, id):
        return self.dbpool.runQuery("SELECT id, name, parent FROM locations WHERE id=?", [id])
    
    def query_plugin(self, id):
        return self.dbpool.runQuery("SELECT id, name, location_id FROM plugins WHERE id=?", [id])
    
    def query_device(self, id):
        return self.dbpool.runQuery("SELECT id, name, address, plugin_id, location_id FROM devices WHERE id=?", [id])

    def query_triggertypes(self):
        return self.dbpool.runQuery("SELECT id, name from trigger_types")

    def query_actiontypes(self):
        return self.dbpool.runQuery("SELECT id, name from action_types")
    
    def query_conditiontypes(self):
        return self.dbpool.runQuery("SELECT id, name from condition_types")
    
    def query_controltypes(self):
        return self.dbpool.runQuery("SELECT id, name from control_types")
    
    def query_controltypename(self, current_value_id):
        return self.dbpool.runQuery("select control_types.name from current_values " +
                                    "INNER JOIN controL_types ON (control_types.id = current_values.control_type_id) " +
                                    "where current_values.id=?", [current_value_id])
    
    def query_devices_simple(self):
        return self.dbpool.runQuery("SELECT id, name from devices")
    
    def query_plugintypes(self):
        return self.dbpool.runQuery("SELECT id, name from plugin_types")

    # history collector stuff
    def query_history_types(self):
        return self.dbpool.runQuery("SELECT id, name FROM history_types;")

    def query_history_schedules(self):
        return self.dbpool.runQuery("SELECT id, name, history_period_id, history_type_id FROM current_values;")

    def query_history_periods(self):
        return self.dbpool.runQuery("SELECT id, name, secs, sysflag FROM history_periods;")

    def query_history_values(self, date_from, date_to):
        return self.dbpool.runQuery("SELECT value, created_at FROM history_values WHERE created_at >= '%s' AND created_at < '%s';" % (date_from, date_to))

    def cleanup_history_values(self):
        """keep 7 days history of history_values table"""
        return self.dbpool.runQuery("DELETE FROM history_values WHERE created_at < DATETIME(DATETIME(), 'localtime', '-7 day');")

    def collect_history_values(self, value_id):
        return self.dbpool.runQuery("INSERT INTO history_values SELECT id, value, DATETIME(DATETIME(), 'localtime') FROM current_values WHERE id=?;", [value_id])

    # /history collector stuff

    def query_controllable_devices(self):
        return self.dbpool.runQuery("SELECT devices.name, devices.address, plugins.name, plugins.authcode, current_values.value, devices.id, control_types.name, current_values.id FROM current_values " +
                                    "INNER JOIN devices ON (current_values.device_id = devices.id) " +
                                    "INNER JOIN plugins ON (devices.plugin_id = plugins.id) " +
                                    "INNER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                                    "WHERE current_values.control_type_id != 0")
    
    def query_action_types_by_device_id(self, device_id):
        return self.dbpool.runQuery("SELECT current_values.id, current_values.name, control_types.name FROM current_values " +
                                    "INNER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                                    "WHERE current_values.device_id = ?", [device_id])

    def query_action_type_by_value_id(self, value_id):
        return self.dbpool.runQuery("SELECT control_types.name FROM current_values " +
                                    "INNER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                                    "WHERE current_values.id = ? LIMIT 1", [value_id])
        
    def query_values_by_device_id(self, device_id):
        return self.dbpool.runQuery("SELECT id, name from current_values WHERE device_id = '%s'" % device_id)

    def query_device_type_by_device_id(self, device_id):
        return self.dbpool.runQuery("SELECT device_types.name FROM devices " +  
                                    "INNER JOIN device_types ON (device_types.id = devices.device_type_id) " + 
                                    "WHERE devices.id = ? LIMIT 1", [device_id])

    def query_value_by_valueid(self, value_id):
        return self.dbpool.runQuery("SELECT value,name from current_values WHERE id = ? LIMIT 1", [value_id])
    
    def query_extra_valueinfo(self, value_id):
        return self.dbpool.runQuery("select devices.name, current_values.name from current_values " +
                                    "inner join devices on (current_values.device_id = devices.id) " + 
                                    "where current_values.id = ?", [value_id])

    def set_history(self, id, history_period, history_type):
        # histcollector needs a fresh data -> defer the UPDATE
        d = self.dbpool.runQuery("UPDATE current_values SET history_period_id=?, history_type_id=? WHERE id=?", [history_period, history_type, id])

        # helper fn
        def histcollector_refresh(result, id, history_period):
            self.histcollector.cb_unregister_schedule(int(id))
            self.histcollector.cb_register_schedule(int(id), history_period)

        d.addCallback(histcollector_refresh, id, history_period)
        return d
    
    def set_controltype(self, id, control_type):
        return self.dbpool.runQuery("UPDATE current_values SET control_type_id=? WHERE id=?", [control_type, id])

    def update_location(self, id, name, parent):
        return self.dbpool.runQuery("UPDATE locations SET name=?, parent=? WHERE id=?", [name, parent, id])
    
    def update_plugin(self, id, name, location):
        return self.dbpool.runQuery("UPDATE plugins SET name=?, location_id=? WHERE id=?", [name, location, id])
    
    def query_events(self):
        return self.dbpool.runQuery("SELECT id, name, enabled from events")
Code example #41
0
File: ircbot.py Project: koikonom/ircbot
class IrcBot(irc.IRCClient):


    def __init__(self, *args, **kwargs):
        self.plugins = {'privmsg': [self.url_plugin, self.cmd_plugin],
                   'signedOn': [self.signon_plugin],
                   'joined': [self.joined_plugin]}
        self.init_db()



    @defer.inlineCallbacks
    def init_db(self):
        self.db = ConnectionPool('sqlite3', db_file)
        # runOperation is the adbapi call for statements that return no rows,
        # such as these CREATE TABLE statements.
        yield self.db.runOperation('''CREATE TABLE IF NOT EXISTS QUOTES (ID INTEGER PRIMARY KEY,
                                                    NICK TEXT,
                                                    QUOTE TEXT collate nocase,
                                                    QUOTE_DT NUMERIC,
                                                    ADDED_BY TEXT,
                                                    CHANNEL TEXT
                                                    )''')
        yield self.db.runOperation('''CREATE TABLE IF NOT EXISTS URLS (ID INTEGER PRIMARY KEY,
                                                    URL TEXT collate nocase,
                                                    URL_DT NUMERIC,
                                                    ADDED_BY TEXT,
                                                    CHANNEL TEXT
                                                    )''')

    ###############################################################################
    # Misc methods
    #############################################################################

    # Return the name of the method that calls funcname() as a string
    def funcname(self):
        return inspect.stack()[1][3]

    # Helper method that converts seconds to a string
    # in the format of "X days, X hours, X minutes, X seconds
    def get_time(self, sec):
        sec = timedelta(seconds=int(time.time()) - sec)
        dt = datetime(1, 1, 1) + sec
        msg = "%s seconds" % dt.second
        if dt.minute:
            msg = "%s minute(s), " % dt.minute + msg
        if dt.hour:
            msg = "%s hour(s), " % dt.hour + msg
        if dt.day - 1:
            msg = "%s day(s), " % (dt.day - 1) + msg
        return msg
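
    # Example (illustrative): get_time(int(time.time()) - 90061) returns
    # "1 day(s), 1 hour(s), 1 minute(s), 1 seconds".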

    #############################################################################
    # Plugin code, to be moved to a separate file
    #############################################################################

    def signon_plugin(self):
        self.setNick(self.factory.nickname)
        self.join(self.factory.channel)
        print "Signed on as %s." % (self.nickname,)

    def joined_plugin(self, channel):
        print "Joined %s." % (channel,)
        self.say(channel, 'hi')

    def cmd_plugin(self, user, channel, msg):
        cmd_dict = {'quote': self.quote,
                    'add': self.add_quote}

        if msg.startswith('!'):
            cmd = msg.split()[0][1:]
            # Ignore unknown commands instead of raising a KeyError.
            if cmd in cmd_dict:
                cmd_dict[cmd](user, channel, msg)

    def url_plugin(self, user, channel, msg):
        # Do not process commands (startswith also handles an empty message)
        if msg.startswith('!'):
            return
        for tok in msg.split(' '):
            if urlparse.urlparse(tok).scheme[:4] == 'http':
                user = user.split('!')[0]
                self.store(tok, channel, user)

    @defer.inlineCallbacks
    def quote(self, user, channel, msg):
        toks = msg.split(' ')
        if len(toks) < 2:
            query = '''SELECT id, quote FROM quotes ORDER BY RANDOM() LIMIT 1'''
            quotes = yield self.db.runQuery(query,)
        else:
            query = '''SELECT id, quote from quotes where quote like ?'''
            pattern = '%%%s%%' % ' '.join(toks[1:])
            quotes = yield self.db.runQuery(query, (pattern,))

        if len(quotes):
            if len(quotes) > 1:
                quotes = [random.choice(quotes)]
            msg = '[%s] %s' % (str(quotes[0][0]), str(quotes[0][1]))
            self.say(channel, msg)

    @defer.inlineCallbacks
    def add_quote(self, user, channel, msg):
        print user,channel,msg
        quote = ' '.join(msg.split(' ')[1:])
        nick_match = re.match(r'\S*\s*<(\S*)>.*', quote)
        if nick_match:
            nick = nick_match.groups()[0]
        else:
            nick = ''
        user = user.split('!')[0]
        query = '''INSERT INTO quotes (NICK, QUOTE, QUOTE_DT, ADDED_BY, CHANNEL) VALUES (?, ?, ?, ?, ?)'''
        quotes = yield self.db.runQuery(query, (nick, quote, int(time.time()), user, channel))
        self.say(channel, 'Quote added!')

    @defer.inlineCallbacks
    def store(self, url, channel, nick):
        # find existing url
        query = '''SELECT * FROM urls WHERE url = ?'''
        result = yield self.db.runQuery(query, (url,))
        if not result:
            dt = int(time.time())
            query = '''INSERT INTO urls (URL, URL_DT, ADDED_BY, CHANNEL) VALUES (?, ?, ?, ?)'''
            quotes = yield self.db.runQuery(query, (url, dt, nick, channel))
            self.process_url(url, channel, dt)
        else:
            res = result[0]
            src_nick = res[3]
            src_channel = res[4]
            ts = self.get_time(int(res[2]))
            msg = "OLD! %s mentioned it %s ago" % (src_nick, ts)
            if src_channel != channel:
                msg += ", in %s" % src_channel
            self.say(channel, str(msg))

    # Check if the url is an html page or a image.
    # If it's HTML extract the title, otherwise store the image
    # in the DB.
    @defer.inlineCallbacks
    def process_url(self, url, channel, dt):
        agent = Agent(reactor)
        resp = yield agent.request('HEAD', url)
        # Default to [''] so the indexing below is safe when the header is missing.
        ctype = resp.headers.getRawHeaders('Content-Type', [''])
        if ctype[0][:9] == 'text/html':
            html = yield getPage(url)
            title = fromstring(html).xpath('/html/head/title')[0].text
            msg = "[ %s ]" % title
            self.say(channel, str(msg))
        elif ctype[0].split('/')[0] == 'image':
            self.store_image(url, channel, dt)
        else:
            self.say(channel, 'unknown mime type: %s' % str(ctype))

    @defer.inlineCallbacks
    def store_image(self, url, channel, dt):
        # Save the file. getPage returns a Deferred; the filesystem calls are
        # plain blocking calls, so they are not yielded.
        img_data = yield getPage(url)
        filename = urlparse.urlparse(url).path.split('/')[-1]
        new_path = os.path.join(store_dir, filename[0], ''.join([str(y) for y in filename[0:2]]))
        if not os.path.exists(new_path):
            os.makedirs(new_path)
        tgt_filename = os.path.join(new_path, '%s_%s' % (dt, filename))
        f = open(tgt_filename, 'wb')
        try:
            f.write(img_data)
            self.say(channel, 'image stored')
        except Exception, e:
            self.say(channel, 'failed to store image: %s' % e.message)
        finally:
            f.close()
Code example #42
0
File: database.py Project: rrada/HouseAgent
class Database():
    """
    HouseAgent database interaction.
    """
    def __init__(self, log, db_location):
        self.log = log

        type = "sqlite"

        self.coordinator = None
        # Keep the path around so _updatedb can make a backup copy later.
        self.db_location = db_location

        # Note: cp_max=1 is required, otherwise undefined behaviour could occur when using yield
        # in combination with subsequent runQuery or runOperation statements
        if type == "sqlite":
            self.dbpool = ConnectionPool("sqlite3", db_location, check_same_thread=False, cp_max=1)
       
        # Check database schema version and upgrade when required
        self.updatedb('0.1')
             
    def updatedb(self, dbversion):
        '''
        Perform a database schema update when required. 
        '''
        # Note: runInteraction runs all queries defined within the specified function as part of a transaction.
        return self.dbpool.runInteraction(self._updatedb, dbversion)

    def _updatedb(self, txn, dbversion):
        '''
        Check whether a database schema update is required and act accordingly.
        '''
        # Note: Although all queries are run as part of a transaction, a create or drop table statement result in an implicit commit

        # Query the version of the current schema
        try:
            result = txn.execute("SELECT parm_value FROM common WHERE parm = 'schema_version'").fetchall()
        except:
            result = None
            
        if result:
            version = result[0][0]
        else:
            version = '0.0'

        if float(version) > float(dbversion):
            self.log.error("ERROR: The current database schema (%s) is not supported by this version of HouseAgent" % version)
            # Exit HouseAgent
            sys.exit(1)
        
        elif float(version) == float(dbversion):
            self.log.debug("Database schema is up to date")
            return
        
        else:
            self.log.info("Database schema will be updated from %s to %s:" % (version, dbversion))

            # Before we start manipulating the database schema, first make a backup copy of the database
            try:
                shutil.copy(self.db_location, self.db_location + datetime.datetime.strftime(datetime.datetime.now(), ".%y%m%d-%H%M%S"))
            except:
                self.log.error("Cannot make a backup copy of the database (%s)", sys.exc_info()[1])
                return

            if version == '0.0':
                try:
                    # Create common table
                    txn.execute("CREATE TABLE IF NOT EXISTS common (parm VARCHAR(16) PRIMARY KEY, parm_value VARCHAR(24) NOT NULL)")
            
                    # Add schema version to database
                    txn.execute("INSERT INTO common (parm, parm_value) VALUES ('schema_version', ?)", [dbversion])

                    # Set primary key of the devices table on address + plugin_id to prevent adding duplicate devices
                    txn.execute("CREATE TEMPORARY TABLE devices_backup(id INTEGER PRIMARY KEY, name VARCHAR(45), address VARCHAR(45) NOT NULL, plugin_id INTEGER NOT NULL, location_id INTEGER)")
                    txn.execute("INSERT INTO devices_backup SELECT id, name, address, plugin_id, location_id FROM devices")
                    txn.execute("DROP TABLE devices")
                    txn.execute("CREATE TABLE devices(id INTEGER PRIMARY KEY, name VARCHAR(45), address VARCHAR(45) NOT NULL, plugin_id INTEGER, location_id INTEGER)")
                    txn.execute("CREATE UNIQUE INDEX device_address ON devices (address, plugin_id)")
                    txn.execute("INSERT INTO devices SELECT id, name, address, plugin_id, location_id FROM devices_backup")
                    txn.execute("DROP TABLE devices_backup")

                    self.log.info("Successfully upgraded database schema")
                except:
                    self.log.error("Database schema upgrade failed (%s)" % sys.exc_info()[1])

    def query_plugin_auth(self, authcode):
        return self.dbpool.runQuery("SELECT authcode, id from plugins WHERE authcode = '%s'" % authcode)

    def check_plugin_auth(self, result):
        if len(result) >= 1:
            return {'registered': True}
        else:
            return {'registered': False}

    def insert_result(self, result):
        return {'received': True}

    def add_event(self, name, enabled, triggers):
        """
        This function adds an event to the database.
        """
        d = self.dbpool.runQuery("INSERT INTO events (name, enabled) VALUES (?, ?)", (name, enabled) )
        def event_added(result):
            print "added event"
            return self.dbpool.runQuery("select id from events order by id desc limit 1")      
        
        d.addCallback(event_added)
        def got_id(result):
            event_id = result[0][0]
            
            print "got event_id", result[0][0]
            print "triggers=",triggers
            
            # Add triggers
            deferredlist = []
            
            for trigger in triggers:
                trigger_type_id = trigger["trigger_type"]
                print "trigger", trigger

                # Bind the current trigger as a default argument; a plain
                # closure would late-bind and every callback would see only
                # the last trigger of the loop.
                def got_triggerid(result, trigger=trigger):
                    trigger_id = result[0][0]

                    print "parameters", trigger["parameters"]
                    for name, value in trigger["parameters"].iteritems():
                        print name, value
                        deferredlist.append(self.dbpool.runQuery("INSERT INTO trigger_parameters (name, value, " +
                                                                 "triggers_id) VALUES (?, ?, ?)", (name, value, trigger_id)))

                def trigger_added(result):
                    # Return the Deferred so gatherResults waits for the id lookup.
                    return self.dbpool.runQuery("select id from triggers order by id desc limit 1").addCallback(got_triggerid)

                # Triggers
                deferredlist.append(self.dbpool.runQuery("INSERT INTO triggers (trigger_types_id, events_id)" +
                                                         " VALUES (?, ?)", (trigger_type_id, event_id)).addCallback(trigger_added))
                    
            d = defer.gatherResults(deferredlist)
            return d
            
        d.addCallback(got_id)
        
        def added_triggers(result):
            print "triggers added"
            
        d.addCallback(added_triggers)
        return d
    
    def add_location(self, name, parent):
        if parent:
            return self.dbpool.runQuery("INSERT INTO locations (name, parent) VALUES (?, ?)", [name, parent])
        else:
            return self.dbpool.runQuery("INSERT INTO locations (name) VALUES (?)", [name])
    
    @inlineCallbacks
    def add_event2(self, name, enabled, conditions, actions, trigger):
        '''
        This adds an event to the database.
        '''
        # Add event, and get event id
        yield self.dbpool.runQuery("INSERT INTO events (name, enabled) VALUES (?, ?)", [name, enabled])
        eventid = yield self.dbpool.runQuery("select id from events order by id desc limit 1")
        eventid = eventid[0][0]
        
        # Add conditions
        for condition in conditions:
            condition_type_id = condition["condition_type"]
            
            yield self.dbpool.runQuery("INSERT INTO conditions (condition_types_id, events_id)" +
                                       " VALUES (?, ?)", [condition_type_id, eventid])
            
            condition_id = yield self.dbpool.runQuery("select id from conditions order by id desc limit 1")
            condition_id = condition_id[0][0]
            
            for name, value in condition["parameters"].iteritems():
                yield self.dbpool.runQuery("INSERT INTO condition_parameters (name, value, " +
                                           "conditions_id) VALUES (?, ?, ?)", [name, value, condition_id])
        
        # Add actions
        for action in actions:
            action_type_id = action["action_type"]
            
            yield self.dbpool.runQuery("INSERT INTO actions (action_types_id, events_id)" +
                                       " VALUES (?, ?)", [action_type_id, eventid])
            
            action_id = yield self.dbpool.runQuery("select id from actions order by id desc limit 1")
            action_id = action_id[0][0]
            
            for name, value in action["parameters"].iteritems():
                yield self.dbpool.runQuery("INSERT INTO action_parameters (name, value, " +
                                           "actions_id) VALUES (?, ?, ?)", [name, value, action_id])
                
            
        # Insert trigger
        yield self.dbpool.runQuery("INSERT INTO triggers (trigger_types_id, events_id, conditions)" +
                                   " VALUES (?,?,?)", [trigger["trigger_type"], eventid, trigger["conditions"]])
 
        trigger_id = yield self.dbpool.runQuery("select id from triggers order by id desc limit 1")
        trigger_id = trigger_id[0][0]
       
        for name, value in trigger["parameters"].iteritems():
            yield self.dbpool.runQuery("INSERT INTO trigger_parameters (name, value, " +
                                       "triggers_id) VALUES (?, ?, ?)", [name, value, trigger_id])
               
    
    def add_trigger(self, trigger_type_id, event_id, value_id, parameters):
        print "INSERT INTO triggers (trigger_types_id, events_id, current_values_id) VALUES (%d, %d, %d)" % (int(trigger_type_id),
                                                                                                             int(event_id),
                                                                                                             int(value_id))
        # sqlite3 uses the qmark ("?") parameter style, and SQLite's function is
        # last_insert_rowid(); "%s" placeholders and MySQL's last_insert_id()
        # do not work here.  cp_max=1 keeps these queries on one connection.
        d = self.dbpool.runQuery("INSERT INTO triggers (trigger_types_id, events_id" +
                                 ", current_values_id) VALUES (?, ?, ?)", (int(trigger_type_id),
                                                                           int(event_id),
                                                                           int(value_id)))
        for name, value in parameters.iteritems():
            self.dbpool.runQuery("INSERT INTO trigger_parameters (name, value, triggers_id) VALUES (?, ?, last_insert_rowid())", (name, value))

        return d
    
    #def add_action(self, action_type_id, event_id):
    
    def query_latest_device_id(self):
        '''
        This function queries the latest device id.
        '''
        # ORDER BY id DESC so this actually returns the most recent device id.
        return self.dbpool.runQuery('select id from devices ORDER BY id DESC LIMIT 1')
         
    def query_triggers(self):
        return self.dbpool.runQuery("SELECT triggers.id, trigger_types.name, triggers.events_id, triggers.conditions " + 
                                    "FROM triggers INNER JOIN trigger_types ON (triggers.trigger_types_id = trigger_types.id)")

    def query_trigger(self, event_id):
        return self.dbpool.runQuery("SELECT triggers.id, trigger_types.name, triggers.events_id, triggers.conditions " + 
                                    "FROM triggers INNER JOIN trigger_types ON (triggers.trigger_types_id = trigger_types.id) " +
                                    "WHERE triggers.events_id = ? LIMIT 1", [event_id])
        
    def query_conditions(self):
        return self.dbpool.runQuery("SELECT conditions.id, condition_types.name, conditions.events_id " + 
                                    "FROM conditions INNER JOIN condition_types ON (conditions.condition_types_id = condition_types.id)")

    def query_actions(self):
        return self.dbpool.runQuery("SELECT actions.id, action_types.name, actions.events_id " + 
                                    "FROM actions INNER JOIN action_types ON (actions.action_types_id = action_types.id)")

    def query_trigger_parameters(self, trigger_id):
        return self.dbpool.runQuery("SELECT name, value from trigger_parameters WHERE triggers_id = ?", [trigger_id])
    
    def query_condition_parameters(self, condition_id):
        return self.dbpool.runQuery("SELECT name, value from condition_parameters WHERE conditions_id = ?", [condition_id])        

    def query_action_parameters(self, action_id):
        return self.dbpool.runQuery("SELECT name, value from action_parameters WHERE actions_id = ?", [action_id])
    
    def query_device_routing_by_id(self, device_id):
        return self.dbpool.runQuery("SELECT devices.address, plugins.authcode FROM devices " +  
                                    "INNER JOIN plugins ON (devices.plugin_id = plugins.id) "
                                    "WHERE devices.id = ?", [device_id])

    def query_value_properties(self, value_id):
        return self.dbpool.runQuery("SELECT current_values.name, devices.address, devices.plugin_id from current_values " + 
                                    "INNER JOIN devices ON (current_values.device_id = devices.id) " + 
                                    "WHERE current_values.id = ?", [value_id])

    def query_plugin_devices(self, plugin_id):
        return self.dbpool.runQuery("SELECT devices.id, devices.name, devices.address, locations.name from devices " +
                                    "LEFT OUTER JOIN locations ON (devices.location_id = locations.id) " +
                                    "WHERE plugin_id=? ", [plugin_id])

    @inlineCallbacks
    def update_or_add_value(self, name, value, pluginid, address, time=None):
        '''
        This function updates or adds values in the HouseAgent database.
        @param name: the name of the value
        @param value: the actual value
        @param pluginid: the plugin which holds the device information
        @param address: the address of the device being handled
        @param time: the time at which the update was received; defaults to now()
        '''
        if not time:
            updatetime = datetime.datetime.now().isoformat(' ').split('.')[0]
        else:
            updatetime = datetime.datetime.fromtimestamp(time).isoformat(' ').split('.')[0]
        
        # Query device first
        device_id = yield self.dbpool.runQuery('select id from devices WHERE plugin_id = ? and address = ? LIMIT 1', (pluginid, address) )

        try:
            device_id = device_id[0][0]
        except:
            returnValue('') # device does not exist
        
        current_value = yield self.dbpool.runQuery("select id, name, history from current_values where name=? AND device_id=? LIMIT 1", (name, device_id))
    
        try:
            value_id = current_value[0][0]
        except:
            value_id = None
    
        if value_id:
            value_id = current_value[0][0]
            
            if current_value[0][2] not in (0, None):
                # "time" may be None (the default); fall back to the current
                # time so int(None) does not raise a TypeError.
                if time is None:
                    import time as _time
                    timestamp = int(_time.time())
                else:
                    timestamp = int(time)
                DataHistory("data", current_value[0][0], value, "GAUGE", 60, timestamp)
                
            yield self.dbpool.runQuery("UPDATE current_values SET value=?, lastupdate=? WHERE id=?", (value, updatetime, value_id))
        else:
            yield self.dbpool.runQuery("INSERT INTO current_values (name, value, device_id, lastupdate) VALUES (?, ?, (select id from devices where address=? AND plugin_id=?),  ?)", (name, value, address, pluginid, updatetime))
            current_value = yield self.dbpool.runQuery("select id from current_values where name=? AND device_id=?", (name, device_id))
            value_id = current_value[0][0]
                        
        returnValue(value_id)

    def register_plugin(self, name, uuid, location):
        return self.dbpool.runQuery("INSERT INTO plugins (name, authcode, location_id) VALUES (?, ?, ?)", [str(name), str(uuid), location])

    def query_plugins(self):
        return self.dbpool.runQuery("SELECT plugins.name, plugins.authcode, plugins.id, locations.name, plugins.location_id from plugins " +
                                    "LEFT OUTER JOIN locations ON (plugins.location_id = locations.id)")
    
    def query_plugin_by_type_name(self, type_name):
        return self.dbpool.runQuery("SELECT plugins.id, plugins.authcode from plugins " +
                                    "INNER JOIN plugin_types ON (plugins.plugin_type_id = plugin_types.id)" +
                                    "WHERE plugin_types.name = ? LIMIT 1", [type_name])

    def query_device_classes(self):
        return self.dbpool.runQuery("SELECT * from device_class order by name ASC")
    
    def query_device_types(self):
        return self.dbpool.runQuery("SELECT * from device_types order by name ASC")
       
    @inlineCallbacks
    def cb_device_crud(self, result, action, id=None, plugin=None, address=None, name=None, location=None):
        '''
        Callback function that gets called when a device has been created, updated or deleted in the database.
        @param result: the result of the action
        @param action: the action initiating the callback; one of create, update or delete
        @param id: the id of the device (for updates)
        @param plugin: the uuid of the plugin owning the device
        @param address: the address of the device
        @param name: the name of the device
        @param location: the name of the location associated with the device
        '''
        if action == "create":
            parms = yield self.dbpool.runQuery("SELECT plugins.authcode, devices.address, devices.name, locations.name FROM devices, plugins, locations WHERE devices.plugin_id = plugins.id AND devices.location_id = locations.id ORDER BY devices.id DESC LIMIT 1")
            
        if action == "update":
            parms = yield self.dbpool.runQuery("SELECT plugins.authcode, devices.address, devices.name, locations.name FROM devices, plugins, locations WHERE devices.plugin_id = plugins.id AND devices.location_id = locations.id AND devices.id=?", [id])

        if action != "delete":
            plugin = parms[0][0]
            address = parms[0][1]
            name = parms[0][2]
            location = parms[0][3]
            
        parameters = {"plugin": plugin, 
                      "address": address,
                      "name": name,
                      "location": location}

        if self.coordinator:
            self.coordinator.send_crud_update("device", action, parameters)    

    def save_device(self, name, address, plugin_id, location_id, id=None):
        '''
        This function saves a device in the HouseAgent database.
        @param name: the name of the device
        @param address: the address of the device
        @param plugin_id: the plugin_id of the associated plugin
        @param location_id: the location_id of the associated location
        @param id: the id of the device (in case this is an update)
        '''
        
        if not id:
            return self.dbpool.runQuery("INSERT INTO devices (name, address, plugin_id, location_id) VALUES (?, ?, ?, ?)", \
                                        (name, address, plugin_id, location_id)).addCallback(self.cb_device_crud, "create")
        else:
            return self.dbpool.runQuery("UPDATE devices SET name=?, address=?, plugin_id=?, location_id=? WHERE id=?", \
                                        (name, address, plugin_id, location_id, id)).addCallback(self.cb_device_crud, "update", id)

    def del_device(self, id):
        
        def delete(result, id):
            self.dbpool.runQuery("DELETE FROM devices WHERE id=?", [id]).addCallback(self.cb_device_crud, "delete", id, result[0][0], result[0][1], result[0][2], result[0][3])
        
        return self.dbpool.runQuery("SELECT plugins.authcode, devices.address, devices.name, locations.name FROM plugins, devices, locations " +
                                    "WHERE devices.plugin_id = plugins.id AND devices.location_id = locations.id AND devices.id=?", [id]).addCallback(delete, id)

    def del_location(self, id):
        return self.dbpool.runQuery("DELETE FROM locations WHERE id=?", [id])

    @inlineCallbacks
    def del_event(self, id):
        # Delete all parameters for this event id
        yield self.dbpool.runQuery("DELETE FROM trigger_parameters where triggers_id=" +
                                   " (select id from triggers where events_id=?)", [id])
        
        yield self.dbpool.runQuery("DELETE FROM condition_parameters where conditions_id=" +
                                   " (select id from conditions where events_id=?)" , [id])
    
        yield self.dbpool.runQuery("DELETE FROM action_parameters where actions_id=" +
                                   " (select id from actions where events_id=?)", [id])
        
        yield self.dbpool.runQuery("DELETE FROM triggers where events_id=?", [id])
        yield self.dbpool.runQuery("DELETE FROM actions where events_id=?", [id])
        yield self.dbpool.runQuery("DELETE FROM conditions where events_id=?", [id])
        
        yield self.dbpool.runQuery("DELETE FROM events where id=?", [id])

    def del_plugin(self, id):
        return self.dbpool.runQuery("DELETE FROM plugins WHERE id=?", [id])

    def query_locations(self):
        return self.dbpool.runQuery("select locations.id, locations.name, l2.name from locations " +  
                                    "left join locations as l2 on locations.parent=l2.id")

    def query_values(self):
        return self.dbpool.runQuery("SELECT current_values.name, current_values.value, devices.name, " + 
                               "current_values.lastupdate, plugins.name, devices.address, locations.name, current_values.id" + 
                               ", control_types.name, control_types.id, history FROM current_values INNER " +
                               "JOIN devices ON (current_values.device_id = devices.id) INNER JOIN plugins ON (devices.plugin_id = plugins.id) " + 
                               "LEFT OUTER JOIN locations ON (devices.location_id = locations.id) " + 
                               "LEFT OUTER JOIN control_types ON (current_values.control_type_id = control_types.id)")

    def query_devices(self):      
        return self.dbpool.runQuery("SELECT devices.id, devices.name, devices.address, plugins.name, locations.name from devices " +
                                    "INNER JOIN plugins ON (devices.plugin_id = plugins.id) " +
                                    "LEFT OUTER JOIN locations ON (devices.location_id = locations.id)")

    def query_location(self, id):
        return self.dbpool.runQuery("SELECT id, name, parent FROM locations WHERE id=?", [id])
    
    def query_plugin(self, id):
        return self.dbpool.runQuery("SELECT id, name, location_id FROM plugins WHERE id=?", [id])
    
    def query_device(self, id):
        return self.dbpool.runQuery("SELECT id, name, address, plugin_id, location_id FROM devices WHERE id=?", [id])

    def query_triggertypes(self):
        return self.dbpool.runQuery("SELECT id, name from trigger_types")

    def query_actiontypes(self):
        return self.dbpool.runQuery("SELECT id, name from action_types")
    
    def query_conditiontypes(self):
        return self.dbpool.runQuery("SELECT id, name from condition_types")
    
    def query_controltypes(self):
        return self.dbpool.runQuery("SELECT id, name from control_types")
    
    def query_controltypename(self, current_value_id):
        return self.dbpool.runQuery("select control_types.name from current_values " +
                                    "INNER JOIN controL_types ON (control_types.id = current_values.control_type_id) " +
                                    "where current_values.id=?", [current_value_id])
    
    def query_devices_simple(self):
        return self.dbpool.runQuery("SELECT id, name from devices")
    
    def query_plugintypes(self):
        return self.dbpool.runQuery("SELECT id, name from plugin_types")

    def query_historic_values(self):
        return self.dbpool.runQuery("select current_values.id, current_values.name, devices.name, current_values.history from current_values, devices where current_values.device_id = devices.id and history = 1;")

    def query_controllable_devices(self):
        return self.dbpool.runQuery("SELECT devices.name, devices.address, plugins.name, plugins.authcode, current_values.value, devices.id, control_types.name FROM current_values " +
                                    "INNER JOIN devices ON (current_values.device_id = devices.id) " +
                                    "INNER JOIN plugins ON (devices.plugin_id = plugins.id) " +
                                    "INNER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                                    "WHERE current_values.control_type_id != ''")
    
    def query_action_types_by_device_id(self, device_id):
        return self.dbpool.runQuery("SELECT current_values.id, current_values.name, control_types.name FROM current_values " +
                                    "INNER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                                    "WHERE current_values.device_id = ?", [device_id])

    def query_action_type_by_value_id(self, value_id):
        return self.dbpool.runQuery("SELECT control_types.name FROM current_values " +
                                    "INNER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                                    "WHERE current_values.id = ? LIMIT 1", [value_id])
        
    def query_values_by_device_id(self, device_id):
        return self.dbpool.runQuery("SELECT id, name from current_values WHERE device_id = '%s'" % device_id)

    def query_device_type_by_device_id(self, device_id):
        return self.dbpool.runQuery("SELECT device_types.name FROM devices " +  
                                    "INNER JOIN device_types ON (device_types.id = devices.device_type_id) " + 
                                    "WHERE devices.id = ? LIMIT 1", [device_id])

    def query_value_by_valueid(self, value_id):
        return self.dbpool.runQuery("SELECT value,name from current_values WHERE id = ? LIMIT 1", [value_id])
    
    def query_extra_valueinfo(self, value_id):
        return self.dbpool.runQuery("select devices.name, current_values.name from current_values " +
                                    "inner join devices on (current_values.device_id = devices.id) " + 
                                    "where current_values.id = ?", [value_id])

    def set_history(self, id, history):
        return self.dbpool.runQuery("UPDATE current_values SET history=? WHERE id=?", [history, id])
    
    def set_controltype(self, id, control_type):
        return self.dbpool.runQuery("UPDATE current_values SET control_type_id=? WHERE id=?", [control_type, id])

    def update_location(self, id, name, parent):
        return self.dbpool.runQuery("UPDATE locations SET name=?, parent=? WHERE id=?", [name, parent, id])
    
    def update_plugin(self, id, name, location):
        return self.dbpool.runQuery("UPDATE plugins SET name=?, location_id=? WHERE id=?", [name, location, id])
    
    def query_events(self):
        return self.dbpool.runQuery("SELECT id, name, enabled from events")
Code example #43
0
File: database.py Project: kronat/Marnatarlo
class MDatabase:
    """
    Sqlite database for Marnatarlo
    """
    def __init__(self, dbname):
        self.dbname = dbname
        try:
            fh = open(dbname)
            fh.close()
        except IOError:
            # Database file does not exist yet; create the schema synchronously.
            conn = sqlite3.connect(dbname)
            curs = conn.cursor()
            curs.execute("Create table users (name text unique, password text)")
            curs.execute("Create table stats(name text, played INTEGER, won INTEGER, FOREIGN KEY(name) REFERENCES users(name))")
            conn.commit()
            curs.close()
            conn.close()
        self.__dbpool = ConnectionPool('sqlite3', self.dbname)

    def shutdown(self):
        """
            Shutdown function
            It's a required task to shutdown the database connection pool:
                garbage collector doesn't shutdown associated thread
        """
        self.__dbpool.close()

    def returnOk(self, o):
        return True

    def returnFailure(self, o):
        return False

    def returnResult(self, result):
        return result

    def _returnResult(self, result, count=None):
        # runQuery already delivers the fetched rows; optionally truncate them
        if count:
            return result[:count]
        return result

    def execSql(self, sql, params=()):
        """
        Exec an SQL command; the returned Deferred fires with True on
        success and False on failure

        @type sql C{str}
        @param sql SQL command
        """
        d = self.__dbpool.runQuery(sql, params)
        d.addCallbacks(self.returnOk, self.returnFailure)
        return d

    def fetch(self, sql, params=()):
        """
        Exec an SQL command, fetching the resulting rows

        @type sql C{str}
        @param sql SQL command
        """
        d = self.__dbpool.runQuery(sql, params)
        d.addCallback(self.returnResult)
        d.addErrback(self.returnFailure)
        return d

    def get_stats(self, user):
        query = "SELECT * FROM stats WHERE name=?"
        return self.fetch(query, (user,))

    def user_won(self, user):
        query = "UPDATE stats SET won=won+1 WHERE name=?"
        return self.execSql(query, (user,))

    def user_play(self, user):
        query = "UPDATE stats SET played=played+1 WHERE name=?"
        return self.execSql(query, (user,))

    def save_user(self, user, passwd):
        """
        Save user / password into db

        @type user C{str}
        @type passwd C{str}
        """
        def insert_user(users, user, passwd):
            if len(users) > 0:
                return self.returnFailure(users)
            query = "INSERT INTO users(name, password) VALUES (?, ?)"
            d = self.execSql(query, (user, passwd))
            # add the stats row only after the user row has been written
            d.addCallback(lambda _: self.execSql(
                "INSERT INTO stats(name, played, won) VALUES (?, 0, 0)", (user,)))
            return d

        return self.get_user_login_info(user).addCallback(insert_user, user, passwd)

    def get_user_login_info(self, user):
        """
        Get a tuple, user / password
        @type user C{str}
        """
        query = "SELECT * FROM users WHERE name=?";
        return self.fetch(query, (user,))

    def get_all_users(self):
        """
        Get all users from db
        """
        query = "SELECT u.name, s.played, s.won FROM users AS u, stats AS s WHERE u.name = s.name";
        return self.fetch(query)
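
A hypothetical driver script for MDatabase (the database file name, user, and password are illustrative): register a user, record one played game, then read the stats back and shut down cleanly.

from twisted.internet import reactor

db = MDatabase('marnatarlo.db')

def report(stats):
    print(stats)  # e.g. [('alice', 1, 0)]
    db.shutdown()
    reactor.stop()

d = db.save_user('alice', 'secret')
d.addCallback(lambda _: db.user_play('alice'))
d.addCallback(lambda _: db.get_stats('alice'))
d.addCallback(report)

reactor.run()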
Code example #45
0
File: database.py Project: tomsoir/harold
def __init__(self, db_config):
    Plugin.__init__(self)
    self.module, kwargs = db_config.get_module_and_params()
    ConnectionPool.__init__(self, self.module.__name__, **kwargs)
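
This fragment presupposes a config object whose get_module_and_params() returns a DBAPI module plus the keyword arguments for the pool; a minimal sketch of that contract (the class and its values are assumptions, not harold's actual config):

import sqlite3

class DatabaseConfig(object):
    def get_module_and_params(self):
        # any DBAPI module works; ConnectionPool only needs its dotted name
        return sqlite3, {'database': 'harold.db', 'check_same_thread': False}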
Code example #46
0
File: test.py Project: HSATAN/my-app
# coding=utf8

from twisted.internet import reactor, defer
from twisted.enterprise.adbapi import ConnectionPool
import psycopg2
import psycopg2.extras
#from eventlet.twistedutil import block_on
import time

dbpool = ConnectionPool("psycopg2",
                        host="47.93.5.189",
                        user="******",
                        password="******",
                        database="mzhan",
                        cursor_factory=psycopg2.extras.DictCursor)

import pymysql
db = pymysql.connect(host="47.93.5.189",
                     user="******",
                     password="******",
                     database="spark",
                     charset="utf8")
cursor = db.cursor()


def _getData(txn, user):
    txn.execute("select * from mzhan_user")
    result = txn.fetchall()
    if result:
        return result
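
A sketch of how _getData would typically be driven through the pool (the user argument is illustrative; the DictCursor rows produced by the pool above behave like mappings):

def show_users(rows):
    for row in rows:
        print(dict(row))  # DictCursor rows convert cleanly to dicts

d = dbpool.runInteraction(_getData, 'someuser')
d.addCallback(show_users)
d.addErrback(lambda f: f.printTraceback())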
Code example #47
0
File: sqlmagic.py Project: nyov/scrapyext
class SQLMagicPipeline(object):

	def __init__(self, settings, **kwargs):
		"""Connect to database in the pool."""

		if not isinstance(settings, dict):
			raise NotConfigured('No database connection settings found.')

		self.settings = settings
		self.stats = kwargs.get('stats')
		self.debug = kwargs.get('debug', False)
		self.paramstyle = ':'
		self.identifier = '"' # default to ANSI quoting
		self.queries = {
			'select': "SELECT $fields FROM $table:esc WHERE $indices:and", # select on UniqueFields
			'selectall': "SELECT $fields FROM $table:esc",
			'selectone': "SELECT $fields FROM $table:esc WHERE $indices:and LIMIT 1", # if backend supports LIMIT
			#
			'delete'  : "DELETE FROM $table:esc WHERE $indices:and", # match on UniqueFields
			'deleteme': "DELETE FROM $table:esc WHERE $fields_values:and", # exact item match
		}
		self.dbapi = None

		if self.settings.get('drivername') == 'sqlite':
			self.dbapi = __import__('sqlite3', fromlist=[''])
			self.__dbpool = ConnectionPool('sqlite3', self.settings.get('database', ':memory:'),
				# apparently the connection pool / thread pool does not do the teardown in the same thread
				# https://twistedmatrix.com/trac/ticket/3629
				# therefore throwing errors on finalClose at reactor shutdown
				# TODO: should be able to work around that?
				check_same_thread=False, # SQLite must be compiled threadsafe to use this
				# limit connection pool to one thread to avoid "database is locked" errors
				#cp_max=1,
				# - or raise the database timeout sufficiently
				timeout=300,
			)
			# alternative escaping parameter
			#self.paramstyle = '?'
			#self.paramstyle = ':'
			#self.paramstyle = '$'
			# default statements for sqlite
			self.queries.update({
				'insert': "INSERT INTO $table:esc SET $fields_values",
				'upsert': "INSERT OR REPLACE INTO $table:esc ($fields) VALUES ($values)",
				'update': "UPDATE $table:esc SET $fields_values WHERE $indices:and",
			})
		elif self.settings.get('drivername') == 'pgsql':
			self.dbapi = __import__('psycopg2', fromlist=[''])
			#from psycopg2.extras import DictCursor
			self.__dbpool = ConnectionPool('psycopg2', database=self.settings.get('database'),
				user = self.settings.get('username'),
				password = self.settings.get('password', None),
				host = self.settings.get('host', None), # default to unix socket
				port = self.settings.get('port', '5432'),
			#	cursor_factory = DictCursor,
			)
			self.paramstyle = '%s'
			# default statements for postgres
			self.queries.update({
				'insert': "INSERT INTO $table:esc ($fields) VALUES ($values)",
				'update': "UPDATE $table:esc SET $fields_values WHERE $indices:and",
			})
		elif self.settings.get('drivername') == 'mysql':
			self.dbapi = __import__('MySQLdb', fromlist=[''])
			from MySQLdb import cursors
			self.__dbpool = ReconnectingConnectionPool('MySQLdb', db=self.settings.get('database'),
				user = self.settings.get('username'),
				passwd = self.settings.get('password', None),
				host = self.settings.get('host', 'localhost'), # should default to unix socket
				port = self.settings.get('port', 3306),
				cursorclass = cursors.DictCursor,
				charset = 'utf8',
				use_unicode = True,
				# connpool settings
				cp_reconnect = True,
				#cp_noisy = True,
				#cp_min = 1,
				#cp_max = 1,
			)
			self.paramstyle = '%s'
			self.identifier = '`' # MySQL quoting
			# default statements for mysql
			self.queries.update({
				'insert': "INSERT INTO $table:esc ($fields) VALUES ($values)",
			#	'upsert': "REPLACE INTO $table ($fields) VALUES ($values)",
				'upsert': "INSERT INTO $table:esc SET $fields_values ON DUPLICATE KEY UPDATE $fields_values",
				'update': "UPDATE $table:esc SET $fields_values WHERE $indices:and",
			})
		elif self.settings.get('drivername') == 'firebird':
			# untested
			self.dbapi = __import__('fdb', fromlist=[''])
			self.__dbpool = ConnectionPool('fdb', database=self.settings.get('database'),
				user = self.settings.get('username'),
				password = self.settings.get('password', None),
				host = self.settings.get('host', None), # default to unix socket
				port = self.settings.get('port', 3050),
				#dialect = 1, # necessary for all dialect 1 databases
				charset = 'UTF8',# specify a character set for the connection
			)
			self.paramstyle = '?'
			self.queries.update({
				'insert': "INSERT INTO $table:esc ($fields) VALUES ($values)",
				'update': "UPDATE $table:esc SET $fields_values WHERE $indices:and",
			})

		self.queries.update(kwargs.get('queries', {}))

	@classmethod
	def from_crawler(cls, crawler):
		if not crawler.settings.get('SQLMAGIC_DATABASE'):
			raise NotConfigured('No database connection settings found.')

		o = cls(
			settings=crawler.settings.get('SQLMAGIC_DATABASE'),
			stats=crawler.stats,
			queries=crawler.settings.get('SQLMAGIC_QUERIES', {}),
			debug=crawler.settings.getbool('SQLMAGIC_DEBUG')
		)
		return o

	def open_spider(self, spider):
		self.on_connect()

	def on_connect(self):
		## override this to run some queries after connecting
		# e.g. create tables for an in-memory SQLite database
		pass

	def close_spider(self, spider):
		self.shutdown()

	def shutdown(self):
		"""Shutdown connection pool, kill associated threads"""
		self.__dbpool.close()

	def process_item(self, item, spider):
		"""Process the item."""

		# Only handle items inheriting SQLItem
		if not isinstance(item, SQLItem):
			return item

		self.stats.inc_value('sqlmagic/total_items_caught')

		# always return original item
		deferred = self.operation(item, spider)
		deferred.addBoth(lambda _: item)
		return deferred

	def operation(self, item, spider):

		def on_insert(result, query, params):
			self.stats.inc_value('sqlmagic/sqlop_success_insert')
			if self.debug:
				qlog = self._log_preparedsql(query, params)
				log.msg('%s executed: %s' % (self.__class__.__name__, qlog), level=log.DEBUG, spider=spider)
			return result

		def on_update(result, query, params):
			self.stats.inc_value('sqlmagic/sqlop_success_update')
			if self.debug:
				qlog = self._log_preparedsql(query, params)
				log.msg('%s executed: %s' % (self.__class__.__name__, qlog), level=log.DEBUG, spider=spider)
			return result

		def on_integrityerror(error, query, params):
			error.trap(self.dbapi.IntegrityError)
			e = error.getErrorMessage()
			self.stats.inc_value('sqlmagic/error_integrity')
			if self.debug:
				qlog = self._log_preparedsql(query, params)
				log.msg('%s failed executing: %s\nError: %s' % (self.__class__.__name__, qlog, e), level=log.INFO, spider=spider)
		#	error.raiseException() # keep bubbling

		def on_operationalerror(error, query, params):
			error.trap(self.dbapi.OperationalError)
			e = error.getErrorMessage()
			self.stats.inc_value('sqlmagic/error_operational')
			if self.debug:
				qlog = self._log_preparedsql(query, params)
				log.msg('%s failed executing: %s\nError: %s' % (self.__class__.__name__, qlog, e), level=log.WARNING, spider=spider)
		#	error.raiseException() # keep bubbling

		def on_seriouserror(error, query, params):
			error.trap(self.dbapi.ProgrammingError, self.dbapi.InterfaceError)
			e = error.getErrorMessage()
			self.stats.inc_value('sqlmagic/error_connection')
			if self.debug:
				qlog = self._log_preparedsql(query, params)
				log.msg('%s FAILED executing: %s\nError: %s' % (self.__class__.__name__, qlog, e), level=log.WARNING, spider=spider)
			error.raiseException() # keep bubbling
			return error

		def update(error, query, params):
			error.trap(self.dbapi.IntegrityError)
			if error.value[0] != 1062: # Duplicate key
				error.raiseException() # keep bubbling
			#e = error.getErrorMessage()
			#if self.debug:
			#	qlog = self._log_preparedsql(query, params)
			#	log.msg('%s got error %s - trying update' % (self.__class__.__name__, e), level=log.DEBUG, spider=spider)
			self.stats.inc_value('sqlmagic/sqlop_update_after_insert_tries')
			d = self.__dbpool.runInteraction(self.transaction, query, params, item, spider)
			d.addCallback(on_update, query, params)
			return d

		# try insert
		query, params = _sql_format(self.queries['insert'], item, paramstyle=self.paramstyle, identifier=self.identifier)
		#query, params = _sql_format(self.queries['upsert'], item, paramstyle=self.paramstyle, identifier=self.identifier)
		deferred = self.__dbpool.runInteraction(self.transaction, query, params, item, spider)
		deferred.addCallback(on_insert, query, params)
		deferred.addErrback(on_seriouserror, query, params)
		deferred.addErrback(on_operationalerror, query, params)
		#deferred.addErrback(on_integrityerror, query, params) # ignore failing inserts before update
		# on failure, update
		query, params = _sql_format(self.queries['update'], item, paramstyle=self.paramstyle, identifier=self.identifier)
		deferred.addErrback(update, query, params)
		deferred.addErrback(on_seriouserror, query, params)
		deferred.addErrback(on_operationalerror, query, params)
		deferred.addErrback(on_integrityerror, query, params)
		deferred.addErrback(self._database_error, item, spider)

	#	deferred = self.insert_or_update((query,params), (update, uparams), item, spider)

		self.stats.inc_value('sqlmagic/total_items_returned')
		return deferred

	def transaction(self, txn, query, params, item, spider):
		self.stats.inc_value('sqlmagic/sqlop_transact_%s' % query[:6].lower())
		txn.execute(query, params)

	"""
	def xtransaction(self, txn, query, params, item, spider):
		# primary key check
		query, params = _sql_format(self.queries['select'], item, paramstyle=self.paramstyle, identifier=self.identifier)
		txn.execute(query, params)
		result = txn.fetchone()
		if result:
			log.msg("Item already in db: (id) %s item:\n%r" % (result['id'], item), level=log.WARNING)

		query, params = _sql_format(self.queries['insert'], item, paramstyle=self.paramstyle, identifier=self.identifier)
		# transaction in thread
		qlog = self._log_preparedsql(query, params)
		try:
			txn.execute(query, params)
		except self.dbapi.IntegrityError as e:
			#spider.log('%s FAILED executing: %s' % (self.__class__.__name__, qlog), level=log.DEBUG)
			query, params = _sql_format(self.queries['update'], item, paramstyle=self.paramstyle, identifier=self.identifier)
			qlog = self._log_preparedsql(query, params)
			try:
				#spider.log('%s executing: %s' % (self.__class__.__name__, qlog), level=log.DEBUG)
				txn.execute(query, params)
			except self.dbapi.OperationalError as e:
				# retrying in new transaction
			#	spider.log('%s errored. Retrying.\nError: %s\nQuery: %s' % (self.__class__.__name__, e, qlog), level=log.WARNING)
			#	self._spool.append((query, params, item))
			#except Exception as e:
				if self.debug:
					spider.log('%s FAILED executing: %s\nError: %s' % (self.__class__.__name__, qlog, e), level=log.WARNING)
				raise
			finally:
				if self.debug:
					spider.log('%s executed: %s' % (self.__class__.__name__, qlog), level=log.DEBUG)
		except self.dbapi.OperationalError as e:
			# also try again
			if self.debug:
				spider.log('%s failed: %s' % (self.__class__.__name__, qlog), level=log.DEBUG)
			raise
		finally:
			if self.debug:
				spider.log('%s executed: %s' % (self.__class__.__name__, qlog), level=log.DEBUG)
	"""

	def _log_preparedsql(self, query, params):
		"""Simulate escaped query for log"""
		for p in params:
			query = re.sub('(\\'+self.paramstyle+r'\d?)', '"%s"' % p, query, count=1)
		return query

	def _database_error(self, e, item, spider=None):
		"""Log exceptions."""
		if spider:
			log.err(e, spider=spider)
		else:
			log.err(e)

	def query(self, sql):
		# run a query in the connection pool
		# parameters for prepared statements must be passed as 'sql=(query, params)'
		# (possible use-case from inside spider code)
		'''Spider Example: build start requests from database results

		from scrapy.exceptions import CloseSpider, NotConfigured
		from ..pipelines.sqlmagic import SQLMagicPipeline

		class MySpider(Spider):
			def spider_opened(self, spider):
				try:
					self.db = SQLMagicPipeline(self.settings.get('SQLMAGIC_DATABASE'))
				except NotConfigured:
					raise CloseSpider('Could not get database settings.')

			@defer.inlineCallbacks
			def db_queries(self, response):
				query = """CALL procedure ()"""
				result = yield self.db.query(query)

				# build requests
				requests = []
				for value in result:
					r = yield self.build_request_fromdb(response, value)
					requests.append(r)

				# queue them
				defer.returnValue(requests)

			def start_requests(self):
				yield Request(self.start_urls[0], callback=self.db_queries)

			def build_request_fromdb(self, response, db):
				# custom logic to convert db result into a request
				r = Request(response.url)
				r.callback = self.parse
				return r
		'''
		if sql[:6].lower() in ('select',):
			deferred = self.__dbpool.runQuery(sql)
		elif sql[:4].lower() in ('call',):
			# potential fail: procedure must run a SELECT for this,
			# otherwise it should do runOperation
			deferred = self.__dbpool.runQuery(sql)
		else:
			deferred = self.__dbpool.runOperation(sql)
		return deferred
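
A hypothetical Scrapy configuration wiring the pipeline up (the module path and priority are illustrative; the SQLMAGIC_* setting names come from from_crawler above):

# settings.py
ITEM_PIPELINES = {
    'myproject.pipelines.sqlmagic.SQLMagicPipeline': 300,
}
SQLMAGIC_DATABASE = {
    'drivername': 'sqlite',
    'database': 'items.db',
}
SQLMAGIC_DEBUG = True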
Code example #48
0
File: dbpool.py Project: wgnet/twoost
def connect(self):
    new_connection = self.threadID() not in self.connections
    conn = ConnectionPool.connect(self)
    if new_connection:
        self.prepare_connection(conn)
    return conn
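
A context sketch for this override (the subclass name and the PRAGMA are assumptions; threadID() and connections are real ConnectionPool internals): a pool that runs one-time setup whenever a worker thread opens a fresh DBAPI connection.

from twisted.enterprise.adbapi import ConnectionPool

class PreparingConnectionPool(ConnectionPool):
    def prepare_connection(self, conn):
        # runs once per new DBAPI connection, in the worker thread
        cur = conn.cursor()
        cur.execute("PRAGMA foreign_keys = ON")  # sqlite example
        cur.close()

    def connect(self):
        new_connection = self.threadID() not in self.connections
        conn = ConnectionPool.connect(self)
        if new_connection:
            self.prepare_connection(conn)
        return conn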
Code example #49
0
#!/usr/bin/env python3

import os
from fzone import Repo
ROOT = os.path.dirname(__file__)
repo = Repo(os.path.join(ROOT, 'client'))

from twisted.enterprise.adbapi import ConnectionPool
dbpool = ConnectionPool("sqlite3", repo.index(), check_same_thread=False)

from twisted.conch.ssh.transport import SSHClientTransport
from twisted.internet.defer import succeed
from fzone.ssh import FZoneConnection


class FZoneClientConnection(FZoneConnection):
    def serviceStarted(self):
        super().serviceStarted()
        self.pull()


class FZoneClientTransport(SSHClientTransport):
    def verifyHostKey(self, hostKey, fingerprint):
        return succeed(True)

    def connectionSecure(self):
        self.requestService(FZoneClientConnection())


from twisted.internet.protocol import ClientFactory
Code example #51
0
File: database.py Project: eventable/CalendarServer
class AbstractADBAPIDatabase(object):
    """
    A generic SQL database.
    """

    def __init__(self, dbID, dbapiName, dbapiArgs, persistent, **kwargs):
        """

        @param persistent: C{True} if the data in the DB must be preserved during upgrades,
            C{False} if the DB data can be re-created from an external source.
        @type persistent: bool
        """
        self.dbID = dbID
        self.dbapiName = dbapiName
        self.dbapiArgs = dbapiArgs
        self.dbapikwargs = kwargs

        self.persistent = persistent

        self.initialized = False


    def __repr__(self):
        return "<%s %r>" % (self.__class__.__name__, getattr(self, "pool", None))


    @inlineCallbacks
    def open(self):
        """
        Access the underlying database.
        @return: a db2 connection object for this index's underlying data store.
        """
        if not self.initialized:

            self.pool = ConnectionPool(self.dbapiName, *self.dbapiArgs, **self.dbapikwargs)

            # sqlite3 is not thread safe which means we have to close the sqlite3 connections in the same thread that
            # opened them. We need a special thread pool class that has a thread worker function that does a close
            # when a thread is closed.
            if self.dbapiName == "sqlite3":
                self.pool.threadpool.stop()
                self.pool.threadpool = ConnectionClosingThreadPool(1, 1)
                self.pool.threadpool.start()
                self.pool.threadpool.pool = self.pool

            #
            # Set up the schema
            #
            # Create CALDAV table if needed

            try:
                test = (yield self._test_schema_table())
                if test:
                    version = (yield self._db_value_for_sql("select VALUE from CALDAV where KEY = 'SCHEMA_VERSION'"))
                    dbtype = (yield self._db_value_for_sql("select VALUE from CALDAV where KEY = 'TYPE'"))

                    if (version != self._db_version()) or (dbtype != self._db_type()):

                        if dbtype != self._db_type():
                            log.error("Database %s has different type (%s vs. %s)"
                                      % (self.dbID, dbtype, self._db_type()))

                            # Delete this index and start over
                            yield self._db_remove()
                            yield self._db_init()

                        elif version != self._db_version():
                            log.error("Database %s has different schema (v.%s vs. v.%s)"
                                      % (self.dbID, version, self._db_version()))

                            # Upgrade the DB
                            yield self._db_upgrade(version)

                else:
                    yield self._db_init()
                self.initialized = True
            except:
                # Clean up upon error so we don't end up leaking threads
                self.pool.close()
                self.pool = None
                raise


    def close(self):

        if self.initialized:
            try:
                self.pool.close()
            except Exception as e:
                log.error("Error whilst closing connection pool: %s" % (e,))
            self.pool = None
            self.initialized = False