Exemple #1
0
def getReadyPostgres(connstr):
    """Prepare the Postgres schema and return a Deferred firing with the pool.

    Two interactions create the ``sticky`` and ``image`` tables; creation
    failures (typically "table already exists") are logged and ignored.
    """
    pool = ConnectionPool('psycopg2', connstr)

    def createSticky(cur):
        # Best-effort create: an existing table only produces a logged error.
        try:
            cur.execute('''create table sticky (
                    id serial primary key,
                    board_id text,
                    updated timestamp default current_timestamp,
                    note text,
                    x integer,
                    y integer)''')
        except Exception as err:
            log.err(err)

    def createImage(cur):
        # Same best-effort pattern for the image table and its unique index.
        try:
            cur.execute('''create table image (
                id serial primary key,
                board_id text,
                updated timestamp default current_timestamp,
                data bytea
            )''')
            cur.execute('''create unique index image_board_id on image(board_id)''')
        except Exception as err:
            log.err(err)

    d = pool.runInteraction(createSticky)
    d.addCallback(lambda _ignored: pool.runInteraction(createImage))
    return d.addCallbacks((lambda _ignored: pool), log.err)
class ChannelLogger(object):
    """Logs channel messages to a sqlite3 database via a Twisted ConnectionPool."""
    implements(IDBLogger)

    def __init__(self, dbfile, **kw):
        # XXX Ignore thread warnings from sqlite3.  Should be OK.
        # http://twistedmatrix.com/trac/ticket/3629
        kw.setdefault("check_same_thread", False)

        from twisted.enterprise.adbapi import ConnectionPool
        driver = 'sqlite3'  # renamed from ``type`` to avoid shadowing the builtin
        self.dbfile = dbfile
        self.dbconn = ConnectionPool(driver, dbfile, **kw)
        self.table = 'channels'
        self.initialize_db()

    def initialize_db(self):
        """Create the log table if it does not exist; returns a Deferred."""
        return self.dbconn.runInteraction(self._initialize_db, self.table)

    @staticmethod
    def _initialize_db(tx, table):
        # Only the trusted internal table name is interpolated; no user data.
        tx.execute('CREATE TABLE IF NOT EXISTS {0} ('
                   'id INTEGER PRIMARY KEY AUTOINCREMENT,'
                   'timestamp INTEGER,'
                   'channel TEXT,'
                   'nick TEXT,'
                   'msg TEXT )'.format(table))

    def log(self, who, chan, msg):
        """Record one channel message; returns a Deferred."""
        return self.dbconn.runInteraction(self._log, who, chan, msg, self.table)

    @staticmethod
    def _log(tx, who, chan, msg, table):
        # Message values are bound through ? placeholders; only the trusted
        # table name is interpolated into the statement text.
        now = int(time.time())
        stmt = 'INSERT INTO {0}(timestamp,nick,channel,msg) VALUES(?,?,?,?)'
        tx.execute(stmt.format(table), (now, who, chan, msg) )
Exemple #3
0
def getReadySqlite(connstr):
    """Prepare the sqlite schema and return a Deferred firing with the pool.

    A single-connection pool is used (cp_min=1, cp_max=1) and both tables
    plus the unique index are created inside one interaction; failures are
    logged and otherwise ignored.
    """
    pool = ConnectionPool('pysqlite2.dbapi2', connstr,
                          cp_min=1, cp_max=1)

    def createSchema(cur):
        # Best-effort creates: "table already exists" errors are only logged.
        try:
            cur.execute('''create table sticky (
                    id integer primary key,
                    board_id text,
                    updated timestamp default current_timestamp,
                    note text,
                    x integer,
                    y integer)''')
        except Exception as err:
            log.err(err)
        try:
            cur.execute('''create table image (
                id integer primary key,
                board_id text,
                updated timestamp default current_timestamp,
                data blob
            )''')
            cur.execute('''create unique index image_board_id on image(board_id)''')
        except Exception as err:
            log.err(err)

    return pool.runInteraction(createSchema).addCallbacks((lambda _ignored: pool), log.err)
Exemple #4
0
class ConnectionPool(object):
    """Adapt a twisted.enterprise.adbapi.ConnectionPool for use with tornado.

    Each ``run_*`` method forwards to the corresponding Twisted method and
    converts the resulting Deferred into a TracebackFuture.
    """

    def __init__(self, *args, **kwargs):
        # Arguments are forwarded verbatim to the wrapped Twisted pool.
        self._pool = TxConnectionPool(*args, **kwargs)

    def run_query(self, *args, **kwargs):
        deferred = self._pool.runQuery(*args, **kwargs)
        return self._defer_to_future(deferred)

    def run_operation(self, *args, **kwargs):
        deferred = self._pool.runOperation(*args, **kwargs)
        return self._defer_to_future(deferred)

    def run_interaction(self, *args, **kwargs):
        deferred = self._pool.runInteraction(*args, **kwargs)
        return self._defer_to_future(deferred)

    def close(self):
        # Shut down the underlying pool.
        self._pool.close()

    @staticmethod
    def _defer_to_future(defer):
        # Bridge Deferred -> Future: success sets the result; failure is
        # surfaced with its original traceback via set_exc_info.
        future = TracebackFuture()

        def _on_failure(failure):
            future.set_exc_info((failure.type, failure.value, failure.tb))

        defer.addCallbacks(future.set_result, _on_failure)
        return future
Exemple #5
0
class ChannelLogger(object):
    """IRC channel logger backed by a sqlite3 Twisted ConnectionPool."""
    implements(IDBLogger)

    def __init__(self, dbfile, **kw):
        # XXX Ignore thread warnings from sqlite3.  Should be OK.
        # http://twistedmatrix.com/trac/ticket/3629
        kw.setdefault("check_same_thread", False)

        from twisted.enterprise.adbapi import ConnectionPool
        driver = 'sqlite3'  # renamed from ``type`` to avoid shadowing the builtin
        self.dbfile = dbfile
        self.dbconn = ConnectionPool(driver, dbfile, **kw)
        self.table = 'channels'
        self.initialize_db()

    def initialize_db(self):
        """Create the log table if it does not exist; returns a Deferred."""
        return self.dbconn.runInteraction(self._initialize_db, self.table)

    @staticmethod
    def _initialize_db(tx, table):
        # Only the trusted internal table name is interpolated; no user data.
        tx.execute('CREATE TABLE IF NOT EXISTS {0} ('
                   'id INTEGER PRIMARY KEY AUTOINCREMENT,'
                   'timestamp INTEGER,'
                   'channel TEXT,'
                   'nick TEXT,'
                   'msg TEXT )'.format(table))

    def log(self, who, chan, msg):
        """Record one channel message; returns a Deferred."""
        return self.dbconn.runInteraction(self._log, who, chan, msg,
                                          self.table)

    @staticmethod
    def _log(tx, who, chan, msg, table):
        # Values go through ? placeholders; only the trusted table name
        # is interpolated into the statement text.
        now = int(time.time())
        stmt = 'INSERT INTO {0}(timestamp,nick,channel,msg) VALUES(?,?,?,?)'
        tx.execute(stmt.format(table), (now, who, chan, msg))
Exemple #6
0
class MySQLPipeline(object):
    """Scrapy pipeline that inserts scraped items into a MySQL database."""

    @classmethod
    def from_crawler(cls, crawler):
        # Scrapy entry point: build the pipeline with the crawler's stats.
        return cls(crawler.stats)

    def __init__(self, stats):
        # Instantiate the Twisted connection pool from project settings.
        self.dbpool = ConnectionPool('MySQLdb',
                                     host=SETTINGS['DB_HOST'],
                                     user=SETTINGS['DB_USER'],
                                     passwd=SETTINGS['DB_PASSWD'],
                                     port=SETTINGS['DB_PORT'],
                                     db=SETTINGS['DB_DB'],
                                     charset='utf8',
                                     use_unicode=True,
                                     cursorclass=MySQLdb.cursors.DictCursor)
        self.stats = stats
        dispatcher.connect(self.spider_closed, signals.spider_closed)

    def spider_closed(self, spider):
        """Cleanup hook, called after crawling finishes: close the pool."""
        self.dbpool.close()

    def process_item(self, item, spider):
        # Fire-and-forget insert; errors are routed to _handle_error.
        deferred = self.dbpool.runInteraction(self._insert_record, item)
        deferred.addErrback(self._handle_error)
        return item

    def _insert_record(self, tx, item):
        # Parameterized insert; the execute() return value is treated as an
        # affected-row count for the stats counter.
        result = tx.execute(
            """INSERT INTO agregator_results (task_id, direct_link, source_link, rank, site, `date`) 
                VALUES (%s, %s, %s, %s, %s, %s)""", (
                item["django_task_id"],
                item["direct_link"],
                item["source_link"],
                item["rank"],
                item["site"],
                item["date"],
            ))
        if result > 0:
            self.stats.inc_value('database/items_added')

    def _handle_error(self, e):
        # Failures from the insert interaction end up here; just log them.
        log.err(e)
Exemple #7
0
class Database():
    """
    HouseAgent database interaction.

    Python 2 code (print statements, dict.iteritems); all queries run
    through a Twisted adbapi ConnectionPool on a sqlite3 backend.
    """
    def __init__(self, log, db_location):
        self.log = log

        # NOTE(review): ``type`` shadows the builtin; harmless but worth renaming.
        type = "sqlite"

        self.coordinator = None

        # Note: cp_max=1 is required otherwise undefined behaviour could occur when using yield icw subsequent
        # runQuery or runOperation statements
        if type == "sqlite":
            self.dbpool = ConnectionPool("sqlite3", db_location, check_same_thread=False, cp_max=1)
       
        # NOTE(review): db_location is NOT stored on self here, but
        # _updatedb's backup branch references a bare ``db_location``.
        # Check database schema version and upgrade when required
        self.updatedb('0.1')
             
    def updatedb(self, dbversion):
        '''
        Perform a database schema update when required. 
        '''
        # Note: runInteraction runs all queries defined within the specified function as part of a transaction.
        return self.dbpool.runInteraction(self._updatedb, dbversion)

    def _updatedb(self, txn, dbversion):
        '''
        Check whether a database schema update is required and act accordingly.
        '''
        # Note: Although all queries are run as part of a transaction, a create or drop table statement result in an implicit commit

        # Query the version of the current schema
        # NOTE(review): bare except — any failure (not just "no common table")
        # is treated as "schema version unknown".
        try:
            result = txn.execute("SELECT parm_value FROM common WHERE parm = 'schema_version'").fetchall()
        except:
            result = None
            
        if result:
            version = result[0][0]
        else:
            version = '0.0'

        if float(version) > float(dbversion):
            self.log.error("ERROR: The current database schema (%s) is not supported by this version of HouseAgent" % version)
            # Exit HouseAgent
            sys.exit(1)
        
        elif float(version) == float(dbversion):
            self.log.debug("Database schema is up to date")
            return
        
        else:
            self.log.info("Database schema will be updated from %s to %s:" % (version, dbversion))

            # Before we start manipulating the database schema, first make a backup copy of the database
            # NOTE(review): ``db_location`` is not defined in this scope (and is
            # not an attribute), so reaching this line raises NameError, which
            # the bare except then misreports as a copy failure — confirm.
            try:
                shutil.copy(db_location, db_location + datetime.datetime.strftime(datetime.datetime.now(), ".%y%m%d-%H%M%S"))
            except:
                self.log.error("Cannot make a backup copy of the database (%s)", sys.exc_info()[1])
                return

            if version == '0.0':
                try:
                    # Create common table
                    txn.execute("CREATE TABLE IF NOT EXISTS common (parm VARCHAR(16) PRIMARY KEY, parm_value VARCHAR(24) NOT NULL)")
            
                    # Add schema version to database
                    txn.execute("INSERT INTO common (parm, parm_value) VALUES ('schema_version', ?)", [dbversion])

                    # Set primary key of the devices table on address + plugin_id to prevent adding duplicate devices
                    txn.execute("CREATE TEMPORARY TABLE devices_backup(id INTEGER PRIMARY KEY, name VARCHAR(45), address VARCHAR(45) NOT NULL, plugin_id INTEGER NOT NULL, location_id INTEGER)")
                    txn.execute("INSERT INTO devices_backup SELECT id, name, address, plugin_id, location_id FROM devices")
                    txn.execute("DROP TABLE devices")
                    txn.execute("CREATE TABLE devices(id INTEGER PRIMARY KEY, name VARCHAR(45), address VARCHAR(45) NOT NULL, plugin_id INTEGER, location_id INTEGER)")
                    txn.execute("CREATE UNIQUE INDEX device_address ON devices (address, plugin_id)")
                    txn.execute("INSERT INTO devices SELECT id, name, address, plugin_id, location_id FROM devices_backup")
                    txn.execute("DROP TABLE devices_backup")

                    self.log.info("Successfully upgraded database schema")
                except:
                    self.log.error("Database schema upgrade failed (%s)" % sys.exc_info()[1])

    # NOTE(review): authcode is interpolated straight into the SQL text —
    # injection risk; should use a ? placeholder like the other queries.
    def query_plugin_auth(self, authcode):
        return self.dbpool.runQuery("SELECT authcode, id from plugins WHERE authcode = '%s'" % authcode)

    def check_plugin_auth(self, result):
        # Result-shape helper: any row at all means the plugin is registered.
        if len(result) >= 1:
            return {'registered': True}
        else:
            return {'registered': False}

    def insert_result(self, result):
        return {'received': True}

    def add_event(self, name, enabled, triggers):
        """
        This function adds an event to the database.
        """
        d = self.dbpool.runQuery("INSERT INTO events (name, enabled) VALUES (?, ?)", (name, enabled) )
        def event_added(result):
            print "added event"
            return self.dbpool.runQuery("select id from events order by id desc limit 1")      
        
        d.addCallback(event_added)
        def got_id(result):
            event_id = result[0][0]
            
            print "got event_id", result[0][0]
            print "triggers=",triggers
            
            # Add triggers
            deferredlist = []
            
            # NOTE(review): got_triggerid/trigger_added close over the loop
            # variable ``trigger`` (late binding) — if more than one trigger is
            # passed, all callbacks see the last one. Verify intended.
            for trigger in triggers:
                trigger_type_id = trigger["trigger_type"]
                print "trigger", trigger
                
                def got_triggerid(result):
                    trigger_id = result[0][0]
                    
                    print "parameters", trigger["parameters"]
                    for name, value in trigger["parameters"].iteritems():
                        print name, value
                        deferredlist.append(self.dbpool.runQuery("INSERT INTO trigger_parameters (name, value, " +
                                                                 "triggers_id) VALUES (?, ?, ?)", (name, value, trigger_id)))
                
                def trigger_added(result):
                    self.dbpool.runQuery("select id from triggers order by id desc limit 1").addCallback(got_triggerid)     
                
                # Triggers
                deferredlist.append(self.dbpool.runQuery("INSERT INTO triggers (trigger_types_id, events_id)" +
                                                         " VALUES (?, ?)", (trigger_type_id, event_id)).addCallback(trigger_added) )        
                    
            d = defer.gatherResults(deferredlist)
            return d
            
        d.addCallback(got_id)
        
        def added_triggers(result):
            print "triggers added"
            
        d.addCallback(added_triggers)
        return d
    
    def add_location(self, name, parent):
        # Two statement shapes: with or without the optional parent id.
        if parent:
            return self.dbpool.runQuery("INSERT INTO locations (name, parent) VALUES (?, ?)", [name, parent])
        else:
            return self.dbpool.runQuery("INSERT INTO locations (name) VALUES (?)", [name])
    
    @inlineCallbacks
    def add_event2(self, name, enabled, conditions, actions, trigger):
        '''
        This adds an event to the database.

        Sequential (inlineCallbacks) rewrite of add_event: inserts the event,
        then its conditions, actions and single trigger, each followed by a
        "select max id" lookup to recover the generated row id.
        '''
        # Add event, and get event id
        yield self.dbpool.runQuery("INSERT INTO events (name, enabled) VALUES (?, ?)", [name, enabled])
        eventid = yield self.dbpool.runQuery("select id from events order by id desc limit 1")
        eventid = eventid[0][0]
        
        # Add conditions
        for condition in conditions:
            condition_type_id = condition["condition_type"]
            
            yield self.dbpool.runQuery("INSERT INTO conditions (condition_types_id, events_id)" +
                                       " VALUES (?, ?)", [condition_type_id, eventid])
            
            condition_id = yield self.dbpool.runQuery("select id from conditions order by id desc limit 1")
            condition_id = condition_id[0][0]
            
            for name, value in condition["parameters"].iteritems():
                yield self.dbpool.runQuery("INSERT INTO condition_parameters (name, value, " +
                                           "conditions_id) VALUES (?, ?, ?)", [name, value, condition_id])
        
        # Add actions
        for action in actions:
            action_type_id = action["action_type"]
            
            yield self.dbpool.runQuery("INSERT INTO actions (action_types_id, events_id)" +
                                       " VALUES (?, ?)", [action_type_id, eventid])
            
            action_id = yield self.dbpool.runQuery("select id from actions order by id desc limit 1")
            action_id = action_id[0][0]
            
            for name, value in action["parameters"].iteritems():
                yield self.dbpool.runQuery("INSERT INTO action_parameters (name, value, " +
                                           "actions_id) VALUES (?, ?, ?)", [name, value, action_id])
                
            
        # Insert trigger
        yield self.dbpool.runQuery("INSERT INTO triggers (trigger_types_id, events_id, conditions)" +
                                   " VALUES (?,?,?)", [trigger["trigger_type"], eventid, trigger["conditions"]])
 
        trigger_id = yield self.dbpool.runQuery("select id from triggers order by id desc limit 1")
        trigger_id = trigger_id[0][0]
       
        for name, value in trigger["parameters"].iteritems():
            yield self.dbpool.runQuery("INSERT INTO trigger_parameters (name, value, " +
                                       "triggers_id) VALUES (?, ?, ?)", [name, value, trigger_id])
               
    
    def add_trigger(self, trigger_type_id, event_id, value_id, parameters):
        # NOTE(review): %s placeholders and last_insert_id() are MySQL-style;
        # this pool is sqlite3, which expects ? placeholders — likely broken.
        print "INSERT INTO triggers (trigger_types_id, events_id, current_values_id) VALUES (%d, %d, %d)" % (int(trigger_type_id),
                                                                                                                                  int(event_id),
                                                                                                                                  int(value_id))
        d = self.dbpool.runQuery("INSERT INTO triggers (trigger_types_id, events_id" + 
                                 ", current_values_id) VALUES (%s, %s, %s)", (int(trigger_type_id),
                                                                              int(event_id),
                                                                              int(value_id)) ) 
        for name, value in parameters.iteritems():
            self.dbpool.runQuery("INSERT INTO trigger_parameters (name, value, triggers_id) VALUES (%s, %s, last_insert_id())", (name, value) )
    
        return d
    
    #def add_action(self, action_type_id, event_id):
    
    def query_latest_device_id(self):
        '''
        This function queries the latest device id.
        '''
        # NOTE(review): no ORDER BY id DESC here, so this returns the first
        # row the engine yields, not necessarily the latest device.
        return self.dbpool.runQuery('select id from devices LIMIT 1')
         
    # The query_* methods below each return a Deferred firing with raw rows.
    def query_triggers(self):
        return self.dbpool.runQuery("SELECT triggers.id, trigger_types.name, triggers.events_id, triggers.conditions " + 
                                    "FROM triggers INNER JOIN trigger_types ON (triggers.trigger_types_id = trigger_types.id)")

    def query_trigger(self, event_id):
        return self.dbpool.runQuery("SELECT triggers.id, trigger_types.name, triggers.events_id, triggers.conditions " + 
                                    "FROM triggers INNER JOIN trigger_types ON (triggers.trigger_types_id = trigger_types.id) " +
                                    "WHERE triggers.events_id = ? LIMIT 1", [event_id])
        
    def query_conditions(self):
        return self.dbpool.runQuery("SELECT conditions.id, condition_types.name, conditions.events_id " + 
                                    "FROM conditions INNER JOIN condition_types ON (conditions.condition_types_id = condition_types.id)")

    def query_actions(self):
        return self.dbpool.runQuery("SELECT actions.id, action_types.name, actions.events_id " + 
                                    "FROM actions INNER JOIN action_types ON (actions.action_types_id = action_types.id)")

    def query_trigger_parameters(self, trigger_id):
        return self.dbpool.runQuery("SELECT name, value from trigger_parameters WHERE triggers_id = ?", [trigger_id])
    
    def query_condition_parameters(self, condition_id):
        return self.dbpool.runQuery("SELECT name, value from condition_parameters WHERE conditions_id = ?", [condition_id])        

    def query_action_parameters(self, action_id):
        return self.dbpool.runQuery("SELECT name, value from action_parameters WHERE actions_id = ?", [action_id])
    
    def query_device_routing_by_id(self, device_id):
        return self.dbpool.runQuery("SELECT devices.address, plugins.authcode FROM devices " +  
                                    "INNER JOIN plugins ON (devices.plugin_id = plugins.id) "
                                    "WHERE devices.id = ?", [device_id])

    def query_value_properties(self, value_id):
        return self.dbpool.runQuery("SELECT current_values.name, devices.address, devices.plugin_id from current_values " + 
                                    "INNER JOIN devices ON (current_values.device_id = devices.id) " + 
                                    "WHERE current_values.id = ?", [value_id])

    def query_plugin_devices(self, plugin_id):
        return self.dbpool.runQuery("SELECT devices.id, devices.name, devices.address, locations.name from devices " +
                                    "LEFT OUTER JOIN locations ON (devices.location_id = locations.id) " +
                                    "WHERE plugin_id=? ", [plugin_id])

    @inlineCallbacks
    def update_or_add_value(self, name, value, pluginid, address, time=None):
        '''
        This function updates or adds values to the HouseAgent database.
        @param name: the name of the value
        @param value: the actual value of the value
        @param pluginid: the plugin which holds the device information
        @param address: the address of the device being handled
        @param time: the time at which the update has been received, this defaults to now()
        '''
        # NOTE(review): the parameter ``time`` shadows the time module within
        # this method.
        if not time:
            updatetime = datetime.datetime.now().isoformat(' ').split('.')[0]
        else:
            updatetime = datetime.datetime.fromtimestamp(time).isoformat(' ').split('.')[0]
        
        # Query device first
        device_id = yield self.dbpool.runQuery('select id from devices WHERE plugin_id = ? and address = ? LIMIT 1', (pluginid, address) )

        try:
            device_id = device_id[0][0]
        except:
            returnValue('') # device does not exist
        
        current_value = yield self.dbpool.runQuery("select id, name, history from current_values where name=? AND device_id=? LIMIT 1", (name, device_id))
    
        try:
            value_id = current_value[0][0]
        except:
            value_id = None
    
        if value_id:
            value_id = current_value[0][0]
            
            # NOTE(review): int(time) raises TypeError when time is None but
            # history is enabled — confirm callers always pass a timestamp then.
            if current_value[0][2] not in (0, None):
                DataHistory("data", current_value[0][0], value, "GAUGE", 60, int(time))
                
            yield self.dbpool.runQuery("UPDATE current_values SET value=?, lastupdate=? WHERE id=?", (value, updatetime, value_id))
        else:
            yield self.dbpool.runQuery("INSERT INTO current_values (name, value, device_id, lastupdate) VALUES (?, ?, (select id from devices where address=? AND plugin_id=?),  ?)", (name, value, address, pluginid, updatetime))
            current_value = yield self.dbpool.runQuery("select id from current_values where name=? AND device_id=?", (name, device_id))
            value_id = current_value[0][0]
                        
        returnValue(value_id)

    def register_plugin(self, name, uuid, location):
        return self.dbpool.runQuery("INSERT INTO plugins (name, authcode, location_id) VALUES (?, ?, ?)", [str(name), str(uuid), location])

    def query_plugins(self):
        return self.dbpool.runQuery("SELECT plugins.name, plugins.authcode, plugins.id, locations.name, plugins.location_id from plugins " +
                                    "LEFT OUTER JOIN locations ON (plugins.location_id = locations.id)")
    
    def query_plugin_by_type_name(self, type_name):
        return self.dbpool.runQuery("SELECT plugins.id, plugins.authcode from plugins " +
                                    "INNER JOIN plugin_types ON (plugins.plugin_type_id = plugin_types.id)" +
                                    "WHERE plugin_types.name = ? LIMIT 1", [type_name])

    def query_device_classes(self):
        return self.dbpool.runQuery("SELECT * from device_class order by name ASC")
    
    def query_device_types(self):
        return self.dbpool.runQuery("SELECT * from device_types order by name ASC")
       
    @inlineCallbacks
    def cb_device_crud(self, result, action, id=None, plugin=None, address=None, name=None, location=None):
        '''
        Callback function that get's called when a device has been created, updated or deleted in, to or from the database.
        @param result: the result of the action
        @param action: the action initiating the callback being create, update or delete
        @param plugin: the uuid of the plugin owning the device
        @param address: the address of the device
        @param name: the name of the device
        @param location: the name of the location associated with the device
        '''
        if action == "create":
            parms = yield self.dbpool.runQuery("SELECT plugins.authcode, devices.address, devices.name, locations.name FROM devices, plugins, locations WHERE devices.plugin_id = plugins.id AND devices.location_id = locations.id ORDER BY devices.id DESC LIMIT 1")
            
        if action == "update":
            parms = yield self.dbpool.runQuery("SELECT plugins.authcode, devices.address, devices.name, locations.name FROM devices, plugins, locations WHERE devices.plugin_id = plugins.id AND devices.location_id = locations.id AND devices.id=?", [id])

        # For "delete" the caller supplies plugin/address/name/location directly.
        if action != "delete":
            plugin = parms[0][0]
            address = parms[0][1]
            name = parms[0][2]
            location = parms[0][3]
            
        parameters = {"plugin": plugin, 
                      "address": address,
                      "name": name,
                      "location": location}

        if self.coordinator:
            self.coordinator.send_crud_update("device", action, parameters)    

    def save_device(self, name, address, plugin_id, location_id, id=None):
        '''
        This functions saves a device in the HouseAgent database.
        @param name: the name of the device
        @param address: the address of the device
        @param plugin_id: the plugin_id of the associated plugin
        @param location_id: the location_id of the associated location
        @param id: the id of the device (in case this is an update)
        '''
        
        if not id:
            return self.dbpool.runQuery("INSERT INTO devices (name, address, plugin_id, location_id) VALUES (?, ?, ?, ?)", \
                                        (name, address, plugin_id, location_id)).addCallback(self.cb_device_crud, "create")
        else:
            return self.dbpool.runQuery("UPDATE devices SET name=?, address=?, plugin_id=?, location_id=? WHERE id=?", \
                                        (name, address, plugin_id, location_id, id)).addCallback(self.cb_device_crud, "update", id)

    def del_device(self, id):
        # Look up the routing parameters first so the CRUD callback can still
        # describe the device after the row is gone.
        def delete(result, id):
            self.dbpool.runQuery("DELETE FROM devices WHERE id=?", [id]).addCallback(self.cb_device_crud, "delete", id, result[0][0], result[0][1], result[0][2], result[0][3])
        
        return self.dbpool.runQuery("SELECT plugins.authcode, devices.address, devices.name, locations.name FROM plugins, devices, locations " +
                                    "WHERE devices.plugin_id = plugins.id AND devices.location_id = locations.id AND devices.id=?", [id]).addCallback(delete, id)

    def del_location(self, id):
        return self.dbpool.runQuery("DELETE FROM locations WHERE id=?", [id])

    @inlineCallbacks
    def del_event(self, id):
        # Delete all parameters for this event id
        # NOTE(review): the ``=`` comparisons against subqueries only match a
        # single trigger/condition/action id per event — confirm events never
        # have more than one of each, otherwise parameters are orphaned.
        yield self.dbpool.runQuery("DELETE FROM trigger_parameters where triggers_id=" +
                                   " (select id from triggers where events_id=?)", [id])
        
        yield self.dbpool.runQuery("DELETE FROM condition_parameters where conditions_id=" +
                                   " (select id from conditions where events_id=?)" , [id])
    
        yield self.dbpool.runQuery("DELETE FROM action_parameters where actions_id=" +
                                   " (select id from actions where events_id=?)", [id])
        
        yield self.dbpool.runQuery("DELETE FROM triggers where events_id=?", [id])
        yield self.dbpool.runQuery("DELETE FROM actions where events_id=?", [id])
        yield self.dbpool.runQuery("DELETE FROM conditions where events_id=?", [id])
        
        yield self.dbpool.runQuery("DELETE FROM events where id=?", [id])

    def del_plugin(self, id):
        return self.dbpool.runQuery("DELETE FROM plugins WHERE id=?", [id])

    def query_locations(self):
        return self.dbpool.runQuery("select locations.id, locations.name, l2.name from locations " +  
                                    "left join locations as l2 on locations.parent=l2.id")

    def query_values(self):
        return self.dbpool.runQuery("SELECT current_values.name, current_values.value, devices.name, " + 
                               "current_values.lastupdate, plugins.name, devices.address, locations.name, current_values.id" + 
                               ", control_types.name, control_types.id, history FROM current_values INNER " +
                               "JOIN devices ON (current_values.device_id = devices.id) INNER JOIN plugins ON (devices.plugin_id = plugins.id) " + 
                               "LEFT OUTER JOIN locations ON (devices.location_id = locations.id) " + 
                               "LEFT OUTER JOIN control_types ON (current_values.control_type_id = control_types.id)")

    def query_devices(self):      
        return self.dbpool.runQuery("SELECT devices.id, devices.name, devices.address, plugins.name, locations.name from devices " +
                                    "INNER JOIN plugins ON (devices.plugin_id = plugins.id) " +
                                    "LEFT OUTER JOIN locations ON (devices.location_id = locations.id)")

    def query_location(self, id):
        return self.dbpool.runQuery("SELECT id, name, parent FROM locations WHERE id=?", [id])
    
    def query_plugin(self, id):
        return self.dbpool.runQuery("SELECT id, name, location_id FROM plugins WHERE id=?", [id])
    
    def query_device(self, id):
        return self.dbpool.runQuery("SELECT id, name, address, plugin_id, location_id FROM devices WHERE id=?", [id])

    def query_triggertypes(self):
        return self.dbpool.runQuery("SELECT id, name from trigger_types")

    def query_actiontypes(self):
        return self.dbpool.runQuery("SELECT id, name from action_types")
    
    def query_conditiontypes(self):
        return self.dbpool.runQuery("SELECT id, name from condition_types")
    
    def query_controltypes(self):
        return self.dbpool.runQuery("SELECT id, name from control_types")
    
    def query_controltypename(self, current_value_id):
        return self.dbpool.runQuery("select control_types.name from current_values " +
                                    "INNER JOIN controL_types ON (control_types.id = current_values.control_type_id) " +
                                    "where current_values.id=?", [current_value_id])
    
    def query_devices_simple(self):
        return self.dbpool.runQuery("SELECT id, name from devices")
    
    def query_plugintypes(self):
        return self.dbpool.runQuery("SELECT id, name from plugin_types")

    def query_historic_values(self):
        return self.dbpool.runQuery("select current_values.id, current_values.name, devices.name, current_values.history from current_values, devices where current_values.device_id = devices.id and history = 1;")

    def query_controllable_devices(self):
        return self.dbpool.runQuery("SELECT devices.name, devices.address, plugins.name, plugins.authcode, current_values.value, devices.id, control_types.name FROM current_values " +
                                    "INNER JOIN devices ON (current_values.device_id = devices.id) " +
                                    "INNER JOIN plugins ON (devices.plugin_id = plugins.id) " +
                                    "INNER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                                    "WHERE current_values.control_type_id != ''")
    
    def query_action_types_by_device_id(self, device_id):
        return self.dbpool.runQuery("SELECT current_values.id, current_values.name, control_types.name FROM current_values " +
                                    "INNER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                                    "WHERE current_values.device_id = ?", [device_id])

    def query_action_type_by_value_id(self, value_id):
        return self.dbpool.runQuery("SELECT control_types.name FROM current_values " +
                                    "INNER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                                    "WHERE current_values.id = ? LIMIT 1", [value_id])
        
    # NOTE(review): device_id is interpolated with %s here instead of a ?
    # placeholder, unlike the surrounding queries — injection risk.
    def query_values_by_device_id(self, device_id):
        return self.dbpool.runQuery("SELECT id, name from current_values WHERE device_id = '%s'" % device_id)

    def query_device_type_by_device_id(self, device_id):
        return self.dbpool.runQuery("SELECT device_types.name FROM devices " +  
                                    "INNER JOIN device_types ON (device_types.id = devices.device_type_id) " + 
                                    "WHERE devices.id = ? LIMIT 1", [device_id])

    def query_value_by_valueid(self, value_id):
        return self.dbpool.runQuery("SELECT value,name from current_values WHERE id = ? LIMIT 1", [value_id])
    
    def query_extra_valueinfo(self, value_id):
        return self.dbpool.runQuery("select devices.name, current_values.name from current_values " +
                                    "inner join devices on (current_values.device_id = devices.id) " + 
                                    "where current_values.id = ?", [value_id])

    def set_history(self, id, history):
        return self.dbpool.runQuery("UPDATE current_values SET history=? WHERE id=?", [history, id])
    
    def set_controltype(self, id, control_type):
        return self.dbpool.runQuery("UPDATE current_values SET control_type_id=? WHERE id=?", [control_type, id])

    def update_location(self, id, name, parent):
        return self.dbpool.runQuery("UPDATE locations SET name=?, parent=? WHERE id=?", [name, parent, id])
    
    def update_plugin(self, id, name, location):
        return self.dbpool.runQuery("UPDATE plugins SET name=?, location_id=? WHERE id=?", [name, location, id])
    
    def query_events(self):
        return self.dbpool.runQuery("SELECT id, name, enabled from events")
Exemple #8
0
class Database():
    """
    HouseAgent database interaction.
    """
    def __init__(self, log, db_location):
        """
        Initialize the HouseAgent database layer.
        @param log: logger instance used for all database messages
        @param db_location: filesystem path to the sqlite database file
        """
        self.log = log

        # Renamed from 'type', which shadowed the builtin of the same name.
        db_type = "sqlite"

        self.coordinator = None
        self.histcollector = None
        self._db_location = db_location

        # Note: cp_max=1 is required otherwise undefined behaviour could occur when using yield icw subsequent
        # runQuery or runOperation statements
        if db_type == "sqlite":
            self.dbpool = ConnectionPool("sqlite3", db_location, check_same_thread=False, cp_max=1)

        # Check database schema version and upgrade when required
        self.updatedb('0.2')
             
    def updatedb(self, dbversion):
        '''
        Run the schema upgrade check for the given target version.

        runInteraction executes everything _updatedb does inside a single
        transaction.
        '''
        upgrade = self._updatedb
        return self.dbpool.runInteraction(upgrade, dbversion)

    def _updatedb(self, txn, dbversion):
        '''
        Check whether a database schema update is required and act accordingly.

        Runs inside a runInteraction transaction. Exits the process when the
        on-disk schema is NEWER than this code supports.
        @param txn: adbapi transaction cursor supplied by runInteraction
        @param dbversion: target schema version as a string, e.g. '0.2'
        '''
        # Note: Although all queries are run as part of a transaction, a create or drop table statement result in an implicit commit

        # Query the version of the current schema
        try:
            result = txn.execute("SELECT parm_value FROM common WHERE parm = 'schema_version'").fetchall()
        except:
            # The 'common' table does not exist yet -> treat as pre-0.1 database.
            result = None
            
        if result:
            version = result[0][0]
        else:
            version = '0.0'

        if float(version) > float(dbversion):
            self.log.error("ERROR: The current database schema (%s) is not supported by this version of HouseAgent" % version)
            # Exit HouseAgent
            sys.exit(1)
        
        elif float(version) == float(dbversion):
            self.log.debug("Database schema is up to date")
            return
        
        else:
            self.log.info("Database schema will be updated from %s to %s:" % (version, dbversion))

            # Before we start manipulating the database schema, first make a backup copy of the database
            try:
                shutil.copy(self._db_location, self._db_location + datetime.datetime.strftime(datetime.datetime.now(), ".%y%m%d-%H%M%S"))
            except:
                self.log.error("Cannot make a backup copy of the database (%s)", sys.exc_info()[1])
                return

            if version == '0.0':
                # 0.0 -> target: introduce the 'common' table and rebuild
                # 'devices' with a unique (address, plugin_id) index.
                try:
                    # Create common table
                    txn.execute("CREATE TABLE IF NOT EXISTS common (parm VARCHAR(16) PRIMARY KEY, parm_value VARCHAR(24) NOT NULL)")
            
                    # Add schema version to database
                    txn.execute("INSERT INTO common (parm, parm_value) VALUES ('schema_version', ?)", [dbversion])

                    # Set primary key of the devices table on address + plugin_id to prevent adding duplicate devices
                    txn.execute("CREATE TEMPORARY TABLE devices_backup(id INTEGER PRIMARY KEY, name VARCHAR(45), address VARCHAR(45) NOT NULL, plugin_id INTEGER NOT NULL, location_id INTEGER)")
                    txn.execute("INSERT INTO devices_backup SELECT id, name, address, plugin_id, location_id FROM devices")
                    txn.execute("DROP TABLE devices")
                    txn.execute("CREATE TABLE devices(id INTEGER PRIMARY KEY, name VARCHAR(45), address VARCHAR(45) NOT NULL, plugin_id INTEGER, location_id INTEGER)")
                    txn.execute("CREATE UNIQUE INDEX device_address ON devices (address, plugin_id)")
                    txn.execute("INSERT INTO devices SELECT id, name, address, plugin_id, location_id FROM devices_backup")
                    txn.execute("DROP TABLE devices_backup")

                    self.log.info("Successfully upgraded database schema")
                except:
                    self.log.error("Database schema upgrade failed (%s)" % sys.exc_info()[1])

            elif version == '0.1':
                # update DB schema version to '0.2'
                # 0.1 -> 0.2: add history collection tables and rebuild
                # current_values with foreign keys to them.
                try:
                    # update common table
                    txn.execute("UPDATE common SET parm_value=0.2 WHERE parm='schema_version';")

                    # history_periods table
                    txn.execute("CREATE TABLE history_periods(id integer PRIMARY KEY AUTOINCREMENT NOT NULL,\
                                name varchar(20), secs integer NOT NULL, sysflag CHAR(1) NOT NULL DEFAULT '0');")
                    
                    # default values for history_periods table
                    txn.execute("INSERT INTO history_periods VALUES(1,'Disabled',0,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(2,'5 min',300,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(3,'15 min',900,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(4,'30 min',1800,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(5,'1 hour',3600,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(6,'2 hours',7200,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(7,'8 hours',28800,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(8,'12 hours',43200,'1');")
                    txn.execute("INSERT INTO history_periods VALUES(9,'1 day',86400,'1');")

                    # history_types table
                    txn.execute("CREATE TABLE history_types (id integer PRIMARY KEY AUTOINCREMENT NOT NULL, \
                                name  varchar(50));")
                    
                    # default values for history_types table
                    txn.execute("INSERT INTO history_types VALUES (NULL, 'GAUGE');")
                    txn.execute("INSERT INTO history_types VALUES (NULL, 'COUNTER');")

                    # Stash the old current_values rows before rebuilding the table.
                    txn.execute("CREATE TEMPORARY TABLE current_values_tmp( \
                                id integer PRIMARY KEY AUTOINCREMENT NOT NULL, \
                                name varchar(45), value varchar(45), device_id integer NOT NULL, \
                                lastupdate datetime, history bool DEFAULT 0, \
                                history_type_id integer, control_type_id integer DEFAULT 0);")
                    txn.execute("INSERT INTO current_values_tmp \
                                SELECT id, name, value, device_id, lastupdate, history, \
                                history_type_id, control_type_id FROM current_values;")
                    
                    # create new current_values scheme (old data are purged)
                    txn.execute("DROP TABLE current_values;")
                    txn.execute("CREATE TABLE current_values(id integer PRIMARY KEY AUTOINCREMENT NOT NULL, \
                                name varchar(45), value varchar(45), device_id integer NOT NULL, \
                                lastupdate datetime, history_period_id  int DEFAULT 1, \
                                history_type_id int DEFAULT 1, control_type_id  integer DEFAULT 0, \
                                FOREIGN KEY (history_period_id) REFERENCES history_periods(id), \
                                FOREIGN KEY (history_type_id) REFERENCES history_types(id), \
                                FOREIGN KEY (device_id) REFERENCES devices(id));")
                    
                    # current_values indexes
                    txn.execute("CREATE INDEX 'current_values.fk_current_values_control_types1' \
                                    ON current_values (control_type_id);")
                    txn.execute("CREATE INDEX 'current_values.fk_current_values_history_periods1' \
                                    ON current_values (history_period_id);")
                    txn.execute("CREATE INDEX 'current_values.fk_current_values_history_types1' \
                                    ON current_values (history_type_id);")
                    txn.execute("CREATE INDEX 'current_values.fk_values_devices1' \
                                    ON current_values (device_id);")
                    
                    # fill new current_values table
                    txn.execute("INSERT INTO current_values \
                                SELECT id, name, value, device_id, lastupdate, 1, 1, control_type_id \
                                FROM current_values_tmp;")
                    txn.execute("DROP TABLE current_values_tmp;")

                    # history_values table
                    txn.execute("CREATE TABLE history_values (value_id integer,\
                                value real, created_at datetime, \
                                FOREIGN KEY (value_id) REFERENCES current_values(id));")

                    txn.execute("CREATE INDEX 'history_values.idx_history_values_created_at1' \
                                    ON history_values (created_at);")
                    txn.execute("CREATE INDEX 'history_values.idx_history_values_value_id1' \
                                    ON history_values (value_id);")
                    
                    # Control types fix
                    txn.execute("INSERT into control_types VALUES(0, 'Not controllable');")
                    txn.execute("UPDATE control_types SET name='Switch (On/off)' WHERE id=1;")
                    txn.execute("UPDATE control_types SET name='Thermostat (Setpoint)' WHERE id=2;")

                    self.log.info("Successfully upgraded database schema")
                except:
                    self.log.error("Database schema upgrade failed (%s)" % sys.exc_info()[1])

    def query_plugin_auth(self, authcode):
        """
        Look up a plugin by its authentication code.
        @param authcode: the authcode string to search for
        """
        # Parameterized query: the previous '%s' interpolation was open to
        # SQL injection and broke on authcodes containing quotes.
        return self.dbpool.runQuery("SELECT authcode, id from plugins WHERE authcode = ?", [authcode])

    def check_plugin_auth(self, result):
        """Translate an auth-query result set into a registration-status dict."""
        registered = len(result) >= 1
        return {'registered': registered}

    def insert_result(self, result):
        """Acknowledge that a result was received."""
        ack = {'received': True}
        return ack

    def add_event(self, name, enabled, triggers):
        """
        This function adds an event to the database.

        @param name: display name of the event
        @param enabled: whether the event is active
        @param triggers: list of dicts, each with 'trigger_type' and 'parameters'
        @return: deferred that fires once event, triggers and parameters are stored
        """
        d = self.dbpool.runQuery("INSERT INTO events (name, enabled) VALUES (?, ?)", (name, enabled) )
        def event_added(result):
            print "added event"
            # NOTE(review): "grab the newest id" relies on cp_max=1 and no
            # concurrent inserts -- confirm this holds for all callers.
            return self.dbpool.runQuery("select id from events order by id desc limit 1")
        
        d.addCallback(event_added)
        def got_id(result):
            event_id = result[0][0]
            
            print "got event_id", result[0][0]
            print "triggers=",triggers
            
            # Add triggers
            deferredlist = []
            
            for trigger in triggers:
                trigger_type_id = trigger["trigger_type"]
                print "trigger", trigger
                
                def got_triggerid(result):
                    trigger_id = result[0][0]
                    
                    # NOTE(review): 'trigger' is a late-bound closure over the
                    # loop variable; with multiple triggers this may read the
                    # last trigger's parameters -- verify before relying on it.
                    print "parameters", trigger["parameters"]
                    for name, value in trigger["parameters"].iteritems():
                        print name, value
                        deferredlist.append(self.dbpool.runQuery("INSERT INTO trigger_parameters (name, value, " +
                                                                 "triggers_id) VALUES (?, ?, ?)", (name, value, trigger_id)))
                
                def trigger_added(result):
                    self.dbpool.runQuery("select id from triggers order by id desc limit 1").addCallback(got_triggerid)
                
                # Triggers
                deferredlist.append(self.dbpool.runQuery("INSERT INTO triggers (trigger_types_id, events_id)" +
                                                         " VALUES (?, ?)", (trigger_type_id, event_id)).addCallback(trigger_added) )
                    
            d = defer.gatherResults(deferredlist)
            return d
            
        d.addCallback(got_id)
        
        def added_triggers(result):
            print "triggers added"
            
        d.addCallback(added_triggers)
        return d
    
    def add_location(self, name, parent):
        """Insert a location row; parent is optional (top-level when falsy)."""
        if not parent:
            return self.dbpool.runQuery("INSERT INTO locations (name) VALUES (?)", [name])
        return self.dbpool.runQuery("INSERT INTO locations (name, parent) VALUES (?, ?)", [name, parent])
    
    @inlineCallbacks
    def add_event2(self, name, enabled, conditions, actions, trigger):
        '''
        This adds an event to the database.

        Inserts the event, then its conditions, actions and single trigger,
        each followed by that row's parameters. Each inserted row's id is
        fetched with a "newest id" query (relies on cp_max=1 serializing
        access to the single sqlite connection).
        @param name: display name of the event
        @param enabled: whether the event is active
        @param conditions: list of dicts with 'condition_type' and 'parameters'
        @param actions: list of dicts with 'action_type' and 'parameters'
        @param trigger: dict with 'trigger_type', 'conditions' and 'parameters'
        '''
        # Add event, and get event id
        yield self.dbpool.runQuery("INSERT INTO events (name, enabled) VALUES (?, ?)", [name, enabled])
        eventid = yield self.dbpool.runQuery("select id from events order by id desc limit 1")
        eventid = eventid[0][0]
        
        # Add conditions
        for condition in conditions:
            condition_type_id = condition["condition_type"]
            
            yield self.dbpool.runQuery("INSERT INTO conditions (condition_types_id, events_id)" +
                                       " VALUES (?, ?)", [condition_type_id, eventid])
            
            condition_id = yield self.dbpool.runQuery("select id from conditions order by id desc limit 1")
            condition_id = condition_id[0][0]
            
            for name, value in condition["parameters"].iteritems():
                yield self.dbpool.runQuery("INSERT INTO condition_parameters (name, value, " +
                                           "conditions_id) VALUES (?, ?, ?)", [name, value, condition_id])
        
        # Add actions
        for action in actions:
            action_type_id = action["action_type"]
            
            yield self.dbpool.runQuery("INSERT INTO actions (action_types_id, events_id)" +
                                       " VALUES (?, ?)", [action_type_id, eventid])
            
            action_id = yield self.dbpool.runQuery("select id from actions order by id desc limit 1")
            action_id = action_id[0][0]
            
            for name, value in action["parameters"].iteritems():
                yield self.dbpool.runQuery("INSERT INTO action_parameters (name, value, " +
                                           "actions_id) VALUES (?, ?, ?)", [name, value, action_id])
                
            
        # Insert trigger
        yield self.dbpool.runQuery("INSERT INTO triggers (trigger_types_id, events_id, conditions)" +
                                   " VALUES (?,?,?)", [trigger["trigger_type"], eventid, trigger["conditions"]])
 
        trigger_id = yield self.dbpool.runQuery("select id from triggers order by id desc limit 1")
        trigger_id = trigger_id[0][0]
       
        for name, value in trigger["parameters"].iteritems():
            yield self.dbpool.runQuery("INSERT INTO trigger_parameters (name, value, " +
                                       "triggers_id) VALUES (?, ?, ?)", [name, value, trigger_id])
               
    
    def add_trigger(self, trigger_type_id, event_id, value_id, parameters):
        """
        Add a trigger (plus its parameters) for an event.
        @param trigger_type_id: id of the trigger type
        @param event_id: id of the event the trigger belongs to
        @param value_id: id of the current value the trigger watches
        @param parameters: dict of parameter name -> value for the trigger
        @return: deferred firing once trigger and parameters are stored
        """
        # sqlite3 uses '?' (qmark) placeholders; the previous '%s' style is
        # the MySQL paramstyle and fails with the sqlite3 driver. Likewise,
        # last_insert_id() is MySQL -- fetch the trigger id explicitly, as the
        # other add_* methods do.
        d = self.dbpool.runQuery("INSERT INTO triggers (trigger_types_id, events_id" +
                                 ", current_values_id) VALUES (?, ?, ?)", (int(trigger_type_id),
                                                                           int(event_id),
                                                                           int(value_id)))

        def got_trigger_id(result):
            trigger_id = result[0][0]
            # Collect the parameter inserts so callers can wait on all of them
            # (previously these deferreds were fired and forgotten).
            deferreds = [self.dbpool.runQuery("INSERT INTO trigger_parameters (name, value, triggers_id) VALUES (?, ?, ?)",
                                              (name, value, trigger_id))
                         for name, value in parameters.iteritems()]
            return defer.gatherResults(deferreds)

        def insert_parameters(result):
            return self.dbpool.runQuery("select id from triggers order by id desc limit 1").addCallback(got_trigger_id)

        return d.addCallback(insert_parameters)
    
    #def add_action(self, action_type_id, event_id):
    
    def query_latest_device_id(self):
        '''
        This function queries the latest device id.
        '''
        # Order by id descending so we actually get the most recently added
        # device; a bare LIMIT 1 returned an arbitrary (first) row.
        return self.dbpool.runQuery('select id from devices ORDER BY id DESC LIMIT 1')
         
    def query_triggers(self):
        return self.dbpool.runQuery("SELECT triggers.id, trigger_types.name, triggers.events_id, triggers.conditions " + 
                                    "FROM triggers INNER JOIN trigger_types ON (triggers.trigger_types_id = trigger_types.id)")

    def query_trigger(self, event_id):
        return self.dbpool.runQuery("SELECT triggers.id, trigger_types.name, triggers.events_id, triggers.conditions " + 
                                    "FROM triggers INNER JOIN trigger_types ON (triggers.trigger_types_id = trigger_types.id) " +
                                    "WHERE triggers.events_id = ? LIMIT 1", [event_id])
        
    def query_conditions(self):
        return self.dbpool.runQuery("SELECT conditions.id, condition_types.name, conditions.events_id " + 
                                    "FROM conditions INNER JOIN condition_types ON (conditions.condition_types_id = condition_types.id)")

    def query_actions(self):
        return self.dbpool.runQuery("SELECT actions.id, action_types.name, actions.events_id " + 
                                    "FROM actions INNER JOIN action_types ON (actions.action_types_id = action_types.id)")

    def query_trigger_parameters(self, trigger_id):
        return self.dbpool.runQuery("SELECT name, value from trigger_parameters WHERE triggers_id = ?", [trigger_id])
    
    def query_condition_parameters(self, condition_id):
        return self.dbpool.runQuery("SELECT name, value from condition_parameters WHERE conditions_id = ?", [condition_id])        

    def query_action_parameters(self, action_id):
        return self.dbpool.runQuery("SELECT name, value from action_parameters WHERE actions_id = ?", [action_id])
    
    def query_device_routing_by_id(self, device_id):
        return self.dbpool.runQuery("SELECT devices.address, plugins.authcode FROM devices " +  
                                    "INNER JOIN plugins ON (devices.plugin_id = plugins.id) "
                                    "WHERE devices.id = ?", [device_id])

    def query_value_properties(self, value_id):
        return self.dbpool.runQuery("SELECT current_values.name, devices.address, devices.plugin_id from current_values " + 
                                    "INNER JOIN devices ON (current_values.device_id = devices.id) " + 
                                    "WHERE current_values.id = ?", [value_id])

    def query_plugin_devices(self, plugin_id):
        return self.dbpool.runQuery("SELECT devices.id, devices.name, devices.address, locations.name from devices " +
                                    "LEFT OUTER JOIN locations ON (devices.location_id = locations.id) " +
                                    "WHERE plugin_id=? ", [plugin_id])

    @inlineCallbacks
    def update_or_add_value(self, name, value, pluginid, address, time=None):
        '''
        This function updates or adds values to the HouseAgent database.
        @param name: the name of the value
        @param value: the actual value of the value
        @param pluginid: the plugin which holds the device information
        @param address: the address of the device being handled
        @param time: the time at which the update has been received, this defaults to now()
        @return: (via returnValue) the id of the updated/created value, or '' when the device is unknown
        '''
        # Timestamps are stored as 'YYYY-MM-DD HH:MM:SS' (microseconds stripped).
        if not time:
            updatetime = datetime.datetime.now().isoformat(' ').split('.')[0]
        else:
            updatetime = datetime.datetime.fromtimestamp(time).isoformat(' ').split('.')[0]
        
        # Query device first
        device_id = yield self.dbpool.runQuery('select id from devices WHERE plugin_id = ? and address = ? LIMIT 1', (pluginid, address) )

        try:
            device_id = device_id[0][0]
        except:
            returnValue('') # device does not exist
        
        current_value = yield self.dbpool.runQuery("SELECT id, name, history_type_id, history_period_id FROM current_values WHERE name=? AND device_id=? LIMIT 1", (name, device_id))
    
        try:
            value_id = current_value[0][0]
        except:
            value_id = None
    
        if value_id:
            # Existing value row: update it in place.
            value_id = current_value[0][0]
            
            # Fetched but not used further here; kept for reference.
            history_type = current_value[0][2]
            history_period = current_value[0][3]
            
            yield self.dbpool.runQuery("UPDATE current_values SET value=?, lastupdate=? WHERE id=?", (value, updatetime, value_id))
        else:
            # New value row: insert it, then read back the generated id.
            yield self.dbpool.runQuery("INSERT INTO current_values (name, value, device_id, lastupdate) VALUES (?, ?, (SELECT id FROM devices WHERE address=? AND plugin_id=?),  ?)", (name, value, address, pluginid, updatetime))
            current_value = yield self.dbpool.runQuery("SELECT id FROM current_values WHERE name=? AND device_id=?", (name, device_id))
            value_id = current_value[0][0]
                        
        returnValue(value_id)

    def register_plugin(self, name, uuid, location):
        return self.dbpool.runQuery("INSERT INTO plugins (name, authcode, location_id) VALUES (?, ?, ?)", [str(name), str(uuid), location])

    def query_plugins(self):
        return self.dbpool.runQuery("SELECT plugins.name, plugins.authcode, plugins.id, locations.name, plugins.location_id from plugins " +
                                    "LEFT OUTER JOIN locations ON (plugins.location_id = locations.id)")
    
    def query_plugin_by_type_name(self, type_name):
        return self.dbpool.runQuery("SELECT plugins.id, plugins.authcode from plugins " +
                                    "INNER JOIN plugin_types ON (plugins.plugin_type_id = plugin_types.id)" +
                                    "WHERE plugin_types.name = ? LIMIT 1", [type_name])

    def query_device_classes(self):
        return self.dbpool.runQuery("SELECT * from device_class order by name ASC")
    
    def query_device_types(self):
        return self.dbpool.runQuery("SELECT * from device_types order by name ASC")
       
    @inlineCallbacks
    def cb_device_crud(self, result, action, id=None, plugin=None, address=None, name=None, location=None):
        '''
        Callback function that get's called when a device has been created, updated or deleted in, to or from the database.
        @param result: the result of the action
        @param action: the action initiating the callback being create, update or delete
        @param id: the device id (supplied for 'update' and 'delete')
        @param plugin: the uuid of the plugin owning the device
        @param address: the address of the device
        @param name: the name of the device
        @param location: the name of the location associated with the device
        '''
        if action == "create":
            # Newest device row; relies on cp_max=1 serializing access.
            parms = yield self.dbpool.runQuery("SELECT plugins.authcode, devices.address, devices.name, locations.name FROM devices, plugins, locations WHERE devices.plugin_id = plugins.id AND devices.location_id = locations.id ORDER BY devices.id DESC LIMIT 1")
            
        if action == "update":
            parms = yield self.dbpool.runQuery("SELECT plugins.authcode, devices.address, devices.name, locations.name FROM devices, plugins, locations WHERE devices.plugin_id = plugins.id AND devices.location_id = locations.id AND devices.id=?", [id])

        if action != "delete":
            # For create/update the routing data comes from the query above;
            # for delete the caller passes it in, as the row is already gone.
            plugin = parms[0][0]
            address = parms[0][1]
            name = parms[0][2]
            location = parms[0][3]
            
        parameters = {"plugin": plugin, 
                      "address": address,
                      "name": name,
                      "location": location}

        # Broadcast the CRUD update to the coordinator, when one is attached.
        if self.coordinator:
            self.coordinator.send_crud_update("device", action, parameters)    

    def save_device(self, name, address, plugin_id, location_id, id=None):
        '''
        This functions saves a device in the HouseAgent database.
        @param name: the name of the device
        @param address: the address of the device
        @param plugin_id: the plugin_id of the associated plugin
        @param location_id: the location_id of the associated location
        @param id: the id of the device (in case this is an update)
        '''
        if id:
            query = "UPDATE devices SET name=?, address=?, plugin_id=?, location_id=? WHERE id=?"
            d = self.dbpool.runQuery(query, (name, address, plugin_id, location_id, id))
            return d.addCallback(self.cb_device_crud, "update", id)

        query = "INSERT INTO devices (name, address, plugin_id, location_id) VALUES (?, ?, ?, ?)"
        d = self.dbpool.runQuery(query, (name, address, plugin_id, location_id))
        return d.addCallback(self.cb_device_crud, "create")

    def del_device(self, id):
        """Delete a device, first capturing its routing info for the CRUD notification."""
        def _do_delete(info, device_id):
            # info still holds authcode/address/name/location of the row being removed.
            self.dbpool.runQuery("DELETE FROM devices WHERE id=?", [device_id]).addCallback(
                self.cb_device_crud, "delete", device_id, info[0][0], info[0][1], info[0][2], info[0][3])

        sql = ("SELECT plugins.authcode, devices.address, devices.name, locations.name FROM plugins, devices, locations "
               "WHERE devices.plugin_id = plugins.id AND devices.location_id = locations.id AND devices.id=?")
        return self.dbpool.runQuery(sql, [id]).addCallback(_do_delete, id)

    def del_location(self, id):
        """Remove a location row by id."""
        sql = "DELETE FROM locations WHERE id=?"
        return self.dbpool.runQuery(sql, [id])

    @inlineCallbacks
    def del_event(self, id):
        """
        Delete an event together with all of its triggers, conditions,
        actions and their parameters.
        @param id: the id of the event to delete
        """
        # Use IN (...) instead of = (...): an event can own multiple
        # triggers/conditions/actions, and a scalar '=' subquery would only
        # match the parameters of the first row, leaving orphans behind.
        yield self.dbpool.runQuery("DELETE FROM trigger_parameters where triggers_id IN" +
                                   " (select id from triggers where events_id=?)", [id])
        
        yield self.dbpool.runQuery("DELETE FROM condition_parameters where conditions_id IN" +
                                   " (select id from conditions where events_id=?)" , [id])
    
        yield self.dbpool.runQuery("DELETE FROM action_parameters where actions_id IN" +
                                   " (select id from actions where events_id=?)", [id])
        
        # Parameters are gone; now remove the owning rows, then the event itself.
        yield self.dbpool.runQuery("DELETE FROM triggers where events_id=?", [id])
        yield self.dbpool.runQuery("DELETE FROM actions where events_id=?", [id])
        yield self.dbpool.runQuery("DELETE FROM conditions where events_id=?", [id])
        
        yield self.dbpool.runQuery("DELETE FROM events where id=?", [id])

    def del_plugin(self, id):
        """Remove a plugin row by id."""
        sql = "DELETE FROM plugins WHERE id=?"
        return self.dbpool.runQuery(sql, [id])

    def query_locations(self):
        return self.dbpool.runQuery("select locations.id, locations.name, l2.name from locations " +  
                                    "left join locations as l2 on locations.parent=l2.id")

    def query_values(self):
        return self.dbpool.runQuery("SELECT current_values.name, current_values.value, devices.name, " + 
                               "current_values.lastupdate, plugins.name, devices.address, locations.name, current_values.id" + 
                               ", control_types.name, control_types.id, history_types.name, history_periods.name, plugins.id FROM current_values INNER " +
                               "JOIN devices ON (current_values.device_id = devices.id) INNER JOIN plugins ON (devices.plugin_id = plugins.id) " + 
                               "LEFT OUTER JOIN locations ON (devices.location_id = locations.id) " + 
                               "LEFT OUTER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                               "LEFT OUTER JOIN history_types ON (current_values.history_type_id = history_types.id) " +
                               "LEFT OUTER JOIN history_periods ON (current_values.history_period_id = history_periods.id)")

    def query_values_light(self):
        return self.dbpool.runQuery("SELECT id, name, history_period_id, history_type_id FROM current_values;")

    def query_devices(self):      
        return self.dbpool.runQuery("SELECT devices.id, devices.name, devices.address, plugins.name, locations.name from devices " +
                                    "INNER JOIN plugins ON (devices.plugin_id = plugins.id) " +
                                    "LEFT OUTER JOIN locations ON (devices.location_id = locations.id)")

    def query_location(self, id):
        return self.dbpool.runQuery("SELECT id, name, parent FROM locations WHERE id=?", [id])
    
    def query_plugin(self, id):
        return self.dbpool.runQuery("SELECT id, name, location_id FROM plugins WHERE id=?", [id])
    
    def query_device(self, id):
        return self.dbpool.runQuery("SELECT id, name, address, plugin_id, location_id FROM devices WHERE id=?", [id])

    def query_triggertypes(self):
        return self.dbpool.runQuery("SELECT id, name from trigger_types")

    def query_actiontypes(self):
        return self.dbpool.runQuery("SELECT id, name from action_types")
    
    def query_conditiontypes(self):
        return self.dbpool.runQuery("SELECT id, name from condition_types")
    
    def query_controltypes(self):
        return self.dbpool.runQuery("SELECT id, name from control_types")
    
    def query_controltypename(self, current_value_id):
        return self.dbpool.runQuery("select control_types.name from current_values " +
                                    "INNER JOIN controL_types ON (control_types.id = current_values.control_type_id) " +
                                    "where current_values.id=?", [current_value_id])
    
    def query_devices_simple(self):
        return self.dbpool.runQuery("SELECT id, name from devices")
    
    def query_plugintypes(self):
        return self.dbpool.runQuery("SELECT id, name from plugin_types")

    # history collector stuff
    def query_history_types(self):
        return self.dbpool.runQuery("SELECT id, name FROM history_types;")

    def query_history_schedules(self):
        return self.dbpool.runQuery("SELECT id, name, history_period_id, history_type_id FROM current_values;")

    def query_history_periods(self):
        return self.dbpool.runQuery("SELECT id, name, secs, sysflag FROM history_periods;")

    def query_history_values(self, date_from, date_to):
        """
        Return (value, created_at) history rows within [date_from, date_to).
        @param date_from: inclusive lower bound (datetime string)
        @param date_to: exclusive upper bound (datetime string)
        """
        # Bound parameters instead of string interpolation (injection-safe,
        # consistent with the other queries in this class).
        return self.dbpool.runQuery("SELECT value, created_at FROM history_values WHERE created_at >= ? AND created_at < ?;", [date_from, date_to])

    def cleanup_history_values(self):
        """keep 7 days history of history_values table"""
        return self.dbpool.runQuery("DELETE FROM history_values WHERE created_at < DATETIME(DATETIME(), 'localtime', '-7 day');")

    def collect_history_values(self, value_id):
        return self.dbpool.runQuery("INSERT INTO history_values SELECT id, value, DATETIME(DATETIME(), 'localtime') FROM current_values WHERE id=?;", [value_id])

    # /history collector stuff

    def query_controllable_devices(self):
        return self.dbpool.runQuery("SELECT devices.name, devices.address, plugins.name, plugins.authcode, current_values.value, devices.id, control_types.name, current_values.id FROM current_values " +
                                    "INNER JOIN devices ON (current_values.device_id = devices.id) " +
                                    "INNER JOIN plugins ON (devices.plugin_id = plugins.id) " +
                                    "INNER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                                    "WHERE current_values.control_type_id != 0")
    
    def query_action_types_by_device_id(self, device_id):
        return self.dbpool.runQuery("SELECT current_values.id, current_values.name, control_types.name FROM current_values " +
                                    "INNER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                                    "WHERE current_values.device_id = ?", [device_id])

    def query_action_type_by_value_id(self, value_id):
        return self.dbpool.runQuery("SELECT control_types.name FROM current_values " +
                                    "INNER JOIN control_types ON (current_values.control_type_id = control_types.id) " +
                                    "WHERE current_values.id = ? LIMIT 1", [value_id])
        
    def query_values_by_device_id(self, device_id):
        """
        Query id and name of every current value belonging to a device.
        @param device_id: the id of the device whose values are requested
        """
        # Bound parameter instead of string interpolation: the old '%s' form
        # was open to SQL injection and broke on ids containing quotes.
        return self.dbpool.runQuery("SELECT id, name from current_values WHERE device_id = ?", [device_id])

    def query_device_type_by_device_id(self, device_id):
        return self.dbpool.runQuery("SELECT device_types.name FROM devices " +  
                                    "INNER JOIN device_types ON (device_types.id = devices.device_type_id) " + 
                                    "WHERE devices.id = ? LIMIT 1", [device_id])

    def query_value_by_valueid(self, value_id):
        return self.dbpool.runQuery("SELECT value,name from current_values WHERE id = ? LIMIT 1", [value_id])
    
    def query_extra_valueinfo(self, value_id):
        return self.dbpool.runQuery("select devices.name, current_values.name from current_values " +
                                    "inner join devices on (current_values.device_id = devices.id) " + 
                                    "where current_values.id = ?", [value_id])

    def set_history(self, id, history_period, history_type):
        """Update a value's history settings, then refresh its collector schedule."""
        # The histcollector must see fresh data, so re-register only after the
        # UPDATE has completed.
        d = self.dbpool.runQuery("UPDATE current_values SET history_period_id=?, history_type_id=? WHERE id=?", [history_period, history_type, id])

        def _refresh(result, value_id, period):
            self.histcollector.cb_unregister_schedule(int(value_id))
            self.histcollector.cb_register_schedule(int(value_id), period)

        return d.addCallback(_refresh, id, history_period)
    
    def set_controltype(self, id, control_type):
        """Set the control type on a current_values row."""
        params = [control_type, id]
        return self.dbpool.runQuery("UPDATE current_values SET control_type_id=? WHERE id=?", params)

    def update_location(self, id, name, parent):
        """Rename/re-parent an existing location row."""
        params = [name, parent, id]
        return self.dbpool.runQuery("UPDATE locations SET name=?, parent=? WHERE id=?", params)
    
    def update_plugin(self, id, name, location):
        """Rename/relocate an existing plugin row."""
        params = [name, location, id]
        return self.dbpool.runQuery("UPDATE plugins SET name=?, location_id=? WHERE id=?", params)
    
    def query_events(self):
        """Return id, name and enabled flag for every event."""
        sql = "SELECT id, name, enabled from events"
        return self.dbpool.runQuery(sql)
# Example #9
class DatabaseQuery:
    """Asynchronous Postgres access layer for token/vote registration.

    Credentials are read from the environment in __init__; connect() must be
    called before any query/insert method is used, otherwise get_connection()
    raises ConnectionError.
    """

    def __init__(self):
        # Postgres credentials come from the environment. KeyError if missing.
        self.postgres_database = os.environ['POSTGRES_DATABASE']
        self.postgres_user = os.environ['POSTGRES_USER']
        self.postgres_password = os.environ['POSTGRES_PASS']
        self.postgres_host = os.environ['POSTGRES_HOST']

    def connect(self):
        """
        Setup our database connection. Throws if cannot connect.
        """
        # BUG FIX: the original format string contained only two %s
        # placeholders ("user:'******'") for three arguments, which raised
        # TypeError before any connection was attempted.
        print("[DatabaseQuery] Connecting to database"
              "\n    -> database:'%s' user:'%s' host:'%s'" %
              (self.postgres_database, self.postgres_user, self.postgres_host))

        # get a connection, if a connect cannot be made an exception will be raised here
        self.dbConnection = ConnectionPool('psycopg2',
                                           database=self.postgres_database,
                                           user=self.postgres_user,
                                           host=self.postgres_host,
                                           password=self.postgres_password)

        print("[DatabaseQuery] Database connection sucsessful")

    def get_connection(self):
        """
        Returns the database connection so we can access it from an AMP responder.

        :raises ConnectionError: if connect() has not been called yet.
        """
        if hasattr(self, 'dbConnection'):
            return self.dbConnection
        raise ConnectionError

    def insert_into_register_token_blind_token_hash_user_id_ballot_id(
            self, blind_token_hash, user_id, ballot_id):
        """
        Register a token signature request in the database.

        :return: Deferred firing {'ok': True} on success; failures are
            logged and re-raised for the caller.
        """
        def onSuccess(result):
            print("[DatabaseQuery - register_token_request] - Insert sucsess:")
            return {'ok': True}

        def onError(failure):
            print("[DatabaseQuery - register_token_request] - Insert error:")
            pprint.pprint(failure.value)
            # raiseException() itself raises; no extra `raise` needed
            failure.raiseException()

        def _insert(cursor, user_id, ballot_id):
            # values are bound by the driver (parameterized, injection-safe)
            statement = "INSERT INTO token_request (blind_token_hash, user_id, ballot_id) VALUES (%s, %s, %s);"
            cursor.execute(statement, (blind_token_hash, user_id, ballot_id))

        deferred = self.dbConnection.runInteraction(_insert, user_id,
                                                    ballot_id)
        deferred.addCallback(onSuccess)
        deferred.addErrback(onError)

        return deferred

    def insert_into_register_vote_signed_token_hash_voter_address_ballot_id(
            self, signed_token_hash, voter_address, ballot_id):
        """
        Register a vote in the database.

        :return: Deferred firing {'ok': True} on success; failures are
            logged and re-raised for the caller.
        """
        def onSuccess(result):
            print("[DatabaseQuery - register_vote_request] - Insert sucsess:")
            return {'ok': True}

        def onError(failure):
            print("[DatabaseQuery - register_vote_request] - Insert error:")
            pprint.pprint(failure.value)
            failure.raiseException()

        def _insert(cursor, signed_token, voter_address, ballot_id):
            statement = "INSERT INTO register_vote (signed_token_hash, voter_address, ballot_id) VALUES (%s, %s, %s);"
            cursor.execute(statement,
                           (signed_token_hash, voter_address, ballot_id))

        deferred = self.dbConnection.runInteraction(_insert, signed_token_hash,
                                                    voter_address, ballot_id)
        deferred.addCallback(onSuccess)
        deferred.addErrback(onError)

        return deferred

    def search_register_vote_for_voter_address(self, voter_address):
        """
        Requests all rows associated with a voter_address from the
        register_vote table.

        :param voter_address: address to filter on
        :return: Deferred firing {'ok': <pickled row list>} on success;
            failures are logged and re-raised.
        """
        def onSuccess(results):
            print("[DatabaseQuery - retrieve_request_sign] - Query sucsess:")

            # Convert list of results to bytes for transport
            encoded_results = pickle.dumps(results)

            return {'ok': encoded_results}

        def onError(failure):
            print("[DatabaseQuery - retrieve_request_sign] - Query error:")
            pprint.pprint(failure.value)
            failure.raiseException()

        # SECURITY FIX: parameterized query instead of %-string interpolation
        # (the original was vulnerable to SQL injection via voter_address).
        deferred = self.dbConnection.runQuery(
            "SELECT * FROM register_vote WHERE voter_address=%s;",
            (voter_address,))
        deferred.addCallback(onSuccess)
        deferred.addErrback(onError)

        return deferred

    def search_token_request_for_user_id(self, user_id):
        """
        Requests all rows associated with a user_id from the token_request
        table.

        :param user_id: user to filter on
        :return: Deferred firing {'ok': <pickled row list>} on success;
            failures are logged and re-raised.
        """
        def onSuccess(results):
            print("[DatabaseQuery - retrieve_request_sign] - Query sucsess:")
            pprint.pprint(results, indent=4)

            # Convert list of results to bytes for transport
            encoded_results = pickle.dumps(results)

            return {'ok': encoded_results}

        def onError(failure):
            print("[DatabaseQuery - retrieve_request_sign] - Query error:")
            pprint.pprint(failure.value)
            failure.raiseException()

        # SECURITY FIX: parameterized query instead of %-string interpolation.
        deferred = self.dbConnection.runQuery(
            "SELECT * FROM token_request WHERE user_id=%s;", (user_id,))
        deferred.addCallback(onSuccess)
        deferred.addErrback(onError)

        return deferred

    def search_ballot_register_for_ballot_id(self, ballot_id, voter_address):
        """
        Requests all rows matching a ballot_id and voter_address from the
        register_vote table.

        :param ballot_id: ballot to filter on
        :param voter_address: address to filter on
        :return: Deferred firing {'ok': <pickled row list>} on success;
            failures are logged and re-raised.
        """
        def onSuccess(results):
            print(
                "[DatabaseQuery - retrieve_request_register] - Query sucsess:")
            pprint.pprint(results, indent=4)

            # Convert list of results to bytes for transport
            encoded_results = pickle.dumps(results)

            return {'ok': encoded_results}

        def onError(failure):
            print("[DatabaseQuery - retrieve_request_register] - Query error:")
            pprint.pprint(failure.value)
            failure.raiseException()

        # SECURITY FIX: parameterized query instead of %-string interpolation.
        deferred = self.dbConnection.runQuery(
            "SELECT * FROM register_vote WHERE ballot_id=%s AND voter_address=%s;",
            (ballot_id, voter_address))
        deferred.addCallback(onSuccess)
        deferred.addErrback(onError)

        return deferred
Exemple #10
0
class SQLMagicPipeline(object):
	"""Scrapy item pipeline persisting SQLItem instances via a Twisted
	adbapi connection pool.

	Supports sqlite, postgres ('pgsql'), mysql and (untested) firebird
	backends. SQL statements are built from the $-templates in
	``self.queries`` by ``_sql_format``.
	"""

	def __init__(self, settings, **kwargs):
		"""Connect to database in the pool.

		:param settings: dict with 'drivername', 'database', 'username',
			'password', 'host', 'port'
		:param kwargs: optional 'stats', 'debug' flag and 'queries' overrides
		:raises NotConfigured: if settings is not a dict
		"""
		if not isinstance(settings, dict):
			raise NotConfigured('No database connection settings found.')

		self.settings = settings
		self.stats = kwargs.get('stats')
		self.debug = kwargs.get('debug', False)
		self.paramstyle = ':'
		self.identifier = '"'  # default to ANSI quoting
		self.queries = {
			'select': "SELECT $fields FROM $table:esc WHERE $indices:and",  # select on UniqueFields
			'selectall': "SELECT $fields FROM $table:esc",
			'selectone': "SELECT $fields FROM $table:esc WHERE $indices:and LIMIT 1",  # if backend supports LIMIT
			#
			'delete': "DELETE FROM $table:esc WHERE $indices:and",  # match on UniqueFields
			'deleteme': "DELETE FROM $table:esc WHERE $fields_values:and",  # exact item match
		}
		self.dbapi = None

		if self.settings.get('drivername') == 'sqlite':
			self.dbapi = __import__('sqlite3', fromlist=[''])
			self.__dbpool = ConnectionPool('sqlite3', self.settings.get('database', ':memory:'),
				# the connection pool / thread pool does not do the teardown
				# in the same thread (https://twistedmatrix.com/trac/ticket/3629)
				# therefore throwing errors on finalClose at reactor shutdown
				check_same_thread=False,  # SQLite must be compiled threadsafe to use this
				# raise the database timeout sufficiently to avoid
				# "database is locked" errors (instead of limiting cp_max=1)
				timeout=300,
			)
			# default statements for sqlite
			# BUG FIX: "INSERT INTO ... SET ..." is MySQL-only syntax; sqlite
			# needs the standard "INSERT INTO ... (cols) VALUES (...)" form.
			self.queries.update({
				'insert': "INSERT INTO $table:esc ($fields) VALUES ($values)",
				'upsert': "INSERT OR REPLACE INTO $table:esc ($fields) VALUES ($values)",
				'update': "UPDATE $table:esc SET $fields_values WHERE $indices:and",
			})
		elif self.settings.get('drivername') == 'pgsql':
			self.dbapi = __import__('psycopg2', fromlist=[''])
			self.__dbpool = ConnectionPool('psycopg2', database=self.settings.get('database'),
				user=self.settings.get('username'),
				password=self.settings.get('password', None),
				host=self.settings.get('host', None),  # default to unix socket
				port=self.settings.get('port', '5432'),
			)
			self.paramstyle = '%s'
			# default statements for postgres
			self.queries.update({
				'insert': "INSERT INTO $table:esc ($fields) VALUES ($values)",
				'update': "UPDATE $table:esc SET $fields_values WHERE $indices:and",
			})
		elif self.settings.get('drivername') == 'mysql':
			self.dbapi = __import__('MySQLdb', fromlist=[''])
			from MySQLdb import cursors
			self.__dbpool = ReconnectingConnectionPool('MySQLdb', db=self.settings.get('database'),
				user=self.settings.get('username'),
				passwd=self.settings.get('password', None),
				host=self.settings.get('host', 'localhost'),  # should default to unix socket
				port=self.settings.get('port', 3306),
				cursorclass=cursors.DictCursor,
				charset='utf8',
				use_unicode=True,
				# connpool settings
				cp_reconnect=True,
			)
			self.paramstyle = '%s'
			self.identifier = '`'  # MySQL quoting
			# default statements for mysql
			self.queries.update({
				'insert': "INSERT INTO $table:esc ($fields) VALUES ($values)",
				'upsert': "INSERT INTO $table:esc SET $fields_values ON DUPLICATE KEY UPDATE $fields_values",
				'update': "UPDATE $table:esc SET $fields_values WHERE $indices:and",
			})
		elif self.settings.get('drivername') == 'firebird':
			# untested
			self.dbapi = __import__('fdb', fromlist=[''])
			self.__dbpool = ConnectionPool('fdb', database=self.settings.get('database'),
				user=self.settings.get('username'),
				password=self.settings.get('password', None),
				host=self.settings.get('host', None),  # default to unix socket
				port=self.settings.get('port', 3050),
				charset='UTF8',  # specify a character set for the connection
			)
			self.paramstyle = '?'
			self.queries.update({
				'insert': "INSERT INTO $table:esc ($fields) VALUES ($values)",
				'update': "UPDATE $table:esc SET $fields_values WHERE $indices:and",
			})

		# caller-supplied statement overrides win over backend defaults
		self.queries.update(kwargs.get('queries', {}))

	@classmethod
	def from_crawler(cls, crawler):
		"""Build the pipeline from crawler settings (SQLMAGIC_*)."""
		if not crawler.settings.get('SQLMAGIC_DATABASE'):
			raise NotConfigured('No database connection settings found.')

		return cls(
			settings=crawler.settings.get('SQLMAGIC_DATABASE'),
			stats=crawler.stats,
			queries=crawler.settings.get('SQLMAGIC_QUERIES', {}),
			debug=crawler.settings.getbool('SQLMAGIC_DEBUG'),
		)

	def open_spider(self, spider):
		self.on_connect()

	def on_connect(self):
		## override this to run some queries after connecting
		# e.g. create tables for an in-memory SQLite database
		pass

	def close_spider(self, spider):
		self.shutdown()

	def shutdown(self):
		"""Shutdown connection pool, kill associated threads"""
		self.__dbpool.close()

	def process_item(self, item, spider):
		"""Process the item: persist SQLItems, pass everything else through."""
		# Only handle items inheriting SQLItem
		if not isinstance(item, SQLItem):
			return item

		self.stats.inc_value('sqlmagic/total_items_caught')

		# always return original item, whatever the SQL outcome was
		deferred = self.operation(item, spider)
		deferred.addBoth(lambda _: item)
		return deferred

	def operation(self, item, spider):
		"""Try an INSERT; on a duplicate-key IntegrityError fall back to UPDATE."""

		def on_insert(result, query, params):
			self.stats.inc_value('sqlmagic/sqlop_success_insert')
			if self.debug:
				qlog = self._log_preparedsql(query, params)
				log.msg('%s executed: %s' % (self.__class__.__name__, qlog), level=log.DEBUG, spider=spider)
			return result

		def on_update(result, query, params):
			self.stats.inc_value('sqlmagic/sqlop_success_update')
			if self.debug:
				qlog = self._log_preparedsql(query, params)
				log.msg('%s executed: %s' % (self.__class__.__name__, qlog), level=log.DEBUG, spider=spider)
			return result

		def on_integrityerror(error, query, params):
			# terminal handler: swallow integrity errors (best-effort persist)
			error.trap(self.dbapi.IntegrityError)
			e = error.getErrorMessage()
			self.stats.inc_value('sqlmagic/error_integrity')
			if self.debug:
				qlog = self._log_preparedsql(query, params)
				log.msg('%s failed executing: %s\nError: %s' % (self.__class__.__name__, qlog, e), level=log.INFO, spider=spider)

		def on_operationalerror(error, query, params):
			# terminal handler: swallow operational errors (best-effort persist)
			error.trap(self.dbapi.OperationalError)
			e = error.getErrorMessage()
			self.stats.inc_value('sqlmagic/error_operational')
			if self.debug:
				qlog = self._log_preparedsql(query, params)
				log.msg('%s failed executing: %s\nError: %s' % (self.__class__.__name__, qlog, e), level=log.WARNING, spider=spider)

		def on_seriouserror(error, query, params):
			# programming/interface errors keep bubbling after logging
			error.trap(self.dbapi.ProgrammingError, self.dbapi.InterfaceError)
			e = error.getErrorMessage()
			self.stats.inc_value('sqlmagic/error_connection')
			if self.debug:
				qlog = self._log_preparedsql(query, params)
				log.msg('%s FAILED executing: %s\nError: %s' % (self.__class__.__name__, qlog, e), level=log.WARNING, spider=spider)
			error.raiseException()  # keep bubbling
			return error

		def update(error, query, params):
			error.trap(self.dbapi.IntegrityError)
			# BUG FIX: DB-API exceptions are not subscriptable in Python 3;
			# the error code lives in .args[0]. 1062 == MySQL duplicate key.
			if error.value.args[0] != 1062:
				error.raiseException()  # keep bubbling
			self.stats.inc_value('sqlmagic/sqlop_update_after_insert_tries')
			d = self.__dbpool.runInteraction(self.transaction, query, params, item, spider)
			d.addCallback(on_update, query, params)
			return d

		# try insert
		query, params = _sql_format(self.queries['insert'], item, paramstyle=self.paramstyle, identifier=self.identifier)
		deferred = self.__dbpool.runInteraction(self.transaction, query, params, item, spider)
		deferred.addCallback(on_insert, query, params)
		deferred.addErrback(on_seriouserror, query, params)
		deferred.addErrback(on_operationalerror, query, params)
		# on failure, update
		query, params = _sql_format(self.queries['update'], item, paramstyle=self.paramstyle, identifier=self.identifier)
		deferred.addErrback(update, query, params)
		deferred.addErrback(on_seriouserror, query, params)
		deferred.addErrback(on_operationalerror, query, params)
		deferred.addErrback(on_integrityerror, query, params)
		deferred.addErrback(self._database_error, item, spider)

		self.stats.inc_value('sqlmagic/total_items_returned')
		return deferred

	def transaction(self, txn, query, params, item, spider):
		"""Execute one prepared statement inside a pool transaction."""
		self.stats.inc_value('sqlmagic/sqlop_transact_%s' % query[:6].lower())
		txn.execute(query, params)

	def _log_preparedsql(self, query, params):
		"""Simulate escaped query for log"""
		for p in params:
			query = re.sub('(\\'+self.paramstyle+r'\d?)', '"%s"' % p, query, count=1)
		return query

	def _database_error(self, e, item, spider=None):
		"""Log exceptions."""
		if spider:
			log.err(e, spider=spider)
		else:
			log.err(e)

	def query(self, sql):
		"""Run a raw statement in the connection pool.

		SELECT (and CALL, when the procedure returns rows) statements go
		through runQuery and yield rows; everything else goes through
		runOperation. Possible use-case: building start requests from
		database results inside spider code.

		BUG FIX: the original referenced an undefined name ``query`` and
		used two independent ``if`` statements, so SELECTs fell through to
		the ``else`` and ran runOperation as well.
		"""
		head = sql.lstrip().lower()
		if head.startswith('select') or head.startswith('call'):
			# potential fail: a CALLed procedure must run a SELECT for this,
			# otherwise it should use runOperation
			deferred = self.__dbpool.runQuery(sql)
		else:
			deferred = self.__dbpool.runOperation(sql)
		return deferred
Exemple #11
0
class DatabaseQuery:
    """Asynchronous Postgres access layer for ballot registration/browsing.

    Credentials are read from the environment in __init__; connect() must be
    called before any query/insert method is used, otherwise get_connection()
    raises ConnectionError.
    """

    def __init__(self):
        # Postgres credentials come from the environment. KeyError if missing.
        self.postgres_database = os.environ['POSTGRES_DATABASE']
        self.postgres_user = os.environ['POSTGRES_USER']
        self.postgres_password = os.environ['POSTGRES_PASS']
        self.postgres_host = os.environ['POSTGRES_HOST']

    def connect(self):
        """
        Setup our database connection. Throws if cannot connect.
        """
        # BUG FIX: the original format string contained only two %s
        # placeholders ("user:'******'") for three arguments, which raised
        # TypeError before any connection was attempted.
        print("[DatabaseQuery] Connecting to database"
              "\n    -> database:'%s' user:'%s' host:'%s'" %
              (self.postgres_database, self.postgres_user, self.postgres_host))

        # get a connection, if a connect cannot be made an exception will be raised here
        self.dbConnection = ConnectionPool('psycopg2',
                                           database=self.postgres_database,
                                           user=self.postgres_user,
                                           host=self.postgres_host,
                                           password=self.postgres_password)

        print("[DatabaseQuery] Database connection sucsessful")

    def get_connection(self):
        """
        Returns the database connection so we can access it from an AMP responder.

        :raises ConnectionError: if connect() has not been called yet.
        """
        if hasattr(self, 'dbConnection'):
            return self.dbConnection
        raise ConnectionError

    def search_ballot_register_for_user_id(self, user_id):
        """
        Requests all ballots a user is registered for, joined with the
        available_ballots metadata.

        :param user_id: user to filter on
        :return: Deferred firing {'ok': <pickled row list>} on success;
            failures are logged and re-raised.
        """
        def onSuccess(results):
            print("[DatabaseQuery - retrieve_ballots] - Query sucsess:")

            # Convert list of results to bytes for transport
            encoded_results = pickle.dumps(results)

            return {'ok': encoded_results}

        def onError(failure):
            print("[DatabaseQuery - retrieve_ballots] - Query error:")
            pprint.pprint(failure.value)
            # raiseException() itself raises; no extra `raise` needed
            failure.raiseException()

        # SECURITY FIX: user_id is bound by the driver instead of being
        # %-interpolated into the SQL text (injection-safe).
        query = "SELECT " \
                    "ballot_register.user_id," \
                    "ballot_register.ballot_id," \
                    "ballot_register.created_on," \
                    "available_ballots.ballot_name," \
                    "available_ballots.ballot_address " \
                "FROM " \
                    "available_ballots, " \
                    "ballot_register " \
                "WHERE " \
                    "ballot_register.user_id=%s AND " \
                    "ballot_register.ballot_id=available_ballots.ballot_id;"
        deferred = self.dbConnection.runQuery(query, (user_id,))
        deferred.addCallback(onSuccess)
        deferred.addErrback(onError)

        return deferred

    def search_ballots_available_for_all_ballots(self):
        """
        Requests all rows from the available_ballots table.

        :return: Deferred firing {'ok': <pickled row list>} on success;
            failures are logged and re-raised.
        """
        def onSuccess(results):
            print(
                "[DatabaseQuery - search_ballots_available_for_all_ballots] - Query sucsess:"
            )

            # Convert list of results to bytes for transport
            encoded_results = pickle.dumps(results)

            return {'ok': encoded_results}

        def onError(failure):
            print(
                "[DatabaseQuery - search_ballots_available_for_all_ballots] - Query error:"
            )
            pprint.pprint(failure.value)
            failure.raiseException()

        query = "SELECT * FROM available_ballots;"
        deferred = self.dbConnection.runQuery(query)
        deferred.addCallback(onSuccess)
        deferred.addErrback(onError)

        return deferred

    def search_ballots_available_for_ballot_id(self, ballot_id):
        """
        Requests the available_ballots row matching a ballot_id.

        :param ballot_id: ballot to filter on
        :return: Deferred firing {'ok': <pickled row list>} on success;
            failures are logged and re-raised.
        """
        def onSuccess(results):
            print(
                "[DatabaseQuery - search_ballots_available_for_ballot_id] - Query sucsess:"
            )

            # Convert list of results to bytes for transport
            encoded_results = pickle.dumps(results)

            return {'ok': encoded_results}

        def onError(failure):
            print(
                "[DatabaseQuery - search_ballots_available_for_all_ballots] - Query error:"
            )
            pprint.pprint(failure.value)
            failure.raiseException()

        # SECURITY FIX: parameterized query instead of %-string interpolation.
        deferred = self.dbConnection.runQuery(
            "SELECT * FROM available_ballots WHERE ballot_id=%s;",
            (ballot_id,))
        deferred.addCallback(onSuccess)
        deferred.addErrback(onError)

        return deferred

    def insert_into_ballot_register_user_id_ballot_id(self, user_id,
                                                      ballot_id):
        """
        Register a user for a ballot in the ballot_register table.

        :param user_id: user to register
        :param ballot_id: ballot to register for
        :return: Deferred firing {'ok': True} on success; failures are
            logged and re-raised.
        """
        def onSuccess(result):
            print(
                "[DatabaseQuery - insert_into_ballot_register_user_id_ballot_id] - Insert sucsess:"
            )
            return {'ok': True}

        def onError(failure):
            print(
                "[DatabaseQuery - insert_into_ballot_register_user_id_ballot_id] - Insert error:"
            )
            pprint.pprint(failure.value)
            failure.raiseException()

        def _insert(cursor, user_id, ballot_id):
            # SECURITY FIX: values are bound by the driver, not interpolated
            # into the statement text (the original used % formatting).
            statement = "INSERT INTO ballot_register (user_id, ballot_id) VALUES (%s, %s);"
            cursor.execute(statement, (user_id, ballot_id))

        deferred = self.dbConnection.runInteraction(_insert, user_id,
                                                    ballot_id)
        deferred.addCallback(onSuccess)
        deferred.addErrback(onError)

        return deferred

    def insert_into_ballots_available(self, ballot_name, ballot_address,
                                      ballot_interface, ballot_end_date):
        """
        Register a new ballot in the available_ballots table.

        :return: Deferred firing {'ballot_address': <address>} on success;
            failures are logged and re-raised.
        """
        def onSuccess(result):
            print(
                "[DatabaseQuery - insert_into_ballots_available] - Insert sucsess:"
            )
            return {'ballot_address': ballot_address}

        def onError(failure):
            print(
                "[DatabaseQuery - insert_into_ballots_available] - Insert error:"
            )
            pprint.pprint(failure.value)
            failure.raiseException()

        def _insert(cursor, ballot_name, ballot_address, ballot_interface,
                    ballot_end_date):
            # SECURITY FIX: values are bound by the driver, not interpolated.
            statement = "INSERT INTO available_ballots (ballot_name, ballot_address, ballot_interface, ballot_end_date) VALUES (%s, %s, %s, %s);"
            print(statement)
            cursor.execute(statement, (ballot_name, ballot_address,
                                       ballot_interface, ballot_end_date))

        print("\nballot_name\n", ballot_name, "\nballot_address\n",
              ballot_address, "\nballot_interface\n", ballot_interface,
              "\nballot_end_date\n", ballot_end_date)

        deferred = self.dbConnection.runInteraction(_insert, ballot_name,
                                                    ballot_address,
                                                    ballot_interface,
                                                    ballot_end_date)
        deferred.addCallback(onSuccess)
        deferred.addErrback(onError)

        return deferred
Exemple #12
0
class SQLMagicPipeline(object):
    """Scrapy item pipeline that persists SQLItem objects to a relational
    database through a Twisted adbapi ConnectionPool.

    Connection setup and the SQL statement templates are selected from
    ``settings['drivername']`` (sqlite / pgsql / mysql / firebird).
    """

    def __init__(self, settings, **kwargs):
        """Connect to database in the pool.

        :param settings: dict with 'drivername' plus backend connection
            parameters (database, username, password, host, port)
        :param kwargs: optional 'stats' collector, 'debug' flag and
            'queries' dict overriding the default statement templates
        """

        if not isinstance(settings, dict):
            raise NotConfigured('No database connection settings found.')

        self.settings = settings
        self.stats = kwargs.get('stats')
        self.debug = kwargs.get('debug', False)
        # paramstyle / identifier defaults; overridden per backend below.
        self.paramstyle = ':'
        self.identifier = '"'  # default to ANSI quoting
        # Statement templates; the $-placeholders are expanded elsewhere
        # (presumably by _sql_format — TODO confirm).
        self.queries = {
            'select':
            "SELECT $fields FROM $table:esc WHERE $indices:and",  # select on UniqueFields
            'selectall': "SELECT $fields FROM $table:esc",
            'selectone':
            "SELECT $fields FROM $table:esc WHERE $indices:and LIMIT 1",  # if backend supports LIMIT
            #
            'delete':
            "DELETE FROM $table:esc WHERE $indices:and",  # match on UniqueFields
            'deleteme':
            "DELETE FROM $table:esc WHERE $fields_values:and",  # exact item match
        }
        # DB-API module for the chosen backend; used for exception classes.
        self.dbapi = None

        if self.settings.get('drivername') == 'sqlite':
            self.dbapi = __import__('sqlite3', fromlist=[''])
            self.__dbpool = ConnectionPool(
                'sqlite3',
                self.settings.get('database', ':memory:'),
                # apparently the connection pool / thread pool does not do the teardown in the same thread
                # https://twistedmatrix.com/trac/ticket/3629
                # therefore throwing errors on finalClose at reactor shutdown
                # TODO: should be able to work around that?
                check_same_thread=
                False,  # SQLite must be compiled threadsafe to use this
                # limit connection pool to one thread to avoid "database is locked" errors
                #cp_max=1,
                # - or raise the database timeout sufficiently
                timeout=300,
            )
            # alternative escaping parameter
            #self.paramstyle = '?'
            #self.paramstyle = ':'
            #self.paramstyle = '$'
            # default statements for sqlite
            # NOTE(review): "INSERT INTO ... SET ..." is MySQL syntax; SQLite
            # does not support it — verify this 'insert' template is ever used.
            self.queries.update({
                'insert':
                "INSERT INTO $table:esc SET $fields_values",
                'upsert':
                "INSERT OR REPLACE INTO $table:esc ($fields) VALUES ($values)",
                'update':
                "UPDATE $table:esc SET $fields_values WHERE $indices:and",
            })
        elif self.settings.get('drivername') == 'pgsql':
            self.dbapi = __import__('psycopg2', fromlist=[''])
            #from psycopg2.extras import DictCursor
            self.__dbpool = ConnectionPool(
                'psycopg2',
                database=self.settings.get('database'),
                user=self.settings.get('username'),
                password=self.settings.get('password', None),
                host=self.settings.get('host', None),  # default to unix socket
                port=self.settings.get('port', '5432'),
                #	cursor_factory = DictCursor,
            )
            self.paramstyle = '%s'
            # default statements for postgres
            self.queries.update({
                'insert':
                "INSERT INTO $table:esc ($fields) VALUES ($values)",
                'update':
                "UPDATE $table:esc SET $fields_values WHERE $indices:and",
            })
        elif self.settings.get('drivername') == 'mysql':
            self.dbapi = __import__('MySQLdb', fromlist=[''])
            from MySQLdb import cursors
            # ReconnectingConnectionPool transparently re-runs interactions
            # after a dropped MySQL connection (cp_reconnect below).
            self.__dbpool = ReconnectingConnectionPool(
                'MySQLdb',
                db=self.settings.get('database'),
                user=self.settings.get('username'),
                passwd=self.settings.get('password', None),
                host=self.settings.get(
                    'host', 'localhost'),  # should default to unix socket
                port=self.settings.get('port', 3306),
                cursorclass=cursors.DictCursor,
                charset='utf8',
                use_unicode=True,
                # connpool settings
                cp_reconnect=True,
                #cp_noisy = True,
                #cp_min = 1,
                #cp_max = 1,
            )
            self.paramstyle = '%s'
            self.identifier = '`'  # MySQL quoting
            # default statements for mysql
            self.queries.update({
                'insert':
                "INSERT INTO $table:esc ($fields) VALUES ($values)",
                #	'upsert': "REPLACE INTO $table ($fields) VALUES ($values)",
                'upsert':
                "INSERT INTO $table:esc SET $fields_values ON DUPLICATE KEY UPDATE $fields_values",
                'update':
                "UPDATE $table:esc SET $fields_values WHERE $indices:and",
            })
        elif self.settings.get('drivername') == 'firebird':
            # untested
            self.dbapi = __import__('fdb', fromlist=[''])
            self.__dbpool = ConnectionPool(
                'fdb',
                database=self.settings.get('database'),
                user=self.settings.get('username'),
                password=self.settings.get('password', None),
                host=self.settings.get('host', None),  # default to unix socket
                port=self.settings.get('port', 3050),
                #dialect = 1, # necessary for all dialect 1 databases
                charset='UTF8',  # specify a character set for the connection
            )
            self.paramstyle = '?'
            self.queries.update({
                'insert':
                "INSERT INTO $table:esc ($fields) VALUES ($values)",
                'update':
                "UPDATE $table:esc SET $fields_values WHERE $indices:and",
            })

        # caller-supplied templates override the backend defaults
        self.queries.update(kwargs.get('queries', {}))

    @classmethod
    def from_crawler(cls, crawler):
        if not crawler.settings.get('SQLMAGIC_DATABASE'):
            raise NotConfigured('No database connection settings found.')

        o = cls(settings=crawler.settings.get('SQLMAGIC_DATABASE'),
                stats=crawler.stats,
                queries=crawler.settings.get('SQLMAGIC_QUERIES', {}),
                debug=crawler.settings.getbool('SQLMAGIC_DEBUG'))
        return o

    def open_spider(self, spider):
        self.on_connect()

    def on_connect(self):
        """Hook for subclasses: issue setup queries right after connecting
        (e.g. CREATE TABLE for an in-memory SQLite database).  No-op here."""
        pass

    def close_spider(self, spider):
        self.shutdown()

    def shutdown(self):
        """Shutdown connection pool, kill associated threads"""
        self.__dbpool.close()

    def process_item(self, item, spider):
        """Persist SQLItem instances; pass any other item through untouched."""
        # Only handle items inheriting SQLItem; others are not ours.
        if not isinstance(item, SQLItem):
            return item

        self.stats.inc_value('sqlmagic/total_items_caught')

        # Fire the insert/update; regardless of outcome, hand the original
        # item on to the next pipeline stage.
        deferred = self.operation(item, spider)
        deferred.addBoth(lambda _result: item)
        return deferred

    def operation(self, item, spider):
        """Try to INSERT the item; fall back to an UPDATE on duplicate key.

        Builds the 'insert' statement for the item and runs it in a pool
        transaction.  If the insert fails with an IntegrityError whose
        code is 1062 (duplicate key), the 'update' statement is run
        instead.  Other database errors are counted in stats and, for
        programming/interface errors, re-raised.  Returns the Deferred of
        the chained operations.
        """
        def on_insert(result, query, params):
            # success callback for the INSERT attempt
            self.stats.inc_value('sqlmagic/sqlop_success_insert')
            if self.debug:
                qlog = self._log_preparedsql(query, params)
                log.msg('%s executed: %s' % (self.__class__.__name__, qlog),
                        level=log.DEBUG,
                        spider=spider)
            return result

        def on_update(result, query, params):
            # success callback for the fallback UPDATE
            self.stats.inc_value('sqlmagic/sqlop_success_update')
            if self.debug:
                qlog = self._log_preparedsql(query, params)
                log.msg('%s executed: %s' % (self.__class__.__name__, qlog),
                        level=log.DEBUG,
                        spider=spider)
            return result

        def on_integrityerror(error, query, params):
            # swallows IntegrityError (logs + counts, does not re-raise)
            error.trap(self.dbapi.IntegrityError)
            e = error.getErrorMessage()
            self.stats.inc_value('sqlmagic/error_integrity')
            if self.debug:
                qlog = self._log_preparedsql(query, params)
                log.msg('%s failed executing: %s\nError: %s' %
                        (self.__class__.__name__, qlog, e),
                        level=log.INFO,
                        spider=spider)

        #	error.raiseException() # keep bubbling

        def on_operationalerror(error, query, params):
            # swallows OperationalError (logs + counts, does not re-raise)
            error.trap(self.dbapi.OperationalError)
            e = error.getErrorMessage()
            self.stats.inc_value('sqlmagic/error_operational')
            if self.debug:
                qlog = self._log_preparedsql(query, params)
                log.msg('%s failed executing: %s\nError: %s' %
                        (self.__class__.__name__, qlog, e),
                        level=log.WARNING,
                        spider=spider)

        #	error.raiseException() # keep bubbling

        def on_seriouserror(error, query, params):
            # ProgrammingError/InterfaceError: log, count, then re-raise
            error.trap(self.dbapi.ProgrammingError, self.dbapi.InterfaceError)
            e = error.getErrorMessage()
            self.stats.inc_value('sqlmagic/error_connection')
            if self.debug:
                qlog = self._log_preparedsql(query, params)
                log.msg('%s FAILED executing: %s\nError: %s' %
                        (self.__class__.__name__, qlog, e),
                        level=log.WARNING,
                        spider=spider)
            error.raiseException()  # keep bubbling
            return error

        def update(error, query, params):
            # errback: retry as UPDATE after a duplicate-key IntegrityError
            error.trap(self.dbapi.IntegrityError)
            if error.value[0] != 1062:  # Duplicate key
                error.raiseException()  # keep bubbling
            #e = error.getErrorMessage()
            #if self.debug:
            #	qlog = self._log_preparedsql(query, params)
            #	log.msg('%s got error %s - trying update' % (self.__class__.__name__, e), level=log.DEBUG, spider=spider)
            self.stats.inc_value('sqlmagic/sqlop_update_after_insert_tries')
            d = self.__dbpool.runInteraction(self.transaction, query, params,
                                             item, spider)
            d.addCallback(on_update, query, params)
            return d

        # try insert
        query, params = _sql_format(self.queries['insert'],
                                    item,
                                    paramstyle=self.paramstyle,
                                    identifier=self.identifier)
        #query, params = _sql_format(self.queries['upsert'], item, paramstyle=self.paramstyle, identifier=self.identifier)
        deferred = self.__dbpool.runInteraction(self.transaction, query,
                                                params, item, spider)
        deferred.addCallback(on_insert, query, params)
        deferred.addErrback(on_seriouserror, query, params)
        deferred.addErrback(on_operationalerror, query, params)
        #deferred.addErrback(on_integrityerror, query, params) # ignore failing inserts before update
        # on failure, update
        query, params = _sql_format(self.queries['update'],
                                    item,
                                    paramstyle=self.paramstyle,
                                    identifier=self.identifier)
        deferred.addErrback(update, query, params)
        deferred.addErrback(on_seriouserror, query, params)
        deferred.addErrback(on_operationalerror, query, params)
        deferred.addErrback(on_integrityerror, query, params)
        deferred.addErrback(self._database_error, item, spider)

        #	deferred = self.insert_or_update((query,params), (update, uparams), item, spider)

        self.stats.inc_value('sqlmagic/total_items_returned')
        return deferred

    def transaction(self, txn, query, params, item, spider):
        self.stats.inc_value('sqlmagic/sqlop_transact_%s' % query[:6].lower())
        txn.execute(query, params)

    """
	def xtransaction(self, txn, query, params, item, spider):
		# primary key check
		query, params = _sql_format(self.queries['select'], item, paramstyle=self.paramstyle, identifier=self.identifier)
		txn.execute(query, params)
		result = txn.fetchone()
		if result:
			log.msg("Item already in db: (id) %s item:\n%r" % (result['id'], item), level=log.WARNING)

		query, params = _sql_format(self.queries['insert'], item, paramstyle=self.paramstyle, identifier=self.identifier)
		# transaction in thread
		qlog = self._log_preparedsql(query, params)
		try:
			txn.execute(query, params)
		except self.dbapi.IntegrityError as e:
			#spider.log('%s FAILED executing: %s' % (self.__class__.__name__, qlog), level=log.DEBUG)
			query, params = _sql_format(self.queries['update'], item, paramstyle=self.paramstyle, identifier=self.identifier)
			qlog = self._log_preparedsql(query, params)
			try:
				#spider.log('%s executing: %s' % (self.__class__.__name__, qlog), level=log.DEBUG)
				txn.execute(query, params)
			except self.dbapi.OperationalError as e:
				# retrying in new transaction
			#	spider.log('%s errored. Retrying.\nError: %s\nQuery: %s' % (self.__class__.__name__, e, qlog), level=log.WARNING)
			#	self._spool.append((query, params, item))
			#except Exception as e:
				if self.debug:
					spider.log('%s FAILED executing: %s\nError: %s' % (self.__class__.__name__, qlog, e), level=log.WARNING)
				raise
			finally:
				if self.debug:
					spider.log('%s executed: %s' % (self.__class__.__name__, qlog), level=log.DEBUG)
		except self.dbapi.OperationalError as e:
			# also try again
			if self.debug:
				spider.log('%s failed: %s' % (self.__class__.__name__, qlog), level=log.DEBUG)
			raise
		finally:
			if self.debug:
				spider.log('%s executed: %s' % (self.__class__.__name__, qlog), level=log.DEBUG)
	"""

    def _log_preparedsql(self, query, params):
        """Simulate escaped query for log"""
        for p in params:
            query = re.sub('(\\' + self.paramstyle + r'\d?)',
                           '"%s"' % p,
                           query,
                           count=1)
        return query

    def _database_error(self, e, item, spider=None):
        """Final errback: report a database failure to the twisted log,
        tagging it with the spider when one was supplied."""
        if not spider:
            log.err(e)
        else:
            log.err(e, spider=spider)

    def query(self, sql):
        # run a query in the connection pool
        # parameters for prepared statements must be passed as 'sql=(query, params)'
        # (possible use-case from inside spider code)
        '''Spider Example: build start requests from database results

		from scrapy.exceptions import CloseSpider, NotConfigured
		from ..pipelines.sqlmagic import SQLMagicPipeline

		class MySpider(Spider):
			def spider_opened(self, spider):
				try:
					self.db = SQLMagicPipeline(self.settings.get('SQLMAGIC_DATABASE'))
				except NotConfigured:
					raise CloseSpider('Could not get database settings.')

			@defer.inlineCallbacks
			def db_queries(self, response):
				query = """CALL procedure ()"""
				result = yield self.db.query(query)

				# build requests
				requests = []
				for value in result:
					r = yield self.build_request_fromdb(response, value)
					requests.append(r)

				# queue them
				defer.returnValue(requests)

			def start_requests(self):
				yield Request(self.start_urls[0], callback=self.database_queries)

			def build_request_fromdb(self, response, db):
				# custom logic to convert db result into a request
				r = Request(response.url)
				r.callback = self.parse
				return r
		'''
        if query[:6].lower() in ('select', ):
            deferred = self.__dbpool.runQuery(sql)
        if query[:4].lower() in ('call', ):
            # potential fail: procedure must run a SELECT for this,
            # otherwise it should do runOperation
            deferred = self.__dbpool.runQuery(sql)
        else:
            deferred = self.__dbpool.runOperation(sql)
        return deferred