def backHome(dbfly): q= MotherPool._pool_queue m= MotherPool._pool_current_mutex sname= dbfly.session_name m.acquire() if MotherPool._pool_type == DB_POOL_ELASTIC and \ MotherPool._pool_current > MotherPool._pool_min: MotherPool._ns_discard(dbfly) Speaker.log_info("Elastic Pool: session %s " "closed and removed.", OKI_COL(sname)) m.release() return try: q.put_nowait(dbfly) except Exception, ss: Speaker.log_warning( "Removing connection %s from Pool: %s.", ERR_COL(dbfly.session_name), ERR_COL(ss)) MotherPool._ns_discard(dbfly) m.release() return
def _ns_discard(dbfly): MotherPool._pool_current-= 1 MotherPool._del_orphan(dbfly) try: dbfly._close() except Exception, ss: Speaker.log_error("Unable to close connection: %s", ERR_COL(ss))
def _executemany(self, q, l): try: self.cursor.executemany(q, l) except psycopg2.OperationalError: Speaker.log_raise('Connection is broken...', BrokenConnection) except Exception, ss: Speaker.log_raise('%s', ERR_COL(ss), QueryError)
def _del_orphan(db):
    """Forget *db* in the orphans map (keyed by id(db)), under its mutex."""
    m = MotherPool._pool_orphans_mutex
    m.acquire()
    try:
        MotherPool._pool_orphans.pop(id(db))
    except KeyError:
        # Narrowed from a bare except: dict.pop only raises KeyError here.
        Speaker.log_error("Unable to close de-orphan connection: %s, %s, %s",
                          ERR_COL(MotherPool._pool_orphans),
                          ERR_COL(db), ERR_COL(id(db)))
    finally:
        # BUG FIX: release moved into finally so an unexpected error in the
        # logging path can no longer leave the mutex held forever.
        m.release()
def discard(dbfly):
    """Drop *dbfly* from the pool, serialized on the current-count mutex."""
    mutex = MotherPool._pool_current_mutex
    mutex.acquire()
    MotherPool._ns_discard(dbfly)
    mutex.release()
    Speaker.log_warning("Connection %s dropped from Pool.",
                        ERR_COL(dbfly.session_name))
def mq_query(s, l):
    """ Multiple Quiet Query: execute a multiple, quiet query."""
    Speaker.log_debug("Executing Massive Query...")
    DbOne._safe_execute(DbOne._mqquery, s, l)
    # Autocommit applies only outside an explicit transaction.
    if not DbOne.trans_level:
        DbOne._commit()
def mq_query(self, s, l):
    """ Multiple Quiet Query: execute a multiple quiet query."""
    Speaker.log_debug("%s: Executing Massive Query...",
                      INF_COL(self.session_name))
    # The return value was never used by the original code either.
    self._safe_execute(self._mqquery, s, l)
    # len(l) - 1: because a query is added by safe_execute
    self._queries_n += len(l) - 1
def _init_pool(n= None, pg_conn= None):
    """Bring the pool up: build the shared queue and pre-open connections.

    n defaults (via `or`, so 0 also falls back) to _pool_min.
    pg_conn is accepted for interface compatibility but unused here.
    """
    howmany = n or MotherPool._pool_min
    Speaker.log_info("Initializing connection Pool ...")
    from Queue import Queue
    MotherPool._pool_queue = Queue()
    MotherPool._addConnection(howmany)
    MotherPool._pool_initialized = True
def mg_query(s, l):
    """ Multiple Get Query: execute a multiple, get query."""
    Speaker.log_debug("Executing Massive Query...")
    rows = DbOne._safe_execute(DbOne._mgquery, s, l)
    # Autocommit applies only outside an explicit transaction.
    if not DbOne.trans_level:
        DbOne._commit()
    return rows
def _connect(self): Speaker.log_insane('Initializing postgres connection...') try: s= _PostgresInfo._connect_str() self.connection= psycopg2.connect(s) self.cursor= self.connection.cursor() except Exception, ss: Speaker.log_raise('Unable to establish a connection ' 'with the database: %s', ERR_COL(ss), BrokenConnection)
def or_query(s, filter= None):
    """ One Record Query: returns a dict.

    Raises QueryError unless the query yields exactly one record.
    """
    res = DbOne.mr_query(s, filter)
    # FIX: replaced the obsolete `<>` operator with `!=` (same semantics,
    # `<>` is deprecated in Python 2 and removed in Python 3).
    if len(res) != 1:
        Speaker.log_raise(
            "Query returned %s records instead of 1." % ERR_COL(len(res)),
            QueryError)
    return res[0]
def ov_query(s, filter):
    """ One Value Query: returns a unique value.

    Raises QueryError unless the single record holds exactly one value.
    """
    res = DbOne.or_query(s, filter)
    vals = res.values()
    # FIX: replaced the obsolete `<>` operator with `!=` (same semantics,
    # `<>` is deprecated in Python 2 and removed in Python 3).
    if len(vals) != 1:
        Speaker.log_raise(
            "Query returned %s values instead of 1." % ERR_COL(len(vals)),
            QueryError)
    return vals[0]
def rollback():
    """Abort the current transaction and reset the nesting level to zero."""
    if not DbOne.trans_level:
        # No transaction in flight (or already rolled back).
        Speaker.log_warning("Nothing to rollback: "
                            "nested rollback?")
        return
    Speaker.log_debug("Rollbacking queries... (trans level= %s) ",
                      DbOne.trans_level)
    DbOne._rollback()
    # Unlike commit, rollback collapses every nesting level at once.
    DbOne.trans_level = 0
    return
def _connect(self): Speaker.log_insane('Initializing sqlite connection (db = %s)...', INF_COL(_SqliteInfo.dbfile)) dbfile= _SqliteInfo.dbfile try: self.connection= apsw.Connection(dbfile) self.cursor= self.connection.cursor() except Exception, ss: Speaker.log_raise('Unable to establish a connection ' 'with the database: %s', ERR_COL(ss), BrokenConnection)
def init_postgres(vars):
    """Load postgres connection settings from the *vars* mapping.

    DB_USER and DB_NAME are mandatory (log_int_raise on absence); the
    password, host and port fall back to '', 'localhost' and 5432.
    """
    # FIX: both bare excepts narrowed to KeyError — the only exception a
    # plain mapping lookup raises; anything else should propagate.
    try:
        _PostgresInfo.dbuser = vars['DB_USER']
    except KeyError:
        Speaker.log_int_raise('Variable %s not specified!',
                              ERR_COL('DB_USER'))
    try:
        _PostgresInfo.dbname = vars['DB_NAME']
    except KeyError:
        Speaker.log_int_raise('Variable %s not specified!',
                              ERR_COL('DB_NAME'))
    _PostgresInfo.dbpasswd = vars.get('DB_PASSWD', '')
    _PostgresInfo.dbhost = vars.get('DB_HOST', 'localhost')
    _PostgresInfo.dbport = vars.get('DB_PORT', 5432)
def export_iface(where):
    """Copy every exported DbOne method onto the object *where*."""
    if not DbOne._db_initialized:
        err = ERR_COL('!!!No Session Available!!!')
        Speaker.log_int_raise("%s You are using the Db Pool, you "
                              "have disabled the persistent connection, "
                              "but no session was used to initialize this "
                              "Mother class", err)
    for attr in DbOne._exported_methods:
        setattr(where, attr, getattr(DbOne, attr))
    # mogrify is backend-dependent: export it only when present.
    mog = getattr(DbOne, 'mogrify', None)
    if mog:
        setattr(where, 'mogrify', mog)
def newSession(name= None):
    # Create a database session: a standalone DbFly when pooling is off,
    # otherwise one fetched from the pool under the get-mutex.
    #
    # NOTE(review): as visible here, the success path neither releases
    # `m` nor returns `session` — this function appears truncated in this
    # view (the sibling backHome shows the same truncation pattern);
    # confirm the tail against the full file before relying on it.
    Speaker.log_info("Initializing session %s", INF_COL(name))
    if not MotherPool._pool_initialized:
        # Pool not set up: presumably hand back a fresh, unpooled session.
        return DbFly(name)
    m= MotherPool._pool_get_mutex
    m.acquire()
    try:
        session= MotherPool._get_session()
    except Exception, ss:
        m.release()
        # log_int_raise presumably logs and raises, so callers never
        # observe a fall-through here — TODO confirm.
        Speaker.log_int_raise(
            "Cannot retrieve Session from Pool (%s). FATAL.",
            ERR_COL(ss))
def _add_conn(n=1):
    """ Never call me directly: use _addConnection!"""
    current = MotherPool._pool_current
    limit = MotherPool._pool_max          # renamed: `max` shadowed the builtin
    Speaker.log_info(
        "Adding %s new connection(s) (max= %s, cur= %s)",
        OKI_COL(n), OKI_COL(limit), OKI_COL(current))
    queue = MotherPool._pool_queue
    for _ in xrange(n):
        session = DbFly()
        session._pool_queue = queue
        queue.put(session)
        MotherPool._pool_added.append(session)
    MotherPool._pool_current += n
    return n
def init_abdbda(conf, forced= {}):
    # Initialize AbDbDa from *conf*: either a ready dict of settings or a
    # path to a configuration file executed with execfile().
    #
    # NOTE(review): `forced= {}` is a mutable default argument; it is not
    # used in the visible span, so left untouched — verify against the
    # rest of the function before changing it.
    #
    # NOTE(review): this function appears truncated in this view: `err`
    # is assigned but never consumed here — the remainder presumably logs
    # it once the Speaker is available (see the comment below).
    if _DbInfo.db_initialized:
        Speaker.log_info('AbDbDa already initialized.')
        return
    # Take trace of possible errors:
    # When Speaker will be initialized, it
    # will be possible to log them
    err= None
    if not isinstance(conf, dict):
        # *conf* is a file path: execute it with the speaker names plus
        # this module's globals in scope, collecting settings into `loc`.
        import mother.speaker as speaker
        loc= {}
        names_dict= speaker.__dict__.copy()
        names_dict.update(globals())
        try:
            execfile(conf, names_dict, loc)
        except Exception, ss:
            err= "Unable to read Mother configuration " \
                 "file %s: %s" % (ERR_COL(conf), ss)
def return_filter(self):
    """Assemble and return (sql_filter_string, locals) for this filter."""
    pre = self.pre_filter
    post = self.post_filter
    # At most one pre- and one post-fragment are supported.
    if len(pre) > 1 or len(post) > 1:
        Speaker.log_int_raise('Cannot handle Filter: pre, post= %s, %s',
                              ERR_COL(pre), ERR_COL(post))
    if pre:
        clause = pre[0]
    else:
        clause = ''
    conditions = self.strfilter
    if len(conditions):
        joined = _A(conditions)
        # If a WHERE already appears (in the joined conditions or in the
        # pre-fragment), chain with AND; otherwise open the WHERE clause.
        if 'WHERE' in joined or 'WHERE' in clause:
            joined = 'AND %s' % joined
        else:
            joined = 'WHERE %s' % joined
        clause = '%s %s' % (clause, joined)
    if post:
        clause = '%s %s' % (clause, post[0])
    return clause, self.locals
def _get_session():
    """ Never call me directly: use newSession!"""
    db_pool = MotherPool._pool_queue
    # Fast path: a free connection is already queued.
    try:
        return db_pool.get_nowait()
    except MotherPool._empty_queue:
        pass
    full = MotherPool._full()
    calm = MotherPool._pool_calm
    # Decide between waiting and growing the pool immediately.
    if calm or full:
        ptimeout = MotherPool._pool_timeout
        Speaker.log_info(
            "MotherPool: waiting for a free connection "
            "(timeout= %s) ...", INF_COL(ptimeout))
        if full:
            # Pool at maximum size: block and hope (a timeout propagates
            # the queue's Empty exception to the caller).
            return db_pool.get(True, ptimeout)
        # Calm pool: block for a while, but on timeout fall through and
        # grow the pool instead of failing.
        try:
            return db_pool.get(True, ptimeout)
        except MotherPool._empty_queue:
            pass
    # Not full, and any polite waiting is over: add a connection.
    MotherPool._addConnection()
    return db_pool.get_nowait()
def commit():
    """Commit the outermost transaction, or just unwind one nesting level."""
    level = DbOne.trans_level
    if not level:
        Speaker.log_warning("Nothing to commit: "
                            "nested commit?")
        return
    if level == 1:
        # Outermost level: actually flush to the database.
        DbOne._commit()
        DbOne.trans_level -= 1
        Speaker.log_debug("Queries committed.")
        return
    # Inner level: only decrement the nesting counter.
    DbOne.trans_level -= 1
    Speaker.log_debug("Decremented transaction: now %s",
                      INF_COL(DbOne.trans_level))
    return
def _safe_execute(execattr, s, d):
    """Run execattr(s, d), transparently reconnecting once on a broken link."""
    try:
        return execattr(s, d)
    except BrokenConnection:
        Speaker.log_info('Connection to DB seems broken, '
                         'now reconnecting...')
        try:
            DbOne._connect()
        except:
            Speaker.log_raise('Cannot re-establish connection',
                              BrokenConnection)
        # if we are inside a trans, we have to signal
        # the broken transaction -> exception.
        # otherwise, the query is tried once more
        if DbOne.trans_level:
            Speaker.log_raise('Connection re-established: transaction is lost.',
                              ConnectionError)
        return execattr(s, d)
def _do_query(s, ftr= None, result= True):
    """Normalize *ftr* into bind parameters, log the query, and run it."""
    if result:
        execattr = DbOne._gquery
    else:
        execattr = DbOne._qquery
    if not ftr:
        d = {}
    elif isinstance(ftr, MoFilter):
        # The filter object may rewrite the query string itself.
        s, d = ftr.return_ftrqry(s)
    elif isinstance(ftr, (dict, tuple)):
        d = ftr
    else:
        Speaker.log_int_raise('Invalid Filter Type: %s', ERR_COL(type(ftr)))
    # Prefer the backend's mogrify (when present) for readable logging.
    mogrify = getattr(DbOne._iface_instance, '_mogrify', None)
    try:
        Speaker.log_debug("QSQL- %s", mogrify(s, d))
    except:
        Speaker.log_debug("QSQL- %s, Filter= %s", s, d)
    return DbOne._safe_execute(execattr, s, d)
def _execute(self, q, d): d= d or {} try: self.cursor.execute(q, d) except Exception, ss: Speaker.log_raise('%s', ERR_COL(ss), QueryError)
"closed and removed.", OKI_COL(sname)) m.release() return try: q.put_nowait(dbfly) except Exception, ss: Speaker.log_warning( "Removing connection %s from Pool: %s.", ERR_COL(dbfly.session_name), ERR_COL(ss)) MotherPool._ns_discard(dbfly) m.release() return MotherPool._del_orphan(dbfly) Speaker.log_info('Session %s back to the Pool.', OKI_COL(sname)) m.release() return @staticmethod def _ns_discard(dbfly): MotherPool._pool_current-= 1 MotherPool._del_orphan(dbfly) try: dbfly._close() except Exception, ss: Speaker.log_error("Unable to close connection: %s", ERR_COL(ss)) @staticmethod def discard(dbfly):
def add_filter(self, filter, tbl= None, store= None):
    """Register the conditions described by *filter* on this filter object.

    Accepted filter types:
      str                -- raw SQL fragment ('JOIN...' goes to pre_filter,
                            anything else to strfilter);
      dict               -- column/value pairs; values may be the
                            SQL_DEFAULT / SQL_NULL sentinels;
      list/set/frozenset -- column names whose values come from *store*
                            (mandatory in this case).

    *tbl* optionally prefixes column names; *store*, when given, is merged
    into self.locals.  Raises InvalidFilter on unsupported types or when a
    listed key is missing from *store*.
    """
    if not filter:
        return

    def t(tp):
        return isinstance(filter, tp)

    if store:
        self.locals.update(store)

    if t(str):
        # Raw SQL: JOIN fragments must precede the WHERE conditions.
        if 'JOIN' in filter:
            self.pre_filter.append(filter)
        else:
            self.strfilter.append(filter)
        return

    afrm = self._arg_format

    def _cond(k, v):
        # Render one SQL condition for column k / value v, honoring the
        # SQL_DEFAULT / SQL_NULL sentinels and the optional table prefix.
        if v == SQL_DEFAULT:
            if tbl:
                return '%s.%s= DEFAULT' % (tbl, k)
            return '%s= DEFAULT' % k
        if v == SQL_NULL:
            if tbl:
                return '%s.%s IS NULL' % (tbl, k)
            return '%s IS NULL' % k
        if tbl:
            return '%s.%s= %s' % (tbl, k, afrm(k))
        return '%s= %s' % (k, afrm(k))

    res = []

    if t(dict):
        for k, v in filter.iteritems():
            res.append(_cond(k, v))
        self.strfilter.append(_A(res))
        self.locals.update(filter)
        return

    # ToDo: just assume that filter is iterable?
    if t(list) or t(set) or t(frozenset):
        if not store:
            Speaker.log_raise('Invalid Filter: store not provided '
                              'for iterator filter.', InvalidFilter)
        for k in filter:
            try:
                v = store[k]
                self.locals[k] = v
            except KeyError:
                # Narrowed from a bare except: only a missing key is expected.
                Speaker.log_raise('Invalid Filter: key %s not in store',
                                  ERR_COL(k), InvalidFilter)
            # BUG FIX: this branch used to test `v == SQL_DEFAULT` twice
            # (copy-paste), so SQL_NULL values could never produce an
            # `IS NULL` condition for iterator filters.  _cond now applies
            # the same sentinel handling as the dict branch.
            res.append(_cond(k, v))
        self.strfilter.append(_A(res))
        return

    Speaker.log_raise('Invalid Filter: type %s not allowed.',
                      ERR_COL(type(filter)), InvalidFilter)
def beginTrans():
    """Enter one more level of explicit transaction nesting."""
    DbOne.trans_level += 1
    Speaker.log_debug('Incremented transaction level: %s',
                      INF_COL(DbOne.trans_level))
def _close(self):
    """Close the underlying connection, swallowing any failure."""
    Speaker.log_insane("Closing Connection...")
    # Deliberate best-effort: the connection may already be gone.
    try:
        self.connection.close()
    except:
        pass
def init_sqlite(vars):
    """Load sqlite settings from the *vars* mapping; DB_FILE is mandatory."""
    # FIX: bare except narrowed to KeyError — the only exception a plain
    # mapping lookup raises (consistent with the init_postgres handling).
    try:
        _SqliteInfo.dbfile = vars['DB_FILE']
    except KeyError:
        Speaker.log_int_raise('Variable %s not specified!',
                              ERR_COL('DB_FILE'))