Example #1
    def __init__(self, pool_name, minconn, maxconn, host, port, dbname, encoding, statement_timeout, *args, **kwargs):
        self.logger = logging.getLogger(self.__class__.__name__)
        self.pool_name = pool_name
        self.address = (host, port)
        self.dbname = dbname
        self.encoding = encoding
        self.statement_timeout = statement_timeout
        PersistentConnectionPool.__init__(self, minconn, maxconn, *args, **kwargs)  #IGNORE:W0142
Example #2
def get_connection_pooled():
    """
    :rtype: connection
    """
    global conn_pool
    if conn_pool is None:
        CONFIG = ConfigParser()
        CONFIG.read(ini_file)
        conn_pool = PersistentConnectionPool(conn_pool_min,
                                             conn_pool_max,
                                             CONFIG["Postgres"]["db"],
                                             cursor_factory=DictCursor)
    conn = conn_pool.getconn()
    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    return conn
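A minimal sketch of how a caller might use get_connection_pooled(), assuming the module-level conn_pool above; run_query is a hypothetical helper, and because the connection is set to autocommit no explicit commit is needed:

def run_query(sql, params=None):
    # hypothetical helper, not part of the original module
    conn = get_connection_pooled()
    cur = conn.cursor()
    try:
        cur.execute(sql, params)
        return cur.fetchall()
    finally:
        cur.close()
        conn_pool.putconn(conn)  # hand the thread's connection back to the pool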
Example #4
    def get_connection(database_config_name, path_to_conf_file=None):
        global conn_pool, MAX_CONNECTIONS_IN_POOL

        if conn_pool is None:
            config = ConnectionManager.get_config(database_config_name, path_to_conf_file)

            conn_pool = PersistentConnectionPool(minconn=1, maxconn=MAX_CONNECTIONS_IN_POOL,
                                                 host=config['host'],
                                                 database=config['database'],
                                                 user=config['user'],
                                                 password=config['password'])
        got_connection = False
        while not got_connection:
            try:
                conn = conn_pool.getconn()
                #cur = conn.cursor(cursor_factory=cursor_type)
                got_connection = True
            except psycopg2.OperationalError as mess:
                print("OperationalError opening connection: %s" % mess)
                sleep(1)
            except AttributeError as mess:
                print("AttributeError opening connection: %s" % mess)
                sleep(1)
        return conn
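A hedged usage sketch for the manager above; the config name "my_db" is a placeholder, and the connection goes back through the module-level conn_pool that get_connection() populates:

conn = ConnectionManager.get_connection("my_db")  # "my_db" is a placeholder
try:
    cur = conn.cursor()
    cur.execute("SELECT version()")
    print(cur.fetchone()[0])
finally:
    conn_pool.putconn(conn)  # return the connection to the global pool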
Example #6
    def _connect(self, key=None):
        conn = PersistentConnectionPool._connect(self, key)
        if self.encoding and conn.encoding and conn.encoding.lower() != self.encoding.lower():
            conn.set_client_encoding(self.encoding)
        debug_string = 'Pool {0} created new connection with {1} encoding'.format(self.pool_name, conn.encoding)
        if self.statement_timeout is not None:
            cur = conn.cursor()
            try:
                # this could be improved by reading the old statement_timeout value
                # and restoring it when the connection is returned to the pool
                cur.execute('set STATEMENT_TIMEOUT to %s;', (self.statement_timeout,))
            except psycopg2.Error as e:
                self.logger.error('Could not set connection statement_timeout to {0}: {1}'.format(self.statement_timeout, e))
                conn.rollback()
            finally:
                cur.close()
                del cur
                conn.commit()
                debug_string += ' and statement_timeout set to {0} ms'.format(self.statement_timeout)
        self.logger.debug(debug_string + '.')
        del debug_string
        return conn
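Examples #1 and #6 read like two methods of the same PersistentConnectionPool subclass. A sketch of how they might be assembled, where the class name and all connection parameters are assumptions; note that the base class only receives *args/**kwargs, so the DSN fields have to be repeated as keywords for psycopg2.connect():

import logging
import psycopg2
from psycopg2.pool import PersistentConnectionPool

class EncodingAwarePool(PersistentConnectionPool):  # hypothetical name
    pass  # imagine __init__ from Example #1 and _connect from Example #6 here

pool = EncodingAwarePool('reports', 1, 10, 'db1', 5432, 'appdb', 'UTF8', 5000,
                         host='db1', port=5432, dbname='appdb',
                         user='app', password='secret')
conn = pool.getconn()  # _connect() applies the encoding and statement_timeout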
Example #7
class PostGres(object):
    INIT_SQL = None

    def __init__(self, app=None):
        self.app = None
        self.pool = None

        if app:
            self.init_app(app)

    def cleanup(self, response):
        """Return connection to pool on request end."""
        #FIXME: we should have better dirty detection, maybe wrap up insert queries?
        if getattr(g, 'transaction_dirty', False):
            if response.status_code < 400:
                self.connection.commit()
            else:
                self.connection.rollback()
            self.pool.putconn()
        return response

    def bailout(self, app, exception):
        """Return connection to pool on request ended by unhandled exception."""
        if app.debug and getattr(g, 'transaction_dirty', False):
            self.connection.rollback()
            self.pool.putconn()

    def init_app(self, app):
        self.app = app
        # read config
        db_args = {'minconn': app.config.get('DB_MIN_CONNECTIONS', 2),
                   'maxconn': app.config.get('DB_MAX_CONNECTIONS', 20),
                   'database': app.config.get('DB_NAME'),
                   'user': app.config.get('DB_USER'),
                   'password': app.config.get('DB_PASSWORD', ''),
                   }
        if 'DB_HOST' in app.config:
            db_args['host'] = app.config.get('DB_HOST')
        if 'DB_PORT' in app.config:
            db_args['port'] = app.config.get('DB_PORT')

        self.pool = PersistentConnectionPool(**db_args)

        app.after_request(self.cleanup)

        got_request_exception.connect(self.bailout, app)

    def get_connection(self):
        """Get a thread local database connection object."""
        #FIXME: this is dirty; can we detect whether we're in a request context?
        try:
            g.transaction_dirty = True
        except RuntimeError:
            # using the connection outside a request context
            pass

        return self.pool.getconn()
    connection = property(get_connection)

    def get_cursor(self):
        """Get a database cursor object to be used for making queries."""
        #FIXME: maybe use server side cursors?
        return self.connection.cursor(cursor_factory=DictCursor)

    def init_db(self, *args, **kwargs):
        """Initialize the database."""
        if not self.INIT_SQL:
            return
        cursor = self.get_cursor()
        cursor.execute(self.INIT_SQL)
        cursor.connection.commit()

    def function_exists(self, function):
        """Check if the SQL function already exists in the database."""
        query = """SELECT EXISTS(SELECT proname FROM pg_proc WHERE proname = %(function)s)"""
        cursor = self.get_cursor()
        cursor.execute(query, {'function': function})
        return cursor.fetchone()[0]

    def table_exists(self, table):
        """Check if table already exists in the database."""
        query = """SELECT EXISTS(SELECT relname FROM pg_class WHERE relname = %(table)s and relkind='r')"""
        cursor = self.get_cursor()
        cursor.execute(query, {'table': table})
        return cursor.fetchone()[0]

    def drop_table(self, table):
        """Drop the table if it exists."""
        query = """DROP TABLE IF EXISTS %s CASCADE;""" % table
        cursor = self.get_cursor()
        cursor.execute(query)
        cursor.connection.commit()
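A hedged sketch of wiring the extension into an application; the config values and the users table are placeholders. Flask calls cleanup() after every request, so the view never touches the pool directly:

from flask import Flask

app = Flask(__name__)
app.config.update(DB_NAME='appdb', DB_USER='app')  # placeholder settings

pg = PostGres(app)

@app.route('/users/<int:user_id>')
def show_user(user_id):
    cur = pg.get_cursor()
    cur.execute("SELECT * FROM users WHERE id = %s", (user_id,))
    row = cur.fetchone()
    return (dict(row), 200) if row else ('not found', 404)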
Example #8
        #print "thread: ", threading.currentThread().name

        conn = conn_pool.getconn()
        do_work(conn, item)
        q.task_done()


logging.info("get assignments (multithreaded) from MTurk - START")

num_worker_threads = 10

q = Queue.Queue()
mturk_conn = mturk.conn()
conn_pool = PersistentConnectionPool(num_worker_threads,
                                     num_worker_threads + 5,
                                     database=settings["dbname"],
                                     user=settings["user"],
                                     host=settings["host"])

target_language = settings["target_language"]
logging.info("target language: %s" % (target_language))

conn = psycopg2.connect("dbname='" + settings["dbname"] + "' user='******' host='" + settings["host"] + "'")

#create workers pool
for i in range(num_worker_threads):
    t = threading.Thread(target=worker)
    t.daemon = True
    t.start()
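The snippet ends right after starting the workers; presumably the main thread then feeds the queue and blocks until it drains, along these lines (the assignments iterable is a placeholder):

for item in assignments:  # placeholder iterable of work items
    q.put(item)
q.join()  # blocks until every queued item has been marked task_done()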
Example #9
class DbPool(object):
	"""DB class that creates connections transparently. Thread-safe: every
	thread gets its own database connection.
	"""

	def __init__(self, config, min_connections=1, max_connections=5):
		"""Configures the Db, connection is not created yet.
		
		@param config: instance of RawConfigParser or subclass.
		@param min_connections: minimum connections in pool
		@param max_connections: maximum allowed connections in pool
		"""

		self.host = config.get("database", "host")
		self.port = config.getint("database", "port")
		self.user = config.get("database", "user")
		self.password = config.get("database", "password")
		self.db_name = config.get("database", "dbname")
		self.min_connections = min_connections
		self.max_connections = max_connections

		self.pool = PersistentConnectionPool(
			minconn = self.min_connections,
			maxconn = self.max_connections,
			host = self.host,
			port = self.port,
			user = self.user,
			password = self.password,
			database = self.db_name)

	def cursor(self, **kwargs):
		"""Creates and returns cursor for current thread's connection.
		Cursor is a "dict" cursor, so you can access the columns by
		names (not just indices), e.g.:

		cursor.execute("SELECT id, name FROM ... WHERE ...", sql_args)
		row = cursor.fetchone()
		id = row['id']
		
		Server-side cursors (named cursors) should be closed explicitly.
		
		@param kwargs: currently string parameter 'name' is supported.
		Named cursors are for server-side cursors, which
		are useful when fetching result of a large query via fetchmany()
		method. See http://initd.org/psycopg/docs/usage.html#server-side-cursors
		"""
		return self.connection().cursor(cursor_factory=DictCursor, **kwargs)
	
	def connection(self):
		"""Return connection for this thread"""
		return self.pool.getconn()

	def commit(self):
		"""Commit all the commands in this transaction in this thread's
		connection. If errors (e.g. duplicate key) arose, this will
		cause transaction rollback.
		"""
		self.connection().commit()

	def rollback(self):
		"""Rollback last transaction on this thread's connection"""
		self.connection().rollback()
	
	def putconn(self):
		"""Put back the connection used by this thread. Necessary when a
		spawned thread finishes, otherwise new threads won't get a connection
		once the pool is depleted."""
		conn = self.connection()
		self.pool.putconn(conn)
	
	def close(self):
		"""Close connection."""
		self.connection().close()
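A usage sketch for DbPool, assuming an INI file with the [database] section the constructor reads (the filename and the users table are hypothetical):

from ConfigParser import RawConfigParser  # Python 2, matching the snippet

config = RawConfigParser()
config.read('db.ini')  # hypothetical config file

db = DbPool(config)
cursor = db.cursor()
cursor.execute("SELECT id, name FROM users WHERE active = %s", (True,))
for row in cursor:
    print(row['id'], row['name'])
db.commit()
db.putconn()  # give the connection back before this thread exits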
Example #11
class Database( object ):
  ID_BITS = 128 # number of bits within an id
  ID_DIGITS = "0123456789abcdefghijklmnopqrstuvwxyz"

  # caching Notebooks causes problems because different users have different read_write/owner values
  CLASSES_NOT_TO_CACHE = ( Notebook, )

  def __init__( self, connection = None, cache = None, host = None, ssl_mode = None, data_dir = None ):
    """
    Create a new database and return it.

    @type connection: existing connection object with cursor()/close()/commit() methods, or NoneType
    @param connection: database connection to use (optional, defaults to making a connection pool)
    @type cache: cmemcache.Client or something with a similar API, or NoneType
    @param cache: existing memory cache to use (optional, defaults to making a cache)
    @type host: unicode or NoneType
    @param host: hostname of PostgreSQL database, or None to use a local SQLite database
    @type ssl_mode: unicode or NoneType
    @param ssl_mode: SSL mode for the database connection, one of "disallow", "allow", "prefer", or
                     "require". ignored if host is None
    @type data_dir: unicode or NoneType
    @param data_dir: directory in which to store data (defaults to a reasonable directory). ignored
                     if host is not None
    @rtype: Database
    @return: newly constructed Database
    """
    # This tells PostgreSQL to give us timestamps in UTC. I'd use "set timezone" instead, but that
    # makes SQLite angry.
    os.putenv( "PGTZ", "UTC" )

    if host is None:
      #from pysqlite2 import dbapi2 as sqlite
      import sqlite3 as sqlite
      from datetime import datetime
      from pytz import utc

      TIMESTAMP_PATTERN = re.compile( "^(\d\d\d\d)-(\d\d)-(\d\d) (\d\d):(\d\d):(\d\d).(\d+)(?:\+\d\d:\d\d$)?" )
      MICROSECONDS_PER_SECOND = 1000000

      def convert_timestamp( value ):
        ( year, month, day, hours, minutes, seconds, fractional_seconds ) = \
          TIMESTAMP_PATTERN.search( value ).groups( 0 )

        # convert fractional seconds (with an arbitrary number of decimal places) to microseconds
        microseconds = int( fractional_seconds )
        while microseconds >= MICROSECONDS_PER_SECOND:  # datetime requires microseconds < 1,000,000
          fractional_seconds = fractional_seconds[ : -1 ]
          microseconds = int( fractional_seconds or 0 )

        # ignore time zone in timestamp and assume UTC
        return datetime(
          int( year ), int( month ), int( day ),
          int( hours ), int( minutes ), int( seconds ), int( microseconds ),
          utc,
        )

      sqlite.register_converter( "boolean", lambda value: value in ( "t", "True", "true" ) and True or False )
      sqlite.register_converter( "timestamp", convert_timestamp )

      if connection:
        self.__connection = connection
      else:
        if data_dir is None:
          if sys.platform.startswith( "win" ):
            data_dir = os.path.join( os.environ.get( "APPDATA" ), "Luminotes" )
          else:
            data_dir = os.path.join( os.environ.get( "HOME", "" ), ".luminotes" )

        data_filename = os.path.join( data_dir, "luminotes.db" )

        # if the user doesn't yet have their own luminotes.db file, make them an initial copy
        if os.path.exists( "luminotes.db" ):
          if not os.path.exists( data_dir ):
            import stat
            os.makedirs( data_dir, stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR )

          if not os.path.exists( data_filename ):
            import shutil
            shutil.copyfile( "luminotes.db", data_filename )

        self.__connection = \
          Connection_wrapper( sqlite.connect( data_filename, detect_types = sqlite.PARSE_DECLTYPES, check_same_thread = False ) )
  
      self.__pool = None
      self.__backend = Persistent.SQLITE_BACKEND
      self.lock = threading.Lock() # multiple simultaneous client threads make SQLite angry
    else:
      import psycopg2 as psycopg
      from psycopg2.pool import PersistentConnectionPool

      # forcibly replace psycopg's connect() function with another function that returns the psycopg
      # connection wrapped in a class with a pending_saves member, used in save() and commit() below
      original_connect = psycopg.connect

      def connect( *args, **kwargs ):
        return Connection_wrapper( original_connect( *args, **kwargs ) )

      psycopg.connect = connect

      if connection:
        self.__connection = connection
        self.__pool = None
      else:
        self.__connection = None
        self.__pool = PersistentConnectionPool(
          1,  # minimum connections
          50, # maximum connections
          "host=%s sslmode=%s dbname=luminotes user=luminotes password=%s" % (
            host or "localhost",
            ssl_mode or "allow",
            os.getenv( "PGPASSWORD", "dev" )
          ),
        )

      self.__backend = Persistent.POSTGRESQL_BACKEND
      self.lock = None # PostgreSQL does its own synchronization

    self.__cache = cache

    try:
      if self.__cache is None:
        import cmemcache
        print "using memcached"
    except ImportError:
      return None

  def get_connection( self ):
    if self.__connection:
      return self.__connection
    else:
      return self.__pool.getconn()

  def __get_cache_connection( self ):
    if self.__cache is not None:
      return self.__cache

    try:
      import cmemcache
      return cmemcache.Client( [ "127.0.0.1:11211" ], debug = 0 )
    except ImportError:
      return None

  def unescape( self, sql_command ):
    """
    For backends that don't treat backslashes specially, un-double all backslashes in the given
    sql_command.
    """
    if self.__backend == Persistent.SQLITE_BACKEND:
      return sql_command.replace( "\\\\", "\\" )
    return sql_command

  @synchronized
  def save( self, obj, commit = True ):
    """
    Save the given object to the database.

    @type obj: Persistent
    @param obj: object to save
    @type commit: bool
    @param commit: True to automatically commit after the save
    """
    connection = self.get_connection()
    cursor = connection.cursor()

    cursor.execute( self.unescape( obj.sql_exists() ) )
    if cursor.fetchone():
      cursor.execute( self.unescape( obj.sql_update() ) )
    else:
      cursor.execute( self.unescape( obj.sql_create() ) )

    if isinstance( obj, self.CLASSES_NOT_TO_CACHE ):
      cache = None
    else:
      cache = self.__get_cache_connection()

    if commit:
      connection.commit()
      if cache:
        cache.set( obj.cache_key, obj )
    elif cache:
      # no commit yet, so don't touch the cache
      connection.pending_saves.append( obj )

  @synchronized
  def commit( self ):
    connection = self.get_connection()
    connection.commit()

    # save any pending saves to the cache
    cache = self.__get_cache_connection()

    if cache:
      for obj in connection.pending_saves:
        cache.set( obj.cache_key, obj )

      connection.pending_saves = []

  @synchronized
  def rollback( self ):
    connection = self.get_connection()
    connection.rollback()

  def load( self, Object_type, object_id, revision = None ):
    """
    Load the object corresponding to the given object id from the database and return it, or None if
    the object_id is unknown. If a revision is provided, a specific revision of the object will be
    loaded.

    @type Object_type: type
    @param Object_type: class of the object to load 
    @type object_id: unicode
    @param object_id: id of the object to load
    @type revision: int or NoneType
    @param revision: revision of the object to load (optional)
    @rtype: Object_type or NoneType
    @return: loaded object, or None if no match
    """
    if revision or Object_type in self.CLASSES_NOT_TO_CACHE:
      cache = None
    else:
      cache = self.__get_cache_connection()

    if cache: # don't bother caching old revisions
      obj = cache.get( Persistent.make_cache_key( Object_type, object_id ) )
      if obj:
        return obj

    obj = self.select_one( Object_type, Object_type.sql_load( object_id, revision ) )
    if obj and cache:
      cache.set( obj.cache_key, obj )

    return obj

  @synchronized
  def select_one( self, Object_type, sql_command, use_cache = False ):
    """
    Execute the given sql_command and return its results in the form of an object of Object_type,
    or None if there was no match.

    @type Object_type: type
    @param Object_type: class of the object to load 
    @type sql_command: unicode
    @param sql_command: SQL command to execute
    @type use_cache: bool
    @param use_cache: whether to look for and store objects in the cache
    @rtype: Object_type or NoneType
    @return: loaded object, or None if no match
    """
    if not use_cache or Object_type in self.CLASSES_NOT_TO_CACHE:
      cache = None
    else:
      cache = self.__get_cache_connection()

    if cache:
      cache_key = sha.new( sql_command ).hexdigest()
      obj = cache.get( cache_key )
      if obj:
        return obj

    connection = self.get_connection()
    cursor = connection.cursor()

    cursor.execute( self.unescape( sql_command ) )

    row = self.__row_to_unicode( cursor.fetchone() )
    if not row:
      return None

    if Object_type in ( tuple, list ):
      obj = Object_type( row )
    else:
      obj = Object_type( *row )

    if obj and cache:
      cache.set( cache_key, obj )

    return obj

  @synchronized
  def select_many( self, Object_type, sql_command ):
    """
    Execute the given sql_command and return its results in the form of a list of objects of
    Object_type.

    @type Object_type: type
    @param Object_type: class of the object to load 
    @type sql_command: unicode
    @param sql_command: SQL command to execute
    @rtype: list of Object_type
    @return: loaded objects
    """
    connection = self.get_connection()
    cursor = connection.cursor()

    cursor.execute( self.unescape( sql_command ) )

    objects = []
    row = self.__row_to_unicode( cursor.fetchone() )

    while row:
      if Object_type in ( tuple, list ):
        obj = Object_type( row )
      else:
        obj = Object_type( *row )

      objects.append( obj )
      row = self.__row_to_unicode( cursor.fetchone() )

    return objects

  def __row_to_unicode( self, row ):
    if row is None:
      return None

    return [ isinstance( item, str ) and unicode( item, encoding = "utf8" ) or item for item in row ]

  @synchronized
  def execute( self, sql_command, commit = True ):
    """
    Execute the given sql_command.

    @type sql_command: unicode
    @param sql_command: SQL command to execute
    @type commit: bool
    @param commit: True to automatically commit after the command
    """
    connection = self.get_connection()
    cursor = connection.cursor()

    cursor.execute( self.unescape( sql_command ) )

    if commit:
      connection.commit()

  @synchronized
  def execute_script( self, sql_commands, commit = True ):
    """
    Execute the given sql_commands.

    @type sql_command: unicode
    @param sql_command: multiple SQL commands to execute
    @type commit: bool
    @param commit: True to automatically commit after the command
    """
    connection = self.get_connection()
    cursor = connection.cursor()

    if self.__backend == Persistent.SQLITE_BACKEND:
      cursor.executescript( sql_commands )
    else:
      cursor.execute( self.unescape( sql_commands ) )

    if commit:
      connection.commit()

  def uncache_command( self, sql_command ):
    cache = self.__get_cache_connection()
    if not cache: return

    cache_key = sha.new( sql_command ).hexdigest()
    cache.delete( cache_key )

  def uncache( self, obj ):
    cache = self.__get_cache_connection()
    if not cache: return

    cache.delete( obj.cache_key )

  def uncache_many( self, Object_type, obj_ids ):
    cache = self.__get_cache_connection()
    if not cache: return

    for obj_id in obj_ids:
      cache.delete( Persistent.make_cache_key( Object_type, obj_id ) )

  @staticmethod
  def generate_id():
    int_id = random.getrandbits( Database.ID_BITS )

    base = len( Database.ID_DIGITS )
    digits = []

    while True:
      index = int_id % base
      digits.insert( 0, Database.ID_DIGITS[ index ] )
      int_id = int_id / base
      if int_id == 0:
        break

    return "".join( digits )

  @synchronized
  def next_id( self, Object_type, commit = True ):
    """
    Generate the next available object id and return it.

    @type Object_type: type
    @param Object_type: class of the object that the id is for
    @type commit: bool
    @param commit: True to automatically commit after storing the next id
    """
    connection = self.get_connection()
    cursor = connection.cursor()

    # generate a random id, but on the off-chance that it collides with something else already in
    # the database, try again
    next_id = Database.generate_id()
    cursor.execute( self.unescape( Object_type.sql_id_exists( next_id ) ) )

    while cursor.fetchone() is not None:
      next_id = Database.generate_id()
      cursor.execute( self.unescape( Object_type.sql_id_exists( next_id ) ) )

    # save a new object with the next_id to the database
    obj = Object_type( next_id )
    cursor.execute( self.unescape( obj.sql_create() ) )

    if commit:
      connection.commit()

    return next_id

  @synchronized
  def close( self ):
    """
    Shutdown the database.
    """
    if self.__connection:
      self.__connection.close()

    if self.__pool:
      self.__pool.closeall()

  backend = property( lambda self: self.__backend )
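A hedged usage sketch for the Database class above; User stands in for a Persistent subclass providing the sql_exists()/sql_create()/sql_update()/sql_load() methods the class calls:

db = Database(host="db.example.com", ssl_mode="require")  # placeholder host

user_id = db.next_id(User)   # allocates a collision-free id and persists a stub
user = db.load(User, user_id)
user.name = u"alice"
db.save(user)                # commits and refreshes the memcache entry
db.close()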
Example #12
    def __init__(self):

        routes = [
            url(r"/joukkoliikenne/kutsujoukkoliikenne/$",
                handlers.kutsuliikenne.IndexHandler,
                name='index'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/katselu/([\d]*?)$",
                handlers.kutsuliikenne.ViewHandler,
                name='view'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/kartta$",
                handlers.kutsuliikenne.MapHandler,
                name='map'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/kirjaudu$",
                handlers.hallinta.LoginHandler,
                name='login'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/ulos$",
                handlers.hallinta.LogoutHandler,
                name='logout'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/vie/([a-z]*?)$",
                handlers.kutsuliikenne.ExportHandler,
                name='export'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/vie/([a-z]*?)/all$",
                handlers.kutsuliikenne.ExportHandler,
                name='export-all'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/vie/([a-z]*?)/all.zip$",
                handlers.kutsuliikenne.ExportHandler,
                name='export-all-zip'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/vie/([a-z]*?)/([\d]*?)$",
                handlers.kutsuliikenne.ExportHandler,
                name='export-item'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/muokkaa/$",
                handlers.hallinta.EditIndexHandler,
                name='muokkaa'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/uusi_kohde/$",
                handlers.hallinta.EditInfoHandler,
                name='muokkaa-uusi'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/muokkaa/([\d]*)/status$",
                handlers.hallinta.EditStatusHandler,
                name='muokkaa-status'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/muokkaa/([\d]*)/poista$",
                handlers.hallinta.EditDeleteHandler,
                name='muokkaa-poista'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/muokkaa/([\d]*)/kartta$",
                handlers.hallinta.EditMapHandler,
                name='muokkaa-kartta'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/muokkaa/([\d]*)/tiedot$",
                handlers.hallinta.EditInfoHandler,
                name='muokkaa-info'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/muokkaa/([\d]*)/valtuudet$",
                handlers.hallinta.EditAuthHandler,
                name='muokkaa-valtuudet'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta",
                handlers.admin.IndexHandler,
                name='admin'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/kayttaja",
                handlers.admin.UserHandler,
                name='admin-avain-uusi'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/kayttaja/([\d]*?)/paata_voimassaolo$",
                handlers.admin.DeleteUserHandler,
                name='admin-avain-poisto'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/kayttaja/([\d]*?)$",
                handlers.admin.UserHandler,
                name='admin-avain'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/ryhma",
                handlers.admin.GroupHandler,
                name='admin-ryhma-uusi'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/ryhma/([\d]*?)/paata_voimassaolo$",
                handlers.admin.DeleteGroupHandler,
                name='admin-ryhma-poisto'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/ryhma/([\d]*?)/poista_jasen/([\d]*?)",
                handlers.admin.DeleteGroupUserHandler,
                name='admin-ryhma-poistajasen'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/ryhma/([\d]*?)/lisaa_jasen$",
                handlers.admin.AddGroupUserHandler,
                name='admin-ryhma-lisaajasen'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/ryhma/([\d]*?)$",
                handlers.admin.GroupHandler,
                name='admin-ryhma'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/api/liikenne/([\w]*?)$",
                handlers.api.ExportAllHandler,
                name='api-all'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/api/liikenne/([\d]*)/([\w]*?)$",
                handlers.api.ExportItemHandler,
                name='api-item'),
        ]
        settings = dict(
            template_path=os.path.join(os.path.dirname(__file__), "templates"),
            static_path=os.path.join(os.path.dirname(__file__), "static"),
            xsrf_cookies=True,
            debug=True,
            static_url_prefix='/joukkoliikenne/kutsujoukkoliikenne/static/',
            login_url="/joukkoliikenne/kutsujoukkoliikenne/kirjaudu",
            cookie_secret="|_oveO?@Re,982Zh2|08wX$g%We8*&C0I1D_bWKd6|8Sh*Nr.2=10:A?941pZ;D")
        tornado.web.Application.__init__(self, routes, **settings)

        assert options.dbhost
        assert options.dbport
        assert options.dbname
        assert options.dbuser
        assert options.dbpasswd
        self.dbconn = PersistentConnectionPool(1,
                                               50,
                                               host=options.dbhost,
                                               port=options.dbport,
                                               dbname=options.dbname,
                                               user=options.dbuser,
                                               password=options.dbpasswd)

        self.security = livibetasecurity.LiViBetaSecurity(self.dbconn)
        self.redis = redis.StrictRedis(host='localhost',
                                       port=6379,
                                       db=0,
                                       decode_responses=True)

        self.fieldtrans = translations.fieldtrans
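A hypothetical handler showing how the pool created above would typically be reached from request code; the handler body is a sketch, not part of the original application:

import tornado.web

class PingHandler(tornado.web.RequestHandler):  # hypothetical handler
    def get(self):
        conn = self.application.dbconn.getconn()
        try:
            cur = conn.cursor()
            cur.execute("SELECT 1")
            self.write({"ok": cur.fetchone()[0]})
        finally:
            self.application.dbconn.putconn(conn)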
Example #13
import psycopg2
from psycopg2.pool import PersistentConnectionPool
from settings import Config
import threading

pool = PersistentConnectionPool(5, 200, user='******', password='******', dbname='Api', host="192.168.1.41", port="5432")



from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base

engine = create_engine('postgresql+psycopg2://admin:******@192.168.1.41:5432/Api', pool_size=100, pool_recycle=5, pool_timeout=180, pool_pre_ping=True, max_overflow=0)

session = scoped_session(sessionmaker(autocommit=False,
                                         autoflush=False,
                                         bind=engine))
Base = declarative_base()
Base.query = session.query_property()
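A sketch of how the SQLAlchemy half above is typically used; the Stop model is hypothetical, and note that the psycopg2 pool and the engine hold separate connections, so the two halves do not share transactions:

from sqlalchemy import Column, Integer, String

class Stop(Base):  # hypothetical model
    __tablename__ = 'stops'
    id = Column(Integer, primary_key=True)
    name = Column(String)

first = Stop.query.first()  # works via Base.query = session.query_property()
session.remove()            # return this thread's session when finished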
Example #14
            #print "answers ", len(assgnmnt.answers)
            #print result

        conn.close()


logging.info("get assignments (multithreaded) from MTurk - START")

target_language = settings["target_language"]
logging.info("target language: %s" % (target_language))

lock = threading.RLock()

conn_pool = PersistentConnectionPool(10,
                                     20,
                                     database=settings["dbname"],
                                     user=settings["user"],
                                     host=settings["host"])

# Initialize a pool, 5 threads in this case
pool = workerpool.WorkerPool(size=10)

try:
    conn = psycopg2.connect("dbname='" + settings["dbname"] + "' user='******' host='" + settings["host"] + "'")
    logging.info("successfully connected to database")
except psycopg2.Error:
    logging.error("unable to connect to the database")

# Loop over HITs and create a job to get assignments
Example #15
import uwsgi
import psycopg2
from psycopg2.pool import PersistentConnectionPool
import simplejson
from sphinxapi import *
from urlparse import urlparse, urlsplit, parse_qs
import re

COMMON_HEADERS = [('Content-Type', 'application/json'),
                  ('Access-Control-Allow-Origin', '*'),
                  ('Access-Control-Allow-Headers',
                   'Requested-With,Content-Type')]

pool = PersistentConnectionPool(1, 20, "dbname='haltes'")

#update timingpoint set latitude = CAST(ST_Y(the_geom) AS NUMERIC(9,7)), longitude = CAST(ST_X(the_geom) AS NUMERIC(8,7)) FROM (select ST_Transform(st_setsrid(st_makepoint(locationx_ew, locationy_ns), 28992), 4326) AS the_geom from timingpoint as t2 where t2.timingpointcode = timingpointcode) AS W;


def notfound(start_response):
    start_response('404 File Not Found',
                   COMMON_HEADERS + [('Content-length', '2')])
    yield '[]'


def searchStops(query):
    reply = {
        'Columns': [
            'TimingPointTown', 'TimingPointName', 'Name', 'Latitude',
            'Longitude'
        ],
        'Rows': []