# Imports and module state assumed by this snippet (ini_file, conn_pool_min
# and conn_pool_max are module-level settings defined elsewhere).
from configparser import ConfigParser

from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
from psycopg2.extras import DictCursor
from psycopg2.pool import PersistentConnectionPool

conn_pool = None  # created lazily on first call


def get_connection_pooled():
    """
    :rtype: connection
    """
    global conn_pool
    if conn_pool is None:
        CONFIG = ConfigParser()
        CONFIG.read(ini_file)
        conn_pool = PersistentConnectionPool(conn_pool_min,
                                             conn_pool_max,
                                             CONFIG["Postgres"]["db"],
                                             cursor_factory=DictCursor)
    conn = conn_pool.getconn()
    conn.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
    return conn
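The snippet hands connections out but never takes them back. A minimal sketch of the matching release step, assuming the same module-level conn_pool (PersistentConnectionPool keys connections by thread, so putconn() can find the current thread's connection on its own):

def put_connection_pooled(conn=None):
    """Return this thread's connection to the pool (hypothetical helper)."""
    if conn_pool is not None:
        # with PersistentConnectionPool, conn may be omitted; the pool
        # looks the connection up by the calling thread's ident
        conn_pool.putconn(conn)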
Example #2
    def init_app(self, app):
        self.app = app
        # read config
        db_args = {'minconn': app.config.get('DB_MIN_CONNECTIONS', 2),
                   'maxconn': app.config.get('DB_MAX_CONNECTIONS', 20),
                   'database': app.config.get('DB_NAME'),
                   'user': app.config.get('DB_USER'),
                   'password': app.config.get('DB_PASSWORD', ''),
                   }
        if 'DB_HOST' in app.config:
            db_args['host'] = app.config.get('DB_HOST')
        if 'DB_PORT' in app.config:
            db_args['port'] = app.config.get('DB_PORT')

        self.pool = PersistentConnectionPool(**db_args)

        app.after_request(self.cleanup)

        got_request_exception.connect(self.bailout, app)
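init_app() registers self.cleanup and self.bailout but the snippet does not include them; a minimal sketch of what such hooks might look like (assumed, not from the original source), giving the thread's connection back to the pool after each request:

    def cleanup(self, response):
        # after_request hook: hand the current thread's connection back
        self.pool.putconn()
        return response

    def bailout(self, sender, exception=None, **extra):
        # on an unhandled exception, close the connection rather than reuse it
        self.pool.putconn(close=True)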
Example #3
    def get_connection(database_config_name, path_to_conf_file=None):
        global conn_pool, MAX_CONNECTIONS_IN_POOL

        if conn_pool is None:
            config = ConnectionManager.get_config(database_config_name, path_to_conf_file)

            conn_pool = PersistentConnectionPool(minconn=1, maxconn=MAX_CONNECTIONS_IN_POOL,
                                                 host=config['host'],
                                                 database=config['database'],
                                                 user=config['user'],
                                                 password=config['password'])
        got_connection = False
        while not got_connection:
            try:
                conn = conn_pool.getconn()
                #cur = conn.cursor(cursor_factory=cursor_type)
                got_connection = True
            except psycopg2.OperationalError as mess:
                print("OperationalError opening connection: %s" % mess)
                sleep(1)
            except AttributeError as mess:
                print("AttributeError opening connection: %s" % mess)
                sleep(1)
        return conn
Example #4
	def __init__(self, config, min_connections=1, max_connections=5):
		"""Configures the Db, connection is not created yet.
		
		@param config: instance of RawConfigParser or subclass.
		@param min_connections: minimum connections in pool
		@param max_connections: maximum allowed connections in pool
		"""

		self.host = config.get("database", "host")
		self.port = config.getint("database", "port")
		self.user = config.get("database", "user")
		self.password = config.get("database", "password")
		self.db_name = config.get("database", "dbname")
		self.min_connections = min_connections
		self.max_connections = max_connections

		self.pool = PersistentConnectionPool(
			minconn = self.min_connections,
			maxconn = self.max_connections,
			host = self.host,
			port = self.port,
			user = self.user,
			password = self.password,
			database = self.db_name)
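For context, a hypothetical caller (the class name Db, the file name and the section contents are assumptions; the [database] keys match the docstring above):

from configparser import RawConfigParser  # ConfigParser module on Python 2

config = RawConfigParser()
config.read("db.ini")     # expects a [database] section with host, port,
                          # user, password and dbname entries
db = Db(config)           # the pool is created eagerly in __init__
conn = db.pool.getconn()  # borrow a connection from the pool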
Example #5
        #print "thread: ", threading.currentThread().name

        conn = conn_pool.getconn()
        do_work(conn, item)
        q.task_done()


logging.info("get assignments (multithreaded) from MTurk - START")

num_worker_threads = 10

q = Queue.Queue()
mturk_conn = mturk.conn()
conn_pool = PersistentConnectionPool(num_worker_threads,
                                     num_worker_threads + 5,
                                     database=settings["dbname"],
                                     user=settings["user"],
                                     host=settings["host"])

target_language = settings["target_language"]
logging.info("target language: %s" % (target_language))

conn = psycopg2.connect("dbname='" + settings["dbname"] + "' user='******' host='" + settings["host"] + "'")

# create worker pool
for i in range(num_worker_threads):
    t = threading.Thread(target=worker)
    t.daemon = True
    t.start()
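The snippet stops after starting the workers; the usual continuation of this producer/consumer pattern (assumed here, not shown in the original) enqueues one item per HIT and blocks until the queue drains:

for hit in mturk_conn.get_all_hits():  # hypothetical producer loop
    q.put(hit)

q.join()  # block until every queued item has been marked task_done()
logging.info("get assignments (multithreaded) from MTurk - DONE")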
Example #6
  def __init__( self, connection = None, cache = None, host = None, ssl_mode = None, data_dir = None ):
    """
    Create a new database and return it.

    @type connection: existing connection object with cursor()/close()/commit() methods, or NoneType
    @param connection: database connection to use (optional, defaults to making a connection pool)
    @type cache: cmemcache.Client or something with a similar API, or NoneType
    @param cache: existing memory cache to use (optional, defaults to making a cache)
    @type host: unicode or NoneType
    @param host: hostname of PostgreSQL database, or None to use a local SQLite database
    @type ssl_mode: unicode or NoneType
    @param ssl_mode: SSL mode for the database connection, one of "disallow", "allow", "prefer", or
                     "require". ignored if host is None
    @type data_dir: unicode or NoneType
    @param data_dir: directory in which to store data (defaults to a reasonable directory). ignored
                     if host is not None
    @rtype: Database
    @return: newly constructed Database
    """
    # This tells PostgreSQL to give us timestamps in UTC. I'd use "set timezone" instead, but that
    # makes SQLite angry.
    os.putenv( "PGTZ", "UTC" )

    if host is None:
      #from pysqlite2 import dbapi2 as sqlite
      import sqlite3 as sqlite
      from datetime import datetime
      from pytz import utc

      TIMESTAMP_PATTERN = re.compile( r"^(\d\d\d\d)-(\d\d)-(\d\d) (\d\d):(\d\d):(\d\d).(\d+)(?:\+\d\d:\d\d$)?" )
      MICROSECONDS_PER_SECOND = 1000000

      def convert_timestamp( value ):
        ( year, month, day, hours, minutes, seconds, fractional_seconds ) = \
          TIMESTAMP_PATTERN.search( value ).groups( 0 )

        # convert fractional seconds (with an arbitrary number of decimal places) to microseconds
        microseconds = int( fractional_seconds )
        while microseconds >= MICROSECONDS_PER_SECOND:  # datetime needs 0..999999
          fractional_seconds = fractional_seconds[ : -1 ]
          microseconds = int( fractional_seconds or 0 )

        # ignore time zone in timestamp and assume UTC
        return datetime(
          int( year ), int( month ), int( day ),
          int( hours ), int( minutes ), int( seconds ), int( microseconds ),
          utc,
        )

      sqlite.register_converter( "boolean", lambda value: value in ( "t", "True", "true" ) and True or False )
      sqlite.register_converter( "timestamp", convert_timestamp )

      if connection:
        self.__connection = connection
      else:
        if data_dir is None:
          if sys.platform.startswith( "win" ):
            data_dir = os.path.join( os.environ.get( "APPDATA" ), "Luminotes" )
          else:
            data_dir = os.path.join( os.environ.get( "HOME", "" ), ".luminotes" )

        data_filename = os.path.join( data_dir, "luminotes.db" )

        # if the user doesn't yet have their own luminotes.db file, make them an initial copy
        if os.path.exists( "luminotes.db" ):
          if not os.path.exists( data_dir ):
            import stat
            os.makedirs( data_dir, stat.S_IXUSR | stat.S_IRUSR | stat.S_IWUSR )

          if not os.path.exists( data_filename ):
            import shutil
            shutil.copyfile( "luminotes.db", data_filename )

        self.__connection = \
          Connection_wrapper( sqlite.connect( data_filename, detect_types = sqlite.PARSE_DECLTYPES, check_same_thread = False ) )
  
      self.__pool = None
      self.__backend = Persistent.SQLITE_BACKEND
      self.lock = threading.Lock() # multiple simultaneous client threads make SQLite angry
    else:
      import psycopg2 as psycopg
      from psycopg2.pool import PersistentConnectionPool

      # forcibly replace psycopg's connect() function with another function that returns the psycopg
      # connection wrapped in a class with a pending_saves member, used in save() and commit() below
      original_connect = psycopg.connect

      def connect( *args, **kwargs ):
        return Connection_wrapper( original_connect( *args, **kwargs ) )

      psycopg.connect = connect

      if connection:
        self.__connection = connection
        self.__pool = None
      else:
        self.__connection = None
        self.__pool = PersistentConnectionPool(
          1,  # minimum connections
          50, # maximum connections
          "host=%s sslmode=%s dbname=luminotes user=luminotes password=%s" % (
            host or "localhost",
            ssl_mode or "allow",
            os.getenv( "PGPASSWORD", "dev" )
          ),
        )

      self.__backend = Persistent.POSTGRESQL_BACKEND
      self.lock = None # PostgreSQL does its own synchronization

    self.__cache = cache

    try:
      if self.__cache is None:
        import cmemcache
        print "using memcached"
    except ImportError:
      pass # cmemcache isn't installed; carry on without a memory cache
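Connection_wrapper is referenced throughout but defined elsewhere in this codebase; a minimal sketch of what it must provide, going only by the comment above (a pending_saves member plus delegation to the wrapped DB-API connection):

class Connection_wrapper( object ):
  def __init__( self, connection ):
    self.connection = connection
    self.pending_saves = [] # objects awaiting save(), used by save()/commit()

  def __getattr__( self, name ):
    # delegate cursor()/commit()/close()/etc. to the wrapped connection
    return getattr( self.connection, name )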
Example #7
    def __init__(self):

        routes = [
            url(r"/joukkoliikenne/kutsujoukkoliikenne/$",
                handlers.kutsuliikenne.IndexHandler,
                name='index'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/katselu/([\d]*?)$",
                handlers.kutsuliikenne.ViewHandler,
                name='view'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/kartta$",
                handlers.kutsuliikenne.MapHandler,
                name='map'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/kirjaudu$",
                handlers.hallinta.LoginHandler,
                name='login'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/ulos$",
                handlers.hallinta.LogoutHandler,
                name='logout'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/vie/([a-z]*?)$",
                handlers.kutsuliikenne.ExportHandler,
                name='export'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/vie/([a-z]*?)/all$",
                handlers.kutsuliikenne.ExportHandler,
                name='export-all'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/vie/([a-z]*?)/all.zip$",
                handlers.kutsuliikenne.ExportHandler,
                name='export-all-zip'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/vie/([a-z]*?)/([\d]*?)$",
                handlers.kutsuliikenne.ExportHandler,
                name='export-item'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/muokkaa/$",
                handlers.hallinta.EditIndexHandler,
                name='muokkaa'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/uusi_kohde/$",
                handlers.hallinta.EditInfoHandler,
                name='muokkaa-uusi'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/muokkaa/([\d]*)/status$",
                handlers.hallinta.EditStatusHandler,
                name='muokkaa-status'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/muokkaa/([\d]*)/poista$",
                handlers.hallinta.EditDeleteHandler,
                name='muokkaa-poista'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/muokkaa/([\d]*)/kartta$",
                handlers.hallinta.EditMapHandler,
                name='muokkaa-kartta'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/muokkaa/([\d]*)/tiedot$",
                handlers.hallinta.EditInfoHandler,
                name='muokkaa-info'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/muokkaa/([\d]*)/valtuudet$",
                handlers.hallinta.EditAuthHandler,
                name='muokkaa-valtuudet'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta",
                handlers.admin.IndexHandler,
                name='admin'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/kayttaja",
                handlers.admin.UserHandler,
                name='admin-avain-uusi'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/kayttaja/([\d]*?)/paata_voimassaolo$",
                handlers.admin.DeleteUserHandler,
                name='admin-avain-poisto'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/kayttaja/([\d]*?)$",
                handlers.admin.UserHandler,
                name='admin-avain'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/ryhma",
                handlers.admin.GroupHandler,
                name='admin-ryhma-uusi'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/ryhma/([\d]*?)/paata_voimassaolo$",
                handlers.admin.DeleteGroupHandler,
                name='admin-ryhma-poisto'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/ryhma/([\d]*?)/poista_jasen/([\d]*?)",
                handlers.admin.DeleteGroupUserHandler,
                name='admin-ryhma-poistajasen'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/ryhma/([\d]*?)/lisaa_jasen$",
                handlers.admin.AddGroupUserHandler,
                name='admin-ryhma-lisaajasen'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/hallinta/ryhma/([\d]*?)$",
                handlers.admin.GroupHandler,
                name='admin-ryhma'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/api/liikenne/([\w]*?)$",
                handlers.api.ExportAllHandler,
                name='api-all'),
            url(r"/joukkoliikenne/kutsujoukkoliikenne/api/liikenne/([\d]*)/([\w]*?)$",
                handlers.api.ExportItemHandler,
                name='api-item'),
        ]
        settings = dict(
            template_path=os.path.join(os.path.dirname(__file__), "templates"),
            static_path=os.path.join(os.path.dirname(__file__), "static"),
            xsrf_cookies=True,
            debug=True,
            static_url_prefix='/joukkoliikenne/kutsujoukkoliikenne/static/',
            login_url="/joukkoliikenne/kutsujoukkoliikenne/kirjaudu",
            cookie_secret=
            "|_oveO?@Re,982Zh2|08wX$g%We8*&C0I1D_bWKd6|8Sh*Nr.2=10:A?941pZ;D")
        tornado.web.Application.__init__(self, routes, **settings)

        assert options.dbhost
        assert options.dbport
        assert options.dbname
        assert options.dbuser
        assert options.dbpasswd
        self.dbconn = PersistentConnectionPool(1,
                                               50,
                                               host=options.dbhost,
                                               port=options.dbport,
                                               dbname=options.dbname,
                                               user=options.dbuser,
                                               password=options.dbpasswd)

        self.security = livibetasecurity.LiViBetaSecurity(self.dbconn)
        self.redis = redis.StrictRedis(host='localhost',
                                       port=6379,
                                       db=0,
                                       decode_responses=True)

        self.fieldtrans = translations.fieldtrans
Example #8
import psycopg2
from psycopg2.pool import PersistentConnectionPool
from settings import Config
import threading

pool = PersistentConnectionPool(5, 200, user='******', password='******', dbname='Api', host='192.168.1.41', port='5432')



from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base

engine = create_engine('postgresql+psycopg2://admin:******@192.168.1.41:5432/Api', pool_size=100, pool_recycle=5, pool_timeout=180, pool_pre_ping=True, max_overflow=0)

session = scoped_session(sessionmaker(autocommit=False,
                                         autoflush=False,
                                         bind=engine))
Base = declarative_base()
Base.query = session.query_property()
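Note that this module ends up with two independent pools: the psycopg2 PersistentConnectionPool above and the SQLAlchemy engine's own pool. A hypothetical request handler using the scoped session (not part of the original snippet):

from sqlalchemy import text

def handle_request():
    try:
        value = session.execute(text("SELECT 1")).scalar()
        session.commit()
        return value
    finally:
        session.remove()  # give the thread's session and connection back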
Example #9
            #print "answers ", len(assgnmnt.answers)
            #print result

        conn.close()


logging.info("get assignments (multithreaded) from MTurk - START")

target_language = settings["target_language"]
logging.info("target language: %s" % (target_language))

lock = threading.RLock()

conn_pool = PersistentConnectionPool(10,
                                     20,
                                     database=settings["dbname"],
                                     user=settings["user"],
                                     host=settings["host"])

# Initialize a pool, 10 threads in this case
pool = workerpool.WorkerPool(size=10)

try:
    conn = psycopg2.connect("dbname='" + settings["dbname"] + "' user='******' host='" + settings["host"] +
                            "'")
    logging.info("successfully connected to database")
except psycopg2.OperationalError:
    logging.error("unable to connect to the database")
    raise  # nothing below can run without a connection

# Loop over HITs and create a job to get assignments
Example #10
import uwsgi
import psycopg2
from psycopg2.pool import PersistentConnectionPool
import simplejson
from sphinxapi import *
from urlparse import urlparse, urlsplit, parse_qs
import re

COMMON_HEADERS = [('Content-Type', 'application/json'),
                  ('Access-Control-Allow-Origin', '*'),
                  ('Access-Control-Allow-Headers',
                   'Requested-With,Content-Type')]

pool = PersistentConnectionPool(1, 20, "dbname='haltes'")

#update timingpoint set latitude = CAST(ST_Y(the_geom) AS NUMERIC(9,7)), longitude = CAST(ST_X(the_geom) AS NUMERIC(8,7)) FROM (select ST_Transform(st_setsrid(st_makepoint(locationx_ew, locationy_ns), 28992), 4326) AS the_geom from timingpoint as t2 where t2.timingpointcode = timingpointcode) AS W;


def notfound(start_response):
    start_response('404 File Not Found',
                   COMMON_HEADERS + [('Content-length', '2')])
    yield '[]'


def searchStops(query):
    reply = {
        'Columns': [
            'TimingPointTown', 'TimingPointName', 'Name', 'Latitude',
            'Longitude'
        ],
        'Rows': []