def pool():
    from memsql.common.connection_pool import ConnectionPool
    return ConnectionPool()

class RandomAggregatorPool(object):
    """ An automatic fail-over connection pool.

    This sits one layer above the connection pool. Its purpose is to
    choose a random aggregator and keep using it while it is available;
    if it becomes unavailable, it fails over to another aggregator. The
    class maintains the list of aggregators by periodically calling
    `SHOW AGGREGATORS`.

    Note: If you point this class at a MemSQL Singlebox instance, it
    will still work, but all connections will just be made to the
    singlebox node.
    """

    def __init__(self, host, port, user='******', password='', database='information_schema'):
        """ Initialize the RandomAggregatorPool with connection
        information for an aggregator in a MemSQL Distributed System.

        All aggregator connections will share the same user/password/database.
        """
        self.logger = logging.getLogger('memsql.random_aggregator_pool')
        self._pool = ConnectionPool()
        self._refresh_aggregator_list = memoize(30)(self._update_aggregator_list)
        self._lock = threading.RLock()

        self._primary_aggregator = (host, port)
        self._user = user
        self._password = password
        self._database = database
        self._aggregators = []
        self._aggregator = None
        self._master_aggregator = None

    def connect(self):
        """ Returns an aggregator connection, and periodically updates the aggregator list. """
        conn = self._connect()
        self._refresh_aggregator_list(conn)
        return conn

    def connect_master(self):
        if self._master_aggregator is None:
            with self._pool_connect(self._primary_aggregator) as conn:
                self._update_aggregator_list(conn)
                conn.expire()
        try:
            return self._pool_connect(self._master_aggregator)
        except PoolConnectionException:
            return None

    def close(self):
        self._pool.close()

    def _pool_connect(self, agg):
        """ `agg` should be (host, port)
            Returns a live connection from the connection pool
        """
        return self._pool.connect(agg[0], agg[1], self._user, self._password, self._database)

    def _connect(self):
        """ Returns an aggregator connection. """
        with self._lock:
            if self._aggregator:
                try:
                    return self._pool_connect(self._aggregator)
                except PoolConnectionException:
                    self._aggregator = None

            if not len(self._aggregators):
                with self._pool_connect(self._primary_aggregator) as conn:
                    self._update_aggregator_list(conn)
                    conn.expire()

            random.shuffle(self._aggregators)

            last_exception = None
            for aggregator in self._aggregators:
                self.logger.debug('Attempting connection with %s:%s' % (aggregator[0], aggregator[1]))

                try:
                    conn = self._pool_connect(aggregator)
                    # connection successful!
                    self._aggregator = aggregator
                    return conn
                except PoolConnectionException as e:
                    # connection error
                    last_exception = e
            else:
                # bad news bears...  try again later
                self._aggregator = None
                self._aggregators = []

                raise last_exception

    def _update_aggregator_list(self, conn):
        try:
            rows = conn.query('SHOW AGGREGATORS')
        except DatabaseError as e:
            if e.args[0] == errorcodes.ER_DISTRIBUTED_NOT_AGGREGATOR:
                # connected to memsql singlebox
                self._aggregators = [self._primary_aggregator]
                self._master_aggregator = self._primary_aggregator
            else:
                raise
        else:
            with self._lock:
                self._aggregators = []
                for row in rows:
                    if row.Host == '127.0.0.1':
                        # this is the aggregator we are connecting to
                        row['Host'] = conn.connection_info()[0]
                    if int(row.Master_Aggregator) == 1:
                        self._master_aggregator = (row.Host, row.Port)
                    self._aggregators.append((row.Host, row.Port))

            assert len(self._aggregators) > 0, "Failed to retrieve a list of aggregators"

        self.logger.debug('Aggregator list is updated to %s. Current aggregator is %s.' % (self._aggregators, self._aggregator))
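
# A minimal usage sketch (not from the original source): the host, port and the
# 'root' user below are assumptions. The pool picks a random aggregator and keeps
# using it while it is reachable; the aggregator list is refreshed at most once
# every 30 seconds by the memoized _refresh_aggregator_list call.
if __name__ == '__main__':
    aggregators = RandomAggregatorPool('127.0.0.1', 3306, user='root', password='')
    try:
        with aggregators.connect() as conn:
            for row in conn.query('SHOW DATABASES'):
                print(row)
    finally:
        aggregators.close()
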
class SQLUtility(object):
    def __init__(self):
        self._pool = ConnectionPool()
        self._db_args = None
        self._tables = {}

    ###############################
    # Public Interface

    def connect(self,
                host='127.0.0.1',
                port=3306,
                user='******',
                password='',
                database=None):
        """ Connect to the database specified """

        if database is None:
            raise exceptions.RequiresDatabase()

        self._db_args = {
            'host': host,
            'port': port,
            'user': user,
            'password': password,
            'database': database
        }
        with self._db_conn() as conn:
            conn.query('SELECT 1')
        return self

    def disconnect(self):
        self._pool.close()

    def setup(self):
        """ Initialize the required tables in the database """
        with self._db_conn() as conn:
            for table_defn in self._tables.values():
                conn.execute(table_defn)
        return self

    def destroy(self):
        """ Destroy the SQLStepQueue tables in the database """
        with self._db_conn() as conn:
            for table_name in self._tables:
                conn.execute('DROP TABLE IF EXISTS %s' % table_name)
        return self

    def ready(self):
        """ Returns True if the tables have been setup, False otherwise """
        with self._db_conn() as conn:
            tables = [
                row.t for row in conn.query(
                    '''
                SELECT table_name AS t FROM information_schema.tables
                WHERE table_schema=%s
            ''', self._db_args['database'])
            ]
        return all([table_name in tables for table_name in self._tables])

    ###############################
    # Protected Interface

    def _define_table(self, table_name, table_definition):
        self._tables[table_name] = table_definition

    def _db_conn(self):
        if self._db_args is None:
            raise exceptions.NotConnected()
        return self._pool.connect(**self._db_args)
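
# A hedged sketch of how SQLUtility is intended to be subclassed (the table
# name, schema and the 'example' database below are assumptions, not part of
# the original source): subclasses register their DDL via _define_table(), and
# callers chain connect().setup(), then check ready() before using the tables.
class VisitCounter(SQLUtility):
    def __init__(self):
        super(VisitCounter, self).__init__()
        self._define_table('visits', '''
            CREATE TABLE IF NOT EXISTS visits (
                page VARCHAR(255) PRIMARY KEY,
                hits BIGINT NOT NULL DEFAULT 0
            )''')

# counter = VisitCounter().connect(host='127.0.0.1', database='example').setup()
# assert counter.ready()
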
class PooPooApi(object):
    def __init__(self):
        self.pool = ConnectionPool()

    def connect(self):
        host = os.getenv("DBHOST", "127.1")
        return self.pool.connect(host, 3306, "root", "", "poopoobanana")

    def AddUser(self, name):
        try:
            with self.connect() as conn:
                return conn.execute("""insert into users(user_name) values(%s)""", name)
        except Exception as e:
            raise Exception("user exists (%s)" % str(e))

    def Login(self, user, password):
        user_id = self.GetUserId(user)
        if user_id is None:
            return self.AddUser(user)
        return user_id

    def GetUserId(self, name):
        with self.connect() as conn:
            rows = conn.query("""select user_id from users where user_name = %s""", name)
        if len(rows) == 0:
            return None
        return int(rows[0]["user_id"])

    def AddAd(self, name, cost, ad_data):
        try:
            with self.connect() as conn:
                return conn.execute("""insert into ads(ad_name, ad_cost, ad_data) values (%s,%s,%s)""", name, str(cost), simplejson.dumps(ad_data))
        except Exception as e:
            raise Exception("ad exists (%s)" % str(e))

    def GetAd(self, name):
        with self.connect() as conn:
            rows = conn.query("""select * from ads where ad_name = %s""", name)
        if len(rows) == 0:
            return None
        result = simplejson.loads(rows[0]["ad_data"])
        result["ad_id"] = int(rows[0]["ad_id"])
        result["ad_cost"] = int(rows[0]["ad_cost"])
        return result

    def AddEvent(self, name, user_id, data):
        if "points" in data:
            points = str(data["points"])
            del data["points"]
        else:
            points = "0"
        with self.connect() as conn:
            conn.execute("""insert into events(event_name, user_id, event_points, event_data) values (%s, %s, %s, %s)""", name, user_id, points, simplejson.dumps(data))

    def GetUserPoints(self, ads_only, user_id=None, limit=None):
        q = """select users.user_name, users.user_id, %(total)s ifnull(sum(ads.ad_cost), 0) as points
               from users %(events_joins)s
                    left join ad_trophy_case on users.user_id = ad_trophy_case.user_id
                    left join ads on ad_trophy_case.ad_id = ads.ad_id """
        if not ads_only:
            dta = {"total": "ifnull(sum(events.event_points), 0) -",
                   "events_joins": "left join events on users.user_id = events.user_id"}
        else:
            dta = {"total": "",
                   "events_joins": ""}
        q = q % dta
        if user_id is None:
            if ads_only:
                q += "where ad_trophy_case.ad_watched = 1 "
            q += "group by users.user_id order by points desc "
            if limit is not None:
                q += "limit %d" % limit
        else:
            q += "where users.user_id = %d" % user_id
        with self.connect() as conn:
            rows = conn.query(q)
        if user_id is not None:
            return rows[0]
        return rows

    def Purchase(self, ad_id, user_id):
        pts = int(self.GetUserPoints(False, user_id)["points"])
        with self.connect() as conn:
            cost = int(conn.query("select ad_cost from ads where ad_id = %d" % ad_id)[0]["ad_cost"])
            if pts < cost:
                return False
            # the insert must run while the pooled connection is still checked out
            try:
                conn.query("insert into ad_trophy_case(user_id,ad_id,ad_watched) values (%d,%d,0)" % (user_id, ad_id))
            except Exception as e:
                raise Exception("already purchased (%s)" % str(e))
            return True

    def Watch(self, ad_id, user_id):
        with self.connect() as conn:
            conn.execute("update ad_trophy_case set ad_watched = 1 where ad_id = %d and user_id = %d" % (ad_id, user_id))

    def Library(self, user_id):
        q = """select * from
               ad_trophy_case join ads
               on ad_trophy_case.ad_id = ads.ad_id
               where ad_trophy_case.user_id = %d""" % user_id
        with self.connect() as conn:
            rows = conn.query(q)
        result = []
        for r in rows:
            result.append(simplejson.loads(r["ad_data"]))
            result[-1]["ad_name"] = r["ad_name"]
            result[-1]["ad_id"] = int(r["ad_id"])
            result[-1]["ad_cost"] = int(r["ad_cost"])
            result[-1]["watched"] = r["ad_watched"] == "1"
        return result
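
# A hedged usage sketch (assumed, not from the original source): it presumes the
# poopoobanana database already contains the users, ads, events and
# ad_trophy_case tables referenced above, and that DBHOST points at a reachable
# MemSQL node.
api = PooPooApi()
api.Login('alice', '')                     # inserts the user on first login
user_id = api.GetUserId('alice')
api.AddEvent('signup_bonus', user_id, {'points': 100})
print(api.GetUserPoints(False, user_id=user_id))
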
class Node(object):
    def __init__(self, node_row):
        self.update_from_node(node_row)
        self.alias = None
        self._pool = ConnectionPool()

    def update_from_node(self, node):
        self.id = node.id
        self.host = node.host
        self.port = node.port

    def update_alias(self, connection_pool, alias):
        self.alias = alias

        conn = None
        try:
            conn = connection_pool.connect_master()

            if conn:
                conn.execute('''
                    INSERT INTO node_alias (node_id, alias)
                    VALUES (%s, %s)
                    ON DUPLICATE KEY UPDATE alias=VALUES(alias)
                ''', self.id, alias)
        finally:
            if conn:
                conn.close()

    def connect(self):
        return self._pool.connect(
            host=self.host,
            port=self.port,
            user="******",
            password="",
            database="information_schema")

    def status(self):
        with self.connect() as conn:
            rows = conn.query('SHOW STATUS EXTENDED')

        for row in rows:
            name = row.Variable_name
            try:
                value = self._parse_value(row.Value)
            except ValueError:
                continue
            yield (name, value)

    STATUS_CONSTS = re.compile(r"ms|MB|KB", re.I)

    def _parse_value(self, value):
        if self.STATUS_CONSTS.search(value):
            return float(value.split(" ")[0])
        return float(value)

    def variables(self):
        with self.connect() as conn:
            rows = conn.query('SHOW VARIABLES')
        for row in rows:
            yield (row.Variable_name, row.Value)
        for name, path in self.directories().items():
            yield (name, path)

    def directories(self):
        with self.connect() as conn:
            rows = conn.query("SHOW STATUS EXTENDED LIKE '%_directory'")
        return dict([(row.Variable_name, row.Value) for row in rows])
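
# A hedged usage sketch (NodeRow is an assumed stand-in for whatever row object
# the caller normally passes in; it only needs id, host and port attributes):
# it builds a Node against a local MemSQL instance and prints the numeric
# status counters that _parse_value accepts, skipping everything else.
import collections

NodeRow = collections.namedtuple('NodeRow', ['id', 'host', 'port'])
node = Node(NodeRow(id=1, host='127.0.0.1', port=3306))
for name, value in node.status():
    print('%s = %r' % (name, value))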