Example #1
    def __init__(self, db_settings, max_queue_len=None):

        # There is no upper limit on the queue length by default
        if not max_queue_len:
            max_queue_len = -1

        db_settings = self._filter_pool_settings(db_settings)

        # Create delivery queue for database statements to be executed
        self._queue = DeliveryQueue(
            self._process_statement,
            drop_callback=self._on_queue_drop,
            max_queue_len=max_queue_len,
            # Max parallel statements; should match the connection pool size
            max_active_queue_len=db_settings['pool_size'],
            # How often to retry failed deliveries, in seconds
            retry_after_s=5)
        self._stats = DeliveryStats(STATEMENTS_LATE_LIMIT_SECONDS)

        self._drop_count = 0

        # Create db-pool with the provided arguments
        self._db_pool = self._create_db_pool(**db_settings)
        self._second_db_pool = None
        self._suspended = False
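A minimal standalone sketch of the same DeliveryQueue wiring, with a hypothetical processor in place of _process_statement. The import path and the retry-on-False contract are assumptions inferred from the snippets in this section, not confirmed API.

from delivery_queue import DeliveryQueue  # assumed module path

# Hypothetical processor: return True when the item was handled,
# False to have the queue retry it after retry_after_s seconds.
def process_item(obj):
    name, statement = obj
    print("executing %s: %s" % (name, statement))
    return True

# Hypothetical drop handler, matching the (obj, reason) signature used in the
# JsonReporter example below
def on_queue_drop(obj, reason):
    print("dropped %r (%s)" % (obj, reason))

queue = DeliveryQueue(process_item,
                      drop_callback=on_queue_drop,
                      max_queue_len=1000,
                      max_active_queue_len=10,
                      retry_after_s=5)
queue.deliver(('insert_event', "INSERT INTO events (name) VALUES ('login')"))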
Example #2
class JsonReporter(object):

    def __init__(self, host, port,
                 max_queue_len=2048,
                 max_active_queue_len=100,
                 retry_after_s=5,
                 start_suspended=False):

        self._link = ReporterJsonLinkClient(self._connection_made_int,
                                            self._received_message)
        self._queue = DeliveryQueue(self._link.sendMessage,
                                    drop_callback=self.on_drop,
                                    max_queue_len=max_queue_len,
                                    max_active_queue_len=max_active_queue_len,
                                    retry_after_s=retry_after_s,
                                    start_suspended=start_suspended)
        self._connector = reactor.connectTCP(host, port, self._link)

    def redirect(self, host, port):
        self._connector.stopConnecting()
        self._connector.disconnect()
        self._connector = reactor.connectTCP(host, port, self._link)

    def _callback(self, result):
        return True

    def _errorback(self, failure):
        return False

    def _received_message(self, msg):
        return True

    def _connection_made_int(self, protocol):
        try:
            protocol.transport.setTcpNoDelay(True)
            protocol.transport.setTcpKeepAlive(True)
            # Seconds of idle time before sending keepalive probes
            protocol.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, 120)
            # Interval in seconds between keepalive probes
            protocol.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, 1)
            # Failed keepalive probes before declaring the other end dead
            protocol.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, 5)
        except Exception:
            # Socket tuning is best-effort; not every transport exposes these options
            pass
        self._connection_made()

    def _connection_made(self):
        """Default implementation does nothing."""

    def deliver(self, event):
        self._queue.deliver(event)

    def report(self, event_name, payload):
        obj = (event_name, payload)
        self._queue.deliver(obj)

    def on_drop(self, obj, reason):
        pass
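A minimal usage sketch for the reporter above, assuming a Twisted reactor drives the process; the endpoint, event name, and payload values are placeholders.

from twisted.internet import reactor

reporter = JsonReporter('127.0.0.1', 8125,                  # placeholder endpoint
                        max_queue_len=4096,
                        retry_after_s=2)
reporter.report('user_login', {'user_id': 42})              # queued, sent once connected
reactor.callLater(60, reporter.redirect, '10.0.0.2', 8125)  # switch endpoints later
reactor.run()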
Example #3
    def __init__(self,
                 host,
                 port,
                 max_queue_len=2048,
                 max_active_queue_len=100,
                 retry_after_s=5,
                 start_suspended=False):

        self._link = ReporterJsonLinkClient(self._connection_made_int,
                                            self._received_message)
        self._queue = DeliveryQueue(self._link.sendMessage,
                                    drop_callback=self.on_drop,
                                    max_queue_len=max_queue_len,
                                    max_active_queue_len=max_active_queue_len,
                                    retry_after_s=retry_after_s,
                                    start_suspended=start_suspended)
        self._connector = reactor.connectTCP(host, port, self._link)
Example #4
    def test_deferred_always_with_drops(self):
        p = Processor('deferred_always')
        q = DeliveryQueue(p.accept,
                          max_queue_len=20,
                          max_active_queue_len=1,
                          retry_after_s=0.1)
        yield self._test_generic(q, range(50), p.get_result_list,
                                 [0] + range(30, 50))
Example #5
    def __init__(self, host, port,
                 max_queue_len=2048,
                 max_active_queue_len=100,
                 retry_after_s=5,
                 start_suspended=False):

        self._link = ReporterJsonLinkClient(self._connection_made_int,
                                            self._received_message)
        self._queue = DeliveryQueue(self._link.sendMessage,
                                    drop_callback=self.on_drop,
                                    max_queue_len=max_queue_len,
                                    max_active_queue_len=max_active_queue_len,
                                    retry_after_s=retry_after_s,
                                    start_suspended=start_suspended)
        self._connector = reactor.connectTCP(host, port, self._link)
Example #6
    def __init__(self, db_settings, max_queue_len=None):

        # There is no upper limit on the queue length by default
        if not max_queue_len:
            max_queue_len = -1

        db_settings = self._filter_pool_settings(db_settings)

        # Create delivery queue for database statements to be executed
        self._queue = DeliveryQueue(self._process_statement,
                                    drop_callback=self._on_queue_drop,
                                    max_queue_len=max_queue_len,
                                    # Max parallel statements; should match the connection pool size
                                    max_active_queue_len=db_settings['pool_size'],
                                    # How often to retry failed deliveries, in seconds
                                    retry_after_s=5)
        self._stats = DeliveryStats(STATEMENTS_LATE_LIMIT_SECONDS)

        self._drop_count = 0

        # Create db-pool with the provided arguments
        self._db_pool = self._create_db_pool(**db_settings)
        self._second_db_pool = None
        self._suspended = False
Example #7
class DbSink(object):
    def __init__(self, db_settings, max_queue_len=None):

        # There is no upper limit on the queue length by default
        if not max_queue_len:
            max_queue_len = -1

        db_settings = self._filter_pool_settings(db_settings)

        # Create delivery queue for database statements to be executed
        self._queue = DeliveryQueue(
            self._process_statement,
            drop_callback=self._on_queue_drop,
            max_queue_len=max_queue_len,
            # Max parallel statements; should match the connection pool size
            max_active_queue_len=db_settings['pool_size'],
            # How often to retry failed deliveries, in seconds
            retry_after_s=5)
        self._stats = DeliveryStats(STATEMENTS_LATE_LIMIT_SECONDS)

        self._drop_count = 0

        # Create db-pool with the provided arguments
        self._db_pool = self._create_db_pool(**db_settings)
        self._second_db_pool = None
        self._suspended = False

    @staticmethod
    def _on_connect(conn):
        log.info("New DB connection created")
        conn.cursor().execute(
            "set session transaction isolation level read uncommitted")
        conn.autocommit(True)

    def _create_db_pool(self, driver, host, port, user, password, dbname,
                        pool_size):
        return adbapi.ConnectionPool(driver,
                                     host=host,
                                     port=port,
                                     user=user,
                                     passwd=password,
                                     db=dbname,
                                     cp_openfun=self._on_connect,
                                     cp_reconnect=True,
                                     cp_min=1,
                                     cp_max=pool_size)

    @staticmethod
    def _filter_pool_settings(settings):
        settings = {
            'driver': settings.get('driver'),
            'host': settings.get('host'),
            'port': settings.get('port'),
            'user': settings.get('user'),
            'password': settings.get('password'),
            'dbname': settings.get('dbname'),
            'pool_size': settings.get('pool_size'),
        }
        if any(val is None for val in settings.values()):
            raise Exception('Some database parameter not specified (%s)' %
                            (str(settings)))

        return settings

    def get_queue_length(self):
        return self._queue.get_length()

    def get_stats(self):
        return self._stats.get_data()

    def reset_stats(self):
        return self._stats.reset()

    def execute(self, statement_name, statement):
        return self._queue.deliver((statement_name, statement))

    def direct_query(self, name, query):
        return self._inspect(name, self._db_pool.runQuery(query), True)

    def _process_statement(self, obj):
        """ Deliver queue's processor """
        # Return False so the queue retries later: db_pool is not ready or the sink is suspended
        if self._suspended or not self._db_pool:
            return False
        try:
            (name, statement) = obj
            return self._inspect(name, self._db_pool.runOperation(statement),
                                 False)
        except BaseException as e:
            return e
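A usage sketch for the sink above, assuming the settings dict supplies all seven keys checked by _filter_pool_settings; the connection values and the MySQLdb driver name are placeholders.

db_settings = {
    'driver': 'MySQLdb',   # placeholder; any DB-API module name accepted by adbapi
    'host': 'localhost',
    'port': 3306,
    'user': 'reporter',
    'password': 'secret',
    'dbname': 'events',
    'pool_size': 10,       # also becomes max_active_queue_len above
}
sink = DbSink(db_settings, max_queue_len=10000)
sink.execute('insert_event',
             "INSERT INTO events (name) VALUES ('login')")
print(sink.get_queue_length())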
Example #8
class DbSink(object):

    def __init__(self, db_settings, max_queue_len=None):

        # There is no upper limit on the queue length by default
        if not max_queue_len:
            max_queue_len = -1

        db_settings = self._filter_pool_settings(db_settings)

        # Create delivery queue for database statements to be executed
        self._queue = DeliveryQueue(self._process_statement,
                                    drop_callback=self._on_queue_drop,
                                    max_queue_len=max_queue_len,
                                    # Max parallel statements; should match the connection pool size
                                    max_active_queue_len=db_settings['pool_size'],
                                    # How often to retry failed deliveries, in seconds
                                    retry_after_s=5)
        self._stats = DeliveryStats(STATEMENTS_LATE_LIMIT_SECONDS)

        self._drop_count = 0

        # Create db-pool with the provided arguments
        self._db_pool = self._create_db_pool(**db_settings)
        self._second_db_pool = None
        self._suspended = False

    @staticmethod
    def _on_connect(conn):
        log.info("New DB connection created")
        conn.cursor().execute("set session transaction isolation level read uncommitted")
        conn.autocommit(True)

    def _create_db_pool(self, driver, host, port, user, password, dbname, pool_size):
        return adbapi.ConnectionPool(driver, host=host, port=port, user=user, passwd=password,
                                     db=dbname, cp_openfun=self._on_connect, cp_reconnect=True,
                                     cp_min=1, cp_max=pool_size)

    @staticmethod
    def _filter_pool_settings(settings):
        settings = {
            'driver': settings.get('driver'),
            'host': settings.get('host'),
            'port': settings.get('port'),
            'user': settings.get('user'),
            'password': settings.get('password'),
            'dbname': settings.get('dbname'),
            'pool_size': settings.get('pool_size'),
        }
        if any(val is None for val in settings.values()):
            raise Exception('Some database parameter not specified (%s)' % (str(settings)))

        return settings

    def get_queue_length(self):
        return self._queue.get_length()

    def get_stats(self):
        return self._stats.get_data()

    def reset_stats(self):
        return self._stats.reset()

    def execute(self, statement_name, statement):
        return self._queue.deliver((statement_name, statement))

    def direct_query(self, name, query):
        return self._inspect(name, self._db_pool.runQuery(query), True)

    def _process_statement(self, obj):
        """ Deliver queue's processor """
        # Return False so the queue retries later: db_pool is not ready or the sink is suspended
        if self._suspended or not self._db_pool:
            return False
        try:
            (name, statement) = obj
            return self._inspect(name, self._db_pool.runOperation(statement), False)
        except BaseException as e:
            return e
Example #9
class JsonReporter(object):
    def __init__(self,
                 host,
                 port,
                 max_queue_len=2048,
                 max_active_queue_len=100,
                 retry_after_s=5,
                 start_suspended=False):

        self._link = ReporterJsonLinkClient(self._connection_made_int,
                                            self._received_message)
        self._queue = DeliveryQueue(self._link.sendMessage,
                                    drop_callback=self.on_drop,
                                    max_queue_len=max_queue_len,
                                    max_active_queue_len=max_active_queue_len,
                                    retry_after_s=retry_after_s,
                                    start_suspended=start_suspended)
        self._connector = reactor.connectTCP(host, port, self._link)

    def redirect(self, host, port):
        self._connector.stopConnecting()
        self._connector.disconnect()
        self._connector = reactor.connectTCP(host, port, self._link)

    def _callback(self, result):
        return True

    def _errorback(self, failure):
        return False

    def _received_message(self, msg):
        return True

    def _connection_made_int(self, protocol):
        try:
            protocol.transport.setTcpNoDelay(True)
            protocol.transport.setTcpKeepAlive(True)
            # Seconds of idle time before sending keepalive probes
            protocol.transport.socket.setsockopt(socket.SOL_TCP,
                                                 socket.TCP_KEEPIDLE, 120)
            # Interval in seconds between keepalive probes
            protocol.transport.socket.setsockopt(socket.SOL_TCP,
                                                 socket.TCP_KEEPINTVL, 1)
            # Failed keepalive probes before declaring the other end dead
            protocol.transport.socket.setsockopt(socket.SOL_TCP,
                                                 socket.TCP_KEEPCNT, 5)
        except Exception:
            # Socket tuning is best-effort; not every transport exposes these options
            pass
        self._connection_made()

    def _connection_made(self):
        """Default implementation does nothing."""

    def deliver(self, event):
        self._queue.deliver(event)

    def report(self, event_name, payload):
        obj = (event_name, payload)
        self._queue.deliver(obj)

    def on_drop(self, obj, reason):
        pass
    def test_deferred_always(self):
        p = Processor('deferred_always')
        q = DeliveryQueue(p.accept, max_active_queue_len=5, retry_after_s=0.1)
        yield self._test_generic(q, range(10), p.get_result_list)

    def test_second(self):
        p = Processor('second')
        q = DeliveryQueue(p.accept, max_active_queue_len=3, retry_after_s=0.1)
        yield self._test_generic(q, set(range(10)), p.get_result_set)

    def test_always(self):
        p = Processor('always')
        q = DeliveryQueue(p.accept)
        yield self._test_generic(q, range(10), p.get_result_list)

    def test_exception(self):
        p = Processor('exception')
        q = DeliveryQueue(p.accept, max_active_queue_len=1, retry_after_s=0.1)
        yield self._test_generic(q, set(range(10)), p.get_result_set, set([]))
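The Processor helper and _test_generic used by these tests are not shown in this section. Below is a hypothetical sketch of what such a test double could look like, assuming the queue treats a True return as "delivered", False as "retry later", and a raised exception as a failed attempt; the 'deferred_always' mode, which presumably returns a Twisted Deferred, is omitted for brevity.

class Processor(object):
    """Hypothetical test double that records what the queue delivers."""

    def __init__(self, mode):
        self._mode = mode
        self._results = []
        self._attempts = {}

    def accept(self, obj):
        attempts = self._attempts.get(obj, 0) + 1
        self._attempts[obj] = attempts
        if self._mode == 'exception':
            raise RuntimeError('rejected %r' % (obj,))
        if self._mode == 'second' and attempts < 2:
            return False                 # ask the queue to retry this item later
        self._results.append(obj)
        return True                      # item delivered

    def get_result_list(self):
        return self._results

    def get_result_set(self):
        return set(self._results)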