def _raw_writer(self, connection):
    # Generator routine: pump data from the connection's output stream into
    # ConnectionWriteEvents until the stream ends (EOF) or fails (IOError).
    try:
        while True:
            # Wait until the output stream has data ready to read
            for m in connection.outputstream.prepareRead(connection):
                yield m
            try:
                data = connection.outputstream.readonce()
            except EOFError:
                # Stream exhausted: send an empty write with EOF=True to
                # close the write side of the connection gracefully
                for m in connection.write(
                        ConnectionWriteEvent(connection, connection.connmark, data=b'', EOF=True)):
                    yield m
                break
            except IOError:
                # Stream error: reset (abort) the connection
                for m in connection.reset():
                    yield m
                break
            else:
                # Forward the chunk as a write event
                for m in connection.write(
                        ConnectionWriteEvent(connection, connection.connmark, data=data, EOF=False)):
                    yield m
    finally:
        # Always release the stream when this writer exits, however it exits
        connection.outputstream.close(connection.scheduler)
def reconnect_init(self, connection):
    # Reset per-connection Redis protocol state after a (re)connect and
    # replay SELECT / (P)SUBSCRIBE so the previous session is restored.
    connection.xid = 1                  # next request id to assign
    connection.redis_replyxid = 1       # next reply id expected
    connection.redis_ping = -1
    connection.redis_pingreply = -1
    connection.redis_bufferedxid = 0
    connection.redis_sendbuffer = []
    connection.redis_sender = False
    connection.redis_locker = object()  # fresh lock key to serialize writes
    write_buffer = []
    # Choose the reply parser: C-accelerated hiredis or the pure-Python fallback
    if self.usehiredis:
        connection.redis_reader = hiredis.Reader(protocolError = RedisProtocolException, replyError = RedisReplyException)
    else:
        connection.redis_reader = RedisParser()
    if connection.redis_select:
        # Restore the previously selected database
        write_buffer.append(self.format_request(b'SELECT', connection.redis_select))
        connection.xid += 1
    if connection.redis_subscribe:
        # Re-subscribe to channels / patterns from the previous session
        if connection.redis_subscribe_keys:
            write_buffer.append(self.format_request(b'SUBSCRIBE', *tuple(connection.redis_subscribe_keys)))
        if connection.redis_subscribe_pkeys:
            write_buffer.append(self.format_request(b'PSUBSCRIBE', *tuple(connection.redis_subscribe_pkeys)))
    # Send the replay commands ahead of any queued writes
    connection.scheduler.emergesend(ConnectionWriteEvent(connection, connection.connmark, data=b''.join(write_buffer)))
    # Announce that the Redis connection is up
    for m in connection.waitForSend(RedisConnectionStateEvent(RedisConnectionStateEvent.CONNECTION_UP, connection, connection.connmark, self)):
        yield m
def send_batch(self, connection, container, *cmds):
    '''
    Send multiple commands to redis server at once

    :param connection: redis connection

    :param container: routine container

    :param \*cmds: commands to send. Each command is a tuple/list of bytes/str.

    :returns: list of reply event matchers (from container.retvalue)
    '''
    if not cmds:
        raise RedisProtocolException('No commands')
    # The socket write sequence must match the command issue sequence;
    # serialize concurrent senders with a per-connection lock
    l = Lock(connection.redis_locker, connection.scheduler)
    for m in l.lock(container):
        yield m
    with l:
        commands = []
        matchers = []
        for c in cmds:
            try:
                r, reply_matcher = self._prepare_command(connection, c)
                commands.append(r)
                matchers.append(reply_matcher)
            except Exception:
                # Fixed: was a bare ``except:``, which also swallowed
                # SystemExit / KeyboardInterrupt; only ordinary errors
                # should cause a command to be skipped.
                self._logger.warning('Error in one of the commands in a batch: %r. The command is ignored.', c, exc_info = True)
        if not commands:
            raise RedisProtocolException('Error for every command in a batch')
        # Write all surviving commands in a single chunk
        for m in connection.write(ConnectionWriteEvent(connection, connection.connmark, data = b''.join(commands)), False):
            yield m
    container.retvalue = matchers
def _send_batch(self, connection, container, *cmds):
    "Use delegate to ensure it always ends"
    if not cmds:
        raise RedisProtocolException('No commands')
    # Serialize socket writes so they match the command issue order
    l = Lock(connection.redis_locker, connection.scheduler)
    for m in l.lock(container):
        yield m
    with l:
        commands = []
        matchers = []
        for c in cmds:
            try:
                r, reply_matcher = self._prepare_command(connection, c)
                commands.append(r)
                matchers.append(reply_matcher)
            except Exception:
                # Fixed: was a bare ``except:``, which also swallowed
                # SystemExit / KeyboardInterrupt; only ordinary errors
                # should cause a command to be skipped.
                self._logger.warning(
                    'Error in one of the commands in a batch: %r. The command is ignored.',
                    c, exc_info=True)
        if not commands:
            raise RedisProtocolException(
                'Error for every command in a batch')
        # Write all surviving commands in a single chunk
        for m in connection.write(
                ConnectionWriteEvent(connection, connection.connmark,
                                     data=b''.join(commands)), False):
            yield m
    container.retvalue = matchers
def parse(self, connection, data, laststart):
    # Feed newly received bytes into the RESP reader and turn every complete
    # reply into an event. Replies are matched to requests by xid; in
    # subscribe mode, server pushes become subscribe/message events instead.
    events = []
    connection.redis_reader.feed(_copy(data))
    while True:
        r = connection.redis_reader.gets()
        if r is False:
            # No more complete replies buffered
            break
        if connection.redis_replyxid < connection.xid:
            # A normal request is outstanding: this reply answers it
            events.append(RedisResponseEvent(connection, connection.connmark, connection.redis_replyxid, isinstance(r, Exception), self, result = r))
            connection.redis_replyxid += 1
        elif connection.redis_subscribe:
            if isinstance(r, bytes) or isinstance(r, Exception):
                # Non-array reply while subscribed; delivered with xid 0
                events.append(RedisResponseEvent(connection, connection.connmark, 0, isinstance(r, Exception), self, result = r))
            elif r[0] == b'message':
                # r = [b'message', channel, payload]
                events.append(RedisSubscribeMessageEvent(RedisSubscribeMessageEvent.MESSAGE, r[1], r[1], connection, connection.connmark, self, message = r[2]))
            elif r[0] == b'pmessage':
                # r = [b'pmessage', pattern, channel, payload]
                events.append(RedisSubscribeMessageEvent(RedisSubscribeMessageEvent.PMESSAGE, r[1], r[2], connection, connection.connmark, self, message = r[3]))
            elif r[0] == b'pong':
                # PING replies in subscribe mode are tracked with a
                # separate, decreasing counter
                events.append(RedisResponseEvent(connection, connection.connmark, connection.redis_pingreply, False, self, result = r[1]))
                connection.redis_pingreply -= 1
            else:
                # (P)(UN)SUBSCRIBE confirmation; r[2] is the remaining
                # subscription count
                events.append(RedisSubscribeEvent(_str(r[0]), r[1], connection, connection.connmark, self, result = r))
                if not r[2]:
                    # No subscriptions left: leave subscribe mode and
                    # account for requests buffered while subscribed
                    connection.redis_subscribe = False
                    connection.xid += connection.redis_bufferedxid
    if laststart == len(data):
        # Remote write close
        events.append(ConnectionWriteEvent(connection, connection.connmark, data = b'', EOF = True))
    # The reader keeps its own buffer, so no bytes are left for the caller
    return (events, 0)
async def reply_to(self, connection, reply, request, container):
    """
    Send *reply* as the response to *request*: copy the request xid onto
    the reply, serialize it and queue it for writing.

    :returns: the xid used for the reply
    """
    response_xid = request.xid
    reply.xid = response_xid
    write_event = ConnectionWriteEvent(connection,
                                       connection.connmark,
                                       data=reply._tobytes())
    await connection.write(write_event)
    return response_xid
def formaterror(self, error, requestid, connection):
    """
    Build a write event carrying a JSON-RPC error response for the
    request identified by *requestid*.
    """
    payload = {'result': None, 'error': error, 'id': requestid}
    encoded = json.dumps(payload).encode(self.encoding)
    event = ConnectionWriteEvent(connection=connection,
                                 connmark=connection.connmark,
                                 data=encoded)
    if self.debugging:
        self._logger.debug('message formatted: %r', payload)
    return event
def formatnotification(self, method, params, connection):
    """
    Build a write event carrying a JSON-RPC notification (a request with
    ``id`` set to ``None``, expecting no response).
    """
    payload = {'method': method, 'params': params, 'id': None}
    encoded = json.dumps(payload).encode(self.encoding)
    event = ConnectionWriteEvent(connection=connection,
                                 connmark=connection.connmark,
                                 data=encoded)
    if self.debugging:
        self._logger.debug('message formatted: %r', payload)
    return event
def reply_to(self, connection, reply, request, container):
    """
    Send *reply* as the response to *request* (generator version).
    The xid used is stored into ``container.retvalue``.
    """
    response_xid = request.xid
    reply.xid = response_xid
    write_event = ConnectionWriteEvent(connection,
                                       connection.connmark,
                                       data=reply._tobytes())
    for m in connection.write(write_event):
        yield m
    container.retvalue = response_xid
async def sendrequest(self, connection, request, container):
    """
    Assign a fresh xid to *request* and write it to the connection.

    :returns: the xid assigned to the request
    """
    connection.xid += 1
    assigned_xid = connection.xid
    request.xid = assigned_xid
    write_event = ConnectionWriteEvent(connection,
                                       connection.connmark,
                                       data=request._tobytes())
    await connection.write(write_event)
    return assigned_xid
def parse(self, connection, data, laststart):
    # Parse as many complete ZooKeeper replies as possible from `data`.
    # The first packet on a connection is the handshake (CONNECT) reply;
    # afterwards replies are matched to pending requests by xid, except
    # watcher events which arrive with the special WATCHER_EVENT_XID.
    events = []
    start = 0
    while True:
        result = ZooKeeperReply.parse(data[start:])
        if result is None:
            # Incomplete packet: leave the remainder for the next call
            break
        reply, size = result
        start += size
        if not connection.zookeeper_handshake:
            # First reply: this is the connect/handshake response
            reply.zookeeper_type = CONNECT_PACKET
            reply._autosubclass()
            connection.zookeeper_handshake = True
            events.append(
                ZooKeeperHandshakeEvent(connection, connection.connmark,
                                        self, message=reply))
        else:
            reply.zookeeper_type = HEADER_PACKET
            reply._autosubclass()
            if reply.zxid > 0:
                # Track the newest transaction id seen on this connection
                connection.zookeeper_lastzxid = reply.zxid
            if reply.xid >= 0:
                # Ordinary response: must correspond to a pending request
                xid = reply.xid
                if xid not in connection.zookeeper_requests:
                    raise ZooKeeperProtocolException(
                        'xid does not match: receive %r' % (reply.xid, ))
                request_type = connection.zookeeper_requests.pop(xid)
                # Re-subclass the reply now that the request type is known
                reply.zookeeper_request_type = request_type
                reply._autosubclass()
            if reply.xid == WATCHER_EVENT_XID:
                # Server-initiated watcher notification
                events.append(
                    ZooKeeperWatcherEvent(
                        connection, connection.connmark, self, reply.type,
                        reply.state,
                        b'' if reply.path is None else reply.path,
                        message=reply))
            else:
                events.append(
                    ZooKeeperResponseEvent(connection, connection.connmark,
                                           self, reply.xid, message=reply))
    if laststart == len(data):
        # Remote write close
        events.append(
            ConnectionWriteEvent(connection, connection.connmark, data=b'',
                                 EOF=True))
    # Return the events and the number of unparsed trailing bytes
    return (events, len(data) - start)
def sendrequest(self, connection, request, container):
    """
    Assign a fresh xid to *request* and write it to the connection
    (generator version). The xid is stored into ``container.retvalue``.
    """
    connection.xid += 1
    assigned_xid = connection.xid
    request.xid = assigned_xid
    write_event = ConnectionWriteEvent(connection,
                                       connection.connmark,
                                       data=request._tobytes())
    for m in connection.write(write_event):
        yield m
    container.retvalue = assigned_xid
def parse(self, connection, data, laststart):
    # Parse as many complete protocol messages as possible from `data`.
    # Echo requests are answered immediately; other odd-typed messages
    # become request events and even-typed messages become reply events.
    events = []
    currstart = 0
    while True:
        r = d.message.parse(data[currstart:])
        if not r:
            # Incomplete message: wait for more data
            break
        msg, size = r
        if msg.type == d.ECHO_REQUEST and msg.version == d.MESSAGE_VERSION_10:
            # Direct reply
            msg.type = d.ECHO_REPLY
            events.append(
                ConnectionWriteEvent(connection, connection.connmark,
                                     data=msg._tobytes()))
        elif msg.type & 1:
            # Odd type value marks a request
            events.append(
                MyProtocolRequestEvent(msg.type, msg.version, connection,
                                       connection.connmark, self,
                                       message=msg))
        else:
            # Even type value marks a reply, matched to its request by xid
            events.append(
                MyProtocolReplyEvent(msg.xid, connection,
                                     connection.connmark, self, msg.type,
                                     msg.type == d.ERROR, message=msg))
        currstart += size
    if laststart == len(data):
        # Remote write close
        events.append(
            ConnectionWriteEvent(connection, connection.connmark, data=b'',
                                 EOF=True))
    # Return the events and the number of unparsed trailing bytes
    return (events, len(data) - currstart)
def _send_command(self, connection, container, *args):
    """
    Prepare and write a single Redis command; the reply matcher is stored
    into ``container.retvalue``.
    """
    if not args:
        raise RedisProtocolException('No command name')
    # Bytes must reach the socket in the same order the commands were
    # issued, so serialize senders with a per-connection lock
    write_lock = Lock(connection.redis_locker, connection.scheduler)
    for m in write_lock.lock(container):
        yield m
    with write_lock:
        request_data, reply_matcher = self._prepare_command(connection, args)
        write_event = ConnectionWriteEvent(connection,
                                           connection.connmark,
                                           data=request_data)
        for m in connection.write(write_event, False):
            yield m
    container.retvalue = reply_matcher
async def _clientroutine(self, connection):
    """Keep writing zero-filled buffers until the connection is closed."""
    try:
        payload = b'\x00' * self.buffersize
        while True:
            write_event = ConnectionWriteEvent(connection,
                                               connection.connmark,
                                               data=payload)
            await connection.write(write_event, False)
    except Exception:
        # On any failure, force the connection down and re-raise
        await connection.shutdown(True)
        raise
def _clientroutine(self, connection):
    # Send Data Until Connection closed (generator version)
    try:
        data = b'\x00' * self.buffersize
        while True:
            we = ConnectionWriteEvent(connection, connection.connmark,
                                      data=data)
            for m in connection.write(we, False):
                yield m
    except Exception:
        # Fixed: was a bare ``except:``. A bare except also catches the
        # GeneratorExit raised when this routine is closed while suspended
        # at a yield; yielding again from the handler would then raise
        # RuntimeError ("generator ignored GeneratorExit"). Catch only
        # ordinary errors (consistent with the async variant) and shut
        # the connection down.
        for m in connection.shutdown(True):
            yield m
def formatrequest(self, method, params, connection):
    """
    Build a JSON-RPC request write event.

    :returns: ``(event, msgid)`` where *msgid* is the id assigned to the
              request on this connection.
    """
    msgid = connection.xid
    payload = {'method': method, 'params': params, 'id': msgid}
    # Advance the per-connection id, wrapping back to 1 so that the
    # special id 0 (reserved for special responses) is never produced
    next_id = connection.xid + 1
    if next_id > 0x7fffffff:
        next_id = 1
    connection.xid = next_id
    event = ConnectionWriteEvent(connection=connection,
                                 connmark=connection.connmark,
                                 data=json.dumps(payload).encode(self.encoding))
    if self.debugging:
        self._logger.debug('message formatted: %r', payload)
    return (event, msgid)
async def init(self, connection):
    """Initialize the test protocol on a newly established connection."""
    await Protocol.init(self, connection)
    if self.server:
        # Server side: immediately close the write direction; the server
        # only receives
        await connection.write(
            ConnectionWriteEvent(connection, connection.connmark,
                                 data=b'', EOF=True))
    else:
        # Client side: start the sending routine, bounded by a timeout
        sender = connection.executeWithTimeout(
            self.totalsend + 1.0, self._clientroutine(connection))
        connection.subroutine(sender, False, 'protocolroutine')
    await connection.wait_for_send(
        TestConnectionEvent(TestConnectionEvent.UP, connection))
async def send_command(self, connection, container, *args):
    '''
    Send a command to the Redis server.

    :param connection: Redis connection

    :param container: routine container

    :param \*args: command parameters, beginning with the command name,
                   e.g. `'SET'`,`'key'`,`'value'`

    :returns: event matcher to wait for the reply
    '''
    if not args:
        raise RedisProtocolException('No command name')
    request_data, matcher_factory = self._prepare_command(connection, args)
    write_event = ConnectionWriteEvent(connection,
                                       connection.connmark,
                                       data=request_data)
    await connection.write(write_event, False)
    return matcher_factory()
def init(self, connection):
    """Initialize the test protocol on a new connection (generator version)."""
    for m in Protocol.init(self, connection):
        yield m
    if self.server:
        # Server side: immediately close the write direction; the server
        # only receives
        for m in connection.write(
                ConnectionWriteEvent(connection, connection.connmark,
                                     data=b'', EOF=True)):
            yield m
    else:
        # Client side: start the sending routine, bounded by a timeout
        sender = connection.executeWithTimeout(
            self.totalsend + 1.0, self._clientroutine(connection))
        connection.subroutine(sender, False, 'protocolroutine')
    for m in connection.waitForSend(
            TestConnectionEvent(TestConnectionEvent.UP, connection)):
        yield m
def send_command(self, connection, container, *args):
    '''
    Send a command to the Redis server (generator version).

    :param connection: Redis connection

    :param container: routine container

    :param \*args: command parameters, beginning with the command name,
                   e.g. 'SET','key','value'

    :returns: event matcher to wait for the reply; the value is returned
              from container.retvalue
    '''
    if not args:
        raise RedisProtocolException('No command name')
    # Bytes must reach the socket in the same order the commands were
    # issued, so serialize senders with a per-connection lock
    write_lock = Lock(connection.redis_locker, connection.scheduler)
    for m in write_lock.lock(container):
        yield m
    with write_lock:
        request_data, reply_matcher = self._prepare_command(connection, args)
        write_event = ConnectionWriteEvent(connection,
                                           connection.connmark,
                                           data=request_data)
        for m in connection.write(write_event, False):
            yield m
    container.retvalue = reply_matcher
async def send_batch(self, connection, container, *cmds):
    '''
    Send multiple commands to redis server at once

    :param connection: redis connection

    :param container: routine container

    :param \*cmds: commands to send. Each command is a tuple/list of bytes/str.

    :returns: list of reply event matchers
    '''
    if not cmds:
        raise RedisProtocolException('No commands')
    # Prepare every command up front; each yields (request bytes, matcher factory)
    prepared = [self._prepare_command(connection, c) for c in cmds]
    request_data = b''.join(request for request, _ in prepared)
    await connection.write(
        ConnectionWriteEvent(connection, connection.connmark,
                             data=request_data),
        False)
    return [factory() for _, factory in prepared]
def __init__(self):
    '''
    Constructor
    '''
    # Configure logging: prefer a dict-style config from ``self.logging``,
    # falling back to a file-based config from ``self.loggingconfig``
    if hasattr(self, 'logging'):
        if isinstance(self.logging, dict):
            logging_config = dict(self.logging)
        else:
            logging_config = self.logging.todict()
        # Keep loggers created before configuration working
        logging_config.setdefault('disable_existing_loggers', False)
        logging.config.dictConfig(logging_config)
    elif hasattr(self, 'loggingconfig'):
        logging.config.fileConfig(self.loggingconfig, disable_existing_loggers=False)
    # Create the central scheduler; events without a dedicated sub-queue
    # below are classified by their first classname
    self.scheduler = Scheduler(DefaultPolling(),
                               getattr(self, 'processevents', None),
                               getattr(self, 'queuedefaultsize', None),
                               getattr(self, 'queuemaxsize', None),
                               defaultQueueClass=CBQueue.AutoClassQueue.initHelper('_classname0'),
                               defaultQueuePriority = 400)
    if self.debugging:
        self.scheduler.debugging = True
        self.scheduler.logger.setLevel(logging.DEBUG)
    # Register a prioritized sub-queue for each event category.
    # Poll events are grouped per file descriptor
    self.scheduler.queue.addSubQueue(self.pollwritepriority, PollEvent.createMatcher(category=PollEvent.WRITE_READY), 'write', None, None, CBQueue.AutoClassQueue.initHelper('fileno'))
    self.scheduler.queue.addSubQueue(self.pollreadpriority, PollEvent.createMatcher(category=PollEvent.READ_READY), 'read', None, None, CBQueue.AutoClassQueue.initHelper('fileno'))
    self.scheduler.queue.addSubQueue(self.pollerrorpriority, PollEvent.createMatcher(category=PollEvent.ERROR), 'error')
    self.scheduler.queue.addSubQueue(self.connectioncontrolpriority, ConnectionControlEvent.createMatcher(), 'control')
    # Write events are bounded both globally and per connection
    self.scheduler.queue.addSubQueue(self.connectionwritepriority, ConnectionWriteEvent.createMatcher(), 'connectionwrite', self.totalwritelimit, self.totalwritelimit, CBQueue.AutoClassQueue.initHelper('connection', self.preservefornew, subqueuelimit = self.writelimitperconnection))
    # Stream data events are bounded both globally and per stream
    self.scheduler.queue.addSubQueue(self.streamdatapriority, StreamDataEvent.createMatcher(), 'streamdata', self.streamdatalimit, self.streamdatalimit, CBQueue.AutoClassQueue.initHelper('stream', self.preservefornew, subqueuelimit = self.datalimitperstream))
    self.scheduler.queue.addSubQueue(self.routinecontrolpriority, RoutineControlEvent.createMatcher(), 'routine')
    self.scheduler.queue.addSubQueue(self.timerpriority, TimerEvent.createMatcher(), 'timer')
    self.scheduler.queue.addSubQueue(self.resolverresppriority, ResolveResponseEvent.createMatcher(), 'resolve')
    # Limit in-flight resolver requests to 16
    self.scheduler.queue.addSubQueue(self.resolverreqpriority, ResolveRequestEvent.createMatcher(), 'resolvereq', 16)
    self.scheduler.queue.addSubQueue(self.sysctlpriority, SystemControlEvent.createMatcher(), 'sysctl')
    self.scheduler.queue.addSubQueue(self.sysctllowpriority, SystemControlLowPriorityEvent.createMatcher(), 'sysctllow')
    # Module API calls are grouped per target module
    self.scheduler.queue.addSubQueue(self.moduleapicallpriority, ModuleAPICall.createMatcher(), 'moduleapi', None, None, CBQueue.AutoClassQueue.initHelper('target', 2, subqueuelimit = 5))
    self.scheduler.queue.addSubQueue(self.moduleapireplypriority, ModuleAPIReply.createMatcher(), 'moduleapireply')
    self.scheduler.queue.addSubQueue(self.modulenotifypriority, ModuleNotification.createMatcher(), 'modulenotify', None, None, CBQueue.AutoClassQueue.initHelper('target', subqueuelimit=5))
    self.scheduler.queue.addSubQueue(self.moduleloadeventpriority, ModuleLoadStateChanged.createMatcher(), 'moduleload')
    # Lock events are grouped per lock key, one pending event per key
    self.scheduler.queue.addSubQueue(self.lockpriority, LockEvent.createMatcher(), 'lock', None, None, CBQueue.AutoClassQueue.initHelper('key', subqueuelimit=1))
    self.scheduler.queue.addSubQueue(self.futurepriority, FutureEvent.createMatcher(), 'future')
    self.resolver = Resolver(self.scheduler, self.resolverpoolsize)
    self.moduleloader = ModuleLoader(self)
def __init__(self):
    '''
    Constructor
    '''
    # NOTE(review): near-duplicate of the other ``__init__`` in this file;
    # this variant does not register a 'future' sub-queue for FutureEvent.
    # Configure logging: prefer a dict-style config from ``self.logging``,
    # falling back to a file-based config from ``self.loggingconfig``
    if hasattr(self, 'logging'):
        if isinstance(self.logging, dict):
            logging_config = dict(self.logging)
        else:
            logging_config = self.logging.todict()
        # Keep loggers created before configuration working
        logging_config.setdefault('disable_existing_loggers', False)
        logging.config.dictConfig(logging_config)
    elif hasattr(self, 'loggingconfig'):
        logging.config.fileConfig(self.loggingconfig, disable_existing_loggers=False)
    # Create the central scheduler; events without a dedicated sub-queue
    # below are classified by their first classname
    self.scheduler = Scheduler(DefaultPolling(),
                               getattr(self, 'processevents', None),
                               getattr(self, 'queuedefaultsize', None),
                               getattr(self, 'queuemaxsize', None),
                               defaultQueueClass=CBQueue.AutoClassQueue.initHelper('_classname0'),
                               defaultQueuePriority = 400)
    if self.debugging:
        self.scheduler.debugging = True
        self.scheduler.logger.setLevel(logging.DEBUG)
    # Register a prioritized sub-queue for each event category.
    # Poll events are grouped per file descriptor
    self.scheduler.queue.addSubQueue(self.pollwritepriority, PollEvent.createMatcher(category=PollEvent.WRITE_READY), 'write', None, None, CBQueue.AutoClassQueue.initHelper('fileno'))
    self.scheduler.queue.addSubQueue(self.pollreadpriority, PollEvent.createMatcher(category=PollEvent.READ_READY), 'read', None, None, CBQueue.AutoClassQueue.initHelper('fileno'))
    self.scheduler.queue.addSubQueue(self.pollerrorpriority, PollEvent.createMatcher(category=PollEvent.ERROR), 'error')
    self.scheduler.queue.addSubQueue(self.connectioncontrolpriority, ConnectionControlEvent.createMatcher(), 'control')
    # Write events are bounded both globally and per connection
    self.scheduler.queue.addSubQueue(self.connectionwritepriority, ConnectionWriteEvent.createMatcher(), 'connectionwrite', self.totalwritelimit, self.totalwritelimit, CBQueue.AutoClassQueue.initHelper('connection', self.preservefornew, subqueuelimit = self.writelimitperconnection))
    # Stream data events are bounded both globally and per stream
    self.scheduler.queue.addSubQueue(self.streamdatapriority, StreamDataEvent.createMatcher(), 'streamdata', self.streamdatalimit, self.streamdatalimit, CBQueue.AutoClassQueue.initHelper('stream', self.preservefornew, subqueuelimit = self.datalimitperstream))
    self.scheduler.queue.addSubQueue(self.routinecontrolpriority, RoutineControlEvent.createMatcher(), 'routine')
    self.scheduler.queue.addSubQueue(self.timerpriority, TimerEvent.createMatcher(), 'timer')
    self.scheduler.queue.addSubQueue(self.resolverresppriority, ResolveResponseEvent.createMatcher(), 'resolve')
    # Limit in-flight resolver requests to 16
    self.scheduler.queue.addSubQueue(self.resolverreqpriority, ResolveRequestEvent.createMatcher(), 'resolvereq', 16)
    self.scheduler.queue.addSubQueue(self.sysctlpriority, SystemControlEvent.createMatcher(), 'sysctl')
    self.scheduler.queue.addSubQueue(self.sysctllowpriority, SystemControlLowPriorityEvent.createMatcher(), 'sysctllow')
    # Module API calls are grouped per target module
    self.scheduler.queue.addSubQueue(self.moduleapicallpriority, ModuleAPICall.createMatcher(), 'moduleapi', None, None, CBQueue.AutoClassQueue.initHelper('target', 2, subqueuelimit = 5))
    self.scheduler.queue.addSubQueue(self.moduleapireplypriority, ModuleAPIReply.createMatcher(), 'moduleapireply')
    self.scheduler.queue.addSubQueue(self.modulenotifypriority, ModuleNotification.createMatcher(), 'modulenotify', None, None, CBQueue.AutoClassQueue.initHelper('target', subqueuelimit=5))
    self.scheduler.queue.addSubQueue(self.moduleloadeventpriority, ModuleLoadStateChanged.createMatcher(), 'moduleload')
    # Lock events are grouped per lock key, one pending event per key
    self.scheduler.queue.addSubQueue(self.lockpriority, LockEvent.createMatcher(), 'lock', None, None, CBQueue.AutoClassQueue.initHelper('key', subqueuelimit=1))
    self.resolver = Resolver(self.scheduler, self.resolverpoolsize)
    self.moduleloader = ModuleLoader(self)
def parse(self, connection, data, laststart):
    # Scan incoming bytes with a small character-level state machine that
    # only tracks '{' / '}' nesting (plus strings and escapes) to find the
    # end of each top-level JSON object, then json-decode that object and
    # emit the matching request / notification / response event.
    jsonstart = 0            # start offset of the current (incomplete) message
    start = laststart        # resume scanning where the previous call stopped
    end = len(data)
    events = []
    level = connection.jsonrpc_parserlevel    # persisted brace nesting depth
    state = connection.jsonrpc_parserstate    # persisted scanner state
    # Single-byte integer values for comparison against indexed bytes
    _OBJECT_START = b'{'[0]
    _STRING_MARK = b'"'[0]
    _ESCAPE_MARK = b'\\'[0]
    _OBJECT_END = b'}'[0]
    while start < end:
        # We only match {} to find the end position
        if state == 'begin':
            # Skip to the next interesting character before an object starts
            m = self._BEGIN_PATTERN.match(data, start)
            start = m.end()
            if start < end:
                if data[start] == _OBJECT_START:
                    start += 1
                    level += 1
                    state = 'object'
                else:
                    raise JsonFormatException('"{" is not found')
        elif state == 'object':
            # Skip to the next structural character inside an object
            m = self._OBJECT_PATTERN.match(data, start)
            start = m.end()
            if start < end:
                if data[start] == _STRING_MARK:
                    start += 1
                    state = 'string'
                elif data[start] == _OBJECT_START:
                    start += 1
                    level += 1
                elif data[start] == _OBJECT_END:
                    start += 1
                    level -= 1
                    if level <= 0:
                        # A complete top-level object: decode and dispatch it
                        state = 'begin'
                        jsondata = data[jsonstart:start]
                        if hasattr(jsondata, 'tobytes'):
                            # memoryview input: materialize before decoding
                            jsondata = jsondata.tobytes()
                        jsondata = jsondata.decode(self.encoding)
                        if self.debugging:
                            self._logger.debug('Parsing json text:\n%s', jsondata)
                        jsondata = json.loads(jsondata)
                        if 'method' in jsondata:
                            if jsondata['method'] is None:
                                raise JsonFormatException(
                                    'method is None in input json')
                            if jsondata['id'] is not None:
                                # Unprocessed requests will block the JSON-RPC connection message queue,
                                # as a security consideration, the parser can automatically reject unknown
                                # requests
                                if self.allowedrequests is not None and str(
                                        jsondata['method']
                                ) not in self.allowedrequests:
                                    events.append(
                                        self.formaterror(
                                            'method is not supported',
                                            jsondata['id'], connection))
                                else:
                                    events.append(
                                        JsonRPCRequestEvent(
                                            method=str(jsondata['method']),
                                            params=jsondata['params'],
                                            id=jsondata['id'],
                                            connection=connection,
                                            connmark=connection.connmark,
                                            createby=self))
                                    self._logger.debug(
                                        'Request received(method = %r, id = %r, connection = %r)',
                                        jsondata['method'], jsondata['id'], connection)
                            else:
                                # id is None: this is a notification
                                events.append(
                                    JsonRPCNotificationEvent(
                                        method=str(jsondata['method']),
                                        params=jsondata['params'],
                                        connection=connection,
                                        connmark=connection.connmark,
                                        createby=self))
                                self._logger.debug(
                                    'Notification received(method = %r, connection = %r)',
                                    str(jsondata['method']), connection)
                        elif 'result' in jsondata:
                            if jsondata['id'] is None:
                                raise JsonFormatException(
                                    'id is None for a response')
                            events.append(
                                JsonRPCResponseEvent(
                                    connection=connection,
                                    connmark=connection.connmark,
                                    id=jsondata['id'],
                                    iserror=jsondata['error'] is not None,
                                    result=jsondata['result'],
                                    error=jsondata['error'],
                                    createby=self))
                            self._logger.debug(
                                'Response received(id = %r, connection = %r)',
                                jsondata['id'], connection)
                        # Next message begins after this one
                        jsonstart = start
                else:
                    # Never really reach
                    raise JsonFormatException('How can this be reached...')
        elif state == 'string':
            # Skip ordinary string characters until a quote or escape
            m = self._STRING_PATTERN.match(data, start)
            start = m.end()
            if start < end:
                if data[start] == _STRING_MARK:
                    start += 1
                    state = 'object'
                elif data[start] == _ESCAPE_MARK:
                    start += 1
                    state = 'escape'
                else:
                    # Never really reach
                    raise JsonFormatException('How can this be reached...')
        else:
            # Escape
            start += 1
            state = 'string'
    # Security check
    # NOTE(review): placement after the loop inferred from the collapsed
    # source — confirm against upstream; the limits still bound any single
    # message because jsonstart only advances on complete messages.
    if start - jsonstart > self.messagelimit:
        raise JsonFormatException('JSON message size exceeds limit')
    if level > self.levellimit:
        raise JsonFormatException('JSON message level exceeds limit')
    # Persist scanner state for the next chunk
    connection.jsonrpc_parserlevel = level
    connection.jsonrpc_parserstate = state
    if laststart == len(data):
        # Remote write close
        events.append(
            ConnectionWriteEvent(connection, connection.connmark, data=b'',
                                 EOF=True))
    # Keep the unfinished tail (from jsonstart) for the next call
    return (events, len(data) - jsonstart)