def setUp(self):
    # Fixture for the MediaEdgeGSM transport: a FakeHttpServer stands in for
    # the MediaEdgeGSM API so outbound HTTP requests can be captured.
    # NOTE(review): this method uses `yield`, so it presumably runs under
    # @inlineCallbacks supplied by the enclosing test class — confirm.
    self.mediaedgegsm_calls = DeferredQueue()
    self.fake_http = FakeHttpServer(self.handle_request)
    self.base_url = "http://mediaedgegsm.example.com/"
    self.config = {
        'web_path': "foo",
        'web_port': 0,
        'username': '******',
        'password': '******',
        'outbound_url': self.base_url,
        'outbound_username': '******',
        'outbound_password': '******',
        'operator_mappings': {
            '417': {
                '417912': 'VODA',
                '417913': 'TIGO',
                '417914': 'UNKNOWN',
            }
        }
    }
    self.tx_helper = self.add_helper(
        TransportHelper(MediaEdgeGSMTransport))
    self.transport = yield self.tx_helper.get_transport(self.config)
    # Route the transport's outbound HTTP through the fake server.
    self.transport.agent_factory = self.fake_http.get_agent
    self.transport_url = self.transport.get_transport_url()
    # Canned response body/status the fake server will reply with.
    self.mediaedgegsm_response = ''
    self.mediaedgegsm_response_code = http.OK
class ProcessPool(object):
    """Fixed-size pool of worker processes fed from a shared work queue."""

    def __init__(self, count=10):
        # NOTE(review): self.limiter is never used in this block — confirm
        # it is not dead code.
        self.limiter = DeferredSemaphore(count)
        self.processes = [spawnProcess() for _ in xrange(count)]
        self.workQueue = DeferredQueue()
        # Each process starts pulling work as soon as it is connected.
        for process in self.processes:
            process.onconnect.addCallback(self._prepareForWork)

    @inlineCallbacks
    def _prepareForWork(self, proto):
        # Wait for a queued work item, hand it to the process, and
        # re-register this process for more work once the item completes.
        deferred, func, args = yield self.workQueue.get()
        proto.queueWork(deferred, func, *args)

        def requeue(result):
            self._prepareForWork(proto)
            return result
        deferred.addCallback(requeue)
        returnValue(proto)

    def queueWork(self, function, *args):
        # Two deferreds: the inner one travels with the work item; the
        # outer one is what the caller waits on for the result.
        resultDeferred = Deferred()
        innerDeferred = Deferred()
        self.workQueue.put((innerDeferred, function, args))

        def callResult(obj):
            resultDeferred.callback(obj)
            return obj
        innerDeferred.addCallback(callResult)
        return resultDeferred

    def stop(self):
        # Kill each worker and close its transport.
        for process in self.processes:
            process.protocol.kill()
            process.transport.loseConnection()
def setUp(self):
    # Fixture for the Apposit transport: a MockHttpServer receives the
    # transport's outbound requests; response body/status are configurable.
    # NOTE(review): uses `yield`, presumably under @inlineCallbacks — confirm.
    self.mock_server = MockHttpServer(self.handle_inbound_request)
    self.outbound_requests = DeferredQueue()
    self.mock_server_response = ''
    self.mock_server_response_code = http.OK
    yield self.mock_server.start()
    self.add_cleanup(self.mock_server.stop)
    config = {
        'web_path': 'api/v1/apposit/sms',
        'web_port': 0,
        'credentials': {
            '8123': {
                'username': '******',
                'password': '******',
                'service_id': 'service-id-1',
            },
            '8124': {
                'username': '******',
                'password': '******',
                'service_id': 'service-id-2',
            }
        },
        'outbound_url': self.mock_server.url,
    }
    self.tx_helper = self.add_helper(
        TransportHelper(
            AppositTransport,
            transport_addr='8123',
            mobile_addr='251911223344'))
    self.transport = yield self.tx_helper.get_transport(config)
    self.transport_url = self.transport.get_transport_url()
    self.web_path = config['web_path']
class SSMIServerProtocol(Protocol):
    """Test-side SSMI server: parses delimited frames into a queue."""

    delimiter = TruteqTransportProtocol.delimiter

    def __init__(self):
        self.receive_queue = DeferredQueue()
        self._buf = b""

    def dataReceived(self, data):
        # Accumulate raw bytes and extract any complete commands.
        self._buf = self._buf + data
        self.parse_commands()

    def parse_commands(self):
        # Split off complete (delimiter-terminated) frames; skip empties.
        while True:
            if self.delimiter not in self._buf:
                break
            frame, _sep, remainder = self._buf.partition(self.delimiter)
            self._buf = remainder
            if frame:
                self.receive_queue.put(SSMIRequest.parse(frame))

    def send(self, command):
        # Write the command followed by the protocol delimiter.
        self.transport.write(str(command))
        self.transport.write(self.delimiter)
        return wait0()

    def receive(self):
        # Next parsed request (a Deferred).
        return self.receive_queue.get()

    def disconnect(self):
        self.transport.loseConnection()
def __init__(self, connection_manager, channel, grpc_timeout,
             core_binding_key, core_transaction_key):
    # Wire up the gRPC connection to the VOLTHA core and initialise the
    # queues used for packet-out, packet-in and change-event traffic.
    self.connection_manager = connection_manager
    self.channel = channel
    self.grpc_timeout = grpc_timeout
    self.grpc_stub = VolthaServiceStub(channel)

    # This is the rw-core cluster to which an OFAgent is bound.
    # It is the affinity router that forwards all OFAgent
    # requests to a specific rw-core in this back-end cluster.
    self.core_group_id = ''
    self.core_group_id_key = core_binding_key

    # Since the api-router binds an OFAgent to two RW Cores in a pair and
    # transparently forward requests between the two then the onus is on
    # the OFAgent to fulfill part of the function of the api-server which
    # involves sending a transaction key to both RW Cores for the latter
    # to figure out which Core will handle the transaction. To prevent
    # collision between the api-server ID and the one from OFAgent then the
    # OFAgent ID will be prefixed with "O-".
    self.core_transaction_key = core_transaction_key

    self.stopped = False

    self.packet_out_queue = Queue()  # queue to send out PacketOut msgs
    self.packet_in_queue = DeferredQueue()  # queue to receive PacketIn
    self.change_event_queue = DeferredQueue()  # queue change events
def test_health_response(self):
    # Health endpoint reports the number of connected stream clients:
    # 0 before a stream attaches, 1 while attached, 0 after disconnect.
    health_url = 'http://%s:%s%s' % (self.addr.host, self.addr.port,
                                     self.config['health_path'])
    response = yield http_request_full(health_url, method='GET')
    self.assertEqual(response.delivered_body, '0')
    yield self.app_helper.make_dispatch_inbound('in 1', message_id='1',
                                                conv=self.conversation)
    queue = DeferredQueue()
    stream_url = '%s/%s/messages.json' % (self.url, self.conversation.key)
    stream_receiver = self.client.stream(TransportUserMessage, queue.put,
                                         queue.put, stream_url,
                                         Headers(self.auth_headers))
    # Wait for the first streamed message so the client is fully attached.
    yield queue.get()
    response = yield http_request_full(health_url, method='GET')
    self.assertEqual(response.delivered_body, '1')
    stream_receiver.disconnect()
    response = yield http_request_full(health_url, method='GET')
    self.assertEqual(response.delivered_body, '0')
    # The client list for this conversation should be empty again.
    self.assertEqual(
        self.app.client_manager.clients,
        {'sphex.stream.message.%s' % (self.conversation.key, ): []})
class RateLimitAgent(object):
    """HTTP agent that serialises requests and spaces them 1.3s apart.

    Requests are queued; one is in flight at a time, and the next is only
    dequeued 1.3 seconds after the previous response (or failure).
    """

    def __init__(self):
        self.pool = HTTPConnectionPool(reactor, persistent=True)
        self.pool.maxPersistentPerHost = 4
        self.pool._factory = QuietHTTP11ClientFactory
        self.contextFactory = WhitelistContextFactory()
        self.agent = Agent(reactor, pool=self.pool,
                           contextFactory=self.contextFactory)
        self.queue = DeferredQueue()
        # Prime the consume loop: waits for the first queued request.
        self.getRequest()

    def request(self, method, url, headers, body):
        # Queue the request; the returned Deferred fires with the response.
        finished = Deferred()
        self.queue.put((finished, method, url, headers, body))
        return finished

    def getRequest(self):
        # Wait for the next queued request, then issue it.
        d = self.queue.get()
        d.addCallback(self.gotRequest)

    def gotRequest(self, request):
        finished, method, url, headers, body = request
        d = self.agent.request(method, bytes(url), headers, body)
        d.addCallback(self.cbRequest, finished)
        d.addErrback(self.ebRequest, finished)

    def cbRequest(self, response, finished):
        finished.callback(response)
        # Rate limit: wait 1.3s before picking up the next queued request.
        reactor.callLater(1.3, self.getRequest)

    def ebRequest(self, failure, finished):
        finished.errback(failure)
        reactor.callLater(1.3, self.getRequest)
class WeChatTestCase(VumiTestCase):
    """WeChat transport tests backed by a fake in-process HTTP API."""

    def setUp(self):
        self.tx_helper = self.add_helper(TransportHelper(WeChatTransport))
        self.request_queue = DeferredQueue()
        self.fake_http = FakeHttpServer(self.handle_api_request)
        self.api_url = 'https://api.wechat.com/cgi-bin/'

    def handle_api_request(self, request):
        # NOTE(review): compares request.path against the full API URL —
        # this assumes FakeHttpServer exposes the absolute URI as `path`;
        # confirm against FakeHttpServer's request object.
        self.assertEqual(request.path[:len(self.api_url)], self.api_url)
        self.request_queue.put(request)
        # Leave the request open; the test finishes it explicitly.
        return NOT_DONE_YET

    @inlineCallbacks
    def get_transport(self, **config):
        # Build a transport with sane defaults, overridable per test.
        defaults = {
            'auth_token': 'token',
            'twisted_endpoint': 'tcp:0',
            'wechat_appid': 'appid',
            'wechat_secret': 'secret',
            'embed_user_profile': False,
        }
        defaults.update(config)
        transport = yield self.tx_helper.get_transport(defaults)
        # Route the transport's outbound HTTP through the fake server.
        transport.agent_factory = self.fake_http.get_agent
        returnValue(transport)

    @inlineCallbacks
    def get_transport_with_access_token(self, access_token, **config):
        # As get_transport, but with an access token pre-seeded in redis.
        transport = yield self.get_transport(**config)
        yield transport.redis.set(WeChatTransport.ACCESS_TOKEN_KEY,
                                  access_token)
        returnValue(transport)
def __init__(self, adapter, device_id):
    # ONU device handler: wires up adapter plumbing, OpenOMCI state and the
    # tech-profile KV store, then schedules the ONU event loop.
    self.log = structlog.get_logger(device_id=device_id)
    self.log.debug('function-entry')
    self.adapter = adapter
    self.adapter_agent = adapter.adapter_agent
    self.parent_adapter = None
    self.parent_id = None
    self.device_id = device_id
    self.incoming_messages = DeferredQueue()
    self.event_messages = DeferredQueue()
    self.proxy_address = None
    self.tx_id = 0
    self._enabled = False
    self.alarms = None
    self.pm_metrics = None
    self._omcc_version = OMCCVersion.Unknown
    self._total_tcont_count = 0  # From ANI-G ME
    self._qos_flexibility = 0  # From ONT2_G ME
    self._onu_indication = None
    self._unis = dict()  # Port # -> UniPort
    self._pon = None
    # TODO: probably shouldnt be hardcoded, determine from olt maybe?
    self._pon_port_number = 100
    self.logical_device_id = None
    self._heartbeat = HeartBeat.create(self, device_id)

    # Set up OpenOMCI environment
    self._onu_omci_device = None
    self._dev_info_loaded = False
    self._deferred = None
    self._in_sync_subscription = None
    self._connectivity_subscription = None
    self._capabilities_subscription = None

    self.mac_bridge_service_profile_entity_id = 0x201
    self.gal_enet_profile_entity_id = 0x1

    self._tp_service_specific_task = dict()
    self._tech_profile_download_done = dict()

    # Initialize KV store client
    self.args = registry('main').get_args()
    if self.args.backend == 'etcd':
        host, port = self.args.etcd.split(':', 1)
        self.kv_client = EtcdStore(
            host, port, TechProfile.KV_STORE_TECH_PROFILE_PATH_PREFIX)
    elif self.args.backend == 'consul':
        host, port = self.args.consul.split(':', 1)
        self.kv_client = ConsulStore(
            host, port, TechProfile.KV_STORE_TECH_PROFILE_PATH_PREFIX)
    else:
        self.log.error('Invalid-backend')
        raise Exception("Invalid-backend-for-kv-store")

    # Handle received ONU event messages
    reactor.callLater(0, self.handle_onu_events)
class TestHTTPClientBase(TestCase):
    """Base class for HTTP client tests; requests are captured in a queue."""
    # TODO: Run client tests synchronously with treq.testing tools (#38)

    run_tests_with = AsynchronousDeferredRunTest.make_factory(timeout=0.1)

    def setUp(self):
        super(TestHTTPClientBase, self).setUp()
        self.requests = DeferredQueue()
        self.fake_server = FakeHttpServer(self.handle_request)
        fake_client = treq_HTTPClient(self.fake_server.get_agent())
        self.client = self.get_client(fake_client)
        # Spin the reactor once at the end of each test to clean up any
        # cancelled deferreds
        self.addCleanup(wait0)

    def handle_request(self, request):
        # Capture the request and keep it open for the test to complete.
        self.requests.put(request)
        return NOT_DONE_YET

    def get_client(self, client):
        """To be implemented by subclass"""
        raise NotImplementedError()

    def uri(self, path):
        # Absolute URI for a path under the client's base URL.
        return '%s%s' % (self.client.url, path,)

    def cleanup_d(self, d):
        # Ensure the deferred is waited on during test cleanup.
        self.addCleanup(lambda: d)
        return d
def __init__(self, call_uuid):
    # Per-call protocol state: the call's UUID, an ESL event parser, a
    # queue of received events, and lifecycle deferreds.
    self.call_uuid = call_uuid
    self.esl_parser = EslParser()
    self.queue = DeferredQueue()
    # Fired on call connect / disconnect respectively.
    self.connect_d = Deferred()
    self.disconnect_d = Deferred()
    # Treat incoming data as a raw byte stream rather than lines.
    self.setRawMode()
class QueuePoller(object):
    """Poll per-project spider queues and feed messages to a DeferredQueue."""

    implements(IPoller)

    def __init__(self, config):
        self.config = config
        self.update_projects()
        # size=1: at most one polled message is buffered at a time.
        self.dq = DeferredQueue(size=1)

    @inlineCallbacks
    def poll(self):
        # Skip polling while a previous message is still awaiting pickup.
        if self.dq.pending:
            return
        for p, q in self.queues.iteritems():
            c = yield maybeDeferred(q.count)
            if c:
                msg = yield maybeDeferred(q.pop)
                # Deliver the first available message and stop polling.
                returnValue(self.dq.put(self._message(msg, p)))

    def next(self):
        return self.dq.get()

    def update_projects(self):
        self.queues = get_spider_queues(self.config)

    def _message(self, queue_msg, project):
        # Tag the message with its project and spider name.
        d = queue_msg.copy()
        d['_project'] = project
        d['_spider'] = d.pop('name')
        return d
def __init__(self, config, app):
    # Poller initialisation: read node identity and dispatcher address from
    # setting.conf and prepare the node-info structure reported upstream.
    self.app = app
    self.config = config
    self.update_projects()
    self.dq = DeferredQueue(size=1)
    fp = 'setting.conf'
    conf = ConfigParser.ConfigParser()
    conf.read(fp)
    # This machine's slave ID, read from the config file.
    self.slave_id = conf.get('slave_id', 'slave_id')
    self.dispatch_host = conf.get('dispatch', 'dispatch_host')
    self.dispatch_port = conf.getint('dispatch', 'dispatch_port')
    # self.redis_conn = redis.Redis(host='10.195.112.13', port=6379, password='******')
    self.node_info = {
        'ip': '',
        'slaveid': '',
        'operator': '',
        'os': '',
        'cpu': '',
        'RAM': '',
        'version': '',
        'cpuUsed': '',
        'RAMUsed': '',
        'netCon': ''
    }
    # self.node_info['ip'] = get_local_ip()
    self.node_info['slaveid'] = self.slave_id
    self.node_info['os'] = sys.platform
class OmciRxProxy(pb.Root):
    """Perspective-broker server that queues received OMCI messages."""

    def __init__(self):
        # registry('main').get_args().external_host_address
        self.pb_server_ip = '192.168.24.20'
        self.pb_server_port = 24497
        self.pb_server_factory = pb.PBServerFactory(self)
        # start PB server
        self.listen_port = reactor.listenTCP(self.pb_server_port,
                                             self.pb_server_factory)
        self.omci_rx_queue = DeferredQueue()
        log.info('PB-server-started-on-port', port=self.pb_server_port)

    def get_ip(self):
        return self.pb_server_ip

    def get_port(self):
        return self.pb_server_port

    def get_host(self):
        return self.listen_port.getHost()

    def remote_echo(self, pkt_type, pon, onu, port, crc_ok, msg_size,
                    msg_data):
        # Remote peers call this to hand over a received OMCI frame.
        log.info('received-omci-msg', pkt_type=pkt_type, pon_id=pon,
                 onu_id=onu, port_id=port, crc_ok=crc_ok,
                 msg_size=msg_size, msg_data=hexify(msg_data))
        self.omci_rx_queue.put((onu, msg_data))

    def receive(self):
        # Next (onu, msg_data) pair, as a Deferred.
        return self.omci_rx_queue.get()
def __init__(self, pipe, width=5):
    """Deferred that manages up to `width` concurrent pipeline tasks."""
    Deferred.__init__(self)
    # Completed results are collected here.
    self.results = DeferredQueue()
    self.pipe = pipe
    self.width = width
    self.running = 0
    self.waiting = None
class MockHttpServer(object):
    """
    NOTE: This is deprecated. Please use
    :class:`vumi.tests.http_helpers.MockHttpHelper` instead.
    """

    def __init__(self, handler=None):
        # Requests handled by the default handler are queued for inspection.
        self.queue = DeferredQueue()
        self._handler = handler or self.handle_request
        self._webserver = None
        self.addr = None
        self.url = None

    def handle_request(self, request):
        self.queue.put(request)

    @inlineCallbacks
    def start(self):
        # Listen on an ephemeral local port and record the resulting URL.
        root = MockResource(self._handler)
        site_factory = LogFilterSite(root)
        self._webserver = yield reactor.listenTCP(
            0, site_factory, interface='127.0.0.1')
        self.addr = self._webserver.getHost()
        self.url = "http://%s:%s/" % (self.addr.host, self.addr.port)

    @inlineCallbacks
    def stop(self):
        yield self._webserver.stopListening()
        yield self._webserver.loseConnection()
def test_single_consumergroup_join(self):
    # One-member consumer group: verify it joins, claims every partition
    # of the topic, and receives the message sent to each partition.
    record_stream = DeferredQueue(backlog=1)

    def processor(consumer, records):
        log.debug('processor(%r, %r)', consumer, records)
        record_stream.put(records)

    coord = ConsumerGroup(
        self.client,
        self.id(),
        topics=[self.topic],
        processor=processor,
        retry_backoff_ms=100,
        heartbeat_interval_ms=1000,
        fatal_backoff_ms=3000,
    )
    join_de = self.when_called(coord, 'on_join_complete')
    coord.start()
    self.addCleanup(coord.stop)

    yield join_de
    self.assertIn(self.topic, coord.consumers)
    self.assertEqual(len(coord.consumers[self.topic]),
                     self.num_partitions)
    self.assertEqual(coord.consumers[self.topic][0].topic, self.topic)
    self.assertEqual(coord.consumers[self.topic][0].partition, 0)

    # Send one message to each partition and check it comes back through
    # the processor.
    for part in range(self.num_partitions):
        values = yield self.send_messages(part, [part])
        msgs = yield record_stream.get()
        self.assertEqual(msgs[0].partition, part)
        self.assertEqual(msgs[0].message.value, values[0])
def __init__(
    self, uuid, dbid, poolsize, isLazy=False, handler=ConnectionHandler,
    charset="utf-8", password=None
):
    # Redis connection-pool state. Numeric parameters are validated
    # eagerly so misconfiguration fails fast.
    if not isinstance(poolsize, int):
        raise ValueError(
            "Redis poolsize must be an integer, not %s" % repr(poolsize)
        )
    if not isinstance(dbid, (int, type(None))):
        raise ValueError(
            "Redis dbid must be an integer, not %s" % repr(dbid)
        )
    self.uuid = uuid
    self.dbid = dbid
    self.poolsize = poolsize
    self.isLazy = isLazy
    self.charset = charset
    self.password = password
    # idx/size/pool track the connections created so far; presumably idx
    # is a round-robin pointer over self.pool — TODO confirm.
    self.idx = 0
    self.size = 0
    self.pool = []
    self.deferred = Deferred()
    self.handler = handler(self)
    self.connectionQueue = DeferredQueue()
    self._waitingForEmptyPool = set()
def __init__(self):
    # QRZ lookup client: read credentials from site config and start a
    # session; callsign queries are serialised through csQueue.
    conf = siteConf()
    self.login = conf.get('QRZ', 'login')
    self.pwd = conf.get('QRZ', 'pwd')
    self.csQueue = DeferredQueue()
    self.queueTask = None
    self.getSessionID()
def __init__(self, vumi_transport):
    """
    An SMPP 3.4 client suitable for use by a Vumi Transport.

    :param SmppTransceiverProtocol vumi_transport:
        The transport that is using this protocol to communicate with an
        SMSC.
    """
    self.vumi_transport = vumi_transport
    self.config = self.vumi_transport.get_static_config()
    self.buffer = b''
    self.state = self.CLOSED_STATE
    self.deliver_sm_processor = self.vumi_transport.deliver_sm_processor
    self.dr_processor = self.vumi_transport.dr_processor
    self.sequence_generator = self.vumi_transport.sequence_generator
    self.enquire_link_call = LoopingCall(self.enquire_link)
    self.drop_link_call = None
    # Consider the link dead after two missed enquire-link intervals.
    self.idle_timeout = self.config.smpp_enquire_link_interval * 2
    self.disconnect_call = self.clock.callLater(
        self.idle_timeout, self.disconnect,
        'Disconnecting, no response from SMSC for longer '
        'than %s seconds' % (self.idle_timeout, ))
    self.unbind_resp_queue = DeferredQueue()
def __init__(self, auto_accept=True, auto_unbind=True):
    """Fake SMSC: records PDUs and exposes an endpoint for clients."""
    # Whether binds are accepted / unbinds acknowledged automatically.
    self.auto_accept = auto_accept
    self.auto_unbind = auto_unbind
    self.connected = False
    self.pdu_queue = DeferredQueue()
    self.endpoint = FakeSMSCEndpoint(self)
    self._reset_connection_ds()
def test_events_stream(self):
    # Stream two acks over the events endpoint and check both arrive.
    url = '%s/%s/events.json' % (self.url, self.conversation.key)
    events = DeferredQueue()
    errors = DeferredQueue()
    receiver = yield self.client.stream(TransportEvent, events.put,
                                        events.put, url,
                                        Headers(self.auth_headers))
    msg1 = yield self.app_helper.make_stored_outbound(
        self.conversation, 'out 1', message_id='1')
    ack1 = yield self.app_helper.make_dispatch_ack(
        msg1, conv=self.conversation)
    msg2 = yield self.app_helper.make_stored_outbound(
        self.conversation, 'out 2', message_id='2')
    ack2 = yield self.app_helper.make_dispatch_ack(
        msg2, conv=self.conversation)
    ra1 = yield events.get()
    ra2 = yield events.get()
    receiver.disconnect()
    self.assertEqual(ack1['event_id'], ra1['event_id'])
    self.assertEqual(ack2['event_id'], ra2['event_id'])
    # NOTE(review): DeferredQueue.size is the configured maximum size
    # (always None here), not the number of queued items — this assertion
    # is vacuous; `errors.pending` was probably intended. Confirm.
    self.assertEqual(errors.size, None)
class QueuePoller(object):
    """Poll a Redis-backed spider queue and hand messages on via next()."""

    implements(IPoller)

    def __init__(self, settings):
        self.q = RedisSpiderQueue(settings)
        # size=1 so at most one message is buffered between polls.
        self.dq = DeferredQueue(size=1)

    @inlineCallbacks
    def poll(self):
        # Don't poll while a previously fetched message awaits pickup.
        if self.dq.pending:
            return
        count = yield maybeDeferred(self.q.count)
        if not count:
            return
        msg = yield maybeDeferred(self.q.pop)
        returnValue(self.dq.put(self._message(msg)))

    def next(self):
        return self.dq.get()

    def _message(self, queue_msg):
        # Copy the raw queue message and normalise its bookkeeping keys.
        out = queue_msg.copy()
        out['_project'] = SCRAPY_PROJECT
        out['_spider'] = out.pop('name')
        out['_job'] = out.pop('jobid', uuid.uuid1().hex)
        return out
def setUp(self):
    # MediaEdgeGSM transport fixture using a real local MockHttpServer
    # (rather than an agent-level fake) to capture outbound requests.
    # NOTE(review): uses `yield`, presumably under @inlineCallbacks — confirm.
    self.mediaedgegsm_calls = DeferredQueue()
    self.mock_mediaedgegsm = MockHttpServer(self.handle_request)
    self.add_cleanup(self.mock_mediaedgegsm.stop)
    yield self.mock_mediaedgegsm.start()
    self.config = {
        'web_path': "foo",
        'web_port': 0,
        'username': '******',
        'password': '******',
        'outbound_url': self.mock_mediaedgegsm.url,
        'outbound_username': '******',
        'outbound_password': '******',
        'operator_mappings': {
            '417': {
                '417912': 'VODA',
                '417913': 'TIGO',
                '417914': 'UNKNOWN',
            }
        }
    }
    self.tx_helper = self.add_helper(
        TransportHelper(MediaEdgeGSMTransport))
    self.transport = yield self.tx_helper.get_transport(self.config)
    self.transport_url = self.transport.get_transport_url()
    # Canned response body/status the mock server will reply with.
    self.mediaedgegsm_response = ''
    self.mediaedgegsm_response_code = http.OK
class QueuePoller(object):
    """Poll per-project spider queues, delivering to a waiting consumer."""

    def __init__(self, config):
        self.config = config
        self.update_projects()
        self.dq = DeferredQueue()

    @inlineCallbacks
    def poll(self):
        # Only poll when someone is actually waiting on next().
        if not self.dq.waiting:
            return
        for p, q in iteritems(self.queues):
            c = yield maybeDeferred(q.count)
            if c:
                msg = yield maybeDeferred(q.pop)
                if msg is not None:  # In case of a concurrently accessed queue
                    returnValue(self.dq.put(self._message(msg, p)))

    def next(self):
        return self.dq.get()

    def update_projects(self):
        self.queues = get_spider_queues(self.config)

    def _message(self, queue_msg, project):
        # Tag the message with its project and spider name.
        d = queue_msg.copy()
        d['_project'] = project
        d['_spider'] = d.pop('name')
        return d
class PostgresListenerServiceSpy(PostgresListenerService):
    """Save received notifies `captured_notifies` before processing them."""

    HANDLE_NOTIFY_DELAY = CHANNEL_REGISTRAR_DELAY = 0

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Captured notifications from the database will go here.
        self._captured_notifies = DeferredQueue()
        # Change notifications to a frozenset. This makes sure that the system
        # message does not go into the queue. Instead it should call the
        # handler directly in `doRead`.
        self.notifications = frozenset()

    def _process_notifies(self):
        # Record every notify before handing off to the real implementation.
        for notify in self.connection.connection.notifies:
            self._captured_notifies.put(notify)
        super()._process_notifies()

    @inlineCallbacks
    def wait_notification(self, channel):
        """Wait for a notification to be received."""
        # Discard notifies for other channels until the requested one shows.
        while True:
            notice = yield self._captured_notifies.get()
            if notice.channel == channel:
                returnValue(notice)
class OmciProxy(pb.Root):
    # Perspective-broker bridge for sending/receiving OMCI messages.
    # NOTE(review): Python 2 syntax (print statements, `except E, e`).

    def __init__(self):
        # Serve our root object for inbound PB calls.
        reactor.listenTCP(24497, pb.PBServerFactory(self))
        self.remote = None
        self.response_queue = DeferredQueue()

    @inlineCallbacks
    def connect(self):
        # Connect to the remote PB peer and tell it which port to call back.
        factory = pb.PBClientFactory()
        reactor.connectTCP("10.111.101.206", 24498, factory)
        self.remote = yield factory.getRootObject()
        print 'connected'
        yield self.remote.callRemote("setRemote", port=24496)

    def remote_echo(self, pkt_type, pon, onu, port, crc, size, data):
        # Called remotely with a received OMCI frame; log and queue it.
        print "Packet Type:", pkt_type
        print "PON:", pon
        print "ONU ID:", onu
        print "Port:", port
        print "CRC OK:", crc
        print "Packet Size:", size
        print "received:", hexify(data)
        self.response_queue.put(data)

    @inlineCallbacks
    def send_omci(self, msg):
        # Send an OMCI message (Scapy Packet or raw string) to the peer.
        if isinstance(msg, Packet):
            msg = str(msg)
        try:
            print ' sending:', msg
            yield self.remote.callRemote("send_omci", 0, 0, 1, msg)
            print 'msg sent'
        except Exception, e:
            print >> sys.stderr, 'Blew up:', str(e)
def event_queue(event):
    """Return a DeferredQueue that receives every firing of `event`.

    Each queued item is an ``(args, kwargs)`` tuple; the queue grows an
    ``unsubscribe`` attribute that detaches it from the event.
    """
    queue = DeferredQueue()

    def on_fire(*args, **kwargs):
        queue.put((args, kwargs))

    handle = event.subscribe_repeating(on_fire)
    queue.unsubscribe = handle.unsubscribe
    return queue
class WeChatTestCase(VumiTestCase):
    """WeChat transport tests backed by a local MockHttpServer API."""

    def setUp(self):
        self.tx_helper = self.add_helper(TransportHelper(WeChatTransport))
        self.request_queue = DeferredQueue()
        self.mock_server = MockHttpServer(self.handle_api_request)
        self.add_cleanup(self.mock_server.stop)
        return self.mock_server.start()

    def handle_api_request(self, request):
        # Queue the request and leave it unfinished for the test to answer.
        self.request_queue.put(request)
        return NOT_DONE_YET

    def get_transport(self, **config):
        # Build a transport with defaults pointing at the mock API server.
        defaults = {
            'api_url': self.mock_server.url,
            'auth_token': 'token',
            'twisted_endpoint': 'tcp:0',
            'wechat_appid': 'appid',
            'wechat_secret': 'secret',
            'embed_user_profile': False,
        }
        defaults.update(config)
        return self.tx_helper.get_transport(defaults)

    @inlineCallbacks
    def get_transport_with_access_token(self, access_token, **config):
        # As get_transport, but with an access token pre-seeded in redis.
        transport = yield self.get_transport(**config)
        yield transport.redis.set(WeChatTransport.ACCESS_TOKEN_KEY,
                                  access_token)
        returnValue(transport)
def __init__(self, kafka_host_port, default_topic, group_id_prefix, target_cls): """ Initialize the kafka proxy. This is a singleton (may change to non-singleton if performance is better) :param kafka_host_port: Kafka host and port :param default_topic: Default topic to subscribe to :param target_cls: target class - method of that class is invoked when a message is received on the default_topic """ # return an exception if the object already exist if IKafkaMessagingProxy._kafka_messaging_instance: raise Exception('Singleton-exist') log.debug("Initializing-KafkaProxy") self.kafka_host_port = kafka_host_port self.default_topic = default_topic self.default_group_id = "_".join((group_id_prefix, default_topic)) self.target_cls = target_cls self.topic_target_cls_map = {} self.topic_callback_map = {} self.subscribers = {} self.kafka_proxy = None self.transaction_id_deferred_map = {} # Ident -> KafkaWaitForResponse self.received_msg_queue = DeferredQueue() self.stopped = False self.tx_stats = None # First 'clear' of stats enables them self.rx_stats = None log.debug("KafkaProxy-initialized")
class MockHttpServer(object):
    """
    NOTE: This is deprecated. Please use
    :class:`vumi.tests.http_helpers.MockHttpHelper` instead.
    """

    def __init__(self, handler=None):
        # Requests handled by the default handler are queued for inspection.
        self.queue = DeferredQueue()
        self._handler = handler or self.handle_request
        self._webserver = None
        self.addr = None
        self.url = None

    def handle_request(self, request):
        self.queue.put(request)

    @inlineCallbacks
    def start(self):
        # Listen on an ephemeral local port and record the resulting URL.
        root = MockResource(self._handler)
        site_factory = LogFilterSite(root)
        self._webserver = yield reactor.listenTCP(0, site_factory,
                                                  interface='127.0.0.1')
        self.addr = self._webserver.getHost()
        self.url = "http://%s:%s/" % (self.addr.host, self.addr.port)

    @inlineCallbacks
    def stop(self):
        yield self._webserver.stopListening()
        yield self._webserver.loseConnection()
def test_health_response(self):
    # Health endpoint reports the number of connected stream clients:
    # 0 before a stream attaches, 1 while attached, 0 after disconnect.
    health_url = 'http://%s:%s%s' % (
        self.addr.host, self.addr.port, self.config['health_path'])

    response = yield http_request_full(health_url, method='GET')
    self.assertEqual(response.delivered_body, '0')

    yield self.app_helper.make_dispatch_inbound(
        'in 1', message_id='1', conv=self.conversation)

    queue = DeferredQueue()
    stream_url = '%s/%s/messages.json' % (self.url, self.conversation.key)
    stream_receiver = self.client.stream(
        TransportUserMessage, queue.put, queue.put, stream_url,
        Headers(self.auth_headers))

    # Wait for the first streamed message so the client is fully attached.
    yield queue.get()

    response = yield http_request_full(health_url, method='GET')
    self.assertEqual(response.delivered_body, '1')

    stream_receiver.disconnect()
    response = yield http_request_full(health_url, method='GET')
    self.assertEqual(response.delivered_body, '0')

    # The client list for this conversation should be empty again.
    self.assertEqual(self.app.client_manager.clients, {
        'sphex.stream.message.%s' % (self.conversation.key,): []
    })
def __init__(self, service, bind_type):
    """
    An SMPP 3.4 client suitable for use by a Vumi Transport.

    :param SmppService service:
        The SMPP service that is using this protocol to communicate with
        an SMSC.
    """
    self.service = service
    self.log = service.log
    # Select the bind PDU class appropriate to this bind type.
    self.bind_pdu = self._BIND_PDU[bind_type]
    self.clock = service.clock
    self.config = self.service.get_config()
    self.buffer = b''
    self.state = self.CLOSED_STATE
    self.deliver_sm_processor = self.service.deliver_sm_processor
    self.dr_processor = self.service.dr_processor
    self.sequence_generator = self.service.sequence_generator
    self.enquire_link_call = LoopingCall(self.enquire_link)
    self.drop_link_call = None
    # Consider the link idle after two missed enquire-link intervals.
    self.idle_timeout = self.config.smpp_enquire_link_interval * 2
    self.disconnect_call = None
    self.unbind_resp_queue = DeferredQueue()
def __init__(self, pubnub_instance):
    # Subscription manager built on a DeferredQueue of incoming messages;
    # worker/heartbeat loop handles are created lazily elsewhere.
    self._message_queue = DeferredQueue()
    self.worker_loop = None
    self._heartbeat_loop = None
    self._heartbeat_call = None
    # Use the pubnub instance's clock so tests can control time.
    self.clock = pubnub_instance.clock
    super(TwistedSubscriptionManager, self).__init__(pubnub_instance)
def test_messages_stream(self):
    # Stream two inbound messages and verify both arrive over the stream.
    url = '%s/%s/messages.json' % (self.url, self.conversation.key)
    messages = DeferredQueue()
    errors = DeferredQueue()
    receiver = self.client.stream(
        TransportUserMessage, messages.put, errors.put, url,
        Headers(self.auth_headers))
    msg1 = yield self.app_helper.make_dispatch_inbound(
        'in 1', message_id='1', conv=self.conversation)
    msg2 = yield self.app_helper.make_dispatch_inbound(
        'in 2', message_id='2', conv=self.conversation)
    rm1 = yield messages.get()
    rm2 = yield messages.get()
    receiver.disconnect()
    # Sometimes messages arrive out of order if we're hitting real redis.
    rm1, rm2 = sorted([rm1, rm2], key=lambda m: m['message_id'])
    self.assertEqual(msg1['message_id'], rm1['message_id'])
    self.assertEqual(msg2['message_id'], rm2['message_id'])
    # NOTE(review): DeferredQueue.size is the configured maximum size
    # (None by default), not the item count — `errors.pending` was
    # probably intended. Confirm.
    self.assertEqual(errors.size, None)
def setUp(self):
    # Cellulant SMS transport fixture: a MockHttpServer stands in for the
    # Cellulant API and captures outbound requests.
    # NOTE(review): uses `yield`, presumably under @inlineCallbacks — confirm.
    self.cellulant_sms_calls = DeferredQueue()
    self.mock_cellulant_sms = MockHttpServer(self.handle_request)
    yield self.mock_cellulant_sms.start()
    self.add_cleanup(self.mock_cellulant_sms.stop)
    self.config = {
        'web_path': "foo",
        'web_port': 0,
        'credentials': {
            '2371234567': {
                'username': '******',
                'password': '******',
            },
            '9292': {
                'username': '******',
                'password': '******',
            }
        },
        'outbound_url': self.mock_cellulant_sms.url,
        'validation_mode': 'permissive',
    }
    self.tx_helper = self.add_helper(
        TransportHelper(CellulantSmsTransport))
    self.transport = yield self.tx_helper.get_transport(self.config)
    self.transport_url = self.transport.get_transport_url()
def __init__(self, adapter, device_id):
    # ONU device handler: adapter plumbing plus OpenOMCI bookkeeping.
    self.log = structlog.get_logger(device_id=device_id)
    self.log.debug('function-entry')
    self.adapter = adapter
    self.adapter_agent = adapter.adapter_agent
    self.device_id = device_id
    self.incoming_messages = DeferredQueue()
    self.event_messages = DeferredQueue()
    self.proxy_address = None
    self.tx_id = 0
    self._enabled = False
    self._omcc_version = OMCCVersion.Unknown
    self._total_tcont_count = 0  # From ANI-G ME
    self._qos_flexibility = 0  # From ONT2_G ME
    self._onu_indication = None
    self._unis = dict()  # Port # -> UniPort
    self._port_number_pool = IndexPool(_MAXIMUM_PORT, 0)
    self._pon = None
    # TODO: probably shouldnt be hardcoded, determine from olt maybe?
    self._pon_port_number = 100
    self.logical_device_id = None

    # Set up OpenOMCI environment
    self._onu_omci_device = None
    self._dev_info_loaded = False
    self._deferred = None
    self._in_sync_subscription = None
    self._connectivity_subscription = None
    self._capabilities_subscription = None
def __init__(self, adapter, device_id):
    """Per-device handler state for the given adapter/device pair."""
    self.log = structlog.get_logger(device_id=device_id)
    self.adapter = adapter
    self.adapter_agent = adapter.adapter_agent
    self.device_id = device_id
    # Inbound messages destined for this device handler.
    self.incoming_messages = DeferredQueue()
    self.proxy_address = None
def async_receive_stream(self, func, *args, **kw):
    # Bridge a blocking iterator into Twisted: run func(*args, **kw) in a
    # worker thread and relay each yielded item back to the reactor thread
    # through a DeferredQueue. This generator yields one Deferred per item.
    queue = DeferredQueue()

    def _execute():
        for result in func(*args, **kw):
            reactor.callFromThread(queue.put, result)
    _ = threads.deferToThread(_execute)
    # NOTE(review): this generator never terminates on its own — once the
    # source iterator is exhausted, further gets never fire; callers must
    # stop iterating themselves. Confirm this is intended.
    while 1:
        yield queue.get()
def __init__(self):
    """Create a new TCP proxy.

    `self.server_queue` contains messages from end server to client.
    `self.client_queue` contains messages from client to end server.
    """
    self.server_queue = DeferredQueue()
    self.client_queue = DeferredQueue()
    # Start consuming server-side messages as soon as the first arrives.
    self.server_queue.get().addCallback(self.serverQueueCallback)
class DataConnection(Protocol):
    """Client-facing data connection bridging bytes to the SSH service."""

    def __init__(self):
        self.queue = DeferredQueue()

    def dataReceived(self, data):
        # Queue bytes arriving from the client.
        self.queue.put(data)

    def connectionMade(self):
        # Open the onward connection to the SSH service for this client.
        reactor.connectTCP(SSH_HOST, SSH_PORT, ServiceConnFactory(self))

    def forwardData(self, data):
        # Relay bytes back over this data connection.
        self.transport.write(data)
def __init__(self, handler=None):
    # Message queue that dispatches incoming items to `handler` (or to
    # self.receivedMessage when no handler is given).
    DeferredQueue.__init__(self)
    self.message_handler = handler

    def f(msg):
        if self.message_handler:
            self.message_handler(msg)
        else:
            # NOTE(review): receivedMessage is not defined in this block —
            # presumably provided by a subclass; confirm.
            self.receivedMessage(msg)
    # NOTE(review): only two gets are registered here, so only the first
    # two queued messages are dispatched through `f` unless gets are
    # re-armed elsewhere — confirm this is intended.
    self.get().addCallback(f)
    self.get().addCallback(f)
class DataConn(Protocol):
    """Data connection: buffers bytes until the server stream is live."""

    def __init__(self):
        self.serv_stream_live = False
        self.dq = DeferredQueue()

    def dataReceived(self, data):
        # Forward directly once the server stream is live; buffer otherwise.
        if not self.serv_stream_live:
            self.dq.put(data)
        else:
            conndict['servConn'].transport.write(data)
class PatchedMessengerTransport(MessengerTransport):
    """MessengerTransport variant that captures outbound HTTP requests."""

    def __init__(self, *args, **kwargs):
        super(PatchedMessengerTransport, self).__init__(*args, **kwargs)
        # Each entry: (deferred-to-fire, (method, url, data), kwargs).
        self.request_queue = DeferredQueue()

    def request(self, method, url, data, **kwargs):
        # Instead of performing HTTP, park the request on the queue and
        # hand back a Deferred the test fires with a canned response.
        response_d = Deferred()
        item = (response_d, (method, url, data), kwargs)
        self.request_queue.put(item)
        return response_d
class ClientProtocol(LineReceiver):
    """Collects received lines in a queue; "DONE" marks connection loss."""

    def __init__(self):
        self.queue = DeferredQueue()

    def lineReceived(self, line):
        # Hand each complete line to whoever is waiting on the queue.
        self.queue.put(line)

    def connectionLost(self, reason):
        # Sentinel so consumers know the stream has ended.
        self.queue.put("DONE")
class FakeCellulant(object):
    """Fake Cellulant HTTP endpoint capturing requests for inspection."""

    def __init__(self):
        self.cellulant_sms_calls = DeferredQueue()
        self.fake_http = FakeHttpServer(self.handle_request)
        self.response = ''
        # Convenience aliases used by the tests.
        self.get_agent = self.fake_http.get_agent
        self.get = self.cellulant_sms_calls.get

    def handle_request(self, request):
        # Record the request and answer with the canned response body.
        self.cellulant_sms_calls.put(request)
        return self.response
class BenchTransport(Transport):
    """Benchmark transport that records outbound messages in a queue."""

    # Class-level queue shared by all instances: each started worker puts
    # itself here so the benchmark harness can retrieve live transports.
    WORKER_QUEUE = DeferredQueue()

    @inlineCallbacks
    def startWorker(self):
        yield Transport.startWorker(self)
        # Per-instance queue of outbound messages seen by this transport.
        self.message_queue = DeferredQueue()
        self.WORKER_QUEUE.put(self)

    def handle_outbound_message(self, msg):
        self.message_queue.put(msg)
class TestTransport(TestCase):
    """End-to-end tests for OkTransport against a mock upstream service."""

    @inlineCallbacks
    def setUp(self):
        DelayedCall.debug = True
        self.ok_transport_calls = DeferredQueue()
        self.mock_service = MockHttpServer(self.handle_request)
        yield self.mock_service.start()
        config = {
            'transport_name': 'test_ok_transport',
            'transport_type': 'ok',
            'ussd_string_prefix': '',
            'web_path': "foo",
            'web_port': 0,
            'url': self.mock_service.url,
            'username': '******',
            'password': '******',
        }
        self.worker = get_stubbed_worker(OkTransport, config)
        self.broker = self.worker._amqp_client.broker
        yield self.worker.startWorker()
        self.worker_url = self.worker.get_transport_url()

    @inlineCallbacks
    def tearDown(self):
        yield self.worker.stopWorker()
        yield self.mock_service.stop()

    def handle_request(self, request):
        # Record the upstream request; reply with an empty body.
        self.ok_transport_calls.put(request)
        return ''

    @inlineCallbacks
    def test_health(self):
        result = yield http_request(self.worker_url + "health", "",
                                    method='GET')
        self.assertEqual(json.loads(result), {
            'pending_requests': 0
        })

    @inlineCallbacks
    def test_inbound(self):
        # An inbound HTTP hit becomes a vumi message; replying to that
        # message completes the original HTTP request.
        d = http_request(self.worker_url + "foo", '', method='GET')
        msg, = yield self.broker.wait_messages(
            "vumi", "test_ok_transport.inbound", 1)
        payload = msg.payload
        tum = TransportUserMessage(**payload)
        rep = tum.reply("OK")
        self.broker.publish_message(
            "vumi", "test_ok_transport.outbound", rep)
        response = yield d
        self.assertEqual(response, 'OK')
class ClientConnection(Protocol):
    """Per-client connection: queues inbound bytes and relays data back.

    On connect it notifies the server and, while the server's ``check``
    flag is 1, opens the data-port listener exactly once.
    """

    def __init__(self, server):
        self.server = server
        self.queue = DeferredQueue()  # inbound bytes from this client

    def dataReceived(self, data):
        # Everything the client sends is queued for later consumption.
        self.queue.put(data)

    def connectionMade(self):
        self.server.newConnect()
        # Listen on the data port only while the server flags it unclaimed,
        # so the port is bound at most once.
        if self.server.check == 1:
            reactor.listenTCP(DATA_PORT, DataConnFactory(self))

    def forwardData(self, data):
        # Push bytes back down the wire to this client.
        self.transport.write(data)

    def connectionLost(self, reason):
        # Clear the claim so a future connection may listen again.
        self.server.check = 0
class ClientConn(LineReceiver):
    """Command connection that buffers data until the data stream is live,
    then relays bytes straight through to the data connection."""

    def __init__(self):
        self.data_stream_live = False
        self.dq = DeferredQueue()  # bytes received before the stream opened

    def connectionMade(self):
        reactor.listenTCP(32002, DataConnFactory())
        conndict['cmdConn'].sendLine("new client")

    def dataReceived(self, data):
        # Until the data stream goes live, spool; afterwards relay directly.
        if not self.data_stream_live:
            self.dq.put(data)
            return
        conndict['dataConn'].transport.write(data)
class QueueWrapper(object):
    """
    Wrap a queue so that every ``get`` call on it is observable: each call
    drops a ``None`` marker onto ``event_queue`` before delegating to the
    queue's original ``get``.
    """

    def __init__(self, queue):
        # Capture the original method BEFORE monkey-patching it below.
        self._real_queue_get = queue.get
        self.event_queue = DeferredQueue()
        queue.get = self.get

    def get(self, timeout=None):
        # Announce that a consumer asked for an item, then delegate.
        self.event_queue.put(None)
        fetch = self._real_queue_get
        return fetch(timeout)
class TestStreamingClient(VumiTestCase): @inlineCallbacks def setUp(self): self.mock_server = MockHttpServer(self.handle_request) self.add_cleanup(self.mock_server.stop) yield self.mock_server.start() self.url = self.mock_server.url self.client = StreamingClient() self.messages_received = DeferredQueue() self.errors_received = DeferredQueue() self.disconnects_received = DeferredQueue() def reason_trapper(reason): if reason.trap(ResponseDone): self.disconnects_received.put(reason.getErrorMessage()) self.receiver = self.client.stream( Message, self.messages_received.put, self.errors_received.put, self.url, on_disconnect=reason_trapper) def handle_request(self, request): self.mock_server.queue.put(request) return NOT_DONE_YET @inlineCallbacks def test_callback_on_disconnect(self): req = yield self.mock_server.queue.get() req.write( '%s\n' % (Message(foo='bar').to_json().encode('utf-8'),)) req.finish() message = yield self.messages_received.get() self.assertEqual(message['foo'], 'bar') reason = yield self.disconnects_received.get() # this is the error message we get when a ResponseDone is raised # which happens when the remote server closes the connection. self.assertEqual(reason, 'Response body fully received') @inlineCallbacks def test_invalid_json(self): req = yield self.mock_server.queue.get() req.write("Hello\n") req.finish() try: yield self.errors_received.get() except VumiBridgeInvalidJsonError, e: self.assertEqual(e.args, ("Hello",)) else:
class QueuePoller(object):
    """Polls the per-project spider queues and hands at most one queued job
    at a time to the launcher, capping concurrent jobs per project."""

    implements(IPoller)

    def __init__(self, config, app):
        self.config = config
        # FIX: `app` was accepted but never stored, so the `launcher`
        # property below raised AttributeError on `self.app`.
        self.app = app
        self.update_projects()
        # size=1: at most one popped message is in flight at a time.
        self.dq = DeferredQueue(size=1)
        self.max_jobs_per_project = self.config.getint(
            'max_jobs_per_project', 4)

    @inlineCallbacks
    def poll(self, launcher):
        """Pop one queued job from the first project with a free slot."""
        if self.dq.pending:
            return
        for project, queue in self.queues.iteritems():
            count = yield maybeDeferred(queue.count)
            if count and self._has_slot_for_project(project, launcher):
                msg = yield maybeDeferred(queue.pop)
                returnValue(self.dq.put(self._message(msg, project)))

    def _has_slot_for_project(self, project_name, launcher):
        """True while fewer than max_jobs_per_project spiders of this
        project are currently running in the launcher."""
        running_jobs = 0
        for spider in launcher.processes.values():
            if spider.project == project_name:
                running_jobs += 1
        return running_jobs < self.max_jobs_per_project

    def next(self):
        """Deferred that fires with the next scheduled job message."""
        return self.dq.get()

    def update_projects(self):
        self.queues = get_spider_queues(self.config)

    def _message(self, queue_msg, project):
        # Annotate a copy of the queue message with routing metadata.
        d = queue_msg.copy()
        d['_project'] = project
        d['_spider'] = d.pop('name')
        return d

    @property
    def launcher(self):
        """
        Copied from website.Root
        Should do some refactory to avoid this duplicated code
        """
        app = IServiceCollection(self.app, self.app)
        return app.getServiceNamed('launcher')
# NOTE(review): closure — relies on `self` and `entries` from an enclosing
# scope not visible here; `trace`, `build_path`, `SqlInode`, `log`,
# `deferToThread` and `DeferredList` come from the surrounding module.
# The `yield`s of Deferreds suggest this runs under inlineCallbacks —
# TODO confirm against the caller.
def do_work2(db):
    """Process `entries` (id, inode number, type), dispatching file copies
    to a pool of fetch workers and handling deletes/trims inline."""
    queue = DeferredQueue()
    defs = []
    # Roughly one worker per 5 entries, capped by the configured pool size.
    nworkers = len(entries)//5+1
    if nworkers > self.nworkers:
        nworkers = self.nworkers
    for i in range(nworkers):
        d = self.fetch(db,queue)
        d.addErrback(log.err,"fetch()")
        defs.append(d)
    workers = set()  # inode numbers already handed to the worker pool
    for id,inum,typ in entries:
        if not self.running:
            break
        trace('copyrun',"%d: %s",inum,typ)
        self.last_entry = id
        if typ == 'd':
            # 'd': delete — unlink the inode's backing file off-thread,
            # tolerating an already-missing path.
            def dt(inum):
                path = build_path(self.fs.store,inum, create=False)
                try:
                    os.unlink(path)
                except EnvironmentError as e:
                    if e.errno != errno.ENOENT:
                        raise
            yield deferToThread(dt,inum)
        else:
            inode = SqlInode(self.fs,inum)
            yield inode._load(db)
            if typ == 'f':
                # 'f': fetch — queue for the workers unless already queued.
                if inum in workers:
                    trace('copyrun',"%d: in workers",inum,typ)
                    continue
                workers.add(inum)
                queue.put((id,inode))
            elif typ == 't':
                # 't': trim — presumably shrinks cached data to inode.size;
                # TODO confirm trim semantics.
                if inode.cache:
                    yield inode.cache.trim(inode.size)
            else:
                raise RuntimeError("Typ '%s' not found (inode %d)" % (typ,inum))
            continue
    # One None sentinel per worker tells each fetcher to shut down.
    for i in range(nworkers):
        queue.put(None)
    yield DeferredList(defs)
def __init__(self, count=10):
    """Spawn ``count`` worker processes and, once each connects, wire it
    into the shared work queue via ``_prepareForWork``."""
    self.limiter = DeferredSemaphore(count)
    self.processes = []
    for _ in xrange(count):
        self.processes.append(spawnProcess())
    self.workQueue = DeferredQueue()
    # Attach callbacks only after the queue exists, so a process that
    # connects immediately still finds self.workQueue in place.
    for proc in self.processes:
        proc.onconnect.addCallback(self._prepareForWork)