class NumberGateway(Service):
    """Bridges a NumberClient stream into the cluster's message hub.

    Only the elected leader republishes numbers, so each number is
    emitted cluster-wide exactly once.
    """

    identity = Setting('identity', default='127.0.0.1')
    leader = Setting('leader', default=None)
    cluster_ = Setting('cluster', default=['127.0.0.1'])

    def __init__(self):
        self.client = NumberClient(('127.0.0.1', 7776))
        self.cluster = ClusterCoordinator(self.identity, self.leader)
        self.hub = MessageHub(self.cluster.set, self.identity)
        self.announcer = Announcer(self.hub, self.cluster)
        # Children are registered (and therefore started) in this order.
        for child in (self.cluster, self.hub, self.announcer, self.client):
            self.add_service(child)

    def do_start(self):
        self._bridge()

    @autospawn
    def _bridge(self):
        """Forward each incoming number to the hub while we are leader."""
        for number in self.client:
            if self.cluster.is_leader:
                self.hub.publish('/numbers', number)
            gevent.sleep(0)  # cooperative yield between numbers
class RequestBin(Service):
    """Top-level requestbin service: web server plus a pluggable storage
    backend resolved from a dotted-path setting."""

    bind_address = Setting('bind_address', default=('0.0.0.0', 5000))
    docs_url = Setting(
        'docs_url',
        default='https://github.com/progrium/requestbin/wiki.atom')
    bin_ttl = Setting('bin_ttl', default=48 * 3600)  # seconds bins live
    storage_backend = Setting(
        'storage_backend',
        default='requestbin.storage.memory.MemoryStorage')

    def __init__(self):
        self.server = WSGIServer(self.bind_address, web.app)
        self.add_service(ServerWrapper(self.server))
        # Resolve "package.module.ClassName" into a class object at runtime.
        storage_module, storage_class = self.storage_backend.rsplit('.', 1)
        try:
            klass = getattr(
                __import__(storage_module, fromlist=[storage_class]),
                storage_class)
        except ImportError as e:
            # was "except ImportError, e" — Python-2-only syntax; "as" works
            # on Python 2.6+ and Python 3.
            raise ImportError("Unable to load storage backend '{}': {}".format(
                self.storage_backend, e))
        self.storage = klass(self.bin_ttl)
        self.add_service(self.storage)
        web.app.config['service'] = self
        self.docs = None  # lazily-populated docs feed cache
class HttpStreamer(Service):
    """HTTP pub/sub endpoint: POST publishes to a channel, GET streams it.

    The request path is used as the channel name.
    """

    port = Setting('pubsub_port', default=8088)
    keepalive_interval = Setting('keepalive_interval', default=5)  # seconds

    def __init__(self, hub):
        self.hub = hub
        self.add_service(
            gevent.pywsgi.WSGIServer(listener=(self.hub.bind_interface,
                                               self.port),
                                     application=self.handle,
                                     spawn=self.spawn,
                                     log=None))
        # This isn't the best we can do, but it makes things better
        self.catch(socket.error, lambda e, g: None)

    def handle(self, env, start_response):
        """WSGI entry point: dispatch on HTTP method."""
        if env['REQUEST_METHOD'] == 'POST':
            return self.handle_publish(env, start_response)
        elif env['REQUEST_METHOD'] == 'GET':
            return self.handle_subscribe(env, start_response)
        else:
            start_response('405 Method not allowed', [])
            return ["Method not allowed\n"]

    def handle_publish(self, env, start_response):
        """Publish the POST body to the channel named by the request path."""
        request = webob.Request(env)
        self.hub.publish(request.path, str(request.body))
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ["OK\n"]

    def handle_subscribe(self, env, start_response):
        """Stream the path's channel as one line per message.

        A keepalive greenlet injects None messages, which surface here as
        bare newlines to hold the connection open.
        """
        request = webob.Request(env)
        subscription = self.hub.subscribe(request.path)
        self.keepalive(subscription)
        logger.info("New subscriber (stream)")
        start_response('200 OK', [
            ('Connection', 'keep-alive'),
            ('Cache-Control', 'no-cache, must-revalidate'),
            ('Expires', 'Tue, 11 Sep 1985 19:00:00 GMT'),
        ])
        try:
            for msg in subscription:
                if msg is None:
                    # Keepalive tick.
                    yield '\n'
                else:
                    yield '{}\n'.format(msg)
        except:
            # NOTE(review): bare except also swallows GeneratorExit raised
            # when the client disconnects; cancelling the subscription below
            # appears to be the intended cleanup — confirm before narrowing.
            subscription.cancel()
            logger.info("Lost subscriber")

    @autospawn
    def keepalive(self, subscription):
        """Periodically push None so idle streams emit keepalive newlines."""
        while subscription.channel is not None:
            subscription.put(None)
            gevent.sleep(self.keepalive_interval)
class TunnelBroker(Service):
    """Top-level service that manages tunnels and runs the frontend"""

    port = Setting('port', default=8000)
    address = Setting('address', default='0.0.0.0')

    def __init__(self):
        self.frontend = BrokerFrontend(self)
        self.add_service(ServerWrapper(self.frontend))
        self.tunnels = {}  # name -> Tunnel

    def visual_heartbeat(self):
        """Debug aid: print a dot every second.

        Not started by default; spawn it from do_start when needed.
        """
        while True:
            # print() with a single argument behaves identically on
            # Python 2 and 3; the old `print "."` statement is Py2-only.
            print(".")
            gevent.sleep(1)

    def open_tunnel(self, name):
        """Create, register, and return a new tunnel under `name`."""
        tunnel = Tunnel()
        self.tunnels[name] = tunnel
        return tunnel

    def close_tunnel(self, name):
        """Unregister and close the tunnel; raises KeyError if unknown."""
        tunnel = self.tunnels.pop(name)
        tunnel.close()

    def lookup_tunnel(self, name):
        """Return the tunnel registered under `name`, or None."""
        return self.tunnels.get(name)
class WorkerWeb(Service):
    """Pyramid + Socket.IO web frontend for the worker service."""

    # The original help strings for both settings were copy-pasted from an
    # unrelated setting ("How often to wake up and check the workers");
    # corrected to describe what they actually configure.
    port = Setting('port', default=7337,
                   help="Port the worker web interface listens on")
    hostname = Setting('hostname', default='0.0.0.0',
                       help="Interface the worker web interface binds to")

    def __init__(self):
        self.add_service(
            SocketIOServer((self.hostname, self.port),
                           self.app(),
                           resource='socket.io',
                           policy_server=True,
                           policy_listener=(self.hostname, 10843)))

    def app(self):
        """Build the Pyramid WSGI app serving the index, socket.io routes,
        and static assets."""
        config = Configurator()
        simple_route(config, 'index', '/', lambda req: {})
        # The socketio view configuration
        simple_route(config, 'socket_io', 'socket.io/*remaining',
                     socketio_service)
        config.add_static_view('static', 'web', cache_max_age=3600)
        app = config.make_wsgi_app()
        return app
class NumberServer(Service):
    """TCP server that streams random integers to every connected client."""

    address = Setting("numbers_bind", default=('0.0.0.0', 7776))
    emit_rate = Setting("rate_per_minute", default=60)  # numbers per minute

    def __init__(self):
        self.add_service(StreamServer(self.address, self.handle))

    def do_start(self):
        logger.info("NumberServer is starting.")

    def do_stop(self):
        logger.info("NumberServer is stopping.")

    def do_reload(self):
        logger.info("NumberServer is reloading.")

    def handle(self, socket, address):
        """Per-connection loop: emit random 0-10 integers at emit_rate/min
        until the client disconnects."""
        logger.debug("New connection {}".format(address))
        while True:
            try:
                number = random.randint(0, 10)
                # sendall() retries until the whole line is written; plain
                # send() may deliver only part of the buffer.
                socket.sendall("{}\n".format(number))
                gevent.sleep(60.0 / self.emit_rate)
            except IOError:
                logger.debug("Connection dropped {}".format(address))
                break
class RedisStorage(Service):
    """Bin/request persistence backed by Redis, with TTL-based expiry."""

    prefix = Setting('redis_prefix', default='requestbin')
    redis_init = Setting('redis_init', default={
        'host': 'localhost',
        'port': 6379,
        'db': 0
    })

    def __init__(self, bin_ttl):
        self.bin_ttl = bin_ttl
        self.redis = redis.StrictRedis(**self.redis_init)

    def _key(self, name):
        # Namespaced key for one bin.
        return '{}_{}'.format(self.prefix, name)

    def _request_count_key(self):
        # Global counter of requests ever recorded.
        return '{}-requests'.format(self.prefix)

    def _store(self, bin):
        # Persist the bin and (re)arm its absolute expiry timestamp.
        bin_key = self._key(bin.name)
        self.redis.set(bin_key, bin.dump())
        self.redis.expireat(bin_key, int(bin.created + self.bin_ttl))

    def create_bin(self, private=False):
        """Create, persist, and return a fresh bin."""
        new_bin = Bin(private)
        self._store(new_bin)
        return new_bin

    def create_request(self, bin, request):
        """Append a request to the bin, re-persist it, and bump the
        global request counter."""
        bin.add(request)
        self._store(bin)
        counter = self._request_count_key()
        self.redis.setnx(counter, 0)
        self.redis.incr(counter)

    def count_bins(self):
        return len(self.redis.keys("{}_*".format(self.prefix)))

    def count_requests(self):
        return int(self.redis.get(self._request_count_key()) or 0)

    def avg_req_size(self):
        # Rough KB-per-key estimate from server-side memory stats.
        info = self.redis.info()
        return info['used_memory'] / info['db0']['keys'] / 1024

    def lookup_bin(self, name):
        """Load a bin by name; raises KeyError if missing or corrupt."""
        bin_key = self._key(name)
        payload = self.redis.get(bin_key)
        try:
            return Bin.load(payload)
        except TypeError:
            self.redis.delete(bin_key)  # clear bad data
            raise KeyError("Bin not found")
class Leadership(Service):
    """ZeroMQ-based leader election over a fixed candidate list.

    The sorted cluster list defines the succession order on every node.
    The current leader broadcasts its identity over PUB/SUB as a
    heartbeat; followers promote the next candidate when heartbeats stop.
    """

    port = Setting('leader_port', default=12345)
    heartbeat_interval = Setting('leader_heartbeat_interval_secs', default=3)

    def __init__(self, identity, cluster, zmq_=None):
        # identity: this node's address; cluster: iterable of peer addresses.
        zmq_ = zmq_ or zmq.Context()
        self.identity = identity
        self.leader = None
        self.set = cluster
        # Sorted so every node computes the same succession order.
        self._candidates = sorted(list(cluster))
        self._promoted = Event()
        self._broadcaster = zmq_.socket(zmq.PUB)
        self._listener = zmq_.socket(zmq.SUB)
        # Empty topic filter: receive every broadcast message.
        self._listener.setsockopt(zmq.SUBSCRIBE, '')

    @property
    def is_leader(self):
        return self.identity == self.leader

    def wait_for_promotion(self):
        """Block until this node becomes the leader."""
        self._promoted.wait()

    def do_start(self):
        self._broadcaster.bind("tcp://{}:{}".format(self.identity, self.port))
        self._broadcast_when_promoted()
        self._listen_for_heartbeats()
        self._next_leader()

    def _next_leader(self):
        # Promote the next candidate in succession order.
        # NOTE(review): raises IndexError once all candidates are exhausted
        # — presumably unreachable in practice; confirm.
        self.leader = self._candidates.pop(0)
        if self.is_leader:
            self._promoted.set()
        else:
            self._listener.connect("tcp://{}:{}".format(
                self.leader, self.port))

    @autospawn
    def _broadcast_when_promoted(self):
        # Once promoted, announce leadership every heartbeat_interval.
        self.wait_for_promotion()
        while self.is_leader:
            self._broadcaster.send(self.identity)
            gevent.sleep(self.heartbeat_interval)

    @autospawn
    def _listen_for_heartbeats(self):
        # Followers wait for heartbeats; two missed intervals (recv timing
        # out) triggers promotion of the next candidate.
        while not self.is_leader:
            leader = None
            with Timeout(self.heartbeat_interval * 2, False) as timeout:
                leader = self._listener.recv()
            if leader is None:
                self._next_leader()
class BrokerFrontend(gevent.pywsgi.WSGIServer):
    """Server that will manage a tunnel or proxy traffic through a tunnel"""

    hostname = Setting('hostname', default="vcap.me")  # *.vcap.me -> 127.0.0.1

    def __init__(self, broker):
        gevent.pywsgi.WSGIServer.__init__(self, (broker.address, broker.port))
        self.broker = broker

    def handle(self, socket, address):
        """Route a raw connection by sniffing its HTTP Host header.

        Subdomains of our configured hostname are proxied through an
        existing tunnel; anything else is treated as a tunnel-control
        connection.
        """
        hostname = ''
        # Raw string: the pattern bytes are unchanged, but non-raw '\(' is
        # an invalid escape (a warning on Python 3).
        hostheader = re.compile(r'host: ([^\(\);:,<>]+)', re.I)
        # Peek up to 512 bytes into data for the Host header; MSG_PEEK
        # leaves it unconsumed for the chosen handler.
        for n in [128, 256, 512]:
            data = socket.recv(n, MSG_PEEK)  # renamed: don't shadow bytes()
            if not data:
                break
            for line in data.split('\r\n'):
                match = hostheader.match(line)
                if match:
                    hostname = match.group(1)
            if hostname:
                break
        hostname = hostname.split(':')[0]  # strip any :port suffix
        if hostname.endswith('.%s' % self.hostname):
            handler = ProxyHandler(socket, hostname, self.broker)
            handler.handle()
        else:
            handler = TunnelHandler(socket, address, self.broker)
            handler.handle()
class ClusterCoordinator(Service):
    """Coordinates cluster membership around a single leader peer."""

    port = Setting('cluster_port', default=4440)

    def __init__(self, identity, leader=None, cluster=None):
        leader = leader or identity  # no leader given: we lead ourselves
        self.server = PeerServer(self, identity)
        self.client = PeerClient(self, leader, identity)
        self.set = cluster or ObservableSet()
        self.promoted = Event()
        self.add_service(self.server)
        self.is_leader = leader == identity
        # The leader runs no client connection to another peer.
        if not self.is_leader:
            self.add_service(self.client)

    def wait_for_promotion(self):
        """Block until this node is promoted to leader."""
        self.promoted.wait()

    @property
    def leader(self):
        return self.client.leader

    @property
    def identity(self):
        return self.client.identity
class MyService(_Service):
    # Example service demonstrating settings, logging, spawned loops, and
    # the reload hook.

    foo = Setting("foo", default=("foo", 12), help="This is foo")
    bar = Setting("bar", help="This is bar", monitored=True)
    delay = Setting("delay", default=1, help="Delay between hello printing")

    def __init__(self):
        import logging
        self.log = logging.getLogger(__name__)

    def do_start(self):
        """Start the hello-printing loop in a service-owned greenlet."""
        self.log.info("Hello here")
        self.spawn(self.loop)

    def do_reload(self):
        """On reload, log whether the monitored 'bar' setting changed."""
        self.log.info("reloaded!")
        self.log.info("changed: {}".format(self.bar.changed))

    def loop(self):
        """Log 'hello' forever, sleeping `delay` seconds between messages."""
        while True:
            self.log.info("hello")
            # NOTE(review): `async` became a reserved word in Python 3, so
            # this framework attribute access is Python-2-only.
            self. async .sleep(self.delay)
class WebSocketStreamer(Service):
    """Streams hub subscriptions to websocket clients, one channel per
    request path."""

    port = Setting('websocket_port', default=7070)

    def __init__(self, hub):
        self.hub = hub
        self.add_service(
            WebSocketServer((self.hub.bind_interface, self.port),
                            self.handle))

    def handle(self, websocket, environ):
        """Relay every message on the path's channel until the peer drops."""
        subscription = self.hub.subscribe(environ.get('PATH_INFO'))
        for message in subscription:
            try:
                websocket.send(message)
            except IOError:
                # Client went away; stop relaying.
                break
            gevent.sleep(0)  # cooperative yield between messages
class MessageBackend(Service):
    """Peer-to-peer pub/sub backend built from a transmitter/receiver pair."""

    port = Setting('backend_port', default=2222)

    def __init__(self, cluster=None, bind_interface=None, zmq_=None):
        self.cluster = cluster or ObservableSet()
        self.zmq = zmq_ or zmq.Context()
        self.transmitter = PeerTransmitter(self)
        self.receiver = PeerReceiver(self, bind_interface)
        for peer_service in (self.transmitter, self.receiver):
            self.add_service(peer_service)

    def publish(self, channel, message):
        """Broadcast `message` on `channel` to all peers."""
        self.transmitter.broadcast(channel, message)

    def subscribe(self, channel):
        """Return a Subscription bound to this backend's receiver."""
        return Subscription(self.receiver, channel)
class Bin(object):
    """A collection of captured requests, newest first, capped in length."""

    max_requests = Setting('max_requests', default=20)

    def __init__(self, private=False):
        self.created = time.time()
        self.private = private
        self.color = random_color()
        self.name = tinyid(8)
        self.favicon_uri = solid16x16gif_datauri(*self.color)
        self.requests = []
        # Private bins carry a secret key; public bins carry None.
        self.secret_key = os.urandom(24) if self.private else None

    def json(self):
        """JSON-serialized public view of the bin."""
        return json.dumps(self.to_dict())

    def to_dict(self):
        """Public view: excludes requests, favicon, and the secret key."""
        return dict(private=self.private,
                    color=self.color,
                    name=self.name,
                    request_count=self.request_count)

    def dump(self):
        """msgpack-serialize the bin, with requests serialized recursively."""
        o = copy.copy(self.__dict__)
        o['requests'] = [r.dump() for r in self.requests]
        return msgpack.dumps(o)

    @staticmethod
    def load(data):
        """Inverse of dump(): rebuild the Bin and its Request objects."""
        o = msgpack.loads(data)
        o['requests'] = [Request.load(r) for r in o['requests']]
        b = Bin()
        b.__dict__ = o
        return b

    @property
    def request_count(self):
        return len(self.requests)

    def add(self, request):
        """Prepend a new request and trim the list to max_requests.

        The original trimmed via a Python-2-only xrange loop of repeated
        pop(max_requests) calls; a slice delete is equivalent, portable,
        and a single O(overflow) operation.
        """
        self.requests.insert(0, Request(request))
        del self.requests[self.max_requests:]
class MemoryStorage(Service): cleanup_interval = Setting('cleanup_interval', default=3600) def __init__(self, bin_ttl): self.bin_ttl = bin_ttl self.bins = {} self.request_count = 0 def do_start(self): self.spawn(self._cleanup_loop) def _cleanup_loop(self): while True: self. async .sleep(self.cleanup_interval) self._expire_bins() def _expire_bins(self): expiry = time.time() - self.bin_ttl for name, bin in self.bins.items(): if bin.created < expiry: self.bins.pop(name) def create_bin(self, private=False): bin = Bin(private) self.bins[bin.name] = bin return self.bins[bin.name] def create_request(self, bin, request): bin.add(request) self.request_count += 1 def count_bins(self): return len(self.bins) def count_requests(self): return self.request_count def avg_req_size(self): return None def lookup_bin(self, name): return self.bins[name]
class HttpTailViewer(Service):
    """Streams a channel as multipart/x-mixed-replace for live tailing
    in a browser. The request path is used as the channel name."""

    port = Setting('tail_port', default=8089)

    def __init__(self, hub):
        self.hub = hub
        self.add_service(
            gevent.pywsgi.WSGIServer(listener=(self.hub.bind_interface,
                                               self.port),
                                     application=self.handle,
                                     spawn=self.spawn,
                                     log=None))
        # This isn't the best we can do, but it makes things better
        self.catch(socket.error, lambda e, g: None)

    def handle(self, env, start_response):
        """WSGI generator: emit each channel message as one multipart part."""
        request = webob.Request(env)
        subscription = self.hub.subscribe(request.path)
        logger.info("New subscriber (tail view)")
        # Random boundary string separates successive parts.
        boundary = str(random.random())
        start_response('200 OK', [
            ('Content-Type',
             'multipart/x-mixed-replace; boundary={}'.format(boundary)),
            ('Connection', 'keep-alive'),
            ('Cache-Control', 'no-cache, must-revalidate'),
            ('Expires', 'Tue, 11 Sep 1985 19:00:00 GMT'),
        ])
        yield '--{}\n'.format(boundary)
        for msg in subscription:
            # None messages are keepalive ticks; only real payloads become
            # parts.
            if msg is not None:
                yield '\n'.join([
                    'Content-Type: text/plain',
                    'Content-Length: {}'.format(len(msg)),
                    '\n{}'.format(msg), '--{}\n'.format(boundary)
                ])
class Request(object):
    """A single captured HTTP request, msgpack-serializable."""

    ignore_headers = Setting('ignore_headers', default=[])

    def __init__(self, input=None):
        # A bare Request() (no input) is an empty shell used by load().
        if input:
            self.id = tinyid(6)
            self.time = time.time()
            self.remote_addr = input.headers.get('X-Forwarded-For',
                                                 input.remote_addr)
            self.method = input.method
            self.headers = dict(input.headers)
            for header in self.ignore_headers:
                self.headers.pop(header, None)
            self.query_string = input.query_string
            self.form_data = [[k, input.values[k]] for k in input.values]
            self.body = input.data
            self.path = input.path
            self.content_length = input.content_length
            self.content_type = input.content_type

    @property
    def created(self):
        """Capture time as a datetime."""
        return datetime.datetime.fromtimestamp(self.time)

    def dump(self):
        return msgpack.dumps(self.__dict__)

    @staticmethod
    def load(data):
        """Inverse of dump()."""
        r = Request()
        r.__dict__ = msgpack.loads(data)
        return r

    @staticmethod
    def _prettify(value):
        # Pretty-print values that hold JSON; pass everything else through.
        try:
            return json.dumps(json.loads(value), sort_keys=True, indent=2)
        except (ValueError, TypeError):
            return value

    def __iter__(self):
        """Yield (name, value) pairs: sorted plain fields first, then
        sorted file entries (dict values)."""
        if self.form_data:
            if hasattr(self.form_data, 'items'):
                items = self.form_data.items()
            else:
                items = self.form_data
            out = [(k, self._prettify(v)) for k, v in items]
        else:
            out = [('body', self._prettify(self.body))]
        fields = [(k, v) for k, v in out if type(v) is not dict]
        files = [(k, v) for k, v in out if type(v) is dict]
        return iter(sorted(fields) + sorted(files))
class Request(object):
    """A captured HTTP request, including the raw wire bytes."""

    ignore_headers = Setting('ignore_headers', default=[])
    max_raw_size = Setting('max_raw_size', default=1024 * 10)  # bytes kept

    def __init__(self, input=None):
        # A bare Request() (no input) is an empty shell used by load().
        if input:
            self.id = tinyid(6)
            self.time = time.time()
            self.remote_addr = input.headers.get('X-Forwarded-For',
                                                 input.remote_addr)
            self.method = input.method
            self.headers = dict(input.headers)
            for header in self.ignore_headers:
                self.headers.pop(header, None)
            self.query_string = input.query_string
            self.form_data = []
            for k in input.values:
                self.form_data.append([k, input.values[k]])
            self.body = input.data
            self.path = input.path
            self.content_length = input.content_length
            self.content_type = input.content_type
            # This is where the magic of capture.py comes in
            self.raw = input.environ['raw'].getvalue()
            for header in self.ignore_headers:
                # re.escape: header names come from configuration and must
                # be matched literally, not interpreted as regex syntax.
                self.raw = re.sub(r'{}: [^\n]+\n'.format(re.escape(header)),
                                  '',
                                  self.raw,
                                  flags=re.IGNORECASE)
            if len(self.raw) > self.max_raw_size:
                self.raw = self.raw[0:self.max_raw_size]

    def to_dict(self):
        """Public dict view of the request (excludes raw)."""
        return dict(
            id=self.id,
            time=self.time,
            remote_addr=self.remote_addr,
            method=self.method,
            headers=self.headers,
            query_string=self.query_string,
            form_data=self.form_data,
            body=self.body,
            path=self.path,
            content_length=self.content_length,
            content_type=self.content_type,
        )

    @property
    def created(self):
        """Capture time as a datetime."""
        return datetime.datetime.fromtimestamp(self.time)

    def dump(self):
        return msgpack.dumps(self.__dict__)

    @staticmethod
    def load(data):
        """Inverse of dump()."""
        r = Request()
        r.__dict__ = msgpack.loads(data)
        return r

    def __iter__(self):
        """Yield (name, value) pairs: sorted plain fields first, then
        sorted file entries (dict values). JSON payloads are pretty-printed."""
        out = []
        if self.form_data:
            if hasattr(self.form_data, 'items'):
                items = self.form_data.items()
            else:
                items = self.form_data
            for k, v in items:
                try:
                    outval = json.dumps(json.loads(v),
                                        sort_keys=True,
                                        indent=2)
                except (ValueError, TypeError):
                    outval = v
                out.append((k, outval))
        else:
            try:
                out = (('body',
                        json.dumps(json.loads(self.body),
                                   sort_keys=True,
                                   indent=2)), )
            except (ValueError, TypeError):
                out = (('body', self.body), )
        # Sort by field/file then by field name
        files = list()
        fields = list()
        for (k, v) in out:
            if type(v) is dict:
                files.append((k, v))
            else:
                fields.append((k, v))
        return iter(sorted(fields) + sorted(files))