class HTTPReporter(Reporter):
    """Exposes metrics via HTTP.

    For web applications, you should almost certainly just use your existing
    framework's capabilities. This is for applications that don't have HTTP
    easily available.
    """

    def __init__(self, port, registry=None):
        """
        :param port: Port to listen on
        :param registry: The registry to report from, defaults to the global one
        """
        super(HTTPReporter, self).__init__(registry)
        self.port = port
        self.thread = None
        self.httpd = None

    def start(self):
        class _RequestHandler(BaseHTTPRequestHandler):
            def do_GET(inner_self):
                inner_self.send_response(200)
                response_string = json.dumps(self.registry.get_stats())
                inner_self.send_header('Content-Type', 'application/json')
                inner_self.send_header('Content-Length', len(response_string))
                inner_self.end_headers()
                inner_self.wfile.write(response_string.encode('utf-8'))

        server_address = '', self.port
        self.httpd = HTTPServer(server_address, _RequestHandler)
        self.thread = Thread(target=self.httpd.serve_forever)
        self.thread.start()

    def stop(self):
        self.httpd.shutdown()
        self.thread.join()
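# The lifecycle above -- serve_forever() on a background thread, then
# shutdown() followed by join() -- can be exercised standalone with only the
# standard library. A minimal sketch (Python 3 imports; a plain dict stands in
# for registry.get_stats(), since the Reporter/registry API isn't shown here):
import json
from http.server import BaseHTTPRequestHandler, HTTPServer
from threading import Thread
from urllib.request import urlopen

stats = {'requests': 42}  # stand-in for self.registry.get_stats()


class _StatsHandler(BaseHTTPRequestHandler):
    def do_GET(self):
        body = json.dumps(stats).encode('utf-8')
        self.send_response(200)
        self.send_header('Content-Type', 'application/json')
        self.send_header('Content-Length', str(len(body)))
        self.end_headers()
        self.wfile.write(body)


httpd = HTTPServer(('', 0), _StatsHandler)  # port 0: let the OS pick a free port
thread = Thread(target=httpd.serve_forever)
thread.start()
try:
    print(urlopen('http://127.0.0.1:%d/' % httpd.server_address[1]).read())
finally:
    httpd.shutdown()      # unblocks serve_forever() in the worker thread
    thread.join()
    httpd.server_close()  # release the listening socket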
def mirror_server(mirrors_dict):
    mirror_file_path = '/mirrors.json'
    mirror_corrupt_file_path = '/corrupt_mirrors.json'
    mirror_json_varname = 'ci_repos'
    mirror_data = {mirror_json_varname: mirrors_dict}
    mirror_json = json.dumps(mirror_data).encode('utf8')

    class MirrorRequestHandler(BaseHTTPRequestHandler):
        def do_GET(self):
            if self.path == mirror_file_path:
                self.send_response(200)
                self.send_header("Content-type", 'application/json')
                self.end_headers()
                self.wfile.write(mirror_json)
            elif self.path == mirror_corrupt_file_path:
                self.send_response(200)
                self.send_header("Content-type", 'application/json')
                self.end_headers()
                # Must be bytes, like mirror_json above
                self.wfile.write(b'{"this": "is", "bad": "json"')
            else:
                self.send_error(404)

    for attempt in range(0, 20):
        server_address = ('127.0.0.1', randrange(8765, 8876))
        try:
            server = HTTPServer(server_address, MirrorRequestHandler)
        except socket.error as e:
            if e.errno == 98:  # EADDRINUSE (Linux): port taken, try another
                continue
            raise
        break
    else:
        raise RuntimeError("Failed to allocate port for mirror_server fixture")
    server_url = 'http://{0}:{1}'.format(*server_address)
    sthread = Thread(target=server.serve_forever)
    sthread.start()
    try:
        # Wait for http server to start
        sleep(0.1)
        # ensure we won't implicitly try to use proxies to access local server
        old_env = dict(
            (k, environ.pop(k))
            for k in ('http_proxy', 'HTTP_PROXY') if k in environ
        )
        try:
            yield dict(
                mirror_url=urljoin(server_url, mirror_file_path),
                json_varname=mirror_json_varname,
                bad_path_url=urljoin(server_url, '/bad_file'),
                bad_port_url=urljoin(
                    'http://{0}:8764'.format(server_address[0]),
                    mirror_file_path),
                corrupt_url=urljoin(server_url, mirror_corrupt_file_path),
            )
        finally:
            environ.update(old_env)
    finally:
        server.shutdown()
        sthread.join()
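# The retry loop above compares errno against the literal 98, which is
# EADDRINUSE only on Linux (it is 48 on macOS, for example). A hedged,
# self-contained sketch of the same idea using errno.EADDRINUSE instead;
# bind_free_port is an illustrative name, not from the original fixture:
import errno
import socket
from http.server import BaseHTTPRequestHandler, HTTPServer
from random import randrange


def bind_free_port(handler=BaseHTTPRequestHandler, tries=20):
    """Try random ports until one binds; treat 'address in use' as 'retry'."""
    for _ in range(tries):
        address = ('127.0.0.1', randrange(8765, 8876))
        try:
            return HTTPServer(address, handler)
        except socket.error as e:
            if e.errno != errno.EADDRINUSE:
                raise
    raise RuntimeError('Failed to allocate a port')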
def shutdown(self):
    """
    Stop the server and free up the port
    """
    # First call superclass shutdown()
    HTTPServer.shutdown(self)
    # We also need to manually close the socket
    self.socket.close()
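# A self-contained sketch of the same technique: an HTTPServer subclass whose
# shutdown() also closes the listening socket, so the port is usable again
# immediately. StoppableHTTPServer is an illustrative name, not from the
# original project.
import threading
from http.server import BaseHTTPRequestHandler, HTTPServer


class StoppableHTTPServer(HTTPServer):
    def shutdown(self):
        HTTPServer.shutdown(self)  # stop the serve_forever() loop
        self.socket.close()        # release the listening socket


server = StoppableHTTPServer(('127.0.0.1', 0), BaseHTTPRequestHandler)
port = server.server_address[1]
worker = threading.Thread(target=server.serve_forever)
worker.start()
server.shutdown()
worker.join()
# The port is free again and can be rebound right away:
HTTPServer(('127.0.0.1', port), BaseHTTPRequestHandler).server_close()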
class SimpleHTTPService(ServerCustomService):
    """Simple HTTP Honeycomb Service."""

    httpd = None

    def __init__(self, *args, **kwargs):
        super(SimpleHTTPService, self).__init__(*args, **kwargs)

    def alert(self, request):
        """Raise an alert."""
        params = {
            EVENT_TYPE_FIELD_NAME: SIMPLE_HTTP_ALERT_TYPE_NAME,
            ORIGINATING_IP_FIELD_NAME: request.client_address[0],
            ORIGINATING_PORT_FIELD_NAME: request.client_address[1],
            REQUEST_FIELD_NAME: " ".join([request.command, request.path]),
        }
        self.add_alert_to_queue(params)

    def on_server_start(self):
        """Initialize Service."""
        os.chdir(os.path.join(os.path.dirname(__file__), "www"))
        requestHandler = HoneyHTTPRequestHandler
        requestHandler.alert = self.alert
        requestHandler.logger = self.logger
        # requestHandler.server_version = self.service_args.get("version", DEFAULT_SERVER_VERSION)

        port = self.service_args.get("port", DEFAULT_PORT)
        threading = self.service_args.get("threading", False)
        if threading:
            self.httpd = ThreadingHTTPServer(("", port), requestHandler)
        else:
            self.httpd = HTTPServer(("", port), requestHandler)

        self.signal_ready()
        self.logger.info("Starting {}Simple HTTP service on port: {}".format(
            "Threading " if threading else "", port))
        self.httpd.serve_forever()

    def on_server_shutdown(self):
        """Shut down gracefully."""
        if self.httpd:
            self.httpd.shutdown()
            self.logger.info("Simple HTTP service stopped")
            self.httpd = None

    def test(self):
        """Test service alerts and return a list of triggered event types."""
        event_types = list()
        self.logger.debug("executing service test")
        requests.get("http://localhost:{}/".format(
            self.service_args.get("port", DEFAULT_PORT)))
        event_types.append(SIMPLE_HTTP_ALERT_TYPE_NAME)
        return event_types

    def __str__(self):
        return "Simple HTTP"
class TestDiscovery(unittest.TestCase):
    def setUp(self):
        if LIVE_CAMERA:
            multicast_ip = pysony.SSDP_ADDR
        else:
            multicast_ip = '224.0.0.111'
            self.ssdp_server = SSDPServer(multicast_ip)
            self.ssdp_server.register(
                manifestation=None,
                usn=uuid1().urn,
                st=pysony.SSDP_ST,
                location='http://127.0.0.1:64321/dd.xml',
            )
            FileRequestHandler.FILE = DD_FILE
            self.http = HTTPServer(('localhost', 64321), FileRequestHandler)
            threading.Thread(target=self.ssdp_server.run).start()
            threading.Thread(target=self.http.serve_forever).start()
        self.cp = pysony.ControlPoint(multicast_ip)

    def tearDown(self):
        self.cp.close()
        if not LIVE_CAMERA:
            self.ssdp_server.kill.set()
            self.http.shutdown()
            self.http.socket.close()

    def test_discover(self):
        result = self.cp.discover()
        assert result == ['http://192.168.122.1:8080']

    def test_ssdp_cycle(self):
        self.cp._send_ssdp(duration=1)
        responses = list(self.cp._listen_for_discover(duration=1))
        self.assertEqual(len(responses), 1)
        output = self.cp._parse_ssdp_response(responses[0])
        loc = urlparse(output['location'])
        if LIVE_CAMERA:
            assert loc.hostname == '192.168.122.1'
        else:
            assert loc.hostname == '127.0.0.1'
        assert loc.path.endswith('dd.xml')
        assert loc.port is not None
        assert output['st'] == pysony.SSDP_ST

    def test_parse_device_definition(self):
        with open(DD_FILE, 'rb') as fh:
            result = self.cp._parse_device_definition(fh)
        expected = {
            'camera': 'http://192.168.122.1:8080/sony',
            'accessControl': 'http://192.168.122.1:8080/sony',
            'guide': 'http://192.168.122.1:8080/sony',
        }
        assert result == expected
def oauth_server():
    # Start the OAuth server on a random port in the background
    server = HTTPServer(('', 0), OAuthHandler)
    server.url = 'http://{0}:{1}/'.format(*server.server_address)
    thread = threading.Thread(target=server.serve_forever)
    thread.start()
    try:
        yield server
    finally:
        server.shutdown()
        thread.join()
        server.server_close()
def run(self):
    """
    Runs the server using Python's simple HTTPServer.
    TODO: make this multithreaded.
    """
    httpd = HTTPServer((self.host, self.port), self._Handler)
    sa = httpd.socket.getsockname()
    serve_message = "Serving HTTP on {host} port {port} (http://{host}:{port}/) ..."
    print(serve_message.format(host=sa[0], port=sa[1]))
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        print("\nKeyboard interrupt received, exiting.")
        httpd.shutdown()
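# The TODO above asks for a multithreaded version. On Python 3.7+ the only
# change needed is constructing http.server.ThreadingHTTPServer instead of
# HTTPServer; on older versions the same thing can be assembled from
# socketserver.ThreadingMixIn. A hedged, self-contained sketch of the latter
# (ThreadedHTTPServer is an illustrative name, not from the original code):
import socketserver
from http.server import HTTPServer, SimpleHTTPRequestHandler


class ThreadedHTTPServer(socketserver.ThreadingMixIn, HTTPServer):
    daemon_threads = True  # don't let in-flight request threads block exit


if __name__ == '__main__':
    httpd = ThreadedHTTPServer(('127.0.0.1', 0), SimpleHTTPRequestHandler)
    print("Serving HTTP on http://{0}:{1}/ ...".format(*httpd.server_address))
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        print("\nKeyboard interrupt received, exiting.")
        httpd.shutdown()
        httpd.server_close()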
def http_server():
    """
    Yields an HTTP server object, which stores JSON data from received POST
    requests in a .post_data queue.
    """
    PostHandler.post_data = []
    server = HTTPServer(("", 0), PostHandler)
    start_thread = threading.Thread(target=server.serve_forever)
    start_thread.daemon = True
    start_thread.start()
    yield server
    server.shutdown()
    start_thread.join()
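# PostHandler itself isn't shown above. A hedged sketch of a handler consistent
# with the fixture's contract (JSON bodies of POST requests are appended to a
# class-level .post_data list); the details are an assumption, not the
# project's actual handler:
import json
from http.server import BaseHTTPRequestHandler


class PostHandler(BaseHTTPRequestHandler):
    post_data = []  # reset by the fixture before each use

    def do_POST(self):
        length = int(self.headers.get('Content-Length', 0))
        body = self.rfile.read(length)
        self.post_data.append(json.loads(body))
        self.send_response(200)
        self.end_headers()

    def log_message(self, format, *args):
        # keep test output quiet
        pass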
def test():
    host = 'localhost'
    # When I use port 0 here, it works for the first fetch and the
    # next one gets connection refused. Bummer. So instead, pick a
    # port that's *probably* not in use.
    import os
    port = (os.getpid() % 31000) + 1024

    server = HTTPServer((host, port), FetcherTestHandler)

    import threading
    server_thread = threading.Thread(target=server.serve_forever)
    server_thread.setDaemon(True)
    server_thread.start()

    run_fetcher_tests(server)

    server.shutdown()
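# Other snippets in this collection bind to port 0 and read the kernel-assigned
# port back from server.server_address rather than deriving one from the pid.
# Whether that sidesteps the "connection refused" issue noted above depends on
# how run_fetcher_tests builds its URLs, which isn't shown; this is only a
# sketch of the port-0 pattern:
import threading
from http.server import BaseHTTPRequestHandler, HTTPServer

server = HTTPServer(('localhost', 0), BaseHTTPRequestHandler)  # kernel picks the port
host, port = server.server_address[:2]
print('test server listening on http://%s:%d/' % (host, port))
server_thread = threading.Thread(target=server.serve_forever, daemon=True)
server_thread.start()
server.shutdown()
server_thread.join()
server.server_close()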
def mirror_server(mirrors_dict):
    mirror_file_path = '/mirrors.json'
    mirror_json_varname = 'ci_repos'
    mirror_data = {mirror_json_varname: mirrors_dict}
    mirror_json = json.dumps(mirror_data).encode('utf8')

    class MirrorRequestHandler(BaseHTTPRequestHandler):
        def do_GET(self):
            if self.path == mirror_file_path:
                self.send_response(200)
                self.send_header("Content-type", 'application/json')
                self.end_headers()
                self.wfile.write(mirror_json)
            else:
                self.send_error(404)

    server_address = ('127.0.0.1', 8765)
    server_url = 'http://{0}:{1}'.format(*server_address)
    server = HTTPServer(server_address, MirrorRequestHandler)
    sthread = Thread(target=server.serve_forever)
    sthread.start()
    try:
        # Wait for http server to start
        sleep(0.1)
        # ensure we won't implicitly try to use proxies to access local server
        old_env = dict(
            (k, environ.pop(k))
            for k in ('http_proxy', 'HTTP_PROXY') if k in environ
        )
        try:
            yield dict(
                mirror_url=urljoin(server_url, mirror_file_path),
                json_varname=mirror_json_varname,
                bad_path_url=urljoin(server_url, '/bad_file'),
                bad_port_url=urljoin(
                    'http://{0}:8766'.format(server_address[0]),
                    mirror_file_path),
            )
        finally:
            environ.update(old_env)
    finally:
        server.shutdown()
        sthread.join()
def _zero_instance_app_through_http():
    class JSONRequestHandler(BaseHTTPRequestHandler):
        def do_GET(self):  # noqa: N802
            self.send_response(200)
            self.send_header("Content-type", "application/json")
            self.end_headers()
            self.wfile.write(
                open('tests/data/marathon/apps/zero_instance_sleep.json',
                     'rb').read())

    host = 'localhost'
    port = 12345
    server = HTTPServer((host, port), JSONRequestHandler)
    thread = threading.Thread(target=server.serve_forever)
    thread.setDaemon(True)
    thread.start()

    with app('http://{}:{}'.format(host, port), 'zero-instance-app'):
        try:
            yield
        finally:
            server.shutdown()
def test_timeout(self):
    '''
    Test timeout handling for stale requests.
    '''
    original_timeout = suseapi.browser.DEFAULT_TIMEOUT
    suseapi.browser.DEFAULT_TIMEOUT = 0.1
    server = HTTPServer(('localhost', 0), TimeoutHTTPHandler)
    port = server.server_address[1]
    server_thread = threading.Thread(target=server.serve_forever)
    server_thread.daemon = False
    server_thread.start()
    try:
        scraper = WebScraper(
            None, None,
            'http://localhost:%d' % port,
            transport='urllib3')
        scraper.request('foo')
        scraper.browser.doc.choose_form(number=0)
        self.assertRaises(WebScraperError, scraper.submit)
        self.assertRaises(WebScraperError, scraper.request, 'bar?')
    finally:
        suseapi.browser.DEFAULT_TIMEOUT = original_timeout
        server.shutdown()
        server_thread.join()
class OAuthHelper(object):

    params = OAuthHandler.params

    def __init__(self, reddit, term, config):
        self.term = term
        self.reddit = reddit
        self.config = config

        # Wait to initialize the server, we don't want to reserve the port
        # unless we know that the server needs to be used.
        self.server = None

        self.reddit.set_oauth_app_info(self.config['oauth_client_id'],
                                       self.config['oauth_client_secret'],
                                       self.config['oauth_redirect_uri'])

        # Reddit's mobile website works better on terminal browsers
        if not self.term.display:
            if '.compact' not in self.reddit.config.API_PATHS['authorize']:
                self.reddit.config.API_PATHS['authorize'] += '.compact'

    def authorize(self):
        self.params.update(state=None, code=None, error=None)

        # If we already have a token, request new access credentials
        if self.config.refresh_token:
            with self.term.loader('Logging in'):
                try:
                    self.reddit.refresh_access_information(
                        self.config.refresh_token)
                except (HTTPException, OAuthException) as e:
                    # Reddit didn't accept the refresh-token
                    # This appears to throw a generic 400 error instead of the
                    # more specific invalid_token message that it used to send
                    if isinstance(e, HTTPException):
                        if e._raw.status_code != 400:
                            # No special handling if the error is something
                            # temporary like a 5XX.
                            raise e
                    # Otherwise we know the token is bad, so we can remove it.
                    _logger.exception(e)
                    self.clear_oauth_data()
                    raise InvalidRefreshToken(
                        ' Invalid user credentials!\n'
                        'The cached refresh token has been removed')
            return

        state = uuid.uuid4().hex
        authorize_url = self.reddit.get_authorize_url(
            state, scope=self.config['oauth_scope'], refreshable=True)

        if self.server is None:
            address = ('', self.config['oauth_redirect_port'])
            self.server = HTTPServer(address, OAuthHandler)

        if self.term.display:
            # Open a background browser (e.g. firefox) which is non-blocking.
            # The server will block until it responds to its first request,
            # at which point we can check the callback params.
            OAuthHandler.shutdown_on_request = True
            with self.term.loader('Opening browser for authorization'):
                self.term.open_browser(authorize_url)
                self.server.serve_forever()
            if self.term.loader.exception:
                # Don't need to call server.shutdown() because serve_forever()
                # is wrapped in a try-finally that does it for us.
                return
        else:
            # Open the terminal webbrowser in a background thread and wait
            # for the user to close the process. Once the process is closed,
            # the server loop is stopped and we can check if the user has
            # hit the callback URL.
            OAuthHandler.shutdown_on_request = False
            with self.term.loader('Redirecting to reddit', delay=0):
                # This load message exists to provide user feedback
                time.sleep(1)

            thread = threading.Thread(target=self.server.serve_forever)
            thread.daemon = True
            thread.start()
            try:
                self.term.open_browser(authorize_url)
            except Exception as e:
                # If an exception is raised it will be seen by the thread
                # so we don't need to explicitly shutdown() the server
                _logger.exception(e)
                self.term.show_notification('Browser Error', style='Error')
            else:
                self.server.shutdown()
            finally:
                thread.join()

        if self.params['error'] == 'access_denied':
            self.term.show_notification('Denied access', style='Error')
            return
        elif self.params['error']:
            self.term.show_notification('Authentication error', style='Error')
            return
        elif self.params['state'] is None:
            # Something went wrong but it's not clear what happened
            return
        elif self.params['state'] != state:
            self.term.show_notification('UUID mismatch', style='Error')
            return

        with self.term.loader('Logging in'):
            info = self.reddit.get_access_information(self.params['code'])
        if self.term.loader.exception:
            return

        message = 'Welcome {}!'.format(self.reddit.user.name)
        self.term.show_notification(message)

        self.config.refresh_token = info['refresh_token']
        if self.config['persistent']:
            self.config.save_refresh_token()

    def clear_oauth_data(self):
        self.reddit.clear_authentication()
        self.config.delete_refresh_token()
class IPCamTrendnetTvIp100Service(ServerCustomService):
    """IP Cam TRENDnet TV-IP100 Service."""

    httpd = None

    def alert(self, request, event):
        """Raise an alert."""
        params = {
            EVENT_TYPE_FIELD_NAME: event,
            ORIGINATING_IP_FIELD_NAME: request.client_address[0],
            ORIGINATING_PORT_FIELD_NAME: request.client_address[1],
            REQUEST_FIELD_NAME: " ".join([request.command, request.path]),
        }
        self.add_alert_to_queue(params)

    def on_server_start(self):
        """Initialize Service."""
        os.chdir(os.path.join(os.path.dirname(__file__), "www"))
        requestHandler = TrendnetTVIP100CamRequestHandler
        requestHandler.alert = self.alert
        requestHandler.logger = self.logger
        requestHandler.server_version = self.service_args.get("version", DEFAULT_SERVER_VERSION)
        requestHandler.image_src_url = self.service_args.get("image_src_url", None)
        requestHandler.image_src_path = self.service_args.get("image_src_path", None)
        if requestHandler.image_src_path and requestHandler.image_src_url:
            raise ValueError("cannot process both image_src_path and image_src_url")
        if not requestHandler.image_src_path and not requestHandler.image_src_url:
            raise ValueError("image_src_path or image_src_url must be provided")

        port = self.service_args.get("port")
        threading = self.service_args.get("threading", False)
        if threading:
            self.httpd = ThreadingHTTPServer(("", port), requestHandler)
        else:
            self.httpd = HTTPServer(("", port), requestHandler)

        self.signal_ready()
        self.logger.info(
            "Starting {}IP Cam TRENDnet TV-IP100 service on port: {}".format(
                "Threading " if threading else "", port))
        self.httpd.serve_forever()

    def on_server_shutdown(self):
        """Shut down gracefully."""
        if self.httpd:
            self.httpd.shutdown()
            self.logger.info("IP Cam TRENDnet TV-IP100 service stopped")
            self.httpd = None

    def test(self):
        """Test service alerts and return a list of triggered event types."""
        event_types = list()
        self.logger.debug("executing service test")

        # basic screen
        requests.get("http://localhost:{}".format(self.service_args.get("port")))
        # camera shot
        requests.get("http://localhost:{}{}".format(self.service_args.get("port"), CAMERA_IMAGE_PATH))
        # Invalid url
        requests.get("http://localhost:{}/asdasdad.html".format(self.service_args.get("port")))
        # / redirect
        requests.get("http://localhost:{}/".format(self.service_args.get("port")))

        # One alert for authorization attempt
        requests.get("http://localhost:{}/content.html".format(self.service_args.get("port")),
                     headers={"Authorization": "username=\"test\""})
        event_types.append(TRENDNET_ADMIN_ACCESS_EVENT)

        # And for POST
        requests.post("http://localhost:{}/content.html".format(self.service_args.get("port")), data={})
        event_types.append(TRENDNET_ADMIN_POST_ATTEMPT)

        return event_types

    def __str__(self):
        return "IP Cam TRENDnet TV-IP100"
__test__ = False

if __name__ == '__main__':
    import eventlet
    eventlet.monkey_patch()

    from six.moves.BaseHTTPServer import (
        HTTPServer,
        BaseHTTPRequestHandler,
    )
    import threading

    server = HTTPServer(('localhost', 0), BaseHTTPRequestHandler)
    thread = threading.Thread(target=server.serve_forever)

    # Before fixing it the code would never get past this line because:
    # * socketserver.BaseServer that's used behind the scenes here uses
    #   selectors.PollSelector if it's available and we don't have a green poll
    #   implementation, so this just couldn't work
    # * making socketserver use selectors.SelectSelector wasn't enough, as
    #   until now we just failed to monkey patch the selectors module
    #
    # Due to the issues above this thread.start() call effectively behaved
    # like calling server.serve_forever() directly in the current thread
    #
    # Original report: https://github.com/eventlet/eventlet/issues/249
    thread.start()

    server.shutdown()
    print('pass')