def main(args):
    ''' Monitor Neubot state via command line '''
    try:
        options, arguments = getopt.getopt(args[1:], 'D:v')
    except getopt.error:
        sys.exit('Usage: neubot api.client [-v] [-D property=value]')
    if arguments:
        sys.exit('Usage: neubot api.client [-v] [-D property=value]')

    address, port, verbosity = '127.0.0.1', '9774', 0
    for name, value in options:
        if name == '-D':
            # Only 'address' and 'port' properties are recognized
            name, value = value.split('=', 1)
            if name == 'address':
                address = value
            elif name == 'port':
                port = value
        elif name == '-v':
            verbosity += 1

    # Long-polling loop: pass the last-seen event time back to the
    # server via '?t=' so /api/state blocks until something changes.
    timestamp = 0
    while True:
        try:
            connection = lib_http.HTTPConnection(address, port)
            connection.set_debuglevel(verbosity)
            connection.request('GET', '/api/state?t=%d' % timestamp)
            response = connection.getresponse()
            if response.status != 200:
                raise RuntimeError('Bad HTTP status: %d' % response.status)
            if response.getheader("content-type") != "application/json":
                # BUGFIX: error message said "contenty type"
                raise RuntimeError("Unexpected content type")
            octets = response.read()
            dictionary = json.loads(octets)
            logging.info("APIStateTracker: received JSON: %s",
                         json.dumps(dictionary, ensure_ascii=True))
            if not "events" in dictionary:
                continue
            if not "current" in dictionary:
                raise RuntimeError("Incomplete dictionary")
            timestamp = max(0, int(dictionary["t"]))
            # BUGFIX: the original called json.dumps(dictionary, sys.stdout),
            # which passes sys.stdout as `skipkeys` and throws the result
            # away; json.dump() actually writes the JSON to stdout.
            json.dump(dictionary, sys.stdout)
        except KeyboardInterrupt:
            break
        except:
            # Deliberately broad: this is a best-effort monitor loop and
            # must survive transient network/daemon failures.
            error = asyncore.compact_traceback()
            logging.error('Exception: %s', str(error))
            time.sleep(5)
def do_collect(self, stream, request):
    ''' Invoked on GET /speedtest/collect '''
    # Translate the legacy XML collect request into the JSON format
    # understood by the negotiate server, then hand it over.
    stream.response_rewriter = self._rewrite_response
    request.uri = '/collect/speedtest'
    xmlreq = marshal.unmarshal_object(request.body.read(),
                                      'application/xml', SpeedtestCollect)
    message = {}
    message['uuid'] = xmlreq.client
    # int(float(...)) works around old clients sending float timestamps
    message['timestamp'] = int(float(xmlreq.timestamp))
    message['internal_address'] = xmlreq.internalAddress
    message['real_address'] = xmlreq.realAddress
    message['remote_address'] = xmlreq.remoteAddress
    message['connect_time'] = xmlreq.connectTime
    message['latency'] = xmlreq.latency
    message['download_speed'] = xmlreq.downloadSpeed
    message['upload_speed'] = xmlreq.uploadSpeed
    message['privacy_informed'] = xmlreq.privacy_informed
    message['privacy_can_collect'] = xmlreq.privacy_can_collect
    message['privacy_can_share'] = xmlreq.privacy_can_share
    message['platform'] = xmlreq.platform
    message['neubot_version'] = xmlreq.neubot_version
    # XXX Here we don't rewrite content-length which becomes bogus
    request['content-type'] = 'application/json'
    request.body = StringIO.StringIO(json.dumps(message))
    NEGOTIATE_SERVER.process_request(stream, request)
def check_response(self, response):
    ''' Validate a /api/state response and process its dictionary.

    Raises ValueError when the response is malformed; silently returns
    when the dictionary carries no events.
    '''
    if response.code != "200":
        raise ValueError("Bad HTTP response code")
    if response["content-type"] != "application/json":
        # BUGFIX: error message said "contenty type"
        raise ValueError("Unexpected content type")
    octets = response.body.read()
    dictionary = json.loads(octets)
    LOG.debug("APIStateTracker: received JSON: " +
              json.dumps(dictionary, ensure_ascii=True))
    if not "events" in dictionary:
        return
    if not "current" in dictionary:
        raise ValueError("Incomplete dictionary")
    t = dictionary["t"]
    # Exact type check (not isinstance) so that e.g. bool is rejected,
    # matching the original semantics of type(t) == types.IntType
    if type(t) not in (types.IntType, types.LongType):
        raise ValueError("Invalid type for current event time")
    if t < 0:
        raise ValueError("Invalid value for current event time")
    # Remember the event time so the next poll can pass it upstream
    self.timestamp = t
    self.process_dictionary(dictionary)
def handle_connection_made(self, stream): ''' Invoked when the connection is established ''' # Note: this function MUST be callable multiple times logging.debug('skype_negotiate: negotiation in progress...') context = stream.opaque extra = context.extra # TODO(claudiu) Choose a random configuration from: # voice-upstream, voice-downstream, voice-bidirectional. # Thie assumes that neubot only runs on desktop clients, so # there is no need to do a bandwidth check in order to select # which test should be performed. request = {} # No options for now request['type'] = 'voice' request['direction'] = 'upstream' request['src-port'] = 5060 body = six.b(json.dumps(request)) host_header = utils_net.format_epnt((extra['address'], extra['port'])) self.append_request(stream, 'GET', '/negotiate/skype', 'HTTP/1.1') self.append_header(stream, 'Host', host_header) self.append_header(stream, 'User-Agent', utils_version.HTTP_HEADER) self.append_header(stream, 'Content-Type', 'application/json') self.append_header(stream, 'Content-Length', str(len(body))) self.append_header(stream, 'Cache-Control', 'no-cache') self.append_header(stream, 'Pragma', 'no-cache') if extra['authorization']: self.append_header(stream, 'Authorization', extra['authorization']) self.append_end_of_headers(stream) self.append_bytes(stream, body) http_utils.prettyprint_json(request, '>') self.send_message(stream) extra['requests'] += 1
def _api_config(self, stream, request, query):
    ''' Read (GET) or update (POST) the daemon configuration '''
    dictionary = cgi.parse_qs(query)
    if "debug" in dictionary and utils.intify(dictionary["debug"][0]):
        indent, mimetype, sort_keys = 4, "text/plain", True
    else:
        indent, mimetype, sort_keys = None, "application/json", False
    if request.method == "POST":
        updates = qs_to_dictionary(request.body.read())
        privacy.check(updates)
        # Very low barrier to prevent damage from kiddies
        if "agent.interval" in updates:
            interval = int(updates["agent.interval"])
            if interval != 0 and interval < 1380:
                raise ConfigError("Bad agent.interval")
        CONFIG.merge_api(updates, DATABASE.connection())
        STATE.update("config", updates)
        # Empty JSON b/c '204 No Content' is treated as an error
        body = "{}"
    else:
        body = json.dumps(CONFIG.conf, sort_keys=sort_keys, indent=indent)
    response = Message()
    response.compose(code="200", reason="Ok",
                     body=StringIO.StringIO(body), mimetype=mimetype)
    stream.send_response(request, response)
def log_api(stream, request, query):
    ''' Implements /api/log '''

    # Fetch saved logs and request options
    options = cgi.parse_qs(query)
    logs = LOG.listify()

    # Most-recent-first when requested
    if utils.intify(options.get('reversed', ['0'])[0]):
        logs = reversed(logs)

    # Drop entries below the requested verbosity
    verbosity = utils.intify(options.get('verbosity', ['1'])[0])
    if verbosity < 2:
        logs = [entry for entry in logs if entry['severity'] != 'DEBUG']
    if verbosity < 1:
        logs = [entry for entry in logs if entry['severity'] != 'INFO']

    # Either human-readable text or machine-readable JSON
    if utils.intify(options.get('debug', ['0'])[0]):
        lines = ['%(timestamp)d [%(severity)s]\t%(message)s\r\n' % entry
                 for entry in logs]
        body = ''.join(lines).encode('utf-8')
        mimetype = 'text/plain; encoding=utf-8'
    else:
        body = json.dumps(logs)
        mimetype = 'application/json'

    # Compose and send response
    response = Message()
    response.compose(code='200', reason='Ok', body=body, mimetype=mimetype)
    stream.send_response(request, response)
def _start_collect(self, stream, result):
    ''' Start the COLLECT phase '''
    STATE.update('collect')
    logging.debug('raw_negotiate: collect in progress...')
    context = stream.opaque
    extra = context.extra
    # Keep our own copy of the result so it can be compared with /
    # merged into whatever the server sends back
    extra['local_result'] = result
    body = six.b(json.dumps(result))
    host_header = utils_net.format_epnt((extra['address'], extra['port']))
    # Build the POST /collect/raw request header by header; the order
    # of the append_* calls determines the bytes on the wire
    self.append_request(stream, 'POST', '/collect/raw', 'HTTP/1.1')
    self.append_header(stream, 'Host', host_header)
    self.append_header(stream, 'User-Agent', utils_version.HTTP_HEADER)
    self.append_header(stream, 'Content-Type', 'application/json')
    self.append_header(stream, 'Content-Length', str(len(body)))
    self.append_header(stream, 'Cache-Control', 'no-cache')
    self.append_header(stream, 'Pragma', 'no-cache')
    # Collect is the last request: ask the server to close
    self.append_header(stream, 'Connection', 'close')
    if extra['authorization']:
        self.append_header(stream, 'Authorization', extra['authorization'])
    self.append_end_of_headers(stream)
    self.append_bytes(stream, body)
    http_utils.prettyprint_json(result, '>')
    self.send_message(stream)
    context.body = six.StringIO()  # Want to save body
    extra['requests'] += 1
def api_data(stream, request, query):
    ''' Get data stored on the local database '''
    dictionary = cgi.parse_qs(query)

    # Extract query options, falling back to permissive defaults
    test = str(dictionary["test"][0]) if "test" in dictionary else ''
    since = int(dictionary["since"][0]) if "since" in dictionary else -1
    until = int(dictionary["until"][0]) if "until" in dictionary else -1

    # Map test name onto the table that stores its results
    tables = {
        'bittorrent': table_bittorrent,
        'speedtest': table_speedtest,
        'raw': table_raw,
    }
    if test not in tables:
        raise NotImplementedTest("Test not implemented")
    table = tables[test]

    # Human-readable output when debugging
    if "debug" in dictionary and utils.intify(dictionary["debug"][0]):
        indent, mimetype, sort_keys = 4, "text/plain", True
    else:
        indent, mimetype, sort_keys = None, "application/json", False

    lst = table.listify(DATABASE.connection(), since, until)
    body = json.dumps(lst, indent=indent, sort_keys=sort_keys)
    response = Message()
    response.compose(code="200", reason="Ok", body=body,
                     mimetype=mimetype)
    stream.send_response(request, response)
def api_results(stream, request, query):
    ''' Provide results for queried tests '''
    since, until = -1, -1
    test = ''
    dictionary = cgi.parse_qs(query)
    # Use `key in dict` rather than dict.has_key(): has_key() is
    # Python-2-only and inconsistent with the sibling API handlers
    if "test" in dictionary:
        test = str(dictionary["test"][0])
    if "since" in dictionary:
        since = int(dictionary["since"][0])
    if "until" in dictionary:
        until = int(dictionary["until"][0])
    if test == 'bittorrent':
        table = table_bittorrent
    elif test == 'speedtest':
        table = table_speedtest
    else:
        raise NotImplementedTest("Test '%s' is not implemented" % test)
    # Human-readable output when debugging
    indent, mimetype, sort_keys = None, "application/json", False
    if "debug" in dictionary and utils.intify(dictionary["debug"][0]):
        indent, mimetype, sort_keys = 4, "text/plain", True
    response = Message()
    lst = table.listify(DATABASE.connection(), since, until)
    body = json.dumps(lst, indent=indent, sort_keys=sort_keys)
    response.compose(code="200", reason="Ok", body=body,
                     mimetype=mimetype)
    stream.send_response(request, response)
def api_results(stream, request, query):
    ''' Populates www/results.html page '''
    dictionary = cgi.parse_qs(query)
    test = CONFIG['www_default_test_to_show']
    if 'test' in dictionary:
        test = str(dictionary['test'][0])

    # Scan the directory on every request, so changing the description
    # of a test does not require restarting the daemon.
    available_tests = {}
    for filename in os.listdir(TESTDIR):
        if not filename.endswith('.json'):
            continue
        index = filename.rfind('.json')
        if index == -1:
            raise RuntimeError('api_results: internal error')
        available_tests[filename[:index]] = filename
    if test not in available_tests:
        raise NotImplementedTest('Test not implemented')

    # Power users can customize results.html heavily by creating JSON
    # descriptions with local modifications ('.local' files win).
    filepath = utils_path.append(TESTDIR, available_tests[test], False)
    if not filepath:
        raise RuntimeError("api_results: append() path failed")
    localfilepath = filepath + '.local'
    if os.path.isfile(localfilepath):
        filep = open(localfilepath, 'rb')
    else:
        filep = open(filepath, 'rb')
    response_body = json.loads(filep.read())
    filep.close()

    # Extra information needed to populate the results.html selector
    # that chooses which test results must be shown.
    response_body['available_tests'] = available_tests.keys()
    response_body['selected_test'] = test
    descrpath = filepath.replace('.json', '.html')
    if os.path.isfile(descrpath):
        filep = open(descrpath, 'rb')
        response_body['description'] = filep.read()
        filep.close()

    # Provide the web user interface the settings it needs, unless the
    # `.local` file already provided them.
    for variable in COPY_CONFIG_VARIABLES:
        if variable not in response_body:
            response_body[variable] = CONFIG[variable]

    # Note: DO NOT sort keys here: order MUST be preserved
    if 'debug' in dictionary and utils.intify(dictionary['debug'][0]):
        indent, mimetype = 4, 'text/plain'
    else:
        indent, mimetype = None, 'application/json'
    body = json.dumps(response_body, indent=indent)
    response = Message()
    response.compose(code='200', reason='Ok', body=body, mimetype=mimetype)
    stream.send_response(request, response)
def _do_negotiate(self, baton):
    ''' Respond to a /negotiate request '''
    stream, request, position = baton

    # Find the module that owns this negotiation
    module_name = request.uri.replace('/negotiate/', '')
    module = self.modules[module_name]
    request_body = json.load(request.body)

    # A stream is unchoked when its queue position is within the
    # configured parallelism
    parallelism = CONFIG['negotiate.parallelism']
    unchoked = int(position < parallelism)

    response_body = {
        'queue_pos': position,
        'real_address': stream.peername[0],
        'unchoked': unchoked,
    }
    if unchoked:
        # The module must hand back an authorization token
        extra = module.unchoke(stream, request_body)
        if 'authorization' not in extra:
            raise RuntimeError('Negotiate API violation')
        extra.update(response_body)
        response_body = extra
    else:
        response_body['authorization'] = ''

    response = Message()
    response.compose(code='200', reason='Ok',
                     body=json.dumps(response_body),
                     keepalive=True, mimetype='application/json')
    stream.send_response(request, response)
def _finalize_response(self, m, idx):
    ''' Fill in queue position / unchoke status and serialize body '''
    unchoked = idx < self._parallel
    if unchoked:
        # Tell upstream, if we have a new unchoke
        self._mods[m["module"]].unchoke(m)
    body = m["response_body"]
    body["unchoked"] = unchoked
    body["queue_pos"] = idx
    m["response_body"] = json.dumps(body)
def process_request(self, stream, request): ''' Process a /collect or /negotiate HTTP request ''' # # We always pass upstream the collect request. If it is # not authorized the module does not have the identifier in # its global table and will raise a KeyError. # Here we always keepalive=False so the HTTP layer closes # the connection and we are notified that the queue should # be changed. # if request.uri.startswith('/collect/'): module = request.uri.replace('/collect/', '') module = self.modules[module] request_body = json.load(request.body) response_body = module.collect_legacy(stream, request_body, request) response_body = json.dumps(response_body) response = Message() response.compose(code='200', reason='Ok', body=response_body, keepalive=False, mimetype='application/json') stream.send_response(request, response) # # The first time we see a stream, we decide whether to # accept or drop it, depending on the length of the # queue. The decision whether to accept or not depends # on the current queue length and follows the Random # Early Discard algorithm. When we accept it, we also # register a function to be called when the stream is # closed so that we can update the queue. And we # immediately send a response. # When it's not the first time we see a stream, we just # take note that we owe it a response. But we won't # respond until its queue position changes. # elif request.uri.startswith('/negotiate/'): if not stream in self.known: position = len(self.queue) min_thresh = CONFIG['negotiate.min_thresh'] max_thresh = CONFIG['negotiate.max_thresh'] if random.random() < float(position - min_thresh) / ( max_thresh - min_thresh): stream.close() return self.queue.append(stream) self.known.add(stream) stream.atclose(self._update_queue) self._do_negotiate((stream, request, position)) else: stream.opaque = request # For robustness else: raise RuntimeError('Unexpected URI')
def connection_ready(self, stream):
    ''' Negotiate the BitTorrent test with the server '''
    LOG.complete()
    STATE.update("negotiate")
    LOG.start("BitTorrent: negotiating")
    body = json.dumps({"target_bytes": self.conf["bittorrent.bytes.up"]})
    request = Message()
    request.compose(method="GET", pathquery="/negotiate/bittorrent",
                    host=self.host_header, body=body,
                    mimetype="application/json")
    request["authorization"] = self.conf.get("_authorization", "")
    stream.send_request(request)
def _api_configlabels(self, stream, request, query):
    ''' Return the descriptions of the configuration variables '''
    dictionary = cgi.parse_qs(query)
    if "debug" in dictionary and utils.intify(dictionary["debug"][0]):
        indent, mimetype = 4, "text/plain"
    else:
        indent, mimetype = None, "application/json"
    serialized = json.dumps(CONFIG.descriptions, sort_keys=True,
                            indent=indent)
    response = Message()
    response.compose(code="200", reason="Ok",
                     body=StringIO.StringIO(serialized),
                     mimetype=mimetype)
    stream.send_response(request, response)
def collect(self, m):
    ''' Let the module validate the result, then serialize it '''
    #
    # WARNING! Here it would be possible to prevent choked streams
    # from invoking collect(), but we still could not tell from here
    # whether the test is complete. To avoid creating a false sense
    # of security, the whole decision of whether to accept the
    # result is delegated to the upstream module. Period.
    #
    self._mods[m["module"]].collect(m)
    m["response_body"] = json.dumps(m["response_body"])
def connection_ready(self, stream):
    ''' Negotiate the BitTorrent test with the server '''
    uri = "http://%s/" % self.host_header
    logging.info("BitTorrent: connecting to %s ... done", uri)
    STATE.update("negotiate")
    logging.info("BitTorrent: negotiating in progress...")
    parameters = {
        "test_version": CONFIG['bittorrent_test_version'],
        "target_bytes": self.conf['bittorrent.bytes.up'],
    }
    request = Message()
    request.compose(method="POST", pathquery="/negotiate/bittorrent",
                    host=self.host_header, body=json.dumps(parameters),
                    mimetype="application/json")
    request["authorization"] = self.conf.get("_authorization", "")
    stream.send_request(request)
def _api_state_complete(self, event, context):
    ''' Comet callback: send the updated state to the client '''
    stream, request, query, t = context
    options = cgi.parse_qs(query)
    if "debug" in options and utils.intify(options["debug"][0]):
        indent, mimetype = 4, "text/plain"
    else:
        indent, mimetype = None, "application/json"
    octets = json.dumps(STATE.dictionarize(), indent=indent)
    response = Message()
    response.compose(code="200", reason="Ok",
                     body=StringIO.StringIO(octets),
                     mimetype=mimetype)
    stream.send_response(request, response)
def _api_state_complete(event, context):
    ''' Callback invoked when the /api/state has changed '''
    stream, request, query, otime = context
    options = cgi.parse_qs(query)
    if "debug" in options and utils.intify(options["debug"][0]):
        indent, mimetype = 4, "text/plain"
    else:
        indent, mimetype = None, "application/json"
    octets = json.dumps(STATE.dictionarize(), indent=indent)
    response = Message()
    response.compose(code="200", reason="Ok", body=octets,
                     mimetype=mimetype)
    stream.send_response(request, response)
def __json_to_mapped_row(result):
    ''' Fill mapped row with result dictionary '''
    client = result['client']
    server = result['server']
    goodput = client['goodput']
    row = {}
    row['timestamp'] = server['goodput']['ticks']
    row['uuid'] = client['uuid']
    row['internal_address'] = client['myname']
    row['real_address'] = server['peername']
    row['remote_address'] = server['myname']
    row['neubot_version'] = client['version']
    row['platform'] = client['platform']
    row['connect_time'] = client['connect_time']
    row['latency'] = client['alrtt_avg']
    # Goodput is measured client side as bytes over elapsed time
    row['download_speed'] = goodput['bytesdiff'] / goodput['timediff']
    # Keep the raw result around for future reprocessing
    row['json_data'] = json.dumps(result)
    return row
def test_body_a_dictionary(self):
    """Make sure we correctly read incoming dictionaries"""
    server = _ServerNegotiate(None)
    server.negotiator = _Negotiator()
    expected = {"abc": 12, "k": "s", "uuu": 1.74}
    # The hook fires when the server answers: the parsed request
    # body must equal the dictionary we serialized
    server.send_response = \
        lambda m: self.assertEquals(expected, m["request_body"])
    message = Message()
    message.compose(pathquery="/collect/abcdefg",
                    body=StringIO.StringIO(json.dumps(expected)),
                    mimetype="application/json")
    server.process_request(None, message)
def connection_ready(self, stream):
    ''' Invoked when the connection is ready '''
    message = {}
    message['accept'] = ['speedtest', 'bittorrent', 'skype']
    message['version'] = utils_version.CANONICAL_VERSION
    message['privacy_informed'] = CONFIG['privacy.informed']
    message['privacy_can_collect'] = CONFIG['privacy.can_collect']
    # Using the old name for backward compatibility
    message['privacy_can_share'] = CONFIG['privacy.can_publish']
    request = Message()
    request.compose(method='GET', pathquery='/rendezvous',
                    mimetype='application/json', keepalive=False,
                    host=self.host_header, body=json.dumps(message))
    stream.send_request(request)
def peer_test_complete(self, stream, download_speed, rtt, target_bytes):
    ''' Invoked when the peer test completes: upload our results '''
    self.success = True
    # NOTE(review): deliberately shadows the `stream` parameter — the
    # collect request must go over the HTTP control stream, not over
    # the stream the test ran on
    stream = self.http_stream

    # Update the downstream channel estimate
    estimate.DOWNLOAD = target_bytes

    self.my_side = {
        # The server will override our timestamp
        "timestamp": utils.timestamp(),
        "uuid": self.conf.get("uuid"),
        "internal_address": stream.myname[0],
        "real_address": self.conf.get("_real_address", ""),
        "remote_address": stream.peername[0],

        "privacy_informed": self.conf.get("privacy.informed", 0),
        "privacy_can_collect": self.conf.get("privacy.can_collect", 0),
        "privacy_can_publish": self.conf.get("privacy.can_publish", 0),

        # Upload speed measured at the server
        "connect_time": rtt,
        "download_speed": download_speed,

        # OS and version info
        # NOTE(review): version string is hard-coded here rather than
        # taken from a central constant — confirm this is intentional
        "neubot_version": utils_version.to_numeric("0.4.12-rc2"),
        "platform": sys.platform,
    }

    logging.info("BitTorrent: collecting in progress...")
    STATE.update("collect")

    s = json.dumps(self.my_side)
    stringio = StringIO.StringIO(s)

    request = Message()
    request.compose(method="POST", pathquery="/collect/bittorrent",
                    body=stringio, mimetype="application/json",
                    host=self.host_header)
    request["authorization"] = self.conf.get("_authorization", "")

    stream.send_request(request)
def test_complete(self):
    """ Invoked when the test is complete """
    logging.info("dash: collect... in progress")
    STATE.update("collect")
    self.state = STATE_COLLECT
    request = Message()
    request.compose(method="POST", pathquery="/collect/dash",
                    body=json.dumps(self.measurements),
                    mimetype="application/json", host=self.host_header)
    request["authorization"] = self.authorization
    conn = self.stream
    conn.set_timeout(15)
    conn.send_request(request)
def _api_speedtest(self, stream, request, query):
    ''' Return speedtest results saved in the local database '''
    since, until = -1, -1
    dictionary = cgi.parse_qs(query)
    # Use `key in dict` rather than dict.has_key(): has_key() is
    # Python-2-only and inconsistent with the sibling API handlers
    if "since" in dictionary:
        since = int(dictionary["since"][0])
    if "until" in dictionary:
        until = int(dictionary["until"][0])
    # Human-readable output when debugging
    indent, mimetype, sort_keys = None, "application/json", False
    if "debug" in dictionary and utils.intify(dictionary["debug"][0]):
        indent, mimetype, sort_keys = 4, "text/plain", True
    response = Message()
    lst = table_speedtest.listify(DATABASE.connection(), since, until)
    s = json.dumps(lst, indent=indent, sort_keys=sort_keys)
    stringio = StringIO.StringIO(s)
    response.compose(code="200", reason="Ok", body=stringio,
                     mimetype=mimetype)
    stream.send_response(request, response)
def log_api(stream, request, query):
    ''' Implements /api/log '''

    #
    # CAVEAT Currently Neubot does not update logs "in real time"
    # using AJAX. If it did, we would run into trouble because each
    # request for /api/log would generate a new access log record.
    # In turn, a new access log record would cause a new "logwritten"
    # event, leading to a log-caused Comet storm.
    #

    # Fetch saved logs and request options
    options = cgi.parse_qs(query)
    logs = LOG.listify()

    # Most-recent-first when requested
    if utils.intify(options.get('reversed', ['0'])[0]):
        logs = reversed(logs)

    # Drop entries below the requested verbosity
    verbosity = utils.intify(options.get('verbosity', ['1'])[0])
    if verbosity < 2:
        logs = [record for record in logs if record['severity'] != 'DEBUG']
    if verbosity < 1:
        logs = [record for record in logs if record['severity'] != 'INFO']

    # Either human-readable text or machine-readable JSON
    if utils.intify(options.get('debug', ['0'])[0]):
        fmt = '%(timestamp)d [%(severity)s]\t%(message)s\r\n'
        body = ''.join([fmt % record for record in logs]).encode('utf-8')
        mimetype = 'text/plain; encoding=utf-8'
    else:
        body = json.dumps(logs)
        mimetype = 'application/json'

    # Compose and send response
    response = Message()
    response.compose(code='200', reason='Ok', body=body, mimetype=mimetype)
    stream.send_response(request, response)
def connection_ready(self, stream):
    ''' Negotiate the dash test with the server '''
    # Guard against negotiating forever
    if self.iterations > MAX_ITERATIONS:
        raise RuntimeError("dash: too many negotiations")
    self.iterations += 1
    STATE.update("negotiate")
    logging.info("dash: negotiate... in progress")
    request = Message()
    request.compose(method="POST", pathquery="/negotiate/dash",
                    host=self.host_header,
                    body=json.dumps({"dash_rates": DASH_RATES}),
                    mimetype="application/json")
    request["authorization"] = self.authorization
    stream.set_timeout(300)
    stream.send_request(request)
def _api_log(self, stream, request, query):
    ''' Return saved logs as JSON or as human-readable text '''
    dictionary = cgi.parse_qs(query)
    if "debug" in dictionary and utils.intify(dictionary["debug"][0]):
        # Human-readable: one "<timestamp> [<severity>]\t<message>"
        # line per log record
        stringio = StringIO.StringIO()
        for row in LOG.listify():
            line = "%d [%s]\t%s" % (row["timestamp"], row["severity"],
                                    row["message"])
            stringio.write(line.encode("utf-8"))
            stringio.write("\r\n")
        stringio.seek(0)
        mimetype = "text/plain"
    else:
        stringio = StringIO.StringIO(json.dumps(LOG.listify()))
        mimetype = "application/json"
    response = Message()
    response.compose(code="200", reason="Ok", body=stringio,
                     mimetype=mimetype)
    stream.send_response(request, response)
def handle_connection_made(self, stream): ''' Invoked when the connection is established ''' # Note: this function MUST be callable multiple times logging.debug('raw_negotiate: negotiation in progress...') context = stream.opaque extra = context.extra request = {} # No options for now body = six.b(json.dumps(request)) host_header = utils_net.format_epnt((extra['address'], extra['port'])) self.append_request(stream, 'POST', '/negotiate/raw', 'HTTP/1.1') self.append_header(stream, 'Host', host_header) self.append_header(stream, 'User-Agent', utils_version.HTTP_HEADER) self.append_header(stream, 'Content-Type', 'application/json') self.append_header(stream, 'Content-Length', str(len(body))) self.append_header(stream, 'Cache-Control', 'no-cache') self.append_header(stream, 'Pragma', 'no-cache') if extra['authorization']: self.append_header(stream, 'Authorization', extra['authorization']) self.append_end_of_headers(stream) self.append_bytes(stream, body) http_utils.prettyprint_json(request, '>') self.send_message(stream) context.body = six.StringIO() # Want to save body extra['requests'] += 1
def _api(self, stream, request, query):
    ''' List the available API entry points '''
    body = json.dumps(sorted(self._dispatch.keys()), indent=4)
    response = Message()
    response.compose(code="200", reason="Ok",
                     body=StringIO.StringIO(body))
    stream.send_response(request, response)