def _set_proxy_headers(self):
    """Copy the upstream proxy headers onto this response, skipping excluded ones."""
    for name, value in self.proxy_headers.items():
        if name in RESPONSE_EXCLUDE_HEADERS:
            # Hop-by-hop / filtered headers are never forwarded.
            continue
        logger.debug(
            "[%s] write header %s: %s",
            self.id,
            name,
            value,
        )
        self.set_header(name, value)
def send_file(file_path, dashboard_name, handler): ''' Posts a file to the Jupyter Dashboards Server to be served as a dashboard :param file_path: The path of the file to send :param dashboard_name: The dashboard name under which it should be made available ''' # Make information about the request Host header available for use in # constructing the urls segs = handler.request.host.split(':') hostname = segs[0] if len(segs) > 1: port = segs[1] else: port = '' protocol = handler.request.protocol # Treat empty as undefined dashboard_server = os.getenv('DASHBOARD_SERVER_URL') if dashboard_server: dashboard_server = dashboard_server.format(protocol=protocol, hostname=hostname, port=port) upload_url = url_path_join(dashboard_server, UPLOAD_ENDPOINT, escape.url_escape(dashboard_name, False)) with open(file_path, 'rb') as file_content: headers = {} token = os.getenv('DASHBOARD_SERVER_AUTH_TOKEN') if token: headers['Authorization'] = 'token {}'.format(token) result = requests.post(upload_url, files={'file': file_content}, headers=headers, timeout=60, verify=not skip_ssl_verification()) if result.status_code >= 400: raise web.HTTPError(result.status_code) # Redirect to link specified in response body res_body = result.json() if 'link' in res_body: redirect_link = res_body['link'] else: # Compute redirect link using environment variables # First try redirect URL as it might be different from internal upload URL redirect_server = os.getenv('DASHBOARD_REDIRECT_URL') if redirect_server: redirect_root = redirect_server.format(hostname=hostname, port=port, protocol=protocol) else: redirect_root = dashboard_server redirect_link = url_path_join( redirect_root, VIEW_ENDPOINT, escape.url_escape(dashboard_name, False)) handler.redirect(redirect_link) else: access_log.debug('Can not deploy, DASHBOARD_SERVER_URL not set') raise web.HTTPError(500, log_message='No dashboard server configured')
def __call__(self, source, destination, buffer_size=None, source_finish=False):
    # Transfers Data from _source to _destination (chunk by chunk)
    # App -> Desktop, Desktop (reply) -> App
    self._source = source
    # Raw tornado IOStream of the incoming request; chunks are read from it.
    self._stream = source.request.connection.stream
    self._content_length = int(
        self._source.request.headers['Content-Length'])
    self._remaining = self._content_length
    self._source_finish = source_finish
    self._buffer_size = buffer_size
    if not self._buffer_size:
        # No explicit buffer size: derive one (sets self._buffer_size).
        self._get_buffer_size()
    self._destination = destination
    access_log.debug('[%s]: interaction %s->%s (%i/%i)..' %
                     (self.RequestID, self._source.request.uri,
                      self._destination.request.uri, self._content_length,
                      self._buffer_size))
    self._copy_headers()
    if not self._destination._check_closed():
        # Flush headers first, then start the chunk-copy loop.
        self._destination.flush(callback=self._read_chunk)
def _read_chunk(self):
    """Schedule a read of the next body chunk from the source stream."""
    # Never request more bytes than remain in the body.
    self._buffer_size = min(self._buffer_size, self._remaining)
    access_log.debug('[%s]: _read_chunk %s (%i/%i)..'
                     % (self.RequestID, self._source.request.uri,
                        self._buffer_size, self._remaining))
    self._stream.read_bytes(self._buffer_size, self._data_callback)
def get(self):
    """Handle an agent GET: validate the target URL, then proxy it."""
    target = self.get_query_argument("url")
    logger.debug("[%s]agent get url: %s", self.id, target)
    request_data = {"url": target}
    self.request_data = request_data
    if not self.validate_request(request_data):
        raise gen.Return()
    yield self.handle_request(request_data)
def _streaming_callback(self, chunk):
    # Forward one body chunk from the upstream response to the client.
    if self._finished:
        return
    # First chunk: emit the proxied headers before any body bytes.
    if not self._headers_written:
        self._set_proxy_headers()
        self.flush()
        self.in_request_headers = False
    self.write(chunk)
    logger.debug("[%s] chunk length %s", self.id, len(chunk))
def _add_instance(self, server, port):
    """Register a newly discovered peer port (same server) or remote server."""
    added = False
    # A port is only tracked for the local server, never our own port,
    # and never twice.
    is_local_peer = (server == self.server and port
                     and port not in self._instances and port != self.port)
    if is_local_peer:
        self._instances.append(port)
        added = True
    # A distinct remote server is tracked once.
    if server and server != self.server and server not in self._servers:
        self._servers.append(server)
        added = True
    if added:
        access_log.debug('add instance: server=%s, port=%s' % (server, port))
def post(self):
    """Query alarm records for a time range (and optionally one rule).

    The caller must supply ``start_time`` and ``end_time``; ``rule_id``
    is optional (0 means "all rules").  All inputs are validated before
    being interpolated into the SQL statement.
    """
    start_time = self.get_argument('start_time', None)
    end_time = self.get_argument('end_time', None)
    rule_id = self.get_argument('rule_id', 0)
    try:
        # Time validation / conversion to timestamps.
        start_time = to_time_stamp(start_time)
        end_time = to_time_stamp(end_time)
    except (ValueError, TypeError):
        access_log.error('Get err time {}, {}'.format(
            start_time, end_time))
        self.write_json(None, status_code=400, msg='参数错误')
        return
    except Exception as e:
        # Bug fix: this branch previously fell through and continued with
        # the unconverted values; any parse failure is now a bad request.
        access_log.error('Get error {}'.format(e))
        self.write_json(None, status_code=400, msg='参数错误')
        return
    try:
        # rule_id must be an integer.
        rule_id = int(rule_id)
    except (ValueError, TypeError):
        access_log.error('Get Err {}'.format(rule_id))
        self.write_json(None, status_code=400, msg='参数错误')
        return
    except Exception as e:
        access_log.error('Get error {}'.format(e))
        self.write_json(None, status_code=400, msg='参数错误')
        return
    last_sql = QUERY_SQL
    day1 = int(time_to_day(start_time))
    day2 = int(time_to_day(end_time))
    # fdate is the indexed column, so the date predicate goes first.
    last_sql += ' AND fdate BETWEEN {} AND {}'.format(day1, day2)
    last_sql += ' AND generate_time > \'{}\' AND generate_time < \'{}\'' \
        .format(time_print(start_time), time_print(end_time))
    if rule_id != 0:
        last_sql += ' AND rule_id = {}'.format(rule_id)
    access_log.debug(last_sql)
    rlts = []
    # Bug fix: create the cursor outside the try block so that `cur` is
    # guaranteed to be bound when the finally clause closes it.
    cur = self.db.cursor()
    try:
        cur.execute(last_sql)
        rlts = cur.fetchall()
    finally:
        cur.close()
    for r in rlts:
        r['generate_time'] = time_print(
            (date_to_stamp(r['generate_time'])))
    self.write_json(rlts)
def post(self):
    """Handle an agent POST: parse the body, validate it, then proxy."""
    payload = self.get_post_request_data()
    logger.debug("[%s]agent request data: %s", self.id, payload)
    if not payload:
        raise gen.Return()
    self.request_data = payload
    if not self.validate_request(payload):
        raise gen.Return()
    yield self.handle_request(payload)
def _range_to_file(self, range_file, _range):
    """Write the given range entries to ``range_file``, one per line.

    :param range_file: path to an existing, readable and writable file
    :param _range: iterable of strings joined with newlines
    :raises EnvironmentError: if the file is missing or lacks permissions
    """
    access_log.debug("range_to_file (range=%s)" % str(_range))
    fl = range_file
    if not os.access(fl, os.F_OK | os.R_OK | os.W_OK):
        # Bug fix: the original `raise EnvironmentError, "..."` is
        # Python-2-only syntax; the call form parses on both 2 and 3.
        raise EnvironmentError(
            "File '%s' doesn't exist or you don't have enough permissions"
            % fl)
    # `with` guarantees the file is closed even if write() raises.
    with open(fl, 'w') as fd:
        fd.write('\n'.join(_range))
def send_file(file_path, dashboard_name, handler): ''' Posts a file to the Jupyter Dashboards Server to be served as a dashboard :param file_path: The path of the file to send :param dashboard_name: The dashboard name under which it should be made available ''' # Make information about the request Host header available for use in # constructing the urls segs = handler.request.host.split(':') hostname = segs[0] if len(segs) > 1: port = segs[1] else: port = '' protocol = handler.request.protocol # Treat empty as undefined dashboard_server = os.getenv('DASHBOARD_SERVER_URL') if dashboard_server: dashboard_server = dashboard_server.format(protocol=protocol, hostname=hostname, port=port) upload_url = url_path_join(dashboard_server, UPLOAD_ENDPOINT, escape.url_escape(dashboard_name, False)) with open(file_path, 'rb') as file_content: headers = {} token = os.getenv('DASHBOARD_SERVER_AUTH_TOKEN') if token: headers['Authorization'] = 'token {}'.format(token) result = requests.post(upload_url, files={'file': file_content}, headers=headers, timeout=60, verify=not skip_ssl_verification()) if result.status_code >= 400: raise web.HTTPError(result.status_code) # Redirect to link specified in response body res_body = result.json() if 'link' in res_body: redirect_link = res_body['link'] else: # Compute redirect link using environment variables # First try redirect URL as it might be different from internal upload URL redirect_server = os.getenv('DASHBOARD_REDIRECT_URL') if redirect_server: redirect_root = redirect_server.format(hostname=hostname, port=port, protocol=protocol) else: redirect_root = dashboard_server redirect_link = url_path_join(redirect_root, VIEW_ENDPOINT, escape.url_escape(dashboard_name, False)) handler.redirect(redirect_link) else: access_log.debug('Can not deploy, DASHBOARD_SERVER_URL not set') raise web.HTTPError(500, log_message='No dashboard server configured')
def _add_instance(self, server, port):
    # Track a peer: ports on this same server go into _instances,
    # other servers go into _servers.
    _added = False
    # Only record a port for the local server, never our own port,
    # and never twice.
    if server == self.server and port and port not in self._instances and port != self.port:
        self._instances.append(port)
        _added = True
    # Record a distinct remote server at most once.
    if server and server != self.server and server not in self._servers:
        self._servers.append(server)
        _added = True
    if _added:
        access_log.debug('add instance: server=%s, port=%s' % (server, port))
def hello(self, server=None, port=None, instance_headers=None):
    """Announce this instance to another instance/server with an async POST."""
    url, _headers = self._url('hello', server, port)
    access_log.debug("hello: %s" % url)
    # Use the default hello headers unless the caller supplied its own.
    extra = self._hello_headers if instance_headers is None else instance_headers
    _headers.update(extra)
    req = tornado.httpclient.HTTPRequest(url, body='', method="POST",
                                         use_gzip=False, headers=_headers)
    # Track outstanding hellos so replies can be matched up later.
    self._hello_awaiting += 1
    tornado.httpclient.AsyncHTTPClient().fetch(req, self._response_hello)
def rule_del(self, rule_id):
    """Delete a counter rule.

    Deletion is logical, not physical: the row's ``is_valid`` flag is
    cleared, then a broadcast message is published so analyzers reload.

    Example broadcast payload::

        {
            "ANALYZER_ID": 0,          // ID 0 means broadcast
            "MESSAGE": {
                "COMMOND": "ADD_RULE", // reload, treated as hot restart
                "SWH_ID": [14, 23, 19, 40],
                "COUNTER": [           // counter filters
                    {
                        "CNT_ID": 1,
                        "SRC_IP": "192.118.0.2",
                        "DST_IP": "192.119.0.1"
                    }
                ]
            }
        }
    """
    if rule_id == -1:
        # Bug fix: execution previously fell through and still ran the
        # UPDATE with the sentinel id; stop early instead.
        access_log.error('Get wrong id, del failed')
        self.write_json(None, status_code=400, msg='删除错误')
        return
    del_sql = """UPDATE `counter_rule` SET `is_valid` = '0' WHERE `counter_rule`.`id` = {}"""
    del_sql = del_sql.format(rule_id)
    access_log.debug(del_sql)
    # Bug fix: create the cursor before the try block so `cur` is always
    # bound when the finally clause closes it.
    cur = self.db.cursor()
    try:
        cur.execute(del_sql)
        self.db.commit()
    except Exception as e:
        access_log.error('Get error {}'.format(e))
        self.write_json(None, status_code=400, msg='删除错误')
        return
    finally:
        cur.close()
    from q_listen import get_counter_rules, generate_sub
    # Broadcast the deletion to all analyzers (ANALYZER_ID 0 = broadcast).
    pub_msg = {}
    pub_msg['ANALYZER_ID'] = 0
    msg = {}
    msg['COMMOND'] = 'DEL_RULE'
    msg['COUNTER'] = [rule_id]
    pub_msg['MESSAGE'] = msg
    generate_sub(pub_msg)
    self.write_json('success')
def handle_request(self, request):
    # Proxy entry point: translate the incoming tornado request into a
    # request object, let the request callback inspect/modify it, then
    # fetch the upstream response asynchronously.
    if debug_level >= 4:
        access_log.debug('<<<<<<<< REQUEST <<<<<<<<\n%s' % pprint.pformat(request.__dict__))
    requestobj, parsedurl = self.make_requestobj(request)
    if debug_level >= 3:
        access_log.debug('<<<<<<<< REQUESTOBJ <<<<<<<<\n%s' % pprint.pformat(requestobj.__dict__))
    if debug_level >= 1:
        debugstr = 'serving request from %s:%d%s ' % (requestobj.host,
                                                      requestobj.port or 80,
                                                      requestobj.path)
    # tornado 4.x error ValueError('Body must be None for GET request') fix
    body_expected = requestobj.method in ('POST', 'PATCH', 'PUT')
    body_present = requestobj.body is not None
    if body_present and not body_expected:
        requestobj.body = None
    modrequestobj = req_callback(requestobj)
    # The callback may short-circuit by returning a response directly.
    if isinstance(modrequestobj, ResponseObj):
        self.handle_response(modrequestobj)
        return
    if debug_level >= 1:
        access_log.debug(debugstr + 'to %s:%d%s' % (modrequestobj.host,
                                                    modrequestobj.port or 80,
                                                    modrequestobj.path))
    outreq = self.make_request(modrequestobj, parsedurl)
    if debug_level >= 2:
        access_log.debug('>>>>>>>> REQUEST >>>>>>>>\n%s %s\n%s' % (outreq.method, outreq.url, '\n'.join(['%s: %s' % (k, v) for k, v in outreq.headers.items()])))

    # send the request
    def _resp_callback(response):
        self.handle_response(response, context=modrequestobj.context)

    client = tornado.httpclient.AsyncHTTPClient()
    try:
        client.fetch(outreq, _resp_callback,
                     validate_cert=modrequestobj.validate_cert)
    except tornado.httpclient.HTTPError as e:
        # Upstream errors carrying a response are proxied through;
        # anything else becomes a plain 500.
        if hasattr(e, 'response') and e.response:
            self.handle_response(e.response, context=modrequestobj.context, error=True)
        else:
            self.set_status(500)
            self.write('Internal server error:\n' + str(e))
            self.finish()
def hello(self, server=None, port=None, instance_headers=None):
    # Says 'hello' to other instances/servers
    url, _headers = self._url('hello', server, port)
    access_log.debug("hello: %s" % url)
    # Use the default hello headers unless the caller supplied its own.
    if instance_headers is None:
        _headers.update(self._hello_headers)
    else:
        _headers.update(instance_headers)
    request = tornado.httpclient.HTTPRequest(url, body='', method="POST",
                                             use_gzip=False, headers=_headers)
    http_client = tornado.httpclient.AsyncHTTPClient()
    # Track outstanding hellos so replies can be matched up later.
    self._hello_awaiting += 1
    http_client.fetch(request, self._response_hello)
def bundle(handler, abs_nb_path): ''' Uploads a notebook to a Jupyter Dashboard Server ''' # Get name of notebook from filename notebook_basename = os.path.basename(abs_nb_path) notebook_name = os.path.splitext(notebook_basename)[0] # Make information about the request Host header available for use in # constructing the urls segs = handler.request.host.split(':') hostname = segs[0] if len(segs) > 1: port = segs[1] else: port = '' protocol = handler.request.protocol # Treat empty as undefined dashboard_server = os.getenv('DASHBOARD_SERVER_URL') if dashboard_server: dashboard_server = dashboard_server.format(protocol=protocol, hostname=hostname, port=port) upload_url = url_path_join(dashboard_server, UPLOAD_ENDPOINT, escape.url_escape(notebook_name, False)) with open(abs_nb_path, 'rb') as notebook: headers = {} token = os.getenv('DASHBOARD_SERVER_AUTH_TOKEN') if token: # TODO: server side should expect Authorization: token <value> headers['Authorization'] = token result = requests.post(upload_url, files={'file': notebook}, headers=headers, timeout=60) if result.status_code >= 400: raise web.HTTPError(result.status_code) # Redirect for client might be different from internal upload URL redirect_server = os.getenv('DASHBOARD_REDIRECT_URL') if redirect_server: redirect_root = redirect_server.format(hostname=hostname, port=port, protocol=protocol) else: redirect_root = dashboard_server handler.redirect(url_path_join(redirect_root, VIEW_ENDPOINT, escape.url_escape(notebook_name, False))) else: access_log.debug('Can not deploy, DASHBOARD_SERVER_URL not set') raise web.HTTPError(500, log_message='No dashboard server configured')
def _rem_instance(self, request, error=None):
    # Record a failed contact with a peer; after too many consecutive
    # failures the peer is dropped from the known instance/server lists.
    headers = request.headers
    _server = headers.get(MN_TARGET_SERVER, '')
    _port = headers.get(MN_TARGET_PORT, '')
    # Exactly one of server/port must identify the peer.
    assert (_server or _port) and not (_server and _port), \
        'Either server("%s") or port("%s") should be specified' % (_server,_port)
    _key = _server or _port
    if _key not in self._failed:
        self._failed[_key] = 1
    else:
        self._failed[_key] += 1
    access_log.debug('failed connect to instance "%s": %i (%s)' % (_key, self._failed[_key], str(error)))
    if self._failed[_key] > options.max_instance_failed:
        access_log.debug('remove instance "%s"' % _key)
        # Remote servers and local ports live in separate lists.
        self._servers.remove(_server) if _server else self._instances.remove(_port)
        del self._failed[_key]
def get(self):
    """Return the current user's Weibo profile if the session is valid."""
    access_token = self.get_secure_cookie("access_token")
    expires = self.get_secure_cookie("expires_time")
    uid = self.get_secure_cookie("uid")
    # validate access_token
    # Bug fix: the cookie value is a string, and `expires > int(...)`
    # compared string with int -- always True on Python 2 (type-name
    # ordering) and a TypeError on Python 3 -- so the expiry check never
    # actually worked.  Convert the cookie to an int before comparing.
    if access_token and expires and uid and int(expires) > int(time.time()):
        http_client = AsyncHTTPClient()
        # generate api url, fetch user info
        url = url_concat("https://api.weibo.com/2/users/show.json",
                         dict(access_token=access_token, uid=uid))
        response = yield http_client.fetch(url)
        logger.debug(response.body)
        r = json.loads(response.body)
        self.write(r)
    else:
        raise tornado.web.HTTPError(403)
def _data_callback(self, data=None):
    # One chunk has arrived from the source stream: account for it,
    # decide what happens after it is written, and forward it.
    self._remaining -= len(data)
    if options.stats_enabled:
        stats_mon._bytes(len(data))
    if self._remaining > 0:
        # More body to come: keep the read loop going after this write.
        _callback = self._read_chunk
    else:
        # Last chunk: finish the destination after the write completes.
        _callback = self._destination.finish
        if self._source_finish:
            self._completed = True
            IOLoop.instance().add_callback(self._source.finish)
        else:
            # Keep the source open and arm a no-reply timeout.
            self._source._timeout = IOLoop.instance().add_timeout(
                MN_NO_REPLY_TIMEOUT, self._source._response_no_reply)
    if not self._destination._check_closed():
        access_log.debug('[%s]: _data_callback %s (%i/%i)..' % (self.RequestID, self._destination.request.uri, len(data), self._remaining))
        self._destination.request.write(data, callback = _callback)
def _rem_instance(self, request, error=None):
    """Note a failed peer contact; evict the peer after too many failures."""
    hdrs = request.headers
    _server = hdrs.get(MN_TARGET_SERVER, '')
    _port = hdrs.get(MN_TARGET_PORT, '')
    # Exactly one of server/port must identify the peer.
    assert (_server or _port) and not (_server and _port), \
        'Either server("%s") or port("%s") should be specified' % (_server,_port)
    _key = _server or _port
    # Count consecutive failures for this peer.
    self._failed[_key] = self._failed.get(_key, 0) + 1
    access_log.debug('failed connect to instance "%s": %i (%s)'
                     % (_key, self._failed[_key], str(error)))
    if self._failed[_key] > options.max_instance_failed:
        access_log.debug('remove instance "%s"' % _key)
        # Remote servers and local ports live in separate lists.
        if _server:
            self._servers.remove(_server)
        else:
            self._instances.remove(_port)
        del self._failed[_key]
def __exit__(self, exc_type, exc_val, exc_tb):
    # Log why the stream handler exited.  NOTE(review): the equality
    # checks match the exact exception class only, not subclasses --
    # confirm that is intended (e.g. subclasses of Exception are not
    # logged by the last branch).
    if exc_type == toro.Timeout:
        access_log.debug("[uid: %s] connection timeout" % self.client_uid)
    elif exc_type == StreamClosedError:
        access_log.warning("[uid: %s] stream closed unexpectedly" % self.client_uid)
    elif exc_type == ConnectError:
        self.stream.close()
        access_log.info("[uid: %s] connection refused: %s" % (self.client_uid, exc_val.message))
    elif exc_type == Exception:
        access_log.exception("[uid: %s] error handling stream" % self.client_uid, exc_info=True)
    if exc_val is not None:
        # On any error, drop the associated client connection.
        if self.client is not None:
            self.client.disconnect()
    return True  # suppress the raised exception
def _data_callback(self, data=None):
    # One chunk has arrived from the source stream: account for it,
    # decide what happens after it is written, and forward it.
    self._remaining -= len(data)
    if options.stats_enabled:
        stats_mon._bytes(len(data))
    if self._remaining > 0:
        # More body to come: keep the read loop going after this write.
        _callback = self._read_chunk
    else:
        # Last chunk: finish the destination after the write completes.
        _callback = self._destination.finish
        if self._source_finish:
            self._completed = True
            IOLoop.instance().add_callback(self._source.finish)
        else:
            # Keep the source open and arm a no-reply timeout.
            self._source._timeout = IOLoop.instance().add_timeout(
                MN_NO_REPLY_TIMEOUT, self._source._response_no_reply)
    if not self._destination._check_closed():
        access_log.debug('[%s]: _data_callback %s (%i/%i)..' % (self.RequestID, self._destination.request.uri, len(data), self._remaining))
        self._destination.request.write(data, callback=_callback)
async def list(self):
    """Return every SUSHI data source plus a total count as JSON."""
    access_log.debug("SUSHIDataSourceHandler.list")
    # limit, offset = self.paginate()
    async with dbManager.atomic():
        app_log.debug("in atomic context manager")
        sources = await dbManager.execute(SUSHIDataSource.select())
        app_log.debug("sources is %r", sources)
        total = await dbManager.count(SUSHIDataSource.select())
        app_log.debug("count is %r", total)
        app_log.debug("calling serializer")
        serializer = SUSHIDataSourceSerializer(sources, many=True)
        app_log.debug("serializer retutned. Data are %r", serializer.data)
        self.set_status(status.HTTP_200_OK)
        payload = {
            "total": total,
            "results": serializer.data,
            "headerData": headerData,
        }
        return self.finish(payload)
def __call__(self, source, destination, buffer_size = None, source_finish = False):
    # Transfers Data from _source to _destination (chunk by chunk)
    # App -> Desktop, Desktop (reply) -> App
    self._source = source
    # Raw tornado IOStream of the incoming request; chunks are read from it.
    self._stream = source.request.connection.stream
    self._content_length = int(self._source.request.headers['Content-Length'])
    self._remaining = self._content_length
    self._source_finish = source_finish
    self._buffer_size = buffer_size
    if not self._buffer_size:
        # No explicit buffer size: derive one (sets self._buffer_size).
        self._get_buffer_size()
    self._destination = destination
    access_log.debug('[%s]: interaction %s->%s (%i/%i)..' %
                     (self.RequestID, self._source.request.uri,
                      self._destination.request.uri, self._content_length,
                      self._buffer_size))
    self._copy_headers()
    if not self._destination._check_closed():
        # Flush headers first, then start the chunk-copy loop.
        self._destination.flush(callback = self._read_chunk)
def __exit__(self, exc_type, exc_val, exc_tb):
    # Log why the stream handler exited.  NOTE(review): the equality
    # checks match the exact exception class only, not subclasses --
    # confirm that is intended.
    if exc_type == toro.Timeout:
        access_log.debug("[uid: %s] connection timeout" % self.client_uid)
    elif exc_type == StreamClosedError:
        access_log.warning('[uid: %s] stream closed unexpectedly' % self.client_uid)
    elif exc_type == ConnectError:
        self.stream.close()
        access_log.info('[uid: %s] connection refused: %s' % (self.client_uid, exc_val.message))
    elif exc_type == Exception:
        access_log.exception('[uid: %s] error handling stream' % self.client_uid, exc_info=True)
    if exc_val is not None:
        # On any error, drop the associated client connection.
        if self.client is not None:
            self.client.disconnect()
    return True  # suppress the raised exception
def get(self):
    # redirect_uri handler
    if self.get_argument("code", False):
        # check state
        # NOTE(review): get_argument returns str while get_secure_cookie
        # returns bytes on Python 3 -- confirm this comparison works on
        # the interpreter this project targets.
        if self.get_argument("state") != self.get_secure_cookie(
                "oauth_state"):
            raise tornado.web.HTTPError(403)
        # fetch access token
        user = yield self.get_authenticated_user(
            redirect_uri=options.callback_url,
            code=self.get_argument("code"))
        logger.debug("access_token %s" % user["access_token"])
        logger.debug("expires_in %s" % user["expires_in"])
        logger.debug("uid %s" % user["uid"])
        # save access_token, expires etc.
        self.set_secure_cookie("access_token", user["access_token"])
        self.set_secure_cookie("expires_time",
                               str(user["expires_in"] + int(time.time())))
        self.set_secure_cookie("uid", user["uid"])
        self.write(user)
    # generate authorization url and redirect
    else:
        oauth_state = str(uuid.uuid4())
        logger.debug(oauth_state)
        # save state
        self.set_secure_cookie("oauth_state", oauth_state)
        # csrf, force login
        yield self.authorize_redirect(client_id=options.app_key,
                                      redirect_uri=options.callback_url,
                                      response_type="code",
                                      extra_params=dict(state=oauth_state,
                                                        forcelogin="******"))
def get(self):
    # redirect_uri handler
    if self.get_argument("code", False):
        # check state
        # NOTE(review): get_argument returns str while get_secure_cookie
        # returns bytes on Python 3 -- confirm this comparison works on
        # the interpreter this project targets.
        if self.get_argument("state") != self.get_secure_cookie("oauth_state"):
            raise tornado.web.HTTPError(403)
        # fetch access token
        user = yield self.get_authenticated_user(
            redirect_uri=options.callback_url,
            code=self.get_argument("code"))
        logger.debug("access_token %s" % user["access_token"])
        logger.debug("expires_in %s" % user["expires_in"])
        logger.debug("uid %s" % user["uid"])
        # save access_token, expires etc.
        self.set_secure_cookie("access_token", user["access_token"])
        self.set_secure_cookie("expires_time",
                               str(user["expires_in"] + int(time.time())))
        self.set_secure_cookie("uid", user["uid"])
        self.write(user)
    # generate authorization url and redirect
    else:
        oauth_state = str(uuid.uuid4())
        logger.debug(oauth_state)
        # save state
        self.set_secure_cookie("oauth_state", oauth_state)
        # csrf, force login
        yield self.authorize_redirect(
            client_id=options.app_key,
            redirect_uri=options.callback_url,
            response_type="code",
            extra_params=dict(state=oauth_state, forcelogin="******")
        )
def get_json_argument(self, name, default=None):
    """Find and return the argument with key 'name' from JSON request data.

    Similar to Tornado's get_argument() method.
    """
    if default is None:
        default = self._ARG_DEFAULT
    # Lazily parse the request body on first access.
    if not self._json_args:
        self.load_json()
    try:
        arg = self._json_args[name]
    except KeyError:
        # Missing key: either raise (no default given) or fall back.
        if default is self._ARG_DEFAULT:
            access_log.debug("Missing argument '%s'" % name)
            raise tornado.web.MissingArgumentError(name)
        access_log.debug("Returning default argument %s, as we couldn't find "
                         "'%s' in %s" % (default, name, self.request.arguments))
        return default
    access_log.debug("Found '%s': %s in JSON arguments" % (name, arg))
    return arg
def log_request(handler):
    # log requests at INFO instead of WARNING for all status codes
    # NOTE(review): despite the comment above, this call actually logs at
    # DEBUG level -- confirm which level is intended.
    request_time = 1000.0 * handler.request.request_time()
    access_log.debug("%d %s %.2fms", handler.get_status(),
                     handler._request_summary(), request_time)
def handle_response(self, response, context={}, error=False):
    """Translate an upstream response (or a ready ResponseObj) into the
    tornado response sent back to the client, applying the response or
    error callback first.
    """
    if not isinstance(response, ResponseObj):
        if debug_level >= 4:
            access_log.debug('<<<<<<<< RESPONSE <<<<<<<\n%s' % pprint.pformat(response.__dict__))
        # if content was gzipped we should remove the headers:
        # > Content-Encoding: gzip
        # > Transfer-Encoding: chunked
        # otherwise we get chunked-encoding error
        if 'gzip' in response.headers.get('Content-Encoding', ''):
            del response.headers['Content-Encoding']
            if 'Transfer-Encoding' in response.headers:
                del response.headers['Transfer-Encoding']
        responseobj = ResponseObj(
            code=response.code,
            headers=response.headers,
            pass_headers=True,
            body=response.body,
            context=context,
        )
    else:
        responseobj = response
    if debug_level >= 3:
        responseprint = copy(responseobj)
        responseprint.body = "-- body content not displayed --"
        access_log.debug('<<<<<<<< RESPONSEOBJ <<<<<<<\n%s' % pprint.pformat(responseprint.__dict__))
    if not error:
        mod = resp_callback(responseobj)
    else:
        mod = err_callback(responseobj)
    # set the response status code
    if mod.code == 599:
        # 599 means the upstream could not be reached at all.
        self.set_status(500)
        # Bug fix: the original string literal was broken by a raw
        # carriage-return in the source and could not parse; use an
        # explicit escape instead.
        self.write('Internal server error. \nServer unreachable.')
        self.finish()
        return
    self.set_status(mod.code)
    # set the response headers
    # pass_headers is either a bool (pass all / none) or an explicit list.
    if isinstance(mod.pass_headers, bool):
        header_keys = mod.headers.keys() if mod.pass_headers else []
    else:
        header_keys = mod.pass_headers
    for key in header_keys:
        if key.lower() == 'set-cookie':
            # Set-Cookie needs special handling: parse each cookie and
            # re-emit it through tornado so its attributes survive.
            cookies = Cookie.BaseCookie()
            for c in mod.headers.get_list('Set-Cookie'):
                cookies.load(tornado.escape.native_str(c))
            for cookie_key in cookies:
                cookie = cookies[cookie_key]
                params = dict(cookie)
                expires = params.pop('expires', None)
                if expires:
                    expires = dateutil.parser.parse(expires)
                self.set_cookie(
                    cookie.key,
                    cookie.value,
                    expires=expires,
                    **params
                )
        else:
            val = mod.headers.get(key)
            self.set_header(key, val)
    if debug_level >= 2:
        access_log.debug('>>>>>>>> RESPONSE (%s) >>>>>>>\n%s\n%s' % (mod.code, '\n'.join(['%s: %s' % (k, v) for k, v in self._headers.items()]), self._new_cookie.output() if hasattr(self, '_new_cookie') else ''))
    # set the response body
    if mod.body:
        self.write(mod.body)
    self.finish()
def log_request(handler):
    """Emit one access-log line (status, request summary, elapsed ms)."""
    elapsed_ms = handler.request.request_time() * 1000.0
    access_log.debug("%d %s %.2fms", handler.get_status(),
                     handler._request_summary(), elapsed_ms)
def _handle_request(self):
    '''Translates a HTTP request into code to execute on a kernel.'''
    self.response_future = Future()
    kernel_client, kernel_id = yield self.kernel_pool.acquire()
    try:
        # Method not supported
        if self.request.method not in self.sources:
            raise UnsupportedMethodError(self.request.method)
        # Set the Content-Type and status to default values
        self.set_header('Content-Type', 'text/plain')
        self.set_status(200)
        # Get the source to execute in response to this request
        source_code = self.sources[self.request.method]
        # Build the request dictionary
        request = json.dumps({
            'body' : parse_body(self.request),
            'args' : parse_args(self.request.query_arguments),
            'path' : self.path_kwargs,
            'headers' : headers_to_dict(self.request.headers)
        })
        # Turn the request string into a valid code string
        request_code = format_request(request)
        # Run the request and source code and yield until there's a result
        access_log.debug('Request code for notebook cell is: {}'.format(request_code))
        request_future = self.execute_code(kernel_client, kernel_id, request_code)
        yield request_future
        source_future = self.execute_code(kernel_client, kernel_id, source_code)
        source_result = yield source_future
        # If a response code cell exists, execute it
        if self.request.method in self.response_sources:
            response_code = self.response_sources[self.request.method]
            response_future = self.execute_code(kernel_client, kernel_id, response_code)
            # Wait for the response and parse the json value
            response_result = yield response_future
            response = json.loads(response_result)
            # Copy all the header values into the tornado response
            if 'headers' in response:
                for header in response['headers']:
                    self.set_header(header, response['headers'][header])
            # Set the status code if it exists
            if 'status' in response:
                self.set_status(response['status'])
        # Write the result of the source code execution
        self.write(source_result)
    # If there was a problem executing an code, return a 500
    except CodeExecutionError as err:
        self.write(err.error_message)
        self.set_status(500)
    # An unspported method was called on this handler
    except UnsupportedMethodError:
        self.set_status(405)
    finally:
        # Always make sure we release the kernel and finish the request
        self.response_future.set_result(None)
        self.kernel_pool.release(kernel_id)
        self.finish()
def post(self):
    """Query traffic records within a time range.

    This endpoint takes user input directly, so every argument is
    validated before it is interpolated into SQL.  ``start_time`` and
    ``end_time`` are required; ``ip_src``, ``ip_dst``, ``protocol`` and
    the 0/1 flags ``is_loop``/``is_drop``/``is_probe`` are optional
    filters.
    """
    start_time = self.get_argument('start_time', None)
    end_time = self.get_argument('end_time', None)
    ip_src = self.get_argument('ip_src', '0.0.0.0')
    ip_dst = self.get_argument('ip_dst', '0.0.0.0')
    protocol = self.get_argument('protocol', -1)
    is_loop = self.get_argument('is_loop', -1)
    is_drop = self.get_argument('is_drop', -1)
    is_probe = self.get_argument('is_probe', -1)
    try:
        # IP validation: inet_aton raises on malformed addresses.
        import socket
        socket.inet_aton(ip_src)
        socket.inet_aton(ip_dst)
    except OSError:
        access_log.error('Get wrong ip {}, {}'.format(ip_src, ip_dst))
        self.write_json(None, status_code=400, msg='参数错误')
        return
    except Exception as e:
        # Bug fix: an unexpected error here previously fell through with
        # unvalidated input; reject the request instead.
        access_log.error('Get error {}'.format(e))
        self.write_json(None, status_code=400, msg='参数错误')
        return
    try:
        # Time validation / conversion to timestamps.
        start_time = to_time_stamp(start_time)
        end_time = to_time_stamp(end_time)
    except (ValueError, TypeError):
        access_log.error('Get err time {}, {}'.format(
            start_time, end_time))
        self.write_json(None, status_code=400, msg='参数错误')
        return
    except Exception as e:
        access_log.error('Get error {}'.format(e))
        self.write_json(None, status_code=400, msg='参数错误')
        return
    if start_time > end_time:
        # The range must be ordered: start before end.
        access_log.warning('Get err time {}, {}'.format(
            start_time, end_time))
        self.write_json(None, status_code=400, msg='参数错误')
        return
    try:
        # Flags and protocol must be integers.
        is_probe = int(is_probe)
        is_drop = int(is_drop)
        is_loop = int(is_loop)
        protocol = int(protocol)
    except (ValueError, TypeError):
        access_log.error('Get Err {}, {}, {}, {}'
                         .format(is_probe, is_drop, is_loop, protocol))
        self.write_json(None, status_code=400, msg='参数错误')
        return
    except Exception as e:
        access_log.error('Get error {}'.format(e))
        self.write_json(None, status_code=400, msg='参数错误')
        return
    last_sql = QUERY_SQL
    day1 = int(time_to_day(start_time))
    day2 = int(time_to_day(end_time))
    # fdate is the indexed column, so the date-range predicate goes first.
    last_sql += ' AND fdate BETWEEN {} AND {}'.format(day1, day2)
    # Each optional filter is skipped while it still holds its default.
    if ip_src != '0.0.0.0':
        last_sql += ' AND s_ip = {}'.format(ip_src)
    if ip_dst != '0.0.0.0':
        last_sql += ' AND d_ip = {}'.format(ip_dst)
    if is_loop != -1:
        last_sql += ' AND is_loop = {}'.format(is_loop)
    if is_drop != -1:
        last_sql += ' AND is_drop = {}'.format(is_drop)
    if is_probe != -1:
        last_sql += ' AND is_probe = {}'.format(is_probe)
    access_log.debug(last_sql)
    rlts = []
    # Bug fix: create the cursor outside the try block so `cur` is always
    # bound when the finally clause closes it.
    cur = self.db.cursor()
    try:
        cur.execute(last_sql)
        rlts = cur.fetchall()
    except Exception as e:
        access_log.error('Get error {}'.format(e))
    finally:
        cur.close()
    ret_rlts = []
    day1_ms = (start_time % (24 * 3600)) * 1000
    day2_ms = (end_time % (24 * 3600)) * 1000
    for r in rlts:
        # Drop rows on the boundary days that fall outside the requested
        # millisecond range.
        if r['fdate'] == day1 and r['generate_time'] < day1_ms:
            continue
        if r['fdate'] == day2 and r['generate_time'] > day2_ms:
            continue
        ret_rlts.append(r)
    # Bug fix: the filtered list was computed but the unfiltered `rlts`
    # was written out; return the filtered results instead.
    self.write_json(ret_rlts)
def rule_add(self, rule_name, ip_src, ip_dst, switch_id, protocol):
    """Insert a new counter rule and broadcast it to the analyzers.

    ``rule_name`` must be non-empty and consist only of word characters
    (letters, digits, underscore) to keep user input out of the SQL text.
    """
    # rule_name = rule_name[:30]  # limit: no more than 30 characters
    import re
    if not (re.search(r'^\w*$', rule_name) and rule_name != ''):
        access_log.error('Get wrong name {}'.format(rule_name))
        self.write_json(None, status_code=400,
                        msg='rule_name 只能由字母数字下划线组成')
        # Bug fix: execution previously fell through and inserted the
        # invalid name anyway; stop here.
        return
    insert_sql = """INSERT INTO `counter_rule`
        (`rule_name`, `ip_src`, `ip_dst`, `protocol`, `switch_id`, `is_valid`)
        VALUES ('{}', '{}', '{}', '{}', '{}', '1') """
    insert_sql = insert_sql.format(rule_name, ip_src, ip_dst, protocol,
                                   switch_id)
    access_log.debug(insert_sql)
    # Bug fix: create cursors before their try blocks so `cur` is always
    # bound when the finally clauses close it.
    cur = self.db.cursor()
    try:
        # Insert the rule first...
        cur.execute(insert_sql)
        self.db.commit()
    except Exception as e:
        access_log.error('Get error {}'.format(e))
        self.write_json(None, status_code=400, msg='插入错误')
        return
    finally:
        cur.close()
    from q_listen import get_counter_rules, generate_sub
    query_sql = """SELECT * FROM counter_rule where rule_name = '{}' """
    query_sql = query_sql.format(rule_name)
    access_log.debug(query_sql)
    rule_item = {}
    cur = self.db.cursor()
    try:
        # ...then read the row back to learn its generated id.
        cur.execute(query_sql.format(rule_name))
        r = cur.fetchone()
        rule_item['CNT_ID'] = r['id']
        rule_item['SRC_IP'] = r['ip_src']
        rule_item['DST_IP'] = r['ip_dst']
        rule_item['SWH_ID'] = r['switch_id']
        rule_item['PTL'] = r['protocol']
    except Exception as e:
        access_log.error('Get error {}'.format(e))
        self.write_json(None, status_code=400, msg='查找错误')
        return
    finally:
        cur.close()
    # Broadcast the new rule to all analyzers (ANALYZER_ID 0 = broadcast).
    pub_msg = {}
    pub_msg['ANALYZER_ID'] = 0
    msg = {}
    msg['COMMOND'] = 'ADD_RULE'
    msg['COUNTER'] = [rule_item]
    pub_msg['MESSAGE'] = msg
    generate_sub(pub_msg)
    self.write_json('success')
def _handle_request(self):
    '''Translates a HTTP request into code to execute on a kernel.'''
    self.response_future = Future()
    kernel_client, kernel_id = yield self.kernel_pool.acquire()
    try:
        # Method not supported
        if self.request.method not in self.sources:
            raise UnsupportedMethodError(self.request.method)
        # Set the Content-Type and status to default values
        self.set_header('Content-Type', 'text/plain')
        self.set_status(200)
        # Get the source to execute in response to this request
        source_code = self.sources[self.request.method]
        # Build the request dictionary
        request = json.dumps({
            'body': parse_body(self.request),
            'args': parse_args(self.request.query_arguments),
            'path': self.path_kwargs,
            'headers': headers_to_dict(self.request.headers)
        })
        # Turn the request string into a valid code string
        request_code = format_request(request)
        # Run the request and source code and yield until there's a result
        access_log.debug(
            'Request code for notebook cell is: {}'.format(request_code))
        request_future = self.execute_code(kernel_client, kernel_id,
                                           request_code)
        yield request_future
        source_future = self.execute_code(kernel_client, kernel_id,
                                          source_code)
        source_result = yield source_future
        # If a response code cell exists, execute it
        if self.request.method in self.response_sources:
            response_code = self.response_sources[self.request.method]
            response_future = self.execute_code(kernel_client, kernel_id,
                                                response_code)
            # Wait for the response and parse the json value
            response_result = yield response_future
            response = json.loads(response_result)
            # Copy all the header values into the tornado response
            if 'headers' in response:
                for header in response['headers']:
                    self.set_header(header, response['headers'][header])
            # Set the status code if it exists
            if 'status' in response:
                self.set_status(response['status'])
        # Write the result of the source code execution
        self.write(source_result)
    # If there was a problem executing an code, return a 500
    except CodeExecutionError as err:
        self.write(err.error_message)
        self.set_status(500)
    # An unspported method was called on this handler
    except UnsupportedMethodError:
        self.set_status(405)
    finally:
        # Always make sure we release the kernel and finish the request
        self.response_future.set_result(None)
        self.kernel_pool.release(kernel_id)
        self.finish()