def _on_request_body(self, data, exec_req_cb):
    """Store the received body, parse form arguments for body-bearing
    methods, then invoke the continuation callback."""
    self.body = data
    if self.method in ("POST", "PATCH", "PUT"):
        content_type = self.headers.get("Content-Type", "")
        httputil.parse_body_arguments(content_type, data,
                                      self.arguments, self.files)
    exec_req_cb()
def _on_request_body(self, data):
    """Attach the body to the pending request, parse form arguments for
    mutating methods, and hand the request to the application callback."""
    request = self._request
    request.body = data
    if request.method in ("POST", "PATCH", "PUT"):
        content_type = request.headers.get("Content-Type", "")
        httputil.parse_body_arguments(content_type, data,
                                      request.arguments, request.files)
    self.request_callback(request)
def __init__(self, data):
    """Decode a msgpack-serialized HTTP request into meta, headers,
    single-valued arguments and uploaded files.

    Fixes: py2-only ``.iteritems()`` replaced with ``.items()`` (works on
    both py2 and py3), bare ``except:`` narrowed to ``except Exception:``,
    and the dead ``self._files = None`` assignment removed (it was always
    overwritten below).
    """
    method, url, version, headers, self._body = msgpack.unpackb(data)
    self._headers = dict(headers)
    self._meta = dict()
    self._meta["method"] = method
    self._meta["version"] = version
    self._meta["host"] = self._headers.get("Host") or self._headers.get("host", "")
    self._meta["remote_addr"] = self._headers.get("X-Real-IP") or self._headers.get("X-Forwarded-For", "")
    self._meta["query_string"] = urlparse.urlparse(url).query
    self._meta["cookies"] = dict()
    if "Cookie" in self._headers:
        # Best-effort: a malformed Cookie header must not abort request parsing.
        try:
            cookies = Cookie.BaseCookie()
            cookies.load(escape.native_str(self._headers["Cookie"]))
            self._meta["cookies"] = dict(
                (key, morsel.value) for key, morsel in cookies.items())
        except Exception:
            pass
    # Query-string arguments: keep only the first value of each key.
    query_args = urlparse.parse_qs(urlparse.urlparse(url).query)
    self._request = dict((k, v[0]) for k, v in query_args.items() if len(v) > 0)
    args = dict()
    files = dict()
    parse_body_arguments(self._headers.get("Content-Type", ""),
                         self._body, args, files)
    # Body arguments override query arguments on key collision.
    self._request.update(dict((k, v[0]) for k, v in args.items() if len(v) > 0))
    self._files = files
def __parse_body_arguments(self, body):
    """Try to coerce a raw request body into a dict.

    Order of attempts: project helper ``convert_to_list_or_dict``, then
    URL-query decoding, then tornado's ``parse_body_arguments``.
    Returns ``(flag, body)`` where ``flag`` is True when ``body`` was
    successfully converted.
    NOTE(review): nesting below is reconstructed from a flattened source
    line -- confirm the placement of ``flag = True`` against the original.
    """
    # First attempt: project helper that parses e.g. JSON-ish strings.
    flag, body = self.common_func.convert_to_list_or_dict(string=body, s_type='dict')
    if not flag and isinstance(body, str) and re.match(
            r'^.*=.*(&.*=.*)*$', body) is not None:
        # Looks like "k=v&k2=v2" -- try URL query decoding.
        body = self.common_func.url_query_decode(body)
        if isinstance(body, dict):
            flag = True
    if not flag:
        # Fall back to tornado-style body parsing; needs headers present.
        self.headers = self.headers if len(
            self.headers) != 0 else self.default_headers
        try:
            request_body = dict()
            if re.match(r'^.*=.*(&.*=.*)*$', body) is not None:
                # arguments and files share one dict on purpose here.
                parse_body_arguments(
                    content_type=self.headers['Content-Type'],
                    body=body,
                    arguments=request_body,
                    files=request_body,
                    headers=self.headers)
                if len(request_body) > 0:
                    body = request_body
                    for key in body:
                        # Flatten single-element lists, then decode bytes.
                        if isinstance(body[key], list):
                            body[key] = body[key][0]
                        body[key] = body[key].decode('utf8', errors='ignore')
            flag = True
        except Exception as e:
            log.warning(e)
            flag = False
    return flag, body
def __init__(self, data):
    """Decode a msgpack-serialized HTTP request into meta, headers,
    single-valued arguments and uploaded files.

    Fixes: py2-only ``.iteritems()`` replaced with ``.items()``, bare
    ``except:`` narrowed to ``except Exception:``, and the dead
    ``self._files = None`` assignment removed (always overwritten below).
    """
    method, url, version, headers, self._body = msgpack.unpackb(data)
    self._headers = dict(headers)
    self._meta = dict()
    self._meta['method'] = method
    self._meta['version'] = version
    self._meta['host'] = self._headers.get('Host') or self._headers.get('host', '')
    self._meta['remote_addr'] = self._headers.get('X-Real-IP') or self._headers.get('X-Forwarded-For', '')
    self._meta['query_string'] = urlparse.urlparse(url).query
    self._meta['cookies'] = dict()
    if 'Cookie' in self._headers:
        # Best-effort: a malformed Cookie header must not abort request parsing.
        try:
            cookies = Cookie.BaseCookie()
            cookies.load(escape.native_str(self._headers['Cookie']))
            self._meta['cookies'] = dict(
                (key, morsel.value) for key, morsel in cookies.items())
        except Exception:
            pass
    # Query-string arguments: keep only the first value of each key.
    query_args = urlparse.parse_qs(urlparse.urlparse(url).query)
    self._request = dict((k, v[0]) for k, v in query_args.items() if len(v) > 0)
    args = dict()
    files = dict()
    parse_body_arguments(self._headers.get("Content-Type", ""),
                         self._body, args, files)
    # Body arguments override query arguments on key collision.
    self._request.update(dict((k, v[0]) for k, v in args.items() if len(v) > 0))
    self._files = files
def __init__(self, data):
    """Decode a msgpack-serialized HTTP request (py2/py3 compatible via six)
    into headers, request meta, flattened arguments and uploaded files."""
    method, url, version, headers, self._body = msgpack_unpackb(data)
    if six.PY3:
        # msgpack yields bytes; decode to str so HTTPHeaders and urlparse work.
        method = method.decode()
        url = url.decode()
        version = version.decode()
        headers = [(k.decode(), v.decode()) for k, v in headers]
    self._headers = HTTPHeaders(headers)
    self._meta = {
        'method': method,
        'version': version,
        'host': self._headers.get('Host', ''),
        'remote_addr': self._headers.get('X-Real-IP') or self._headers.get('X-Forwarded-For', ''),
        'query_string': urlparse.urlparse(url).query,
        'cookies': dict(),
        # parsed_cookies comes from a project helper; semantics not visible here.
        'parsed_cookies': http_parse_cookies(self._headers),
    }
    # Body arguments are merged into the query-string argument lists.
    args = urlparse.parse_qs(urlparse.urlparse(url).query)
    self._files = dict()
    parse_body_arguments(self._headers.get("Content-Type", ""),
                         self._body, args, self._files)
    # dict_list_to_single presumably collapses one-element lists -- project helper.
    self._request = dict_list_to_single(args)
def __init__(self, data):
    """Decode a msgpack-serialized HTTP request into meta, headers,
    single-valued arguments and uploaded files.

    Fixes: py2-only ``.iteritems()`` replaced with ``.items()``, bare
    ``except:`` narrowed to ``except Exception:``, and the dead
    ``self._files = None`` assignment removed (always overwritten below).
    """
    method, url, version, headers, self._body = msgpack.unpackb(data)
    self._headers = dict(headers)
    self._meta = dict()
    self._meta['method'] = method
    self._meta['version'] = version
    self._meta['host'] = self._headers.get('Host') or self._headers.get(
        'host', '')
    self._meta['remote_addr'] = self._headers.get(
        'X-Real-IP') or self._headers.get('X-Forwarded-For', '')
    self._meta['query_string'] = urlparse.urlparse(url).query
    self._meta['cookies'] = dict()
    if 'Cookie' in self._headers:
        # Best-effort: a malformed Cookie header must not abort request parsing.
        try:
            cookies = Cookie.BaseCookie()
            cookies.load(escape.native_str(self._headers['Cookie']))
            self._meta['cookies'] = dict(
                (key, morsel.value) for key, morsel in cookies.items())
        except Exception:
            pass
    # Query-string arguments: keep only the first value of each key.
    query_args = urlparse.parse_qs(urlparse.urlparse(url).query)
    self._request = dict(
        (k, v[0]) for k, v in query_args.items() if len(v) > 0)
    args = dict()
    files = dict()
    parse_body_arguments(self._headers.get("Content-Type", ""),
                         self._body, args, files)
    # Body arguments override query arguments on key collision.
    self._request.update(
        dict((k, v[0]) for k, v in args.items() if len(v) > 0))
    self._files = files
def _on_request_body(self, data):
    """Store the body, parse body arguments for mutating methods, fold them
    into the combined argument map, then dispatch the request."""
    request = self._request
    request.body = data
    if request.method in ("POST", "PATCH", "PUT"):
        httputil.parse_body_arguments(
            request.headers.get("Content-Type", ""), data,
            request.body_arguments, request.files)
    for name, values in request.body_arguments.items():
        request.arguments.setdefault(name, []).extend(values)
    self.request_callback(request)
def handle_request_body(self, method, headers, body):
    """Return ``(arguments, files)`` parsed from the body of a mutating
    request; both dicts are empty for non-body methods."""
    arguments, files = {}, {}
    if method in ('POST', 'PATCH', 'PUT'):
        _log.debug('Parsing message body')
        content_type = headers.get('Content-Type', '')
        httputil.parse_body_arguments(content_type, body, arguments, files)
    return arguments, files
async def post(self):
    """Test handler: parse the captured upload and assert the expected
    field/file round-trip, then acknowledge with the received size."""
    args = {}
    files = {}
    # self.byts holds the raw captured body -- set elsewhere in this handler.
    t_httputil.parse_body_arguments(self.request.headers["Content-Type"],
                                    self.byts, args, files)
    item = files.get('file')[0]
    assert item['body'] == b'asdfasdf'
    assert item['filename'] == 'file'
    assert args.get('zip_password') == [b'test']
    self.sendRestRetn(self.gotsize)
def post(self):
    """Accept a finance-record form, store it when complete, and re-render
    the page with the refreshed record list and total."""
    args, files = {}, {}
    parse_body_arguments('application/x-www-form-urlencoded',
                         self.request.body, args, files)
    request_data = {name: vals[0].decode('utf-8')
                    for name, vals in args.items()}
    status = None
    required = ('date', 'amount', 'type', 'remark')
    if all(field in request_data for field in required):
        status = self.add_record(request_data)
    data = self.get_record()
    # Round the grand total to two decimal places.
    total = int(sum([rec['amount'] for rec in data]) * 100) / 100.0
    self.render('finance.html', bar_info_list=self.bar_info_list,
                data=data, status=status, total=total)
def get_image_data():
    """Extract the uploaded profile image from the multipart request body.

    Returns ``(file_data, file_name)`` where ``file_name`` is the slugified
    current user id plus the upload's original extension.
    """
    content_type = self.request.headers.get('content-type')
    # Fix: the original did b'{}'.format(self.request.body), which raises
    # AttributeError on py3 (bytes has no .format) and at best produced a
    # lossy copy on py2. Pass the raw body through unchanged.
    body = self.request.body
    parse_body_arguments(content_type, body, post_data, files)
    upload = files['profile-image'][0]
    file_data = upload.get('body')
    original_file_name = upload.get('filename')
    extension = original_file_name.split('.')[-1]
    file_name = u'{}.{}'.format(slugify(current_user_id), extension)
    return file_data, file_name
def handle_request_body(self, message):
    """Parse the forwarded request's body for mutating methods and reply
    with the resulting arguments and files."""
    options = message.body['options']
    arguments, files = {}, {}
    if options['method'] in ('POST', 'PATCH', 'PUT'):
        _log.debug('Parsing message body')
        content_type = options['headers'].get('Content-Type', '')
        httputil.parse_body_arguments(content_type, message.body['body'],
                                      arguments, files)
    message.reply(body={'arguments': arguments, 'files': files})
def get_string_values_from_put_request(self, *arguments):
    """Decode the named www-form-urlencoded values from the PUT body.

    Raises ValueError when the content type is wrong or a key is missing.
    """
    # Guard clause: only form-encoded bodies are accepted.
    if self.request.headers['Content-Type'] != 'application/x-www-form-urlencoded':
        raise ValueError
    values, files = {}, {}
    parse_body_arguments('application/x-www-form-urlencoded',
                         self.request.body, values, files)
    for arg in arguments:
        if arg not in values:
            print(f"Cannot find key: >>{arg}<< in dict {values}!")
            raise ValueError
    return [values[arg][0].decode() for arg in arguments]
def setConnected(self, device_type, device_number):
    """Update the device's 'Connected' resource from a form-encoded body,
    or answer 400 on any other content type."""
    # Guard clause: reject non-form bodies up front.
    if self.request.headers['Content-Type'] != 'application/x-www-form-urlencoded':
        self.set_status(400, "Value error")
        return
    values, files = {}, {}
    parse_body_arguments('application/x-www-form-urlencoded',
                         self.request.body, values, files)
    value = values["Connected"][0].decode() == 'True'
    self.setResource(device_type, device_number, "Connected", value)
def handle_request_body(self, method, headers, body):
    """Parse a form/multipart body for POST/PATCH/PUT requests.

    Returns the ``(arguments, files)`` pair; both are empty dicts for
    methods that carry no form body.
    """
    arguments = dict()
    files = dict()
    if method not in ('POST', 'PATCH', 'PUT'):
        return arguments, files
    _log.debug('Parsing message body')
    httputil.parse_body_arguments(headers.get('Content-Type', ''),
                                  body, arguments, files)
    return arguments, files
def __init__(self, environ):
    """Parses the given WSGI environment to construct the request."""
    self.method = environ["REQUEST_METHOD"]
    # URI path is SCRIPT_NAME + PATH_INFO, re-quoted after WSGI decoding.
    self.path = urllib_parse.quote(
        from_wsgi_str(environ.get("SCRIPT_NAME", "")))
    self.path += urllib_parse.quote(
        from_wsgi_str(environ.get("PATH_INFO", "")))
    self.uri = self.path
    self.arguments = {}
    self.query_arguments = {}
    self.body_arguments = {}
    self.query = environ.get("QUERY_STRING", "")
    if self.query:
        self.uri += "?" + self.query
        # keep_blank_values so empty form fields still appear as arguments.
        self.arguments = parse_qs_bytes(native_str(self.query),
                                        keep_blank_values=True)
        self.query_arguments = copy.deepcopy(self.arguments)
    self.version = "HTTP/1.1"
    # Rebuild HTTP headers from the flattened WSGI environ keys.
    self.headers = httputil.HTTPHeaders()
    if environ.get("CONTENT_TYPE"):
        self.headers["Content-Type"] = environ["CONTENT_TYPE"]
    if environ.get("CONTENT_LENGTH"):
        self.headers["Content-Length"] = environ["CONTENT_LENGTH"]
    for key in environ:
        if key.startswith("HTTP_"):
            self.headers[key[5:].replace("_", "-")] = environ[key]
    if self.headers.get("Content-Length"):
        self.body = environ["wsgi.input"].read(
            int(self.headers["Content-Length"]))
    else:
        self.body = ""
    self.protocol = environ["wsgi.url_scheme"]
    self.remote_ip = environ.get("REMOTE_ADDR", "")
    if environ.get("HTTP_HOST"):
        self.host = environ["HTTP_HOST"]
    else:
        self.host = environ["SERVER_NAME"]
    # Parse request body
    self.files = {}
    httputil.parse_body_arguments(self.headers.get("Content-Type", ""),
                                  self.body, self.body_arguments, self.files)
    # Body arguments are merged into the combined argument lists.
    for k, v in self.body_arguments.items():
        self.arguments.setdefault(k, []).extend(v)
    self._start_time = time.time()
    self._finish_time = None
def test_make_mfd(self):
    """Round-trip test: build a multipart/form-data body with make_mfd and
    verify parse_body_arguments recovers every field and file part,
    including CRLF sanitization of a malicious content type."""
    args, files = {}, {}
    body, content_type = make_mfd({'arg1': 'value1'}, {
        'file0': [
            # CRLFs in content_type must be collapsed, not emitted verbatim.
            HTTPFile(filename='file0.rar', body='ARCHIVE',
                     content_type='some/type\r\n\r\nBAD DATA')
        ],
        'file1': [
            HTTPFile(filename='file1.png', body='CAT PICTURE',
                     content_type=media_types.IMAGE_PNG)
        ],
        'file2': [HTTPFile(filename='file2.txt', body='TEXT')],
        'file3': [
            HTTPFile(filename=r'file3-"part1".unknown', body='BODY1'),
            HTTPFile(filename=r'file3-\part2\.unknown', body='BODY2'),
        ],
    })
    parse_body_arguments(to_unicode(content_type), body, args, files)
    self.assertEqual(args['arg1'], [b'value1'])
    self.assertEqual(files['file0'][0]['filename'], 'file0.rar')
    self.assertEqual(files['file0'][0]['body'], b'ARCHIVE')
    self.assertEqual(files['file0'][0]['content_type'], 'some/type BAD DATA')
    self.assertEqual(files['file1'][0]['filename'], 'file1.png')
    self.assertEqual(files['file1'][0]['body'], b'CAT PICTURE')
    self.assertEqual(files['file1'][0]['content_type'], media_types.IMAGE_PNG)
    self.assertEqual(files['file2'][0]['filename'], 'file2.txt')
    self.assertEqual(files['file2'][0]['body'], b'TEXT')
    # Missing content type defaults by filename extension.
    self.assertEqual(files['file2'][0]['content_type'], media_types.TEXT_PLAIN)
    self.assertEqual(files['file3'][0]['filename'], r'file3-"part1".unknown')
    self.assertEqual(files['file3'][0]['body'], b'BODY1')
    self.assertEqual(files['file3'][0]['content_type'],
                     media_types.APPLICATION_OCTET_STREAM)
    self.assertEqual(files['file3'][1]['filename'], r'file3-\part2\.unknown')
    self.assertEqual(files['file3'][1]['body'], b'BODY2')
    self.assertEqual(files['file3'][1]['content_type'],
                     media_types.APPLICATION_OCTET_STREAM)
def __init__(self, data):
    """Deserialize a msgpacked HTTP request into headers, request meta,
    flattened single-valued arguments and uploaded files."""
    method, url, version, headers, self._body = msgpack_unpackb(data)
    self._headers = HTTPHeaders(headers)
    self._meta = {
        'method': method,
        'version': version,
        'host': self._headers.get('Host', ''),
        'remote_addr': self._headers.get('X-Real-IP') or self._headers.get('X-Forwarded-For', ''),
        'query_string': urlparse.urlparse(url).query,
        'cookies': dict(),
        'parsed_cookies': http_parse_cookies(self._headers),
    }
    # Start from the query-string arguments, then merge body arguments in.
    args = urlparse.parse_qs(urlparse.urlparse(url).query)
    self._files = dict()
    parse_body_arguments(self._headers.get("Content-Type", ""),
                         self._body, args, self._files)
    self._request = dict_list_to_single(args)
def test_make_mfd(self):
    """Round-trip test: build a multipart/form-data body with make_mfd and
    verify parse_body_arguments recovers every field and file part,
    including CRLF sanitization of a malicious content type."""
    args, files = {}, {}
    body, content_type = make_mfd({'arg1': 'value1'}, {
        'file0': [
            # CRLFs in content_type must be collapsed, not emitted verbatim.
            HTTPFile(filename='file0.rar', body='ARCHIVE',
                     content_type='some/type\r\n\r\nBAD DATA')
        ],
        'file1': [
            HTTPFile(filename='file1.png', body='CAT PICTURE',
                     content_type='image/png')
        ],
        'file2': [HTTPFile(filename='file2.txt', body='TEXT')],
        'file3': [
            HTTPFile(filename=r'file3-"part1".unknown', body='BODY1'),
            HTTPFile(filename=r'file3-\part2\.unknown', body='BODY2'),
        ],
    })
    parse_body_arguments(to_unicode(content_type), body, args, files)
    self.assertEqual(args['arg1'], [b'value1'])
    self.assertEqual(files['file0'][0]['filename'], 'file0.rar')
    self.assertEqual(files['file0'][0]['body'], b'ARCHIVE')
    self.assertEqual(files['file0'][0]['content_type'], 'some/type BAD DATA')
    self.assertEqual(files['file1'][0]['filename'], 'file1.png')
    self.assertEqual(files['file1'][0]['body'], b'CAT PICTURE')
    self.assertEqual(files['file1'][0]['content_type'], 'image/png')
    self.assertEqual(files['file2'][0]['filename'], 'file2.txt')
    self.assertEqual(files['file2'][0]['body'], b'TEXT')
    # Missing content type defaults by filename extension.
    self.assertEqual(files['file2'][0]['content_type'], 'text/plain')
    self.assertEqual(files['file3'][0]['filename'], r'file3-"part1".unknown')
    self.assertEqual(files['file3'][0]['body'], b'BODY1')
    self.assertEqual(files['file3'][0]['content_type'], 'application/octet-stream')
    self.assertEqual(files['file3'][1]['filename'], r'file3-\part2\.unknown')
    self.assertEqual(files['file3'][1]['body'], b'BODY2')
    self.assertEqual(files['file3'][1]['content_type'], 'application/octet-stream')
def __init__(self, environ):
    """Parses the given WSGI environment to construct the request."""
    self.method = environ["REQUEST_METHOD"]
    # URI path is SCRIPT_NAME + PATH_INFO, re-quoted after WSGI decoding.
    self.path = urllib_parse.quote(from_wsgi_str(environ.get("SCRIPT_NAME", "")))
    self.path += urllib_parse.quote(from_wsgi_str(environ.get("PATH_INFO", "")))
    self.uri = self.path
    self.arguments = {}
    self.query_arguments = {}
    self.body_arguments = {}
    self.query = environ.get("QUERY_STRING", "")
    if self.query:
        self.uri += "?" + self.query
        # keep_blank_values so empty form fields still appear as arguments.
        self.arguments = parse_qs_bytes(native_str(self.query),
                                        keep_blank_values=True)
        self.query_arguments = copy.deepcopy(self.arguments)
    self.version = "HTTP/1.1"
    # Rebuild HTTP headers from the flattened WSGI environ keys.
    self.headers = httputil.HTTPHeaders()
    if environ.get("CONTENT_TYPE"):
        self.headers["Content-Type"] = environ["CONTENT_TYPE"]
    if environ.get("CONTENT_LENGTH"):
        self.headers["Content-Length"] = environ["CONTENT_LENGTH"]
    for key in environ:
        if key.startswith("HTTP_"):
            self.headers[key[5:].replace("_", "-")] = environ[key]
    if self.headers.get("Content-Length"):
        self.body = environ["wsgi.input"].read(
            int(self.headers["Content-Length"]))
    else:
        self.body = ""
    self.protocol = environ["wsgi.url_scheme"]
    self.remote_ip = environ.get("REMOTE_ADDR", "")
    if environ.get("HTTP_HOST"):
        self.host = environ["HTTP_HOST"]
    else:
        self.host = environ["SERVER_NAME"]
    # Parse request body
    self.files = {}
    httputil.parse_body_arguments(self.headers.get("Content-Type", ""),
                                  self.body, self.body_arguments, self.files)
    # Body arguments are merged into the combined argument lists.
    for k, v in self.body_arguments.items():
        self.arguments.setdefault(k, []).extend(v)
    self._start_time = time.time()
    self._finish_time = None
def post(self, dispatcher_id=None):
    """Forward a form-posted action/payload pair to the application's
    dispatcher and write its response back to the client.

    NOTE(review): uses ``yield gen.Task`` -- presumably decorated with a
    tornado coroutine/engine decorator outside this view; confirm.
    """
    form = {}
    parse_body_arguments('application/x-www-form-urlencoded',
                         self.request.body, form, {})
    # .pop() on the default list yields '' when the field is absent.
    _payload = form.get('payload', ['']).pop()
    action = form.get('action', ['']).pop()
    payload = json.loads(_payload)
    dispatcher = self.application.settings.get('dispatcher')
    resp = yield gen.Task(dispatcher.send_msg, action, payload, self.context())
    self.write(resp.payload)
    self.finish()
def handle_request_body(self, message):
    """Parse the forwarded request body when the method can carry one and
    reply with the parsed arguments and files."""
    options = message.body['options']
    reply_body = {'arguments': {}, 'files': {}}
    if options['method'] in ('POST', 'PATCH', 'PUT'):
        _log.debug('Parsing message body')
        httputil.parse_body_arguments(
            options['headers'].get('Content-Type', ''),
            message.body['body'],
            reply_body['arguments'],
            reply_body['files'])
    message.reply(body=reply_body)
def __init__(self, handler, form_id=None):
    """Build form state from a handler's request, merging uploaded files
    into the argument map for later validation.

    Fix: the py2-only ``except Exception, error`` handler syntax is
    replaced with the py2.6+/py3-compatible ``except Exception`` form.
    """
    self._parmas = {}
    self._values = {}
    self._form_errors = {}
    self.form_id = form_id
    arguments = {}
    # re-parse qs, keep_blankvalues for formencode to validate
    # so formencode not_empty setting work.
    request = handler.request
    content_type = request.headers.get("Content-Type", "")
    files = {}
    parse_body_arguments(content_type, request.body, arguments, files)
    # Fold uploaded files into the argument map; best-effort, never fatal.
    try:
        for key in files.keys():
            arguments[key] = files.get(key)
    except Exception:
        pass
def test_make_mfd(self):
    """Round-trip test: build a multipart/form-data body with make_mfd and
    verify parse_body_arguments recovers every field and file part,
    including CRLF sanitization of a malicious content type."""
    args, files = {}, {}
    body, content_type = make_mfd(
        {
            'arg1': 'value1'
        },
        {
            # CRLFs in content_type must be collapsed, not emitted verbatim.
            'file0': [HTTPFile(filename='file0.rar', body='ARCHIVE',
                               content_type='some/type\r\n\r\nBAD DATA')],
            'file1': [HTTPFile(filename='file1.png', body='CAT PICTURE',
                               content_type='image/png')],
            'file2': [HTTPFile(filename='file2.txt', body='TEXT')],
            'file3': [
                HTTPFile(filename=r'file3-"part1".unknown', body='BODY1'),
                HTTPFile(filename=r'file3-\part2\.unknown', body='BODY2'),
            ],
        }
    )
    parse_body_arguments(to_unicode(content_type), body, args, files)
    self.assertEqual(args['arg1'], [b'value1'])
    self.assertEqual(files['file0'][0]['filename'], 'file0.rar')
    self.assertEqual(files['file0'][0]['body'], b'ARCHIVE')
    self.assertEqual(files['file0'][0]['content_type'], 'some/type BAD DATA')
    self.assertEqual(files['file1'][0]['filename'], 'file1.png')
    self.assertEqual(files['file1'][0]['body'], b'CAT PICTURE')
    self.assertEqual(files['file1'][0]['content_type'], 'image/png')
    self.assertEqual(files['file2'][0]['filename'], 'file2.txt')
    self.assertEqual(files['file2'][0]['body'], b'TEXT')
    # Missing content type defaults by filename extension.
    self.assertEqual(files['file2'][0]['content_type'], 'text/plain')
    self.assertEqual(files['file3'][0]['filename'], r'file3-"part1".unknown')
    self.assertEqual(files['file3'][0]['body'], b'BODY1')
    self.assertEqual(files['file3'][0]['content_type'], 'application/octet-stream')
    self.assertEqual(files['file3'][1]['filename'], r'file3-\part2\.unknown')
    self.assertEqual(files['file3'][1]['body'], b'BODY2')
    self.assertEqual(files['file3'][1]['content_type'], 'application/octet-stream')
def _parse_body_args(self, body):
    """Parse a Slack slash-command form body into a normalized command dict.

    Fixes: ``args.get('response_url')[0]`` raised TypeError when the key
    was absent -- now defaults to ``''`` like ``user_name`` does; the
    ``text`` field is split once and reused instead of being re-split.
    """
    args = {}
    parse_body_arguments('application/x-www-form-urlencoded',
                         self.request.body, args, {})
    command_args = args['text'][0].split(' ')
    return {
        'response_url': args.get('response_url', [''])[0],
        'user': args.get('user_name', [''])[0],
        'channel': args['channel_name'][0] if len(args.get('channel_name', [])) else None,
        'command': command_args[0] if len(command_args) else '',
        'args': command_args,
        'slash_command': args['command'][0]
    }
def _begin_request(self):
    """
    Actually start executing this request.
    """
    headers = self.m2req.headers
    # Build a tornado HTTPRequest from the mongrel2 message metadata.
    self._request = HTTPRequest(connection=self,
                                method=headers.get("METHOD"),
                                uri=self.m2req.path,
                                version=headers.get("VERSION"),
                                headers=HTTPHeaders(headers),
                                remote_ip=headers.get("x-forwarded-for"))
    if len(self.m2req.body) > 0:
        self._request.body = self.m2req.body
        if self._request.method in ("POST", "PATCH", "PUT"):
            parse_body_arguments(
                self._request.headers.get("Content-Type", ""),
                self._request.body, self._request.arguments,
                self._request.files)
    if self.m2req.is_disconnect():
        self.finish()
    elif headers.get("x-mongrel2-upload-done", None):
        # there has been a file upload.
        # Dispatch only when the done marker matches the start marker,
        # i.e. this is the completion of the upload we saw begin.
        expected = headers.get("x-mongrel2-upload-start", "BAD")
        upload = headers.get("x-mongrel2-upload-done", None)
        if expected == upload:
            self.request_callback(self._request)
    elif headers.get("x-mongrel2-upload-start", None):
        # this is just a notification that a file upload has started. Do
        # nothing for now!
        pass
    else:
        # Ordinary request: hand off to the application immediately.
        self.request_callback(self._request)
def post(self):
    """Authenticate a user from a multipart login body and return a
    session cookie as JSON.

    Fixes: py2-only ``print`` statements replaced with the function form
    (valid on py2 and py3 for a single argument); the dicts passed to
    ``parse_body_arguments`` were misleadingly named ``files``/``headers``
    although they receive the *arguments* and *files* -- renamed locally.
    """
    self.set_status(202)
    user_data = {'username': '', 'password': ''}
    body_args = {}
    files = {}
    util.parse_body_arguments(self.request.headers['Content-Type'],
                              self.request.body, body_args, files)
    for k in user_data.keys():
        if k not in body_args:
            self.set_status(400, reason='incorrect data')
            print('INF: incorrect request data')
            return
        user_data[k] = body_args[k][0]
    print("DEB: login username " + user_data['username'])
    user_cn = MySQLConnector()
    user_cookie = user_cn.create_cookie(user_data)
    if user_cookie is None:
        self.set_status(401, reason='no such user-password pair')  # unauthorized
        return
    self.write(json.dumps({'user_cookie': user_cookie}))
def test_make_mfd(self):
    """Round-trip test: build a multipart/form-data body with make_mfd and
    verify parse_body_arguments recovers every field and file part,
    including CRLF sanitization of a malicious content type."""
    args, files = {}, {}
    body, content_type = make_mfd(
        {
            'arg1': 'value1'
        },
        {
            # CRLFs in content_type must be collapsed, not emitted verbatim.
            'file0': [HTTPFile(filename='file0.rar', body='ARCHIVE',
                               content_type='some/type\r\n\r\nBAD DATA')],
            'file1': [HTTPFile(filename='file1.png', body='CAT PICTURE',
                               content_type=media_types.IMAGE_PNG)],
            'file2': [HTTPFile(filename='file2.txt', body='TEXT')],
            'file3': [
                HTTPFile(filename=r'file3-"part1".unknown', body='BODY1'),
                HTTPFile(filename=r'file3-\part2\.unknown', body='BODY2'),
            ],
        }
    )
    parse_body_arguments(to_unicode(content_type), body, args, files)
    self.assertEqual(args['arg1'], [b'value1'])
    self.assertEqual(files['file0'][0]['filename'], 'file0.rar')
    self.assertEqual(files['file0'][0]['body'], b'ARCHIVE')
    self.assertEqual(files['file0'][0]['content_type'], 'some/type BAD DATA')
    self.assertEqual(files['file1'][0]['filename'], 'file1.png')
    self.assertEqual(files['file1'][0]['body'], b'CAT PICTURE')
    self.assertEqual(files['file1'][0]['content_type'], media_types.IMAGE_PNG)
    self.assertEqual(files['file2'][0]['filename'], 'file2.txt')
    self.assertEqual(files['file2'][0]['body'], b'TEXT')
    # Missing content type defaults by filename extension.
    self.assertEqual(files['file2'][0]['content_type'], media_types.TEXT_PLAIN)
    self.assertEqual(files['file3'][0]['filename'], r'file3-"part1".unknown')
    self.assertEqual(files['file3'][0]['body'], b'BODY1')
    self.assertEqual(files['file3'][0]['content_type'],
                     media_types.APPLICATION_OCTET_STREAM)
    self.assertEqual(files['file3'][1]['filename'], r'file3-\part2\.unknown')
    self.assertEqual(files['file3'][1]['body'], b'BODY2')
    self.assertEqual(files['file3'][1]['content_type'],
                     media_types.APPLICATION_OCTET_STREAM)
def __init__(self, request, data):
    """Decode a msgpack-serialized HTTP request (py2/py3 compatible via six)
    into headers, request meta, flattened arguments and uploaded files,
    keeping a reference to the underlying transport request."""
    self._underlying_request = request
    method, url, version, headers, self._body = msgpack_unpackb(data)
    if six.PY3:
        # msgpack yields bytes; decode to str so HTTPHeaders and urlparse work.
        method = method.decode()
        url = url.decode()
        version = version.decode()
        headers = [(k.decode(), v.decode()) for k, v in headers]
    self._headers = HTTPHeaders(headers)
    self._meta = {
        'method': method,
        'version': version,
        'host': self._headers.get('Host', ''),
        'remote_addr': self._headers.get('X-Real-IP') or self._headers.get('X-Forwarded-For', ''),
        'query_string': urlparse.urlparse(url).query,
        'cookies': dict(),
        # parsed_cookies comes from a project helper; semantics not visible here.
        'parsed_cookies': http_parse_cookies(self._headers),
    }
    # Body arguments are merged into the query-string argument lists.
    args = urlparse.parse_qs(urlparse.urlparse(url).query)
    self._files = dict()
    parse_body_arguments(self._headers.get("Content-Type", ""),
                         self._body, args, self._files)
    # dict_list_to_single presumably collapses one-element lists -- project helper.
    self._request = dict_list_to_single(args)
def post(self):
    """Authenticate a user from a multipart login body and return a
    session cookie as JSON.

    Fixes: py2-only ``print`` statements replaced with the function form
    (valid on py2 and py3 for a single argument); the dicts passed to
    ``parse_body_arguments`` were misleadingly named ``files``/``headers``
    although they receive the *arguments* and *files* -- renamed locally.
    """
    self.set_status(202)
    user_data = {'username': '', 'password': ''}
    body_args = {}
    files = {}
    util.parse_body_arguments(self.request.headers['Content-Type'],
                              self.request.body, body_args, files)
    for k in user_data.keys():
        if k not in body_args:
            self.set_status(400, reason='incorrect data')
            print('INF: incorrect request data')
            return
        user_data[k] = body_args[k][0]
    print("DEB: login username " + user_data['username'])
    user_cn = MySQLConnector()
    user_cookie = user_cn.create_cookie(user_data)
    if user_cookie is None:
        self.set_status(
            401, reason='no such user-password pair')  # unauthorized
        return
    self.write(json.dumps({'user_cookie': user_cookie}))
def data_received(self, chunk):  # pylint: disable=too-many-branches, too-many-statements
    """Streaming body handler: buffers the multipart body, spooling file
    parts to disk (replacing each file's bytes with its temp-file path in
    self.data), then parses the rewritten body once fully received.

    State machine (self.stream.state): 0 = seeking part boundary,
    1 = accumulating a part header, 2 = spooling a file part to disk,
    3 = passing non-file content through.
    NOTE(review): structure reconstructed from a flattened source line --
    verify branch nesting, especially the header/state-1 hand-off, against
    the original.
    """
    if self.error:
        raise self.error  # pylint: disable=raising-bad-type
    self.bytes_read += len(chunk)
    if len(
            self.data
    ) > 104857600:  # Ensure the someone is not trying to fill RAM, 100MB
        raise error.SnakeError('Content-Length too large (truncated)')
    if self.stream:  # Cache files to disk
        # Prepend the saved tail so boundaries split across chunks are found.
        chunk = self.stream.tail + chunk
        chunk_len = len(chunk)
        i = 0
        while i < chunk_len:
            if self.stream.state == 0:  # Find start of header
                soh = chunk.find(b'--' + self.stream.boundary, i)
                if soh != -1:
                    self.data += chunk[soh:soh + len(self.stream.boundary) + 4]
                    i = soh + len(self.stream.boundary) + 4
                    self.stream.state = 1
                    continue
            elif self.stream.state == 1:  # Find end of header
                eoh = chunk.find(b'\r\n\r\n', i)
                if eoh != -1:
                    self.stream.header += chunk[i:eoh + 4]
                    i = eoh + 4
                    if b'filename=' in self.stream.header:  # We have a file
                        self.stream.state = 2
                    else:
                        self.stream.state = 3
                    self.data += self.stream.header
                    self.stream.header = bytes()
                    continue
            elif self.stream.state == 2:  # Handle file based content
                soh = chunk.find(b'--' + self.stream.boundary, i)
                if soh != -1:
                    # Flush the rest of this file part to its temp file and
                    # substitute the on-disk path into the rewritten body.
                    f_path = path.join(self.stream.working_dir.name,
                                       str(self.stream.file_count))
                    with open(f_path, 'a+b') as f:
                        f.write(chunk[i:soh - 2])  # -2 drops the extra '\r\n'
                    self.data += bytes(f_path + '\r\n', 'utf-8')
                    self.stream.file_count += 1
                    i = soh
                    self.stream.state = 0
                    continue
            elif self.stream.state == 3:  # Handle all other content
                soh = chunk.find(b'--' + self.stream.boundary, i)
                if soh != -1:
                    self.data += chunk[i:soh]
                    i = soh
                    self.stream.state = 0
                    continue
            # Handle the overlapping tail
            if i + TAIL_SIZE < chunk_len:
                # Consume up to the tail window, routing bytes by state.
                if self.stream.state == 2:
                    f_path = path.join(self.stream.working_dir.name,
                                       str(self.stream.file_count))
                    with open(f_path, 'a+b') as f:
                        f.write(chunk[i:chunk_len - TAIL_SIZE])
                elif self.stream.state == 1:
                    self.stream.header += chunk[i:chunk_len - TAIL_SIZE]
                else:
                    self.data += chunk[i:chunk_len - TAIL_SIZE]
                self.stream.tail = chunk[chunk_len - TAIL_SIZE:]
                i += chunk_len
            else:
                # Chunk too small to split; keep it all as the next tail.
                self.stream.tail = chunk[i:]
                i += chunk_len
    else:  # Otherwise be normal
        self.data += chunk
    if self.bytes_read >= self.content_length:
        # Finished, parse the new content
        httputil.parse_body_arguments(self.content_type, self.data,
                                      self.request.body_arguments,
                                      self.request.files, headers=None)
        for k, v in self.request.body_arguments.items():
            self.request.arguments.setdefault(k, []).extend(v)
def _obtain_put_arguments(self):
    """Parse the www-form-urlencoded PUT body and return its argument dict."""
    values = {}
    # File parts are irrelevant for a form-encoded PUT; discard them.
    parse_body_arguments('application/x-www-form-urlencoded',
                         self.request.body, values, {})
    return values
def get_dict(self, *keys, **extras):
    """Returns request dict of given keys.

    This is taken from the httpbin source code and modified to fit
    `tornado.web.RequestHandler`.

    https://github.com/kennethreitz/httpbin/blob/master/httpbin/helpers.py#L113
    """
    request = self.request
    # Whitelist of keys a caller may request.
    _keys = {'url', 'args', 'data', 'origin', 'headers', 'files', 'json'}
    assert _keys >= set(keys)
    args = {key: self.get_argument(key) for key in request.arguments.keys()}
    body = request.body
    content_type = request.headers.get('Content-Type', '')
    data = {}
    files = {}
    if body:
        parse_body_arguments(content_type, body, data, files)

    def stringify(value):
        # Recursively decode bytes to str inside lists/dicts.
        if isinstance(value, list):
            value = [stringify(item) for item in value]
        elif isinstance(value, bytes):
            value = value.decode('utf-8')
        elif isinstance(value, dict):
            value = {key: stringify(val) for key, val in value.items()}
        return value

    # Convert bytes to str
    data = stringify(data)

    def elmentify(value):
        # Collapse single-element argument lists to their only element.
        if isinstance(value, list):
            if len(value) == 1:
                value = value[0]
        return value

    data = {key: elmentify(value) for key, value in data.items()}
    # Best-effort JSON rendering of the body data; None when not encodable.
    try:
        _json = json_encode(data)
    except:
        _json = None
    # Prefer the X-Forwarded-For value over the socket peer address.
    xff = request.headers.get('X-Forwarded-For', '')
    origin = request.remote_ip
    if xff:
        origin = xff
    d = dict(
        url=request.uri,
        args=args,
        data=data,
        origin=origin,
        headers=request.headers,
        files=files,
        json=_json
    )
    # Project only the requested keys, then overlay caller-supplied extras.
    out_d = dict()
    for key in keys:
        out_d[key] = d.get(key)
    out_d.update(extras)
    return out_d
def post(self, slug):
    """Slack slash-command webhook: validate the slug, resolve the command
    from the workspace's slackbot config, map positional args to named
    pipeline parameters, and trigger the pipeline.

    NOTE(review): structure reconstructed from a flattened source line --
    verify nesting of the argument-count check against the original.
    """
    log.info('SlackbotHandler %s' % slug)
    workspace = self.settings['workspace_path']
    # validate request
    pipelines_slug = _read_slackbot_state(workspace)
    if slug != pipelines_slug:
        raise HTTPError(
            404, 'Slug doesn\'t match %s %s' % (slug, pipelines_slug))
    # Compile the slackbot command configs the first time
    slackbot_commands = load_slackbot_commands(workspace)
    # def update_config(pipeline_config):
    #     pipeline_config['slackbot'] = slackbot_configs
    # update_pipelines_config(workspace, update_config)
    log.debug('Generate slackbot commands. Found %s slackbot commands' %
              len(slackbot_commands.keys()))
    body_args = {}
    parse_body_arguments('application/x-www-form-urlencoded',
                         self.request.body, body_args, {})
    log.info('Body: %s' % json.dumps(body_args))
    command_args = self._parse_body_args(body_args)
    # Bare/usage invocations get the help text instead of a pipeline run.
    if command_args['command'] in ['', 'usage', 'help', 'commands']:
        return self.usage(slackbot_commands, command_args['slash_command'])
    if command_args['command'] not in slackbot_commands:
        log.warn('Unknown slack command %s' % command_args['command'])
        self.write({
            'response_type': 'in_channel',
            'text': 'Unknown command %s' % command_args['command'],
            'attachments': []
        })
        return self.finish()
    command_config = slackbot_commands[command_args['command']]
    # slackbot_context = self._get_slackbot_context(command_args['command'], workspace)
    # if not slackbot_context:
    #     raise HTTPError(404, 'Not found')
    pipeline_args = {'trigger': 'slackbot', 'slakbot_args': command_args}
    # Map args
    named_slackbot_args = {}
    if command_config.get('arguments'):
        # Positional slack args must match the configured argument names 1:1.
        if len(command_config.get('arguments')) != len(
                command_args['args']):
            log.warn('Incorrect amount of arguments %s, expecting %s' %
                     (len(command_args['args']),
                      len(command_config.get('arguments'))))
            return 'Incorrect amount of arguments %s, expecting %s' % (len(
                command_args['args']), len(
                    command_config.get('arguments')))
        for i, arg_name in enumerate(command_config.get('arguments', [])):
            named_slackbot_args[arg_name] = command_args['args'][i]
    pipeline_args.update(named_slackbot_args)
    pipeline_args[
        'webhook_content'] = named_slackbot_args  # For legacy reasons

    def response_fn(handler, task_id):
        # Ack back into the channel with the named args that were used.
        handler.write({
            'response_type': 'in_channel',
            'text': '%s triggered by %s' % (command_args['command'],
                                            command_args.get('user')),
            'attachments': [{
                "text": yaml.dump(named_slackbot_args,
                                  default_flow_style=False)
            }]
        })
        handler.finish()

    return _run_pipeline(self, workspace, command_config['slug'],
                         params=pipeline_args, response_fn=response_fn)