def _HandleBlockStoreGetRequest(self, minfo):
    """
    Get a value from the untrusted block store / cache
    {
        "key" : <>,
    }
    """
    try:
        key = minfo['key']
    except KeyError as ke:
        logger.error('missing field in request: %s', ke)
        raise Error(http.BAD_REQUEST, 'missing field {0}'.format(ke))
    except Exception as e:
        logger.error(
            'unknown exception unpacking request (BlockStoreGetRequest); {0}/{1}'
            .format(type(e), str(e)))
        raise Error(
            http.BAD_REQUEST,
            'unknown exception unpacking request (BlockStoreGetRequest)')

    try:
        response = self.Enclave.block_store_get(key)
        return {'result': response}
    except Exception as e:
        logger.error(
            'unknown exception processing request (BlockStoreGetRequest); {0}/{1}'
            .format(type(e), str(e)))
        raise Error(
            http.BAD_REQUEST,
            'unknown exception processing request (BlockStoreGetRequest)')
def _HandleUpdateContractRequest(self, minfo):
    # {
    #     "encrypted_session_key" : <>,
    #     "encrypted_request" : <>
    # }
    try:
        encrypted_session_key = minfo['encrypted_session_key']
        encrypted_request = minfo['encrypted_request']
    except KeyError as ke:
        logger.error('missing field in request: %s', ke)
        raise Error(http.BAD_REQUEST, 'missing field {0}'.format(ke))
    except Exception as e:
        logger.error(
            'unknown exception unpacking request (UpdateContractRequest); {0}/{1}'
            .format(type(e), str(e)))
        raise Error(
            http.BAD_REQUEST,
            'unknown exception unpacking request (UpdateContractRequest)')

    try:
        response = self.Enclave.send_to_contract(encrypted_session_key,
                                                 encrypted_request)
        return {'result': response}
    except Exception as e:
        logger.error(
            'unknown exception processing request (UpdateContractRequest); {0}/{1}'
            .format(type(e), str(e)))
        raise Error(
            http.BAD_REQUEST,
            'unknown exception processing request (UpdateContractRequest)')
def _HandleBlockStoreHeadRequest(self, minfo):
    """
    Test if a key is in the untrusted block store / cache
    {
        "key" : <>,
    }
    """
    try:
        key = minfo['key']
    except KeyError as ke:
        logger.error('missing field in request: %s', ke)
        raise Error(http.BAD_REQUEST, 'missing field {0}'.format(ke))
    except Exception as e:
        logger.error(
            'unknown exception unpacking request (BlockStoreHeadRequest); {0}/{1}'
            .format(type(e), str(e)))
        raise Error(
            http.BAD_REQUEST,
            'unknown exception unpacking request (BlockStoreHeadRequest)')

    try:
        datalen = self.Enclave.block_store_head(key)
        return {'length': str(datalen)}
    except Exception as e:
        logger.error(
            'unknown exception processing request (BlockStoreHeadRequest); {0}/{1}'
            .format(type(e), str(e)))
        raise Error(
            http.BAD_REQUEST,
            'unknown exception processing request (BlockStoreHeadRequest)')
def _HandleVerifySecretRequest(self, minfo):
    ## {
    ##     "contract_id" : <>,
    ##     "creator_id" : <>,
    ##     "secrets" : [
    ##         {
    ##             "pspk" : <>,
    ##             "encrypted_secret" : <>
    ##         }
    ##     ]
    ## }
    try:
        contractid = minfo['contract_id']
        creatorid = minfo['creator_id']
        secrets = minfo['secrets']

        # verify the integrity of the secret list
        for secret in secrets:
            assert secret['pspk']
            assert secret['encrypted_secret']

    except KeyError as ke:
        logger.error('missing field in request: %s', ke)
        raise Error(http.BAD_REQUEST, 'missing field {0}'.format(ke))

    try:
        verify_response = self.Enclave.verify_secrets(contractid, creatorid,
                                                      secrets)
        return dict(verify_response)
    except:
        logger.exception('HandleVerifySecretsRequest')
        raise Error(http.BAD_REQUEST, "HandleVerifySecrets")
def _HandleBlockStorePutRequest(self, minfo):
    """
    Store a value into the untrusted block store / cache
    {
        "key" : <>,
        "value" : <>,
    }
    """
    try:
        key = minfo['key']
        value = minfo['value']
    except KeyError as ke:
        logger.error('missing field in request: %s', ke)
        raise Error(http.BAD_REQUEST, 'missing field {0}'.format(ke))

    try:
        self.Enclave.block_store_put(key, value)
        return {'result': "OK"}
    except:
        logger.exception('HandleBlockStorePutRequest')
        raise Error(http.BAD_REQUEST, "HandleBlockStorePutRequest")
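# A minimal, self-contained sketch of the put/head/get contract the three
# block-store handlers above appear to implement. InMemoryBlockStore is a
# hypothetical stand-in for the real Enclave-backed store; the handlers only
# rely on these three methods.
class InMemoryBlockStore(object):
    def __init__(self):
        self._store = {}

    def block_store_put(self, key, value):
        self._store[key] = value

    def block_store_head(self, key):
        # mirrors an HTTP HEAD: report the stored value's length, no body
        return len(self._store[key])

    def block_store_get(self, key):
        return self._store[key]

store = InMemoryBlockStore()
store.block_store_put('k1', 'some-value')
assert store.block_store_head('k1') == len('some-value')
assert store.block_store_get('k1') == 'some-value'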
def parse_from_request(self, name, request):
    # type: (str, Request) -> Any
    """
    Parse parameter by name from request object.
    Throws 400 client error if parameter is required, but missing.

    :param name: Name of parameter in query
    :param request: Twisted request object
    :return: Parsed value
    """
    name_bytes = name.encode()
    if name_bytes not in request.args:
        if self.default is not None:
            return self.default
        if self.required:
            raise Error(BAD_REQUEST, message=b"%s is required" % name_bytes)
        else:
            return None
    if len(request.args[name_bytes]) != 1:
        raise Error(BAD_REQUEST,
                    message=b"Pass exactly one argument for %s" % name_bytes)
    val = request.args[name_bytes][0]
    return self.parse(val)
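# A hedged usage sketch for parse_from_request. FakeRequest mimics only the
# one attribute the method reads (request.args: a dict mapping bytes keys to
# lists of bytes values); the real object would be a twisted.web Request.
class FakeRequest(object):
    def __init__(self, args):
        self.args = args

# Assuming `param` is an instance of a Param subclass providing the
# .default, .required and .parse() members seen in this listing:
#
#   param.parse_from_request('limit', FakeRequest({b'limit': [b'10']}))
#     -> the parsed value
#   param.parse_from_request('limit', FakeRequest({}))
#     -> default if set, None if optional, Error(BAD_REQUEST) if required
#   param.parse_from_request('limit', FakeRequest({b'limit': [b'1', b'2']}))
#     -> Error(BAD_REQUEST, b"Pass exactly one argument for limit")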
def render_GET(self, request):
    print "Rendering ProcessView"
    if "snapshot" not in request.args:
        raise Error("No snapshot id specified")
    if "pid" not in request.args:
        raise Error("No process id specified")
    c = self.conn.cursor()
    request.setHeader("content-type", "text/html")
    process_id = request.args["pid"][0]
    snapshot_id = request.args["snapshot"][0]
    data = [['Memory type', "Value (Kb)"]]
    stmt = "select Process, %s from %s where pid = '%s' and snap_id = '%s'" % (
        self.fields, self.process_detail_table, process_id, snapshot_id)
    print "stmt = %s" % stmt
    for (i, field) in enumerate(self.labels):
        pass  # see loop below
    for row in c.execute(stmt):
        for (i, field) in enumerate(self.labels):
            data.append([self.labels[field], int(row[i + 1])])
    flattenString(
        None,
        ProcessElement("static/process-stats.html", data,
                       self.labels)).addCallback(self.renderOutput)
    request.write(self.output)
    return ""
def process_request(self, request):
    path = request.postpath
    if (("content" in request.args) and (len(request.args["content"]) > 0)):
        data = json.loads(request.args["content"][0])
    else:
        data = {}

    if ((len(path) < 2) or (path[0].lower() != "v1")):
        return Error(http.NOT_FOUND, "Invalid url")

    # Requesting entries from signpost server
    # curl --data 'content={"port":8080,"ip":["10.10.0.3"]}' \
    #     http://10.10.0.2:8080/v1/register
    if (path[1].lower() == "register"):
        self._logger.info("[%s] signpost_register : %s" %
                          (str(request.getClientIP()), str(data)))
        if ((type(data) is dict) and ('port' in data) and ('ip' in data)):
            return self._server_data.add_json_server_list(data)
        else:
            self._logger.error(
                "[%s] signpost_register: Cannot parse data section" %
                str(request.getClientIP()))
            return Error(http.NOT_FOUND, "Cannot parse data section")

    # Requesting entries from signpost server
    if (path[1].lower() == "signpost"):
        if ('data' in data):
            self._logger.info("[%s]:signpost_list_request" %
                              (str(request.getClientIP())))
            return self._server_data.get_json_server_list()

    # Adding any additional entries in the resource list for a
    # specific device and replying with the full list of data
    elif (path[1].lower() == "resources") and (len(path) >= 3):
        domain = path[2]
        device = None
        if (len(path) >= 4):
            device = path[3]

        # Need to make this a bit more clear maybe?
        if ((len(data) > 0)
                and (self._server_data.validate_resource_list(data))):
            self._server_data.add_json_resource_list(domain, device, data)
            self._logger.info("[%s] add_resource : %s" %
                              (str(request.getClientIP()), str(data)))
        return self._server_data.get_json_resource_list(domain, device)
    else:
        self._logger.info("[%s] invalid service request %s" %
                          (str(request.getClientIP()), str(path)))
        return ("<html>Hello, world! %s (%d) </html>" % (path, len(path)))

def server_loop(self):
    site = server.Site(self)
    reactor.listenTCP(8080, site)
    reactor.run()

def run(self):
    self.server_loop()
def parse(self, val):
    # type: (bytes) -> int
    try:
        val_int = int(val)
    except (TypeError, ValueError):
        raise Error(BAD_REQUEST, b"Invalid integer: %s" % val)
    if self.min_val is not None and val_int < self.min_val:
        raise Error(BAD_REQUEST, b"Minimum value %d" % self.min_val)
    if self.max_val is not None and val_int > self.max_val:
        raise Error(BAD_REQUEST, b"Maximum value %d" % self.max_val)
    return val_int
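# A hedged sketch of the bounds-checking behaviour above. IntParam is the
# presumed class name; it only needs the min_val/max_val attributes that
# parse() reads:
#
#   IntParam(min_val=1, max_val=100).parse(b"42")  # -> 42
#   IntParam(min_val=1, max_val=100).parse(b"0")   # -> Error: b"Minimum value 1"
#   IntParam(min_val=1).parse(b"not-a-number")     # -> Error: b"Invalid integer: ..."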
def maybeAutoLogin(self, request):
    header = request.getHeader(self.header)
    if header is None:
        raise Error(403, b"missing http header " + self.header +
                    b". Check your reverse proxy config!")
    res = self.headerRegex.match(header)
    if res is None:
        raise Error(403, b'http header does not match regex! "' + header +
                    b'" not matching ' + self.headerRegex.pattern)
    session = request.getSession()
    if session.user_info != dict(res.groupdict()):
        session.user_info = dict(res.groupdict())
        yield self.updateUserInfo(request)
def render_post(self, request, components, msg):
    """
    Process validator control commands
    """
    encoding = request.getHeader('Content-Type')
    data = request.content.getvalue()

    try:
        if encoding == 'application/json':
            minfo = json2dict(data)
        else:
            raise Error("", 'unknown message'
                        ' encoding: {0}'.format(encoding))
    except Error as e:
        LOGGER.info('exception while decoding http request %s; %s',
                    request.path, traceback.format_exc(20))
        raise Error(http.BAD_REQUEST,
                    'unable to decode incoming request {0}'.format(str(e)))

    # process /command
    try:
        if minfo['action'] == 'start':
            if self.Validator.delaystart is True:
                self.Validator.delaystart = False
                LOGGER.info("command received : %s", minfo['action'])
                minfo['action'] = 'started'
            else:
                LOGGER.warn("validator startup not delayed")
                minfo['action'] = 'running'
        else:
            LOGGER.warn("unknown command received")
            minfo['action'] = 'startup failed'

        request.responseHeaders.addRawHeader("content-type", encoding)
        result = dict2json(minfo)
        return result

    except Error as e:
        raise Error(int(e.status), 'exception while processing'
                    ' request {0}; {1}'.format(request.path, str(e)))

    except:
        LOGGER.info('exception while processing http request %s; %s',
                    request.path, traceback.format_exc(20))
        raise Error(http.BAD_REQUEST, 'error processing http request'
                    ' {0}'.format(request.path))

    return msg
def parse(self, val):
    # type: (bytes) -> _JSON_TYPE
    try:
        data = json.loads(val)  # type: _JSON_TYPE
        if self.validator is not None:
            self.validator.validate(data)
    except ValueError as ex:
        error_message = str(ex).encode("utf-8")
        raise Error(BAD_REQUEST, b"Invalid JSON: %s" % error_message)
    except ValidationError as ex:
        error_message = ex.message.encode("utf-8")
        raise Error(BAD_REQUEST, b"JSON schema error: %s" % error_message)
    else:
        return data
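# A self-contained sketch of the kind of validator the parse() above appears
# to expect: jsonschema validators raise ValidationError with a .message
# attribute, matching the except clause. The schema here is illustrative.
import json

from jsonschema import Draft4Validator, ValidationError

schema = {"type": "object", "required": ["name"]}
validator = Draft4Validator(schema)

data = json.loads('{"name": "example"}')
validator.validate(data)              # passes silently

try:
    validator.validate(json.loads('{}'))
except ValidationError as ex:
    print(ex.message)                 # "'name' is a required property"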
def maybeAutoLogin(self, request):
    header = request.getHeader(self.header)
    if header is None:
        raise Error(403, "missing http header %s. Check your reverse proxy config!" %
                    (self.header))
    res = self.headerRegex.match(header)
    if res is None:
        raise Error(403, 'http header does not match regex! "%s" not matching %s' %
                    (header, self.headerRegex.pattern))
    session = request.getSession()
    if not hasattr(session, "user_info"):
        session.user_info = dict(res.groupdict())
    yield self.updateUserInfo(request)
def maybeAutoLogin(self, request):
    header = request.getHeader(self.header)
    if header is None:
        msg = b"missing http header " + self.header + b". Check your reverse proxy config!"
        raise Error(403, msg)
    res = self.headerRegex.match(header)
    if res is None:
        msg = b'http header does not match regex! "' + header + \
            b'" not matching ' + self.headerRegex.pattern
        raise Error(403, msg)
    session = request.getSession()
    user_info = {k: bytes2unicode(v) for k, v in res.groupdict().items()}
    if session.user_info != user_info:
        session.user_info = user_info
        yield self.updateUserInfo(request)
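# A self-contained sketch of the header/regex pairing the maybeAutoLogin
# variants above rely on. The header value and pattern are assumptions for
# illustration only; the project's actual defaults may differ.
import re

headerRegex = re.compile(br"(?P<username>[^ @]+)@(?P<realm>[^ @]+)")

header = headerRegex.match(b"joe@EXAMPLE.ORG")   # e.g. an X-Remote-User value
assert header is not None
print(header.groupdict())   # {'username': b'joe', 'realm': b'EXAMPLE.ORG'}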
def checkMediaType(self, request):
    ctype = request.getHeader(b'content-type')
    if not ctype:
        request.setResponseCode(http.BAD_REQUEST)
        raise Error(http.BAD_REQUEST, b"No specified Media Type")
    message = _parseContentType(ctype)
    if (message.maintype != b'application'
            or message.subtype != b'atom+xml'
            or message.getparam(b'type') != b'entry'
            or (message.getparam(b'charset') or b'utf-8') != b'utf-8'):
        raise Error(http.UNSUPPORTED_MEDIA_TYPE,
                    b"Unsupported Media Type: %s" % ctype)
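# A hedged sketch of the only media type the check above accepts, using the
# stdlib cgi.parse_header (available through Python 3.12) to stand in for
# the internal _parseContentType helper:
import cgi

ctype, params = cgi.parse_header('application/atom+xml; type=entry; charset=utf-8')
maintype, _, subtype = ctype.partition('/')
assert maintype == 'application' and subtype == 'atom+xml'
assert params.get('type') == 'entry'
assert params.get('charset', 'utf-8') == 'utf-8'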
def parse_data(channel_info):
    channel_info = channel_info or {}
    data_raw = body.strip()
    data_lines = map(str.strip, data_raw.split('\n'))
    parse_header(channel_info, data_lines)
    header_fields = channel_info.get('header_fields')
    if not header_fields:
        raise Error(
            http.BAD_REQUEST,
            response='Could not process data, please supply field names '
                     'via CSV header before sending readings')
    #print 'data_lines:', data_lines; pprint(data_lines)
    data_list = []
    for data_line in data_lines:
        data_fields = map(str.strip, data_line.replace(';', ',').split(','))
        #print 'header_fields, data_fields:', header_fields, data_fields
        data = OrderedDict(zip(header_fields, data_fields))
        self.manipulate_data(data, channel_info)
        data_list.append(data)
    return data_list
def get_page_with_exception(self, url, *args, **kwds):
    try:
        raise Error(self.status, "There's been an error", self.payload)
    except:
        failure = Failure()
    return fail(failure)
def test_run_with_error_payload(self):
    """
    If the returned HTTP error contains a payload, it's printed out.
    """
    self.prepare_command("Bad Request", 400, "DescribeRegions",
                         {"RegionName.0": "us-west-1"},
                         self.get_error_page, Error(400, None, "bar"))

    def check(result):
        url = (
            "http://endpoint?AWSAccessKeyId=key&"
            "Action=DescribeRegions&RegionName.0=us-west-1&"
            "Signature=P6C7cQJ7j93uIJyv2dTbpQG3EI7ArGBJT%2FzVH%2BDFhyY%3D&"
            "SignatureMethod=HmacSHA256&SignatureVersion=2&"
            "Timestamp=2010-06-04T23%3A40%3A00Z&Version=2008-12-01")
        self.assertEqual("GET", self.method)
        self.assertEqual(url, self.url)
        self.assertEqual(
            "URL: %s\n"
            "\n"
            "HTTP status code: 400\n"
            "\n"
            "400 Bad Request\n"
            "\n"
            "bar\n" % url,
            self.output.getvalue())

    deferred = self.command.run()
    deferred.addCallback(check)
    return deferred
def getChild(self, name, request):
    try:
        year = int(name)
    except ValueError:
        return Error()
    else:
        return YearPage(year)
def GET(self, urlpath, followRedirect=False, return_response=False,
        method="GET", clientnum=0, **kwargs):
    # if return_response=True, this fires with (data, statuscode,
    # respheaders) instead of just data.
    assert not isinstance(urlpath, unicode)
    url = self.client_baseurls[clientnum] + urlpath
    response = yield treq.request(method, url, persistent=False,
                                  allow_redirects=followRedirect, **kwargs)
    data = yield response.content()
    if return_response:
        # we emulate the old HTTPClientGetFactory-based response, which
        # wanted a tuple of (bytestring of data, bytestring of response
        # code like "200" or "404", and a
        # twisted.web.http_headers.Headers instance). Fortunately treq's
        # response.headers has one.
        defer.returnValue((data, str(response.code), response.headers))
    if 400 <= response.code < 600:
        raise Error(response.code, response=data)
    defer.returnValue(data)
def getPage(self, url, timeout=None, headers=None):
    list_url_re = re.compile(
        r"https://bitbucket.org/api/2.0/repositories/{}/{}/pullrequests".format(
            self.owner, self.slug))
    pr_url_re = re.compile(
        r"https://bitbucket.org/!api/2.0/repositories/{}/{}/pullrequests/(?P<id>\d+)".format(
            self.owner, self.slug))
    source_commit_url_re = re.compile(
        r"https://bitbucket.org/!api/2.0/repositories/(?P<src_owner>.*)/(?P<src_slug>.*)/commit/(?P<hash>\d+)")  # noqa pylint: disable=line-too-long
    source_url_re = re.compile(
        r"https://bitbucket.org/!api/2.0/repositories/(?P<src_owner>.*)/(?P<src_slug>.*)")

    if list_url_re.match(url):
        return defer.succeed(self.request())

    m = pr_url_re.match(url)
    if m:
        return self.pr_by_id[int(m.group("id"))].request()

    m = source_commit_url_re.match(url)
    if m:
        return self.src_by_url["{}/{}".format(
            m.group("src_owner"), m.group("src_slug"))].request()

    m = source_url_re.match(url)
    if m:
        return self.src_by_url["{}/{}".format(
            m.group("src_owner"), m.group("src_slug"))].repo_request()

    raise Error(code=404)
def parse(self, val):
    # type: (bytes) -> bool
    if val is None or val == b"false":
        return False
    if val == b"true":
        return True
    raise Error(BAD_REQUEST,
                message=b"Boolean parameter must be 'true' or 'false'")
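# Hedged usage sketch of the boolean parse() contract above; BoolParam is
# the presumed class name for whichever Param subclass defines it:
#
#   BoolParam().parse(b"true")   # -> True
#   BoolParam().parse(b"false")  # -> False
#   BoolParam().parse(None)      # -> False (an absent value is treated as false)
#   BoolParam().parse(b"TRUE")   # -> Error(BAD_REQUEST, ...); matching is exact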
def crawl(self, spidercls, *args, **kwargs):
    if isinstance(spidercls, str):
        spidercls = self.spider_loader.load(spidercls)
    for kw in kwargs:
        attr_or_m = getattr(spidercls, kw, None)
        if attr_or_m and callable(attr_or_m):
            msg = 'Crawl argument cannot override spider method.'
            msg += ' Got argument {} that overrides spider method {}'
            raise Error('400',
                        message=msg.format(kw, getattr(spidercls, kw)))
    # creating our own crawler that will allow us to disable start requests easily
    crawler = ScrapyrtCrawler(spidercls, self.settings,
                              self.scrapyrt_manager.start_requests)
    self.scrapyrt_manager.crawler = crawler
    # Connecting signals to handlers that control crawl process
    crawler.signals.connect(self.scrapyrt_manager.get_item,
                            signals.item_scraped)
    crawler.signals.connect(self.scrapyrt_manager.collect_dropped,
                            signals.item_dropped)
    crawler.signals.connect(self.scrapyrt_manager.spider_idle,
                            signals.spider_idle)
    crawler.signals.connect(self.scrapyrt_manager.handle_spider_error,
                            signals.spider_error)
    crawler.signals.connect(self.scrapyrt_manager.handle_scheduling,
                            signals.request_scheduled)
    dfd = super(ScrapyrtCrawlerProcess, self).crawl(crawler, *args, **kwargs)
    _cleanup_handler = setup_spider_logging(crawler.spider, self.settings)

    def cleanup_logging(result):
        _cleanup_handler()
        return result

    return dfd.addBoth(cleanup_logging)
def parse(self, val):
    # type: (bytes) -> UUID
    val_str = val.decode()
    try:
        return UUID(val_str)
    except ValueError:
        raise Error(BAD_REQUEST, message=UUIDParam.MALFORMED_ERROR_MSG)
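# A self-contained sketch of the decode-then-UUID pattern above; the sample
# value is illustrative.
from uuid import UUID

val = b"12345678-1234-5678-1234-567812345678"
print(UUID(val.decode()))        # 12345678-1234-5678-1234-567812345678

try:
    UUID(b"not-a-uuid".decode())
except ValueError:
    print("malformed UUID rejected")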
def render_POST(self, request, **kwargs):
    """
    :param request: body should contain JSON

    Required keys in JSON posted:

    :spider_name: string name of spider to be scheduled.

    :request: json object request to be scheduled with spider.
        Note: request must contain url for spider.
        It may contain kwargs to scrapy request.
    """
    request_body = request.content.getvalue()
    try:
        api_params = json.loads(request_body)
    except Exception as e:
        message = "Invalid JSON in POST body. {}"
        message = message.format(e)
        # TODO should be integer not string?
        raise Error('400', message=message)

    log.msg("{}".format(api_params))
    self.validate_options(api_params)

    return self.prepare_crawl(api_params, **kwargs)
def parse(self, val):
    # type: (bytes) -> str
    val_str = val.decode(self.encoding)
    if val_str not in self.enum:
        error_msg = b"Parameter must be one of %s" % str(sorted(self.enum)).encode()
        raise Error(BAD_REQUEST, error_msg)
    return val_str
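# Hedged usage sketch for the enum parse() above, assuming the class carries
# the .encoding and .enum attributes it reads (names hypothetical):
#
#   param.enum = {"asc", "desc"}; param.encoding = "utf-8"
#   param.parse(b"asc")     # -> "asc"
#   param.parse(b"random")  # -> Error(BAD_REQUEST,
#                           #          b"Parameter must be one of ['asc', 'desc']")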
def render_GET(self, request):
    """
    Handle GET requests, parse out q parameter to delegate to the correct
    internal function.

    There are two types of queries allowed:
    - to get the meta-data as json-rpc message:
      http://.../WebGL?q=meta&vid=456
    - to get a binary webgl object:
      http://.../WebGL?q=mesh&vid=456&id=1&part=0

    vid : visualization view ID (GlobalID of the view Proxy)
    id  : id of the object in the scene
    part: WebGL has a size limit, therefore an object can be split into
          several parts. This is the part index.
    """
    try:
        q = self._get_parameter(request.args, 'q')
        vid = self._get_parameter(request.args, 'vid', False)

        if q == 'mesh':
            return self._render_GET_mesh(vid, request)
        elif q == 'meta':
            return self._render_GET_meta(vid, request)
        else:
            raise Error(400, "Invalid query for the WebGL resource")
    except Error as err:
        request.setResponseCode(err.status)
        return err.message
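# A hedged sketch of building the two query types the docstring above
# describes; the host and ids are illustrative placeholders.
try:
    from urllib.parse import urlencode          # Python 3
except ImportError:
    from urllib import urlencode                # Python 2

base = "http://localhost:8080/WebGL"
meta_url = base + "?" + urlencode({"q": "meta", "vid": 456})
mesh_url = base + "?" + urlencode({"q": "mesh", "vid": 456, "id": 1, "part": 0})
print(meta_url)   # http://localhost:8080/WebGL?q=meta&vid=456
print(mesh_url)   # .../WebGL?q=mesh&vid=456&id=1&part=0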
def render_POST(self, request, **kwargs):
    """
    :param request: body should contain JSON

    Required keys in JSON posted:

    :spider_name: string name of spider to be scheduled.

    :request: json object request to be scheduled with spider.
        Note: request must contain url for spider.
        It may contain kwargs to scrapy request.
    """
    request_body = request.content.getvalue()
    try:
        request_data = demjson.decode(request_body)
    except ValueError as e:
        message = "Invalid JSON in POST body. {}"
        message = message.format(e.pretty_description())
        raise Error('400', message=message)

    log.msg("{}".format(request_data))
    spider_data = self.get_required_argument(request_data, "request")
    error_msg = "Missing required key 'url' in 'request' object"
    self.get_required_argument(spider_data, "url", error_msg=error_msg)

    return self.prepare_crawl(request_data, spider_data, **kwargs)
def _render_GET_mesh(self, vid, request):
    """
    Handle GET requests to get WebGL data for a particular object.
    """
    object_id = self._get_parameter(request.args, 'id')
    part_number = self._get_parameter(request.args, 'part', False)
    part = 0
    if part_number:
        part = int(part_number)
    view = self._get_view(vid)

    # The part index in the query is offset by 1
    if part > 0:
        part = part - 1

    data = pv_web_app.GetWebGLBinaryData(view.SMProxy, object_id, part)
    if data:
        request.setHeader('content-type', 'application/octet-stream+webgl')
        request.setHeader('Cache-Control', 'max-age=99999')
        data = base64.b64decode(data)
    else:
        raise Error(404, "Invalid request for WebGL object")
    return data
def test_charm_download_http_error(self):
    """Errors in downloading a charm are reported as charm not found.
    """
    def match_string(expected, value):
        self.assertTrue(isinstance(value, basestring))
        self.assertIn(expected, value)
        return True

    mock_storage = self.mocker.patch(self.storage)
    mock_storage.get_url(
        MATCH(partial(match_string, "local_3a_series_2f_dummy-1")))
    remote_url = "http://example.com/foobar.zip"
    self.mocker.result(remote_url)

    download_page = self.mocker.replace(downloadPage)
    download_page(
        MATCH(partial(match_string, "http://example.com/foobar.zip")),
        MATCH(partial(match_string, "local_3a_series_2f_dummy-1")))
    self.mocker.result(fail(Error("400", "Bad Stuff", "")))
    self.mocker.replay()

    charm, charm_state = yield self.publish_charm()
    charm_directory = self.makeDir()
    self.assertEqual(charm_state.bundle_url, remote_url)

    error = yield self.assertFailure(
        download_charm(self.client, charm_state.id, charm_directory),
        FileNotFound)
    self.assertIn(remote_url, str(error))