def fill_all_headers(self, req):
    """Set content-type, content-md5, date to the request.

    :param req: a prepared request object with ``url``, ``headers`` and
        ``body`` attributes; mutated in place.
    :return: the same request, with headers filled in.
    """
    url = urlsplit(req.url)
    content_type, __ = mimetypes.guess_type(url.path)
    if content_type is None:
        content_type = self.DEFAULT_TYPE
        # logger.warn() is deprecated; logger.warning() with lazy %-args
        # avoids formatting the message when the level is disabled.
        logger.warning("can not determine mime-type for %s", url.path)
    if self._expires is None:
        # sign with url, no content-type for url
        req.headers.setdefault("content-type", content_type)
    if (
        req.body is not None
        and req.headers.get("content-md5") is None
        and self._allow_empty_md5 is False
    ):
        logger.debug("calculating content-md5")
        content, content_md5 = utils.cal_content_md5(req.body)
        req.body = content
        req.headers["content-md5"] = content_md5
        logger.debug("new content-md5 is: %s", content_md5)
    else:
        logger.debug("skip content-md5 calculation")
    if self._expires is None:
        # date participates in header-based signing only
        req.headers.setdefault(
            "date", time.strftime(self.DATE_FMT, time.gmtime())
        )
    return req
def gen_str_to_sign(self, req):
    """Generate string to sign using giving prepared request"""
    parts = urlsplit(req.url)
    # bucket name is the first label of the hostname
    bucket = parts.netloc.split(".", 1)[0]
    logger.debug(req.headers.items())
    # canonicalize the x-ucloud-* headers: lowercase keys, sorted,
    # values stripped, one "key:value" per line
    canon_headers = "\n".join(
        "{0}:{1}".format(name, value.strip())
        for name, value in sorted(req.headers.lower_items())
        if name.startswith("x-ucloud-")
    )
    canon_resource = "/{0}{1}".format(bucket, parts.path)
    pieces = [
        req.method,
        req.headers.get("content-md5", ""),
        req.headers.get("content-type", ""),
        req.headers.get("date", self._expires),
        canon_headers + canon_resource,
    ]
    return "\n".join(pieces)
def test_sign_in_url(self, mock_strftime):
    """URL-signing mode must place the expected params in the query string."""
    mock_strftime.return_value = 0
    auth = UFileAuth("public-key", "private-key", expires=1)
    signed = auth(self.request.prepare())
    query = urlsplit(signed.url).query
    eq_(dict(parse_qsl(query)), PARAMS)
def __make_request(self, url, method=u'GET', headers=None, body_string=None,
                   **kwargs):
    """Sign the request for VinylDNS and send it, returning the checked
    response."""
    # remove retries arg if provided
    kwargs.pop(u'retries', None)
    path = urlparse(url).path
    # we must parse the query string so we can provide it if it exists so
    # that we can pass it to the build_vinyldns_request so that it can be
    # properly included in the AWS signing...
    query = parse_qs(urlsplit(url).query)
    if query:
        # parse_qs returns a list for ALL params, even single values;
        # flatten any param that has exactly one value
        query = {k: (v if len(v) > 1 else v[0]) for k, v in iteritems(query)}

    signed_headers, signed_body = self.__build_vinyldns_request(
        method, path, body_string, query,
        with_headers=headers or {}, **kwargs)

    response = self.session.request(method, url, data=signed_body,
                                    headers=signed_headers, **kwargs)
    return self.__check_response(response, method)
def clean_url(url):
    """Return a cleaned URL that starts with a scheme and has a path with a
    trailing slash, or an empty string for blank/None input."""
    # blank or whitespace-only input yields the empty string
    if not (url and url.strip()):
        return ''
    url = url.strip()
    # no scheme separator: make it scheme-relative so urlsplit parses the
    # host into netloc instead of path
    if '://' not in url:
        url = '//' + url
    scheme, netloc, path, query, fragment = urlsplit(url, 'http')
    # an empty path becomes the root path
    path = path or '/'
    return urlunsplit((scheme, netloc, path, query, fragment))
def __call__(self, req):
    """Sign the request"""
    req = self.fill_all_headers(req)
    str_to_sign = self.gen_str_to_sign(req)
    logger.debug("string to sign is:\n{0}".format(str_to_sign))
    # HMAC-SHA1 over the canonical string, base64-encoded
    digest = hmac.HMAC(
        self._private_key, utils.to_bytes(str_to_sign), hashlib.sha1
    ).digest()
    signature = base64.b64encode(digest).decode("utf8")
    if self._expires:
        # URL-based signing: fold the signature into the query string
        split = urlsplit(req.url)
        query = dict(parse_qsl(split.query))
        query["Expires"] = self._expires
        query["UCloudPublicKey"] = self._public_key
        query["Signature"] = signature
        req.url = "{0}://{1}{2}?{3}".format(
            split.scheme, split.netloc, split.path, urlencode(query)
        )
    else:
        # header-based signing
        req.headers["Authorization"] = "UCloud {0}:{1}".format(
            self._public_key, signature
        )
    # remove empty header
    for name in [k for k, v in req.headers.items() if not v]:
        logger.debug("deleting empty header key: {0}".format(name))
        del req.headers[name]
    return req
def _get_ssl_options(self, req, verify, cert): if urlsplit(req.url).scheme == "https": # If we are using the defaults, don't construct a new SSLContext. if req.ssl_options is not None: return req.ssl_options # deal with verify & cert ssl_options = {} if verify: cert_loc = None # Allow self-specified cert location. if verify is not True: cert_loc = verify if not cert_loc: cert_loc = DEFAULT_CA_BUNDLE_PATH if not cert_loc or not os.path.exists(cert_loc): raise IOError( "Could not find a suitable TLS CA certificate bundle, " "invalid path: {0}".format(cert_loc)) # you may change this to avoid server's certificate check ssl_options["cert_reqs"] = 2 # ssl.CERT_REQUIRED ssl_options["ca_certs"] = cert_loc if cert: if not isinstance(cert, basestring): cert_file = cert[0] key_file = cert[1] else: cert_file = cert key_file = None if cert_file and not os.path.exists(cert_file): raise IOError("Could not find the TLS certificate file, " "invalid path: {0}".format(conn.cert_file)) if key_file and not os.path.exists(key_file): raise IOError("Could not find the TLS key file, " "invalid path: {0}".format(conn.key_file)) if key_file is not None: ssl_options["keyfile"] = key_file if cert_file is not None: ssl_options["certfile"] = cert_file # SSL interoperability is tricky. We want to disable # SSLv2 for security reasons; it wasn't disabled by default # until openssl 1.0. The best way to do this is to use # the SSL_OP_NO_SSLv2, but that wasn't exposed to python # until 3.2. Python 2.7 adds the ciphers argument, which # can also be used to disable SSLv2. As a last resort # on python 2.6, we set ssl_version to TLSv1. This is # more narrow than we'd like since it also breaks # compatibility with servers configured for SSLv3 only, # but nearly all servers support both SSLv3 and TLSv1: # http://blog.ivanristic.com/2011/09/ssl-survey-protocol-support.html if (2, 7) <= sys.version_info: # In addition to disabling SSLv2, we also exclude certain # classes of insecure ciphers. 
ssl_options["ciphers"] = "DEFAULT:!SSLv2:!EXPORT:!DES" else: # This is really only necessary for pre-1.0 versions # of openssl, but python 2.6 doesn't expose version # information. ssl_options["ssl_version"] = 3 # ssl.PROTOCOL_TLSv1 return ssl_options return None
def make_request(self, url, method=u'GET', headers=None, body_string=None,
                 sign_request=True, not_found_ok=False, **kwargs):
    """Send a (optionally AWS-signed) request and return status + parsed body.

    :param sign_request: when True, sign headers/body via
        build_vinyldns_request; otherwise pass them through unchanged.
    :param not_found_ok: use the session that tolerates 404 responses.
    :param kwargs: may carry ``status`` (expected status code or iterable of
        codes, asserted on the response) and ``retries`` (dropped).
    :return: (status_code, json body) — falls back to the raw text when the
        body is not JSON.
    """
    # pull out status or None
    status_code = kwargs.pop(u'status', None)
    # remove retries arg if provided
    kwargs.pop(u'retries', None)
    path = urlparse(url).path
    # we must parse the query string so we can provide it if it exists so that we can pass it to the
    # build_vinyldns_request so that it can be properly included in the AWS signing...
    query = parse_qs(urlsplit(url).query)
    if query:
        # the problem with parse_qs is that it will return a list for ALL params, even if they are a single value
        # we need to essentially flatten the params if a param has only one value
        query = dict(
            (k, v if len(v) > 1 else v[0]) for k, v in iteritems(query))

    if sign_request:
        signed_headers, signed_body = self.build_vinyldns_request(
            method, path, body_string, query,
            with_headers=headers or {}, **kwargs)
    else:
        signed_headers = headers or {}
        signed_body = body_string

    session = self.session_not_found_ok if not_found_ok else self.session
    response = session.request(method, url, data=signed_body,
                               headers=signed_headers, **kwargs)

    if status_code is not None:
        # collections.Iterable was removed in Python 3.10; collections.abc
        # is the supported location.
        if isinstance(status_code, collections.abc.Iterable):
            assert_that(response.status_code, is_in(status_code))
        else:
            assert_that(response.status_code, is_(status_code))

    try:
        return response.status_code, response.json()
    except ValueError:
        # .json() raises a ValueError subclass on non-JSON bodies;
        # the previous bare except: hid unrelated errors.
        return response.status_code, response.text
def _add_pagination_params(self, url, **kwargs): """ If a result limit is configured, enforces it by copying kwargs and inserting the request parameter that controls page size. :return: a copy of kwargs that contains the request parameter to control page size """ param_name = self.result_limit_param_name result_limit = self.result_limit original_url = url # return early if this session isn't configured to auto-set pagination parameters if not (result_limit and param_name): return url, kwargs # look for / replace pagination parameters encoded in the URL itself. Note this may be # a common use case, e.g. in DRF where each paged response includes the full URL of the # next results page url_parts = urlsplit(url) url_query_params = parse_qs(url_parts.query) found_in_url = param_name in url_query_params if found_in_url: url_param_val = url_query_params.get(param_name) updated_value = [str(self.result_limit)] if url_param_val != updated_value: logger.warning( 'An existing request parameter named "%(param)s" was present in the URL. This ' "value (%(url_val)s) will be overridden to %(new_val)s" % { "param": param_name, "url_val": url_param_val, "new_val": updated_value, }) url_query_params[param_name] = updated_value url = url_parts.geturl() logger.debug("......Original URL: %s, Updated URL: %s" % (original_url, url)) # look for & replace pagination parameter explicitly provided via request kwargs params = kwargs.get("params") if not params: params = {} if not found_in_url: params[param_name] = result_limit kwargs["params"] = params elif param_name in params.keys(): existing_value = params.get(param_name) if existing_value != self.result_limit: logger.warning( 'An existing request parameter named "%(param)s" was present. ' "This value (%(existing)s) will be overridden to (%(new)s)" % { "param": param_name, "existing": existing_value, "new": self.result_limit, }) params[param_name] = result_limit elif not found_in_url: params[param_name] = result_limit return url, kwargs
def post(self):
    """Parse a raw HTTP request pasted into the form, generate request code
    from it, save the code to a file and redirect to the run view.

    Expects at least three lines: a request line, headers, and a blank
    separator (body optional).
    """
    form = RequestDataForm()
    data = form.data.data
    lines = data.splitlines(True)
    if len(lines) < 3:
        return 'data less 3 lines'

    origin_headers = []
    body = []
    body_start = False
    for index, line in enumerate(lines):
        if index == 0:
            # request line, e.g. "GET /path?a=1 HTTP/1.1"
            method, path, _ = line.split(' ')
            continue
        if not line.split():
            # first whitespace-only line separates headers from body;
            # note any later blank lines are also skipped, not kept in body
            body_start = True
            continue
        if body_start:
            body.append(line)
        else:
            line = line.strip()
            key, value = line.split(': ', 1)
            origin_headers.append((key, value))
    # for get header value
    header_dict = CaseInsensitiveDict(origin_headers)
    method = method.lower()
    body = ''.join(body)
    content_type = header_dict.get('Content-Type', '')
    # set headers
    headers = []
    origin_host = header_dict.get('Host')
    if form.host.data and origin_host and form.host.data != origin_host:
        # NOTE(review): this appends the ORIGINAL Host header even though the
        # form supplied a different host — confirm this is intended
        headers.append(('Host', header_dict.get('Host')))
    user_agent = header_dict.get('User-Agent')
    referer = header_dict.get('Referer')
    if user_agent:
        headers.append(('User-Agent', user_agent))
    if referer:
        headers.append(('Referer', referer))
    # set cookie
    cookies = []
    cookie = header_dict.get('Cookie')
    C = SimpleCookie(cookie)
    for morsel in C.values():
        cookies.append((morsel.key, morsel.coded_value))
    # the form host wins over the pasted request's Host header
    host = form.host.data or header_dict.get('Host')
    p = urlsplit(path)
    url = urljoin('http://{}'.format(host), p.path)
    params = [(x, repr_value(y)) for x, y in parse_qsl(p.query)]
    if not content_type:
        pass
    elif 'x-www-form-urlencoded' in content_type:
        # form bodies become (key, value) pairs
        body = [(x, repr_value(y)) for x, y in parse_qsl(body)]
    elif 'json' in content_type:
        # JSON bodies become (key, value) pairs from the top-level object
        body = [(x, repr_value(y)) for x, y in json.loads(body).items()]
    else:
        # unknown body types are passed through with their content-type header
        headers.append(('Content-Type', content_type))
    code = render_template(
        'code.html', method=method, url=url, params=params, body=body,
        headers=headers, cookies=cookies, content_type=content_type
    )
    form = CodeForm()
    form.code.data = code
    name = form.save()
    url = url_for('run.run', filename=name)
    return redirect(url)
def post(self):
    """Parse a raw HTTP request pasted into the form, generate request code
    from it, and render the code page."""
    form = RequestDataForm()
    raw = form.data.data
    raw_lines = raw.splitlines(True)
    if len(raw_lines) < 3:
        return 'data less 3 lines'

    collected_headers = []
    body_lines = []
    in_body = False
    for idx, current in enumerate(raw_lines):
        if idx == 0:
            # request line: METHOD PATH VERSION
            method, path, _ = current.split(' ')
            continue
        if not current.split():
            # a whitespace-only line marks the start of the body
            in_body = True
            continue
        if in_body:
            body_lines.append(current)
        else:
            stripped = current.strip()
            key, value = stripped.split(': ', 1)
            collected_headers.append((key, value))

    # case-insensitive access to the parsed headers
    header_dict = CaseInsensitiveDict(collected_headers)
    method = method.lower()
    body = ''.join(body_lines)
    content_type = header_dict.get('Content-Type', '')

    # pick the headers worth reproducing in the generated code
    headers = []
    origin_host = header_dict.get('Host')
    if form.host.data and origin_host and form.host.data != origin_host:
        headers.append(('Host', header_dict.get('Host')))
    user_agent = header_dict.get('User-Agent')
    referer = header_dict.get('Referer')
    if user_agent:
        headers.append(('User-Agent', user_agent))
    if referer:
        headers.append(('Referer', referer))

    # cookies come from the Cookie header, decoded morsel by morsel
    jar = SimpleCookie(header_dict.get('Cookie'))
    cookies = [(morsel.key, morsel.coded_value) for morsel in jar.values()]

    host = form.host.data or header_dict.get('Host')
    split_path = urlsplit(path)
    url = urljoin('http://{}'.format(host), split_path.path)
    params = [(k, repr_value(v)) for k, v in parse_qsl(split_path.query)]

    if not content_type:
        pass
    elif 'x-www-form-urlencoded' in content_type:
        body = [(k, repr_value(v)) for k, v in parse_qsl(body)]
    elif 'json' in content_type:
        body = [(k, repr_value(v)) for k, v in json.loads(body).items()]
    else:
        headers.append(('Content-Type', content_type))

    code = render_template(
        'code.html', method=method, url=url, params=params, body=body,
        headers=headers, cookies=cookies, content_type=content_type
    )
    return render_template('code-page.html', code=code)
def _get_path(self, url):
    """Return only the path component of *url*."""
    return compat.urlsplit(url).path