def on_get(self, external_project_id, **kw):
    """List certificate authorities visible to the project (limited view)."""
    LOG.debug('Start certificate_authorities on_get (limited)')

    plugin_name = kw.get('plugin_name')
    if plugin_name is not None:
        plugin_name = parse.unquote_plus(plugin_name)

    plugin_ca_id = kw.get('plugin_ca_id', None)
    if plugin_ca_id is not None:
        plugin_ca_id = parse.unquote_plus(plugin_ca_id)

    # refresh CA table, in case plugin entries have expired
    cert_resources.refresh_certificate_resources()

    project_model = res.get_or_create_project(external_project_id)

    # Choose the query once, then call it with the shared arguments.
    if self._project_cas_defined(project_model.id):
        fetch = self._get_subcas_and_project_cas
    else:
        fetch = self._get_subcas_and_root_cas

    cas, offset, limit, total = fetch(
        offset=kw.get('offset', 0),
        limit=kw.get('limit', None),
        plugin_name=plugin_name,
        plugin_ca_id=plugin_ca_id,
        project_id=project_model.id)

    return self._display_cas(cas, offset, limit, total)
def load(self, url, offset, length):
    """Open an S3 key via boto, optionally restricted to a byte range."""
    if not s3_avail:  # pragma: no cover
        raise IOError('To load from s3 paths, '
                      'you must install boto: pip install boto')

    access_key = self.aws_access_key_id
    secret_key = self.aws_secret_access_key

    parts = urlsplit(url)

    # Credentials embedded in the URL win over the configured ones.
    if parts.username and parts.password:
        access_key = unquote_plus(parts.username)
        secret_key = unquote_plus(parts.password)
        bucket_name = parts.netloc.split('@', 1)[-1]
    else:
        bucket_name = parts.netloc

    if not self.s3conn:
        try:
            self.s3conn = connect_s3(access_key, secret_key)
        except Exception:  # pragma: no cover
            self.s3conn = connect_s3(anon=True)

    bucket = self.s3conn.get_bucket(bucket_name)
    key = bucket.get_key(parts.path)

    if offset == 0 and length == -1:
        headers = {}
    else:
        headers = {'Range': BlockLoader._make_range_header(offset, length)}

    # Read range
    key.open_read(headers=headers)
    return key
def __init__(self, uri):
    """Parse an sftp-style URI into host, port, credentials and path.

    Credentials are percent-decoded; a missing port defaults to 22.
    Username/password stay None when the URI carries none — previously
    unquote_plus(None) raised TypeError for credential-less URIs.
    """
    u = urlparse(uri)
    self.host = u.hostname
    self.port = int(u.port or "22")
    # unquote only when present; unquote_plus(None) raises TypeError
    self.username = unquote_plus(u.username) if u.username is not None else None
    self.password = unquote_plus(u.password) if u.password is not None else None
    self.path = u.path
def _verify_user_metadata(self, req_hdrs, name, value, key):
    """Assert that user metadata *name* was moved into an encrypted
    transient-sysmeta header carrying the crypto parameters.

    :param req_hdrs: headers of the captured backend request
    :param name: user metadata name (without the X-Object-Meta- prefix)
    :param value: plaintext metadata value expected to be encrypted
    :param key: encryption key used to reproduce the expected ciphertext
    """
    # verify encrypted version of user metadata
    self.assertNotIn('X-Object-Meta-' + name, req_hdrs)
    expected_hdr = 'X-Object-Transient-Sysmeta-Crypto-Meta-' + name
    self.assertIn(expected_hdr, req_hdrs)
    # header format: "<b64 ciphertext>; swift_meta=<url-quoted json>"
    enc_val, param = req_hdrs[expected_hdr].split(';')
    param = param.strip()
    self.assertTrue(param.startswith('swift_meta='))
    actual_meta = json.loads(
        urlparse.unquote_plus(param[len('swift_meta='):]))
    self.assertEqual(Crypto.cipher, actual_meta['cipher'])
    # the IV must be the stubbed FAKE_IV so the ciphertext is reproducible
    meta_iv = base64.b64decode(actual_meta['iv'])
    self.assertEqual(FAKE_IV, meta_iv)
    self.assertEqual(base64.b64encode(encrypt(value, key, meta_iv)),
                     enc_val)

    # if there is any encrypted user metadata then this header should exist
    self.assertIn('X-Object-Transient-Sysmeta-Crypto-Meta', req_hdrs)
    common_meta = json.loads(
        urlparse.unquote_plus(
            req_hdrs['X-Object-Transient-Sysmeta-Crypto-Meta']))
    self.assertDictEqual(
        {
            'cipher': Crypto.cipher,
            'key_id': {
                'v': 'fake',
                'path': '/a/c/fake'
            }
        }, common_meta)
def get_params():
    """Parse the plugin parameter string from ``sys.argv[2]`` into a dict.

    If the string looks base64-encoded (global ``b64`` set, or it lacks
    the usual ``&``/``=`` separators), it is url-unquoted and base64
    decoded first.  Returns {} when no parameters are present.

    Fix: the bare ``except:`` (which would also swallow SystemExit and
    KeyboardInterrupt) is narrowed to ``except Exception``.
    """
    _log("get_params")
    param_string = sys.argv[2]
    if b64 or '%3D' in str(param_string) or '&' not in str(
            param_string) or '=' not in str(param_string):
        try:
            if '?' in str(param_string):
                param_string = param_string.split('?')[1]
            param_string = six.ensure_text(
                base64.b64decode(six.ensure_binary(
                    unquote_plus(param_string))))
        except Exception:
            # not actually base64 — fall through with the raw string
            _log("Error, base64 can't be decoded")
    commands = {}
    if param_string:
        split_commands = param_string[param_string.find('?') + 1:].split('&')
        for command in split_commands:
            _log("get_params command=" + str(command))
            if len(command) > 0:
                if "=" in command:
                    split_command = command.split('=')
                    key = split_command[0]
                    value = unquote_plus(split_command[1])
                    commands[key] = value
                else:
                    commands[command] = ""
    _log("get_params " + repr(commands))
    return commands
def get_url_params(self):
    """Return the query portion of ``self.path`` as a key/value dict."""
    query = self.path.split('?')[-1]
    if six.PY2:
        decoded = unquote_plus(query).decode('utf8')
    else:
        decoded = unquote_plus(query)
    pairs = [item.split('=') for item in decoded.split('&')]
    return dict(pairs)
def url_unquote_plus(v, name='(Unknown name)', md=None):
    """URL-unquote *v* (plus-as-space), tolerating py2 unicode and py3 bytes.

    ``name`` and ``md`` are accepted for call-site compatibility and are
    unused here; ``md`` previously defaulted to a shared mutable dict
    (the classic mutable-default pitfall) and now defaults to None.
    """
    if md is None:
        md = {}
    if six.PY2 and isinstance(v, unicode):
        # unquote_plus does not handle unicode. Encoding to a "safe"
        # intermediate encoding before quoting, then unencoding the result.
        return unquote_plus(v.encode('utf-8')).decode('utf-8')
    elif six.PY3 and isinstance(v, bytes):
        # NOTE(review): returns bytes for bytes input — presumably
        # intentional round-tripping; confirm callers expect bytes back.
        return unquote_plus(v.decode('utf-8')).encode('utf-8')
    return unquote_plus(str(v))
def unquote(string):
    """Percent-decode *string* (plus-as-space) and return native text."""
    if six.PY2 and isinstance(string, six.text_type):
        decoded = unquote_plus(string.encode('utf-8'))
    else:
        decoded = unquote_plus(string)
    if isinstance(decoded, six.binary_type):
        decoded = decoded.decode('utf-8')
    return decoded
def load(self, url, offset, length):
    """Fetch an object (or byte range) from S3 via boto3.

    :param url: s3:// URL; may embed percent-encoded credentials as
        ``user:pass@bucket``
    :param offset: starting byte, 0 for the whole object
    :param length: number of bytes, -1 meaning "to the end"
    :returns: the streaming Body of the S3 GetObject response
    :raises IOError: when boto3 is not installed
    """
    if not s3_avail:  # pragma: no cover
        raise IOError('To load from s3 paths, ' +
                      'you must install boto3: pip install boto3')
    aws_access_key_id = self.aws_access_key_id
    aws_secret_access_key = self.aws_secret_access_key
    parts = urlsplit(url)
    # Credentials embedded in the URL override the configured ones.
    if parts.username and parts.password:
        aws_access_key_id = unquote_plus(parts.username)
        aws_secret_access_key = unquote_plus(parts.password)
        bucket_name = parts.netloc.split('@', 1)[-1]
    else:
        bucket_name = parts.netloc
    # S3 keys have no leading slash.
    key = parts.path[1:]
    if offset == 0 and length == -1:
        range_ = ''
    else:
        range_ = BlockLoader._make_range_header(offset, length)

    def s3_load(anon=False):
        # Lazily create a client (anonymous on fallback) and cache it on
        # self only after a successful request.
        if not self.client:
            if anon:
                config = Config(signature_version=UNSIGNED)
            else:
                config = None
            client = boto3.client('s3',
                                  aws_access_key_id=aws_access_key_id,
                                  aws_secret_access_key=aws_secret_access_key,
                                  config=config)
        else:
            client = self.client
        res = client.get_object(Bucket=bucket_name,
                                Key=key,
                                Range=range_)
        if not self.client:
            self.client = client
        return res

    try:
        obj = s3_load(anon=False)
    except Exception:
        # Retry anonymously only when no working client has been cached yet.
        if not self.client:
            obj = s3_load(anon=True)
        else:
            raise
    return obj['Body']
def __init__(self, url, user='', password='', host='localhost',
             protocol='', path='', port=0, parameters=None):
    """Split *url* into protocol, credentials, host, port, path and
    percent-decoded query parameters.

    ``parameters`` previously defaulted to a shared mutable list; it now
    defaults to None (treated as []), which is backward compatible since
    the value was always copied, never mutated in place.

    :raises ValueError: on multiple '://' separators or a query pair
        containing more than one '='.
    """
    if parameters is None:
        parameters = []
    errmsg = "".join(("Invalid url <", url, ">"))
    # chop protocol
    pieces = url.split('://')
    if len(pieces) == 1:
        url = pieces[0]
    elif len(pieces) == 2:
        protocol = pieces[0]
        url = pieces[1]
    else:
        raise ValueError(errmsg)
    pos = url.find('/')
    if pos < 0:
        server = url
    else:
        server = url[:pos]
        path = url[pos + 1:]
    if '@' in server:
        user, server = server.split('@')
        if ':' in user:
            user, password = user.split(':')
    if ':' in server:
        server, port = server.split(':')
        port = int(port)
    if server != '':
        host = server
    if '?' in path:
        path, pars = path.split('?')
        parameters = [pair.split('=') for pair in pars.split('&')]
        if any(len(pair) > 2 for pair in parameters):
            raise ValueError(errmsg)
        parameters = [[parse.unquote_plus(p) for p in pair]
                      for pair in parameters]
    self.protocol = protocol
    self.user = user
    self.password = password
    self.host = host
    self.port = port
    self.path = parse.unquote_plus(path)
    self.parameters = parameters[:]
def parse_url_to_dict(url, assume_localhost=False):
    """Parse a url and return a dict with keys for all of the parts.

    The urlparse function() returns a wacky combination of a namedtuple
    with properties.

    Fix: the empty-scheme check used ``scheme is ''`` (identity, not
    equality) — it only worked by accident of CPython string interning
    and emits a SyntaxWarning on 3.8+; it is now ``==``.
    """
    from six.moves.urllib.parse import urlparse, urlsplit, urlunsplit, unquote_plus, ParseResult
    from six import text_type
    import re

    assert url is not None

    url = text_type(url)

    # Windows drive-letter paths ("C:...") are converted to file URLs.
    if re.match(r'^[a-zA-Z]:', url):
        url = path2url(url)
        p = urlparse(unquote_plus(url))
        # urlparse leaves a '/' before the drive letter.
        p = ParseResult(p.scheme, p.netloc, p.path.lstrip('/'),
                        p.params, p.query, p.fragment)
    else:
        p = urlparse(url)

    # '+' indicates that the scheme has a scheme extension
    if '+' in p.scheme:
        scheme_extension, scheme = p.scheme.split('+')
    else:
        scheme = p.scheme
        scheme_extension = None

    if scheme == '':
        scheme = 'file'

    return {
        'scheme': scheme,
        'scheme_extension': scheme_extension,
        'netloc': p.netloc,
        'hostname': p.hostname,
        'path': p.path,
        'params': p.params,
        'query': p.query,
        'fragment': unquote_plus(p.fragment) if p.fragment else None,
        'username': p.username,
        'password': p.password,
        'port': p.port
    }
def unquote_plus(text):
    """unquote('%7e/abc+def') -> '~/abc def'"""
    try:
        if six.PY2:
            if isinstance(text, six.text_type):
                text = text.encode('utf-8')
            return parse.unquote_plus(text).decode('utf-8')
        # Enforce utf-8 validation
        return parse.unquote_plus(text, errors="strict")
    except (UnicodeEncodeError, UnicodeDecodeError):
        # for some reason there is a non-ascii character here. Let's assume it
        # is already unicode (because of originally decoding the file)
        return text
def parse(body):
    """Parse the HTML picks page into the rendered picks text and week number.

    Fix: ``random.sample()`` no longer accepts a set (deprecated in 3.9,
    TypeError since 3.11); the unique picks are sorted into a list first,
    which also makes the sampled population order deterministic.

    :param body: url-quoted HTML document
    :returns: (newline-joined picks string, week number string)
    """
    body = unquote_plus(body)
    soup = BeautifulSoup(body, 'html.parser')
    tr_tags = soup.find_all('tr')
    # Keep rows that look like game rows: >= 4 cells and a kickoff time.
    pairs = [parse_tr(tr) for tr in tr_tags
             if len(tr.find_all('td')) >= 4 and has_time(str(tr))]
    favorites, underdogs = zip(*pairs)
    df_picks = pd.DataFrame({'favorite': favorites, 'underdog': underdogs})
    df_picks['my_pick'] = df_picks.apply(
        lambda row: choice([row['favorite'], row['underdog']]), axis=1)
    # sample() requires a sequence; a set raises TypeError on Python 3.11+
    best_bets = sample(sorted(set(df_picks.my_pick.unique())), 3)

    def add_best_bet(pick,):
        # Mark best bets with ***, strip markers and a leading "At ".
        pick = pick + '^^^' if pick in best_bets else pick
        pick = pick.replace('*', '')
        pick = pick.replace('^^^', '***')
        pick = pick[3:] if pick.startswith('At ') else pick
        return pick

    df_picks.my_pick = df_picks.my_pick.apply(add_best_bet)
    body_output = '\n'.join(df_picks.my_pick)
    # Drop phrases that would confuse the "week <n>" scan below.
    text_joined = soup.get_text(" ").lower()
    text_joined = text_joined.replace('last week', '').replace(
        'other week', '').replace('this week', '')
    text_parts = [t.strip() for t in text_joined.split()]
    week_index = text_parts.index("week")
    week_num = text_parts[week_index + 1]
    return body_output, week_num
def process_key_event(event, context):
    """Remediate each S3 object named in the Lambda event's records."""
    processor = EncryptExtantKeys(config)
    for record in event.get('Records', []):
        bucket = record['s3']['bucket']['name']
        obj = record['s3']['object']
        key = {'Key': unquote_plus(obj['key']), 'Size': obj['size']}
        version = obj.get('versionId')
        if version is None:
            method = processor.process_key
        else:
            key['VersionId'] = version
            # lambda event is always latest version, but IsLatest
            # is not in record
            key['IsLatest'] = True
            method = processor.process_version
        try:
            result = retry(method, s3, key, bucket)
        except ClientError as e:
            # Ensure we know which key caused an issue
            print("error %s:%s code:%s" % (
                bucket, key['Key'], e.response['Error']))
            raise
        if not result:
            return
        print("remediated %s:%s" % (bucket, key['Key']))
def _manifest_middleware(self, data):
    """Optionally pass the manifest *data* through a plugin middleware URL.

    Writes the manifest to a temp file, invokes the configured plugin
    with that path (and the current headers), then reads back the
    possibly-rewritten manifest. Returns *data* unchanged when no
    middleware is configured.

    :raises Exception: when the plugin returns no files.
    """
    url = self._session.get('manifest_middleware')
    if not url:
        return data

    data_path = xbmc.translatePath('special://temp/proxy.manifest')
    with open(data_path, 'wb') as f:
        f.write(data.encode('utf8'))

    url = add_url_args(url, _data_path=data_path,
                       _headers=json.dumps(self._headers))
    log.debug('PLUGIN MANIFEST MIDDLEWARE REQUEST: {}'.format(url))
    dirs, files = run_plugin(url, wait=True)
    if not files:
        raise Exception('No data returned from plugin')

    # Plugin replies "<path>|<urlencoded headers>"; the headers part is
    # optional and, when present, replaces the plugin headers.
    path = unquote_plus(files[0])
    split = path.split('|')
    data_path = split[0]

    if len(split) > 1:
        self._plugin_headers = dict(
            parse_qsl(u'{}'.format(split[1]), keep_blank_values=True))

    with open(data_path, 'rb') as f:
        data = f.read().decode('utf8')

    # Keep the temp file around in dev mode for inspection.
    if not ADDON_DEV:
        remove_file(data_path)

    return data
def on_get(self, external_project_id, **kw):
    """List transport keys, optionally filtered by plugin name."""
    LOG.debug('Start transport_keys on_get')

    plugin_name = kw.get('plugin_name', None)
    if plugin_name is not None:
        plugin_name = parse.unquote_plus(plugin_name)

    transport_keys, offset, limit, total = self.repo.get_by_create_date(
        plugin_name=plugin_name,
        offset_arg=kw.get('offset', 0),
        limit_arg=kw.get('limit', None),
        suppress_exception=True
    )

    if not transport_keys:
        return {'transport_keys': [], 'total': total}

    key_hrefs = [
        hrefs.convert_transport_key_to_href(s.id)
        for s in transport_keys
    ]
    transport_keys_resp_overall = hrefs.add_nav_hrefs(
        'transport_keys', offset, limit, total,
        {'transport_keys': key_hrefs}
    )
    transport_keys_resp_overall.update({'total': total})
    return transport_keys_resp_overall
def get_service_interfaces(self, deployment_name, service_url):
    """Return the interfaces implemented by *service_url* within a
    deployment (or, when no deployment is given, within the project)."""
    # get all the keys for the deployment
    keys = self.__get_all_keys_for_deployment_or_project(deployment_name)

    # find the keys that represent interfaces implemented by the service and read them
    interfaces = []
    encoded_service_url = parse.quote_plus(service_url)
    for key in keys:
        if deployment_name:
            interface_id, encoded_interface_url = \
                self.__parse_s3_interface_key_for_deployment(key)
        else:
            interface_id, encoded_interface_url = \
                self.__parse_s3_interface_key_for_project(key)
        if not encoded_interface_url.startswith(encoded_service_url):
            continue
        res = self.__s3.get_object(Bucket=self.__bucket_name, Key=key)
        interfaces.append({
            'InterfaceId': interface_id,
            'InterfaceSwagger': res['Body'].read(),
            'InterfaceUrl': parse.unquote_plus(encoded_interface_url)
        })

    # return the interfaces found
    return interfaces
def process_key_event(event, context):
    """AWS Lambda entry point: remediate (re-encrypt) each S3 object named
    in the event's records.

    Stops at the first object for which the processor reports nothing to
    do; prints each remediated bucket/key for the Lambda log.
    """
    processor = EncryptExtantKeys(config)
    for record in event.get('Records', []):
        bucket = record['s3']['bucket']['name']
        # S3 event keys arrive url-encoded (spaces as '+').
        key = {
            'Key': unquote_plus(record['s3']['object']['key']),
            'Size': record['s3']['object']['size']
        }
        version = record['s3']['object'].get('versionId')
        if version is not None:
            key['VersionId'] = version
            # lambda event is always latest version, but IsLatest
            # is not in record
            key['IsLatest'] = True
            method = processor.process_version
        else:
            method = processor.process_key
        try:
            result = retry(method, s3, key, bucket)
        except ClientError as e:
            # Ensure we know which key caused an issue
            print("error %s:%s code:%s" % (bucket, key['Key'],
                                           e.response['Error']))
            raise
        if not result:
            return
        print("remediated %s:%s" % (bucket, key['Key']))
def generate_asset_url(match):
    """Build a Sphinx ``:download:`` role from a regex *match* carrying
    ``display`` and ``asset`` named groups.

    Bug fix: the template hard-coded a broken ``(unknown)`` target and
    never used the ``filename`` keyword that was being passed; the
    url-decoded asset name is now interpolated into the link target.
    """
    data = match.groupdict()
    return ':download:`{name} <resources/{filename}>`'.format(
        name=data['display'],
        filename=unquote_plus(data['asset']),
    )
def test_run_with_task(app, client):
    """End-to-end check: deploy the demo egg, enable the timer task,
    trigger schedule.run and verify the dumped task targets nodes [1, 2].

    Stores the created task id in the shared ``metadata`` dict for
    follow-up tests.
    """
    # ScrapydWeb_demo.egg: custom_settings = {}, also output specific settings & arguments in the log
    upload_file_deploy(app, client, filename='ScrapydWeb_demo_no_request.egg',
                       project=cst.PROJECT, redirect_project=cst.PROJECT)
    req(app, client, view='tasks.xhr', kws=dict(node=NODE, action='enable'),
        ins='STATE_RUNNING', nos='STATE_PAUSED')
    # url_for() needs an active request context
    with app.test_request_context():
        text, __ = req(app, client, view='schedule.run', kws=dict(node=NODE),
                       data=run_data, location=url_for('tasks', node=NODE))
    # The redirect body embeds the next-run time and the new task id.
    m = re.search(cst.TASK_NEXT_RUN_TIME_PATTERN, unquote_plus(text))
    task_id = int(m.group(1))
    print("task_id: %s" % task_id)
    metadata['task_id'] = task_id
    __, js = req(app, client, view='tasks.xhr',
                 kws=dict(node=NODE, action='dump', task_id=task_id))
    assert js['data']['selected_nodes'] == [1, 2]
def load_crypto_meta(value, b64decode=True):
    """
    Build the crypto_meta from the json object.

    Note that json.loads always produces unicode strings; to ensure the
    resultant crypto_meta matches the original object:
      * cast all keys to str (effectively a no-op on py3),
      * base64 decode 'key' and 'iv' values to bytes, and
      * encode remaining string values as UTF-8 on py2 (while leaving
        them as native unicode strings on py3).

    :param value: a string serialization of a crypto meta dict
    :param b64decode: decode the 'key' and 'iv' values to bytes, default True
    :returns: a dict containing crypto meta items
    :raises EncryptionException: if an error occurs while parsing the
                                 crypto meta
    """
    def b64_decode_meta(crypto_meta):
        # Recursively normalise the parsed dict per the rules above.
        return {
            str(name): (
                base64.b64decode(val)
                if name in ('iv', 'key') and b64decode
                else b64_decode_meta(val) if isinstance(val, dict)
                else val.encode('utf8') if six.PY2 else val)
            for name, val in crypto_meta.items()}

    try:
        if not isinstance(value, six.string_types):
            raise ValueError('crypto meta not a string')
        val = json.loads(urlparse.unquote_plus(value))
        # NOTE(review): collections.Mapping was removed in Python 3.10;
        # on a py3-only runtime this should be collections.abc.Mapping —
        # confirm supported interpreters before changing (py2 lacks the
        # abc alias).
        if not isinstance(val, collections.Mapping):
            raise ValueError('crypto meta not a Mapping')
        return b64_decode_meta(val)
    except (KeyError, ValueError, TypeError) as err:
        msg = 'Bad crypto meta %r: %s' % (value, err)
        raise EncryptionException(msg)
def update_params_for_auth(self, method, url, headers, querys, body,
                           auth_settings):
    """Updates header and query params based on authentication setting.

    :param method: Request method
    :param url: Request path, host included
    :param headers: Header parameters dict to be updated.
    :param querys: Query parameters tuple list to be updated.
    :param body: Request body
    :param auth_settings: Authentication setting identifiers list.
    """
    if not auth_settings:
        return

    for auth in auth_settings:
        auth_setting = self.configuration.auth_settings().get(auth)
        if not auth_setting:
            continue
        if auth_setting['type'] == 'apiv4':
            signed = self.gen_sign(method,
                                   urlparse(url).path,
                                   unquote_plus(urlencode(querys)),
                                   body)
            headers.update(signed)
        elif auth_setting['in'] == 'cookie':
            headers['Cookie'] = auth_setting['value']
        elif auth_setting['in'] == 'header':
            headers[auth_setting['key']] = auth_setting['value']
        elif auth_setting['in'] == 'query':
            querys.append((auth_setting['key'], auth_setting['value']))
        else:
            raise ApiValueError(
                'Authentication token must be in `query` or `header`')
def __init__(self, xmlsource):
    """Parse an XML playlist document and load its title, creator and tracks."""
    doc = parseString(xmlsource)
    self.xmldoc = doc
    self.name = unquote_plus(
        self._get_txt(doc, "title", "no playlistname"))
    self.creator = self._get_txt(doc, "creator", "no playlistcreator")
    self.parseTracks()
def decode_post_data(self, post_data):
    """Decode '&'-joined ``key=value`` pairs whose values are url-quoted JSON."""
    decoded = {}
    for pair in post_data.split('&'):
        key, _sep, raw = pair.partition('=')
        decoded[key] = jsonutils.loads(urlparse.unquote_plus(raw))
    return decoded
def _verify_postback(self):
    """Parse the PayPal PDT postback stored on ``self.response``.

    The first line carries the status token; on anything but SUCCESS a
    flag is set and parsing stops. Remaining ``key=value`` lines are
    collected and written onto this model via PayPalPDTForm (without
    committing to the database).
    """
    # ### Now we don't really care what result was, just whether a flag was set or not.
    from paypal.standard.pdt.forms import PayPalPDTForm
    response_list = self.response.split('\n')
    response_dict = {}
    for i, line in enumerate(response_list):
        unquoted_line = unquote_plus(line).strip()
        if i == 0:
            # first line is the overall status token
            self.st = unquoted_line
        else:
            if self.st != "SUCCESS":
                self.set_flag(line)
                break
            try:
                # lines starting with ' -' are detail/continuation lines,
                # not key=value pairs
                if not unquoted_line.startswith(' -'):
                    k, v = unquoted_line.split('=')
                    response_dict[k.strip()] = v.strip()
            except ValueError:
                # line without exactly one '=' — skip it
                pass
    qd = QueryDict('', mutable=True)
    qd.update(response_dict)
    qd.update(dict(ipaddress=self.ipaddress, st=self.st,
                   flag_info=self.flag_info, flag=self.flag,
                   flag_code=self.flag_code))
    pdt_form = PayPalPDTForm(qd, instance=self)
    pdt_form.save(commit=False)
def _get_tool(self, id, tool_version=None, user=None):
    """Look up a tool by (url-quoted) id; raise ObjectNotFound when it is
    missing or the user may not access it."""
    id = unquote_plus(id)
    tool = self.app.toolbox.get_tool(id, tool_version)
    if not tool or not tool.allow_user_access(user):
        message = "Could not find tool with id '%s'" % id
        raise exceptions.ObjectNotFound(message)
    return tool
def _parse_fragment(self):
    # type: () -> URI
    """Parse the URI fragment ('egg=...&subdirectory=...') and return an
    evolved copy of self with name, extras, subdirectory and the raw
    fragment dict filled in.  Returns self unchanged when there is no
    fragment.
    """
    subdirectory = self.subdirectory if self.subdirectory else ""
    fragment = self.fragment if self.fragment else ""
    if self.fragment is None:
        return self
    fragments = self.fragment.split("&")
    fragment_items = {}
    name = self.name if self.name else ""
    extras = self.extras
    for q in fragments:
        key, _, val = q.partition("=")
        val = unquote_plus(val)
        fragment_items[key] = val
        if key == "egg":
            # 'egg=name[extra1,extra2]' -> project name + declared extras
            from .utils import parse_extras
            name, stripped_extras = pip_shims.shims._strip_extras(val)
            if stripped_extras:
                extras = tuple(parse_extras(stripped_extras))
        elif key == "subdirectory":
            subdirectory = val
    return attr.evolve(
        self,
        fragment_dict=fragment_items,
        subdirectory=subdirectory,
        fragment=fragment,
        extras=extras,
        name=name,
    )
def _parse_uri(cls, server, sanitize_username=False):
    """
    Parses a MongoDB-formatted URI (e.g. mongodb://user:pass@server/db)
    and returns parsed elements and a sanitized URI.
    """
    parsed = pymongo.uri_parser.parse_uri(server)
    username = parsed.get('username')
    password = parsed.get('password')
    db_name = parsed.get('database')
    nodelist = parsed.get('nodelist')
    auth_source = parsed.get('options', {}).get('authsource')

    # Remove password (and optionally username) from sanitized server URI.
    # To ensure that the `replace` works well, we first need to url-decode the raw server string
    # since the password parsed by pymongo is url-decoded
    decoded_server = unquote_plus(server)
    if password:
        clean_server_name = decoded_server.replace(password, "*" * 5)
    else:
        clean_server_name = decoded_server

    if sanitize_username and username:
        username_pattern = u"{}[@:]".format(re.escape(username))
        clean_server_name = re.sub(username_pattern, "", clean_server_name)

    return username, password, db_name, nodelist, clean_server_name, auth_source
def load_crypto_meta(value):
    """
    Build the crypto_meta from the json object.

    Note that json.loads always produces unicode strings, to ensure the
    resultant crypto_meta matches the original object cast all key and
    value data to a str except the key and iv which are base64 decoded.
    This will work in py3 as well where all strings are unicode implying
    the cast is effectively a no-op.

    :param value: a string serialization of a crypto meta dict
    :returns: a dict containing crypto meta items
    :raises EncryptionException: if an error occurs while parsing the
                                 crypto meta
    """
    def b64_decode_meta(crypto_meta):
        # Recursively normalise the parsed dict per the rules above.
        return {
            str(name): (base64.b64decode(val) if name in ('iv', 'key')
                        else b64_decode_meta(val) if isinstance(val, dict)
                        else val.encode('utf8'))
            for name, val in crypto_meta.items()}

    try:
        if not isinstance(value, six.string_types):
            raise ValueError('crypto meta not a string')
        val = json.loads(urlparse.unquote_plus(value))
        # NOTE(review): collections.Mapping was removed in Python 3.10;
        # on a py3-only runtime this should be collections.abc.Mapping —
        # confirm supported interpreters before changing.
        if not isinstance(val, collections.Mapping):
            raise ValueError('crypto meta not a Mapping')
        return b64_decode_meta(val)
    except (KeyError, ValueError, TypeError) as err:
        msg = 'Bad crypto meta %r: %s' % (value, err)
        raise EncryptionException(msg)
def get_params():
    """Parse the plugin query string from ``sys.argv[2]`` into a dict."""
    _log("get_params")
    param_string = sys.argv[2]
    _log("get_params " + str(param_string))
    commands = {}
    if param_string:
        query = param_string[param_string.find('?') + 1:]
        for command in query.split('&'):
            _log("get_params command=" + str(command))
            if not command:
                continue
            if "=" in command:
                parts = command.split('=')
                commands[parts[0]] = unquote_plus(parts[1])
            else:
                commands[command] = ""
    _log("get_params " + repr(commands))
    return commands
def _verify_postback(self):
    """Parse the PayPal PDT postback stored on ``self.response``: the
    first line is the status; on non-SUCCESS a flag is set and parsing
    stops; remaining ``key=value`` lines are saved onto this model via
    PayPalPDTForm without committing."""
    # ### Now we don't really care what result was, just whether a flag was set or not.
    from paypal.standard.pdt.forms import PayPalPDTForm
    response_list = self.response.split('\n')
    response_dict = {}
    for i, line in enumerate(response_list):
        unquoted_line = unquote_plus(line).strip()
        if i == 0:
            # first line carries the status token
            self.st = unquoted_line
        else:
            if self.st != "SUCCESS":
                self.set_flag(line)
                break
            try:
                # ' -' prefixed lines are details, not key=value pairs
                if not unquoted_line.startswith(' -'):
                    k, v = unquoted_line.split('=')
                    response_dict[k.strip()] = v.strip()
            except ValueError:
                # line without exactly one '=' — ignore
                pass
    qd = QueryDict('', mutable=True)
    qd.update(response_dict)
    qd.update(
        dict(ipaddress=self.ipaddress, st=self.st,
             flag_info=self.flag_info, flag=self.flag,
             flag_code=self.flag_code))
    pdt_form = PayPalPDTForm(qd, instance=self)
    pdt_form.save(commit=False)
def load_crypto_meta(value):
    """
    Build the crypto_meta from the json object.

    Note that json.loads always produces unicode strings; to ensure the
    resultant crypto_meta matches the original object:
      * cast all keys to str (effectively a no-op on py3),
      * base64 decode 'key' and 'iv' values to bytes, and
      * encode remaining string values as UTF-8 on py2 (while leaving
        them as native unicode strings on py3).

    :param value: a string serialization of a crypto meta dict
    :returns: a dict containing crypto meta items
    :raises EncryptionException: if an error occurs while parsing the
                                 crypto meta
    """
    def b64_decode_meta(crypto_meta):
        # Recursively normalise the parsed dict per the rules above.
        return {
            str(name): (base64.b64decode(val) if name in ('iv', 'key')
                        else b64_decode_meta(val) if isinstance(
                            val, dict) else val.encode('utf8')
                        if six.PY2 else val)
            for name, val in crypto_meta.items()
        }

    try:
        if not isinstance(value, six.string_types):
            raise ValueError('crypto meta not a string')
        val = json.loads(urlparse.unquote_plus(value))
        # NOTE(review): collections.Mapping was removed in Python 3.10;
        # on a py3-only runtime this should be collections.abc.Mapping —
        # confirm supported interpreters before changing.
        if not isinstance(val, collections.Mapping):
            raise ValueError('crypto meta not a Mapping')
        return b64_decode_meta(val)
    except (KeyError, ValueError, TypeError) as err:
        msg = 'Bad crypto meta %r: %s' % (value, err)
        raise EncryptionException(msg)
def _is_enterprise_target(self, url):
    """
    Check if url belongs to enterprise app

    Args:
        url(str): url path
    """
    normalized_url = parse.unquote_plus(parse.quote(url))
    return bool(re.match(r'^/enterprise/[a-z0-9\-]+/course', normalized_url))
def _get_tool(self, id, tool_version=None, user=None):
    """Resolve a tool by url-quoted id; raise ObjectNotFound when absent
    and AuthenticationFailed when the user lacks access."""
    id = unquote_plus(id)
    tool = self.app.toolbox.get_tool(id, tool_version)
    if not tool:
        raise exceptions.ObjectNotFound(
            "Could not find tool with id '%s'." % id)
    if not tool.allow_user_access(user):
        raise exceptions.AuthenticationFailed(
            "Access denied, please login for tool with id '%s'." % id)
    return tool
def test_quote_plus_unicode_unichar(self):
    """
    Test if unicode characters are quoted correctly
    """
    original = u'\u9b54\u11b3\uaf92\u1111'
    self.assertEqual(original, unquote_plus(quote_plus_unicode(original)))
def test_quote_plus_unicode_char(self):
    """
    Test if a ASCII characters are quoted correctly
    """
    original = u'Ab09'
    self.assertEqual(original, unquote_plus(quote_plus_unicode(original)))
def test_quoter_unichar(self):
    """
    Test if a unicode character is quoted correctly
    """
    original = u'\u9b54'
    self.assertEqual(original, unquote_plus(unicode_quoter(original)))
def test_quoter_char(self):
    """
    Test if an ASCII character is quoted correctly
    """
    original = u'A'
    self.assertEqual(original, unquote_plus(unicode_quoter(original)))
def __init__(self, url, user='', password='', host='localhost',
             protocol='', path='', port=0, parameters=[]):
    """Decompose *url* into protocol, credentials, host, port, path and
    percent-decoded query parameters."""
    errmsg = "".join(("Invalid url <", url, ">"))

    # chop protocol
    pieces = url.split('://')
    if len(pieces) == 2:
        protocol, url = pieces
    elif len(pieces) != 1:
        raise ValueError(errmsg)
    else:
        url = pieces[0]

    # split server part from path
    slash = url.find('/')
    if slash < 0:
        server = url
    else:
        server, path = url[:slash], url[slash + 1:]

    if '@' in server:
        user, server = server.split('@')
        if ':' in user:
            user, password = user.split(':')
    if ':' in server:
        server, port = server.split(':')
        port = int(port)
    if server != '':
        host = server

    if '?' in path:
        path, pars = path.split('?')
        parameters = [pair.split('=') for pair in pars.split('&')]
        if any(len(pair) > 2 for pair in parameters):
            raise ValueError(errmsg)
        parameters = [[parse.unquote_plus(p) for p in pair]
                      for pair in parameters]

    self.protocol = protocol
    self.user = user
    self.password = password
    self.host = host
    self.port = port
    self.path = parse.unquote_plus(path)
    self.parameters = parameters[:]
def on_get(self, external_project_id, **kw):
    """Return a paginated list of the project's secrets as hrefs.

    Supported query args (all optional): name, alg, mode, bits, offset,
    limit, acl_only.  An unparsable ``bits`` silently falls back to 0
    (no filter).
    """
    def secret_fields(field):
        # Attach content-type information to each secret's fields.
        return putil.mime_types.augment_fields_with_content_types(field)

    LOG.debug('Start secrets on_get '
              'for project-ID %s:', external_project_id)

    name = kw.get('name', '')
    if name:
        name = parse.unquote_plus(name)

    bits = kw.get('bits', 0)
    try:
        bits = int(bits)
    except ValueError:
        # as per Github issue 171, if bits is invalid then
        # the default should be used.
        bits = 0

    ctxt = controllers._get_barbican_context(pecan.request)
    user_id = None
    if ctxt:
        user_id = ctxt.user

    result = self.secret_repo.get_by_create_date(
        external_project_id,
        offset_arg=kw.get('offset', 0),
        limit_arg=kw.get('limit', None),
        name=name,
        alg=kw.get('alg'),
        mode=kw.get('mode'),
        bits=bits,
        suppress_exception=True,
        acl_only=kw.get('acl_only', None),
        user_id=user_id
    )

    secrets, offset, limit, total = result

    if not secrets:
        secrets_resp_overall = {'secrets': [], 'total': total}
    else:
        secrets_resp = [
            hrefs.convert_to_hrefs(secret_fields(s))
            for s in secrets
        ]
        secrets_resp_overall = hrefs.add_nav_hrefs(
            'secrets', offset, limit, total,
            {'secrets': secrets_resp}
        )
        secrets_resp_overall.update({'total': total})

    LOG.info(u._LI('Retrieved secret list for project: %s'),
             external_project_id)
    return secrets_resp_overall
def test_quoter_reserved(self):
    """
    Test if a URI reserved character is quoted correctly
    """
    original = u'+'
    quoted = unicode_quoter(original)
    self.assertNotEqual(original, quoted)
    self.assertEqual(original, unquote_plus(quoted))
def test_quote_plus_unicode_reserved(self):
    """
    Test if a URI reserved characters are quoted correctly
    """
    original = u'+ &'
    quoted = quote_plus_unicode(original)
    self.assertNotEqual(original, quoted)
    self.assertEqual(original, unquote_plus(quoted))
def test_quote_plus_unicode_compound(self):
    """
    Test if a jumble of unicode, reserved and normal chars
    are quoted correctly
    """
    original = u'\u9b54\u11b3+ A5&\uaf92\u1111'
    quoted = quote_plus_unicode(original)
    self.assertNotEqual(original, quoted)
    self.assertEqual(original, unquote_plus(quoted))
def unquote_plus(text):
    """unquote('%7e/abc+def') -> '~/abc def'"""
    # NOTE: py2-only helper — `unicode` does not exist on Python 3.
    if isinstance(text, unicode):
        try:
            text = text.encode('utf-8')
        except UnicodeEncodeError:
            # for some reason there is a non-ascii character here. Let's assume it
            # is already unicode (because of originally decoding the file)
            return text
    return parse.unquote_plus(text).decode('utf-8')
def get_parameter(kwargs, singular_name, plural_name=None):
    """Fetch a parameter from *kwargs* or the Flask request args and
    return it url-decoded and split on commas ([] when absent)."""
    if singular_name in kwargs:
        value = kwargs[singular_name]
    else:
        value = flask.request.args.get(singular_name)
    if (not value) and plural_name:
        value = flask.request.args.get(plural_name)
    if not value:
        return []
    return parse.unquote_plus(value).split(',')
def on_delete(self, req, resp, tenant_id, domain, entry):
    """Delete the first DNS record named *entry* within *domain*."""
    mgr = DNSManager(req.env['sl_client'])
    zone_name = unquote_plus(domain)
    zone_id = mgr._get_zone_id_from_name(zone_name)[0]
    record = mgr.get_records(zone_id, host=entry)[0]
    mgr.delete_record(record['id'])
    resp.status = 204
def on_get(self, external_project_id, **kw):
    """List certificate authorities, optionally filtered by plugin name
    and plugin-local CA id."""
    LOG.debug('Start certificate_authorities on_get')

    plugin_name = kw.get('plugin_name')
    if plugin_name is not None:
        plugin_name = parse.unquote_plus(plugin_name)

    plugin_ca_id = kw.get('plugin_ca_id', None)
    if plugin_ca_id is not None:
        plugin_ca_id = parse.unquote_plus(plugin_ca_id)

    cas, offset, limit, total = self.ca_repo.get_by_create_date(
        offset_arg=kw.get('offset', 0),
        limit_arg=kw.get('limit', None),
        plugin_name=plugin_name,
        plugin_ca_id=plugin_ca_id,
        suppress_exception=True
    )

    if not cas:
        return {'cas': [], 'total': total}

    ca_hrefs = [
        hrefs.convert_certificate_authority_to_href(s.id)
        for s in cas
    ]
    cas_resp_overall = hrefs.add_nav_hrefs(
        'cas', offset, limit, total, {'cas': ca_hrefs})
    cas_resp_overall.update({'total': total})
    return cas_resp_overall
def _request_body(self): data = super(JSONPolling, self)._request_body() # resolve %20%3F's, take out wrapping d="...", etc.. data = unquote_plus(data)[3:-1] \ .replace(r'\"', '"') \ .replace(r"\\", "\\") # For some reason, in case of multiple messages passed in one # query, IE7 sends it escaped, not utf-8 encoded. This dirty # hack handled it if data[0] == "\\": data = data.decode("unicode_escape").encode("utf-8") return data
def get_parameter(kwargs, singular_name, plural_name=None, use_default=True):
    """Fetch a parameter from *kwargs* or the Flask request args, split on
    commas; when absent, fall back to the configured default unless
    *use_default* is False."""
    if singular_name in kwargs:
        value = kwargs[singular_name]
    else:
        value = flask.request.args.get(singular_name)
    if (not value) and plural_name:
        value = flask.request.args.get(plural_name)
    if value:
        return parse.unquote_plus(value).split(',')
    if use_default:
        default = get_default(singular_name)
        return [default] if default else []
    return []
def _verify_user_metadata(self, req_hdrs, name, value, key):
    """Assert that user metadata *name* was replaced by an encrypted
    transient-sysmeta header carrying the crypto parameters.

    :param req_hdrs: headers of the captured backend request
    :param name: user metadata name (without the X-Object-Meta- prefix)
    :param value: plaintext metadata value expected to be encrypted
    :param key: encryption key used to reproduce the expected ciphertext
    """
    # verify encrypted version of user metadata
    self.assertNotIn('X-Object-Meta-' + name, req_hdrs)
    expected_hdr = 'X-Object-Transient-Sysmeta-Crypto-Meta-' + name
    self.assertIn(expected_hdr, req_hdrs)
    # header format: "<b64 ciphertext>; swift_meta=<url-quoted json>"
    enc_val, param = req_hdrs[expected_hdr].split(';')
    param = param.strip()
    self.assertTrue(param.startswith('swift_meta='))
    actual_meta = json.loads(
        urlparse.unquote_plus(param[len('swift_meta='):]))
    self.assertEqual(Crypto.cipher, actual_meta['cipher'])
    # the IV must be the stubbed FAKE_IV so the ciphertext is reproducible
    meta_iv = base64.b64decode(actual_meta['iv'])
    self.assertEqual(FAKE_IV, meta_iv)
    self.assertEqual(
        base64.b64encode(encrypt(value, key, meta_iv)), enc_val)

    # if there is any encrypted user metadata then this header should exist
    self.assertIn('X-Object-Transient-Sysmeta-Crypto-Meta', req_hdrs)
    common_meta = json.loads(urlparse.unquote_plus(
        req_hdrs['X-Object-Transient-Sysmeta-Crypto-Meta']))
    self.assertDictEqual({'cipher': Crypto.cipher,
                          'key_id': {'v': 'fake', 'path': '/a/c/fake'}},
                         common_meta)