def signon(self, url='localhost:8889', login=None, password=None, startQueue=False, verbose=False):
    """Sign on to RTS2 JSON server.

    url -- JSON API URL; may embed credentials as user:pass@host, which
    override the login/password arguments.  When no login is available a
    GTK login dialog is shown; otherwise the JSON server proxy is created
    directly.
    """
    # try to get username (and password) from url
    purl = urlparse.urlsplit(url)
    # BUG FIX: the original called splituser() twice, discarding the first
    # result, and referenced bare `query`/`fragment` names (NameError) --
    # they must come from the split result `purl`.
    (userpass, netloc) = urllib.splituser(purl.netloc)
    if userpass is not None:
        (login, password) = urllib.splitpasswd(userpass)
    # rebuild the URL without the credential part
    url = netloc + purl.path
    if purl.query:
        url += '?' + purl.query
    if purl.fragment:
        url += '#' + purl.fragment
    if login is None:
        # no credentials -- ask interactively through the login dialog
        self.verbose.set_active(verbose)
        self.dialog.show_all()
        if self.run():
            # dialog cancelled
            self.dialog.hide()
            sys.exit(-1)
        self.dialog.hide()
    else:
        # or just create server..
        createJsonServer(url, login, password, verbose=verbose)
        getProxy().loadJson('/api/devices')
        if startQueue:
            getProxy().startQueue()
    getProxy().refresh()
def open_http(self, url, data=None):
    """Use HTTP protocol."""
    # Python 2 URLopener-style opener with cookie-jar support.
    import httplib
    user_passwd = None
    if type(url) is type(""):
        # Plain URL string: split off host and optional user:password.
        host, selector = splithost(url)
        if host:
            user_passwd, host = splituser(host)
            host = unquote(host)
        realhost = host
    else:
        # (host, selector) tuple: request goes through a proxy; recover
        # the real host (and credentials) from the selector URL.
        host, selector = url
        urltype, rest = splittype(selector)
        url = rest
        user_passwd = None
        if string.lower(urltype) != 'http':
            realhost = None
        else:
            realhost, rest = splithost(rest)
            if realhost:
                user_passwd, realhost = splituser(realhost)
            if user_passwd:
                selector = "%s://%s%s" % (urltype, realhost, rest)
        #print "proxy via http:", host, selector
    if not host: raise IOError, ('http error', 'no host given')
    if user_passwd:
        import base64
        # Inline credentials become a Basic Authorization header.
        auth = string.strip(base64.encodestring(user_passwd))
    else:
        auth = None
    h = httplib.HTTP(host)
    if data is not None:
        # A request body switches the method to POST.
        h.putrequest('POST', selector)
        h.putheader('Content-type', 'application/x-www-form-urlencoded')
        h.putheader('Content-length', '%d' % len(data))
    else:
        h.putrequest('GET', selector)
    # Send every stored cookie along with the request.
    for cookie in self.cookies.items():
        h.putheader('Cookie', '%s=%s;' % cookie)
    if auth: h.putheader('Authorization', 'Basic %s' % auth)
    if realhost: h.putheader('Host', realhost)
    for args in self.addheaders: apply(h.putheader, args)
    h.endheaders()
    if data is not None:
        h.send(data + '\r\n')
    errcode, errmsg, headers = h.getreply()
    # Capture any cookies the server set before handing back the body.
    if headers and headers.has_key('set-cookie'):
        cookies = headers.getallmatchingheaders('set-cookie')
        for cookie in cookies: self.cookies.load(cookie)
    fp = h.getfile()
    if errcode == 200:
        return addinfourl(fp, headers, "http:" + url)
    else:
        if data is None:
            return self.http_error(url, fp, errcode, errmsg, headers)
        else:
            return self.http_error(url, fp, errcode, errmsg, headers, data)
def open_http(url, data=None):
    """Use HTTP protocol."""
    # Module-level variant that returns a prepared (but un-sent)
    # FakeHTTPConnection instead of performing the request itself.
    import httplib
    user_passwd = None
    proxy_passwd= None
    if isinstance(url, str):
        host, selector = urllib.splithost(url)
        if host:
            user_passwd, host = urllib.splituser(host)
            host = urllib.unquote(host)
        realhost = host
    else:
        # (host, selector) tuple form: request goes through a proxy.
        host, selector = url
        # check whether the proxy contains authorization information
        proxy_passwd, host = urllib.splituser(host)
        # now we proceed with the url we want to obtain
        urltype, rest = urllib.splittype(selector)
        url = rest
        user_passwd = None
        if urltype.lower() != 'http':
            realhost = None
        else:
            realhost, rest = urllib.splithost(rest)
            if realhost:
                user_passwd, realhost = urllib.splituser(realhost)
            if user_passwd:
                selector = "%s://%s%s" % (urltype, realhost, rest)
            # Skip the proxy for hosts matching the bypass list.
            if urllib.proxy_bypass(realhost):
                host = realhost
        #print "proxy via http:", host, selector
    if not host: raise IOError, ('http error', 'no host given')
    if proxy_passwd:
        import base64
        proxy_auth = base64.b64encode(proxy_passwd).strip()
    else:
        proxy_auth = None
    if user_passwd:
        import base64
        auth = base64.b64encode(user_passwd).strip()
    else:
        auth = None
    c = FakeHTTPConnection(host)
    if data is not None:
        c.putrequest('POST', selector)
        c.putheader('Content-Type', 'application/x-www-form-urlencoded')
        c.putheader('Content-Length', '%d' % len(data))
    else:
        c.putrequest('GET', selector)
    if proxy_auth:
        c.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
    if auth:
        c.putheader('Authorization', 'Basic %s' % auth)
    if realhost:
        c.putheader('Host', realhost)
    # Borrow the default headers (User-Agent etc.) from a fresh URLopener.
    for args in urllib.URLopener().addheaders:
        c.putheader(*args)
    c.endheaders()
    return c
def open_https(self, url, data=None, ssl_context=None):
    """Open an HTTPS URL via M2Crypto; return an addinfourl response."""
    # Use the caller-supplied SSL context when valid, else a default one.
    if ssl_context is not None and isinstance(ssl_context, SSL.Context):
        self.ctx = ssl_context
    else:
        self.ctx = SSL.Context(DEFAULT_PROTOCOL)
    user_passwd = None
    if isinstance(url, basestring):
        host, selector = urllib.splithost(url)
        if host:
            user_passwd, host = urllib.splituser(host)
            host = urllib.unquote(host)
        realhost = host
    else:
        # (host, selector) tuple form: request goes through a proxy.
        host, selector = url
        urltype, rest = urllib.splittype(selector)
        url = rest
        user_passwd = None
        if urltype.lower() != 'http':
            realhost = None
        else:
            realhost, rest = urllib.splithost(rest)
            if realhost:
                user_passwd, realhost = urllib.splituser(realhost)
            if user_passwd:
                selector = "%s://%s%s" % (urltype, realhost, rest)
        # print("proxy via http:", host, selector)
    if not host:
        raise IOError('http error', 'no host given')
    if user_passwd:
        import base64
        # Inline credentials become a Basic Authorization header.
        auth = base64.encodestring(user_passwd).strip()
    else:
        auth = None
    # Start here!
    h = httpslib.HTTPSConnection(host=host, ssl_context=self.ctx)
    # h.set_debuglevel(1)
    # Stop here!
    if data is not None:
        h.putrequest('POST', selector)
        h.putheader('Content-type', 'application/x-www-form-urlencoded')
        h.putheader('Content-length', '%d' % len(data))
    else:
        h.putrequest('GET', selector)
    if auth:
        h.putheader('Authorization', 'Basic %s' % auth)
    for args in self.addheaders:
        apply(h.putheader, args)
    h.endheaders()
    if data is not None:
        h.send(data + '\r\n')
    # Here again!
    resp = h.getresponse()
    fp = resp.fp
    return urllib.addinfourl(fp, resp.msg, "https:" + url)
def chek_pages(pages): try: for pages_url in pages: urllib.splitpasswd("*****@*****.**") urllib.splituser() code = urllib.urlopen(pages_url).getcode() print pages_url, code if code not in [200, 301]: failed_pages.append(pages_url) except socket.error, e: print "Ping Error", e
def url_permutations(url):
    """Try all permutations of hostname and path which can be applied to blacklisted URLs"""
    def url_host_permutations(host):
        # Literal IPv4 addresses are used verbatim -- no suffix expansion.
        if re.match(r'\d+\.\d+\.\d+\.\d+', host):
            yield host
            return
        parts = host.split('.')
        # At most the last five labels participate in permutations.
        l = min(len(parts),5)
        if l > 4:
            yield host
        # Successively longer suffixes of the hostname (e.g. b.c, a.b.c).
        for i in xrange(l-1):
            yield '.'.join(parts[i-l:])
    def url_path_permutations(path):
        if path != '/':
            yield path
        query = None
        if '?' in path:
            path, query = path.split('?', 1)
        # Path variant without its query string.
        if query is not None:
            yield path
        # Successive prefixes of the path, up to four components deep.
        path_parts = path.split('/')[0:-1]
        curr_path = ''
        for i in xrange(min(4, len(path_parts))):
            curr_path = curr_path + path_parts[i] + '/'
            yield curr_path
    # Strip scheme, credentials and port before permuting.
    protocol, address_str = urllib.splittype(url)
    host, path = urllib.splithost(address_str)
    user, host = urllib.splituser(host)
    host, port = urllib.splitport(host)
    host = host.strip('/')
    for h in url_host_permutations(host):
        for p in url_path_permutations(path):
            yield '%s%s' % (h, p)
def get_host_info(self, host):
    """Get authorization info from host parameter

    Host may be a string, or a (host, x509-dict) tuple; if a string,
    it is checked for a "user:pw@host" format, and a "Basic
    Authentication" header is added if appropriate.

    @param host Host descriptor (URL or (URL, x509 info) tuple).
    @return A 3-tuple containing (actual host, extra headers, x509 info).
        The header and x509 fields may be None.
    """
    x509_info = {}
    if isinstance(host, types.TupleType):
        host, x509_info = host
    credentials, host = urllib.splituser(host)
    if not credentials:
        return host, None, x509_info
    # Base64-encode the decoded user:pw pair, then strip the newlines
    # that encodestring inserts.
    encoded = base64.encodestring(urllib.unquote(credentials))
    encoded = string.join(string.split(encoded), "")
    return host, [("Authorization", "Basic " + encoded)], x509_info
def open_with_auth(url):
    """Open a urllib2 request, handling HTTP authentication"""
    scheme, netloc, path, params, query, frag = urlparse.urlparse(url)
    # Only http(s) URLs may carry user:pass in the netloc.
    if scheme in ('http', 'https'):
        auth, host = urllib.splituser(netloc)
    else:
        auth = None
    if auth:
        # Strip the credentials out of the URL and send them as a header.
        auth = "Basic " + urllib2.unquote(auth).encode('base64').strip()
        new_url = urlparse.urlunparse((scheme,host,path,params,query,frag))
        request = urllib2.Request(new_url)
        request.add_header("Authorization", auth)
    else:
        request = urllib2.Request(url)
    request.add_header('User-Agent', user_agent)
    fp = urllib2.urlopen(request)
    if auth:
        # Put authentication info back into request URL if same host,
        # so that links found on the page will work
        s2, h2, path2, param2, query2, frag2 = urlparse.urlparse(fp.url)
        if s2==scheme and h2==host:
            fp.url = urlparse.urlunparse((s2,netloc,path2,param2,query2,frag2))
    return fp
def __init__(self, url, config = Config): proto, uri = urllib.splittype(url) # apply some defaults if uri[0:2] != '//': if proto != None: uri = proto + ':' + uri uri = '//' + uri proto = 'http' host, path = urllib.splithost(uri) try: int(host) host = 'localhost:' + host except: pass if not path: path = '/' if proto not in ('http', 'https', 'httpg'): raise IOError, "unsupported SOAP protocol" if proto == 'httpg' and not config.GSIclient: raise AttributeError, \ "GSI client not supported by this Python installation" if proto == 'https' and not config.SSLclient: raise AttributeError, \ "SSL client not supported by this Python installation" self.user,host = urllib.splituser(host) self.proto = proto self.host = host self.path = path
def test_node(request):
    """Runs an SSH call on a node."""
    # Reject unauthenticated callers outright.
    if authenticated_userid(request) is None:
        raise Forbidden()
    # trying an ssh connection
    connection = paramiko.client.SSHClient()
    connection.load_system_host_keys()
    connection.set_missing_host_key_policy(paramiko.WarningPolicy())
    name = request.matchdict['name']
    # Target may be [user[:password]@]host[:port]; port defaults to 22.
    host, port = urllib.splitport(name)
    if port is None:
        port = 22
    username, host = urllib.splituser(host)
    credentials = {}
    if username is not None and ':' in username:
        username, password = username.split(':', 1)
        credentials = {"username": username, "password": password}
    elif username is not None:
        password = None
        credentials = {"username": username}
    try:
        # Short timeout: this is only a reachability probe.
        connection.connect(host, port=port, timeout=5, **credentials)
        return 'Connection to %r : OK' % name
    except (socket.gaierror, socket.timeout), error:
        return str(error)
def _urlclean(url): """Clean the url of uneccesary parts.""" # url decode any printable normal characters except reserved characters with special meanings in urls for c in _urlencpattern.findall(url): r = chr(int(c[1:3],16)) if r in _okurlchars: url = url.replace(c, r) # url encode any nonprintable or problematic characters (but not reserved chars) url = ''.join(map(lambda x: (x not in _reservedurlchars and x not in _okurlchars) and ('%%%02X' % ord(x)) or x, url)) # split the url in useful parts (discarding fragment) (scheme, netloc, path, query) = urlparse.urlsplit(url)[0:4] if ( scheme == "http" or scheme == "https" or scheme == "ftp" ): # http(s) urls should have a non-empty path if path == "": path="/" # make hostname lower case (userpass, hostport) = urllib.splituser(netloc) netloc=hostport.lower() # trim trailing : if netloc[-1:] == ":": netloc = netloc[:-1] if userpass is not None: netloc = userpass+"@"+netloc # put the url back together again return urlparse.urlunsplit((scheme, netloc, path, query, ""))
def parse_address_info(cls, server_addr="nats://*****:*****@127.0.0.1:4222"):
    '''\
    parse the metadata nats server uri;
    Params:
    =====
    addr: nats server address;
    Returns:
    =====
    user: username to login nats server;
    pswd: password to login nats server;
    host: ip address of nats server;
    port: port of nats server
    '''
    if type(server_addr) is not str:
        raise NotImplementException
    protocol, after_split = urllib.splittype(server_addr)
    if not protocol == "nats":
        raise NotImplementException
    # Presence of '@' means the URI carries user:password credentials.
    auth_len = len(server_addr.split('@'))
    if auth_len > 1:
        auth, after_split = urllib.splituser(after_split)
        user_raw, pswd = urllib.splitpasswd(auth)
        # splittype leaves a leading '//' on the authority part.
        user = user_raw.lstrip("/")
        _, after_split = urllib.splithost(after_split)
        host, port = urllib.splitport(after_split)
    else:
        user = pswd = None
        host, port = urllib.splitport(after_split)
    return user, pswd, host, int(port)
def __init__(self, url, config=Config): proto, uri = urllib.splittype(url) # apply some defaults if uri[0:2] != "//": if proto != None: uri = proto + ":" + uri uri = "//" + uri proto = "http" host, path = urllib.splithost(uri) try: int(host) host = "localhost:" + host except: pass if not path: path = "/" if proto not in ("http", "https", "httpg"): raise IOError, "unsupported SOAP protocol" if proto == "httpg" and not config.GSIclient: raise AttributeError, "GSI client not supported by this Python installation" if proto == "https" and not config.SSLclient: raise AttributeError, "SSL client not supported by this Python installation" self.user, host = urllib.splituser(host) self.proto = proto self.host = host self.path = path
def build_url_parts (self): """Set userinfo, host, port and anchor from self.urlparts. Also checks for obfuscated IP addresses. """ # check userinfo@host:port syntax self.userinfo, host = urllib.splituser(self.urlparts[1]) port = urlutil.default_ports.get(self.scheme, 0) host, port = urlutil.splitport(host, port=port) if port is None: raise LinkCheckerError(_("URL host %(host)r has invalid port") % {"host": host}) self.port = port # set host lowercase self.host = host.lower() if self.scheme in scheme_requires_host: if not self.host: raise LinkCheckerError(_("URL has empty hostname")) self.check_obfuscated_ip() if not self.port or self.port == urlutil.default_ports.get(self.scheme): host = self.host else: host = "%s:%d" % (self.host, self.port) if self.userinfo: self.urlparts[1] = "%s@%s" % (self.userinfo, host) else: self.urlparts[1] = host # safe anchor for later checking self.anchor = self.urlparts[4] if self.anchor is not None: assert isinstance(self.anchor, unicode), repr(self.anchor)
def dnslookup(url):
    """Replaces a hostname by its IP in an url.

    Uses gethostbyname to do a DNS lookup, so the nscd cache is used.

    If gevent has patched the standard library, makes sure it uses the
    original version because gevent uses its own mechanism based on
    the async libevent's evdns_resolve_ipv4, which does not use
    glibc's resolver.
    """
    try:
        # Prefer gevent's saved reference to the unpatched resolver.
        from gevent.socket import _socket
        gethostbyname = _socket.gethostbyname
    except ImportError:
        import socket
        gethostbyname = socket.gethostbyname
    # parsing
    parsed_url = urlparse.urlparse(url)
    host, port = urllib.splitport(parsed_url.netloc)
    user, host = urllib.splituser(host)
    # resolving the host
    host = gethostbyname(host)
    # recomposing
    if port is not None:
        host = '%s:%s' % (host, port)
    if user is not None:
        host = '%s@%s' % (user, host)
    parts = [parsed_url[0]] + [host] + list(parsed_url[2:])
    return urlparse.urlunparse(parts)
def __init__(self, baseUri, headers=None, maxClients=None, maxConnections=None):
    """Split *baseUri* into scheme/host/path and set up the dispatcher.

    Credentials embedded in the URI are stored separately and removed
    from the stored base URI; only http/https schemes are accepted.
    """
    self._headers = headers or HTTPHeaders()
    self._user = None
    self._passwd = None
    baseUri = baseUri.rstrip('/')
    self._scheme, loc, self._path, query, frag = urlparse.urlsplit(baseUri)
    userpass, self._hostport = urllib.splituser(loc)
    if userpass:
        self._user, self._passwd = urllib.splitpasswd(userpass)
    # Base URI without credentials, query or fragment.
    self._baseUri = urlparse.urlunsplit((self._scheme, self._hostport, self._path, None, None))
    if self._scheme not in ('http', 'https'):
        raise ValueError(self._scheme)
    self._dispatcher = RequestDispatcher(maxClients=maxClients, maxConnections=maxConnections)
    # Query and fragment are kept for re-attachment to request URIs.
    self._queryFragment = urlparse.urlunsplit(('', '', '', query, frag))
def ftp_open(self, req):
    """Open an ftp:// request: resolve host, port and credentials."""
    import ftplib
    import mimetypes
    host = req.get_host()
    if not host:
        raise IOError, ('ftp error', 'no host given')
    host, port = splitport(host)
    if port is None:
        port = ftplib.FTP_PORT
    else:
        port = int(port)
    # username/password handling
    user, host = splituser(host)
    if user:
        user, passwd = splitpasswd(user)
    else:
        passwd = None
    host = unquote(host)
    user = unquote(user or '')
    passwd = unquote(passwd or '')
    try:
        host = socket.gethostbyname(host)
    except socket.error, msg:
        raise URLError(msg)
    # NOTE(review): the captured source ends here; the original handler
    # presumably continues with the actual FTP retrieval.
def getUrlFd(url, headers=None, data=None, timeout=None):
    """getUrlFd(url, headers=None, data=None)

    Opens the given url and returns a file object.  Headers and data
    are a dict and string, respectively, as per urllib2.Request's
    arguments."""
    if headers is None:
        headers = defaultHeaders
    try:
        if not isinstance(url, urllib2.Request):
            # Strip inline user credentials from the URL and send them
            # via a Basic Authorization header instead.
            (scheme, loc, path, query, frag) = urlparse.urlsplit(url)
            (user, host) = urllib.splituser(loc)
            url = urlparse.urlunsplit((scheme, host, path, query, ''))
            request = urllib2.Request(url, headers=headers, data=data)
            if user:
                request.add_header('Authorization', 'Basic %s' % base64.b64encode(user))
        else:
            # Caller supplied a prebuilt Request; just attach the body.
            request = url
            request.add_data(data)
        httpProxy = force(proxy)
        if httpProxy:
            request.set_proxy(httpProxy, 'http')
        fd = urllib2.urlopen(request, timeout=timeout)
        return fd
    except socket.timeout, e:
        raise Error, TIMED_OUT
def set_proxy (self, proxy):
    """Parse given proxy information and store parsed values.
    Note that only http:// proxies are supported, both for ftp://
    and http:// URLs.
    """
    self.proxy = proxy
    self.proxytype = "http"
    self.proxyauth = None
    if not self.proxy:
        return
    proxyargs = {"proxy": self.proxy}
    self.proxytype, self.proxy = urllib.splittype(self.proxy)
    if self.proxytype not in ('http', 'https'):
        # Note that invalid proxies might raise TypeError in urllib2,
        # so make sure to stop checking at this point, not later.
        msg = _("Proxy value `%(proxy)s' must start with 'http:' or 'https:'.") \
            % proxyargs
        raise LinkCheckerError(msg)
    self.proxy = urllib.splithost(self.proxy)[0]
    self.proxyauth, self.proxy = urllib.splituser(self.proxy)
    if self.ignore_proxy_host():
        # log proxy without auth info
        log.debug(LOG_CHECK, "ignoring proxy %r", self.proxy)
        self.add_info(_("Ignoring proxy setting `%(proxy)s'.") % proxyargs)
        self.proxy = self.proxyauth = None
        return
    self.add_info(_("Using proxy `%(proxy)s'.") % proxyargs)
    if self.proxyauth is not None:
        # Proxy credentials become a Basic Proxy-Authorization value;
        # a lone username gets an empty password appended first.
        if ":" not in self.proxyauth:
            self.proxyauth += ":"
        self.proxyauth = httputil.encode_base64(self.proxyauth)
        self.proxyauth = "Basic "+self.proxyauth
    log.debug(LOG_CHECK, "using proxy %r", self.proxy)
def make_connection(self, host):
    """Create an HTTP connection to *host*, honouring an http proxy.

    Credentials and the real target host are cached on the instance;
    the proxy is bypassed for local hosts.
    """
    self.user_pass, self.realhost = splituser(host)
    parsed = urlparse.urlparse(self.proxies.get('http', ''))
    proxy = parsed[1]
    if proxy and not self.local:
        return httplib.HTTP(proxy)
    return httplib.HTTP(self.realhost)
def __init__(self, url): """ Initialize the downloader with the specified url string """ # FIXME: support HTTPS scheme, host, path, params, query, fragment = urlparse.urlparse(url) auth, host = urllib.splituser(host) self.host, self.port = urllib.splitport(host) if not self.port: self.port = 80 self.username = self.password = None if auth: self.username, self.password = urllib.splitpasswd(auth) self.url = urlparse.urlunparse((scheme, host, path, params, query, fragment)) self.nzbFilename = os.path.basename(path) self.tempFilename = os.path.join(Hellanzb.TEMP_DIR, tempFilename(self.TEMP_FILENAME_PREFIX) + '.nzb') # The HTTPDownloader self.downloader = None # The NZB category (e.g. 'Apps') self.nzbCategory = None # Whether or not the NZB file data is gzipped self.isGzipped = False
def do_cmd(self, cmd, **args):
    """Run *cmd*, attaching a cached form-login cookie for the host."""
    logging.debug('Repo path: %s' % self.path)
    url = urlparse(self.path)
    username, host = splituser(url.netloc)
    cookie = self.cookiejar[host]
    if cookie is not None:
        # NOTE(review): this branch runs when a cookie IS cached, yet the
        # debug message in the other branch claims "We already have a
        # cookie" -- the two messages look swapped; confirm intent.
        return super(formloginhttpsrepo, self).do_cmd(cmd, headers={'cookie' : cookie}, **args)
    else:
        logging.debug('We already have a cookie for host', host)
        try:
            return super(formloginhttpsrepo, self).do_cmd(cmd, **args)
        except error.RepoError, e:
            if 'does not appear to be an hg repository' in str(e):
                logging.debug('Accessing repo on this host for the first time')
                # <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
                # TODO fixed for now, but do we have a cleaner way to get rid of username:passwd from the URL?
                scheme, netloc, path, params, query, fragment = url
                path_without_userpasswd = '%s://%s/%s;%s?%s#%s' % (scheme, host, path, params, query, fragment)
                cookie = get_the_cookie(path_without_userpasswd)
                logging.info('Got cookie', cookie)
                self.cookiejar[host] = cookie
                return super(formloginhttpsrepo, self).do_cmd(cmd, headers={'cookie' : cookie}, **args)
            else:
                # NOTE(review): the captured source is truncated here --
                # the body of this final 'else' branch is not visible.
def open_http(self, url):
    """Use HTTP protocol."""
    # Issues a GET and returns only the numeric status code
    # (-2 when no host can be determined).
    if isinstance(url, str):
        host, selector = urllib.splithost(url)
        if host:
            user_passwd, host = urllib.splituser(host)
            host = urllib.unquote(host)
        realhost = host
    else:
        # (host, selector) tuple form: request goes through a proxy.
        host, selector = url
        urltype, rest = urllib.splittype(selector)
        url = rest
        user_passwd = None
        if urltype.lower() != "http":
            realhost = None
        else:
            # NOTE(review): these calls use bare splithost/splituser/
            # proxy_bypass while the rest of the function goes through
            # urllib -- verify the bare names are imported at module level.
            realhost, rest = splithost(rest)
            if realhost:
                user_passwd, realhost = splituser(realhost)
            if user_passwd:
                selector = "%s://%s%s" % (urltype, realhost, rest)
            if proxy_bypass(realhost):
                host = realhost
    if not host:
        return -2
    h = httplib.HTTP(host)
    h.putrequest("GET", selector)
    if realhost:
        h.putheader("Host", realhost)
    for args in self.addheaders:
        h.putheader(*args)
    h.endheaders()
    errcode, errmsg, headers = h.getreply()
    return errcode
def get_host(self):
    """Return host:port, moving any user:pass prefix into a Basic
    Authorization header on this request."""
    full = urllib2.Request.get_host(self)
    creds, hostport = urllib.splituser(full)
    if creds:
        encoded = urllib2.base64.b64encode(creds)
        self.unredirected_hdrs['Authorization'] = 'Basic %s' % encoded
    return hostport
def _connect_ssh(self, host):
    """Open an svnserve tunnel over ssh to *host*.

    host may be [user[:password]@]host[:port]; port defaults to 22.
    Returns a (recv, send) pair of callables for the tunnel.
    """
    (user, host) = urllib.splituser(host)
    if user is not None:
        # BUG FIX: urllib has no splitpassword(); the helper is
        # splitpasswd() -- the old code raised AttributeError whenever
        # credentials were present in the host string.
        (user, password) = urllib.splitpasswd(user)
    else:
        password = None
    (host, port) = urllib.splitnport(host, 22)
    self._tunnel = get_ssh_vendor().connect_ssh(user, password, host, port,
                                                ["svnserve", "-t"])
    return (self._tunnel.recv, self._tunnel.send)
def type_and_host(url):
    """Extract (scheme, host) from *url*; host is None when absent.

    Credentials and port are stripped from the host before returning.
    """
    scheme, remainder = urllib.splittype(url)
    netloc, _ = urllib.splithost(remainder)
    if netloc is None:
        return scheme, None
    _, netloc = urllib.splituser(netloc)
    bare_host, _ = urllib.splitport(netloc)
    return scheme, bare_host
def _Dynamic_Fetch(self, request, response):
    """Trivial implementation of URLFetchService::Fetch().

    Args:
      request: The fetch to perform, a URLFetchRequest
      response: The fetch response, a URLFetchResponse
    """
    (protocol, host, path, parameters, query, fragment) = \
        urlparse.urlparse(request.url())
    # Discard any user:pass prefix, then isolate the port number.
    port = urllib.splitport(urllib.splituser(host)[1])[1]
    # Force https if we are on port 443. Will break request which happen to be
    # non-ssl on this port.
    if str(port) == "443":
        protocol = "https"
    payload = None
    # Map the protobuf method enum to an HTTP verb; only POST and PUT
    # carry a payload.
    if request.method() == urlfetch_service_pb.URLFetchRequest.GET:
        method = 'GET'
    elif request.method() == urlfetch_service_pb.URLFetchRequest.POST:
        method = 'POST'
        payload = request.payload()
    elif request.method() == urlfetch_service_pb.URLFetchRequest.HEAD:
        method = 'HEAD'
    elif request.method() == urlfetch_service_pb.URLFetchRequest.PUT:
        method = 'PUT'
        payload = request.payload()
    elif request.method() == urlfetch_service_pb.URLFetchRequest.DELETE:
        method = 'DELETE'
    else:
        logging.error('Invalid method: %s', request.method())
        raise apiproxy_errors.ApplicationError(
            urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR)
    if not (protocol == 'http' or protocol == 'https'):
        logging.error('Invalid protocol: %s', protocol)
        raise apiproxy_errors.ApplicationError(
            urlfetch_service_pb.URLFetchServiceError.INVALID_URL)
    if not host:
        logging.error('Missing host.')
        raise apiproxy_errors.ApplicationError(
            urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR)
    # Drop headers a client is not allowed to set before fetching.
    sanitized_headers = self._SanitizeHttpHeaders(_UNTRUSTED_REQUEST_HEADERS,
                                                  request.header_list())
    request.clear_header()
    request.header_list().extend(sanitized_headers)
    deadline = _API_CALL_DEADLINE
    if request.has_deadline():
        deadline = request.deadline()
    self._RetrieveURL(request.url(), payload, method,
                      request.header_list(), request, response,
                      follow_redirects=request.followredirects(),
                      deadline=deadline)
def _open_resource(url_file_stream_or_string, handle_url):
    """URL, filename, or string --> stream

    This function lets you define parsers that take any input source
    (URL, pathname to local or network file, or actual data as a string)
    and deal with it in a uniform manner.  Returned object is guaranteed
    to have all the basic stdio read methods (read, readline, readlines).
    Just .close() the object when you're done with it.

    The handle_url argument will be passed the input source if it's
    determined to be a retrievable URL.
    """
    # Already a file-like object: pass through unchanged.
    if hasattr(url_file_stream_or_string, 'read'):
        return url_file_stream_or_string
    if url_file_stream_or_string == '-':
        return sys.stdin
    if urlparse.urlparse(url_file_stream_or_string)[0] in ('http', 'https', 'ftp', 'file', 'feed'):
        # Deal with the feed URI scheme
        if url_file_stream_or_string.startswith('feed:http'):
            url_file_stream_or_string = url_file_stream_or_string[5:]
        elif url_file_stream_or_string.startswith('feed:'):
            url_file_stream_or_string = 'http:' + url_file_stream_or_string[5:]
        # test for inline user:password for basic auth
        auth = None
        if base64:
            urltype, rest = urllib.splittype(url_file_stream_or_string)
            realhost, rest = urllib.splithost(rest)
            if realhost:
                user_passwd, realhost = urllib.splituser(realhost)
                if user_passwd:
                    url_file_stream_or_string = '%s://%s%s' % (urltype, realhost, rest)
                    # NOTE(review): auth is computed but not passed to
                    # handle_url here -- presumably consumed elsewhere;
                    # verify against the caller.
                    auth = base64.standard_b64encode(user_passwd).strip()
        # iri support
        try:
            if isinstance(url_file_stream_or_string,unicode):
                url_file_stream_or_string = url_file_stream_or_string.encode('idna').decode('utf-8')
            else:
                url_file_stream_or_string = url_file_stream_or_string.decode('utf-8').encode('idna').decode('utf-8')
        except:
            pass
        # Handle it as a URL.
        return handle_url(url_file_stream_or_string)
    # try to open with native open function (if url_file_stream_or_string is a filename)
    try:
        return open(url_file_stream_or_string, 'rb')
    except:
        pass
    # treat url_file_stream_or_string as string
    return _StringIO(str(url_file_stream_or_string))
def get_host_info(self, host):
    """Strip any user:pw@ prefix from *host*.

    Returns (host, headers) where headers carries a Basic Authorization
    entry when credentials were present, else an empty dict.
    """
    import urllib
    credentials, host = urllib.splituser(host)
    headers = {}
    if credentials:
        import base64
        # Encode the unquoted user:pw pair, then remove the newlines
        # that encodestring inserts.
        token = base64.encodestring(urllib.unquote(credentials))
        token = string.join(string.split(token), "")
        headers = {"Authorization": "Basic " + token}
    return host, headers
def ev_incoming_call_det(self, call, user_data):
    """Handle incoming-call details: record SDP, destination and OAD."""
    call.user_data = CallData(self, call)
    sd = SDP(call.details.media_offer_answer.raw_sdp)
    call.user_data.in_sd = sd
    # Path component of the destination/originating SIP URIs; the user
    # part before '@' is the number of interest.
    to = urlsplit(call.details.destination_addr)[2]
    call.user_data.dest = splituser(to)[0]
    _from = urlsplit(call.details.originating_addr)[2]
    oad = splituser(_from)[0]
    log.debug('To "%s, From: %s" got SDP:\n%s', to, _from, sd)
    # Only an all-digit caller id is accepted; otherwise fall back to '0'.
    if oad and oad.isdigit():
        call.user_data.oad = oad
    else:
        call.user_data.oad = '0'
    call.incoming_ringing()
def open_ftp(self, url):
    """Open an ftp:// URL using a cache of ftpwrapper connections."""
    host, path = urllib.splithost(url)
    if not host: raise IOError, ('ftp error', 'no host given')
    host, port = urllib.splitport(host)
    user, host = urllib.splituser(host)
    # if user: user, passwd = splitpasswd(user)
    if user:
        # Password is always prompted for, never taken from the URL.
        passwd = getpass.getpass()
    else:
        passwd = None
    host = urllib.unquote(host)
    user = urllib.unquote(user or '')
    passwd = urllib.unquote(passwd or '')
    host = socket.gethostbyname(host)
    if not port:
        import ftplib
        port = ftplib.FTP_PORT
    else:
        port = int(port)
    path, attrs = urllib.splitattr(path)
    path = urllib.unquote(path)
    dirs = string.splitfields(path, '/')
    dirs, file = dirs[:-1], dirs[-1]
    if dirs and not dirs[0]: dirs = dirs[1:]
    # Connections are cached per (user, host, port, directory) key.
    key = (user, host, port, string.joinfields(dirs, '/'))
    # XXX thread unsafe!
    if len(self.ftpcache) > MAXFTPCACHE:
        # Prune the cache, rather arbitrarily
        for k in self.ftpcache.keys():
            if k != key:
                v = self.ftpcache[k]
                del self.ftpcache[k]
                v.close()
    try:
        if not self.ftpcache.has_key(key):
            print 'Creating ftpwrapper: ',user,host,port,dirs
            self.ftpcache[key] = \
                urllib.ftpwrapper(user, passwd, host, port, dirs)
        # 'D' lists a directory, 'I' retrieves a (binary) file.
        if not file: type = 'D'
        else: type = 'I'
        for attr in attrs:
            attr, value = urllib.splitvalue(attr)
            if string.lower(attr) == 'type' and \
               value in ('a', 'A', 'i', 'I', 'd', 'D'):
                type = string.upper(value)
        (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
        if retrlen is not None and retrlen >= 0:
            import mimetools, StringIO
            headers = mimetools.Message(StringIO.StringIO(
                'Content-Length: %d\n' % retrlen))
        else:
            headers = noheaders()
        return urllib.addinfourl(fp, headers, "ftp:" + url)
    except urllib.ftperrors(), msg:
        raise IOError, ('ftp error', msg), sys.exc_info()[2]
def open_ftp(self, url):
    """Open an ftp:// URL using a cache of ftpwrapper connections."""
    host, path = urllib.splithost(url)
    if not host: raise IOError, ('ftp error', 'no host given')
    host, port = urllib.splitport(host)
    user, host = urllib.splituser(host)
    # if user: user, passwd = splitpasswd(user)
    if user:
        # Password is always prompted for, never taken from the URL.
        passwd = getpass.getpass()
    else:
        passwd = None
    host = urllib.unquote(host)
    user = urllib.unquote(user or '')
    passwd = urllib.unquote(passwd or '')
    host = socket.gethostbyname(host)
    if not port:
        import ftplib
        port = ftplib.FTP_PORT
    else:
        port = int(port)
    path, attrs = urllib.splitattr(path)
    path = urllib.unquote(path)
    dirs = string.splitfields(path, '/')
    dirs, file = dirs[:-1], dirs[-1]
    if dirs and not dirs[0]: dirs = dirs[1:]
    # Connections are cached per (user, host, port, directory) key.
    key = (user, host, port, string.joinfields(dirs, '/'))
    # XXX thread unsafe!
    if len(self.ftpcache) > MAXFTPCACHE:
        # Prune the cache, rather arbitrarily
        for k in self.ftpcache.keys():
            if k != key:
                v = self.ftpcache[k]
                del self.ftpcache[k]
                v.close()
    try:
        if not self.ftpcache.has_key(key):
            print 'Creating ftpwrapper: ', user, host, port, dirs
            self.ftpcache[key] = \
                urllib.ftpwrapper(user, passwd, host, port, dirs)
        # 'D' lists a directory, 'I' retrieves a (binary) file.
        if not file: type = 'D'
        else: type = 'I'
        for attr in attrs:
            attr, value = urllib.splitvalue(attr)
            if string.lower(attr) == 'type' and \
               value in ('a', 'A', 'i', 'I', 'd', 'D'):
                type = string.upper(value)
        (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
        if retrlen is not None and retrlen >= 0:
            import mimetools, StringIO
            headers = mimetools.Message(
                StringIO.StringIO('Content-Length: %d\n' % retrlen))
        else:
            headers = noheaders()
        return urllib.addinfourl(fp, headers, "ftp:" + url)
    except urllib.ftperrors(), msg:
        raise IOError, ('ftp error', msg), sys.exc_info()[2]
def ftp_open(self, req):
    """urllib2-style handler for ftp:// requests."""
    import ftplib
    import mimetypes
    host = req.get_host()
    if not host:
        raise URLError('ftp error: no host given')
    host, port = splitport(host)
    if port is None:
        port = ftplib.FTP_PORT
    else:
        port = int(port)
    # Split optional user:password credentials off the host.
    user, host = splituser(host)
    if user:
        user, passwd = splitpasswd(user)
    else:
        passwd = None
    host = unquote(host)
    user = user or ''
    passwd = passwd or ''
    try:
        host = socket.gethostbyname(host)
    except socket.error as msg:
        raise URLError(msg)
    path, attrs = splitattr(req.get_selector())
    dirs = path.split('/')
    dirs = map(unquote, dirs)
    dirs, file = dirs[:-1], dirs[-1]
    if dirs and not dirs[0]:
        dirs = dirs[1:]
    try:
        fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
        # 'I' (binary) when a file is named, 'D' (directory) otherwise;
        # a ;type= attribute in the URL overrides this.
        type = file and 'I' or 'D'
        for attr in attrs:
            attr, value = splitvalue(attr)
            if attr.lower() == 'type' and value in ('a', 'A', 'i', 'I', 'd', 'D'):
                type = value.upper()
        fp, retrlen = fw.retrfile(file, type)
        # Synthesize minimal response headers from what we know.
        headers = ''
        mtype = mimetypes.guess_type(req.get_full_url())[0]
        if mtype:
            headers += 'Content-type: %s\n' % mtype
        if retrlen is not None and retrlen >= 0:
            headers += 'Content-length: %d\n' % retrlen
        sf = StringIO(headers)
        headers = mimetools.Message(sf)
        return addinfourl(fp, headers, req.get_full_url())
    except ftplib.all_errors as msg:
        raise URLError, 'ftp error: %s' % msg, sys.exc_info()[2]
    return
def savefilename(self, url):
    """Map *url* to a local save path of the form host/path."""
    scheme_split = urllib.splittype(url)
    netloc, localpath = urllib.splithost(scheme_split[1])
    localpath = localpath.lstrip("/")
    _, netloc = urllib.splituser(netloc)
    netloc = urllib.splitnport(netloc)[0].lower()
    # Directory URLs are saved as their index.html.
    if not localpath or localpath.endswith("/"):
        localpath = localpath + "index.html"
    # Convert URL separators to the platform separator when they differ.
    if os.sep != "/":
        localpath = os.sep.join(localpath.split("/"))
    return os.path.join(netloc, localpath)
def parse_backend(url):
    """Parse a backend URL (scheme://user:pass@host[:port]/?k=v&...)
    into a Backend namedtuple.

    Recognised query keys: hot_user, hot_pass, level, mode, extra.
    On any parse failure the error is logged and the process exits
    (legacy behaviour, kept intact).
    """
    def unescape(s):
        # application/x-www-form-urlencoded style decoding
        return urllib.unquote(s.replace('+', ' '))
    try:
        proto, u = urllib.splittype(url)
        auth, path = urllib.splithost(u)
        user, host = urllib.splituser(auth)
        user, password = urllib.splitpasswd(user)
        host, _ = urllib.splitport(host)
        _, query = urllib.splitquery(path)
        args = query.split('&')
        hot_user = None
        hot_pass = None
        # BUG FIX: hot_addr and extra were previously unbound unless an
        # 'extra' query argument was present, raising NameError at
        # namedtuple construction time.
        hot_addr = None
        extra = None
        level = 0
        mode = -1
        for arg in args:
            # BUG FIX: maxsplit must be 1 so a value containing '=' does
            # not yield three parts and break the unpacking.
            k, v = arg.split('=', 1)
            v = unescape(v)
            if k == 'hot_user':
                hot_user = v
            elif k == 'hot_pass':
                hot_pass = v
            elif k == 'level':
                level = parse_level(v)
            elif k == 'mode':
                mode = parse_mode(v)
            elif k == 'extra':
                hot_addr, extra = parse_extra(v)
        # A hot user without an explicit hot address falls back to host.
        if hot_user != None and hot_addr == None:
            hot_addr = host
        Backend = namedtuple('Backend', [
            'address', 'user', 'password', 'hot_addr', 'hot_user',
            'hot_pass', 'extra', 'level', 'mode'
        ])
        return Backend(address=host, user=user, password=password,
                       hot_addr=hot_addr, hot_user=hot_user,
                       hot_pass=hot_pass, extra=extra, level=level,
                       mode=mode)
    except Exception:
        logging.getLogger('mwtm_executor').error('bad backend: %s', url,
                                                 exc_info=True)
        _exit(1)
def __init__(self, uri, username=None, password=None, *args, **kwargs): xmlrpclib.ServerProxy.__init__(self, uri, *args, **kwargs) # Hide password userpass, hostport = urllib.splituser(self.__host) if userpass and not username: self.__host = hostport username, password = urllib.splitpasswd(userpass) if username: password = util.ProtectedString(urllib.quote(password)) self.__host = ProtectedTemplate('${user}:${password}@${host}', user=username, password=password, host=self.__host)
def get_host_info(self, host):
    """Split *host* into (host, extra_headers, x509) for a transport.

    *host* may be a (host, x509-dict) tuple; user info embedded in the
    host becomes an HTTP Basic Authorization header.
    """
    x509 = {}
    if isinstance(host, TupleType):
        host, x509 = host
    import urllib
    userinfo, host = urllib.splituser(host)
    if userinfo:
        import base64
        # base64-encode 'user:pass' and strip the newlines encodestring adds
        encoded = base64.encodestring(urllib.unquote(userinfo))
        encoded = ''.join(encoded.split())
        extra_headers = [('Authorization', 'Basic ' + encoded)]
    else:
        extra_headers = None
    return (host, extra_headers, x509)
def _setUpInternalUser(self):
    """Create random internal credentials and embed them in self.uri.

    Generates a 10-letter lowercase username and password.  When self.uri
    is an http(s) URL string, it is rewritten as
    scheme://user:password@host/rest, replacing any user info already
    present.  The pair is stored in self.internalAuth.
    """
    # These are authentication credentials, so draw them from the OS
    # entropy pool; the default Mersenne-Twister `random` is predictable.
    rng = random.SystemRandom()
    user = ''.join(
        [chr(rng.randint(ord('a'), ord('z'))) for x in range(10)])
    password = ''.join(
        [chr(rng.randint(ord('a'), ord('z'))) for x in range(10)])
    if isinstance(self.uri, str):
        schema, url = urllib.splittype(self.uri)
        if schema in ('http', 'https'):
            host, rest = urllib.splithost(url)
            # drop any user info already embedded in the URL
            olduser, host = urllib.splituser(host)
            uri = '%s://%s:%s@%s%s' % (schema, user, password, host, rest)
            self.uri = uri
    self.internalAuth = (user, password)
def savefilename(self, url):
    """Return a relative save path <host>/<path> derived from *url*.

    Strips user info and port, lower-cases the host, appends
    'index.html' for directory URLs and uses the platform separator.
    """
    scheme, remainder = urllib.splittype(url)
    netloc, relpath = urllib.splithost(remainder)
    # strip all leading slashes (equivalent to the old while-loop)
    relpath = relpath.lstrip("/")
    userinfo, netloc = urllib.splituser(netloc)
    hostname, portnum = urllib.splitnport(netloc)
    hostname = hostname.lower()
    if not relpath or relpath.endswith("/"):
        relpath = relpath + "index.html"
    if os.sep != "/":
        relpath = os.sep.join(relpath.split("/"))
    return os.path.join(hostname, relpath)
def __init__(self, host, handler, streams=None, ssh_config=None):
    """Set up a read/write pipe pair to *handler* on a remote *host*.

    If *streams* is given it supplies (rfile, wfile) directly; otherwise
    an ssh (or, on Windows, plink) child process is spawned running
    *handler* on the remote side, and its stdout/stdin become
    self.rfile/self.wfile.  *host* may embed a user as 'user@host'.
    """
    # Store information about the peer and
    self.handler = handler
    self.host = host
    # ... initialize the read and write file objects.
    self.myChild = None
    if streams:
        self.rfile = streams[0]
        self.wfile = streams[1]
        pass
    else:
        self.user, ssh_host = urllib.splituser(self.host)
        # print self.user + " " + self.host + " " + handler
        # Use ssh unless we're on Windows with no ssh-agent running.
        nt = os.name == "nt"
        use_ssh = not nt or os.environ.has_key("SSH_AGENT_PID")
        flags = ""
        if self.user:
            flags = flags + " -l " + self.user
            pass
        if use_ssh and ssh_config:
            flags = flags + " -F " + ssh_config
            pass
        args = flags + " " + ssh_host + " " + handler
        if use_ssh:
            cmd = "ssh -x -C -o 'CompressionLevel 5' " + args
            pass
        else:
            # Use the PyTTY plink, equivalent to the ssh command.
            cmd = "plink -x -C " + args
            pass
        if not nt:
            # Popen3 objects, and the wait method, are Unix-only.
            self.myChild = popen2.Popen3(cmd, 0)
            self.rfile = self.myChild.fromchild
            self.wfile = self.myChild.tochild
            pass
        else:
            # Open the pipe in Binary mode so it doesn't mess with CR-LFs.
            self.rfile, self.wfile, self.errfile = popen2.popen3(cmd, mode='b')
            pass
        # print "wfile", self.wfile, "rfile", self.rfile
        pass
    return
def getRepositories(user_svn=None, user_cvs=None, protocol="default"):
    """Collect repository descriptors for *protocol*.

    Starts from the module-level `repositories` table, then appends one
    SVNReposInfo per URL in *user_svn* (keys 'user_svn_<i>') and one
    CVSReposInfo per ':'-separated spec in *user_cvs* ('user_cvs_<i>').
    Returns the resulting dict.
    """
    from urllib import splittype, splituser, splithost
    reps = {}
    for name in repositories:
        if protocol in repositories[name]:
            reps[name] = repositories[name][protocol]
    if user_svn:
        for i, entry in enumerate(user_svn):
            # FIXME: need some error checking
            protocol, rest = splittype(entry)
            rest, path = splithost(rest)
            user, host = splituser(rest)
            reps["user_svn_%d" % i] = SVNReposInfo(protocol, host, path, user)
    if user_cvs:
        for i, entry in enumerate(user_cvs):
            # FIXME: need some error checking
            dummy, protocol, rest, path = entry.split(":")
            user, host = splituser(rest)
            reps["user_cvs_%d" % i] = CVSReposInfo(protocol, host, path, user)
    return reps
def getServerUri(self):
    """Return the rmake server URI with any user credential masked.

    unix: socket URLs are returned untouched.  For network URLs the
    host/port are re-assembled (port defaults to 9999) and, when an
    rmakeUser is configured, a masked 'user@' prefix replaces the real
    credentials so the URI is safe to display.
    """
    url = self.rmakeUrl
    type, rest = urllib.splittype(url)
    if type != 'unix':
        host, path = urllib.splithost(rest)
        user, host = urllib.splituser(host)
        host, port = urllib.splitport(host)
        if not port:
            port = 9999
        user = ''
        if self.rmakeUser:
            # Bug fix: the previous format string had no %s conversion,
            # so the '%' raised TypeError ("not all arguments converted").
            # NOTE(review): the literal looked redacted in the source;
            # reconstructed as a masked user@ prefix -- confirm the
            # intended display format.
            user = '%s@' % (self.rmakeUser,)
        url = '%s://%s%s:%s%s' % (type, user, host, port, path)
    return url
def get_host_info(self, host):
    """Return (host, extra_headers, x509) for an XML-RPC transport.

    Accepts either a plain host string or a (host, x509-dict) tuple.
    Embedded 'user:pass@' credentials are converted into an HTTP Basic
    Authorization header.
    """
    x509 = {}
    if isinstance(host, types.TupleType):
        host, x509 = host
    userinfo, host = urllib.splituser(host)
    if userinfo:
        encoded = base64.encodestring(urllib.unquote(userinfo))
        encoded = "".join(encoded.split())  # get rid of whitespace
        extra_headers = [("Authorization", "Basic " + encoded)]
    else:
        extra_headers = None
    return host, extra_headers, x509
def ParseUrl(value):
    """Split *value* into six strings:
    (scheme, host, port, path, username, password).

    Missing pieces come back as "" (the port as a decimal string when
    present).  A None/empty *value* yields all-empty results.
    """
    username = ""
    password = ""
    scheme, remainder = splittype(value or "")
    remainder, path = splithost(remainder or "")
    userinfo, remainder = splituser(remainder or "")
    if userinfo:
        username, password = splitpasswd(userinfo)
    host, port = splitnport(remainder or "", None)
    return (scheme or "", host or "", str(port) if port else "",
            path or "", username or "", password or "")
def url_fix_host(urlparts): """Unquote and fix hostname. Returns is_idn.""" # if not urlparts[1]: # urlparts[2] = urllib.unquote(urlparts[2]) # return False userpass, netloc = urllib.splituser(urlparts[1]) if userpass: userpass = urllib.unquote(userpass) netloc, is_idn = idna_encode(urllib.unquote(netloc).lower()) # a leading backslash in path causes urlsplit() to add the # path components up to the first slash to host # try to find this case... i = netloc.find("\\") if i != -1: # ...and fix it by prepending the misplaced components to the path comps = netloc[i:] # note: still has leading backslash if not urlparts[2] or urlparts[2] == '/': urlparts[2] = comps else: urlparts[2] = "%s%s" % (comps, urllib.unquote(urlparts[2])) netloc = netloc[:i] else: # a leading ? in path causes urlsplit() to add the query to the # host name i = netloc.find("?") if i != -1: netloc, urlparts[3] = netloc.split('?', 1) # path urlparts[2] = urllib.unquote(urlparts[2]) if userpass and userpass != ':': # append AT for easy concatenation userpass += "@" else: userpass = "" if urlparts[0] in default_ports: dport = default_ports[urlparts[0]] host, port = splitport(netloc, port=dport) host = host.rstrip('. ') if port != dport: host = "%s:%d" % (host, port) netloc = host urlparts[1] = userpass + netloc return is_idn
def urlSplit(url, defaultPort=None):
    """A function to split a URL in the format
    <scheme>://<user>:<pass>@<host>:<port>/<path>;<params>#<fragment>
    into a tuple
    (<scheme>, <user>, <pass>, <host>, <port>, <path>, <params>, <fragment>)
    Any missing pieces (user/pass) will be set to None.
    If the port is missing, it will be set to defaultPort; otherwise,
    the port should be a numeric value.
    """
    scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
    userpass, hostport = urllib.splituser(netloc)
    # Bug fix: defaultPort was accepted and documented but never used --
    # the port always fell back to None regardless of the argument.
    host, port = urllib.splitnport(hostport, defaultPort)
    if userpass:
        user, passwd = urllib.splitpasswd(userpass)
    else:
        user, passwd = None, None
    return scheme, user, passwd, host, port, path, \
        query or None, fragment or None
def _parse_proxy(proxy):
    """Decompose a proxy specification into (scheme, user, password, hostport).

    Accepts either a full URL ('http://user:pw@host:port/') or a bare
    authority ('host:port'), in which case scheme is None.  Raises
    ValueError for scheme-only URLs without an authority.
    """
    scheme, r_scheme = splittype(proxy)
    if r_scheme.startswith('/'):
        if not r_scheme.startswith('//'):
            raise ValueError('proxy URL with no authority: %r' % proxy)
        # authority runs up to the next '/' (or to the end of the string)
        slash = r_scheme.find('/', 2)
        authority = r_scheme[2:slash] if slash != -1 else r_scheme[2:]
    else:
        # no '//': treat the whole input as an authority with no scheme
        scheme = None
        authority = proxy
    userinfo, hostport = splituser(authority)
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    else:
        user = password = None
    return (scheme, user, password, hostport)
def _getconnection(netloc): """Return a FTP connection object to the specified server.""" # NOTE: this method is not thread safe if _ftpconnections.has_key(netloc): return _ftpconnections[netloc] # split url into useful parts (userpass, host) = urllib.splituser(netloc) if userpass is not None: (user, passwd) = urllib.splitpasswd(userpass) else: (user, passwd) = ('anonymous', '') (host, port) = urllib.splitnport(host, ftplib.FTP_PORT) # initialize a new connection ftp = ftplib.FTP() debugio.debug('schemes.ftp._getconnection(): CONNECT: '+ftp.connect(host, port)) debugio.debug('schemes.ftp._getconnection(): LOGIN: '+ftp.login(user, passwd)) _ftpconnections[netloc] = ftp return ftp
def check_args(options, args):
    """Populate target_uri/target_addr/user/passwd options from args[0].

    args[0] is expected to look like 'sip:user:pass@host:port'; the port
    defaults to 5060.  Non-SIP arguments are ignored.
    """
    if not args:
        return
    # someway to test if it is a SIP URI
    if 'sip:' in args[0]:
        setattr(options, 'target_uri', args[0])
        uri = args[0]
        # Bug fix: strip the 'sip:' scheme *before* splituser(); the old
        # code stripped it from the host part only, so a URI with user
        # info ('sip:alice@host') left 'sip:' glued onto the username.
        if uri[:4] == 'sip:':
            uri = uri[4:]
        userpass, hostport = urllib.splituser(uri)
        host, port = urllib.splitnport(hostport, 5060)
        # XXX Overwrites!
        setattr(options, 'target_addr', (host, port))
        if userpass:
            user, passwd = urllib.splitpasswd(userpass)
            user and setattr(options, 'user', user)
            passwd and setattr(options, 'passwd', passwd)
def build_http_request(self, host, handler, request_body):
    """Build an authenticated XML-RPC POST request.

    *host* may embed 'user:token'; when only a user is present, the
    token is looked up through self.auth_backend for the target URL.
    Returns a urllib2.Request carrying Content-Type and, when a token is
    available, an HTTP Basic Authorization header.  Raises
    LavaCommandError when a username is given but no token can be found.
    """
    token = None
    user = None
    auth, host = urllib.splituser(host)
    if auth:
        user, token = urllib.splitpasswd(auth)
    url = self._scheme + "://" + host + handler
    if user is not None and token is None:
        # user given without a token: consult the local auth backend
        token = self.auth_backend.get_token_for_endpoint(user, url)
        if token is None:
            raise LavaCommandError(
                "Username provided but no token found.")
    request = urllib2.Request(url, request_body)
    request.add_header("Content-Type", "text/xml")
    if token:
        # basic-auth credential: url-unquoted 'user:token', base64-encoded
        auth = base64.b64encode(urllib.unquote(user + ':' + token))
        request.add_header("Authorization", "Basic " + auth)
    return request
def __init__(self, url):
    """Parse *url* into host, port, path, credentials and a TLS flag.

    Sets self.host, self.url (path, '/' when empty), self.user,
    self.password (None when absent), self.port and self.secure.
    """
    type, uri = urllib.splittype(url)
    #if type not in ("http", "https"):
    #    raise IOError, "unsupported XML-RPC protocol"
    self.host, self.url = urllib.splithost(uri)
    if self.url == "":
        self.url = "/"
    self.user = self.password = None
    self.user, self.host = urllib.splituser(self.host)
    try:
        self.user, self.password = urllib.splitpasswd(self.user)
    except TypeError:
        # no user info at all (splitpasswd(None) raises TypeError)
        pass
    self.host, self.port = urllib.splitport(self.host)
    if self.port is None:
        # Bug fix: splitport() returns None when the URL has no explicit
        # port, and int(None) raised TypeError; default by scheme.
        self.port = 443 if type == 'https' else 80
    else:
        self.port = int(self.port)
    self.secure = type == 'https'
def _download_svn(self, url, filename):
    """Check out *url* into *filename* using the svn command line.

    For svn:// URLs with embedded 'user[:password]@' credentials, the
    credentials are converted into --username/--password options and
    removed from the URL.  Returns *filename*.
    """
    url = url.split('#', 1)[0]  # remove any fragment for svn's sake
    creds = ''
    if url.lower().startswith('svn:') and '@' in url:
        scheme, netloc, path, p, q, f = urlparse.urlparse(url)
        if not netloc and path.startswith('//') and '/' in path[2:]:
            netloc, path = path[2:].split('/', 1)
            auth, host = urllib.splituser(netloc)
            if auth:
                if ':' in auth:
                    user, pw = auth.split(':', 1)
                    creds = " --username=%s --password=%s" % (user, pw)
                else:
                    # NOTE(review): this branch and the lines down to the
                    # log call were garbled (redacted) in the source;
                    # reconstructed from the setuptools implementation
                    # this code derives from -- confirm against upstream.
                    creds = " --username=" + auth
                netloc = host
                url = urlparse.urlunparse((scheme, netloc, path, p, q, f))
    self.info("Doing subversion checkout from %s to %s", url, filename)
    # SECURITY: url/filename are interpolated into a shell command line;
    # a crafted URL can inject shell metacharacters.  Consider
    # subprocess with an argument list instead of os.system().
    os.system("svn checkout%s -q %s %s" % (creds, url, filename))
    return filename
def _GuessBase(self, required):
    """Returns the SVN base URL.

    Parses `svn info` output for the repository URL and maps known
    hosts (svn.python.org, svn.collab.net, *.googlecode.com) onto their
    web *checkout* URLs; any other host is returned as-is with a
    trailing slash.

    Args:
      required: If true, exits if the url can't be guessed, otherwise None is
        returned.
    """
    info = RunShell(["svn", "info"])
    for line in info.splitlines():
        words = line.split()
        if len(words) == 2 and words[0] == "URL:":
            url = words[1]
            scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
            # drop any user@ prefix from the host
            username, netloc = urllib.splituser(netloc)
            if username:
                logging.info("Removed username from base URL")
            if netloc.endswith("svn.python.org"):
                if netloc == "svn.python.org":
                    if path.startswith("/projects/"):
                        path = path[9:]  # keep the slash before the project name
                # NOTE(review): the literal below looks redacted
                # ("*****@*****.**"); upstream versions of this code compare
                # against the pythondev@svn.python.org netloc -- confirm
                # before relying on this branch.
                elif netloc != "*****@*****.**":
                    ErrorExit("Unrecognized Python URL: %s" % url)
                base = "http://svn.python.org/view/*checkout*%s/" % path
                logging.info("Guessed Python base = %s", base)
            elif netloc.endswith("svn.collab.net"):
                if path.startswith("/repos/"):
                    path = path[6:]  # keep the slash before the repo name
                base = "http://svn.collab.net/viewvc/*checkout*%s/" % path
                logging.info("Guessed CollabNet base = %s", base)
            elif netloc.endswith(".googlecode.com"):
                # Google Code serves raw files over plain http
                path = path + "/"
                base = urlparse.urlunparse(("http", netloc, path, params,
                                            query, fragment))
                logging.info("Guessed Google Code base = %s", base)
            else:
                path = path + "/"
                base = urlparse.urlunparse((scheme, netloc, path, params,
                                            query, fragment))
                logging.info("Guessed base = %s", base)
            return base
    if required:
        ErrorExit("Can't find URL in output from svn info")
    return None
def _clone_gitrepo(): """Clones project git repo into virtualenv""" # Puts git repo in ~/.ssh/config to avoid interaction due to missing known_hosts git_server = urllib.splituser( urllib.splittype(env.project['git_repo'])[0])[1] if not files.exists('~/.ssh/config') or not files.contains( '~/.ssh/config', git_server): files.append('~/.ssh/config', ['host %s' % git_server, ' StrictHostKeyChecking no']) branch = env.project.get('git_branch', 'master') if files.exists(_interpolate(DJANGO_PROJECT_DIR)): print _interpolate('project %(project)s already exists, updating') remote('git pull origin %s' % branch) else: with cd(_interpolate(VIRTUALENV_DIR)): run(_interpolate('git clone %(git_repo)s %(project)s')) if branch != 'master': remote('git fetch origin %s:%s' % (branch, branch)) remote('git checkout %s' % branch)
def _urlclean(url):
    """Normalize *url*: consistent escaping, non-empty path for web
    schemes, lower-cased host without a dangling ':', and no fragment."""
    # make escaping consistent
    url = urlescape(url)
    # split the url in useful parts
    scheme, netloc, path, query = urlparse.urlsplit(url)[:4]
    if scheme in ('http', 'https', 'ftp'):
        # http(s)/ftp urls should have a non-empty path
        if path == '':
            path = '/'
    # lower-case the host, keeping any user:pass part untouched
    userpass, hostport = urllib.splituser(netloc)
    netloc = hostport.lower()
    # trim a trailing ':' left over from an empty port
    if netloc.endswith(':'):
        netloc = netloc[:-1]
    if userpass is not None:
        netloc = userpass + '@' + netloc
    # reassemble, discarding the fragment
    return urlparse.urlunsplit((scheme, netloc, path, query, ''))
def __init__(self, url, method, params):
    """Launch `xterm -e telnet <host> [port]` for a telnet-style URL.

    Forks: the parent returns immediately; the child closes inherited
    file descriptors and exec()s xterm (exiting with 127 on failure).
    Any user:password part of the URL is parsed but ignored.
    """
    null_access.__init__(self, url, method, params)
    host, junk = urllib.splithost(url)
    userpasswd, host = urllib.splituser(host)
    host, port = urllib.splitport(host)
    # XXX I tried doing this using os.system(), but the file
    # descriptors that Grail has open seemed to be confusing
    # telnet or xterm.  So we need to close all file descriptors,
    # and this must be done in the child process, so now that
    # we're forking anyway, we might as well use os.exec*.
    # XXX To do: reap child processes after they've died!
    # Use os.waitpid(-1, os.WNOHANG) to do this.
    # But perhaps we should only wait for pids originating in this
    # module.
    cmd = ["xterm", "-e", "telnet", host]
    if port:
        cmd.append(str(port))
    pid = os.fork()
    if pid:
        # Parent process
        return
    # Child process
    try:
        # Close all file descriptors
        # XXX How to know how many there are?
        for i in range(3, 200):
            try:
                os.close(i)
            except os.error:
                pass
        # XXX Assume xterm is on $PATH
        os.execvp(cmd[0], cmd)
        # This doesn't return when successful
    except:
        print "Exception in os.execvp() or os.close()"
        # Don't fall back in the the parent's stack!
        os._exit(127)
def url_permutations(url):
    """Try all permutations of hostname and path which can be applied to
    blacklisted URLs

    Yields each unique '<host-variant><path-variant>' combination:
    host variants are the full host (IPs yield only themselves) plus
    suffixes of up to 5 components; path variants are the full path,
    the path without its query, and each leading directory prefix (up
    to 4 levels).
    """
    def url_host_permutations(host):
        # literal IPv4 addresses are used as-is
        if re.match(r'\d+\.\d+\.\d+\.\d+', host):
            yield host
            return
        parts = host.split('.')
        l = min(len(parts), 5)
        if l > 4:
            # host has more than 4 labels: the full name is its own variant
            yield host
        for i in range(l - 1):
            # suffixes: last l, l-1, ... 2 components
            yield '.'.join(parts[i - l:])
    def url_path_permutations(path):
        yield path
        query = None
        if '?' in path:
            path, query = path.split('?', 1)
        if query is not None:
            # path without its query string
            yield path
        path_parts = path.split('/')[0:-1]
        curr_path = ''
        for i in range(min(4, len(path_parts))):
            # growing directory prefixes: '/', '/a/', '/a/b/', ...
            curr_path = curr_path + path_parts[i] + '/'
            yield curr_path
    protocol, address_str = urllib.splittype(url)
    host, path = urllib.splithost(address_str)
    # discard user info and port before permuting
    user, host = urllib.splituser(host)
    host, port = urllib.splitport(host)
    host = host.strip('/')
    seen_permutations = set()
    for h in url_host_permutations(host):
        for p in url_path_permutations(path):
            u = '{}{}'.format(h, p)
            if u not in seen_permutations:
                yield u
                seen_permutations.add(u)
def _getBaseServerUrlData(self):
    """ Fetch serverUrl from ~/.rbuilderrc if it exists and is specified;
        removes user and password from the URL and returns them separately.
        @return serverUrl, user, password
    """
    serverUrl = self._getBaseServerUrl()
    if not serverUrl:
        return (None, None, None)
    scheme, rest = serverUrl.split(':', 1)
    host = urllib.splithost(rest)[0]
    user = urllib.splituser(host)[0]
    if user:
        user, password = urllib.splitpasswd(user)
    else:
        password = None
    # NOTE(review): the credentials are removed by textual replace of the
    # first ':password' / 'user@' occurrence anywhere in the URL; this
    # could clip the wrong text if the same substring also appears
    # elsewhere (e.g. in the path) before the credentials.
    if password:
        serverUrl = serverUrl.replace(':%s' % password, '', 1)
    if user:
        serverUrl = serverUrl.replace('%s@' % user, '', 1)
    return serverUrl, user, password
def submit(urls, target, define=[], submithost=None, atonce=False, sid=None): if submithost is None: submithost = config.get("submit", "host") if submithost is None: # extract the submit host from the svn host type, rest = urllib.splittype(pkgdirurl) host, path = urllib.splithost(rest) user, host = urllib.splituser(host) submithost, port = urllib.splitport(host) del type, user, port, path, rest # runs a create-srpm in the server through ssh, which will make a # copy of the rpm in the export directory createsrpm = get_helper("create-srpm") baseargs = ["ssh", submithost, createsrpm, "-t", target] if not sid: sid = uuid.uuid4() define.append("sid=%s" % sid) for entry in reversed(define): baseargs.append("--define") baseargs.append(entry) cmdsargs = [] if len(urls) == 1: # be compatible with server-side repsys versions older than 1.6.90 url, rev = layout.split_url_revision(urls[0]) baseargs.append("-r") baseargs.append(str(rev)) baseargs.append(url) cmdsargs.append(baseargs) elif atonce: cmdsargs.append(baseargs + urls) else: cmdsargs.extend((baseargs + [url]) for url in urls) for cmdargs in cmdsargs: command = subprocess.list2cmdline(cmdargs) status, output = execcmd(command) if status == 0: print "Package submitted!" else: sys.stderr.write(output) sys.exit(status)
def do_open(self, http_class, req):
    """Open *req* over a fresh http_class connection (urllib2
    AbstractHTTPHandler style).

    Sends the request line, auth/Host/parent/request headers and ends
    the header block, converting socket errors into URLError.
    NOTE(review): no response handling or return statement is visible in
    this excerpt -- the implementation may continue beyond this chunk;
    confirm before treating the function as complete.
    """
    host = req.get_host()
    if not host:
        raise URLError('no host given')
    # credentials embedded in the URL become a Basic auth header
    user_passwd, host = urllib.splituser(host)
    host = urllib.unquote(host)
    if user_passwd:
        import base64
        auth = base64.encodestring(user_passwd).strip()
    else:
        auth = None
    h = http_class(host)  # will parse host:port
    if req.has_data():
        data = req.get_data()
        h.putrequest('POST', req.get_selector())
        if not req.headers.has_key('Content-type'):
            h.putheader('Content-type',
                        'application/x-www-form-urlencoded')
        if not req.headers.has_key('Content-length'):
            h.putheader('Content-length', '%d' % len(data))
    else:
        h.putrequest('GET', req.get_selector())
    if auth:
        h.putheader('Authorization', 'Basic %s' % auth)
    # Host header: prefer the host named in an absolute selector (proxy
    # case), falling back to the connection host
    scheme, sel = urllib.splittype(req.get_selector())
    sel_host, sel_path = urllib.splithost(sel)
    h.putheader('Host', sel_host or host)
    for args in self.parent.addheaders:
        h.putheader(*args)
    for k, v in req.headers.items():
        h.putheader(k, v)
    # httplib will attempt to connect() here. be prepared
    # to convert a socket error to a URLError.
    try:
        h.endheaders()
    except socket.error, err:
        raise urllib2.URLError(err)