def checkLink(self, url):
    """Return 1 if a HEAD request for *url* answers HTTP 200, else 0."""
    pieces = urlparse(url)
    conn = HTTP(pieces[1])
    conn.putrequest('HEAD', pieces[2])
    conn.endheaders()
    status = conn.getreply()[0]
    return 1 if status == 200 else 0
def fetch_id_from_Google(self, cid, lac, country):
    """Look up the latitude/longitude of a GSM cell via Google's /glm/mmap service.

    cid/lac identify the cell; country is a key into Translator.Country.
    Returns (status, latitude, longitude): status is
    CellIDDBStatus.CONFIRMED on success, CellIDDBStatus.NOT_IN_DB when
    the response cannot be read/parsed; coordinates default to 0.
    """
    latitude = 0
    longitude = 0
    device = "Motorola C123"
    country = Translator.Country[country]
    # Binary request blob in the format expected by the legacy Google
    # mobile-maps endpoint.
    b_string = pack('>hqh2sh13sh5sh3sBiiihiiiiii',
                    21, 0,
                    len(country), country,
                    len(device), device,
                    len('1.3.1'), "1.3.1",
                    len('Web'), "Web",
                    27, 0, 0, 3, 0, cid, lac, 0, 0, 0, 0)
    http = HTTP('www.google.com', 80)
    http.putrequest('POST', '/glm/mmap')
    http.putheader('Content-Type', 'application/binary')
    http.putheader('Content-Length', str(len(b_string)))
    http.endheaders()
    http.send(b_string)
    code, msg, headers = http.getreply()
    try:
        # renamed from "bytes" to avoid shadowing the builtin
        raw = http.file.read()
        (a, b, errorCode, latitude, longitude, c, d, e) = unpack(">hBiiiiih", raw)
        latitude /= 1000000.0
        longitude /= 1000000.0
        status = CellIDDBStatus.CONFIRMED
    except Exception:
        # BUG FIX: was a bare "except:", which also swallowed
        # KeyboardInterrupt/SystemExit.  Any read/unpack failure still
        # means the cell is not in the database.
        status = CellIDDBStatus.NOT_IN_DB
    return status, latitude, longitude
def checkURL(url):
    """Check *url* with a HEAD request; 1 means the server replied 200, 0 anything else."""
    parts = urlparse(url)
    conn = HTTP(parts[1])
    conn.putrequest('HEAD', parts[2])
    conn.endheaders()
    reply = conn.getreply()
    if reply[0] != 200:
        return 0
    return 1
def checkURL(url): p = urlparse(url) h = HTTP(p[1]) h.putrequest('HEAD', p[2]) h.endheaders() reply = h.getreply() print "reply:", reply print url, "response code:", reply[0]
def URL_exists(url):
    """True iff a HEAD request for *url* comes back with status 200."""
    bits = urlparse(url)
    conn = HTTP(bits[1])
    conn.putrequest('HEAD', bits[2])
    conn.endheaders()
    return conn.getreply()[0] == 200
def urlExists(url):
    """Report whether *url* answers a HEAD request with HTTP 200."""
    loc = urlparse(url)
    conn = HTTP(loc[1])
    conn.putrequest('HEAD', loc[2])
    conn.endheaders()
    status = conn.getreply()[0]
    return status == 200
def fetch_latlong_http(query):
    """POST the packed binary *query* to Google's /glm/mmap endpoint and
    return the raw response body."""
    conn = HTTP('www.google.com', 80)
    conn.putrequest('POST', '/glm/mmap')
    conn.putheader('Content-Type', 'application/binary')
    conn.putheader('Content-Length', str(len(query)))
    conn.endheaders()
    conn.send(query)
    conn.getreply()  # status/message/headers are not used
    return conn.file.read()
def action_delete(): vip = sys.argv[3] h = HTTP('home.shaunkruger.com:8087') h.putrequest('DELETE','/module/mod_cluster_admin/vip/'+vip) h.putheader('Authorization','Basic '+base64.standard_b64encode('skruger:testing')) h.endheaders() errcode, errmsg, headers = h.getreply() if errcode == 200: f = h.getfile() print f.read() return
def getStatusCode(self, url):
    """Return the HTTP status of a HEAD request for *url*.

    Any failure (bad URL, unreachable host, ...) yields the sentinel 110.
    """
    try:
        target = urlparse(url)
        conn = HTTP(target[1])
        conn.putrequest('HEAD', target[2])
        conn.endheaders()
        reply = conn.getreply()
        return reply[0]
    except Exception:
        # 110 is this project's "could not determine status" sentinel
        return 110
def gethttpfile(url, size=1024 * 1024): from urllib import splithost from httplib import HTTP if not url.startswith('http:'): raise ValueError, "URL %s" % url host, selector = splithost(url[5:]) h = HTTP(host) h.putrequest('GET', url) h.endheaders() h.getreply() res = h.getfile().read(size) h.close() return res
def checkURL(self, url):
    """Probe *url* with a HEAD request.

    Returns True only when the server answers HTTP 200; any other
    outcome (unreachable server, different status, malformed URL)
    yields False.
    """
    try:
        target = urlparse(url)
        conn = HTTP(target[1])
        conn.putrequest('HEAD', target[2])
        conn.endheaders()
        return conn.getreply()[0] == 200
    except Exception:
        return False
def check_url(url): try: p = urlparse(url) h = HTTP(p[1]) h.putrequest('HEAD', p[2]) h.endheaders() if h.getreply()[0] == 200: return True else: return False except Exception, e: log(str(e)) return False
def check_url(url): """check_url(url:str) -> bool Check if a URL exists using the url's header. """ try: p = urlparse(url) h = HTTP(p[1]) h.putrequest('HEAD', p[2]) h.endheaders() if h.getreply()[0] == 200: return True else: return False except: return False
def httpread(url_base, url_tail):
    """GET http://<url_base><url_tail>; return the body, or "" on non-200."""
    content = ""
    conn = HTTP(url_base)
    conn.putrequest('GET', url_tail)
    conn.putheader('Accept', 'text/html')
    conn.putheader('Accept', 'text/plain')
    conn.endheaders()
    status, reason, hdrs = conn.getreply()
    if status == 200:
        page = conn.getfile()
        content = page.read()  # raw HTML, returned unparsed
    return content
def _checkURL(url):
    """Check whether *url* is alive.

    Both 200 (OK) and 401 (unauthorized) count as alive -- the CloudMan
    UI is password protected, so 401 still proves the server is there.
    """
    try:
        target = urlparse(url)
        conn = HTTP(target[1])
        conn.putrequest('HEAD', target[2])
        conn.endheaders()
        status = conn.getreply()[0]
        if status in (200, 401):
            return True
    except Exception:
        pass  # no response / unreachable -> treated as dead
    return False
def RetrieveAsFile(self, host, path=''):
    """GET *path* from *host* and return the response as a file-like
    object, or None on connection failure or a non-200 status.

    Failures are reported through self.logprint.
    """
    from httplib import HTTP
    try:
        h = HTTP(host)
    except Exception:
        # BUG FIX: narrowed from a bare "except:"; connection-setup
        # errors are still logged, but SystemExit/KeyboardInterrupt
        # now propagate instead of being swallowed.
        self.logprint(
            "Failed to create HTTP connection to %s... is the network available?" % (host))
        return None
    h.putrequest('GET', path)
    h.putheader('Accept', 'text/html')
    h.putheader('Accept', 'text/plain')
    h.endheaders()
    errcode, errmsg, headers = h.getreply()
    if errcode != 200:
        self.logprint("HTTP error code %d: %s" % (errcode, errmsg))
        return None
    f = h.getfile()
    return f
def __makeRequest(self, url, path):
    """GET url+path (through HTTP_PROXY when configured) and return
    (status_code, body).

    Raises HTTPCaptachaUnexpectedResponse for any status other than
    200, 403 or 404.
    """
    from httplib import HTTP
    conn = HTTP(HTTP_PROXY or url.replace("http://", ""))
    conn.putrequest("GET", url + path)
    conn.putheader("Accept", "text/plain")
    conn.endheaders()
    status, reason, hdrs = conn.getreply()
    if status not in (200, 403, 404):
        raise HTTPCaptachaUnexpectedResponse(status, reason)
    body_file = conn.getfile()
    body = body_file.read()
    body_file.close()
    return status, body
def action_list(): h = HTTP(surrogate_conf.get('hostname')) h.putrequest('GET','/module/mod_cluster_admin/vip') userpass = "******" % (surrogate_conf.get('username'),surrogate_conf.get('password')) h.putheader('Authorization','Basic '+base64.standard_b64encode(userpass)) h.endheaders() errcode, errmsg, headers = h.getreply() if errcode == 200: f = h.getfile() data= f.read() vip = json.loads(data) # Convert json string to python array object formatstr = "%-35s %-8s %s" # reusable format string for header and data output lines print formatstr % ("Address", "Status","Nodes") print "===============================================================================" for v in vip["items"]: print formatstr % (v["address"], v["status"],v["nodes"]) elif errcode == 401: print "Authentication error." else: print "HTTP %s: %s" % (errcode,errmsg) return
def server_lookup(self, addr):
    """Query the pharmcheck service for *addr*.

    Returns [1, lines] (body split into newline-stripped lines) on
    success, or [-2, status, reason] for any non-200 reply.
    """
    conn = HTTP("deinadmin.de")
    conn.putrequest("GET", "/projects/pharmcheck/pharmcheck.php?name=" + addr)
    conn.putheader("Host", "deinadmin.de")
    conn.putheader("User-Agent", "pharmcheck v0.3 2005-08-14")
    conn.endheaders()
    status, reason, hdrs = conn.getreply()
    if status != 200:
        return [-2, status, reason]
    fd = conn.getfile()
    lines = []
    for raw in iter(fd.readline, ''):
        lines.append(raw[:-1])  # chop the trailing \n -- Windows-compatible?
    fd.close()
    return [1, lines]
def download_web_page(domain, url):
    """GET *url* from *domain*.

    Returns the response body as a string, or None when the connection
    cannot be opened, the reply cannot be read, or the status is not
    200 (the latter two are reported on stderr).
    """
    try:
        h = HTTP(domain)
        h.putrequest("GET", url)
        h.putheader('Accept', 'text/html')
        h.putheader('Accept', 'text/plain')
        h.endheaders()
    except Exception:
        # BUG FIX: narrowed from a bare "except:" so SystemExit and
        # KeyboardInterrupt are no longer silently swallowed.
        return None
    try:
        errcode, errmsg, headers = h.getreply()
    except Exception:
        # BUG FIX: narrowed from a bare "except:" (same rationale).
        sys.stderr.write("Error in receiving response from " + domain + '\n')
        return None
    if errcode != 200:
        sys.stderr.write("Error in receiving response from " + domain + '\n')
        return None
    results = h.getfile().read()
    return results
from httplib import HTTP

# Fetch http://www.zoo-berlin.de/?Knut with the legacy httplib (Python 2)
# interface and dump the status line and the full body to stdout.
req = HTTP("www.zoo-berlin.de")
req.putrequest("GET", "/?Knut")
req.putheader("Accept", "text/html")
req.putheader("User-Agent", "Python26")
req.endheaders()
ec, em, h = req.getreply()  # status code, reason phrase, headers
print ec, em
fd = req.getfile()
textlines = fd.read()  # entire body as one string, despite the plural name
print textlines
fd.close()
def __init__(self):
    # Open a legacy-httplib HTTP connection to the module-level
    # API_DOMAIN; the handle is kept for the lifetime of this instance.
    self._h = HTTP(API_DOMAIN)
def make_connection(self, host):
    """Record the real target *host* and return an HTTP connection that
    actually talks to the configured proxy (self.proxy)."""
    self.realhost = host
    proxy_conn = HTTP(self.proxy)
    return proxy_conn
#a = "000E00000000000000000000000000001B0000000000000000000000030000" #b = hex(cid)[2:].zfill(8) + hex(lac)[2:].zfill(8) #c = hex(divmod(mnc,100)[1])[2:].zfill(8) + hex(divmod(mnc,100)[0])[2:].zfill(8) #string = (a + b + c + "FFFFFFFF00000000").decode("hex") #r = urllib.urlopen("http://www.google.com/glm/mmap",string).read().encode("hex") #if len(r) > 14: # lon, lat = float(int(r[14:22], 16)) / 1000000, float(int(r[22:30], 16)) / 1000000 cid, lac, mnc = [int(cid, 16), int(lac, 16), int(mnc)] country = 'fr' device = 'Nokia N95 8Gb' b_string = pack('>hqh2sh13sh5sh3sBiiihiiiiii', 21, 0, len(country), country, len(device), device, len('1.3.1'), "1.3.1", len('Web'), "Web", 27, 0, 0, 3, 0, cid, lac, 0, 0, 0, 0) http = HTTP('www.google.com', 80) http.putrequest('POST', '/glm/mmap') http.putheader('Content-Type', 'application/binary') http.putheader('Content-Length', str(len(b_string))) http.endheaders() http.send(b_string) code, msg, headers = http.getreply() bytes = http.file.read() (a, b, errorCode, latitude, longitude, c, d, e) = unpack(">hBiiiiih", bytes) lat = latitude / 1000000.0 lon = longitude / 1000000.0 print "[+] Coordinates : %f:%f" % (lat, lon) if not BTS.if_already_mapped(lat, lon):
def __call__(self, *args, **kw):
    """Invoke the remote method over HTTP.

    Positional args are mapped onto self.args names and merged into kw;
    a single positional arg with method PUT becomes the raw request
    body.  Marshals kw into a query string, chooses GET/POST/PUT,
    applies basic auth, and returns (headers, response) on any 2xx
    reply; other statuses are delegated to self.handleError.
    """
    method = self.method
    # PUT with exactly one positional argument: that argument IS the body.
    if method == 'PUT' and len(args) == 1 and not kw:
        query = [args[0]]
        args = ()
    else:
        query = []
    # Fold positional arguments into kw using the declared arg names.
    for i in range(len(args)):
        try:
            k = self.args[i]
            if kw.has_key(k):
                raise TypeError, 'Keyword arg redefined'
            kw[k] = args[i]
        except IndexError:
            raise TypeError, 'Too many arguments'
    # Copy configured headers, normalizing dashes in header names.
    headers = {}
    for k, v in self.headers.items():
        headers[translate(k, dashtrans)] = v
    method = self.method
    if headers.has_key('Content-Type'):
        content_type = headers['Content-Type']
        if content_type == 'multipart/form-data':
            # Explicit multipart request: hand off entirely.
            return self._mp_call(kw)
    else:
        content_type = None
        # File-like values force a multipart POST.
        if not method or method == 'POST':
            for v in kw.values():
                if hasattr(v, 'read'):
                    return self._mp_call(kw)
    # Marshal each kw item: registered marshallers first, else quote().
    can_marshal = type2marshal.has_key
    for k, v in kw.items():
        t = type(v)
        if can_marshal(t):
            q = type2marshal[t](k, v)
        else:
            q = '%s=%s' % (k, quote(v))
        query.append(q)
    url = self.rurl
    if query:
        query = '&'.join(query)
        method = method or 'POST'
        if method == 'PUT':
            headers['Content-Length'] = str(len(query))
        if method != 'POST':
            # Non-POST: move the query into the URL, empty the body.
            url = "%s?%s" % (url, query)
            query = ''
        elif not content_type:
            headers['Content-Type'] = 'application/x-www-form-urlencoded'
            headers['Content-Length'] = str(len(query))
    else:
        method = method or 'GET'
    # Basic auth, unless the caller already supplied an Authorization header.
    if (self.username and self.password and not headers.has_key('Authorization')):
        headers['Authorization'] = ("Basic %s" % encodestring(
            '%s:%s' % (self.username, self.password)).replace('\012', ''))
    try:
        h = HTTP(self.host, self.port)
        # NOTE(review): the request is sent to self.rurl, not the local
        # "url" that had the query string appended above -- confirm this
        # is intended for GET-style methods.
        h.putrequest(method, self.rurl)
        for hn, hv in headers.items():
            h.putheader(translate(hn, dashtrans), hv)
        h.endheaders()
        if query:
            h.send(query)
        ec, em, headers = h.getreply()
        response = h.getfile().read()
    except:
        # Any transport failure is re-raised as NotAvailable carrying
        # the original exception, the URL and the query.
        raise NotAvailable, RemoteException(NotAvailable, sys.exc_info()[1], self.url, query)
    # Any 2xx status counts as success.
    if (ec - (ec % 100)) == 200:
        return (headers, response)
    self.handleError(query, ec, em, headers, response)
# # Update TPC News Address # # 1997.02.09 Jim Tittsler [email protected] # import os import regex from httplib import HTTP hostname = "www.dtinet.or.jp" url = "/~tpc/address.htm" print "TPC News Address Updater 1997.02.10 23:20JST [email protected]" print "Making connection to http://%s%s..." % (hostname, url) h = HTTP(hostname) h.putrequest('GET', url) h.putheader('Accept', 'text/html') h.putheader('Accept', 'text/plain') h.endheaders() errcode, errmsg, headers = h.getreply() if errcode != 200: print "Unable to connect" print "Reading current address..." f = h.getfile() cahtml = f.readlines() f.close() print " ", cahtml[1] windir = "C:\WINDOWS"
def http_import(self, url, method='GET', auth=None, parse_qs=0, timeout=5): HTTP_PREFIX = 'http://' # Get Query-String. qs = '' i = url.find('?') if i > 0: qs = url[i + 1:] url = url[:i] # Get Host. host = '' servername = url if servername.startswith(HTTP_PREFIX): servername = servername[len(HTTP_PREFIX):] if servername.find('/') > 0: servername = servername[:servername.find('/')] useproxy = True noproxy = ['localhost', '127.0.0.1'] + filter( lambda x: len(x) > 0, map(lambda x: x.strip(), self.getConfProperty('HTTP.noproxy', '').split(','))) for noproxyurl in noproxy: if fnmatch.fnmatch(servername, noproxyurl): useproxy = False break if useproxy: host = self.getConfProperty('HTTP.proxy', host) if len(host) == 0: # Remove HTTP-Prefix. if url.startswith(HTTP_PREFIX): url = url[len(HTTP_PREFIX):] i = url.find('/') if i > 0: host = url[:i] url = url[i:] else: host = url url = '/' # Get Port. i = host.find(':', max(0, host.find('@'))) port = 80 if i > 0: port = int(host[i + 1:]) host = host[:i] # Open HTTP connection. writeLog( self, "[http_import.%s]: %s:%i --> %s?%s" % (method, host, port, url, qs)) req = HTTP(host, port) # Set request-headers. if method.upper() == 'GET': if len(qs) > 0: qs = '?' + qs req.putrequest(method, url + qs) req.putheader('Host', host) authtobasic(auth, req) req.putheader('Accept', '*/*') req.endheaders() elif method.upper() == 'POST': req.putrequest(method, url) req.putheader('Host', host) authtobasic(auth, req) req.putheader('Accept', '*/*') req.putheader('Content-type', 'application/x-www-form-urlencoded') req.putheader('Content-length', '%d' % len(qs)) req.endheaders() # Send query string req.send(qs) # Send request. 
reply_code, message, headers = req.getreply() #### get parameter from content if reply_code == 404 or reply_code >= 500: error = "[%i]: %s at %s [%s]" % (reply_code, message, url, method) writeLog(self, "[http_import.error]: %s" % error) raise zExceptions.InternalError(error) elif reply_code == 200: # get content f = req.getfile() content = f.read() f.close() rtn = None if parse_qs: try: # return dictionary of value lists rtn = cgi.parse_qs(content, keep_blank_values=1, strict_parsing=1) except: # return string rtn = content else: # return string rtn = content if port != 80: rtn = rtn.replace('%s%s/' % (HTTP_PREFIX, host), '%s%s:%i/' % (HTTP_PREFIX, host, port)) return rtn else: result = '[' + str(reply_code) + ']: ' + str(message) writeLog(self, "[http_import.result]: %s" % result) return result
from urllib import splithost
from httplib import HTTP

# Prefer a RAM-backed tmpfs for the working directory; fall back to /tmp.
if os.path.exists("/dev/shm"):
    TMPDIR = "/dev/shm"
else:
    TMPDIR = "/tmp"
TMPDIR += "/hda-analyzer"
print "Using temporary directory: %s" % TMPDIR
print "You may remove this directory when finished or if you like to"
print "download the most recent copy of hda-analyzer tool."
if not os.path.exists(TMPDIR):
    os.mkdir(TMPDIR)
# Download every file in FILES (module-level list, defined elsewhere)
# from URL, skipping files already cached in TMPDIR.
for f in FILES:
    dest = TMPDIR + '/' + f
    if os.path.exists(dest):
        print "File cached " + dest
        continue
    print "Downloading file %s" % f
    # URL[5:] strips the "http:" scheme prefix for splithost.
    host, selector = splithost(URL[5:])
    h = HTTP(host)
    h.putrequest('GET', URL + f)
    h.endheaders()
    h.getreply()  # status deliberately ignored; body read regardless
    contents = h.getfile().read(2 * 1024 * 1024)  # cap each file at 2 MiB
    h.close()
    open(dest, "w+").write(contents)
print "Downloaded all files, executing %s" % FILES[0]
# Run the first downloaded file with python2, forwarding our CLI args.
os.system("python2 %s" % TMPDIR + '/' + FILES[0] + ' ' + ' '.join(sys.argv[1:]))
from httplib import HTTP

# Minimal legacy-httplib (Python 2) GET example against example.com.
req = HTTP("www.example.com")
req.putrequest("GET", "/index.html")
req.putheader("Accept", "text/html")
req.putheader("User-Agent", "MyPythonScript")
req.endheaders()
ec, em, h = req.getreply()  # status code, reason phrase, headers
print(ec, em)  # 200 OK  (under Python 2 this prints a tuple)
fd = req.getfile()
textlines = fd.read()  # whole body as one string
fd.close()
def getstatus(url):
    """Return the numeric HTTP status from a HEAD request for *url*."""
    parts = urlparse(url)
    conn = HTTP(parts[1])
    conn.putrequest('HEAD', parts[2])
    conn.endheaders()
    status, _reason, _headers = conn.getreply()
    return status