def __get_stats(self, server):
    """Fetch the uwsgi stats JSON from *server* on port 81.

    Retries while the connection is reset (errno 104, ECONNRESET).
    Returns the parsed JSON dict on success, a fallback dict when the
    body is not valid JSON, or an error string when the emperor never
    answered.
    """
    import socket
    err = 104  # ECONNRESET sentinel: forces at least one attempt
    response = None
    res = None
    while err == 104:
        try:
            conn = HTTPConnection(server, 81, timeout=1)
            conn.request(u'GET', u'/', None, {})
            response = conn.getresponse()
            res = response.read()
            conn.close()
            err = 0
            self.logger.debug(u'Connect %s uwsgi stats' % (server))
        except socket.error as ex:
            # BUG FIX: ex[0] only works on py2; .errno works on both.
            err = ex.errno
    # BUG FIX: if every attempt failed, `response` was unbound and the
    # original raised NameError here instead of reporting the outage.
    if response is not None and response.status == 200:
        try:
            res = json.loads(res)
            self.logger.info(res)
        except ValueError:
            # body was not valid JSON (bare except narrowed)
            res = {u'vassals': None, u'blacklist': None}
    else:
        self.logger.error(u'Emperor %s does not respond' % server)
        res = u'Emperor %s does not respond' % server
    return res
def send_email(request):
    """Fetch a graphite-rendered image and email it to the requested recipients.

    Expects GET params 'to' (comma-separated addresses) and 'url' (image URL).
    Returns an HttpResponse of "OK", or the traceback text on any failure.
    """
    try:
        recipients = request.GET['to'].split(',')
        url = request.GET['url']
        proto, server, path, query, frag = urlsplit(url)
        if query:
            path += '?' + query
        conn = HTTPConnection(server)
        conn.request('GET', path)
        resp = conn.getresponse()
        assert resp.status == 200, "Failed HTTP response %s %s" % (resp.status, resp.reason)
        rawData = resp.read()
        conn.close()
        message = MIMEMultipart()
        message['Subject'] = "Graphite Image"
        message['To'] = ', '.join(recipients)
        message['From'] = 'composer@%s' % gethostname()
        text = MIMEText("Image generated by the following graphite URL at %s\r\n\r\n%s" % (ctime(), url))
        image = MIMEImage(rawData)
        image.add_header('Content-Disposition', 'attachment',
                         filename="composer_" + strftime("%b%d_%I%M%p.png"))
        message.attach(text)
        message.attach(image)
        s = SMTP(settings.SMTP_SERVER)
        s.sendmail('composer@%s' % gethostname(), recipients, message.as_string())
        s.quit()
        return HttpResponse("OK")
    except Exception:
        # BUG FIX: was a bare except, which also swallowed
        # SystemExit/KeyboardInterrupt; deliberately best-effort otherwise.
        return HttpResponse(format_exc())
def check_url_path(path, redirected=0):
    """Return True when *path* is reachable over HTTP, following up to
    MAX_REDIRECTION_ALLOWED redirects; False on error or too many hops.

    A bare server URL (no path component) is probed with urlopen; a full
    path is probed with a cheap HEAD request.
    """
    if redirected > MAX_REDIRECTION_ALLOWED:
        return False
    try:
        code = ''
        parse_result = urlparse(path)
        server_name = parse_result.netloc
        urlpath = parse_result.path
        if not urlpath:
            # Just a server, as with a repo.
            with contextlib.closing(urllib2.urlopen(path)) as res:
                code = res.getcode()
        else:
            # socket.gaierror could be raised,
            # which is a child class of IOError
            conn = HTTPConnection(server_name, timeout=15)
            # Don't try to get the whole file:
            conn.request('HEAD', path)
            response = conn.getresponse()
            code = response.status
            conn.close()
            if code == 200:
                return True
            elif code in (301, 302):
                # BUG FIX: the original iterated getheaders() and hit its
                # `else: return False` as soon as the FIRST header was not
                # 'location'; getheader() does the case-insensitive lookup.
                location = response.getheader('location')
                if location:
                    return check_url_path(location, redirected + 1)
            else:
                return False
    except (urllib2.URLError, HTTPException, IOError, ValueError):
        return False
    return True
def retrieveHttpResponse(url):
    """Issue a GET for *url* pretending to be MSIE 6 and return
    (status, reason, body); each element is None until filled in.

    The connection is always closed, even when the request raises.
    """
    host = getHostFromUrl(url)
    status, reason, responseText = None, None, None
    if host is not None:  # idiom fix: was the Yoda-style `None != host`
        conn = HTTPConnection(host)
    else:
        # NOTE(review): HTTPConnection() without a host raises TypeError,
        # so this branch looks unreachable/broken -- confirm intent.
        conn = HTTPConnection()
    conn.connect()
    try:
        conn.putrequest("GET", url)
        conn.putheader("Accept", "image/gif, image/x-xbitmap, image/jpeg, image/pjpeg, */*")
        conn.putheader("Host", host)
        conn.putheader("User-Agent", "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; .NET CLR 1.1.4322)")
        conn.putheader("Connection", "Keep-Alive")
        conn.endheaders()
        resp = conn.getresponse()
        status, reason, responseText = resp.status, resp.reason, resp.read()
    finally:
        conn.close()
    return status, reason, responseText
def check_url_path(path, redirected=0):
    """Return True when *path* is reachable over HTTP, following up to
    MAX_REDIRECTION_ALLOWED redirects; False on error or too many hops.

    A bare server URL (no path component) is probed with urlopen; a full
    path is probed with a cheap HEAD request.
    """
    if redirected > MAX_REDIRECTION_ALLOWED:
        return False
    try:
        code = ''
        parse_result = urlparse(path)
        server_name = parse_result.netloc
        urlpath = parse_result.path
        if not urlpath:
            # Just a server, as with a repo.
            with contextlib.closing(urllib2.urlopen(path)) as res:
                code = res.getcode()
        else:
            # socket.gaierror could be raised,
            # which is a child class of IOError
            conn = HTTPConnection(server_name, timeout=15)
            # Don't try to get the whole file:
            conn.request('HEAD', path)
            response = conn.getresponse()
            code = response.status
            conn.close()
            if code == 200:
                return True
            elif code in (301, 302):
                # BUG FIX: the original iterated getheaders() and hit its
                # `else: return False` as soon as the FIRST header was not
                # 'location'; getheader() does the case-insensitive lookup.
                location = response.getheader('location')
                if location:
                    return check_url_path(location, redirected + 1)
            else:
                return False
    except (urllib2.URLError, HTTPException, IOError, ValueError):
        return False
    return True
def __request(self, body, headers):
    """
    POST *body* with *headers* to the switch's basicevent service and
    decode the reported BinaryState.

    :param body: POST message body
    :param headers: POST message headers
    :return: True when on, False when off, WemoSwitch.ERROR_STATE otherwise.
    """
    connection = HTTPConnection(self.server, self.port)
    connection.request("POST", "/upnp/control/basicevent1", body, headers)
    reply = connection.getresponse()
    state = WemoSwitch.ERROR_STATE
    if reply.status == 200:
        # Parse the received XML and search for 'BinaryState' element
        document = et.fromstring(reply.read().decode("utf-8"))
        node = document.find('.//BinaryState')
        state = node.text if node is not None else WemoSwitch.ERROR_STATE
    connection.close()
    # 0 = off, 1 = on, -1 or Error = error
    if state == '1':
        return True
    if state == '0':
        return False
    return WemoSwitch.ERROR_STATE
def display_file(self, fname):
    """Fetch *fname* through the mj_wwwusr directory-traversal CGI hole.

    Returns the file body, '' when the server answered with an error
    report page, or a transfer-error marker string on network failure.
    """
    request = '/cgi-bin/mj_wwwusr?passw=&list=GLOBAL&user=&func=help&extra=/../../../../../../../../%s' % fname
    try:
        if self.proxy_host and self.proxy_port > 0:
            # Through the proxy: request the absolute URL.
            cnx = HTTPConnection(self.proxy_host, self.proxy_port)
            prefix = "http://" if self.https == 0 else "https://"
            cnx.request("GET", prefix + self.host + "/" + request, None, {"Host": self.website})
        else:
            cnx = HTTPConnection(self.host, self.port) if self.https == 0 else HTTPSConnection(self.host, self.port)
            cnx.request("GET", request, None, {"Host": self.website})
        resp = cnx.getresponse()
        body = resp.read()
        try:
            cnx.close()
        except Exception:
            pass
    except Exception:
        try:
            cnx.close()
        except Exception:
            pass
        body = "HTTP(S) Transfer error"
    return '' if body.find("Error report") >= 0 else body
def testImage(self):
    """The blank GIF must be served with HTTP 200."""
    connection = HTTPConnection('localhost:8080')
    connection.request("GET", "/images/blank.gif")
    reply = connection.getresponse()
    reply.read()  # drain the body before closing
    self.assertEqual(reply.status, 200)
    connection.close()
def testMessage(self):
    """Exercise /message with valid, missing and negative ids, then the
    onlymsg=true variant which must return only the raw HTML body."""
    conn = HTTPConnection('localhost:8080')
    conn.request("GET", "/message?id=1")
    request = conn.getresponse()
    result = request.read()
    self.assertEqual(request.status, 200)
    self.assertGreater(result.find('<div class="messagebox">'), 0)
    conn.request("GET", "/message?id=2")
    request = conn.getresponse()
    result = request.read()
    self.assertEqual(request.status, 200)
    self.assertGreater(result.find('Please select an email message from the list'), 0)
    conn.request("GET", "/message?id=-2")
    request = conn.getresponse()
    result = request.read()
    self.assertEqual(request.status, 200)
    self.assertGreater(result.find('Please select an email message from the list'), 0)
    # BUG FIX: the original opened a second connection here without
    # closing the first one, leaking a socket.
    conn.close()
    conn = HTTPConnection('localhost:8080')
    conn.request("GET", "/message?id=1&onlymsg=true")
    request = conn.getresponse()
    result = request.read()
    self.assertEqual(request.status, 200)
    # (dead `result.find('The HTML Body')` expression statement removed)
    self.assertEqual(result, 'The HTML Body')
    conn.close()
def test_response_unsupported_method(self):
    """A request whose method is None must be rejected with 501."""
    connection = HTTPConnection('localhost', 80, True)
    connection.request(None, "/")
    reply = connection.getresponse()
    reply.read()
    connection.close()
    self.assertEqual(reply.status, 501)
def testResponse404(self):
    """Requesting a missing image must yield HTTP 404."""
    connection = HTTPConnection('localhost:8080')
    connection.request("GET", "/images/bad_file.gif")
    reply = connection.getresponse()
    reply.read()
    self.assertEqual(reply.status, 404)
    connection.close()
def test_response_bad_status_line(self):
    """A lowercase method name is not a valid request; expect 501."""
    connection = HTTPConnection('localhost', 80, True)
    connection.request("get", "/")
    reply = connection.getresponse()
    reply.read()
    connection.close()
    self.assertEqual(reply.status, 501)
def test_response_valid_request(self):
    """A plain GET / against this server is expected to answer 400."""
    connection = HTTPConnection('localhost', 80, True)
    connection.request("GET", "/")
    reply = connection.getresponse()
    reply.read()
    connection.close()
    self.assertEqual(reply.status, 400)
def test_response_2(self):
    """A POST without a target path must be answered with 400."""
    connection = HTTPConnection('localhost', 80, True)
    connection.request("POST", None)
    reply = connection.getresponse()
    reply.read()
    connection.close()
    self.assertEqual(reply.status, 400)
def perform(level, box, options): host = options["Host"] # hack conn = HTTPConnection(host, timeout=10000) # hack path = options["Path"] # hack horizontal = options["Horizontal"] # hack print "Retrieving " + path + " from " + host # hack conn.request("GET", path) response = conn.getresponse() print response.status, response.reason data = response.read() conn.close() reader = png.Reader(bytes=data) (width, height, pixels, metadata) = reader.asRGBA8() pixels = list(pixels) for x in xrange(0, width): # hack y = height # hack while y > 0: # hack colour = getPixel(pixels, x, height-y) # hack if not transparent(colour): (mat, dat) = closestMaterial(colour) if horizontal == True: level.setBlockAt(box.minx + x, box.miny, box.minz+y, mat) # hack level.setBlockDataAt(box.minx + x, box.miny, box.minz+y, dat) # hack else: level.setBlockAt(box.minx + x, box.miny+y, box.minz, mat) # hack level.setBlockDataAt(box.minx + x, box.miny+y, box.minz, dat) # hack y = y - 1 # hack
def perform(level, box, options): host = options["Host"] # hack conn = HTTPConnection(host, timeout=10000) # hack path = options["Path"] # hack horizontal = options["Horizontal"] # hack print "Retrieving " + path + " from " + host # hack conn.request("GET", path) response = conn.getresponse() print response.status, response.reason data = response.read() conn.close() reader = png.Reader(bytes=data) (width, height, pixels, metadata) = reader.asRGBA8() pixels = list(pixels) for x in xrange(0, width): # hack y = height # hack while y > 0: # hack colour = getPixel(pixels, x, height - y) # hack if not transparent(colour): (mat, dat) = closestMaterial(colour) if horizontal == True: level.setBlockAt(box.minx + x, box.miny, box.minz + y, mat) # hack level.setBlockDataAt(box.minx + x, box.miny, box.minz + y, dat) # hack else: level.setBlockAt(box.minx + x, box.miny + y, box.minz, mat) # hack level.setBlockDataAt(box.minx + x, box.miny + y, box.minz, dat) # hack y = y - 1 # hack
def send_youtube_information(msg, sock): global channels matches = re.finditer('youtube\.com(?P<link>/watch\S*)', msg) if not matches: return for match in matches: http_connection = HTTPConnection('www.youtube.com') http_connection.request('GET', match.group('link')) http_response = http_connection.getresponse() if http_response.status != 200: print "Error occured when fetching data" continue data = http_response.read(4096) titles = re.finditer('<title>(?P<title>.*)</title>', data, re.DOTALL) for title in titles: video_title = title.group('title') video_title = video_title.split('-', 1) video_title = video_title[1].strip() msg = "PRIVMSG " + channels + u" :\u0002" + video_title + \ u"\u000F www.youtube.com" + match.group('link') + "\r\n" sock.send(msg) http_connection.close() return
class HTTPSpeakerClient:
    """Emacspeak HTTP speech client, for HTTPSpeaker instances."""

    def __init__(self, host="127.0.0.1", port=8000):
        "Initialize client to connect to server at given host and port."
        self._connection = HTTPConnection(host, port)

    def postCommand(self, command, arg=""):
        """Post command, with argument arg (default, empty), to the speech server.
        Returns the body of the server's HTTP response, if any.
        On error, HTTPSpeakerError is raised."""
        payload = command if not arg else command + ": " + arg
        self._connection.request("POST", "/", payload, {"Content-type": "text/plain"})
        reply = self._connection.getresponse()
        if reply.status != 200:
            raise HTTPSpeakerError(reply.status, reply.reason)
        return reply.read()

    def speak(self, text):
        "Speak the supplied string."
        self.postCommand("speak", text)

    def stop(self):
        "Stop speaking."
        self.postCommand("stop")

    def isSpeaking(self):
        "Return '0' when not speaking."
        return self.postCommand("isSpeaking")

    def close(self):
        "Close the connection to the speech server."
        self._connection.close()
def create(self, query, type, feed=True, freq=ALERT_FREQS[FREQ_AS_IT_HAPPENS]):
    """
    Creates a new alert.

    :param query: the search terms the alert will match
    :param type: a value in :attr:`ALERT_TYPES` indicating the desired results
    :param feed: whether to deliver results via feed or email
    :param freq: a value in :attr:`ALERT_FREQS` indicating how often results
        should be delivered; used only for email alerts (feed alerts are
        updated in real time)
    """
    headers = {'Cookie': self.cookie,
               'Content-type': 'application/x-www-form-urlencoded; charset=utf-8'}
    form = {
        'q': query,
        'e': DELIVER_FEED if feed else self.email,
        'f': ALERT_FREQS[FREQ_AS_IT_HAPPENS] if feed else freq,
        't': ALERT_TYPES[type],
        'sig': self._scrape_sig(),
    }
    conn = HTTPConnection('www.google.com')
    conn.request('POST', '/alerts/create?hl=en&gl=us', safe_urlencode(form), headers)
    response = conn.getresponse()
    try:
        # Google acknowledges a successful create with a 302 redirect.
        if response.status != 302:
            raise UnexpectedResponseError(response.status,
                                          response.getheaders(),
                                          response.read())
    finally:
        conn.close()
class RedditParser:
    """Scrapes reddit.com subreddit pages and extracts the names of other
    subreddits linked from them (plus subscriber data via the HTML parser)."""

    def __init__(self):
        # Persistent connection + custom HTML parser reused across lookups.
        self.c = HTTPConnection('www.reddit.com')
        self.r = RedditHTMLParser()

    def get_info(self, subreddit):
        """Return (set of lower-cased linked subreddit names, Subscribers)
        for *subreddit*; returns None (bare return) when the fetch fails."""
        try:
            self.c.request('GET', '/r/%s/' % subreddit.lower())
            html = self.c.getresponse().read()
        except Exception as e:
            error("Unable to get subreddit '%s'\n%s" % (subreddit, e))
            return
        self.r.reset3()
        self.r.feed(html)
        reddits = set()
        # A candidate may appear as an href or as the link text; the href
        # forms are checked first.  The order of these prefix tests matters.
        for link, name in zip(self.r.Links, self.r.LinkNames):
            link = link.strip()
            name = name.strip()
            if link.find(' ') != -1:
                continue
            elif link[:24] == 'http://www.reddit.com/r/':
                reddit = link[24:]
            elif link[:3] == '/r/':
                reddit = link[3:]
            elif name.find(' ') != -1:
                continue
            elif name[:24] == 'http://www.reddit.com/r/':
                reddit = name[24:]
            elif name[:17] == 'www.reddit.com/r/':
                reddit = name[17:]
            elif name[:13] == 'reddit.com/r/':
                reddit = name[13:]
            elif name[:3] == '/r/':
                reddit = name[3:]
            elif name[:2] == 'r/':
                reddit = name[2:]
            else:
                continue
            # Strip a trailing slash, then keep only bare names (no deeper path).
            if reddit[-1] == '/':
                reddit = reddit[:-1]
            if reddit.find('/') == -1:
                reddits.add(reddit.lower())
        return reddits, self.r.Subscribers

    def __del__(self):
        self.r.close()
        self.c.close()
def getSuggestions(self, queryString): if not queryString: return None else: query = '/complete/search?output=toolbar&client=youtube&xml=true&ds=yt' if self.gl: query += '&gl=' + self.gl if self.hl: query += '&hl=' + self.hl query += '&jsonp=self.getSuggestions&q=' + quote(queryString) try: connection = HTTPConnection('google.com') connection.request('GET', query, '', {'Accept-Encoding': 'UTF-8'}) except (CannotSendRequest, gaierror, error): print "[YouTube] Can not send request for suggestions" else: try: response = connection.getresponse() except BadStatusLine: print "[YouTube] Can not get a response from google" else: if response.status == 200: data = response.read() try: charset = response.getheader( 'Content-Type', 'text/xml; charset=ISO-8859-1').rsplit('=')[1] except: charset = 'ISO-8859-1' connection.close() return data.decode(charset).encode('utf-8') if connection: connection.close() return None
def send_email(request):
    """Fetch a graphite-rendered image and email it to the requested
    recipients; returns "OK" or the formatted traceback on failure."""
    try:
        recipients = request.GET["to"].split(",")
        url = request.GET["url"]
        proto, server, path, query, frag = urlsplit(url)
        if query:
            path += "?" + query
        conn = HTTPConnection(server)
        conn.request("GET", path)
        try:
            # Python 2.7+, use buffering of HTTP responses
            resp = conn.getresponse(buffering=True)
        except TypeError:
            # Python 2.6 and older
            resp = conn.getresponse()
        assert resp.status == 200, "Failed HTTP response %s %s" % (resp.status, resp.reason)
        rawData = resp.read()
        conn.close()
        message = MIMEMultipart()
        message["Subject"] = "Graphite Image"
        message["To"] = ", ".join(recipients)
        message["From"] = "composer@%s" % gethostname()
        text = MIMEText("Image generated by the following graphite URL at %s\r\n\r\n%s" % (ctime(), url))
        image = MIMEImage(rawData)
        image.add_header("Content-Disposition", "attachment",
                         filename="composer_" + strftime("%b%d_%I%M%p.png"))
        message.attach(text)
        message.attach(image)
        s = SMTP(settings.SMTP_SERVER)
        s.sendmail("composer@%s" % gethostname(), recipients, message.as_string())
        s.quit()
        return HttpResponse("OK")
    except Exception:
        # BUG FIX: was a bare except, which also swallowed
        # SystemExit/KeyboardInterrupt; deliberately best-effort otherwise.
        return HttpResponse(format_exc())
def display_file(self, fname):
    """Fetch *fname* through the /sdk URL-encoded directory-traversal hole.

    Returns the file body, "" when the server answered with an error
    report page, or a transfer-error marker string on network failure.
    """
    request = '/sdk/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/%2E%2E/' + fname
    try:
        if self.proxy_host and self.proxy_port > 0:
            # Through the proxy: request the absolute URL.
            cnx = HTTPConnection(self.proxy_host, self.proxy_port)
            prefix = "http://" if self.https == 0 else "https://"
            cnx.request("GET", prefix + self.host + "/" + request, None, {"Host": self.website})
        else:
            cnx = HTTPConnection(self.host, self.port) if self.https == 0 else HTTPSConnection(self.host, self.port)
            cnx.request("GET", request, None, {"Host": self.website})
        resp = cnx.getresponse()
        body = resp.read()
        try:
            cnx.close()
        except Exception:
            pass
    except Exception:
        try:
            cnx.close()
        except Exception:
            pass
        body = "HTTP(S) Transfer error"
    return "" if body.find("Error report") >= 0 else body
def makePasswordResetRequest(host, path, targetUser, port=80):
    """ Make the password reset requests for users in accounts list

    Returns the CSRF login token (used to recover the mt_rand seed) or
    None when no token could be extracted from the login page.
    """
    loginPage = path + '/index.php?title=Special:UserLogin'
    passResetPage = path + '/index.php/Special:PasswordReset'
    headers = {"Content-type": "application/x-www-form-urlencoded",
               "Connection": "Keep-Alive"}
    con = HTTPConnection(host, port)
    # 1st request extracts the CSRF token which is used to recover mt_rand seed
    con.request('GET', loginPage, '', headers=headers)
    resp = con.getresponse()
    token = extractLoginToken(resp.read())
    if not token:
        return None
    # Use the token and the obtained cookie to submit a password reset
    # request for the target user.
    cookie = resp.getheader('Set-Cookie')
    cookie = cookie[:cookie.find(';')]
    headers['Cookie'] = cookie
    # Submit the password reset form
    params = {'wpUsername': targetUser,
              'wpEditToken': '+\\',
              'title': 'Special:PasswordReset',
              'redirectparams': ''}
    con.request('POST', passResetPage, urlencode(params), headers)
    #data = con.getresponse().read() # To keep alive the connection
    con.close()
    return token
def run(self):
    """Fetch self.url through self.proxy.

    With self.file set: succeed when the response body matches the
    file's content (and remember the body).  Without: succeed on 200.
    """
    if self.file:
        # BUG FIX: use a context manager so the file handle is closed
        # even when the HTTP request raises.
        with open(self.file, "r") as dataFile:
            cdata = dataFile.read()
        conn = HTTPConnection(self.proxy)
        conn.request("GET", self.url)
        resp = conn.getresponse()
        rdata = resp.read()
        if rdata == cdata:
            self.result = True
            self.data = rdata
        conn.close()
    else:
        conn = HTTPConnection(self.proxy)
        conn.request("GET", self.url)
        resp = conn.getresponse()
        rdata = resp.read()
        if resp.status == httplib.OK:
            self.result = True
        conn.close()
def _conectar_servico(self, servico, envio, resposta, ambiente=None):
    """Connect to the city's NFS-e SOAP webservice.

    Wraps *envio* in a SOAP envelope (signing it first for lot
    submissions), POSTs it to the server configured for the city and
    environment, and stores the raw reply in *resposta* for debugging.
    """
    if ambiente is None:
        ambiente = self.ambiente
    # Server and URL come from the per-city, per-environment table.
    self._servidor = CIDADE_WS[self.cidade][ambiente]['servidor']
    self._url = CIDADE_WS[self.cidade][ambiente]['url']
    self._soap_envio = SOAPEnvio()
    self._soap_envio.metodo = METODO_WS[servico]['metodo']
    self._soap_envio.envio = envio
    self._soap_retorno = SOAPRetorno()
    self._soap_retorno.metodo = METODO_WS[servico]['metodo']
    self._soap_retorno.resposta = resposta
    if (servico == WS_NFSE_ENVIO_LOTE):
        # Lot submissions must be signed with the configured certificate.
        self.certificado.prepara_certificado_arquivo_pfx()
        self.certificado.assina_xmlnfe(envio)
    con = HTTPConnection(self._servidor)
    con.set_debuglevel(10)  # NOTE(review): leftover debugging? confirm
    con.request('POST', '/' + self._url, self._soap_envio.xml, self._soap_envio.header)
    resp = con.getresponse()
    # Response data saved for possible debugging
    self._soap_retorno.resposta.version = resp.version
    self._soap_retorno.resposta.status = resp.status
    self._soap_retorno.resposta.reason = unicode(
        resp.reason.decode('utf-8'))
    self._soap_retorno.resposta.msg = resp.msg
    self._soap_retorno.resposta.original = unicode(
        resp.read().decode('utf-8'))
    # All good!
    if self._soap_retorno.resposta.status == 200:
        self._soap_retorno.xml = self._soap_retorno.resposta.original
    #except Exception, e:
    #    raise e
    #else:
    con.close()
    print()
    print()
    print()
    print(self._soap_envio.xml)
    print()
    print()
    print()
    print(por_acentos(self._soap_retorno.resposta.original))
    print()
    print()
    print()
    print(resposta.xml)
def search_on_playdeb(orig_file):
    """Look up *orig_file* in the playdeb games pool directory listing.

    Returns a list of (orig tarball name, download URL) pairs, or None
    when the listing is unavailable or contains no matching lines.
    """
    global MIRROR_URL, GETDEB_SUBDIR
    http_connection = HTTPConnection(MIRROR_URL)
    basename = orig_file.split('_')[0]
    download_dir = ('http://' + MIRROR_URL + '/' + GETDEB_SUBDIR +
                    '/ubuntu/pool/games/' + get_package_subdir(orig_file) +
                    '/' + basename + '/')
    http_connection.request('GET', download_dir)
    http_response = http_connection.getresponse()
    if http_response.status != 200:
        return None
    listing = http_response.read()
    http_connection.close()
    # Keep only the listing lines that mention the package basename.
    package_lines = [line for line in listing.split('\n') if basename in line]
    if not package_lines:
        return None
    p_d = list()
    package_re = re.compile('<a .*?>(?P<orig>.*?)(?:\.diff\.gz|\.debian\.tar\.gz)<')
    download_re = re.compile('<a href="(?P<download>.*?)">')
    for line in package_lines:
        orig_match = package_re.search(line)
        if not orig_match:
            continue
        link_match = download_re.search(line)
        p_d.append((orig_match.group('orig'),
                    download_dir + link_match.group('download')))
    return p_d
def _upload_file(self, data):
    """Create a snapshot via data['make_snapshot_cb'] and POST it to
    data['url']; optionally deletes the snapshot file afterwards.

    data keys used: make_snapshot_cb, args, kwargs, path, url, delete.
    """
    if callable(data['make_snapshot_cb']):
        args = data['args'] if data['args'] is not None else []
        kwargs = data['kwargs'] if data['kwargs'] is not None else {}
        try:
            data['path'] = data['make_snapshot_cb'](*args, **kwargs)
        except NotImplementedError:
            # callback cannot produce a snapshot on this platform
            return
    with open(data['path'], 'rb') as file_handle:
        url = data['url']
        parsed = urlparse(url)
        connection = HTTPConnection(parsed.netloc, timeout=10)
        try:
            connection.connect()
            request_url = parsed.path
            if parsed.query is not None and parsed.query != '':
                request_url += '?' + parsed.query
            connection.request('POST', request_url, file_handle)
            resp = connection.getresponse()
            if resp.status >= 400:
                logger.error('Failed to upload file: http error %s: %s',
                             resp.status, resp.read())
        except Exception:
            logger.error('Failed to upload file: Network error %s', error_str())
        finally:
            connection.close()
    if data.get('delete', False):
        os.unlink(data['path'])
def run(self):
    """Verify proxy keep-alive behaviour: the first response must match
    self.file and leave the connection open; the second request (sent
    with Connection: close) must match self.file2 and close it."""
    conn = HTTPConnection(self.proxy)
    tmpFlag = True
    # BUG FIX: the original rebound dataFile without closing the first
    # handle, leaking it; context managers close both deterministically.
    with open(self.file, "r") as dataFile:
        cdata = dataFile.read()
    with open(self.file2, "r") as dataFile:
        cdata2 = dataFile.read()
    conn.request("GET", self.url)
    resp = conn.getresponse()
    rdata = resp.read()
    if rdata != cdata:
        tmpFlag = False
    if resp.will_close == True:
        # first response must keep the connection alive
        tmpFlag = False
    connHdrs = {"Connection": "close"}
    conn.request("GET", self.url2, headers=connHdrs)
    resp = conn.getresponse()
    rdata2 = resp.read()
    if rdata2 != cdata2:
        tmpFlag = False
    if resp.will_close == False:
        # explicit Connection: close must be honoured
        tmpFlag = False
    if tmpFlag == True:
        self.result = True
    conn.close()
def get(self, query):
    """
    Executes a lookup request for the given query, which should be a
    ``dict`` of name value pairs to pass to the api.

    :: returns :: A JSON object
    :raises IMDBException: on connection or response-parse failure
    :raises IMDBHttpException: when the API reports an error payload
    """
    request = self.encode(query)
    # BUG FIX: pre-bind so the cleanup path cannot NameError when the
    # constructor itself raises.
    conn = None
    try:
        try:
            conn = HTTPConnection(self.HOST)
            conn.request(self.VERB, "?".join([self.URI, request]))
        except HTTPException as e:
            raise IMDBException("Could not connect to Movie API: %s" % str(e))
        try:
            response = conn.getresponse()
            response = json.load(response)
        except ValueError as e:
            raise IMDBException("Could not parse response from Movie API: %s" % str(e))
        if 'error' in response:
            raise IMDBHttpException(**response)
        conn.close()
        return response
    except Exception:
        if conn is not None:
            conn.close()
        # BUG FIX: bare `raise` preserves the original traceback
        # (the old `raise e` rewrote it).
        raise
def send_email(request):
    """Fetch a graphite-rendered image and email it to the requested recipients.

    Expects GET params 'to' (comma-separated addresses) and 'url' (image URL).
    Returns an HttpResponse of "OK", or the traceback text on any failure.
    """
    try:
        recipients = request.GET['to'].split(',')
        url = request.GET['url']
        proto, server, path, query, frag = urlsplit(url)
        if query:
            path += '?' + query
        conn = HTTPConnection(server)
        conn.request('GET', path)
        resp = conn.getresponse()
        assert resp.status == 200, "Failed HTTP response %s %s" % (resp.status, resp.reason)
        rawData = resp.read()
        conn.close()
        message = MIMEMultipart()
        message['Subject'] = "Graphite Image"
        message['To'] = ', '.join(recipients)
        message['From'] = 'composer@%s' % gethostname()
        text = MIMEText("Image generated by the following graphite URL at %s\r\n\r\n%s" % (ctime(), url))
        image = MIMEImage(rawData)
        image.add_header('Content-Disposition', 'attachment',
                         filename="composer_" + strftime("%b%d_%I%M%p.png"))
        message.attach(text)
        message.attach(image)
        s = SMTP(settings.SMTP_SERVER)
        s.sendmail('composer@%s' % gethostname(), recipients, message.as_string())
        s.quit()
        return HttpResponse("OK")
    except Exception:
        # BUG FIX: was a bare except, which also swallowed
        # SystemExit/KeyboardInterrupt; deliberately best-effort otherwise.
        return HttpResponse(format_exc())
def downloadSkin(username, cache=None, cape=False):
    """Download a Minecraft skin (or cape) PNG for *username*.

    Follows the single 301 redirect skins.minecraft.net issues to the
    textures CDN (passed back in via *cache*).  Returns
    (width, height, pixel-rows, metadata) or None when unavailable.
    """
    host = "skins.minecraft.net" if cache is None else "textures.minecraft.net"  # idiom fix: was `== None`
    conn = HTTPConnection(host, timeout=10000)
    # cache[29:] strips the scheme+host prefix off the redirect URL
    # -- TODO confirm that offset against the actual Location values.
    path = ("/Minecraft" + ("Cloak" if cape else "Skin") + "s/" + username + ".png") if cache is None else cache[29:]
    conn.request("GET", path)
    response = conn.getresponse()
    if cache is None and response.status == 301:
        location = response.getheader("Location", "non-cache")
        if location != "non-cache":
            return downloadSkin(username, location, cape)
    if cape and response.status != 200:
        return None
    if response.status != 200 and response.status != 403:
        conn.close()
        alert("Couldn't connect to " + host + path)
    if response.status == 403:
        alert("Not a premium user.")
    if response.status == 200:
        data = response.read()
        conn.close()
        reader = png.Reader(bytes=data)
        (width, height, pixels, metadata) = reader.asRGBA8()
        return (width, height, list(pixels), metadata)
    else:
        return None
def api(api_name, path):
    """Rate-limited API gateway: proxy *path* to the named upstream,
    mirror selected headers back, and log the exchange to RabbitMQ.

    Aborts with 403 once a client IP exceeds 50000 calls in 24h.
    """
    c = membase.incr(request.environ['REMOTE_ADDR'])
    if c is None:  # idiom fix: was `== None`; first hit starts the daily counter
        membase.set(request.environ['REMOTE_ADDR'], 1, 86400)
    elif c > 50000:
        abort(403)
    apis = {'silcc': 'www.opensilcc.com'}
    path = '/' + path
    if '?' in request.url:
        # forward the original query string untouched
        path += '?' + request.url.split('?')[1]
    api = HTTPConnection(apis[api_name], strict=True)
    api.connect()
    api.request(request.method, path, request.data)
    api_response = api.getresponse()
    api_response_content = api_response.read()
    api.close()
    gateway_response = make_response(api_response_content)
    gateway_response.status_code = api_response.status
    for header in ['Cache-Control', 'Content-Type', 'Pragma']:
        gateway_response.headers[header] = api_response.getheader(header)
    log_data = dict(api=api_name, path=path, data=request.data,
                    response=api_response_content)
    log_entry = json.dumps(log_data)
    pika_channel.basic_publish(exchange='', routing_key='swiftgate', body=log_entry)
    return gateway_response
class TWizardBind: def __init__(self, confPath=""): self.Conf = ConfigParser.ConfigParser() self.Conf.read(confPath) self.SocketHost = self.Conf.get("Service", "SocketHost") self.SocketPort = self.Conf.get("Service", "SocketPort") self.GetRequestParameters = self.Conf.get("Service", "GetRequestParameters") def __call__(self, queryText): print queryText.encode("utf-8") #try: self.Conn = HTTPConnection(self.SocketHost, self.SocketPort) self.Conn.set_debuglevel(5) self.Conn.request("GET", self.GetRequestParameters + urllib2.quote(queryText.encode("utf-8"))) resp = self.Conn.getresponse() responseString = resp.read() self.Conn.close() print >> sys.stderr, "TWizardBind:Answer:", responseString jsonReplyObj = None try: jsonReplyObj = json.loads(responseString) print >> sys.stderr, "TWizardBind: Json object: ", jsonReplyObj except: print >> sys.stderr, "TWizardBind: can't parse json response" #except: # print >> sys.stderr, "TWizardBind: Something wrong with connection" return jsonReplyObj
def submit():
    """POST the version, user comments, traceback and current config to
    the report server; returns {'ret': 'ok'} or {'ret': 'error'}."""
    # Build the parameters
    headers = {"Content-type": "application/x-www-form-urlencoded"}
    info = urlencode(
        {
            "version": configured.VERSION,
            "comments": CTK.post["comments"],
            "traceback": unquote(CTK.post["traceback"]),
            "config": str(CTK.cfg),
        }
    )
    # HTTP Request
    conn = HTTPConnection(URL_REPORT_HOST)
    conn.request("POST", URL_REPORT_URL, info, headers)
    response = conn.getresponse()
    data = response.read()
    conn.close()
    # Check the output
    if response.status != 200:
        return {"ret": "error"}
    if "{'ret':'ok'}" not in data:  # idiom fix: was `not ... in ...`
        return {"ret": "error"}
    return {"ret": "ok"}
def _publish(self, user, passwd, args, server, uri, debug):
    """GET *uri* on *server* with *args* urlencoded into the query string.

    Raises PublishException unless the reply is `200 OK` and the body
    contains 'success'.  Returns the (status, reason, body) tuple.
    NOTE(review): *user* and *passwd* are accepted but never used here.
    """
    from httplib import HTTPConnection
    from urllib import urlencode
    uri = uri + "?" + urlencode(args)
    if debug:
        print 'Connect to: http://%s' % server
        print 'GET %s' % uri
        print '\nFull URL: http://%s%s' % (server, uri)
    conn = HTTPConnection(server, timeout=5)
    # NOTE(review): HTTPConnection() always returns an object, so this
    # check is dead code -- a timeout raises from request()/getresponse()
    # instead of yielding a falsy connection.  Confirm before removing.
    if not conn:
        raise PublishException('Remote server connection timeout')
    conn.request("GET", uri)
    http = conn.getresponse()
    data = (http.status, http.reason, http.read())
    conn.close()
    if not (data[0] == 200 and data[1] == 'OK' and data[2].find('success') >= 0):
        raise PublishException('Server returned invalid status: %d %s %s' % data)
    return data
def __init__(self, server):
    """
    Probe the Belkin Wemo switch at *server* for its HTTP port.

    Wemo ports can change; most devices listen somewhere in 49152-49155,
    so each candidate is tried until /setup.xml answers 200.

    :param server: String with the IP of the Belkin Wemo Switch.
    """
    self.server = server
    self.connected = False
    self.port = 49151  # incremented before first use: probing starts at 49152
    response_status = 0
    while response_status != 200 and self.port < 49156:
        self.port += 1
        conn = HTTPConnection(self.server, self.port, timeout=0.5)
        try:
            conn.request('GET', '/setup.xml')
            response_status = conn.getresponse().status
        except socket.timeout:
            # unresponsive port -- move on to the next candidate
            pass
        conn.close()
    # Record the outcome; an exhausted scan marks the port as the error state.
    if response_status == 200:
        self.connected = True
    else:
        self.connected = False
        self.port = WemoSwitch.ERROR_STATE
def url_exists(url):
    """Return True when a GET on *url* answers with a non-error status
    (anything outside 400-599); False on an error status or exception.

    Raises ValueError for schemes other than http/https.
    """
    url_components = urlparse.urlparse(url)
    scheme = url_components.scheme
    hostname = url_components.hostname
    port = url_components.port
    timeout = 10
    if scheme == 'http':
        connection = HTTPConnection(hostname, port, timeout=timeout)
    elif scheme == 'https':
        connection = HTTPSConnection(hostname, port, timeout=timeout)
    else:
        raise ValueError('{}: not an HTTP/HTTPS URL'.format(url))
    try:
        path = url_components.path
        query = url_components.query
        relative_url = path
        if query:
            relative_url += '?' + query
        connection.request('GET', relative_url)
        response = connection.getresponse()
        status = response.status
        # PERF/idiom fix: `status not in xrange(400, 600)` scanned the
        # range linearly on py2; the comparison says the same directly.
        return not (400 <= status < 600)
    except Exception:
        return False
    finally:
        connection.close()
def update(self, alert):
    """
    Updates an existing alert which has been modified.
    """
    headers = {'Cookie': self.cookie,
               'Content-type': 'application/x-www-form-urlencoded; charset=utf-8'}
    sig, es, hps = self._scrape_sig_es_hps(alert)
    form = {
        'd': DELIVER_TYPES.get(alert.deliver, DELIVER_DEFAULT_VAL),
        'e': self.email,
        'es': es,
        'hps': hps,
        'q': alert.query,
        'se': 'Save',
        'sig': sig,
        't': ALERT_TYPES[alert.type],
    }
    if alert.deliver == DELIVER_EMAIL:
        # frequency only applies to email delivery
        form['f'] = ALERT_FREQS[alert.freq]
    conn = HTTPConnection('www.google.com')
    conn.request('POST', '/alerts/save?hl=en&gl=us', safe_urlencode(form), headers)
    response = conn.getresponse()
    try:
        # Google acknowledges a successful save with a 302 redirect.
        if response.status != 302:
            raise UnexpectedResponseError(response.status,
                                          response.getheaders(),
                                          response.read())
    finally:
        conn.close()
def run(self): if self.file: print "RUN THE THREAD CLIENT" dataFile = open(self.file, "r") cdata = dataFile.read() conn = HTTPConnection(self.proxy) conn.request("GET", self.url) resp = conn.getresponse() rdata = resp.read() if rdata == cdata: self.result = True self.data = rdata conn.close() dataFile.close() print "CONNECTION CLOSE" else: conn = HTTPConnection(self.proxy) conn.request("GET", self.url) resp = conn.getresponse() rdata = resp.read() if resp.status == httplib.OK: self.result = True conn.close()
def health_check(self, target):
    """HEAD-probe *target*'s registrable domain and maintain the global
    dead_servers list: append on failure, remove once it recovers."""
    global dead_servers
    is_alive = 0
    p = re.compile("[a-z0-9-.]*?\.[a-z]+$")
    domain_tld = p.findall(target)[0]
    accepted_responses = (404, 302, 304, 301, 200)
    #logger.error('perform health check on ' + domain_tld)
    # BUG FIX: pre-bind so the finally-close cannot NameError when the
    # connection object was never created.
    conn = None
    try:
        conn = HTTPConnection(domain_tld)
        conn.request('HEAD', '/')
        response = conn.getresponse()
        if response.status in accepted_responses:
            is_alive = 1
        else:
            # BUG FIX: str + int concatenation raised TypeError here
            logger.error(ctime() + ': ' + target + ' is down, HTTP error code is: ' + str(response.status))
    # FIXME
    # Bypass dns resolving errors exception, don't know why it always fail after running for about 1 hours.
    except gaierror:
        #logger.error(ctime() + ' ' + target + ' : Error: Name or service does not known' )
        is_alive = 1
    except error:
        logger.error(ctime() + ' ' + target + ' : Connection refused')
    finally:
        if conn is not None:
            conn.close()
    if is_alive == 0 and target not in dead_servers:
        dead_servers.append(target)
    if is_alive == 1 and target in dead_servers:
        dead_servers.pop(dead_servers.index(target))
def upload(addr, url, formfields, filefields):
    """POST a multipart/form-data request to *url* on host/port *addr*.

    formfields -- dict of plain form fields (name -> value)
    filefields -- dict mapping form field names to file paths to upload
    Returns the raw response body.
    """
    # One multipart section per plain form field.
    formsections = []
    for name in formfields:
        section = [
            '--' + BOUNDARY,
            'Content-disposition: form-data; name="%s"' % name,
            '',
            formfields[name]
        ]
        formsections.append(CRLF.join(section) + CRLF)
    # Collect (size, form name, path) for every file to upload.
    fileinfo = [(os.path.getsize(filename), formname, filename)
                for formname, filename in filefields.items()]
    # Build one multipart header block per file and total the payload bytes.
    filebytes = 0
    fileheaders = []
    for filesize, formname, filename in fileinfo:
        headers = [
            '--' + BOUNDARY,
            'Content-Disposition: form-data; name="%s"; filename="%s"' %
            (formname, filename),
            'Content-length: %d' % filesize,
            ''
        ]
        fileheaders.append(CRLF.join(headers) + CRLF)
        filebytes += filesize
    # Terminating boundary marker.
    closing = "--" + BOUNDARY + "--\r\n"
    # Exact Content-length of the whole request body.
    content_size = (sum(len(f) for f in formsections) +
                    sum(len(f) for f in fileheaders) +
                    filebytes + len(closing))
    # Upload it
    conn = HTTPConnection(*addr)
    conn.putrequest("POST", url)
    conn.putheader("Content-type", 'multipart/form-data; boundary=%s' % BOUNDARY)
    conn.putheader("Content-length", str(content_size))
    conn.endheaders()
    # Send all form sections
    for s in formsections:
        conn.send(s.encode('latin-1'))
    # Send all files
    for head, filename in zip(fileheaders, filefields.values()):
        conn.send(head.encode('latin-1'))
        # BUG FIX: the original left the file open if a send failed;
        # the context manager closes it on every path.
        with open(filename, "rb") as f:
            while True:
                chunk = f.read(16384)
                if not chunk:
                    break
                conn.send(chunk)
    conn.send(closing.encode('latin-1'))
    r = conn.getresponse()
    responsedata = r.read()
    conn.close()
    return responsedata
def run(self):
    """Stream NTRIP correction data from the caster and republish it.

    Opens a persistent GET to the configured mountpoint, then reads the
    response in 100-byte chunks, splitting on CRLF to frame messages,
    until self.stop becomes true.
    """
    headers = {
        'Ntrip-Version': 'Ntrip/2.0',
        'User-Agent': 'NTRIP ntrip_ros',
        'Connection': 'close',
        # NOTE(review): b64encode of a str only works on Python 2;
        # Python 3 requires bytes — confirm the target interpreter.
        'Authorization': 'Basic ' + b64encode(self.ntc.ntrip_user + ':' + self.ntc.ntrip_pass)
    }
    connection = HTTPConnection(self.ntc.ntrip_server)
    # The NMEA GGA sentence is sent as the request body so the caster
    # can pick corrections appropriate for our position.
    connection.request('GET', '/' + self.ntc.ntrip_stream, self.ntc.nmea_gga, headers)
    response = connection.getresponse()
    if response.status != 200: raise Exception("blah")
    buf = ""
    rmsg = Message()
    while not self.stop:
        data = response.read(100)
        pos = data.find('\r\n')
        if pos != -1:
            # Delimiter found: everything buffered plus the bytes before
            # the CRLF form one complete message — publish it.
            rmsg.message = buf + data[:pos]
            rmsg.header.seq += 1
            rmsg.header.stamp = rospy.get_rostime()
            buf = data[pos + 2:]
            self.ntc.pub.publish(rmsg)
        else:
            # No delimiter yet; keep accumulating.
            # NOTE(review): if the stream hits EOF, read() returns ''
            # forever and this loop spins busily — verify upstream.
            buf += data
    connection.close()
def load_traffic_uncached(interval, what, iden,
                          start_time = None, stop_time = None, npoints = None):
    """
    Fetches pickled traffic from the traffic server and returns it as a
    list.  On connection failure (or no data) returns an empty list.
    """
    def format_date(d):
        # Normalize to the site timezone, then render the first six
        # timetuple fields as a colon-separated string.
        if hasattr(d, "tzinfo"):
            if d.tzinfo is None:
                d = d.replace(tzinfo = g.tz)
            else:
                d = d.astimezone(g.tz)
        return ":".join(map(str, d.timetuple()[:6]))

    traffic_url = os.path.join(g.traffic_url, interval, what, iden)
    args = {}
    if start_time:
        args['start_time'] = format_date(start_time)
    if stop_time:
        args['stop_time'] = format_date(stop_time)
    if npoints:
        args['n'] = npoints
    u = urlparse(traffic_url)
    conn = None
    try:
        conn = HTTPConnection(u.hostname, u.port)
        conn.request("GET", u.path + query_string(args))
        res = conn.getresponse()
        # Non-200 counts as "no data".
        return loads(res.read()) if res.status == 200 else []
    except socket.error:
        return []
    finally:
        # BUG FIX: the connection leaked whenever request/read raised;
        # close it on every exit path.
        if conn is not None:
            conn.close()
def queryBooked(config, function, method, params, headers):
    """Send a Booked REST API request

    Args:
        config(ConfigParser): Config file input data
        function(string): Name of Booked REST API function
        method(string): POST or GET
        params(string): Arguments to REST function
        headers(string): HTTP header info containing auth data

    Returns:
        dict: contains Booked auth info
        JSON object: response from server
    """
    connection = HTTPConnection(config.get("Server", "hostname"))
    connection.connect()
    try:
        # BUG FIX (idiom): compare to None with "is", not "==".
        if headers is None:
            # No session yet: authenticate first and build the session
            # headers Booked expects on every subsequent request.
            creds = {
                "username": config.get("Authentication", "username"),
                "password": config.get("Authentication", "password")
            }
            authUrl = config.get("Server", "baseUrl") + "Authentication/Authenticate"
            session = query(connection, authUrl, "POST", creds, {})
            headers = {
                "X-Booked-SessionToken": session['sessionToken'],
                "X-Booked-UserId": session['userId']
            }
        url = config.get("Server", "baseUrl") + function
        data = query(connection, url, method, params, headers)
    finally:
        # BUG FIX: close the connection even when query() raises.
        connection.close()
    return (headers, data)
def fetchData ( server, markets, network ):
    """Fetch the markets and network JSON documents from *server*.

    Args:
        server: Hostname to connect to (5-second timeout).
        markets: Path of the markets resource.
        network: Path of the network resource.

    Returns:
        (markets_obj, network_obj, '') on success, or None when either
        request returns a non-200 status (an error is written to stderr).
    """
    marketsData = None
    networkData = None
    connection = HTTPConnection ( server, timeout=5 )
    try:
        # markets
        connection.request ( 'GET', markets )
        r = connection.getresponse ( )
        if ( r.status == 200 ):
            marketsData = r.read ( )
        else:
            stderr.write ( 'error: fetchData() got status %d for markets\n' % r.status )
            return None
        # network
        connection.request ( 'GET', network )
        r = connection.getresponse ( )
        if ( r.status == 200 ):
            networkData = r.read ( )
        else:
            stderr.write ( 'error: fetchData() got status %d for network\n' % r.status )
            return None
    finally:
        # BUG FIX: the connection leaked on both early error returns;
        # close it on every exit path.
        connection.close ( )
    # parse JSON
    return ( json.loads(marketsData), json.loads(networkData), '' )
def getSuggestions(self, queryString):
    """Query Google's toolbar-completion service for YouTube search
    suggestions and return the response re-encoded to UTF-8, or None."""
    if not queryString:
        return None
    else:
        # Base query for YouTube ("ds=yt") toolbar suggestions.
        query = '/complete/search?output=toolbar&client=youtube&xml=true&ds=yt'
        if self.gl:
            query += '&gl=' + self.gl  # geographic location restriction
        if self.hl:
            query += '&hl=' + self.hl  # host (interface) language
        query += '&jsonp=self.getSuggestions&q=' + quote(queryString)
        try:
            connection = HTTPConnection('google.com')
            connection.request('GET', query, '', {'Accept-Encoding': 'UTF-8'})
        except (CannotSendRequest, gaierror, error):
            print "[YouTube] Can not send request for suggestions"
        else:
            try:
                response = connection.getresponse()
            except BadStatusLine:
                print "[YouTube] Can not get a response from google"
            else:
                if response.status == 200:
                    data = response.read()
                    try:
                        # Charset comes from the Content-Type header,
                        # e.g. "text/xml; charset=ISO-8859-1".
                        charset = response.getheader('Content-Type', 'text/xml; charset=ISO-8859-1').rsplit('=')[1]
                    except:
                        # Malformed header: fall back to Latin-1.
                        charset = 'ISO-8859-1'
                    connection.close()
                    return data.decode(charset).encode('utf-8')
        # Any failure path falls through here: close and report nothing.
        if connection:
            connection.close()
        return None
def getCookies(self, spiderName):
    """Return (and cache) the cookies configured for *spiderName*.

    Cookies are fetched with a HEAD request to the configured URL and
    cached until the configured ``timeout`` (seconds) has elapsed.

    Returns:
        dict mapping cookie name -> value, or {} when the spider has no
        (or an incomplete) cookiesConfig entry.
    """
    if spiderName in cookiesConfig:
        if not "domain" in cookiesConfig[
                spiderName] or not "url" in cookiesConfig[
                spiderName] or not "timeout" in cookiesConfig[spiderName]:
            # Config entry is incomplete; refuse to fetch.
            log.msg("%s 的 cookiesConfig配置不正确" % spiderName, log.ERROR)
            return {}
        config = cookiesConfig[spiderName]
        cookiesEndTime = datetime.datetime.now()
        # Refresh when nothing is cached yet or the cache has expired.
        if (not spiderName in self._cookies) or (
                not spiderName in self._cookiesStartTime
        ) or (cookiesEndTime - self._cookiesStartTime[spiderName]
              ).seconds > config["timeout"]:
            log.msg("%s 获得新的cookies" % spiderName, log.INFO)
            self._cookiesStartTime[spiderName] = cookiesEndTime
            self._cookies[spiderName] = {}
            conn = HTTPConnection(config["domain"])
            try:
                conn.request("HEAD", config["url"])
                resp = conn.getresponse()
            finally:
                # BUG FIX: close the connection even if the request fails.
                conn.close()
            cks = resp.getheader("set-cookie")
            if cks:
                for v in cks.split(';'):
                    # BUG FIX: split on the FIRST '=' only so cookie
                    # values containing '=' (e.g. base64 payloads) are
                    # kept intact; the old unbounded split plus the
                    # len == 2 check silently dropped them.
                    ck = v.split('=', 1)
                    if len(ck) == 2:
                        self._cookies[spiderName][
                            ck[0].strip()] = ck[1].strip()
        return self._cookies[spiderName]
    else:
        return {}
def run(self):
    """Fetch self.url and self.url2 through the proxy and compare the
    bodies against the two reference files.  Sets self.result = True only
    when both bodies match AND connection persistence behaves as
    expected: keep-alive for the first response, close for the second.
    """
    conn = HTTPConnection(self.proxy)
    tmpFlag = True
    # BUG FIX: the first file object was leaked (its variable was simply
    # rebound to the second file); context managers close both.
    with open(self.file, "r") as dataFile:
        cdata = dataFile.read()
    with open(self.file2, "r") as dataFile:
        cdata2 = dataFile.read()
    conn.request("GET", self.url)
    resp = conn.getresponse()
    rdata = resp.read()
    if rdata != cdata:
        tmpFlag = False
    # First response must be keep-alive (will_close False).
    if resp.will_close == True:
        tmpFlag = False
    connHdrs = {"Connection": "close"}
    conn.request("GET", self.url2, headers=connHdrs)
    resp = conn.getresponse()
    rdata2 = resp.read()
    if rdata2 != cdata2:
        tmpFlag = False
    # Second request asked for close, so will_close must be True.
    if resp.will_close == False:
        tmpFlag = False
    if tmpFlag == True:
        self.result = True
    conn.close()
def testResponse404(self):
    """A request for a missing image must yield HTTP 404."""
    connection = HTTPConnection('localhost:8080')
    connection.request("GET", "/images/bad_file.gif")
    response = connection.getresponse()
    response.read()  # drain the body before checking the status
    self.assertEqual(response.status, 404)
    connection.close()
def _publish(args, server, uri):
    """Send *args* as a GET query string to http://<server><uri>.

    Args:
        args: dict of query parameters; entries whose value is the
            placeholder string 'NA' are dropped before encoding.
        server: Remote host name.
        uri: Base path on the server.

    Returns:
        (status, reason, body) tuple on success, or None when a
        DownloadError occurred.

    Raises:
        PublishException: on connection error or a non-"200 OK" reply.
    """
    from httplib import HTTPConnection
    from urllib import urlencode
    # Strip placeholder values before building the query string.
    args = dict((k,v) for k,v in args.items() if v != 'NA')
    uri = uri + "?" + urlencode(args)
    log.debug('Connect to: http://%s' % server)
    log.debug('GET %s' % uri)
    conn = HTTPConnection(server)
    if not conn:
        raise PublishException('Remote server connection error')
    conn.request("GET", uri)
    try:
        http = conn.getresponse()
        data = (http.status, http.reason, http.read())
        conn.close()
        if not (data[0] == 200 and data[1] == 'OK'):
            raise PublishException('Server returned invalid status: %d %s %s' % data)
    except (DownloadError):
        # BUG FIX: logging.warning was given a positional argument with
        # no format placeholder, which made the logging module itself
        # raise an internal formatting error; use lazy %s formatting.
        logging.warning("Download error: %s", sys.exc_info()[0])
        data = None
    return data
def testImage(self):
    """An existing image must be served with HTTP 200."""
    connection = HTTPConnection('localhost:8080')
    connection.request("GET", "/images/blank.gif")
    response = connection.getresponse()
    response.read()  # drain the body before checking the status
    self.assertEqual(response.status, 200)
    connection.close()
class HTTPSpeakerClient:
    """Emacspeak HTTP speech client, for HTTPSpeaker instances."""

    def __init__(self, host="127.0.0.1", port=8000):
        "Initialize client to connect to server at given host and port."
        self._connection = HTTPConnection(host, port)

    def postCommand(self, command, arg=""):
        """Post command, with argument arg (default, empty), to the speech server.
        Returns the body of the server's HTTP response, if any. On error,
        HTTPSpeakerError is raised."""
        payload = command + ": " + arg if arg else command
        self._connection.request("POST", "/", payload, {"Content-type": "text/plain"})
        reply = self._connection.getresponse()
        if reply.status != 200:
            raise HTTPSpeakerError(reply.status, reply.reason)
        return reply.read()

    def speak(self, text):
        "Speak the supplied string."
        self.postCommand("speak", text)

    def stop(self):
        "Stop speaking."
        self.postCommand("stop")

    def isSpeaking(self):
        "Return '0' when not speaking."
        return self.postCommand("isSpeaking")

    def close(self):
        "Close the connection to the speech server."
        self._connection.close()
def testMessage(self):
    """Exercise /message with a valid id, an unknown id, a negative id,
    and the onlymsg=true variant that returns only the raw body."""
    connection = HTTPConnection('localhost:8080')

    def fetch(path):
        # One GET on the current connection; returns (status, body).
        connection.request("GET", path)
        response = connection.getresponse()
        return response.status, response.read()

    status, body = fetch("/message?id=1")
    self.assertEqual(status, 200)
    self.assertGreater(body.find('<div class="messagebox">'), 0)

    status, body = fetch("/message?id=2")
    self.assertEqual(status, 200)
    self.assertGreater(
        body.find('Please select an email message from the list'), 0)

    status, body = fetch("/message?id=-2")
    self.assertEqual(status, 200)
    self.assertGreater(
        body.find('Please select an email message from the list'), 0)

    # Fresh connection for the raw-body variant, as in the original flow.
    connection = HTTPConnection('localhost:8080')
    status, body = fetch("/message?id=1&onlymsg=true")
    self.assertEqual(status, 200)
    self.assertEqual(body, 'The HTML Body')
    connection.close()
def send_youtube_information(msg, sock):
    """Scan an IRC message for youtube.com watch links, fetch each video
    page, extract its <title>, and announce the title to the channels."""
    global channels
    matches = re.finditer('youtube\.com(?P<link>/watch\S*)', msg)
    # NOTE(review): finditer returns an iterator, which is always truthy,
    # so this guard can never fire; the for-loop below already handles
    # the no-match case.
    if not matches:
        return
    for match in matches:
        http_connection = HTTPConnection('www.youtube.com')
        http_connection.request('GET', match.group('link'))
        http_response = http_connection.getresponse()
        if http_response.status != 200:
            print "Error occured when fetching data"
            continue
        # The title tag appears early in the page; 4 KiB is enough.
        data = http_response.read(4096)
        titles = re.finditer('<title>(?P<title>.*)</title>', data, re.DOTALL)
        for title in titles:
            video_title = title.group('title')
            # Titles look like "Site - <video name>"; keep the part
            # after the first dash.
            video_title = video_title.split('-', 1)
            video_title = video_title[1].strip()
            # \u0002 = IRC bold on, \u000F = formatting reset.
            msg = "PRIVMSG " + channels + u" :\u0002" + video_title + \
                u"\u000F www.youtube.com" + match.group('link') + "\r\n"
            sock.send(msg)
        http_connection.close()
    return
def download_default_pages(names, prefix):
    """Download the named default wiki pages from trac.edgewall.org and
    store them under trac/wiki/default-pages/, dropping any
    [[TranslatedPages]] marker lines."""
    from httplib import HTTPConnection
    host = 'trac.edgewall.org'
    if prefix and not prefix.endswith('/'):
        prefix += '/'
    connection = HTTPConnection(host)

    def fetch(page):
        # One GET on the shared connection; returns (status, body).
        connection.request('GET', '/wiki/%s?format=txt' % page)
        resp = connection.getresponse()
        return resp.status, resp.read()

    for name in names:
        # These two pages are maintained locally; never overwrite them.
        if name in ('WikiStart', 'SandBox'):
            continue
        sys.stdout.write('Downloading %s%s' % (prefix, name))
        status, content = fetch('%s%s' % (prefix, name))
        if prefix and (status != 200 or not content):
            # Prefixed page missing or empty: fall back to the bare name.
            sys.stdout.write(' %s' % name)
            status, content = fetch(name)
        if status == 200 and content:
            with open('trac/wiki/default-pages/' + name, 'w') as f:
                kept = [line
                        for line in content.replace('\r\n', '\n').splitlines(True)
                        if line.strip() != '[[TranslatedPages]]']
                f.write(''.join(kept))
            sys.stdout.write('\tdone.\n')
        else:
            sys.stdout.write('\tmissing or empty.\n')
    connection.close()
def test_multiple_headers_concatenated_per_rfc_3875_section_4_1_18(dev_server):
    """Duplicate request headers must be folded into one comma-separated
    CGI variable (RFC 3875 section 4.1.18)."""
    server = dev_server(r'''
    from werkzeug.wrappers import Response
    def app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [environ['HTTP_XYZ'].encode()]
    ''')
    if sys.version_info[0] == 2:
        from httplib import HTTPConnection
    else:
        from http.client import HTTPConnection
    conn = HTTPConnection('127.0.0.1', server.port)
    conn.connect()
    conn.putrequest('GET', '/')
    # Interleave the repeated XYZ header with unrelated noise headers;
    # only the XYZ values may survive, in order, in the result.
    request_headers = [
        ('Accept', 'text/plain'),
        ('XYZ', ' a '),
        ('X-INGNORE-1', 'Some nonsense'),
        ('XYZ', ' b'),
        ('X-INGNORE-2', 'Some nonsense'),
        ('XYZ', 'c '),
        ('X-INGNORE-3', 'Some nonsense'),
        ('XYZ', 'd'),
    ]
    for key, value in request_headers:
        conn.putheader(key, value)
    conn.endheaders()
    conn.send(b'')
    res = conn.getresponse()
    assert res.status == 200
    assert res.read() == b'a ,b,c ,d'
    conn.close()
def test_multiple_headers_concatenated_per_rfc_3875_section_4_1_18(dev_server):
    """Repeated request headers must reach the WSGI environ as a single
    comma-joined value, per RFC 3875 section 4.1.18."""
    server = dev_server(r'''
    from werkzeug.wrappers import Response
    def app(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return [environ['HTTP_XYZ'].encode()]
    ''')
    if sys.version_info[0] == 2:
        from httplib import HTTPConnection
    else:
        from http.client import HTTPConnection
    conn = HTTPConnection('127.0.0.1', server.port)
    conn.connect()
    conn.putrequest('GET', '/')
    # Send XYZ four times, separated by headers the app never reads.
    for header_name, header_value in (
            ('Accept', 'text/plain'),
            ('XYZ', ' a '),
            ('X-INGNORE-1', 'Some nonsense'),
            ('XYZ', ' b'),
            ('X-INGNORE-2', 'Some nonsense'),
            ('XYZ', 'c '),
            ('X-INGNORE-3', 'Some nonsense'),
            ('XYZ', 'd')):
        conn.putheader(header_name, header_value)
    conn.endheaders()
    conn.send(b'')
    res = conn.getresponse()
    assert res.status == 200
    assert res.read() == b'a ,b,c ,d'
    conn.close()
def run(self):
    """Fetch self.url through the proxy.  Sets self.result = True when
    the body matches the reference file (if self.file is set) or, with
    no reference file, when the response status is 200 OK."""
    if self.file:
        # BUG FIX: the reference file was opened without a context
        # manager and leaked if any later call raised; "with" closes it.
        with open(self.file, "r") as dataFile:
            cdata = dataFile.read()
        conn = HTTPConnection(self.proxy)
        conn.request("GET", self.url)
        resp = conn.getresponse()
        rdata = resp.read()
        if rdata == cdata:
            self.result = True
            self.data = rdata
        conn.close()
    else:
        conn = HTTPConnection(self.proxy)
        conn.request("GET", self.url)
        resp = conn.getresponse()
        rdata = resp.read()
        if resp.status == httplib.OK:
            self.result = True
        conn.close()
def _publish(self, args, server, uri):
    """Upload the observation in *args* to the remote weather service.

    Appends the site id, authentication key and softwaretype to the
    caller-supplied query parameters, issues a GET, and returns the
    (status, reason, body) tuple.

    Raises:
        Exception: when the connection fails or the server does not
            answer "200 OK".
    """
    uri = uri + "?siteid=" + str(
        self.username) + "&siteAuthenticationKey=" + str(
        self.password) + "&" + urlencode(args) + "&softwaretype=Wfrog"
    self.logger.debug('Connect to: http://%s' % server)
    conn = HTTPConnection(server)
    # NOTE(review): HTTPConnection() never returns a falsy value, so
    # this guard cannot fire; a real timeout would surface as an
    # exception from request() below.
    if not conn:
        raise Exception, 'Remote server connection (%s) timeout!' % server
    self.logger.debug('GET %s' % uri)
    conn.request("GET", uri)
    # Socket exists only after request(); set its timeout here.
    conn.sock.settimeout(5.0)
    http = conn.getresponse()
    data = (http.status, http.reason, http.read())
    conn.close()
    self.logger.debug('Response: %d, %s, %s' % data)
    if not (data[0] == 200 and data[1] == 'OK'):
        raise Exception, 'Server returned invalid status: %d %s %s' % data
    return data