def _create_subresource_for_resource(self, subresource_body, resource_id, server):
    """Create the given subresource, assign it to the given resource unique
    identifier, and verify the server's "201 Created" response.

    Side effects:
      - mutates ``subresource_body`` in place (sets ``resource_id`` and ``id``)
      - records the new Location header for later cleanup
    """
    subresource_body["resource_id"] = resource_id
    http_conn = HTTPConnection(server.hostname, server.port)
    http_conn.connect()
    http_conn.request("POST", self._endpoint_subresource_latest,
                      json.dumps(subresource_body).encode(),
                      headers={"Content-Type": "application/json"})
    response = http_conn.getresponse()
    http_conn.close()
    # Expect 201 with an empty body and a Location header pointing at the
    # newly created subresource.
    self.assertEqual(201, response.status)
    self.assertEqual("application/json; charset=UTF-8", response.headers["Content-Type"])
    self.assertEqual("0", response.headers["Content-Length"])
    location = response.headers["Location"]
    self.assertIsNotNone(location)
    # Prepended, presumably so deletes run in reverse creation order — confirm.
    self._locations_subresource_delete.insert(0, location)
    # The numeric id is the last path segment of the Location URL.
    subresource_id = int(location.split("/")[-1])
    subresource_body["id"] = subresource_id
def urlopen(url, svprev, formdata):
    """POST *formdata* (an iterable of (name, value) string pairs) to *url*
    as multipart/form-data, identifying as an SPlayer build *svprev*.

    Returns the HTTPResponse on 200 OK; raises Exception otherwise.
    """
    ua = "SPlayer Build %d" % svprev
    # prepare data: generate a random multipart boundary
    boundary = "----------------------------" + "%x" % random.getrandbits(48)
    parts = []
    for item in formdata:
        parts.append("--" + boundary + "\r\nContent-Disposition: form-data; name=\""
                     + item[0] + "\"\r\n\r\n" + item[1] + "\r\n")
    parts.append("--" + boundary + "--\r\n")
    # http.client requires a bytes body on Python 3 (the original passed a
    # str to send(), which fails); Content-Length must also be computed on
    # the encoded payload so multi-byte characters are counted correctly.
    data = "".join(parts).encode("utf-8")
    cl = str(len(data))
    r = urlparse(url)
    # Honor an explicit port in the URL (the original ignored it).
    h = HTTPConnection(r.hostname, r.port)
    h.connect()
    h.putrequest("POST", r.path, skip_host=True, skip_accept_encoding=True)
    h.putheader("User-Agent", ua)
    h.putheader("Host", r.hostname)
    h.putheader("Accept", "*/*")
    h.putheader("Content-Length", cl)
    h.putheader("Expect", "100-continue")
    h.putheader("Content-Type", "multipart/form-data; boundary=" + boundary)
    h.endheaders()
    h.send(data)
    resp = h.getresponse()
    if resp.status != OK:
        raise Exception("HTTP response " + str(resp.status) + ": " + resp.reason)
    return resp
def main(argv):
    """Elasticsearch collector entry point.

    Connects to every configured server, then loops forever collecting
    metrics from each in a worker thread per cycle.  Returns 1 when the
    json module is unavailable, 13 when no server is reachable (tells
    tcollector not to respawn this collector).
    """
    utils.drop_privileges()
    socket.setdefaulttimeout(DEFAULT_TIMEOUT)
    servers = []
    if json is None:
        utils.err("This collector requires the `json' Python module.")
        return 1
    for conf in elasticsearch_conf.get_servers():
        server = HTTPConnection( *conf )
        try:
            server.connect()
        except socket.error as exc:
            # A refused connection just means this one server is down; skip it.
            if exc.errno == errno.ECONNREFUSED:
                continue
            raise
        servers.append( server )
    if len( servers ) == 0:
        return 13  # No ES running, ask tcollector to not respawn us.
    lock = threading.Lock()
    while True:
        threads = []
        for server in servers:
            # node_status is queried serially; only collection is threaded.
            status = node_status(server)
            version = status["version"]["number"]
            t = threading.Thread(target = _collect_server, args = (server, version, lock))
            t.start()
            threads.append(t)
        for thread in threads:
            thread.join()
        time.sleep(COLLECTION_INTERVAL)
def connect(self):
    """
    Override the connect() function to intercept calls to certain
    host/ports.

    If no app at host/port has been registered for interception then
    a normal HTTPConnection is made.
    """
    if debuglevel:
        sys.stderr.write('connect: %s, %s\n' % (self.host, self.port,))
    try:
        # get_app returns (app, script_name); app is falsy when this
        # host/port is not registered for interception.
        (app, script_name) = self.get_app(self.host, self.port)
        if app:
            if debuglevel:
                sys.stderr.write('INTERCEPTING call to %s:%s\n' % \
                                 (self.host, self.port,))
            # Swap in a fake socket that feeds requests to the WSGI app.
            self.sock = FakeWSGISocket(app, self.host, self.port, script_name)
        else:
            HTTPConnection.connect(self)
    except Exception as e:
        if debuglevel:
            # intercept & print out tracebacks
            traceback.print_exc()
        raise
def create_tunnel_connection(tunnel_opts: TunnelOptions,
                             dest_host: str,
                             dest_port: int,
                             server_name: str = None,
                             proxy: ProxyOptions = None):
    """Open an HTTP CONNECT tunnel to dest_host:dest_port via the tunnel service.

    tunnel_opts: endpoint, credentials and feature flags of the tunnel service
    server_name: overrides the Server-Name header; defaults to dest_host
    proxy:       optional upstream proxy advertised through the Proxy header
    Returns the connected HTTPConnection.
    """
    conn = HTTPConnection(tunnel_opts.host, tunnel_opts.port)
    headers = {
        "Authorization": generate_basic_header(tunnel_opts.auth_login, tunnel_opts.auth_password),
        "Client": tunnel_opts.client.value,
        "Connection": 'keep-alive',
        "Server-Name": server_name or dest_host,
        "Host": tunnel_opts.host,
        # Boolean flags are serialised as "0"/"1" strings.
        "Secure": str(int(tunnel_opts.secure)),
        "HTTP2": str(int(tunnel_opts.http2)),
    }
    if proxy:
        headers["Proxy"] = generate_proxy_url(proxy=proxy)
    conn.set_tunnel(dest_host, port=dest_port, headers=headers)
    conn.connect()
    return conn
def send(self, req):
    """GET *req* from the global IIS server; return True on HTTP 200.

    Any failure is logged via print_exc() / printed and reported as False.
    """
    print('iis send req : ', req)
    global ip_iis_server
    handler = HTTPConnection(ip_iis_server)
    # Force HTTP/1.0 on the wire (no chunked encoding / keep-alive semantics).
    handler._http_vsn = 10
    handler._http_vsn_str = 'HTTP/1.0'
    try:
        handler.connect()
    except Exception:  # was a bare except:, which also trapped KeyboardInterrupt
        print_exc()
        return False
    try:
        handler.request('GET', req)
    except Exception:
        print_exc()
        return False
    try:
        ack = handler.getresponse()
        body = ack.read()
    except Exception:
        print_exc()
        return False
    if ack.status == 200:
        return True
    else:
        print('status : ', ack.status)
        return False
def start_api_server(self):
    """Start api.py in a subprocess and block until it accepts HTTP requests.

    Probes the server with GET / up to 5 retries, then confirms the process
    did not exit during startup.  Returns the Popen handle.

    Raises: the last connection error after 5 failed probes, or Exception
    if the server process terminates prematurely.
    """
    dir_path = os.path.dirname(os.path.abspath(__file__))
    api_path = os.path.join(dir_path, 'api.py')
    redis_config = '{}:{}'.format(self.redis_server_host, self.redis_server_port)
    server_cmd_start = [
        'python3', api_path,
        '-p', str(self.api_server_port),
        '-s', redis_config
    ]
    server_proc = subprocess.Popen(server_cmd_start)
    counter = 0
    while True:
        conn = HTTPConnection(self.api_server_host, port=self.api_server_port)
        try:
            conn.connect()
            conn.request('GET', '/')
        except (TimeoutError, ConnectionRefusedError) as ex:
            if counter == 5:
                raise ex
            time.sleep(self.time_wait)
            counter += 1
        else:
            break
        finally:
            # The original leaked one socket per probe; always release it.
            conn.close()
    try:
        # If wait() returns within time_wait, the server died during startup.
        server_proc.wait(self.time_wait)
        raise Exception('API server is not started')
    except subprocess.TimeoutExpired:
        pass
    return server_proc
def connect(self):
    """
    Override the connect() function to intercept calls to certain
    host/ports.

    If no app at host/port has been registered for interception then
    a normal HTTPConnection is made.
    """
    if debuglevel:
        sys.stderr.write('connect: %s, %s\n' % (self.host, self.port,))
    try:
        # get_app returns (app, script_name); app is falsy when this
        # host/port is not registered for interception.
        (app, script_name) = self.get_app(self.host, self.port)
        if app:
            if debuglevel:
                sys.stderr.write('INTERCEPTING call to %s:%s\n' %
                                 (self.host, self.port,))
            # This variant also forwards a base WSGI environ (self.environ).
            self.sock = wsgi_fake_socket(app, self.host, self.port,
                                         script_name, self.environ)
        else:
            HTTPConnection.connect(self)
    except Exception:
        if debuglevel:
            # intercept & print out tracebacks
            traceback.print_exc()
        raise
def connect(self):
    """
    Override the connect() function to intercept calls to certain
    host/ports.
    """
    if debuglevel:
        sys.stderr.write('connect: %s, %s\n' % (self.host, self.port,))
    try:
        # get_app returns (app, script_name); app is falsy when this
        # host/port is not registered for interception.
        (app, script_name) = self.get_app(self.host, self.port)
        if app:
            if debuglevel:
                sys.stderr.write('INTERCEPTING call to %s:%s\n' % \
                                 (self.host, self.port,))
            # Replace the socket with an in-process WSGI shim.
            self.sock = wsgi_fake_socket(app, self.host, self.port, script_name)
        else:
            HTTPConnection.connect(self)
    except Exception as e:
        if debuglevel:
            # intercept & print out tracebacks
            traceback.print_exc()
        raise
class HTTPGetter:
    """Issue keep-alive GET requests against a fixed base URL.

    A single persistent HTTPConnection is reused; if it drops, get()
    reconnects once and retries the request.
    """

    def __init__(self, baseUrl, maxPending=10):
        self.baseUrl = baseUrl
        self.parsedBaseUrl = urlparse(baseUrl)
        self.maxPending = maxPending
        self.requests = []
        self.pendingRequests = []
        # netloc may carry an explicit port; HTTPConnection parses it.
        self.httpConnection = HTTPConnection(self.parsedBaseUrl.netloc)
        self.httpRequestHeaders = {
            'Host': self.parsedBaseUrl.netloc,
            'Content-Length': 0,
            'Connection': 'Keep-Alive',
            'User-Agent': 'FlightGear terrasync.py',
        }

    def doGet(self, httpGetCallback):
        """Perform one GET for *httpGetCallback*, store the response on it,
        and invoke its callback."""
        target = self.parsedBaseUrl.path + httpGetCallback.src
        self.httpConnection.request("GET", target, None, self.httpRequestHeaders)
        httpGetCallback.result = self.httpConnection.getresponse()
        httpGetCallback.callback()

    def get(self, httpGetCallback):
        """Run doGet(), reconnecting once if the persistent connection dropped."""
        try:
            self.doGet(httpGetCallback)
        except HTTPException:
            self.httpConnection.close()
            self.httpConnection.connect()
            self.doGet(httpGetCallback)
def main(argv):
    """Collector entry point: connect to each configured Elasticsearch
    server and poll them forever, one worker thread per server per cycle.

    Returns 1 if the json module is missing, 13 if no server is reachable
    (asks tcollector not to respawn us).
    """
    utils.drop_privileges()
    socket.setdefaulttimeout(DEFAULT_TIMEOUT)
    connections = []
    if json is None:
        utils.err("This collector requires the `json' Python module.")
        return 1
    for conf in elasticsearch_conf.get_servers():
        conn = HTTPConnection(*conf)
        try:
            conn.connect()
        except socket.error as exc:
            if exc.errno != errno.ECONNREFUSED:
                raise
            continue  # this server is simply down; skip it
        connections.append(conn)
    if not connections:
        return 13  # No ES running, ask tcollector to not respawn us.
    lock = threading.Lock()
    while True:
        workers = []
        for conn in connections:
            info = node_status(conn)
            version = info["version"]["number"]
            worker = threading.Thread(target=_collect_server,
                                      args=(conn, version, lock))
            worker.start()
            workers.append(worker)
        for worker in workers:
            worker.join()
        time.sleep(COLLECTION_INTERVAL)
def connect(self):
    """Log the connection target when tracing is enabled, then connect.

    debuglevel == -1 is this package's sentinel for "trace via console_write"
    (standard http.client debug levels are >= 0).
    """
    if self.debuglevel == -1:
        console_write(
            u'''
            Urllib %s Debug General
              Connecting to %s on port %s
            ''',
            (self._debug_protocol, self.host, self.port))
    HTTPConnection.connect(self)
def retrieve_item(server):
    # GET `endpoint` (from the enclosing scope) on the given server and
    # publish the raw HTTPResponse through the shared `results` dict.
    http_conn = HTTPConnection(server.hostname, server.port)
    http_conn.connect()
    http_conn.request("GET", endpoint, headers={"Content-Type": "application/json"})
    results["response"] = http_conn.getresponse()
    # NOTE(review): the connection is closed before the body is read;
    # confirm the response is fully buffered by the time callers read it.
    http_conn.close()
class HTTPGetter:
    """Keep-alive HTTP GET helper with reconnect/retry logic (terrasync)."""

    def __init__(self, baseUrl, maxPending=10):
        self.baseUrl = baseUrl
        self.parsedBaseUrl = urlparse(baseUrl)
        self.maxPending = maxPending
        self.requests = []
        self.pendingRequests = []
        # One persistent connection; netloc may include an explicit port.
        self.httpConnection = HTTPConnection(self.parsedBaseUrl.netloc)
        self.httpRequestHeaders = headers = {'Host':self.parsedBaseUrl.netloc,'Content-Length':0,'Connection':'Keep-Alive','User-Agent':'FlightGear terrasync.py'}

    def assemblePath(self, httpGetCallback):
        """Return the path-on-server for the file to download.

        Example: '/scenery/Airports/N/E/4/.dirindex'
        """
        assert not self.parsedBaseUrl.path.endswith('/'), \
            repr(self.parsedBaseUrl)
        return self.parsedBaseUrl.path + str(httpGetCallback.src)

    def assembleUrl(self, httpGetCallback):
        """Return the URL of the file to download."""
        baseUrl = self.parsedBaseUrl.geturl()
        assert not baseUrl.endswith('/'), repr(baseUrl)
        return urljoin(baseUrl + '/', httpGetCallback.src.asRelative())

    def doGet(self, httpGetCallback):
        """Issue one GET and hand the raw response to the callback."""
        conn = self.httpConnection
        pathOnServer = self.assemblePath(httpGetCallback)
        self.httpConnection.request("GET", pathOnServer, None,
                                    self.httpRequestHeaders)
        httpResponse = self.httpConnection.getresponse()
        # 'httpResponse' is an http.client.HTTPResponse instance
        return httpGetCallback.callback(self.assembleUrl(httpGetCallback),
                                        httpResponse)

    def get(self, httpGetCallback):
        """Run doGet(), reconnecting and retrying up to 5 times on HTTPException."""
        nbRetries = nbRetriesLeft = 5
        while True:
            try:
                return self.doGet(httpGetCallback)
            except HTTPException as exc:
                if nbRetriesLeft == 0:
                    raise NetworkError(
                        "after {nbRetries} retries for URL {url}: {errMsg}"
                        .format(nbRetries=nbRetries,
                                url=self.assembleUrl(httpGetCallback),
                                errMsg=exc)) from exc
                # Try to reconnect
                self.httpConnection.close()
                time.sleep(1)
                self.httpConnection.connect()
                nbRetriesLeft -= 1
def retrieve_ui(server):
    # GET `endpoint` (from the enclosing scope) as text/html and store the
    # response on `self`.
    # NOTE(review): `self` is not a parameter here — this function must be
    # nested inside a method where `self` is in scope, otherwise this raises
    # NameError. Confirm the enclosing context.
    http_conn = HTTPConnection(server.hostname, server.port)
    http_conn.connect()
    http_conn.request("GET", endpoint, headers={"Content-Type": "text/html"})
    self._response = http_conn.getresponse()
    http_conn.close()
def http_proxy_socket(address, proxy, auth=None, headers=None):
    """Open a CONNECT tunnel through an HTTP proxy and return the raw socket.

    address: (host, port) of the final destination
    proxy:   (host, port) of the HTTP proxy
    auth:    optional (user, password) pair for Basic proxy authentication
    headers: optional extra headers for the CONNECT request (mutated in
             place when *auth* is given)
    """
    if headers is None:
        headers = {}
    if auth is not None:
        credentials = '%s:%s' % (auth[0], auth[1])
        headers['proxy-authorization'] = 'Basic ' + b64encode(credentials.encode()).decode()
    proxy_conn = HTTPConnection(proxy[0], proxy[1])
    proxy_conn.set_tunnel(address[0], address[1], headers)
    proxy_conn.connect()
    return proxy_conn.sock
def get_tunneled_connection(host, port, proxy):
    """Return an HTTPConnection tunneled (CONNECT) through *proxy* to host:port.

    Adds a Proxy-Authorization header when the proxy carries credentials.
    """
    tunnel_headers = {}
    if proxy.username and proxy.password:
        tunnel_headers['Proxy-Authorization'] = get_proxy_auth_header(proxy)
    conn = HTTPConnection(proxy.host, proxy.port)
    conn.set_tunnel(host, port, tunnel_headers)
    conn.connect()
    return conn
def connect(self):
    """Log the connection target when tracing is enabled, then connect.

    debuglevel == -1 is this package's sentinel for "trace via console_write".
    """
    if self.debuglevel == -1:
        console_write(
            u'''
            Urllib %s Debug General
              Connecting to %s on port %s
            ''',
            (self._debug_protocol, self.host, self.port)
        )
    HTTPConnection.connect(self)
def connect(self):
    """Connect, then wrap the socket in TLS according to the configured CA file."""
    if not self.__ca_file:
        # No custom CA handling: default HTTPS behaviour.
        HTTPSConnection.connect(self)
    else:
        # Open a plain TCP connection first, then layer TLS on top of it.
        HTTPConnection.connect(self)
        if self.__ca_file == HTTPSConfigurableConnection.IGNORE:
            # Sentinel value: skip certificate verification entirely.
            self.sock = ssl.wrap_socket(self.sock, cert_reqs=ssl.CERT_NONE)
        else:
            # Verify the peer against the supplied CA bundle.
            # NOTE(review): ssl.wrap_socket() is deprecated and removed in
            # Python 3.12 — migrate to an SSLContext.
            self.sock = ssl.wrap_socket(self.sock, ca_certs=self.__ca_file,
                                        cert_reqs=ssl.CERT_REQUIRED)
def makeRequest(url, values=None, verb='GET', accept="text/plain",
                contentType=None, secure=False, secureParam=None):
    """Issue an HTTP request and return (body, status, content-type, response).

    values:      dict of parameters; urlencoded for GET and (by default) POST
    verb:        HTTP method
    accept:      Accept header value
    contentType: request content type; form-urlencoded when omitted
    secure:      when True, attach fake CMS auth headers built from secureParam
    secureParam: dict with 'role', 'group', 'site' and 'key' (HMAC secret).
                 Defaults to {} — the original used a shared mutable default.
    """
    if secureParam is None:
        secureParam = {}
    contentType = contentType or "application/x-www-form-urlencoded"
    # (The original assigned headers = {} first and immediately overwrote it.)
    headers = {"content-type": contentType,
               "Accept": accept,
               "cms-auth-status": "NONE"}
    if secure:
        headers.update({"cms-auth-status": "OK",
                        "cms-authn-dn": "/DC=ch/OU=Organic Units/OU=Users/CN=Fake User",
                        "cms-authn-name": "Fake User",
                        "cms-authz-%s" % secureParam['role']:
                            "group:%s site:%s" % (secureParam['group'], secureParam['site'])})
        headers["cms-authn-hmac"] = _generateHash(secureParam["key"], headers)
    data = None
    if values:
        if verb == 'GET':
            data = urllib.parse.urlencode(values, doseq=True)
        elif contentType == "application/x-www-form-urlencoded":
            data = urllib.parse.urlencode(values)
        else:
            # for other encoding schemes, values are assumed encoded already
            data = values
    parser = urllib.parse.urlparse(url)
    uri = parser.path
    if parser.query:
        uri += "?" + parser.query
    if verb == 'GET' and data is not None:
        uri = '%s?%s' % (uri, data)
    # Non-GET methods must carry an explicit Content-Length.
    # TODO: this function needs refactoring - too verb-related branching
    if verb != 'GET':
        headers.update({"content-length": len(data) if data else 0})
    conn = HTTPConnection(parser.netloc)
    conn.connect()
    conn.request(verb, uri, data, headers)
    response = conn.getresponse()
    data = response.read()
    conn.close()
    # data returned could be something json-like (e.g. b'"foo"'); callers
    # decode according to the content type returned here.
    cType = response.getheader('content-type').split(';')[0]
    return data, response.status, cType, response
def __connectionChecker(self):
    """Probe self.server with a 2-second connect; return 1 if "up", else 0.

    NOTE(review): a refused connection (ECONNREFUSED) leaves status at 1,
    i.e. it is treated as success; only *other* socket errors yield 0.
    Confirm this inversion is intentional.
    """
    print("Conn check", self.server)
    conn = HTTPConnection(self.server, timeout=2)
    status = 1
    try:
        conn.connect()
    except socket.error as e:
        if e.errno != errno.ECONNREFUSED:
            status = 0
    conn.close()
    return status
class WebDAV(object):
    """Minimal WebDAV client: Basic-auth uploads, collection creation and
    existence checks.  Every request opens and closes the connection, so an
    instance can be kept around between calls.
    """

    def __init__(self, url, username=None, password=None):
        # TODO: validate args
        # NOTE(review): `url` goes straight to HTTPConnection, so it must be
        # a bare host[:port], not a full http:// URL — confirm callers.
        self.connection = HTTPConnection(url)
        self.connection.close()
        self.headers = {}
        if username and password:
            self.auth(username, password)

    def auth(self, username, password):
        """Store a Basic Authorization header used by all later requests."""
        # TODO: validate args
        auth = b64encode(bytes(username + ':' + password, 'ascii')).decode('ascii')
        self.headers['Authorization'] = 'Basic %s' % auth

    def exists(self, path):
        """Return True if a GET on *path* answers 200."""
        # TODO: validate args
        response = self._request('GET', path)
        return response.status == 200

    def upload(self, path, file_path):
        """PUT the local file *file_path* to *path*; False if the target
        already exists or the local file is missing."""
        # TODO: validate args
        # if not name:
        if self.exists(path):
            return False
        if not os.path.isfile(file_path):
            return False
        with open(file_path, 'rb') as file:
            response = self._request('PUT', path, file.read())
        return response.status == 201

    def mkdir(self, path):
        """MKCOL *path*; False if it already exists.

        NOTE(review): unlike upload(), this returns the raw (always truthy)
        status code instead of a boolean — confirm callers expect that.
        """
        # TODO: validate args
        if self.exists(path):
            return False
        response = self._request('MKCOL', path)
        return response.status

    def _request(self, method, path, file=None):
        """Connect, issue one request with the stored headers, close, and
        return the HTTPResponse."""
        if path[0] != '/':  # account for stupidity
            path = '/' + path
        self.connection.connect()
        self.connection.request(method, path, file, headers=self.headers)
        response = self.connection.getresponse()
        self.connection.close()
        return response
def main(argv):
    """Flume metrics collector entry point.

    Reads overrides from flume_conf, connects once to the Flume HTTP JSON
    endpoint, then prints tcollector-formatted metrics forever.
    Exits/returns 13 when Flume is unavailable (tells tcollector not to
    respawn us), 1 when the json module is missing.
    """
    if not (flume_conf and flume_conf.enabled() and flume_conf.get_settings()):
        sys.exit(13)
    settings = flume_conf.get_settings()
    # Fall back to the module-level defaults when a setting is absent/falsy.
    # The original assigned to the module constants' own names inside the
    # function, turning them into locals (UnboundLocalError when the setting
    # was falsy), and guarded collection_interval behind 'default_timeout'.
    default_timeout = settings.get('default_timeout') or DEFAULT_TIMEOUT
    collection_interval = settings.get('collection_interval') or COLLECTION_INTERVAL
    flume_host = settings.get('flume_host') or FLUME_HOST
    flume_port = settings.get('flume_port') or FLUME_PORT

    utils.drop_privileges()
    socket.setdefaulttimeout(default_timeout)
    server = HTTPConnection(flume_host, flume_port)
    try:
        server.connect()
    except socket.error as exc:
        if exc.errno == errno.ECONNREFUSED:
            return 13  # No Flume server available, ask tcollector to not respawn us.
        raise
    if json is None:
        err("This collector requires the `json' Python module.")
        return 1

    def printmetric(metric, value, **tags):
        # `ts` is read from the enclosing scope; it is set before each batch.
        if tags:
            tags = " " + " ".join("%s=%s" % (name, tagval)
                                  for name, tagval in tags.items())
        else:
            tags = ""
        print(("flume.%s %d %s %s" % (metric, ts, value, tags)))

    while True:
        # Get the metrics
        ts = int(time.time())  # In case last call took a while.
        stats = flume_metrics(server)
        for metric in stats:
            (component, name) = metric.split(".")
            tags = {component.lower(): name}
            for key, value in stats[metric].items():
                if key not in EXCLUDE:
                    printmetric(key.lower(), value, **tags)
        time.sleep(collection_interval)
def update_item(server):
    """PUT `expected_body` to `endpoint`, then GET it back, publishing each
    HTTPResponse through the shared `results` dict (closure variables).
    """
    http_conn = HTTPConnection(server.hostname, server.port)
    http_conn.connect()
    http_conn.request("PUT", endpoint, json.dumps(expected_body).encode(),
                      {"Content-Type": "application/json"})
    # The PUT response must be fully consumed before the connection can be
    # reused — http.client raises ResponseNotReady on the next getresponse()
    # otherwise (the original skipped the read()).
    put_response = http_conn.getresponse()
    put_response.read()
    results["response"] = put_response
    http_conn.request("GET", endpoint, headers={"Content-Type": "application/json"})
    results["response"] = http_conn.getresponse()
    http_conn.close()
def testBasic(self):
    """ Test Basic """
    console.terse("{0}\n".format(self.testBasic.__doc__))
    console.terse("{0}\n".format("Connecting ...\n"))
    # Plain HTTP connection to the local test server.
    hc = HTTPConnection('127.0.0.1', port=8080, timeout=1.0,)
    hc.connect()

    console.terse("{0}\n".format("Get '/echo?name=fame' ...\n"))
    headers = odict([('Accept', 'application/json')])
    hc.request(method='GET', path='/echo?name=fame', body=None, headers=headers )
    response = hc.getresponse()
    console.terse(str(response.fileno()) + "\n")  # must call this before read
    console.terse(str(response.getheaders()) + "\n")
    console.terse(str(response.msg) + "\n")
    console.terse(str(response.version) + "\n")
    console.terse(str(response.status) + "\n")
    console.terse(response.reason + "\n")
    console.terse(str(response.read()) + "\n")

    console.terse("{0}\n".format("Post ...\n"))
    headers = odict([('Accept', 'application/json'),
                     ('Content-Type', 'application/json')])
    body = odict([('name', 'Peter'), ('occupation', 'Engineer')])
    # Serialise compactly and convert to bytes for the request body.
    body = ns2b(json.dumps(body, separators=(',', ':')))
    hc.request(method='POST', path='/demo', body=body, headers=headers )
    response = hc.getresponse()
    console.terse(str(response.fileno()) + "\n")  # must call this before read
    console.terse(str(response.getheaders()) + "\n")
    console.terse(str(response.msg) + "\n")
    console.terse(str(response.version) + "\n")
    console.terse(str(response.status) + "\n")
    console.terse(response.reason+ "\n")
    console.terse(str(response.read()) + "\n")

    #console.terse("{0}\n".format("SSE stream ...\n"))
    #body = b''
    #headers = odict([('Accept', 'application/json'), ('Content-Type', 'application/json')])
    #hc.request(method='GET', path='/stream', body=body, headers=headers )
    #response = hc.getresponse()
    #console.terse(str(response.fileno()) + "\n") # must call this before read
    #console.terse(str(response.getheaders()) + "\n")
    #console.terse(str(response.msg) + "\n")
    #console.terse(str(response.version) + "\n")
    #console.terse(str(response.status) + "\n")
    #console.terse(response.reason+ "\n")
    #console.terse(str(response.read()) + "\n")

    hc.close()
def post(cookie):
    """POST the level-4 form through the local proxy (port 8118), tunneled
    via CONNECT to 158.69.76.135, sending *cookie* both as the session
    cookie and as the form key."""
    payload = b'id=701&holdthedoor=Submit&key=' + cookie.encode('ASCII')
    proxy = HTTPConnection('localhost', 8118)
    proxy.set_tunnel('158.69.76.135')
    proxy.connect()
    proxy.putrequest('POST', '/level4.php')
    proxy.putheader('Cookie', 'HoldTheDoor=' + cookie)
    proxy.putheader('Content-Type', 'application/x-www-form-urlencoded')
    proxy.putheader('Referer', 'http://158.69.76.135/level4.php')
    proxy.putheader('Content-Length', str(len(payload)))
    proxy.endheaders()
    proxy.send(payload)
    response = proxy.getresponse()
    proxy.close()
def getRemoteId(ip):
    """Send a GET request to given IP and return its id

    :param ip: ip to send the request to
    :raises ProbeConnectionFailed: when connecting/requesting/reading fails
    """
    try:
        connection = HTTPConnection(ip, Parameters.COMMANDER_PORT_NUMBER)
        try:
            connection.connect()
            connection.request(Parameters.HTTP_GET_REQUEST, Parameters.URL_ID_QUERY, "", {})
            probeId = connection.getresponse().read().decode(Parameters.REPLY_MESSAGE_ENCODING)
            # logger.logger.info("Id of probe with ip " + str(targetIp) + " is " + str(probeId))
            return probeId
        finally:
            # Also release the socket on error — the original leaked it.
            connection.close()
    except Exception as e:
        # Chain the cause explicitly so the original traceback is preserved.
        raise ProbeConnectionFailed(e) from e
def use_http_client():
    """Fetch http://ip.cn/ with a browser User-Agent (debug tracing on) and
    print the decoded body."""
    from http.client import HTTPConnection
    user_agent = ('Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) '
                  'AppleWebKit/537.36 (KHTML, like Gecko) '
                  'Chrome/61.0.3163.100 Safari/537.36')
    conn = HTTPConnection('ip.cn', 80)
    conn.set_debuglevel(1)
    conn.connect()
    conn.putrequest('GET', '/')
    conn.putheader('User-Agent', user_agent)
    conn.endheaders()
    res = conn.getresponse()
    print('\n', res.read().decode('utf-8'))
    conn.close()
def collect(self):
    """Fetch self.cups_url from the CUPS host and wrap the outcome in a
    CollectingResult (success with the body text, or failure with a reason)."""
    connection = HTTPConnection(host=self.cups_host, port=self.cups_port)
    try:
        connection.connect()
        connection.request(GET_METHOD, self.cups_url)
        response = connection.getresponse()
        if response.code == 200:
            # Decode the body line by line and join into a single string.
            response_text = "".join(
                [l.decode(ENCODING) for l in response.readlines()])
            return CollectingResult(data=response_text)
        else:
            return CollectingResult(sucess=False, data=response.msg)
    except Exception as e:
        # NOTE(review): e.args[1] assumes a two-element args tuple (as with
        # socket.error); other exception types may raise IndexError here.
        return CollectingResult(sucess=False, data=e.args[1])
    finally:
        connection.close()
def getRemoteId(ip):
    """Send a GET request to given IP and return its id

    :param ip: ip to send the request to
    """
    try:
        conn = HTTPConnection(ip, Parameters.COMMANDER_PORT_NUMBER)
        conn.connect()
        conn.request(Parameters.HTTP_GET_REQUEST, Parameters.URL_ID_QUERY, "", {})
        raw_reply = conn.getresponse().read()
        probeId = raw_reply.decode(Parameters.REPLY_MESSAGE_ENCODING)
        # logger.logger.info("Id of probe with ip " + str(targetIp) + " is " + str(probeId))
        conn.close()
        return probeId
    except Exception as e:
        raise ProbeConnectionFailed(e)
def connect(self) -> None:
    """Open the connection, wrapping the socket in TLS per the CA setting.

    - no CA file configured: plain HTTPSConnection behaviour
    - IGNORE sentinel: TLS without any certificate verification
    - otherwise: require a certificate valid under the given CA bundle
    """
    if not self.__ca_file:
        HTTPSConnection.connect(self)
        return
    HTTPConnection.connect(self)
    # ssl.wrap_socket() was deprecated in 3.7 and removed in 3.12; build an
    # SSLContext instead (this resolves the old TODO).
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    # wrap_socket() never checked the hostname, only the certificate chain;
    # keep that behaviour to avoid breaking existing deployments.
    context.check_hostname = False
    if self.__ca_file == HTTPSConfigurableConnection.IGNORE:
        context.verify_mode = ssl.CERT_NONE
    else:
        context.verify_mode = ssl.CERT_REQUIRED
        context.load_verify_locations(cafile=self.__ca_file)
    self.sock = context.wrap_socket(self.sock)
def getRemoteId(targetIp):
    """Get the remote ID of the probe at targetIp

    :param targetIp: the IP where we should ask for the id
    """
    try:
        conn = HTTPConnection(targetIp,
                              Parameters.PORT_NUMBER,
                              timeout=Parameters.GET_REMOTE_ID_CONNECT_TIMEOUT)
        conn.connect()
        conn.request(Parameters.HTTP_GET_REQUEST, Parameters.URL_SRV_ID_QUERY, "", {})
        reply_bytes = conn.getresponse().read()
        probeId = reply_bytes.decode(Parameters.REPLY_MESSAGE_ENCODING)
        # logger.logger.info("Id of probe with ip " + str(targetIp) + " is " + str(probeId))
        conn.close()
        return probeId
    except Exception as e:
        raise ProbeConnectionException(e)
def RunCmd(objCommand):
    """Run *objCommand* on the cluster head node over SSH, tunneling the SSH
    TCP stream through an HTTP CONNECT proxy.

    SECURITY NOTE(review): username/password are hard-coded in source —
    move them to configuration or secret storage.  AutoAddPolicy also
    disables host-key verification.
    """
    host_proxy = "connect2.virtual.uniandes.edu.co"
    port_proxy = 443
    host_server = "bigdata-cluster1-ambari.virtual.uniandes.edu.co"
    port_server = 22
    user_server = "bigdata09"
    pass_server = "Rojo2020"
    # CONNECT tunnel: the proxy forwards raw bytes to host_server:22.
    http_con = HTTPConnection(host_proxy, port_proxy)
    http_con.set_tunnel(host_server, port_server)
    http_con.connect()
    sock = http_con.sock
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    # Reuse the tunneled socket as the SSH transport.
    ssh.connect(hostname=host_server, username=user_server,
                password=pass_server, sock=sock)
    # NOTE(review): exec_command returns immediately; closing right after may
    # terminate the remote command before it finishes — confirm intent.
    ssh.exec_command(objCommand)
    ssh.close()
    http_con.close()
def get_wanip_path(upnp_url):
    """Fetch the UPnP device description at *upnp_url* and return the
    controlURL path of its WANIPConnection service.

    If no WANIPConnection service is found, the originally requested path is
    returned unchanged (preserving the historical behaviour).
    """
    url = urlparse(upnp_url)
    host = url.netloc.split(':')[0]
    port = int(url.port)
    path = url.path
    print('host,port', host, port, path)
    conn = HTTPConnection(host, port)
    try:
        conn.connect()
        conn.request('GET', path)
        directory = conn.getresponse().read().decode()
    finally:
        conn.close()  # the original never closed this connection
    # Create a DOM object that represents the `directory` document.
    dom = parseString(directory)
    # Iterate over serviceType elements looking for WANIPConnection.  (This
    # could also check for WANPPPConnection, which ADSL routers expose with a
    # similar SOAP interface.)
    for service in dom.getElementsByTagName('serviceType'):
        # A 'serviceType' element contains a single text node, e.g.
        # "urn:schemas-upnp-org:service:WANIPConnection:1".  Use substring
        # membership rather than find(...) > 0, which missed a match at
        # index 0.
        if 'WANIPConnection' in service.childNodes[0].data:
            # Step up into the parent <service> element and read controlURL.
            path = service.parentNode.getElementsByTagName(
                'controlURL')[0].childNodes[0].data
    return path
def connect(self):
    """
    Override the connect() function to intercept calls to certain
    host/ports.
    """
    if debuglevel:
        sys.stderr.write('connect: %s, %s\n' % (self.host, self.port,))
    try:
        # get_app returns (app, script_name); app is falsy when this
        # host/port is not registered for interception.
        (app, script_name) = self.get_app(self.host, self.port)
        if app:
            if debuglevel:
                sys.stderr.write('INTERCEPTING call to %s:%s\n' % \
                                 (self.host, self.port,))
            # Replace the socket with an in-process WSGI shim.
            self.sock = wsgi_fake_socket(app, self.host, self.port, script_name)
        else:
            HTTPConnection.connect(self)
    except Exception as e:
        if debuglevel:
            # intercept & print out tracebacks
            traceback.print_exc()
        raise
def test_server():
    """Smoke-test the HTTP server: fetch several URLs over one keep-alive
    connection and compare each body against the expected content.

    Reads module globals: hostname, port, sfilecontent, lfilecontent,
    wwwcscont.  Exits the process with -1 on any mismatch.
    """
    http_conn = HTTPConnection(hostname, port)
    http_conn.connect()
    for url, expected in zip(
        ["/small", "/large", "/www.cs.vt.edu-20200417.html", "/api/login"],
        [sfilecontent, lfilecontent, wwwcscont, "{}"],
    ):
        http_conn.request("GET", url)
        server_response = http_conn.getresponse()
        sfile = server_response.read().decode("utf-8")
        if server_response.status != OK:
            print("Server returned %s for %s, expected %d." % (server_response.status, url, OK))
            sys.exit(-1)
        # `expected` is either an int (minimum body length) or the exact body.
        if (isinstance(expected, int) and len(sfile) >= expected
                or isinstance(expected, (str, bytes)) and sfile == expected):
            print("Retrieved %s ok." % (url))
        else:
            print("Did not find expected content at %s." % (url))
            sys.exit(-1)
    http_conn.close()
def DownloadFile(objRemoteFile, objLocalFile):
    """Download *objRemoteFile* from the cluster head node to *objLocalFile*
    via SFTP, tunneling SSH through an HTTP CONNECT proxy.

    SECURITY NOTE(review): username/password are hard-coded in source —
    move them to configuration or secret storage.  AutoAddPolicy also
    disables host-key verification.
    """
    host_proxy = "connect2.virtual.uniandes.edu.co"
    port_proxy = 443
    host_server = "bigdata-cluster1-ambari.virtual.uniandes.edu.co"
    port_server = 22
    user_server = "bigdata09"
    pass_server = "Rojo2020"
    # CONNECT tunnel: the proxy forwards raw bytes to host_server:22.
    http_con = HTTPConnection(host_proxy, port_proxy)
    http_con.set_tunnel(host_server, port_server)
    http_con.connect()
    sock = http_con.sock
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    # Reuse the tunneled socket as the SSH transport.
    ssh.connect(hostname=host_server, username=user_server,
                password=pass_server, sock=sock)
    sftp = ssh.open_sftp()
    sftp.get(objRemoteFile, objLocalFile)
    sftp.close()
    ssh.close()
    http_con.close()
def _connection(self):
    """Open and return a fresh HTTPConnection to the configured server/port."""
    connection = HTTPConnection(self._server, self._port)
    connection.connect()
    return connection
def _run(self):
    """Main streaming loop.

    Connects (optionally through a proxy), POSTs the stream request, and
    dispatches the response body to the read loop.  HTTP errors and socket
    timeouts are retried (up to self.retry_count); any other exception is
    fatal and re-raised after cleanup.
    """
    # Authenticate
    url = "%s://%s%s" % (self.scheme, self.host, self.url)

    # Connect and process the stream
    error_counter = 0
    conn = None
    exception = None
    while self.running:
        if self.retry_count is not None and error_counter > self.retry_count:
            # quit if error count greater than retry count
            break
        try:
            if self.scheme == "http":
                if self.api.proxy_host:
                    conn = HTTPConnection(self.api.proxy_host, self.api.proxy_port)
                else:
                    conn = HTTPConnection(self.host)
            else:
                if self.api.proxy_host:
                    conn = HTTPSConnection(self.api.proxy_host, self.api.proxy_port)
                else:
                    conn = HTTPSConnection(self.host)
            self.auth.apply_auth(url, 'POST', self.headers, self.parameters)
            conn.connect()
            conn.sock.settimeout(self.timeout)
            _url = self.url
            if self.api.proxy_host:
                # Proxies need the absolute URI.  The original concatenated
                # scheme + host + url, producing a malformed target like
                # "httpshost.comhttps://host.com/path".
                _url = url
            if self.method == "POST":
                if self.post_data:
                    self.headers["Content-Length"] = len(self.post_data)
                else:
                    self.headers["Content-Length"] = "0"
            conn.request('POST', _url, self.body, headers=self.headers)
            resp = conn.getresponse()
            if resp.status != 200:
                if self.listener.on_error(resp.status) is False:
                    break
                error_counter += 1
                sleep(self.retry_time)
            else:
                error_counter = 0
                encoding = resp.getheader('content-encoding', '')
                if encoding.strip().lower() == 'gzip':
                    self._read_gzip_loop(resp)
                else:
                    self._read_loop(resp)
        except timeout:
            if self.listener.on_timeout() == False:
                break
            if self.running is False:
                break
            conn.close()
            sleep(self.snooze_time)
        except Exception as exc:
            # Any other exception is fatal, so kill the loop.  Bind it to
            # `exception` explicitly: the name bound by `except ... as` is
            # deleted when the handler exits, so the original
            # `except Exception as exception` left `exception` unbound and
            # the final `if exception:` raised NameError instead.
            exception = exc
            break

    # cleanup
    self.running = False
    if conn:
        conn.close()
    if exception:
        raise exception
def connect(self):
    """Connect, then disable Nagle's algorithm for lower request latency."""
    HTTPConnection.connect(self)
    self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
def connect(self):
    """Emit a debug trace of the target host/port, then connect normally.

    debuglevel == -1 is this package's sentinel for "trace via console_write".
    """
    if self.debuglevel == -1:
        console_write(u'Urllib %s Debug General' % self._debug_protocol, True)
        console_write(u" Connecting to %s on port %s" % (self.host, self.port))
    HTTPConnection.connect(self)
# Android QPython 3 # yus 20150912 import android, time from http.client import HTTPConnection droid = android.Android() droid.startSensingTimed(1, 250) try: conn = HTTPConnection('192.168.0.100',8080) conn.connect() time.sleep(2.0) while True: time.sleep(0.1) #s = droid.readSensors().result s = droid.sensorsReadOrientation().result #p = ','.join(map(str,s)) p = '%.7f,%.7f,%.7f'%(s[0],s[1],s[2]) conn.request('GET', p) print(p) resp = conn.getresponse().read() #print(resp) except: pass finally: conn.close() droid.stopSensing()
class KyotoTycoonConnection(Connection):
    """TSV-RPC client for a Kyoto Tycoon server over one persistent HTTP
    connection.  Keys and values pass through the configured serializers;
    all wire field names are byte strings."""

    # Well-known TSV-RPC field names (as sent on the wire).
    NAME_KEY = b'key'
    NAME_VALUE = b'value'
    NAME_DB = b'DB'
    NAME_XT = b'xt'
    NAME_ORIG = b'orig'
    NAME_ATOMIC = b'atomic'
    NAME_NUM = b'num'
    NAME_PREFIX = b'prefix'
    NAME_MAX = b'max'
    NAME_VSIZ = b'vsiz'
    NAME__ = b'_'
    NAME_ERROR = b'ERROR'

    def __init__(self, host, port, timeout=None):
        # HTTPException is registered as a retryable/connection error type.
        super(KyotoTycoonConnection, self).__init__([HTTPException])
        self.key_serializer = StrSerializer()
        self.value_serializer = StrSerializer()
        self.connection = HTTPConnection(host, port, timeout=timeout)
        self.connection.connect()
        self.str = "%s#%d(%s:%d)" % (self.__class__.__name__, id(self), host, port)
        self._text_encoding = 'utf-8'

    def __str__(self):
        return self.str

    def close(self):
        """Close the underlying HTTP connection."""
        self.connection.close()

    # --- small (de)serialisation helpers ---------------------------------

    def _encode_text(self, t):
        return t.encode(self._text_encoding)

    def _decode_text(self, b):
        return b.decode(self._text_encoding)

    def _encode_int(self, i):
        # None passes through (field omitted by the assoc_* helpers).
        if i is None:
            return None
        return self._encode_text(str(i))

    def _decode_int(self, b):
        if b is None:
            return None
        return int(self._decode_text(b))

    def _key_ser(self, v):
        return self.key_serializer.serialize(v)

    def _key_deser(self, b):
        return self.key_serializer.deserialize(b)

    def _value_ser(self, v):
        return self.value_serializer.serialize(v)

    def _value_deser(self, b):
        return self.value_serializer.deserialize(b)

    def call(self, name, input):
        """POST one TSV-RPC procedure call and return the decoded output
        columns.

        Raises LogicalInconsistencyError on HTTP 450 and KyotoError on any
        other non-200 status, using the server's ERROR field when present.
        """
        in_encoding = URLColumnEncoding()
        body = TsvRpc.write(input, in_encoding)
        headers = {"Content-Type": TsvRpc.content_type_for(in_encoding)}
        self.connection.request("POST", "/rpc/%s" % name, body, headers)
        response = self.connection.getresponse()
        status, reason = response.status, response.reason
        out_encoding = TsvRpc.column_encoding_for(response.getheader("Content-Type"))
        x = response.read()
        output = TsvRpc.read(x, out_encoding) if out_encoding else None
        if status == 200:
            return output
        # NOTE(review): when output is empty, `reason` (a str) is passed to
        # _decode_text(), which expects bytes — confirm that path.
        message = self._decode_text(assoc_get(output, self.NAME_ERROR) if output else reason)
        if status == 450:
            raise LogicalInconsistencyError(message)
        else:
            raise KyotoError(message)

    def void(self):
        """No-op RPC; useful as a connectivity check."""
        self.call("void", [])

    def echo(self, records):
        """Round-trip *records* (a mapping) through the server and return them."""
        input = [(self._key_ser(k), self._value_ser(v)) for k, v in records.items()]
        output = self.call("echo", input)
        return {self._key_deser(k): self._value_deser(v) for k, v in output}

    def report(self):
        """Return the server report as a str-to-str dict."""
        output = self.call("report", [])
        return {self._decode_text(k): self._decode_text(v) for k, v in output}

    def status(self, db=None):
        """Return status of the (optionally selected) database."""
        input = []
        assoc_append_if_not_none(input, self.NAME_DB, db)
        output = self.call("status", input)
        return {self._decode_text(k): self._decode_text(v) for k, v in output}

    def clear(self, db=None):
        """Remove all records from the (optionally selected) database."""
        input = []
        assoc_append_if_not_none(input, self.NAME_DB, db)
        self.call("clear", input)

    def set(self, key, value, xt=None, db=None):
        """Store *value* under *key*; xt is the expiration time, if any."""
        input = []
        assoc_append(input, self.NAME_KEY, self._key_ser(key))
        assoc_append(input, self.NAME_VALUE, self._value_ser(value))
        assoc_append_if_not_none(input, self.NAME_XT, self._encode_int(xt))
        assoc_append_if_not_none(input, self.NAME_DB, db)
        self.call("set", input)

    def add(self, key, value, xt=None, db=None):
        """Like set(), but the server rejects the call if *key* already exists."""
        input = []
        assoc_append(input, self.NAME_KEY, self._key_ser(key))
        assoc_append(input, self.NAME_VALUE, self._value_ser(value))
        assoc_append_if_not_none(input, self.NAME_XT, self._encode_int(xt))
        assoc_append_if_not_none(input, self.NAME_DB, db)
        self.call("add", input)

    def increment(self, key, num, orig=None, xt=None, db=None):
        """Atomically add *num* to the value under *key*; returns the new value."""
        input = []
        assoc_append(input, self.NAME_KEY, self._key_ser(key))
        assoc_append(input, self.NAME_NUM, self._encode_int(num))
        assoc_append_if_not_none(input, self.NAME_ORIG, orig)
        assoc_append_if_not_none(input, self.NAME_XT, self._encode_int(xt))
        assoc_append_if_not_none(input, self.NAME_DB, db)
        output = self.call("increment", input)
        return int(assoc_get(output, self.NAME_NUM))

    def get(self, key, db=None):
        """Return (value, xt) for *key*; xt is None when no expiration is set."""
        input = []
        assoc_append(input, self.NAME_KEY, self._key_ser(key))
        assoc_append_if_not_none(input, self.NAME_DB, db)
        output = self.call("get", input)
        return self._value_deser(assoc_get(output, self.NAME_VALUE)), \
            self._decode_int(assoc_find(output, self.NAME_XT))

    def check(self, key, db=None):
        """Return (value size, xt) for *key* without fetching the value."""
        input = []
        assoc_append(input, self.NAME_KEY, self._key_ser(key))
        assoc_append_if_not_none(input, self.NAME_DB, db)
        output = self.call("check", input)
        return int(assoc_get(output, self.NAME_VSIZ)), \
            self._decode_int(assoc_find(output, self.NAME_XT))

    def remove_bulk(self, keys, atomic=None, db=None):
        """Remove several keys at once; returns the number removed.

        Keys are sent as '_'-prefixed field names, per the TSV-RPC protocol.
        """
        input = []
        if atomic:
            assoc_append(input, self.NAME_ATOMIC, b'')
        assoc_append_if_not_none(input, self.NAME_DB, db)
        for key in keys:
            assoc_append(input, self.NAME__ + self._key_ser(key), b'')
        output = self.call("remove_bulk", input)
        return int(assoc_get(output, self.NAME_NUM))

    def get_bulk(self, keys, atomic=None, db=None):
        """Fetch several keys at once; returns a dict of the keys found.

        The server echoes hits as '_'-prefixed fields; the prefix is stripped.
        """
        input = []
        if atomic:
            assoc_append(input, self.NAME_ATOMIC, b'')
        assoc_append_if_not_none(input, self.NAME_DB, db)
        for key in keys:
            assoc_append(input, self.NAME__ + self._key_ser(key), b'')
        output = self.call("get_bulk", input)
        return dict([(self._key_deser(k[1:]), self._value_deser(v))
                     for k, v in output if k.startswith(self.NAME__)])

    def match_prefix(self, prefix, max=None, db=None):
        """Return the keys matching *prefix* (at most *max* when given)."""
        input = []
        assoc_append(input, self.NAME_PREFIX, self._key_ser(prefix))
        assoc_append_if_not_none(input, self.NAME_MAX, self._encode_int(max))
        assoc_append_if_not_none(input, self.NAME_DB, db)
        output = self.call("match_prefix", input)
        return [self._key_deser(k[1:]) for k, v in output if k.startswith(self.NAME__)]
def connect(self):
    """Establish the TCP connection and set TCP_NODELAY (disable Nagle)."""
    _HTTPConnection.connect(self)
    self.sock.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1)