def send_email(request):
    """Fetch a graphite-rendered image over HTTP and email it as an attachment.

    Expects ``request.GET['to']`` (comma-separated recipients) and
    ``request.GET['url']`` (the graph URL).  Returns "OK" on success or the
    formatted traceback text on any failure.
    """
    try:
        recipients = request.GET['to'].split(',')
        url = request.GET['url']
        proto, server, path, query, frag = urlsplit(url)
        if query:
            path += '?' + query
        conn = HTTPConnection(server)
        conn.request('GET', path)
        try:  # Python 2.7+, use buffering of HTTP responses
            resp = conn.getresponse(buffering=True)
        except TypeError:  # Python 2.6 and older
            resp = conn.getresponse()
        assert resp.status == 200, "Failed HTTP response %s %s" % (resp.status, resp.reason)
        rawData = resp.read()
        conn.close()
        message = MIMEMultipart()
        message['Subject'] = "Graphite Image"
        message['To'] = ', '.join(recipients)
        message['From'] = 'composer@%s' % gethostname()
        text = MIMEText(
            "Image generated by the following graphite URL at %s\r\n\r\n%s"
            % (ctime(), url))
        image = MIMEImage(rawData)
        image.add_header('Content-Disposition', 'attachment',
                         filename="composer_" + strftime("%b%d_%I%M%p.png"))
        message.attach(text)
        message.attach(image)
        s = SMTP(settings.SMTP_SERVER)
        s.sendmail('composer@%s' % gethostname(), recipients, message.as_string())
        s.quit()
        return HttpResponse("OK")
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # still propagate; the traceback is returned to the caller as before.
        return HttpResponse(format_exc())
def wait_for_server_to_hangup(ipport):
    """Poll ``(host, port)`` until it stops accepting HTTP requests.

    Raises an Exception if the server is still answering after 30 seconds.
    """
    deadline = time() + 30
    while True:
        try:
            probe = HTTPConnection(*ipport)
            probe.request('GET', '/')
            probe.getresponse()
        except Exception:
            # Request failed: the server has hung up, so we are done.
            return
        if time() > deadline:
            raise Exception(
                'Still answering on %s:%s after 30 seconds' % ipport)
        sleep(0.1)
def status(self):
    """Return 0 when alppaca answers on the link-local metadata IP, else non-zero."""
    parent_status = super(AlppacaDaemon, self).status()
    if parent_status != 0:
        print("succubus_status is {0}".format(str(parent_status)))
        return parent_status
    conn = HTTPConnection('169.254.169.254', timeout=0.1)
    try:
        conn.request("GET", "/")
        conn.getresponse()
    except Exception as e:
        print("Error: alppaca is not reachable via IP 169.254.169.254. {0}".format(e))
        return 3
    return 0
def compute_engine_id():
    """Gets the Compute Engine project ID if it can be inferred.

    Uses 169.254.169.254 for the metadata server to avoid request
    latency from DNS lookup.

    See https://cloud.google.com/compute/docs/metadata#metadataserver
    for information about this IP address. (This IP is also used for
    Amazon EC2 instances, so the metadata flavor is crucial.)

    See https://github.com/google/oauth2client/issues/93 for context about
    DNS latency.

    :rtype: string or ``NoneType``
    :returns: Compute Engine project ID if the metadata service is available,
              else ``None``.
    """
    connection = HTTPConnection('169.254.169.254', timeout=0.1)
    try:
        connection.request('GET', '/computeMetadata/v1/project/project-id',
                           headers={'Metadata-Flavor': 'Google'})
        response = connection.getresponse()
        if response.status == 200:
            return response.read()
    except socket.error:  # socket.timeout or socket.error(64, 'Host is down')
        pass
    finally:
        connection.close()
def predict(self, examples):
    """Run prediction over HTTP/REST.

    :param examples: The input examples
    :return: The outcomes
    """
    if not all(key in examples for key in self.input_keys):
        raise ValueError("should have keys: " + ",".join(self.input_keys))
    version_suffix = '' if self.version is None else '/versions/{}'.format(self.version)
    path = '/v1/models/{}{}:predict'.format(self.name, version_suffix)
    payload = self.create_request(examples)
    _, hostname, port = self.remote.split(':')
    conn = HTTPConnection(hostname.replace('//', ''), port)
    conn.request('POST', path, json.dumps(payload),
                 {'Content-type': 'application/json'})
    raw = conn.getresponse().read()
    outcomes = json.loads(raw)
    if "error" in outcomes:
        raise ValueError("remote server returns error: {0}".format(
            outcomes["error"]))
    return self.deserialize_response(examples, outcomes["outputs"])
def report_sauce_status(self, name, status, tags=None, remote_url=''):
    """Report test status and tags to SauceLabs.

    :param name: Job name to report.
    :param status: Robot status string; 'PASS' marks the job as passed.
    :param tags: Optional list of tags to attach to the job.
    :param remote_url: Remote URL possibly embedding Sauce credentials.
    :return: HTTP status of the Sauce REST call, or a skip message.
    """
    # ``tags=None`` instead of a mutable default: a shared ``[]`` default
    # would leak tags between calls.
    if tags is None:
        tags = []
    job_id = BuiltIn().get_library_instance(
        'Selenium2Library')._current_browser().session_id
    if USERNAME_ACCESS_KEY.match(remote_url):
        username, access_key =\
            USERNAME_ACCESS_KEY.findall(remote_url)[0][1:]
    else:
        username = os.environ.get('SAUCE_USERNAME')
        access_key = os.environ.get('SAUCE_ACCESS_KEY')
    if not job_id:
        return u"No Sauce job id found. Skipping..."
    elif not username or not access_key:
        return u"No Sauce environment variables found. Skipping..."
    token = base64.encodestring('%s:%s' % (username, access_key))[:-1]
    body = json.dumps({'name': name, 'passed': status == 'PASS', 'tags': tags})
    connection = HTTPConnection('saucelabs.com')
    connection.request('PUT', '/rest/v1/%s/jobs/%s' % (username, job_id), body,
                       headers={'Authorization': 'Basic %s' % token})
    return connection.getresponse().status
def kill_server(ipport, ipport2server, pids):
    """Kill the swift server listening on ``ipport`` and wait until it stops answering."""
    server, number = get_server_number(ipport, ipport2server)
    err = Manager([server]).kill(number=number)
    if err:
        raise Exception("unable to kill %s" % (server if not number else "%s%s" % (server, number)))
    deadline = time() + 30
    while True:
        try:
            probe = HTTPConnection(*ipport)
            probe.request("GET", "/")
            probe.getresponse()
        except Exception:
            # Connection refused: the server is really down.
            return
        if time() > deadline:
            raise Exception("Still answering on %s:%s after 30 seconds" % ipport)
        sleep(0.1)
def retry_http_krb_sspi_auth(self, host, req):
    """Retry ``req`` with Kerberos/Negotiate (SPNEGO) authentication.

    :returns: an ``addinfourl`` wrapping the authenticated response, or
        ``None`` when Kerberos negotiation or the request fails.
    """
    url = req.full_url
    scheme, _, host, path = url.split('/', 3)
    h = HTTPConnection(host) if scheme == 'http:' else HTTPSConnection(
        host)
    headers = dict(req.unredirected_hdrs)
    headers.update(
        dict((k, v) for k, v in req.headers.items() if k not in headers))
    try:
        __, krb_context = kerberos.authGSSClientInit("HTTP@" + host)
        kerberos.authGSSClientStep(krb_context, "")
        negotiate_details = kerberos.authGSSClientResponse(krb_context)
        headers["Connection"] = "Keep-Alive"
        headers["Authorization"] = "Negotiate " + negotiate_details
        h.request(req.get_method(), req.selector, req.data, headers)
        response = h.getresponse()
        return addinfourl(response, response.msg, req.get_full_url(),
                          response.status)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are not swallowed.  Any auth or network failure yields None.
        # _log.warning('Failed Kerberos authentication')
        return None
def report_sauce_status(self, name, status, tags=None, remote_url=''):
    """Report test status and tags to SauceLabs.

    :param name: Job name to report.
    :param status: Robot status string; 'PASS' marks the job as passed.
    :param tags: Optional list of tags to attach to the job.
    :param remote_url: Remote URL possibly embedding Sauce credentials.
    :return: HTTP status of the Sauce REST call, or a skip message.
    """
    # ``tags=None`` instead of a mutable default: a shared ``[]`` default
    # would leak tags between calls.
    if tags is None:
        tags = []
    job_id = BuiltIn().get_library_instance(
        'Selenium2Library')._current_browser().session_id
    if USERNAME_ACCESS_KEY.match(remote_url):
        username, access_key =\
            USERNAME_ACCESS_KEY.findall(remote_url)[0][1:]
    else:
        username = os.environ.get('SAUCE_USERNAME')
        access_key = os.environ.get('SAUCE_ACCESS_KEY')
    if not job_id:
        return u"No Sauce job id found. Skipping..."
    elif not username or not access_key:
        return u"No Sauce environment variables found. Skipping..."
    token = base64.encodestring('%s:%s' % (username, access_key))[:-1]
    body = json.dumps({
        'name': name,
        'passed': status == 'PASS',
        'tags': tags
    })
    connection = HTTPConnection('saucelabs.com')
    connection.request('PUT', '/rest/v1/%s/jobs/%s' % (username, job_id),
                       body,
                       headers={'Authorization': 'Basic %s' % token})
    return connection.getresponse().status
def _download_http_file(self, url, target_path):
    '''Download a file over HTTP.

    :param url: HTTP URL to fetch
    :type url: string
    :param target_path: local path to write the downloaded data to
    :type target_path: string
    '''
    url0 = url
    if url[:7] == 'http://':
        url = url[7:]
    pos = url.find('/')
    host = url[:pos]
    page = url[pos:]
    conn = HTTPConnection(host, port=80, timeout=60)  # 60 second timeout
    conn.request('GET', page)
    res = conn.getresponse()
    if res.status != 200:
        raise RuntimeError('访问:%s 错误[HTTP错误码:%s]' % (url0, res.status))
    target_size = int(res.getheader('Content-Length'))
    data = res.read()
    conn.close()
    if len(data) != target_size:
        # Truncated transfer: retry the whole download from scratch.
        return self._download_http_file(url0, target_path)
    # Context manager guarantees the file is closed even if write() fails
    # (the original leaked the handle on a failed write).
    with open(target_path, 'wb') as f:
        f.write(data)
def compute_engine_id():
    """Gets the Compute Engine project ID if it can be inferred.

    Uses 169.254.169.254 for the metadata server to avoid request
    latency from DNS lookup.

    See https://cloud.google.com/compute/docs/metadata#metadataserver
    for information about this IP address. (This IP is also used for
    Amazon EC2 instances, so the metadata flavor is crucial.)

    See https://github.com/google/oauth2client/issues/93 for context about
    DNS latency.

    :rtype: string or ``NoneType``
    :returns: Compute Engine project ID if the metadata service is available,
              else ``None``.
    """
    connection = HTTPConnection('169.254.169.254', timeout=0.1)
    try:
        connection.request('GET', '/computeMetadata/v1/project/project-id',
                           headers={'Metadata-Flavor': 'Google'})
        response = connection.getresponse()
        if response.status == 200:
            return response.read()
    except socket.error:  # socket.timeout or socket.error(64, 'Host is down')
        pass
    finally:
        connection.close()
def kill_server(ipport, ipport2server):
    """Kill the swift server listening on ``ipport`` and wait until it stops answering."""
    server, number = get_server_number(ipport, ipport2server)
    err = Manager([server]).kill(number=number)
    if err:
        raise Exception('unable to kill %s' %
                        (server if not number else '%s%s' % (server, number)))
    deadline = time() + 30
    while True:
        try:
            probe = HTTPConnection(*ipport)
            probe.request('GET', '/')
            probe.getresponse()
        except Exception:
            # Connection refused: the server is really down.
            return
        if time() > deadline:
            raise Exception(
                'Still answering on %s:%s after 30 seconds' % ipport)
        sleep(0.1)
def _check_storage(ipport, path):
    """GET ``path`` from the storage node at ``ipport`` and sanity-check the status.

    404 is expected (nonsense path with mount_check off); 507 is allowed when
    the test target is a VM using mount_check.  Any other status raises.
    """
    connection = HTTPConnection(*ipport)
    connection.request('GET', path)
    response = connection.getresponse()
    if response.status not in (404, 507):
        raise Exception('Unexpected status %s' % response.status)
    return response
def _check_storage(ipport, path):
    """GET ``path`` from the storage node at ``ipport`` and sanity-check the status.

    404 is expected (nonsense path with mount_check off); 507 is allowed when
    the test target is a VM using mount_check.  Any other status raises.
    """
    connection = HTTPConnection(*ipport)
    connection.request('GET', path)
    response = connection.getresponse()
    if response.status not in (404, 507):
        raise Exception(
            'Unexpected status %s' % response.status)
    return response
def set_test_status(self, passed=True):
    """Mark the current Sauce Labs job as passed/failed; True when Sauce accepts it."""
    body = json.dumps({"passed": passed})
    auth = {"Authorization": "Basic {0}".format(self.sauce_auth)}
    endpoint = '/rest/v1/{0}/jobs/{1}'.format(self.username,
                                              self.driver.session_id)
    connection = HTTPConnection("saucelabs.com")
    connection.request('PUT', endpoint, body, headers=auth)
    return connection.getresponse().status == 200
def export_to_myexp(self, trans, id, myexp_username, myexp_password):
    """
    Exports a workflow to myExperiment website.
    """
    trans.workflow_building_mode = workflow_building_modes.ENABLED
    stored = self.get_stored_workflow(trans, id, check_ownership=False, check_accessible=True)

    # Convert workflow to dict.
    workflow_dict = self._workflow_to_dict(trans, stored)

    #
    # Create and submit workflow myExperiment request.
    #

    # Create workflow content JSON.
    workflow_content = json.dumps(workflow_dict, indent=4, sort_keys=True)

    # Create myExperiment request.
    request_raw = trans.fill_template(
        "workflow/myexp_export.mako",
        workflow_name=workflow_dict['name'],
        workflow_description=workflow_dict['annotation'],
        workflow_content=workflow_content,
        workflow_svg=self._workflow_to_svg_canvas(trans, stored).tostring()
    )
    # strip() b/c myExperiment XML parser doesn't allow white space before XML; utf-8 handles unicode characters.
    request = unicodify(request_raw.strip(), 'utf-8')

    # Do request and get result.
    auth_header = base64.b64encode('%s:%s' % (myexp_username, myexp_password))
    headers = {"Content-type": "text/xml", "Accept": "text/xml", "Authorization": "Basic %s" % auth_header}
    # Falls back to the class default when no myexperiment_url is configured.
    myexp_url = trans.app.config.get("myexperiment_url", self.__myexp_url)
    conn = HTTPConnection(myexp_url)
    # NOTE: blocks web thread.
    conn.request("POST", "/workflow.xml", request, headers)
    response = conn.getresponse()
    response_data = response.read()
    conn.close()

    # Do simple parse of response to see if export successful and provide user feedback.
    # myExperiment returns the new workflow's id in an <id> tag on success.
    parser = SingleTagContentsParser('id')
    parser.feed(response_data)
    myexp_workflow_id = parser.tag_content
    workflow_list_str = " <br>Return to <a href='%s'>workflow list." % url_for(controller='workflows', action='list')
    if myexp_workflow_id:
        return trans.show_message(
            """Workflow '%s' successfully exported to myExperiment. 
            <br/> <a href="http://%s/workflows/%s">Click here to view the workflow on myExperiment</a> %s """
            % (stored.name, myexp_url, myexp_workflow_id, workflow_list_str),
            use_panels=True)
    else:
        return trans.show_error_message(
            "Workflow '%s' could not be exported to myExperiment. Error: %s %s" %
            (stored.name, response_data, workflow_list_str), use_panels=True)
def export_to_myexp(self, trans, id, myexp_username, myexp_password):
    """
    Exports a workflow to myExperiment website.
    """
    trans.workflow_building_mode = workflow_building_modes.ENABLED
    stored = self.get_stored_workflow(trans, id, check_ownership=False, check_accessible=True)

    # Convert workflow to dict.
    workflow_dict = self._workflow_to_dict(trans, stored)

    #
    # Create and submit workflow myExperiment request.
    #

    # Create workflow content JSON.
    workflow_content = json.dumps(workflow_dict, indent=4, sort_keys=True)

    # Create myExperiment request.
    request_raw = trans.fill_template(
        "workflow/myexp_export.mako",
        workflow_name=workflow_dict['name'],
        workflow_description=workflow_dict['annotation'],
        workflow_content=workflow_content,
        workflow_svg=self._workflow_to_svg_canvas(trans, stored).tostring()
    )
    # strip() b/c myExperiment XML parser doesn't allow white space before XML; utf-8 handles unicode characters.
    request = unicodify(request_raw.strip(), 'utf-8')

    # Do request and get result.
    auth_header = base64.b64encode('%s:%s' % (myexp_username, myexp_password))
    headers = {"Content-type": "text/xml", "Accept": "text/xml", "Authorization": "Basic %s" % auth_header}
    # Target host comes straight from app config here (no fallback default).
    myexp_url = trans.app.config.myexperiment_target_url
    conn = HTTPConnection(myexp_url)
    # NOTE: blocks web thread.
    conn.request("POST", "/workflow.xml", request, headers)
    response = conn.getresponse()
    response_data = response.read()
    conn.close()

    # Do simple parse of response to see if export successful and provide user feedback.
    # myExperiment returns the new workflow's id in an <id> tag on success.
    parser = SingleTagContentsParser('id')
    parser.feed(response_data)
    myexp_workflow_id = parser.tag_content
    workflow_list_str = " <br>Return to <a href='%s'>workflow list." % url_for(controller='workflows', action='list')
    if myexp_workflow_id:
        return trans.show_message(
            """Workflow '%s' successfully exported to myExperiment. 
            <br/> <a href="http://%s/workflows/%s">Click here to view the workflow on myExperiment</a> %s """
            % (stored.name, myexp_url, myexp_workflow_id, workflow_list_str),
            use_panels=True)
    else:
        return trans.show_error_message(
            "Workflow '%s' could not be exported to myExperiment. Error: %s %s" %
            (stored.name, response_data, workflow_list_str), use_panels=True)
def read(self, size):
    """Read up to ``size`` bytes from the remote URL starting at the current offset."""
    protocol, host, port, path = self.urlsplit(self.url)
    byte_range = 'bytes=%d-%d' % (self.offset, self.offset + size - 1)
    conn = HTTPConnection(host, port)
    conn.request('GET', path, None, {'Range': byte_range})
    data = conn.getresponse().read()
    # Advance by what was actually received (the server may return less).
    self.offset += len(data)
    return data
def set_test_status(self, passed=True):
    """Mark the current Sauce Labs job as passed/failed; True when Sauce accepts it."""
    body = json.dumps({"passed": passed})
    auth = {"Authorization": "Basic {0}".format(self.sauce_auth)}
    endpoint = '/rest/v1/{0}/jobs/{1}'.format(
        self.username,
        self.driver.session_id
    )
    connection = HTTPConnection("saucelabs.com")
    connection.request('PUT', endpoint, body, headers=auth)
    return connection.getresponse().status == 200
class GoogleSuggestions():
    """Fetch Google search-suggestion data (as used by the MyTube plugin)."""

    def __init__(self):
        self.hl = "en"   # UI language appended to the query, when set
        self.conn = None

    def prepareQuery(self):
        """Build the base query string into ``self.prepQuerry``."""
        self.prepQuerry = "/complete/search?output=chrome&client=chrome&"
        if self.hl is not None:
            self.prepQuerry = self.prepQuerry + "hl=" + self.hl + "&"
        self.prepQuerry = self.prepQuerry + "jsonp=self.gotSuggestions&q="
        print("[MyTube - GoogleSuggestions] prepareQuery:", self.prepQuerry)

    def getSuggestions(self, queryString):
        """Return UTF-8 encoded suggestion data for ``queryString``, or None."""
        self.prepareQuery()
        if queryString == "":
            return None
        query = self.prepQuerry + quote(queryString)
        # Build the connection once (the original constructed it twice,
        # leaking the first HTTPConnection object).
        self.conn = HTTPConnection("google.com")
        try:
            self.conn.request("GET", query, "", {"Accept-Encoding": "UTF-8"})
        except (CannotSendRequest, gaierror, error):
            self.conn.close()
            print("[MyTube - GoogleSuggestions] Can not send request for suggestions")
            return None
        try:
            response = self.conn.getresponse()
        except BadStatusLine:
            self.conn.close()
            print("[MyTube - GoogleSuggestions] Can not get a response from google")
            return None
        if response.status != 200:
            self.conn.close()
            return None
        data = response.read()
        header = response.getheader("Content-Type", "text/xml; charset=ISO-8859-1")
        charset = "ISO-8859-1"
        try:
            charset = header.split(";")[1].split("=")[1]
            print("[MyTube - GoogleSuggestions] Got charset %s" % charset)
        except Exception:
            # Narrowed from a bare except: any parse failure falls back to
            # the default charset.
            print("[MyTube - GoogleSuggestions] No charset in Header, falling back to %s" % charset)
        data = data.decode(charset).encode("utf-8")
        self.conn.close()
        return data
def retry_http_ntlm_sspi_auth(self, host, req, authenticatehdr, auth_header):
    """Perform the two-step NTLM handshake for ``req`` and return the final response.

    :param authenticatehdr: name of the server challenge header to read
        (e.g. 'WWW-Authenticate' or 'Proxy-Authenticate' — presumably; verify
        against callers).
    :param auth_header: name of the request auth header to send
        (e.g. 'Authorization' or 'Proxy-Authorization').
    :returns: an ``addinfourl`` wrapping the authenticated response, or
        ``None`` when the server sends no NTLM challenge or the challenge
        cannot be answered.
    """
    url = req.full_url
    scheme, _, host, path = url.split('/', 3)
    h = HTTPConnection(host) if scheme == 'http:' else HTTPSConnection(
        host)
    headers = dict(req.unredirected_hdrs)
    headers.update(
        dict((k, v) for k, v in req.headers.items() if k not in headers))
    # Step 1: send the type-1 negotiate message.  The connection must stay
    # open: NTLM authenticates the TCP connection itself, so both steps
    # have to travel over the same socket.
    headers["Connection"] = "Keep-Alive"
    headers[auth_header] = "NTLM " + self.get_auth_req()
    h.request(req.get_method(), req.selector, req.data, headers)
    response = h.getresponse()
    response.fp = None  # keep-alive: discard the body so the socket is reusable
    ntlmauth = response.headers.get(authenticatehdr)
    if ntlmauth is not None and ntlmauth.startswith('NTLM '):
        # Step 2: answer the server's type-2 challenge on the same socket.
        challenge = ntlmauth[5:]
        challenge_response = self.create_challenge_response(challenge)
        if challenge_response is None:
            _log.warning('Failed to authenticate using NTLM')
            return None
        headers["Connection"] = "Close"
        headers[auth_header] = "NTLM " + challenge_response
        h.request(req.get_method(), req.selector, req.data, headers)
        response = h.getresponse()
        return addinfourl(response, response.msg, req.get_full_url(),
                          response.status)
def is_alive(self):
    """Return True if the remote server responds at all.

    Because the remote communication happens over HTTP we need to make an
    explicit request to the remote.  WebDriver spec tests may legitimately
    run without a session, so we probe an invalid path instead: a 404 from
    it is proof enough that the server is alive and kicking.
    """
    probe = HTTPConnection(self.server.host, self.server.port)
    probe.request("HEAD", self.server.base_path + "invalid")
    return probe.getresponse().status == 404
def is_alive(self):
    """Return True if the remote server responds at all.

    Because the remote communication happens over HTTP we need to make an
    explicit request to the remote.  WebDriver spec tests may legitimately
    run without a session, so we probe an invalid path instead: a 404 from
    it is proof enough that the server is alive and kicking.
    """
    probe = HTTPConnection(self.server.host, self.server.port)
    probe.request("HEAD", self.server.base_path + "invalid")
    return probe.getresponse().status == 404
def _findMatchUrl(self, tag):
    """Resolve the redirect target of a search-result tag; '' when no h3 is found."""
    h3 = tag.find('h3')
    if not h3:
        return ''
    url = h3.find('a').attrs.get('href', '')
    # decode url
    host = parse.urlsplit(url).netloc
    path = url[len(parse.urljoin(url, '/')) - 1:]
    conn = HTTPConnection(host, timeout=10)
    conn.request('GET', path)
    resp = conn.getresponse()
    redirect_url = resp.getheader('Location')
    conn.close()
    return redirect_url
def send_email(request):
    """Fetch a graphite-rendered image over HTTP and email it as an attachment.

    Expects ``request.GET['to']`` (comma-separated recipients) and
    ``request.GET['url']`` (the graph URL).  Returns "OK" on success or the
    formatted traceback text on any failure.
    """
    try:
        recipients = request.GET['to'].split(',')
        url = request.GET['url']
        proto, server, path, query, frag = urlsplit(url)
        if query:
            path += '?' + query
        conn = HTTPConnection(server)
        conn.request('GET', path)
        try:  # Python 2.7+, use buffering of HTTP responses
            resp = conn.getresponse(buffering=True)
        except TypeError:  # Python 2.6 and older
            resp = conn.getresponse()
        assert resp.status == 200, "Failed HTTP response %s %s" % (resp.status, resp.reason)
        rawData = resp.read()
        conn.close()
        message = MIMEMultipart()
        message['Subject'] = "Graphite Image"
        message['To'] = ', '.join(recipients)
        message['From'] = 'composer@%s' % gethostname()
        text = MIMEText(
            "Image generated by the following graphite URL at %s\r\n\r\n%s"
            % (ctime(), url))
        image = MIMEImage(rawData)
        image.add_header('Content-Disposition', 'attachment',
                         filename="composer_" + strftime("%b%d_%I%M%p.png"))
        message.attach(text)
        message.attach(image)
        s = SMTP(settings.SMTP_SERVER)
        s.sendmail('composer@%s' % gethostname(), recipients, message.as_string())
        s.quit()
        return HttpResponse("OK")
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # still propagate; the traceback is returned to the caller as before.
        return HttpResponse(format_exc())
def _checkURL(self, url):
    """
    Check if the ``url`` is *alive* (i.e., remote server returns code 200 (OK)
    or 401 (unauthorized)).
    """
    try:
        parts = urlparse(url)
        conn = HTTPConnection(parts[1])
        conn.putrequest('HEAD', parts[2])
        conn.endheaders()
        status = conn.getresponse().status
        # CloudMan UI is pwd protected so include 401
        return status in (200, 401)
    except Exception:
        # No response or no good response
        pass
    return False
def check_server(ipport, ipport2server):
    """Wait until the swift server at ``ipport`` is answering.

    For storage servers (account/container/object) this probes a nonsense
    path and accepts 404/507, returning ``None``.  For the proxy it
    authenticates against /auth/v1.0 and returns ``(url, token, account)``.
    Raises the last error once CHECK_SERVER_TIMEOUT seconds have elapsed.
    """
    server = ipport2server[ipport]
    if server[:-1] in ('account', 'container', 'object'):
        # Only the first four numbered servers of each type are probed.
        if int(server[-1]) > 4:
            return None
        path = '/connect/1/2'
        if server[:-1] == 'container':
            path += '/3'
        elif server[:-1] == 'object':
            path += '/3/4'
        try_until = time() + CHECK_SERVER_TIMEOUT
        while True:
            try:
                conn = HTTPConnection(*ipport)
                conn.request('GET', path)
                resp = conn.getresponse()
                # 404 because it's a nonsense path (and mount_check is false)
                # 507 in case the test target is a VM using mount_check
                if resp.status not in (404, 507):
                    raise Exception(
                        'Unexpected status %s' % resp.status)
                break
            except Exception as err:
                if time() > try_until:
                    print(err)
                    print('Giving up on %s:%s after %s seconds.' % (
                        server, ipport, CHECK_SERVER_TIMEOUT))
                    raise err
                sleep(0.1)
    else:
        # Proxy server: retry until auth succeeds and the account HEADs.
        try_until = time() + CHECK_SERVER_TIMEOUT
        while True:
            try:
                url, token = get_auth('http://%s:%d/auth/v1.0' % ipport,
                                      'test:tester', 'testing')
                account = url.split('/')[-1]
                head_account(url, token)
                return url, token, account
            except Exception as err:
                if time() > try_until:
                    print(err)
                    print('Giving up on proxy:8080 after 30 seconds.')
                    raise err
                sleep(0.1)
    return None
def check_server(ipport, ipport2server, pids, timeout=CHECK_SERVER_TIMEOUT):
    """Wait until the swift server at ``ipport`` is answering.

    For storage servers (account/container/object) this probes a nonsense
    path and accepts 404/507, returning ``None``.  For the proxy it
    authenticates against /auth/v1.0 and returns ``(url, token, account)``.
    Raises the last error once ``timeout`` seconds have elapsed.
    """
    server = ipport2server[ipport]
    if server[:-1] in ('account', 'container', 'object'):
        # Only the first four numbered servers of each type are probed.
        if int(server[-1]) > 4:
            return None
        path = '/connect/1/2'
        if server[:-1] == 'container':
            path += '/3'
        elif server[:-1] == 'object':
            path += '/3/4'
        try_until = time() + timeout
        while True:
            try:
                conn = HTTPConnection(*ipport)
                conn.request('GET', path)
                resp = conn.getresponse()
                # 404 because it's a nonsense path (and mount_check is false)
                # 507 in case the test target is a VM using mount_check
                if resp.status not in (404, 507):
                    raise Exception(
                        'Unexpected status %s' % resp.status)
                break
            except Exception as err:
                if time() > try_until:
                    print(err)
                    print('Giving up on %s:%s after %s seconds.' % (
                        server, ipport, timeout))
                    raise err
                sleep(0.1)
    else:
        # Proxy server: retry until auth succeeds and the account HEADs.
        try_until = time() + timeout
        while True:
            try:
                url, token = get_auth('http://%s:%d/auth/v1.0' % ipport,
                                      'test:tester', 'testing')
                account = url.split('/')[-1]
                head_account(url, token)
                return url, token, account
            except Exception as err:
                if time() > try_until:
                    print(err)
                    print('Giving up on proxy:8080 after 30 seconds.')
                    raise err
                sleep(0.1)
    return None
def _checkURL(self, url):
    """
    Check if the ``url`` is *alive* (i.e., remote server returns code 200 (OK)
    or 401 (unauthorized)).
    """
    try:
        parts = urlparse(url)
        conn = HTTPConnection(parts[1])
        conn.putrequest('HEAD', parts[2])
        conn.endheaders()
        status = conn.getresponse().status
        # CloudMan UI is pwd protected so include 401
        return status in (200, 401)
    except Exception:
        # No response or no good response
        pass
    return False
def _get_version(self, master): if master is not None: conn = None host, port = master.split(':', 2) port = int(port) try: conn = HTTPConnection(host, port, timeout=self._timeout) conn.request('GET', '/version') resp = conn.getresponse() if resp.status < 200 or resp.status >= 300: return return json.loads(resp.read().decode('utf-8'))['version'] except Exception: logger.exception('Error') pass finally: if conn: conn.close()
def _get_version(self, master): if master is not None: conn = None host, port = master.split(':', 2) port = int(port) try: conn = HTTPConnection(host, port, timeout=self._timeout) conn.request('GET', '/version') resp = conn.getresponse() if resp.status < 200 or resp.status >= 300: return return json.loads(resp.read().decode('utf-8'))['version'] except Exception: logger.exception('Error') pass finally: if conn: conn.close()
def _request(self, method, path, body=None):
    """Issue an HTTP request and yield the response; closes the connection after use.

    :param method: HTTP method name.
    :param path: request path.
    :param body: optional object serialized to JSON as the request body.
    :raises ValueError: when ``body`` cannot be encoded as JSON.
    """
    payload = None
    if body is not None:
        try:
            payload = json.dumps(body)
        except ValueError:
            raise ValueError("Failed to encode request body as JSON: {}".format(
                json.dumps(body, indent=2)))
        if isinstance(payload, text_type):
            # Bug fix: the original encoded ``body`` (the unserialized
            # object) instead of the JSON string ``payload``, sending the
            # wrong bytes on the wire.
            payload = payload.encode("utf-8")
    conn = HTTPConnection(self.host, self.port)
    try:
        conn.request(method, path, payload)
        yield conn.getresponse()
    finally:
        conn.close()
def predict(self, examples, **kwargs):
    """Run prediction over HTTP/REST.

    :param examples: The input examples
    :return: The outcomes
    """
    verify_example(examples, self.input_keys)
    payload = json.dumps(self.create_request(examples))
    conn = HTTPConnection(self.hostname, self.port)
    conn.request('POST', self.path, payload, self.headers)
    parsed = json.loads(conn.getresponse().read())
    if "error" in parsed:
        raise ValueError("remote server returns error: {0}".format(parsed["error"]))
    return self.deserialize_response(examples, parsed["outputs"])
def test_http_client_response():
    """Exercise HttpClientTransportResponse against a raw http.client response."""
    # Create a core request
    request = HttpRequest("GET", "www.httpbin.org")
    # Fake a transport based on http.client
    conn = HTTPConnection("www.httpbin.org")
    conn.request("GET", "/get")
    raw = conn.getresponse()
    response = HttpClientTransportResponse(request, raw)
    # Don't assume too much in those assert, since we reach a real server
    assert response.internal_response is raw
    assert response.reason is not None
    assert isinstance(response.status_code, int)
    assert len(response.headers.keys()) != 0
    assert len(response.text()) != 0
    # Header lookup should be case-insensitive.
    assert "content-type" in response.headers
    assert "Content-Type" in response.headers
def check_server(ipport, ipport2server, pids, timeout=CHECK_SERVER_TIMEOUT):
    """Wait until the swift server at ``ipport`` is answering.

    For storage servers (account/container/object) this probes a nonsense
    path and accepts 404/507, returning ``None``.  For the proxy it
    authenticates against /auth/v1.0 and returns ``(url, token, account)``.
    Raises the last error once ``timeout`` seconds have elapsed.
    """
    server = ipport2server[ipport]
    if server[:-1] in ("account", "container", "object"):
        # Only the first four numbered servers of each type are probed.
        if int(server[-1]) > 4:
            return None
        path = "/connect/1/2"
        if server[:-1] == "container":
            path += "/3"
        elif server[:-1] == "object":
            path += "/3/4"
        try_until = time() + timeout
        while True:
            try:
                conn = HTTPConnection(*ipport)
                conn.request("GET", path)
                resp = conn.getresponse()
                # 404 because it's a nonsense path (and mount_check is false)
                # 507 in case the test target is a VM using mount_check
                if resp.status not in (404, 507):
                    raise Exception("Unexpected status %s" % resp.status)
                break
            except Exception as err:
                if time() > try_until:
                    print(err)
                    print("Giving up on %s:%s after %s seconds." % (server, ipport, timeout))
                    raise err
                sleep(0.1)
    else:
        # Proxy server: retry until auth succeeds and the account HEADs.
        try_until = time() + timeout
        while True:
            try:
                url, token = get_auth("http://%s:%d/auth/v1.0" % ipport, "test:tester", "testing")
                account = url.split("/")[-1]
                head_account(url, token)
                return url, token, account
            except Exception as err:
                if time() > try_until:
                    print(err)
                    print("Giving up on proxy:8080 after 30 seconds.")
                    raise err
                sleep(0.1)
    return None
def predict(self, examples, **kwargs):
    """Run prediction over HTTP/REST.

    :param examples: The input examples
    :return: The outcomes
    """
    verify_example(examples, self.input_keys)
    payload = json.dumps(self.create_request(examples))
    conn = HTTPConnection(self.hostname, self.port)
    conn.request('POST', self.path, payload, self.headers)
    parsed = json.loads(conn.getresponse().read())
    if "error" in parsed:
        raise ValueError("remote server returns error: {0}".format(
            parsed["error"]))
    return self.deserialize_response(examples, parsed["outputs"])
def test_http_client_response(port, http_request, http_response):
    """Exercise the transport-response wrapper against a raw http.client response."""
    # Create a core request
    request = http_request("GET", "http://localhost:{}".format(port))
    # Fake a transport based on http.client
    conn = HTTPConnection("localhost", port)
    conn.request("GET", "/get")
    raw = conn.getresponse()
    response = create_transport_response(http_response, request, raw)
    if is_rest(http_response):
        # rest-style responses must be read before .text() is available
        response.read()
    # Don't assume too much in those assert, since we reach a real server
    assert response.internal_response is raw
    assert response.reason is not None
    assert isinstance(response.status_code, int)
    assert len(response.headers.keys()) != 0
    assert len(response.text()) != 0
    # Header lookup should be case-insensitive.
    assert "content-type" in response.headers
    assert "Content-Type" in response.headers
def pingSolr():
    """ test if the solr server is available """
    status = getLocal("solrStatus")
    if status is not None:
        # Cached result from a previous ping.
        return status
    conn = HTTPConnection("localhost", 8983)
    try:
        conn.request("GET", "/solr/plone/admin/ping")
        response = conn.getresponse()
        status = response.status == 200
        msg = "INFO: solr return status '%s'" % response.status
    except error as e:
        status = False
        msg = 'WARNING: solr tests could not be run: "%s".' % e
    if not status:
        banner = "*" * len(msg)
        print(file=stderr)
        print(banner, file=stderr)
        print(msg, file=stderr)
        print(banner, file=stderr)
        print(file=stderr)
    setLocal("solrStatus", status)
    return status
def request(self, sitename, path, method='GET', data=None):
    """Send an HTTP request to the infobase server and return the response body.

    :param sitename: site name, prefixed onto ``path`` on the server.
    :param path: request path within the site.
    :param method: HTTP method; for GET, ``data`` is encoded into the query string.
    :param data: dict payload; keys with ``None`` values are dropped.
    :raises ClientException: when the infobase server is unreachable, or via
        ``handle_error`` for non-200 responses.
    """
    url = self.base_url + '/' + sitename + path
    path = '/' + sitename + path
    if isinstance(data, dict):
        # Iterate over a snapshot of the keys: deleting from a dict while
        # iterating its live keys() view raises RuntimeError on Python 3.
        for k in list(data.keys()):
            if data[k] is None:
                del data[k]
    if web.config.debug:
        web.ctx.infobase_req_count = 1 + web.ctx.get(
            'infobase_req_count', 0)
        a = time.time()
        _path = path
        _data = data
    headers = {}
    if data:
        if isinstance(data, dict):
            data = dict(
                (web.safestr(k), web.safestr(v)) for k, v in data.items())
            data = urlencode(data)
            headers['Content-Type'] = 'application/x-www-form-urlencoded'
        if method == 'GET':
            path += '?' + data
            data = None
    stats.begin("infobase", path=path, method=method, data=data)
    conn = HTTPConnection(self.base_url)
    env = web.ctx.get('env') or {}
    if self.auth_token:
        c = SimpleCookie()
        c['infobase_auth_token'] = quote(self.auth_token)
        cookie = c.output(header='').strip()
        headers['Cookie'] = cookie
    # pass the remote ip to the infobase server
    headers['X-REMOTE-IP'] = web.ctx.get('ip')
    try:
        conn.request(method, path, data, headers=headers)
        response = conn.getresponse()
        stats.end()
    except socket.error:
        stats.end(error=True)
        logger.error("Unable to connect to infobase server", exc_info=True)
        raise ClientException("503 Service Unavailable",
                              "Unable to connect to infobase server")
    cookie = response.getheader('Set-Cookie')
    if cookie:
        c = SimpleCookie()
        c.load(cookie)
        if 'infobase_auth_token' in c:
            auth_token = c['infobase_auth_token'].value
            # The auth token will be in urlquoted form, unquote it before use.
            # Otherwise, it will be quoted twice this value is set as cookie.
            auth_token = auth_token and unquote(auth_token)
            self.set_auth_token(auth_token)
    if web.config.debug:
        b = time.time()
        print("%.02f (%s):" % (round(b - a, 2), web.ctx.infobase_req_count),
              response.status, method, _path, _data, file=web.debug)
    if response.status == 200:
        return response.read()
    else:
        self.handle_error("%d %s" % (response.status, response.reason),
                          response.read())
def stop_server(self):
    """Ask the HTTP server on localhost:<port> to shut down via a QUIT request."""
    endpoint = "127.0.0.1:{}".format(self.port)
    conn = HTTPConnection(endpoint)
    conn.request("QUIT", "/")
    conn.getresponse()
class Client:
    """
    Crossbar.io HTTP bridge client.

    Holds one HTTP(S) connection to the bridge endpoint plus a monotonically
    increasing per-client sequence number that is included in every request.
    """

    def __init__(self, url, key = None, secret = None, timeout = 5, context = None):
        """
        Create a new Crossbar.io push client.

        The only mandatory argument is the Push service endpoint of the
        Crossbar.io instance to push to.

        For signed pushes, provide authentication key and secret. If those are
        not given, unsigned pushes are performed.

        :param url: URL of the HTTP bridge of Crossbar.io (e.g. http://example.com:8080/push).
        :type url: str
        :param key: Optional key to use for signing requests.
        :type key: str
        :param secret: When using signed request, the secret corresponding to key.
        :type secret: str
        :param timeout: Timeout for requests.
        :type timeout: int
        :param context: If the HTTP bridge is running on HTTPS (that is securely over TLS),
            then the context provides the SSL settings the client should use (e.g. the
            certificate chain against which to verify the server certificate).
            This parameter is only available on Python 2.7.9+ and Python 3
            (otherwise the parameter is silently ignored!).
            See: https://docs.python.org/2/library/ssl.html#ssl.SSLContext
        :type context: obj or None
        """
        if six.PY2:
            # On Python 2, promote native str arguments to unicode so the
            # type checks below behave the same on both Python versions.
            if type(url) == str:
                url = six.u(url)
            if type(key) == str:
                key = six.u(key)
            if type(secret) == str:
                secret = six.u(secret)

        # NOTE(review): argument validation via assert — these checks are
        # stripped when running under `python -O`.
        assert(type(url) == six.text_type)
        # key and secret must be given together (signed) or not at all (unsigned)
        assert((key and secret) or (not key and not secret))
        assert(key is None or type(key) == six.text_type)
        assert(secret is None or type(secret) == six.text_type)
        assert(type(timeout) == int)
        if _HAS_SSL and _HAS_SSL_CLIENT_CONTEXT:
            assert(context is None or isinstance(context, ssl.SSLContext))

        # sequence number sent with every publish; bumped after each request
        self._seq = 1
        self._key = key
        self._secret = secret

        self._endpoint = _parse_url(url)
        self._endpoint['headers'] = {
            "Content-type": "application/json",
            "User-agent": "crossbarconnect-python"
        }

        if self._endpoint['secure']:
            if not _HAS_SSL:
                raise Exception("Bridge URL is using HTTPS, but Python SSL module is missing")
            if _HAS_SSL_CLIENT_CONTEXT:
                # Python 2.7.9+/3: pass the SSL context so the caller's
                # certificate-verification settings are honored.
                self._connection = HTTPSConnection(self._endpoint['host'],
                    self._endpoint['port'], timeout = timeout, context = context)
            else:
                # older Pythons: no `context` parameter — it is silently ignored
                self._connection = HTTPSConnection(self._endpoint['host'],
                    self._endpoint['port'], timeout = timeout)
        else:
            self._connection = HTTPConnection(self._endpoint['host'],
                self._endpoint['port'], timeout = timeout)

    def publish(self, topic, *args, **kwargs):
        """
        Publish an event to subscribers on specified topic via Crossbar.io HTTP bridge.

        The event payload (positional and keyword) can be of any type that can be
        serialized to JSON.

        If `kwargs` contains an `options` attribute, this is expected to be a
        dictionary with the following possible parameters:

        * `exclude`: A list of WAMP session IDs to exclude from receivers.
        * `eligible`: A list of WAMP session IDs eligible as receivers.

        :param topic: Topic to push to.
        :type topic: str
        :param args: Arbitrary application payload for the event (positional arguments).
        :type args: list
        :param kwargs: Arbitrary application payload for the event (keyword arguments).
        :type kwargs: dict

        :returns int -- The event publication ID assigned by the broker.
        """
        if six.PY2 and type(topic) == str:
            topic = six.u(topic)
        assert(type(topic) == six.text_type)

        ## this will get filled and later serialized into HTTP/POST body
        ##
        event = {
            'topic': topic
        }

        if 'options' in kwargs:
            # 'options' is broker metadata, not payload — pop it so it does
            # not also land in event['kwargs'] below
            event['options'] = kwargs.pop('options')
            assert(type(event['options']) == dict)

        if args:
            event['args'] = args

        if kwargs:
            event['kwargs'] = kwargs

        try:
            # compact separators keep the serialized (and signed) body byte-stable
            body = json.dumps(event, separators = (',',':'))
            if six.PY3:
                # hmac.update and the HTTP body require bytes on Python 3
                body = body.encode('utf8')
        except Exception as e:
            raise Exception("invalid event payload - not JSON serializable: {0}".format(e))

        params = {
            'timestamp': _utcnow(),
            'seq': self._seq,
        }

        if self._key:
            ## if the request is to be signed, create extra fields and signature
            params['key'] = self._key
            # NOTE(review): nonce comes from `random`, not `secrets`; upper
            # bound is 2**53 (the largest exactly-representable JSON/JS int).
            params['nonce'] = random.randint(0, 9007199254740992)

            # HMAC[SHA256]_{secret} (key | timestamp | seq | nonce | body) => signature
            hm = hmac.new(self._secret.encode('utf8'), None, hashlib.sha256)
            hm.update(params['key'].encode('utf8'))
            hm.update(params['timestamp'].encode('utf8'))
            hm.update(u"{0}".format(params['seq']).encode('utf8'))
            hm.update(u"{0}".format(params['nonce']).encode('utf8'))
            hm.update(body)
            signature = base64.urlsafe_b64encode(hm.digest())
            params['signature'] = signature

        self._seq += 1

        path = "{0}?{1}".format(parse.quote(self._endpoint['path']), parse.urlencode(params))

        ## now issue the HTTP/POST
        ##
        self._connection.request('POST', path, body, self._endpoint['headers'])
        response = self._connection.getresponse()
        response_body = response.read().decode()

        # Crossbar.io answers 202 Accepted on successful publication
        if response.status != 202:
            raise Exception("publication request failed {0} [{1}] - {2}".format(response.status, response.reason, response_body))

        try:
            res = json.loads(response_body)
        except Exception as e:
            raise Exception("publication request bogus result - {0}".format(e))

        return res['id']
class Client:
    """
    Crossbar.io HTTP bridge client.

    Wraps a single HTTP(S) connection to the bridge and a per-client request
    sequence counter included with every publish.
    """

    def __init__(self, url, key=None, secret=None, timeout=5, context=None):
        """
        Create a new Crossbar.io push client.

        The only mandatory argument is the Push service endpoint of the
        Crossbar.io instance to push to.

        For signed pushes, provide authentication key and secret. If those are
        not given, unsigned pushes are performed.

        :param url: URL of the HTTP bridge of Crossbar.io (e.g. http://example.com:8080/push).
        :type url: str
        :param key: Optional key to use for signing requests.
        :type key: str
        :param secret: When using signed request, the secret corresponding to key.
        :type secret: str
        :param timeout: Timeout for requests.
        :type timeout: int
        :param context: If the HTTP bridge is running on HTTPS (that is securely over TLS),
            then the context provides the SSL settings the client should use (e.g. the
            certificate chain against which to verify the server certificate).
            This parameter is only available on Python 2.7.9+ and Python 3
            (otherwise the parameter is silently ignored!).
            See: https://docs.python.org/2/library/ssl.html#ssl.SSLContext
        :type context: obj or None
        """
        if six.PY2:
            # Python 2: coerce native str arguments to unicode so the type
            # assertions below are version-independent.
            if type(url) == str:
                url = six.u(url)
            if type(key) == str:
                key = six.u(key)
            if type(secret) == str:
                secret = six.u(secret)

        # NOTE(review): assert-based validation — removed under `python -O`.
        assert (type(url) == six.text_type)
        # signed mode requires both key and secret; unsigned mode requires neither
        assert ((key and secret) or (not key and not secret))
        assert (key is None or type(key) == six.text_type)
        assert (secret is None or type(secret) == six.text_type)
        assert (type(timeout) == int)
        if _HAS_SSL and _HAS_SSL_CLIENT_CONTEXT:
            assert (context is None or isinstance(context, ssl.SSLContext))

        # request sequence number, incremented on every publish
        self._seq = 1
        self._key = key
        self._secret = secret

        self._endpoint = _parse_url(url)
        self._endpoint['headers'] = {
            "Content-type": "application/json",
            "User-agent": "crossbarconnect-python"
        }

        if self._endpoint['secure']:
            if not _HAS_SSL:
                raise Exception(
                    "Bridge URL is using HTTPS, but Python SSL module is missing"
                )
            if _HAS_SSL_CLIENT_CONTEXT:
                # Python 2.7.9+/3: honor the caller-supplied SSL context
                self._connection = HTTPSConnection(self._endpoint['host'],
                                                   self._endpoint['port'],
                                                   timeout=timeout,
                                                   context=context)
            else:
                # older Pythons: `context` is silently ignored
                self._connection = HTTPSConnection(self._endpoint['host'],
                                                   self._endpoint['port'],
                                                   timeout=timeout)
        else:
            self._connection = HTTPConnection(self._endpoint['host'],
                                              self._endpoint['port'],
                                              timeout=timeout)

    def publish(self, topic, *args, **kwargs):
        """
        Publish an event to subscribers on specified topic via Crossbar.io HTTP bridge.

        The event payload (positional and keyword) can be of any type that can be
        serialized to JSON.

        If `kwargs` contains an `options` attribute, this is expected to be a
        dictionary with the following possible parameters:

        * `exclude`: A list of WAMP session IDs to exclude from receivers.
        * `eligible`: A list of WAMP session IDs eligible as receivers.

        :param topic: Topic to push to.
        :type topic: str
        :param args: Arbitrary application payload for the event (positional arguments).
        :type args: list
        :param kwargs: Arbitrary application payload for the event (keyword arguments).
        :type kwargs: dict

        :returns int -- The event publication ID assigned by the broker.
        """
        if six.PY2 and type(topic) == str:
            topic = six.u(topic)
        assert (type(topic) == six.text_type)

        ## this will get filled and later serialized into HTTP/POST body
        ##
        event = {'topic': topic}

        if 'options' in kwargs:
            # broker options are metadata, not payload — pop so they don't
            # end up inside event['kwargs']
            event['options'] = kwargs.pop('options')
            assert (type(event['options']) == dict)

        if args:
            event['args'] = args

        if kwargs:
            event['kwargs'] = kwargs

        try:
            # compact separators keep the signed body byte-stable
            body = json.dumps(event, separators=(',', ':'))
            if six.PY3:
                # bytes needed for hmac.update and the HTTP body on Python 3
                body = body.encode('utf8')
        except Exception as e:
            raise Exception(
                "invalid event payload - not JSON serializable: {0}".format(e))

        params = {
            'timestamp': _utcnow(),
            'seq': self._seq,
        }

        if self._key:
            ## if the request is to be signed, create extra fields and signature
            params['key'] = self._key
            # NOTE(review): nonce from `random`, not `secrets`; bound is 2**53
            # (largest exactly-representable JSON/JS integer).
            params['nonce'] = random.randint(0, 9007199254740992)

            # HMAC[SHA256]_{secret} (key | timestamp | seq | nonce | body) => signature
            hm = hmac.new(self._secret.encode('utf8'), None, hashlib.sha256)
            hm.update(params['key'].encode('utf8'))
            hm.update(params['timestamp'].encode('utf8'))
            hm.update(u"{0}".format(params['seq']).encode('utf8'))
            hm.update(u"{0}".format(params['nonce']).encode('utf8'))
            hm.update(body)
            signature = base64.urlsafe_b64encode(hm.digest())
            params['signature'] = signature

        self._seq += 1

        path = "{0}?{1}".format(parse.quote(self._endpoint['path']),
                                parse.urlencode(params))

        ## now issue the HTTP/POST
        ##
        self._connection.request('POST', path, body, self._endpoint['headers'])
        response = self._connection.getresponse()
        # bytes on Python 3 (no .decode() here); json.loads accepts bytes on
        # Python 3.6+ — presumably that is the supported floor; verify.
        response_body = response.read()

        # Crossbar.io replies 202 Accepted on successful publication
        if response.status != 202:
            raise Exception(
                "publication request failed {0} [{1}] - {2}".format(
                    response.status, response.reason, response_body))

        try:
            res = json.loads(response_body)
        except Exception as e:
            raise Exception("publication request bogus result - {0}".format(e))

        return res['id']
def stop_server(self):
    """Tell the HTTP server listening on localhost:<port> to exit."""
    # The server interprets the non-standard QUIT method as a shutdown command.
    endpoint = "127.0.0.1:{}".format(self.port)
    conn = HTTPConnection(endpoint)
    conn.request("QUIT", "/")
    conn.getresponse()