def get_req_set(self):
    """Build one Request per (path, method) pair described by the spec."""
    req_set = []
    paths = self.get_path()
    for i in paths:
        methods = self.get_method(i)
        for j in methods:
            resp = self.get_method_response(j)
            param, path_param, dependency = self.get_method_param(j)
            # Build a fresh dict for each operation so the Request objects
            # do not all share (and overwrite) the same underlying mapping.
            request = {}
            request["host"] = self.get_host()
            request["basePath"] = self.get_base_path()
            request["schemes"] = self.get_schemes()
            request["path"] = i
            request["method"] = j
            request["consumes"] = self.get_consumes(j)
            request["produces"] = self.get_produces(j)
            request["parameter"] = param
            request["pathParam"] = path_param
            request["response"] = resp
            request["dependency"] = dependency
            req_set.append(Request(request))
    return req_set
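# A minimal sketch of the kind of wrapper the dict above could feed. The real Request
# class is defined elsewhere in that project, so every attribute name below except
# req_param (referenced in the original debug output) is an assumption for illustration.
class Request:
    def __init__(self, spec):
        # spec is the per-operation dict built in get_req_set()
        self.host = spec.get("host")
        self.base_path = spec.get("basePath")
        self.schemes = spec.get("schemes") or []
        self.path = spec.get("path")
        self.method = spec.get("method")
        self.consumes = spec.get("consumes")
        self.produces = spec.get("produces")
        self.req_param = spec.get("parameter")
        self.path_param = spec.get("pathParam")
        self.response = spec.get("response")
        self.dependency = spec.get("dependency")

    def url(self):
        # e.g. "<scheme>://<host><basePath><path>"; the scheme fallback is illustrative
        scheme = self.schemes[0] if self.schemes else "https"
        return "{}://{}{}{}".format(scheme, self.host, self.base_path, self.path)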
def set_Price(self):
    request = Request()
    actual_price = request.price
    print("The actual price in the market is: {}, at what price would you want to buy?"
          .format(actual_price))
    price_to_sell = int(input())
    return price_to_sell
def read_request(self, new_con, epoll):
    con = new_con
    msg = b''
    # hand control back until the event loop resumes this task
    yield
    while True:
        data = b''
        try:
            data = con.recv(STEP)
        except ConnectionResetError:
            con.close()
            return  # end the generator; raising StopIteration here is illegal since PEP 479
        except socket.error:
            pass
        if data.strip():
            msg += data
        else:
            req = Request(msg.decode('utf-8'))
            epoll.modify(con.fileno(), select.EPOLLOUT)
            new_task = self.send_response(con, epoll, req)
            self._tasks[con.fileno()] = new_task
            return
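# A minimal sketch (not from the original source) of how a task like read_request is
# typically created, primed, and registered with epoll. STEP, the _tasks attribute,
# and register_connection itself are assumptions for illustration only.
import select
import socket

STEP = 1024  # hypothetical receive chunk size used by con.recv(STEP) above

def register_connection(self, server_sock, epoll):
    con, _addr = server_sock.accept()
    con.setblocking(False)                        # recv() raises socket.error instead of blocking
    epoll.register(con.fileno(), select.EPOLLIN)  # wake the loop when the socket is readable
    task = self.read_request(con, epoll)          # generator object; nothing runs yet
    next(task)                                    # prime it up to the first bare `yield`
    self._tasks[con.fileno()] = task              # the event loop later drives it with next(task)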
def test_id():
    r = Request()
    assert r.__hash__() != 0
    assert r._id == -1
    r._id = 1
    assert r._id == 1
def test_await():
    r = Request()
    r.await_resp(0.01)
    r.response_ready()
import sys
sys.path.append("..")

from annotator.annotator import Annotator
from request.request import Request
from bs4 import BeautifulSoup
from elasticsearch import Elasticsearch
import datetime

annotator = Annotator()
request = Request()
es = Elasticsearch()


def clean_url(url):
    '''Remove the protocol, url parameters, and ID selectors'''
    if '://' in url:
        url = ':'.join(url.split(':')[1:])
    if '?' in url:
        url = '?'.join(url.split('?')[:1])
    if '#' in url:
        url = '#'.join(url.split('#')[:1])
    # e.g. 'https://example.com/a?x=1#top' -> '//example.com/a'
    # (only the scheme is stripped, so the leading '//' is kept)
    return url


def get_story_title(url):
    '''Remove the little trailing bits that websites add to article titles.

    for example:
    Andrew McCabe turned over memo on Comey firing to Mueller - CNNPolitics
    becomes:
# (fragment: continues inside the event loop's EPOLLIN branch; the enclosing
#  try / while / for over the polled epoll events is above this excerpt)
                            next(self._tasks[fileno])
                        except StopIteration:
                            pass
                    elif event & select.EPOLLOUT:
                        try:
                            next(self._tasks[fileno])
                        except StopIteration:
                            pass
                # hand control back to the caller after this batch of events
                yield
        except KeyboardInterrupt:
            print("server stop")
        finally:
            epoll.unregister(server.fileno())
            epoll.close()
            server.close()

    def stop(self):
        pass


if __name__ == '__main__':
    loop = Loop()
    server = Server(loop, static_dir='/Users/kexibq/repos/technopark-highload')
    testdata = '''GET /Dockerfile'''
    r = Request(testdata)
    print(r)
    resp = server.handle_request(r)
    print(resp.header)