def __init__(self, attackCard, context):
    """Initialize the Request with the attack.

    Builds one defense filter per zone in ``self.ZONES`` and registers the
    attacked player as the sole recipient via the base Request constructor.
    """
    self.attackCard = attackCard
    self.context = context
    # One filter per zone: matches cards whose "defendFrom" equals that zone.
    self.defenseFilters = [ComparisonFilter(zoneType, FixedCriteria("defendFrom", zoneType, "==")) for zoneType in self.ZONES]
    self.cardsForZone = {}
    # NOTE(review): explicit base-class call -- assumes the base Request
    # takes a list of players; confirm against the Request definition.
    Request.__init__(self, [context.player])
def test_constructor(self):
    # Verifies that Request copies every field from the input dictionary
    # and that __str__ renders them in the expected multi-line layout.
    request_dictionary = {'method': 'get',
                          'name': 'namevalue',
                          'url': 'http://httpbin.org/get',
                          'headers': {
                              'myheader': 'headervalue',
                              'myheader2': 'headervalue2',
                          },
                          'params': {
                              'myparam': 'paramvalue'
                          },
                          'body': 'bodyvalue'}
    request = Request(request_dictionary)
    self.assertEqual('get', request.method)
    self.assertEqual('namevalue', request.name)
    self.assertEqual('http://httpbin.org/get', request.url)
    self.assertEqual('headervalue', request.headers['myheader'])
    self.assertEqual('paramvalue', request.params['myparam'])
    self.assertEqual('bodyvalue', request.body)
    # Rendering uses os.linesep so the expectation is platform-independent.
    self.assertEqual('Request name: namevalue' + os.linesep +
                     ' Method : get' + os.linesep +
                     ' Headers : myheader: headervalue, myheader2: headervalue2' + os.linesep +
                     ' URL : http://httpbin.org/get' + os.linesep +
                     ' Parameters: myparam=paramvalue' + os.linesep +
                     ' Body : bodyvalue',
                     request.__str__())
def test_variables(self):
    # Verifies that {{var}} placeholders are discovered in every field and
    # that substitute_variables replaces them all.
    request_dictionary = {'method': 'get',
                          'name': 'name',
                          'url': '{{urlvar}}',
                          'headers': {
                              'myheader': '{{headervar}}',
                              'myheader2': '{{header2var}}',
                          },
                          'params': {
                              'myparam': '{{paramvar}}'
                          },
                          'body': '{{bodyvar}}'}
    request = Request(request_dictionary)
    variable_list = request.get_variable_list()
    # NOTE(review): order is asserted exactly -- presumably headers sort
    # before url/params/body in the implementation; confirm if this is
    # intentional or incidental.
    self.assertListEqual(['header2var', 'headervar', 'urlvar', 'paramvar', 'bodyvar'], variable_list)
    variables = {'urlvar': 'urlvalue',
                 'headervar': 'headervalue',
                 'header2var': 'header2value',
                 'paramvar': 'paramvalue',
                 'bodyvar': 'bodyvalue'}
    request.substitute_variables(variables)
    self.assertEqual('Request name: name' + os.linesep +
                     ' Method : get' + os.linesep +
                     ' Headers : myheader: headervalue, myheader2: header2value' + os.linesep +
                     ' URL : urlvalue' + os.linesep +
                     ' Parameters: myparam=paramvalue' + os.linesep +
                     ' Body : bodyvalue',
                     request.__str__())
def download_certificate(order):
    """Download the issued certificate for *order* in pem_all format and
    split the bundle into end-entity, intermediate and root parts.

    Exits the process on any API/transport error.
    """
    logger = loggers.get_logger(__name__)
    check_credential()
    logger.debug("Downloading certificate")
    # TODO this distinction shouldn't exist here
    if 'certificate' in order and 'id' in order['certificate'] and order['certificate']['id']:
        # for cert central accounts
        r = Request(raw_file=True).get('/certificate/{0}/download/format/pem_all'.format(order['certificate']['id']))
    else:
        # for mpki/retail accounts
        params = {"format_type": "pem_all"}
        if 'sub_id' in order and order['sub_id']:
            params["sub_id"] = order['sub_id']
        r = Request(raw_file=True).get('/certificate/download/order/{0}'.format(order['id']), params)
    if r.has_error:
        # This is an unrecoverable error. We can't see the API for some reason
        if r.is_response_error():
            logger.error('Server request failed. Unable to access API.')
            sys.exit()
        else:
            logger.error("Server returned an error condition: {0}".format(r.get_message()))
            sys.exit()
    logger.debug("Downloaded certificate for order #{0}".format(order['id']))
    certs = r.data.split("-----BEGIN")
    # 0 - empty, 1 - cert, 2 - intermediate, 3 - root... do we need root?
    # NOTE(review): assumes the PEM bundle always contains at least three
    # "-----BEGIN" sections -- IndexError otherwise; confirm API contract.
    return {
        "certificate": "-----BEGIN{0}".format(certs[1]),
        "intermediate": "-----BEGIN{0}".format(certs[2]),
        "root": "-----BEGIN{0}".format(certs[3]),
    }
def database(self, name, credentials=ADMIN, create=False, storage='memory'):
    """Sends a request to the server for a database;
    will both add new and query existing databases.

    Returns a connected Database object, or raises CompassException on
    any non-200 server answer.
    """
    if create:
        url = Database.action['post'] % (self.url, name, storage)
        response, content = self.request.post(url=url, data=None)
        if response.status == 200:
            # Re-enter without create=True to fetch the freshly made database.
            return self.database(name=name, credentials=ADMIN)
        else:
            raise CompassException(content)
    else:
        url = Database.action['get'] % (self.url, name)
        user, password = credentials
        request = Request(user, password)
        response, content = request.get(url=url)
        if response.status == 200:
            data = json.loads(content)
            return Database(self.url, name=name, credentials=credentials, data=data).connect()
        else:
            raise CompassException(content)
def create(cls, card_number, cvv, expiry_month, expiry_year, **other_args):
    """
    Creates a payment method.
    Transparent redirects are favored method for creating payment methods.
    Using this call places the burden of PCI compliance on the client
    since the data passes through it. ::

        pm = PaymentMethod.create('4242424242424242', '133', '07', '12')
        assert pm.is_sensitive_data_valid
    """
    payload = {
        'payment_method': {
            'card_number': card_number,
            'cvv': cvv,
            'expiry_month': expiry_month,
            'expiry_year': expiry_year,
        }
    }
    # Whitelist optional keyword fields against cls.create_data.
    optional_data = dict((k, v) for k, v in other_args.iteritems() if k in cls.create_data)
    payload['payment_method'].update(**optional_data)
    payload = dict_to_xml(payload)
    # Send payload and return payment method.
    req = Request(cls.create_url, payload, method='post')
    req.add_header("Content-Type", "application/xml")
    return cls(fetch_url(req))
def get_issued_orders(domain_filter=None):
    """Return all issued ssl_certificate orders, optionally filtered to
    those whose dns_names match *domain_filter* (including wildcards).

    Exits the process on any API/transport error.
    """
    logger = loggers.get_logger(__name__)
    check_credential()
    filters = '?filters[status]=issued&filters[product_type]=ssl_certificate'
    r = Request().get('/order/certificate{0}'.format(filters))
    if r.has_error:
        # This is an unrecoverable error. We can't see the API for some reason
        if r.is_response_error():
            logger.error('Server request failed. Unable to access API.')
            sys.exit()
        else:
            logger.error("Server returned an error condition: {0}".format(r.get_message()))
            sys.exit()
    order_list = r.data['orders'] if 'orders' in r.data else []
    logger.debug("Collected order list with {0} orders".format(len(order_list)))
    orders = []
    for order in order_list:
        if domain_filter and order['product']['type'] != 'client_certificate':
            if domain_filter in order['certificate']['dns_names']:
                orders.append(order)
            else:
                # Check for wildcard matches
                for dns_name in order['certificate']['dns_names']:
                    # For dns_name *.example.com, the domain_filter ends with .example.com or equals example.com
                    # dns_name[1:] is ".example.com"; domain_filter[1-len(dns_name):]
                    # is the same-length tail of the filter, so the first test
                    # matches subdomains and the second the bare apex domain.
                    if (dns_name[:2] == '*.') and (dns_name[1:] == domain_filter[1-len(dns_name):] or dns_name[2:] == domain_filter):
                        orders.append(order)
                        break
    logger.debug("Returning {0} orders after filtering".format(len(orders)))
    return orders
def update(self, **other_args):
    """
    Updates a payment method.
    Payment method can't be updated once it has been retained or redacted. ::

        pm = PaymentMethod.create('4242424242424242', '133', '07', '12')
        assert pm.is_sensitive_data_valid
        pm.update(first_name='dummy')
        if not pm.errors:
            assert pm.first_name == 'dummy'
        else:
            # deal with pm.errors
    """
    payload = {
        'payment_method': {
        }
    }
    # Whitelist keyword fields against self.create_data, same as create().
    optional_data = dict((k, v) for k, v in other_args.iteritems() if k in self.create_data)
    payload['payment_method'].update(**optional_data)
    payload = dict_to_xml(payload)
    # Send payload and return payment method.
    req = Request(self.update_url % self.payment_method_token, payload, method='put')
    req.add_header("Content-Type", "application/xml")
    res = fetch_url(req)
    self._update_fields(res)
    return self
def relationship(to, *args, **kwargs):
    """Create a relationship of type *relationship_name* from this node to
    *to*; remaining kwargs become relationship properties.

    When a Transaction is passed as ``tx=``, the POST is deferred to the
    transaction instead of being sent immediately.
    """
    tx = Transaction.get_transaction(kwargs.get("tx", None))
    create_relationship_url = self._dic["create_relationship"]
    data = {
        "to": to.url,
        "type": relationship_name,
    }
    if "tx" in kwargs and isinstance(kwargs["tx"], Transaction):
        # Strip tx so it is not serialized as relationship data.
        x = kwargs.pop("tx", None)
        del x  # Makes pyflakes happy
    if kwargs:
        data.update({"data": kwargs})
    if tx:
        # Deferred path: enqueue the POST on the transaction.
        return tx.subscribe(TX_POST, create_relationship_url, data=data, obj=self)
    response, content = Request().post(create_relationship_url, data=data)
    if response.status == 201:
        return Relationship(response.get("location"))
    elif response.status == 404:
        raise NotFoundError(response.status, "Node specified by the " \
                                             "URI not of \"to\" node" \
                                             "not found")
    else:
        raise StatusException(response.status, "Invalid data sent")
def friend_pages(self):
    '''Get friend's total page numbers.

    Returns the page count parsed from the "last page" link, or None on
    any network/parse failure (details appended to ./error.log).
    '''
    # BUG FIX: str.format returns a new string; the original discarded the
    # result and then requested the unformatted template URL.
    url = self.url.format(self.curpage, self.core_uid)
    http_request = Request(url)
    rsp_src = http_request.get_response()
    # Empty rsp_src would raise a parse error further down.
    if not rsp_src:
        # BUG FIX: this branch printed to `error_log` without ever opening
        # it (NameError); open the log file the same way the later handler
        # does.
        with open(r"./error.log", 'a+') as error_log:
            print("except AssertionError, "
                  "http_request.get_response() failed and return nothing, "
                  "Check your network and cookie, ", file=error_log)
        return
    # Parse the page and get friend's total page numbers
    soup = BeautifulSoup(rsp_src)
    text = str(soup.findAll("a", attrs={"title": unicode("最后页", "utf-8")}))
    pattern = "curpage=[0-9]+"
    r = re.search(pattern, text)
    try:
        # Skip the literal "curpage=" (8 chars) to isolate the number.
        result = int(text[r.start() + 8: r.end()])
    except AttributeError:
        # r is None when the pattern did not match.
        with open(r"./error.log", 'a+') as error_log:
            print("except AttributeError, re.search fail:", file=error_log)
            print("soup: ", soup, file=error_log)
            print("pattern: ", pattern, file=error_log)
            print("text: ", text, file=error_log)
            print("r: ", r, '\n', file=error_log)
        return
    return result
def action(self, rest_path):
    """Execute the API action at *rest_path* with the stored credentials
    and return its result."""
    credentials = {
        'api_key': self.api_key,
        'auth_token': self.auth_token,
    }
    call = Request(rest_path, **credentials)
    call.do()
    return call.result
def get_account_info(self):
    """Fetch account information through the v1 API using the stored
    credentials."""
    query = {
        'action': 'get_account_info',
        'api_key': self.api_key,
        'auth_token': self.auth_token,
    }
    call = Request('', api_version=1, params=query)
    call.do()
    return call.result
def test_run_success_callback(self):
    # run_callback must invoke the success callback exactly once, with the
    # response object itself.
    mock_success_cb = Mock()
    req = Request('http://dummy', success_callback=mock_success_cb)
    mock_resp = Response(req, 200, 'content', {})
    req.run_callback(mock_resp)
    mock_success_cb.assert_called_once_with(mock_resp)
def main():
    # Fetch REQ with a rotating pool of desktop user agents and print the
    # parsed result. (Python 2 print statement.)
    req = Request()
    req.agents = [
        ('Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like '
         'Gecko) Chrome/46.0.2486.0 Safari/537.36 Edge/13.10586'),
        ('Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like '
         'Gecko) Chrome/51.0.2704.103 Safari/537.36')]
    response = req.get(REQ)
    print __parse(response.text)
def test_run_error_callback(self):
    # run_callback must invoke the error callback exactly once, with the
    # error response object itself.
    mock_error_cb = Mock()
    req = Request('http://dummy', error_callback=mock_error_cb)
    mock_resp = ErrorResponse(req, Exception())
    req.run_callback(mock_resp)
    mock_error_cb.assert_called_once_with(mock_resp)
def main():
    # Fetch one fixed blog page with a rotating pool of desktop user agents
    # and print the parsed result. (Python 2 print statement.)
    req = Request()
    req.agents = [
        ('Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like '
         'Gecko) Chrome/46.0.2486.0 Safari/537.36 Edge/13.10586'),
        ('Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like '
         'Gecko) Chrome/51.0.2704.103 Safari/537.36')]
    response = req.get('http://utaukitune.ldblog.jp/archives/65696057.html')
    print __parse(response.text)
def home(request):
    """Django view: on a valid POSTed TaskForm, start a task Request and
    return its result; otherwise render the (possibly bound) form."""
    if request.method == 'POST':
        form = TaskForm(request.POST)
        if form.is_valid():
            cd = form.cleaned_data
            # BUG-PRONE FIX: the original assigned this to `request`,
            # shadowing the Django HttpRequest that RequestContext below
            # depends on, and bound the result to `re` (shadowing the
            # stdlib module name). Renamed both locals.
            task_request = Request('me', 'www.baidu.com')
            result = task_request.start()
            return HttpResponse(result)
    else:
        form = TaskForm()
    return render_to_response('home.html', {'form': form},
                              RequestContext(request))
def relationship(to, *args, **kwargs):
    """Create a relationship of type *relationship_name* from this node to
    *to*; remaining kwargs become relationship properties."""
    create_relationship_url = self._dic["create_relationship"]
    data = {"to": to.url, "type": relationship_name}
    if kwargs:
        data.update({"data": kwargs})
    response, content = Request().post(create_relationship_url, data=data)
    if response.status == 201:
        return Relationship(response.get("location"))
    elif response.status == 404:
        raise NotFoundError(response.status, "Node specified by the "
                                             'URI not of "to" node'
                                             "not found")
    else:
        raise StatusException(response.status, "Invalid data sent")
def view_task(self, task_id):
    """Get the details of the task with the given id.

    Raises CuckooError if the HTTP request fails.
    """
    api_url = self.server_url + self.METHOD_VIEW_TASK + str(task_id)
    request = Request(api_url)
    log.info("Request to " + api_url)
    try:
        response = request.get()
    except HTTPError, e:
        log.error("Error in view_task: " + str(e))
        raise CuckooError(str(e))
    # NOTE(review): `response` is never returned or used -- callers always
    # get None; confirm whether a `return response` is missing here.
def describe(self):
    """
    Returns a description of the vault by requesting it from Amazon Glacier

    :return: Parsed answer from Amazon Glacier
    :rtype: dictionary
    """
    path = "/-/vaults/" + self.name
    request = Request(self.connection, self.region, "GET", path)
    answer = request.send_request()
    return json.loads(answer.read())
def authtoken(self):
    """Return a TMDB authentication token, lazily requesting one the first
    time; raises TMDBError if the session is already authenticated or the
    request fails."""
    if self.authenticated:
        raise TMDBError("Session is already authenticated")
    if self._authtoken is None:
        req = Request('authentication/token/new')
        req.lifetime = 0  # do not cache this response
        dat = req.readJSON()
        if not dat['success']:
            raise TMDBError("Auth Token request failed")
        self._authtoken = dat['request_token']
        self._authtokenexpiration = datetime.fromIso(dat['expires_at'])
    return self._authtoken
def demo1(url, data, headers, stream, verify, proxies, cert, timeout):
    """Send a manually prepared GET request through a fresh Session and
    print the resulting status code."""
    session = Session()
    unprepared = Request('GET', url, data=data, headers=headers)
    prepared = unprepared.prepare()  # explicit prepare step
    response = session.send(prepared,
                            stream=stream,
                            verify=verify,
                            proxies=proxies,
                            cert=cert,
                            timeout=timeout)
    print(response.status_code)
def __parse(text):
    """Parse a listing page and fetch every 'pstl' link it contains."""
    et = etree.fromstring(text, parser=etree.HTMLParser())
    for e in et.xpath("//a[@class='pstl']"):
        suffix = e.attrib['href']
        url = REQ + suffix
        req = Request()
        # Rotating pool of desktop user agents.
        req.agents = [
            ('Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like '
             'Gecko) Chrome/46.0.2486.0 Safari/537.36 Edge/13.10586'),
            ('Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like '
             'Gecko) Chrome/51.0.2704.103 Safari/537.36')]
        response = req.get(url)
        # NOTE(review): et2 is parsed but never used -- the per-page
        # processing appears to be missing or truncated; confirm intent.
        et2 = etree.fromstring(response.text, parser=etree.HTMLParser())
def process_data(self, data, captured):
    """Dispatch one captured packet to its protocol handler and persist
    the parsed request (type, query, source ip/mac, capture time)."""
    handler, eth_hdr, ip_hdr = self._get_handler(data)
    ip = None
    if handler is not None:
        if ip_hdr is not None:
            # Source addresses stored as integers (hex-decoded).
            ip = int(ip_hdr.src.encode('hex'), 16)
        mac = int(eth_hdr.src.encode('hex'), 16)
        try:
            key, query, sub_type = handler.process(eth_hdr, ip_hdr)
            captured = datetime.datetime.fromtimestamp(captured)
            Request.create(type=key, query=query, ip=ip, mac=mac,
                           captured=captured, sub_type=sub_type)
        except Exception:
            # BUG FIX: was a bare `except:` that printed the literal string
            # 'here', silently swallowing every error (including SystemExit
            # and KeyboardInterrupt) with no diagnostic. Narrowed to
            # Exception and log the full traceback instead.
            import traceback
            traceback.print_exc()
def sessionid(self):
    """Return a TMDB session id, lazily exchanging the auth token for one;
    raises TMDBError if no auth token exists or generation fails."""
    if self._sessionid is None:
        if self._authtoken is None:
            raise TMDBError("No Auth Token to produce Session for")
        # TODO: check authtokenexpiration against current time
        req = Request('authentication/session/new', \
                      request_token=self._authtoken)
        req.lifetime = 0  # do not cache this response
        dat = req.readJSON()
        if not dat['success']:
            raise TMDBError("Session generation failed")
        self._sessionid = dat['session_id']
    return self._sessionid
def create_file_task(self, task_id, sample_path, sensor_filename, timeout, priority, memory, clock, enforce_timeout, custom_options):
    """Submit the given file to the cuckoo api in order to create a new task for it.

    Keyword arguments:
    task_id -- the manually specified id for the task
    sample_path -- the path to the sample file
    sensor_filename -- the name of the sensor file
    timeout -- timeout for the task
    priority -- priority to assign to the task (1-3)
    memory -- enable the creation of a full memory dump of the analysis machine
    clock -- sets the vm clock for this task to the given value
    enforce_timeout -- enable to enforce the execution for the full timeout value
    custom_options -- custom option parameters that will be appended to the option parameter of the cuckoo api
    """
    api_url = self.server_url + self.METHOD_CREATE_FILE_TASK
    # Build the cuckoo "options" string: sensor first, then custom options.
    if sensor_filename:
        options_params = "sensor=" + sensor_filename
    else:
        options_params = ""
    if custom_options:
        options_params = options_params + ", " + custom_options
    post_params = {
        'custom': task_id,
        'options': options_params,
        'timeout': timeout,
        'priority': priority,
        'memory': memory,
        'clock': clock,
        'enforce_timeout': enforce_timeout
    }
    # NOTE(review): the file handle is never closed explicitly.
    files = {'file': open(sample_path, 'rb')}
    request = Request(api_url)
    log.info("Request to " + api_url + " with params " + str(post_params))
    try:
        response = request.post(post_params, files)
    except HTTPError, e:
        log.error("Error in create_file_task: " + str(e))
        raise CuckooError(str(e))
    # NOTE(review): `response` is never returned or used -- callers always
    # get None; confirm whether a `return response` is missing here.
def __init__(self, url, password, proxy=None):
    """Initialize a password-tagged request.

    The response payload is expected to be wrapped in
    ``<password-suffix>...</password-suffix>`` tags, extracted with a
    compiled regex. The URL path (minus extension) doubles as the query
    string, with slashes converted to spaces.
    """
    Request.__init__(self, url, proxy)
    self.password = password
    # password[2:] strips a 2-character prefix -- presumably a marker;
    # confirm against the password format used by callers.
    self.extractor = re.compile("<%s>(.*)</%s>" % (self.password[2:], self.password[2:]), re.DOTALL)
    # self.extractor_debug = re.compile( "<%sDEBUG>(.*)</%sDEBUG>" % ( self.password[2:], self.password[2:] ), re.DOTALL )
    self.parsed = urlparse.urlparse(self.url)
    self.data = None
    if not self.parsed.path:
        self.query = self.parsed.netloc.replace("/", " ")
    else:
        # Drop the file extension, then turn path separators into spaces.
        self.query = "".join(self.parsed.path.split(".")[:-1]).replace("/", " ")
class Connection:
    """Thin REST client for the users / streams / sources endpoints,
    authenticated with a public/private key pair."""

    def __init__(self, host, public_key, private_key):
        self.host = host
        self.public_key = public_key
        self.private_key = private_key
        self.auth = Auth(public_key, private_key)
        self.request = Request(self.host, self.auth)

    def add_user(self):
        """Create a new (empty) user record."""
        return self.request.post('/users/', {})

    def delete_user(self, user):
        """Remove the given user."""
        path = '/users/' + user
        return self.request.delete(path)

    def get_streams(self):
        """List all streams."""
        return self.request.get('/streams/')

    def add_stream(self, stream):
        """Create a stream."""
        return self.request.post('/streams/', {'stream': stream})

    def delete_stream(self, stream):
        """Remove a stream."""
        path = '/streams/' + stream + '/'
        return self.request.delete(path)

    def get_sources(self, stream):
        """List the sources attached to a stream."""
        path = '/streams/' + stream + '/'
        return self.request.get(path)

    def add_source(self, stream, source):
        """Attach a source to a stream."""
        path = '/streams/' + stream + '/'
        return self.request.post(path, {'source': source})

    def delete_source(self, stream, source):
        """Detach a source from a stream."""
        path = '/streams/' + stream + '/' + source + '/'
        return self.request.delete(path)
def get_order(order_id):
    """Fetch a single certificate order from the API; exits the process on
    any transport or server error."""
    logger = loggers.get_logger(__name__)
    check_credential()
    result = Request().get('/order/certificate/{0}'.format(order_id))
    if result.has_error:
        # Unrecoverable: either the API is unreachable or it rejected us.
        if result.is_response_error():
            logger.error('Server request failed. Unable to access API.')
        else:
            logger.error("Server returned an error condition: {0}".format(result.get_message()))
        sys.exit()
    logger.debug("Returning order #{0}".format(result.data['id']))
    return result.data
def get_duplicates(order_id):
    """Fetch the list of duplicate certificates for an order; exits the
    process on any transport or server error."""
    logger = loggers.get_logger(__name__)
    check_credential()
    logger.debug("Getting list of duplicates from API")
    result = Request().get('/order/certificate/{0}/duplicate'.format(order_id))
    if result.has_error:
        # Unrecoverable: either the API is unreachable or it rejected us.
        if result.is_response_error():
            logger.error('Server request failed. Unable to access API.')
        else:
            logger.error("Server returned an error condition: {0}".format(result.get_message()))
        sys.exit()
    logger.debug("Collected {0} duplicates for order_id {1}".format(len(result.data), order_id))
    return result.data
def process_request(connection):
    """Read one request from the connection, build the response for its
    path and send it back; empty requests get an empty reply."""
    with connection:
        r = request_from_connection(connection)
        log('request log:\n <{}>'.format(r))
        # Guard against the empty requests Chrome sends, which would
        # otherwise crash the program.
        if len(r) > 0:
            # Hand the raw request text to the Request object.
            request = Request(r)
            # response_for_path maps the request path to response bytes.
            response = response_for_path(request)
            log("response log:\n <{}>".format(response))
            # Send the response back to the client.
            connection.sendall(response)
        else:
            connection.sendall(b'')
            log('接收到了一个空请求')
def connectionLost(self, reason):
    """Called when the connection is shut down.

    Marks the worker dead and enqueues a server-side cleanup request for
    this user so the factory can release associated state.

    Args:
        reason (twisted.python.failure.Failure): The reason the connection
            was lost.
    """
    self.is_alive = False
    self.factory.logger.debug("Worker: Lost a client %r, reason %r", self.name, reason.getErrorMessage())
    cleanup_message = CleanupUser(user_name=self.name)
    self.factory.logger.debug("Putting user cleanup request in queue")
    # is_server_request marks this as internally generated, not client I/O.
    request = Request(self, cleanup_message, is_server_request=True)
    self.factory.request_queue.put(request)
    self.factory.logger.debug("Successfully queued the request")
def process_request(connection):
    """Receive one request, dispatch on its path, send the response and
    close the connection."""
    r = connection.recv(1024)
    r = r.decode()
    log('request log:\n{}'.format(r))
    # Hand the raw request text to the Request object.
    request = Request(r)
    # response_for_path maps the request path to response bytes.
    response = response_for_path(request)
    if 'static' in request.path:
        # Static payloads may be binary; log size only.
        log("response for static size: {}".format(len(response)))
    else:
        log("response log:\n{}".format(response.decode()))
    # Send the response back to the client.
    connection.sendall(response)
    # Done with this request -- close the connection.
    connection.close()
def request(cls, method, data, klass=None): """ """ #body = "[{\"requestClass\":\"CityProductionService\",\"requestId\":9,\"requestData\":[[27]],\"__class__\":\"ServerRequest\",\"requestMethod\":\"pickupProduction\"}]" payload = [ OrderedDict([("requestClass", klass or cls.REQUEST_CLASS), ("requestId", Request.REQUEST_ID), ("requestData", data), ("__class__", "ServerRequest"), ("requestMethod", method)]) ] response = Request.request(payload) return response
def process_message(self, application, message):
    """This coroutine looks at the message, determines which handler will
    be used to process it, and then begins processing.

    The application is responsible for handling misconfigured routes.
    """
    request = Request.parse_msg(message)
    if request.is_disconnect():
        return  # Ignore disconnect msgs. Dont have a reason to do otherwise
    handler = application.route_message(request)
    result = handler()
    http_content = http_response(result['body'], result['status_code'],
                                 result['status_msg'], result['headers'])
    application.msg_conn.reply(request, http_content)
def process_request(connection):
    """Handle a single request (run on its own thread)."""
    with connection:
        r = get_all_request(connection)
        log('此次请求的内容是:\n{}\n'.format(r))
        # Guard against empty requests so the parser does not crash.
        if len(r) > 0:
            request = Request(r)
            # log('<F_process_request_request:\n {}'.format(request))
            response = response_for_request(request)
            # log('<F_process_request_response:\n {}'.format(response))
            connection.sendall(response)
        else:
            connection.sendall(b'')
            log('接收到一个空请求')
def all_devices():
    """Page through the kronos /devices endpoint (200 per page), printing
    each raw page and a running total, until an empty page is returned."""
    page = 0
    total = 0
    while True:
        payload = Request.create(
            'GET',
            'https://api.arrowconnect.io/api/v1/kronos/devices',
            {
                '_size': 200,
                '_page': page,
            })
        print(payload)
        count = len(payload['data'])
        if count == 0:
            break
        total += count
        print(f"got page {page}, count {total}")
        page += 1
def run(self):
    """Simulation process: emit one request per sampled inter-arrival gap,
    choosing heavy or normal execution time by percentage."""
    seq = 0
    while True:
        # Sample the inter-arrival time and wait it out.
        gap = self.inter_gen.next()
        yield self.env.timeout(gap)
        # NOTE: the percentage must be an integer (scaled to per-mille).
        heavy = random.randint(0, 999) < self.heavy_percent * 10
        cost = self.heavy_exec_time if heavy else self.exec_time
        self.host.receive_request(
            Request(seq, cost, 0, self.env.now, self.flow_id, self.mean))
        seq += 1
def read(self, key):
    """Ask the server for the entry stored under *key*.

    Parameters:
        key (any hashable type): key of the entry to read.

    Returns the server's Response: success with the entry in ``data`` on a
    hit, failure with 'Entry does not exist.' otherwise.
    """
    # Opcode 0 = read; value and extra fields unused.
    wire_message = pickle.dumps(Request(0, key, None, None))
    self.client_socket.send(wire_message)
    return self._listen_for_response()
def delete(self, key):
    """Ask the server to delete the entry stored under *key*.

    Parameters:
        key (any hashable type): key of the entry to delete.

    Returns the server's Response: success with the removed entry in
    ``data``, failure with 'Entry could not be deleted.' otherwise.
    """
    # Opcode 2 = delete; value and extra fields unused.
    wire_message = pickle.dumps(Request(2, key, None, None))
    self.client_socket.send(wire_message)
    return self._listen_for_response()
def __init__(self):
    """Default constructor.

    Parses the configured input file with BeautifulSoup and builds one
    Request per REQUEST element, extracting id, component, description,
    start/end timestamps, owner and a comma-joined email list.
    """
    self.requests = \
        [Request(utils.get_text(request.find(ATTRIBUTES[attribute.ID].input_source)),
                 self.create_component(request),
                 utils.get_text(request.find(ATTRIBUTES[attribute.DESCRIPTION].input_source)),
                 utils.get_timestamp(request.find(ATTRIBUTES[attribute.STARTTIME].input_source), TIMESTAMP_FORMAT),
                 utils.get_timestamp(request.find(ATTRIBUTES[attribute.ENDTIME].input_source), TIMESTAMP_FORMAT),
                 utils.get_text(request.find(ATTRIBUTES[attribute.OWNER].input_source)),
                 # A request may carry several email elements; join them.
                 ",".join([utils.get_text(email) for email in request.findAll(ATTRIBUTES[attribute.EMAIL].input_source)]))
         for request in BeautifulSoup(open(Config.instance().config[LOCATION]), "lxml")
         .findAll(REQUEST)]
    print("Number of requests found in input file: {0}".format(
        len(self.requests)))
def redact(self):
    """Issue the samurai `redact` call for this payment method and fold the
    server's answer back into this instance. ::

        pm = PaymentMethod.find(token)
        if not pm.redact().is_redacted:
            # Something prevented the redaction -- check pm.errors
        else:
            # Successfully redacted.
    """
    redact_request = Request(self.redact_url % self.payment_method_token,
                             method='post')
    self._update_fields(fetch_url(redact_request))
    return self
def standup_mode(self, req: Request):
    """Dispatch one user utterance while a standup is in progress:
    end-of-report, remember-topic, end-standup, continue, skip-person, or
    an unrecognized-command fallback."""
    if 'end.report' in req.intents():
        # The speaker is done; hand the floor to the next person.
        self.call_next(req.user_id())
    elif req.command().startswith('запомни тему '):
        self.add_theme(req)
    elif self.end_standup_re.match(req.command()):
        self.end_standup(req.user_id())
    elif req.command() == 'продолжить':
        self.response['text'] = ' '
        # Ignore non-commands: keep text blank, repeat the spoken prompt.
        self.response['tts'] = self.tts()
    elif 'skip.person' in req.intents():
        self.response['text'] = 'Хорошо, пропускаю.\n'
        self.response['tts'] = 'хорошо , пропускаю .'
        self.call_next(req.user_id())
    else:
        self.response['text'] = 'Не смогла распознать команду. Во время проведения стендапа могу ' \
                                'распознать следующие команды: "у меня всё", "продолжить", ' \
                                '"его|её сегодня нет", "запомнить тему ТЕМА", "закончи стендап"'
def _get_results(self, url):
    """Fetch an index lookup URL and materialize the hits as Node or
    Relationship objects depending on what this index stores."""
    response, content = Request().get(url)
    if response.status == 200:
        data_list = json.loads(content)
        if self._index_for == NODE:
            return [Node(n['self'], data=n['data']) for n in data_list]
        else:
            return [Relationship(r['self'], data=r['data']) for r in data_list]
    elif response.status == 404:
        raise NotFoundError(response.status, "Node or relationship not found")
    else:
        raise StatusException(response.status, "Error requesting index with GET %s" \
                                               % url)
def share_cfrags(usr_pk, sender_capsule, sender_ciphertext, connection):
    """Re-encrypt the sender's capsule for every other connected client and
    push each of them a cfrag request over their socket."""
    global key_fragment_arr
    for clients in list_of_clients:
        # Skip the sender's own connection.
        if clients != connection:
            # get sender PK from ip
            src_pk = usr_pk
            src_id = pk_to_id[src_pk]
            dst_ip = clients.getpeername()[0]
            dst_id = ip_to_id[dst_ip][0]
            # get kfrag for sender and clients
            kfrags = key_fragment_arr.get_fragment(src_id, dst_id)
            # Compute the cfrag
            cfrags = [pre.reencrypt(kfrag, sender_capsule).to_bytes() for kfrag in kfrags]
            # Send the sender_capsule, cfrag, senderPk, sender_ciphertext
            req = Request.send_cfrag_request(sender_capsule.to_bytes(), cfrags, src_pk, sender_ciphertext)
            clients.send(req.serialize().encode(ENCODING))
def send_url(q, i):
    """Worker loop: pull URLs from queue *q* and send them over the shared
    socket as GET requests; a None item is the shutdown sentinel."""
    global sock
    global mutex
    while True:
        url = q.get()
        if url is None:
            # Sentinel: stop this worker.
            break
        json_data = Request.create(method='GET', text=url)
        try:
            # Serialize socket writes across worker threads.
            with mutex:
                # print(time.time(),'send i ',json_data)
                sock.send(json_data.encode(code))
        except socket.timeout:
            print('send data timeout')
        except socket.error as ex:
            # Fatal socket failure: abandon this worker.
            # print('send data error', ex)
            break
def parse(self, data):
    """Parse raw request and return Request object

    :type data: bytearray
    """
    headers_end = self.get_headers_end(data)
    body_raw = data[headers_end:]
    head_fields = data[:headers_end]\
        .decode('utf-8', errors='ignore')\
        .splitlines()
    headers = {}
    # NOTE(review): [1:-1] skips the request line (index 0) AND the final
    # head line -- presumably a trailing blank before the body; confirm
    # get_headers_end never leaves a real header in the last slot.
    for field in head_fields[1:-1]:
        try:
            key, value = field.split(':', maxsplit=1)
            headers[key] = value.strip()
        except ValueError:
            # Line without a colon separator: ignore it.
            continue
    return Request(head_fields[0], headers, body_raw)
def run(self):
    """Simulation process: emit one request per sampled inter-arrival gap,
    with lognormally distributed execution and network times."""
    seq = 0
    while True:
        # Sample the inter-arrival time and wait it out.
        gap = self.inter_gen.next()
        yield self.env.timeout(gap)
        service = np.random.lognormal(self.mean, self.var)
        transit = np.random.lognormal(self.network_mean, self.network_var)
        self.host.receive_request(
            Request(seq, service, transit, self.env.now, self.flow_id,
                    self.log_mean))
        seq += 1
def data_init():
    """Load all open SALG issues and bucket them by creation year into
    year_data_dict, skipping Rejected/Done issues."""
    issues = jira_service.get_issues_for_project(project_id="SALG", max_results=0)
    for issue in issues:
        if issue.fields.status.name in ("Rejected", "Done"):
            continue
        created = dateutil.parser.parse(issue.fields.created)
        year = created.year
        wrapped = Request(issue, "SOURCE")
        if year not in year_data_dict:
            year_data_dict[year] = YearData(year)
        year_data_dict[year].add_request(wrapped)
def handler(event, context):
    """Lambda entry point: validate the request body, then persist the
    store and its employees atomically via a DynamoDB transaction."""
    # Validation
    request = Request(**loads(event.get("body", {})))
    # Business logic, running safely under a transaction
    with TransactWrite(connection=Connection(),
                       client_request_token=generate_uuid()) as transaction:
        transaction.save(
            Store(
                name=request.name,
                delay=request.delay,
                working_since=request.working_since.strftime(
                    "%Y-%m-%d %H:%M:%S"),
            ))
        # One Employee row per listed name, in the same transaction.
        for employee_name in request.employees:
            transaction.save(Employee(name=employee_name))
def add(self, key, value):
    """Ask the server to store *value* under *key*.

    Parameters:
        key (any hashable type): key of the entry.
        value (any type): value of the entry.

    Returns the server's Response: success with the added entry in
    ``data``, failure with 'Entry could not be added.' otherwise.
    """
    # Opcode 1 = add; the trailing field is unused.
    wire_message = pickle.dumps(Request(1, key, value, None))
    self.client_socket.send(wire_message)
    return self._listen_for_response()
def handle_request(connection, address):
    """Receive one HTTP request, route it and send the response back; a
    malformed (too-short) request just closes the connection."""
    raw = connection.recv(4096)
    http_text = raw.decode('utf-8')
    if len(http_text.split()) < 2:
        # Not even "METHOD path" -- drop it.
        connection.close()
        return
    request = Request.from_http_and_address(http_text, address)
    log('Request is {}'.format(request))
    log('Request path is {}'.format(request.path))
    response = router.handle(request)
    log('response is', response)
    connection.sendall(response)
    connection.close()
def visit(self):
    """
    Visits the players city and prints out buildings we are
    interested in... good for sabotaging
    """
    data = self.request('visitPlayer', [self.player_id])
    city = Request.service(data, 'OtherPlayerService')
    buildings = city['city_map']['entities']
    for building in buildings:
        # Skip the uninteresting building categories.
        if building['type'] not in ['residential', 'street', 'decoration']:
            # ['goods', 'production', 'random_production']:
            pprint.pprint(building)
    return data
def handle_connection(self, new_socket, addr): """ 处理已经连接的socket :param new_socket: 已经连接的socket :param addr: 客户端的地址,是一个元组:(clientHost, clientPort) :return: """ # 接收客户端的请求数据 request_data = self.get_request_data(new_socket) self.logger.info('[INFO]handling client: {}:{} now.'.format( addr[0], addr[1])) print(request_data) # 将浏览器发送的HTTP数据封装成Request对象 self.request = Request(request_data.decode()) # 将request对象交给框架去处理,并从框架那里得到response对象 new_socket.sendall(b'HTTP/1.1 200 OK\r\n\r\nHello') new_socket.close()
def process_request(connection, r_d): r = request_cache(connection) # 因为 chrome 会发送空请求导致 split 得到空 list # 所以这里判断一下防止程序崩溃 if len(r.split()) < 2: return # 设置 request 的 method request = Request(r) # chrome会发空请求,下面2行代码可以看出 # log('r =====>', r) # log('r.split()=====>', r.split()) # 用 response_for_path 函数来得到 path 对应的响应内容 response = response_for_path(request, r_d) # 把响应发送给客户端 connection.sendall(response) # 处理完请求, 关闭连接 connection.close()
def func():
    """Submit three sample paragraphs for rental-agreement analysis using
    the AWS Comprehend recognizer/classifier pair backed by Mongo storage."""
    bucket = "rent-safe"
    recognizer = AWSComprehendEntityRecognizer(bucket)
    classifier = AwsComprehendClassifier(bucket)
    storage = MongoStorage()
    submit = SubmitForAnalysis(storage, classifier, recognizer)
    sample_paragraphs = [
        "865 Laurelwood Drive",
        "KW for Rent",
        "Pets - No animals, birds, reptiles, or pets of any kind will be kept on or about the premises without the written permission of the Landlord.",
    ]
    submit.execute(Request({"paragraphs": sample_paragraphs}))
def httpHandler(data: str, root: str) -> bytes:
    """Parse a raw HTTP request string and return the serialized response
    bytes; malformed requests yield b''."""
    firstLineParsed = False
    request = Request()
    # Drop the trailing blank line pair produced by the final CRLFs.
    for line in data.split('\r\n')[:-2]:
        if not firstLineParsed:
            chunks = line.split(' ')
            if len(chunks) != 3:
                # BUG FIX: was `return ''` -- a str from a function
                # annotated `-> bytes`, which breaks callers writing the
                # result to a socket.
                return b''
            request.method = chunks[0]
            request.setUrl(chunks[1])
            request.version = chunks[2]
            firstLineParsed = True
        else:
            chunks = line.split(':', 1)
            if len(chunks) != 2:
                # BUG FIX: same str/bytes mismatch as above.
                return b''
            request.addHeader(chunks[0], chunks[1])
    return handleHttpRequest(request, root).serialize()
def get_response(self):
    """Assemble the cypher query (ordering, skip, limit) from the stored
    state, POST it, and return the parsed JSON result; raises
    CypherException on HTTP 400 and StatusException otherwise."""
    # Preparing slicing and ordering
    q = self.q
    params = self.params
    if self._order_by:
        orders = []
        for o, order in enumerate(self._order_by):
            order_key = "_order_by_%s" % o
            if order_key not in params:
                # Optional third tuple element picks null handling:
                # True -> "!" , False -> "?" (cypher nullable markers).
                nullable = ""
                if len(order) == 3:
                    if order[2] is True:
                        nullable = "!"
                    elif order[2] is False:
                        nullable = "?"
                orders.append(u"n.`{%s}`%s %s" % (order_key, nullable, order[1]))
                params[order_key] = order[0]
        if orders:
            q = u"%s order by %s" % (q, ", ".join(orders))
    # Lazy slicing
    if isinstance(self._skip, int) and "_skip" not in params:
        q = u"%s skip {_skip} " % q
        params["_skip"] = self._skip
    if isinstance(self._limit, int) and "_limit" not in params:
        q = u"%s limit {_limit} " % q
        params["_limit"] = self._limit
    # Making the real request
    data = {
        "query": q,
        "params": params,
    }
    response, content = Request(**self._auth).post(self._cypher, data=data)
    if response.status == 200:
        response_json = json.loads(content)
        return response_json
    elif response.status == 400:
        err_msg = u"Cypher query exception"
        try:
            err_msg = "%s: %s" % (err_msg, json.loads(content)["message"])
        except:
            # Non-JSON error body: fall back to raw content.
            err_msg = "%s: %s" % (err_msg, content)
        raise CypherException(err_msg)
    else:
        raise StatusException(response.status, "Invalid data sent")
def __init__(self, f):
    """Parse a streaming-videos problem description from file object *f*:
    caches, video sizes, endpoints (with per-cache latencies) and requests."""
    # First line: counts and cache capacity.
    params = f.readline().split(" ")
    videoCount = int(params[0])
    endpointCount = int(params[1])
    requestCount = int(params[2])
    cacheCount = int(params[3])
    cacheCapacity = int(params[4])
    # Caches
    self.caches = []
    for i in range(cacheCount):
        self.caches.append(Cache(i, cacheCapacity))
    # Videos line
    videoSizes = f.readline().split(" ")
    self.videos = []
    for i in range(videoCount):
        self.videos.append(Video(i, int(videoSizes[i])))
    # Endpoints section
    self.endpoints = []
    for i in range(endpointCount):
        endpointLine = f.readline().split(" ")
        latency = int(endpointLine[0])
        availableCacheCount = int(endpointLine[1])
        caches = []
        for j in range(availableCacheCount):
            cacheLine = f.readline().split(" ")
            cacheTuple = (self.caches[int(cacheLine[0])], int(cacheLine[1]))
            caches.append(cacheTuple)
        self.endpoints.append(Endpoint(latency, caches))
    # Requests section
    self.requests = []
    for i in range(requestCount):
        requestLine = f.readline().split(" ")
        videoId = int(requestLine[0])
        endpointId = int(requestLine[1])
        # FIX: the original rebound `requestCount` (the loop bound) here.
        # Harmless at runtime (range() was already evaluated) but
        # misleading; use a distinct name for the per-request count.
        numRequests = int(requestLine[2])
        request = Request(self.videos[videoId], self.endpoints[endpointId],
                          numRequests)
        self.requests.append(request)
        self.endpoints[endpointId].requests.append(request)
def slide(self):
    """Advance the sliding window one tick: drop the oldest buffer, append
    a fresh one, and fill it with the newly generated requests."""
    self.__buffers = self.__buffers[1:]
    self.__buffers.append(Buffer())
    # NOTE(review): `.next` is not called here -- if __req_gen is a plain
    # generator this passes a bound method to range() and raises TypeError;
    # presumably next is a property/attribute on a custom class. Confirm.
    _num_new_reqs = self.__req_gen.next
    _cur_time = self.__cur_time.inc_n_get()
    _new_reqs = [Request(_cur_time) for _ in range(_num_new_reqs)]
    self.__buffers[-1].put(_new_reqs)
    # _err = next(self.__err_gen())
    # if _err >= 0:
    #     return _err
    # else:
    #     self.__buffers[0].put([Request(_cur_time - self.win_size) for _ in range(abs(_err))])
    #     return _err
    return 0