def login(self):
    header = {'Content-Type': 'application/json'}
    data = {'public_key': self.pub_key}
    try:
        resp = yield treq.post(url=self.url + 'login/', headers=header,
                               json=data, persistent=False)
        confirm_info = yield treq.json_content(resp)
        print(confirm_info)
        self.nonce = confirm_info['message']
        print('login succeeded')
    except Exception as err:
        print(err)

    try:
        signature = crypto.ECCipher.geth_sign(self.priv_key, self.nonce)
        header_confirm = {'Content-Type': 'application/json'}
        data_confirm = {'public_key': self.pub_key, 'code': signature}
        resp = yield treq.post(self.url + 'confirm/',
                               headers=header_confirm, json=data_confirm,
                               persistent=False)
        confirm_info = yield treq.json_content(resp)
        print(confirm_info)
        self.token = confirm_info['message']
        print('login confirmed')
    except Exception as err:
        print(err)

    return confirm_info['message']
def make_request():
    if ssl:
        response = yield treq.post(url, agent=self.agent, **kwargs)
    else:
        response = yield treq.post(url, **kwargs)
    try:
        content = yield treq.content(response)
    except:
        log.msg("Error while reading body in HTTP response",
                level=logger.WARN)
        self.stats_client.incr('http.requests.errors.read_body_error')
        err = RuntimeError(
            "Error while reading body in HTTP response (Response code: %d)."
            % response.code)
        err.status_code = response.code
        raise err
    else:
        if response.code == 200:
            defer.returnValue(content)
        else:
            log.msg("Error: Bad status code in HTTP response",
                    http_response_code=response.code,
                    http_content=content,
                    level=logger.WARN)
            self.stats_client.incr(
                'http.requests.errors.bad_status_code.%d' % response.code)
            err = RuntimeError(
                'Error: Bad status code in HTTP response: %d' % response.code)
            err.status_code = response.code
            raise err
def processSaveOar(self, line):
    if (time.time() - self.startTime) > 60 * 60:
        # timeout, report and schedule next job
        data = {
            "Success": False,
            "Done": True,
            "Message": "Timeout. Oar save took too long"
        }
        treq.post(self.currentJob['report'].encode('ascii'),
                  urlencode(data),
                  headers={
                      'Content-Type': 'application/x-www-form-urlencoded'
                  }).addErrback(self.errorCB)
        print "save oar did not complete within time limit for region %s" % self.region
        self.processLog = False
        reactor.callLater(10, self.pollForWork)
    if not "[ARCHIVER]" in line:
        return
    if "Finished" in line:
        oar = os.path.join(self.currentJob['location'], "mgm.oar")
        d = treq.post(
            self.currentJob['upload'].encode('ascii'),
            files={'file': (self.currentJob['region'], open(oar, 'rb'))})

        def removeWhenDone(result):
            os.remove(oar)

        d.addCallback(removeWhenDone)
        print "save oar finished successfully for region %s" % self.region
        self.processLog = False
        reactor.callLater(10, self.pollForWork)
def processLoadOar(self, line):
    if (time.time() - self.startTime) > 60 * 60:
        # timeout, report and schedule next job
        data = {
            "Success": False,
            "Done": True,
            "Message": "Timeout. Oar load took too long"
        }
        treq.post(self.currentJob['report'].encode('ascii'),
                  urlencode(data),
                  headers={
                      'Content-Type': 'application/x-www-form-urlencoded'
                  }).addErrback(self.errorCB)
        print "load oar did not complete within time limit for region %s" % self.region
        self.processLog = False
        reactor.callLater(10, self.pollForWork)
    if not "[ARCHIVER]" in line:
        return
    if "Successfully" in line:
        # report success back to the job server
        data = {"Success": True, "Done": True, "Message": "Oar loaded successfully"}
        treq.post(self.currentJob['report'].encode('ascii'),
                  urlencode(data),
                  headers={
                      'Content-Type': 'application/x-www-form-urlencoded'
                  }).addErrback(self.errorCB)
        print "load oar completed successfully for region %s" % self.region
        self.processLog = False
        reactor.callLater(10, self.pollForWork)
def http_post(self, url, data, callback):
    connection_pool = self.config["pool"] if "pool" in self.config else None
    treq.post(url, data, timeout=HTTP_TIMEOUT, pool=connection_pool).addCallbacks(
        lambda response, url=url, callback=callback:
            self.http_response(response, url, callback),
        errback=lambda error, url=url: anode.Log(logging.ERROR).log(
            "Plugin", "error",
            lambda: "[{}] error processing HTTP POST [{}] with [{}]".format(
                self.name, url, error.getErrorMessage())))
def login(self, username=None):
    header = {'Content-Type': 'application/json'}
    data = {'public_key': self.public_key, 'username': username}
    resp = yield treq.post(url=self.url + 'account/v1/login/',
                           headers=header, json=data, persistent=False)
    confirm_info = yield treq.json_content(resp)
    logger.debug("login response: %s", confirm_info)
    self.nonce = confirm_info['message']
    logger.debug('nonce: %s', self.nonce)

    signature = ECCipher.create_signature(self.account.private_key, self.nonce)
    header_confirm = {'Content-Type': 'application/json'}
    data_confirm = {
        'public_key': self.public_key,
        'code': Encoder.bytes_to_hex(signature)
    }
    resp = yield treq.post(self.url + 'account/v1/confirm/',
                           headers=header_confirm, json=data_confirm,
                           persistent=False)
    confirm_info = yield treq.json_content(resp)
    self.token = confirm_info['message']
    return confirm_info['status']
def _oneShotPostActions(self, deviceGraph, inferred):
    """
    Inferred graph may contain some one-shot statements. We'll send
    statement objects to anyone on web sockets, and also generate
    POST requests as described in the graph.

    one-shot statement ?s ?p ?o
    with this in the graph:
      ?osp a :OneShotPost
      ?osp :subject ?s
      ?osp :predicate ?p
    this will cause a post to ?o
    """
    # nothing in this actually makes them one-shot yet. they'll
    # just fire as often as we get in here, which is not desirable
    log.debug("_oneShotPostActions")

    def err(e):
        log.warn("post %s failed", postTarget)

    for osp in deviceGraph.subjects(RDF.type, ROOM['OneShotPost']):
        s = deviceGraph.value(osp, ROOM['subject'])
        p = deviceGraph.value(osp, ROOM['predicate'])
        if s is None or p is None:
            continue
        # log.info("checking for %s %s", s, p)
        for postTarget in inferred.objects(s, p):
            log.debug("post target %r", postTarget)
            # this packet ought to have 'oneShot' in it somewhere
            self.sendToLiveClients({"s": s, "p": p, "o": postTarget})
            log.debug("  POST %s", postTarget)
            if not self.mockOutput:
                treq.post(postTarget, timeout=2).addErrback(err)
def setup_nodes(request, node_id):
    """The 'setup_nodes' endpoint is called by the controller node and
    sets up the attributes for all the main nodes
    """
    servicemap = node.servicemap
    print "Servicemap is: "
    pprint(servicemap)

    if node_id == 'all' or node_id in node.inv_table.keys():
        for route in servicemap:
            node_id = route['id']
            addresses = node.inv_table[node_id]
            for addr in addresses:
                # Do the initial setup
                url = 'http://' + addr + '/setup'
                node_routes_json = json.dumps(route['next_hops'])
                print "NODE ROUTES JSON: ", str(node_routes_json)
                d = treq.post(url, data=node_routes_json)
                d.addCallback(node.ack_response)

                # Set attributes
                attributes = route['attr']
                for stat_key, stat_val in attributes.iteritems():
                    attr_url = 'http://' + addr + '/' + str(stat_key) + '/' + str(stat_val)
                    d = treq.post(attr_url)
                    d.addCallback(node.ack_response)
        # TODO: Logic is a bit broken here because if we're only setting up
        # one node_id, we still iterate through the entire list
        return "OK"
    else:
        print "Skipping non-matched URL."
def post_to_slack(cls, exc):
    """ Post an exception to slack """
    url = os.environ.get('ARSCCA_TWISTED_SLACK_HOOK')
    if url:
        print(f'ERROR: {exc}')
        print(f'Posting to Slack: {url}')
    else:
        print(f'ERROR: {exc}')
        print('NOT POSTING TO SLACK because no URL provided')
        return

    text = exc.__repr__()
    # Asterisks for <b></b>
    # Double line feed for newline
    if 'ResponseNeverReceived' in text:
        text = '*TimeoutError*'
    elif 'ConnectionRefusedError' in text:
        text = '*ConnectionRefusedError*'

    payload = {
        'text': text,
        'username': '******',
        'icon_emoji': ':ghost:'
    }

    # Using treq instead of requests
    # Note in this case it has the same signature
    treq.post(url, json=payload, timeout=5)
def processSaveIar(self, line):
    if (time.time() - self.startTime) > 60 * 60:
        # timeout, report and schedule next job
        data = {
            "Success": False,
            "Done": True,
            "Message": "Timeout. Iar save took too long"
        }
        treq.post(self.currentJob['report'].encode('ascii'),
                  urlencode(data),
                  headers={
                      'Content-Type': 'application/x-www-form-urlencoded'
                  }).addErrback(self.errorCB)
        print "save iar did not complete within time limit on region %s" % self.region
        self.processLog = False
        reactor.callLater(10, self.pollForWork)
    if not "[INVENTORY ARCHIVER]" in line:
        return
    if "Saved archive" in line:
        iarName = "%s.iar" % (self.currentJob['user'].replace(" ", ""))
        iar = os.path.join(self.currentJob["location"], iarName)
        d = treq.post(
            self.currentJob['upload'].encode('ascii'),
            files={'file': (self.currentJob['user'], open(iar, 'rb'))})

        def removeWhenDone(result):
            os.remove(iar)

        d.addCallback(removeWhenDone)
        print "save iar completed successfully for region %s" % self.region
        self.processLog = False
        reactor.callLater(10, self.pollForWork)
def suggestPrefixes(self, ctx, prefixes):
    """
    when writing files for this ctx, try to use these n3 prefixes.
    async, not guaranteed to finish before any particular file flush
    """
    treq.post(self.rdfdbRoot + 'prefixes',
              json.dumps({
                  'ctx': ctx,
                  'prefixes': prefixes
              }).encode('utf8'))
def reportLogs(self):
    def errorCB(reason):
        print "Error sending %s: %s" % (self.region, reason.getErrorMessage())

    if len(self.messages) > 0:
        data = {'log': json.dumps(self.messages)}
        treq.post(self.url.encode('ascii'),
                  urlencode(data),
                  headers={
                      'Content-Type': 'application/x-www-form-urlencoded'
                  }).addErrback(errorCB)
        self.messages = []
def start_responding(self, server_name, challenge, response):
    from twisted.internet import reactor
    validation = _validation(response)
    full_name = challenge.validation_domain_name(server_name)
    subdomain = _split_zone(full_name, self._zone_name)
    if subdomain == '':
        subdomain = '@'
    url = ('https://dns.api.gandi.net/api/v5/domains/'
           '{zone}/records').format(
               zone=self._zone_name,
               subdomain=subdomain,
               type='TXT',
           )
    body = {
        "rrset_name": subdomain,
        "rrset_type": "TXT",
        "rrset_ttl": 300,
        "rrset_values": [validation]
    }
    print(body)
    response = yield treq.post(url, json=body, headers=self._headers())
    print((yield treq.json_content(response)))
    yield deferLater(reactor, self._settle_delay, lambda: None)
    print("start settled")
def start(self):
    host = "http://%(host)s" % (self.args)
    res = "/nexus/service/local/artifact/maven/content"
    artifact = self.args["artifact"]
    fileName = self.args["file"]
    fileName = os.path.join(self.builder.basedir, fileName)
    g, a, v, e = artifact.split(":")
    params = {
        'r': self.args["repo"],
        'g': g,
        'a': a,
        'v': v,
        'e': e,
        'p': e}
    print "posting", host + res, params, fileName, self.args["cred"]
    res = yield treq.post(
        host + res,
        data=params,
        files={'file': open(fileName, "rb")},
        auth=self.args["cred"])
    print "res", res
    output = yield treq.content(res)
    print "info", res.code, output
    self.sendStatus({'info': (res.code, output)})
    self.sendStatus({'rc': 0 if res.code == 201 else 1})
def get_access_token(self, code):
    """ Gets the access token for the app given the code

        Parameters:
            - code - the response code
    """
    payload = {'redirect_uri': self.redirect_uri,
               'code': code,
               'grant_type': 'authorization_code'}
    if self.scope:
        payload['scope'] = self.scope
    if self.state:
        payload['state'] = self.state

    auth_header = base64.b64encode(self.client_id + ':' + self.client_secret)
    headers = {'Authorization': ['Basic %s' % auth_header]}

    d = treq.post(self.OAUTH_TOKEN_URL, data=payload, headers=Headers(headers))
    response = yield d
    # treq responses expose .code/.phrase and a Deferred-returning .json()
    if response.code != 200:
        raise SpotifyOauthError(response.phrase)
    token_info = yield response.json()
    token_info = self._add_custom_values_to_token_info(token_info)
    self._save_token_info(token_info)
    defer.returnValue(token_info)
def remaining(self):
    """
    Get the remaining time-to-live of this lease.

    :returns: TTL in seconds.
    :rtype: int
    """
    if self._expired:
        raise Expired()

    obj = {
        u'ID': self.lease_id,
    }
    data = json.dumps(obj).encode('utf8')

    url = u'{}/v3alpha/kv/lease/timetolive'.format(self._client._url).encode()
    response = yield treq.post(url, data, headers=self._client._REQ_HEADERS)
    obj = yield treq.json_content(response)

    ttl = obj.get(u'TTL', None)
    if not ttl:
        self._expired = True
        raise Expired()

    # grantedTTL = int(obj[u'grantedTTL'])
    # header = Header._parse(obj[u'header']) if u'header' in obj else None

    returnValue(ttl)
def set(self, key, value, lease=None, prev_kv=None):
    """
    Put puts the given key into the key-value store. A put request
    increments the revision of the key-value store and generates one
    event in the event history.

    URL: /v3alpha/kv/put

    :param key: key is the key, in bytes, to put into the key-value store.
    :type key: bytes

    :param lease: lease is the lease ID to associate with the key in the
        key-value store. A lease value of 0 indicates no lease.
    :type lease: int

    :param prev_kv: If prev_kv is set, etcd gets the previous key-value
        pair before changing it. The previous key-value pair will be
        returned in the put response.
    :type prev_kv: bool

    :param value: value is the value, in bytes, to associate with the key
        in the key-value store.
    :type value: bytes
    """
    url = u'{}/v3alpha/kv/put'.format(self._url).encode()
    obj = {
        u'key': binascii.b2a_base64(key).decode(),
        u'value': binascii.b2a_base64(value).decode()
    }
    data = json.dumps(obj).encode('utf8')

    response = yield treq.post(url, data, headers=self.REQ_HEADERS)
    obj = yield treq.json_content(response)

    revision = obj[u'header'][u'revision']
    returnValue(revision)
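# Hedged usage sketch for set() above, not part of the original snippet: it
# assumes a hypothetical client object (here called `etcd`) exposing set()
# as defined above, with the enclosing class wrapping it in inlineCallbacks
# so the call returns a Deferred.
from twisted.internet import defer

@defer.inlineCallbacks
def _example_put(etcd):
    # keys and values travel base64-encoded inside the JSON request body,
    # which is why set() takes bytes
    revision = yield etcd.set(b'mykey', b'myvalue')
    print('stored at revision %s' % (revision,))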
def item_scraped(self, item, response, spider):
    """ Send the scraped item to the server """
    if type(item).__name__ not in [
            'ListingItem',
    ]:
        raise DropItem("Invalid item type - {}".format(type(item).__name__))

    _logger = self.logger

    @inlineCallbacks
    def _cb(resp):
        text = yield resp.text(encoding='UTF-8')
        if resp.code >= 400:
            _logger.error(
                "{}: HTTP Error: failed to create/update item - {}".format(
                    resp.code, text))

    d = treq.post('http://{}:{}/api/resource/raw_data/'.format(
                      config['PriceWatchWeb']['host'],
                      config['PriceWatchWeb']['port']),
                  json.dumps(self._serialize(item)).encode('ascii'),
                  headers={b'Content-Type': [b'application/json']})
    d.addCallback(_cb)

    # The next item will be scraped only after
    # deferred (d) is fired
    return d
def keys(self):
    """
    Retrieves keys associated with the lease.

    :returns: The keys.
    :rtype: list of bytes
    """
    if self._expired:
        raise Expired()

    obj = {
        u'ID': self.lease_id,
        u'keys': True
    }
    data = json.dumps(obj).encode('utf8')

    url = u'{}/v3alpha/kv/lease/timetolive'.format(self._client._url).encode()
    response = yield treq.post(url, data, headers=self._client._REQ_HEADERS)
    obj = yield treq.json_content(response)

    ttl = obj.get(u'TTL', None)
    if not ttl:
        self._expired = True
        raise Expired()

    # grantedTTL = int(obj[u'grantedTTL'])
    # header = Header._parse(obj[u'header']) if u'header' in obj else None

    keys = [binascii.a2b_base64(key) for key in obj.get(u'keys', [])]
    returnValue(keys)
def attach(self, host, container, **kwargs):
    def c(v):
        return 1 if kwargs.get(v) else 0

    params = {
        'logs': c('logs'),
        'stream': c('stream'),
        'stdin': 0,
        'stdout': c('stdout'),
        'stderr': c('stderr')
    }

    result = Deferred()

    def on_content(line):
        if line:
            self.log.debug("{}: {}".format(host, line.strip()))

    url = self._make_url(
        host.url, 'containers/{}/attach'.format(container.id))

    d = treq.post(
        url=url,
        params=params,
        pool=self.pool)
    d.addCallback(_Reader.listen, kwargs.get('stop_line'))

    def on_error(failure):
        pass

    d.addErrback(on_error)
    return result
def _send_request(self):
    if not self.queue:
        if self.request_loop.running:
            self.request_loop.stop()
        return

    now = time.time() - 1  # 1 second buffer
    if (self.rate_remaining < 1 + 1 and self.rate_reset > now
            or DiscordRestApiLoop.global_wait > now):
        self.log.warn("Rate limited: {}".format(self.channel_id))
        return

    payload = self.queue.pop()
    method = payload['method']
    url = payload['url']
    content = payload['content']
    # url = '{}/channels/{}/messages'.format(HOST, self.channel_id)
    content = json.dumps({"content": content})
    self.log.debug('at _send_request: {} url {}'.format(self.channel_id, url))

    if method == 'post':
        d = treq.post(url, content, headers=HEADERS)
    elif method == 'patch':
        d = treq.patch(url, content, headers=HEADERS)
    elif method == 'delete':
        d = treq.delete(url, headers=HEADERS)
    elif method == 'get':
        d = treq.get(url, headers=HEADERS)

    d.addCallback(self.update_rate_limits)
    if not self.queue:
        self.request_loop.stop()
def insertMultipleLogEntries(self, entries):
    d = treq.post(self.path + "v1/insertmultipleentries",
                  params={"entries": utils.arbJSONDump(entries)})
    d.addCallback(treq.json_content)
    d.addCallback(self._verify)
    return d
def _make_request(self, method, payload):
    """
    Actually make the HTTP request.
    :rtype : twisted.internet.defer.Deferred
    """
    # user.getRecentTracks works
    # user.getrecenttracks works
    # User.getRecentTracks doesn't work, but doesn't error sensibly
    # So f**k that, send it lower() to make sure
    method = method.lower()

    final_payload = {
        "api_key": self._api_key,
        "format": "json",
        "method": method
    }
    final_payload.update(payload)

    # Convert unicode strings to utf8-encoded bytestrings (treq doesn't
    # seem to be able to encode unicode strings properly)
    for k, v in final_payload.iteritems():
        if isinstance(v, unicode):
            final_payload[k] = v.encode("utf8")

    deferred = treq.post(self.API_URL, final_payload,
                         headers={"User-Agent": "Ultros-contrib/LastFM"})
    deferred.addCallback(self._handle_response)
    return deferred
def insertSingleLogEntry(self, entry):
    d = treq.post(self.path + "v1/insertentry",
                  params={"entry": utils.arbJSONDump(entry)})
    d.addCallback(treq.json_content)
    d.addCallback(self._verify)
    return d
def verify(self, remote_ip, challenge, response):
    """
    :param remote_ip:
    :param challenge:
    :param response:
    :returns: Deferred
    :raises: Exception
    """
    def handle_response(response):
        def parse_content(content):
            if response.code != 200:
                log.msg('Received code: %d from Google for recaptcha' % response.code)
                raise Exception("Recaptcha returned code: %d" % response.code)

            c = content.splitlines() + ['']  # being cheeky here
            if len(c) < 2:
                log.err("Received unexpected response from recaptcha: %s" % content)
                raise Exception("Received unexpected response from recaptcha: %s" % content)

            return [c[0] == "true", c[1]]  # eheh c[1] always exist

        return response.content().addCallback(parse_content)

    d = treq.post(self.url, data={
        'privatekey': self.private_key,
        'remoteip': remote_ip,
        'challenge': challenge,
        'response': response}, timeout=5)
    d.addCallback(handle_response)
    return d
def add_to_load_balancer(endpoint, auth_token, lb_config, ip_address):
    """
    Add an IP address to a load balancer based on the lb_config.

    TODO: Handle load balancer node metadata.

    :param str endpoint: Load balancer endpoint URI.
    :param str auth_token: Keystone Auth Token.
    :param dict lb_config: An lb_config dictionary.
    :param str ip_address: The IP Address of the node to add to the
        load balancer.

    :return: Deferred that fires with the Add Node to load balancer
        response as a dict.
    """
    lb_id = lb_config['loadBalancerId']
    port = lb_config['port']
    path = append_segments(endpoint, 'loadbalancers', str(lb_id), 'nodes')

    d = treq.post(path,
                  headers=headers(auth_token),
                  data=json.dumps({"nodes": [{"address": ip_address,
                                              "port": port,
                                              "condition": "ENABLED",
                                              "type": "PRIMARY"}]}))
    d.addCallback(check_success, [200, 202])
    d.addErrback(wrap_request_error, endpoint, 'add')
    return d.addCallback(treq.json_content)
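# Hedged usage sketch for add_to_load_balancer() above, not part of the
# original snippet: the endpoint URL, auth token, load-balancer id and node
# address are placeholder assumptions for illustration only.
from twisted.internet import defer

@defer.inlineCallbacks
def _example_add_node():
    lb_config = {'loadBalancerId': 12345, 'port': 80}   # assumed config
    result = yield add_to_load_balancer(
        'https://lb.example.invalid/v1.0/000000',       # assumed endpoint
        'example-auth-token',                           # assumed token
        lb_config,
        '10.0.0.5')                                     # assumed node IP
    defer.returnValue(result)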
def call(self, url, params=None):
    """Override Mandrill's call method to return a deferred."""
    params = params or {}
    params['key'] = self.apikey

    self.log('POST to {}{}.json: {}'.format(ROOT, url, params))
    start = time.time()
    full_url = '{}{}.json'.format(ROOT, url)
    response = yield treq.post(
        full_url, data=json.dumps(params), headers=HEADERS)
    result = yield response.json()
    complete_time = time.time() - start
    self.log('Received {} in {}ms: {}'.format(
        response.code, complete_time * 1000, result))

    self.last_request = dict(
        url=url, request_body=params, response_body=result,
        response=response, time=complete_time)

    if response.code >= 400:
        raise self.cast_error(result)
    returnValue(result)
def post(self, call, data={}):
    url = self.endpoint + call
    headers = self.get_auth(call, data)
    # send the signed auth headers and the request payload with the POST
    response = yield treq.post(url, data=data, headers=headers)
    content = yield response.content()
    result = json.loads(content)
    returnValue(result)
def tts(input_xml, affect='surprise', ptype='disagree'):
    mary_host = "172.17.0.1"
    mary_port = "59125"

    maryxml = f'''<?xml version="1.0" encoding="UTF-8" ?>
<maryxml version="0.4"
  xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
  xmlns=" http://mary.dfki.de/2002/MaryXML" xml:lang="en-US">
  <p>
    {input_xml}
  </p>
</maryxml>'''

    query_hash = {
        "INPUT_TEXT": maryxml,
        "INPUT_TYPE": "RAWMARYXML",
        "LOCALE": "en_US",
        "VOICE": "cmu-slt-hsmm",
        "OUTPUT_TYPE": "AUDIO",
        "AUDIO": "WAVE",
    }
    query = urlencode(query_hash).replace('+', '%20')

    resp = yield treq.post("http://%s:%s/process?" % (mary_host, mary_port) + query)
    wav = yield treq.content(resp)

    which = '%.1f' % time.time()
    recentFiles[which] = wav
    url = f'http://10.2.0.1:9010/data/{which}'
    print('save', len(wav), 'bytes at', url)
    return url.encode('ascii')
def watch(self, group_id, config_key, index=None):
    def handle_response(response):
        response.addCallback(key_changed)

    def key_changed(result):
        if group_id in self.groups:
            key = result['key'].lstrip(config_key)
            if key != 'lb' and key not in self.blocked_instances:
                if result.get('newKey'):
                    # add endpoint
                    endpoint = json.loads(result['value'])
                    self.add_endpoint(group_id, endpoint)
                elif result['action'] == 'DELETE':
                    # remove endpoint
                    endpoint = json.loads(result['prevValue'])
                    self.remove_endpoint(group_id, endpoint)
        self.watch(group_id, config_key, result['index'])

    url = 'http://127.0.0.1:4001/v1/watch%s' % config_key
    if index:
        deferred = post(url, data={'index': index})
    else:
        deferred = get(url)
    deferred.addCallback(handle_response)
    return True
def _post_api_twisted(path, payload_str, access_token=None):
    def post_data_cb(data, resp):
        resp._content = data
        _parse_response(path, SETTINGS['access_token'], payload_str, resp)

    def post_cb(resp):
        r = requests.Response()
        r.status_code = resp.code
        r.headers.update(resp.headers.getAllRawHeaders())
        return treq.content(resp).addCallback(post_data_cb, r)

    headers = {'Content-Type': ['application/json; charset=utf-8']}
    if access_token is not None:
        headers['X-Rollbar-Access-Token'] = [access_token]

    url = urljoin(SETTINGS['endpoint'], path)

    try:
        encoded_payload = payload_str.encode('utf8')
    except (UnicodeDecodeError, UnicodeEncodeError):
        encoded_payload = payload_str

    d = treq.post(url, encoded_payload, headers=headers,
                  timeout=SETTINGS.get('timeout', DEFAULT_TIMEOUT))
    d.addCallback(post_cb)
def execute(self, rcs, success_codes=None):
    """
    Executes the scaling policy.

    :param TestResources rcs: The integration test resources instance.
        This provides useful information to complete the request, like
        which endpoint to use to make the API request.

    :param iterable success_codes: An iterable of HTTP status codes to
        expect in the success case. Defaults to 202.

    :return: A :class:`Deferred` which, when triggered, removes the scaling
        policy. It returns the test resources supplied, easing continuity
        of integration test code.
    """
    return (
        treq.post(
            "%sexecute" % self.link,
            headers=headers(str(rcs.token)),
            pool=self.scaling_group.pool,
        )
        .addCallback(check_success,
                     [202] if success_codes is None else success_codes)
        # Policy execution does not return anything meaningful,
        # per http://tinyurl.com/ndds6ap (link to docs.rackspace).
        # So, we forcefully return our resources here.
        .addCallback(lambda _, x: x, rcs)
    )
def revoke(self):
    """
    Revokes a lease. All keys attached to the lease will expire and be deleted.

    :returns: Response header.
    :rtype: instance of :class:`txaioetcd.Header`
    """
    if self._expired:
        raise Expired()

    obj = {
        # ID is the lease ID to revoke. When the ID is revoked, all
        # associated keys will be deleted.
        u'ID': self.lease_id,
    }
    data = json.dumps(obj).encode('utf8')

    url = u'{}/v3alpha/kv/lease/revoke'.format(self._client._url).encode()
    response = yield treq.post(url, data, headers=self._client._REQ_HEADERS)
    obj = yield treq.json_content(response)

    header = Header._parse(obj[u'header']) if u'header' in obj else None

    self._expired = True

    returnValue(header)
def callRemote(self, method, *args):
    scheme = "https://" if self.secure else "http://"

    netloc = self.host
    if self.port:
        netloc += ":%s" % self.port

    if self.user:
        auth = self.user
        if self.password:
            auth += ":%s" % self.password
        netloc = "%s@%s" % (auth, netloc)

    url = scheme + netloc + self.path

    data = self.payloadTemplate % (
        method,
        xmlrpclib.dumps(args, allow_none=self.allowNone),
    )
    headers = {"User-Agent": "Twisted/XMLRPC", "Content-Type": "text/xml"}

    response = yield treq.post(
        url,
        data,
        headers=headers,
        timeout=self.connectTimeout,
        reactor=self._reactor,
    )
    body = yield treq.content(response)
    parsed = xmlrpclib.loads(body, use_datetime=self.useDateTime)[0][0]
    returnValue(parsed)
def delete(self, key, range_end=None, prev_kv=False):
    """
    Delete value(s) from etcd.

    URL: /v3alpha/kv/deleterange

    :param key: key is the first key to delete in the range.
    :type key: bytes

    :param range_end: range_end is the key following the last key to
        delete for the range [key, range_end). If range_end is not given,
        the range is defined to contain only the key argument. If
        range_end is one bit larger than the given key, then the range is
        all keys with the prefix (the given key). If range_end is '\\0',
        the range is all keys greater than or equal to the key argument.
    :type range_end: bytes

    :param prev_kv: If prev_kv is set, etcd gets the previous key-value
        pairs before deleting it. The previous key-value pairs will be
        returned in the delete response.
    :type prev_kv: bool
    """
    url = u'{}/v3alpha/kv/deleterange'.format(self._url).encode()
    obj = {
        u'key': binascii.b2a_base64(key).decode(),
        u'range_end': binascii.b2a_base64(range_end).decode() if range_end else None,
        u'prev_kv': prev_kv
    }
    data = json.dumps(obj).encode('utf8')

    response = yield treq.post(url, data, headers=self.REQ_HEADERS)
    obj = yield treq.json_content(response)

    res = Deleted.parse(obj)

    returnValue(res)
def send_message(self, receiver, subject, text):
    self.log.info('sending mail via mailgun: receiver={receiver}, subject="{subject}", textlen={textlen}',
                  receiver=receiver, subject=subject, textlen=len(text))
    data = {"from": self._mailgun_from,
            "to": [receiver],
            "subject": subject,
            "text": text}
    res = None
    self.log.debug('Mailgun URL={url}', url=self._submit_url)
    try:
        if self._access_key and self._submit_url:
            res = yield treq.post(self._submit_url,
                                  auth=("api", self._access_key),
                                  data=data)
        else:
            self.log.warn('Mailgun not configured! This is the mail that would have been sent: {mail}',
                          mail=json.dumps(data))
            res = None
    except Exception as e:
        print('Exception:', e)
        print('Result:', res)
        self.log.failure()
        raise
    returnValue(res)
def handle_locationstate(self, net, bsc, bts, trx, data):
    """
    Handle location-state TRAP: parse trap content, build SOAP context and
    use treq's routines to post it while setting up async handlers
    """
    (ts, fx, lat, lon, height, opr, adm, pol, mcc, mnc) = data.split(',')
    tstamp = datetime.datetime.fromtimestamp(float(ts)).isoformat()
    self.factory.log.debug(
        'location-state@%s.%s.%s.%s (%s) [%s/%s] => %s'
        % (net, bsc, bts, trx, tstamp, mcc, mnc, data))
    ctx = self.factory.client.registerSiteLocation(bsc, float(lon), float(lat),
                                                   fix.get(fx, 0), tstamp,
                                                   oper.get(opr, 2),
                                                   admin.get(adm, 2),
                                                   policy.get(pol, 3))
    d = post(self.factory.location, ctx.envelope)
    # treq's collect helper is handy to get all reply content at once
    # using closure on ctx
    d.addCallback(
        collect,
        partial(handle_reply, ctx.process_reply, self.transport.write,
                self.factory.log))
    # handle HTTP errors
    d.addErrback(lambda e, bsc: self.factory.log.critical(
        "HTTP POST error %s while trying to register BSC %s" % (e, bsc)), bsc)
    # Ensure that we run only limited number of requests in parallel:
    yield self.factory.semaphore.acquire()
    # we end up here only if semaphore is available which means it's ok to
    # fire the request without exceeding the limit
    yield d
    self.factory.semaphore.release()
def process_item(self, item, spider):
    # If no processing is made, translation will
    # be N/A
    item['translation'] = "N/A"

    if self.blocking_delay > 0.001:
        # This is a bad bad thing
        time.sleep(self.blocking_delay)

    if self.async_delay > 0.001:
        # Emulate an asynchronous call to a translation function
        delay = self.async_delay
        translate = lambda: "calculated-%s" % item['info']
        translation = yield deferLater(reactor, delay, translate)
        item['translation'] = translation

    if self.downloader_api:
        # Do an API call using Scrapy's downloader
        formdata = dict(text=item['info'])
        request = FormRequest(self.base + "api", formdata=formdata)
        response = yield self.crawler.engine.download(request, spider)
        item['translation'] = json.loads(response.body)['translation']

    if self.treq_api:
        # Do an API call using treq
        response = yield post(self.base + "api", {"text": item['info']})
        json_response = yield response.json()
        item['translation'] = json_response['translation']

    defer.returnValue(item)
def main(self, reactor, options):
    """
    See :py:meth:`ICommandLineScript.main` for parameter documentation.

    :return: A ``Deferred`` which fires when the deployment is complete or
        has encountered an error.
    """
    body = dumps({"applications": options["application_config"],
                  "deployment": options["deployment_config"]})
    posted = post(options["url"], data=body,
                  headers={b"content-type": b"application/json"},
                  persistent=False)

    def fail(msg):
        raise SystemExit(msg)

    def got_response(response):
        if response.code != OK:
            d = json_content(response)

            def got_error(error):
                if isinstance(error, dict):
                    error = error[u"description"] + u"\n"
                else:
                    error = u"Unknown error: " + unicode(error) + "\n"
                fail(error)

            d.addCallback(got_error)
            return d
        else:
            sys.stdout.write(_OK_MESSAGE)

    posted.addCallback(got_response)
    return posted
def send_notification(self, processor, url, data, ttl, claims=None):
    """Send out a notification to a url for a processor"""
    url = url.encode("utf-8")
    headers = {"TTL": str(ttl)}
    crypto_key = self._crypto_key
    claims = claims or self._claims
    if self._vapid and claims:
        headers.update(self._vapid.sign(claims))
        crypto_key = "{};p256ecdsa={}".format(
            crypto_key, self._vapid.public_key_urlsafe_base64
        )
    if data:
        headers.update({
            "Content-Type": "application/octet-stream",
            "Content-Encoding": "aesgcm",
            "Crypto-key": crypto_key,
            "Encryption": self._encryption,
        })
    d = treq.post(url,
                  data=data,
                  headers=headers,
                  allow_redirects=False,
                  agent=self._agent)
    d.addCallback(self._sent_notification, processor)
    d.addErrback(self._error_notif, processor)
def lookupClan(name):
    response = yield treq.post(
        "http://www.bungie.net/Platform/Group/Search/",
        data=json.dumps({
            "contents": {"searchValue": name},
            "currentPage": 1,
            "itemsPerPage": 1
        }))
    data = yield treq.json_content(response)

    if data["Response"]["results"]:
        clan = data["Response"]["results"][0]["detail"]
        if clan["memberCount"] > 1000:
            returnValue({
                "id": 0,
                "name": '"{}" is too big'.format(clan["name"]),
                "motto": "Clans with over 1,000 members can't be processed",
            })
        else:
            returnValue({
                "id": int(clan["groupId"]),
                "name": clan["name"],
                "motto": HTMLParser.HTMLParser().unescape(clan["about"]),
            })
    else:
        returnValue({
            "id": 0,
            "name": "No Clan Found",
            "motto": "Better luck next time"
        })
def upload_file(self, name, content_type, file_data):
    """
    :param name: filename
    :type name: str
    :param content_type:
    :type content_type: str
    :param file_data:
    :type file_data:
    :returns: Deferred
    :raises: Exception
    """
    def handle_response(response):
        def parse_content(content):
            if response.code != 201:
                log.msg("Received code: %s from zendesk for file upload: msg %s"
                        % (response.code, content))
                raise ZENDESK_ERROR
            else:
                log.msg("Received 201 Created from Zendesk. Content: %s" % content)
                token_data = json.loads(content)
                token = token_data['upload']['token']
                return token

        return response.content().addCallback(parse_content)

    # replace spaces in name with _
    name = string.replace(name, ' ', '_')

    d = treq.post("https://%s.zendesk.com/api/v2/uploads.json?filename=%s"
                  % (self.domain, name),
                  data=file_data,
                  headers={"Content-Type": content_type},
                  auth=("%s/token" % self.api_username, self.api_token))
    d.addCallback(handle_response)
    return d
def upload_file_info(self, hashcode, path, size, product_id, remote_type,
                     remote_uri, name, encrypted_key):
    # fixme: another argument aes_key should be passed and encrypted
    logger.debug("upload file info to market")
    header = {
        "MARKET-KEY": self.public_key,
        "MARKET-TOKEN": self.token,
        'Content-Type': 'application/json'
    }
    data = {
        "public_key": self.public_key,
        "hashcode": hashcode,
        "path": path,
        "size": size,
        "client_id": product_id,
        "remote_type": remote_type,
        "remote_uri": remote_uri,
        "is_published": "False",
        "aes_key": encrypted_key,
        "market_hash": "hash",
        "name": name
    }
    url = self.url + 'user_data/v1/uploaded_file/add/'
    logger.debug('upload file info payload: %s', data)
    logger.debug('upload file info url: %s', url)
    resp = yield treq.post(url, headers=header, json=data, persistent=False)
    confirm_info = yield treq.json_content(resp)
    logger.debug('upload file info to market: %s', confirm_info)
    return confirm_info['status']
def login(self, email, password, accountType=ACCOUNT_TYPE_HOSTED_OR_GOOGLE):
    params = {"Email": email,
              "Passwd": password,
              "service": self.SERVICE,
              "accountType": accountType,
              "has_permission": "1",
              "source": "android",
              "androidId": self.androidId,
              "app": "com.android.vending",
              "sdk_version": "16"}

    resp = yield treq.post(self.URL_LOGIN, params)
    if resp.code == http.OK:
        data = yield treq.content(resp)
        data = data.split()
        params = {}
        for d in data:
            k, v = d.split("=")
            params[k.strip()] = v.strip()
        if "Auth" in params:
            self.setAuthSubToken(params["Auth"])
            self.loggedIn = True
        else:
            raise LoginError("Auth token not found.")
    else:
        if resp.code == http.FORBIDDEN:
            data = yield treq.content(resp)
            params = {}
            for d in data.split('\n'):
                d = d.strip()
                if d:
                    k, v = d.split("=", 1)
                    params[k.strip()] = v.strip()
            if "error" in params:
                raise LoginError(params["error"])
            else:
                raise LoginError("Login failed.")
        else:
            data = yield treq.content(resp)
            raise LoginError("Login failed: error %d <%s>"
                             % (resp.code, data.rstrip(),))
def publish_product(self, selected_id, title, description, price, tags,
                    start_date, end_date, file_md5):
    header = {'Content-Type': 'application/json'}
    header['MARKET-KEY'] = self.pub_key
    header['MARKET-TOKEN'] = self.token
    data = {
        'owner_address': self.pub_key,
        'title': title,
        'description': description,
        'price': price,
        'tags': tags,
        'start_date': start_date,
        'end_date': end_date,
        'file_md5': file_md5
    }
    signature_source = (str(self.pub_key) + str(title) + str(description)
                        + str(price)
                        + MarketClient.str_to_timestamp(start_date)
                        + MarketClient.str_to_timestamp(end_date)
                        + str(file_md5))
    signature = crypto.ECCipher.geth_sign(self.priv_key, signature_source)
    data['signature'] = signature

    resp = yield treq.post(self.url + 'product/publish/', headers=header,
                           json=data)
    confirm_info = yield treq.json_content(resp)
    print(confirm_info)
    print('publish succeeded')
    self.message_hash = confirm_info['data']['market_hash']
    publish_file_update(self.message_hash, selected_id)
    print(self.message_hash)
    return confirm_info['status']
def main(self, reactor, options):
    """
    See :py:meth:`ICommandLineScript.main` for parameter documentation.

    :return: A ``Deferred`` which fires when the deployment is complete or
        has encountered an error.
    """
    if options["nossh"]:
        ready = succeed(None)
    else:
        ready = self._configure_ssh(
            options["deployment_config"]["nodes"].keys())

    body = dumps({"applications": options["application_config"],
                  "deployment": options["deployment_config"]})
    ready.addCallback(
        lambda _: post(options["url"], data=body,
                       headers={b"content-type": b"application/json"},
                       persistent=False))

    def fail(msg):
        raise SystemExit(msg)

    def got_response(response):
        if response.code != OK:
            d = json_content(response)
            d.addCallback(
                lambda error: fail(error[u"description"] + u"\n"))
            return d
        else:
            sys.stdout.write(_OK_MESSAGE)

    ready.addCallback(got_response)
    return ready
def start_responding(self, server_name, challenge, response):
    validation = _validation(response)
    full_name = challenge.validation_domain_name(server_name)
    # subdomain = _split_zone(full_name, self._zone_name)

    zones_list_url = str(base.child("zones").set("name", self._zone_name))
    response = yield treq.get(zones_list_url, headers=self._headers())
    data = yield response.json()
    assert len(data['result']) == 1
    zone_id = data['result'][0]['id']

    records_base = base.child("zones").child(zone_id).child("dns_records")
    records_query_url = str(
        records_base.set("type", "TXT").set("name", full_name))
    response = yield treq.get(records_query_url, headers=self._headers())
    data = yield response.json()
    records = data['result']

    dns_record = {
        "type": "TXT",
        "ttl": 120,
        "name": full_name,
        "content": validation
    }
    if records:
        put_to = str(records_base.child(records[0]["id"]))
        response = yield treq.put(put_to, json=dns_record,
                                  headers=self._headers())
    else:
        post_to = str(records_base)
        response = yield treq.post(post_to, json=dns_record,
                                   headers=self._headers())
    yield response.json()
    yield deferLater(self._reactor, self._settle_delay, lambda: None)
def submit(server_url, result):
    """
    Post the given result to the given server.

    :param str server_url: The server's URL.
    :param dict result: The JSON-compatible result.
    :return: Deferred that fires with an ID of the submitted result on
        success or with SubmitFailure if the result is rejected by the
        server. This function may also return any of the ``treq`` failures.
    """
    req = post(
        server_url + "/v1/benchmark-results",
        json.dumps(result),
        headers=({'Content-Type': ['application/json']}),
    )

    def get_response_content(response):
        d = json_content(response)
        d.addCallback(lambda content: (response, content))
        return d

    req.addCallback(get_response_content)

    def process_response(response_and_content):
        (response, content) = response_and_content
        if response.code != CREATED:
            raise SubmitFailure(response.code, response.phrase,
                                content['message'])
        else:
            return content['id']

    req.addCallback(process_response)
    return req
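# Hedged usage sketch for submit() above, not part of the original snippet:
# drives a single submission from task.react. The server URL and result
# payload are assumptions for illustration only.
from twisted.internet import task

def _example_submit(reactor):
    result = {"version": 1, "metric": "wallclock", "value": 1.23}  # assumed payload
    d = submit("http://benchmarks.example.invalid", result)

    def report(result_id):
        print("submitted as %s" % (result_id,))

    d.addCallback(report)
    return d

# task.react(_example_submit)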
def impersonate_user(auth_endpoint, identity_admin_token, username,
                     expire_in=10800):
    """
    Acquire an auth-token for a user via impersonation.

    :param str auth_endpoint: Identity API endpoint URL.
    :param str identity_admin_token: Auth token that has the appropriate
        permissions to impersonate other users.
    :param str username: Username to impersonate.
    :param int expire_in: Number of seconds for which the token will be
        valid.

    :return: Decoded JSON as dict.
    """
    d = treq.post(
        append_segments(auth_endpoint, 'RAX-AUTH', 'impersonation-tokens'),
        json.dumps({
            "RAX-AUTH:impersonation": {
                "user": {"username": username},
                "expire-in-seconds": expire_in
            }
        }),
        headers=headers(identity_admin_token))
    d.addCallback(check_success, [200, 203])
    d.addErrback(wrap_request_error, auth_endpoint, data='impersonation')
    d.addCallback(treq.json_content)
    return d
def authenticate_user(auth_endpoint, username, password):
    """
    Authenticate to an Identity auth endpoint with a username and password.

    :param str auth_endpoint: Identity API endpoint URL.
    :param str username: Username to authenticate as.
    :param str password: Password for the specified user.

    :return: Decoded JSON response as dict.
    """
    d = treq.post(
        append_segments(auth_endpoint, 'tokens'),
        json.dumps(
            {
                "auth": {
                    "passwordCredentials": {
                        "username": username,
                        "password": password
                    }
                }
            }),
        headers=headers())
    d.addCallback(check_success, [200, 203])
    d.addErrback(wrap_request_error, auth_endpoint,
                 data=('authenticating', username))
    d.addCallback(treq.json_content)
    return d
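# Hedged usage sketch for authenticate_user() above, not part of the original
# snippet: the endpoint and credentials are placeholder assumptions, and the
# response shape shown is the standard Keystone v2.0 tokens layout.
from twisted.internet import defer

@defer.inlineCallbacks
def _example_authenticate():
    auth = yield authenticate_user(
        'https://identity.example.invalid/v2.0',   # assumed endpoint
        'example-user',                            # assumed username
        'example-password')                        # assumed password
    token = auth['access']['token']['id']
    defer.returnValue(token)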
def _flush_urls(self, spider):
    """
    Flushes the URLs.
    """
    if not self._urls:
        return

    target = self._targets[(self._batch - 1) % len(self._targets)]
    logger.info("Posting batch %d with %d URLs to %s",
                self._batch, len(self._urls), target)
    data = [
        ("project", self._project),
        ("spider", spider.name),
        ("setting", "FEED_URI=%s" % self._feed_uri),
        ("batch", str(self._batch)),
    ]
    debug_data = "target (%d): %s" % (len(self._urls), data)
    json_urls = json.dumps(self._urls)
    data.append(("setting", "DISTRIBUTED_START_URLS=%s" % json_urls))
    d = treq.post("http://%s/schedule.json" % target,
                  data=data, timeout=5, persistent=False)
    d.addBoth(lambda resp: (debug_data, resp))
    self._scrapyd_submits_to_wait.append(d)
    self._urls = []
    self._batch += 1
def rtm_start(self, bot_token, **kwargs):
    params = {'token': bot_token}
    params.update(kwargs)
    d = treq.post('https://slack.com/api/rtm.start', params=params)
    d.addCallback(lambda response: response.json())
    d.addCallback(self.connect_ws)
    return d
def trigger_convergence(authenticator, region, group):
    token, catalog = yield authenticator.authenticate_tenant(group["tenantId"])
    endpoint = public_endpoint_url(catalog, "autoscale", region)
    resp = yield treq.post(
        append_segments(endpoint, "groups", group["groupId"], "converge"),
        headers=headers(token), data="")
    if resp.code != 204:
        raise ValueError("bad code", resp.code)
def create_room(host, room_no, users, alias_prefix):
    create_url = "%s/_matrix/client/api/v1/createRoom" % (str(host),)
    access_token = "token_%d" % (users[0],)
    r = yield treq.post(
        create_url,
        params={"access_token": access_token},
        data=json.dumps({
            "preset": "public_chat",
            "room_alias_name": "%s_%d" % (alias_prefix, room_no,),
        }),
        headers={'Content-type': ['application/json']},
    )
    js = yield r.json()
    assert r.code == 200, "Status code: %d. %r" % (r.code, js)

    room_id = js["room_id"]
    results[room_no] = {
        "room_id": str(room_id),
        "alias": str(js["room_alias"]),
        "users": users,
    }

    join_url = "%s/_matrix/client/api/v1/join/%s" % (str(host), str(room_id))

    sys.stderr.write("Created %s (%s). Joining %d users.\n"
                     % (room_no, room_id, len(users),))
    sys.stderr.flush()

    for user in users[1:]:
        r = yield treq.post(
            join_url,
            params={"access_token": "token_%d" % (user,)},
            headers={'Content-type': ['application/json']},
            data={},
        )
        yield r.json()
        assert(r.code == 200)

    sys.stderr.write("Finished room no. %d\n" % (room_no,))
    sys.stderr.flush()
def request():
    deferred = treq.post('http://api.host/v2/loadtest/messages',
                         auth=('api', 'api-key'),
                         data={'from': 'Loadtest <*****@*****.**>',
                               'to': '*****@*****.**',
                               'subject': "test"},
                         pool=pool)
    deferred.addCallback(request_done)
    return deferred