def test_get_kernels(self):
    """Server should respond with running kernel information."""
    self.app.web_app.settings['kg_list_kernels'] = True

    # The kernel list starts out empty.
    listing_resp = yield self.http_client.fetch(
        self.get_url('/api/kernels')
    )
    self.assertEqual(listing_resp.code, 200)
    self.assertEqual(len(json_decode(listing_resp.body)), 0)

    # Launch a kernel.
    create_resp = yield self.http_client.fetch(
        self.get_url('/api/kernels'),
        method='POST',
        body='{}'
    )
    self.assertEqual(create_resp.code, 201)
    created = json_decode(create_resp.body)

    # The listing should now contain exactly the launched kernel.
    listing_resp = yield self.http_client.fetch(
        self.get_url('/api/kernels')
    )
    self.assertEqual(listing_resp.code, 200)
    listing = json_decode(listing_resp.body)
    self.assertEqual(len(listing), 1)
    self.assertEqual(listing[0]['id'], created['id'])
def is_login_valid(self, form_data):
    """Validate login credentials against the auth service.

    Posts ``form_data`` wrapped in a ``payload`` envelope to the login
    endpoint.

    :param form_data: credential fields forwarded verbatim to the service.
    :returns: dict with ``status`` (HTTP code, 0 if no response was
        received) and ``response`` (decoded JSON body or ``None``).
    """
    # FIX: removed print(form_data) -- it wrote raw credentials to stdout.
    http_client = httpclient.HTTPClient()
    login_url = "http://localhost:8007/api/v1/user/login"
    response_body = None
    code = 0
    data = {
        'payload': form_data
    }
    try:
        response = http_client.fetch(httpclient.HTTPRequest(
            url=login_url, method='POST', body=json_encode(data)))
        code = response.code
        response_body = json_decode(response.body)
    except httpclient.HTTPError as e:
        # HTTPError is raised for non-200 responses; the response
        # can be found in e.response.
        logger.debug("Error: %s" % str(e))
        # FIX: e.response is None for timeouts/connection failures --
        # guard before dereferencing it.
        if e.response is not None:
            code = e.response.code
            response_body = json_decode(e.response.body)
    except Exception as e:
        # Other errors are possible, such as IOError.
        logger.error("Error: %s" % str(e))
    finally:
        # FIX: close the client even if an unexpected exception escapes.
        http_client.close()
    return {
        'status': code,
        'response': response_body
    }
def test_post_valid(self):
    """Create a preparation, then attach a FASTQ artifact to it."""
    _, uploads_dir = get_mountpoint('uploads')[0]
    # Stage two fastq files in the study's upload directory.
    for fname in ('foo.txt', 'bar.txt'):
        with open(os.path.join(uploads_dir, '1', fname), 'w') as fp:
            fp.write("@x\nATGC\n+\nHHHH\n")

    prep_table = load_template_to_dataframe(
        StringIO(EXP_PREP_TEMPLATE.format(1)))
    response = self.post('/api/v1/study/1/preparation?data_type=16S',
                         data=prep_table.T.to_dict(),
                         headers=self.headers, asjson=True)
    prepid = json_decode(response.body)['id']

    uri = '/api/v1/study/1/preparation/%d/artifact' % prepid
    # 1 -> fwd or rev sequences in fastq
    # 3 -> barcodes
    body = {'artifact_type': 'FASTQ',
            'filepaths': [['foo.txt', 1],
                          ['bar.txt', 'raw_barcodes']],
            'artifact_name': 'a name is a name'}

    response = self.post(uri, data=body, headers=self.headers, asjson=True)
    self.assertEqual(response.code, 201)
    obs = json_decode(response.body)['id']
    exp = PrepTemplate(prepid).artifact.id
    self.assertEqual(obs, exp)
async def get(self, id: Union[int, str], *, organization_id: Union[str, int], columns=None):
    """Fetch a single resource by id from the remote API.

    :param id: resource identifier appended to the module URL.
    :param organization_id: tenant scoping parameter for the query string.
    :param columns: accepted for interface parity; not used here.
    :returns: the single resource dict found in the response envelope.
    :raises HTTPError: re-raised with the remote code/message on failure.
    :raises ValueError: if the envelope does not contain exactly one resource.
    """
    url = '{base_url}/{id}?{query}'.format(
        base_url=self.module_url,
        id=id,
        query=urlencode({
            'organization_id': organization_id,
            **self.base_query}))
    try:
        logger.info('GET: {}'.format(url))
        response = await self.http_client.fetch(url, method='GET')
    except HTTPClientError as http_error:
        # Surface the remote error code/message to our caller.
        http_code = http_error.code
        response = json_decode(http_error.response.body.decode("utf-8"))
        message = str(response['code']) + ': ' + response['message']
        raise HTTPError(http_code, reason=message)
    else:
        response = json_decode(response.body.decode("utf-8"))
    # Everything except the code/message envelope keys is payload.
    results = [v for k, v in response.items() if k not in ['code', 'message']]  # noqa
    if len(results) != 1:
        # FIX: the exception was constructed but never raised, so the
        # check silently did nothing. Also corrected "then" -> "than".
        raise ValueError('More than one resource was returned.')
    return results[0]
def test_login_failed_empty(self):
    """Login must fail when either the email or the password is empty."""
    cases = [
        {'action': 'Login', 'email': '', 'password': '******'},
        {'action': 'Login', 'email': 'foo', 'password': ''},
    ]
    for post_data in cases:
        self.http_client.fetch(self.get_url('/login/email'), self.stop,
                               method='POST', body=urlencode(post_data),
                               follow_redirects=False)
        response = self.wait()
        payload = json_decode(response.body)
        self.assertEqual(payload['status'], 'failed')
        self.assertEqual(payload['error'],
                         'Email and password are mandatory')
def backtest2(code_full, backtest_id):
    """Execute ``code_full`` on a fresh Jupyter kernel via a kernel gateway.

    Launches a kernel over the gateway REST API, opens its websocket
    channel, sends one execute_request, then blocks until either stream
    output or an error arrives for that request.

    NOTE(review): this is a generator using ``yield`` on futures --
    presumably wrapped by ``tornado.gen.coroutine`` at the decoration or
    call site; confirm against the surrounding module.

    :param code_full: source code string to execute on the kernel.
    :param backtest_id: identifier used when persisting error logs.
    """
    hostPort = get_hostPort(SERVICES_DOCKER)
    kg_host = os.getenv('GATEWAY_HOST', ('127.0.0.1:%s' % hostPort))
    client = AsyncHTTPClient()
    # Start a new kernel (empty JSON body selects the default kernelspec).
    response = yield client.fetch(
        'http://{}/api/kernels'.format(kg_host),
        method='POST',
        body='{}'
    )
    kernel = json_decode(response.body)
    ws_url = 'ws://{}/api/kernels/{}/channels?session_id={}'.format(
        kg_host,
        url_escape(kernel['id']),
        uuid.uuid1()
    )
    ws = yield websocket_connect(ws_url)
    # Send an execute request
    ws.write_message(json_encode({
        'header': {
            'username': '',
            'version': '5.0',
            'session': '',
            'msg_id': 'test-msg',
            'msg_type': 'execute_request'
        },
        'parent_header': {},
        'channel': 'shell',
        'content': {
            'code': code_full,
            'silent': False,
            'store_history': False,
            'user_expressions' : {}
        },
        'metadata': {},
        'buffers': {}
    }))
    # Look for stream output for the print in the execute.  Messages are
    # matched back to our request via parent_header.msg_id == 'test-msg'.
    while 1:
        msg = yield ws.read_message()
        msg = json_decode(msg)
        msg_type = msg['msg_type']
        parent_msg_id = msg['parent_header']['msg_id']
        if msg_type == 'stream' and parent_msg_id == 'test-msg':
            #app_log.info('Content:', msg['content']['text'])
            break
        if msg_type == 'error' and parent_msg_id == 'test-msg':
            # Persist the failure before giving up on this run.
            #app_log.info(msg)
            save_error_log(msg, 'backtestservices', backtest_id)
            #app_log.info('Content:', msg['content']['evalue'])
            break
    ws.close()
def post(self):
    """Handle a Sonarr webhook: resolve each episode's file and queue it."""
    payload = json_decode(self.request.body)
    logger.debug('Sonarr download: %s', payload)
    if payload['EventType'] in ['Test', 'Rename']:
        return
    http_client = AsyncHTTPClient()
    headers = {'X-Api-Key': env.settings.sonarr_api_key}
    for episode in payload['Episodes']:
        episode_id = episode['Id']
        # Look up the episode record to find its file id.
        episode_resp = yield http_client.fetch(HTTPRequest(
            method='GET',
            headers=headers,
            url='%s/api/Episode/%d' % (env.settings.sonarr_url, episode_id)))
        episode_data = json_decode(episode_resp.body)
        logger.debug('Sonarr episode data: %s', episode_data)
        # Then fetch the file record itself.
        file_resp = yield http_client.fetch(HTTPRequest(
            method='GET',
            headers=headers,
            url='%s/api/EpisodeFile/%d' % (env.settings.sonarr_url,
                                           episode_data['episodeFileId'])))
        file_data = json_decode(file_resp.body)
        logger.debug('Sonarr file data: %s', file_data)
        path = file_data['path']
        name = file_data['sceneName'] + os.path.splitext(path)[1]
        logger.info("ADD (sonarr): %s -> %s", path, name)
        SuperliminalCore.add_video(path, name)
def post(self):
    """Add a facility."""
    # The facility payload is cached module-wide in compressed form and
    # rewritten here on every addition.
    global compressed_facilities
    if not revisit_online:
        # Upstream revisit service is down -- report Bad Gateway.
        raise tornado.web.HTTPError(502)
    new_facility = json_decode(self.request.body)
    c_facilities_json = json_decode(compressed_facilities)
    # The facility list is stored LZString-compressed inside the first
    # element of the 'wn' child's data array.
    facility_data = (
        c_facilities_json['facilities']['children']['wn']['data'][0]
    )
    uncompressed = json_decode(lzs.decompressFromUTF16(facility_data))
    # Append the new facility with fixed bookkeeping fields; timestamps
    # are hard-coded placeholders, not the actual creation time.
    uncompressed.append({
        '_version': 0,
        'active': True,
        'coordinates': new_facility['coordinates'],
        'createdAt': '2014-04-23T20:32:20.043Z',
        'href': (
            'http://localhost:3000/api/v0/facilities/{}.json'.format(
                new_facility['uuid']
            )
        ),
        'identifiers': [],
        'name': new_facility['name'],
        'properties': new_facility['properties'],
        'updatedAt': '2014-04-23T20:32:20.043Z',
        'uuid': new_facility['uuid'],
    })
    # Re-compress the updated list and swap it back into the cache.
    compressed = lzs.compressToUTF16(json_encode(uncompressed))
    c_facilities_json['facilities']['children']['wn']['data'] = [
        compressed
    ]
    compressed_facilities = json_encode(c_facilities_json).encode()
    self.set_status(201)
def open(self, document_id):
    """Register this websocket as a participant on the given document."""
    print('Websocket opened')
    current_user = self.get_current_user()
    self.user_info = SessionUserInfo()
    doc_db, can_access = self.user_info.init_access(
        document_id, current_user)
    if not can_access:
        # No access: register nothing and send no welcome.
        return
    if doc_db.id in DocumentWS.sessions:
        # Join the already-open session; take the next participant id.
        self.doc = DocumentWS.sessions[doc_db.id]
        self.id = max(self.doc['participants']) + 1
        print("id when opened %s" % self.id)
    else:
        # First participant: build the session state from the db row.
        self.id = 0
        self.doc = {
            'db': doc_db,
            'participants': {},
            'last_diffs': json_decode(doc_db.last_diffs),
            'comments': json_decode(doc_db.comments),
            'settings': json_decode(doc_db.settings),
            'contents': json_decode(doc_db.contents),
            'metadata': json_decode(doc_db.metadata),
            'version': doc_db.version,
            'diff_version': doc_db.diff_version,
            'comment_version': doc_db.comment_version,
            'title': doc_db.title,
            'id': doc_db.id,
        }
        DocumentWS.sessions[doc_db.id] = self.doc
    self.doc['participants'][self.id] = self
    self.write_message({'type': 'welcome'})
def test_json_errors(self):
    """Handlers should always return JSON errors."""
    # A handler that we override
    resp = yield self.http_client.fetch(
        self.get_url('/api/kernels'),
        raise_error=False
    )
    payload = json_decode(resp.body)
    self.assertEqual(resp.code, 403)
    self.assertEqual(payload['reason'], 'Forbidden')

    # A handler from the notebook base
    resp = yield self.http_client.fetch(
        self.get_url('/api/kernels/1-2-3-4-5'),
        raise_error=False
    )
    payload = json_decode(resp.body)
    self.assertEqual(resp.code, 404)
    # Base handler json_errors decorator does not capture reason properly
    # self.assertEqual(payload['reason'], 'Not Found')
    self.assertIn('1-2-3-4-5', payload['message'])

    # The last resort not found handler
    resp = yield self.http_client.fetch(
        self.get_url('/fake-endpoint'),
        raise_error=False
    )
    payload = json_decode(resp.body)
    self.assertEqual(resp.code, 404)
    self.assertEqual(payload['reason'], 'Not Found')
def require_auth(handler, kwargs):
    """Validate the Bearer token in the request's Authorization header.

    Writes a 10405 error payload and finishes the request for malformed
    headers; writes a 10416 payload if the JWT fails to decode.

    NOTE(review): when no Authorization header is present this function
    does nothing -- presumably a later check handles that case; confirm
    against the caller.
    """
    auth = handler.request.headers.get('Authorization')
    if auth:
        parts = auth.split()
        # FIX: each invalid-header branch now returns after finish().
        # Previously execution fell through to parts[1], which raises
        # IndexError for a one-part header and kept processing a request
        # that had already been finished.
        if parts[0].lower() != 'bearer' or len(parts) != 2:
            handler._transforms = []
            handler.writejson(json_decode(str(ApiHTTPError(10405))))
            handler.finish()
            return
        token = parts[1]
        try:
            res = jwt.decode(
                token,
                secret_key,
                options=options
            )
        except Exception as e:
            # FIX: "except Exception, e" is Python-2-only syntax;
            # "as e" works on both 2.6+ and 3.
            handler._transforms = []
            handler.set_status(200)
            handler.writejson({'message': e.message, 'code': 10416})
            handler.finish()
def post(self):
    """Complete a uLogin social-auth flow and store the user in redis.

    Exchanges the posted token with u-login.com, builds a user record
    from the returned identity, persists it (plus default settings for
    new users) and redirects to the chat root.
    """
    habrachat_cookie = self.get_cookie("habrachat")
    if not habrachat_cookie:
        habrachat_cookie = _session_id()
        self.set_cookie("habrachat", habrachat_cookie)
    token = self.get_argument("token", None)
    if not token:
        log.warning("Not have Token")
        self.finish()
        return
    client = httpclient.AsyncHTTPClient()
    response = yield client.fetch(
        "http://u-login.com/token.php?token=%s&host=%s://%s" % (token, self.request.protocol, self.request.host),
        use_gzip=True
    )
    if response.code != 200:
        log.warning("Not have access to u-login")
        self.finish()
        return
    json_response = json_decode(response.body)
    if "error_type" in json_response:
        log.warning("Error auth: %s" % json_response["error_message"])
        self.finish()
        return
    # FIX: the body was json_decode()d a second time here for no reason;
    # reuse the already-decoded dict for the second error check.
    if "error" in json_response:
        log.warning("Error auth: %s" % json_response["error"])
        self.finish()
        return
    identity = json_response.get("identity")
    if not identity:
        # FIX: previously execution continued with identity=None and
        # crashed inside utf8(); abort the request instead.
        log.error("Not have indentity! json: %s" % json_response)
        self.finish()
        return
    log.info("New user indetity: %s" % identity)
    user_id = hashlib.md5(utf8(identity)).hexdigest()
    new_user = {"id": user_id, "name": None}
    new_user_name = ""
    if "nickname" in json_response:
        new_user_name = json_response.get("nickname", "")
    if not new_user["name"] and "first_name" in json_response:
        new_user_name = json_response.get("first_name", "")
    # Names are capped at 20 chars; square brackets are reserved by the
    # chat markup, so replace them with braces.
    new_user["name"] = new_user_name[:20].replace("[", "{").replace("]", "}").encode('UTF-8')
    new_user["avatar"] = json_response.get("photo")
    new_user["ismoderator"] = identity in options.moderators
    old_user_settings = yield tornado.gen.Task(self.redis.get, "setting_"+user_id)
    if not old_user_settings:
        # First login: seed default UI settings.
        new_user_settings = {
            "revert_chat_order": False,
            "send_message_enter": False
        }
        yield tornado.gen.Task(self.redis.set, "setting_"+user_id, json_encode(recursive_unicode(new_user_settings)))
    yield tornado.gen.Task(self.redis.set, habrachat_cookie, json_encode(recursive_unicode(new_user)))
    self.redirect("/")
def run(self):
    """Join a remote book host: fetch the book cover via /join, store a
    local copy under ``self.path``, then pull and merge the member list.

    NOTE(review): the ``'******'`` fragments below are credentials/values
    redacted from the original source; the statements around them are
    syntactically incomplete as committed and need the real literals
    restored before this runs.
    """
    books = self.books
    lock = self.lock
    http_client = tornado.httpclient.HTTPClient()
    # Build an application/x-www-form-urlencoded body by hand.
    _body = 'userName='******'&userPubKey=' + escape.url_escape(self.user.pubKey)
    _body += '&host=' + escape.url_escape(self.user.host)
    _body += '&port=' + escape.url_escape(str(self.user.port))
    _body += '&bookID=' + escape.url_escape(self.bookID)
    _body += '&invitation=' + escape.url_escape(' ')
    _body += '&approverID=' + escape.url_escape(' ')
    _body += '&password='******'http://' + str(self.host) + ':' + str(self.port)
    try:
        response = http_client.fetch(address + '/join', method = 'POST', body = _body)
        b = escape.json_decode(response.body)
        owner = User(**b['owner'])
        # Guard the shared books map while inserting the new book.
        lock.acquire()
        book = Book.copyCover(owner, b['title'], b['signature'], b['createdAt'], self.path)
        books[book.getID()] = book
        lock.release()
        # Fetch the membership roster and merge it into local state.
        response = http_client.fetch(address + '/members', method = 'POST', body = _body)
        members = escape.json_decode(response.body)
        _updateMembers(members, self.user, book, lock, address)
    except tornado.httpclient.HTTPError as e:
        #todo: log
        print("Error:" + str(e))
    except ValueError:
        # Raised by json_decode on a malformed response body.
        print('ValueError')
def get(self):
    """Exchange a one-time password for a secure session cookie.

    Forwards the ``otp`` query argument to the auth server; on success
    sets the session cookie and responds ``{"result": "OK"}``.

    :raises ServerHTTPError: 401 when the auth server rejects the OTP.
    """
    # get otp from request
    otp = self.get_argument("otp")
    try:
        # make request to auth server
        response = yield AsyncHTTPClient().fetch(
            self.auth_url + '/control/get_user',
            method="POST",
            headers=self.auth_headers,
            body=urlencode({"user_otp": otp})
        )
    except ClientHTTPError as error:
        # check content type for json to try and parse result
        # FIX: this compared strings with `is`, which tests object
        # identity and is essentially never true for a header value
        # built at runtime -- so the JSON branch was unreachable.
        if error.response and error.response.headers.get('Content-Type') == 'application/json; charset=UTF-8':
            # decode the response
            content = json_decode(error.response.body)
            # catch client otp errors
            if content['status_code'] == 400:
                raise ServerHTTPError(401, reason=content['message'])
        # handle all other cases as internal errors
        raise error
    else:
        # set client cookie and respond
        content = json_decode(response.body)
        self.set_secure_cookie(self.cookie_name, str(content['result']['id']))
        self.write({"result": "OK"})
        self.finish()
def get(self, article_id):
    """Render the edit page for an article together with its paragraphs.

    :param article_id: id of the article, taken from the URI.
    """
    logging.info("got article_id %r in uri", article_id)
    logging.info(self.request)
    http_client = HTTPClient()

    url = "http://" + STP + "/blogs/articles/" + article_id
    response = http_client.fetch(url, method="GET")
    logging.info("got _article response %r", response.body)
    _article = json_decode(response.body)
    # Timestamps arrive in milliseconds; convert to a datetime for display.
    _timestamp = _article["timestamp"]
    _article["timestamp"] = timestamp_datetime(_timestamp / 1000)
    # FIX: replaced a bare `except:` (which masked any error, not just a
    # missing key) with an explicit default for the nickname.
    _article.setdefault('accountNickname', "anonymous")

    # FIX: reuse the client created above instead of building a second one.
    url = "http://" + STP + "/blogs/my-articles/" + article_id + "/paragraphs"
    response = http_client.fetch(url, method="GET")
    logging.info("got _paragraphs response %r", response.body)
    _paragraphs = json_decode(response.body)
    self.render('blog/article-edit.html', article=_article,
                paragraphs=_paragraphs)
def run(self):
    """Synchronise a book with peers: ask selected members for the member
    roster and/or the version/action history and merge them locally.

    NOTE(review): the ``'******'`` fragments below are credentials/values
    redacted from the original source; the statements around them are
    syntactically incomplete as committed and need the real literals
    restored before this runs.
    """
    book = self.book
    lock = self.lock
    # Decide which peers to contact, bounded by maxConnection unless
    # connectAll is set.
    whoToAsk = _setWhoToAsk(self.user, self.whoToAsk, self.book, self.connectAll, self.maxConnection)
    http_client = tornado.httpclient.HTTPClient()
    # Build an application/x-www-form-urlencoded body by hand; blank
    # invitation/approver fields are sent as a single space.
    _body = 'userName='******'&userPubKey=' + escape.url_escape(self.user.pubKey)
    _body += '&host=' + escape.url_escape(self.user.host)
    _body += '&port=' + escape.url_escape(str(self.user.port))
    _body += '&invitation=' + escape.url_escape(self.user.invitation if self.user.invitation is not None else ' ')
    _body += '&approverID=' + escape.url_escape(self.user.approverID if self.user.approverID is not None else ' ')
    _body += '&bookID=' + escape.url_escape(self.book.getID())
    _body += '&password='******'http://' + str(user.host) + ':' + str(user.port)
    if self.askMember:
        # Pull the membership roster and merge it into local state.
        response = http_client.fetch(address + '/members', method = 'POST', body = _body)
        members = escape.json_decode(response.body)
        _updateMembers(members, self.user, book, lock, address)
    if self.askAction:
        # Pull the version list and replay missing actions.
        response = http_client.fetch(address + '/versions', method = 'POST', body = _body)
        versions = escape.json_decode(response.body)
        _updateActions(versions, self.user, book, lock, address)
    except tornado.httpclient.HTTPError as e:
        #todo: log
        print("Error:" + str(e))
        pass
    except ValueError:
        # Raised by json_decode on a malformed response body.
        print('ValueError')
def post(self):
    """Featurize the posted time series and predict with a stored model.

    Reads ``ts_data``, ``modelID`` and optional ``meta_features`` /
    ``impute_kwargs`` JSON arguments, loads the persisted model, builds
    the feature set, and responds with formatted prediction info.
    """
    ts_data = json_decode(self.get_argument('ts_data'))
    model_id = json_decode(self.get_argument('modelID'))
    meta_feats = json_decode(self.get_argument('meta_features', 'null'))
    impute_kwargs = json_decode(self.get_argument('impute_kwargs', '{}'))
    model = Model.query.get(model_id)
    model_data = joblib.load(model.file_uri)
    if hasattr(model_data, 'best_estimator_'):
        # Presumably a fitted sklearn *SearchCV wrapper -- unwrap to the
        # underlying estimator (TODO confirm against model persistence).
        model_data = model_data.best_estimator_
    features_to_use = model.featureset.features_list
    fset = featurize.featurize_time_series(*ts_data,
                                           features_to_use=features_to_use,
                                           meta_features=meta_feats,
                                           raise_exceptions=False)
    fset = featurize.impute_featureset(fset, **impute_kwargs)
    fset.index = fset.index.astype(str)  # ensure JSON-encodable
    data = {'preds': model_data.predict(fset)}
    if hasattr(model_data, 'predict_proba'):
        # Classifiers: include per-class probabilities keyed by class label.
        data['pred_probs'] = pd.DataFrame(model_data.predict_proba(fset),
                                          index=fset.index,
                                          columns=model_data.classes_)
    else:
        data['pred_probs'] = []
    pred_info = Prediction.format_pred_data(fset, data)
    return self.success(pred_info)
def put(self, path, request):
    """
    Handle an HTTP PUT request.

    This method handles an HTTP PUT request, returning a JSON response.

    :param path: URI path of request
    :param request: HTTP request object
    :return: an ApiAdapterResponse object containing the appropriate response
    """
    # Update the target specified in the path, or all targets if none specified
    try:
        # Ensure request body is JSON. Will throw a TypeError if not.
        json_decode(request.body)
        # partition() is equivalent to the original conditional split:
        # the text before the first '/' names the target, the remainder
        # (empty when there is no '/') is the path within it.
        path_elem, _, target_path = path.partition('/')
        for target in self.targets:
            if path_elem == '' or path_elem == target.name:
                target.remote_set(target_path, request.body)
        response = self.param_tree.get(path)
        status_code = 200
    except ParameterTreeError as param_tree_err:
        response = {'error': str(param_tree_err)}
        status_code = 400
    except (TypeError, ValueError) as type_val_err:
        response = {'error': 'Failed to decode PUT request body: {}'.format(str(type_val_err))}
        status_code = 415
    return ApiAdapterResponse(response, status_code=status_code)
def test_emit_setup_with_bad_counter_value_type(self):
    """A non-integer counter setup value must yield a counter_error."""
    ws = yield self.ws_connect('/ws/counter')
    time.sleep(SLEEPING_TIME)
    greeting = yield ws.read_message()
    expected_greeting = {
        'event': 'counter_connection',
        'data': {
            'message': 'Got new connection.',
            'counter_value': 0  # Initial value of counter
        }
    }
    self.assertDictEqual(json_decode(greeting), expected_greeting)

    yield ws.write_message(json_encode({
        'event': 'setup',
        'data': {
            'counter_value': 'not_an_integer'
        }
    }))
    time.sleep(SLEEPING_TIME)
    error_message = yield ws.read_message()
    expected_error = {
        'event': 'counter_error',
        'data': {
            'message': 'Setup initial counter value: FAIL.',
            'details': '"value" is not an integer.'
        }
    }
    self.assertDictEqual(json_decode(error_message), expected_error)
    self.close(ws)
def _get_api_token(self, world_id, st):
    """Obtain a game API token for the given world via DMM's makeRequest.

    :param world_id: 1-based index into ``dmm.WORLD_IP``.
    :param st: security token forwarded with the signed request.
    :returns: tuple ``(world_ip, api_token, api_starttime)``.
    :raises OoiAuthError: when the server is unreachable or rejects us.
    """
    world_ip = dmm.WORLD_IP[world_id-1]
    url = dmm.GET_FLASH_URL % (world_ip, self.owner, int(time.time()*1000))
    body = urlencode({'url': url,
                      'httpMethod': 'GET',
                      'authz': 'signed',
                      'st': st,
                      'contentType': 'JSON',
                      'numEntries': '3',
                      'getSummaries': 'false',
                      'signOwner': 'true',
                      'signViewer': 'true',
                      'gadget': 'http://203.104.209.7/gadget.xml',
                      'container': 'dmm'})
    try:
        req = yield self.http_client.fetch(dmm.MAKE_REQUEST_URL, method='POST',
                                           headers=self.headers, body=body,
                                           connect_timeout=self.connect_timeout,
                                           request_timeout=self.request_timeout,
                                           proxy_host=proxy_host,
                                           proxy_port=proxy_port)
    except (CurlError, HTTPError):
        # Message: "failed to connect to the api_token server".
        raise OoiAuthError('连接api_token服务器失败')
    # NOTE(review): the first 27 characters are stripped before JSON
    # decoding -- presumably a JS anti-hijacking prefix from makeRequest;
    # confirm the offset if the upstream format changes.
    svdata = json_decode(native_str(req.body)[27:])
    if svdata[url]['rc'] != 200:
        # Message: "failed to get api_token".
        raise OoiAuthError('获取api_token失败')
    # The inner body carries another 7-character prefix before its JSON.
    svdata = json_decode(svdata[url]['body'][7:])
    if svdata['api_result'] != 1:
        raise OoiAuthError('获取api_token失败')
    return world_ip, svdata['api_token'], svdata['api_starttime']
def test_1_register(self):
    """first test function

    End-to-end websocket flow: register a user (credentials redacted as
    ``******`` in this source), start a session, then draw two text chat
    objects.  Each request carries an incrementing ``request_id`` that
    ``read_until_request_id`` uses to match the response.
    """
    client = yield websocket_connect("ws://localhost:8080/websocket", self.io_loop)
    request_id = 0
    # Register a new user.
    client.write_message(json_encode({
        "action":"register",
        "request_id": request_id,
        "args":{
            "username":"******",
            "password":"******"
        }}))
    response = yield self.read_until_request_id(client, request_id)
    print('got: {}'.format(response))
    response = json_decode(response)
    user = response["result"]
    request_id += 1
    # Start a session for that user.
    client.write_message(json_encode({
        "action":"session_start",
        "request_id": request_id,
        "args":{
            "user_id":user["id"]}}))
    response = yield self.read_until_request_id(client, request_id)
    print('got: {}'.format(response))
    response = json_decode(response)
    session = response["result"]
    request_id += 1
    # Draw a red text object at (100, 60).
    client.write_message(json_encode({
        "action":"draw_chat_object",
        "request_id": request_id,
        "args":{
            "user_id":user["id"],
            "session_id": session["id"],
            "type_": "Text",
            "color": "255:0:0:255",
            "point_x": 100,
            "point_y": 60,
            "value": "foo2"}}))
    response = yield self.read_until_request_id(client, request_id)
    print('got: {}'.format(response))
    request_id += 1
    # Draw a blue text object at (100, 110).
    client.write_message(json_encode({
        "action":"draw_chat_object",
        "request_id": request_id,
        "args":{
            "user_id":user["id"],
            "session_id": session["id"],
            "type_": "Text",
            "color": "0:0:255:255",
            "point_x": 100,
            "point_y": 110,
            "value": "bar2"}}))
    response = yield self.read_until_request_id(client, request_id)
    print('got: {}'.format(response))
def get_authenticated_user(self, redirect_uri, callback, scope=None, **args):
    """Complete the Renren OAuth flow and invoke ``callback`` with the user.

    If no ``code`` query argument is present yet, redirects the browser
    to the authorization page and returns.  Otherwise exchanges the code
    for an access token, decodes the user payload, best-effort attaches
    a ``session`` key, and calls ``callback(user)`` -- or
    ``callback(None)`` on any failure.

    Usage example:

    class RenrenHandler(tornado.web.RequestHandler, RenrenGraphMixin):
        @tornado.web.asynchronous
        @gen.engine
        def get(self):
            self.get_authenticated_user(
                callback=(yield gen.Callback('key')),
                redirect_uri=url)
            user = yield gen.Wait('key')
            if not user:
                raise web.HTTPError(500, "Renren auth failed")
            # do something else
            self.finish()
    """
    code = self.get_argument('code', None)
    if not code:
        # First leg of OAuth: send the user to the authorization page.
        self.authorize_redirect(redirect_uri, scope=scope, **args)
        return
    # Second leg: exchange the code for an access token (legacy
    # gen.Callback/gen.Wait callback pairing).
    self.get_access_token(
        code,
        callback=(yield gen.Callback('_RenrenGraphMixin.get_authenticated_user')),
        redirect_uri=redirect_uri)
    response = yield gen.Wait('_RenrenGraphMixin.get_authenticated_user')
    if not response:
        callback(None)
        return
    try:
        user = json_decode(response.body)
    except:
        # NOTE(review): broad except kept as-is -- any decode failure is
        # treated as auth failure.
        logging.warning("Error response %s fetching %s", response.body,
                        response.request.url)
        callback(None)
        return
    if 'error' in user:
        logging.warning("Error response %s fetching %s",
                        user['error_description'], response.request.url)
        callback(None)
        return
    #{{{ get session key
    self.renren_request('renren_api/session_key',
                        user['access_token'],
                        callback=(yield gen.Callback('_RenrenGraphMixin._session_key')))
    response = yield gen.Wait('_RenrenGraphMixin._session_key')
    if response.error and not response.body:
        logging.warning("Error response %s fetching %s", response.error,
                        response.request.url)
    elif response.error:
        logging.warning("Error response %s fetching %s: %s", response.error,
                        response.request.url, response.body)
    else:
        try:
            user['session'] = json_decode(response.body)
        except:
            # Session key is optional; ignore a malformed response.
            pass
    #}}}
    #TODO delete when renren graph api released
    callback(user)
    return
def test_json_encode(self):
    """Round-trip non-ascii text through json_encode/json_decode."""
    # json deals with strings, not bytes. On python 2 byte strings will
    # convert automatically if they are utf8; on python 3 byte strings
    # are not allowed.
    non_ascii = u("\u00e9")
    self.assertEqual(json_decode(json_encode(non_ascii)), non_ascii)
    if bytes is str:
        # Python 2 only: utf8-encoded byte strings are accepted.
        self.assertEqual(json_decode(json_encode(utf8(non_ascii))),
                         non_ascii)
    self.assertRaises(UnicodeDecodeError, json_encode, b"\xe9")
def then_i_got_a_response_with_json_inside(step):
    """Assert the last response body is JSON served with the JSON type,
    then navigate the browser on to the document page."""
    try:
        json_decode(world.response.body)
        assert world.response.headers["Content-type"] == 'application/json'
    except Exception:
        # FIX: narrowed from a bare `except:` so KeyboardInterrupt /
        # SystemExit still propagate instead of becoming a bogus
        # assertion failure.
        assert False, 'JSON parsing failed'
    world.response = world.browser.get("document/")
def test_json_decode(self):
    """json_decode accepts bytes or unicode and always returns unicode."""
    expected = u("foo")
    self.assertEqual(json_decode(b'"foo"'), expected)
    self.assertEqual(json_decode(u('"foo"')), expected)
    # Non-ascii bytes are interpreted as utf8
    self.assertEqual(json_decode(utf8(u('"\u00e9"'))), u("\u00e9"))
def test_types(self):
    """Both GET and POST typecheck endpoints report no type errors."""
    for response in (self.fetch("/typecheck?foo=bar"),
                     self.fetch("/typecheck", method="POST",
                                body="foo=bar")):
        # An empty dict means the handler saw the expected types.
        self.assertEqual(json_decode(response.body), {})
def test_types(self):
    """Typecheck endpoints report no type errors when a cookie is set."""
    headers = {"Cookie": "foo=bar"}
    for response in (self.fetch("/typecheck?foo=bar", headers=headers),
                     self.fetch("/typecheck", method="POST",
                                body="foo=bar", headers=headers)):
        # An empty dict means the handler saw the expected types.
        self.assertEqual(json_decode(response.body), {})
def authenticate(self, username, password):
    """
    Authenticate user with using the Crowd service API.

    :returns: a Future that must a resolve to None or a valid AuthUser
        object.

    NOTE(review): the ``'******'`` fragment below is a redacted literal
    from the original source; the auth_url/auth_body construction is
    syntactically incomplete as committed and needs the real text
    restored.  Also: ``time.clock()`` was removed in Python 3.8 --
    confirm this module targets an older interpreter or switch to
    ``time.perf_counter()``.
    """
    auth_url = self._crowd_url
    auth_url += self._CROWD_AUTH_URL
    auth_url += "?username="******"value":password }
    request = HTTPRequest(auth_url,
                          method="POST",
                          auth_mode="basic",
                          auth_username=self._crowd_username,
                          auth_password=self._crowd_password,
                          headers=self._crowd_headers,
                          body=json_encode(auth_body)
                          )
    fetch_time = time.clock()
    try:
        response = yield self._client.fetch(request)
    except HTTPError as e:
        if e.code == 400:
            # Expected status code from the Crowd API
            # for unsuccessful user authentication.
            body = json_decode(e.response.body)
            _LOGGER.warn("Authentication failure for username: %s: %s",
                         username, body["message"])
            return
        # Re-raise execption
        raise
    fetch_time = (time.clock() - fetch_time) * 1000
    # Warn when the round trip exceeds 100ms, otherwise log at info.
    if fetch_time > 100:
        _LOGGER.warn("Authentication request success: %sms", fetch_time)
    else:
        _LOGGER.info("Authentication request success: %sms", fetch_time)
    # Carry Crowd's session cookies forward on subsequent requests.
    if "Set-Cookie" in response.headers:
        if "Cookie" in self._crowd_headers:
            del self._crowd_headers["Cookie"]
        for cookie in response.headers.get_list("Set-Cookie"):
            self._crowd_headers.add("Cookie", cookie)
    body = json_decode(response.body)
    if "name" not in body:
        _LOGGER.warn("Missing 'name' attribute in Crowd response")
        return
    user = AuthUser()
    user.username = body["name"]
    raise Return(user)
def _parse_user_response(self, callback, session, user):
    """Decode the doubly-encoded user payload, attach the access token,
    and hand the result to ``callback`` (``None`` if no user)."""
    if user is None:
        callback(None)
        return
    # user is a weirdly encoded json string ('"{\\"meta\\"...) --
    # luckily decoding twice works.
    decoded = json_decode(json_decode(user))
    decoded.update(dict(access_token=session['access_token']))
    callback(decoded)
async def filter(self, *, organization_id: Union[str, int],
                 term: Optional[str]=None,
                 columns: Optional[list]=None,
                 offset: int=0,
                 limit: Optional[int]=None):
    """Fetch records for an organization, with optional fuzzy search.

    Pages through the remote API; when ``term`` is given, all pages are
    fetched and results are ranked by fuzzy match before slicing.

    :param organization_id: tenant scoping parameter.
    :param term: optional fuzzy-search string.
    :param columns: optional list of keys to project from each record.
    :param offset: record offset used to pick the starting page.
    :param limit: maximum number of records to return (0 -> empty list).
    :raises HTTPError: re-raised with the remote code/message on failure.
    """
    if limit == 0:
        return []
    elif not term and limit and limit <= self.service.MAX_PAGE_SIZE:
        batch_size = limit
    else:
        batch_size = self.service.MAX_PAGE_SIZE
    paging = True
    page_index = max(ceil(offset / batch_size), 1)
    results = []
    # Loop until we reach the index we need, unless there is a search term.
    # If search term we need all records.
    while paging and (term or not limit or len(results) < limit):
        url = '{base_url}?{query}'.format(
            base_url=self.module_url,
            query=urlencode({
                'organization_id': organization_id,
                'per_page': batch_size,
                'page': page_index,
                **self.base_query}))
        try:
            logger.info('GET: {}'.format(url))
            response = await self.http_client.fetch(url, method='GET')
        except HTTPClientError as http_error:
            http_code = http_error.code
            body = http_error.response.body
            response = json_decode(body.decode("utf-8"))
            message = str(response['code']) + ': ' + response['message']
            raise HTTPError(http_code, reason=message)
        else:
            response = json_decode(response.body.decode("utf-8"))
        results += response[self.name]
        page_index += 1
        paging = response['page_context']['has_more_page']

    def fuzzy_score(items):
        # Rank by partial match of the term against all field values.
        values = [str(v).lower() for v in items.values() if v]
        target = ' '.join(values)
        return fuzz.partial_ratio(term, target)

    if term:
        results = sorted(results, key=fuzzy_score, reverse=True)
    results = results[:limit]
    if columns:
        # FIX: the filter previously read ``if k in columns`` which is
        # always true while iterating columns (and raised KeyError for
        # keys absent from a record); the intent is to keep only the
        # requested keys that each record actually has.
        return [{k: pl[k] for k in columns if k in pl} for pl in results]
    else:
        return results
def post(self):
    """
    Receives POST requests.

    Queues (async) or sends (sync) an MT message for the carrier named
    in the JSON body; responds with a success/error JSON envelope.

    Expected body keys: ``msisdn``, ``la``, ``text``, ``carrier`` and an
    optional ``async`` object with ``priority`` and ``callback``.

    :return:
    """
    # Getting body
    body = json_decode(self.request.body)
    try:
        # Getting parameters
        msisdn = str(body['msisdn'])
        la = str(body['la'])
        text = str(body['text'])
        carrier = str(body['carrier'])
        # Getting service class according to carrier
        # (e.g. carrier "foo" resolves to services.MtFoo).
        service = getattr(services, "Mt" + carrier.capitalize())
        # Getting configs
        configs = service.get_configs(self.application.settings['config'])
        # Async call
        if "async" in body:
            priority = body['async']['priority']
            callback = body['async']['callback']
            # Queue onto a per-carrier, per-priority celery queue.
            service.send.apply_async(
                args=[configs, msisdn, la, text, callback],
                queue="globalsdp.mt.{0}.{1}".format(carrier, priority),
                serializer=settings.CELERY_SERIALIZATION)
            # Log success
            log.info("MT queued. "
                     "Request body: {0}. "
                     "Request headers: {1}. "
                     "Operation Hash: {2}. ".format(
                         body, self.request.headers, LOG_HASH_MT))
            # Return success
            return self.success({
                "message": "MT successfully queued",
                "success": 1
            })
        # Sync call
        response = service.send(configs, msisdn, la, text)
        if response.status_code not in [200, 201, 202]:
            # Log error
            log.error("Could not send MT. "
                      "Request body: {0}. "
                      "Request headers: {1}. "
                      "Response body: {2}. "
                      "Response code: {3}. "
                      "Operation Hash: {4}. ".format(
                          body, self.request.headers, response.text,
                          response.status_code, LOG_HASH_MT))
            # Return error
            return self.error({
                "message": "Could not send MT",
                "success": 0
            })
        # Log success
        log.info("MT sent to partner. "
                 "Request body: {0}. "
                 "Request headers: {1}. "
                 "Response body: {2}. "
                 "Response code: {3}. "
                 "Operation Hash: {4}. ".format(body, self.request.headers,
                                               response.text,
                                               response.status_code,
                                               LOG_HASH_MT))
        # Return success
        return self.success({
            "message": "MT sent: {0}".format(response.text),
            "success": 1
        })
    except Exception as e:
        # Log error (missing body keys, unknown carrier, send failures).
        log.error("Could not send MT. "
                  "Error: {0}. "
                  "Request body: {1}. "
                  "Request headers: {2}. "
                  "Operation Hash: {3}. ".format(e, body,
                                                 self.request.headers,
                                                 LOG_HASH_MT))
        # Return error
        return self.error({
            "message": "Could not send MT",
            "success": 0
        }, 500)
async def post(self, *args, **kwargs):
    """Telegram webhook endpoint.

    Acknowledges Telegram immediately, then drives a two-state
    per-conversation machine: ``main`` answers FAQ text messages and
    offers guides; ``guide`` steps through a selected guide via
    inline-button callbacks.
    """
    # Ack the webhook right away; the rest of the work happens after
    # the HTTP response is finished.
    self.set_status(200)
    self.finish()
    telegram = self.application.telegram
    data = json_decode(self.request.body)
    logger.info('Got message from telegram: {}'.format(data))
    response = None
    # The chat id lives in a different place for plain messages vs.
    # callback-query (inline keyboard) updates.
    if 'callback_query' not in data:
        conversation_id = data['message']['chat']['id']
    else:
        conversation_id = data['callback_query']['message']['chat']['id']
    # Fetch or create the per-conversation context (class-level store).
    if conversation_id in self.CONTEXT:
        context = self.CONTEXT[conversation_id]
    else:
        context = {
            'state': 'main'
        }
        self.CONTEXT[conversation_id] = context
    if context['state'] == 'main':
        if 'callback_query' not in data:
            # Free-text message: try FAQ first, then guides.
            message = data['message']['text']
            response = self.application.bot.respond_faq(message)
            # A tuple/str response is a direct answer; anything else is a
            # dict of candidate questions the user must pick from.
            message_type = 'answer' if type(response) in [tuple, str] else 'questions'
            if message_type == 'questions':
                # save to cache so a later numeric callback can index it
                self.CACHE[conversation_id] = list(response.values())
            else:
                if conversation_id in self.CACHE:
                    self.CACHE.pop(conversation_id)
            guides = self.application.bot.respond_guides(message)
            logger.info('Guides: {}'.format(guides))
            logger.info('FAQS: {}'.format(response))
            if guides is not None and response != self.application.bot.default_answer:
                logger.info('Sending both guides and faqs')
                await telegram.send(conversation_id, response, message_type)
                await telegram.send_guides(conversation_id, guides)
            elif guides is not None:
                logger.info('Sending only guides')
                await telegram.send_guides(conversation_id, guides)
            else:
                logger.info('Sending only faqs')
                await telegram.send(conversation_id, response, message_type)
        else:
            # Callback payloads: plain int = cached question index,
            # 'g<int>' = guide selection.
            message = data['callback_query']['data']
            if message[0] != 'g':
                index = int(message)
                logger.info('Responding to question from cache...')
                cached = self.CACHE.get(conversation_id, None)
                if cached is not None:
                    response = cached[index]
                    message_type = 'answer'
                    await telegram.send(conversation_id, response, message_type)
            else:
                # guide was selected: start its state machine and switch
                # the conversation into 'guide' mode.
                index = int(message[1:])
                guide = self.application.bot_data['guides'][index]
                machine = Machine(guide)
                self.CONTEXT[conversation_id].update({
                    'state': 'guide',
                    'machine': machine
                })
                response = machine.current_question['description']
                answers = machine.current_question['answers']
                await telegram.send_guide_item(response, answers, conversation_id)
    else:
        # state == 'guide': advance the machine with the pressed answer.
        if 'callback_query' in data:
            machine = self.CONTEXT[conversation_id]['machine']
            resp = machine.next_state(data['callback_query']['data'])
            if not resp.get('answers', None):
                # Terminal guide node: drop back to 'main'.
                self.CONTEXT[conversation_id].update({
                    'state': 'main',
                    'machine': None
                })
                logger.info('Last guide item data: {}'.format(resp))
                await telegram.send(conversation_id, resp['description'], 'answer')
            else:
                await telegram.send_guide_item(resp['description'], resp['answers'], conversation_id)
def on_message(self, message):
    """Dispatch one websocket command (tail/grep/awk/sed) over a log path.

    Validates the command against the configured whitelist and the
    file lister, kills any previously running pipeline, then spawns the
    requested pipeline and wires its stdout/stderr back to the client.

    Fixes two defects in the original:
    * ``self.cmd-control`` in the non-live sed branch parsed as a
      subtraction expression and raised at runtime; it is now
      ``self.cmd_control``.
    * The awk/sed branches registered ``self.processes['tail']``
      unconditionally, raising NameError on the non-live path where
      ``proc_tail`` is never bound; registration now happens per branch.
    """
    command = escape.json_decode(message)
    allowed_commands = self.config['commands']
    log.debug('received message: %r', command)
    log.debug(command.keys())
    # Reject messages carrying unknown keys.
    if not set(command.keys()) <= {'command', 'live-view', 'path', 'tail-lines', 'script'}:
        return
    if command['command'] not in allowed_commands:
        log.warn('disallowed or unsupported command: %r', command['command'])
        return
    path = os.path.abspath(command['path'])
    # Live views operate on every *.log file under the directory.
    live_path = glob('%s/*.log' % (path))
    if not self.file_lister.is_path_allowed(path):
        log.warn('request to unlisted file: %r', path)
        return
    # Only one pipeline at a time per connection.
    self.killall()
    if 'tail' == command['command'] and command['live-view']:
        n = command.get('tail-lines', self.initial_tail_lines)
        proc = self.cmd_control.tail(n, live_path, STREAM, STREAM)
        self.processes['tail'] = proc
        outcb = partial(self.stdout_callback, path, proc.stdout)
        errcb = partial(self.stderr_callback, path, proc.stderr)
        proc.stdout.read_until_close(outcb, outcb)
        proc.stderr.read_until_close(errcb, errcb)
    elif 'grep' == command['command']:
        n = command.get('tail-lines', self.initial_tail_lines)
        regex = command.get('script', '.*')
        log.debug('n = %s, path = %s, regex = %s' % (n, path, regex))
        if not command['live-view'] and self.toolpaths.cmd_sift:
            # sift greps compressed archives directly, no zcat needed.
            proc_grep = self.cmd_control.all_grep(path, regex, STREAM, STREAM)
        elif not command['live-view'] and not self.toolpaths.cmd_sift:
            proc_zcat, proc_grep = self.cmd_control.all_grep(path, regex, STREAM, STREAM)
        elif command['live-view']:
            proc_tail, proc_grep = self.cmd_control.tail_grep(n, live_path, regex, STREAM, STREAM)
        self.processes['grep'] = proc_grep
        outcb = partial(self.stdout_callback, path, proc_grep.stdout)
        errcb = partial(self.stderr_callback, path, proc_grep.stderr)
        proc_grep.stdout.read_until_close(outcb, outcb)
        proc_grep.stderr.read_until_close(errcb, errcb)
    elif 'awk' in command['command']:
        n = command.get('tail-lines', self.initial_tail_lines)
        script = command.get('script', '{print $0}')
        if not command['live-view']:
            proc_zcat, proc_awk = self.cmd_control.all_awk(path, script, STREAM, STREAM)
            # FIX: no tail process exists on this path; registering
            # proc_tail here raised NameError before.
            self.processes['awk'] = proc_awk
        else:
            proc_tail, proc_awk = self.cmd_control.tail_awk(n, live_path, script, STREAM, STREAM)
            self.processes['tail'], self.processes['awk'] = proc_tail, proc_awk
        outcb = partial(self.stdout_callback, path, proc_awk.stdout)
        errcb = partial(self.stderr_callback, path, proc_awk.stderr)
        proc_awk.stdout.read_until_close(outcb, outcb)
        proc_awk.stderr.read_until_close(errcb, errcb)
    elif 'sed' == command['command']:
        n = command.get('tail-lines', self.initial_tail_lines)
        script = command.get('script', 's|.*|&|')
        if not command['live-view']:
            # FIX: was `self.cmd-control.all_sed`, which parsed as
            # `self.cmd - control.all_sed` and failed at runtime.
            proc_zcat, proc_sed = self.cmd_control.all_sed(path, script, STREAM, STREAM)
            # FIX: see awk branch — register only the sed process here.
            self.processes['sed'] = proc_sed
        else:
            proc_tail, proc_sed = self.cmd_control.tail_sed(n, live_path, script, STREAM, STREAM)
            self.processes['tail'], self.processes['sed'] = proc_tail, proc_sed
        outcb = partial(self.stdout_callback, path, proc_sed.stdout)
        errcb = partial(self.stderr_callback, path, proc_sed.stderr)
        proc_sed.stdout.read_until_close(outcb, outcb)
        proc_sed.stderr.read_until_close(errcb, errcb)
async def get_authenticated_user(self, redirect_uri: str, code: str) -> Dict[str, Any]: """Handles the login for the Google user, returning an access token. The result is a dictionary containing an ``access_token`` field ([among others](https://developers.google.com/identity/protocols/OAuth2WebServer#handlingtheresponse)). Unlike other ``get_authenticated_user`` methods in this package, this method does not return any additional information about the user. The returned access token can be used with `OAuth2Mixin.oauth2_request` to request additional information (perhaps from ``https://www.googleapis.com/oauth2/v2/userinfo``) Example usage: .. testcode:: class GoogleOAuth2LoginHandler(tornado.web.RequestHandler, tornado.auth.GoogleOAuth2Mixin): async def get(self): if self.get_argument('code', False): access = await self.get_authenticated_user( redirect_uri='http://your.site.com/auth/google', code=self.get_argument('code')) user = await self.oauth2_request( "https://www.googleapis.com/oauth2/v1/userinfo", access_token=access["access_token"]) # Save the user and access token with # e.g. set_secure_cookie. else: await self.authorize_redirect( redirect_uri='http://your.site.com/auth/google', client_id=self.settings['google_oauth']['key'], scope=['profile', 'email'], response_type='code', extra_params={'approval_prompt': 'auto'}) .. testoutput:: :hide: .. versionchanged:: 6.0 The ``callback`` argument was removed. Use the returned awaitable object instead. 
""" # noqa: E501 handler = cast(RequestHandler, self) http = self.get_auth_http_client() body = urllib.parse.urlencode({ "redirect_uri": redirect_uri, "code": code, "client_id": handler.settings[self._OAUTH_SETTINGS_KEY]["key"], "client_secret": handler.settings[self._OAUTH_SETTINGS_KEY]["secret"], "grant_type": "authorization_code", }) response = await http.fetch( self._OAUTH_ACCESS_TOKEN_URL, method="POST", headers={"Content-Type": "application/x-www-form-urlencoded"}, body=body, ) return escape.json_decode(response.body)
def post(self):
    """Build a word histogram for the posted article and return it as JSON."""
    payload = json_decode(self.request.body)
    self.write(WordsHistogram(payload['article']).to_json())
def extract_results(self, response):
    """Decode a JSON HTTP response body and return its "results" entry."""
    return escape.json_decode(response.body)["results"]
def put(self, id=None, o=None):
    """
    PUT /slices/<id>

    Updates an existing slice identified by URN or HRN. Normalizes
    dict-valued authority/project/users/resources fields in the request
    body, then emits an update event handled by the slices worker.
    :return:
    """
    # NOTE(review): uses `yield` with rethinkdb — presumably wrapped by a
    # coroutine decorator outside this view; confirm at the call site.
    events = []     # NOTE(review): unused in the visible body
    response = []   # NOTE(review): unused in the visible body
    if not self.get_current_user():
        self.userError('permission denied user not logged in')
        return
    if not self.request.body:
        self.userError("empty request")
        return
    # Accept either a URN or an HRN as the slice identifier.
    if self.isUrn(id):
        filter = {'id': id}
    elif self.isHrn(id):
        filter = {'hrn': id}
    else:
        self.userError('id or hrn format error')
        return
    try:
        data = escape.json_decode(self.request.body)
    except json.decoder.JSONDecodeError as e:
        self.userError("malformed request", e.msg)
        return
    cursor = yield r.table('slices') \
        .filter(filter) \
        .run(self.dbconnection)
    while (yield cursor.fetch_next()):
        slice = yield cursor.next()
        if not slice:
            self.userError("problem with db")
            return
        # handle authority as dict
        if "authority" in data and type(data["authority"]) is dict:
            data["authority"] = data["authority"]["id"]
        # handle project as dict
        if "project" in data and type(data["project"]) is dict:
            data["project"] = data["project"]["id"]
        # handle user as dict
        if all(isinstance(n, dict) for n in data['users']):
            data['users'] = [x['id'] for x in data['users']]
        # convert resources as string to dict
        # we need resources as dict to get the configuration of resources
        if any(isinstance(n, str) for n in data['resources']):
            try:
                resources = []
                for x in data['resources']:
                    if isinstance(x, str):
                        resource = yield self.getResource(x)
                    else:
                        resource = x
                    resources.append(resource)
                data['resources'] = resources
            except Exception as e:
                import traceback
                traceback.print_exc()
                self.userError("resource id wrong or unknown")
                return
        # All the logic happens in services.workers.slices
        # Worker compares the current Slice in DB and the event.data sent
        ## adding users
        ## removing users
        ## adding resources
        ## removing resources
        # update slice
        event = self.update_slice(data, slice)
        result = yield dispatch(self.dbconnection, event)
        # Leases: handled by POST /leases and DELETE /leases/<id>
        self.write(
            json.dumps(
                {
                    "result": "success",
                    "events": result['generated_keys'],
                    "error": None,
                    "debug": None,
                }, cls=myJSONEncoder))
def post(self):
    """Job-callback endpoint.

    Expects a JSON body with:
        jobid:    id of the job being reported on
        taskid:   id of the task within the job
        jobname:  task/step name
        status:   task status
        messages: list of per-host dicts with 'host', 'status', 'message'
    Records the callback in ZooKeeper and triggers follow-up task
    handling; replies with a JSON status payload.
    :return:
    """
    body = json_decode(self.request.body)
    job_id = body.get('jobid', '')
    task_id = body.get('taskid', '')
    task_name = body.get('jobname', '')
    task_status = body.get('status', '')
    task_message = body.get('messages', [])
    # NOTE(review): task_id is not part of this validation — a missing
    # 'taskid' still passes; confirm that is intended.
    if job_id == '' or task_name == '' or task_status == '' or task_message == []:
        res = {
            "status": JobCallbackResponseStatus.fail.value,
            "message": "some argument is null"
        }
        logger.error(
            "job callback fail: {}".format("some argument is null"))
        self.write(json.dumps(res))
        self.finish()
    else:
        logger.info(
            'Job_ID: {}, Task_id: {}, Job_Step: {}, Task_Status: {}'.
            format(job_id, task_id, task_name, task_status))
        zk = self.application.zk
        # Persist the callback result against the task in ZooKeeper.
        if zk.update_callback_by_taskid(job_id, task_id, task_status,
                                        task_message):
            logger.info(
                "update callback by taskid sucess: jobid={}, taskid={}".
                format(job_id, task_id))
        else:
            logger.error(
                "update callback by taskid failed: jobid={}, taskid={}".
                format(job_id, task_id))
        for message in task_message:
            logger.info('"Host": {}, "status": {}, "message": {}'.format(
                message['host'], message['status'], message['message']))
        # Advance job handling; on success, release any waiting signal.
        if zk.handler_task(job_id, task_id, task_name, task_message,
                           task_status):
            logger.info("handler task success after callback")
            if zk.is_exist_signal(job_id):
                zk.send_signal(job_id)
            res = {
                "status": JobCallbackResponseStatus.success.value,
                "message": "callback receive success, and handler task success after callback"
            }
        else:
            # Callback itself was received fine, so status stays 'success'.
            logger.error("handler task fail after callback")
            res = {
                "status": JobCallbackResponseStatus.success.value,
                "message": "callback receive success, but handler task fail after callback"
            }
        self.write(json_encode(res))
        self.finish()
def get_current_user(self):
    """Return the decoded 'userinfo' secure cookie, or None when absent."""
    raw = self.get_secure_cookie('userinfo')
    if not raw:
        return None
    return json_decode(raw)
async def post(self, *args, **kwargs):
    """Apply bulk edits to every show listed in the 'toEdit' argument.

    Each tri-state argument ('keep' / 'enable' / anything else) either
    preserves the show's current value or switches the flag on/off.
    Root-directory moves are described by paired orig_root_dir_N /
    new_root_dir_N arguments. Warnings and errors from edit_show are
    collected per show and surfaced as UI alerts.
    """
    skip_downloaded = self.get_argument('skip_downloaded', None)
    scene = self.get_argument('scene', None)
    paused = self.get_argument('paused', None)
    default_ep_status = self.get_argument('default_ep_status', None)
    anime = self.get_argument('anime', None)
    flatten_folders = self.get_argument('flatten_folders', None)
    quality_preset = self.get_argument('quality_preset', None)
    subtitles = self.get_argument('subtitles', None)
    search_format = self.get_argument('search_format', None)
    any_qualities = self.get_arguments('anyQualities')
    best_qualities = self.get_arguments('bestQualities')
    to_edit = self.get_argument('toEdit', None)
    # Collect orig_root_dir_N -> new_root_dir_N pairs until a gap.
    i = 0
    dir_map = {}
    while True:
        cur_arg = self.get_argument('orig_root_dir_{}'.format(i), None)
        if not cur_arg:
            break
        end_dir = self.get_argument('new_root_dir_{}'.format(i))
        dir_map[cur_arg] = end_dir
        i += 1
    show_ids = to_edit.split("|")
    warnings, errors = [], []
    for curShow in show_ids:
        cur_warnings = []
        cur_errors = []
        show_obj = find_show(int(curShow))
        if not show_obj:
            continue
        cur_root_dir = os.path.dirname(show_obj.location)
        cur_show_dir = os.path.basename(show_obj.location)
        # Relocate the show if its root dir was remapped.
        if cur_root_dir in dir_map and cur_root_dir != dir_map[cur_root_dir]:
            new_show_dir = os.path.join(dir_map[cur_root_dir], cur_show_dir)
            sickrage.app.log.info(
                "For show " + show_obj.name + " changing dir from " + show_obj.location + " to " + new_show_dir)
        else:
            new_show_dir = show_obj.location
        # Tri-state flags resolve to 'on'/'off' strings for edit_show.
        if skip_downloaded == 'keep':
            new_skip_downloaded = show_obj.skip_downloaded
        else:
            new_skip_downloaded = True if skip_downloaded == 'enable' else False
        new_skip_downloaded = 'on' if new_skip_downloaded else 'off'
        if scene == 'keep':
            new_scene = show_obj.scene
        else:
            new_scene = True if scene == 'enable' else False
        new_scene = 'on' if new_scene else 'off'
        if paused == 'keep':
            new_paused = show_obj.paused
        else:
            new_paused = True if paused == 'enable' else False
        new_paused = 'on' if new_paused else 'off'
        if default_ep_status == 'keep':
            new_default_ep_status = show_obj.default_ep_status
        else:
            new_default_ep_status = int(default_ep_status)
        if anime == 'keep':
            new_anime = show_obj.anime
        else:
            new_anime = True if anime == 'enable' else False
        new_anime = 'on' if new_anime else 'off'
        if search_format == 'keep':
            new_search_format = show_obj.search_format
        else:
            new_search_format = int(search_format)
        if flatten_folders == 'keep':
            new_flatten_folders = show_obj.flatten_folders
        else:
            new_flatten_folders = True if flatten_folders == 'enable' else False
        new_flatten_folders = 'on' if new_flatten_folders else 'off'
        if subtitles == 'keep':
            new_subtitles = show_obj.subtitles
        else:
            new_subtitles = True if subtitles == 'enable' else False
        new_subtitles = 'on' if new_subtitles else 'off'
        # NOTE(review): this overwrites the request-level quality lists
        # inside the loop, so later shows see the previous show's split —
        # confirm whether that carry-over is intended.
        if quality_preset == 'keep':
            any_qualities, best_qualities = Quality.split_quality(show_obj.quality)
        elif try_int(quality_preset, None):
            best_qualities = []
        status, message = edit_show(show=curShow, location=new_show_dir,
                                    any_qualities=any_qualities,
                                    best_qualities=best_qualities,
                                    exceptions_list=[],
                                    default_ep_status=new_default_ep_status,
                                    skip_downloaded=new_skip_downloaded,
                                    flatten_folders=new_flatten_folders,
                                    paused=new_paused,
                                    search_format=new_search_format,
                                    subtitles=new_subtitles,
                                    anime=new_anime,
                                    scene=new_scene,
                                    direct_call=True)
        # edit_show reports failures as a JSON message payload.
        if status is False:
            cur_warnings += json_decode(message)['warnings']
            cur_errors += json_decode(message)['errors']
        if cur_warnings:
            sickrage.app.log.warning("Warnings: " + str(cur_warnings))
            warnings.append('<b>%s:</b>\n<ul>' % show_obj.name + ' '.join(
                ['<li>%s</li>' % warning for warning in cur_warnings]) + "</ul>")
        if cur_errors:
            sickrage.app.log.error("Errors: " + str(cur_errors))
            errors.append('<b>%s:</b>\n<ul>' % show_obj.name + ' '.join(
                ['<li>%s</li>' % error for error in cur_errors]) + "</ul>")
    if len(warnings) > 0:
        sickrage.app.alerts.message(
            _('{num_warnings:d} warning{plural} while saving changes:').format(num_warnings=len(warnings),
                                                                               plural="" if len(warnings) == 1 else "s"),
            " ".join(warnings))
    if len(errors) > 0:
        sickrage.app.alerts.error(
            _('{num_errors:d} error{plural} while saving changes:').format(num_errors=len(errors),
                                                                           plural="" if len(errors) == 1 else "s"),
            " ".join(errors))
    return self.redirect("/manage/")
def on_message(self, message):
    """Run one tail/grep/awk/sed pipeline over an allowed log file.

    Validates the websocket command, tears down any running pipeline,
    spawns the requested one, and streams its stdout/stderr back to
    the client.
    """
    cmd = escape.json_decode(message)
    allowed_commands = self.config['commands']
    log.debug('received message: %r', cmd)
    # Only the known keys may appear in a command message.
    if not set(cmd.keys()) <= {'mode', 'path', 'tail-lines', 'script'}:
        log.warn('invalid message received: %r', cmd)
        return
    mode = cmd['mode']
    if mode not in allowed_commands:
        log.warn('disallowed command: %r', mode)
        return
    path = cmd['path']
    if not self.file_lister.is_path_allowed(path):
        log.warn('request to unlisted file: %r', path)
        return
    # A connection drives at most one pipeline at a time.
    self.killall()
    lines = cmd.get('tail-lines', 10)
    if 'tail' == mode:
        self.tail = self.cmd.tail(lines, path, STREAM, STREAM)
        sink = self.tail
    elif 'grep' == mode:
        pattern = cmd.get('script', '.*')
        self.tail, self.grep = self.cmd.tail_grep(lines, path, pattern,
                                                  STREAM, STREAM)
        sink = self.grep
    elif 'awk' in mode:
        program = cmd.get('script', '{print $0}')
        self.tail, self.awk = self.cmd.tail_awk(lines, path, program,
                                                STREAM, STREAM)
        sink = self.awk
    elif 'sed' == mode:
        program = cmd.get('script', 's|.*|&|')
        self.tail, self.sed = self.cmd.tail_sed(lines, path, program,
                                                STREAM, STREAM)
        sink = self.sed
    else:
        return
    # Wire the final process of the pipeline back to the websocket.
    out_cb = partial(self.stdout_callback, path, sink.stdout)
    err_cb = partial(self.stderr_callback, path, sink.stderr)
    sink.stdout.read_until_close(out_cb, out_cb)
    sink.stderr.read_until_close(err_cb, err_cb)
def post(self, id=None, o=None):
    """
    POST /slices
    {
    shortname: string,
    project: string
    }

    Creates a slice under the given project after checking that the
    logged-in user holds PI rights on it, then dispatches a CREATE event.
    :return:
    """
    if not self.get_current_user():
        self.userError('permission denied user not logged in')
        return
    if not self.request.body:
        self.userError("empty request")
        return
    try:
        data = escape.json_decode(self.request.body)
    except json.decoder.JSONDecodeError as e:
        self.userError("malformed request", e.msg)
        return
    try:
        # Check if the user has the right to create a slice under this project
        u = yield r.table('users').get(self.get_current_user()['id']).run(
            self.dbconnection)
        # Accept the project either as a plain id or as a {'id': ...} dict.
        if isinstance(data['project'], dict):
            project_id = data['project']['id']
            data['project'] = project_id
        if data['project'] in u['pi_authorities']:
            data['authority'] = data['project']
        else:
            self.userError("your user has no rights on project: %s" % data['project'])
            return
    except Exception:
        self.userError("not authenticated or project not specified")
        return
    try:
        event = Event({
            'action': EventAction.CREATE,
            'user': self.get_current_user()['id'],
            'object': {
                'type': ObjectType.SLICE,
                'id': None,
            },
            'data': data
        })
    except AttributeError as e:
        self.userError("Can't create request", e)
        return
    except Exception as e:
        self.userError("Can't create request", e)
        return
    else:
        # The slices worker performs the actual creation.
        result = yield dispatch(self.dbconnection, event)
        self.write(
            json.dumps(
                {
                    "result": "success",
                    "events": result['generated_keys'],
                    "error": None,
                    "debug": None,
                }, cls=myJSONEncoder))
async def test_success(http_server_client):
    """The root endpoint answers 200 with the expected JSON payload."""
    resp = await http_server_client.fetch("/")
    assert resp.code == 200
    assert {"test": "ok"} == json_decode(resp.body)
def on_message(self, message):
    """Decode the websocket message and push it into the Rx subject."""
    self.subject.on_next(json_decode(message))
def test_query_string_encoding(self):
    """Percent-encoded UTF-8 query parameters must decode to unicode."""
    resp = self.fetch("/echo?foo=%C3%A9")
    self.assertEqual({u"foo": [u"\u00e9"]}, json_decode(resp.body))
def test_get_invalid(self):
    """GET of a nonexistent study id returns 404 with a message body."""
    resp = self.get('/api/v1/study/0', headers=self.headers)
    self.assertEqual(resp.code, 404)
    self.assertEqual({'message': 'Study not found'},
                     json_decode(resp.body))
def testNonExistentFile(self):
    """A missing file under the chunked-contents API yields a JSON 404."""
    resp = self.fetch('/api/chunked-contents/in/some/dir/foo.py')
    self.assertEqual(resp.code, 404)
    # The body is a JSON document carrying the error message.
    body = escape.json_decode(resp.body)
    self.assertIn('No such file or directory', body['message'])
def test_get_no_study(self):
    """The status endpoint 404s for an unknown study."""
    resp = self.get('/api/v1/study/0/status', headers=self.headers)
    self.assertEqual(resp.code, 404)
    self.assertEqual({'message': 'Study not found'},
                     json_decode(resp.body))
async def get_authenticated_user(
    self,
    redirect_uri: str,
    client_id: str,
    client_secret: str,
    code: str,
    extra_fields: Dict[str, Any] = None,
) -> Optional[Dict[str, Any]]:
    """Handles the login for the Facebook user, returning a user object.

    Example usage:

    .. testcode::

        class FacebookGraphLoginHandler(tornado.web.RequestHandler,
                                        tornado.auth.FacebookGraphMixin):
          async def get(self):
              if self.get_argument("code", False):
                  user = await self.get_authenticated_user(
                      redirect_uri='/auth/facebookgraph/',
                      client_id=self.settings["facebook_api_key"],
                      client_secret=self.settings["facebook_secret"],
                      code=self.get_argument("code"))
                  # Save the user with e.g. set_secure_cookie
              else:
                  await self.authorize_redirect(
                      redirect_uri='/auth/facebookgraph/',
                      client_id=self.settings["facebook_api_key"],
                      extra_params={"scope": "read_stream,offline_access"})

    .. testoutput::
       :hide:

    This method returns a dictionary which may contain the following fields:

    * ``access_token``, a string which may be passed to `facebook_request`
    * ``session_expires``, an integer encoded as a string representing
      the time until the access token expires in seconds. This field should
      be used like ``int(user['session_expires'])``; in a future version of
      Tornado it will change from a string to an integer.
    * ``id``, ``name``, ``first_name``, ``last_name``, ``locale``,
      ``picture``, ``link``, plus any fields named in the ``extra_fields``
      argument. These fields are copied from the Facebook graph API
      `user object <https://developers.facebook.com/docs/graph-api/reference/user>`_

    .. versionchanged:: 4.5
       The ``session_expires`` field was updated to support changes made
       to the Facebook API in March 2017.

    .. versionchanged:: 6.0
       The ``callback`` argument was removed. Use the returned awaitable
       object instead.
    """  # noqa: E501
    http = self.get_auth_http_client()
    # Exchange the one-time authorization code for an access token.
    args = {
        "redirect_uri": redirect_uri,
        "code": code,
        "client_id": client_id,
        "client_secret": client_secret,
    }
    fields = set([
        "id", "name", "first_name", "last_name", "locale", "picture", "link"
    ])
    if extra_fields:
        fields.update(extra_fields)
    response = await http.fetch(
        self._oauth_request_token_url(**args)  # type: ignore
    )
    args = escape.json_decode(response.body)
    session = {
        "access_token": args.get("access_token"),
        "expires_in": args.get("expires_in"),
    }
    assert session["access_token"] is not None
    # appsecret_proof is an HMAC of the access token keyed by the app
    # secret, letting Facebook verify the call comes from the server.
    user = await self.facebook_request(
        path="/me",
        access_token=session["access_token"],
        appsecret_proof=hmac.new(
            key=client_secret.encode("utf8"),
            msg=session["access_token"].encode("utf8"),
            digestmod=hashlib.sha256,
        ).hexdigest(),
        fields=",".join(fields),
    )
    if user is None:
        return None
    fieldmap = {}
    for field in fields:
        fieldmap[field] = user.get(field)
    # session_expires is converted to str for compatibility with
    # older versions in which the server used url-encoding and
    # this code simply returned the string verbatim.
    # This should change in Tornado 5.0.
    fieldmap.update({
        "access_token": session["access_token"],
        "session_expires": str(session.get("expires_in")),
    })
    return fieldmap
def test_gzip(self):
    """A gzip-compressed form POST decodes to the same arguments."""
    response = self.post_gzip('foo=bar')
    # assertEquals is a deprecated alias removed in Python 3.12;
    # use the canonical assertEqual.
    self.assertEqual(json_decode(response.body), {u'foo': [u'bar']})
async def twitter_request(self, path: str, access_token: Dict[str, Any],
                          post_args: Dict[str, Any] = None,
                          **args: Any) -> Any:
    """Fetches the given API path, e.g., ``statuses/user_timeline/btaylor``

    The path should not include the format or API version number.
    (we automatically use JSON format and API version 1).

    If the request is a POST, ``post_args`` should be provided. Query
    string arguments should be given as keyword arguments.

    All the Twitter methods are documented at http://dev.twitter.com/

    Many methods require an OAuth access token which you can
    obtain through `~OAuthMixin.authorize_redirect` and
    `~OAuthMixin.get_authenticated_user`. The user returned through that
    process includes an 'access_token' attribute that can be used to make
    authenticated requests via this method.

    Example usage:

    .. testcode::

        class MainHandler(tornado.web.RequestHandler,
                          tornado.auth.TwitterMixin):
            @tornado.web.authenticated
            async def get(self):
                new_entry = await self.twitter_request(
                    "/statuses/update",
                    post_args={"status": "Testing Tornado Web Server"},
                    access_token=self.current_user["access_token"])
                if not new_entry:
                    # Call failed; perhaps missing permission?
                    yield self.authorize_redirect()
                    return
                self.finish("Posted a message!")

    .. testoutput::
       :hide:

    .. versionchanged:: 6.0
       The ``callback`` argument was removed. Use the returned
       awaitable object instead.
    """
    if path.startswith("http:") or path.startswith("https:"):
        # Raw urls are useful for e.g. search which doesn't follow the
        # usual pattern: http://search.twitter.com/search.json
        url = path
    else:
        url = self._TWITTER_BASE_URL + path + ".json"
    # Add the OAuth resource request signature if we have credentials
    if access_token:
        all_args = {}
        all_args.update(args)
        all_args.update(post_args or {})
        method = "POST" if post_args is not None else "GET"
        oauth = self._oauth_request_parameters(url, access_token, all_args,
                                               method=method)
        args.update(oauth)
    # Query-string arguments (including the OAuth signature) go in the URL.
    if args:
        url += "?" + urllib.parse.urlencode(args)
    http = self.get_auth_http_client()
    if post_args is not None:
        response = await http.fetch(url, method="POST",
                                    body=urllib.parse.urlencode(post_args))
    else:
        response = await http.fetch(url)
    return escape.json_decode(response.body)
def test_uncompressed(self):
    """A plain (uncompressed) form POST decodes to the same arguments."""
    response = self.fetch('/', method='POST', body='foo=bar')
    # assertEquals is a deprecated alias removed in Python 3.12;
    # use the canonical assertEqual.
    self.assertEqual(json_decode(response.body), {u'foo': [u'bar']})
def post(self):
    """Decode and echo the posted body to stdout, then acknowledge ok."""
    payload = escape.json_decode(self.request.body)
    # NOTE(review): leftover debug print — consider switching to logging.
    print("[DEBUG] ", payload)
    self.write({"ok": True})
def fetch_json(self, *args, **kwargs):
    """Fetch a URL, raise on HTTP errors, and return the JSON-decoded body."""
    resp = self.fetch(*args, **kwargs)
    resp.rethrow()
    return json_decode(resp.body)
async def test_login_check_spam(self):
    """Makes sure spamming of connections that timeout quickly get closed
    and removed from the login manager correctly.

    Fires LIMIT concurrent login fetches where only the first and last
    have a long timeout, confirms both of those complete a full
    login/verify round-trip, then drains the rest and waits for the
    login manager's key store to shrink back to zero.
    """
    await self.create_test_users()
    request_token = 'abcdefg{}'
    fs = []
    LIMIT = 1000
    TIMEOUT = 1.0
    for i in range(0, LIMIT):
        c = AsyncHTTPClient(force_instance=True)
        # Only the first and last requests get a generous timeout; the
        # middle ones are expected to time out and be cleaned up.
        f = asyncio.ensure_future(
            c.fetch(self.get_url("/login/{}".format(
                request_token.format(i))),
                    request_timeout=60.0 if i == 0 or i == LIMIT - 1 else TIMEOUT))
        fs.append(f)
        # Yield periodically so the fetches actually start.
        if i % 10 == 0 or i == LIMIT - 1:
            await asyncio.sleep(0)
    resp = await self.fetch_signed("/login/{}".format(
        request_token.format(0)), signing_key=TEST_PRIVATE_KEY, method="GET")
    self.assertResponseCodeEqual(resp, 204)
    resp = await fs[0]
    self.assertResponseCodeEqual(resp, 200)
    body = json_decode(resp.body)
    self.assertTrue('auth_token' in body)
    auth_token = body['auth_token']
    # verify token
    resp = await self.fetch("/login/verify/{}".format(auth_token))
    self.assertResponseCodeEqual(resp, 200)
    body = json_decode(resp.body)
    self.assertEqual(body['token_id'], TEST_ADDRESS)
    resp = await self.fetch_signed("/login/{}".format(
        request_token.format(LIMIT - 1)), signing_key=TEST_PRIVATE_KEY, method="GET")
    self.assertResponseCodeEqual(resp, 204)
    resp = await fs[-1]
    self.assertResponseCodeEqual(resp, 200)
    body = json_decode(resp.body)
    self.assertTrue('auth_token' in body)
    auth_token = body['auth_token']
    # verify token
    resp = await self.fetch("/login/verify/{}".format(auth_token))
    self.assertResponseCodeEqual(resp, 200)
    body = json_decode(resp.body)
    self.assertEqual(body['token_id'], TEST_ADDRESS)
    await asyncio.sleep(TIMEOUT)
    # Drain the middle futures: cancellations and HTTP timeouts are
    # expected here and must not fail the test.
    for f in fs[1:-1]:
        if not f.done():
            f.cancel()
        try:
            f.result()
        except (asyncio.CancelledError, Exception):
            # FIX: was a bare `except:`, which also swallows
            # KeyboardInterrupt/SystemExit; CancelledError is listed
            # explicitly since it is a BaseException on Python 3.8+.
            pass
    # Poll until the login manager has dropped every pending key,
    # failing if the count ever stops decreasing.
    keys = len(LoginManager._instance._keys)
    while keys != 0:
        await asyncio.sleep((keys // 500) + 1)
        keys_ = len(LoginManager._instance._keys)
        if keys_ >= keys:
            self.fail("Login manager not reducing keys")
        keys = keys_
    self.assertEqual(len(LoginManager._instance._keys), 0)
def test_empty_post_parameters(self):
    """Form fields posted with empty values decode to empty strings."""
    resp = self.fetch("/echo", method="POST", body="foo=&bar=")
    self.assertEqual({u"foo": [u""], u"bar": [u""]},
                     json_decode(resp.body))
def post(self):
    """Ask the bot to send a friend request from the posted payload."""
    payload = json_decode(self.request.body)
    bot.add_friend(payload['user_name'], payload['verify_msg'])
    self.write('ok')
def test_empty_query_string(self):
    """Repeated empty query parameters decode to a list of empty strings."""
    resp = self.fetch("/echo?foo=&foo=")
    self.assertEqual({u"foo": [u"", u""]}, json_decode(resp.body))
def test_twitter_show_user(self):
    """The show_user proxy returns the expected Twitter profile JSON."""
    resp = self.fetch("/twitter/client/show_user?name=somebody")
    resp.rethrow()
    expected = {"name": "Somebody", "screen_name": "somebody"}
    self.assertEqual(expected, json_decode(resp.body))
def post(self):
    """Log the decoded request body and reply with a fixed id payload."""
    print(json_decode(self.request.body))
    self.write({'id': '12345'})