def test_connect(c, s, a, b):
    future = c.submit(lambda x: x + 1, 1)
    x = c.submit(slowinc, 1, delay=1, retries=5)
    yield future
    http_client = AsyncHTTPClient()
    for suffix in ['info/main/workers.html',
                   'info/worker/' + url_escape(a.address) + '.html',
                   'info/task/' + url_escape(future.key) + '.html',
                   'info/main/logs.html',
                   'info/logs/' + url_escape(a.address) + '.html',
                   'info/call-stack/' + url_escape(x.key) + '.html',
                   'info/call-stacks/' + url_escape(a.address) + '.html',
                   'json/counts.json',
                   'json/identity.json',
                   'json/index.html',
                   'individual-plots.json',
                   ]:
        response = yield http_client.fetch('http://localhost:%d/%s'
                                           % (s.services['bokeh'].port, suffix))
        assert response.code == 200
        body = response.body.decode()
        if suffix.endswith('.json'):
            json.loads(body)
        else:
            assert xml.etree.ElementTree.fromstring(body) is not None
            assert not re.search("href=./", body)  # no absolute links
def post(self):
    username = self.get_argument("email", "").strip().lower()
    password = self.get_argument("newpass", "")
    info = {}
    for info_column in ("name", "affiliation", "address", "phone"):
        hold = self.get_argument(info_column, None)
        if hold:
            info[info_column] = hold

    created = False
    try:
        created = User.create(username, password, info)
    except QiitaDBDuplicateError:
        msg = "Email already registered as a user"

    if created:
        info = created.info
        try:
            send_email(username, "QIITA: Verify Email Address", "Please "
                       "click the following link to verify email address: "
                       "%s/auth/verify/%s?email=%s"
                       % (qiita_config.base_url, info['user_verify_code'],
                          url_escape(username)))
        except:
            msg = ("Unable to send verification email. Please contact the "
                   "qiita developers at <a href='mailto:qiita-help"
                   "@gmail.com'>[email protected]</a>")
            self.redirect(u"/?level=danger&message=" + url_escape(msg))
            return
        self.redirect(u"/")
    else:
        error_msg = u"?error=" + url_escape(msg)
        self.redirect(u"/auth/create/" + error_msg)
def work_link(ID):
    """Return an HTML link for the work designated by ID (using /serie/ for a
    serie, /movie/ for a movie and /episode/ for an episode)"""
    if ID[0] != '"':  # Movie
        res = re.search('^(.+) \(([0-9]{4})[^\)]*\)', ID)
        if res:
            title = escape(res.group(1))
            year = escape(res.group(2))
            return ('<a href="/movie/%s">%s (%s)</a>'
                    % (url_escape(ID), title, year))
        else:
            return ('Invalid movie ID: %s' % escape(ID))
    elif ID.find('{') != -1:  # Episode
        res = re.search('^"(.+)" \(([0-9]{4})[^\)]*\) \{([^\(]*)(\(#([0-9]{1,3})\.([0-9]{1,3})\))?\}', ID)
        if res:
            title = escape(res.group(1))
            year = escape(res.group(2))
            epi_name = escape(res.group(3))
            season = escape(res.group(5) or '')
            epi_num = escape(res.group(6) or '')
            return ('<a href="/episode/%s">%s (%s) %sx%s: %s</a>'
                    % (url_escape(ID), title, year, season, epi_num, epi_name))
        else:
            return ('Invalid episode ID: %s' % escape(ID))
    else:  # Serie
        res = re.search('"(.+)" \(([0-9]{4})[^\)]*\)', ID)
        if res:
            title = escape(res.group(1))
            year = escape(res.group(2))
            return ('<a href="/serie/%s">%s (%s)</a>'
                    % (url_escape(ID), title, year))
        else:
            return ('Invalid serie ID: %s' % escape(ID))
def post(self):
    para_keys = ('name', 'ps', 'psbak')
    para_dict = {}
    mgr_code_u = self.get_argument('mgr_code', default='')
    if mgr_code_u and mgr_code_u == mgr_code:
        usertype = 1
    else:
        usertype = 0
    for key in para_keys:
        para_dict.update({key: self.get_argument(key, default='')})
    if all(para_dict.values()):
        if para_dict['ps'] == para_dict['psbak']:
            astu = Stu(name=para_dict['name'], psswd=para_dict['ps'],
                       usertype=usertype)
            if astu.had_name():
                # "用户名已经被注册!" -> "This username is already registered!"
                self.redirect('/signup?msg=' + url_escape("用户名已经被注册!"))
                return
            astu.save()
            re = Stu(name=para_dict['name'], psswd=para_dict['ps']).isRgstr()
            if re:
                self.set_secure_cookie('name', str(re[1]))
                self.set_secure_cookie('id', str(re[0]))
                self.set_secure_cookie('usertype', str(re[3]))
                self.redirect('/')
        else:
            # "密码不匹配" -> "Passwords do not match"
            self.redirect('/signup?msg=' + url_escape("密码不匹配"))
    else:
        # "参数不全" -> "Missing parameters"
        self.redirect('/signup?msg=' + url_escape("参数不全"))
def post(self, sbjct_id):
    all_sbjcts = StuSbjct().getSbjcts()
    crrnt_sbjct = self.getCrrntSbjct(all_sbjcts, sbjct_id) if all_sbjcts else ()
    name = self.get_argument("name", default='')
    psswd = self.get_argument("psswd", default='')
    if name and psswd:
        re = Stu(name=name, psswd=psswd,
                 ipaddr=self.request.remote_ip).isRgstr()
        # print(self.request.remote_ip)
        if re:
            self.set_secure_cookie('name', str(re[1]))
            self.set_secure_cookie('id', str(re[0]))
            self.set_secure_cookie('usertype', str(re[3]))
            if crrnt_sbjct and (crrnt_sbjct[3] or re[3]):
                all_answrs = StuAnswr().getAnswrs(crrnt_sbjct[0])
            elif crrnt_sbjct:
                all_answrs = StuAnswr().getSelfAnswr(crrnt_sbjct[0], int(re[0]))
            else:
                all_answrs = []
            all_hlp_self = self.getHlps()
            self.render('index.html', dict(stu_id=re[0], name=name,
                        usertype=re[3], all_sbjcts=all_sbjcts,
                        crrnt_sbjct=crrnt_sbjct, info='',
                        all_answrs=all_answrs, all_hlp_self=all_hlp_self))
        else:
            # "登录失败!" -> "Login failed!"
            self.redirect('/?info=' + url_escape("登录失败!"))
    else:
        self.redirect('/?info=' + url_escape("登录失败!"))
def _render(self, login_error=None, username=None):
    return self.render_template(
        'login.html',
        next=url_escape(self.get_argument('next', default='')),
        repourl=url_escape(self.get_argument('repourl', default='')),
        username=username,
        login_error=login_error,
    )
def post(self):
    try:
        username = self.get_argument('username')
        # TODO: replace with JWT from OpenID Connect
        auth = self.get_argument('auth')
    except tornado.web.MissingArgumentError:
        self.write('Must specify username and auth.')
        return

    user_id = yield self.db.retrieve_user_id_from_username(username)
    if user_id is None:
        self.redirect('/login?error={0}'.format(
            escape.url_escape('Unknown username or password.')))
    else:
        user_auth = yield self.db.retrieve_user_auth(user_id)
        hashed_auth = yield pool.submit(
            bcrypt.hashpw, auth.encode('utf-8'), user_auth.encode('utf-8'))
        if user_auth == hashed_auth:
            session_auth = yield self.db.retrieve_user_session_auth(user_id)
            self.set_secure_cookie('session_auth', session_auth,
                                   expires_days=SESSION_MAX_AGE_DAYS)
            self.redirect('/')
        else:
            self.redirect('/login?error={0}'.format(
                escape.url_escape('Unknown username or password.')))
def set_blink(self, message, type="info"):
    """
    Sets the blink, a one-time transactional message that is shown on the
    next page load
    """
    self.set_cookie("blink_message", escape.url_escape(message), httponly=True)
    self.set_cookie("blink_type", escape.url_escape(type), httponly=True)
def _render(self, login_error=None, username=None):
    return self.render_template(
        "login.html",
        next=url_escape(self.get_argument("next", default="")),
        repourl=url_escape(self.get_argument("repourl", default="")),
        username=username,
        login_error=login_error,
    )
def send_mail_reset(to, token, name):
    # Password-reset email. Subject: "子曰 -- password reset"; the body tells
    # the user to click the link (or paste it into the address bar) to finish
    # resetting their password.
    subject = u'子曰--密码重置'
    html = (u"<html><head></head><body>"
            u"<p>" + name + u",您对密码进行了重置,请您点击下面链接完成密码重置操作!</p><br>"
            u"<p><a href='http://www.afewords.com/check?type=reset&email=" +
            url_escape(to) + u"&token=" + token + u"'>重置链接</a></p>"
            u"<p>或者将链接复制至地址栏完成密码重置:http://www.afewords.com/check?type=reset&email=" +
            url_escape(to) + "&token=" + token + u"</p>"
            u"<body></html>")
    return send_mail(to, subject, html)
def delete_attachment(self, doc, attachment_name):
    '''Delete a named attachment to the specified doc. The doc shall be a
    dict, at least with the keys: _id and _rev'''
    if '_rev' not in doc or '_id' not in doc:
        raise KeyError('Missing id or revision information in doc')
    url = '/{0}/{1}/{2}?rev={3}'.format(self.db_name,
                                        url_escape(doc['_id']),
                                        url_escape(attachment_name),
                                        doc['_rev'])
    return self._http_delete(url)
def send_mail_reg(to, token, name):
    # Registration-verification email. Subject: "子曰 -- verify registration";
    # the body asks the user to click the link (or paste it into the address
    # bar) to activate their email address.
    subject = u'子曰--验证注册'
    html = (u"<html><head></head><body>"
            u"<p>" + name + u",欢迎您注册子曰,请您点击下面链接进行邮箱验证操作!</p>"
            u"<br>"
            u"<p><a href='http://www.afewords.com/check?email=" +
            url_escape(to) + u"&token=" + token + u"'>验证链接</a></p><br>"
            u"<p>或者将链接复制至地址栏完成邮箱激活:http://www.afewords.com/check?email=" +
            url_escape(to) + "&token=" + token + u"</p>"
            u"</body></html>")
    return send_mail(to, subject, html)
def params(param):
    key, value = param
    if value is None:
        return
    key = escape.url_escape(str(key))
    value = escape.url_escape(str(value))
    self.log.info((key, value))
    return '%s=%s' % (key, value)
def send_file(file_path, dashboard_name, handler):
    '''
    Posts a file to the Jupyter Dashboards Server to be served as a dashboard

    :param file_path: The path of the file to send
    :param dashboard_name: The dashboard name under which it should be made
        available
    '''
    # Make information about the request Host header available for use in
    # constructing the urls
    segs = handler.request.host.split(':')
    hostname = segs[0]
    if len(segs) > 1:
        port = segs[1]
    else:
        port = ''
    protocol = handler.request.protocol

    # Treat empty as undefined
    dashboard_server = os.getenv('DASHBOARD_SERVER_URL')
    if dashboard_server:
        dashboard_server = dashboard_server.format(protocol=protocol,
                                                   hostname=hostname,
                                                   port=port)
        upload_url = url_path_join(dashboard_server, UPLOAD_ENDPOINT,
                                   escape.url_escape(dashboard_name, False))
        with open(file_path, 'rb') as file_content:
            headers = {}
            token = os.getenv('DASHBOARD_SERVER_AUTH_TOKEN')
            if token:
                headers['Authorization'] = 'token {}'.format(token)
            result = requests.post(upload_url,
                                   files={'file': file_content},
                                   headers=headers,
                                   timeout=60,
                                   verify=not skip_ssl_verification())
            if result.status_code >= 400:
                raise web.HTTPError(result.status_code)

        # Redirect to link specified in response body
        res_body = result.json()
        if 'link' in res_body:
            redirect_link = res_body['link']
        else:
            # Compute redirect link using environment variables
            # First try redirect URL as it might be different from internal
            # upload URL
            redirect_server = os.getenv('DASHBOARD_REDIRECT_URL')
            if redirect_server:
                redirect_root = redirect_server.format(hostname=hostname,
                                                       port=port,
                                                       protocol=protocol)
            else:
                redirect_root = dashboard_server
            redirect_link = url_path_join(redirect_root, VIEW_ENDPOINT,
                                          escape.url_escape(dashboard_name, False))
        handler.redirect(redirect_link)
    else:
        access_log.debug('Can not deploy, DASHBOARD_SERVER_URL not set')
        raise web.HTTPError(500, log_message='No dashboard server configured')
def test_url_escape_quote_plus(self):
    unescaped = '+ #%'
    plus_escaped = '%2B+%23%25'
    escaped = '%2B%20%23%25'
    self.assertEqual(url_escape(unescaped), plus_escaped)
    self.assertEqual(url_escape(unescaped, plus=False), escaped)
    self.assertEqual(url_unescape(plus_escaped), unescaped)
    self.assertEqual(url_unescape(escaped, plus=False), unescaped)
    self.assertEqual(url_unescape(plus_escaped, encoding=None),
                     utf8(unescaped))
    self.assertEqual(url_unescape(escaped, encoding=None, plus=False),
                     utf8(unescaped))
def _star(self, notebook_name, note_name, star, redir=True):
    starred = self.get_starred()
    full_name = u'%s/%s' % (notebook_name, note_name)
    if star == 'set' and full_name not in starred:
        starred.append(full_name)
    elif star == 'unset' and full_name in starred:
        starred.remove(full_name)
    self.set_cookie('starred_notes',
                    b64encode(','.join(starred).encode('utf8')),
                    expires_days=365)
    if redir:
        self.redirect('/%s/%s' % (url_escape(notebook_name).replace('#', '%23'),
                                  url_escape(note_name).replace('#', '%23')))
def recurse(obj, prefix=''):
    if isinstance(obj, dict):
        for k in obj:
            recurse(obj[k], prefix + '[' + k + ']')
    elif isinstance(obj, list):
        for i, v in enumerate(obj):
            if isinstance(v, (dict, list)):
                recurse(v, prefix + '[%d]' % i)
            else:
                recurse(v, prefix + '[]')
    else:
        ret.append(url_escape(prefix) + '=' + url_escape(str(obj)))
def post(self):
    username = self.get_argument("email", "").strip().lower()
    password = self.get_argument("newpass", "")
    info = {}
    for info_column in ("name", "affiliation", "address", "phone"):
        hold = self.get_argument(info_column, None)
        if hold:
            info[info_column] = hold

    created = False
    try:
        created = User.create(username, password, info)
    except QiitaDBDuplicateError:
        msg = "Email already registered as a user"

    if created:
        info = created.info
        try:
            # qiita_config.base_url doesn't have a / at the end, but the
            # qiita_config.portal_dir has it at the beginning but not at
            # the end. This constructs the correct URL
            url = qiita_config.base_url + qiita_config.portal_dir
            send_email(username, "QIITA: Verify Email Address", "Please "
                       "click the following link to verify email address: "
                       "%s/auth/verify/%s?email=%s\n\nBy clicking you are "
                       "accepting our term and conditions: "
                       "%s/iframe/?iframe=qiita-terms"
                       % (url, info['user_verify_code'],
                          url_escape(username), url))
        except Exception:
            msg = ("Unable to send verification email. Please contact the "
                   "qiita developers at <a href='mailto:qiita-help"
                   "@gmail.com'>[email protected]</a>")
            self.redirect(u"%s/?level=danger&message=%s"
                          % (qiita_config.portal_dir, url_escape(msg)))
            return
        msg = ("<h3>User Successfully Created</h3><p>Your Qiita account "
               "has been successfully created. An email has been sent to "
               "the email address you provided. This email contains "
               "instructions on how to activate your account.</p>"
               "<p>If you don't receive your activation email within a "
               "couple of minutes, check your spam folder. If you still "
               "don't see it, send us an email at <a "
               "href=\"mailto:[email protected]\">[email protected]"
               "</a>.</p>")
        self.redirect(u"%s/?level=success&message=%s"
                      % (qiita_config.portal_dir, url_escape(msg)))
    else:
        error_msg = u"?error=" + url_escape(msg)
        self.redirect(u"%s/auth/create/%s"
                      % (qiita_config.portal_dir, error_msg))
def person_info(t, person, ID, admin):
    (fname, lname, num) = person
    res = ('<a href="/person/%s/%s/%s">%s %s</a>'
           % (url_escape(fname), url_escape(lname), url_escape(num),
              url_escape(fname), url_escape(lname)))
    if admin:
        res += (' (<a href="/admin/delete/%s/%s/%s/%s/%s">delete</a>)'
                % (t, url_escape(fname), url_escape(lname), url_escape(num),
                   url_escape(ID)))
    return res
def run(self):
    book = self.book
    lock = self.lock
    whoToAsk = _setWhoToAsk(self.user, self.whoToAsk, self.book,
                            self.connectAll, self.maxConnection)
    http_client = tornado.httpclient.HTTPClient()
    # NOTE: '******' marks values redacted in the original listing; the
    # redaction also swallowed the loop/try header, which is reconstructed
    # here from the surrounding code (whoToAsk, user.host and the except
    # clauses below).
    _body = 'userName=' + '******'
    _body += '&userPubKey=' + escape.url_escape(self.user.pubKey)
    _body += '&host=' + escape.url_escape(self.user.host)
    _body += '&port=' + escape.url_escape(str(self.user.port))
    _body += '&invitation=' + escape.url_escape(
        self.user.invitation if self.user.invitation is not None else ' ')
    _body += '&approverID=' + escape.url_escape(
        self.user.approverID if self.user.approverID is not None else ' ')
    _body += '&bookID=' + escape.url_escape(self.book.getID())
    _body += '&password=' + '******'
    for user in whoToAsk:
        try:
            address = 'http://' + str(user.host) + ':' + str(user.port)
            if self.askMember:
                response = http_client.fetch(address + '/members',
                                             method='POST', body=_body)
                members = escape.json_decode(response.body)
                _updateMembers(members, self.user, book, lock, address)
            if self.askAction:
                response = http_client.fetch(address + '/versions',
                                             method='POST', body=_body)
                versions = escape.json_decode(response.body)
                _updateActions(versions, self.user, book, lock, address)
        except tornado.httpclient.HTTPError as e:
            # todo: log
            print("Error:" + str(e))
            pass
        except ValueError:
            print('ValueError')
def save_attachment(self, doc, attachment):
    '''Save an attachment to the specified doc. The attachment shall be a
    dict with keys: `mimetype`, `name`, `data`. The doc shall be a dict, at
    least having the key `_id`, and if doc is existing in the database, it
    shall also contain the key `_rev`'''
    if any(key not in attachment for key in ('mimetype', 'name', 'data')):
        raise KeyError('Attachment dict is missing one or more required '
                       'keys')
    url = '/{0}/{1}/{2}{3}'.format(
        self.db_name,
        url_escape(doc['_id']),
        url_escape(attachment['name']),
        '?rev={0}'.format(doc['_rev']) if '_rev' in doc else '')
    headers = {'Content-Type': attachment['mimetype']}
    body = attachment['data']
    return self._http_put(url, body, headers=headers)
def _updateMembers(members, user, book, lock, address):
    lock.acquire()
    ids = [id for id in members if id not in book.members]
    freezeCount = 0
    while ids:
        id = ids.pop(0)
        newcomer = User(**members[id])
        if newcomer.approverID in book.members and User.isValidInvitation(
                book.members[newcomer.approverID], newcomer, book.getID()):
            book.members[id] = newcomer
            book.versions[id] = 0
            freezeCount = 0
        else:
            freezeCount += 1
            ids.append(id)
            if freezeCount >= len(ids):
                break
    lock.release()
    lock.acquire()
    myMembers = dict(book.members)
    lock.release()
    for user in myMembers:
        if user not in members:
            http_client = tornado.httpclient.HTTPClient()
            # NOTE: '******' marks values redacted in the original listing;
            # the fetch target before '/newmembers' was also redacted and is
            # reconstructed here from the `address` parameter.
            _body = 'userName=' + '******'
            _body += '&userPubKey=' + escape.url_escape(user.pubKey)
            _body += '&host=' + escape.url_escape(user.host)
            _body += '&port=' + escape.url_escape(str(user.port))
            _body += '&invitation=' + escape.url_escape(
                user.invitation if user.invitation is not None else ' ')
            _body += '&approverID=' + escape.url_escape(
                user.approverID if user.approverID is not None else ' ')
            _body += '&bookID=' + escape.url_escape(book.getID())
            _body += '&password=' + '******'
            http_client.fetch(address + '/newmembers', method='POST',
                              body=_body)
            break
def run(self):
    books = self.books
    lock = self.lock
    http_client = tornado.httpclient.HTTPClient()
    # NOTE: '******' marks values redacted in the original listing; the
    # `address` assignment was partially redacted and is reconstructed from
    # the visible 'http://' + host + port expression.
    _body = 'userName=' + '******'
    _body += '&userPubKey=' + escape.url_escape(self.user.pubKey)
    _body += '&host=' + escape.url_escape(self.user.host)
    _body += '&port=' + escape.url_escape(str(self.user.port))
    _body += '&bookID=' + escape.url_escape(self.bookID)
    _body += '&invitation=' + escape.url_escape(' ')
    _body += '&approverID=' + escape.url_escape(' ')
    _body += '&password=' + '******'
    address = 'http://' + str(self.host) + ':' + str(self.port)
    try:
        response = http_client.fetch(address + '/join', method='POST',
                                     body=_body)
        b = escape.json_decode(response.body)
        owner = User(**b['owner'])
        lock.acquire()
        book = Book.copyCover(owner, b['title'], b['signature'],
                              b['createdAt'], self.path)
        books[book.getID()] = book
        lock.release()
        response = http_client.fetch(address + '/members', method='POST',
                                     body=_body)
        members = escape.json_decode(response.body)
        _updateMembers(members, self.user, book, lock, address)
    except tornado.httpclient.HTTPError as e:
        # todo: log
        print("Error:" + str(e))
    except ValueError:
        print('ValueError')
def get(self):
    logger.debug('RootHandler.get')
    args = self.request.arguments
    code = None
    lang = self.get_argument("lang", None)
    interacts = None
    if "c" in args:
        # The code is explicitly specified
        code = self.get_argument("c")
    elif "z" in args:
        # The code is base64-compressed
        def get_decompressed(name):
            a = args[name][-1]
            # We allow the user to strip off the ``=`` padding at the end
            # so that the URL doesn't have to have any escaping.
            # Here we add back the ``=`` padding if we need it.
            a += "=" * ((4 - (len(a) % 4)) % 4)
            return zlib.decompress(
                base64.urlsafe_b64decode(a)).decode("utf8")
        try:
            code = get_decompressed("z")
            if "interacts" in args:
                interacts = get_decompressed("interacts")
        except Exception as e:
            self.set_status(400)
            self.finish("Invalid zipped code: %s\n" % (e.message,))
            return
    elif "q" in args:
        # The code is referenced by a permalink identifier.
        q = self.get_argument("q")
        try:
            code, lang, interacts = (yield tornado.gen.Task(
                self.application.db.get, q))[0]
        except LookupError:
            logger.warning("ID not found in permalink database %s", q)
            self.set_status(404)
            self.finish("ID not found in permalink database")
            return
    if code is not None:
        code = url_escape(code, plus=False)
    if interacts is not None:
        interacts = url_escape(interacts, plus=False)
    autoeval = self.get_argument(
        "autoeval", "false" if code is None else "true")
    self.render(
        "root.html", code=code, lang=lang, interacts=interacts,
        autoeval=autoeval)
def test_get_not_authed(self):
    response = self.get('/ag_new_barcode/')
    self.assertEqual(response.code, 200)
    port = self.get_http_port()
    self.assertEqual(response.effective_url,
                     'http://localhost:%d/login/?next=%s'
                     % (port, url_escape('/ag_new_barcode/')))
def post(self):
    fm = ProjectPreForm(self)
    if fm.validate():
        self.redirect("/submit/project?url=%s"
                      % escape.url_escape(fm._values["website"]))
    else:
        fm.render("project/submit_pre.html")
def respond_zip(handler, name, output, resources):
    """Zip up the output and resource files and respond with the zip file.

    Returns True if it has served a zip file, False if there are no resource
    files, in which case we serve the plain output file.
    """
    # Check if we have resource files we need to zip
    output_files = resources.get("outputs", None)
    if not output_files:
        return False

    # Headers
    zip_filename = os.path.splitext(name)[0] + ".zip"
    handler.set_header("Content-Disposition",
                       'attachment; filename="%s"'
                       % escape.url_escape(zip_filename))
    handler.set_header("Content-Type", "application/zip")

    # Prepare the zip file
    buffer = io.BytesIO()
    zipf = zipfile.ZipFile(buffer, mode="w", compression=zipfile.ZIP_DEFLATED)
    output_filename = os.path.splitext(name)[0] + resources["output_extension"]
    zipf.writestr(output_filename, cast_bytes(output, "utf-8"))
    for filename, data in output_files.items():
        zipf.writestr(os.path.basename(filename), data)
    zipf.close()

    handler.finish(buffer.getvalue())
    return True
def setUp(self):
    super(TestProjectHandlers, self).setUp()
    self.database = 'couch_test'
    self.project_id = '测试项目'  # "test project"
    self.url = "/api/v1/{0}/project/{1}".format(self.database,
                                                url_escape(self.project_id))
    self.json_header = {"Content-Type": "application/json"}
    self.type = {'type': 'project'}
def render_auth_page(self, request, response, environ, scopes, client):
    """Auth page is a redirect to login page"""
    response.status_code = 302
    response.headers['Location'] = self.login_url + '?next={}'.format(
        url_escape(request.handler.request.path + '?' +
                   request.handler.request.query)
    )
    return response
def test_max_kernels(self):
    """Number of kernels should be limited."""
    app = self.get_app()
    app.settings["kg_max_kernels"] = 1

    # Request a kernel
    response = yield self.http_client.fetch(self.get_url("/api/kernels"),
                                            method="POST", body="{}")
    self.assertEqual(response.code, 201)

    # Request another
    response2 = yield self.http_client.fetch(
        self.get_url("/api/kernels"), method="POST", body="{}",
        raise_error=False
    )
    self.assertEqual(response2.code, 402)

    # Shut down the kernel
    kernel = json_decode(response.body)
    response = yield self.http_client.fetch(
        self.get_url("/api/kernels/" + url_escape(kernel["id"])),
        method="DELETE"
    )
    self.assertEqual(response.code, 204)

    # Try again
    response = yield self.http_client.fetch(self.get_url("/api/kernels"),
                                            method="POST", body="{}")
    self.assertEqual(response.code, 201)
def test_saving_a_comment_is_stored(self):
    # submit a comment to /share_key/save_comment
    body = """This is a comment.
A multi-line comment.&
That is all.&_xsrf=asdf
"""
    request = HTTPRequest(
        self.get_url('/p/%s/comment' % self.shf.share_key),
        'POST',
        {'Cookie': '_xsrf=%s;sid=%s' % (self.xsrf, self.sid)},
        "body=%s&_xsrf=%s" % (url_escape(body), self.xsrf))
    self.http_client.fetch(request, self.stop)
    response = self.wait()

    comments = self.shf.comments()
    self.assertEqual(len(comments), 1)
    self.assertEqual(comments[0].body, body.strip())
def __init__(self, gamename, game, session, nolog=False):
    self.gamename = gamename
    self.game = game
    dirname = os.path.join(os.path.dirname(__file__), "logs")
    if not os.path.exists(dirname):
        os.mkdir(dirname)
    if nolog:
        logfile = None
        self.logfile_name = None
    else:
        self.logfile_name = os.path.abspath(
            os.path.join(dirname,
                         gamename + "_" + url_escape(session) + ".html"))
        logfile = open(self.logfile_name, "w")
    self.game_context = self.game.make_actorcontext_with_io(
        TornadoGameIO(logfile, frontispiece=session[0:8]))
    self.session = session
    threading.Thread.__init__(self)
def test_query_parsing(self):
    querys = [
        '',
        'query=',
        'query=hola',
        'query=' + url_escape('{}'),
        self.get_body('^', 1, 1),
        self.get_body('*'),
        '&%$(&%(&%$ +++////))',
    ]
    for query in querys:
        with self.subTest(query=query):
            response = yield self.send_query(query)
            self.assertIsNone(response['result'])
    response = yield self.send_f_query('+', 1, 1)
    self.assertIsNotNone(response['result'])
def client_fetch(*parts, headers={}, params={}, **kwargs):
    # Handle URL strings
    path = url_escape(url_path_join(*parts), plus=False)
    urlparts = urllib.parse.urlparse('ws://localhost:{}'.format(jp_http_port))
    urlparts = urlparts._replace(
        path=path,
        query=urllib.parse.urlencode(params)
    )
    url = urlparts.geturl()
    # Add auth keys to header
    headers.update(jp_auth_header)
    # Make request, passing the merged headers (the original passed
    # jp_auth_header here, which discarded any caller-supplied headers).
    req = tornado.httpclient.HTTPRequest(
        url, headers=headers, connect_timeout=120
    )
    return tornado.websocket.websocket_connect(req)
def showImage(show=None, which=None):
    media = None
    media_format = ('normal', 'thumb')[which in ('banner_thumb',
                                                 'poster_thumb', 'small')]
    try:
        if which[0:6] == 'banner':
            media = Banner(show, media_format)
        elif which[0:6] == 'fanart':
            media = FanArt(show, media_format)
        elif which[0:6] == 'poster':
            media = Poster(show, media_format)
        elif which[0:7] == 'network':
            media = Network(show, media_format)
        static_url = url_escape(media.get_media, False)
        return static_url
    except:
        pass
async def _connect(self, kernel_id, message_callback):
    # websocket is initialized before connection
    self.ws = None
    self.kernel_id = kernel_id
    ws_url = url_path_join(
        GatewayClient.instance().ws_url,
        GatewayClient.instance().kernels_endpoint,
        url_escape(kernel_id),
        'channels'
    )
    self.log.info('Connecting to {}'.format(ws_url))
    kwargs = {}
    kwargs = GatewayClient.instance().load_connection_args(**kwargs)

    request = HTTPRequest(ws_url, **kwargs)
    self.ws_future = websocket_connect(request)
    self.ws_future.add_done_callback(self._connection_done)

    loop = IOLoop.current()
    loop.add_future(self.ws_future,
                    lambda future: self._read_messages(message_callback))
def get_temp_qrcode_url(event_id):
    access_token = yield get_access_token()
    url = wxconfig.temp_qrcode_url.format(access_token)
    param = {
        'expire_seconds': 2592000,
        'action_name': 'QR_SCENE',
        'action_info': {
            'scene': {
                'scene_id': event_id
            }
        }
    }
    http_client = AsyncHTTPClient()
    response = yield http_client.fetch(url, **{
        'method': 'POST',
        'body': json.dumps(param, ensure_ascii=False)
    })
    result = json.loads(response.body.decode())
    ticket = escape.url_escape(result['ticket'])
    return wxconfig.temp_qrcode_ticket_url.format(ticket)
def uploadFile(self, localPath, targetPath, datastream=None,
               isOverWrite=False, isCreateSuperFile=False):
    targetPath = url_escape(targetPath)
    if isOverWrite:
        ondup = 'overwrite'
    else:
        ondup = "newcopy"
    import os
    if isCreateSuperFile:
        appendURL = "&type=tmpfile"
    else:
        appendURL = ""
    if datastream == None:
        fp = open(localPath, "rb")
        datastream = fp.read()
        fp.close()
    md = hashlib.md5()
    md.update(datastream)
    md5sum = md.hexdigest()
    fname = os.path.basename(localPath)
    urlTemplate = "https://c.pcs.baidu.com/rest/2.0/pcs/file?method=upload&path=%s&access_token=%s&ondup=%s%s" % (
        targetPath, self.tokens["access_token"], ondup, appendURL)
    body, boundary = self.encode_baidu_formdata(datastream, fname)
    headers = {
        "Content-Type": 'multipart/form-data; boundary=%s' % boundary
    }
    resp = yield self.fetch(urlTemplate, method="POST", body=body,
                            headers=headers)
    jsonresp = json.loads(resp.body)
    if md5sum != jsonresp["md5"]:
        raise IOError("md5 check error", "in __file__")
    raise gen.Return(jsonresp)
def get(self, format, path):
    exporter = get_exporter(format, config=self.config, log=self.log)

    path = path.strip('/')
    model = self.contents_manager.get(path=path)
    name = model['name']

    if model['type'] != 'notebook':
        # not a notebook, redirect to files
        return FilesRedirectHandler.redirect_to_files(self, path)

    self.set_header('Last-Modified', model['last_modified'])

    try:
        output, resources = exporter.from_notebook_node(
            model['content'],
            resources={
                "metadata": {
                    "name": name[:name.rfind('.')],
                    "modified_date": (model['last_modified']
                                      .strftime(text.date_format))
                },
                "config_dir": self.application.settings['config_dir'],
            }
        )
    except Exception as e:
        self.log.exception("nbconvert failed: %s", e)
        raise web.HTTPError(500, "nbconvert failed: %s" % e)

    if respond_zip(self, name, output, resources):
        return

    # Force download if requested
    if self.get_argument('download', 'false').lower() == 'true':
        filename = os.path.splitext(name)[0] + resources['output_extension']
        self.set_header('Content-Disposition',
                        'attachment; filename="%s"'
                        % escape.url_escape(filename))

    # MIME type
    if exporter.output_mimetype:
        self.set_header('Content-Type',
                        '%s; charset=utf-8' % exporter.output_mimetype)

    self.finish(output)
def post(self, *args, **kwargs):
    self.set_header('Content-Type', 'application/json')
    detection_id = ObjectId()
    app_log.info(
        "app=detection,function=detect,detection_id=%s,application_id=%s,session_id=%s,q=%s",
        detection_id,
        self.param_extractor.application_id(),
        self.param_extractor.session_id(),
        self.param_extractor.query())

    if False:
        url = "%smessage?v=%s&q=%s&msg_id=%s" % (
            WIT_URL,
            WIT_URL_VERSION,
            url_escape(self.param_extractor.query()),
            str(detection_id))
        r = HTTPRequest(url,
                        headers={"Authorization": "Bearer %s" % WIT_TOKEN})
        client = AsyncHTTPClient()
        client.fetch(r, callback=self.wit_call_back)
    else:
        date = datetime.now()
        outcomes = self.brute_detector.detect(self.param_extractor.query())
        self.data_response.insert(
            self.param_extractor.user_id(),
            self.param_extractor.application_id(),
            self.param_extractor.session_id(),
            detection_id,
            "brute",
            date,
            self.param_extractor.query(),
            outcomes=outcomes)

        self.set_status(202)
        self.set_header("Location", "/%s" % str(detection_id))
        self.set_header("_id", str(detection_id))
        self.finish()

        Worker(
            self.param_extractor.user_id(),
            self.param_extractor.application_id(),
            self.param_extractor.session_id(),
            detection_id,
            date,
            self.param_extractor.query(),
            self.param_extractor.skip_slack_log(),
            detection_type="wit",
            outcomes=outcomes).start()
def get(self, path, include_body=True):
    cm = self.contents_manager

    if cm.is_hidden(path):
        self.log.info("Refusing to serve hidden file, via 404 Error")
        raise web.HTTPError(404)

    path = path.strip('/')
    if '/' in path:
        _, name = path.rsplit('/', 1)
    else:
        name = path

    model = cm.get(path, type='file', content=include_body)

    if self.get_argument("download", False):
        self.set_header(
            'Content-Disposition',
            'attachment; filename="%s"' % escape.url_escape(name))

    # get mimetype from filename
    if name.endswith('.ipynb'):
        self.set_header('Content-Type', 'application/x-ipynb+json')
    else:
        cur_mime = mimetypes.guess_type(name)[0]
        if cur_mime == 'text/plain':
            self.set_header('Content-Type', 'text/plain; charset=UTF-8')
        elif cur_mime is not None:
            self.set_header('Content-Type', cur_mime)
        else:
            if model['format'] == 'base64':
                self.set_header('Content-Type', 'application/octet-stream')
            else:
                self.set_header('Content-Type', 'text/plain; charset=UTF-8')

    if include_body:
        if model['format'] == 'base64':
            b64_bytes = model['content'].encode('ascii')
            self.write(decodebytes(b64_bytes))
        elif model['format'] == 'json':
            self.write(json.dumps(model['content']))
        else:
            self.write(model['content'])
        self.flush()
def test_adding_video_makes_it_show_up_in_friends_shake(self):
    user2 = User(name='user2', email='*****@*****.**',
                 email_confirmed=1, is_paid=1)
    user2.set_password('asdfasdf')
    user2.save()
    user2.subscribe(self.user.shake())

    url = 'https://vimeo.com/20379529'
    request = HTTPRequest(
        self.get_url('/tools/save-video'),
        'POST',
        {"Cookie": "sid=%s;_xsrf=%s" % (self.sid, self.xsrf)},
        "url=%s&_xsrf=%s" % (url_escape(url), self.xsrf))
    self.http_client.fetch(request, self.stop)
    response = self.wait()

    sfs = Sharedfile.from_subscriptions(user2.id)
    self.assertTrue(len(sfs) > 0)
    self.assertEqual(sfs[0].name, url)
async def post(self) -> None:
    lexer = self.get_body_argument("lexer")
    raw = self.get_body_argument("code", strip=False)
    expiry = self.get_body_argument("expiry")
    filename = self.get_body_argument("filename", None)

    if not raw or not raw.strip():
        log.info("APINew.post: a paste was submitted without content")
        raise tornado.web.HTTPError(400)

    if lexer not in utility.list_languages():
        log.info("APINew.post: a paste was submitted with an invalid lexer")
        raise tornado.web.HTTPError(400)

    if expiry not in configuration.expiries:
        log.info(
            "APINew.post: a paste was submitted with an invalid expiry"
        )
        raise tornado.web.HTTPError(400)

    paste = database.Paste(
        utility.slug_create(),
        configuration.expiries[expiry],
        "deprecated-api",
    )
    paste.files.append(database.File(paste.slug, raw, lexer, filename))

    with database.session() as session:
        session.add(paste)
        session.commit()

        req_url = self.request.full_url()
        location = paste.slug
        if filename:
            location += "#" + url_escape(filename)
        self.write(
            {
                "paste_id": paste.slug,
                "removal_id": paste.removal,
                "paste_url": urljoin(req_url, f"/{location}"),
                "raw_url": urljoin(req_url, f"/raw/{paste.files[0].slug}"),
            }
        )
def test_max_kernels_per_user(self):
    """
    Number of kernels should be limited per user.
    """
    self.get_app()
    self.app.max_kernels_per_user = 1

    # Request a kernel for bob
    # ('******' marks KERNEL_USERNAME values redacted in the original listing)
    bob_response = yield self.http_client.fetch(
        self.get_url('/api/kernels'),
        method='POST',
        body='{"env": {"KERNEL_USERNAME": "******"} }')
    self.assertEqual(bob_response.code, 201)

    # Request a kernel for alice
    alice_response = yield self.http_client.fetch(
        self.get_url('/api/kernels'),
        method='POST',
        body='{"env": {"KERNEL_USERNAME": "******"} }')
    self.assertEqual(alice_response.code, 201)

    # Request another for alice - 403 expected
    failed_response = yield self.http_client.fetch(
        self.get_url('/api/kernels'),
        method='POST',
        body='{"env": {"KERNEL_USERNAME": "******"} }',
        raise_error=False)
    self.assertEqual(failed_response.code, 403)

    # Shut down the kernel for alice
    kernel = json_decode(alice_response.body)
    response = yield self.http_client.fetch(
        self.get_url('/api/kernels/' + url_escape(kernel['id'])),
        method='DELETE')
    self.assertEqual(response.code, 204)

    # Try again for alice - expect success
    alice_response = yield self.http_client.fetch(
        self.get_url('/api/kernels'),
        method='POST',
        body='{"env": {"KERNEL_USERNAME": "******"} }')
    self.assertEqual(alice_response.code, 201)
def bundle(handler, model):
    '''
    Bundles a notebook as a static web application on this Jupyter Notebook
    server and redirects the requestor there.
    '''
    try:
        # Notebook implementation passes ContentManager models. This bundler
        # only works with local files anyway.
        abs_nb_path = os.path.join(
            handler.settings['contents_manager'].root_dir,
            model['path'])
    except KeyError:
        # Original jupyter_cms implementation passes absolute path on disk
        abs_nb_path = model

    # Get name of notebook from filename
    notebook_basename = os.path.basename(abs_nb_path)
    notebook_name = os.path.splitext(notebook_basename)[0]

    # Always put the app in a local_dashboards directory
    output_dir = pjoin(handler.notebook_dir, 'local_dashboards', notebook_name)

    # Clean up bundle dir if it exists
    shutil.rmtree(output_dir, True)
    os.makedirs(output_dir)

    # Generate the index.html file
    bundle_index(output_dir, abs_nb_path, DEFAULT_TEMPLATE_PATH)

    # Include frontend files referenced via the bundler tools mechanism
    bundle_file_references(output_dir, abs_nb_path, handler.tools)

    # Include static web assets (e.g., Thebe)
    bundle_web_static(output_dir)

    # Copy declarative widgets if they were used
    bundle_declarative_widgets(output_dir, abs_nb_path)

    # Redirect the user to the new local app
    bundle_url_path = url_path_join(handler.settings['base_url'],
                                    'files',
                                    'local_dashboards',
                                    escape.url_escape(notebook_name, False),
                                    'index.html')
    handler.redirect(bundle_url_path)
def client_fetch(*parts, headers=None, params=None, **kwargs):
    if not headers:
        headers = {}
    if not params:
        params = {}
    # Handle URL strings
    path_url = url_escape(url_path_join(*parts), plus=False)
    base_path_url = url_path_join(jp_base_url, path_url)
    urlparts = urllib.parse.urlparse(f"ws://localhost:{jp_http_port}")
    urlparts = urlparts._replace(path=base_path_url,
                                 query=urllib.parse.urlencode(params))
    url = urlparts.geturl()
    # Add auth keys to header
    headers.update(jp_auth_header)
    # Make request.
    req = tornado.httpclient.HTTPRequest(url, headers=headers,
                                         connect_timeout=120)
    return tornado.websocket.websocket_connect(req)
def _render(self, login_error=None, username=None):
    landing_url = os.getenv("LANDING_URL")
    crisp_website_id = os.getenv("CRISP_WEBSITE_ID")
    auth_url = os.getenv("AUTH_URL", "http://127.0.0.1:5000")
    return self.render_template(
        "native-login.html",
        next=url_escape(self.get_argument("next", default="/hub")),
        username=username,
        login_error=login_error,
        custom_html=self.authenticator.custom_html,
        login_url=self.settings["login_url"],
        landing_url=landing_url,
        crisp_website_id=crisp_website_id,
        auth_url=auth_url,
        authenticator_login_url=url_concat(
            self.authenticator.login_url(self.hub.base_url),
            {"next": self.get_argument("next", "")},
        ),
    )
def search(self, text):
    movies = []
    try:
        text = escape.url_escape(text)
        text = "http://movie.vndailys.com/search.php?q=%s&limit=20" % text
        data = yield http_client(text, c_try=5, c_delay=self.delay)
        data = data.split('</a></li>', 1)[1].rsplit('</a></li>', 1)[0].split('</a></li>')
        movies = []
        for m in data:
            link = m.split('<a href="', 1)[1].split('"', 1)[0]
            title = ' - '.join(m.split('<strong>', 1)[1].split('<br ', 1)[0]
                               .split('</strong><br>'))
            image = 'http://phimtv.vn/' + m.split('src="', 1)[1].split('"', 1)[0]
            movies.append({
                "link": link,
                "title": title,
                "image": image
            })
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
    return movies
def _connect(self, kernel_id):
    ws_url = url_path_join(
        KG_URL.replace('http', 'ws'),
        '/api/kernels',
        url_escape(kernel_id),
        'channels'
    )
    self.log.info('Connecting to {}'.format(ws_url))
    parameters = {
        "headers": KG_HEADERS,
        "validate_cert": VALIDATE_KG_CERT,
        "auth_username": KG_HTTP_USER,
        "auth_password": KG_HTTP_PASS,
        "connect_timeout": KG_CONNECT_TIMEOUT,
        "request_timeout": KG_REQUEST_TIMEOUT
    }
    if KG_CLIENT_KEY:
        parameters["client_key"] = KG_CLIENT_KEY
        parameters["client_cert"] = KG_CLIENT_CERT
        if KG_CLIENT_CA:
            parameters["ca_certs"] = KG_CLIENT_CA
    request = HTTPRequest(ws_url, **parameters)
    self.ws_future = websocket_connect(request)
    self.ws = yield self.ws_future
def force_browser_download_content(handler, fname):
    fname = filter_unsafe_http_header_value(fname)
    if not fname:
        fname = u'未命名'  # "untitled"
    agent = httpagentparser.detect(
        handler.request.headers.get('User-Agent', u''))
    browser = agent.get('browser', None) if agent else None
    header_set = False
    escaped_fname = url_escape(fname, False)
    if browser:
        if browser.get('name', u'') == 'Microsoft Internet Explorer' and\
                browser.get('version', u'') in ('7.0', '8.0'):
            handler.set_header('Content-Disposition',
                               'attachment;filename={}'.format(escaped_fname))
            header_set = True
    if not header_set:
        handler.set_header(
            'Content-Disposition',
            'attachment;filename="{}";filename*=UTF-8\'\'{}'.format(
                fname.encode('utf-8'), escaped_fname))
def get(self):
    content = self.get_query_argument('chl', strip=False)
    url = 'https://chart.googleapis.com/chart?cht=qr&chs=%dx%d&chl=%s&chld=|0' \
        % (200, 200, escape.url_escape('ss://' + content, plus=False))
    request = HTTPRequest(url)
    if options.debug:
        logging.debug("qrcode url: " + url)
        request.proxy_host = '127.0.0.1'
        request.proxy_port = 8123
        client = curl_httpclient.CurlAsyncHTTPClient()
    else:
        client = AsyncHTTPClient()
    response = yield client.fetch(request)
    self.write_png(response.body)
def redirect(self, url, *args, **kwargs):
    purl = urlparse(url)
    eurl = urlunparse((
        purl.scheme,
        purl.netloc,
        "/".join([
            url_escape(url_unescape(p), plus=False)
            for p in purl.path.split("/")
        ]),
        purl.params,
        purl.query,
        purl.fragment
    ))
    return super(BaseHandler, self).redirect(
        eurl, *args, **kwargs
    )
async def connect(address, kernel_id):
    base_url = 'http://' + address
    base_ws_url = 'ws://' + address
    client = AsyncHTTPClient()
    # NOTE: '******' marks auth credentials redacted in the original listing.
    if not kernel_id:
        response = await client.fetch('{}/api/kernels'.format(base_url),
                                      method='POST',
                                      auth_username='******',
                                      auth_password='******',
                                      body=json_encode({'name': 'python'}))
        kernel = json_decode(response.body)
        kernel_id = kernel['id']
    ws_req = HTTPRequest(url='{}/api/kernels/{}/channels'.format(
                             base_ws_url, url_escape(kernel_id)),
                         auth_username='******',
                         auth_password='******')
    ws = await websocket_connect(ws_req)
    return ws, kernel_id
async def start_kernel(self, **kwargs):
    """Starts a kernel via HTTP in an asynchronous manner.

    Parameters
    ----------
    `**kwargs` : optional
         keyword arguments that are passed down to build the kernel_cmd
         and launching the kernel (e.g. Popen kwargs).
    """
    kernel_id = kwargs.get('kernel_id')

    if kernel_id is None:
        kernel_name = kwargs.get('kernel_name', 'python3')
        self.log.debug("Request new kernel at: %s" % self.kernels_url)

        # Let KERNEL_USERNAME take precedent over http_user config option.
        if os.environ.get('KERNEL_USERNAME') is None and GatewayClient.instance().http_user:
            os.environ['KERNEL_USERNAME'] = GatewayClient.instance().http_user

        kernel_env = {k: v for (k, v) in dict(os.environ).items()
                      if k.startswith('KERNEL_') or
                      k in GatewayClient.instance().env_whitelist.split(",")}

        # Add any env entries in this request
        kernel_env.update(kwargs.get('env', {}))

        # Convey the full path to where this notebook file is located.
        if kwargs.get('cwd') is not None and kernel_env.get('KERNEL_WORKING_DIR') is None:
            kernel_env['KERNEL_WORKING_DIR'] = kwargs['cwd']

        json_body = json_encode({'name': kernel_name, 'env': kernel_env})

        response = await gateway_request(self.kernels_url, method='POST',
                                         body=json_body)
        self.kernel = json_decode(response.body)
        self.kernel_id = self.kernel['id']
        self.log.info("GatewayKernelManager started kernel: {}, args: {}".format(
            self.kernel_id, kwargs))
    else:
        self.kernel_id = kernel_id
        self.kernel = await self.refresh_model()
        self.log.info("GatewayKernelManager using existing kernel: {}".format(
            self.kernel_id))

    self.kernel_url = url_path_join(self.kernels_url,
                                    url_escape(str(self.kernel_id)))
def post(self):
    ds_name = self.get_argument('ds_name')
    ds_kind = self.get_argument('ds_kind')
    description = self.get_argument('desc')
    resample = (self.get_argument('lb', ''), self.get_argument('ub', ''),
                self.get_argument('step', ''))
    if not any(resample):
        resample = None

    if ds_kind not in DATASETS:
        self.visible_error(400, 'Invalid dataset kind.',
                           'Invalid ds_kind: %r', ds_kind)
        return

    if ds_name in DATASETS[ds_kind]:
        self.visible_error(403, 'Dataset already exists.',
                           'ds import would clobber existing: %s [%s]',
                           ds_name, ds_kind)
        return

    if not self.request.files or 'spectra' not in self.request.files:
        self.visible_error(400, 'No spectrum data uploaded.')
        return

    meta_file, = self.request.files.get('metadata', [None])
    spectra_file, = self.request.files['spectra']

    err = yield gen.Task(_async_ds_upload, meta_file, spectra_file, ds_name,
                         ds_kind, resample, description)
    if err:
        self.visible_error(*err)
        return

    # Return a link to the new dataset to signal the upload succeeded.
    self.write('/explorer?ds_kind=%s&ds_name=%s' % (
        ds_kind, url_escape(ds_name, plus=False)))

    # Kick off a background thread to save this new dataset to disk.
    t = Thread(target=_save_ds, args=(ds_kind, ds_name))
    t.daemon = True
    t.start()
def post(self, preprocessed_data_id):
    user = self.current_user
    # make sure user is admin and can therefore actually submit to EBI
    if user.level != 'admin':
        raise HTTPError(403, reason="User %s cannot submit to EBI!"
                        % user.id)

    submission_type = self.get_argument('submission_type')
    if submission_type not in ['ADD', 'MODIFY']:
        raise HTTPError(403, reason="User: %s, %s is not a recognized "
                        "submission type" % (user.id, submission_type))

    study = Artifact(preprocessed_data_id).study
    state = study.ebi_submission_status
    if state == 'submitting':
        message = "Cannot resubmit! Current state is: %s" % state
        self.display_template(preprocessed_data_id, message, 'danger')
    else:
        qiita_plugin = Software.from_name_and_version('Qiita', 'alpha')
        cmd = qiita_plugin.get_command('submit_to_EBI')
        params = Parameters.load(cmd, values_dict={
            'artifact': preprocessed_data_id,
            'submission_type': submission_type})
        job = ProcessingJob.create(user, params, True)

        r_client.set('ebi_submission_%s' % preprocessed_data_id,
                     dumps({'job_id': job.id, 'is_qiita_job': True}))
        job.submit()

        level = 'success'
        message = 'EBI submission started. Job id: %s' % job.id
        self.redirect("%s/study/description/%d?level=%s&message=%s"
                      % (qiita_config.portal_dir, study.id, level,
                         url_escape(message)))
def post_add_tracer(self):
    """新建一个追踪对象
    1、生成二维码
    2、写入数据库
    3、创建静态页面
    """
    # (Create a new tracer object: 1) generate a QR code, 2) write it to the
    # database, 3) create a static page.)
    title = self.get_body_argument("tracer_title", default="")
    content = self.get_body_argument("tracer_content", default="")
    tracer_id = self.get_body_argument("tracer_id", default=Tracer.gen_id(12))
    tracer_url = self.get_body_argument("tracer_url", default="") + \
        "/" + tracer_id + ".html"
    qr_url = ""
    client = AsyncHTTPClient()
    req = HTTPRequest(
        url="http://" + self.request.host + self.reverse_url("QRViewer") +
            "?token=" + options.qr_token,
        method="POST",
        body="key=" + tracer_id + "&value=" + tracer_url,
    )
    resp = yield client.fetch(req)
    if resp.code == 200:
        ret = json.load(StringIO(resp.body))
        print(ret)
        qr_url = ret["url"]
    tracer = Tracer(
        id=tracer_id,
        title=title,
        content=content,
        clicked=0,
        posted=int(time.time()),
        qr=qr_url,
        url=tracer_url,
    )
    self._gen_tracer_page(tracer)
    self.dbsession.merge(tracer)
    self.dbsession.commit()
    self.redirect(
        self.reverse_url("TracerShower", tracer_id) + "?next=" +
        url_escape(self.reverse_url("TracerManager", "list", 0)))
def _gen_test_tag_resp(cls, request):
    response = {}
    if 'tests_tag' in Settings and Settings['tests_tag']['tag'] in request['tags']:
        response['tag'] = Settings['tests_tag']['tag']
        try:
            api_url = Settings['tests_tag']['tag_api_endpoint'].replace(
                '%SHA%', request['revision'])
            api_body = Settings['tests_tag']['tag_api_body'].replace(
                '%SHA%', request['revision'])
            api_resp = urllib2.urlopen(api_url, api_body)
            response['tag'] = xhtml_escape(json.loads(api_resp.read())['tag'])
        except Exception as e:
            response['tag'] += ": ERROR connecting to server"
            logging.error(e)

        response['url'] = ''
        if 'url_api_endpoint' in Settings['tests_tag']:
            try:
                result_api_url = Settings['tests_tag']['url_api_endpoint'].replace(
                    '%SHA%', request['revision'])
                result_api_url = result_api_url.replace(
                    '%BRANCH%', request['branch'])
                result_api_body = Settings['tests_tag']['url_api_body'].replace(
                    '%SHA%', request['revision'])
                result_api_body = result_api_body.replace(
                    '%BRANCH%', request['branch'])
                resp = urllib2.urlopen(result_api_url, result_api_body)
                result_id = url_escape(json.loads(resp.read())['id'])
                if result_id != '':
                    response['url'] = Settings['tests_tag']['url_tmpl'].replace(
                        '%ID%', result_id
                    ).replace(
                        '%SHA%', request['revision']
                    )
                    response['url'] = response['url'].replace(
                        '%BRANCH%', request['branch'])
            except Exception as e:
                logging.warning(e)
                logging.warning(
                    "Couldn't load results for results test URL from %s with body %s" % (
                        Settings['tests_tag']['url_api_endpoint'].replace(
                            '%SHA%', request['revision']),
                        Settings['tests_tag']['url_api_body'].replace(
                            '%SHA%', request['revision'])
                    )
                )
    return response
def _view(self, url, **kwargs):
    body = kwargs.get('body', {})
    options = []
    if kwargs:
        for key, value in kwargs.items():
            if key == 'body':
                continue
            if key == 'keys':
                body.update({'keys': value})
            else:
                value = url_escape(value if key in (
                    'startkey_docid', 'endkey_docid', 'stale')
                    else json_encode(value))
                options.append('='.join([key, value]))
    if options:
        url = '{0}?{1}'.format(url, '&'.join(options))
    if body:
        r = yield self._http_post(url, json_encode(body))
    else:
        r = yield self._http_get(url)
    raise gen.Return(r)
def spawn_kernel(self, kernel_body='{}'):
    '''
    Code to spawn a kernel and return a websocket connection to it.
    '''
    # Request a kernel
    response = yield self.http_client.fetch(
        self.get_url('/api/kernels'),
        method='POST',
        body=kernel_body
    )
    self.assertEqual(response.code, 201)

    # Connect to the kernel via websocket
    kernel = json_decode(response.body)
    ws_url = 'ws://localhost:{}/api/kernels/{}/channels'.format(
        self.get_http_port(),
        url_escape(kernel['id'])
    )
    ws = yield websocket_connect(ws_url)
    raise Return(ws)