def test_old_files_redirect(self):
    """pre-2.0 'files/' prefixed links are properly redirected"""
    nbdir = self.notebook_dir.name
    base = self.base_url()
    # Create 'files/' subdirectories at the root and one level down.
    os.mkdir(pjoin(nbdir, 'files'))
    os.makedirs(pjoin(nbdir, 'sub', 'files'))
    for prefix in ('', 'sub'):
        # f1 and f2 exist inside files/; f2 and f3 also/only exist
        # beside files/, so the fallback behavior can be distinguished.
        with open(pjoin(nbdir, prefix, 'files', 'f1.txt'), 'w') as f:
            f.write(prefix + '/files/f1')
        with open(pjoin(nbdir, prefix, 'files', 'f2.txt'), 'w') as f:
            f.write(prefix + '/files/f2')
        with open(pjoin(nbdir, prefix, 'f2.txt'), 'w') as f:
            f.write(prefix + '/f2')
        with open(pjoin(nbdir, prefix, 'f3.txt'), 'w') as f:
            f.write(prefix + '/f3')
        # f1 exists under files/: served as-is.
        url = url_path_join(base, 'notebooks', prefix, 'files', 'f1.txt')
        r = requests.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.text, prefix + '/files/f1')
        # f2 exists both under and beside files/: the files/ copy wins.
        url = url_path_join(base, 'notebooks', prefix, 'files', 'f2.txt')
        r = requests.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.text, prefix + '/files/f2')
        # f3 only exists beside files/: the pre-2.0 fallback strips
        # the 'files/' segment and serves the sibling file.
        url = url_path_join(base, 'notebooks', prefix, 'files', 'f3.txt')
        r = requests.get(url)
        self.assertEqual(r.status_code, 200)
        self.assertEqual(r.text, prefix + '/f3')
def test_redirect(self):
    """/tree URLs for files redirect to the handler matching the extension."""
    # Notebooks under /tree land on the /notebooks editor.
    r = requests.get(url_path_join(self.base_url(), 'tree/foo/bar.ipynb'))
    self.assertEqual(r.url, self.base_url() + 'notebooks/foo/bar.ipynb')
    # Any other file under /tree lands on the raw /files handler.
    r = requests.get(url_path_join(self.base_url(), 'tree/foo/baz.txt'))
    self.assertEqual(
        r.url, url_path_join(self.base_url(), 'files/foo/baz.txt'))
def get(self, path=''):
    """Redirect a /tree request: directories go to /tree, files to /files.

    Pre-2.0 'files/'-prefixed links are honored by retrying the lookup
    with the 'files' path segment removed when the literal path 404s.

    Raises
    ------
    web.HTTPError(404) if neither the literal nor the stripped path exists.
    """
    cm = self.contents_manager
    if cm.path_exists(path):
        # it's a *directory*, redirect to /tree
        url = url_path_join(self.base_url, 'tree', path)
    else:
        orig_path = path
        # otherwise, redirect to /files
        parts = path.split('/')
        path = '/'.join(parts[:-1])
        name = parts[-1]
        if not cm.file_exists(name=name, path=path) and 'files' in parts:
            # redirect without files/ iff it would 404
            # this preserves pre-2.0-style 'files/' links
            # (log.warn is a deprecated alias of log.warning)
            self.log.warning("Deprecated files/ URL: %s", orig_path)
            parts.remove('files')
            path = '/'.join(parts[:-1])
        if not cm.file_exists(name=name, path=path):
            raise web.HTTPError(404)
        url = url_path_join(self.base_url, 'files', path, name)
    url = url_escape(url)
    self.log.debug("Redirecting %s to %s", self.request.path, url)
    self.redirect(url)
def redirect_to_files(self, path):
    """Redirect a directory request to /tree, or a file request to /files.

    Kept as a reusable method so it can be called from other handlers.
    (Note: despite the original docstring, this is an instance method,
    not a static method.)  Pre-2.0-style 'files/' links are honored by
    dropping the 'files' path segment when the literal path would 404.

    Raises
    ------
    web.HTTPError(404) if neither the literal nor the stripped path exists.
    """
    cm = self.contents_manager
    if cm.dir_exists(path):
        # it's a *directory*, redirect to /tree
        url = url_path_join(self.base_url, 'tree', path)
    else:
        orig_path = path
        # otherwise, redirect to /files
        parts = path.split('/')
        if not cm.file_exists(path=path) and 'files' in parts:
            # redirect without files/ iff it would 404
            # this preserves pre-2.0-style 'files/' links
            # (log.warn is a deprecated alias of log.warning)
            self.log.warning("Deprecated files/ URL: %s", orig_path)
            parts.remove('files')
            path = '/'.join(parts)
        if not cm.file_exists(path=path):
            raise web.HTTPError(404)
        url = url_path_join(self.base_url, 'files', path)
    url = url_escape(url)
    self.log.debug("Redirecting %s to %s", self.request.path, url)
    self.redirect(url)
def get(self):
    """Run a full-text search and respond with result dicts plus URLs.

    Query parameters:
      qs       -- the search query (required)
      reindex  -- 'true' (default) to refresh the index before searching
    """
    query_string = self.get_query_argument('qs')
    # The comparison already yields a bool; the original bool(...) wrapper
    # was redundant.
    reindex = self.get_query_argument('reindex', 'true') == 'true'
    if reindex:
        self.index.update_index()
    results, total = self.index.search(query_string)
    for result in results:
        # Strip the working-directory prefix to get a server-relative path.
        rel_path = result['path'][self.work_dir_len:]
        if rel_path.endswith('.ipynb'):
            # take it at face value that the extension implies notebook
            url = url_path_join(self.base_url, 'notebooks', rel_path)
        else:
            url = url_path_join(self.base_url, 'edit', rel_path)
        # Add URLs
        result['url'] = url
        result['tree_url'] = url_path_join(self.base_url, 'tree',
                                           os.path.dirname(rel_path))
        # Add relative paths
        result['rel_dirname'] = os.path.dirname(rel_path)
        result['rel_path'] = rel_path
    self.write(dict(results=results, total=total))
    self.finish()
def test_hidden_files(self):
    # Dot-prefixed files/dirs must not be served by /files (expect 404).
    not_hidden = [
        u'å b',
        u'å b/ç. d',
    ]
    hidden = [
        u'.å b',
        u'å b/.ç d',
    ]
    dirs = not_hidden + hidden
    nbdir = self.notebook_dir.name
    # Populate every directory with one visible and one hidden file.
    for d in dirs:
        path = pjoin(nbdir, d.replace('/', os.sep))
        if not os.path.exists(path):
            os.mkdir(path)
        with open(pjoin(path, 'foo'), 'w') as f:
            f.write('foo')
        with open(pjoin(path, '.foo'), 'w') as f:
            f.write('.foo')
    url = self.base_url()
    for d in not_hidden:
        path = pjoin(nbdir, d.replace('/', os.sep))
        # Visible file in a visible dir: served normally.
        r = requests.get(url_path_join(url, 'files', d, 'foo'))
        r.raise_for_status()
        self.assertEqual(r.text, 'foo')
        # Hidden file in a visible dir: 404.
        r = requests.get(url_path_join(url, 'files', d, '.foo'))
        self.assertEqual(r.status_code, 404)
    for d in hidden:
        path = pjoin(nbdir, d.replace('/', os.sep))
        # Anything inside a hidden dir: 404, hidden or not.
        for foo in ('foo', '.foo'):
            r = requests.get(url_path_join(url, 'files', d, foo))
            self.assertEqual(r.status_code, 404)
def test_hidden_files(self):
    """Hidden files and files inside hidden directories must return 403."""
    # The original wrapped single strings in pjoin(), which is a no-op for
    # one argument; plain literals say the same thing more clearly (and
    # match the sibling 404-variant of this test).
    not_hidden = [u"å b", u"å b/ç. d"]
    hidden = [u".å b", u"å b/.ç d"]
    dirs = not_hidden + hidden
    nbdir = self.notebook_dir.name
    # Populate every directory with one visible and one hidden file.
    for d in dirs:
        path = pjoin(nbdir, d.replace("/", os.sep))
        if not os.path.exists(path):
            os.mkdir(path)
        with open(pjoin(path, "foo"), "w") as f:
            f.write("foo")
        with open(pjoin(path, ".foo"), "w") as f:
            f.write(".foo")
    url = self.base_url()
    for d in not_hidden:
        path = pjoin(nbdir, d.replace("/", os.sep))
        # Visible file in a visible dir: served normally.
        r = requests.get(url_path_join(url, "files", d, "foo"))
        r.raise_for_status()
        self.assertEqual(r.content, b"foo")
        # Hidden file in a visible dir: forbidden.
        r = requests.get(url_path_join(url, "files", d, ".foo"))
        self.assertEqual(r.status_code, 403)
    for d in hidden:
        path = pjoin(nbdir, d.replace("/", os.sep))
        # Anything inside a hidden dir: forbidden, hidden or not.
        for foo in ("foo", ".foo"):
            r = requests.get(url_path_join(url, "files", d, foo))
            self.assertEqual(r.status_code, 403)
def get(self, name, user_path):
    # Spawn-pending page: poll the spawner and either return logs as JSON
    # (?get_logs), redirect to the running single-user server, or render
    # the waiting page.
    current_user = self.get_current_user()
    if current_user and current_user.name == name:
        # logged in, work with spawner
        is_log_request = self.get_argument('get_logs', False)
        is_failed = False
        is_up = False
        if current_user.spawner:
            spawner = current_user.spawner
            # NOTE(review): is_running is awaited but never used below.
            is_running = yield spawner.is_running()
            log_lines = spawner.user_log
            is_failed = spawner.is_failed
            is_up = spawner.is_up
            if spawner.is_empty and not is_failed:
                # nothing to wait for: back to the home page
                self.redirect(url_path_join(self.hub.server.base_url, 'home'))
                return
        else:
            log_lines = []
        if current_user.stop_pending and not is_failed:
            self.redirect(url_path_join(self.hub.server.base_url, 'home'))
            return
        if is_log_request:
            # JSON poll response: log lines plus a failed/done flag.
            resp = {
                'log': log_lines
            }
            if is_failed:
                resp.update({
                    'failed': 1
                })
            elif is_up:
                resp.update({
                    'done': 1
                })
            self.finish(json_encode(resp))
        else:
            if is_up:
                # server is ready: set the cookie and send the browser
                # straight to the user's server (absolute URL).
                self.set_login_cookie(current_user)
                target = '%s://%s/user/%s' % (
                    self.request.protocol,
                    self.request.host,
                    current_user.name
                )
                self.log.info('redirecting to %s' % target)
                self.redirect(target)
                return
            # still spawning: render the wait page with analytics IDs
            metrica = MetricaIdsMixin()
            g_id = metrica.g_analitics_id
            ya_id = metrica.ya_metrica_id
            html = self.render_template(
                "spawn_pending.html",
                version=__version__,
                g_analitics_id=g_id,
                ya_metrica_id=ya_id
            )
            self.finish(html)
    else:
        # logged in as a different user, redirect
        target = url_path_join(self.base_url, 'login')
        self.redirect(target)
def get(self, name, user_path):
    # Spawn-pending page (path-relative redirect variant): poll the
    # spawner and either return logs as JSON (?get_logs), redirect to the
    # running single-user server, or render the waiting page.
    current_user = self.get_current_user()
    if current_user and current_user.name == name:
        # logged in, work with spawner
        is_log_request = self.get_argument('get_logs', False)
        is_failed = False
        is_up = False
        if current_user.spawner:
            spawner = current_user.spawner
            # NOTE(review): is_running is awaited but never used below.
            is_running = yield spawner.is_running()
            log_lines = spawner.user_log
            is_failed = spawner.is_failed
            is_up = spawner.is_up
            if spawner.is_empty and not is_failed:
                # nothing to wait for: back to the home page
                self.redirect(url_path_join(self.hub.server.base_url, 'home'))
                return
        else:
            log_lines = []
        if current_user.stop_pending and not is_failed:
            self.redirect(url_path_join(self.hub.server.base_url, 'home'))
            return
        if is_log_request:
            # JSON poll response: log lines plus a failed/done flag.
            resp = {
                'log': log_lines
            }
            if is_failed:
                resp.update({
                    'failed': 1
                })
            elif is_up:
                resp.update({
                    'done': 1
                })
            self.finish(json_encode(resp))
        else:
            if is_up:
                # server is ready: set the cookie and redirect using a
                # server-relative path (unlike the absolute-URL variant).
                self.set_login_cookie(current_user)
                target = '/user/%s/' % (
                    current_user.name
                )
                self.log.info('redirecting to %s' % target)
                self.redirect(target)
                return
            # still spawning: render the wait page with analytics IDs
            metrica = MetricaIdsMixin()
            g_id = metrica.g_analitics_id
            ya_id = metrica.ya_metrica_id
            html = self.render_template(
                "spawn_pending.html",
                version=__version__,
                g_analitics_id=g_id,
                ya_metrica_id=ya_id
            )
            self.finish(html)
    else:
        # logged in as a different user, redirect
        target = url_path_join(self.base_url, 'login')
        self.redirect(target)
def init_handlers(nbapp):
    """Register the git-log / notebook-diff API routes on the web app."""
    webapp = nbapp.web_app
    base_url = webapp.settings['base_url']
    routes = [
        (r"/api/contents%s/gitlog" % path_regex, GitLogHandler),
        (r"/api/contents%s/nbdiff" % path_regex, NotebookDiffHandler),
        (r"/api/contents%s/nbdiff/%s" % (path_regex, _commit_regex),
         NotebookDiffHandler),
        (r"/api/contents%s/nbdiff%s" % (path_regex, _path2_regex),
         NotebookDiffHandler),
    ]
    webapp.add_handlers(".*$", [
        (url_path_join(base_url, pattern), handler)
        for pattern, handler in routes
    ])
def _req(self, verb, section, body=None):
    """Send a request to the config API section; raise on HTTP error."""
    target = url_path_join(self.base_url, 'api/config', section)
    response = requests.request(verb, target, data=body)
    response.raise_for_status()
    return response
def _req(self, verb, path, body=None, params=None):
    """Send a request to the contents API; raise on HTTP error."""
    target = url_path_join(self.base_url, 'api/contents', path)
    response = requests.request(verb, target, data=body, params=params)
    response.raise_for_status()
    return response
def verify_token(self, cookie_name, encrypted_cookie):
    """Check the Hub's authorizations API for this cookie.

    Results (including 404 misses) are memoized in
    settings['cookie_cache'] so each cookie is verified upstream at
    most once per process lifetime.

    Returns the auth data dict, or None if the cookie is not valid.

    Raises
    ------
    HTTPError(500/502) on permission or upstream failures.
    """
    cookie_cache = self.settings['cookie_cache']
    if encrypted_cookie in cookie_cache:
        # we've seen this token before, don't ask upstream again
        return cookie_cache[encrypted_cookie]
    hub_api_url = self.settings['hub_api_url']
    hub_api_key = self.settings['hub_api_key']
    r = requests.get(url_path_join(
            hub_api_url,
            "authorizations/cookie",
            cookie_name,
            quote(encrypted_cookie, safe=''),
        ),
        headers={'Authorization': 'token %s' % hub_api_key},
    )
    if r.status_code == 404:
        # unknown/expired cookie; cache the miss as well
        data = None
    elif r.status_code == 403:
        self.log.error(
            "I don't have permission to verify cookies, my auth token may have expired: [%i] %s",
            r.status_code, r.reason)
        raise HTTPError(500, "Permission failure checking authorization, I may need to be restarted")
    elif r.status_code >= 500:
        self.log.error("Upstream failure verifying auth token: [%i] %s",
                       r.status_code, r.reason)
        raise HTTPError(502, "Failed to check authorization (upstream problem)")
    elif r.status_code >= 400:
        # log.warn is a deprecated alias of log.warning
        self.log.warning("Failed to check authorization: [%i] %s",
                         r.status_code, r.reason)
        raise HTTPError(500, "Failed to check authorization")
    else:
        data = r.json()
    cookie_cache[encrypted_cookie] = data
    return data
def test_download(self):
    """?download=1 forces a Content-Disposition: attachment header."""
    nbdir = self.notebook_dir.name
    base = self.base_url()
    text = 'hello'
    with open(pjoin(nbdir, 'test.txt'), 'w') as f:
        f.write(text)
    # A plain GET serves the file inline (no attachment disposition).
    r = requests.get(url_path_join(base, 'files', 'test.txt'))
    self.assertNotIn('attachment',
                     r.headers.get('Content-Disposition', ''))
    # With download=1 the server marks the response as an attachment
    # and names the file.
    r = requests.get(url_path_join(base, 'files', 'test.txt') + '?download=1')
    disposition = r.headers.get('Content-Disposition', '')
    self.assertIn('attachment', disposition)
    self.assertIn('filename="test.txt"', disposition)
def post(self):
    # Creates a new session
    #(unless a session already exists for the named nb)
    sm = self.session_manager
    nbm = self.notebook_manager
    km = self.kernel_manager
    model = self.get_json_body()
    if model is None:
        raise web.HTTPError(400, "No JSON data provided")
    try:
        name = model['notebook']['name']
    except KeyError:
        raise web.HTTPError(400, "Missing field in JSON data: name")
    try:
        path = model['notebook']['path']
    except KeyError:
        raise web.HTTPError(400, "Missing field in JSON data: path")
    # Check to see if session exists
    if sm.session_exists(name=name, path=path):
        model = sm.get_session(name=name, path=path)
    else:
        # Start a kernel whose cwd is the notebook's on-disk directory.
        kernel_id = km.start_kernel(cwd=nbm.get_os_path(path))
        model = sm.create_session(name=name, path=path,
                                  kernel_id=kernel_id, ws_url=self.ws_url)
    # 201 Created with a Location header pointing at the session resource.
    location = url_path_join(self.base_url, 'api', 'sessions', model['id'])
    self.set_header('Location', url_escape(location))
    self.set_status(201)
    self.finish(json.dumps(model, default=date_default))
def get(self):
    # Home page: optionally trigger fork/push of the user's GitHub repo,
    # then render home.html with the spawner's repository state.
    user = self.get_current_user()
    repourl = self.get_argument('repourl', '')
    # NOTE(review): get_argument returns a string when present, so these
    # flags are truthy for any non-empty value — confirm intended.
    do_fork = self.get_argument('do_fork', False)
    do_push = self.get_argument('do_push', False)
    notify_message = self.get_argument('message', '')
    if repourl:
        # A repourl query sends the user straight to the spawn page.
        self.redirect(
            url_concat(url_path_join(self.hub.server.base_url, 'spawn'),
                       {'repourl': repourl}))
        return
    branch_name = commit_sha = None
    repo_url = ''
    fork_exists = False
    repository_changed = False
    if user.running:
        branch_name = user.spawner.branch_name
        commit_sha = user.spawner.commit_sha
        repo_url = user.spawner.repo_url
    if user.running and getattr(user, 'login_service', '') == 'github':
        if do_fork:
            self.log.info('Will fork %s' % user.spawner.repo_url)
            result = yield fork_repo(user.spawner, user.token)
            self.redirect(
                url_concat('/hub/home',
                           dict(message='Successfully forked')))
            return
        if do_push:
            self.log.info('Will push to fork')
            result = yield push_repo(user, user.spawner, user.token)
            result = str(result, 'ascii')
            self.log.info('Got after push: %s' % result)
            # Report the raw result unless it is the expected marker.
            message = 'Successfully pushed'
            if 'Update through everware' not in result:
                message = result
            self.redirect(
                url_concat('/hub/home',
                           dict(message='Push result: %s' % message)))
            return
        fork_exists = yield does_fork_exist(user.name, user.spawner,
                                            user.token)
        repository_changed = yield is_repository_changed(user)
    if hasattr(user, 'login_service'):
        loginservice = user.login_service
    else:
        loginservice = 'none'
    html = self.render_template('home.html',
                                user=user,
                                repourl=repo_url,
                                login_service=loginservice,
                                fork_exists=fork_exists,
                                repository_changed=repository_changed,
                                branch_name=branch_name,
                                commit_sha=commit_sha,
                                notify_message=notify_message,
                                version=__version__)
    self.finish(html)
def _req(self, verb, path, body=None):
    """Send a request to the notebooks API; raise on HTTP error."""
    target = url_path_join(self.base_url, 'api/notebooks', path)
    response = requests.request(verb, target, data=body)
    response.raise_for_status()
    return response
def post(self):
    # Creates a new session
    # (unless a session already exists for the named nb)
    sm = self.session_manager
    nbm = self.notebook_manager
    km = self.kernel_manager
    model = self.get_json_body()
    if model is None:
        raise web.HTTPError(400, "No JSON data provided")
    try:
        name = model["notebook"]["name"]
    except KeyError:
        raise web.HTTPError(400, "Missing field in JSON data: name")
    try:
        path = model["notebook"]["path"]
    except KeyError:
        raise web.HTTPError(400, "Missing field in JSON data: path")
    # Check to see if session exists
    if sm.session_exists(name=name, path=path):
        model = sm.get_session(name=name, path=path)
    else:
        # allow nbm to specify kernels cwd
        kernel_path = nbm.get_kernel_path(name=name, path=path)
        kernel_id = km.start_kernel(path=kernel_path)
        model = sm.create_session(name=name, path=path,
                                  kernel_id=kernel_id)
    # 201 Created with a Location header pointing at the session resource.
    location = url_path_join(self.base_url, "api", "sessions", model["id"])
    self.set_header("Location", url_escape(location))
    self.set_status(201)
    self.finish(json.dumps(model, default=date_default))
def test_from_post_zip(self):
    """Exporting a notebook to latex via POST yields a zip attachment."""
    model_url = url_path_join(self.base_url(),
                              "api/contents/foo/testnb.ipynb")
    model = requests.get(model_url).json()
    r = self.nbconvert_api.from_post(format="latex", nbmodel=model)
    self.assertIn(u"application/zip", r.headers["Content-Type"])
    self.assertIn(u".zip", r.headers["Content-Disposition"])
def get(self, path):
    """Redirect the deprecated /api/notebooks endpoint to /api/contents."""
    # log.warn is a deprecated alias of log.warning
    self.log.warning("/api/notebooks is deprecated, use /api/contents")
    self.redirect(url_path_join(
        self.base_url,
        'api/contents',
        path
    ))
def post(self):
    # Creates a new session
    #(unless a session already exists for the named nb)
    sm = self.session_manager
    model = self.get_json_body()
    if model is None:
        raise web.HTTPError(400, "No JSON data provided")
    try:
        name = model['notebook']['name']
    except KeyError:
        raise web.HTTPError(400, "Missing field in JSON data: notebook.name")
    try:
        path = model['notebook']['path']
    except KeyError:
        raise web.HTTPError(400, "Missing field in JSON data: notebook.path")
    try:
        # kernel name is mandatory in this API version
        kernel_name = model['kernel']['name']
    except KeyError:
        raise web.HTTPError(400, "Missing field in JSON data: kernel.name")
    # Check to see if session exists
    if sm.session_exists(name=name, path=path):
        model = sm.get_session(name=name, path=path)
    else:
        model = sm.create_session(name=name, path=path,
                                  kernel_name=kernel_name)
    # 201 Created with a Location header pointing at the session resource.
    location = url_path_join(self.base_url, 'api', 'sessions', model['id'])
    self.set_header('Location', url_escape(location))
    self.set_status(201)
    self.finish(json.dumps(model, default=date_default))
def _get_bluemix_app(self, abs_nb_path):
    '''
    Creates a temporary git repository containing the application bundle.
    Redirects the user's browser to the Bluemix deploy URL with a pointer
    back to the git repository.

    :param abs_nb_path: absolute path of the notebook to bundle
    '''
    # Build the app bundle and turn it into a bare git repo.
    md = self._create_app_bundle(abs_nb_path, '.git')
    converter.add_cf_manifest(
        md['bundle_dir'],
        md['kernel_server'],
        md['notebook_basename'],
        md['tmpnb_mode']
    )
    converter.to_git_repository(md['bundle_dir'])
    # The jupyter_server already includes the base_url
    bundle_url_path = url_path_join('bundle',
        md['bundle_id'],
        md['notebook_basename'] + '.git'
    )
    # Include repository URL as the argument to deployer
    repository = escape.url_escape(md['jupyter_server'] + bundle_url_path)
    self.redirect(BLUEMIX_DEPLOY+repository)
def get(self, path):
    '''
    Redirect relative requests for components to the global store.
    Makes components easier to relocate later.
    '''
    target = url_path_join(self.settings['base_url'],
                           'urth_components', path)
    # Permanent (301) so browsers cache the new location.
    self.redirect(target, permanent=True)
def post(self):
    # Creates a new session
    #(unless a session already exists for the named nb)
    sm = self.session_manager
    # NOTE(review): cm and km are bound but not used in this handler.
    cm = self.contents_manager
    km = self.kernel_manager
    model = self.get_json_body()
    if model is None:
        raise web.HTTPError(400, "No JSON data provided")
    try:
        name = model['notebook']['name']
    except KeyError:
        raise web.HTTPError(400, "Missing field in JSON data: notebook.name")
    try:
        path = model['notebook']['path']
    except KeyError:
        raise web.HTTPError(400, "Missing field in JSON data: notebook.path")
    try:
        kernel_name = model['kernel']['name']
    except KeyError:
        # kernel name is optional here; fall back to the server default
        self.log.debug("No kernel name specified, using default kernel")
        kernel_name = None
    # Check to see if session exists
    if sm.session_exists(name=name, path=path):
        model = sm.get_session(name=name, path=path)
    else:
        model = sm.create_session(name=name, path=path,
                                  kernel_name=kernel_name)
    # 201 Created with a Location header pointing at the session resource.
    location = url_path_join(self.base_url, 'api', 'sessions', model['id'])
    self.set_header('Location', url_escape(location))
    self.set_status(201)
    self.finish(json.dumps(model, default=date_default))
def _req(self, verb, path, body=None):
    """Send a request relative to base_url; raise on HTTP error."""
    target = url_path_join(self.base_url, path)
    response = requests.request(verb, target, data=body)
    response.raise_for_status()
    return response
def test_from_post_zip(self):
    """latex export via POST returns a zip attachment (notebooks API)."""
    model_url = url_path_join(self.base_url(),
                              'api/notebooks/foo/testnb.ipynb')
    model = requests.get(model_url).json()
    r = self.nbconvert_api.from_post(format='latex', nbmodel=model)
    self.assertIn(u'application/zip', r.headers['Content-Type'])
    self.assertIn(u'.zip', r.headers['Content-Disposition'])
def test_from_post_zip(self):
    """latex export via POST returns a zip attachment (contents API)."""
    model_url = url_path_join(self.base_url(),
                              'api/contents/foo/testnb.ipynb')
    model = requests.get(model_url).json()
    r = self.nbconvert_api.from_post(format='latex', nbmodel=model)
    self.assertIn(u'application/zip', r.headers['Content-Type'])
    self.assertIn(u'.zip', r.headers['Content-Disposition'])
def post(self):
    """Start a new kernel; respond 201 with its model and a Location header."""
    km = self.kernel_manager
    kernel_id = km.start_kernel()
    model = km.kernel_model(kernel_id)
    self.set_header('Location', url_escape(
        url_path_join(self.base_url, 'api', 'kernels', kernel_id)))
    self.set_status(201)
    self.finish(jsonapi.dumps(model))
def load_jupyter_server_extension(nb_app):
    """Register the /search endpoint on the notebook server."""
    web_app = nb_app.web_app
    pattern = url_path_join(web_app.settings['base_url'], '/search')
    web_app.add_handlers('.*$', [
        (pattern, SearchHandler, dict(work_dir=nb_app.notebook_dir)),
    ])
def post(self):
    """Start a new kernel; respond 201 with its model (incl. ws_url)."""
    km = self.kernel_manager
    kernel_id = km.start_kernel()
    model = km.kernel_model(kernel_id, self.ws_url)
    self.set_header('Location', url_escape(
        url_path_join(self.base_url, 'api', 'kernels', kernel_id)))
    self.set_status(201)
    self.finish(jsonapi.dumps(model))
def _req(self, verb, section, body=None):
    """Send a request to the config API section; raise on HTTP error."""
    target = url_path_join(self.base_url, 'api/config', section)
    response = requests.request(verb, target, data=body)
    response.raise_for_status()
    return response
def get(self):
    # Home page: handle fork/push actions for GitHub-backed spawners,
    # then render home.html with the repository state.
    user = self.get_current_user()
    repourl = self.get_argument('repourl', '')
    # NOTE(review): get_argument returns a string when present, so these
    # flags are truthy for any non-empty value — confirm intended.
    do_fork = self.get_argument('do_fork', False)
    do_push = self.get_argument('do_push', False)
    if repourl:
        # A repourl query sends the user straight to the spawn page.
        self.log.info('Got %s in home' % repourl)
        self.redirect(url_concat(
            url_path_join(self.hub.server.base_url, 'spawn'),
            {
                'repourl': repourl
            }
        ))
        return
    if user.running and hasattr(user, "login_service") and user.login_service == "github":
        if do_fork:
            self.log.info('Will fork %s' % user.spawner.repo_url)
            yield _fork_github_repo(
                user.spawner.repo_url,
                user.token,
            )
            self.redirect('/hub/home')
            return
        if do_push:
            self.log.info('Will push to fork')
            yield _push_github_repo(
                user,
                user.spawner.repo_url,
                user.token,
            )
            self.redirect('/hub/home')
            return
        repo_url = user.spawner.repo_url
        fork_exists = yield _github_fork_exists(
            user.name,
            user.spawner.repo_url,
            user.token,
        )
        repository_changed = yield _repository_changed(user)
    else:
        # not running (or not GitHub): render with empty repo state
        repo_url = ''
        fork_exists = False
        repository_changed = False
    if hasattr(user, 'login_service'):
        loginservice = user.login_service
    else:
        loginservice = 'none'
    html = self.render_template('home.html',
                                user=user,
                                repourl=repo_url,
                                login_service=loginservice,
                                fork_exists=fork_exists,
                                repository_changed=repository_changed,
                                )
    self.finish(html)
def get(self, name, user_path):
    # Spawn-pending page: return logs as JSON (?get_logs) or render the
    # wait page; sets the login cookie once the spawn is done.
    current_user = self.get_current_user()
    if current_user and current_user.name == name:
        # logged in, work with spawner
        if current_user.stop_pending:
            self.redirect(url_path_join(self.hub.server.base_url, 'home'))
            return
        is_log_request = self.get_argument('get_logs', False)
        is_failed = False
        is_done = False
        if current_user.spawner:
            log_lines = current_user.spawner.user_log
            is_failed = current_user.spawner.is_failed
            is_running = yield current_user.spawner.is_running()
            # done == spawn finished, didn't fail, and server is running
            if not current_user.spawn_pending and not is_failed and is_running:
                is_done = True
        else:
            log_lines = []
        if is_log_request:
            # JSON poll response: log lines plus a failed/done flag.
            resp = {
                'log': log_lines
            }
            if is_failed:
                resp.update({
                    'failed': 1
                })
            elif is_done:
                resp.update({
                    'done': 1
                })
            self.finish(json_encode(resp))
        else:
            if is_done:
                self.set_login_cookie(current_user)
            html = self.render_template(
                "spawn_pending.html",
                user=current_user,
                need_wait=int(is_done)
            )
            self.finish(html)
    else:
        # logged in as a different user, redirect
        # NOTE(review): current_user may be None on this branch, in which
        # case .name would raise AttributeError — confirm reachable state.
        target = url_path_join(self.base_url, 'user',
                               current_user.name, user_path or '')
        self.redirect(target)
def test_get_nb_no_content(self):
    """Reading with content=False returns metadata but null content."""
    for d, name in self.dirs_nbs:
        path = url_path_join(d, name + '.ipynb')
        model = self.api.read(path, content=False).json()
        self.assertEqual(model['name'], u'%s.ipynb' % name)
        self.assertEqual(model['path'], path)
        self.assertEqual(model['type'], 'notebook')
        # The key must be present, but its value must be null.
        self.assertIn('content', model)
        self.assertEqual(model['content'], None)
def post(self, path="", name=None):
    """post creates a new checkpoint"""
    checkpoint = self.contents_manager.create_checkpoint(name, path)
    payload = json.dumps(checkpoint, default=date_default)
    location = url_path_join(self.base_url, "api/contents", path, name,
                             "checkpoints", checkpoint["id"])
    self.set_header("Location", url_escape(location))
    self.set_status(201)
    self.finish(payload)
def location_url(self, path):
    """Return the full URL location of a file.

    Parameters
    ----------
    path : unicode
        The API path of the file, such as "foo/bar.txt".
    """
    location = url_path_join(self.base_url, "api", "contents", path)
    return url_escape(location)
def get(self):
    # Home page: handle fork/push actions for GitHub-backed spawners,
    # then render home.html with the repository state.
    user = self.get_current_user()
    repourl = self.get_argument('repourl', '')
    # NOTE(review): get_argument returns a string when present, so these
    # flags are truthy for any non-empty value — confirm intended.
    do_fork = self.get_argument('do_fork', False)
    do_push = self.get_argument('do_push', False)
    if repourl:
        # A repourl query sends the user straight to the spawn page.
        self.log.info('Got %s in home' % repourl)
        self.redirect(
            url_concat(url_path_join(self.hub.server.base_url, 'spawn'),
                       {'repourl': repourl}))
        return
    if user.running and hasattr(
            user, "login_service") and user.login_service == "github":
        if do_fork:
            self.log.info('Will fork %s' % user.spawner.repo_url)
            yield _fork_github_repo(
                user.spawner.repo_url,
                user.token,
            )
            self.redirect('/hub/home')
            return
        if do_push:
            self.log.info('Will push to fork')
            yield _push_github_repo(
                user,
                user.spawner.repo_url,
                user.token,
            )
            self.redirect('/hub/home')
            return
        repo_url = user.spawner.repo_url
        fork_exists = yield _github_fork_exists(
            user.name,
            user.spawner.repo_url,
            user.token,
        )
        repository_changed = yield _repository_changed(user)
    else:
        # not running (or not GitHub): render with empty repo state
        repo_url = ''
        fork_exists = False
        repository_changed = False
    if hasattr(user, 'login_service'):
        loginservice = user.login_service
    else:
        loginservice = 'none'
    html = self.render_template(
        'home.html',
        user=user,
        repourl=repo_url,
        login_service=loginservice,
        fork_exists=fork_exists,
        repository_changed=repository_changed,
    )
    self.finish(html)
def _check_created(self, resp, path, type='notebook'):
    """Assert that resp describes a freshly created model at path."""
    self.assertEqual(resp.status_code, 201)
    location_header = py3compat.str_to_unicode(resp.headers['Location'])
    self.assertEqual(location_header,
                     url_escape(url_path_join(u'/api/contents', path)))
    model = resp.json()
    self.assertEqual(model['name'], path.rsplit('/', 1)[-1])
    self.assertEqual(model['path'], path)
    self.assertEqual(model['type'], type)
    # Directories must exist on disk as dirs, everything else as files.
    checker = self.isdir if type == 'directory' else self.isfile
    assert checker(path)
def post(self, path='', name=None):
    """post creates a new checkpoint"""
    checkpoint = self.notebook_manager.create_checkpoint(name, path)
    payload = json.dumps(checkpoint, default=date_default)
    location = url_path_join(self.base_project_url, 'api/notebooks',
                             path, name, 'checkpoints', checkpoint['id'])
    self.set_header('Location', url_escape(location))
    self.set_status(201)
    self.finish(payload)
def location_url(self, path):
    """Return the full URL location of a file.

    Parameters
    ----------
    path : unicode
        The API path of the file, such as "foo/bar.txt".
    """
    location = url_path_join(self.base_url, 'api', 'contents', path)
    return url_escape(location)
def post(self, path=''):
    """post creates a new checkpoint"""
    cm = self.contents_manager
    # create_checkpoint may be sync or async; normalize with maybe_future.
    checkpoint = yield gen.maybe_future(cm.create_checkpoint(path))
    payload = json.dumps(checkpoint, default=date_default)
    location = url_path_join(self.base_url, 'api/contents', path,
                             'checkpoints', checkpoint['id'])
    self.set_header('Location', url_escape(location))
    self.set_status(201)
    self.finish(payload)
def _check_nb_created(self, resp, name, path):
    """Assert resp reports 201 with matching Location, name and on-disk file."""
    self.assertEqual(resp.status_code, 201)
    location_header = py3compat.str_to_unicode(resp.headers['Location'])
    self.assertEqual(
        location_header,
        url_escape(url_path_join(u'/api/notebooks', path, name)))
    self.assertEqual(resp.json()['name'], name)
    nb_file = pjoin(self.notebook_dir.name, path.replace('/', os.sep), name)
    assert os.path.isfile(nb_file)
def content_security_policy(self):
    """The default Content-Security-Policy header

    Can be overridden by defining Content-Security-Policy in
    settings['headers']
    """
    directives = [
        "frame-ancestors 'self'",
        # Make sure the report-uri is relative to the base_url
        "report-uri " + url_path_join(self.base_url, csp_report_uri),
    ]
    return '; '.join(directives)
def test_contents_manager(self):
    "make sure ContentsManager returns right files (ipynb, bin, txt)."
    nbdir = self.notebook_dir.name
    base = self.base_url()
    # One notebook, one binary file, one text file on disk.
    nb = new_notebook(
        cells=[
            new_markdown_cell(u'Created by test ³'),
            new_code_cell("print(2*6)", outputs=[
                new_output("stream", text="12"),
            ])
        ]
    )
    with io.open(pjoin(nbdir, 'testnb.ipynb'), 'w',
                 encoding='utf-8') as f:
        write(nb, f, version=4)
    with io.open(pjoin(nbdir, 'test.bin'), 'wb') as f:
        f.write(b'\xff' + os.urandom(5))
        # redundant: the with statement already closes f
        f.close()
    with io.open(pjoin(nbdir, 'test.txt'), 'w') as f:
        f.write(u'foobar')
        # redundant: the with statement already closes f
        f.close()
    # The notebook is served as (parseable) JSON text.
    r = requests.get(url_path_join(base, 'files', 'testnb.ipynb'))
    self.assertEqual(r.status_code, 200)
    self.assertIn('print(2*6)', r.text)
    json.loads(r.text)
    # The binary file is served byte-for-byte as octet-stream.
    r = requests.get(url_path_join(base, 'files', 'test.bin'))
    self.assertEqual(r.status_code, 200)
    self.assertEqual(r.headers['content-type'], 'application/octet-stream')
    self.assertEqual(r.content[:1], b'\xff')
    self.assertEqual(len(r.content), 6)
    # The text file is served as text/plain.
    r = requests.get(url_path_join(base, 'files', 'test.txt'))
    self.assertEqual(r.status_code, 200)
    self.assertEqual(r.headers['content-type'], 'text/plain')
    self.assertEqual(r.text, 'foobar')
def get(self, name, user_path):
    # Spawn-pending page: poll the spawner and either return logs as JSON
    # (?get_logs) or render the waiting page; unknown users go to login.
    current_user = self.get_current_user()
    if current_user and current_user.name == name:
        # logged in, work with spawner
        is_log_request = self.get_argument('get_logs', False)
        is_failed = False
        is_done = False
        if current_user.spawner:
            spawner = current_user.spawner
            is_running = yield spawner.is_running()
            log_lines = spawner.user_log
            is_failed = spawner.is_failed
            # done == spawn finished, didn't fail, and server is running
            if not current_user.spawn_pending and not is_failed and is_running:
                is_done = True
            if spawner.is_empty and not is_failed:
                # nothing to wait for: back to the home page
                self.redirect(
                    url_path_join(self.hub.server.base_url, 'home'))
                return
        else:
            log_lines = []
        if current_user.stop_pending and not is_failed:
            self.redirect(url_path_join(self.hub.server.base_url, 'home'))
            return
        if is_log_request:
            # JSON poll response: log lines plus a failed/done flag.
            resp = {'log': log_lines}
            if is_failed:
                resp.update({'failed': 1})
            elif is_done:
                resp.update({'done': 1})
            self.finish(json_encode(resp))
        else:
            if is_done:
                self.set_login_cookie(current_user)
            html = self.render_template("spawn_pending.html",
                                        user=current_user,
                                        need_wait=int(is_done),
                                        version=__version__)
            self.finish(html)
    else:
        # logged in as a different user, redirect
        target = url_path_join(self.base_url, 'login')
        self.redirect(target)
def test_get_nb_contents(self):
    """Reading a notebook returns full JSON content including metadata."""
    for d, name in self.dirs_nbs:
        path = url_path_join(d, name + '.ipynb')
        model = self.api.read(path).json()
        self.assertEqual(model['name'], u'%s.ipynb' % name)
        self.assertEqual(model['path'], path)
        self.assertEqual(model['type'], 'notebook')
        self.assertIn('content', model)
        self.assertEqual(model['format'], 'json')
        self.assertIn('metadata', model['content'])
        self.assertIsInstance(model['content']['metadata'], dict)
def test_contents_manager(self):
    "make sure ContentsManager returns right files (ipynb, bin, txt)."
    nbdir = self.notebook_dir.name
    base = self.base_url()
    # Build a v3-style notebook (worksheets API) plus binary/text files.
    nb = new_notebook(name='testnb')
    ws = new_worksheet()
    nb.worksheets = [ws]
    ws.cells.append(new_heading_cell(u'Created by test ³'))
    cc1 = new_code_cell(input=u'print(2*6)')
    cc1.outputs.append(new_output(output_text=u'12', output_type='stream'))
    ws.cells.append(cc1)
    with io.open(pjoin(nbdir, 'testnb.ipynb'), 'w',
                 encoding='utf-8') as f:
        write(nb, f, format='ipynb')
    with io.open(pjoin(nbdir, 'test.bin'), 'wb') as f:
        f.write(b'\xff' + os.urandom(5))
        # redundant: the with statement already closes f
        f.close()
    with io.open(pjoin(nbdir, 'test.txt'), 'w') as f:
        f.write(u'foobar')
        # redundant: the with statement already closes f
        f.close()
    # The notebook is served as (parseable) JSON text.
    r = requests.get(url_path_join(base, 'files', 'testnb.ipynb'))
    self.assertEqual(r.status_code, 200)
    self.assertIn('print(2*6)', r.text)
    json.loads(r.text)
    # The binary file is served byte-for-byte as octet-stream.
    r = requests.get(url_path_join(base, 'files', 'test.bin'))
    self.assertEqual(r.status_code, 200)
    self.assertEqual(r.headers['content-type'], 'application/octet-stream')
    self.assertEqual(r.content[:1], b'\xff')
    self.assertEqual(len(r.content), 6)
    # The text file is served as text/plain.
    r = requests.get(url_path_join(base, 'files', 'test.txt'))
    self.assertEqual(r.status_code, 200)
    self.assertEqual(r.headers['content-type'], 'text/plain')
    self.assertEqual(r.text, 'foobar')
def _req(self, verb, path, body=None):
    """Send a request to the sessions API; raise on HTTP error.

    Before raising, try to replace the response reason with the JSON
    'message' field so test failures are easier to read.
    """
    response = requests.request(
        verb,
        url_path_join(self.base_url, 'api/sessions', path),
        data=body)
    if 400 <= response.status_code < 600:
        try:
            response.reason = response.json()['message']
        except Exception:
            # was a bare `except:` (also swallows SystemExit/KeyboardInterrupt);
            # Exception is the widest we should ignore here — body may not
            # be JSON or may lack 'message', keep the original reason.
            pass
    response.raise_for_status()
    return response
def location_url(self, name, path):
    """Return the full URL location of a file.

    Parameters
    ----------
    name : unicode
        The base name of the file, such as "foo.ipynb".
    path : unicode
        The API path of the file, such as "foo/bar".
    """
    location = url_path_join(self.base_url, 'api', 'contents', path, name)
    return url_escape(location)
def init_webapp(self):
    # load the hub related settings into the tornado settings dict
    settings = self.tornado_settings
    settings['cookie_cache'] = {}
    settings['user'] = self.user
    # pop (not get): removes the token from the environment — presumably
    # so child processes don't inherit it; confirm.
    settings['hub_api_key'] = os.environ.pop('JPY_API_TOKEN')
    settings['hub_prefix'] = self.hub_prefix
    settings['cookie_name'] = self.cookie_name
    settings['login_url'] = url_path_join(self.hub_prefix, 'login')
    settings['hub_api_url'] = self.hub_api_url
    super(SingleUserNotebookApp, self).init_webapp()