Example 1
async def test_list_notebooks(jp_fetch, contents, path, name):
    response = await jp_fetch(
        'api',
        'contents',
        path,
        method='GET',
    )
    data = json.loads(response.body.decode())
    nbs = notebooks_only(data)
    assert len(nbs) > 0
    assert name + '.ipynb' in [n['name'] for n in nbs]
    assert url_path_join(path, name + '.ipynb') in [n['path'] for n in nbs]
Example 2
def client_fetch(*parts, headers={}, params={}, **kwargs):
    # Handle URL strings
    path_url = url_escape(url_path_join(jp_base_url, *parts), plus=False)
    params_url = urllib.parse.urlencode(params)
    url = path_url + "?" + params_url
    # Add auth keys to header
    headers.update(jp_auth_header)
    # Make request.
    return http_server_client.fetch(url,
                                    headers=headers,
                                    request_timeout=20,
                                    **kwargs)
Example 3
    def _prepare_handlers(self):
        webapp = self.serverapp.web_app

        # Get handlers defined by extension subclass.
        self.initialize_handlers()

        # prepend base_url onto the patterns that we match
        new_handlers = []
        for handler_items in self.handlers:
            # Build url pattern including base_url
            pattern = url_path_join(webapp.settings['base_url'], handler_items[0])
            handler = handler_items[1]

            # Get handler kwargs, if given
            kwargs = {}
            if issubclass(handler, ExtensionHandlerMixin):
                kwargs['name'] = self.name

            try:
                kwargs.update(handler_items[2])
            except IndexError:
                pass

            new_handler = (pattern, handler, kwargs)
            new_handlers.append(new_handler)

        # Add static endpoint for this extension, if static paths are given.
        if len(self.static_paths) > 0:
            # Append the extension's static directory to server handlers.
            static_url = url_path_join(self.static_url_prefix, "(.*)")

            # Construct handler.
            handler = (
                static_url,
                webapp.settings['static_handler_class'],
                {'path': self.static_paths}
            )
            new_handlers.append(handler)

        webapp.add_handlers('.*$', new_handlers)
Example 4
def _load_jupyter_server_extension(serverapp):
    """
    Called when the extension is loaded.

    Args:
        serverapp (ServerApp): handle to the Jupyter server instance.
    """
    web_app = serverapp.web_app
    base_url = web_app.settings["base_url"]
    host_pattern = ".*$"

    if not isinstance(serverapp.contents_manager, MetaManager):

        warnings.warn(_mm_config_warning_msg)
        return

    resources_url = "jupyterfs/resources"
    print("Installing jupyter-fs resources handler on path %s" %
          url_path_join(base_url, resources_url))
    web_app.add_handlers(
        host_pattern,
        [(url_path_join(base_url, resources_url), MetaManagerHandler)])
Example 5
    async def get_current_user(self) -> Dict[str, str]:
        """Get the current user information.

        Returns:
            JSON description of the user matching the access token
        """
        # Check server compatibility
        await self.check_server_version()

        git_url = url_path_join(self.base_api_url, "user")
        data = await self._call_gitlab(git_url, has_pagination=False)

        return {"username": data["username"]}
Example 6
    async def list_prs(self, username: str,
                       pr_filter: str) -> List[Dict[str, str]]:
        """Returns the list of pull requests for the given user.

        Args:
            username: User ID for the versioning service
            pr_filter: Filter to add to the pull requests requests
        Returns:
            The list of pull requests
        """
        search_filter = self.get_search_filter(username, pr_filter)

        # Use search API to find matching pull requests and return
        git_url = url_path_join(
            self.base_api_url, "/merge_requests?state=opened&" + search_filter)

        results = await self._call_gitlab(git_url)

        data = []
        for result in results:
            url = url_path_join(
                self.base_api_url,
                "projects",
                str(result["project_id"]),
                "merge_requests",
                str(result["iid"]),
            )
            data.append({
                "id": url,
                "title": result["title"],
                "body": result["description"],
                "internalId": result["id"],
                "link": result["web_url"],
            })

        # Reset cache
        self._merge_requests_cache = {}

        return data
Example 7
def client_fetch(*parts, headers={}, params={}, **kwargs):
    # Handle URL strings
    path = url_escape(url_path_join(*parts), plus=False)
    urlparts = urllib.parse.urlparse(base_url)
    urlparts = urlparts._replace(
        path=path,
        query=urllib.parse.urlencode(params),
    )
    url = urlparts.geturl()
    # Add auth keys to header
    headers.update(auth_header)
    # Make request.
    return http_client.fetch(url, headers=headers, **kwargs)
Example 8
def setup_handlers(web_app, url_path, server_app):
    dictionaries = discover_dictionaries(server_app)
    host_pattern = ".*$"
    base_url = web_app.settings["base_url"]

    # Prepend the base_url so that it works in a JupyterHub setting
    handlers = []
    for lang in dictionaries:
        lang_url = url_path_join(base_url, url_path, lang['id'])
        handlers.append(
            (r"{}/(.*\.(?:aff|dic))".format(lang_url), StaticFileHandler, {
                "path": lang['path']
            }))
    web_app.add_handlers(host_pattern, handlers)

    LanguageManagerHandler.lang_dictionaries = dictionaries_to_url(
        dictionaries, url_path_join(base_url, url_path))

    # Prepend the base_url so that it works in a JupyterHub setting
    route_pattern = url_path_join(base_url, url_path, "language_manager")
    handlers = [(route_pattern, LanguageManagerHandler)]
    web_app.add_handlers(host_pattern, handlers)
Example 9
def setup_handlers(web_app):
    host_pattern = ".*$"

    base_url = web_app.settings["base_url"]

    endpoints = [
        ("install_notebook", InstallNotebookHandler),
        ("catalog", CatalogHandler),
        ("contest_submit", ContestSubmitHandler),
    ]
    handlers = [(url_path_join(base_url, "edc_jlab", endpoint), handler)
                for endpoint, handler in endpoints]
    web_app.add_handlers(host_pattern, handlers)
Example 10
    def _req(self, verb, path, body=None):
        response = self.request(verb,
                                url_path_join('api/sessions', path),
                                data=body)

        if 400 <= response.status_code < 600:
            try:
                response.reason = response.json()['message']
            except Exception:
                pass
        response.raise_for_status()

        return response
Example 11
async def test_trailing_slash(
    jp_ensure_app_fixture,
    uri,
    expected,
    http_server_client,
    jp_auth_header,
    jp_base_url,
):
    # http_server_client raises an exception when follow_redirects=False
    with pytest.raises(tornado.httpclient.HTTPClientError) as err:
        await http_server_client.fetch(
            url_path_join(jp_base_url, uri),
            headers=jp_auth_header,
            request_timeout=20,
            follow_redirects=False,
        )
    # Capture the response from the raised exception value.
    response = err.value.response
    assert response is not None
    assert response.code == 302
    assert "Location" in response.headers
    assert response.headers["Location"] == url_path_join(jp_base_url, expected)
Example 12
async def test_list_notebooks(jp_fetch, contents, path, name):
    response = await jp_fetch(
        "api",
        "contents",
        path,
        method="GET",
    )
    data = json.loads(response.body.decode())
    nbs = notebooks_only(data)
    assert len(nbs) > 0
    assert name + ".ipynb" in [normalize("NFC", n["name"]) for n in nbs]
    assert url_path_join(
        path, name + ".ipynb") in [normalize("NFC", n["path"]) for n in nbs]
Example 13
def setup_handlers(web_app):
    host_pattern = ".*$"
    base_url = web_app.settings["base_url"]
    # pass the jupyterlab server root directory to
    # environment variable `GREENFLOWROOT`. Note, this
    # variable is not meant to be overwritten by user.
    # This variable can be used by other utility function
    # to compute the absolute path of the files.
    os.environ['GREENFLOWROOT'] = os.getcwd()
    # load all the graphs given the input gq.yaml file contents
    route_pattern0 = url_path_join(base_url, "greenflowlab", "load_graph")
    route_pattern1 = url_path_join(base_url, "greenflowlab", "all_nodes")
    route_pattern2 = url_path_join(base_url, "greenflowlab", "load_graph_path")
    route_pattern3 = url_path_join(base_url, "greenflowlab", "register_node")
    route_pattern4 = url_path_join(base_url, "greenflowlab",
                                   "register_plugins")
    handlers = [(route_pattern0, RouteHandlerLoadGraph),
                (route_pattern1, RouteHandlerLoadAllNodes),
                (route_pattern2, RouteHandlerLoadGraphFromPath),
                (route_pattern3, RouteHandlerRegister),
                (route_pattern4, RouteHandlerPlugins)]
    web_app.add_handlers(host_pattern, handlers)
Example 14
def load_jupyter_server_extension(server_app):
    web_app = server_app.web_app

    nbconvert_template_paths = []
    static_paths = [STATIC_ROOT]
    template_paths = []

    # common configuration options between the server extension and the application
    voila_configuration = VoilaConfiguration(parent=server_app)
    collect_template_paths(nbconvert_template_paths, static_paths,
                           template_paths, voila_configuration.template)

    jenv_opt = {"autoescape": True}
    env = Environment(loader=FileSystemLoader(template_paths),
                      extensions=['jinja2.ext.i18n'],
                      **jenv_opt)
    web_app.settings['voila_jinja2_env'] = env

    nbui = gettext.translation('nbui',
                               localedir=os.path.join(ROOT, 'i18n'),
                               fallback=True)
    env.install_gettext_translations(nbui, newstyle=False)

    host_pattern = '.*$'
    base_url = url_path_join(web_app.settings['base_url'])
    # First look into 'nbextensions_path' configuration key (classic notebook)
    # and fall back to default path for nbextensions (jupyter server).
    if 'nbextensions_path' in web_app.settings:
        nbextensions_path = web_app.settings['nbextensions_path']
    else:
        nbextensions_path = jupyter_path('nbextensions')
    web_app.add_handlers(
        host_pattern,
        [
            (url_path_join(
                base_url, '/voila/render' + path_regex), VoilaHandler, {
                    'config': server_app.config,
                    'nbconvert_template_paths': nbconvert_template_paths,
                    'voila_configuration': voila_configuration
                }),
            (url_path_join(base_url, '/voila'), VoilaTreeHandler),
            (url_path_join(base_url,
                           '/voila/tree' + path_regex), VoilaTreeHandler),
            (url_path_join(base_url,
                           '/voila/static/(.*)'), MultiStaticFileHandler, {
                               'paths': static_paths
                           }),
            # this handler serves the nbextensions similar to the classical notebook
            (
                url_path_join(base_url, r'/voila/nbextensions/(.*)'),
                FileFindHandler,
                {
                    'path': nbextensions_path,
                    'no_cache_paths':
                    ['/'],  # don't cache anything in nbextensions
                },
            )
        ])
Example 15
def client_fetch(*parts, headers={}, params={}, **kwargs):
    # Handle URL strings
    path = url_escape(url_path_join(*parts), plus=False)
    urlparts = urllib.parse.urlparse("ws://localhost:{}".format(http_port))
    urlparts = urlparts._replace(path=path,
                                 query=urllib.parse.urlencode(params))
    url = urlparts.geturl()
    # Add auth keys to header
    headers.update(auth_header)
    # Make request.
    req = tornado.httpclient.HTTPRequest(url,
                                         headers=auth_header,
                                         connect_timeout=120)
    return tornado.websocket.websocket_connect(req)
Example 16
def _load_jupyter_server_extension(server_app):
    """Load the nbserver extension"""
    webapp = server_app.web_app
    webapp.settings["env_manager"] = EnvManager(
        server_app.contents_manager.root_dir, server_app.kernel_spec_manager)

    base_url = webapp.settings["base_url"]
    webapp.add_handlers(
        ".*$",
        [(url_path_join(base_url, NS, pat), handler)
         for pat, handler in default_handlers],
    )

    get_logger().info("Server extension enabled")
Example 17
def setup_handlers(web_app):
    handlers = [('/wipp/info', InfoCheckHandler),
                ('/wipp/ui_urls', WippUiUrls),
                ('/wipp/register', WippRegisterNotebook),
                ('/wipp/imageCollections', WippImageCollections),
                ('/wipp/imageCollections/search', WippImageCollectionsSearch),
                ('/wipp/csvCollections', WippCsvCollections),
                ('/wipp/csvCollections/search', WippCsvCollectionsSearch)]

    base_url = web_app.settings["base_url"]
    handlers = [(url_path_join(base_url, x[0]), x[1]) for x in handlers]

    host_pattern = ".*$"
    web_app.add_handlers(host_pattern, handlers)
Example 18
def setup_handlers(web_app, host_allowlist):
    host_pattern = '.*$'
    web_app.add_handlers('.*', [
        (url_path_join(web_app.settings['base_url'],
                       r'/proxy/(.*):(\d+)(.*)'), RemoteProxyHandler, {
                           'absolute_url': False,
                           'host_allowlist': host_allowlist
                       }),
        (url_path_join(
            web_app.settings['base_url'],
            r'/proxy/absolute/(.*):(\d+)(.*)'), RemoteProxyHandler, {
                'absolute_url': True,
                'host_allowlist': host_allowlist
            }),
        (url_path_join(web_app.settings['base_url'],
                       r'/proxy/(\d+)(.*)'), LocalProxyHandler, {
                           'absolute_url': False
                       }),
        (url_path_join(web_app.settings['base_url'],
                       r'/proxy/absolute/(\d+)(.*)'), LocalProxyHandler, {
                           'absolute_url': True
                       }),
    ])
Example 19
    def content_security_policy(self):
        """The default Content-Security-Policy header

        Can be overridden by defining Content-Security-Policy in settings['headers']
        """
        if 'Content-Security-Policy' in self.settings.get('headers', {}):
            # user-specified, don't override
            return self.settings['headers']['Content-Security-Policy']

        return '; '.join([
            "frame-ancestors 'self'",
            # Make sure the report-uri is relative to the base_url
            "report-uri " + self.settings.get('csp_report_uri', url_path_join(self.base_url, csp_report_uri)),
        ])
Example 20
    async def post(self, *args, **kwargs):
        self.log.debug("Pipeline Export handler now executing post request")

        parent = self.settings.get("elyra")
        payload = self.get_json_body()

        self.log.debug(
            f"JSON payload: {json.dumps(payload, indent=2, separators=(',', ': '))}"
        )

        pipeline_definition = payload["pipeline"]
        pipeline_export_format = payload["export_format"]
        pipeline_export_path = payload["export_path"]
        pipeline_overwrite = payload["overwrite"]

        response = await PipelineValidationManager.instance().validate(
            pipeline=pipeline_definition)
        self.log.debug(
            f"Validation checks completed. Results as follows: {response.to_json()}"
        )

        if not response.has_fatal:
            pipeline = PipelineParser(
                root_dir=self.settings["server_root_dir"],
                parent=parent).parse(pipeline_definition)

            pipeline_exported_path = await PipelineProcessorManager.instance(
            ).export(pipeline, pipeline_export_format, pipeline_export_path,
                     pipeline_overwrite)
            json_msg = json.dumps({"export_path": pipeline_export_path})
            self.set_status(201)
            self.set_header("Content-Type", "application/json")
            location = url_path_join(self.base_url, "api", "contents",
                                     pipeline_exported_path)
            self.set_header("Location", location)
        else:
            json_msg = json.dumps({
                "reason": responses.get(400),
                "message": "Errors found in pipeline",
                "timestamp": datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
                "issues": response.to_json().get("issues"),
            })
            self.set_status(400)

        self.set_header("Content-Type", "application/json")
        await self.finish(json_msg)
Example 21
    async def post(self, path=""):
        """post creates a new checkpoint"""
        cm = self.contents_manager
        checkpoint = await ensure_async(cm.create_checkpoint(path))
        data = json.dumps(checkpoint, default=json_default)
        location = url_path_join(
            self.base_url,
            "api/contents",
            url_escape(path),
            "checkpoints",
            url_escape(checkpoint["id"]),
        )
        self.set_header("Location", location)
        self.set_status(201)
        self.finish(data)
Example 22
    async def __get_content(self, url: str, filename: str, sha: str) -> str:
        link = url_concat(
            url_path_join(url, "contents", filename),
            {"ref": sha},
        )
        try:
            return await self._call_github(
                link,
                media_type="application/vnd.github.v3.raw",
                load_json=False)
        except HTTPError as e:
            if e.status_code == 404:
                return ""
            else:
                raise e
Example 23
def load_jupyter_server_extension(server_app):
    """
    Called during notebook start
    """
    resuseconfig = ResourceUseDisplay(parent=server_app)
    server_app.web_app.settings[
        "jupyter_resource_usage_display_config"] = resuseconfig
    base_url = server_app.web_app.settings["base_url"]

    server_app.web_app.add_handlers(
        ".*", [(url_path_join(base_url, "/api/metrics/v1"), ApiHandler)])

    callback = ioloop.PeriodicCallback(
        PrometheusHandler(PSUtilMetricsLoader(server_app)), 1000)
    callback.start()
Example 24
    async def post(self):
        km = self.kernel_manager
        model = self.get_json_body()
        if model is None:
            model = {'name': km.default_kernel_name}
        else:
            model.setdefault('name', km.default_kernel_name)

        kernel_id = await km.start_kernel(kernel_name=model['name'],
                                          path=model.get('path'))
        model = await ensure_async(km.kernel_model(kernel_id))
        location = url_path_join(self.base_url, 'api', 'kernels',
                                 url_escape(kernel_id))
        self.set_header('Location', location)
        self.set_status(201)
        self.finish(json.dumps(model, default=date_default))
Example 25
    async def post(self):
        km = self.kernel_manager
        model = self.get_json_body()
        if model is None:
            model = {"name": km.default_kernel_name}
        else:
            model.setdefault("name", km.default_kernel_name)

        kernel_id = await km.start_kernel(kernel_name=model["name"],
                                          path=model.get("path"))
        model = await ensure_async(km.kernel_model(kernel_id))
        location = url_path_join(self.base_url, "api", "kernels",
                                 url_escape(kernel_id))
        self.set_header("Location", location)
        self.set_status(201)
        self.finish(json.dumps(model, default=json_default))
Example 26
    def post(self):
        km = self.kernel_manager
        model = self.get_json_body()
        if model is None:
            model = {"name": km.default_kernel_name}
        else:
            model.setdefault("name", km.default_kernel_name)

        kernel_id = yield maybe_future(
            km.start_kernel(kernel_name=model["name"]))
        model = yield maybe_future(km.kernel_model(kernel_id))
        location = url_path_join(self.base_url, "api", "kernels",
                                 url_escape(kernel_id))
        self.set_header("Location", location)
        self.set_status(201)
        self.finish(json.dumps(model, default=date_default))
Example 27
async def create_kernel(response: Response, model: NewKernelModel = None):
    self = router.app
    km = self.kernel_manager
    if model is None:
        kernel_name = km.default_kernel_name
    elif model.name is None:
        kernel_name = km.default_kernel_name
    else:
        kernel_name = model.name
    kernel_id = await maybe_future(km.start_kernel(kernel_name=kernel_name))
    kernel = await maybe_future(km.kernel_model(kernel_id))
    location = url_path_join(self.base_url, 'api', 'kernels',
                             url_escape(kernel_id))
    response.headers['Location'] = location
    # Validate model!
    return Kernel(**kernel)
Example 28
def _load_jupyter_server_extension(lab_app):
    """Register the API handler to receive HTTP requests from the frontend extension.

    Parameters
    ----------
    lab_app: jupyterlab.labapp.LabApp
        JupyterLab application instance

    """
    web_app = lab_app.web_app
    host_pattern = '.*$'

    base_url = web_app.settings["base_url"]

    url_path = "jupyterlab_requirements"

    # Prepend the base_url so that it works in a jupyterhub setting
    custom_handlers = [
        (url_path_join(base_url,
                       f"/{url_path}/thoth/config"), ThothConfigHandler),
        (url_path_join(base_url,
                       f"/{url_path}/thoth/resolution"), ThothAdviseHandler),
        (url_path_join(base_url,
                       f"/{url_path}/thoth/invectio"), ThothInvectioHandler),
        (url_path_join(base_url, f"/{url_path}/pipenv"), PipenvHandler),
        (url_path_join(base_url, f"/{url_path}/kernel/packages"),
         DependencyInstalledHandler),
        (url_path_join(base_url, f"/{url_path}/kernel/install"),
         DependencyInstallHandler),
        (url_path_join(base_url,
                       f"/{url_path}/kernel/python"), PythonVersionHandler),
        (url_path_join(base_url,
                       f"/{url_path}/kernel/create"), JupyterKernelHandler),
        (url_path_join(base_url, f"/{url_path}/file/dependencies"),
         DependenciesFilesHandler),
    ]

    web_app.add_handlers(host_pattern, custom_handlers)

    lab_app.log.info(f"Registered JupyterLab extension at URL {url_path}")
Example 29
async def test_view(jp_fetch, jp_serverapp, jp_root_dir, exists, name):
    """Test /view/$path for a few cases"""
    if exists:
        jp_root_dir.joinpath(name).write_text(name)

    if not exists:
        with pytest.raises(tornado.httpclient.HTTPClientError) as e:
            await jp_fetch("view", name, method="GET")
        assert expected_http_error(e, 404), [name, e]
    else:
        r = await jp_fetch("view", name, method="GET")
        assert r.code == 200
        assert r.headers["content-type"] == "text/html; charset=UTF-8"
        html = r.body.decode()
        src = find_iframe_src(html)
        assert src == url_path_join(jp_serverapp.base_url, f"/files/{name}")
Example 30
def _load_jupyter_server_extension(nb_server_app):
    web_app = nb_server_app.web_app
    host_pattern = '.*$'
    web_app.add_handlers(host_pattern, [
        (url_path_join(web_app.settings['base_url'], r'/elyra/{}'.format(YamlSpecHandler.get_resource_metadata()[0])),
         YamlSpecHandler),
        (url_path_join(web_app.settings['base_url'], r'/elyra/metadata/%s' % (namespace_regex)), MetadataHandler),
        (url_path_join(web_app.settings['base_url'], r'/elyra/metadata/%s/%s' % (namespace_regex, resource_regex)),
         MetadataResourceHandler),
        (url_path_join(web_app.settings['base_url'], r'/elyra/schema/%s' % (namespace_regex)), SchemaHandler),
        (url_path_join(web_app.settings['base_url'], r'/elyra/schema/%s/%s' % (namespace_regex, resource_regex)),
         SchemaResourceHandler),
        (url_path_join(web_app.settings['base_url'], r'/elyra/namespace'), NamespaceHandler),
        (url_path_join(web_app.settings['base_url'], r'/elyra/pipeline/schedule'), PipelineSchedulerHandler),
        (url_path_join(web_app.settings['base_url'], r'/elyra/pipeline/export'), PipelineExportHandler),
    ])
    # Create PipelineProcessorManager instance passing root directory
    PipelineProcessorManager.instance(root_dir=web_app.settings['server_root_dir'], parent=nb_server_app)