Example #1
 def get_notebook(self, notebook_name, notebook_version, datalake_id):
     try:
         result = self.api.get_notebook_by_datalake_id_and_version(
             self.api_key, datalake_id, notebook_name, notebook_version)
         notebook = self.notebook_model_from_db(result)
         content = FileContentsManager()
         content.save(notebook, '/notebook.ipynb')
     except ApiException as e:
         print(e.body['message'])
Example #2
  def __init__(self, **kwargs):
    print('Creating the combined contents manager...')
    super(CombinedContentsManager, self).__init__(**kwargs)

    file_cm = FileContentsManager(**kwargs)
    file_cm.checkpoints = GenericFileCheckpoints(**file_cm.checkpoints_kwargs)
    gcs_cm = GCSContentsManager(**kwargs)
    self._content_managers = {
        'Local Disk': file_cm,
        'GCS': gcs_cm,
    }
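The constructor above simply keys one backend manager per display prefix in a plain dict. As a usage note, a minimal dispatch sketch over such a mapping might look like the helper below; the route function, the prefix-stripping convention, and the 'Local Disk' fallback are assumptions for illustration only, not part of CombinedContentsManager.

def route(content_managers, path):
    """Hypothetical helper: pick the manager whose display prefix starts the path.

    Assumes paths are addressed as '<prefix>/<relative path>' and falls back to
    the 'Local Disk' entry when no prefix matches.
    """
    for prefix, manager in content_managers.items():
        if prefix and path.startswith(prefix + '/'):
            # Strip the prefix so the backend sees a path relative to its own root.
            return manager, path[len(prefix) + 1:]
    return content_managers['Local Disk'], path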
Example #3
    def setUp(self):

        self.td = TemporaryDirectory()
        self.checkpoints = PostgresCheckpoints(
            user_id='test',
            db_url=TEST_DB_URL,
        )
        self.contents = FileContentsManager(
            root_dir=self.td.name,
            checkpoints=self.checkpoints,
        )

        self.checkpoints.ensure_user()
Example #4
def test_initialize_post_save_binding():
    """Test that post_save hook can be successfully bound to a Jupyter config."""
    jupyter_config_obj = Config(FileContentsManager=FileContentsManager())
    jupyter_config.initialize_post_save_hook(jupyter_config_obj)
    assert isinstance(jupyter_config_obj.FileContentsManager,
                      FileContentsManager)
    assert jupyter_config_obj.FileContentsManager.post_save_hook is export.post_save
Example #5
def test_initialize_post_save_execution(monkeypatch, caplog):
    """Test that post_save initialization works as expected and bound post_save executes."""
    caplog.set_level(logging.DEBUG)

    jupyter_config_obj = Config(FileContentsManager=FileContentsManager())

    def mocked_post_save(model, os_path, contents_manager):
        """Append a token to os_path to certify that function ran."""
        os_path.append("nbautoexport")

    monkeypatch.setattr(nbautoexport_root, "post_save", mocked_post_save)

    # Initialize post_save hook
    jupyter_config.initialize_post_save_hook(jupyter_config_obj)

    assert caplog_contains(
        caplog,
        level=logging.INFO,
        in_msg="nbautoexport | Successfully registered post-save hook",
    )
    assert isinstance(jupyter_config_obj.FileContentsManager,
                      FileContentsManager)
    assert callable(jupyter_config_obj.FileContentsManager.post_save_hook)

    # Execute post_save hook
    os_path_list = []
    jupyter_config_obj.FileContentsManager.run_post_save_hook(
        model=None, os_path=os_path_list)
    assert os_path_list == ["nbautoexport"]
Example #6
def test_initialize_post_save_import_error_caught(monkeypatch, caplog,
                                                  jupyter_app):
    """Test that missing nbautoexport error is caught and properly logged."""

    real_import = __builtins__["__import__"]

    def mock_import(name, globals=None, locals=None, fromlist=(), level=0):
        if name == "nbautoexport":
            raise ModuleNotFoundError("No module named 'nbautoexport'")
        return real_import(name=name,
                           globals=globals,
                           locals=locals,
                           fromlist=fromlist,
                           level=level)

    monkeypatch.setattr(builtins, "__import__", mock_import)
    monkeypatch.delitem(sys.modules, "nbautoexport")

    jupyter_config_obj = Config(FileContentsManager=FileContentsManager())

    # Initialize post_save hook
    # Should run through since error is caught
    jupyter_config.initialize_post_save_hook(jupyter_config_obj)

    assert caplog_contains(
        caplog,
        name=jupyter_app.log.name,
        level=logging.ERROR,
        in_msg="ModuleNotFoundError: No module named 'nbautoexport'",
    )
Example #7
def main():
    port = 8001
    server_url = 'http://localhost:{}'.format(port)

    cur_dir = os.path.abspath(os.getcwd())

    settings = {
        #"static_path": os.path.abspath(
        #    os.path.join(os.path.dirname(__file__), '..', '..', 'client', 'dist')),

        # TODO: implement
        "login_url": "/login",
        "base_url": "/",
        "contents_manager": FileContentsManager(),
        "xsrf_cookies": False,
        'allow_origin': '*',
        'headers': {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Headers': '*',
            'Access-Control-Allow-Methods': '*'
        }
    }

    application = tornado.web.Application(
        [(r"/echo", WSEchoHandler), *kernel_handlers, *contents_handlers],
        **settings)

    init_terminal(application, cur_dir, server_url, settings)

    log.info("listening on {}".format(port))
    application.listen(port)
    tornado.ioloop.IOLoop.current().start()
Example #8
File: test_gist.py  Project: dalejung/nbx
    def test_model_to_files(self):
        with TemporaryDirectory() as td:
            fm = FileContentsManager(root_dir=td)
            model = fm.new_untitled(type='notebook')
            # need content
            model = fm.get(model['path'])
            files = model_to_files(model)
            name = model['name']
            # files should only contain one file
            assert_items_equal(files, [name])

            # add a file
            model['__files'] = {'file1.txt': 'file1txt content'}
            files = model_to_files(model)
            assert_items_equal(files, [name, 'file1.txt'])
            assert files['file1.txt'] == 'file1txt content'
Example #9
def test_initialize_post_save_existing(monkeypatch):
    """Test that handling of existing post_save hook works properly."""

    jupyter_config_obj = Config(FileContentsManager=FileContentsManager())

    def old_post_save(model, os_path, contents_manager):
        """Append a token to os_path to certify that function ran."""
        os_path.append("old_post_save")

    jupyter_config_obj.FileContentsManager.post_save_hook = old_post_save

    def mocked_post_save(model, os_path, contents_manager):
        """Append a token to os_path to certify that function ran."""
        os_path.append("nbautoexport")

    monkeypatch.setattr(nbautoexport_root, "post_save", mocked_post_save)

    jupyter_config.initialize_post_save_hook(jupyter_config_obj)

    assert isinstance(jupyter_config_obj.FileContentsManager,
                      FileContentsManager)
    assert callable(jupyter_config_obj.FileContentsManager.post_save_hook)
    os_path_list = []
    jupyter_config_obj.FileContentsManager.run_post_save_hook(
        model=None, os_path=os_path_list)
    assert os_path_list == ["old_post_save", "nbautoexport"]
Example #10
def test_initialize_post_save_import_error_caught(monkeypatch):
    """Test that bound post_save hook with given signature can be successfully run."""

    jupyter_config_obj = Config(FileContentsManager=FileContentsManager())

    monkeypatch.delattr(nbautoexport_root, "post_save")

    # Expect: ImportError: cannot import name 'post_save' from 'nbautoexport'
    jupyter_config.initialize_post_save_hook(jupyter_config_obj)
Example #11
    def test_download_checkpoints(self):
        """
        Create two checkpoints for two notebooks, then call
        download_checkpoints.

        Assert that we get the correct version of both notebooks.
        """
        self.contents.new({'type': 'directory'}, 'subdir')
        paths = ('a.ipynb', 'subdir/a.ipynb')
        expected_content = {}
        for path in paths:
            # Create and checkpoint.
            self.contents.new(path=path)

            self.contents.create_checkpoint(path)

            model = self.add_markdown_cell(path)
            self.contents.create_checkpoint(path)

            # Assert greater because FileContentsManager creates a checkpoint
            # on creation, but this isn't part of the spec.
            self.assertGreater(len(self.contents.list_checkpoints(path)), 2)

            # Store the content to verify correctness after download.
            expected_content[path] = model['content']

        with TemporaryDirectory() as td:
            download_checkpoints(
                self.checkpoints.db_url,
                td,
                user='******',
            )

            fm = FileContentsManager(root_dir=td)
            root_entries = sorted(m['path'] for m in fm.get('')['content'])
            self.assertEqual(root_entries, ['a.ipynb', 'subdir'])
            subdir_entries = sorted(
                m['path'] for m in fm.get('subdir')['content']
            )
            self.assertEqual(subdir_entries, ['subdir/a.ipynb'])
            for path in paths:
                content = fm.get(path)['content']
                self.assertEqual(expected_content[path], content)
Example #13
 def create_file(self, file_path):
     """Creates a new file with the given name in JH
     """
     full_path = self.data_path / file_path
     if full_path.exists():
         raise IOError('File already exists.')
     path_rel_cwd = Path(full_path).relative_to(Path.cwd())
     if file_path.lower().endswith('.ipynb'):
         FileContentsManager().new(path=str(path_rel_cwd))
     else:
         full_path.touch()
Example #14
    def setUp(self):

        mgr_roots = ['A', '', u'unicodé']
        self.temp_dirs = {prefix: TemporaryDirectory() for prefix in mgr_roots}
        self.temp_dir_names = {
            prefix: v.name
            for prefix, v in iteritems(self.temp_dirs)
        }
        self._managers = {
            prefix: FileContentsManager(root_dir=self.temp_dir_names[prefix])
            for prefix in mgr_roots
        }
        self.contents_manager = HybridContents(managers=self._managers)
Example #16
    def setUp(self):
        super().setUp()
        mock_settings = {
            "BookstoreSettings": {
                "fs_cloning_basedir": os.path.join(test_dir, 'test_files')
            }
        }
        config = Config(mock_settings)

        self.mock_application = Mock(
            spec=Application,
            ui_methods={},
            ui_modules={},
            settings={
                'jinja2_env': Environment(),
                "config": config,
                "contents_manager": FileContentsManager(),
            },
        )
Example #17
    def setUp(self):
        super().setUp()
        mock_settings = {
            "BookstoreSettings": {
                "s3_access_key_id": "mock_id",
                "s3_secret_access_key": "mock_access",
            }
        }
        config = Config(mock_settings)

        self.mock_application = Mock(
            spec=Application,
            ui_methods={},
            ui_modules={},
            settings={
                'jinja2_env': Environment(),
                "config": config,
                "contents_manager": FileContentsManager(),
            },
        )
示例#18
0
def test_initialize_post_save_execution(monkeypatch):
    """Test that bound post_save hook with given signature can be successfully run."""

    jupyter_config_obj = Config(FileContentsManager=FileContentsManager())

    def mocked_post_save(model, os_path, contents_manager):
        """Append a token to os_path to certify that function ran."""
        os_path.append("nbautoexport")

    monkeypatch.setattr(nbautoexport_root, "post_save", mocked_post_save)

    jupyter_config.initialize_post_save_hook(jupyter_config_obj)

    assert isinstance(jupyter_config_obj.FileContentsManager,
                      FileContentsManager)
    assert callable(jupyter_config_obj.FileContentsManager.post_save_hook)
    os_path_list = []
    jupyter_config_obj.FileContentsManager.run_post_save_hook(
        model=None, os_path=os_path_list)
    assert os_path_list == ["nbautoexport"]
Example #19
def test_initialize_post_save_double_import_error_caught(
        monkeypatch, caplog, capsys, jupyter_app):
    """Test that both missing nbautoexport error and missing jupyer_core are caught and properly
    logged."""

    real_import = __builtins__["__import__"]

    def mock_import(name, globals=None, locals=None, fromlist=(), level=0):
        if name == "nbautoexport":
            raise ModuleNotFoundError("No module named 'nbautoexport'")
        if name == "jupyter_core.application":
            raise ModuleNotFoundError(
                "No module named 'jupyter_core.application'")
        return real_import(name=name,
                           globals=globals,
                           locals=locals,
                           fromlist=fromlist,
                           level=level)

    monkeypatch.setattr(builtins, "__import__", mock_import)
    monkeypatch.delitem(sys.modules, "nbautoexport")
    monkeypatch.delitem(sys.modules, "jupyter_core.application")

    jupyter_config_obj = Config(FileContentsManager=FileContentsManager())

    # Initialize post_save hook
    # Should run through since error is caught
    jupyter_config.initialize_post_save_hook(jupyter_config_obj)

    # Caplog should be empty, since logger didn't work
    assert len(caplog.record_tuples) == 0

    # Errors should be in stderr
    captured = capsys.readouterr()
    assert "ModuleNotFoundError: No module named 'jupyter_core.application'" in captured.err
    assert "ModuleNotFoundError: No module named 'nbautoexport'" in captured.err
Example #20
import logging
import json
import nbformat
import jupyter_client
import jupyter_client.kernelspec
from tornado import web
from .websockets import WebSocketHandlerMixin

import builtins
# A stray _ in the notebook.services.contents.manager causes an error without this.
builtins._ = lambda x: x
from notebook.services.contents.filemanager import FileContentsManager

logger = logging.getLogger("pangeo-dashboard-server")

k_spec_manager = jupyter_client.kernelspec.KernelSpecManager()
k_manager = jupyter_client.MultiKernelManager()

filemanager = FileContentsManager()

from datetime import date, datetime


def json_serial(obj):
    """JSON serializer for objects not serializable by default json code"""

    if isinstance(obj, (datetime, date)):
        return obj.isoformat()


class JSONHandeler(web.RequestHandler):
    def set_default_headers(self, *args, **kwargs):
        return self.set_header('Content-Type', 'application/json')
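A brief usage sketch for the json_serial helper defined above, reusing this module's json and datetime imports; the payload contents are illustrative only.

# Not part of the original module: json.dumps falls back to json_serial for
# values it cannot encode natively, such as datetime instances.
example_payload = json.dumps({"generated_at": datetime.now()}, default=json_serial)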
Example #21
class TestUploadDownload(TestCase):
    def setUp(self):

        self.td = TemporaryDirectory()
        self.checkpoints = PostgresCheckpoints(
            user_id='test',
            db_url=TEST_DB_URL,
        )
        self.contents = FileContentsManager(
            root_dir=self.td.name,
            checkpoints=self.checkpoints,
        )

        self.checkpoints.ensure_user()

    def tearDown(self):
        self.td.cleanup()
        clear_test_db()

    def add_markdown_cell(self, path):
        # Load and update
        model = self.contents.get(path=path)
        model['content'].cells.append(
            new_markdown_cell('Created by test: ' + path))

        # Save and checkpoint again.
        self.contents.save(model, path=path)
        return model

    def test_download_checkpoints(self):
        """
        Create two checkpoints for two notebooks, then call
        download_checkpoints.

        Assert that we get the correct version of both notebooks.
        """
        self.contents.new({'type': 'directory'}, 'subdir')
        paths = ('a.ipynb', 'subdir/a.ipynb')
        expected_content = {}
        for path in paths:
            # Create and checkpoint.
            self.contents.new(path=path)

            self.contents.create_checkpoint(path)

            model = self.add_markdown_cell(path)
            self.contents.create_checkpoint(path)

            # Assert greater because FileContentsManager creates a checkpoint
            # on creation, but this isn't part of the spec.
            self.assertGreater(len(self.contents.list_checkpoints(path)), 2)

            # Store the content to verify correctness after download.
            expected_content[path] = model['content']

        with TemporaryDirectory() as td:
            download_checkpoints(
                self.checkpoints.db_url,
                td,
                user='******',
            )

            fm = FileContentsManager(root_dir=td)
            root_entries = sorted(m['path'] for m in fm.get('')['content'])
            self.assertEqual(root_entries, ['a.ipynb', 'subdir'])
            subdir_entries = sorted(m['path']
                                    for m in fm.get('subdir')['content'])
            self.assertEqual(subdir_entries, ['subdir/a.ipynb'])
            for path in paths:
                content = fm.get(path)['content']
                self.assertEqual(expected_content[path], content)

    def test_checkpoint_all(self):
        """
        Test that checkpoint_all correctly makes a checkpoint for all files.
        """
        paths = populate(self.contents)
        original_content_minus_trust = {
            # Remove metadata that we expect to have dropped
            path: strip_transient(self.contents.get(path)['content'])
            for path in paths
        }

        original_cps = {}
        for path in paths:
            # Create a checkpoint, then update the file.
            original_cps[path] = self.contents.create_checkpoint(path)
            self.add_markdown_cell(path)

        # Verify that we still have the old version checkpointed.
        cp_content = {
            path: self.checkpoints.get_notebook_checkpoint(
                cp['id'],
                path,
            )['content']
            for path, cp in iteritems(original_cps)
        }
        self.assertEqual(original_content_minus_trust, cp_content)

        new_cps = checkpoint_all(
            self.checkpoints.db_url,
            self.td.name,
            self.checkpoints.user_id,
        )

        new_cp_content = {
            path: self.checkpoints.get_notebook_checkpoint(
                cp['id'],
                path,
            )['content']
            for path, cp in iteritems(new_cps)
        }
        for path, new_content in iteritems(new_cp_content):
            old_content = original_content_minus_trust[_norm_unicode(path)]
            self.assertEqual(
                new_content['cells'][:-1],
                old_content['cells'],
            )
            self.assertEqual(
                new_content['cells'][-1],
                new_markdown_cell('Created by test: ' + _norm_unicode(path)),
            )
Example #22
 def _contents_manager(self):
     """ simplest reasonable kernel manager
     """
     return FileContentsManager(root_dir=self.notebook_dir, parent=self)
Example #23
class TestUploadDownload(TestCase):

    def setUp(self):

        drop_testing_db_tables()
        migrate_testing_db()

        self.td = TemporaryDirectory()
        self.checkpoints = PostgresCheckpoints(
            user_id='test',
            db_url=TEST_DB_URL,
        )
        self.contents = FileContentsManager(
            root_dir=self.td.name,
            checkpoints=self.checkpoints,
        )

        self.checkpoints.ensure_user()

    def tearDown(self):
        self.td.cleanup()

    def add_markdown_cell(self, path):
        # Load and update
        model = self.contents.get(path=path)
        model['content'].cells.append(
            new_markdown_cell('Created by test: ' + path)
        )

        # Save and checkpoint again.
        self.contents.save(model, path=path)
        return model

    def test_download_checkpoints(self):
        """
        Create two checkpoints for two notebooks, then call
        download_checkpoints.

        Assert that we get the correct version of both notebooks.
        """
        self.contents.new({'type': 'directory'}, 'subdir')
        paths = ('a.ipynb', 'subdir/a.ipynb')
        expected_content = {}
        for path in paths:
            # Create and checkpoint.
            self.contents.new(path=path)

            self.contents.create_checkpoint(path)

            model = self.add_markdown_cell(path)
            self.contents.create_checkpoint(path)

            # Assert greater because FileContentsManager creates a checkpoint
            # on creation, but this isn't part of the spec.
            self.assertGreater(len(self.contents.list_checkpoints(path)), 2)

            # Store the content to verify correctness after download.
            expected_content[path] = model['content']

        with TemporaryDirectory() as td:
            download_checkpoints(
                self.checkpoints.db_url,
                td,
                user='******',
            )

            fm = FileContentsManager(root_dir=td)
            root_entries = sorted(m['path'] for m in fm.get('')['content'])
            self.assertEqual(root_entries, ['a.ipynb', 'subdir'])
            subdir_entries = sorted(
                m['path'] for m in fm.get('subdir')['content']
            )
            self.assertEqual(subdir_entries, ['subdir/a.ipynb'])
            for path in paths:
                content = fm.get(path)['content']
                self.assertEqual(expected_content[path], content)

    def test_checkpoint_all(self):
        """
        Test that checkpoint_all correctly makes a checkpoint for all files.
        """
        paths = populate(self.contents)
        original_content_minus_trust = {
            # Remove metadata that we expect to have dropped
            path: strip_transient(self.contents.get(path)['content'])
            for path in paths
        }

        original_cps = {}
        for path in paths:
            # Create a checkpoint, then update the file.
            original_cps[path] = self.contents.create_checkpoint(path)
            self.add_markdown_cell(path)

        # Verify that we still have the old version checkpointed.
        cp_content = {
            path: self.checkpoints.get_notebook_checkpoint(
                cp['id'],
                path,
            )['content']
            for path, cp in iteritems(original_cps)
        }
        self.assertEqual(original_content_minus_trust, cp_content)

        new_cps = checkpoint_all(
            self.checkpoints.db_url,
            self.td.name,
            self.checkpoints.user_id,
        )

        new_cp_content = {
            path: self.checkpoints.get_notebook_checkpoint(
                cp['id'],
                path,
            )['content']
            for path, cp in iteritems(new_cps)
        }
        for path, new_content in iteritems(new_cp_content):
            old_content = original_content_minus_trust[_norm_unicode(path)]
            self.assertEqual(
                new_content['cells'][:-1],
                old_content['cells'],
            )
            self.assertEqual(
                new_content['cells'][-1],
                new_markdown_cell('Created by test: ' + _norm_unicode(path)),
            )
Example #24
 def setUp(self):
     self._temp_dir = TemporaryDirectory()
     self.td = self._temp_dir.name
     self._file_manager = FileContentsManager(root_dir=self.td)
     self.contents_manager = HybridContents(
         managers={'': self._file_manager})
Example #25
def file_contents_manager(notebook_file):
    config = Config(FileContentsManager=FileContentsManager())
    config.FileContentsManager.root_dir = str(notebook_file.parent)
    initialize_post_save_hook(config)
    return config.FileContentsManager
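Example #25 reads as a pytest fixture. A minimal sketch of a test consuming it follows; the notebook_file fixture it depends on is assumed to yield a pathlib.Path to a saved notebook, and the only check made is the one the other examples also assert, namely that initialize_post_save_hook leaves a callable post_save_hook on the returned manager.

def test_fixture_registers_post_save_hook(file_contents_manager):
    # Hypothetical consumer of the fixture above: after initialize_post_save_hook
    # has run, the manager should expose a callable post-save hook.
    assert callable(file_contents_manager.post_save_hook)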