import hashlib
import logging
import os
from concurrent.futures import Future
from datetime import datetime

# NOTE: these examples are collected from more than one module; the
# project-internal imports (DocutilsCache, Document, create_sphinx_app,
# retrieve_namespace, assess_source, uris, ...) are assumed to come from
# the surrounding package and are not reproduced here.

logger = logging.getLogger(__name__)


def test_query_doc_load_positions(tmp_path):
    cache = DocutilsCache(str(tmp_path), echo=False)
    position = {
        "block": True,
        "endCharacter": 9,
        "endLine": 0,
        "parent_uuid": None,
        "startCharacter": 0,
        "startLine": 0,
        "title": "hyperlink_target",
        "category": "hyperlink_target",
        "uuid": "uuid_1",
        "section_level": None,
        "role_name": None,
        "directive_name": None,
        "directive_data": None,
    }
    cache.update_doc(
        uri="test.rst",
        mtime=datetime(2019, 12, 30, 0, 0, 0),
        positions=[position],
        references=[],
        targets=[],
        doc_symbols=[],
        lints=[],
    )
    doc = cache.query_doc("test.rst", load_positions=True)

    assert doc.positions[0].column_dict(drop=("pk", )) == position


def test_query_at_position(tmp_path):
    cache = DocutilsCache(str(tmp_path), echo=False)
    positions = [
        {
            "uuid": "uuid_1",
            "startLine": 0,
            "startCharacter": 0,
            "endLine": 0,
            "endCharacter": 9,
            "block": False,
        },
        {
            "uuid": "uuid_2",
            "startLine": 1,
            "startCharacter": 5,
            "endLine": 3,
            "endCharacter": 9,
            "block": False,
        },
    ]
    cache.update_doc(
        uri="test.rst",
        mtime=datetime(2019, 12, 30, 0, 0, 0),
        positions=positions,
        references=[],
        targets=[],
        doc_symbols=[],
        lints=[],
    )
    assert cache.query_at_position(uri="test.rst", line=4, character=6) is None
    cache.query_at_position(uri="test.rst", line=0,
                            character=6).uuid == "uuid_1"
    cache.query_at_position(uri="test.rst", line=2,
                            character=6).uuid == "uuid_2"


def test_query_doc_load_lints(tmp_path):
    cache = DocutilsCache(str(tmp_path), echo=False)
    lint = {
        "source": "docutils",
        "line": 20,
        "category": "ERROR",
        "level": 3,
        "description": 'Unknown interpreted text role "sdf".',
    }
    cache.update_doc(
        uri="test.rst",
        mtime=datetime(2019, 12, 30, 0, 0, 0),
        positions=[],
        references=[],
        targets=[],
        doc_symbols=[],
        lints=[lint],
    )
    doc = cache.query_doc("test.rst", load_lints=True)

    assert doc.lints[0].column_dict(drop=("pk", )) == lint


def test_query_doc(tmp_path):
    cache = DocutilsCache(str(tmp_path), echo=False)
    cache.update_doc(
        uri="test.rst",
        mtime=datetime(2019, 12, 30, 0, 0, 0),
        positions=[],
        references=[],
        targets=[],
        doc_symbols=[{
            "contents": []
        }],
        lints=[],
    )
    doc = cache.query_doc("test.rst")
    assert doc.column_dict() == {
        "pk": 1,
        "mtime": datetime(2019, 12, 30, 0, 0, 0),
        "uri": "test.rst",
        "symbols": [{
            "contents": []
        }],
    }


def test_update_doc(tmp_path, data_regression):
    cache = DocutilsCache(str(tmp_path), echo=False)
    cache.update_doc(
        uri="test.rst",
        mtime=datetime(2019, 12, 30, 0, 0, 0),
        positions=[],
        references=[],
        targets=[],
        doc_symbols=[{}],
        lints=[],
    )
    cache.update_doc(
        uri="test.rst",
        mtime=datetime(2019, 12, 30, 0, 0, 1),
        positions=[{
            "uuid": "uuid_1",
            "block": True,
            "endCharacter": 9,
            "endLine": 0,
            "parent_uuid": None,
            "startCharacter": 0,
            "startLine": 0,
            "title": "hyperlink_target",
            "category": "hyperlink_target",
        }],
        doc_symbols=[{
            "children": []
        }],
        lints=[{
            "source": "docutils",
            "line": 20,
            "category": "ERROR",
            "level": 3,
            "description": 'Unknown interpreted text role "sdf".',
        }],
        targets=[{
            "uuid": "uuid_2",
            "position_uuid": "uuid_1",
            "node_type": "substitution_definition",
            "classes": [],
            "names": [],
        }],
        references=[{
            "position_uuid": "uuid_1",
            "target_uuid": "uuid_2",
            "node_type": "substitution_reference",
            "classes": [],
        }],
    )
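    # data_regression (from pytest-regressions) snapshots the full cache dump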
    data_regression.check(cache.to_dict())


def test_update_conf_file(tmp_path, data_regression):
    cache = DocutilsCache(str(tmp_path), echo=False)
    app_env = create_sphinx_app()
    roles, directives = retrieve_namespace(app_env)
    cache.update_conf_file(
        "conf.py",
        mtime=datetime(2019, 12, 30, 0, 0, 0),
        roles=roles,
        directives=directives,
    )
    cache.update_conf_file(
        "conf.py",
        mtime=datetime(2019, 12, 30, 0, 0, 1),
        roles=roles,
        directives=directives,
    )
    data_regression.check(
        cache.to_dict(order_by={
            "roles": "name",
            "directives": "name"
        }))


class Workspace(object):
    """Store an in-memory representation of the open workspace files."""
    def __init__(self, root_uri: str, server, config: Config):
        self._config = config
        self._root_uri = root_uri
        self._server = server
        self._root_uri_scheme = uris.urlparse(self._root_uri)[0]
        self._root_path = uris.to_fs_path(self._root_uri)
        self._open_docs = {}

        # hash the root URI so each workspace gets its own on-disk cache dir
        self._root_uri_hash = hashlib.md5(root_uri.encode("utf-8")).hexdigest()
        # TODO persist cache?
        remove_default_cache_path(self._root_uri_hash)
        path = create_default_cache_path(self._root_uri_hash, "database")
        self._db = DocutilsCache(str(path), echo=False)

        self._update_env()

    def _update_env(self):
        """Update the sphinx application."""
        # TODO how to watch conf.py for changes? (or at least have command to update)
        # TODO use self.source_roots to find conf path?
        # TODO allow source directory to be different to conf path
        conf_path = self._config.settings.get("conf_path", None)
        logger.debug(f"Settings: {self._config.settings}")
        if conf_path and not os.path.exists(conf_path):
            self.server.show_message(
                f"The path set in `rst_lsp.conf_path` does not exist: {conf_path}",
                msg_type=MessageType.Error,
            )
            conf_path = None
        elif conf_path:
            conf_path = os.path.realpath(conf_path)
        logger.debug(f"Using conf dir: {conf_path}")
        try:
            self._app_env = create_sphinx_app(
                conf_dir=os.path.dirname(conf_path) if conf_path else None,
                doctree_dir=create_default_cache_path(self._root_uri_hash,
                                                      "doctrees"),
                output_dir=create_default_cache_path(self._root_uri_hash,
                                                     "outputs"),
            )
        except Exception as err:
            self.server.show_message(
                ("An error occurred creating a sphinx application from "
                 f"`rst_lsp.conf_path`: {conf_path}.\n\n"
                 f"{err}"),
                msg_type=MessageType.Error,
            )
            conf_path = None
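            # fall back to a default Sphinx application without a user conf.py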
            self._app_env = create_sphinx_app(
                conf_dir=None,
                doctree_dir=create_default_cache_path(self._root_uri_hash,
                                                      "doctrees"),
                output_dir=create_default_cache_path(self._root_uri_hash,
                                                     "outputs"),
            )
        roles, directives = retrieve_namespace(self._app_env)
        self._db.update_conf_file(conf_path, datetime.utcnow(), roles,
                                  directives)
        # TODO if local, use os.path.getmtime?
        # TODO when to remove roles and directives with 'removed' status?

    def close(self):
        # TODO persist cache?
        remove_default_cache_path(self._root_uri_hash)

    @property
    def documents(self) -> dict:
        return self._open_docs

    @property
    def database(self) -> DocutilsCache:
        """Return the workspace database.

        If any open document's source text has not been assessed since its
        last change (or config update), it is parsed and the database is
        updated before returning.
        """
        for doc in self._open_docs.values():
            result = doc.get_assessment()  # type: SourceAssessResult
            self._db.update_doc(
                doc.uri,
                doc.mtime,
                doc_symbols=result.doc_symbols,
                positions=result.positions,
                targets=result.targets,
                references=result.references,
                lints=result.linting,
            )
        return self._db

    @property
    def app_env(self) -> SphinxAppEnv:
        return self._app_env

    @property
    def root_path(self) -> str:
        return self._root_path

    @property
    def root_uri(self) -> str:
        return self._root_uri

    @property
    def server(self):
        return self._server

    @property
    def config(self):
        return self._config

    def get_document(self, doc_uri: str):
        """Return a managed document if-present, else create one pointing at disk.

        See https://github.com/Microsoft/language-server-protocol/issues/177
        """
        doc = self._open_docs.get(doc_uri, None)
        if doc is None:
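            # not open in the editor: create a document that reads from disk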
            doc = self._create_document({"uri": doc_uri})
        return doc

    def put_document(self, document: TextDocument):
        self._open_docs[document["uri"]] = self._create_document(document)

    def rm_document(self, doc_uri):
        # TODO remove from database? or get notification when rst are deleted
        # see also m_workspace__did_change_watched_files
        self._open_docs.pop(doc_uri)

    def update_document(self, doc_uri, change: TextEdit, version=None):
        self._open_docs[doc_uri].apply_change(change)
        self._open_docs[doc_uri].version = version

    def update_config(self, config):
        self._config = config
        self._update_env()
        for doc_uri in self.documents:
            self.get_document(doc_uri).update_config(config)

        # TODO configuration option, whether to read all files

        conf_file = self._db.query_conf_file()
        if not conf_file or not os.path.exists(conf_file.uri):
            return
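        # skip files Sphinx ignores, plus docs already open in the editor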
        exclude_patterns = (
            self.app_env.app.config.exclude_patterns +
            self.app_env.app.config.templates_path +
            [uris.to_fs_path(uri) for uri in self.documents]
            # TODO current doc exclude doesn't appear to be working
        )
        all_paths = find_all_files(os.path.dirname(conf_file.uri),
                                   exclude_patterns=exclude_patterns)
        self.server.log_message(f"parsing {len(all_paths)} closed files")

        # start in separate thread, so the request can be returned
        future = self._server._endpoint._executor_service.submit(
            self.parse_closed_files, paths=all_paths)
        future.add_done_callback(self.notify_files)

    def notify_files(self, future: Future):
        if future.cancelled():
            self.server.log_message("cancelled parsing closed files")
            return
        self.server.log_message(
            f"finished parsing {future.result()} closed files")

    def parse_closed_files(self, paths):
        # TODO send progress to client (will require next LSP version 3.15.0)
        passed = 0
        for path in paths:
            try:
                with open(path) as handle:
                    source = handle.read()
                # TODO check doc not in database with same mtime
                result = assess_source(source, self.app_env, doc_uri=path)
                self._db.update_doc(
                    uri=uris.from_fs_path(path),
                    # TODO use os.path.getmtime(path)?
                    mtime=datetime.utcnow(),
                    doc_symbols=result.doc_symbols,
                    positions=result.positions,
                    targets=result.targets,
                    references=result.references,
                    lints=result.linting,
                )
                self.server.log_message(
                    f"file parsed: {uris.from_fs_path(path)}")
                passed += 1
            except Exception as err:
                self.server.log_message(f"file parse failed: {path}: {err}",
                                        MessageType.Error)
        # TODO now remove removed roles/directives from database
        return passed

    @property
    def is_local(self):
        """Test if the directory is local (i.e. can be accessed by ``os``)."""
        return (self._root_uri_scheme in ("", "file")
                and os.path.exists(self._root_path))

    def source_roots(self, document_path: str, filename: str = "conf.py"):
        """Return the source roots for the given document."""
        if not self.is_local:
            return None
        files = find_parents(self._root_path, document_path, [filename]) or []
        return list({os.path.dirname(project_file)
                     for project_file in files}) or [self._root_path]

    def _create_document(self, document: TextDocument):
        return Document(
            document["uri"],
            source=document.get("text", None),
            version=document.get("version", None),
            config=self._config,
            workspace=self,
        )


def test_init(tmp_path, data_regression):
    cache = DocutilsCache(str(tmp_path), echo=False)
    data_regression.check(cache.to_dict())