Example #1
def init_kb(root, exist_ok=False, config=None) -> bool:
    """ Create the KB directory and config, returning True on success. """
    success = False

    try:
        root = Config.get_root(root)

        os.makedirs(str(root), exist_ok=exist_ok)
        Config.create(root=root, config=config)

        KB(root=root)
        success = True

    except FileExistsError as e:
        logger.error(e)

    return success
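
A minimal usage sketch of init_kb, mirroring the call made in bootstrap (Example #5). The target directory is hypothetical, and services and default_config are assumed to be importable as the later examples suggest:

from pathlib import Path

import typer

# hypothetical target directory; exist_ok=True tolerates a pre-existing root
ok = services.init_kb(root=Path("/tmp/demo-kb"), exist_ok=True, config=default_config)
typer.echo(f"KB initialized: {ok}")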
Example #2
def sync_config(root: Optional[Path] = typer.Option(None)):
    """ Update configuration while keeping the current Secret Key. """

    root = Config.get_root(root)
    config = Config.create(root=root)
    old_json = config.json(indent=4)
    typer.echo(f"Getting current config: {config.file_path}")

    # retain the secret key
    default_config.secret_key = config.secret_key

    # write the new config
    with config.file_path.open(mode="w") as fp:
        new_json = default_config.json(indent=4)
        fp.write(new_json)
        fp.write("\n")

    typer.echo(f"Config updated: {config.file_path}")
    diff_gen = Differ().compare(old_json.splitlines(), new_json.splitlines())
    typer.echo("\n".join(diff_gen))
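
The diff at the end relies on difflib's Differ; a standalone sketch of that comparison step, using made-up config strings:

from difflib import Differ

old_json = '{\n    "secret_key": "abc"\n}'
new_json = '{\n    "secret_key": "abc",\n    "graph": "entitykb.Graph"\n}'

diff_gen = Differ().compare(old_json.splitlines(), new_json.splitlines())
print("\n".join(diff_gen))  # lines prefixed with "  ", "- ", "+ " by difflib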
Example #3
    def __init__(self, root=None):
        # load (or create) the config, then build each component from it
        self.config = Config.create(root=root)

        self.user_store = self.config.create_user_store()
        self.normalizer = self.config.create_normalizer()
        self.tokenizer = self.config.create_tokenizer()
        self.graph = self.config.create_graph(normalizer=self.normalizer)

        # one pipeline per named entry in the config (see Example #7)
        self.pipelines = {}
        for name, pipeline_config in self.config.pipelines.items():
            self.pipelines[name] = pipeline_config.create_pipeline(self)
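
A sketch of constructing the KB and inspecting the components wired up above; the root path is hypothetical and the import path is an assumption:

from entitykb import KB  # assumed import path

kb = KB(root="/tmp/demo-kb")  # hypothetical root

# the components built in __init__ are plain attributes
print(kb.config.file_path)
print(sorted(kb.pipelines))  # pipeline names, e.g. "default" and "mm" from Example #7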
Example #4
def clear(
    root: Optional[Path] = typer.Option(None),
    force: bool = typer.Option(False, "--force", "-f"),
):
    """ Clear local KB """

    root = Config.get_root(root)

    if root.exists():
        if not force:
            typer.confirm(f"Clearing {root}. Are you sure?", abort=True)

    kb = KB(root=root)
    kb.clear()
    services.finish("Clear", True)
Example #5
def bootstrap(
        root: Optional[Path] = typer.Option(None),
        dry_run: bool = typer.Option(False, "--dry-run"),
        wipe: bool = typer.Option(False, "--wipe"),
):
    """ Load HopeIQ local data store. """
    root = Config.get_root(root)
    typer.echo(f"Initializing kb: `{root}`")
    services.init_kb(root=root, exist_ok=True, config=default_config)

    if wipe:
        typer.echo(f"Clearing kb: `{root}`")
        commands.clear(root=root, force=True)

    file_name = "igniteiq.jsonl.gz"
    igniteiq_jsonl = Path(__file__).parent.parent.parent / "iiq" / file_name
    if not igniteiq_jsonl.exists():
        igniteiq_jsonl = Path("/data/iiq/") / file_name
        assert igniteiq_jsonl.exists(), f"Could not find {file_name}"

    commands.load(
        in_file=igniteiq_jsonl,
        root=root,
        file_format="jsonl",
        dry_run=dry_run,
        skip_reindex=True,
    )

    commands.load(
        in_file=code_path / "hope_ontologies.xlsx",
        root=root,
        file_format="ontologies",
        dry_run=dry_run,
        skip_reindex=True,
        is_binary=True,
    )

    # reindex once, now that both loads (run with skip_reindex=True) are complete
    if not dry_run:
        commands.reindex(root=root)
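
These commands are presumably registered on a Typer application elsewhere in the package; a hypothetical registration and shell invocation:

import typer

cli = typer.Typer()
cli.command()(bootstrap)
cli.command()(clear)

# from a shell (the module name is hypothetical):
#   python -m hopeiq bootstrap --wipe --dry-run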
Example #6
from urllib.parse import unquote

from fastapi import APIRouter, Body, security, Depends

from entitykb import (
    ProxyKB,
    models,
    Config,
    Direction,
    exceptions,
    User,
    UserToken,
)

router = APIRouter()
config = Config.create()
kb = ProxyKB()

# nodes


@router.get("/nodes/{key}", tags=["nodes"])
def get_node(key: str) -> dict:
    """ Parse text and return document object. """
    key = unquote(key)
    return kb.get_node(key)


@router.post("/nodes", tags=["nodes"])
async def save_node(node: dict = Body(...)) -> dict:
    """ Saves nodes to graph and terms to index. """
    # body assumed: delegate to the proxy KB, mirroring get_node above
    return kb.save_node(node)
Example #7
default_config = Config(
    graph="entitykb.Graph",
    modules=["ontologykb", "hopeiq"],
    normalizer="entitykb.LatinLowercaseNormalizer",
    searcher="entitykb.DefaultSearcher",
    tokenizer="ontologykb.PathTokenizer",
    pipelines={
        "default": {
            "extractor":
            "entitykb.DefaultExtractor",
            "resolvers": [
                "entitykb.TermResolver",
                "entitykb.contrib.date.DateResolver",
                "ontologykb.PersonResolver",
                "ontologykb.MutationGrammarResolver",
                "ontologykb.IntensityResolver",
                "ontologykb.ClockPositionResolver",
                "ontologykb.MeasurementResolver",
                "ontologykb.pathology_stage.PathologyStageResolver",
                "ontologykb.SystemIDResolver",
                "ontologykb.LymphNodesResolver",
            ],
            "filterers": ["ontologykb.CombinedFilterer"],
        },
        "mm": {
            "extractor":
            "entitykb.DefaultExtractor",
            "resolvers": [
                "entitykb.TermResolver",
                "entitykb.contrib.date.DateResolver",
                "ontologykb.MeasurementResolver",
            ],
            "filterers": ["hopeiq.MMFilterer"]
        }
    },
)
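
The pipeline names configured here ("default" and "mm") become the keys of kb.pipelines in Example #3. A hedged sketch of selecting the "mm" pipeline; the root path and sample text are made up, and the parse keyword is an assumption about the KB API:

from entitykb import KB  # assumed import path

kb = KB(root="/tmp/demo-kb")  # hypothetical root
mm = kb.pipelines["mm"]       # pipelines are keyed by the names configured above

# assumed API: a parse call that selects a configured pipeline by name
doc = kb.parse("1.2 cm lesion at 3 o'clock", pipeline="mm")
print(doc)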