Example #1
def load(path_to_db: Union[Engine, str],
         echo: bool = False) -> Tuple[Session, Namespace]:
    """
    Load an already parsed database from disk or connect to a server and yield a database session to start querying on
    with the classes defined in the model namespace.

    Afterwards, the database can be queried using SQLAlchemy query syntax, providing the CIM classes contained in the
    :class:`~argparse.Namespace` return value.

    :param path_to_db: Path to the cim snapshot or a :class:`~cimpyorm.backend.Engine`.
    :param echo: Echo the SQL sent to the backend engine (SQLAlchemy option).

    :return: :class:`sqlalchemy.orm.session.Session`, :class:`argparse.Namespace`
    """
    import cimpyorm.Model.Schema as Schema
    from cimpyorm.Model import Source
    if isinstance(path_to_db, Engine):
        _backend = path_to_db
        _backend.echo = _backend.echo or echo
    elif os.path.isfile(path_to_db):
        _backend = SQLite(path_to_db, echo)
    else:
        raise NotImplementedError(
            f"Unable to connect to database {path_to_db}")

    session = _backend.session
    _backend.reset()

    _si = session.query(Source.SourceInfo).first()
    v = _si.cim_version
    log.info(f"CIM Version {v}")
    schema = Schema.Schema(session)
    schema.init_model(session)
    model = schema.model
    return session, model
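
A minimal usage sketch for load(); the import location, the database path, and the queried CIM class (ACLineSegment) are illustrative assumptions, not taken from the code above:

# Hypothetical usage; adjust the import and the path to your setup.
from cimpyorm import load

session, m = load("path/to/snapshot.db")
lines = session.query(m.ACLineSegment).all()  # SQLAlchemy query on a generated CIM class
session.close()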
Example #2
    def drop(self):
        """Drop the remote database and discard the cached engine."""
        try:
            log.info(f"Dropping database {self.path} at {self.host}.")
            self.engine.execute(f"DROP DATABASE {self.path};")
        except OperationalError:
            # The database does not exist (or cannot be dropped); ignore.
            pass
        self._engine = None
Example #3
    def drop(self):
        """Remove the on-disk database file if it exists."""
        try:
            os.remove(self.path)
            log.info(f"Removed old database {self.path}.")
            self._engine = None
        except FileNotFoundError:
            # No previous database file; nothing to remove.
            pass
Example #4
def test_merged_nsmaps(path):
    expected = {}
    for file in parseable_files(path):
        for key, value in et.parse(file).getroot().nsmap.items():
            expected[key] = value
    tree = merge(path)
    log.info(
        f"{len(expected.keys())} entries expected in nsmap. {len(tree.getroot().nsmap.keys())} found"
    )
    log.debug(f"Expected: {expected.keys()}")
    log.debug(f"Found: {tree.getroot().nsmap.keys()}")
    assert tree.getroot().nsmap == expected
Example #5
    def engine(self) -> SA_Engine:
        """
        :param echo:

        :param database:

        :return:
        """
        if not self._engine:
            log.info(f"Database: {self.path}")
            engine = self._connect_engine()
            self._engine = engine
        return self._engine
Example #6
    def init_model(self, session):
        """Initialise the ORM types for all classes in the hierarchy and generate their properties."""
        hierarchy = self.class_hierarchy()
        try:
            for c in hierarchy:
                c.init_type(aux.Base)
        except InvalidRequestError:
            # The classes are already mapped on the declarative Base; nothing to re-initialise.
            pass
        session.commit()
        session.flush()
        nsmap = session.query(SchemaInfo).one().nsmap
        for c in hierarchy:
            c.generate(nsmap)
        log.info(f"Generated {len(hierarchy)} classes")
Example #7
def parse_entries(entries, schema, silence_tqdm=False):
    """
    Instantiate ORM objects for every parsed entry whose CIM class is known to the schema.

    Entries of classes that are not part of the schema are skipped with a log message.

    :return: The list of created (not yet persisted) ORM objects.
    """
    classes = dict(
        schema.session.query(schema.Element_classes["CIMClass"].name,
                             schema.Element_classes["CIMClass"]).all())
    created = []
    for classname, elements in entries.items():
        if classname in classes:
            for uuid, element in tqdm(elements.items(),
                                      desc=f"Reading {classname}",
                                      leave=False,
                                      disable=silence_tqdm):
                argmap, insertables = classes[classname].parse_values(
                    element, schema.session)
                created.append(classes[classname].class_(id=uuid, **argmap))
                for insertable in insertables:
                    schema.session.execute(insertable)
        else:
            log.info(f"{classname} not implemented. Skipping.")
    return created
Example #8
def parse(dataset: Union[str, Path],
          backend: Engine = SQLite(),
          silence_tqdm: bool = False) -> Tuple[Session, Namespace]:
    """
    Parse a database into a database backend and yield a database session to start querying on with the classes defined
    in the model namespace.

    Afterwards, the database can be queried using SQLAlchemy query syntax, providing the CIM classes contained in the
    :class:`~argparse.Namespace` return value.

    :param dataset: Path to the cim snapshot.
    :param backend: Database backend to be used (defaults to a SQLite on-disk database in the dataset location).
    :param silence_tqdm: Silence tqdm progress bars

    :return: :class:`sqlalchemy.orm.session.Session`, :class:`argparse.Namespace`
    """
    from cimpyorm import Parser
    backend.update_path(dataset)
    # Reset database
    backend.drop()
    backend.reset()
    # And connect
    engine, session = backend.connect()

    files = Parser.get_files(dataset)
    from cimpyorm.Model.Source import SourceInfo
    sources = frozenset([SourceInfo(file) for file in files])
    session.add_all(sources)
    session.commit()

    cim_version = Parser.get_cim_version(sources)

    schema = Schema(version=cim_version, session=session)
    backend.generate_tables(schema)

    log.info("Parsing data.")
    entries = Parser.merge_sources(sources)
    elements = Parser.parse_entries(entries, schema, silence_tqdm=silence_tqdm)
    log.info(f"Passing {len(elements):,} objects to database.")
    session.bulk_save_objects(elements)
    session.flush()
    log.debug("Start commit.")
    session.commit()
    log.debug("Finished commit.")

    if engine.dialect.name == "mysql":
        log.debug("Enabling foreign key checks in mysql database.")
        session.execute("SET foreign_key_checks='ON'")

    log.info("Exit.")

    model = schema.model
    return session, model
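
A minimal usage sketch for parse(); the import location and the dataset path are illustrative assumptions, and the query assumes the dataset contains cim:Terminal objects:

# Hypothetical usage; adjust the import and the path to your setup.
from cimpyorm import parse

session, m = parse("path/to/dataset")            # defaults to an on-disk SQLite backend
n_terminals = session.query(m.Terminal).count()  # query with the generated CIM classes
session.close()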
Example #9
    def generate_tables(self, schema):
        """Compile the namespace maps for all schema classes and create the backend's tables."""
        g = schema.inheritance_graph
        hierarchy = list(bfs_tree(g, "__root__"))
        hierarchy.remove("__root__")
        log.info("Creating map prefixes.")
        for c in hierarchy:
            c.class_.compile_map(c.nsmap)
        # ToDo: create_all is quite slow; maybe this can be sped up. Currently low priority.
        log.info("Creating table metadata.")
        for child in g["__root__"]:
            child.class_.metadata.create_all(self.engine)
        log.info("Backend model ready.")
Example #10
    def __init__(self, session=None, version: str = "16"):
        """
        Initialize the Schema object, containing information about the schema elements.

        :param session: Database session to load the schema from or persist it to
            (defaults to a fresh in-memory backend).
        :param version: CIM schema version (default: "16").
        """
        self.g = None
        if not session:
            backend = InMemory()
            backend.reset()
            session = backend.session
        rdfs_path = find_rdfs_path(version)
        if not rdfs_path:
            raise FileNotFoundError(
                "Failed to find schema file. Please provide one.")
        tree = merge(rdfs_path)
        log.info("Dynamic code generation.")
        self.session = session
        self.Element_classes = {
            c.__name__: c
            for c in [
                CIMPackage, CIMClass, CIMProp, CIMDT, CIMEnum,
                CIMEnumValue, CIMDTUnit, CIMDTValue, CIMDTMultiplier,
                CIMDTDenominatorUnit, CIMDTDenominatorMultiplier
            ]
        }
        if session.query(SchemaElement).count():
            # A schema is already present, so just load it instead of recreating it.
            self.Elements = {
                c.__name__: {
                    cim_class.name: cim_class
                    for cim_class in session.query(c).all()
                }
                for c in self.Element_classes.values()
            }
        else:
            if isinstance(tree, type(et.ElementTree())):
                self.file = None
                self.root = tree.getroot()
            else:
                self.file = tree
                self.root = et.parse(tree).getroot()
            self.Elements = {
                c.__name__: defaultdict(list)
                for c in self.Element_classes.values()
            }
            self._init_parser()
            self._generate()
            for _, Cat_Elements in self.Elements.items():
                self.session.add_all(list(Cat_Elements.values()))
                self.session.commit()
            log.debug("Backend generated.")
            session.add(SchemaInfo(self.root.nsmap))
            self.init_model(session)
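
A minimal construction sketch for the class above; the import path is the one used in Example #1, and it assumes an RDFS schema for version "16" can be found:

# Hypothetical usage; everything beyond the import path is assumed.
import cimpyorm.Model.Schema as Schema

schema = Schema.Schema()               # no session supplied: a fresh InMemory backend is created
print(sorted(schema.Element_classes))  # names of the ORM classes that describe the schema elements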
Example #11
except ModuleNotFoundError:
    pass


try:
    # See if we already know a schemaroot
    CONFIG["Paths"]["SCHEMAROOT"] = get_path("SCHEMAROOT")
    if not os.path.isdir(CONFIG["Paths"]["SCHEMAROOT"]):
        # Is schemaroot an actual directory?
        log.warning(f"Invalid schema path in configuration.")
        raise NotADirectoryError
except (KeyError, NotADirectoryError):
    if os.path.isdir(os.path.join(_PACKAGEROOT, "res", "schemata")):
        # Look in the default path
        CONFIG["Paths"]["SCHEMAROOT"] = os.path.join(_PACKAGEROOT, "res", "schemata")
        log.info(f"Found schemata in default location.")
    else:
        # Ask user to configure
        log.warning(f"No schemata configured. Use cimpyorm.configure(path_to_schemata) to set-up.")
        from cimpyorm.api import configure

try:
    # See if we already know a datasetroot
    CONFIG["Paths"]["DATASETROOT"] = get_path("DATASETROOT")
    if not os.path.isdir(CONFIG["Paths"]["DATASETROOT"]):
        # Is datasetroot an actual directory?
        log.warning(f"Invalid dataset path in configuration.")
        raise NotADirectoryError
except (KeyError, NotADirectoryError):
    if os.path.isdir(os.path.join(_PACKAGEROOT, "res", "datasets")):
        # Look in the default path