def test_scenario_filter_gets_added_to_filter_model(self):
     url = "sqlite:///" + os.path.join(self._temp_dir.name, "db.sqlite")
     db_map = DiffDatabaseMapping(url, create=True)
     import_scenarios(db_map, (("scenario", True), ))
     db_map.commit_session("Add test data.")
     db_map.connection.close()
     self._link.connection.receive_resources_from_source(
         [database_resource("provider", url)])
     self._link.refresh_resource_filter_model()
     self.assertTrue(self._link.connection.has_filters())
     filter_model = self._link.resource_filter_model
     self.assertEqual(filter_model.rowCount(), 1)
     self.assertEqual(filter_model.columnCount(), 1)
     index = filter_model.index(0, 0)
     self.assertEqual(index.data(), url)
     root_item = filter_model.itemFromIndex(index)
     self.assertEqual(root_item.rowCount(), 2)
     self.assertEqual(root_item.columnCount(), 1)
     scenario_title_item = root_item.child(0, 0)
     self.assertEqual(scenario_title_item.index().data(), "Scenario filter")
     self.assertEqual(scenario_title_item.rowCount(), 2)
     self.assertEqual(scenario_title_item.columnCount(), 1)
     scenario_item = scenario_title_item.child(0, 0)
     self.assertEqual(scenario_item.index().data(), "Select all")
     scenario_item = scenario_title_item.child(1, 0)
     self.assertEqual(scenario_item.index().data(), "scenario")
Example #2
 def setUp(self):
     app_settings = MagicMock()
     self._temp_dir = TemporaryDirectory()
     url = "sqlite:///" + os.path.join(self._temp_dir.name, "db.sqlite")
     db_map = DiffDatabaseMapping(url, create=True)
     import_object_classes(db_map, ("class1",))
     import_object_parameters(db_map, (("class1", "parameter1"), ("class1", "parameter2")))
     import_objects(db_map, (("class1", "object1"), ("class1", "object2")))
     import_object_parameter_values(
         db_map,
         (
             ("class1", "object1", "parameter1", 1.0),
             ("class1", "object2", "parameter1", 3.0),
             ("class1", "object1", "parameter2", 5.0),
             ("class1", "object2", "parameter2", 7.0),
         ),
     )
     db_map.commit_session("Add test data.")
     db_map.connection.close()
     with patch("spinetoolbox.spine_db_manager.SpineDBManager.thread", new_callable=PropertyMock) as mock_thread:
         mock_thread.return_value = QApplication.instance().thread()
         self._db_mngr = SpineDBManager(app_settings, None)
         with patch.object(SpineDBEditor, "restore_ui"):
             self._editor = SpineDBEditor(self._db_mngr, {url: db_map.codename})
     object_class_index = self._editor.object_tree_model.index(0, 0)
     self._editor.object_tree_model.fetchMore(object_class_index)
     index = self._editor.object_tree_model.index(0, 0, object_class_index)
     self._editor.reload_pivot_table(index)
     self._model = self._editor.pivot_table_model
     self._model.start_fetching()
Example #3
 def test_tool_filter_gets_added_to_filter_model(self):
     url = "sqlite:///" + os.path.join(self._temp_dir.name, "db.sqlite")
     db_map = DiffDatabaseMapping(url, create=True)
     import_tools(db_map, ("tool", ))
     db_map.commit_session("Add test data.")
     db_map.connection.close()
     self._link.handle_dag_changed(
         [ProjectItemResource(MetaObject("provider", ""), "database", url)])
     self._link.refresh_resource_filter_model()
     self.assertTrue(self._link.connection.has_filters())
     filter_model = self._link.resource_filter_model
     self.assertEqual(filter_model.rowCount(), 1)
     self.assertEqual(filter_model.columnCount(), 1)
     index = filter_model.index(0, 0)
     self.assertEqual(index.data(), url)
     root_item = filter_model.itemFromIndex(index)
     self.assertEqual(root_item.rowCount(), 2)
     self.assertEqual(root_item.columnCount(), 1)
     tool_title_item = root_item.child(1, 0)
     self.assertEqual(tool_title_item.index().data(), "Tool filter")
     self.assertEqual(tool_title_item.rowCount(), 2)
     self.assertEqual(tool_title_item.columnCount(), 1)
     tool_item = tool_title_item.child(0, 0)
     self.assertEqual(tool_item.index().data(), "Select all")
     tool_item = tool_title_item.child(1, 0)
     self.assertEqual(tool_item.index().data(), "tool")
 def _open_db_writing(self, url: str):
     """Open Spine DB at url
     """
     try:
         self._db_map = DiffDatabaseMapping(url)
     except SpineDBVersionError:
         logging.error(f"Wrong Spine DB version in {url}")
         raise RuntimeError
     except SpineDBAPIError:
         logging.error(f"Could not open db at {url} for writing")
         raise RuntimeError
Example #5
 def test_fetch_tools(self):
     connection = Connection("source", "bottom", "destination", "top")
     url = "sqlite:///" + os.path.join(self._temp_dir.name, "db.sqlite")
     db_map = DiffDatabaseMapping(url, create=True)
     import_tools(db_map, ("tool",))
     db_map.commit_session("Add test data.")
     db_map.connection.close()
     resources = [database_resource("source", url)]
     connection.receive_resources_from_source(resources)
     self.assertFalse(connection.has_filters())
     connection.fetch_database_items()
     self.assertTrue(connection.has_filters())
     self.assertEqual(connection.resource_filters, {resources[0].label: {"tool_filter": {1: False}}})
Example #6
 def export_to_sqlite(self, file_path, data_for_export):
     """Exports given data into SQLite file."""
     url = URL("sqlite", database=file_path)
     if not self.db_mngr.is_url_available(url, self):
         return
     create_new_spine_database(url)
     db_map = DiffDatabaseMapping(url)
     import_data(db_map, **data_for_export)
     try:
         db_map.commit_session("Export initial data from Spine Toolbox.")
     except SpineDBAPIError as err:
         self.msg_error.emit(f"[SpineDBAPIError] Unable to export file <b>{db_map.codename}</b>: {err.msg}")
     else:
         self.sqlite_file_exported.emit(file_path)
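
For reference, a minimal standalone sketch of the same export flow without the Toolbox signalling. The file path, data dict and commit message are placeholders; only spinedb_api calls that already appear in these examples are assumed.

from spinedb_api import DiffDatabaseMapping, SpineDBAPIError, create_new_spine_database, import_data

def dump_to_sqlite(file_path, data_for_export):
    """Write the given data dict into a fresh Spine SQLite file."""
    url = "sqlite:///" + file_path
    create_new_spine_database(url)          # start from an empty Spine schema
    db_map = DiffDatabaseMapping(url)
    import_data(db_map, **data_for_export)  # returns (count, errors); errors ignored in this sketch
    try:
        db_map.commit_session("Initial data.")
    except SpineDBAPIError as err:
        print(f"Export failed: {err.msg}")
    finally:
        db_map.connection.close()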
Example #7
 def setUp(self):
     if self._tool_output_path.exists():
         shutil.rmtree(self._tool_output_path)
     self._database_path.parent.mkdir(parents=True, exist_ok=True)
     if self._database_path.exists():
         self._database_path.unlink()
     url = "sqlite:///" + str(self._database_path)
     db_map = DiffDatabaseMapping(url, create=True)
     import_alternatives(db_map, ("alternative_1", "alternative_2"))
     import_scenarios(db_map, (("scenario_1", True), ("scenario_2", True)))
     import_scenario_alternatives(db_map, (("scenario_1", "alternative_1"),
                                           ("scenario_2", "alternative_2")))
     db_map.commit_session("Add test data.")
     db_map.connection.close()
 def __init__(self, db_url, datapackage_descriptor, datapackage_base_path):
     super().__init__()
     self.db_url = db_url
     create_new_spine_database(self.db_url)
     self.db_map = DiffDatabaseMapping(db_url, getpass.getuser())
     self.datapackage = Package(datapackage_descriptor, datapackage_base_path)
     self.signaler = Signaler()
     self.resource_data = dict()
     self.object_class_count = None
     self.object_count = None
     self.relationship_class_count = None
     self.relationship_count = None
     self.parameter_count = None
     self.parameter_value_count = None
     for resource in self.datapackage.resources:
         self.resource_data[resource.name] = resource.read(cast=False)
Example #9
 def test_toggle_tool_filter(self):
     url = "sqlite:///" + os.path.join(self._temp_dir.name, "db.sqlite")
     db_map = DiffDatabaseMapping(url, create=True)
     import_tools(db_map, ("tool", ))
     db_map.commit_session("Add test data.")
     db_map.connection.close()
     self._link.handle_dag_changed(
         [ProjectItemResource(MetaObject("provider", ""), "database", url)])
     self._link.refresh_resource_filter_model()
     filter_model = self._link.resource_filter_model
     filter_model.set_online(url, "tool_filter", {1: True})
     self.assertEqual(self._link.connection.resource_filters,
                      {url: {
                          "tool_filter": {
                              1: True
                          }
                      }})
 def test_toggle_scenario_filter(self):
     url = "sqlite:///" + os.path.join(self._temp_dir.name, "db.sqlite")
     db_map = DiffDatabaseMapping(url, create=True)
     import_scenarios(db_map, (("scenario", True), ))
     db_map.commit_session("Add test data.")
     db_map.connection.close()
     self._link.connection.receive_resources_from_source(
         [database_resource("provider", url)])
     self._link.refresh_resource_filter_model()
     filter_model = self._link.resource_filter_model
     filter_model.set_online(url, "scenario_filter", {1: True})
     self.assertEqual(self._link.connection.resource_filters,
                      {url: {
                          "scenario_filter": {
                              1: True
                          }
                      }})
Example #11
 def import_from_sqlite(self, file_path):
     url = URL("sqlite", database=file_path)
     filename = os.path.split(file_path)[1]
     try:
         db_map = DiffDatabaseMapping(url)
     except (SpineDBAPIError, SpineDBVersionError) as err:
         self.msg.emit(f"Could'n import file {filename}: {str(err)}")
         return
     data = export_data(db_map)
     self.import_data(data)
     self.msg.emit(f"File {filename} successfully imported.")
Example #12
 def export_to_excel(self, file_path, data_for_export):
     """Exports given data into Excel file."""
     # NOTE: We import data into an in-memory Spine db and then export that to excel.
     url = URL("sqlite", database="")
     db_map = DiffDatabaseMapping(url, create=True)
     import_data(db_map, **data_for_export)
     file_name = os.path.split(file_path)[1]
     try:
         export_spine_database_to_xlsx(db_map, file_path)
     except PermissionError:
         self.msg_error.emit(
             f"Unable to export file <b>{file_name}</b>.<br/>" "Close the file in Excel and try again."
         )
     except OSError:
         self.msg_error.emit(f"[OSError] Unable to export file <b>{file_name}</b>.")
     else:
         self.file_exported.emit(file_path)


@unique
class P(IntEnum):
    CLASS = 0
    OBJECT = 1
    NAME = 2
    X = 3
    ALTERNATIVE = 4


in_url = sys.argv[1]
out_url = sys.argv[2]
in_db = DatabaseMapping(in_url)
out_db = DiffDatabaseMapping(out_url)

link_relationship_class_cn = "commodity__node"

parameters_gnu = {"apparent_power": "unitSizeMVA", "capacity": "capacity", 
                  "capacity_value": "availabilityCapacityMargin", "conversion_coefficient": "conversionCoeff",
                  "fom_cost": "fomCosts", "inertia": "inertia",
                  "investment_cost": "invCosts", "ramp_limit": "maxRampDown",
                  "subunit_capacity": "unitSize", "vom_cost": "vomCosts",
                  "shutdown_cost": "shutdownCost", "start_cost": "startCostCold"}
parameters_gnu2 = {"ramp_limit": "maxRampUp"}
source_relationship_class_gnu = "node__unit__io"
link_relationship_class_gnu = "commodity__node"
target_relationship_class_gnu = "grid__node__unit__io"

parameters_u = {"annuity": "annuity"} 
Example #14
 def _get_db_map(self):
     try:
         self._db_map = DiffDatabaseMapping(*self._db_map_args, **self._db_map_kwargs)
     except (SpineDBVersionError, SpineDBAPIError) as err:
         self._err = err
Example #15
def _get_db_map(url):
    try:
        return DiffDatabaseMapping(url, upgrade=False, username="******")
    except (SpineDBAPIError, SpineDBVersionError) as err:
        print("Unable to create database mapping for {0}, moving on...: {1}".
              format(url, err))
class DatapackageToSpineConverter(QRunnable):
    def __init__(self, db_url, datapackage_descriptor, datapackage_base_path):
        super().__init__()
        self.db_url = db_url
        create_new_spine_database(self.db_url)
        self.db_map = DiffDatabaseMapping(db_url, getpass.getuser())
        self.datapackage = Package(datapackage_descriptor, datapackage_base_path)
        self.signaler = Signaler()
        self.resource_data = dict()
        self.object_class_count = None
        self.object_count = None
        self.relationship_class_count = None
        self.relationship_count = None
        self.parameter_count = None
        self.parameter_value_count = None
        for resource in self.datapackage.resources:
            self.resource_data[resource.name] = resource.read(cast=False)

    def number_of_steps(self):
        self.object_class_count = len(self.datapackage.resources)
        self.object_count = sum(len(self.resource_data[x.name]) for x in self.datapackage.resources)
        self.relationship_class_count = len([x for x in self.datapackage.resources if x.schema.foreign_keys])
        self.relationship_count = sum(
            len(self.resource_data[x.name]) for x in self.datapackage.resources if x.schema.foreign_keys
        )
        self.parameter_count = sum(
            len(x.schema.fields)
            - len(x.schema.primary_key)
            - len([i for fk in x.schema.foreign_keys for i in fk["fields"]])
            for x in self.datapackage.resources
        )
        self.parameter_value_count = sum(
            len(self.resource_data[x.name])
            * (
                len(x.schema.fields)
                - len(x.schema.primary_key)
                - len([i for fk in x.schema.foreign_keys for i in fk["fields"]])
            )
            for x in self.datapackage.resources
        )
        return (
            self.object_class_count
            + self.object_count
            + self.relationship_class_count
            + self.relationship_count
            + self.parameter_count
            + self.parameter_value_count
        )

    def run(self):
        try:
            self._run()
            self.signaler.finished.emit()
        except SpineDBAPIError as e:
            self.signaler.failed.emit(e.msg)

    def _run(self):
        step = 0
        self.signaler.progressed.emit(step, "")
        object_class_names = [x.name for x in self.db_map.object_class_list()]
        parameter_names = [x.name for x in self.db_map.parameter_definition_list()]
        object_class_name_lists = [
            x.object_class_name_list.split(",") for x in self.db_map.wide_relationship_class_list()
        ]
        object_classes = list()
        pre_relationship_classes = list()
        pre_parameters = list()
        for resource in self.datapackage.resources:
            if resource.name not in object_class_names:
                object_classes.append(dict(name=resource.name))
                object_class_names.append(resource.name)
            primary_key = resource.schema.primary_key
            foreign_keys = resource.schema.foreign_keys
            reference_resource_names = [fk["reference"]["resource"] for fk in foreign_keys]
            for reference_resource_name in reference_resource_names:
                if reference_resource_name not in object_class_names:
                    object_classes.append(dict(name=reference_resource_name))
                    object_class_names.append(reference_resource_name)
            if reference_resource_names:
                object_class_name_list = [resource.name] + reference_resource_names
                relationship_class_name = "__".join(object_class_name_list)
                pre_relationship_classes.append(
                    dict(object_class_name_list=object_class_name_list, name=relationship_class_name)
                )
                object_class_name_lists.append(object_class_name_list)
            for field in resource.schema.fields:
                # Skip fields in primary key
                if field.name in primary_key:
                    continue
                # Skip fields in any foreign key
                if field.name in [x for fk in foreign_keys for x in fk["fields"]]:
                    continue
                parameter_name = resource.name + "_" + field.name
                if parameter_name not in parameter_names:
                    pre_parameters.append(dict(object_class_name=resource.name, name=parameter_name))
                    parameter_names.append(parameter_name)
        self.signaler.progressed.emit(step, "Adding object classes...")
        self.db_map.add_object_classes(*object_classes)
        step += self.object_class_count
        object_class_name_id = {x.name: x.id for x in self.db_map.object_class_list()}
        relationship_classes = [
            dict(object_class_id_list=[object_class_name_id[n] for n in r['object_class_name_list']], name=r['name'])
            for r in pre_relationship_classes
        ]
        self.signaler.progressed.emit(step, "Adding relationship classes...")
        self.db_map.add_wide_relationship_classes(*relationship_classes)
        step += self.relationship_class_count
        parameters = [
            dict(object_class_id=object_class_name_id[p['object_class_name']], name=p['name']) for p in pre_parameters
        ]
        self.signaler.progressed.emit(step, "Adding parameters...")
        self.db_map.add_parameter_definitions(*parameters)
        step += self.parameter_count
        relationship_class_name_id = {x.name: x.id for x in self.db_map.wide_relationship_class_list()}
        parameter_name_id = {x.name: x.id for x in self.db_map.parameter_definition_list()}
        object_names = [x.name for x in self.db_map.object_list()]
        # Create list of object and preliminary parameter value dicts.
        objects = list()
        pre_parameter_values = list()
        for resource in self.datapackage.resources:
            object_class_id = object_class_name_id[resource.name]
            primary_key = resource.schema.primary_key
            foreign_keys = resource.schema.foreign_keys
            foreign_keys_fields = [x for fk in foreign_keys for x in fk["fields"]]
            for i, row in enumerate(self.resource_data[resource.name]):
                row_dict = dict(zip(resource.schema.field_names, row))
                if primary_key:
                    object_name_suffix = "_".join(row_dict[field] for field in primary_key)
                else:
                    object_name_suffix = str(i)
                object_name = resource.name + "_" + object_name_suffix
                if object_name not in object_names:
                    objects.append(dict(class_id=object_class_id, name=object_name))
                    object_names.append(object_name)
                for field_name, value in row_dict.items():
                    if field_name in primary_key:
                        continue
                    if field_name in foreign_keys_fields:
                        continue
                    parameter_name = resource.name + "_" + field_name
                    parameter_id = parameter_name_id[parameter_name]
                    pre_parameter_values.append(dict(object_name=object_name, parameter_id=parameter_id, value=value))
        self.signaler.progressed.emit(step, "Adding objects...")
        self.db_map.add_objects(*objects)
        step += self.object_count
        object_name_id = {x.name: x.id for x in self.db_map.object_list()}
        parameter_values = [
            dict(object_id=object_name_id[p['object_name']], parameter_id=p['parameter_id'], value=p['value'])
            for p in pre_parameter_values
        ]
        self.signaler.progressed.emit(step, "Adding parameter values...")
        self.db_map.add_parameter_values(*parameter_values)
        step += self.parameter_value_count
        # Create dictionary of reference resource names => list of reference fields names
        reference_resource_dict = dict()
        for resource in self.datapackage.resources:
            foreign_keys = resource.schema.foreign_keys
            for foreign_key in foreign_keys:
                reference_resource_name = foreign_key["reference"]["resource"]
                reference_fields_names = foreign_key["reference"]["fields"]
                reference_resource_dict.setdefault(reference_resource_name, list()).append(reference_fields_names)
        # Create dictionary of reference resource name => reference fields names
        # => reference key => object id
        reference_object_id_dict = dict()
        for reference_resource_name, reference_fields_names_list in reference_resource_dict.items():
            reference_resource = self.datapackage.get_resource(reference_resource_name)
            reference_primary_key = reference_resource.schema.primary_key
            reference_object_id_dict[reference_resource_name] = d1 = dict()
            for reference_fields_names in reference_fields_names_list:
                d1[",".join(reference_fields_names)] = d2 = dict()
                for i, row in enumerate(self.resource_data[reference_resource_name]):
                    row_dict = dict(zip(reference_resource.schema.field_names, row))
                    # Find object id
                    if reference_primary_key:
                        reference_object_name_suffix = "_".join(row_dict[field] for field in reference_primary_key)
                    else:
                        reference_object_name_suffix = str(i)
                    reference_object_name = reference_resource_name + "_" + reference_object_name_suffix
                    reference_object_id = object_name_id[reference_object_name]
                    key = ",".join([row_dict[x] for x in reference_fields_names])
                    d2[key] = (reference_object_id, reference_object_name)
        # Create list of relationships
        relationships = list()
        for resource in self.datapackage.resources:
            primary_key = resource.schema.primary_key
            foreign_keys = resource.schema.foreign_keys
            reference_resource_names = [fk['reference']['resource'] for fk in foreign_keys]
            if not reference_resource_names:
                continue
            object_class_name_list = [resource.name] + reference_resource_names
            relationship_class_name = "__".join(object_class_name_list)
            relationship_class_id = relationship_class_name_id[relationship_class_name]
            for i, row in enumerate(self.resource_data[resource.name]):
                row_dict = dict(zip(resource.schema.field_names, row))
                if primary_key:
                    object_name_suffix = "_".join(row_dict[field] for field in primary_key)
                else:
                    object_name_suffix = str(i)
                object_name = resource.name + "_" + object_name_suffix
                object_id = object_name_id[object_name]
                object_id_list = [object_id]
                object_name_list = [object_name]
                for fk in foreign_keys:
                    fields_names = fk['fields']
                    reference_resource_name = fk['reference']['resource']
                    reference_fields_names = fk['reference']['fields']
                    key = ",".join([row_dict[x] for x in fields_names])
                    d1 = reference_object_id_dict[reference_resource_name]
                    d2 = d1[",".join(reference_fields_names)]
                    try:
                        reference_object_id, reference_object_name = d2[key]
                    except KeyError:
                        break
                    object_id_list.append(reference_object_id)
                    object_name_list.append(reference_object_name)
                else:
                    relationship_name = relationship_class_name + "_" + "__".join(object_name_list)
                    relationships.append(
                        dict(class_id=relationship_class_id, object_id_list=object_id_list, name=relationship_name)
                    )
        self.signaler.progressed.emit(step, "Adding relationships...")
        self.db_map.add_wide_relationships(*relationships)
        step += self.relationship_count
        self.db_map.commit_session("Automatically generated by Spine Toolbox.")
        self.signaler.progressed.emit(step, "")
Example #17
 def test_filter_stacks(self):
     """Tests filter stacks are properly applied."""
     with TemporaryDirectory() as temp_dir:
         url = "sqlite:///" + os.path.join(temp_dir, "db.sqlite")
         db_map = DiffDatabaseMapping(url, create=True)
         import_scenarios(db_map, (("scen1", True), ("scen2", True)))
         import_tools(db_map, ("toolA", ))
         db_map.commit_session("Add test data.")
         db_map.connection.close()
         url_a_fw = _make_resource(url)
         url_b_fw = _make_resource("db:///url_b_fw")
         url_c_bw = _make_resource("db:///url_c_bw")
         mock_item_a = self._mock_item("item_a",
                                       resources_forward=[url_a_fw],
                                       resources_backward=[])
         mock_item_b = self._mock_item("item_b",
                                       resources_forward=[url_b_fw],
                                       resources_backward=[])
         mock_item_c = self._mock_item("item_c",
                                       resources_forward=[],
                                       resources_backward=[url_c_bw])
         items = {
             "item_a": mock_item_a,
             "item_b": mock_item_b,
             "item_c": mock_item_c
         }
         connections = [
             {
                 "from": ("item_a", "right"),
                 "to": ("item_b", "left"),
                 "resource_filters": {
                     url_a_fw.label: {
                         "scenario_filter": [1, 2],
                         "tool_filter": [1]
                     }
                 },
             },
             {
                 "from": ("item_b", "bottom"),
                 "to": ("item_c", "left")
             },
         ]
         successors = {"item_a": ["item_b"], "item_b": ["item_c"]}
         execution_permits = {
             "item_a": True,
             "item_b": True,
             "item_c": True
         }
         engine = SpineEngine(items=items,
                              connections=connections,
                              node_successors=successors,
                              execution_permits=execution_permits)
         engine._make_item = lambda name, direction: engine._items[name]
         with patch("spine_engine.spine_engine.create_timestamp"
                    ) as mock_create_timestamp:
             mock_create_timestamp.return_value = "timestamp"
             engine.run()
         # Check that item_b has been executed two times, with the right filters
         self.assertEqual(len(mock_item_b.fwd_flt_stacks), 2)
         self.assertEqual(len(mock_item_b.bwd_flt_stacks), 2)
         self.assertIn(
             (scenario_filter_config("scen1"), tool_filter_config("toolA")),
             mock_item_b.fwd_flt_stacks)
         self.assertIn(
             (scenario_filter_config("scen2"), tool_filter_config("toolA")),
             mock_item_b.fwd_flt_stacks)
         self.assertIn(
             (execution_filter_config({
                 "execution_item": "item_b",
                 "scenarios": ["scen1"],
                 "timestamp": "timestamp"
             }), ),
             mock_item_b.bwd_flt_stacks,
         )
         self.assertIn(
             (execution_filter_config({
                 "execution_item": "item_b",
                 "scenarios": ["scen2"],
                 "timestamp": "timestamp"
             }), ),
             mock_item_b.bwd_flt_stacks,
         )
         # Check that item_c has also been executed two times
         self.assertEqual(len(mock_item_c.fwd_flt_stacks), 2)
class SpineDB(object):
    """Class for working with a Spine database, especially when adding data 
    """

    def __init__(self, url: str, mode='r', create=False):
        """Open Spine database at url for modifying
        
        Raises:
            RuntimeError: Could not open database
        """
        if mode == 'r' and not create:
            self._open_db_reading(url)
        elif mode == 'w' and not create:
            self._open_db_writing(url)
        elif create:
            self._create_db(url)

    def _open_db_reading(self, url: str):
        """Open Spine DB at url for reading
        """
        try:
            self._db_map = DatabaseMapping(url)
        except SpineDBVersionError:
            logging.error(f"Wrong Spine DB version in {url}")
            raise RuntimeError
        except SpineDBAPIError:
            logging.error(f"Could not open db at {url} for reading")
            raise RuntimeError

    def _open_db_writing(self, url: str):
        """Open Spine DB at url
        """
        try:
            self._db_map = DiffDatabaseMapping(url)
        except SpineDBVersionError:
            logging.error(f"Wrong Spine DB version in {url}")
            raise RuntimeError
        except SpineDBAPIError:
            logging.error(f"Could not open db at {url} for writing")
            raise RuntimeError

    def _create_db(self, url: str):
        """Create Spine DB at url
        """
        logging.info(f"Creating a new Spine DB at '{url}'")
        try:
            create_new_spine_database(url)
        except SQLAlchemyArgumentError as e:
            raise RuntimeError(e)
        except SpineDBAPIError as e:
            raise RuntimeError(e)
        else:
            self._open_db_writing(url)

    def import_object_classes(self, class_name) -> int:
        """Add object classes from a list of class name and description tuples
        Example::

            class_name = ['new_object_class', ('another_object_class', 'description', 123456)]
            import_object_classes(class_name)

        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_object_classes(
            self._db_map, class_name)
        if errors:
            self._handle_errors(errors)
        return n_imported

    def import_objects(self, objects) -> int:
        """Add objects of specific class from a list of class name and object name tuples

        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_objects(
            self._db_map, objects)
        if errors:
            self._handle_errors(errors)
        return n_imported

    def import_object_parameter_values(self, object_parameter_values) -> int:
        """Import object parameter values from a list of object class name, object name, parameter name and value tuples
        Example::

            object_parameter_values = [('object_class_name', 'object_name', 'parameter_name', 123.4),
                    ('object_class_name', 'object_name', 'parameter_name2',
                        '{"type":"time_series", "data": [1,2,3]}'),
                    ('object_class_name', 'object_name', 'parameter_name',
                        '{"type":"time_series", "data": [1,2,3]}', 'alternative')]
            import_object_parameter_values(object_parameter_values)

        Args:
            object_parameter_values (List[List/Tuple]): list/set/iterable of lists/tuples with
                object_class_name, object name, parameter name, (deserialized) parameter value,
                optional name of an alternative
        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_object_parameter_values(
            self._db_map, object_parameter_values
        )
        if errors:
            self._handle_errors(errors)
        return n_imported

    def import_object_groups(self, object_groups) -> int:
        """Add objects of specific class from a list of class name and object name tuples

        Returns:
            n_imported (int): Number of improrted entities
        """
        n_imported_members, errors = import_functions.import_object_groups(
            self._db_map, object_groups)
        if errors:
            self._handle_errors(errors)
        return n_imported_members

    def import_relationship_classes(self, class_description) -> int:
        """Imports relationship classes.

         Example::

            class_description = [
                ('new_rel_class', ['object_class_1', 'object_class_2']),
                ('another_rel_class', ['object_class_3', 'object_class_4'], 'description'),
            ]
            import_relationship_classes(class_description)

        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_relationship_classes(
            self._db_map, class_description
        )
        if errors:
            self._handle_errors(errors)
        return n_imported

    def import_relationships(self, relationships) -> int:
        """Import relationships from a list of relationship name and object name list tuples

        Example::

        relationships = [('relationship_class_name', ('object_name1', 'object_name2'))]
        import_relationships(relationships)

        Returns:
            n_imported (int): Number of improrted entities    
        """
        n_imported, errors = import_functions.import_relationships(
            self._db_map, relationships
        )
        if errors:
            self._handle_errors(errors)
        return n_imported

    def import_relationship_parameter_values(self, relationship_parameter_values) -> int:
        """Import relationship parameter values from a list of relationship name,
        object name list, parameter name and value tuples
        
        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_relationship_parameter_values(
            self._db_map, relationship_parameter_values
        )
        if errors:
            self._handle_errors(errors)
        return n_imported

    def import_alternatives(self, data) -> int:
        """
        Imports alternatives.

        Example:

            data = ['new_alternative', ('another_alternative', 'description')]

        Args:
            data (Iterable): an iterable of alternative names,
                or of lists/tuples with alternative names and optional descriptions

        Returns:
            n_imported (int): Number of successfully inserted alternatives
        """
        n_imported, errors = import_functions.import_alternatives(self._db_map, data)
        if errors:
            self._handle_errors(errors)
        return n_imported

    def import_data(self, data) -> int:
        """Import data
        
        Args:
            data (dict): Dictionary mapping entity types to definitions
            
        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_data(self._db_map, **data)
        if errors:
            self._handle_errors(errors)
        return n_imported

    def export_data(self) -> dict:
        """Export all data from the Spine DB.

        :return: Dict with all data.
        """
        try:
            return export_functions.export_data(self._db_map)
        except Exception as e:
            self._handle_errors([e])

    def _handle_errors(self, errors: list):
        for e in errors:
            logging.warning(e)

    def commit(self, message):
        """Commit current changes
        """
        try:
            self._db_map.commit_session(message)
        except SpineDBAPIError as e:
            logging.warning(e)
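
A hedged usage sketch of the SpineDB wrapper above; the URL, entity names and values are placeholders, and the data dict uses the same keys that spinedb_api's import_data/export_data work with.

db = SpineDB("sqlite:///units.sqlite", create=True)   # raises RuntimeError if the DB cannot be created or opened
db.import_data({
    "object_classes": ["unit"],
    "object_parameters": [("unit", "capacity")],
    "objects": [("unit", "plant_1")],
    "object_parameter_values": [("unit", "plant_1", "capacity", 400.0)],
})
db.commit("Initial import")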
Example #19
class SpineDB(object):
    """
    Class for working with a Spine database, especially when adding data
    """

    def __init__(self, url: str, mode='r', create=False):
        """
        Open Spine database at url in read ('r') or write ('w') mode, or create a new database when create=True
        
        Raises:
            RuntimeError: Could not open database
        """
        if mode == 'r' and not create:
            self._open_db_reading(url)
        elif mode == 'w' and not create:
            self._open_db_writing(url)
        elif create:
            self._create_db(url)

    def _open_db_reading(self, url: str):
        """
        Open Spine DB at url for reading
        """
        try:
            self._db_map = DatabaseMapping(url)
        except SpineDBVersionError:
            logging.error(f"Wrong Spine DB version in {url}")
            raise RuntimeError
        except SpineDBAPIError:
            logging.error(f"Could not open db at {url} for reading")
            raise RuntimeError

    def _open_db_writing(self, url: str):
        """
        Open Spine DB at url for writing
        """
        try:
            self._db_map = DiffDatabaseMapping(url)
        except SpineDBVersionError:
            logging.error(f"Wrong Spine DB version in {url}")
            raise RuntimeError
        except SpineDBAPIError:
            logging.error(f"Could not open db at {url} for writing")
            raise RuntimeError

    def _create_db(self, url: str):
        """
        Create Spine DB at url
        """
        logging.info(f"Creating a new Spine DB at '{url}'")
        try:
            create_new_spine_database(url)
        except SQLAlchemyArgumentError as e:
            raise RuntimeError(e)
        except SpineDBAPIError as e:
            raise RuntimeError(e)
        else:
            self._open_db_writing(url)

    def import_object_classes(self, class_name) -> int:
        """
        Add object classes from a list of class name and description tuples
        Example::

            class_name = ['new_object_class', ('another_object_class', 'description', 123456)]
            import_object_classes(class_name)

        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_object_classes(
            self._db_map, class_name)
        if errors:
            _handle_errors(errors)
        return n_imported

    def import_objects(self, objects) -> int:
        """
        Add objects of specific class from a list of class name and object name tuples

        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_objects(
            self._db_map, objects)
        if errors:
            _handle_errors(errors)
        return n_imported

    def import_object_parameter_values(self, object_parameter_values) -> int:
        """
        Import object parameter values from a list of object class name, object name, parameter name and value tuples
        Example::

            object_parameter_values = [('object_class_name', 'object_name', 'parameter_name', 123.4),
                    ('object_class_name', 'object_name', 'parameter_name2',
                        '{"type":"time_series", "data": [1,2,3]}'),
                    ('object_class_name', 'object_name', 'parameter_name',
                        '{"type":"time_series", "data": [1,2,3]}', 'alternative')]
            import_object_parameter_values(object_parameter_values)

        Args:
            object_parameter_values (List[List/Tuple]): list/set/iterable of lists/tuples with
                object_class_name, object name, parameter name, (deserialized) parameter value,
                optional name of an alternative
        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_object_parameter_values(
            self._db_map, object_parameter_values
        )
        if errors:
            _handle_errors(errors)
        return n_imported

    def import_object_groups(self, object_groups) -> int:
        """
        Add object groups from a list of class name, group name and member name tuples

        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported_members, errors = import_functions.import_object_groups(
            self._db_map, object_groups)
        if errors:
            _handle_errors(errors)
        return n_imported_members

    def import_relationship_classes(self, class_description) -> int:
        """
        Imports relationship classes.

         Example::

            class_description = [
                ('new_rel_class', ['object_class_1', 'object_class_2']),
                ('another_rel_class', ['object_class_3', 'object_class_4'], 'description'),
            ]
            import_relationship_classes(class_description)

        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_relationship_classes(
            self._db_map, class_description
        )
        if errors:
            _handle_errors(errors)
        return n_imported

    def import_relationships(self, relationships) -> int:
        """
        Import relationships from a list of relationship class name and object name list tuples

        Example::

            relationships = [('relationship_class_name', ('object_name1', 'object_name2'))]
            import_relationships(relationships)

        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_relationships(
            self._db_map, relationships
        )
        if errors:
            _handle_errors(errors)
        return n_imported

    def import_relationship_parameter_values(self, relationship_parameter_values) -> int:
        """
        Import relationship parameter values from a list of relationship name,
        object name list, parameter name and value tuples
        
        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_relationship_parameter_values(
            self._db_map, relationship_parameter_values
        )
        if errors:
            _handle_errors(errors)
        return n_imported

    def import_alternatives(self, data) -> int:
        """
        Imports alternatives.

        Example:

            data = ['new_alternative', ('another_alternative', 'description')]

        Args:
            data (Iterable): an iterable of alternative names,
                or of lists/tuples with alternative names and optional descriptions

        Returns:
            n_imported (int): Number of successfully inserted alternatives
        """
        n_imported, errors = import_functions.import_alternatives(self._db_map, data)
        if errors:
            _handle_errors(errors)
        return n_imported

    def import_data(self, data) -> int:
        """
        Import data
        
        Args:
            data (dict): Dictionary mapping entity types to definitions
            
        Returns:
            n_imported (int): Number of imported entities
        """
        n_imported, errors = import_functions.import_data(self._db_map, **data)
        if errors:
            _handle_errors(errors)
        return n_imported

    def export_data(self) -> dict:
        """
        Exports all data from the Spine DB. Intended to be used together with subsequent calls to the import_ functions.

        :return: Dict with all data.
        """
        try:
            _data_dict = export_functions.export_data(self._db_map)
            return _data_dict
        except Exception as e:
            _handle_errors([e])
            logging.warning(e)

    def commit(self, message):
        """
        Commit current changes
        """
        try:
            self._db_map.commit_session(message)
        except SpineDBAPIError as e:
            logging.warning(e)

    """
    Additions from this point made by Jim Hommes.
    """

    def close_connection(self):
        """
        Closes the connection to the Spine DB. Necessary when running through Spine Toolbox, because the
        connection is not closed automatically when an exception is thrown.
        """
        self._db_map.connection.close()

    def query_object_parameter_values_by_object_class(self, object_class_name):
        """
        When not all data is required, this function can be used to query all parameter values for a certain
        object class.

        :param object_class_name: Name of the object class.
        :return: List of dicts with object_class_name, object_name, parameter_name, parameter_value and alternative
        """
        subquery = self._db_map.object_parameter_value_sq
        return [{'object_class_name': value_row.object_class_name,
                 'object_name': value_row.object_name,
                 'parameter_name': value_row.parameter_name,
                 'parameter_value': from_database(value_row.value, value_row.type),
                 'alternative': value_row.alternative_name}
                for value_row
                in self._db_map.query(subquery).filter(subquery.c.object_class_name == object_class_name).all()]

    def query_object_parameter_values_by_object_classes(self, object_class_name_list):
        """
        Practically the same as query_object_parameter_values_by_object_class, but multiple object classes
        can be specified.

        :param object_class_name_list: List of object class names.
        :return: List of dicts with object_class_name, object_name, parameter_name, parameter_value and alternative
        """
        subquery = self._db_map.object_parameter_value_sq
        return [{'object_class_name': value_row.object_class_name,
                 'object_name': value_row.object_name,
                 'parameter_name': value_row.parameter_name,
                 'parameter_value': value_row.value,
                 'alternative': value_row.alternative_name}
                for value_row
                in self._db_map.query(subquery).filter(subquery.c.object_class_name.in_(object_class_name_list)).all()]

    def query_object_parameter_values_by_object_class_and_object_name(self, object_class_name, object_name):
        """
        When not all data is required, this function can be used to query all parameter values for a certain
        object class and object name. Handy for objects with only one value.

        :param object_class_name: Name of the object class.
        :param object_name: Name of the object.
        :return: List of dicts with object_class_name, object_name, parameter_name, parameter_value and alternative
        """
        subquery = self._db_map.object_parameter_value_sq
        return [{'object_class_name': value_row.object_class_name,
                 'object_name': value_row.object_name,
                 'parameter_name': value_row.parameter_name,
                 'parameter_value': from_database(value_row.value, value_row.type),
                 'alternative': value_row.alternative_name}
                for value_row
                in self._db_map.query(subquery).filter(subquery.c.object_class_name == object_class_name).filter(subquery.c.object_name == object_name).all()]
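
A hedged sketch of the query helpers above; the database URL, object class and object names are placeholders.

db = SpineDB("sqlite:///plants.sqlite", mode='r')
for row in db.query_object_parameter_values_by_object_class("PowerPlants"):
    print(row['object_name'], row['parameter_name'], row['parameter_value'], row['alternative'])
rows = db.query_object_parameter_values_by_object_class_and_object_name("PowerPlants", "plant_1")
db.close_connection()  # Spine does not close the connection automatically on errors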
Example #20
    def _create_database(directory):
        """Creates a database with objects, relationship, parameters and values."""
        url = TestExcelIntegration._sqlite_url(_TEMP_SQLITE_FILENAME,
                                               directory)
        create_new_spine_database(url)
        db_map = DiffDatabaseMapping(url,
                                     username='******',
                                     upgrade=True)

        # create empty database for loading excel into
        url = TestExcelIntegration._sqlite_url(_TEMP_SQLITE_TEST_FILENAME,
                                               directory)
        create_new_spine_database(url)
        db_map_test = DiffDatabaseMapping(url,
                                          username='******',
                                          upgrade=True)

        # delete all object_classes to empty database
        oc = set(oc.id for oc in db_map_test.object_class_list().all())
        if oc:
            db_map_test.remove_items(object_class_ids=oc)
            db_map_test.commit_session('empty database')

        oc = set(oc.id for oc in db_map.object_class_list().all())
        if oc:
            db_map.remove_items(object_class_ids=oc)
            db_map.commit_session('empty database')

        # create object classes
        oc_1 = db_map.add_object_class(**{'name': 'object_class_1'})
        oc_2 = db_map.add_object_class(**{'name': 'object_class_2'})
        oc_3 = db_map.add_object_class(**{'name': 'object_class_3'})

        # create relationship classes
        relc1 = db_map.add_wide_relationship_class(
            **{
                'name': 'relationship_class',
                'object_class_id_list': [oc_1.id, oc_2.id]
            })
        relc2 = db_map.add_wide_relationship_class(
            **{
                'name': 'relationship_class2',
                'object_class_id_list': [oc_1.id, oc_2.id]
            })

        # create objects
        oc1_obj1 = db_map.add_object(**{
            'name': 'oc1_obj1',
            'class_id': oc_1.id
        })
        oc1_obj2 = db_map.add_object(**{
            'name': 'oc1_obj2',
            'class_id': oc_1.id
        })
        oc2_obj1 = db_map.add_object(**{
            'name': 'oc2_obj1',
            'class_id': oc_2.id
        })
        oc2_obj2 = db_map.add_object(**{
            'name': 'oc2_obj2',
            'class_id': oc_2.id
        })
        oc3_obj1 = db_map.add_object(**{
            'name': 'oc3_obj1',
            'class_id': oc_3.id
        })

        # add relationships
        rel1 = db_map.add_wide_relationship(
            **{
                'name': 'rel1',
                'class_id': relc1.id,
                'object_id_list': [oc1_obj1.id, oc2_obj1.id]
            })
        rel2 = db_map.add_wide_relationship(
            **{
                'name': 'rel2',
                'class_id': relc1.id,
                'object_id_list': [oc1_obj2.id, oc2_obj2.id]
            })

        # create parameters
        p1 = db_map.add_parameter_definitions(*[{
            'name': 'parameter1',
            'object_class_id': oc_1.id
        }])[0].first()
        p2 = db_map.add_parameter_definitions(*[{
            'name': 'parameter2',
            'object_class_id': oc_1.id
        }])[0].first()
        p3 = db_map.add_parameter_definitions(*[{
            'name': 'parameter3',
            'object_class_id': oc_2.id
        }])[0].first()
        p4 = db_map.add_parameter_definitions(*[{
            'name': 'parameter4',
            'object_class_id': oc_2.id
        }])[0].first()
        p5 = db_map.add_parameter_definitions(*[{
            'name': 'parameter5',
            'object_class_id': oc_3.id
        }])[0].first()
        p6 = db_map.add_parameter_definitions(*[{
            'name': 'parameter6',
            'object_class_id': oc_3.id
        }])[0].first()
        rel_p1 = db_map.add_parameter_definitions(
            *[{
                'name': 'rel_parameter1',
                'relationship_class_id': relc1.id
            }])[0].first()
        rel_p2 = db_map.add_parameter_definitions(
            *[{
                'name': 'rel_parameter2',
                'relationship_class_id': relc1.id
            }])[0].first()
        rel_p3 = db_map.add_parameter_definitions(
            *[{
                'name': 'rel_parameter3',
                'relationship_class_id': relc1.id
            }])[0].first()
        rel_p4 = db_map.add_parameter_definitions(
            *[{
                'name': 'rel_parameter4',
                'relationship_class_id': relc1.id
            }])[0].first()

        # add parameter values
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': p1.id,
                'object_id': oc1_obj1.id,
                'object_class_id': oc_1.id,
                'value': '0'
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': p2.id,
                'object_id': oc1_obj2.id,
                'object_class_id': oc_1.id,
                'value': '3.5'
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': p3.id,
                'object_id': oc2_obj1.id,
                'object_class_id': oc_2.id,
                'value': '[1, 2, 3, 4]',
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': p4.id,
                'object_id': oc2_obj2.id,
                'object_class_id': oc_2.id,
                'value': '[5, 6, 7]',
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': rel_p1.id,
                'relationship_id': rel1.id,
                'relationship_class_id': relc1.id,
                'value': '0',
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': rel_p2.id,
                'relationship_id': rel2.id,
                'relationship_class_id': relc1.id,
                'value': '4',
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': rel_p3.id,
                'relationship_id': rel1.id,
                'relationship_class_id': relc1.id,
                'value': '[5, 6, 7]',
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': rel_p4.id,
                'relationship_id': rel2.id,
                'relationship_class_id': relc1.id,
                'value': '[1, 2, 3, 4]',
            })

        time = [
            np.datetime64('2005-02-25T00:00'),
            np.datetime64('2005-02-25T01:00'),
            np.datetime64('2005-02-25T02:00')
        ]
        value = [1, 2, 3]
        ts_val = to_database(
            TimeSeriesVariableResolution(time, value, False, False))
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': p5.id,
                'object_id': oc3_obj1.id,
                'object_class_id': oc_3.id,
                'value': ts_val
            })

        timepattern = ['m1', 'm2', 'm3']
        value = [1.1, 2.2, 3.3]
        ts_val = to_database(TimePattern(timepattern, value))
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': p6.id,
                'object_id': oc3_obj1.id,
                'object_class_id': oc_3.id,
                'value': ts_val
            })

        # commit
        db_map.commit_session('test')

        return db_map, db_map_test