Example #1
 def test_execute_forward_skip_deselected_file(self):
     with TemporaryDirectory() as temp_dir:
         data_file = Path(temp_dir, "data.dat")
         self._write_simple_data(data_file)
         mappings = {data_file: "deselected"}
         database_path = Path(temp_dir).joinpath("database.sqlite")
         database_url = 'sqlite:///' + str(database_path)
         create_new_spine_database(database_url)
         gams_path = ""
         executable = ExecutableItem("name", mappings, temp_dir,
                                     sys.executable, gams_path, True,
                                     mock.MagicMock())
         database_resources = [
             ProjectItemResource(None, "database", database_url)
         ]
         self.assertTrue(
             executable.execute(database_resources,
                                ExecutionDirection.BACKWARD))
         file_resources = [
             ProjectItemResource(None, "file", data_file.as_uri())
         ]
         self.assertTrue(
             executable.execute(file_resources, ExecutionDirection.FORWARD))
         database_map = DatabaseMapping(database_url)
         class_list = database_map.object_class_list().all()
         self.assertEqual(len(class_list), 0)
         database_map.connection.close()
Example #2
def _set_domain_export_dependencies(domain_names, domain_metadatas,
                                    database_url):
    """Returns data structures that are useful when determining if a set is eligible for export."""
    domain_dependencies = {name: [] for name in domain_names}
    try:
        database_map = DatabaseMapping(database_url)
    except SpineDBAPIError:
        return dict(), dict()
    try:
        set_dependencies = dict()
        for domain_name, domain_metadata in zip(domain_names,
                                                domain_metadatas):
            if domain_metadata.is_additional:
                continue
            object_class_id = (
                database_map.query(database_map.object_class_sq)
                .filter(database_map.object_class_sq.c.name == domain_name)
                .first()
                .id
            )
            relationships = database_map.wide_relationship_class_list(
                object_class_id=object_class_id).all()
            depending_relationships = domain_dependencies[domain_name]
            for relationship in relationships:
                depending_relationships.append(relationship.name)
                depending_domains = set_dependencies.setdefault(
                    relationship.name, dict())
                depending_domains[domain_name] = domain_metadata.is_exportable()
    except SpineDBAPIError:
        return dict(), dict()
    finally:
        database_map.connection.close()
    return domain_dependencies, set_dependencies
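For orientation, a hypothetical illustration of the two structures returned above, assuming a single domain "node" that appears in one relationship class "node__unit" (both names invented for this sketch):

domain_dependencies = {"node": ["node__unit"]}
set_dependencies = {"node__unit": {"node": True}}  # True means "node" is marked exportable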
Example #3
 def setUp(self):
     if os.path.exists('TestDatabaseAPI.sqlite'):
         os.remove('TestDatabaseAPI.sqlite')
     self.object_number = 100
     self.object_class_number = 100
     self.number_wide_relationship = 100
     self.number_of_parameter = 100
     self.number_of_parameter_value = 100
     self.db = create_new_spine_database('sqlite:///TestDatabaseAPI.sqlite')
     self.db_map = DatabaseMapping('sqlite:///TestDatabaseAPI.sqlite')
Example #4
 def export_to_sqlite(self, file_path, data_for_export, caller):
     """Exports given data into SQLite file."""
     url = URL("sqlite", database=file_path)
     if not self._db_mngr.is_url_available(url, caller):
         return
     create_new_spine_database(url)
     db_map = DatabaseMapping(url)
     import_data(db_map, **data_for_export)
     try:
         db_map.commit_session("Export data from Spine Toolbox.")
     except SpineDBAPIError as err:
         error_msg = {None: [f"[SpineDBAPIError] Unable to export file <b>{db_map.codename}</b>: {err.msg}"]}
         caller.msg_error.emit(error_msg)
     else:
         caller.sqlite_file_exported.emit(file_path)
Example #5
    def test_create_engine_and_session(self):

        db = create_new_spine_database(
            'sqlite:///test_create_engine_and_session.sqlite')
        db.connect()

        m = DatabaseMapping('sqlite:///test_create_engine_and_session.sqlite',
                            create_all=False)

        assert isinstance(m, DatabaseMapping)

        assert not isinstance(m.session, Session)

        m.create_engine_and_session()

        assert isinstance(m.session, Session)
Example #6
 def from_dict(pack_dict, database_url, logger):
     """Restores the settings pack from a dictionary."""
     pack = SettingsPack(pack_dict["output_file_name"])
     pack.state = SettingsState(pack_dict["state"])
     if pack.state not in (SettingsState.OK, SettingsState.INDEXING_PROBLEM):
         return pack
     pack.settings = gdx.SetSettings.from_dict(pack_dict["settings"])
     try:
         db_map = DatabaseMapping(database_url)
         value_type_logger = _UnsupportedValueTypeLogger(
             f"Exporter settings ignoring some parameters from database '{database_url}':", logger
         )
         pack.indexing_settings = gdx.indexing_settings_from_dict(
             pack_dict["indexing_settings"], db_map, value_type_logger
         )
     except SpineDBAPIError as error:
         logger.msg_error.emit(
             f"Failed to fully restore Exporter settings. Error while reading database '{database_url}': {error}"
         )
         return pack
     else:
         db_map.connection.close()
     pack.indexing_domains = [gdx.Set.from_dict(set_dict) for set_dict in pack_dict["indexing_domains"]]
     pack.merging_settings = {
         parameter_name: gdx.MergingSetting.from_dict(setting_dict)
         for parameter_name, setting_dict in pack_dict["merging_settings"].items()
     }
     pack.merging_domains = [gdx.Set.from_dict(set_dict) for set_dict in pack_dict["merging_domains"]]
     latest_commit = pack_dict.get("latest_database_commit")
     if latest_commit is not None:
         try:
             pack.last_database_commit = dateutil.parser.parse(latest_commit)
         except ValueError as error:
             logger.msg_error.emit(f"Failed to read latest database commit: {error}")
     return pack
Example #7
 def update(self):
     """Updates the settings according to changes in the database."""
     db_map = DatabaseMapping(self._database_url)
     try:
         self._entity_class_infos = _gather_entity_class_infos(db_map)
     finally:
         db_map.connection.close()
     for settings_widget in self._setting_widgets:
         settings_widget.update(self._entity_class_infos)
Example #8
def _latest_database_commit_time_stamp(url):
    """Returns the latest commit timestamp from database at given URL or None."""
    try:
        database_map = DatabaseMapping(url)
    except SpineDBAPIError:
        return None
    else:
        time_stamp = latest_database_commit_time_stamp(database_map)
        database_map.connection.close()
        return time_stamp
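A minimal, hypothetical call site (the URL is illustrative; as shown above, the helper returns None when the database cannot be opened):

stamp = _latest_database_commit_time_stamp("sqlite:///data.sqlite")
if stamp is not None:
    print(stamp)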
Example #9
 def import_from_sqlite(self, file_path):
     url = URL("sqlite", database=file_path)
     filename = os.path.split(file_path)[1]
     try:
         db_map = DatabaseMapping(url)
     except (SpineDBAPIError, SpineDBVersionError) as err:
         self.msg.emit(f"Could'n import file {filename}: {str(err)}")
         return
     data = export_data(db_map)
     self.import_data(data)
     self.msg.emit(f"File {filename} successfully imported.")
Example #10
 def _open_db_reading(self, url: str):
     """Opens the Spine DB at the given URL for reading."""
     try:
         self._db_map = DatabaseMapping(url)
     except SpineDBVersionError:
         logging.error(f"Wrong Spine DB version in {url}")
         raise RuntimeError
     except SpineDBAPIError:
         logging.error(f"Could not open db at {url} for reading")
         raise RuntimeError
Example #11
 def _add_setting(self, parameter_name=None, merging_setting=None):
     """Inserts a new settings widget to the widget list."""
     if self._entity_class_infos is None:
         db_map = DatabaseMapping(self._database_url)
         try:
             self._entity_class_infos = _gather_entity_class_infos(db_map)
         finally:
             db_map.connection.close()
     settings_widget = ParameterMergingSettings(self._entity_class_infos, self, parameter_name, merging_setting)
     settings_widget.removal_requested.connect(self._remove_setting)
     self._ui.settings_area_layout.insertWidget(0, settings_widget)
     self._setting_widgets.append(settings_widget)
Example #12
    def fetch_database_items(self):
        """Reads filter information from database."""
        resource_filters = dict()
        id_to_name_cache = dict()

        def update_filters(label, filter_type, db_row):
            filters_by_type = self._resource_filters.get(label)
            is_on = False
            if filters_by_type is not None:
                ids = filters_by_type.get(filter_type)
                if ids is not None:
                    currently_on = ids.get(db_row.id)
                    if currently_on is not None:
                        is_on = currently_on
            resource_filters.setdefault(label, dict()).setdefault(
                filter_type, dict())[db_row.id] = is_on
            id_to_name_cache.setdefault(filter_type,
                                        dict())[db_row.id] = db_row.name

        for resource in self._resources:
            url = resource.url
            if not url:
                continue
            try:
                db_map = DatabaseMapping(url)
            except (SpineDBAPIError, SpineDBVersionError):
                continue
            try:
                for scenario_row in db_map.query(db_map.scenario_sq):
                    update_filters(resource.label, SCENARIO_FILTER_TYPE,
                                   scenario_row)
                for tool_row in db_map.query(db_map.tool_sq):
                    update_filters(resource.label, TOOL_FILTER_TYPE, tool_row)
            finally:
                db_map.connection.close()
        self._resource_filters = resource_filters
        self._id_to_name_cache = id_to_name_cache
Example #13
 def _read_settings(self):
     """Reads fresh gdx settings from the database."""
     try:
         database_map = DatabaseMapping(self._database_url)
     except SpineDBAPIError as error:
         self.database_unavailable.emit(self._database_url)
         return None, None, None
     try:
         time_stamp = latest_database_commit_time_stamp(database_map)
         settings = gdx.make_set_settings(database_map)
         logger = _Logger(self._database_url, self)
         indexing_settings = gdx.make_indexing_settings(database_map, logger)
     except gdx.GdxExportException as error:
         self.errored.emit(self._database_url, error)
         return None, None, None
     finally:
         database_map.connection.close()
     return time_stamp, settings, indexing_settings
Example #14
 def export_to_excel(self, file_path, data_for_export, caller):  # pylint: disable=no-self-use
     """Exports given data into Excel file."""
     # NOTE: We import data into an in-memory Spine db and then export that to excel.
     url = URL("sqlite", database="")
     db_map = DatabaseMapping(url, create=True)
     import_data(db_map, **data_for_export)
     file_name = os.path.split(file_path)[1]
     try:
         os.remove(file_path)
         export_spine_database_to_xlsx(db_map, file_path)
     except PermissionError:
         error_msg = {
             None: [f"Unable to export file <b>{file_name}</b>.<br/>Close the file in Excel and try again."]
         }
         caller.msg_error.emit(error_msg)
     except OSError:
         error_msg = {None: [f"[OSError] Unable to export file <b>{file_name}</b>."]}
         caller.msg_error.emit(error_msg)
     else:
         caller.file_exported.emit(file_path)
Example #15
 def _update_merging_settings(self, updated_settings):
     """Updates the parameter merging settings according to changes in the database"""
     try:
         database_map = DatabaseMapping(self._database_url)
     except SpineDBAPIError as error:
         self.errored.emit(self._database_url, error)
         return None, None
     try:
         updated_merging_settings = gdx.update_merging_settings(
             self._previous_merging_settings, updated_settings, database_map
         )
     except gdx.GdxExportException as error:
         self.errored.emit(self._database_url, error)
         return None, None
     finally:
         database_map.connection.close()
     updated_merging_domains = list(map(gdx.merging_domain, updated_merging_settings.values()))
     for domain in updated_merging_domains:
         metadata = gdx.SetMetadata(gdx.ExportFlag.FORCED_EXPORTABLE, True)
         updated_settings.add_or_replace_domain(domain, metadata)
     return updated_merging_settings, updated_merging_domains
Example #16
 def run(self):
     """Constructs settings and parameter index settings and sends them away using signals."""
     try:
         database_map = DatabaseMapping(self._database_url)
     except SpineDBAPIError as error:
         self.errored.emit(self._database_url, error)
         return
     try:
         if not self.isInterruptionRequested():
             settings = gdx.make_settings(database_map)
         if not self.isInterruptionRequested():
             indexing_settings = gdx.make_indexing_settings(database_map)
     except gdx.GdxExportException as error:
         self.errored.emit(self._database_url, error)
         return
     finally:
         database_map.connection.close()
     if not self.isInterruptionRequested():
         self.settings_read.emit(self._database_url, settings)
         self.indexing_settings_read.emit(self._database_url,
                                          indexing_settings)
     self.finished.emit(self._database_url)
Example #17
    import_relationship_parameter_values,
)


@unique
class P(IntEnum):
    CLASS = 0
    OBJECT = 1
    NAME = 2
    X = 3
    ALTERNATIVE = 4


in_url = sys.argv[1]
out_url = sys.argv[2]
in_db = DatabaseMapping(in_url)
out_db = DiffDatabaseMapping(out_url)

link_relationship_class_cn = "commodity__node"

parameters_gnu = {"apparent_power": "unitSizeMVA", "capacity": "capacity", 
                  "capacity_value": "availabilityCapacityMargin", "conversion_coefficient": "conversionCoeff",
                  "fom_cost": "fomCosts", "inertia": "inertia",
                  "investment_cost": "invCosts", "ramp_limit": "maxRampDown",
                  "subunit_capacity": "unitSize", "vom_cost": "vomCosts",
                  "shutdown_cost": "shutdownCost", "start_cost": "startCostCold"}
parameters_gnu2 = {"ramp_limit": "maxRampUp"}
source_relationship_class_gnu = "node__unit__io"
link_relationship_class_gnu = "commodity__node"
target_relationship_class_gnu = "grid__node__unit__io"
Example #18
import sys
from spinedb_api import DatabaseMapping, from_database

url = sys.argv[1]
db_map = DatabaseMapping(url)
parameter_value = from_database(
    db_map.query(db_map.parameter_value_sq).first().value)
with open("out.dat", "w") as out_file:
    out_file.write(f"{parameter_value}")
db_map.connection.close()
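As a variation (not part of the original snippet), the same query pattern can be extended to dump every parameter value; the output file name is illustrative:

import sys
from spinedb_api import DatabaseMapping, from_database

url = sys.argv[1]
db_map = DatabaseMapping(url)
with open("all_values.dat", "w") as out_file:
    for row in db_map.query(db_map.parameter_value_sq):
        out_file.write(f"{from_database(row.value)}\n")
db_map.connection.close()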
Example #19
class TestDatabaseAPI(unittest.TestCase):
    def setUp(self):
        if os.path.exists('TestDatabaseAPI.sqlite'):
            os.remove('TestDatabaseAPI.sqlite')
        self.object_number = 100
        self.object_class_number = 100
        self.number_wide_relationship = 100
        self.number_of_parameter = 100
        self.number_of_parameter_value = 100
        self.db = create_new_spine_database('sqlite:///TestDatabaseAPI.sqlite')
        self.db_map = DatabaseMapping('sqlite:///TestDatabaseAPI.sqlite')

    def test_create_db(self):
        # create an in-memory database
        m = MetaData()
        db = create_new_spine_database('sqlite://')
        m.reflect(db.engine)
        assert len(m.tables.values()) == 9

    def test_create_engine_and_session(self):

        db = create_new_spine_database(
            'sqlite:///test_create_engine_and_session.sqlite')
        db.connect()

        m = DatabaseMapping('sqlite:///test_create_engine_and_session.sqlite',
                            create_all=False)

        assert isinstance(m, DatabaseMapping)

        assert not isinstance(m.session, Session)

        m.create_engine_and_session()

        assert isinstance(m.session, Session)

    def test_add_object_class_and_object(self):

        objects_before_insert = self.db_map.session.query(
            self.db_map.Object).count()
        objectclasses_before_insert = self.db_map.session.query(
            self.db_map.ObjectClass).count()
        fake = Faker()
        obj_class_ids = list()
        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            self.db_map.add_object(
                **{
                    'name': fake.pystr(min_chars=None, max_chars=40),
                    'class_id': random.choice(obj_class_ids)
                }) for i in range(self.object_number)
        ]

        assert self.db_map.session.query(self.db_map.Object).count(
        ) == self.object_number + objects_before_insert
        assert self.db_map.session.query(self.db_map.ObjectClass).count(
        ) == self.object_class_number + objectclasses_before_insert

    def test_single_object(self):

        assert self.db_map.single_object(1)
        assert self.db_map.single_object_class(1)

    def test_add_wide_relationship(self):
        fake = Faker()
        relationship_before_insert = self.db_map.session.query(
            self.db_map.Relationship).count()

        obj_ids_list = list()
        obj_class_ids = list()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            self.db_map.add_wide_relationship(
                **{
                    'object_id_list': [obj_ids_list[i]],
                    'dimension': 1,
                    'class_id': random.choice(obj_class_ids),
                    'name': fake.pystr(min_chars=None, max_chars=10),
                }) for i in range(self.number_wide_relationship)
        ]

        assert self.db_map.session.query(self.db_map.Relationship).count(
        ) == self.number_wide_relationship + relationship_before_insert

    def test_add_wide_relationship_class(self):
        fake = Faker()

        relationship_class_before_insert = self.db_map.session.query(
            self.db_map.RelationshipClass).count()

        obj_ids_list = list()
        obj_class_ids = list()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            self.db_map.add_wide_relationship_class(
                **{
                    'object_class_id_list': [obj_class_ids[i]],
                    'dimension': 1,
                    'object_class_id': random.choice(obj_class_ids),
                    'name': fake.pystr(min_chars=None, max_chars=10),
                }) for i in range(self.number_wide_relationship)
        ]

        assert self.db_map.session.query(self.db_map.RelationshipClass).count(
        ) == self.number_wide_relationship + relationship_class_before_insert

    def test_add_parameter(self):
        fake = Faker()

        obj_ids_list = list()
        obj_class_ids = list()
        relationship_list_ids = list()

        parameters_before_insert = self.db_map.session.query(
            self.db_map.Parameter).count()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            relationship_list_ids.append(
                self.db_map.add_wide_relationship(
                    **{
                        'object_id_list': [
                            random.choice(obj_ids_list) for i in range(
                                random.randint(1, len(obj_ids_list)))
                        ],
                        'dimension':
                        4,
                        'class_id':
                        random.choice(obj_class_ids),
                        'name':
                        fake.pystr(min_chars=None, max_chars=10)
                    }).id) for i in range(self.number_wide_relationship)
        ]

        [
            self.db_map.add_parameter(
                **{
                    'name':
                    fake.pystr(min_chars=None, max_chars=40),
                    'relationship_class_id':
                    random.choice(relationship_list_ids),
                    'object_class_id':
                    random.choice(obj_ids_list),
                    'can_have_time_series':
                    fake.boolean(chance_of_getting_true=50),
                    'can_have_time_pattern':
                    fake.boolean(chance_of_getting_true=50),
                    'can_be_stochastic':
                    fake.boolean(chance_of_getting_true=50)
                }) for i in range(self.number_of_parameter)
        ]

        assert self.db_map.session.query(self.db_map.Parameter).count(
        ) == self.number_of_parameter + parameters_before_insert

    def test_add_parameter_value(self):
        fake = Faker()

        obj_ids_list = list()
        obj_class_ids = list()
        relationship_list_ids = list()
        parameter_list_ids = list()

        parameters_before_insert = self.db_map.session.query(
            self.db_map.Parameter).count()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            relationship_list_ids.append(
                self.db_map.add_wide_relationship(
                    **{
                        'object_id_list': [
                            random.choice(obj_ids_list) for i in range(
                                random.randint(1, len(obj_ids_list)))
                        ],
                        'dimension':
                        4,
                        'class_id':
                        random.choice(obj_class_ids),
                        'name':
                        fake.pystr(min_chars=None, max_chars=10)
                    }).id) for i in range(self.number_wide_relationship)
        ]

        [
            parameter_list_ids.append(
                self.db_map.add_parameter(
                    **{
                        'name':
                        fake.pystr(min_chars=None, max_chars=40),
                        'relationship_class_id':
                        random.choice(relationship_list_ids),
                        'object_class_id':
                        random.choice(obj_ids_list),
                        'can_have_time_series':
                        fake.boolean(chance_of_getting_true=50),
                        'can_have_time_pattern':
                        fake.boolean(chance_of_getting_true=50),
                        'can_be_stochastic':
                        fake.boolean(chance_of_getting_true=50)
                    }).id) for i in range(self.number_of_parameter)
        ]

        [
            self.db_map.add_parameter_value(
                **{
                    'json':
                    str(fake.pydict(nb_elements=3, variable_nb_elements=True)),
                    'parameter_definition_id':
                    parameter_list_ids[i],
                    'object_id':
                    obj_ids_list[i],
                    'value':
                    fake.pyfloat(
                        left_digits=None, right_digits=None, positive=False),
                    'expression':
                    str(fake.pydict(nb_elements=3, variable_nb_elements=True))
                }) for i in range(self.number_of_parameter_value)
        ]

    def test_get_or_add_object_class(self):
        obj_class_ids = list()
        fake = Faker()
        object_before_insert = self.db_map.session.query(
            self.db_map.ObjectClass).count()
        [
            obj_class_ids.append(
                self.db_map.get_or_add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        assert self.db_map.session.query(self.db_map.ObjectClass).count(
        ) == object_before_insert + self.object_class_number

    def test_get_or_add_wide_relationship_class(self):
        fake = Faker()

        relationship_class_before_insert = self.db_map.session.query(
            self.db_map.RelationshipClass).count()

        obj_ids_list = list()
        obj_class_ids = list()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            self.db_map.get_or_add_wide_relationship_class(
                **{
                    'object_class_id_list': [obj_class_ids[i]],
                    'dimension': 1,
                    'object_class_id': random.choice(obj_class_ids),
                    'name': fake.pystr(min_chars=None, max_chars=10),
                }) for i in range(self.number_wide_relationship)
        ]

        assert self.db_map.session.query(self.db_map.RelationshipClass).count(
        ) == self.number_wide_relationship + relationship_class_before_insert

    def test_get_or_add_parameter(self):
        fake = Faker()

        obj_ids_list = list()
        obj_class_ids = list()
        relationship_list_ids = list()

        parameters_before_insert = self.db_map.session.query(
            self.db_map.Parameter).count()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            relationship_list_ids.append(
                self.db_map.add_wide_relationship(
                    **{
                        'object_id_list': [
                            random.choice(obj_ids_list) for i in range(
                                random.randint(1, len(obj_ids_list)))
                        ],
                        'dimension':
                        4,
                        'class_id':
                        random.choice(obj_class_ids),
                        'name':
                        fake.pystr(min_chars=None, max_chars=10)
                    }).id) for i in range(self.number_wide_relationship)
        ]

        [
            self.db_map.get_or_add_parameter(
                **{
                    'name':
                    fake.pystr(min_chars=None, max_chars=40),
                    'relationship_class_id':
                    random.choice(relationship_list_ids),
                    'object_class_id':
                    random.choice(obj_ids_list),
                    'can_have_time_series':
                    fake.boolean(chance_of_getting_true=50),
                    'can_have_time_pattern':
                    fake.boolean(chance_of_getting_true=50),
                    'can_be_stochastic':
                    fake.boolean(chance_of_getting_true=50)
                }) for i in range(self.number_of_parameter)
        ]

        assert self.db_map.session.query(self.db_map.Parameter).count(
        ) == self.number_of_parameter + parameters_before_insert

    def test_rename_object_class(self):

        fake = Faker()
        obj_class_ids = list()
        [
            obj_class_ids.append(
                self.db_map.add_object_class(**{
                    'name': fake.name()
                }).id) for i in range(self.object_class_number)
        ]

        new_elem = self.db_map.get_or_add_object_class(**{'name': fake.name()})

        self.db_map.rename_object_class(new_elem.id, "TEST_PASSED")

        renamed_element = self.db_map.get_or_add_object_class(
            **{'name': "TEST_PASSED"})

        assert new_elem.id == renamed_element.id
        assert renamed_element.name == "TEST_PASSED"

    def test_rename_object(self):
        obj_ids_list = list()
        obj_class_ids = list()

        fake = Faker()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        test_id = random.choice(obj_ids_list)

        self.db_map.rename_object(test_id, "TEST_PASSED")

        renamed_element = self.db_map.single_object(
            name="TEST_PASSED").one_or_none()

        assert renamed_element.id == test_id

    def test_rename_relationship_class(self):
        fake = Faker()
        obj_ids_list = list()
        obj_class_ids = list()
        obj_relationship_class_ids = list()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            obj_relationship_class_ids.append(
                self.db_map.add_wide_relationship_class(
                    **{
                        'object_class_id_list': [obj_class_ids[i]],
                        'dimension': 1,
                        'object_class_id': random.choice(obj_class_ids),
                        'name': fake.pystr(min_chars=None, max_chars=10),
                    })) for i in range(self.number_wide_relationship)
        ]

        test_id = random.choice(obj_relationship_class_ids).id

        self.db_map.rename_relationship_class(test_id, "TEST_PASSED_CORRECTLY")

        renamed_element = self.db_map.single_wide_relationship_class(
            name="TEST_PASSED_CORRECTLY").one_or_none()

        assert renamed_element.id == test_id

    def test_rename_relationship(self):
        fake = Faker()

        relationship_before_insert = self.db_map.session.query(
            self.db_map.Relationship).count()

        obj_ids_list = list()
        obj_class_ids = list()
        obj_relationship_ids = list()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            obj_relationship_ids.append(
                self.db_map.add_wide_relationship(
                    **{
                        'object_id_list': [obj_ids_list[i]],
                        'dimension': 1,
                        'class_id': random.choice(obj_class_ids),
                        'name': fake.pystr(min_chars=None, max_chars=10),
                    })) for i in range(self.number_wide_relationship)
        ]

        test_id = random.choice(obj_relationship_ids).id

        self.db_map.rename_relationship(test_id, "TEST_PASSED_CORRECTLY")

        renamed_element = self.db_map.single_wide_relationship(
            name="TEST_PASSED_CORRECTLY").one_or_none()

        assert renamed_element.id == test_id

    def test_update_parameter(self):
        fake = Faker()

        obj_ids_list = list()
        obj_class_ids = list()
        relationship_list_ids = list()
        obj_parameter_ids = list()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            relationship_list_ids.append(
                self.db_map.add_wide_relationship(
                    **{
                        'object_id_list': [
                            random.choice(obj_ids_list) for i in range(
                                random.randint(1, len(obj_ids_list)))
                        ],
                        'dimension':
                        4,
                        'class_id':
                        random.choice(obj_class_ids),
                        'name':
                        fake.pystr(min_chars=None, max_chars=10)
                    }).id) for i in range(self.number_wide_relationship)
        ]

        [
            obj_parameter_ids.append(
                self.db_map.add_parameter(
                    **{
                        'name':
                        fake.pystr(min_chars=None, max_chars=40),
                        'relationship_class_id':
                        random.choice(relationship_list_ids),
                        'object_class_id':
                        random.choice(obj_ids_list),
                        'can_have_time_series':
                        fake.boolean(chance_of_getting_true=50),
                        'can_have_time_pattern':
                        fake.boolean(chance_of_getting_true=50),
                        'can_be_stochastic':
                        fake.boolean(chance_of_getting_true=50)
                    })) for i in range(self.number_of_parameter)
        ]

        test_id = random.choice(obj_parameter_ids).id

        self.db_map.update_parameter(test_id, "name",
                                     "PARAMETER_UPDATED_CORRECTLY")

        updated_parameter = self.db_map.single_parameter(test_id).one_or_none()

        assert updated_parameter.name == "PARAMETER_UPDATED_CORRECTLY"

    def test_update_parameter_value(self):
        fake = Faker()

        obj_ids_list = list()
        obj_class_ids = list()
        relationship_list_ids = list()
        parameter_list_ids = list()
        parameter_value_ids = list()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            relationship_list_ids.append(
                self.db_map.add_wide_relationship(
                    **{
                        'object_id_list': [
                            random.choice(obj_ids_list) for i in range(
                                random.randint(1, len(obj_ids_list)))
                        ],
                        'dimension':
                        4,
                        'class_id':
                        random.choice(obj_class_ids),
                        'name':
                        fake.pystr(min_chars=None, max_chars=10)
                    }).id) for i in range(self.number_wide_relationship)
        ]

        [
            parameter_list_ids.append(
                self.db_map.add_parameter(
                    **{
                        'name':
                        fake.pystr(min_chars=None, max_chars=40),
                        'relationship_class_id':
                        random.choice(relationship_list_ids),
                        'object_class_id':
                        random.choice(obj_ids_list),
                        'can_have_time_series':
                        fake.boolean(chance_of_getting_true=50),
                        'can_have_time_pattern':
                        fake.boolean(chance_of_getting_true=50),
                        'can_be_stochastic':
                        fake.boolean(chance_of_getting_true=50)
                    }).id) for i in range(self.number_of_parameter)
        ]

        [
            parameter_value_ids.append(
                self.db_map.add_parameter_value(
                    **{
                        'json':
                        str(
                            fake.pydict(nb_elements=3,
                                        variable_nb_elements=True)),
                        'parameter_definition_id':
                        parameter_list_ids[i],
                        'object_id':
                        obj_ids_list[i],
                        'value':
                        fake.pyfloat(left_digits=None,
                                     right_digits=None,
                                     positive=False),
                        'expression':
                        str(
                            fake.pydict(nb_elements=3,
                                        variable_nb_elements=True))
                    })) for i in range(self.number_of_parameter_value)
        ]

        test_id = random.choice(parameter_value_ids).id

        self.db_map.update_parameter_value(test_id, "expression",
                                           "PARAMETER_UPDATED_CORRECTLY")

        updated_parameter = self.db_map.single_parameter_value(
            test_id).one_or_none()

        assert updated_parameter.expression == "PARAMETER_UPDATED_CORRECTLY"

    def test_remove_object_class(self):
        fake = Faker()
        obj_class_ids = list()
        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            self.db_map.add_object(
                **{
                    'name': fake.pystr(min_chars=None, max_chars=40),
                    'class_id': random.choice(obj_class_ids)
                }) for i in range(self.object_number)
        ]

        objects_before_deletion = self.db_map.session.query(
            self.db_map.Object).count()
        objectclasses_before_deletion = self.db_map.session.query(
            self.db_map.ObjectClass).count()

        remove_object_class_candidate = random.choice(obj_class_ids)

        self.db_map.remove_object_class(remove_object_class_candidate)

        assert self.db_map.session.query(self.db_map.ObjectClass).count(
        ) == objectclasses_before_deletion - 1

    def test_remove_object(self):
        fake = Faker()
        obj_class_ids = list()
        obj_ids = list()
        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    })) for i in range(self.object_number)
        ]

        objects_before_deletion = self.db_map.session.query(
            self.db_map.Object).count()

        remove_object_candidate = random.choice(obj_ids).id

        self.db_map.remove_object(remove_object_candidate)

        assert self.db_map.session.query(
            self.db_map.Object).count() == objects_before_deletion - 1

    def test_remove_relationship_class(self):
        fake = Faker()
        obj_ids_list = list()
        obj_class_ids = list()
        obj_relationship_class_ids = list()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            obj_relationship_class_ids.append(
                self.db_map.add_wide_relationship_class(
                    **{
                        'object_class_id_list': [obj_class_ids[i]],
                        'dimension': 1,
                        'object_class_id': random.choice(obj_class_ids),
                        'name': fake.pystr(min_chars=None, max_chars=10),
                    })) for i in range(self.number_wide_relationship)
        ]

        test_id = random.choice(obj_relationship_class_ids).id

        number_of_relationship_classes_before_deletion = self.db_map.session.query(
            self.db_map.RelationshipClass).count()

        self.db_map.remove_relationship_class(test_id)

        assert self.db_map.session.query(self.db_map.RelationshipClass).count(
        ) == number_of_relationship_classes_before_deletion - 1

    def test_remove_relationship(self):
        fake = Faker()

        relationship_before_insert = self.db_map.session.query(
            self.db_map.Relationship).count()

        obj_ids_list = list()
        obj_class_ids = list()
        obj_relationship_ids = list()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            obj_relationship_ids.append(
                self.db_map.add_wide_relationship(
                    **{
                        'object_id_list': [obj_ids_list[i]],
                        'dimension': 1,
                        'class_id': random.choice(obj_class_ids),
                        'name': fake.pystr(min_chars=None, max_chars=10),
                    })) for i in range(self.number_wide_relationship)
        ]

        test_id = random.choice(obj_relationship_ids).id

        number_of_relationship_before_deletion = self.db_map.session.query(
            self.db_map.Relationship).count()

        self.db_map.remove_relationship(test_id)

        assert self.db_map.session.query(self.db_map.Relationship).count(
        ) == number_of_relationship_before_deletion - 1

    def test_remove_parameter(self):
        fake = Faker()

        obj_ids_list = list()
        obj_class_ids = list()
        relationship_list_ids = list()
        obj_parameter_ids = list()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            relationship_list_ids.append(
                self.db_map.add_wide_relationship(
                    **{
                        'object_id_list': [
                            random.choice(obj_ids_list) for i in range(
                                random.randint(1, len(obj_ids_list)))
                        ],
                        'dimension':
                        4,
                        'class_id':
                        random.choice(obj_class_ids),
                        'name':
                        fake.pystr(min_chars=None, max_chars=10)
                    }).id) for i in range(self.number_wide_relationship)
        ]

        [
            obj_parameter_ids.append(
                self.db_map.add_parameter(
                    **{
                        'name':
                        fake.pystr(min_chars=None, max_chars=40),
                        'relationship_class_id':
                        random.choice(relationship_list_ids),
                        'object_class_id':
                        random.choice(obj_ids_list),
                        'can_have_time_series':
                        fake.boolean(chance_of_getting_true=50),
                        'can_have_time_pattern':
                        fake.boolean(chance_of_getting_true=50),
                        'can_be_stochastic':
                        fake.boolean(chance_of_getting_true=50)
                    })) for i in range(self.number_of_parameter)
        ]

        test_id = random.choice(obj_parameter_ids).id

        number_of_parameter_before_deletion = self.db_map.session.query(
            self.db_map.Parameter).count()

        self.db_map.remove_parameter(test_id)

        assert self.db_map.session.query(self.db_map.Parameter).count(
        ) == number_of_parameter_before_deletion - 1

    def test_remove_parameter_value(self):
        fake = Faker()

        obj_ids_list = list()
        obj_class_ids = list()
        relationship_list_ids = list()
        parameter_list_ids = list()
        parameter_value_ids = list()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            relationship_list_ids.append(
                self.db_map.add_wide_relationship(
                    **{
                        'object_id_list': [
                            random.choice(obj_ids_list) for i in range(
                                random.randint(1, len(obj_ids_list)))
                        ],
                        'dimension':
                        4,
                        'class_id':
                        random.choice(obj_class_ids),
                        'name':
                        fake.pystr(min_chars=None, max_chars=10)
                    }).id) for i in range(self.number_wide_relationship)
        ]

        [
            parameter_list_ids.append(
                self.db_map.add_parameter(
                    **{
                        'name':
                        fake.pystr(min_chars=None, max_chars=40),
                        'relationship_class_id':
                        random.choice(relationship_list_ids),
                        'object_class_id':
                        random.choice(obj_ids_list),
                        'can_have_time_series':
                        fake.boolean(chance_of_getting_true=50),
                        'can_have_time_pattern':
                        fake.boolean(chance_of_getting_true=50),
                        'can_be_stochastic':
                        fake.boolean(chance_of_getting_true=50)
                    }).id) for i in range(self.number_of_parameter)
        ]

        [
            parameter_value_ids.append(
                self.db_map.add_parameter_value(
                    **{
                        'json':
                        str(
                            fake.pydict(nb_elements=3,
                                        variable_nb_elements=True)),
                        'parameter_definition_id':
                        parameter_list_ids[i],
                        'object_id':
                        obj_ids_list[i],
                        'value':
                        fake.pyfloat(left_digits=None,
                                     right_digits=None,
                                     positive=False),
                        'expression':
                        str(
                            fake.pydict(nb_elements=3,
                                        variable_nb_elements=True))
                    })) for i in range(self.number_of_parameter_value)
        ]

        test_id = random.choice(parameter_value_ids).id

        number_of_parameter_value_before_deletion = self.db_map.session.query(
            self.db_map.ParameterValue).count()

        self.db_map.remove_parameter_value(test_id)

        assert self.db_map.session.query(self.db_map.ParameterValue).count(
        ) == number_of_parameter_value_before_deletion - 1

    def test_reset_mapping(self):
        fake = Faker()

        obj_ids_list = list()
        obj_class_ids = list()
        relationship_list_ids = list()
        parameter_list_ids = list()
        parameter_value_ids = list()

        [
            obj_class_ids.append(
                self.db_map.add_object_class(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40)
                    }).id) for i in range(self.object_class_number)
        ]
        [
            obj_ids_list.append(
                self.db_map.add_object(
                    **{
                        'name': fake.pystr(min_chars=None, max_chars=40),
                        'class_id': random.choice(obj_class_ids)
                    }).id) for i in range(self.object_number)
        ]

        [
            relationship_list_ids.append(
                self.db_map.add_wide_relationship(
                    **{
                        'object_id_list': [
                            random.choice(obj_ids_list) for i in range(
                                random.randint(1, len(obj_ids_list)))
                        ],
                        'dimension':
                        4,
                        'class_id':
                        random.choice(obj_class_ids),
                        'name':
                        fake.pystr(min_chars=None, max_chars=10)
                    }).id) for i in range(self.number_wide_relationship)
        ]

        [
            parameter_list_ids.append(
                self.db_map.add_parameter(
                    **{
                        'name':
                        fake.pystr(min_chars=None, max_chars=40),
                        'relationship_class_id':
                        random.choice(relationship_list_ids),
                        'object_class_id':
                        random.choice(obj_ids_list),
                        'can_have_time_series':
                        fake.boolean(chance_of_getting_true=50),
                        'can_have_time_pattern':
                        fake.boolean(chance_of_getting_true=50),
                        'can_be_stochastic':
                        fake.boolean(chance_of_getting_true=50)
                    }).id) for i in range(self.number_of_parameter)
        ]

        [
            parameter_value_ids.append(
                self.db_map.add_parameter_value(
                    **{
                        'json':
                        str(
                            fake.pydict(nb_elements=3,
                                        variable_nb_elements=True)),
                        'parameter_definition_id':
                        parameter_list_ids[i],
                        'object_id':
                        obj_ids_list[i],
                        'value':
                        fake.pyfloat(left_digits=None,
                                     right_digits=None,
                                     positive=False),
                        'expression':
                        str(
                            fake.pydict(nb_elements=3,
                                        variable_nb_elements=True))
                    })) for i in range(self.number_of_parameter_value)
        ]

        self.db_map.reset_mapping()

        assert self.db_map.session.query(
            self.db_map.ParameterValue).count() == 0
        assert self.db_map.session.query(self.db_map.Parameter).count() == 0
        assert self.db_map.session.query(self.db_map.Object).count() == 0
        assert self.db_map.session.query(
            self.db_map.RelationshipClass).count() == 0
        assert self.db_map.session.query(self.db_map.Relationship).count() == 0

    def tearDown(self):
        """Overridden method. Runs after each test.
        Use this to free resources after a test if needed.
        """
        # delete temporary database files if they exist

        self.db_map.close()

        try:
            os.remove("TestDatabaseAPI.sqlite")
        except OSError:
            pass

        try:
            os.remove("test_create_engine_and_session.sqlite")
        except OSError:
            pass