Code Example #1
 def test_execute_forward_skip_deselected_file(self):
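     """Checks that a file resource whose mapping is "deselected" is skipped, leaving the new database empty."""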
     with TemporaryDirectory() as temp_dir:
         data_file = Path(temp_dir, "data.dat")
         self._write_simple_data(data_file)
         mappings = {data_file: "deselected"}
         database_path = Path(temp_dir).joinpath("database.sqlite")
         database_url = 'sqlite:///' + str(database_path)
         create_new_spine_database(database_url)
         gams_path = ""
         executable = ExecutableItem("name", mappings, temp_dir,
                                     sys.executable, gams_path, True,
                                     mock.MagicMock())
         database_resources = [
             ProjectItemResource(None, "database", database_url)
         ]
         self.assertTrue(
             executable.execute(database_resources,
                                ExecutionDirection.BACKWARD))
         file_resources = [
             ProjectItemResource(None, "file", data_file.as_uri())
         ]
         self.assertTrue(
             executable.execute(file_resources, ExecutionDirection.FORWARD))
         database_map = DatabaseMapping(database_url)
         class_list = database_map.object_class_list().all()
         self.assertEqual(len(class_list), 0)
         database_map.connection.close()
Code Example #2
 def create_temp_db(self):
     """Let's create a real db to more easily test complicated stuff (such as opening a tree view)."""
     temp_db_path = os.path.join(self.ds.data_dir, "temp_db.sqlite")
     sqlite_url = "sqlite:///" + temp_db_path
     create_new_spine_database(sqlite_url)
     url = dict(dialect="sqlite", database="temp_db.sqlite")
     self.ds._url = self.ds.parse_url(url)  # Set a URL for the Data Store
     return temp_db_path
Code Example #3
 def export_to_sqlite(self, file_path, data_for_export):
     """Exports given data into SQLite file."""
     url = URL("sqlite", database=file_path)
     if not self.db_mngr.is_url_available(url, self):
         return
     create_new_spine_database(url)
     db_map = DiffDatabaseMapping(url)
     import_data(db_map, **data_for_export)
     try:
         db_map.commit_session("Export initial data from Spine Toolbox.")
     except SpineDBAPIError as err:
         self.msg_error.emit(f"[SpineDBAPIError] Unable to export file <b>{db_map.codename}</b>: {err.msg}")
     else:
         self.sqlite_file_exported.emit(file_path)
Code Example #4
 def __init__(self, db_url, datapackage_descriptor, datapackage_base_path):
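     """Creates a new Spine database at the given URL and reads every datapackage resource into memory."""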
     super().__init__()
     self.db_url = db_url
     create_new_spine_database(self.db_url)
     self.db_map = DiffDatabaseMapping(db_url, getpass.getuser())
     self.datapackage = Package(datapackage_descriptor, datapackage_base_path)
     self.signaler = Signaler()
     self.resource_data = dict()
     self.object_class_count = None
     self.object_count = None
     self.relationship_class_count = None
     self.relationship_count = None
     self.parameter_count = None
     self.parameter_value_count = None
     for resource in self.datapackage.resources:
         self.resource_data[resource.name] = resource.read(cast=False)
Code Example #5
 def setUp(self):
     """Overridden method. Runs before each test. Makes instances of SpineDBEditor classes."""
     with mock.patch(
             "spinetoolbox.spine_db_editor.widgets.spine_db_editor.SpineDBEditor.restore_ui"
     ), mock.patch(
             "spinetoolbox.spine_db_editor.widgets.spine_db_editor.SpineDBEditor.show"
     ):
         mock_settings = mock.Mock()
         mock_settings.value.side_effect = lambda *args, **kwargs: 0
         self.db_mngr = SpineDBManager(mock_settings, None)
         # TODO: Use a temp file?
         url = "sqlite:///test.sqlite"
         create_new_spine_database(url)
         self.db_mngr.fetch_db_maps_for_listener = lambda *args: None
         self.spine_db_editor = SpineDBEditor(self.db_mngr, {url: "db"})
         self.db_map = self.spine_db_editor.first_db_map
         self.spine_db_editor.pivot_table_model = mock.MagicMock()
Code Example #6
 def setUp(self):
     """Overridden method. Runs before each test. Makes instances of SpineDBEditor classes."""
     self._temp_dir = TemporaryDirectory()
     url = "sqlite:///" + os.path.join(self._temp_dir.name, "test.sqlite")
     create_new_spine_database(url)
     with mock.patch(
             "spinetoolbox.spine_db_editor.widgets.spine_db_editor.SpineDBEditor.restore_ui"
     ), mock.patch(
             "spinetoolbox.spine_db_editor.widgets.spine_db_editor.SpineDBEditor.show"
     ):
         mock_settings = mock.Mock()
         mock_settings.value.side_effect = lambda *args, **kwargs: 0
         with mock.patch(
                 "spinetoolbox.spine_db_manager.SpineDBManager.thread",
                 new_callable=mock.PropertyMock) as mock_thread:
             mock_thread.return_value = QApplication.instance().thread()
             self.db_mngr = SpineDBManager(mock_settings, None)
         self.spine_db_editor = SpineDBEditor(self.db_mngr, {url: "db"})
         self.db_map = self.spine_db_editor.first_db_map
         self.spine_db_editor.pivot_table_model = mock.MagicMock()
Code Example #7
    def _create_database(directory):
        """Creates a database with objects, relationship, parameters and values."""
        url = TestExcelIntegration._sqlite_url(_TEMP_SQLITE_FILENAME,
                                               directory)
        create_new_spine_database(url)
        db_map = DiffDatabaseMapping(url,
                                     username='******',
                                     upgrade=True)

        # create an empty database to load the Excel data into
        url = TestExcelIntegration._sqlite_url(_TEMP_SQLITE_TEST_FILENAME,
                                               directory)
        create_new_spine_database(url)
        db_map_test = DiffDatabaseMapping(url,
                                          username='******',
                                          upgrade=True)

        # delete all object_classes to empty both databases
        oc = set(oc.id for oc in db_map_test.object_class_list().all())
        if oc:
            db_map_test.remove_items(object_class_ids=oc)
            db_map_test.commit_session('empty database')

        oc = set(oc.id for oc in db_map.object_class_list().all())
        if oc:
            db_map.remove_items(object_class_ids=oc)
            db_map.commit_session('empty database')

        # create object classes
        oc_1 = db_map.add_object_class(**{'name': 'object_class_1'})
        oc_2 = db_map.add_object_class(**{'name': 'object_class_2'})
        oc_3 = db_map.add_object_class(**{'name': 'object_class_3'})

        # create relationship classes
        relc1 = db_map.add_wide_relationship_class(
            **{
                'name': 'relationship_class',
                'object_class_id_list': [oc_1.id, oc_2.id]
            })
        relc2 = db_map.add_wide_relationship_class(
            **{
                'name': 'relationship_class2',
                'object_class_id_list': [oc_1.id, oc_2.id]
            })

        # create objects
        oc1_obj1 = db_map.add_object(**{
            'name': 'oc1_obj1',
            'class_id': oc_1.id
        })
        oc1_obj2 = db_map.add_object(**{
            'name': 'oc1_obj2',
            'class_id': oc_1.id
        })
        oc2_obj1 = db_map.add_object(**{
            'name': 'oc2_obj1',
            'class_id': oc_2.id
        })
        oc2_obj2 = db_map.add_object(**{
            'name': 'oc2_obj2',
            'class_id': oc_2.id
        })
        oc3_obj1 = db_map.add_object(**{
            'name': 'oc3_obj1',
            'class_id': oc_3.id
        })

        # add relationships
        rel1 = db_map.add_wide_relationship(
            **{
                'name': 'rel1',
                'class_id': relc1.id,
                'object_id_list': [oc1_obj1.id, oc2_obj1.id]
            })
        rel2 = db_map.add_wide_relationship(
            **{
                'name': 'rel2',
                'class_id': relc1.id,
                'object_id_list': [oc1_obj2.id, oc2_obj2.id]
            })

        # create parameters
        p1 = db_map.add_parameter_definitions(*[{
            'name': 'parameter1',
            'object_class_id': oc_1.id
        }])[0].first()
        p2 = db_map.add_parameter_definitions(*[{
            'name': 'parameter2',
            'object_class_id': oc_1.id
        }])[0].first()
        p3 = db_map.add_parameter_definitions(*[{
            'name': 'parameter3',
            'object_class_id': oc_2.id
        }])[0].first()
        p4 = db_map.add_parameter_definitions(*[{
            'name': 'parameter4',
            'object_class_id': oc_2.id
        }])[0].first()
        p5 = db_map.add_parameter_definitions(*[{
            'name': 'parameter5',
            'object_class_id': oc_3.id
        }])[0].first()
        p6 = db_map.add_parameter_definitions(*[{
            'name': 'parameter6',
            'object_class_id': oc_3.id
        }])[0].first()
        rel_p1 = db_map.add_parameter_definitions(
            *[{
                'name': 'rel_parameter1',
                'relationship_class_id': relc1.id
            }])[0].first()
        rel_p2 = db_map.add_parameter_definitions(
            *[{
                'name': 'rel_parameter2',
                'relationship_class_id': relc1.id
            }])[0].first()
        rel_p3 = db_map.add_parameter_definitions(
            *[{
                'name': 'rel_parameter3',
                'relationship_class_id': relc1.id
            }])[0].first()
        rel_p4 = db_map.add_parameter_definitions(
            *[{
                'name': 'rel_parameter4',
                'relationship_class_id': relc1.id
            }])[0].first()

        # add parameter values
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': p1.id,
                'object_id': oc1_obj1.id,
                'object_class_id': oc_1.id,
                'value': '0'
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': p2.id,
                'object_id': oc1_obj2.id,
                'object_class_id': oc_1.id,
                'value': '3.5'
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': p3.id,
                'object_id': oc2_obj1.id,
                'object_class_id': oc_2.id,
                'value': '[1, 2, 3, 4]',
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': p4.id,
                'object_id': oc2_obj2.id,
                'object_class_id': oc_2.id,
                'value': '[5, 6, 7]',
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': rel_p1.id,
                'relationship_id': rel1.id,
                'relationship_class_id': relc1.id,
                'value': '0',
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': rel_p2.id,
                'relationship_id': rel2.id,
                'relationship_class_id': relc1.id,
                'value': '4',
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': rel_p3.id,
                'relationship_id': rel1.id,
                'relationship_class_id': relc1.id,
                'value': '[5, 6, 7]',
            })
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': rel_p4.id,
                'relationship_id': rel2.id,
                'relationship_class_id': relc1.id,
                'value': '[1, 2, 3, 4]',
            })

        time = [
            np.datetime64('2005-02-25T00:00'),
            np.datetime64('2005-02-25T01:00'),
            np.datetime64('2005-02-25T02:00')
        ]
        value = [1, 2, 3]
        ts_val = to_database(
            TimeSeriesVariableResolution(time, value, False, False))
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': p5.id,
                'object_id': oc3_obj1.id,
                'object_class_id': oc_3.id,
                'value': ts_val
            })

        timepattern = ['m1', 'm2', 'm3']
        value = [1.1, 2.2, 3.3]
        ts_val = to_database(TimePattern(timepattern, value))
        db_map.add_parameter_value(
            **{
                'parameter_definition_id': p6.id,
                'object_id': oc3_obj1.id,
                'object_class_id': oc_3.id,
                'value': ts_val
            })

        # commit
        db_map.commit_session('test')

        return db_map, db_map_test
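
All of the examples above share the same basic pattern: build an sqlite:/// URL, call create_new_spine_database(url) to write the empty Spine schema, and then open a database mapping over the new file. The following is a minimal, self-contained sketch of that pattern, not taken from any of the projects above; it assumes only that create_new_spine_database and DatabaseMapping are importable from spinedb_api, as in Code Example #1.

import os
from tempfile import TemporaryDirectory

# Assumed import path, following the usage in the examples above.
from spinedb_api import create_new_spine_database, DatabaseMapping

with TemporaryDirectory() as temp_dir:
    # Build an SQLite URL pointing at a file inside a throwaway directory.
    url = "sqlite:///" + os.path.join(temp_dir, "new_db.sqlite")
    # Write the empty Spine schema into the new file.
    create_new_spine_database(url)
    # Open a mapping over the freshly created database.
    db_map = DatabaseMapping(url)
    # A brand new Spine database contains no object classes.
    print(len(db_map.object_class_list().all()))  # prints 0
    db_map.connection.close()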