def get_mapped_data(self, tables_mappings, options, table_types, table_row_types, max_rows=-1):
    """Overrides io_api method to check for some parameter value types."""
    mapped_data, errors = super().get_mapped_data(
        tables_mappings, options, table_types, table_row_types, max_rows
    )
    # Strings that look like JSON objects may be serialized parameter values;
    # parse them in place, leaving anything unparseable untouched.
    for key in ("object_parameter_values", "relationship_parameter_values"):
        rows = mapped_data[key]
        for position, row in enumerate(rows):
            last = row[-1]
            if not (isinstance(last, str) and last and last[0] == "{"):
                continue
            try:
                parsed = from_database(last)
            except ParameterValueFormatError:
                continue
            rows[position] = row[:-1] + (parsed, )
    return mapped_data, errors
def _make_layout_generator(self):
    """Returns a layout generator for the current graph.

    Returns:
        GraphLayoutGenerator
    """
    # Positions stored in the database as a dedicated position parameter.
    parameter_positions = {}
    for p in self.db_mngr.get_items_by_field(
        self.db_map, "parameter value", "parameter_name", self._POS_PARAM_NAME
    ):
        parameter_positions[p["entity_id"]] = dict(from_database(p["value"]).value_to_database_data())
    # Positions of entity items currently in the scene, when persistence is on.
    persisted_positions = {}
    if self._persistent:
        for item in self.ui.graphicsView.items():
            if isinstance(item, EntityItem):
                persisted_positions[item.entity_id] = {"x": item.pos().x(), "y": item.pos().y()}
    # Database-stored positions take precedence over scene positions.
    persisted_positions.update(parameter_positions)
    entity_ids = self.object_ids + self.relationship_ids
    heavy_positions = {
        index: persisted_positions[entity_id]
        for index, entity_id in enumerate(entity_ids)
        if entity_id in persisted_positions
    }
    return GraphLayoutGenerator(
        len(entity_ids), self.src_inds, self.dst_inds, self._ARC_LENGTH_HINT, heavy_positions=heavy_positions
    )
def update_value_list_in_db(self, child, value):
    """Builds an updated value list and pushes it to the database via import."""
    new_list = self._new_value_list(child.child_number(), value)
    # Parse each database-format entry; the list name is this item's name.
    rows = [(self.name, from_database(db_value)) for db_value in new_list]
    self.db_mngr.import_data({self.db_map: {"parameter_value_lists": rows}})
def _get_parameter_positions(self, parameter_name):
    """Yields positions of entities defined by a float-valued parameter.

    Args:
        parameter_name (str): name of the position parameter; a falsy name yields nothing

    Yields:
        tuple: ((db_map, entity_id), position) for each float-valued parameter value
    """
    if not parameter_name:
        # Bug fix: the original used ``yield from []`` here, which yields nothing
        # but does NOT end the generator, so execution fell through into the loop
        # below. A bare ``return`` is the correct early exit for a generator.
        return
    for db_map in self.db_maps:
        for p in self.db_mngr.get_items_by_field(db_map, "parameter_value", "parameter_name", parameter_name):
            pos = from_database(p["value"])
            # Only plain floats are usable as positions.
            if isinstance(pos, float):
                yield (db_map, p["entity_id"]), pos
def _get_relationships_and_parameters(db):
    """Exports all relationship data from spine database into unstacked list of lists

    Args:
        db (spinedb_api.DatabaseMapping): database mapping for database

    Returns:
        tuple: (plain parameter data, json data, relationship class rows,
            time series data, time pattern data)
    """
    # Pull every relationship-related table once up front.
    rel_class = db.query(db.wide_relationship_class_sq).all()
    rel = db.query(db.wide_relationship_sq).all()
    rel_par = db.query(db.relationship_parameter_definition_sq).all()
    rel_par_value = db.query(db.relationship_parameter_value_sq).all()
    rel_class_id_2_name = {rc.id: rc.name for rc in rel_class}
    # Rows with actual parameter values; value is parsed from database format.
    out_data = [
        [r.relationship_class_name, r.object_name_list, r.parameter_name, from_database(r.value)]
        for r in rel_par_value
    ]
    # Relationships with no parameter value get placeholder rows so they still appear.
    rel_with_par = set(r.object_name_list for r in rel_par_value)
    rel_without_par = [
        [rel_class_id_2_name[r.class_id], r.object_name_list, None, None]
        for r in rel
        if r.object_name_list not in rel_with_par
    ]
    # Parameter definitions (no value) and classes without any definitions.
    rel_class_par = [[r.relationship_class_name, None, r.parameter_name, None] for r in rel_par]
    rel_class_with_par = [r.relationship_class_name for r in rel_par]
    rel_class_without_par = [[r.name, None, None, None] for r in rel_class if r.name not in rel_class_with_par]
    rel_data = out_data + rel_without_par + rel_class_par + rel_class_without_par
    # Split rows by value type; indexed values keep a None-valued placeholder row
    # in rel_par so the plain-parameter table still lists them.
    # NOTE: rel_par (the definition query result) is deliberately reused as the
    # output accumulator from here on.
    rel_par = []
    rel_json = []
    rel_ts = []
    rel_timepattern = []
    for d in rel_data:
        if d[3] is None or isinstance(d[3], (int, float, str, DateTime, Duration)):
            rel_par.append(d)
        elif isinstance(d[3], list):
            rel_json.append(d)
            rel_par.append(d[:-1] + [None])
        elif isinstance(d[3], TimeSeries):
            rel_ts.append(d)
            rel_par.append(d[:-1] + [None])
        elif isinstance(d[3], TimePattern):
            rel_timepattern.append(d)
            rel_par.append(d[:-1] + [None])
        else:
            warnings.warn(
                f"Skipping export of unsuported parameter type: {type(d[3])}")
    return rel_par, rel_json, rel_class, rel_ts, rel_timepattern
def _get_objects_and_parameters(db): """Exports all object data from spine database into unstacked list of lists Args: db (spinedb_api.DatabaseMapping): database mapping for database Returns: (List, List) First list contains parameter data, second one json data """ # get all objects obj = db.query(db.object_sq).all() # get all object classes obj_class = db.query(db.object_class_sq).all() obj_class_id_2_name = {oc.id: oc.name for oc in obj_class} # get all parameter values pval = db.query(db.object_parameter_value_sq).all() # get all parameter definitions par = db.query(db.object_parameter_definition_sq).all() # make all in same format par = [(p.object_class_name, None, p.parameter_name, None) for p in par] pval = [(p.object_class_name, p.object_name, p.parameter_name, from_database(p.value)) for p in pval] obj = [(obj_class_id_2_name[p.class_id], p.name, None, None) for p in obj] obj_class = [(p.name, None, None, None) for p in obj_class] object_and_par = pval + par + obj + obj_class object_par = [] object_json = [] object_ts = [] object_timepattern = [] for d in object_and_par: if d[3] is None or isinstance(d[3], (int, float, str, DateTime, Duration)): object_par.append(d) elif isinstance(d[3], list): object_json.append(d) object_par.append(d[:-1] + (None, )) elif isinstance(d[3], TimeSeries): object_ts.append(d) object_par.append(d[:-1] + (None, )) elif isinstance(d[3], TimePattern): object_timepattern.append(d) object_par.append(d[:-1] + (None, )) else: warnings.warn( f"Skipping export of unsuported parameter type: {type(d[3])}") return object_par, object_json, object_ts, object_timepattern
def _read_value(value_in_database):
    """Parses a parameter value from its database representation.

    Args:
        value_in_database (str): value in database format

    Returns:
        float, bool, IndexedValue or None: the parsed value; plain ints are widened to float

    Raises:
        GdxExportException: if the value cannot be parsed or its type is unsupported
    """
    try:
        value = from_database(value_in_database)
    except ParameterValueFormatError as error:
        # Chain the original parse error so the root cause stays in the traceback.
        raise GdxExportException("Failed to read parameter value.") from error
    if isinstance(value, int):
        # GDX has no integer type; normalize to float.
        # NOTE: bool is a subclass of int, so True/False become 1.0/0.0 here —
        # this matches the original behavior.
        value = float(value)
    if value is not None and not isinstance(value, (float, IndexedValue, bool)):
        raise GdxExportException(
            f"Unsupported parameter value type '{type(value).__name__}'.")
    return value
def reset_value_list(self, value_list):
    """Synchronizes child items with value_list, adding or removing rows as needed."""
    current = self.compile_value_list()
    if value_list == current:
        return
    new_count = len(value_list)
    old_count = len(current)
    if new_count > old_count:
        # Grow: append fresh items at the end of the existing children.
        self.insert_children(old_count, *(ValueItem() for _ in range(new_count - old_count)))
    elif old_count > new_count:
        # Shrink: drop the surplus tail.
        self.remove_children(new_count, old_count - new_count)
    for child, db_value in zip(self.children, value_list):
        child.value = from_database(db_value)
def setData(self, index, value, role=Qt.EditRole):
    """
    Sets data in the map.

    Args:
        index (QModelIndex): an index to the model
        value (str): JSON representation of the value
        role (int): a role

    Returns:
        bool: True if the operation was successful
    """
    if not index.isValid() or role != Qt.EditRole:
        return False
    if not value:
        # Empty input clears the cell.
        self._rows[index.row()][index.column()] = None
        return True
    try:
        new_value = from_database(value)
    except ParameterValueFormatError:
        return False
    if not isinstance(new_value, (str, float, Duration, DateTime)):
        return False
    # Reuse the already-parsed value instead of calling from_database() a second
    # time as the original did — same result, half the parsing work.
    self._rows[index.row()][index.column()] = new_value
    return True
def createEditor(self, parent, option, index):
    """Returns an inline editor, or requests a standalone value editor for special types."""
    if self._is_entity_index(index):
        return super().createEditor(parent, option, index)
    if not self.parent().pivot_table_model.index_in_data(index):
        return CustomLineEditor(parent)
    try:
        parsed = from_database(index.data(role=Qt.EditRole))
    except ParameterValueFormatError:
        parsed = None
    if parsed is None or isinstance(parsed, (DateTime, Duration, TimePattern, TimeSeries)):
        # Special types need the standalone editor dialog; no inline editor is returned.
        name = index.model().sourceModel().value_name(index)  # FIXME: get the actual name
        self.parameter_value_editor_requested.emit(index, name, parsed)
        return None
    return CustomLineEditor(parent)
def createEditor(self, parent, option, index):
    """If the parameter has associated a value list, returns a SearchBarEditor.
    Otherwise returns or requests a dedicated parameter value editor.
    """
    db_map = self._get_db_map(index)
    if not db_map:
        return None
    value_list = self._get_value_list(index, db_map)
    if not value_list:
        return self._create_or_request_parameter_value_editor(parent, option, index, db_map)
    # Value list present: offer the allowed values in a search bar.
    editor = SearchBarEditor(self.parent(), parent)
    parsed_values = [from_database(item) for item in value_list.split(",")]
    editor.set_data(index.data(PARSED_ROLE), parsed_values)
    editor.data_committed.connect(lambda editor=editor, index=index: self._close_editor(editor, index))
    return editor
def _convert_to_data_type(self, indexes, values):
    """
    Converts values from string to current data type filtering failed conversions.

    Args:
        indexes (list of QModelIndex): indexes
        values (list of str): values to convert

    Returns:
        tuple: indexes and converted values
    """
    kept_indexes = []
    kept_values = []
    if self._data_type == float:
        for index, raw in zip(indexes, values):
            if raw is None:
                # Missing cells become NaN rather than being dropped.
                kept_values.append(numpy.nan)
                kept_indexes.append(index)
                continue
            try:
                kept_values.append(locale.atof(raw))
                kept_indexes.append(index)
            except ValueError:
                pass
    elif self._data_type == str:
        # String conversion cannot fail; keep every index.
        for index, raw in zip(indexes, values):
            kept_values.append(str(raw) if raw is not None else "")
            kept_indexes.append(index)
    else:
        for index, raw in zip(indexes, values):
            # First try constructing the type directly from the raw string.
            try:
                kept_values.append(self._data_type(raw))
                kept_indexes.append(index)
                continue
            except SpineDBAPIError:
                pass
            # Fall back to parsing the database representation.
            try:
                parsed = from_database(raw)
            except ParameterValueFormatError:
                continue
            if isinstance(parsed, self._data_type):
                kept_values.append(parsed)
                kept_indexes.append(index)
    return kept_indexes, kept_values
def _make_pivot_proxy_model():
    """Returns a prefilled pivot table proxy model for testing."""
    # Mocked database manager: get_value just parses the id as a database value.
    db_mngr = MagicMock()
    db_mngr.get_value.side_effect = lambda db_map, item_type, id_, role: from_database(id_)
    mock_db_map = Mock()
    mock_db_map.codename = "codename"
    db_mngr.undo_action.__getitem__.side_effect = lambda key: QAction()
    db_mngr.redo_action.__getitem__.side_effect = lambda key: QAction()
    # Patch out UI restore/show so no real window work happens during construction.
    with patch.object(DataStoreForm, "restore_ui"), patch.object(DataStoreForm, "show"):
        data_store_widget = DataStoreForm(db_mngr, mock_db_map)
        data_store_widget.create_header_widget = lambda *args, **kwargs: None
        # Canned parameter value data: (object, parameter) -> database value string.
        data_store_widget.load_parameter_value_data = lambda: {
            ('1', 'int_col'): '-3',
            ('2', 'int_col'): '-1',
            ('3', 'int_col'): '2',
            ('1', 'float_col'): '1.1',
            ('2', 'float_col'): '1.2',
            ('3', 'float_col'): '1.3',
            ('1', 'time_series_col'): '{"type": "time_series", "data": {"2019-07-10T13:00": 2.3, "2019-07-10T13:20": 5.0}}',
            ('2', 'time_series_col',): '{"type": "time_series", "index": {"start": "2019-07-10T13:00", "resolution": "20 minutes"}, "data": [3.3, 4.0]}',
            ('3', 'time_series_col'): '{"type": "time_series", "data": {"2019-07-10T13:00": 4.3, "2019-07-10T13:20": 3.0}}',
        }
        data_store_widget.pivot_table_model = model = ParameterValuePivotTableModel(data_store_widget)
        object_class_names = {"object": 1}
        model.call_reset_model(object_class_names, pivot=(['object'], ['parameter'], [], ()))
        model.start_fetching()
        data_store_widget.pivot_table_model = model
        data_store_widget.pivot_table_proxy.setSourceModel(model)
        return data_store_widget.pivot_table_proxy
def _read_pasted_text(text):
    """
    Parses a given CSV table

    Args:
        text (str): a CSV table

    Returns:
        list of list: a list of table rows
    """

    def parse_cell(cell):
        """Converts a single cell to the richest type that accepts it."""
        try:
            return locale.atof(cell)
        except ValueError:
            pass
        try:
            # Try parsing Duration before DateTime because DateTime will happily accept strings like '1h'
            return Duration(cell)
        except SpineDBAPIError:
            pass
        try:
            return DateTime(cell)
        except SpineDBAPIError:
            pass
        try:
            return from_database(cell)
        except ParameterValueFormatError:
            pass
        return cell

    rows = []
    with io.StringIO(text) as input_stream:
        for csv_row in csv.reader(input_stream, delimiter='\t'):
            rows.append([parse_cell(cell) for cell in csv_row])
    return rows
def __init__(self, parent_index, value_name="", value=None, parent_widget=None):
    """
    Args:
        parent_index (QModelIndex): index to the parameter value in the parent model
        value_name (str): name of the value; shown in the window title
        value: value to edit; if None, the value is read from parent_index
            using the Qt.EditRole role
        parent_widget (QWidget, optional): parent widget
    """
    from ..ui.parameter_value_editor import Ui_ParameterValueEditor

    super().__init__(parent_widget)
    self._parent_model = parent_index.model()
    self._parent_index = parent_index
    self._ui = Ui_ParameterValueEditor()
    self._ui.setupUi(self)
    self.setWindowTitle(value_name + " - Edit value")
    self.setWindowFlag(Qt.WindowMinMaxButtonsHint)
    self._ui.button_box.accepted.connect(self.accept)
    self._ui.button_box.rejected.connect(self.reject)
    # One editor widget per supported parameter value type; all are added to the
    # editor stack and the visible one is chosen by the type selector.
    self._time_pattern_editor = TimePatternEditor()
    self._plain_value_editor = PlainParameterValueEditor()
    self._time_series_fixed_resolution_editor = TimeSeriesFixedResolutionEditor()
    self._time_series_variable_resolution_editor = TimeSeriesVariableResolutionEditor()
    self._datetime_editor = DatetimeEditor()
    self._duration_editor = DurationEditor()
    self._ui.editor_stack.addWidget(self._plain_value_editor)
    self._ui.editor_stack.addWidget(self._time_series_fixed_resolution_editor)
    self._ui.editor_stack.addWidget(self._time_series_variable_resolution_editor)
    self._ui.editor_stack.addWidget(self._time_pattern_editor)
    self._ui.editor_stack.addWidget(self._datetime_editor)
    self._ui.editor_stack.addWidget(self._duration_editor)
    self._ui.parameter_type_selector.activated.connect(self._change_parameter_type)
    if value is None:
        # No explicit value given: parse the one stored in the model.
        try:
            value = from_database(self._parent_model.data(parent_index, Qt.EditRole))
        except ParameterValueFormatError as error:
            # Show the error in a default view instead of failing construction.
            self._select_default_view(message="Failed to load value: {}".format(error))
            return
    self._select_editor(value)
def _collect_single_column_values(model, column, rows, hints):
    """
    Collects selected parameter values from a single column.

    The return value of this function depends on what type of data the given column contains.
    In case of plain numbers, a list of floats and a single label string are returned.
    In case of time series, a list of TimeSeries objects is returned, accompanied
    by a list of labels, each label corresponding to one of the time series.

    Args:
        model (QAbstractTableModel): a table model
        column (int): a column index to the model
        rows (Sequence): row indexes to plot
        hints (PlottingHints): a plot support object

    Returns:
        a tuple of values and label(s)
    """
    values = []
    labels = []
    for row in sorted(rows):
        data_index = model.index(row, column)
        if not hints.is_index_in_data(model, data_index):
            continue
        data = model.data(data_index, role=Qt.EditRole)
        if not data:
            continue
        try:
            parsed = from_database(data)
        except ParameterValueFormatError:
            parsed = None
        if isinstance(parsed, (float, int)):
            values.append(float(parsed))
        elif isinstance(parsed, TimeSeries):
            # Time series get one label each.
            labels.append(hints.cell_label(model, data_index))
            values.append(parsed)
        else:
            raise PlottingError("Cannot plot value on row {}".format(row))
    if not values:
        return values, labels
    _raise_if_types_inconsistent(values)
    if isinstance(values[0], float):
        # Plain numbers share a single column label.
        labels.append(hints.column_label(model, column))
    return values, labels
def _create_or_request_parameter_value_editor(self, parent, option, index, db_map):
    """Returns a CustomLineEditor or NumberParameterInlineEditor if the data from index is not of special type.
    Otherwise, emit the signal to request a standalone `ParameterValueEditor` from parent widget.
    """
    try:
        parsed = from_database(index.data(role=Qt.EditRole))
    except ParameterValueFormatError:
        parsed = None
    if isinstance(parsed, (DateTime, Duration, TimePattern, TimeSeries)):
        # Special types need the standalone editor dialog.
        self.parameter_value_editor_requested.emit(index, index.model().value_name(index), parsed)
        return None
    editor_class = NumberParameterInlineEditor if isinstance(parsed, (float, int)) else CustomLineEditor
    editor = editor_class(parent)
    editor.set_data(index.data(Qt.EditRole))
    return editor
def _set_data(self, index, value):
    """
    Sets data for given index.

    In case of errors the value at index is replaced by an ``_ErrorCell`` sentinel.

    Args:
        index (QModelIndex): an index
        value (str): value in database format
    """
    if not self._data:
        self._data = [None]
    row = index.row()
    try:
        element = from_database(value)
    except ParameterValueFormatError as error:
        self._data[row] = _ErrorCell(value, f"Cannot parse: {error}")
        return
    if isinstance(element, self._data_type):
        self._data[row] = element
    else:
        self._data[row] = _ErrorCell(
            value, f"Expected '{self._data_type.__name__}', not {type(element).__name__}"
        )
def data(self, index, role=Qt.DisplayRole):
    """Returns the parsed table value for PARSED_ROLE; None for any other role."""
    if role == PARSED_ROLE:
        return from_database(self._table[index.row()][index.column()])
    return None
import sys

from spinedb_api import DatabaseMapping, from_database

# Dumps the first parameter value of the database given as the command line
# argument into out.dat.
url = sys.argv[1]
db_map = DatabaseMapping(url)
try:
    parameter_value = from_database(db_map.query(db_map.parameter_value_sq).first().value)
    with open("out.dat", "w") as out_file:
        out_file.write(f"{parameter_value}")
finally:
    # Bug fix: close the connection even when the query or the file write fails;
    # the original leaked the connection on any exception above.
    db_map.connection.close()
def fetch_more(self):
    """Creates one child per database value plus a trailing empty child, then marks fetching done."""
    new_children = [ValueItem(from_database(db_value)) for db_value in self.value_list]
    self.append_children(*new_children, self.empty_child())
    self._fetched = True
def _make_pivot_proxy_model():
    """Returns a prefilled pivot table proxy model for testing."""
    # Mocked database manager: get_value just parses the id as a database value.
    db_mngr = MagicMock()
    db_mngr.get_value.side_effect = lambda db_map, item_type, id_, role: from_database(id_)
    mock_db_map = MagicMock()
    mock_db_map.codename = "codename"
    db_mngr.undo_action.__getitem__.side_effect = lambda key: QAction()
    db_mngr.redo_action.__getitem__.side_effect = lambda key: QAction()
    # Patch out UI restore/show so no real window work happens during construction.
    with patch.object(SpineDBEditor, "restore_ui"), patch.object(SpineDBEditor, "show"):
        spine_db_editor = SpineDBEditor(db_mngr, mock_db_map)
        spine_db_editor.create_header_widget = lambda *args, **kwargs: None
        # Map fixtures of increasing complexity: flat, nested, and nested with
        # time series leaves.
        simple_map = Map(["a", "b"], [-1.1, -2.2])
        nested_map = Map(
            ["a", "b"],
            [
                Map([DateTime("2020-11-13T11:00"), DateTime("2020-11-13T12:00")], [-1.1, -2.2]),
                Map([DateTime("2020-11-13T11:00"), DateTime("2020-11-13T12:00")], [-3.3, -4.4]),
            ],
        )
        nested_map_with_time_series = Map(
            ["a", "b"],
            [
                Map(
                    [DateTime("2020-11-13T11:00"), DateTime("2020-11-13T12:00")],
                    [
                        TimeSeriesVariableResolution(["2020-11-13T11:00", "2020-11-13T12:00"], [-1.1, -2.2], False, False),
                        TimeSeriesVariableResolution(["2020-11-13T12:00", "2020-11-13T13:00"], [-3.3, -4.4], False, False),
                    ],
                ),
                Map(
                    [DateTime("2020-11-13T11:00"), DateTime("2020-11-13T12:00")],
                    [
                        TimeSeriesVariableResolution(["2020-11-13T11:00", "2020-11-13T12:00"], [-5.5, -6.6], False, False),
                        TimeSeriesVariableResolution(["2020-11-13T12:00", "2020-11-13T13:00"], [-7.7, -8.8], False, False),
                    ],
                ),
            ],
        )
        # Canned data: (object, parameter, alternative) -> database value.
        data = {
            ('1', 'int_col', 'base_alternative'): '-3',
            ('2', 'int_col', 'base_alternative'): '-1',
            ('3', 'int_col', 'base_alternative'): '2',
            ('1', 'float_col', 'base_alternative'): '1.1',
            ('2', 'float_col', 'base_alternative'): '1.2',
            ('3', 'float_col', 'base_alternative'): '1.3',
            ('1', 'time_series_col', 'base_alternative',): '{"type": "time_series", "data": {"2019-07-10T13:00": 2.3, "2019-07-10T13:20": 5.0}}',
            ('2', 'time_series_col', 'base_alternative',): '{"type": "time_series", "index": {"start": "2019-07-10T13:00", "resolution": "20 minutes"}, "data": [3.3, 4.0]}',
            ('3', 'time_series_col', 'base_alternative',): '{"type": "time_series", "data": {"2019-07-10T13:00": 4.3, "2019-07-10T13:20": 3.0}}',
            ("1", "map_col", "base_alternative"): to_database(simple_map),
            ("2", "map_col", "base_alternative"): to_database(nested_map),
            ("3", "map_col", "base_alternative"): to_database(nested_map_with_time_series),
        }
        # Re-key so every key element is paired with the database and the database
        # itself becomes the final key element.
        # NOTE(review): ``db`` is not defined in this function — presumably a
        # module-level fixture; confirm against the enclosing test module.
        data = {tuple((db, k) for k in key) + (db, ): (db, value) for key, value in data.items()}
        spine_db_editor.load_parameter_value_data = lambda: data
        spine_db_editor.pivot_table_model = model = ParameterValuePivotTableModel(spine_db_editor)
        with patch.object(
            SpineDBEditor, "current_object_class_ids", new_callable=PropertyMock
        ) as mock_current_object_class_ids:
            mock_current_object_class_ids.return_value = {"object": {db: 1}}
            model.call_reset_model(pivot=(['object'], ['parameter', 'alternative'], ['database'], (db, )))
            model.start_fetching()
        spine_db_editor.pivot_table_model = model
        spine_db_editor.pivot_table_proxy.setSourceModel(model)
        return spine_db_editor.pivot_table_proxy