def export_to_sqlite(self, file_path, data_for_export):
    """Exports given data into an SQLite file.

    Emits sqlite_file_exported on success, msg_error on commit failure.

    Args:
        file_path (str): destination path of the SQLite file
        data_for_export (dict): keyword arguments for spinedb_api import_data
    """
    url = URL("sqlite", database=file_path)
    # Bail out early if the target URL cannot be used (e.g. file locked elsewhere).
    if not self.db_mngr.is_url_available(url, self):
        return
    create_new_spine_database(url)
    db_map = DiffDatabaseMapping(url)
    import_data(db_map, **data_for_export)
    try:
        db_map.commit_session("Export initial data from Spine Toolbox.")
    except SpineDBAPIError as err:
        self.msg_error.emit(f"[SpineDBAPIError] Unable to export file <b>{db_map.codename}</b>: {err.msg}")
    else:
        self.sqlite_file_exported.emit(file_path)
    finally:
        # Fix: release the database connection in every outcome; the sibling
        # helpers in this file (_import, run) close their mappings explicitly,
        # while the original left this one open.
        db_map.connection.close()
def _import(all_data, url, logs_dir, cancel_on_error):
    """Imports all given data items into the database at ``url`` and logs errors.

    Args:
        all_data (Iterable): collections of keyword arguments for spinedb_api.import_data
        url (str): URL of the destination database
        logs_dir (str): directory where a time-stamped error log file is written
        cancel_on_error (bool): if True, roll back a data item's staged changes
            when that item produced import errors
    """
    try:
        # NOTE(review): username appears redacted ("******") — confirm the intended value.
        db_map = spinedb_api.DiffDatabaseMapping(url, upgrade=False, username="******")
    except (spinedb_api.SpineDBAPIError, spinedb_api.SpineDBVersionError) as err:
        print("Unable to create database mapping, all import operations will be omitted: {0}".format(err))
        return
    all_import_errors = []
    for data in all_data:
        import_num, import_errors = spinedb_api.import_data(db_map, **data)
        all_import_errors += import_errors
        if import_errors and cancel_on_error:
            # Only roll back when something was actually staged.
            if db_map.has_pending_changes():
                db_map.rollback_session()
        elif import_num:
            db_map.commit_session("Import data by Spine Toolbox Importer")
            print("Inserted {0} data with {1} errors into {2}".format(import_num, len(import_errors), url))
    db_map.connection.close()
    if all_import_errors:
        # Log errors in a time stamped file into the logs directory
        timestamp = _create_log_file_timestamp()
        logfilepath = os.path.abspath(os.path.join(logs_dir, timestamp + "_error.log"))
        with open(logfilepath, 'w') as f:
            for err in all_import_errors:
                f.write("{0}\n".format(err.msg))
        # Make error log file anchor with path as tooltip
        logfile_anchor = (
            "<a style='color:#BB99FF;' title='" + logfilepath + "' href='file:///" + logfilepath + "'>error log</a>"
        )
        rollback_text = ", rolling back" if cancel_on_error else ""
        # Errors go to stderr so the caller can show them as a warning/error stream.
        print("Import errors{0}. Logfile: {1}".format(rollback_text, logfile_anchor), file=sys.stderr)
def export_to_excel(self, file_path, data_for_export):
    """Exports given data into Excel file."""
    # NOTE: We import data into an in-memory Spine db and then export that to excel.
    file_name = os.path.split(file_path)[1]
    memory_url = URL("sqlite", database="")
    db_map = DiffDatabaseMapping(memory_url, create=True)
    import_data(db_map, **data_for_export)
    try:
        export_spine_database_to_xlsx(db_map, file_path)
    except PermissionError:
        # Typically the workbook is open in Excel, which locks the file.
        message = f"Unable to export file <b>{file_name}</b>.<br/>Close the file in Excel and try again."
        self.msg_error.emit(message)
    except OSError:
        self.msg_error.emit(f"[OSError] Unable to export file <b>{file_name}</b>.")
    else:
        self.file_exported.emit(file_path)
def export_to_excel(self, file_path, data_for_export, caller):  # pylint: disable=no-self-use
    """Exports given data into Excel file.

    Args:
        file_path (str): destination path of the Excel file
        data_for_export (dict): keyword arguments for spinedb_api import_data
        caller: object whose msg_error / file_exported signals receive the result
    """
    # NOTE: We import data into an in-memory Spine db and then export that to excel.
    url = URL("sqlite", database="")
    db_map = DatabaseMapping(url, create=True)
    import_data(db_map, **data_for_export)
    file_name = os.path.split(file_path)[1]
    try:
        # Fix: only remove a pre-existing stale file. Unconditional os.remove()
        # raised FileNotFoundError (a subclass of OSError) when exporting to a
        # fresh path, so the export was aborted and wrongly reported as failed.
        if os.path.exists(file_path):
            os.remove(file_path)
        export_spine_database_to_xlsx(db_map, file_path)
    except PermissionError:
        # Typically the workbook is open in Excel, which locks the file.
        error_msg = {
            None: [f"Unable to export file <b>{file_name}</b>.<br/>Close the file in Excel and try again."]
        }
        caller.msg_error.emit(error_msg)
    except OSError:
        error_msg = {None: [f"[OSError] Unable to export file <b>{file_name}</b>."]}
        caller.msg_error.emit(error_msg)
    else:
        caller.file_exported.emit(file_path)
def run(from_urls, to_urls, logs_dir, cancel_on_error):
    """Merges data from all source databases into every destination database.

    Args:
        from_urls (Iterable): URLs of the source databases
        to_urls (Iterable): URLs of the destination databases
        logs_dir (str): directory where a time-stamped error log file is written
        cancel_on_error (bool): if True, roll back a destination's staged
            changes whenever a source produced import errors
    """
    print("starting combiner program")
    # Drop URLs for which no database mapping could be created.
    from_db_maps = list(filter(None, (_get_db_map(url) for url in from_urls)))
    to_db_maps = list(filter(None, (_get_db_map(url) for url in to_urls)))
    # Export each source once; the same data is imported into every destination.
    from_db_map_data = {db_map: export_data(db_map) for db_map in from_db_maps}
    all_errors = []
    for to_db_map in to_db_maps:
        merged_count = 0
        error_count = 0
        for from_db_map, data in from_db_map_data.items():
            import_count, import_errors = import_data(to_db_map, **data)
            all_errors.extend(import_errors)
            if import_errors and cancel_on_error:
                if to_db_map.has_pending_changes():
                    to_db_map.rollback_session()
            elif import_count:
                to_db_map.commit_session(
                    f"Import {import_count} items from {from_db_map.db_url} by Spine Toolbox Combiner"
                )
            merged_count += import_count
            error_count += len(import_errors)
        print(f"Merged {merged_count} data with {error_count} errors into {to_db_map.db_url}")
    for db_map in from_db_maps + to_db_maps:
        db_map.connection.close()
    if all_errors:
        # Log errors in a time stamped file into the logs directory.
        timestamp = _create_log_file_timestamp()
        logfilepath = os.path.abspath(os.path.join(logs_dir, timestamp + "_error.log"))
        with open(logfilepath, 'w') as f:
            f.writelines("{0}\n".format(err) for err in all_errors)
        # Make error log file anchor with path as tooltip.
        logfile_anchor = (
            "<a style='color:#BB99FF;' title='" + logfilepath + "' href='file:///" + logfilepath + "'>error log</a>"
        )
        print("Import errors. Logfile: {0}".format(logfile_anchor), file=sys.stderr)
def import_data(self, data, errors):
    """Imports mapped data into the database and reports any errors.

    Args:
        data (dict): keyword arguments for spinedb_api.import_data
        errors (Iterable): (table_name, error_message) pairs from the mapping step

    Returns:
        bool: True if the import succeeded without errors, False otherwise
    """
    errors = [f"{table_name}: {error_message}" for table_name, error_message in errors]
    try:
        import_num, import_errors = spinedb_api.import_data(self._db_map, **data)
        errors.extend(f"{e.db_type}: {e.msg}" for e in import_errors)
    except spinedb_api.SpineIntegrityError as err:
        self._db_map.rollback_session()
        self._error_widget.set_import_state(0, [err.msg])
        self.set_error_widget_as_main_widget()
        # Fix: report failure instead of falling through; the original
        # continued to the `if errors:` check where `import_num` was unbound
        # (NameError) and could even return True after a rollback.
        return False
    except spinedb_api.SpineDBAPIError as err:
        self._db_map.rollback_session()
        # Fix: format the message; the original passed the raw "%s" template
        # and the message as two separate list items.
        self._error_widget.set_import_state(0, [f"Unable to import Data: {err.msg}"])
        self.set_error_widget_as_main_widget()
        return False
    if errors:
        self._error_widget.set_import_state(import_num, errors)
        self.set_error_widget_as_main_widget()
        return False
    return True
def _import_xlsx_to_database(self, excel_file_name, db_map):
    """Reads mapped data from an Excel file, imports it into db_map and commits.

    Asserts that the import itself produced no errors; returns the number of
    imported items.
    """
    connector = ExcelConnector(None)
    connector.connect_to_source(excel_file_name)
    table_mappings = {}
    table_options = {}
    for sheet_name, settings in connector.get_tables().items():
        if settings["mapping"] is not None:
            table_mappings[sheet_name] = settings["mapping"]
        if settings["options"] is not None:
            table_options[sheet_name] = settings["options"]
    data, errors = connector.get_mapped_data(table_mappings, table_options, {}, {})
    count, import_errors = import_data(db_map, **data)
    self.assertFalse(import_errors)
    db_map.commit_session('Excel import')
    return count