def _remove_plugin(self, plugin_name):
    """Removes installed plugin.

    Args:
        plugin_name (str): plugin name
    """
    plugin_dict = self._installed_plugins.pop(plugin_name)
    plugin_dir = plugin_dict["plugin_dir"]
    # Remove the plugin's specifications from the model.
    spec_paths = [
        deserialize_path(serialized, plugin_dir)
        for serialized_group in plugin_dict["specifications"].values()
        for serialized in serialized_group
    ]
    for spec_path in spec_paths:
        spec_dict = self._toolbox.parse_specification_file(spec_path)
        row = self._toolbox.specification_model.specification_row(spec_dict["name"])
        if row >= 0:
            self._toolbox.do_remove_specification(row, ask_verification=False)
    # Delete the plugin directory and its toolbar from disk and UI.
    shutil.rmtree(plugin_dir)
    self._plugin_toolbars[plugin_name].deleteLater()
    self._toolbox.refresh_toolbars()
def test_deserialize_path_with_absolute_path(self):
    """An absolute serialized path deserializes to the same absolute path."""
    with TemporaryDirectory() as project_dir:
        absolute = Path(gettempdir(), "file.fat")
        serialized = {"type": "path", "relative": False, "path": str(absolute.as_posix())}
        self.assertEqual(deserialize_path(serialized, project_dir), str(absolute))
def fix_file_selection(item_dict):
    """Deserializes an item's "file_selection" entries in place."""
    fixed_selection = list()
    for serialized_path, selected in item_dict.get("file_selection", list()):
        label = deserialize_path(serialized_path, project_dir)
        # Fix old-style data store resource labels '{db_url@item name}'.
        if label.startswith("{") and label.endswith("}"):
            label = label[1:-1]
        fixed_selection.append([label, selected])
    item_dict["file_selection"] = fixed_selection
def test_deserialize_path_with_relative_path(self):
    """A relative serialized path deserializes relative to the project dir."""
    project_dir = gettempdir()
    serialized = {"type": "path", "relative": True, "path": "subdir/file.fat"}
    expected = str(Path(project_dir, "subdir", "file.fat"))
    self.assertEqual(deserialize_path(serialized, project_dir), expected)
def fix_cmd_line_args(item_dict):
    """Deserializes an item's "cmd_line_args" and converts them to new-style arg dicts in place."""
    fixed_args = list()
    for serialized_arg in item_dict.get("cmd_line_args", list()):
        arg = deserialize_path(serialized_arg, project_dir)
        # Fix old-style data store resource labels '{db_url@item name}'.
        if arg.startswith("{") and arg.endswith("}"):
            arg = arg[1:-1]
        # We assume all args are resource labels. This may not always be true, though, and needs to be
        # fixed manually once the project has been loaded.
        fixed_args.append({"type": "resource", "arg": arg})
    item_dict["cmd_line_args"] = fixed_args
def test_deserialize_path_with_relative_file_url(self):
    """A relative file URL deserializes to a scheme-prefixed absolute path."""
    project_dir = gettempdir()
    serialized = {
        "type": "file_url",
        "relative": True,
        "path": "subdir/database.sqlite",
        "scheme": "sqlite",
    }
    expected = "sqlite:///" + str(Path(project_dir, "subdir", "database.sqlite"))
    self.assertEqual(deserialize_path(serialized, project_dir), expected)
def test_deserialize_path_with_absolute_file_url(self):
    """An absolute file URL deserializes independently of the project dir."""
    with TemporaryDirectory() as project_dir:
        absolute = Path(gettempdir(), "database.sqlite")
        serialized = {
            "type": "file_url",
            "relative": False,
            "path": str(absolute.as_posix()),
            "scheme": "sqlite",
        }
        self.assertEqual(deserialize_path(serialized, project_dir), "sqlite:///" + str(absolute))
def _download_plugin(plugin, plugin_local_dir):
    """Downloads a plugin: its plugin.json, its specification files, and tool spec includes.

    Args:
        plugin (dict): plugin info whose "url" points to the remote plugin.json
        plugin_local_dir (str): local directory to place the downloaded files in
    """
    # 1. Create paths
    remote_plugin_file = plugin["url"]
    remote_plugin_dir = urljoin(remote_plugin_file, '.')
    local_plugin_file = os.path.join(plugin_local_dir, "plugin.json")
    # 2. Download and parse plugin.json file
    _download_file(remote_plugin_file, local_plugin_file)
    with open(local_plugin_file) as fh:
        plugin_dict = json.load(fh)
    # 3. Download specification .json files
    specifications = plugin_dict["specifications"]
    for serialized in (p for path_group in specifications.values() for p in path_group):
        local_file = deserialize_path(serialized, plugin_local_dir)
        remote_file = deserialize_remote_path(serialized, remote_plugin_dir)
        _download_file(remote_file, local_file)
    # 4. Download include files in tool specs
    serialized_includes = []
    for serialized in specifications.get("Tool", ()):
        spec_file = deserialize_path(serialized, plugin_local_dir)
        with open(spec_file) as fh:
            spec_dict = json.load(fh)
        # Include paths are relative to the spec's main path, itself relative to the spec file.
        main_path = os.path.join(os.path.dirname(spec_file), spec_dict.get("includes_main_path", "."))
        for include in spec_dict["includes"]:
            absolute_include = os.path.join(main_path, include)
            serialized_includes.append(serialize_path(absolute_include, plugin_local_dir))
    for serialized in serialized_includes:
        local_file = deserialize_path(serialized, plugin_local_dir)
        remote_file = deserialize_remote_path(serialized, remote_plugin_dir)
        _download_file(remote_file, local_file)
def load_individual_plugin(self, plugin_dir):
    """Loads plugin from directory and returns all the specs in a list.

    Args:
        plugin_dir (str): path of plugin dir with "plugin.json" in it.

    Returns:
        list(ProjectItemSpecification): loaded specifications, or None if the
            plugin file is missing or malformed
    """
    plugin_file = os.path.join(plugin_dir, "plugin.json")
    if not os.path.isfile(plugin_file):
        return None
    with open(plugin_file, "r") as fh:
        try:
            plugin_dict = json.load(fh)
        except json.decoder.JSONDecodeError:
            self._toolbox.msg_error.emit(f"Error in plugin file <b>{plugin_file}</b>. Invalid JSON.")
            return None
    try:
        name = plugin_dict["name"]
        plugin_dict["plugin_dir"] = plugin_dir
        self._installed_plugins[name] = plugin_dict
        specifications = plugin_dict["specifications"]
    except KeyError as key:
        self._toolbox.msg_error.emit(f"Error in plugin file <b>{plugin_file}</b>. Key '{key}' not found.")
        return None
    deserialized_paths = [
        deserialize_path(path, plugin_dir) for paths in specifications.values() for path in paths
    ]
    plugin_specs = self._plugin_specs[name] = []
    for path in deserialized_paths:
        spec = self._toolbox.load_specification_from_file(path)
        if not spec:
            # Skip specs that failed to load; the toolbox has already reported the error.
            continue
        spec.plugin = name
        plugin_specs.append(spec)
    for spec in plugin_specs:
        self._toolbox.do_add_specification(spec)
    toolbar = self._plugin_toolbars[name] = PluginToolBar(name, parent=self._toolbox)
    toolbar.setup(plugin_specs)
    self._toolbox.addToolBar(Qt.TopToolBarArea, toolbar)
    # Bug fix: the docstring promises the list of loaded specs but the original
    # implicitly returned None on success; return the specs so callers can use them.
    return plugin_specs
def _specifications(project_dict, project_dir, specification_factories, app_settings, logger):
    """
    Creates project item specifications.

    Args:
        project_dict (dict): a serialized project dictionary
        project_dir (str): path to a directory containing the ``.spinetoolbox`` dir
        specification_factories (dict): a mapping from item type to specification factory
        app_settings (QSettings): Toolbox settings
        logger (LoggerInterface): a logger

    Returns:
        dict: a mapping from item type and specification name to specification
    """
    specifications = dict()
    specifications_dict = project_dict["project"].get("specifications", {})
    # Deserialize every specification file path relative to the project dir.
    definition_file_paths = {
        item_type: [deserialize_path(path, project_dir) for path in serialized_paths]
        for item_type, serialized_paths in specifications_dict.items()
    }
    for item_type, paths in definition_file_paths.items():
        for definition_path in paths:
            try:
                definition_file = open(definition_path, "r")
            except FileNotFoundError:
                logger.msg_error.emit(f"Specification file <b>{definition_path}</b> does not exist")
                continue
            with definition_file:
                try:
                    definition = json.load(definition_file)
                except ValueError:
                    logger.msg_error.emit(f"Item specification file '{definition_path}' not valid")
                    continue
            factory = specification_factories.get(item_type)
            if factory is None:
                # No factory for this item type; silently skip, as before.
                continue
            specification = factory.make_specification(
                definition,
                definition_path,
                app_settings,
                logger,
                embedded_julia_console=None,
                embedded_python_console=None,
            )
            specifications.setdefault(item_type, dict())[specification.name] = specification
    return specifications
def _specification_dicts(project_dict, project_dir, logger):
    """
    Loads project item specification dictionaries.

    Args:
        project_dict (dict): a serialized project dictionary
        project_dir (str): path to a directory containing the ``.spinetoolbox`` dir
        logger (LoggerInterface): a logger

    Returns:
        dict: a mapping from item type to a list of specification dicts
    """
    specification_dicts = dict()
    serialized_specs = project_dict["project"].get("specifications", {})
    # Deserialize every specification file path relative to the project dir.
    specification_file_paths = {
        item_type: [deserialize_path(path, project_dir) for path in serialized_paths]
        for item_type, serialized_paths in serialized_specs.items()
    }
    for item_type, paths in specification_file_paths.items():
        for path in paths:
            try:
                definition_file = open(path, "r")
            except FileNotFoundError:
                logger.msg_error.emit(f"Specification file <b>{path}</b> does not exist")
                continue
            with definition_file:
                try:
                    specification_dict = json.load(definition_file)
                except ValueError:
                    logger.msg_error.emit(f"Item specification file '{path}' not valid")
                    continue
            # Remember where the spec came from so it can be saved back later.
            specification_dict["definition_file_path"] = path
            specification_dicts.setdefault(item_type, list()).append(specification_dict)
    return specification_dicts
def upgrade_v2_to_v3(self, old, project_dir, factories):
    """Upgrades version 2 project dictionary to version 3.

    Changes:
        1. Move "specifications" from "project" -> "Tool" to just "project"
        2. The "mappings" from importer items are used to build Importer specifications

    Args:
        old (dict): Version 2 project dictionary
        project_dir (str): Path to current project directory
        factories (dict): Mapping of item type to item factory

    Returns:
        dict: Version 3 project dictionary
    """
    new = copy.deepcopy(old)
    project = new["project"]
    project["version"] = 3
    # Put DT specs in their own subkey
    project["specifications"]["Data Transformer"] = dt_specs = []
    tool_specs = project["specifications"].get("Tool", [])
    # Iterate in reverse so that popping entries from tool_specs does not
    # shift the indices of the entries still to be visited.
    for i, spec in reversed(list(enumerate(tool_specs))):
        spec_path = deserialize_path(spec, project_dir)
        if not os.path.exists(spec_path):
            self._toolbox.msg_warning.emit(f"Upgrading Tool spec failed. <b>{spec_path}</b> does not exist.")
            continue
        with open(spec_path, "r") as fp:
            try:
                spec = json.load(fp)
            except ValueError:
                # Unparseable spec file: leave it under "Tool" untouched.
                continue
        if spec.get("item_type") == "Data Transformer":
            # Move the serialized path from the Tool list to the DT list.
            dt_specs.append(tool_specs.pop(i))
    project["specifications"]["Importer"] = importer_specs = []
    for item_name, old_item_dict in old["items"].items():
        item_type = old_item_dict["type"]
        if item_type == "Exporter":
            # Factories don't contain 'Exporter' anymore.
            item_type = "GdxExporter"
        try:
            new["items"][item_name] = (
                factories[item_type].item_class().upgrade_v2_to_v3(item_name, old_item_dict, self)
            )
        except KeyError:
            # This happens when a Combiner is encountered.
            # Factories do not contain 'Combiner' anymore
            if item_type == "Combiner":
                new["items"][item_name] = old_item_dict
            else:
                print(f"Some unknown item_type encountered: {item_type}")
        if item_type == "Importer":
            # Each mapping is a (label, mapping dict) pair; see the loops below.
            mappings = old_item_dict.get("mappings")
            # Sanitize old mappings, as we use to do in Importer.from_dict
            if mappings is None:
                mappings = list()
            # Convert table_types and table_row_types keys to int since json always has strings as keys.
            for _, mapping in mappings:
                table_types = mapping.get("table_types", {})
                mapping["table_types"] = {
                    table_name: {int(col): t for col, t in col_types.items()}
                    for table_name, col_types in table_types.items()
                }
                table_row_types = mapping.get("table_row_types", {})
                mapping["table_row_types"] = {
                    table_name: {int(row): t for row, t in row_types.items()}
                    for table_name, row_types in table_row_types.items()
                }
            # Convert serialized paths to absolute in mappings
            _fix_1d_array_to_array(mappings)
            # Make item specs from sanitized mappings
            for k, (label, mapping) in enumerate(mappings):
                spec_name = self.make_unique_importer_specification_name(item_name, label, k)
                spec = dict(name=spec_name, item_type="Importer", mapping=mapping)
                spec_path = os.path.join(project_dir, spec_name + ".json")
                # FIXME: Let's try and handle write errors here...
                with open(spec_path, "w") as fp:
                    json.dump(spec, fp, indent=4)
                importer_specs.append(serialize_path(spec_path, project_dir))
    return new
def test_deserialize_path_with_non_file_url(self):
    """A plain (non-file) URL passes through deserialization unchanged."""
    project_dir = gettempdir()
    url = "http://www.spine-model.org/"
    deserialized = deserialize_path({"type": "url", "path": url}, project_dir)
    self.assertEqual(deserialized, url)