def test_serialize_path_makes_absolute_paths_from_paths_not_in_project_dir(self):
    with TemporaryDirectory() as project_dir:
        with TemporaryDirectory() as path:
            serialized = serialize_path(path, project_dir)
            expected_path = str(Path(path).as_posix())
            self.assertEqual(serialized, {"type": "path", "relative": False, "path": expected_path})
def test_serialize_path_makes_relative_paths_from_paths_in_project_dir(self):
    with TemporaryDirectory() as path:
        project_dir = gettempdir()
        serialized = serialize_path(path, project_dir)
        expected_path = str(Path(path).relative_to(project_dir).as_posix())
        self.assertEqual(serialized, {"type": "path", "relative": True, "path": expected_path})
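# A minimal sketch of the serialize_path helper the two tests above exercise,
# inferred from their assertions alone; the real implementation may differ in
# details such as URL handling or error reporting.
from pathlib import Path

def serialize_path(path, project_dir):
    """Serializes a file path to a dict, making it relative if it lies inside project_dir."""
    try:
        # Path.relative_to() raises ValueError when path is not under project_dir.
        relative = Path(path).relative_to(project_dir)
        return {"type": "path", "relative": True, "path": relative.as_posix()}
    except ValueError:
        return {"type": "path", "relative": False, "path": Path(path).as_posix()}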
def _download_plugin(plugin, plugin_local_dir):
    """Downloads a plugin, its specification files, and their include files into plugin_local_dir.

    Args:
        plugin (dict): plugin metadata; its "url" key points to the remote plugin.json file
        plugin_local_dir (str): local directory where the plugin is downloaded
    """
    # 1. Create paths
    plugin_remote_file = plugin["url"]
    plugin_remote_dir = urljoin(plugin_remote_file, ".")
    plugin_local_file = os.path.join(plugin_local_dir, "plugin.json")
    # 2. Download and parse the plugin.json file
    _download_file(plugin_remote_file, plugin_local_file)
    with open(plugin_local_file) as fh:
        plugin_dict = json.load(fh)
    # 3. Download the specification .json files
    specifications = plugin_dict["specifications"]
    serialized_paths = (path for paths in specifications.values() for path in paths)
    for serialized in serialized_paths:
        local_file = deserialize_path(serialized, plugin_local_dir)
        remote_file = deserialize_remote_path(serialized, plugin_remote_dir)
        _download_file(remote_file, local_file)
    # 4. Download the include files referenced by Tool specifications
    serialized_includes = []
    for serialized in specifications.get("Tool", ()):
        spec_file = deserialize_path(serialized, plugin_local_dir)
        with open(spec_file) as fh:
            spec_dict = json.load(fh)
        includes = spec_dict["includes"]
        includes_main_path = spec_dict.get("includes_main_path", ".")
        spec_dir = os.path.dirname(spec_file)
        includes_main_path = os.path.join(spec_dir, includes_main_path)
        includes = [os.path.join(includes_main_path, include) for include in includes]
        serialized_includes += [serialize_path(include, plugin_local_dir) for include in includes]
    for serialized in serialized_includes:
        local_file = deserialize_path(serialized, plugin_local_dir)
        remote_file = deserialize_remote_path(serialized, plugin_remote_dir)
        _download_file(remote_file, local_file)
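# A sketch of the deserialize_remote_path counterpart used above, under the
# assumption that it mirrors deserialize_path but resolves the serialized path
# against a base URL instead of a local directory. Note that plugin_remote_dir
# ends with "/" thanks to urljoin(plugin_remote_file, ".") in the caller, so a
# plain urljoin suffices here.
from urllib.parse import urljoin

def deserialize_remote_path(serialized, base_url):
    """Resolves a serialized relative path against a remote base URL."""
    if serialized.get("type") != "path" or not serialized.get("relative"):
        # Assumption: only relative paths make sense relative to a remote base.
        raise RuntimeError(f"Cannot resolve {serialized} against a remote base URL")
    return urljoin(base_url, serialized["path"])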
def upgrade_v2_to_v3(self, old, project_dir, factories):
    """Upgrades version 2 project dictionary to version 3.

    Changes:
        1. Move "specifications" from "project" -> "Tool" to just "project"
        2. The "mappings" from importer items are used to build Importer specifications

    Args:
        old (dict): Version 2 project dictionary
        project_dir (str): Path to current project directory
        factories (dict): Mapping of item type to item factory

    Returns:
        dict: Version 3 project dictionary
    """
    new = copy.deepcopy(old)
    project = new["project"]
    project["version"] = 3
    # Put Data Transformer specs in their own subkey.
    project["specifications"]["Data Transformer"] = dt_specs = []
    tool_specs = project["specifications"].get("Tool", [])
    # Iterate in reverse so that popping items does not shift the remaining indices.
    for i, spec in reversed(list(enumerate(tool_specs))):
        spec_path = deserialize_path(spec, project_dir)
        if not os.path.exists(spec_path):
            self._toolbox.msg_warning.emit(f"Upgrading Tool spec failed. <b>{spec_path}</b> does not exist.")
            continue
        with open(spec_path, "r") as fp:
            try:
                spec = json.load(fp)
            except ValueError:
                continue
        if spec.get("item_type") == "Data Transformer":
            dt_specs.append(tool_specs.pop(i))
    project["specifications"]["Importer"] = importer_specs = []
    for item_name, old_item_dict in old["items"].items():
        item_type = old_item_dict["type"]
        if item_type == "Exporter":
            # Factories don't contain 'Exporter' anymore.
            item_type = "GdxExporter"
        try:
            new["items"][item_name] = (
                factories[item_type].item_class().upgrade_v2_to_v3(item_name, old_item_dict, self)
            )
        except KeyError:
            # This happens when a Combiner is encountered.
            # Factories do not contain 'Combiner' anymore.
            if item_type == "Combiner":
                new["items"][item_name] = old_item_dict
            else:
                print(f"Unknown item type encountered: {item_type}")
        if item_type == "Importer":
            mappings = old_item_dict.get("mappings")
            # Sanitize old mappings, as we used to do in Importer.from_dict.
            if mappings is None:
                mappings = list()
            # Convert table_types and table_row_types keys to int since JSON always has strings as keys.
            for _, mapping in mappings:
                table_types = mapping.get("table_types", {})
                mapping["table_types"] = {
                    table_name: {int(col): t for col, t in col_types.items()}
                    for table_name, col_types in table_types.items()
                }
                table_row_types = mapping.get("table_row_types", {})
                mapping["table_row_types"] = {
                    table_name: {int(row): t for row, t in row_types.items()}
                    for table_name, row_types in table_row_types.items()
                }
            # Convert serialized paths to absolute in mappings.
            _fix_1d_array_to_array(mappings)
            # Make item specs from sanitized mappings.
            for k, (label, mapping) in enumerate(mappings):
                spec_name = self.make_unique_importer_specification_name(item_name, label, k)
                spec = dict(name=spec_name, item_type="Importer", mapping=mapping)
                spec_path = os.path.join(project_dir, spec_name + ".json")
                # FIXME: Let's try and handle write errors here...
                with open(spec_path, "w") as fp:
                    json.dump(spec, fp, indent=4)
                importer_specs.append(serialize_path(spec_path, project_dir))
    return new
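# Hypothetical driver, for illustration only: the ".spinetoolbox/project.json"
# layout, the version lookup, and the names `upgrader` and `factories` are
# assumptions here, not part of the upgrader's documented contract.
import json
import os

project_json = os.path.join(project_dir, ".spinetoolbox", "project.json")
with open(project_json) as fp:
    old = json.load(fp)
if old["project"]["version"] == 2:
    new = upgrader.upgrade_v2_to_v3(old, project_dir, factories)
    with open(project_json, "w") as fp:
        json.dump(new, fp, indent=4)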