Ejemplo n.º 1
0
    def __init__(self, parent=None):
        """Set up the package manager and locate the package bookkeeping JSON files."""
        super().__init__(parent)

        self._application = Application.getInstance()
        self._container_registry = self._application.getContainerRegistry()
        self._plugin_registry = self._application.getPluginRegistry()

        # JSON files that keep track of all installed packages.
        self._user_package_management_file_path = None
        self._bundled_package_management_file_path = None
        # The last search path containing a bundled_packages.json wins.
        for resource_dir in Resources.getSearchPaths():
            bundled_candidate = os.path.join(resource_dir, "bundled_packages.json")
            if os.path.exists(bundled_candidate):
                self._bundled_package_management_file_path = bundled_candidate
        # The user packages file may live in either the data or the config storage path.
        for storage_dir in (Resources.getDataStoragePath(), Resources.getConfigStoragePath()):
            user_candidate = os.path.join(storage_dir, "packages.json")
            if os.path.exists(user_candidate):
                self._user_package_management_file_path = user_candidate
        if self._user_package_management_file_path is None:
            # No user packages file exists yet; it will be created in the data storage path.
            self._user_package_management_file_path = os.path.join(Resources.getDataStoragePath(), "packages.json")

        self._bundled_package_dict = {}      # All bundled packages.
        self._installed_package_dict = {}    # All installed packages.
        self._to_remove_package_set = set()  # Packages that need to be removed at the next start.
        self._to_install_package_dict = {}   # Packages that need to be installed at the next start.
Ejemplo n.º 2
0
    def __init__(self, application, parent = None):
        """Initialize the package manager and locate the package bookkeeping JSON files.

        :param application: The application instance, used to reach the container and plugin registries.
        :param parent: Optional parent object passed on to the superclass.
        """
        super().__init__(parent)

        self._application = application
        self._container_registry = self._application.getContainerRegistry()
        self._plugin_registry = self._application.getPluginRegistry()

        #JSON files that keep track of all installed packages.
        #Stays None until an existing packages.json is found, so the type is Optional[str].
        self._user_package_management_file_path = None #type: Optional[str]
        self._bundled_package_management_file_paths = [] #type: List[str]
        for search_path in Resources.getSearchPaths():
            candidate_bundled_path = os.path.join(search_path, "bundled_packages.json")
            if os.path.exists(candidate_bundled_path):
                Logger.log("i", "Found bundled packages location: {location}".format(location = search_path))
                self._bundled_package_management_file_paths.append(candidate_bundled_path)
        #The user packages file may live in either the data or the config storage path.
        for search_path in (Resources.getDataStoragePath(), Resources.getConfigStoragePath()):
            candidate_user_path = os.path.join(search_path, "packages.json")
            if os.path.exists(candidate_user_path):
                self._user_package_management_file_path = candidate_user_path
        if self._user_package_management_file_path is None: #Doesn't exist yet.
            self._user_package_management_file_path = os.path.join(Resources.getDataStoragePath(), "packages.json")

        #Where installed packages are placed on disk, keyed by package type.
        self._installation_dirs_dict = {"plugins": os.path.abspath(Resources.getStoragePath(Resources.Plugins))}  # type: Dict[str, str]

        self._bundled_package_dict = {}     # A dict of all bundled packages
        self._installed_package_dict = {}   # A dict of all installed packages
        self._to_remove_package_set = set() # A set of packages that need to be removed at the next start
        self._to_install_package_dict = {}  # A dict of packages that need to be installed at the next start
Ejemplo n.º 3
0
    def test_factoryReset(self):
        """Test that a factory reset empties the data storage and leaves exactly one backup archive behind.

        Fixes over the previous version: the cleanup loop was duplicated verbatim
        (it ran twice over the same directory), and the bare ``except:`` has been
        narrowed to ``OSError`` so real bugs are not silently swallowed.
        """
        # FIXME: This is a temporary workaround. A proper fix should be to make the home directory configurable so a
        #        unique temporary directory can be used for each test and it can removed afterwards.
        # HACK: Record the number of files and directories in the data storage directory before the factory reset,
        # so after the reset, we can compare if there's a new ZIP file being created. Note that this will not always
        # work, especially when there are multiple tests running on the same host at the same time.
        storage_parent = os.path.dirname(Resources.getDataStoragePath())
        original_filenames = os.listdir(storage_parent)

        Resources.factoryReset()
        # Check if the data is deleted!
        assert len(os.listdir(Resources.getDataStoragePath())) == 0

        # The data folder should still be there, but it should also have created a zip with the data it deleted.
        new_filenames = os.listdir(storage_parent)
        assert len(new_filenames) - len(original_filenames) == 1

        # Clean up after ourselves: best-effort removal of the backup ZIP (and anything
        # else) left next to the data folder. Directories and locked files are left alone.
        for file_name in os.listdir(storage_parent):
            file_path = os.path.join(storage_parent, file_name)
            try:
                os.unlink(file_path)
            except OSError:
                pass
Ejemplo n.º 4
0
    def __init__(self) -> None:
        """
        Creates the version upgrade plug-in from 4.4 to 4.5.

        In this case the plug-in will also check for stacks that need to be
        deleted.
        """

        # Only delete hidden stacks when upgrading from version 4.4. Not 4.3 or 4.5, just when you're starting out from 4.4.
        # If you're starting from an earlier version, you can't have had the bug that produces too many hidden stacks (https://github.com/Ultimaker/Cura/issues/6731).
        # If you're starting from a later version, the bug was already fixed.
        storage_root = os.path.dirname(Resources.getDataStoragePath())
        current_folder = os.path.basename(Resources.getDataStoragePath())
        version_pattern = re.compile(r"\d+\.\d+")
        # Sibling folders whose names are version numbers, excluding the current
        # version (its folder was just copied from the previous one).
        candidates = {name for name in os.listdir(storage_root)
                      if version_pattern.fullmatch(name) and name != current_folder}
        if candidates:
            # Compare by semantic version, not lexicographically.
            newest = max(candidates, key=Version)
            if newest == "4.4":
                self.removeHiddenStacks()
Ejemplo n.º 5
0
    def __init__(self,
                 application: "QtApplication",
                 parent: Optional[QObject] = None) -> None:
        """Set up the package manager and locate all package bookkeeping files."""
        super().__init__(parent)

        self._application = application
        self._container_registry = self._application.getContainerRegistry()
        self._plugin_registry = self._application.getPluginRegistry()

        # JSON files that keep track of all installed packages.
        self._user_package_management_file_path = None  # type: Optional[str]
        self._bundled_package_management_file_paths = []  # type: List[str]
        for bundled_dir in Resources.getAllPathsForType(Resources.BundledPackages):
            if not os.path.isdir(bundled_dir):
                continue
            # Every JSON file inside a bundled_packages directory describes bundled packages.
            for entry in os.listdir(bundled_dir):
                if not entry.endswith(".json"):
                    continue
                entry_path = os.path.join(bundled_dir, entry)
                if not os.path.isfile(entry_path):
                    continue
                self._bundled_package_management_file_paths.append(entry_path)
                Logger.log("i", "Found bundled packages JSON file: {location}".format(location = entry_path))

        # The user packages file may live in either the data or the config storage path.
        for storage_dir in (Resources.getDataStoragePath(), Resources.getConfigStoragePath()):
            user_candidate = os.path.join(storage_dir, "packages.json")
            if os.path.exists(user_candidate):
                self._user_package_management_file_path = user_candidate
        if self._user_package_management_file_path is None:
            # No user packages file exists yet; it will be created in the data storage path.
            self._user_package_management_file_path = os.path.join(Resources.getDataStoragePath(), "packages.json")

        # Where installed packages are placed on disk, keyed by package type.
        self._installation_dirs_dict = {"plugins": os.path.abspath(Resources.getStoragePath(Resources.Plugins))}  # type: Dict[str, str]

        self._bundled_package_dict = {}  # type: Dict[str, Dict[str, Any]]  # All bundled packages.
        self._installed_package_dict = {}  # type: Dict[str, Dict[str, Any]]  # All installed packages.
        self._to_remove_package_set = set()  # type: Set[str]  # Packages to remove at the next start.
        self._to_install_package_dict = {}  # type: Dict[str, Dict[str, Any]]  # Packages to install at the next start.
        self._dismissed_packages = set()  # type: Set[str]  # Packages dismissed by the user.

        # There can be plugins that provide remote packages (and thus, newer / different versions for a package).
        self._available_package_versions = {}  # type: Dict[str, Set[UMVersion]]

        self._packages_with_update_available = set()  # type: Set[str]
Ejemplo n.º 6
0
    def _createCustomFdmPrinterExtruderStack(self, machine_id: str, position: int, quality_id: str, material_id: str) -> None:
        """Create and write an extruder stack file (plus its definition-changes and
        user-settings containers) for a Custom FDM printer.

        :param machine_id: ID of the machine stack the extruder belongs to.
        :param position: Zero-based extruder position.
        :param quality_id: Container ID for the quality slot.
        :param material_id: Container ID for the material slot.
        """
        stack_id = "custom_extruder_%s" % (position + 1)
        if self._current_fdm_printer_count > 1:
            stack_id += " #%s" % self._current_fdm_printer_count

        definition_id = "custom_extruder_%s" % (position + 1)

        # Companion containers for this stack: definition changes and user settings.
        definition_changes_parser = self._getCustomFdmPrinterDefinitionChanges(stack_id)
        definition_changes_id = definition_changes_parser["general"]["name"]
        user_settings_parser = self._getCustomFdmPrinterUserSettings(stack_id)
        user_settings_id = user_settings_parser["general"]["name"]

        # Assemble the extruder stack config itself.
        parser = configparser.ConfigParser()
        parser.add_section("general")
        parser["general"]["version"] = str(2)
        parser["general"]["name"] = "Extruder %s" % (position + 1)
        parser["general"]["id"] = stack_id

        parser.add_section("metadata")
        parser["metadata"]["type"] = "extruder_train"
        parser["metadata"]["machine"] = machine_id
        parser["metadata"]["position"] = str(position)

        # Container order is significant: slot 0 is the user container, slot 6 the definition.
        parser.add_section("containers")
        container_ids = (user_settings_id, "empty_quality_changes", quality_id, material_id,
                         "empty_variant", definition_changes_id, definition_id)
        for slot, container_id in enumerate(container_ids):
            parser["containers"][str(slot)] = container_id

        # Serialize each config and write it into its storage directory.
        data_storage = Resources.getDataStoragePath()
        output_specs = (
            (definition_changes_parser, os.path.join(data_storage, "definition_changes"),
             quote_plus(definition_changes_id) + ".inst.cfg"),
            (user_settings_parser, os.path.join(data_storage, "user"),
             quote_plus(user_settings_id) + ".inst.cfg"),
            (parser, os.path.join(data_storage, "extruders"),
             quote_plus(stack_id) + ".extruder.cfg"),
        )
        for config, directory, file_name in output_specs:
            buffer = io.StringIO()
            config.write(buffer)
            with open(os.path.join(directory, file_name), "w", encoding = "utf-8") as f:
                f.write(buffer.getvalue())
Ejemplo n.º 7
0
    def _getUpgradeTasks(self) -> Iterator[UpgradeTask]:
        """Yield an UpgradeTask for every configuration file whose on-disk version
        matches a registered source version for its configuration type.

        Fix: the comment documented the ignored-files pattern as ``^(...)$`` but the
        code omitted the trailing ``$``, so with ``re.match`` an ignored-file pattern
        also matched any file name that merely *starts* with it. The anchor is now added.
        """
        storage_path_prefixes = set()
        storage_path_prefixes.add(Resources.getConfigStoragePath())
        storage_path_prefixes.add(Resources.getDataStoragePath())

        # Make sure the types and paths are ordered so we always get the same results.
        self._storage_paths = collections.OrderedDict(sorted(self._storage_paths.items()))
        for key in self._storage_paths:
            self._storage_paths[key] = collections.OrderedDict(sorted(self._storage_paths[key].items()))

        # Use pattern: /^(pattern_a|pattern_b|pattern_c|...)$/
        combined_regex_ignored_files = "^(" + "|".join(self._ignored_files) + ")$"
        for old_configuration_type, version_storage_paths_dict in self._storage_paths.items():
            for src_version, storage_paths in version_storage_paths_dict.items():
                for prefix in storage_path_prefixes:
                    for storage_path in storage_paths:
                        path = os.path.join(prefix, storage_path)
                        for configuration_file in self._getFilesInDirectory(path):
                            # Get file version. Only add this upgrade task if the current file version matches with
                            # the defined version that scans through this folder.
                            if re.match(combined_regex_ignored_files, configuration_file):
                                continue
                            try:
                                with open(os.path.join(path, configuration_file), "r", encoding = "utf-8") as f:
                                    file_version = self._get_version_functions[old_configuration_type](f.read())
                                    if file_version != src_version:
                                        continue
                            except:
                                Logger.log("w", "Failed to get file version: %s, skip it", configuration_file)
                                continue

                            Logger.log("i", "Create upgrade task for configuration file [%s] with type [%s] and source version [%s]",
                                       configuration_file, old_configuration_type, file_version)
                            yield UpgradeTask(storage_path = path, file_name = configuration_file,
                                              configuration_type = old_configuration_type)
Ejemplo n.º 8
0
    def _getUpgradeTasks(self) -> Iterator[UpgradeTask]:
        """Generate an upgrade task for each configuration file whose stored version
        matches a registered source version for its configuration type."""
        prefixes = {Resources.getConfigStoragePath(), Resources.getDataStoragePath()}

        # Make sure the types and paths are ordered so we always get the same results.
        self._storage_paths = collections.OrderedDict(sorted(self._storage_paths.items()))
        for configuration_type in self._storage_paths:
            self._storage_paths[configuration_type] = collections.OrderedDict(
                sorted(self._storage_paths[configuration_type].items()))

        ignored_files_regex = "(" + ")|(".join(self._ignored_files) + ")"
        for configuration_type, paths_per_version in self._storage_paths.items():
            for source_version, relative_paths in paths_per_version.items():
                for prefix in prefixes:
                    for relative_path in relative_paths:
                        directory = os.path.join(prefix, relative_path)
                        for file_name in self._getFilesInDirectory(directory):
                            if re.match(ignored_files_regex, file_name):
                                continue
                            # Read the file's version; only files whose version equals the
                            # source version registered for this folder produce a task.
                            try:
                                with open(os.path.join(directory, file_name), "r", encoding = "utf-8") as f:
                                    detected_version = self._get_version_functions[configuration_type](f.read())
                            except:
                                Logger.log("w", "Failed to get file version: %s, skip it", file_name)
                                continue
                            if detected_version != source_version:
                                continue

                            Logger.log("i", "Create upgrade task for configuration file [%s] with type [%s] and source version [%s]",
                                       file_name, configuration_type, detected_version)
                            yield UpgradeTask(storage_path = directory, file_name = file_name,
                                              configuration_type = configuration_type)
Ejemplo n.º 9
0
    def upgrade(self):
        """Upgrade all old configuration files found under the config and data storage paths.

        Shows a notification message when anything was upgraded.
        :return: True when at least one file was upgraded.
        """
        Logger.log("i", "Looking for old configuration files to upgrade.")
        upgraded = False  # Whether anything at all was upgraded.
        upgrade_paths = self._findShortestUpgradePaths()
        for old_configuration_type, storage_paths in self._storage_paths.items():
            for storage_path in storage_paths:
                config_dir = os.path.join(Resources.getConfigStoragePath(), storage_path)
                data_dir = os.path.join(Resources.getDataStoragePath(), storage_path)
                # Scan the config location first, then the data location when distinct.
                directories = [config_dir]
                if data_dir != config_dir:
                    directories.append(data_dir)
                for directory in directories:
                    for configuration_file in self._getFilesInDirectory(directory, exclude_paths = ["old", "cache"]):
                        upgraded |= self._upgradeFile(directory, configuration_file,
                                                      old_configuration_type, upgrade_paths)

        if upgraded:
            message = UM.Message(text = catalogue.i18nc(
                "@info:version-upgrade",
                "A configuration from an older version of {0} was imported.",
                UM.Application.getInstance().getApplicationName()))
            message.show()
        return upgraded
Ejemplo n.º 10
0
    def restore(self) -> bool:
        """Restore this backup into the current Cura configuration.

        :return: Whether extracting the backup archive succeeded.
        """
        if not self.zip_file or not self.meta_data or not self.meta_data.get("cura_release", None):
            # We cannot restore without the minimum required information.
            Logger.log("w", "Tried to restore a Cura backup without having proper data or meta data.")
            self._showMessage(
                self.catalog.i18nc("@info:backup_failed",
                                   "Tried to restore a Cura backup without having proper data or meta data."))
            return False

        current_version = self._application.getVersion()
        version_to_restore = self.meta_data.get("cura_release", "master")

        # NOTE(review): this compares the two versions with "<" directly — presumably
        # getVersion() returns an orderable version type; confirm it is not a plain
        # string, for which e.g. "10.0" < "9.0" would hold.
        if current_version < version_to_restore:
            # Cannot restore version newer than current because settings might have changed.
            Logger.log("d", "Tried to restore a Cura backup of version {version_to_restore} with cura version {current_version}".format(version_to_restore = version_to_restore, current_version = current_version))
            self._showMessage(
                self.catalog.i18nc("@info:backup_failed",
                                   "Tried to restore a Cura backup that is higher than the current version."))
            return False

        # Extract the in-memory archive into the data storage directory.
        version_data_dir = Resources.getDataStoragePath()
        archive = ZipFile(io.BytesIO(self.zip_file), "r")
        extracted = self._extractArchive(archive, version_data_dir)

        # Under Linux, preferences are stored elsewhere, so we copy the file to there.
        if Platform.isLinux():
            preferences_file_name = self._application.getApplicationName()
            preferences_file = Resources.getPath(Resources.Preferences, "{}.cfg".format(preferences_file_name))
            backup_preferences_file = os.path.join(version_data_dir, "{}.cfg".format(preferences_file_name))
            Logger.log("d", "Moving preferences file from %s to %s", backup_preferences_file, preferences_file)
            shutil.move(backup_preferences_file, preferences_file)

        return extracted
Ejemplo n.º 11
0
    def makeFromCurrent(self) -> None:
        """Create a back-up from the current user config folder."""

        cura_release = self._application.getVersion()
        version_data_dir = Resources.getDataStoragePath()

        Logger.log("d", "Creating backup for Cura %s, using folder %s", cura_release, version_data_dir)

        # Obfuscate sensitive secrets before they end up in the archive.
        secrets = self._obfuscate()

        # Ensure all current settings are saved.
        self._application.saveSettings()

        # We copy the preferences file to the user data directory in Linux as it's in a different location there.
        # When restoring a backup on Linux, we move it back.
        # TODO: This should check for the config directory not being the same as the data
        # directory, rather than hard-coding that to Linux systems.
        if Platform.isLinux():
            preferences_file_name = self._application.getApplicationName()
            preferences_file = Resources.getPath(Resources.Preferences, "{}.cfg".format(preferences_file_name))
            backup_preferences_file = os.path.join(version_data_dir, "{}.cfg".format(preferences_file_name))
            if os.path.exists(preferences_file):
                backup_is_same = os.path.exists(backup_preferences_file) and os.path.samefile(preferences_file, backup_preferences_file)
                if not backup_is_same:
                    Logger.log("d", "Copying preferences file from %s to %s", preferences_file, backup_preferences_file)
                    shutil.copyfile(preferences_file, backup_preferences_file)

        # Create an empty buffer and write the archive to it.
        buffer = io.BytesIO()
        archive = self._makeArchive(buffer, version_data_dir)
        if archive is None:
            return
        files = archive.namelist()

        # Count the metadata items. We do this in a rather naive way at the moment.
        # Subtract one for the directory entry itself; clamp at zero because if people
        # delete their profiles but not their preferences a backup can still be made,
        # and reporting -1 profiles crashes the server.
        def count_entries(marker: str) -> int:
            return max(sum(1 for name in files if marker in name) - 1, 0)

        machine_count = count_entries("machine_instances/")
        material_count = count_entries("materials/")
        profile_count = count_entries("quality_changes/")
        plugin_count = sum(1 for name in files if "plugin.json" in name)

        # Store the archive and metadata so the BackupManager can fetch them when needed.
        self.zip_file = buffer.getvalue()
        self.meta_data = {
            "cura_release": cura_release,
            "machine_count": str(machine_count),
            "material_count": str(material_count),
            "profile_count": str(profile_count),
            "plugin_count": str(plugin_count)
        }
        # Restore the obfuscated settings
        self._illuminate(**secrets)
Ejemplo n.º 12
0
    def isReadOnly(self, container_id: str) -> bool:
        """Returns whether a container is read-only or not.

        A container can only be modified if it is stored in the data directory.
        :return: Whether the specified container is read-only.
        """

        if container_id in self._is_read_only_cache:
            return self._is_read_only_cache[container_id]
        # Resolve the data storage path lazily, once.
        if self._storage_path == "":
            self._storage_path = os.path.realpath(Resources.getDataStoragePath())
        storage_path = self._storage_path

        file_path = self._id_to_path[container_id]  # A KeyError here means the ID is unknown.

        # Read-only exactly when file_path is not inside storage_path.
        def outside_storage() -> bool:
            return os.path.commonpath([storage_path, os.path.realpath(file_path)]) != storage_path

        if Platform.isWindows():
            # On Windows, commonpath() raises ValueError when the paths are on
            # different drives; a different drive is certainly outside storage.
            try:
                result = outside_storage()
            except ValueError:
                result = True
        else:
            result = outside_storage()

        result |= ContainerRegistry.getInstance().isExplicitReadOnly(container_id)
        self._is_read_only_cache[container_id] = result
        return result
Ejemplo n.º 13
0
    def restore(self) -> bool:
        """Restore this backup into the current configuration.

        :return: Whether extracting the backup archive succeeded.
        """
        if not self.zip_file or not self.meta_data or not self.meta_data.get("cura_release", None):
            # Without the archive and its metadata there is nothing we can safely restore.
            Logger.log("w", "Tried to restore a Cura backup without having proper data or meta data.")
            self._showMessage(self.catalog.i18nc("@info:backup_failed",
                                                 "Tried to restore a Cura backup without having proper data or meta data."))
            return False

        current_version = self._application.getVersion()
        version_to_restore = self.meta_data.get("cura_release", "master")
        if current_version < version_to_restore:
            # Refuse to restore a backup made by a newer Cura: its settings may have changed.
            Logger.log("d", "Tried to restore a Cura backup of version {version_to_restore} with cura version {current_version}".format(version_to_restore = version_to_restore, current_version = current_version))
            self._showMessage(self.catalog.i18nc("@info:backup_failed",
                                                 "Tried to restore a Cura backup that is higher than the current version."))
            return False

        version_data_dir = Resources.getDataStoragePath()
        extracted = self._extractArchive(ZipFile(io.BytesIO(self.zip_file), "r"), version_data_dir)

        # Under Linux, preferences are stored elsewhere, so we copy the file to there.
        if Platform.isLinux():
            app_name = self._application.getApplicationName()
            preferences_file = Resources.getPath(Resources.Preferences, "{}.cfg".format(app_name))
            backup_preferences_file = os.path.join(version_data_dir, "{}.cfg".format(app_name))
            Logger.log("d", "Moving preferences file from %s to %s", backup_preferences_file, preferences_file)
            shutil.move(backup_preferences_file, preferences_file)

        return extracted
Ejemplo n.º 14
0
    def _checkCustomFdmPrinterHasExtruderStack(self, machine_id: str) -> bool:
        """Check whether the given custom FDM printer already has an extruder stack on disk.

        Fix: the bare ``except:`` (which also swallowed KeyboardInterrupt/SystemExit)
        is narrowed to the exceptions ``ConfigParser.read`` can actually raise for an
        invalid stack file.

        :param machine_id: ID of the machine stack the extruder must belong to.
        :return: True when an extruder stack file referencing the machine was found.
        """
        # go through all extruders and make sure that this custom FDM printer has extruder stacks.
        extruder_stack_dir = os.path.join(Resources.getDataStoragePath(), "extruders")
        for item in os.listdir(extruder_stack_dir):
            file_path = os.path.join(extruder_stack_dir, item)
            if not os.path.isfile(file_path):
                continue

            parser = configparser.ConfigParser()
            try:
                parser.read([file_path])
            except (configparser.Error, UnicodeDecodeError):
                # skip, it is not a valid stack file
                continue

            if "metadata" not in parser or "machine" not in parser["metadata"]:
                continue
            if parser["metadata"]["machine"] == machine_id:
                return True

        return False
Ejemplo n.º 15
0
    def _checkCustomFdmPrinterHasExtruderStack(self, machine_id: str) -> bool:
        """Return True when this custom FDM printer has at least one extruder stack file on disk."""
        # Scan every file in the extruders directory for a stack belonging to this machine.
        stacks_dir = os.path.join(Resources.getDataStoragePath(), "extruders")
        found = False
        for entry in os.listdir(stacks_dir):
            entry_path = os.path.join(stacks_dir, entry)
            if not os.path.isfile(entry_path):
                continue

            config = configparser.ConfigParser()
            try:
                config.read([entry_path])
            except:
                continue  # Not a valid stack file; skip it.

            if "metadata" in config and config["metadata"].get("machine") == machine_id:
                found = True
                break

        return found
Ejemplo n.º 16
0
    def __convertVariant(self, variant_path):
        """Convert a machine variant file into a definition_changes file under machine_instances.

        Fix: both ``open()`` calls now pass ``encoding = "utf-8"`` for consistency with
        the other configuration reads/writes in this codebase, instead of depending on
        the platform's locale default encoding.

        :param variant_path: Path of the ``*_variant.inst.cfg`` file to convert.
        :return: The (possibly renamed) configuration name.
        """
        # Copy the variant to the machine_instances/*_settings.inst.cfg
        variant_config = configparser.ConfigParser(interpolation=None)
        with open(variant_path, "r", encoding = "utf-8") as fhandle:
            variant_config.read_file(fhandle)

        # Rename "..._variant" to "..._settings" to reflect the new purpose of the file.
        config_name = "Unknown Variant"
        if variant_config.has_section("general") and variant_config.has_option("general", "name"):
            config_name = variant_config.get("general", "name")
            if config_name.endswith("_variant"):
                config_name = config_name[:-len("_variant")] + "_settings"
                variant_config.set("general", "name", config_name)

        # The converted file becomes a definition_changes container.
        if not variant_config.has_section("metadata"):
            variant_config.add_section("metadata")
        variant_config.set("metadata", "type", "definition_changes")

        resource_path = Resources.getDataStoragePath()
        machine_instances_dir = os.path.join(resource_path, "machine_instances")

        if variant_path.endswith("_variant.inst.cfg"):
            variant_path = variant_path[:-len("_variant.inst.cfg")] + "_settings.inst.cfg"

        with open(os.path.join(machine_instances_dir, os.path.basename(variant_path)), "w", encoding = "utf-8") as fp:
            variant_config.write(fp)

        return config_name
Ejemplo n.º 17
0
    def __init__(self, application, parent=None):
        """Initialize the package manager and locate all package bookkeeping JSON files.

        :param application: The application instance, used to reach the container and plugin registries.
        :param parent: Optional parent object passed on to the superclass.
        """
        super().__init__(parent)

        self._application = application
        self._container_registry = self._application.getContainerRegistry()
        self._plugin_registry = self._application.getPluginRegistry()

        # JSON files that keep track of all installed packages.
        # Stays None until an existing packages.json is found, so the type is Optional[str].
        self._user_package_management_file_path = None  # type: Optional[str]
        self._bundled_package_management_file_paths = []  # type: List[str]
        for search_path in Resources.getAllPathsForType(
                Resources.BundledPackages):
            if not os.path.isdir(search_path):
                continue

            # Load all JSON files that are located in the bundled_packages directory.
            for file_name in os.listdir(search_path):
                if not file_name.endswith(".json"):
                    continue
                file_path = os.path.join(search_path, file_name)
                if not os.path.isfile(file_path):
                    continue
                self._bundled_package_management_file_paths.append(file_path)
                Logger.log(
                    "i", "Found bundled packages JSON file: {location}".format(
                        location=file_path))

        # The user packages file may live in either the data or the config storage path.
        for search_path in (Resources.getDataStoragePath(),
                            Resources.getConfigStoragePath()):
            candidate_user_path = os.path.join(search_path, "packages.json")
            if os.path.exists(candidate_user_path):
                self._user_package_management_file_path = candidate_user_path
        if self._user_package_management_file_path is None:  #Doesn't exist yet.
            self._user_package_management_file_path = os.path.join(
                Resources.getDataStoragePath(), "packages.json")

        # Where installed packages are placed on disk, keyed by package type.
        self._installation_dirs_dict = {
            "plugins":
            os.path.abspath(Resources.getStoragePath(Resources.Plugins))
        }  # type: Dict[str, str]

        self._bundled_package_dict = {}  # A dict of all bundled packages
        self._installed_package_dict = {}  # A dict of all installed packages
        self._to_remove_package_set = set(
        )  # A set of packages that need to be removed at the next start
        self._to_install_package_dict = {
        }  # A dict of packages that need to be installed at the next start
Ejemplo n.º 18
0
    def _backupAndStartClean(self):
        """Back up the current Cura config/data directories as zip files and start with clean ones.

        The config and data folders are archived next to their original location
        and then emptied; the cache folder is simply removed.
        """
        import datetime
        from cura.CuraVersion import CuraVersion
        from UM.Resources import Resources
        # The early crash may happen before those information is set in Resources, so we need to set them here to
        # make sure that Resources can find the correct place.
        Resources.ApplicationIdentifier = "cura"
        Resources.ApplicationVersion = CuraVersion
        config_path = Resources.getConfigStoragePath()
        data_path = Resources.getDataStoragePath()
        cache_path = Resources.getCacheStoragePath()

        folders_to_backup = []
        folders_to_remove = []  # only cache folder needs to be removed

        folders_to_backup.append(config_path)
        if data_path != config_path:
            folders_to_backup.append(data_path)

        # Only remove the cache folder if it's not the same as data or config
        if cache_path not in (config_path, data_path):
            folders_to_remove.append(cache_path)

        for folder in folders_to_remove:
            shutil.rmtree(folder, ignore_errors=True)
        for folder in folders_to_backup:
            base_name = os.path.basename(folder)
            root_dir = os.path.dirname(folder)

            date_now = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
            idx = 0
            file_name = base_name + "_" + date_now
            zip_file_path = os.path.join(root_dir, file_name + ".zip")
            while os.path.exists(zip_file_path):
                idx += 1
                # BUGFIX: idx is an int; it must be converted to str before concatenation,
                # otherwise this raised a TypeError on the first name collision.
                file_name = base_name + "_" + date_now + "_" + str(idx)
                zip_file_path = os.path.join(root_dir, file_name + ".zip")
            try:
                # remove the .zip extension because make_archive() adds it
                zip_file_path = zip_file_path[:-4]
                shutil.make_archive(zip_file_path,
                                    "zip",
                                    root_dir=root_dir,
                                    base_dir=base_name)

                # remove the folder only when the backup is successful
                shutil.rmtree(folder, ignore_errors=True)
                # create an empty folder so Resources will not try to copy the old ones
                os.makedirs(folder, 0o0755, exist_ok=True)

            except Exception as e:
                Logger.logException("e", "Failed to backup [%s] to file [%s]",
                                    folder, zip_file_path)
                if not self.has_started:
                    # BUGFIX: print() does not interpolate %-style arguments like
                    # the logging API does; format the message explicitly.
                    print("Failed to backup [%s] to file [%s]: %s" % (folder, zip_file_path, e))

        self.early_crash_dialog.close()
Ejemplo n.º 19
0
    def restore(self) -> bool:
        """Restore this back-up.

        :return: Whether we had success or not.
        """

        if not self.zip_file or not self.meta_data or not self.meta_data.get("cura_release", None):
            # We cannot restore without the minimum required information.
            Logger.log("w", "Tried to restore a Cura backup without having proper data or meta data.")
            self._showMessage(self.catalog.i18nc("@info:backup_failed",
                                                 "Tried to restore a Cura backup without having proper data or meta data."),
                              message_type = Message.MessageType.ERROR)
            return False

        current_version = Version(self._application.getVersion())
        version_to_restore = Version(self.meta_data.get("cura_release", "master"))

        if current_version < version_to_restore:
            # Cannot restore version newer than current because settings might have changed.
            Logger.log("d", "Tried to restore a Cura backup of version {version_to_restore} with cura version {current_version}".format(version_to_restore = version_to_restore, current_version = current_version))
            self._showMessage(self.catalog.i18nc("@info:backup_failed",
                                                 "Tried to restore a Cura backup that is higher than the current version."),
                              message_type = Message.MessageType.ERROR)
            return False

        # Get the current secrets and store since the back-up doesn't contain those
        secrets = self._obfuscate()

        version_data_dir = Resources.getDataStoragePath()
        try:
            archive = ZipFile(io.BytesIO(self.zip_file), "r")
        except LookupError as e:
            # NOTE(review): only LookupError is handled here, presumably for unknown
            # encodings while opening the archive — confirm whether BadZipFile should
            # also be handled by a caller.
            Logger.log("d", f"The following error occurred while trying to restore a Cura backup: {str(e)}")
            Message(self.catalog.i18nc("@info:backup_failed",
                                       "The following error occurred while trying to restore a Cura backup:") + str(e),
                    title = self.catalog.i18nc("@info:title", "Backup"),
                    message_type = Message.MessageType.ERROR).show()

            return False
        extracted = self._extractArchive(archive, version_data_dir)

        # Under Linux, preferences are stored elsewhere, so we copy the file to there.
        if Platform.isLinux():
            preferences_file_name = self._application.getApplicationName()
            preferences_file = Resources.getPath(Resources.Preferences, "{}.cfg".format(preferences_file_name))
            backup_preferences_file = os.path.join(version_data_dir, "{}.cfg".format(preferences_file_name))
            Logger.log("d", "Moving preferences file from %s to %s", backup_preferences_file, preferences_file)
            shutil.move(backup_preferences_file, preferences_file)

        # Read the preferences from the newly restored configuration (or else the cached Preferences will override the restored ones)
        self._application.readPreferencesFromConfiguration()

        # Restore the obfuscated settings
        self._illuminate(**secrets)

        return extracted
Ejemplo n.º 20
0
 def _getUpgradeTasks(self):
     """Yield an UpgradeTask for every configuration file found under the storage paths."""
     # Using a set removes duplicates when config and data storage coincide.
     prefixes = {Resources.getConfigStoragePath(), Resources.getDataStoragePath()}
     for config_type, relative_paths in self._storage_paths.items():
         for prefix in prefixes:
             for relative_path in relative_paths:
                 directory = os.path.join(prefix, relative_path)
                 for found_file in self._getFilesInDirectory(directory):
                     yield UpgradeTask(storage_path = directory, file_name = found_file, configuration_type = config_type)
Ejemplo n.º 21
0
 def _getUpgradeTasks(self) -> Iterator[UpgradeTask]:
     """Generate an upgrade task for each configuration file under the known storage locations."""
     # A set collapses duplicates when config and data storage share a path.
     prefixes = set()
     prefixes.add(Resources.getConfigStoragePath())
     prefixes.add(Resources.getDataStoragePath())
     for configuration_type, relative_dirs in self._storage_paths.items():
         for base in prefixes:
             for relative_dir in relative_dirs:
                 full_dir = os.path.join(base, relative_dir)
                 for candidate_file in self._getFilesInDirectory(full_dir):
                     yield UpgradeTask(storage_path = full_dir, file_name = candidate_file, configuration_type = configuration_type)
Ejemplo n.º 22
0
    def __init__(self, application: "QtApplication", parent: Optional[QObject] = None) -> None:
        """Set up the package manager: registries, bookkeeping files and package caches."""
        super().__init__(parent)

        self._application = application
        self._container_registry = self._application.getContainerRegistry()
        self._plugin_registry = self._application.getPluginRegistry()

        # JSON files that keep track of all installed packages.
        self._user_package_management_file_path = None  # type: Optional[str]
        self._bundled_package_management_file_paths = []  # type: List[str]
        for bundled_dir in Resources.getAllPathsForType(Resources.BundledPackages):
            if not os.path.isdir(bundled_dir):
                continue
            # Pick up every JSON file located in this bundled_packages directory.
            for entry_name in os.listdir(bundled_dir):
                if not entry_name.endswith(".json"):
                    continue
                json_path = os.path.join(bundled_dir, entry_name)
                if os.path.isfile(json_path):
                    self._bundled_package_management_file_paths.append(json_path)
                    Logger.log("i", "Found bundled packages JSON file: {location}".format(location = json_path))

        for storage_dir in (Resources.getDataStoragePath(), Resources.getConfigStoragePath()):
            user_candidate = os.path.join(storage_dir, "packages.json")
            if os.path.exists(user_candidate):
                self._user_package_management_file_path = user_candidate
        if self._user_package_management_file_path is None:  # Doesn't exist yet.
            self._user_package_management_file_path = os.path.join(Resources.getDataStoragePath(), "packages.json")

        self._installation_dirs_dict = {"plugins": os.path.abspath(Resources.getStoragePath(Resources.Plugins))}  # type: Dict[str, str]

        self._bundled_package_dict = {}  # type: Dict[str, Dict[str, Any]] # A dict of all bundled packages
        self._installed_package_dict = {}  # type: Dict[str, Dict[str, Any]] # A dict of all installed packages
        self._to_remove_package_set = set()  # type: Set[str] # A set of packages that need to be removed at the next start
        self._to_install_package_dict = {}  # type: Dict[str, Dict[str, Any]]  # A dict of packages that need to be installed at the next start

        # There can be plugins that provide remote packages (and thus, newer / different versions for a package).
        self._available_package_versions = {}  # type: Dict[str, Set[UMVersion]]

        self._packages_with_update_available = set()  # type: Set[str]
Ejemplo n.º 23
0
    def validate(self, name, position):
        """Check that the name, once escaped into a settings file name, fits the file system's name length limit."""
        try:
            max_filename_length = os.statvfs(Resources.getDataStoragePath()).f_namemax
        except AttributeError:  # statvfs is Unix-only; probably Windows on NTFS.
            max_filename_length = 255
        suffix = ContainerRegistry.getMimeTypeForContainer(InstanceContainer).preferredSuffix
        # The current-settings container is the longest file we save with this name.
        candidate = urllib.parse.quote_plus(name) + "_current_settings." + suffix
        return QValidator.Invalid if len(candidate) > max_filename_length else QValidator.Acceptable
Ejemplo n.º 24
0
    def _purgePackage(self, package_id: str) -> None:
        """Remove every sub-directory named after the package directly below the data storage's top-level folders."""
        data_storage_dir = os.path.abspath(Resources.getDataStoragePath())

        # Only the first level of os.walk is inspected (note the break): we look
        # for <data_storage>/<anything>/<package_id> directories and delete them.
        for top_dir, sub_dirs, _ in os.walk(data_storage_dir):
            for sub_dir in sub_dirs:
                candidate = os.path.join(top_dir, sub_dir, package_id)
                if os.path.exists(candidate):
                    Logger.log("i", "Removing '%s' for package [%s]", candidate, package_id)
                    shutil.rmtree(candidate)
            break
Ejemplo n.º 25
0
    def validate(self, name, position):
        """Validate that the machine name, once escaped into a file name, respects the file system's limit."""
        # The current-settings container is the longest file we save with this name.
        try:
            name_limit = os.statvfs(Resources.getDataStoragePath()).f_namemax
        except AttributeError:
            # statvfs unavailable: not a Unix system, so assume Windows on NTFS.
            name_limit = 255
        preferred_suffix = ContainerRegistry.getMimeTypeForContainer(InstanceContainer).preferredSuffix
        settings_file_name = urllib.parse.quote_plus(name) + "_current_settings." + preferred_suffix
        if len(settings_file_name) > name_limit:
            return QValidator.Invalid
        return QValidator.Acceptable  # All checks succeeded.
Ejemplo n.º 26
0
    def _purgePackage(self, package_id: str) -> None:
        """Delete the package's sub-directories found one level below the data storage directory."""
        data_storage_dir = os.path.abspath(Resources.getDataStoragePath())

        # Grab only the first level of the walk; the default yields nothing (and we
        # do nothing) when the data storage directory does not exist.
        _, first_level_dirs, _ = next(os.walk(data_storage_dir), ("", [], []))
        for dir_name in first_level_dirs:
            package_dir = os.path.join(data_storage_dir, dir_name, package_id)
            if os.path.exists(package_dir):
                Logger.log("i", "Removing '%s' for package [%s]", package_dir, package_id)
                shutil.rmtree(package_dir)
Ejemplo n.º 27
0
    def __getUserVariants(self):
        """Collect the user-defined variant profiles (path and display name) from the variants directory."""
        variants_dir = os.path.join(Resources.getDataStoragePath(), "variants")

        found = []
        for entry in os.scandir(variants_dir):
            if not (entry.name.endswith('.inst.cfg') and entry.is_file()):
                continue
            parser = configparser.ConfigParser(interpolation = None)
            with open(entry.path, "r") as fhandle:
                parser.read_file(fhandle)
            # Only variants that carry a display name are reported.
            if parser.has_section("general") and parser.has_option("general", "name"):
                found.append({"path": entry.path, "name": parser.get("general", "name")})
        return found
Ejemplo n.º 28
0
    def __init__(self, parent = None):
        """Initialize the package manager and set up its bookkeeping state."""
        super().__init__(parent)

        self._application = Application.getInstance()
        self._container_registry = self._application.getContainerRegistry()
        self._plugin_registry = self._application.getPluginRegistry()

        # JSON file that keeps track of all installed packages.
        data_storage_dir = os.path.abspath(Resources.getDataStoragePath())
        self._package_management_file_path = os.path.join(data_storage_dir, "packages.json")
        self._installed_package_dict = {}  # a dict of all installed packages
        self._to_remove_package_set = set()  # a set of packages that need to be removed at the next start
        self._to_install_package_dict = {}  # a dict of packages that need to be installed at the next start
Ejemplo n.º 29
0
    def isReadOnly(self, container_id: str) -> bool:
        """Report whether the container's file lives outside the data storage directory (and is thus read-only)."""
        storage_path = os.path.realpath(Resources.getDataStoragePath())
        # A KeyError here means we don't know this container ID.
        real_file_path = os.path.realpath(self._id_to_path[container_id])

        def _outside_storage() -> bool:
            return os.path.commonpath([storage_path, real_file_path]) != storage_path

        if not Platform.isWindows():
            return _outside_storage()
        # On Windows, commonpath() raises ValueError when the two paths come
        # from different drives; treat that as read-only.
        try:
            return _outside_storage()
        except ValueError:
            return True
Ejemplo n.º 30
0
    def test_factoryReset(self):
        """Factory reset must empty the data folder but leave a backup zip next to it."""
        Resources.factoryReset()
        # Check if the data is deleted!
        assert len(os.listdir(Resources.getDataStoragePath())) == 0

        # The data folder should still be there, but it should also have created a zip with the data it deleted.
        assert len(os.listdir(os.path.dirname(
            Resources.getDataStoragePath()))) == 2

        # Clean up after ourselves. The original repeated this loop twice
        # verbatim; one best-effort pass is sufficient since unlinking is
        # idempotent. Catch OSError (not a bare except) so real problems such
        # as KeyboardInterrupt are not swallowed.
        folder = os.path.dirname(Resources.getDataStoragePath())
        for file_name in os.listdir(folder):
            file_path = os.path.join(folder, file_name)
            try:
                os.unlink(file_path)
            except OSError:  # e.g. file_path is a directory; cleanup is best-effort.
                pass
Ejemplo n.º 31
0
    def __init__(self, parent = None):
        """Build the machine-name validation regex, bounded by the maximum file name length."""
        super().__init__(parent)

        # Compute the validation regex for printer names. This is limited by the maximum file name length.
        try:
            filename_max_length = os.statvfs(Resources.getDataStoragePath()).f_namemax
        except AttributeError:  # statvfs is Unix-only; assume it's Windows on NTFS.
            filename_max_length = 255
        suffix_length = len(ContainerRegistry.getMimeTypeForContainer(InstanceContainer).preferredSuffix)
        machine_name_max_length = filename_max_length - len("_current_settings.") - suffix_length
        # Characters that urllib.parse.quote_plus escapes count for 12! So now
        # we must devise a regex that allows only 12 normal characters or 1
        # special character, and that up to [machine_name_max_length / 12] times.
        maximum_special_characters = int(machine_name_max_length / 12)
        unescaped = r"[a-zA-Z0-9_\-\.\/]"
        self.machine_name_regex = r"^((" + unescaped + "){0,12}|.){0," + str(maximum_special_characters) + r"}$"
Ejemplo n.º 32
0
    def __init__(self, parent = None):
        """Derive the machine-name validation regex from the file system's maximum file name length."""
        super().__init__(parent)

        # Compute the validation regex for printer names. This is limited by the maximum file name length.
        try:
            name_max = os.statvfs(Resources.getDataStoragePath()).f_namemax
        except AttributeError:
            # Doesn't support statvfs, so probably not a Unix system: assume Windows on NTFS.
            name_max = 255
        reserved = len("_current_settings.") + len(ContainerRegistry.getMimeTypeForContainer(InstanceContainer).preferredSuffix)
        machine_name_max_length = name_max - reserved
        # Characters that urllib.parse.quote_plus escapes count for 12! So now
        # we must devise a regex that allows only 12 normal characters or 1
        # special character, and that up to [machine_name_max_length / 12] times.
        maximum_special_characters = int(machine_name_max_length / 12)
        unescaped = r"[a-zA-Z0-9_\-\.\/]"
        self.machine_name_regex = "".join([r"^((", unescaped, "){0,12}|.){0,", str(maximum_special_characters), r"}$"])
Ejemplo n.º 33
0
 def _getUpgradeTasks(self):
     """Yield an UpgradeTask for every old configuration file in any known storage location."""
     skip_dirs = ["old", "cache", "plugins"]
     for config_type, relative_paths in self._storage_paths.items():
         # A set removes duplicate prefixes (search, config and data paths can coincide).
         prefixes = set(Resources.getSearchPaths())
         prefixes.add(Resources.getConfigStoragePath())
         prefixes.add(Resources.getDataStoragePath())
         for prefix in prefixes:
             for relative_path in relative_paths:
                 directory = os.path.join(prefix, relative_path)
                 for config_file in self._getFilesInDirectory(directory, exclude_paths=skip_dirs):
                     yield UpgradeTask(
                         storage_path=directory,
                         file_name=config_file,
                         configuration_type=config_type)
Ejemplo n.º 34
0
    def makeFromCurrent(self) -> None:
        """Create a back-up archive from the current Cura configuration.

        Stores the resulting zip bytes in ``self.zip_file`` and a metadata dict
        (Cura release plus naive content counts) in ``self.meta_data``.
        """
        cura_release = CuraApplication.getInstance().getVersion()
        version_data_dir = Resources.getDataStoragePath()

        Logger.log("d", "Creating backup for Cura %s, using folder %s",
                   cura_release, version_data_dir)

        # Ensure all current settings are saved.
        CuraApplication.getInstance().saveSettings()

        # We copy the preferences file to the user data directory in Linux as it's in a different location there.
        # When restoring a backup on Linux, we move it back.
        if Platform.isLinux():
            preferences_file_name = CuraApplication.getInstance(
            ).getApplicationName()
            preferences_file = Resources.getPath(
                Resources.Preferences, "{}.cfg".format(preferences_file_name))
            backup_preferences_file = os.path.join(
                version_data_dir, "{}.cfg".format(preferences_file_name))
            Logger.log("d", "Copying preferences file from %s to %s",
                       preferences_file, backup_preferences_file)
            shutil.copyfile(preferences_file, backup_preferences_file)

        # Create an empty buffer and write the archive to it.
        buffer = io.BytesIO()
        archive = self._makeArchive(buffer, version_data_dir)
        if archive is None:
            # Archiving failed; _makeArchive presumably reported the error — confirm.
            return
        files = archive.namelist()

        # Count the metadata items. We do this in a rather naive way at the moment.
        # NOTE(review): the "- 1" presumably compensates for the directory entry
        # itself appearing in namelist() — confirm against _makeArchive's output.
        machine_count = len([s
                             for s in files if "machine_instances/" in s]) - 1
        material_count = len([s for s in files if "materials/" in s]) - 1
        profile_count = len([s for s in files if "quality_changes/" in s]) - 1
        plugin_count = len([s for s in files if "plugin.json" in s])

        # Store the archive and metadata so the BackupManager can fetch them when needed.
        self.zip_file = buffer.getvalue()
        self.meta_data = {
            "cura_release": cura_release,
            "machine_count": str(machine_count),
            "material_count": str(material_count),
            "profile_count": str(profile_count),
            "plugin_count": str(plugin_count)
        }
Ejemplo n.º 35
0
    def isReadOnly(self, container_id: str) -> bool:
        """Report whether the container lives outside the data storage directory; results are cached per ID."""
        cached = self._is_read_only_cache.get(container_id)
        if cached is not None:
            return cached
        storage_path = os.path.realpath(Resources.getDataStoragePath())
        real_file_path = os.path.realpath(self._id_to_path[container_id])  # KeyError: unknown container ID.

        # The container is read-only when its file is not inside the storage path.
        if Platform.isWindows():
            # commonpath() raises ValueError for paths on different Windows drives.
            try:
                read_only = os.path.commonpath([storage_path, real_file_path]) != storage_path
            except ValueError:
                read_only = True
        else:
            read_only = os.path.commonpath([storage_path, real_file_path]) != storage_path
        self._is_read_only_cache[container_id] = read_only
        return read_only
Ejemplo n.º 36
0
    def upgrade(self):
        """Upgrade all old configuration files found in the config and data storage paths.

        :return: True when at least one file was upgraded.
        """
        Logger.log("i", "Looking for old configuration files to upgrade.")
        upgrade_routes = self._findShortestUpgradePaths()
        upgraded = False  # Did we upgrade something?
        for old_configuration_type, storage_paths in self._storage_paths.items():
            for storage_path in storage_paths:
                config_dir = os.path.join(Resources.getConfigStoragePath(), storage_path)
                data_dir = os.path.join(Resources.getDataStoragePath(), storage_path)  # A second place to look.
                directories = [config_dir] if data_dir == config_dir else [config_dir, data_dir]
                for directory in directories:
                    for configuration_file in self._getFilesInDirectory(directory, exclude_paths = ["old", "cache"]):
                        upgraded |= self._upgradeFile(directory, configuration_file, old_configuration_type, upgrade_routes)

        if upgraded:
            message = UM.Message(text=catalogue.i18nc("@info:version-upgrade", "A configuration from an older version of {0} was imported.", UM.Application.getInstance().getApplicationName()))
            message.show()
        return upgraded
Ejemplo n.º 37
0
    def getPackageFiles(package_id) -> List[Tuple[str, List[str]]]:
        """Find all directories (with their file names) that belong to the given package.

        Walks the data storage directory for sub-directories named after the
        package (i.e. ``<storage>/<category>/<package_id>``) and returns every
        (directory, file_names) pair located inside those package directories.
        """
        data_storage_dir = os.path.abspath(Resources.getDataStoragePath())

        os_walk = []
        dirs_to_check = []
        result = []  # 2-tuples of (dir, file_names)
        for root_path, dir_names, file_names in os.walk(data_storage_dir):
            os_walk.append((root_path, dir_names, file_names))
            for dir_name in dir_names:
                package_dir = os.path.join(root_path, dir_name, package_id)
                if os.path.exists(package_dir):
                    dirs_to_check.append(package_dir)

        for root_path, dir_names, file_names in os_walk:
            for dir_to_check in dirs_to_check:
                # BUGFIX: a plain startswith() also matched sibling directories that
                # merely share a name prefix (e.g. ".../foo2" vs ".../foo"). Require
                # an exact match or a path-separator boundary instead.
                if root_path == dir_to_check or root_path.startswith(dir_to_check + os.sep):
                    result.append((root_path, file_names))

        return result
Ejemplo n.º 38
0
    def getPackageFiles(package_id) -> List[Tuple[str, List[str]]]:
        """Find all directories (with their file names) that belong to the given package.

        Walks the data storage directory for sub-directories named after the
        package (i.e. ``<storage>/<category>/<package_id>``) and returns every
        (directory, file_names) pair located inside those package directories.
        """
        data_storage_dir = os.path.abspath(Resources.getDataStoragePath())

        os_walk = []
        dirs_to_check = []
        result = []  # 2-tuples of (dir, file_names)
        for root_path, dir_names, file_names in os.walk(data_storage_dir):
            os_walk.append((root_path, dir_names, file_names))
            for dir_name in dir_names:
                package_dir = os.path.join(root_path, dir_name, package_id)
                if os.path.exists(package_dir):
                    dirs_to_check.append(package_dir)

        for root_path, dir_names, file_names in os_walk:
            for dir_to_check in dirs_to_check:
                # BUGFIX: a plain startswith() also matched sibling directories that
                # merely share a name prefix (e.g. ".../foo2" vs ".../foo"). Require
                # an exact match or a path-separator boundary instead.
                if root_path == dir_to_check or root_path.startswith(dir_to_check + os.sep):
                    result.append((root_path, file_names))

        return result
Ejemplo n.º 39
0
    def _acquireNextUniqueCustomFdmPrinterExtruderStackIdIndex(self) -> int:
        """Advance to and return the first printer-count index whose eight custom extruder stack IDs are all unused."""
        extruder_stack_dir = os.path.join(Resources.getDataStoragePath(), "extruders")
        existing_names = {os.path.basename(name) for name in os.listdir(extruder_stack_dir)}
        while True:
            self._current_fdm_printer_count += 1
            # Only the first printer omits the " #N" suffix on its stack IDs.
            suffix = " #%s" % self._current_fdm_printer_count if self._current_fdm_printer_count > 1 else ""
            candidate_ids = ("custom_extruder_%s" % (position + 1) + suffix for position in range(8))
            if not any(candidate in existing_names for candidate in candidate_ids):
                break

        return self._current_fdm_printer_count
Ejemplo n.º 40
0
    def __init__(self, parent=None):
        """Initialize the package manager and locate the bundled and user package bookkeeping files."""
        super().__init__(parent)

        self._application = Application.getInstance()
        self._container_registry = self._application.getContainerRegistry()
        self._plugin_registry = self._application.getPluginRegistry()

        # JSON file that keeps track of all installed packages.
        bundled_dir = os.path.abspath(Resources.getBundledResourcesPath())
        self._bundled_package_management_file_path = os.path.join(bundled_dir, "packages.json")
        user_dir = os.path.abspath(Resources.getDataStoragePath())
        self._user_package_management_file_path = os.path.join(user_dir, "packages.json")

        self._bundled_package_dict = {}      # A dict of all bundled packages
        self._installed_package_dict = {}    # A dict of all installed packages
        self._to_remove_package_set = set()  # A set of packages that need to be removed at the next start
        self._to_install_package_dict = {}   # A dict of packages that need to be installed at the next start
Ejemplo n.º 41
0
    def makeFromCurrent(self) -> None:
        """Create a back-up archive from the current Cura configuration.

        Stores the resulting zip bytes in ``self.zip_file`` and a metadata dict
        (Cura release plus naive content counts) in ``self.meta_data``.
        """
        cura_release = self._application.getVersion()
        version_data_dir = Resources.getDataStoragePath()

        Logger.log("d", "Creating backup for Cura %s, using folder %s", cura_release, version_data_dir)

        # Ensure all current settings are saved.
        self._application.saveSettings()

        # We copy the preferences file to the user data directory in Linux as it's in a different location there.
        # When restoring a backup on Linux, we move it back.
        if Platform.isLinux(): #TODO: This should check for the config directory not being the same as the data directory, rather than hard-coding that to Linux systems.
            preferences_file_name = self._application.getApplicationName()
            preferences_file = Resources.getPath(Resources.Preferences, "{}.cfg".format(preferences_file_name))
            backup_preferences_file = os.path.join(version_data_dir, "{}.cfg".format(preferences_file_name))
            # Guard against copying onto itself when preferences already live in the data dir.
            if os.path.exists(preferences_file) and (not os.path.exists(backup_preferences_file) or not os.path.samefile(preferences_file, backup_preferences_file)):
                Logger.log("d", "Copying preferences file from %s to %s", preferences_file, backup_preferences_file)
                shutil.copyfile(preferences_file, backup_preferences_file)

        # Create an empty buffer and write the archive to it.
        buffer = io.BytesIO()
        archive = self._makeArchive(buffer, version_data_dir)
        if archive is None:
            # Archiving failed; _makeArchive presumably reported the error — confirm.
            return
        files = archive.namelist()

        # Count the metadata items. We do this in a rather naive way at the moment.
        # NOTE(review): the "- 1" presumably compensates for the directory entry
        # itself appearing in namelist() — confirm against _makeArchive's output.
        machine_count = len([s for s in files if "machine_instances/" in s]) - 1
        material_count = len([s for s in files if "materials/" in s]) - 1
        profile_count = len([s for s in files if "quality_changes/" in s]) - 1
        plugin_count = len([s for s in files if "plugin.json" in s])

        # Store the archive and metadata so the BackupManager can fetch them when needed.
        self.zip_file = buffer.getvalue()
        self.meta_data = {
            "cura_release": cura_release,
            "machine_count": str(machine_count),
            "material_count": str(material_count),
            "profile_count": str(profile_count),
            "plugin_count": str(plugin_count)
        }
Ejemplo n.º 42
0
    def _acquireNextUniqueCustomFdmPrinterExtruderStackIdIndex(self) -> int:
        """Find the next printer-count index with no clashing custom extruder stack IDs on disk and return it."""
        extruder_stack_dir = os.path.join(Resources.getDataStoragePath(), "extruders")
        taken_names = [os.path.basename(entry) for entry in os.listdir(extruder_stack_dir)]

        found_free_index = False
        while not found_free_index:
            self._current_fdm_printer_count += 1
            found_free_index = True
            for position in range(8):
                stack_id = "custom_extruder_%s" % (position + 1)
                # Only the first printer omits the " #N" suffix.
                if self._current_fdm_printer_count > 1:
                    stack_id += " #%s" % self._current_fdm_printer_count
                if stack_id in taken_names:
                    found_free_index = False
                    break

        return self._current_fdm_printer_count
Ejemplo n.º 43
0
    def restore(self) -> bool:
        """
        Restore this backup.
        :return: A boolean whether we had success or not.
        """
        if not self.zip_file or not self.meta_data or not self.meta_data.get("cura_release", None):
            # We cannot restore without the minimum required information.
            Logger.log("w", "Tried to restore a Cura backup without having proper data or meta data.")
            self._showMessage(
                self.catalog.i18nc("@info:backup_failed",
                                   "Tried to restore a Cura backup without having proper data or meta data."))
            return False

        current_version = CuraApplication.getInstance().getVersion()
        version_to_restore = self.meta_data.get("cura_release", "master")
        if current_version != version_to_restore:
            # Cannot restore version older or newer than current because settings might have changed.
            # Restoring this will cause a lot of issues so we don't allow this for now.
            self._showMessage(
                self.catalog.i18nc("@info:backup_failed",
                                   "Tried to restore a Cura backup that does not match your current version."))
            return False

        version_data_dir = Resources.getDataStoragePath()
        archive = ZipFile(io.BytesIO(self.zip_file), "r")
        extracted = self._extractArchive(archive, version_data_dir)

        # Under Linux, preferences are stored elsewhere, so we copy the file to there.
        if Platform.isLinux():
            preferences_file_name = CuraApplication.getInstance().getApplicationName()
            preferences_file = Resources.getPath(Resources.Preferences, "{}.cfg".format(preferences_file_name))
            backup_preferences_file = os.path.join(version_data_dir, "{}.cfg".format(preferences_file_name))
            # Guard against a backup that contains no preferences file, and against the
            # config dir being the same as the data dir (moving a file onto itself),
            # mirroring the checks done when the backup was created.
            if os.path.exists(backup_preferences_file) and (not os.path.exists(preferences_file) or not os.path.samefile(preferences_file, backup_preferences_file)):
                Logger.log("d", "Moving preferences file from %s to %s", backup_preferences_file, preferences_file)
                shutil.move(backup_preferences_file, preferences_file)

        return extracted
Ejemplo n.º 44
0
    def export(self):
        """Serialise this machine instance to a version-2 machine stack file.

        Also writes a "current settings" user profile for this machine to disk
        and schedules it for upgrading as a side effect.

        :return: Two parallel lists: the file names to write, and the
            serialised contents for each of those files.
        """
        # Build a config file in the form of version 2.
        config = configparser.ConfigParser(interpolation=None)

        config.add_section("general")
        config.set("general", "name", self._name)
        config.set("general", "id", self._name)
        # Hard-code version 2, since if this number changes the programmer MUST change this entire function.
        config.set("general", "version", "2")

        import VersionUpgrade21to22  # Import here to prevent circular dependencies.
        upgrader = VersionUpgrade21to22.VersionUpgrade21to22.VersionUpgrade21to22

        has_machine_qualities = self._type_name in upgrader.machinesWithMachineQuality()
        type_name = upgrader.translatePrinter(self._type_name)
        active_material = upgrader.translateMaterial(self._active_material_name)
        variant = upgrader.translateVariant(self._variant_name, type_name)
        variant_materials = upgrader.translateVariantForMaterials(self._variant_name, type_name)

        # Convert to a quality profile if we have one of the built-in profiles,
        # otherwise convert to a quality-changes profile.
        if self._active_profile_name in upgrader.builtInProfiles():
            active_quality = upgrader.translateProfile(self._active_profile_name)
            active_quality_changes = "empty_quality_changes"
        else:
            active_quality = upgrader.getQualityFallback(type_name, variant, active_material)
            active_quality_changes = self._active_profile_name

        if has_machine_qualities:  # This machine now has machine-quality profiles.
            active_material += "_" + variant_materials

        # Create a new user profile and schedule it to be upgraded.
        user_profile = configparser.ConfigParser(interpolation=None)
        user_profile["general"] = {
            "version": "2",
            "name": "Current settings",
            "definition": type_name
        }
        user_profile["metadata"] = {"type": "user", "machine": self._name}
        user_profile["values"] = {}

        version_upgrade_manager = UM.VersionUpgradeManager.VersionUpgradeManager.getInstance()
        user_storage = os.path.join(
            Resources.getDataStoragePath(),
            next(iter(version_upgrade_manager.getStoragePaths("user"))))
        user_profile_file = os.path.join(
            user_storage,
            urllib.parse.quote_plus(self._name) + "_current_settings.inst.cfg")
        if not os.path.exists(user_storage):
            os.makedirs(user_storage)
        with open(user_profile_file, "w", encoding="utf-8") as file_handle:
            user_profile.write(file_handle)
        version_upgrade_manager.upgradeExtraFile(
            user_storage, urllib.parse.quote_plus(self._name), "user")

        # The current profile doesn't know the definition ID when it was upgraded,
        # only the instance ID, so it will be invalid. Sorry, your current settings
        # are lost now.
        containers = [
            self._name + "_current_settings",
            active_quality_changes,
            active_quality,
            active_material,
            variant,
            type_name
        ]
        config.set("general", "containers", ",".join(containers))

        config.add_section("metadata")
        config.set("metadata", "type", "machine")

        upgrader.translateSettings(self._machine_setting_overrides)
        config.add_section("values")
        for key, value in self._machine_setting_overrides.items():
            config.set("values", key, str(value))

        output = io.StringIO()
        config.write(output)
        return [self._filename], [output.getvalue()]
Ejemplo n.º 45
0
    def removeHiddenStacks(self) -> None:
        """
        If starting the upgrade from 4.4, this will remove any hidden printer
        stacks from the configuration folder as well as all of the user profiles
        and definition changes profiles.

        This will ONLY run when upgrading from 4.4, not when e.g. upgrading from
        4.3 to 4.6 (through 4.4). This is because it's to fix a bug
        (https://github.com/Ultimaker/Cura/issues/6731) that occurred in 4.4
        only, so only there will it have hidden stacks that need to be deleted.
        If people upgrade from 4.3 they don't need to be deleted. If people
        upgrade from 4.5 they have already been deleted previously or never got
        the broken hidden stacks.
        """
        Logger.log("d", "Removing all hidden container stacks.")
        hidden_global_stacks = set()  # Which global stacks have been found? We'll delete anything referred to by these. Set of stack IDs.
        hidden_extruder_stacks = set()  # Which extruder stacks refer to the hidden global profiles?
        hidden_instance_containers = set()  # Which instance containers are referred to by the hidden stacks?
        exclude_directories = {"plugins"}

        def read_stack_file(file_path):
            # Parse a stack config file; return None on I/O or format errors so
            # callers can simply skip the file.
            parser = configparser.ConfigParser(interpolation=None)
            try:
                parser.read(file_path)
            except OSError:  # File not found or insufficient rights.
                return None
            except configparser.Error:  # Invalid file format.
                return None
            return parser

        def collect_stack_containers(parser):
            # The user container and definition changes container are specific
            # to this stack. We need to delete those too.
            if "containers" in parser:
                if "0" in parser["containers"]:  # User container.
                    hidden_instance_containers.add(parser["containers"]["0"])
                if "6" in parser["containers"]:  # Definition changes container.
                    hidden_instance_containers.add(parser["containers"]["6"])

        data_storage = Resources.getDataStoragePath()

        # First find all of the hidden global container stacks.
        for root, dirs, files in os.walk(data_storage):
            dirs[:] = [dir for dir in dirs if dir not in exclude_directories]
            for filename in fnmatch.filter(files, "*.global.cfg"):
                parser = read_stack_file(os.path.join(root, filename))
                if parser is None:
                    continue
                if "metadata" in parser and "hidden" in parser["metadata"] and parser["metadata"]["hidden"] == "True":
                    stack_id = urllib.parse.unquote_plus(os.path.basename(filename).split(".")[0])
                    hidden_global_stacks.add(stack_id)
                    collect_stack_containers(parser)
                    os.remove(os.path.join(root, filename))

        # Walk a second time to find all extruder stacks referring to these hidden container stacks.
        for root, dirs, files in os.walk(data_storage):
            dirs[:] = [dir for dir in dirs if dir not in exclude_directories]
            for filename in fnmatch.filter(files, "*.extruder.cfg"):
                parser = read_stack_file(os.path.join(root, filename))
                if parser is None:
                    continue
                if "metadata" in parser and "machine" in parser["metadata"] and parser["metadata"]["machine"] in hidden_global_stacks:
                    stack_id = urllib.parse.unquote_plus(os.path.basename(filename).split(".")[0])
                    hidden_extruder_stacks.add(stack_id)
                    collect_stack_containers(parser)
                    os.remove(os.path.join(root, filename))

        # Walk a third time to remove all instance containers that are referred to by either of those.
        for root, dirs, files in os.walk(data_storage):
            dirs[:] = [dir for dir in dirs if dir not in exclude_directories]
            for filename in fnmatch.filter(files, "*.inst.cfg"):
                container_id = urllib.parse.unquote_plus(os.path.basename(filename).split(".")[0])
                if container_id in hidden_instance_containers:
                    try:
                        os.remove(os.path.join(root, filename))
                    except OSError:  # Is a directory, file not found, or insufficient rights.
                        continue
Ejemplo n.º 46
0
    def export(self):
        """Serialise this machine instance to a version-2 machine stack.

        As a side effect, a "current settings" user profile for this machine is
        written to disk and scheduled for upgrading.

        :return: Two parallel lists: file names to write and the serialised
            content for each file.
        """
        machine_config = configparser.ConfigParser(interpolation = None) # Build a config file in the form of version 2.

        machine_config.add_section("general")
        machine_config.set("general", "name", self._name)
        machine_config.set("general", "id", self._name)
        machine_config.set("general", "version", "2") # Hard-code version 2, since if this number changes the programmer MUST change this entire function.

        import VersionUpgrade21to22 # Import here to prevent circular dependencies.
        translator = VersionUpgrade21to22.VersionUpgrade21to22.VersionUpgrade21to22
        has_machine_qualities = self._type_name in translator.machinesWithMachineQuality()
        type_name = translator.translatePrinter(self._type_name)
        active_material = translator.translateMaterial(self._active_material_name)
        variant = translator.translateVariant(self._variant_name, type_name)
        variant_materials = translator.translateVariantForMaterials(self._variant_name, type_name)

        #Convert to quality profile if we have one of the built-in profiles, otherwise convert to a quality-changes profile.
        if self._active_profile_name in translator.builtInProfiles():
            active_quality = translator.translateProfile(self._active_profile_name)
            active_quality_changes = "empty_quality_changes"
        else:
            active_quality = translator.getQualityFallback(type_name, variant, active_material)
            active_quality_changes = self._active_profile_name

        if has_machine_qualities: #This machine now has machine-quality profiles.
            active_material += "_" + variant_materials

        #Create a new user profile and schedule it to be upgraded.
        user_profile = configparser.ConfigParser(interpolation = None)
        user_profile["general"] = {"version": "2", "name": "Current settings", "definition": type_name}
        user_profile["metadata"] = {"type": "user", "machine": self._name}
        user_profile["values"] = {}

        upgrade_manager = UM.VersionUpgradeManager.VersionUpgradeManager.getInstance()
        quoted_name = urllib.parse.quote_plus(self._name)
        user_storage = os.path.join(Resources.getDataStoragePath(), next(iter(upgrade_manager.getStoragePaths("user"))))
        user_profile_file = os.path.join(user_storage, quoted_name + "_current_settings.inst.cfg")
        if not os.path.exists(user_storage):
            os.makedirs(user_storage)
        with open(user_profile_file, "w", encoding = "utf-8") as file_handle:
            user_profile.write(file_handle)
        upgrade_manager.upgradeExtraFile(user_storage, quoted_name, "user")

        #The current profile doesn't know the definition ID when it was upgraded, only the
        #instance ID, so it will be invalid. Sorry, your current settings are lost now.
        containers = [
            self._name + "_current_settings",
            active_quality_changes,
            active_quality,
            active_material,
            variant,
            type_name
        ]
        machine_config.set("general", "containers", ",".join(containers))

        machine_config.add_section("metadata")
        machine_config.set("metadata", "type", "machine")

        translator.translateSettings(self._machine_setting_overrides)
        machine_config.add_section("values")
        for setting_key, setting_value in self._machine_setting_overrides.items():
            machine_config.set("values", setting_key, str(setting_value))

        output = io.StringIO()
        machine_config.write(output)
        return [self._filename], [output.getvalue()]