def test_temp_env_var_that_already_exist(self):
        """
        Check if temp_env_var sets and restores the variable
        """
        # Create a unique env var and make sure it doesn't currently exist.
        env_var_name = "ENV_VAR_" + uuid.uuid4().hex
        self.assertFalse(env_var_name in os.environ)

        # Temporarily set the env var.
        with temp_env_var(**{env_var_name: "test_value"}):

            # Make sure it is set.
            self.assertTrue(env_var_name in os.environ)
            self.assertEqual(os.environ[env_var_name], "test_value")

            # Override the existing variable with a new one
            with temp_env_var(**{env_var_name: "test_value_2"}):

                # Make sure it was overridden
                self.assertTrue(env_var_name in os.environ)
                self.assertEqual(os.environ[env_var_name], "test_value_2")

            # Make sure the original one was restored.
            self.assertTrue(env_var_name in os.environ)
            self.assertEqual(os.environ[env_var_name], "test_value")

        # Make sure it is gone.
        self.assertFalse(env_var_name in os.environ)
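
A note on the helper under test: temp_env_var sets the given environment variables for the duration of a with block and restores the previous state on exit, whether or not the variables existed beforehand. For reference only, a minimal sketch of such a context manager (not the actual sgtk implementation) could look like this:

import contextlib
import os

@contextlib.contextmanager
def temp_env_var(**env_vars):
    # Remember the previous value of each variable (None if it was unset).
    previous = {name: os.environ.get(name) for name in env_vars}
    os.environ.update(env_vars)
    try:
        yield
    finally:
        # Restore the original values, removing variables that did not exist.
        for name, old_value in previous.items():
            if old_value is None:
                os.environ.pop(name, None)
            else:
                os.environ[name] = old_value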
Example #2
    def test_temp_env_var_that_already_exist(self):
        """
        Check if temp_env_var sets and restores the variable
        """
        # Create a unique env var and make sure it doesn't currently exist.
        env_var_name = "ENV_VAR_" + uuid.uuid4().hex
        self.assertFalse(env_var_name in os.environ)

        # Temporarily set the env var.
        with temp_env_var(**{env_var_name: "test_value"}):

            # Make sure it is set.
            self.assertTrue(env_var_name in os.environ)
            self.assertEqual(os.environ[env_var_name], "test_value")

            # Override the existing variable with a new one
            with temp_env_var(**{env_var_name: "test_value_2"}):

                # Make sure it was overridden
                self.assertTrue(env_var_name in os.environ)
                self.assertEqual(os.environ[env_var_name], "test_value_2")

            # Make sure the original one was restored.
            self.assertTrue(env_var_name in os.environ)
            self.assertEqual(os.environ[env_var_name], "test_value")

        # Make sure it is gone.
        self.assertFalse(env_var_name in os.environ)
Example #3
    def test_get_entity_from_environment(self, _):
        """
        Ensure the ToolkitManager can extract the entities from the environment
        """

        # no env set
        mgr = ToolkitManager()
        self.assertEqual(mgr.get_entity_from_environment(), None)

        # std case
        with temp_env_var(SHOTGUN_ENTITY_TYPE="Shot", SHOTGUN_ENTITY_ID="123"):
            self.assertEqual(mgr.get_entity_from_environment(), {
                "type": "Shot",
                "id": 123
            })
        # site mismatch
        with temp_env_var(SHOTGUN_SITE="https://some.other.site",
                          SHOTGUN_ENTITY_TYPE="Shot",
                          SHOTGUN_ENTITY_ID="123"):
            self.assertEqual(mgr.get_entity_from_environment(), None)

        # invalid data case
        with temp_env_var(SHOTGUN_ENTITY_TYPE="Shot",
                          SHOTGUN_ENTITY_ID="invalid"):
            self.assertEqual(mgr.get_entity_from_environment(), None)
Example #4
    def test_pipeline_config_id_env_var(self, _):
        """
        Tests the SHOTGUN_PIPELINE_CONFIGURATION_ID being picked up at init
        """
        mgr = ToolkitManager()
        self.assertEqual(mgr.pipeline_configuration, None)

        with temp_env_var(SHOTGUN_PIPELINE_CONFIGURATION_ID="123"):
            mgr = ToolkitManager()
            self.assertEqual(mgr.pipeline_configuration, 123)

        with temp_env_var(SHOTGUN_PIPELINE_CONFIGURATION_ID="invalid"):
            mgr = ToolkitManager()
            self.assertEqual(mgr.pipeline_configuration, None)
Example #6
    def test_cleanup(self):
        """
        Ensures that after a successful update the backup folder created by the
        update process is properly deleted 
        """
        resolver = sgtk.bootstrap.resolver.ConfigurationResolver(
            plugin_id="backup_tests"
        )
        with temp_env_var(SGTK_REPO_ROOT=self._core_repo_path):
            config = resolver.resolve_configuration(
                {"type": "dev", "name": "backup_tests", "path": self._temp_test_path}, self.mockgun
            )
            self.assertIsInstance(config, sgtk.bootstrap.resolver.CachedConfiguration)
            config_root_path = config.path.current_os

            # Update the configuration
            config.update_configuration()
            core_install_backup_path = os.path.join(config_root_path, "install", "core.backup")
            # check that there are no directory items in the core backup folder other than the placeholder file
            self.assertEqual(os.listdir(core_install_backup_path), ['placeholder'])
            config_install_backup_path = os.path.join(config_root_path, "install", "config.backup")
            # check that there are no directory items in the config backup folder other than the placeholder file
            self.assertEqual(os.listdir(config_install_backup_path), ['placeholder'])

            # Update a second time and check that backup was cleaned up again
            config.update_configuration()
            self.assertEqual(os.listdir(core_install_backup_path), ['placeholder'])
            self.assertEqual(os.listdir(config_install_backup_path), ['placeholder'])
Example #7
    def test_env_vars_present(self, get_api_core_config_location_mock):
        """
        Ensures files using environment variables are translated properly.
        """
        test_host = "https://envvar.shotgunstudio.com"
        test_key = "env_var_key"
        test_script = "env_var_script"
        test_proxy = "env_var_proxy"
        test_appstore_proxy = "env_var_appstore_proxy"

        with temp_env_var(SGTK_TEST_HOST=test_host,
                          SGTK_TEST_KEY=test_key,
                          SGTK_TEST_SCRIPT=test_script,
                          SGTK_TEST_PROXY=test_proxy,
                          SGTK_TEST_APPSTORE_PROXY=test_appstore_proxy):
            self.assertEqual(
                tank.util.shotgun.connection._parse_config_data(
                    {
                        "host": "$SGTK_TEST_HOST",
                        "api_key": "$SGTK_TEST_KEY",
                        "api_script": "$SGTK_TEST_SCRIPT",
                        "http_proxy": "$SGTK_TEST_PROXY",
                        "app_store_http_proxy": "$SGTK_TEST_APPSTORE_PROXY"
                    }, "default", "not_a_file.cfg"), {
                        "host": test_host,
                        "api_key": test_key,
                        "api_script": test_script,
                        "http_proxy": test_proxy,
                        "app_store_http_proxy": test_appstore_proxy
                    })
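
The expansion itself happens inside tank.util.shotgun.connection._parse_config_data, which is not shown here. A hedged sketch of the substitution being verified, built on os.path.expandvars (the real function also performs validation and fills in defaults), might be:

import os

def expand_config_values(config_data):
    # Illustrative only: expand "$VAR" references in every string value.
    return {
        key: os.path.expandvars(value) if isinstance(value, str) else value
        for key, value in config_data.items()
    }

# With SGTK_TEST_HOST set as in the test above, {"host": "$SGTK_TEST_HOST"}
# becomes {"host": "https://envvar.shotgunstudio.com"}.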
 def _download_app_store_bundle(self, target=None):
     """
     Creates an app store descriptor and attempts to download it locally.
     :param target: The path to which the bundle is to be downloaded.
     """
     if target:
         with temp_env_var(SHOTGUN_BUNDLE_CACHE_PATH=target):
             desc = sgtk.descriptor.create_descriptor(
                 None,
                 sgtk.descriptor.Descriptor.FRAMEWORK,
                 {"name": "tk-test-bundle2", "version": "v1.0.0", "type": "app_store"}
             )
     else:
         desc = sgtk.descriptor.create_descriptor(
             None,
             sgtk.descriptor.Descriptor.FRAMEWORK,
             {"name": "tk-test-bundle2", "version": "v1.0.0", "type": "app_store"}
         )
     io_descriptor_app_store = "tank.descriptor.io_descriptor.appstore.IODescriptorAppStore"
     with patch(
         "%s._IODescriptorAppStore__create_sg_app_store_connection" % io_descriptor_app_store,
         return_value=(self.mockgun, None)
     ):
         with patch(
             "%s._IODescriptorAppStore__refresh_metadata" % io_descriptor_app_store,
             return_value=self._metadata
         ):
             with patch(
                 "tank.util.shotgun.download_and_unpack_attachment",
                 side_effect=self._download_and_unpack_attachment
             ):
                 desc.download_local()
Example #9
 def _download_app_store_bundle(self, target=None):
     """
     Creates an app store descriptor and attempts to download it locally.
     :param target: The path to which the bundle is to be downloaded.
     """
     if target:
         with temp_env_var(SHOTGUN_BUNDLE_CACHE_PATH=target):
             desc = sgtk.descriptor.create_descriptor(
                 None, sgtk.descriptor.Descriptor.FRAMEWORK, {
                     "name": "tk-test-bundle2",
                     "version": "v1.0.0",
                     "type": "app_store"
                 })
     else:
         desc = sgtk.descriptor.create_descriptor(
             None, sgtk.descriptor.Descriptor.FRAMEWORK, {
                 "name": "tk-test-bundle2",
                 "version": "v1.0.0",
                 "type": "app_store"
             })
     io_descriptor_app_store = "tank.descriptor.io_descriptor.appstore.IODescriptorAppStore"
     with patch("%s._IODescriptorAppStore__create_sg_app_store_connection" %
                io_descriptor_app_store,
                return_value=(self.mockgun, None)):
         with patch("%s._IODescriptorAppStore__refresh_metadata" %
                    io_descriptor_app_store,
                    return_value=self._metadata):
             with patch("tank.util.shotgun.download_and_unpack_attachment",
                        side_effect=self._download_and_unpack_attachment):
                 desc.download_local()
Example #10
    def test_cleanup_read_only(self):
        """
        Ensures that backup cleanup will succeed even with read only folder items
        """
        resolver = sgtk.bootstrap.resolver.ConfigurationResolver(
            plugin_id="backup_tests_read_only")
        with temp_env_var(SGTK_REPO_ROOT=self._core_repo_path):
            config = resolver.resolve_configuration(
                {
                    "type": "dev",
                    "name": "backup_tests_read_only",
                    "path": self._temp_test_path
                }, self.tk.shotgun)
            self.assertIsInstance(config,
                                  sgtk.bootstrap.resolver.CachedConfiguration)
            config_root_path = config.path.current_os
            core_install_backup_path = os.path.join(config_root_path,
                                                    "install", "core.backup")
            config_install_backup_path = os.path.join(config_root_path,
                                                      "install",
                                                      "config.backup")

            # First update, no backup
            config.update_configuration()

            def dont_cleanup_backup_folders(self, config, core):
                self.config_backup_folder_path = config
                self.core_backup_folder_path = core

            with patch.object(sgtk.bootstrap.resolver.CachedConfiguration,
                              '_cleanup_backup_folders',
                              new=dont_cleanup_backup_folders):
                # Update the configuration, but don't clean up backups in order to ...
                config.update_configuration()
                config_backup_folder_path = config.config_backup_folder_path
                core_backup_folder_path = config.core_backup_folder_path
                read_only_file_name = os.path.join(core_backup_folder_path,
                                                   "test.txt")

            # ... create a read only file ...
            with open(read_only_file_name, "w") as f:
                f.write("Test")
            file_permissions = os.stat(read_only_file_name)[stat.ST_MODE]
            os.chmod(read_only_file_name, file_permissions & ~stat.S_IWRITE)
            if sys.platform == "win32":
                # ... and a read only folder
                folder_permissions = os.stat(config_install_backup_path)[
                    stat.ST_MODE]
                os.chmod(config_install_backup_path,
                         folder_permissions & ~stat.S_IWRITE)

            # Now try to clean up the backup folders with read-only file
            config._cleanup_backup_folders(config_backup_folder_path,
                                           core_backup_folder_path)

            # Verify that backup folders were cleaned up
            self.assertEqual(os.listdir(core_install_backup_path),
                             ['placeholder'])
            self.assertEqual(os.listdir(config_install_backup_path),
                             ['placeholder'])
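
The cleanup exercised above has to cope with read-only files and, on Windows, read-only folders. A hedged sketch of that kind of forced removal, roughly what a helper such as safe_delete_folder needs to do (the actual sgtk implementation may differ), is:

import os
import shutil
import stat

def remove_tree_even_if_read_only(path):
    # When a deletion fails, clear the read-only bit and retry the operation.
    def on_error(func, failed_path, exc_info):
        os.chmod(failed_path, os.stat(failed_path)[stat.ST_MODE] | stat.S_IWRITE)
        func(failed_path)

    shutil.rmtree(path, onerror=on_error)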
Example #11
    def test_shotgun_bundle_cache(self, _):
        """
        Ensures ToolkitManager deals properly with bundle caches from the user and from
        environment variables.
        """

        # Ensure the list is empty by default.
        mgr = ToolkitManager()
        self.assertEqual(mgr._get_bundle_cache_fallback_paths(), [])

        # If the user bundle cache is set, we should see it in the results.
        mgr.bundle_cache_fallback_paths = ["/a/b/c", "/d/e/f"]
        self.assertEqual(set(mgr._get_bundle_cache_fallback_paths()),
                         set(["/a/b/c", "/d/e/f"]))

        # Reset the user bundle cache.
        mgr.bundle_cache_fallback_paths = []
        self.assertEqual(mgr._get_bundle_cache_fallback_paths(), [])

        # Set the environment variable which allows paths to be inherited from another process.
        with temp_env_var(SHOTGUN_BUNDLE_CACHE_FALLBACK_PATHS=os.pathsep.join(
            ["/g/h/i", "/j/k/l", "/a/b/c"])):
            # Should see the content from the environment variable.
            self.assertEqual(set(mgr._get_bundle_cache_fallback_paths()),
                             set(["/g/h/i", "/j/k/l", "/a/b/c"]))

            # Add a few user specified folders.
            mgr.bundle_cache_fallback_paths = ["/a/b/c", "/d/e/f"]

            self.assertEqual(set(mgr._get_bundle_cache_fallback_paths()),
                             set(["/a/b/c", "/d/e/f", "/g/h/i", "/j/k/l"]))

        # Now that the env var is not set anymore, we should no longer see its bundle caches.
        self.assertEqual(set(mgr._get_bundle_cache_fallback_paths()),
                         set(["/a/b/c", "/d/e/f"]))
Example #12
    def test_local_bundle_cache(self):
        """
        Ensures the local bundle cache path is included in the pipeline config
        yml file during updates.
        """
        resolver = sgtk.bootstrap.resolver.ConfigurationResolver(
            plugin_id="backup_tests")
        with temp_env_var(SGTK_REPO_ROOT=self._core_repo_path):

            config = resolver.resolve_configuration(
                {
                    "type": "dev",
                    "name": "backup_tests",
                    "path": self._temp_test_path
                }, self.mockgun)
            self.assertIsInstance(config,
                                  sgtk.bootstrap.resolver.CachedConfiguration)

            config_root_path = config.path.current_os

            # Update the configuration
            config.update_configuration()

            # make sure the local bundle cache path was written to the pipeline
            # config file
            config_metadata = get_metadata(config_root_path)

            local_bundle_cache_path = os.path.join(
                config.descriptor.get_path(), "bundle_cache")

            # make sure the local bundle cache path is part of the written
            # config metadata file
            self.assertTrue(local_bundle_cache_path in
                            config_metadata["bundle_cache_fallback_roots"])
    def _test_read_env_var_in_pipeline_configuration_yml(self, folder_name, pipeline_config_data):
        """
        Ensures environment variables are properly translated for a given file format.

        :param folder_name: Name of the configuration to create on disk.
        :param pipeline_config_data: Data to insert into shotgun.yml
        """
        env_var_pipeline = os.path.join(
            self.tank_temp, folder_name
        )
        core_folder = os.path.join(env_var_pipeline, "config", "core")
        pipeline_configuration_yml_path = os.path.join(
            core_folder, "pipeline_configuration.yml"
        )

        os.makedirs(core_folder)

        with open(pipeline_configuration_yml_path, "w") as fh:
            yaml.safe_dump(pipeline_config_data, fh)

        with open(os.path.join(core_folder, "roots.yml"), "w") as fh:
            fh.write("{}")

        test_project_name = "test_project_name"
        test_project_id = 12345
        test_pc_id = 67890
        test_pc_name = "test_pc_name"
        # tank.pipeline_config is actually a local variable inside tank/__init__.py,
        # so get the class from somewhere else...

        with temp_env_var(
            SGTK_TEST_PROJECT_NAME=test_project_name,
            SGTK_TEST_PROJECT_ID=str(test_project_id),
            SGTK_TEST_PC_ID=str(test_pc_id),
            SGTK_TEST_PC_NAME=test_pc_name
        ):
            pc = tank.pipelineconfig_factory.PipelineConfiguration(
                env_var_pipeline
            )

        self.assertEqual(
            pc.get_name(),
            test_pc_name
        )

        self.assertEqual(
            pc.get_shotgun_id(),
            test_pc_id
        )

        self.assertEqual(
            pc.get_project_id(),
            test_project_id
        )

        self.assertEqual(
            pc.get_project_disk_name(),
            test_project_name
        )
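
The callers of this helper are not shown in this excerpt. A hypothetical call illustrating the kind of data it expects (the key names below follow the usual pipeline_configuration.yml layout and are an assumption, not taken from the real tests):

    def test_read_env_vars_in_pipeline_configuration_yml(self):
        # Hypothetical test driving the helper above; the real tests pass
        # equivalent dictionaries loaded from several file formats.
        self._test_read_env_var_in_pipeline_configuration_yml(
            "env_var_config",
            {
                "project_name": "$SGTK_TEST_PROJECT_NAME",
                "project_id": "$SGTK_TEST_PROJECT_ID",
                "pc_id": "$SGTK_TEST_PC_ID",
                "pc_name": "$SGTK_TEST_PC_NAME",
            },
        )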
    def test_cache_locations(self):
        """
        Tests locations of caches when using fallback paths and
        the bundle cache path environment variable.
        """
        sg = self.mockgun

        root_a = os.path.join(self.project_root, "cache_root_a")
        root_b = os.path.join(self.project_root, "cache_root_b")
        root_c = os.path.join(self.project_root, "cache_root_c")
        root_d = os.path.join(self.project_root, "cache_root_d")
        root_env = os.path.join(self.project_root, "cache_root_env")

        location = {
            "type": "app_store",
            "version": "v1.1.1",
            "name": "tk-bundle"
        }

        d = sgtk.descriptor.create_descriptor(
            sg,
            sgtk.descriptor.Descriptor.APP,
            location,
            bundle_cache_root_override=root_a,
            fallback_roots=[root_b, root_c, root_d],
        )

        self.assertEqual(
            d._io_descriptor._get_primary_cache_path(),
            os.path.join(root_a, "app_store", "tk-bundle", "v1.1.1"),
        )

        # the bundle cache path set in the environment should
        # take precedence over other cache paths.
        with temp_env_var(SHOTGUN_BUNDLE_CACHE_PATH=root_env):
            desc_env = sgtk.descriptor.create_descriptor(
                sg,
                sgtk.descriptor.Descriptor.APP,
                location,
                bundle_cache_root_override=root_a,
                fallback_roots=[root_b, root_c, root_d],
            )

            self.assertEqual(
                desc_env._io_descriptor._get_primary_cache_path(),
                os.path.join(root_env, "app_store", "tk-bundle", "v1.1.1"),
            )

        self.assertEqual(
            d._io_descriptor._get_cache_paths(),
            [
                os.path.join(root_b, "app_store", "tk-bundle", "v1.1.1"),
                os.path.join(root_c, "app_store", "tk-bundle", "v1.1.1"),
                os.path.join(root_d, "app_store", "tk-bundle", "v1.1.1"),
                os.path.join(root_a, "app_store", "tk-bundle", "v1.1.1"),
                os.path.join(root_a, "apps", "app_store", "tk-bundle",
                             "v1.1.1"),  # legacy path
            ],
        )
Example #15
 def test_path_with_env_var_in_front(self, _):
     """
     Validate that relative paths are processed correctly on all platforms.
     """
     include = os.path.join("$INCLUDE_ENV_VAR", "include.yml")
     with temp_env_var(INCLUDE_ENV_VAR=os.getcwd()):
         self.assertEqual(self._resolve_includes(include),
                          [os.path.join(os.getcwd(), "include.yml")])
Example #16
 def test_relative_path_with_env_var(self, _):
     """
     Validate that relative paths with env vars are processed correctly
     """
     relative_include = "$INCLUDE_ENV_VAR/include.yml"
     with temp_env_var(INCLUDE_ENV_VAR=os.getcwd()):
         self.assertEqual(self._resolve_includes(relative_include),
                          [os.path.join(os.getcwd(), "include.yml")])
Example #17
 def test_env_var_only(self, _):
     """
     Validate that a lone environment variable will resolve on all platforms.
     """
     resolved_include = os.path.join(os.getcwd(), "test.yml")
     with temp_env_var(INCLUDE_ENV_VAR=resolved_include):
         os.environ["INCLUDE_ENV_VAR"]
         self.assertEqual(self._resolve_includes("$INCLUDE_ENV_VAR"),
                          [resolved_include])
 def test_relative_path_with_env_var(self, _):
     """
     Validate that relative paths with env vars are processed correctly
     """
     relative_include = "$INCLUDE_ENV_VAR/include.yml"
     with temp_env_var(INCLUDE_ENV_VAR=os.getcwd()):
         self.assertEqual(
             self._resolve_includes(relative_include),
             [os.path.join(os.getcwd(), "include.yml")]
         )
 def test_path_with_env_var_in_middle(self, _):
     """
     Validate that relative paths are processed correctly on all platforms.
     """
     include = os.path.join(os.getcwd(), "$INCLUDE_ENV_VAR", "include.yml")
     with temp_env_var(INCLUDE_ENV_VAR="includes"):
         self.assertEqual(
             self._resolve_includes(include),
             [os.path.expandvars(include)]
         )
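
All of these include-resolution tests boil down to environment variable expansion followed by anchoring relative results to a base directory. A hedged sketch of that behaviour (the real _resolve_includes also handles lists of includes and platform specific paths) is:

import os

def resolve_include(include, base_dir=None):
    # Expand "$VAR" references, then anchor relative paths to base_dir.
    expanded = os.path.expandvars(include)
    if not os.path.isabs(expanded):
        expanded = os.path.join(base_dir or os.getcwd(), expanded)
    return os.path.normpath(expanded)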
Example #20
    def test_cleanup_read_only(self):
        """
        Ensures that backup cleanup will succeed even with read only folder items
        """
        resolver = sgtk.bootstrap.resolver.ConfigurationResolver(
            plugin_id="backup_tests_read_only"
        )
        with temp_env_var(SGTK_REPO_ROOT=self._core_repo_path):
            config = resolver.resolve_configuration(
                {"type": "dev", "name": "backup_tests_read_only", "path": self._temp_test_path}, self.mockgun
            )
            self.assertIsInstance(config, sgtk.bootstrap.resolver.CachedConfiguration)
            config_root_path = config.path.current_os
            core_install_backup_path = os.path.join(config_root_path, "install", "core.backup")
            config_install_backup_path = os.path.join(config_root_path, "install", "config.backup")

            # First update, no backup
            config.update_configuration()
            
            def dont_cleanup_backup_folders(self, config, core):
                self.config_backup_folder_path = config
                self.core_backup_folder_path = core

            with patch.object(sgtk.bootstrap.resolver.CachedConfiguration, '_cleanup_backup_folders', new=dont_cleanup_backup_folders):
                # Update the configuration, but don't clean up backups in order to ...
                config.update_configuration()
                config_backup_folder_path = config.config_backup_folder_path
                core_backup_folder_path = config.core_backup_folder_path
                read_only_file_name = os.path.join(core_backup_folder_path, "test.txt")

            # ... create a read only file ...
            with open(read_only_file_name, "w") as f:
                f.write("Test")
            file_permissions = os.stat(read_only_file_name)[stat.ST_MODE]
            os.chmod(read_only_file_name, file_permissions & ~stat.S_IWRITE)
            if sys.platform == "win32":
                # ... and a read only folder
                folder_permissions = os.stat(config_install_backup_path)[stat.ST_MODE]
                os.chmod(config_install_backup_path, folder_permissions & ~stat.S_IWRITE)

            # Now try to clean up the backup folders with read-only file
            config._cleanup_backup_folders(config_backup_folder_path, core_backup_folder_path)

            # Verify that backup folders were cleaned up
            # Only the 'placeholder' file should remain
            self.assertEqual(os.listdir(core_install_backup_path), ['placeholder'])
            self.assertEqual(os.listdir(config_install_backup_path), ['placeholder'])

            # Try deleting the 'config_install_backup_path' parent folder
            # which was deliberately set to READ_ONLY on Windows
            # and verify it no longer exists afterward.
            parent_folder = os.path.join(config_install_backup_path, os.pardir)
            sgtk.util.filesystem.safe_delete_folder(parent_folder)
            self.assertFalse(os.path.exists(parent_folder))
 def test_env_var_only(self, _):
     """
     Validate that a lone environment variable will resolve on all platforms.
     """
     resolved_include = os.path.join(os.getcwd(), "test.yml")
     with temp_env_var(INCLUDE_ENV_VAR=resolved_include):
         os.environ["INCLUDE_ENV_VAR"]
         self.assertEqual(
             self._resolve_includes("$INCLUDE_ENV_VAR"),
             [resolved_include]
         )
Example #22
    def test_cleanup_with_fail(self):
        """
        Ensures that after an update with a cleanup failure, the succeeding update 
        process completes smoothly
        """
        resolver = sgtk.bootstrap.resolver.ConfigurationResolver(
            plugin_id="backup_tests_with_fail"
        )
        with temp_env_var(SGTK_REPO_ROOT=self._core_repo_path):
            config = resolver.resolve_configuration(
                {"type": "dev", "name": "backup_tests_with_fail", "path": self._temp_test_path}, self.mockgun
            )
            self.assertIsInstance(config, sgtk.bootstrap.resolver.CachedConfiguration)
            config_root_path = config.path.current_os
            core_install_backup_path = os.path.join(config_root_path, "install", "core.backup")

            # First update, no backup
            config.update_configuration()

            def dont_cleanup_backup_folders(self, config, core):
                self.config_backup_folder_path = config
                self.core_backup_folder_path = core

            # Update the configuration, but don't clean up backups
            with patch.object(sgtk.bootstrap.resolver.CachedConfiguration, '_cleanup_backup_folders', new=dont_cleanup_backup_folders):
                config.update_configuration()
                config_backup_folder_path = config.config_backup_folder_path
                core_backup_folder_path = config.core_backup_folder_path
                in_use_file_name = os.path.join(core_backup_folder_path, "test.txt")
            
            # Create a file
            with open(in_use_file_name, "w") as f:
                f.write("Test")
                config._cleanup_backup_folders(config_backup_folder_path, core_backup_folder_path)

            if sys.platform == "win32":
                # check that the backup folder was left behind as one of the 2 items because the cleanup failed
                self.assertEqual(2, len(os.listdir(core_install_backup_path)))  # ['placeholder', core_backup_folder_path]
            else:
                # on Unix, having the file open won't fail the folder removal
                self.assertEqual(os.listdir(core_install_backup_path), ['placeholder'])
            config_install_backup_path = os.path.join(config_root_path, "install", "config.backup")
            # check that there are no directory items in the config backup folder other than the placeholder file
            self.assertEqual(os.listdir(config_install_backup_path), ['placeholder'])

            # Update a second time and check that the new backup was cleaned up...
            config.update_configuration()
            if sys.platform == "win32":
                # ... but the previous backup remains
                self.assertEqual(2, len(os.listdir(core_install_backup_path))) # ['placeholder', core_backup_folder_path]
            else:
                self.assertEqual(os.listdir(core_install_backup_path), ['placeholder'])
            self.assertEqual(os.listdir(config_install_backup_path), ['placeholder'])
Example #23
 def mock_remote_path_cache(self):
     """
     Mocks a remote path cache that can be updated.
     """
     # Override the SHOTGUN_HOME so that path cache is read from another location.
     with temp_env_var(SHOTGUN_HOME=os.path.join(self.tank_temp, "other_path_cache_root")):
         pc = path_cache.PathCache(self.tk)
         pc.synchronize()
         try:
             yield pc
         finally:
             pc.close()
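
For the yield above to work in a with statement, mock_remote_path_cache is presumably decorated with contextlib.contextmanager; the decorator is not visible in this excerpt. A hypothetical caller would use it like this:

    def test_remote_path_cache_sync(self):
        # Hypothetical usage of the helper above: the path cache is read from
        # a temporary SHOTGUN_HOME, synchronized, and closed automatically.
        with self.mock_remote_path_cache() as pc:
            self.assertIsNotNone(pc)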
Example #24
    def test_get_entity_from_environment(self, _):
        """
        Ensure the ToolkitManager can extract the entities from the environment
        """

        # no env set
        mgr = ToolkitManager()
        self.assertEqual(mgr.get_entity_from_environment(), None)

        # std case
        with temp_env_var(
            SHOTGUN_ENTITY_TYPE="Shot",
            SHOTGUN_ENTITY_ID="123"
        ):
            self.assertEqual(
                mgr.get_entity_from_environment(),
                {"type": "Shot", "id": 123}
            )
        # site mismatch
        with temp_env_var(
            SHOTGUN_SITE="https://some.other.site",
            SHOTGUN_ENTITY_TYPE="Shot",
            SHOTGUN_ENTITY_ID="123"
        ):
            self.assertEqual(
                mgr.get_entity_from_environment(),
                None
            )

        # invalid data case
        with temp_env_var(
            SHOTGUN_ENTITY_TYPE="Shot",
            SHOTGUN_ENTITY_ID="invalid"
        ):
            self.assertEqual(
                mgr.get_entity_from_environment(),
                None
            )
    def test_local_properties_persistance(self):
        """
        Ensures local properties can be reloaded and reaccessed by a new
        manager instance.
        """

        # Indirectly create tasks, since we can't create them directly without a
        # PublishPluginInstance object.
        manager = self._create_manager()
        manager.collect_session()

        # Save the session to disk.
        fd, temp_file_path = tempfile.mkstemp()
        manager.save(temp_file_path)

        # Creating a second manager will force the plugins to be reloaded by it.
        new_manager = self._create_manager()

        # Loads the tree.
        new_manager.load(temp_file_path)

        with temp_env_var(TEST_LOCAL_PROPERTIES="1"):
            # Create a generator that will ensure all tasks see the right local properties.
            def task_yielder(manager):
                nb_items_processed = 0
                for item in manager.tree:
                    for task in item.tasks:
                        (is_valid, error) = yield task
                        # The validate method of both plugins will raise an error
                        # if the values can be retrieved.
                        # We're raising if the test passes in the validate method
                        # because we want to make sure the validate method
                        # and the validation code are actually being called. If
                        # some other error was raised due to a bug, it would be
                        # caught by the assertEqual.
                        self.assertFalse(is_valid)
                        self.assertEqual(
                            str(error),
                            "local_properties was serialized properly.")
                        nb_items_processed += 1

                # Make sure some tasks have been processed. We don't want a false-positive
                # where no items have failed publishing because somehow no tasks
                # were available due to a misconfiguration error in the test.
                self.assertNotEqual(nb_items_processed, 0)

            # Validate with our custom yielder. Each task that fails reports an error.
            self.assertEqual(
                len(new_manager.validate(task_yielder(new_manager))), 6)
    def test_temp_env_var_that_didnt_exist(self):
        """
        Check if temp_env_var sets and restores the variable
        """
        # Create a unique env var and make sure it doesn't currently exist.
        env_var_name = "ENV_VAR_" + uuid.uuid4().hex
        self.assertFalse(env_var_name in os.environ)

        # Temporarily set the env var.
        with temp_env_var(**{env_var_name: "test_value"}):
            self.assertTrue(env_var_name in os.environ)
            self.assertEqual(os.environ[env_var_name], "test_value")

        # Make sure it is gone.
        self.assertFalse(env_var_name in os.environ)
Example #27
    def test_temp_env_var_that_didnt_exist(self):
        """
        Check if temp_env_var sets and restores the variable
        """
        # Create a unique env var and make sure it doesn't currently exist.
        env_var_name = "ENV_VAR_" + uuid.uuid4().hex
        self.assertFalse(env_var_name in os.environ)

        # Temporarily set the env var.
        with temp_env_var(**{env_var_name: "test_value"}):
            self.assertTrue(env_var_name in os.environ)
            self.assertEqual(os.environ[env_var_name], "test_value")

        # Make sure it is gone.
        self.assertFalse(env_var_name in os.environ)
Example #28
 def _download_git_branch_bundle(self, target=None):
     """
     Downloads the data given by the git descriptors to disk.
     :param target: The path to which the bundle is to be downloaded.
     """
     location_dict_branch = {
         "type": "git_branch",
         "path": self.git_repo_uri,
         "branch": "33014_nuke_studio"
     }
     if target:
         with temp_env_var(SHOTGUN_BUNDLE_CACHE_PATH=target):
             desc_git_branch = self._create_desc(location_dict_branch, True)
     else:
         desc_git_branch = self._create_desc(location_dict_branch, True)
     desc_git_branch.download_local()
 def _download_git_tag_bundle(self, target=None):
     """
     Downloads the data given by the git descriptors to disk.
     :param target: The path to which the bundle is to be downloaded.
     """
     location_dict_tag = {
         "type": "git",
         "path": self.git_repo_uri,
         "version": "v0.15.0"
     }
     if target:
         with temp_env_var(SHOTGUN_BUNDLE_CACHE_PATH=target):
             desc_git_tag = self._create_desc(location_dict_tag)
     else:
         desc_git_tag = self._create_desc(location_dict_tag)
     desc_git_tag.download_local()
Example #31
    def test_localize_config_with_interpreter_as_env_var(self):
        """
        Test for interpreter file in a localized config.
        """
        config_root = self._create_pipeline_configuration(
            "localized_core_with_interpreter_as_env_var")
        # Create interpreter file for good config.
        self._create_interpreter_file(config_root, "$SGTK_TEST_INTERPRETER")

        # Patch os.path.exists since /i/wish/this/was/python3 is obviously not a real
        # file name.
        with patch("os.path.exists", return_value=True):
            with temp_env_var(
                    SGTK_TEST_INTERPRETER="/i/wish/this/was/python3"):
                self.assertEqual(
                    pipelineconfig_utils.get_python_interpreter_for_config(
                        config_root), "/i/wish/this/was/python3")
    def _test_read_env_var_in_pipeline_configuration_yml(
        self, folder_name, pipeline_config_data
    ):
        """
        Ensures environment variables are properly translated for a given file format.

        :param folder_name: Name of the configuration to create on disk.
        :param pipeline_config_data: Data to insert into shotgun.yml
        """
        env_var_pipeline = os.path.join(self.tank_temp, folder_name)
        core_folder = os.path.join(env_var_pipeline, "config", "core")
        pipeline_configuration_yml_path = os.path.join(
            core_folder, "pipeline_configuration.yml"
        )

        os.makedirs(core_folder)

        with open(pipeline_configuration_yml_path, "w") as fh:
            yaml.safe_dump(pipeline_config_data, fh)

        with open(os.path.join(core_folder, "roots.yml"), "w") as fh:
            fh.write("{}")

        test_project_name = "test_project_name"
        test_project_id = 12345
        test_pc_id = 67890
        test_pc_name = "test_pc_name"
        # tank.pipeline_config is actually a local variable inside tank/__init__.py,
        # so get the class from somewhere else...

        with temp_env_var(
            SGTK_TEST_PROJECT_NAME=test_project_name,
            SGTK_TEST_PROJECT_ID=str(test_project_id),
            SGTK_TEST_PC_ID=str(test_pc_id),
            SGTK_TEST_PC_NAME=test_pc_name,
        ):
            pc = tank.pipelineconfig_factory.PipelineConfiguration(env_var_pipeline)

        self.assertEqual(pc.get_name(), test_pc_name)

        self.assertEqual(pc.get_shotgun_id(), test_pc_id)

        self.assertEqual(pc.get_project_id(), test_project_id)

        self.assertEqual(pc.get_project_disk_name(), test_project_name)
    def test_localize_config_with_interpreter_as_env_var(self):
        """
        Test for interpreter file in a localized config.
        """
        config_root = self._create_pipeline_configuration(
            "localized_core_with_interpreter_as_env_var"
        )
        # Create interpreter file for good config.
        self._create_interpreter_file(config_root, "$SGTK_TEST_INTERPRETER")

        # Patch os.path.exists since /i/wish/this/was/python3 is obviously not a real
        # file name.
        with patch("os.path.exists", return_value=True):
            with temp_env_var(SGTK_TEST_INTERPRETER="/i/wish/this/was/python3"):
                self.assertEqual(
                    pipelineconfig_utils.get_python_interpreter_for_config(config_root),
                    "/i/wish/this/was/python3"
                )
    def test_nuke_studio_path_append(self):
        """
        Tests the generation of the HIERO_PLUGIN_PATH environment variable, to make
        sure that the toolkit startup paths for Nuke Studio are correctly appended to any pre-existing paths.
        Checks against pre-existing and non-existent scenarios.
        :return: None
        """
        hiero_path_1 = os.path.join(tempfile.gettempdir(), "gizmo_1")
        hiero_path_2 = os.path.join(tempfile.gettempdir(), "gizmo_1")
        plugin_path = os.path.join(repo_root, "plugins", "basic")

        # create a temp pre-existing HIERO_PLUGIN_PATH to test preservation, when appending toolkit startup path
        hiero_path_env = {"HIERO_PLUGIN_PATH": os.pathsep.join([hiero_path_1, hiero_path_2])}
        with temp_env_var(**hiero_path_env):

            # create launcher
            nuke_launcher = sgtk.platform.create_engine_launcher(
                self.tk,
                sgtk.context.create_empty(self.tk),
                "tk-nuke",
                ["10.0v5"]
            )

            # generate launch env
            launch_info = nuke_launcher.prepare_launch("/path/to/nuke", ["--studio"], None)

            # ensure that the nuke studio path was preserved and placed first in the path
            self.assertEqual(
                launch_info.environment["HIERO_PLUGIN_PATH"],
                os.pathsep.join([hiero_path_1, hiero_path_2, plugin_path])
            )


        # now test without anything in the Hiero path

        # generate launch env
        launch_info = nuke_launcher.prepare_launch("/path/to/nuke", ["--studio"], None)

        # ensure that only the toolkit startup path is present when nothing pre-existed
        self.assertEqual(
            launch_info.environment["HIERO_PLUGIN_PATH"],
            plugin_path
        )
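
The assertions above reduce to a simple append rule for HIERO_PLUGIN_PATH: keep any pre-existing entries first and add the toolkit startup path at the end. A hedged sketch of that rule (not the tk-nuke implementation) is:

import os

def append_plugin_path(existing_value, plugin_path):
    # Pre-existing entries stay first; the toolkit startup path is appended.
    if existing_value:
        return os.pathsep.join([existing_value, plugin_path])
    return plugin_path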
Example #35
    def test_local_bundle_cache(self):
        """
        Ensures the local bundle cache path is included in the pipeline config
        yml file during updates.
        """
        resolver = sgtk.bootstrap.resolver.ConfigurationResolver(
            plugin_id="backup_tests"
        )
        with temp_env_var(SGTK_REPO_ROOT=self._core_repo_path):

            config = resolver.resolve_configuration(
                {
                    "type": "dev",
                    "name": "backup_tests",
                    "path": self._temp_test_path
                },
                self.mockgun
            )
            self.assertIsInstance(
                config,
                sgtk.bootstrap.resolver.CachedConfiguration
            )

            config_root_path = config.path.current_os

            # Update the configuration
            config.update_configuration()

            # make sure the local bundle cache path was written to the pipeline
            # config file
            config_metadata = get_metadata(config_root_path)

            local_bundle_cache_path = os.path.join(
                config.descriptor.get_path(),
                "bundle_cache"
            )

            # make sure the local bundle cache path is part of the written
            # config metadata file
            self.assertTrue(
                local_bundle_cache_path in
                config_metadata["bundle_cache_fallback_roots"]
            )
    def _download_git_branch_bundle(self, target=None):
        """
        Downloads the data given by the git descriptors to disk.
        :param target: The path to which the bundle is to be downloaded.
        :returns: a descriptor instance
        """
        location_dict_branch = {
            "type": "git_branch",
            "path": self.git_repo_uri,
            "branch": "33014_nuke_studio"
        }
        if target:
            with temp_env_var(SHOTGUN_BUNDLE_CACHE_PATH=target):
                desc_git_branch = self._create_desc(location_dict_branch, True)
        else:
            desc_git_branch = self._create_desc(location_dict_branch, True)
        desc_git_branch.download_local()

        return desc_git_branch
Example #37
    def test_hiero_path_append(self):
        """
        Tests the generation of the HIERO_PLUGIN_PATH environment variable, to make
        sure that the toolkit startup paths for Hiero are correctly appended to any pre-existing paths.
        Checks against pre-existing and non-existent scenarios.
        :return: None
        """
        hiero_path_1 = os.path.join(tempfile.gettempdir(), "gizmo_1")
        hiero_path_2 = os.path.join(tempfile.gettempdir(), "gizmo_1")
        plugin_path = os.path.join(repo_root, "plugins", "basic")

        # create a temp pre-existing HIERO_PLUGIN_PATH to test preservation, when appending toolkit startup path
        hiero_path_env = {
            "HIERO_PLUGIN_PATH": os.pathsep.join([hiero_path_1, hiero_path_2])
        }
        with temp_env_var(**hiero_path_env):

            # create launcher
            nuke_launcher = sgtk.platform.create_engine_launcher(
                self.tk, sgtk.context.create_empty(self.tk), "tk-nuke",
                ["10.0v5"])

            # generate launch env
            launch_info = nuke_launcher.prepare_launch("/path/to/nuke",
                                                       ["--hiero"], None)

            # ensure that the hiero path was preserved and placed first in the path
            self.assertEqual(
                launch_info.environment["HIERO_PLUGIN_PATH"],
                os.pathsep.join([hiero_path_1, hiero_path_2, plugin_path]),
            )

        # now test without stuff in the Hiero path

        # generate launch env
        launch_info = nuke_launcher.prepare_launch("/path/to/nuke",
                                                   ["--hiero"], None)

        # ensure that only the toolkit startup path is present when nothing pre-existed
        self.assertEqual(launch_info.environment["HIERO_PLUGIN_PATH"],
                         plugin_path)
Example #38
 def _download_shotgun_bundle(self, target=None):
     """
     Creates a shotgun entity descriptor and attempts to download it locally.
     :param target: The path to which the bundle is to be downloaded
     """
     location = {
         "type": "shotgun",
         "entity_type": "PipelineConfiguration",
         "name": "primary",
         "project_id": 123,
         "field": "sg_config",
         "version": 456
     }
     if target:
         with temp_env_var(SHOTGUN_BUNDLE_CACHE_PATH=target):
             desc = self._create_desc(location)
     else:
         desc = self._create_desc(location)
     with patch("tank.util.shotgun.download_and_unpack_attachment",
                side_effect=self._download_and_unpack_attachment):
         desc.download_local()
Example #39
    def test_cleanup(self):
        """
        Ensures that after a successful update the backup folder created by the
        update process is properly deleted
        """
        resolver = sgtk.bootstrap.resolver.ConfigurationResolver(
            plugin_id="backup_tests")
        with temp_env_var(SGTK_REPO_ROOT=self._core_repo_path):
            config = resolver.resolve_configuration(
                {
                    "type": "dev",
                    "name": "backup_tests",
                    "path": self._temp_test_path
                },
                self.mockgun,
            )
            self.assertIsInstance(config,
                                  sgtk.bootstrap.resolver.CachedConfiguration)
            config_root_path = config.path.current_os

            # Update the configuration
            config.update_configuration()
            core_install_backup_path = os.path.join(config_root_path,
                                                    "install", "core.backup")
            # check that there are no directory items in the core backup folder other than the placeholder file
            self.assertEqual(os.listdir(core_install_backup_path),
                             ["placeholder"])
            config_install_backup_path = os.path.join(config_root_path,
                                                      "install",
                                                      "config.backup")
            # check that there are no directory items in the config backup folder other than the placeholder file
            self.assertEqual(os.listdir(config_install_backup_path),
                             ["placeholder"])

            # Update a second time and check that backup was cleaned up again
            config.update_configuration()
            self.assertEqual(os.listdir(core_install_backup_path),
                             ["placeholder"])
            self.assertEqual(os.listdir(config_install_backup_path),
                             ["placeholder"])
Example #40
    def test_shotgun_bundle_cache(self, _):
        """
        Ensures ToolkitManager deals properly with bundle caches from the user and from
        environment variables.
        """

        # Ensure the list is empty by default.
        mgr = ToolkitManager()
        self.assertEqual(mgr._get_bundle_cache_fallback_paths(), [])

        # If the user bundle cache is set, we should see it in the results.
        mgr.bundle_cache_fallback_paths = ["/a/b/c", "/d/e/f"]
        self.assertEqual(
            set(mgr._get_bundle_cache_fallback_paths()), set(["/a/b/c", "/d/e/f"]))

        # Reset the user bundle cache.
        mgr.bundle_cache_fallback_paths = []
        self.assertEqual(mgr._get_bundle_cache_fallback_paths(), [])

        # Set the environment variable which allows paths to be inherited from another process.
        with temp_env_var(
            SHOTGUN_BUNDLE_CACHE_FALLBACK_PATHS=os.pathsep.join(["/g/h/i", "/j/k/l", "/a/b/c"])
        ):
            # Should see the content from the environment variable.
            self.assertEqual(
                set(mgr._get_bundle_cache_fallback_paths()), set(["/g/h/i", "/j/k/l", "/a/b/c"]))

            # Add a few user specified folders.
            mgr.bundle_cache_fallback_paths = ["/a/b/c", "/d/e/f"]

            self.assertEqual(
                set(mgr._get_bundle_cache_fallback_paths()),
                set(["/a/b/c", "/d/e/f", "/g/h/i", "/j/k/l"])
            )

        # Now that the env var is not set anymore, we should no longer see its bundle caches.
        self.assertEqual(
            set(mgr._get_bundle_cache_fallback_paths()), set(["/a/b/c", "/d/e/f"])
        )
 def _download_shotgun_bundle(self, target=None):
     """
     Creates a shotgun entity descriptor and attempts to download it locally.
     :param target: The path to which the bundle is to be downloaded
     """
     location = {
         "type": "shotgun",
         "entity_type": "PipelineConfiguration",
         "name": "primary",
         "project_id": 123,
         "field": "sg_config",
         "version": 456
     }
     if target:
         with temp_env_var(SHOTGUN_BUNDLE_CACHE_PATH=target):
             desc = self._create_desc(location)
     else:
         desc = self._create_desc(location)
     with patch(
             "tank.util.shotgun.download_and_unpack_attachment",
             side_effect=self._download_and_unpack_attachment):
         desc.download_local()
    def test_env_vars_present(self, get_api_core_config_location_mock):
        """
        Ensures files using environment variables are translated properly.
        """
        test_host = "https://envvar.shotgunstudio.com"
        test_key = "env_var_key"
        test_script = "env_var_script"
        test_proxy = "env_var_proxy"
        test_appstore_proxy = "env_var_appstore_proxy"

        with temp_env_var(
            SGTK_TEST_HOST=test_host,
            SGTK_TEST_KEY=test_key,
            SGTK_TEST_SCRIPT=test_script,
            SGTK_TEST_PROXY=test_proxy,
            SGTK_TEST_APPSTORE_PROXY=test_appstore_proxy
        ):
            self.assertEqual(
                tank.util.shotgun.connection._parse_config_data(
                    {
                        "host": "$SGTK_TEST_HOST",
                        "api_key": "$SGTK_TEST_KEY",
                        "api_script": "$SGTK_TEST_SCRIPT",
                        "http_proxy": "$SGTK_TEST_PROXY",
                        "app_store_http_proxy": "$SGTK_TEST_APPSTORE_PROXY"

                    },
                    "default",
                    "not_a_file.cfg"
                ),
                {
                    "host": test_host,
                    "api_key": test_key,
                    "api_script": test_script,
                    "http_proxy": test_proxy,
                    "app_store_http_proxy": test_appstore_proxy
                }
            )
Example #43
    def test_cleanup_with_fail(self):
        """
        Ensures that after an update with a cleanup failure, the succeeding update
        process completes smoothly
        """
        resolver = sgtk.bootstrap.resolver.ConfigurationResolver(
            plugin_id="backup_tests_with_fail")
        with temp_env_var(SGTK_REPO_ROOT=self._core_repo_path):
            config = resolver.resolve_configuration(
                {
                    "type": "dev",
                    "name": "backup_tests_with_fail",
                    "path": self._temp_test_path,
                },
                self.mockgun,
            )
            self.assertIsInstance(config,
                                  sgtk.bootstrap.resolver.CachedConfiguration)
            config_root_path = config.path.current_os
            core_install_backup_path = os.path.join(config_root_path,
                                                    "install", "core.backup")

            # First update, no backup
            config.update_configuration()

            def dont_cleanup_backup_folders(self, config, core):
                self.config_backup_folder_path = config
                self.core_backup_folder_path = core

            # Update the configuration, but don't clean up backups
            with patch.object(
                    sgtk.bootstrap.resolver.CachedConfiguration,
                    "_cleanup_backup_folders",
                    new=dont_cleanup_backup_folders,
            ):
                config.update_configuration()
                config_backup_folder_path = config.config_backup_folder_path
                core_backup_folder_path = config.core_backup_folder_path
                in_use_file_name = os.path.join(core_backup_folder_path,
                                                "test.txt")

            # Create a file
            with open(in_use_file_name, "w") as f:
                f.write("Test")
                config._cleanup_backup_folders(config_backup_folder_path,
                                               core_backup_folder_path)

            if is_windows():
                # check that the backup folder was left behind as one of the 2 items because the cleanup failed
                self.assertEqual(2, len(os.listdir(core_install_backup_path))
                                 )  # ['placeholder', core_backup_folder_path]
            else:
                # on Unix, having the file open won't fail the folder removal
                self.assertEqual(os.listdir(core_install_backup_path),
                                 ["placeholder"])
            config_install_backup_path = os.path.join(config_root_path,
                                                      "install",
                                                      "config.backup")
            # check that there are no directory items in the config backup folder other than the placeholder file
            self.assertEqual(os.listdir(config_install_backup_path),
                             ["placeholder"])

            # Update a second time and check that the new backup was cleaned up...
            config.update_configuration()
            if is_windows():
                # ... but the previous backup remains
                self.assertEqual(2, len(os.listdir(core_install_backup_path))
                                 )  # ['placeholder', core_backup_folder_path]
            else:
                self.assertEqual(os.listdir(core_install_backup_path),
                                 ["placeholder"])
            self.assertEqual(os.listdir(config_install_backup_path),
                             ["placeholder"])
Example #44
    def test_cleanup_read_only(self):
        """
        Ensures that backup cleanup will succeed even with read only folder items
        """
        resolver = sgtk.bootstrap.resolver.ConfigurationResolver(
            plugin_id="backup_tests_read_only")
        with temp_env_var(SGTK_REPO_ROOT=self._core_repo_path):
            config = resolver.resolve_configuration(
                {
                    "type": "dev",
                    "name": "backup_tests_read_only",
                    "path": self._temp_test_path,
                },
                self.mockgun,
            )
            self.assertIsInstance(config,
                                  sgtk.bootstrap.resolver.CachedConfiguration)
            config_root_path = config.path.current_os
            core_install_backup_path = os.path.join(config_root_path,
                                                    "install", "core.backup")
            config_install_backup_path = os.path.join(config_root_path,
                                                      "install",
                                                      "config.backup")

            # First update, no backup
            config.update_configuration()

            def dont_cleanup_backup_folders(self, config, core):
                self.config_backup_folder_path = config
                self.core_backup_folder_path = core

            with patch.object(
                    sgtk.bootstrap.resolver.CachedConfiguration,
                    "_cleanup_backup_folders",
                    new=dont_cleanup_backup_folders,
            ):
                # Update the configuration, but don't clean up backups in order to ...
                config.update_configuration()
                config_backup_folder_path = config.config_backup_folder_path
                core_backup_folder_path = config.core_backup_folder_path
                read_only_file_name = os.path.join(core_backup_folder_path,
                                                   "test.txt")

            # ... create a read only file ...
            with open(read_only_file_name, "w") as f:
                f.write("Test")
            file_permissions = os.stat(read_only_file_name)[stat.ST_MODE]
            os.chmod(read_only_file_name, file_permissions & ~stat.S_IWRITE)
            if is_windows():
                # ... and a read only folder
                folder_permissions = os.stat(config_install_backup_path)[
                    stat.ST_MODE]
                os.chmod(config_install_backup_path,
                         folder_permissions & ~stat.S_IWRITE)

            # Now try to clean up the backup folders with read-only file
            config._cleanup_backup_folders(config_backup_folder_path,
                                           core_backup_folder_path)

            # Verify that backup folders were cleaned up
            # Only the 'placeholder' file should remain
            self.assertEqual(os.listdir(core_install_backup_path),
                             ["placeholder"])
            self.assertEqual(os.listdir(config_install_backup_path),
                             ["placeholder"])

            # Try deleting the 'config_install_backup_path' parent folder
            # which was deliberately set to READ_ONLY on Windows
            # and verify it no longer exists afterward.
            parent_folder = os.path.join(config_install_backup_path, os.pardir)
            sgtk.util.filesystem.safe_delete_folder(parent_folder)
            self.assertFalse(os.path.exists(parent_folder))