def test_core_features(self):
        """
        Checks that core features are reported properly. This prevents us from
        removing something by mistake in info.yml, which would be quite
        catastrophic.
        """
        # Create an IoDescriptor-like object returning the core's info.yml.
        with open(
            os.path.join(
                os.path.dirname(__file__),
                "..", "..", "info.yml"
            )
        ) as fh:
            info = yaml.safe_load(fh)

        io_desc = Mock()
        io_desc.get_manifest.return_value = info
        desc = sgtk.descriptor.CoreDescriptor(io_desc)

        # The exact set of features (and versions) the core manifest is
        # expected to declare.
        features = {
            "bootstrap.lean_config.version": 1
        }

        # Make sure every feature is at the expected version.
        # Note: use .items() instead of .iteritems() — the latter is
        # Python-2-only and was removed in Python 3; .items() works on both.
        for feature, value in features.items():
            self.assertEqual(desc.get_feature_info(feature), value)

        # Make sure there weren't new features introduced.
        self.assertEqual(desc.get_features_info(), features)
# Example #2
    def test_write_project_sandbox_config(self):
        """
        Expects project configuration sandboxes are written out properly.
        """
        self._create_test_data(create_project=True)

        # Write the pipeline configuration file for the project sandbox.
        pc_id = self.__project_configuration["id"]
        project_id = self.__project["id"]
        path = self.__cw.write_pipeline_config_file(
            pc_id,
            project_id,
            "basic.plugin",
            self.FALLBACK_PATHS,
            self.__descriptor,
        )

        # Read back what was written to disk.
        with open(path, "r") as fh:
            config_info = yaml.safe_load(fh)

        # Build the expected payload and compare it wholesale so any
        # missing or extra key is reported.
        expected = {
            "pc_id": pc_id,
            "pc_name": self.__project_configuration["code"],
            "project_id": project_id,
            "project_name": self.__project["tank_name"],
            "plugin_id": "basic.plugin",
            "published_file_entity_type": "PublishedFile",
            "use_bundle_cache": True,
            "bundle_cache_fallback_roots": self.FALLBACK_PATHS,
            "use_shotgun_path_cache": True,
            "source_descriptor": self.__descriptor.get_dict(),
        }
        self.assertDictEqual(config_info, expected)
    # NOTE(review): this method has the same name as an earlier
    # test_write_project_sandbox_config in this file; if both end up in the
    # same class, this definition silently shadows the earlier one.
    def test_write_project_sandbox_config(self):
        """
        Expects project configuration sandboxes are written out properly.
        """
        self._create_test_data(create_project=True)
        # Write the pipeline configuration file for the project sandbox
        # and capture the path it was written to.
        path = self.__cw.write_pipeline_config_file(
            self.__project_configuration["id"],
            self.__project["id"],
            "basic.plugin",
            self.FALLBACK_PATHS,
            self.__descriptor
        )

        # Read back the file that was just written.
        with open(path, "r") as fh:
            config_info = yaml.safe_load(fh)

        # Compare the whole payload at once so any missing or extra key
        # is reported in the failure message.
        self.assertDictEqual(
            config_info,
            {
                "pc_id": self.__project_configuration["id"],
                "pc_name": self.__project_configuration["code"],
                "project_id": self.__project["id"],
                "project_name": self.__project["tank_name"],
                "plugin_id": "basic.plugin",
                "published_file_entity_type": "PublishedFile",
                "use_bundle_cache": True,
                "bundle_cache_fallback_roots": self.FALLBACK_PATHS,
                "use_shotgun_path_cache": True,
                "source_descriptor": self.__descriptor.get_dict()
            }
        )
def get_parameters():
    """
    Reads the serialized DCC state and publish tree locations from the
    command line and returns the values needed to drive a publish.

    Expects sys.argv[1] to be the path to a yaml file holding the DCC
    state and sys.argv[2] to be the path to the serialized publish tree.

    :returns: Tuple of (session path, pipeline configuration id,
        context entity, app instance name, engine instance name,
        publish tree file path).
    """
    # First argument: yaml file with the dcc state information.
    state_file = sys.argv[1]
    with open(state_file, "rt") as f:
        dcc_state = yaml.safe_load(f)

    # Second argument: the serialized publish tree location.
    publish_tree_file = sys.argv[2]

    # Rebuild the context and pick the most specific entity we will be
    # publishing to (task, then entity, then project).
    import sgtk
    toolkit_state = dcc_state["toolkit"]
    context = sgtk.Context.from_dict(None, toolkit_state["context"])
    context_entity = context.task or context.entity or context.project

    return (
        dcc_state["session_path"],
        toolkit_state["pipeline_configuration_id"],
        context_entity,
        toolkit_state["app_instance_name"],
        toolkit_state["engine_instance_name"],
        publish_tree_file,
    )
    def test_core_features(self):
        """
        Checks that core features are reported properly. This prevents us from
        removing something by mistake in info.yml, which would be quite
        catastrophic.
        """
        # Create an IoDescriptor-like object returning the core's info.yml.
        with open(
            os.path.join(
                os.path.dirname(__file__),
                "..", "..", "info.yml"
            )
        ) as fh:
            info = yaml.safe_load(fh)

        io_desc = Mock()
        io_desc.get_manifest.return_value = info
        desc = self._create_core_desc(io_desc)

        # The exact set of features (and versions) the core manifest is
        # expected to declare.
        features = {
            "bootstrap.lean_config.version": 1
        }

        # Make sure every feature is at the expected version.
        # Note: use .items() instead of .iteritems() — the latter is
        # Python-2-only and was removed in Python 3; .items() works on both.
        for feature, value in features.items():
            self.assertEqual(desc.get_feature_info(feature), value)

        # Make sure there weren't new features introduced.
        self.assertEqual(desc.get_features_info(), features)
    def _setup_fixtures(self, name="config", parameters=None):
        """
        See doc for setup fixtures.

        :param name: Name of the fixture configuration folder to use.
        :param parameters: Optional dict of options. Keys read here:
            "core" (relative path to an alternate core folder),
            "installed_config" (bool, force copying the files),
            "skip_template_reload" (bool, skip the template reload).
        """

        parameters = parameters or {}

        # figure out root point of fixtures config
        config_root = os.path.join(self.fixtures_root, name)

        # first figure out core location
        if "core" in parameters:
            # This config is not simple, as it is pieced together from an env
            # and hooks folder in one location and the core from another.
            simple_config = False
            # convert slashes to be windows friendly
            core_path_suffix = parameters["core"].replace("/", os.sep)
            core_source = os.path.join(config_root, core_path_suffix)
        else:
            # This config is simple, as it is based on a config that is layed out into a single folder.
            simple_config = True
            # use the default core fixture
            core_source = os.path.join(config_root, "core")

        # Check if the test wants the files to be copied.
        installed_config = parameters.get("installed_config", False)

        # If the config is not simple, or the test wants the files to be copied
        if not simple_config or installed_config:
            # copy core over to target
            core_target = os.path.join(self.project_config, "core")
            self._copy_folder(core_source, core_target)
            # now copy the rest of the config fixtures
            for config_folder in ["env", "hooks", "bundles"]:
                config_source = os.path.join(config_root, config_folder)
                if os.path.exists(config_source):
                    config_target = os.path.join(self.project_config, config_folder)
                    self._copy_folder(config_source, config_target)
        else:
            # We're going to be using a cached configuration, so set up the source_descriptor.
            # Read, amend and rewrite pipeline_configuration.yml in place so
            # it points at the fixture folder as the configuration source.
            pc_yml_location = os.path.join(self.pipeline_config_root, "config", "core", "pipeline_configuration.yml")
            with open(pc_yml_location, "r") as fh:
                pc_data = yaml.safe_load(fh)
            pc_data["source_descriptor"] = {"path": config_root, "type": "path"}
            with open(pc_yml_location, "w") as fh:
                fh.write(yaml.dump(pc_data))

            # Update where the config root variable points to.
            self.project_config = config_root

        # need to reload the pipeline config to respect the config data from
        # the fixtures
        self.reload_pipeline_config()

        if not ("skip_template_reload" in parameters and parameters["skip_template_reload"]):
            # no skip_template_reload flag set to true. So go ahead and reload
            self.tk.reload_templates()
    # NOTE(review): this is a duplicate of an identical _setup_fixtures
    # defined earlier in this file; if both end up in the same class, this
    # definition silently shadows the earlier one.
    def _setup_fixtures(self, name="config", parameters=None):
        """
        See doc for setup fixtures.

        :param name: Name of the fixture configuration folder to use.
        :param parameters: Optional dict of options. Keys read here:
            "core" (relative path to an alternate core folder),
            "installed_config" (bool, force copying the files),
            "skip_template_reload" (bool, skip the template reload).
        """

        parameters = parameters or {}

        # figure out root point of fixtures config
        config_root = os.path.join(self.fixtures_root, name)

        # first figure out core location
        if "core" in parameters:
            # This config is not simple, as it is pieced together from an env
            # and hooks folder in one location and the core from another.
            simple_config = False
            # convert slashes to be windows friendly
            core_path_suffix = parameters["core"].replace("/", os.sep)
            core_source = os.path.join(config_root, core_path_suffix)
        else:
            # This config is simple, as it is based on a config that is layed out into a single folder.
            simple_config = True
            # use the default core fixture
            core_source = os.path.join(config_root, "core")

        # Check if the test wants the files to be copied.
        installed_config = parameters.get("installed_config", False)

        # If the config is not simple, or the test wants the files to be copied
        if not simple_config or installed_config:
            # copy core over to target
            core_target = os.path.join(self.project_config, "core")
            self._copy_folder(core_source, core_target)
            # now copy the rest of the config fixtures
            for config_folder in ["env", "hooks", "bundles"]:
                config_source = os.path.join(config_root, config_folder)
                if os.path.exists(config_source):
                    config_target = os.path.join(self.project_config, config_folder)
                    self._copy_folder(config_source, config_target)
        else:
            # We're going to be using a cached configuration, so set up the source_descriptor.
            # Read, amend and rewrite pipeline_configuration.yml in place so
            # it points at the fixture folder as the configuration source.
            pc_yml_location = os.path.join(self.pipeline_config_root, "config", "core", "pipeline_configuration.yml")
            with open(pc_yml_location, "r") as fh:
                pc_data = yaml.safe_load(fh)
            pc_data["source_descriptor"] = {"path": config_root, "type": "path"}
            with open(pc_yml_location, "w") as fh:
                fh.write(yaml.dump(pc_data))

            # Update where the config root variable points to.
            self.project_config = config_root

        # need to reload the pipeline config to respect the config data from
        # the fixtures
        self.reload_pipeline_config()

        if not ("skip_template_reload" in parameters and parameters["skip_template_reload"]):
            # no skip_template_reload flag set to true. So go ahead and reload
            self.tk.reload_templates()
    def test_character_escaping(self):
        """
        Ensure that the ' character is properly escaped
        when writing out install_location.yml
        """
        # Use a config root containing a single quote so the writer has to
        # escape it when emitting yaml.
        new_config_root = os.path.join(self.tank_temp, self.short_test_name,
                                       "O'Connell")

        writer = ConfigurationWriter(
            ShotgunPath.from_current_os_path(new_config_root), self.mockgun)

        install_location_path = os.path.join(new_config_root, "config", "core",
                                             "install_location.yml")

        os.makedirs(os.path.dirname(install_location_path))

        writer.write_install_location_file()

        # Read the file back and rebuild the path; round-tripping must give
        # back the original quoted path for the current OS.
        with open(install_location_path, "rt") as f:
            paths = yaml.safe_load(f)
            path = ShotgunPath(paths["Windows"], paths["Linux"],
                               paths["Darwin"])
        # Use self.assertEqual rather than a bare assert: asserts are
        # stripped under python -O and give no diff in the failure message.
        self.assertEqual(path.current_os, new_config_root)