Example #1
    def write_install_location_file(self):
        """
        Writes the install location file
        """
        config_path = self._path.current_os

        # write the install_location file for our new setup
        sg_code_location = os.path.join(config_path, "config", "core",
                                        "install_location.yml")

        if os.path.exists(sg_code_location):
            # We want to log that we're overwriting an existing file, but this file
            # is almost exclusively auto generated, so we can do it as a debug.
            log.debug(
                "The file 'core/install_location.yml' exists in the configuration "
                "but will be overwritten with an auto generated file.")

        with filesystem.auto_created_yml(sg_code_location) as fh:

            fh.write("# This file reflects the paths in the pipeline\n")
            fh.write("# configuration defined for this project.\n")
            fh.write("\n")

            locations = {}
            locations["Windows"] = self._path.windows
            locations["Darwin"] = self._path.macosx
            locations["Linux"] = self._path.linux

            yaml.safe_dump(locations, fh, default_flow_style=False)
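
As a hedged illustration of the dump above (the paths are hypothetical): with default_flow_style=False, safe_dump writes each key on its own line in block style, with keys sorted alphabetically, rather than in inline {} flow style.

import yaml

# hypothetical per-platform paths
locations = {
    "Windows": "C:\\configs\\my_project",
    "Darwin": "/configs/my_project",
    "Linux": "/configs/my_project",
}
print(yaml.safe_dump(locations, default_flow_style=False))
# Darwin: /configs/my_project
# Linux: /configs/my_project
# Windows: C:\configs\my_project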
Example #2
    def _submit_to_farm(self, dcc_state, tree):
        """
        Submits the job to the render farm.

        :param dict dcc_state: Information about the DCC and Toolkit.
        :param tree: The tree of items and tasks that has just been published.
        :type tree: :ref:`publish-api-tree`
        """
        # TODO: You are the render farm experts. We'll just mock the submission
        # here.

        submission_folder = "/var/tmp/webinar"
        if not os.path.exists(submission_folder):
            os.makedirs(submission_folder)

        tree.save_file(
            os.path.join(submission_folder, "publish2_tree.txt")
        )

        with open(
            os.path.join(submission_folder, "dcc_state.txt"), "wt"
        ) as f:
            # Make sure you call safe_dump, as accented characters
            # might not get encoded properly otherwise.
            yaml.safe_dump(dcc_state, f)

        self.logger.info(
            "Publishing context and state has been saved on disk for farm rendering.",
            extra={
                "action_show_folder": {
                    "path": submission_folder
                }
            }
        )
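
A quick, hedged sanity check of the safe_dump comment above: non-ASCII text is escaped into a quoted ASCII scalar by default, and safe_load recovers the original string intact.

import yaml

state = {"artist": "José"}  # hypothetical dcc_state entry with an accented character
text = yaml.safe_dump(state)  # value is emitted as an ASCII-escaped quoted scalar
assert yaml.safe_load(text) == state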
Example #3
    def write_config_info_file(self, config_descriptor):
        """
        Writes a cache file with info about where the config came from.

        :param config_descriptor: Config descriptor object
        """
        config_info_file = self.get_descriptor_metadata_file()

        with self._open_auto_created_yml(config_info_file) as fh:
            fh.write("# This file contains metadata describing what exact version\n")
            fh.write("# Of the config that was downloaded from Shotgun\n")
            fh.write("\n")
            fh.write("# Below follows details for the sg attachment that is\n")
            fh.write("# reflected within this local configuration.\n")
            fh.write("\n")

            metadata = {}
            # bake in which version of the deploy logic was used to push this config
            metadata["deploy_generation"] = constants.BOOTSTRAP_LOGIC_GENERATION
            # and include details about where the config came from
            metadata["config_descriptor"] = config_descriptor.get_dict()

            # write yaml
            yaml.safe_dump(metadata, fh)
            fh.write("\n")
            fh.write("# End of file.\n")
Example #4
    def _write_pipeline_config_file(self):
        """
        Writes out the config/core/pipeline_config.yml
        """
        # the pipeline config metadata
        # resolve project name and pipeline config name from shotgun.
        if self._pipeline_config_id:
            # look up pipeline config name and project name via the pc
            log.debug("Checking pipeline config in Shotgun...")

            sg_data = self._sg_connection.find_one(
                constants.PIPELINE_CONFIGURATION_ENTITY_TYPE,
                [["id", "is", self._pipeline_config_id]],
                ["code", "project.Project.tank_name"]
            )

            project_name = sg_data["project.Project.tank_name"] or constants.UNNAMED_PROJECT_NAME
            pipeline_config_name = sg_data["code"] or constants.UNMANAGED_PIPELINE_CONFIG_NAME

        elif self._project_id:
            # no pc. look up the project name via the project id
            log.debug("Checking project in Shotgun...")

            sg_data = self._sg_connection.find_one(
                "Project",
                [["id", "is", self._project_id]],
                ["tank_name"]
            )

            project_name = sg_data["tank_name"] or constants.UNNAMED_PROJECT_NAME
            pipeline_config_name = constants.UNMANAGED_PIPELINE_CONFIG_NAME

        else:
            project_name = "Site"
            pipeline_config_name = constants.UNMANAGED_PIPELINE_CONFIG_NAME

        pipeline_config_content = {
            "pc_id": self._pipeline_config_id,
            "pc_name": pipeline_config_name,
            "project_id": self._project_id,
            "project_name": project_name,
            "entry_point": self._entry_point,
            "published_file_entity_type": "PublishedFile",
            "use_bundle_cache": True,
            "bundle_cache_fallback_roots": self._bundle_cache_fallback_paths,
            "use_shotgun_path_cache": True
        }

        # write pipeline_configuration.yml
        pipeline_config_path = os.path.join(
            self._path.current_os,
            "config",
            "core",
            constants.PIPELINECONFIG_FILE
        )

        with self.__open_auto_created_yml(pipeline_config_path) as fh:
            yaml.safe_dump(pipeline_config_content, fh)
            fh.write("\n")
            fh.write("# End of file.\n")
Example #5
    def write_config_info_file(self, config_descriptor):
        """
        Writes a cache file with info about where the config came from.

        :param config_descriptor: Config descriptor object
        """
        config_info_file = self.get_descriptor_metadata_file()

        with self._open_auto_created_yml(config_info_file) as fh:
            fh.write("# This file contains metadata describing what exact version\n")
            fh.write("# Of the config that was downloaded from Shotgun\n")
            fh.write("\n")
            fh.write("# Below follows details for the sg attachment that is\n")
            fh.write("# reflected within this local configuration.\n")
            fh.write("\n")

            metadata = {}
            # bake in which version of the deploy logic was used to push this config
            metadata["deploy_generation"] = constants.BOOTSTRAP_LOGIC_GENERATION
            # and include details about where the config came from
            metadata["config_descriptor"] = config_descriptor.get_dict()

            # write yaml
            yaml.safe_dump(metadata, fh)
            fh.write("\n")
            fh.write("# End of file.\n")
Example #6
    def _test_read_env_var_in_pipeline_configuration_yml(self, folder_name, pipeline_config_data):
        """
        Ensures environment variables are properly translated for a given file format.

        :param folder_name: Name of the configuration to create on disk.
        :param pipeline_config_data: Data to insert into shotgun.yml
        """
        env_var_pipeline = os.path.join(
            self.tank_temp, folder_name
        )
        core_folder = os.path.join(env_var_pipeline, "config", "core")
        pipeline_configuration_yml_path = os.path.join(
            core_folder, "pipeline_configuration.yml"
        )

        os.makedirs(core_folder)

        with open(pipeline_configuration_yml_path, "w") as fh:
            yaml.safe_dump(pipeline_config_data, fh)

        with open(os.path.join(core_folder, "roots.yml"), "w") as fh:
            fh.write("{}")

        test_project_name = "test_project_name"
        test_project_id = 12345
        test_pc_id = 67890
        test_pc_name = "test_pc_name"
        # tank.pipeline_config is actually a local variable inside tank/__init__.py,
        # so get the class from somewhere else...

        with temp_env_var(
            SGTK_TEST_PROJECT_NAME=test_project_name,
            SGTK_TEST_PROJECT_ID=str(test_project_id),
            SGTK_TEST_PC_ID=str(test_pc_id),
            SGTK_TEST_PC_NAME=test_pc_name
        ):
            pc = tank.pipelineconfig_factory.PipelineConfiguration(
                env_var_pipeline
            )

        self.assertEqual(
            pc.get_name(),
            test_pc_name
        )

        self.assertEqual(
            pc.get_shotgun_id(),
            test_pc_id
        )

        self.assertEqual(
            pc.get_project_id(),
            test_project_id
        )

        self.assertEqual(
            pc.get_project_disk_name(),
            test_project_name
        )
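
temp_env_var is used above but not defined in this example. A minimal sketch of an equivalent context manager: it sets the given environment variables for the duration of the block and restores the previous state afterwards.

import os
import contextlib

@contextlib.contextmanager
def temp_env_var(**env_vars):
    # remember the current values (None means the variable was unset)
    saved = {name: os.environ.get(name) for name in env_vars}
    os.environ.update(env_vars)
    try:
        yield
    finally:
        for name, value in saved.items():
            if value is None:
                del os.environ[name]
            else:
                os.environ[name] = value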
Example #7
    def _write_pipeline_config_file(self):
        """
        Writes out the config/core/pipeline_config.yml
        """
        # the pipeline config metadata
        # resolve project name and pipeline config name from shotgun.
        if self._pipeline_config_id:
            # look up pipeline config name and project name via the pc
            log.debug("Checking pipeline config in Shotgun...")

            sg_data = self._sg_connection.find_one(
                constants.PIPELINE_CONFIGURATION_ENTITY_TYPE,
                [["id", "is", self._pipeline_config_id]],
                ["code", "project.Project.tank_name"])

            project_name = sg_data[
                "project.Project.tank_name"] or constants.UNNAMED_PROJECT_NAME
            pipeline_config_name = sg_data[
                "code"] or constants.UNMANAGED_PIPELINE_CONFIG_NAME

        elif self._project_id:
            # no pc. look up the project name via the project id
            log.debug("Checking project in Shotgun...")

            sg_data = self._sg_connection.find_one(
                "Project", [["id", "is", self._project_id]], ["tank_name"])

            project_name = sg_data[
                "tank_name"] or constants.UNNAMED_PROJECT_NAME
            pipeline_config_name = constants.UNMANAGED_PIPELINE_CONFIG_NAME

        else:
            project_name = "Site"
            pipeline_config_name = constants.UNMANAGED_PIPELINE_CONFIG_NAME

        pipeline_config_content = {
            "pc_id": self._pipeline_config_id,
            "pc_name": pipeline_config_name,
            "project_id": self._project_id,
            "project_name": project_name,
            "entry_point": self._entry_point,
            "published_file_entity_type": "PublishedFile",
            "use_bundle_cache": True,
            "bundle_cache_fallback_roots": self._bundle_cache_fallback_paths,
            "use_shotgun_path_cache": True
        }

        # write pipeline_configuration.yml
        pipeline_config_path = os.path.join(self._path.current_os, "config",
                                            "core",
                                            constants.PIPELINECONFIG_FILE)

        with self.__open_auto_created_yml(pipeline_config_path) as fh:
            yaml.safe_dump(pipeline_config_content, fh)
            fh.write("\n")
            fh.write("# End of file.\n")
Example #8
def _write_yaml_file(file_path, users_data):
    """
    Writes the yaml file at a given location.

    :param file_path: Where to write the users data
    :param users_data: Dictionary to write to disk.
    """
    old_umask = os.umask(0o077)
    try:
        with open(file_path, "w") as users_file:
            yaml.safe_dump(users_data, users_file)
    finally:
        os.umask(old_umask)
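
As a hedged illustration of the umask dance above, on a POSIX system: with a umask of 0o077 the group and other permission bits are masked out of the default 0o666 creation mode, so the file ends up readable and writable by the owner only.

import os
import stat
import tempfile

old_umask = os.umask(0o077)
try:
    path = os.path.join(tempfile.mkdtemp(), "users.yml")
    with open(path, "w") as users_file:
        users_file.write("{}")
    # 0o666 & ~0o077 == 0o600: rw for owner, nothing for group/other
    assert stat.S_IMODE(os.stat(path).st_mode) == 0o600
finally:
    os.umask(old_umask)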
Example #9
    def __write_data_file(self, fh, data):
        """
        Writes the yaml data to a supplied file handle

        :param fh: An open file handle to write to.
        :param data: yaml data structure to write
        """

        # the ruamel parser doesn't have 2.5 support so
        # only use it on 2.6+
        if self._use_ruamel_yaml_parser and not (sys.version_info < (2, 6)):
            # note that we are using the RoundTripDumper in order to
            # preserve the structure when writing the file to disk.
            #
            # the default_flow_style=False tells the parser to write
            # any modified values in multi-line form, e.g.
            #
            # foo:
            #   bar: 3
            #   baz: 4
            #
            # rather than
            #
            # foo: { bar: 3, baz: 4 }
            #
            # note that safe_dump is not needed when using the
            # roundtrip dumper, it will adopt a 'safe' behaviour
            # by default.
            from tank_vendor import ruamel_yaml
            ruamel_yaml.dump(data,
                             fh,
                             default_flow_style=False,
                             Dumper=ruamel_yaml.RoundTripDumper)
        else:
            # use pyyaml parser
            #
            # using safe_dump instead of dump ensures that we
            # don't serialize any non-std yaml content. In particular,
            # this causes issues if a unicode object containing a 7-bit
            # ascii string is passed as part of the data. in this case,
            # dump will write out a special format which is later on
            # *loaded in* as a unicode object, even if the content doesn't
            # need unicode handling. And this causes issues down the line
            # in toolkit code, assuming strings:
            #
            # >>> yaml.dump({"foo": u"bar"})
            # "{foo: !!python/unicode 'bar'}\n"
            # >>> yaml.safe_dump({"foo": u"bar"})
            # '{foo: bar}\n'
            #
            yaml.safe_dump(data, fh)
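
The !!python/unicode example in the comment is Python 2 specific, but the underlying point still holds on Python 3. A small hedged demonstration: yaml.dump will serialize arbitrary Python objects using !!python tags, while yaml.safe_dump rejects anything outside the standard YAML types.

import yaml

class Thing(object):
    pass

print(yaml.dump(Thing()))  # emits a "!!python/object:__main__.Thing" tagged node
try:
    yaml.safe_dump(Thing())
except yaml.representer.RepresenterError:
    print("safe_dump refuses non-standard YAML content")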
Example #10
    def __write_data_file(self, fh, data):
        """
        Writes the yaml data to a supplied file handle

        :param fh: An open file handle to write to.
        :param data: yaml data structure to write
        """

        # the ruamel parser doesn't have 2.5 support so
        # only use it on 2.6+
        if self._use_ruamel_yaml_parser and not (sys.version_info < (2, 6)):
            # note that we are using the RoundTripDumper in order to
            # preserve the structure when writing the file to disk.
            #
            # the default_flow_style=False tells the parser to write
            # any modified values in multi-line form, e.g.
            #
            # foo:
            #   bar: 3
            #   baz: 4
            #
            # rather than
            #
            # foo: { bar: 3, baz: 4 }
            #
            # note that safe_dump is not needed when using the
            # roundtrip dumper, it will adopt a 'safe' behaviour
            # by default.
            from tank_vendor import ruamel_yaml
            ruamel_yaml.dump(data,
                             fh,
                             default_flow_style=False,
                             Dumper=ruamel_yaml.RoundTripDumper)
        else:
            # use pyyaml parser
            #
            # using safe_dump instead of dump ensures that we
            # don't serialize any non-std yaml content. In particular,
            # this causes issues if a unicode object containing a 7-bit
            # ascii string is passed as part of the data. in this case,
            # dump will write out a special format which is later on
            # *loaded in* as a unicode object, even if the content doesn't
            # need unicode handling. And this causes issues down the line
            # in toolkit code, assuming strings:
            #
            # >>> yaml.dump({"foo": u"bar"})
            # "{foo: !!python/unicode 'bar'}\n"
            # >>> yaml.safe_dump({"foo": u"bar"})
            # '{foo: bar}\n'
            #
            yaml.safe_dump(data, fh)
Example #11
    def _update_core_api_descriptor(self):
        """
        Updates the core_api.yml descriptor file.
        """
        core_api_yaml_path = os.path.join(os.path.dirname(self._install_root),
                                          "config", "core", "core_api.yml")

        message = "# Shotgun Pipeline Toolkit configuration file. This file was automatically\n"\
                  "# created during the latest core update.\n"
        with open(core_api_yaml_path, "w") as f:
            f.writelines(message)
            yaml.safe_dump({"location": self._new_core_descriptor.get_dict()},
                           f,
                           default_flow_style=False)
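
For reference, a hedged sketch of the file this produces, using a hypothetical descriptor dict; the comment header is written first, then the location block in multi-line form:

import yaml

# hypothetical descriptor contents
descriptor_dict = {"type": "app_store", "name": "tk-core", "version": "v0.18.0"}

with open("/tmp/core_api.yml", "w") as f:
    f.writelines("# Shotgun Pipeline Toolkit configuration file. This file was automatically\n"
                 "# created during the latest core update.\n")
    yaml.safe_dump({"location": descriptor_dict}, f, default_flow_style=False)

# resulting file contents:
# # Shotgun Pipeline Toolkit configuration file. This file was automatically
# # created during the latest core update.
# location:
#   name: tk-core
#   type: app_store
#   version: v0.18.0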
Example #12
    def _update_metadata(self, updates):
        """
        Updates the pipeline configuration on disk with the passed in values.

        :param updates: Dictionary of values to update in the pipeline configuration
        """
        # get current settings
        curr_settings = self._get_metadata()

        # apply updates to existing cache
        curr_settings.update(updates)

        # write the record to disk
        pipe_config_sg_id_path = os.path.join(self.get_config_location(),
                                              "core",
                                              constants.PIPELINECONFIG_FILE)

        old_umask = os.umask(0)
        fh = None
        try:
            os.chmod(pipe_config_sg_id_path, 0o666)
            # and write the new file
            fh = open(pipe_config_sg_id_path, "wt")
            # using safe_dump instead of dump ensures that we
            # don't serialize any non-std yaml content. In particular,
            # this causes issues if a unicode object containing a 7-bit
            # ascii string is passed as part of the data. in this case,
            # dump will write out a special format which is later on
            # *loaded in* as a unicode object, even if the content doesn't
            # need unicode handling. And this causes issues down the line
            # in toolkit code, assuming strings:
            #
            # >>> yaml.dump({"foo": u"bar"})
            # "{foo: !!python/unicode 'bar'}\n"
            # >>> yaml.safe_dump({"foo": u"bar"})
            # '{foo: bar}\n'
            #
            yaml.safe_dump(curr_settings, fh)
        except Exception as exp:
            raise TankError("Could not write to configuration file '%s'. "
                            "Error reported: %s" %
                            (pipe_config_sg_id_path, exp))
        finally:
            if fh:
                fh.close()
            os.umask(old_umask)

        self._project_id = curr_settings.get("project_id")
        self._pc_id = curr_settings.get("pc_id")
        self._pc_name = curr_settings.get("pc_name")
Example #13
    def _update_core_api_descriptor(self):
        """
        Updates the core_api.yml descriptor file.
        """
        core_api_yaml_path = os.path.join(
            os.path.dirname(self._install_root), "config", "core", "core_api.yml"
        )

        message = "# Shotgun Pipeline Toolkit configuration file. This file was automatically\n"\
                  "# created during the latest core update.\n"
        with open(core_api_yaml_path, "w") as f:
            f.writelines(message)
            yaml.safe_dump(
                {"location": self._new_core_descriptor.get_dict()}, f,
                default_flow_style=False
            )
Example #14
    def write_shotgun_file(self, descriptor):
        """
        Writes config/core/shotgun.yml
        """

        source_config_sg_file = os.path.join(
            descriptor.get_path(),
            "core",
            constants.CONFIG_SHOTGUN_FILE
        )

        dest_config_sg_file = os.path.join(
            self._path.current_os,
            "config",
            "core",
            constants.CONFIG_SHOTGUN_FILE
        )

        # If there is a shotgun.yml file at the source location, read it
        # in as the default metadata.
        #
        # This makes it possible to centralize proxy settings in a shotgun.yml
        # that gets distributed every time a configuration is written.
        if os.path.exists(source_config_sg_file):
            log.debug("shotgun.yml found in the config at '%s'.", source_config_sg_file)
            with open(source_config_sg_file, "rb") as fh:
                # use safe_load; plain yaml.load without a Loader is deprecated
                metadata = yaml.safe_load(fh)
        else:
            log.debug(
                "File '%s' does not exist in the config. shotgun.yml will only contain the host.",
                source_config_sg_file
            )
            metadata = {}

        with self._open_auto_created_yml(dest_config_sg_file) as fh:
            # ensure the metadata has the host set. We shouldn't assume the shotgun.yml
            # file that can be distributed with the config has the host set, as it
            # could be used on two different Shotgun servers, for example a production
            # server and a staging server that are both hosted locally.
            metadata["host"] = self._sg_connection.base_url
            # write yaml
            yaml.safe_dump(metadata, fh)
            fh.write("\n")
            fh.write("# End of file.\n")

        log.debug("Wrote %s", dest_config_sg_file)
Example #15
    def update_roots_file(self, config_descriptor):
        """
        Updates roots.yml based on local storage defs in shotgun.

        :param config_descriptor: Config descriptor object
        """
        log.debug("Creating storage roots file...")

        # get list of storages in Shotgun
        sg_data = self._sg_connection.find("LocalStorage", [],
                                           fields=["id", "code"] +
                                           ShotgunPath.SHOTGUN_PATH_FIELDS)

        # organize them by name
        storage_by_name = {}
        for storage in sg_data:
            storage_by_name[storage["code"]] = storage

        # now write out roots data
        roots_data = {}

        for storage_name in config_descriptor.required_storages:

            if storage_name not in storage_by_name:
                raise TankBootstrapError(
                    "A '%s' storage is defined by %s but is "
                    "not defined in Shotgun." %
                    (storage_name, config_descriptor))
            storage_path = ShotgunPath.from_shotgun_dict(
                storage_by_name[storage_name])
            roots_data[storage_name] = storage_path.as_shotgun_dict()

        roots_file = os.path.join(self._path.current_os, "config", "core",
                                  constants.STORAGE_ROOTS_FILE)

        if os.path.exists(roots_file):
            # warn if this file already exists
            log.warning(
                "The file 'core/%s' exists in the configuration "
                "but will be overwritten with an auto generated file." %
                constants.STORAGE_ROOTS_FILE)

        with self._open_auto_created_yml(roots_file) as fh:
            yaml.safe_dump(roots_data, fh)
            fh.write("\n")
            fh.write("# End of file.\n")
Example #16
    def _test_read_env_var_in_pipeline_configuration_yml(
        self, folder_name, pipeline_config_data
    ):
        """
        Ensures environment variables are properly translated for a given file format.

        :param folder_name: Name of the configuration to create on disk.
        :param pipeline_config_data: Data to insert into shotgun.yml
        """
        env_var_pipeline = os.path.join(self.tank_temp, folder_name)
        core_folder = os.path.join(env_var_pipeline, "config", "core")
        pipeline_configuration_yml_path = os.path.join(
            core_folder, "pipeline_configuration.yml"
        )

        os.makedirs(core_folder)

        with open(pipeline_configuration_yml_path, "w") as fh:
            yaml.safe_dump(pipeline_config_data, fh)

        with open(os.path.join(core_folder, "roots.yml"), "w") as fh:
            fh.write("{}")

        test_project_name = "test_project_name"
        test_project_id = 12345
        test_pc_id = 67890
        test_pc_name = "test_pc_name"
        # tank.pipeline_config is actually a local variable inside tank/__init__.py,
        # so get the class from somewhere else...

        with temp_env_var(
            SGTK_TEST_PROJECT_NAME=test_project_name,
            SGTK_TEST_PROJECT_ID=str(test_project_id),
            SGTK_TEST_PC_ID=str(test_pc_id),
            SGTK_TEST_PC_NAME=test_pc_name,
        ):
            pc = tank.pipelineconfig_factory.PipelineConfiguration(env_var_pipeline)

        self.assertEqual(pc.get_name(), test_pc_name)

        self.assertEqual(pc.get_shotgun_id(), test_pc_id)

        self.assertEqual(pc.get_project_id(), test_project_id)

        self.assertEqual(pc.get_project_disk_name(), test_project_name)
Example #17
    def _update_metadata(self, updates):
        """
        Updates the pipeline configuration on disk with the passed in values.

        :param updates: Dictionary of values to update in the pipeline configuration
        """
        # get current settings
        curr_settings = self._get_metadata()

        # apply updates to existing cache
        curr_settings.update(updates)

        # write the record to disk
        pipe_config_sg_id_path = self._get_pipeline_config_file_location()

        old_umask = os.umask(0)
        fh = None
        try:
            os.chmod(pipe_config_sg_id_path, 0o666)
            # and write the new file
            fh = open(pipe_config_sg_id_path, "wt")
            # using safe_dump instead of dump ensures that we
            # don't serialize any non-std yaml content. In particular,
            # this causes issues if a unicode object containing a 7-bit
            # ascii string is passed as part of the data. in this case,
            # dump will write out a special format which is later on
            # *loaded in* as a unicode object, even if the content doesn't
            # need unicode handling. And this causes issues down the line
            # in toolkit code, assuming strings:
            #
            # >>> yaml.dump({"foo": u"bar"})
            # "{foo: !!python/unicode 'bar'}\n"
            # >>> yaml.safe_dump({"foo": u"bar"})
            # '{foo: bar}\n'
            #
            yaml.safe_dump(curr_settings, fh)
        except Exception as exp:
            raise TankError("Could not write to configuration file '%s'. "
                            "Error reported: %s" % (pipe_config_sg_id_path, exp))
        finally:
            if fh:
                fh.close()
            os.umask(old_umask)

        self._project_id = curr_settings.get("project_id")
        self._pc_id = curr_settings.get("pc_id")
        self._pc_name = curr_settings.get("pc_name")
Example #18
    def write_shotgun_file(self, descriptor):
        """
        Writes config/core/shotgun.yml
        """

        source_config_sg_file = os.path.join(
            descriptor.get_path(),
            "core",
            constants.CONFIG_SHOTGUN_FILE
        )

        dest_config_sg_file = os.path.join(
            self._path.current_os,
            "config",
            "core",
            constants.CONFIG_SHOTGUN_FILE
        )

        # If there is a shotgun.yml file at the source location, read it
        # in as the default metadata.
        #
        # This makes it possible to centralize proxy settings in a shotgun.yml
        # that gets distributed every time a configuration is written.
        if os.path.exists(source_config_sg_file):
            log.debug("shotgun.yml found in the config at '%s'.", source_config_sg_file)
            with open(source_config_sg_file, "rb") as fh:
                # use safe_load; plain yaml.load without a Loader is deprecated
                metadata = yaml.safe_load(fh)
        else:
            log.debug(
                "File '%s' does not exist in the config. shotgun.yml will only contain the host.",
                source_config_sg_file
            )
            metadata = {}

        with filesystem.auto_created_yml(dest_config_sg_file) as fh:
            # ensure the metadata has the host set. We shouldn't assume the shotgun.yml
            # file that can be distributed with the config has the host set, as it
            # could be used on two different Shotgun servers, for example a production
            # server and a staging server that are both hosted locally.
            metadata["host"] = connection.sanitize_url(self._sg_connection.base_url)
            # write yaml
            yaml.safe_dump(metadata, fh)

        log.debug("Wrote %s", dest_config_sg_file)
Example #19
    def update_roots_file(self, config_descriptor):
        """
        Updates roots.yml based on local storage defs in shotgun.

        :param config_descriptor: Config descriptor object
        """
        log.debug("Creating storage roots file...")

        # get list of storages in Shotgun
        sg_data = self._sg_connection.find(
            "LocalStorage",
            [],
            fields=["id", "code"] + ShotgunPath.SHOTGUN_PATH_FIELDS)

        # organize them by name
        storage_by_name = {}
        for storage in sg_data:
            storage_by_name[storage["code"]] = storage

        # now write out roots data
        roots_data = {}

        for storage_name in config_descriptor.required_storages:

            if storage_name not in storage_by_name:
                raise TankBootstrapError(
                    "A '%s' storage is defined by %s but is "
                    "not defined in Shotgun." % (storage_name, config_descriptor)
                )
            storage_path = ShotgunPath.from_shotgun_dict(storage_by_name[storage_name])
            roots_data[storage_name] = storage_path.as_shotgun_dict()

        roots_file = os.path.join(
            self._path.current_os,
            "config",
            "core",
            constants.STORAGE_ROOTS_FILE
        )

        with self._open_auto_created_yml(roots_file) as fh:
            yaml.safe_dump(roots_data, fh)
            fh.write("\n")
            fh.write("# End of file.\n")
Example #20
    def write_shotgun_file(self):
        """
        Writes config/core/shotgun.yml
        """
        sg_file = os.path.join(self._path.current_os, "config", "core",
                               constants.CONFIG_SHOTGUN_FILE)

        with self._open_auto_created_yml(sg_file) as fh:

            metadata = {}
            # bake in the shotgun site url
            metadata["host"] = self._sg_connection.base_url
            # and the http proxy used to connect to it
            metadata["http_proxy"] = self._sg_connection.config.raw_http_proxy
            # write yaml
            yaml.safe_dump(metadata, fh)
            fh.write("\n")
            fh.write("# End of file.\n")

        log.debug("Wrote %s" % sg_file)
Example #21
    def _update_pipeline_config(self, updates):
        """
        Updates the pipeline configuration on disk with the passed in values.

        :param updates: Dictionary of values to update in the pipeline configuration
        """
        # get current settings
        curr_settings = pipelineconfig_utils.get_metadata(self._pc_root)

        # add path cache setting
        curr_settings.update(updates)

        # write the record to disk
        pipe_config_sg_id_path = os.path.join(self._pc_root, "config", "core",
                                              "pipeline_configuration.yml")

        old_umask = os.umask(0)
        fh = None
        try:
            os.chmod(pipe_config_sg_id_path, 0o666)
            # and write the new file
            fh = open(pipe_config_sg_id_path, "wt")
            # using safe_dump instead of dump ensures that we
            # don't serialize any non-std yaml content. In particular,
            # this causes issues if a unicode object containing a 7-bit
            # ascii string is passed as part of the data. in this case,
            # dump will write out a special format which is later on
            # *loaded in* as a unicode object, even if the content doesn't
            # need unicode handling. And this causes issues down the line
            # in toolkit code, assuming strings:
            #
            # >>> yaml.dump({"foo": u"bar"})
            # "{foo: !!python/unicode 'bar'}\n"
            # >>> yaml.safe_dump({"foo": u"bar"})
            # '{foo: bar}\n'
            #
            yaml.safe_dump(curr_settings, fh)
        except Exception as exp:
            raise TankError(
                "Could not write to pipeline configuration settings file %s. "
                "Error reported: %s" % (pipe_config_sg_id_path, exp))
        finally:
            if fh:
                fh.close()
            os.umask(old_umask)
Example #22
    def _update_pipeline_config(self, updates):
        """
        Updates the pipeline configuration on disk with the passed in values.

        :param updates: Dictionary of values to update in the pipeline configuration
        """
        # get current settings
        curr_settings = pipelineconfig_utils.get_metadata(self._pc_root)

        # add path cache setting
        curr_settings.update(updates)

        # write the record to disk
        pipe_config_sg_id_path = os.path.join(self._pc_root, "config", "core",
                                              "pipeline_configuration.yml")

        old_umask = os.umask(0)
        fh = None
        try:
            os.chmod(pipe_config_sg_id_path, 0o666)
            # and write the new file
            fh = open(pipe_config_sg_id_path, "wt")
            # using safe_dump instead of dump ensures that we
            # don't serialize any non-std yaml content. In particular,
            # this causes issues if a unicode object containing a 7-bit
            # ascii string is passed as part of the data. in this case,
            # dump will write out a special format which is later on
            # *loaded in* as a unicode object, even if the content doesn't
            # need unicode handling. And this causes issues down the line
            # in toolkit code, assuming strings:
            #
            # >>> yaml.dump({"foo": u"bar"})
            # "{foo: !!python/unicode 'bar'}\n"
            # >>> yaml.safe_dump({"foo": u"bar"})
            # '{foo: bar}\n'
            #
            yaml.safe_dump(curr_settings, fh)
        except Exception as exp:
            raise TankError("Could not write to pipeline configuration settings file %s. "
                            "Error reported: %s" % (pipe_config_sg_id_path, exp))
        finally:
            if fh:
                fh.close()
            os.umask(old_umask)
Example #23
    def _update_roots_file(self):
        """
        Updates roots.yml based on local storage defs in shotgun.
        """
        log.debug("Creating storage roots file...")

        # get list of storages in Shotgun
        sg_data = self._sg_connection.find("LocalStorage", [],
                                           fields=["id", "code"] +
                                           ShotgunPath.SHOTGUN_PATH_FIELDS)

        # organize them by name
        storage_by_name = {}
        for storage in sg_data:
            storage_by_name[storage["code"]] = storage

        # now write out roots data
        roots_data = {}

        for storage_name in self._descriptor.required_storages:

            if storage_name not in storage_by_name:
                raise TankBootstrapError(
                    "A '%s' storage is defined by %s but is "
                    "not defined in Shotgun." %
                    (storage_name, self._descriptor))
            storage_path = ShotgunPath.from_shotgun_dict(
                storage_by_name[storage_name])
            roots_data[storage_name] = storage_path.as_shotgun_dict()

        roots_file = os.path.join(self._path.current_os, "config", "core",
                                  constants.STORAGE_ROOTS_FILE)

        with self.__open_auto_created_yml(roots_file) as fh:
            yaml.safe_dump(roots_data, fh)
            fh.write("\n")
            fh.write("# End of file.\n")
Example #24
    def _write_shotgun_file(self):
        """
        Writes config/core/shotgun.yml
        """
        sg_file = os.path.join(
            self._path.current_os,
            "config",
            "core",
            constants.CONFIG_SHOTGUN_FILE
        )

        with self.__open_auto_created_yml(sg_file) as fh:

            metadata = {}
            # bake in the shotgun site url
            metadata["host"] = self._sg_connection.base_url
            # and the http proxy used to connect to it
            metadata["http_proxy"] = self._sg_connection.config.raw_http_proxy
            # write yaml
            yaml.safe_dump(metadata, fh)
            fh.write("\n")
            fh.write("# End of file.\n")

        log.debug("Wrote %s" % sg_file)
Example #25
    def write_pipeline_config_file(
        self,
        pipeline_config_id,
        project_id,
        plugin_id,
        bundle_cache_fallback_paths,
        source_descriptor
    ):
        """
        Writes out the pipeline configuration file config/core/pipeline_config.yml

        This will populate all relevant parameters required for a toolkit runtime setup.
        Project and pipeline configuration names will be resolved from Shotgun.

        :param pipeline_config_id: Pipeline config id or None for an unmanaged config.
        :param project_id: Project id or None for the site config or for a baked config.
        :param plugin_id: Plugin id string to identify the scope for a particular plugin
                          or integration. For more information,
                          see :meth:`~sgtk.bootstrap.ToolkitManager.plugin_id`. For
                          non-plugin based toolkit projects, this value is None.
        :param bundle_cache_fallback_paths: List of bundle cache fallback paths.
        :param source_descriptor: Descriptor object used to identify
            which descriptor the pipeline configuration originated from.
            For configurations where this source may not be directly accessible,
            (e.g. baked configurations), this can be set to ``None``.

        :returns: Path to the configuration file that was written out.
        """
        if project_id:
            # Look up the project name via the project id
            log.debug("Checking project in Shotgun...")
            sg_data = self._sg_connection.find_one(
                "Project",
                [["id", "is", project_id]],
                ["tank_name"]
            )

            # When the given project id cannot be found, raise a meaningful exception.
            if not sg_data:
                msg = "Unknown project id %s" % project_id
                log.debug("Raising ValueError('%s')" % msg)
                raise ValueError(msg)

            project_name = sg_data["tank_name"] or constants.UNNAMED_PROJECT_NAME
        else:
            project_name = constants.UNNAMED_PROJECT_NAME

        # the pipeline config metadata
        # resolve project name and pipeline config name from shotgun.
        if pipeline_config_id:
            # look up pipeline config name and project name via the pc
            log.debug("Checking pipeline config in Shotgun...")

            sg_data = self._sg_connection.find_one(
                constants.PIPELINE_CONFIGURATION_ENTITY_TYPE,
                [["id", "is", pipeline_config_id]],
                ["code"]
            )
            pipeline_config_name = sg_data["code"] or constants.UNMANAGED_PIPELINE_CONFIG_NAME
        elif project_id:
            pipeline_config_name = constants.UNMANAGED_PIPELINE_CONFIG_NAME
        else:
            # this is either a site config or a baked config.
            # in the latter case, the project name will be overridden at
            # runtime (along with many other parameters).
            project_name = "Site"
            pipeline_config_name = constants.UNMANAGED_PIPELINE_CONFIG_NAME

        pipeline_config_content = {
            "pc_id": pipeline_config_id,
            "pc_name": pipeline_config_name,
            "project_id": project_id,
            "project_name": project_name,
            "plugin_id": plugin_id,
            "published_file_entity_type": "PublishedFile",
            "use_bundle_cache": True,
            "bundle_cache_fallback_roots": bundle_cache_fallback_paths,
            "use_shotgun_path_cache": True,
        }

        if source_descriptor:
            pipeline_config_content["source_descriptor"] = source_descriptor.get_dict()

        # write pipeline_configuration.yml
        pipeline_config_path = os.path.join(
            self._path.current_os,
            "config",
            "core",
            constants.PIPELINECONFIG_FILE
        )

        if os.path.exists(pipeline_config_path):
            # warn if this file already exists
            log.warning(
                "The file 'core/%s' exists in the configuration "
                "but will be overwritten with an auto generated file." % constants.PIPELINECONFIG_FILE
            )

        with filesystem.auto_created_yml(pipeline_config_path) as fh:
            yaml.safe_dump(pipeline_config_content, fh)

        return pipeline_config_path
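
One hedged caveat when reading the files these methods produce: yaml.safe_dump sorts mapping keys alphabetically by default, so the key order on disk will not match the order of the pipeline_config_content literal above.

import yaml

print(yaml.safe_dump({"pc_id": 123, "bundle_cache_fallback_roots": [], "pc_name": "Primary"}))
# bundle_cache_fallback_roots: []
# pc_id: 123
# pc_name: Primary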
Example #26
def run_project_setup(log, sg, setup_params):
    """
    Execute the project setup.
    No validation is happening at this point - ensure that you have run the necessary validation
    methods in the parameters object.

    :param log: python logger object
    :param sg: shotgun api connection to the associated site
    :param setup_params: Parameters object which holds gathered project settings
    """
    log.info("")
    log.info("Starting project setup.")

    # get the location of the configuration
    config_location_curr_os = setup_params.get_configuration_location(
        sys.platform)
    config_location_mac = setup_params.get_configuration_location("darwin")
    config_location_linux = setup_params.get_configuration_location("linux2")
    config_location_win = setup_params.get_configuration_location("win32")

    # project id
    project_id = setup_params.get_project_id()
    if project_id:
        sg_project_link = {"id": project_id, "type": "Project"}
    else:
        sg_project_link = None

    # get all existing pipeline configurations
    setup_params.report_progress_from_installer(
        "Checking Pipeline Configurations...")

    pcs = sg.find(constants.PIPELINE_CONFIGURATION_ENTITY,
                  [["project", "is", sg_project_link]],
                  ["code", "linux_path", "windows_path", "mac_path"])

    if len(pcs) > 0:
        if setup_params.get_force_setup():
            # if we have the force flag enabled, remove any pipeline configurations
            for x in pcs:
                log.warning(
                    "Force mode: Deleting old pipeline configuration %s..." %
                    x["code"])
                sg.delete(constants.PIPELINE_CONFIGURATION_ENTITY, x["id"])

        elif not setup_params.get_auto_path_mode():
            # this is a normal setup, e.g. not with the force flag on
            # nor an auto-path where each machine effectively manages its own config
            # for this case, we don't allow the process to proceed if a config exists
            raise TankError(
                "Cannot set up this project! Pipeline configuration entries already exist in Shotgun."
            )

        else:
            # auto path mode
            # make sure that all PCs have empty paths set, either None values or ""
            for x in pcs:
                if x["linux_path"] or x["windows_path"] or x["mac_path"]:
                    raise TankError(
                        "Cannot set up this project! Non-auto-path style pipeline "
                        "configuration entries already exist in Shotgun.")

    # first do disk structure setup, this is most likely to fail.
    setup_params.report_progress_from_installer(
        "Creating main folder structure...")
    log.info("Installing configuration into '%s'..." % config_location_curr_os)
    if not os.path.exists(config_location_curr_os):
        # note that we have already validated that creation is possible
        os.makedirs(config_location_curr_os, 0o775)

    # create pipeline config base folder structure
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "cache"), 0o777)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "config"), 0o775)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install"), 0o775)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install", "core"), 0o777)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install", "core", "python"),
        0o777)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install", "core.backup"), 0o777,
        True)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install", "engines"), 0o777,
        True)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install", "apps"), 0o777, True)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install", "frameworks"), 0o777,
        True)

    # copy the configuration into place
    setup_params.report_progress_from_installer(
        "Setting up template configuration...")
    setup_params.create_configuration(
        os.path.join(config_location_curr_os, "config"))

    # copy the tank binaries to the top of the config
    setup_params.report_progress_from_installer(
        "Copying binaries and API proxies...")
    log.debug("Copying Toolkit binaries...")
    core_api_root = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "..", "..", ".."))
    root_binaries_folder = os.path.join(core_api_root, "setup",
                                        "root_binaries")
    for file_name in os.listdir(root_binaries_folder):
        src_file = os.path.join(root_binaries_folder, file_name)
        tgt_file = os.path.join(config_location_curr_os, file_name)
        shutil.copy(src_file, tgt_file)
        os.chmod(tgt_file, 0o775)

    # copy the python stubs
    log.debug("Copying python stubs...")
    tank_proxy = os.path.join(core_api_root, "setup", "tank_api_proxy")
    filesystem.copy_folder(
        tank_proxy,
        os.path.join(config_location_curr_os, "install", "core", "python"))

    # specify the parent files in install/core/core_PLATFORM.cfg
    log.debug("Creating core redirection config files...")
    setup_params.report_progress_from_installer(
        "Writing configuration files...")

    core_path = os.path.join(config_location_curr_os, "install", "core",
                             "core_Darwin.cfg")
    core_location = setup_params.get_associated_core_path("darwin")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    core_path = os.path.join(config_location_curr_os, "install", "core",
                             "core_Linux.cfg")
    core_location = setup_params.get_associated_core_path("linux2")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    core_path = os.path.join(config_location_curr_os, "install", "core",
                             "core_Windows.cfg")
    core_location = setup_params.get_associated_core_path("win32")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    # write the install_location file for our new setup
    sg_code_location = os.path.join(config_location_curr_os, "config", "core",
                                    "install_location.yml")

    # if we are basing our setup on an existing project setup, make sure we can write to the file.
    if os.path.exists(sg_code_location):
        os.chmod(sg_code_location, 0o666)

    fh = open(sg_code_location, "wt")
    fh.write("# Shotgun Pipeline Toolkit configuration file\n")
    fh.write("# This file was automatically created by setup_project\n")
    fh.write("# This file reflects the paths in the primary pipeline\n")
    fh.write("# configuration defined for this project.\n")
    fh.write("\n")
    fh.write("Windows: '%s'\n" % config_location_win)
    fh.write("Darwin: '%s'\n" % config_location_mac)
    fh.write("Linux: '%s'\n" % config_location_linux)
    fh.write("\n")
    fh.write("# End of file.\n")
    fh.close()

    # write the roots.yml file in the config to match our settings
    roots_data = {}
    default_storage_name = setup_params.default_storage_name
    for storage_name in setup_params.get_required_storages():

        roots_data[storage_name] = {
            "windows_path": setup_params.get_storage_path(storage_name, "win32"),
            "linux_path": setup_params.get_storage_path(storage_name, "linux2"),
            "mac_path": setup_params.get_storage_path(storage_name, "darwin")
        }

        # if this is the default storage, ensure it is explicitly marked in the
        # roots file
        if default_storage_name and storage_name == default_storage_name:
            roots_data[storage_name]["default"] = True

        # if there is a SG local storage associated with this root, make sure
        # it is explicit in the roots file. this allows roots to exist that
        # are not named the same as the storage in SG
        sg_storage_id = setup_params.get_storage_shotgun_id(storage_name)
        if sg_storage_id is not None:
            roots_data[storage_name]["shotgun_storage_id"] = sg_storage_id

    storage_roots = StorageRoots.from_metadata(roots_data)
    config_folder = os.path.join(config_location_curr_os, "config")
    storage_roots.write(sg, config_folder, storage_roots)

    # now ensure there is a tank folder in every storage
    setup_params.report_progress_from_installer(
        "Setting up project storage folders...")
    for storage_name in setup_params.get_required_storages():

        log.info("Setting up %s storage..." % storage_name)

        # get the project path for this storage
        current_os_path = setup_params.get_project_path(
            storage_name, sys.platform)
        log.debug("Project path: %s" % current_os_path)

    # Create Project.tank_name and PipelineConfiguration records in Shotgun
    #
    # This logic has some special complexity when the auto_path mode is in use.

    setup_params.report_progress_from_installer("Registering in Shotgun...")

    if setup_params.get_auto_path_mode():
        # first, check the project name. If there is no project name in Shotgun, populate it
        # with the project name which is specified via the project name parameter.
        # if there isn't yet an entry, create it.
        # This is consistent with the behaviour we anticipate when we
        # switch from auto_path to a zip file based approach.

        project_name = setup_params.get_project_disk_name()

        # Site configs are not associated to a project, so no need to look for tank_name on a project
        # that doesn't exist.
        if project_id is not None:
            data = sg.find_one("Project", [["id", "is", project_id]],
                               ["tank_name"])
            if data["tank_name"] is None:
                log.info("Registering project in Shotgun...")
                log.debug("Shotgun: Setting Project.tank_name to %s" %
                          project_name)
                sg.update("Project", project_id, {"tank_name": project_name})

            else:
                # there is already a name. Check that it matches the name in the project params
                # if not, then use the existing name and issue a warning!
                if data["tank_name"] != project_name:
                    log.warning(
                        "You have supplied the project disk name '%s' as part of the project setup "
                        "parameters, however the name '%s' has already been registered in Shotgun for "
                        "this project. This name will be used instead of the suggested disk "
                        "name." % (project_name, data["tank_name"]))
                    project_name = data["tank_name"]

        log.info("Creating Pipeline Configuration in Shotgun...")
        # this is an auto-path project, meaning that shotgun doesn't store the location
        # to the pipeline configuration. Because an auto-path location is often set up
        # on multiple machines, check first if the entry exists and in that case skip creation
        data = sg.find_one(
            constants.PIPELINE_CONFIGURATION_ENTITY,
            [["code", "is", constants.PRIMARY_PIPELINE_CONFIG_NAME],
             ["project", "is", sg_project_link]], ["id"])

        if data is None:
            log.info("Creating Pipeline Configuration in Shotgun...")
            data = {
                "project": sg_project_link,
                "code": constants.PRIMARY_PIPELINE_CONFIG_NAME
            }
            pc_entity = sg.create(constants.PIPELINE_CONFIGURATION_ENTITY,
                                  data)
            pipeline_config_id = pc_entity["id"]
            log.debug("Created data: %s" % pc_entity)
        else:
            pipeline_config_id = data["id"]

    else:
        # normal mode.
        if project_id:
            log.info("Registering project in Shotgun...")
            project_name = setup_params.get_project_disk_name()
            log.debug("Shotgun: Setting Project.tank_name to %s" %
                      project_name)
            sg.update("Project", project_id, {"tank_name": project_name})

        log.info("Creating Pipeline Configuration in Shotgun...")
        data = {
            "project": sg_project_link,
            "linux_path": config_location_linux,
            "windows_path": config_location_win,
            "mac_path": config_location_mac,
            "code": constants.PRIMARY_PIPELINE_CONFIG_NAME
        }

        # create pipeline configuration record
        pc_entity = sg.create(constants.PIPELINE_CONFIGURATION_ENTITY, data)
        pipeline_config_id = pc_entity["id"]
        log.debug("Created data: %s" % pc_entity)

    # write the record to disk
    pipe_config_sg_id_path = os.path.join(config_location_curr_os, "config",
                                          "core",
                                          constants.PIPELINECONFIG_FILE)
    log.debug("Writing to pc cache file %s" % pipe_config_sg_id_path)

    # determine the entity type to use for Published Files:
    pf_entity_type = _get_published_file_entity_type(log, sg)

    data = {}
    data["project_name"] = project_name
    data["pc_id"] = pipeline_config_id
    data["project_id"] = project_id
    data["pc_name"] = constants.PRIMARY_PIPELINE_CONFIG_NAME
    data["published_file_entity_type"] = pf_entity_type

    # all 0.15+ projects are pushing folders to Shotgun by default
    data["use_shotgun_path_cache"] = True

    try:
        with open(pipe_config_sg_id_path, "wt") as fh:
            # using safe_dump instead of dump ensures that we
            # don't serialize any non-std yaml content. In particular,
            # this causes issues if a unicode object containing a 7-bit
            # ascii string is passed as part of the data. in this case,
            # dump will write out a special format which is later on
            # *loaded in* as a unicode object, even if the content doesn't
            # need unicode handling. And this causes issues down the line
            # in toolkit code, assuming strings:
            #
            # >>> yaml.dump({"foo": u"bar"})
            # "{foo: !!python/unicode 'bar'}\n"
            # >>> yaml.safe_dump({"foo": u"bar"})
            # '{foo: bar}\n'
            #
            yaml.safe_dump(data, fh)
    except Exception as exp:
        raise TankError(
            "Could not write to pipeline configuration cache file %s. "
            "Error reported: %s" % (pipe_config_sg_id_path, exp))

    ##########################################################################################
    # install apps

    # We now have a fully functional tank setup! Time to start it up...
    tk = sgtk_from_path(config_location_curr_os)
    log.debug("Instantiated tk instance: %s" % tk)
    pc = tk.pipeline_configuration

    # each entry in the config template contains instructions about which version of the app
    # to use. First loop over all environments and gather all descriptors we should download,
    # then go ahead and download and post-install them

    log.info("Downloading and installing apps...")

    # pass 1 - populate list of all descriptors
    descriptors = []
    for env_name in pc.get_environments():

        env_obj = pc.get_environment(env_name)

        for engine in env_obj.get_engines():
            descriptors.append(env_obj.get_engine_descriptor(engine))

            for app in env_obj.get_apps(engine):
                descriptors.append(env_obj.get_app_descriptor(engine, app))

        for framework in env_obj.get_frameworks():
            descriptors.append(env_obj.get_framework_descriptor(framework))

    # pass 2 - download all apps
    num_descriptors = len(descriptors)
    for idx, descriptor in enumerate(descriptors):

        # note that we push percentages here to the progress bar callback
        # going from 0 to 100
        progress = int(float(idx) / num_descriptors * 100)
        setup_params.report_progress_from_installer("Downloading apps...",
                                                    progress)

        if not descriptor.exists_local():
            log.info(
                "Downloading %s to the local Toolkit install location..." %
                descriptor)
            descriptor.download_local()

        else:
            log.info("Item %s is already locally installed." % descriptor)

    # create required shotgun fields
    setup_params.report_progress_from_installer("Running post install...")
    for descriptor in descriptors:
        descriptor.ensure_shotgun_fields_exist(tk)
        # run post install hook
        descriptor.run_post_install(tk)

    ##########################################################################################
    # post processing of the install

    # run after project create script if it exists
    setup_params.report_progress_from_installer(
        "Running post-setup scripts...")
    after_script_path = os.path.join(config_location_curr_os, "config",
                                     "after_project_create.py")
    if os.path.exists(after_script_path):
        log.info("Found a post-install script %s" % after_script_path)
        log.info("Executing post-install commands...")
        sys.path.insert(0, os.path.dirname(after_script_path))
        try:
            import after_project_create
            after_project_create.create(sg=sg, project_id=project_id, log=log)
        except Exception as e:
            if ("API read() invalid/missing string entity" in e.__str__()
                    and "\"type\"=>\"TankType\"" in e.__str__()):
                # Handle a specific case where an old version of the
                # after_project_create script set up TankType entities which
                # are now disabled following the migration to the
                # new PublishedFileType entity
                log.info("")
                log.warning(
                    "The post install script failed to complete.  This is most likely because it "
                    "is from an old configuration that is attempting to create 'TankType' entities "
                    "which are now disabled in Shotgun.")
            else:
                log.info("")
                log.error("The post install script failed to complete: %s" % e)
        else:
            log.info("Post install phase complete!")
        finally:
            sys.path.pop(0)

    log.info("")
    log.info("Your Toolkit Project has been fully set up.")
    log.info("")
Example #27
    def write(cls, sg_connection, config_folder, storage_roots):
        """
        Given a ``StorageRoots`` object, write its metadata to the standard
        roots location within the supplied config folder. The method will write
        the corresponding local storage paths to the file as defined in Shotgun.
        This action will overwrite any existing storage roots file defined by
        the configuration.

        :param sg_connection: An existing SG connection, used to query local
            storage entities to ensure paths are up-to-date when the file is
            written.
        :param config_folder: The configuration folder under which the required
            roots file is written.
        :param storage_roots: A ``StorageRoots`` object instance that defines
            the required roots.
        """

        (local_storage_lookup,
         unmapped_roots) = storage_roots.get_local_storages(sg_connection)

        roots_file = os.path.join(config_folder, cls.STORAGE_ROOTS_FILE_PATH)

        log.debug("Writing storage roots to: %s" % (roots_file, ))

        # raise an error if there are any roots that can not be mapped to SG
        # local storage entries
        if unmapped_roots:
            raise TankError(
                "The following storages are defined by %s but can not be "
                "mapped to a local storage in Shotgun: %s" %
                (roots_file, ", ".join(unmapped_roots)))

        if os.path.exists(roots_file):
            # warn if this file already exists
            log.warning(
                "The file '%s' exists in the configuration "
                "but will be overwritten with an auto generated file." %
                (roots_file, ))

        # build up a new metadata dict
        roots_metadata = storage_roots.metadata

        for root_name, root_info in storage_roots:

            # get the cached SG storage dict
            sg_local_storage = local_storage_lookup[root_name]

            # get the local storage as a ShotgunPath object
            storage_sg_path = ShotgunPath.from_shotgun_dict(sg_local_storage)

            # update the root's metadata with the dictionary of all
            # sys.platform-style paths
            root_info.update(storage_sg_path.as_shotgun_dict())

        log.debug("Writing storage roots metadata: %s" % (roots_metadata, ))

        # write the new metadata to disk
        with filesystem.auto_created_yml(roots_file) as fh:
            yaml.safe_dump(roots_metadata, fh, default_flow_style=False)

        log.debug("Finished writing storage roots file: %s" % (roots_file, ))
Example #28
    def write_pipeline_config_file(self, pipeline_config_id, project_id, plugin_id, bundle_cache_fallback_paths):
        """
        Writes out the pipeline configuration file config/core/pipeline_config.yml

        This will populate all relevant parameters required for a toolkit runtime setup.
        Project and pipeline configuration names will be resolved from Shotgun.

        :param pipeline_config_id: Pipeline config id or None for an unmanaged config.
        :param project_id: Project id or None for the site config or for a baked config.
        :param plugin_id: Plugin id string to identify the scope for a particular plugin
                          or integration. For more information,
                          see :meth:`~sgtk.bootstrap.ToolkitManager.plugin_id`. For
                          non-plugin based toolkit projects, this value is None.
        :param bundle_cache_fallback_paths: List of bundle cache fallback paths.
        """
        # the pipeline config metadata
        # resolve project name and pipeline config name from shotgun.
        if pipeline_config_id:
            # look up pipeline config name and project name via the pc
            log.debug("Checking pipeline config in Shotgun...")

            sg_data = self._sg_connection.find_one(
                constants.PIPELINE_CONFIGURATION_ENTITY_TYPE,
                [["id", "is", pipeline_config_id]],
                ["code", "project.Project.tank_name"]
            )

            project_name = sg_data["project.Project.tank_name"] or constants.UNNAMED_PROJECT_NAME
            pipeline_config_name = sg_data["code"] or constants.UNMANAGED_PIPELINE_CONFIG_NAME

        elif project_id:
            # no pc. look up the project name via the project id
            log.debug("Checking project in Shotgun...")

            sg_data = self._sg_connection.find_one(
                "Project",
                [["id", "is", project_id]],
                ["tank_name"]
            )

            project_name = sg_data["tank_name"] or constants.UNNAMED_PROJECT_NAME
            pipeline_config_name = constants.UNMANAGED_PIPELINE_CONFIG_NAME

        else:
            # this is either a site config or a baked config.
            # in the latter case, the project name will be overridden at
            # runtime (along with many other parameters).
            project_name = "Site"
            pipeline_config_name = constants.UNMANAGED_PIPELINE_CONFIG_NAME

        pipeline_config_content = {
            "pc_id": pipeline_config_id,
            "pc_name": pipeline_config_name,
            "project_id": project_id,
            "project_name": project_name,
            "plugin_id": plugin_id,
            "published_file_entity_type": "PublishedFile",
            "use_bundle_cache": True,
            "bundle_cache_fallback_roots": bundle_cache_fallback_paths,
            "use_shotgun_path_cache": True
        }

        # write pipeline_configuration.yml
        pipeline_config_path = os.path.join(
            self._path.current_os,
            "config",
            "core",
            constants.PIPELINECONFIG_FILE
        )

        with self._open_auto_created_yml(pipeline_config_path) as fh:
            yaml.safe_dump(pipeline_config_content, fh)
            fh.write("\n")
            fh.write("# End of file.\n")
Example #29
def _do_clone(log, tk, source_pc_id, user_id, new_name, target_linux, target_mac, target_win):
    """
    Clones the current configuration
    """

    curr_os = ShotgunPath.get_shotgun_storage_key()
    source_pc = tk.shotgun.find_one(constants.PIPELINE_CONFIGURATION_ENTITY, 
                                    [["id", "is", source_pc_id]], 
                                    ["code", "project", "linux_path", "windows_path", "mac_path"])
    source_folder = source_pc.get(curr_os)
    
    target_folder = {
        "linux2": target_linux,
        "win32": target_win,
        "darwin": target_mac
    }[sys.platform]
    
    log.debug("Cloning %s -> %s" % (source_folder, target_folder))
    
    if not os.path.exists(source_folder):
        raise TankError("Cannot clone! Source folder '%s' does not exist!" % source_folder)
    
    if os.path.exists(target_folder):
        raise TankError("Cannot clone! Target folder '%s' already exists!" % target_folder)

    # Register the new entity in Shotgun. This is being done first, because one
    # common problem is the user's permissions not being sufficient to allow them
    # to create the PC entity. In this situation, we want to fail quickly and not
    # leave garbage files on disk. As such, we do this first, then copy the config
    # on disk.
    data = {"linux_path": target_linux,
            "windows_path":target_win,
            "mac_path": target_mac,
            "code": new_name,
            "project": source_pc["project"],
            "users": [ {"type": "HumanUser", "id": user_id} ] 
            }
    log.debug("Create sg: %s" % str(data))
    pc_entity = tk.shotgun.create(constants.PIPELINE_CONFIGURATION_ENTITY, data)
    log.debug("Created in SG: %s" % str(pc_entity))
    
    # copy files and folders across
    try:
        os.mkdir(target_folder, 0o777)
        os.mkdir(os.path.join(target_folder, "cache"), 0o777)
        filesystem.copy_folder(
            os.path.join(source_folder, "config"),
            os.path.join(target_folder, "config"),
            skip_list=[]
        )
        filesystem.copy_folder(
            os.path.join(source_folder, "install"),
            os.path.join(target_folder, "install")
        )
        shutil.copy(os.path.join(source_folder, "tank"), os.path.join(target_folder, "tank"))
        shutil.copy(os.path.join(source_folder, "tank.bat"), os.path.join(target_folder, "tank.bat"))
        os.chmod(os.path.join(target_folder, "tank.bat"), 0o777)
        os.chmod(os.path.join(target_folder, "tank"), 0o777)

        sg_code_location = os.path.join(target_folder, "config", "core", "install_location.yml")
        if os.path.exists(sg_code_location):
            os.chmod(sg_code_location, 0o666)
            os.remove(sg_code_location)
        fh = open(sg_code_location, "wt")
        fh.write("# Shotgun Pipeline Toolkit configuration file\n")
        fh.write("# This file was automatically created by tank clone\n")
        fh.write("# This file reflects the paths in the pipeline configuration\n")
        fh.write("# entity which is associated with this location (%s).\n" % new_name)
        fh.write("\n")
        fh.write("Windows: '%s'\n" % target_win)
        fh.write("Darwin: '%s'\n" % target_mac)
        fh.write("Linux: '%s'\n" % target_linux)
        fh.write("\n")
        fh.write("# End of file.\n")
        fh.close()
    
    except Exception as e:
        raise TankError("Could not create file system structure: %s" % e)

    # lastly, update the pipeline_configuration.yml file
    try:
        
        sg_pc_location = os.path.join(
            target_folder,
            "config",
            "core",
            constants.PIPELINECONFIG_FILE
        )
        
        # read the file first
        fh = open(sg_pc_location, "rt")
        try:
            data = yaml.safe_load(fh)
        finally:
            fh.close()

        # now delete it        
        if os.path.exists(sg_pc_location):
            os.chmod(sg_pc_location, 0o666)
            os.remove(sg_pc_location)

        # now update some fields            
        data["pc_id"] = pc_entity["id"]
        data["pc_name"] = new_name 
        
        # and write the new file
        fh = open(sg_pc_location, "wt")

        # using safe_dump instead of dump ensures that we
        # don't serialize any non-std yaml content. In particular,
        # this causes issues if a unicode object containing a 7-bit
        # ascii string is passed as part of the data. in this case, 
        # dump will write out a special format which is later on 
        # *loaded in* as a unicode object, even if the content doesn't  
        # need unicode handling. And this causes issues down the line
        # in toolkit code, assuming strings:
        #
        # >>> yaml.dump({"foo": u"bar"})
        # "{foo: !!python/unicode 'bar'}\n"
        # >>> yaml.safe_dump({"foo": u"bar"})
        # '{foo: bar}\n'
        #
        yaml.safe_dump(data, fh)
        fh.close()

    except Exception as e:
        raise TankError("Could not update pipeline_configuration.yml file: %s" % e)
    

    return {"source": source_folder, "target": target_folder, "id": pc_entity["id"] }
Example #30
    def write_pipeline_config_file(self, pipeline_config_id, project_id,
                                   plugin_id, bundle_cache_fallback_paths,
                                   source_descriptor):
        """
        Writes out the pipeline configuration file config/core/pipeline_config.yml

        This will populate all relevant parameters required for a toolkit runtime setup.
        Project and pipeline configuration names will be resolved from Shotgun.

        :param pipeline_config_id: Pipeline config id or None for an unmanaged config.
        :param project_id: Project id or None for the site config or for a baked config.
        :param plugin_id: Plugin id string to identify the scope for a particular plugin
                          or integration. For more information,
                          see :meth:`~sgtk.bootstrap.ToolkitManager.plugin_id`. For
                          non-plugin based toolkit projects, this value is None.
        :param bundle_cache_fallback_paths: List of bundle cache fallback paths.
        :param source_descriptor: Descriptor object used to identify
            which descriptor the pipeline configuration originated from.
            For configurations where this source may not be directly accessible,
            (e.g. baked configurations), this can be set to ``None``.

        :returns: Path to the configuration file that was written out.
        """
        if project_id:
            # Look up the project name via the project id
            log.debug("Checking project in Shotgun...")
            sg_data = self._sg_connection.find_one("Project",
                                                   [["id", "is", project_id]],
                                                   ["tank_name"])

            # When the given project id cannot be found, raise a meaningful exception.
            if not sg_data:
                msg = "Unknown project id %s" % project_id
                log.debug("Raising ValueError('%s')" % msg)
                raise ValueError(msg)

            project_name = sg_data[
                "tank_name"] or constants.UNNAMED_PROJECT_NAME
        else:
            project_name = constants.UNNAMED_PROJECT_NAME

        # the pipeline config metadata
        # resolve project name and pipeline config name from shotgun.
        if pipeline_config_id:
            # look up pipeline config name and project name via the pc
            log.debug("Checking pipeline config in Shotgun...")

            sg_data = self._sg_connection.find_one(
                constants.PIPELINE_CONFIGURATION_ENTITY_TYPE,
                [["id", "is", pipeline_config_id]], ["code"])
            pipeline_config_name = sg_data[
                "code"] or constants.UNMANAGED_PIPELINE_CONFIG_NAME
        elif project_id:
            pipeline_config_name = constants.UNMANAGED_PIPELINE_CONFIG_NAME
        else:
            # this is either a site config or a baked config.
            # in the latter case, the project name will be overridden at
            # runtime (along with many other parameters).
            project_name = "Site"
            pipeline_config_name = constants.UNMANAGED_PIPELINE_CONFIG_NAME

        pipeline_config_content = {
            "pc_id": pipeline_config_id,
            "pc_name": pipeline_config_name,
            "project_id": project_id,
            "project_name": project_name,
            "plugin_id": plugin_id,
            "published_file_entity_type": "PublishedFile",
            "use_bundle_cache": True,
            "bundle_cache_fallback_roots": bundle_cache_fallback_paths,
            "use_shotgun_path_cache": True,
        }

        if source_descriptor:
            pipeline_config_content[
                "source_descriptor"] = source_descriptor.get_dict()

        # write pipeline_configuration.yml
        pipeline_config_path = os.path.join(self._path.current_os, "config",
                                            "core",
                                            constants.PIPELINECONFIG_FILE)

        if os.path.exists(pipeline_config_path):
            # warn if this file already exists
            log.warning(
                "The file 'core/%s' exists in the configuration "
                "but will be overwritten with an auto generated file." %
                constants.PIPELINECONFIG_FILE)

        with filesystem.auto_created_yml(pipeline_config_path) as fh:
            yaml.safe_dump(pipeline_config_content, fh)

        return pipeline_config_path
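An invocation sketch for the method above; the writer instance, ids, plugin id and cache path are all illustrative rather than taken from the examples:

    path = writer.write_pipeline_config_file(
        pipeline_config_id=123,
        project_id=456,
        plugin_id="basic.desktop",
        bundle_cache_fallback_paths=["/studio/bundle_cache"],
        source_descriptor=None,  # e.g. a baked config with no reachable source
    )
    print("Pipeline configuration written to %s" % path)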
Example #31
def run_project_setup(log, sg, setup_params):
    """
    Execute the project setup.
    No validation is happening at this point - ensure that you have run the necessary validation
    methods in the parameters object.

    :param log: python logger object
    :param sg: shotgun api connection to the associated site
    :param setup_params: Parameters object which holds gathered project settings
    """
    log.info("")
    log.info("Starting project setup.")

    # get the location of the configuration
    config_location_curr_os = setup_params.get_configuration_location(sys.platform)
    config_location_mac = setup_params.get_configuration_location("darwin")
    config_location_linux = setup_params.get_configuration_location("linux2")
    config_location_win = setup_params.get_configuration_location("win32")

    # project id
    project_id = setup_params.get_project_id()
    if project_id:
        sg_project_link = {"id": project_id, "type": "Project"}
    else:
        sg_project_link = None

    # get all existing pipeline configurations
    setup_params.report_progress_from_installer("Checking Pipeline Configurations...")

    pcs = sg.find(constants.PIPELINE_CONFIGURATION_ENTITY,
                  [["project", "is", sg_project_link]],
                  ["code", "linux_path", "windows_path", "mac_path"])

    if len(pcs) > 0:
        if setup_params.get_force_setup():
            # if we have the force flag enabled, remove any pipeline configurations
            for x in pcs:
                log.warning("Force mode: Deleting old pipeline configuration %s..." % x["code"])
                sg.delete(constants.PIPELINE_CONFIGURATION_ENTITY, x["id"])

        elif not setup_params.get_auto_path_mode():
            # this is a normal setup, e.g. not with the force flag on
            # nor an auto-path where each machine effectively manages its own config
            # for this case, we don't allow the process to proceed if a config exists
            raise TankError("Cannot set up this project! Pipeline configuration entries already exist in Shotgun.")

        else:
            # auto path mode
            # make sure that all PCs have empty paths set, either None values or ""
            for x in pcs:
                if x["linux_path"] or x["windows_path"] or x["mac_path"]:
                    raise TankError("Cannot set up this project! Non-auto-path style pipeline "
                                    "configuration entries already exist in Shotgun.")

    # first do disk structure setup, this is most likely to fail.
    setup_params.report_progress_from_installer("Creating main folder structure...")
    log.info("Installing configuration into '%s'..." % config_location_curr_os )
    if not os.path.exists(config_location_curr_os):
        # note that we have already validated that creation is possible
        os.makedirs(config_location_curr_os, 0o775)

    # create pipeline config base folder structure
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "cache"), 0o777)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "config"), 0o775)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install"), 0o775)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install", "core"), 0o777)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install", "core", "python"), 0o777)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install", "core.backup"), 0o777, True)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install", "engines"), 0o777, True)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install", "apps"), 0o777, True)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install", "frameworks"), 0o777, True)

    # copy the configuration into place
    setup_params.report_progress_from_installer("Setting up template configuration...")
    setup_params.create_configuration(os.path.join(config_location_curr_os, "config"))

    # copy the tank binaries to the top of the config
    setup_params.report_progress_from_installer("Copying binaries and API proxies...")
    log.debug("Copying Toolkit binaries...")
    core_api_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))
    root_binaries_folder = os.path.join(core_api_root, "setup", "root_binaries")
    for file_name in os.listdir(root_binaries_folder):
        src_file = os.path.join(root_binaries_folder, file_name)
        tgt_file = os.path.join(config_location_curr_os, file_name)
        shutil.copy(src_file, tgt_file)
        os.chmod(tgt_file, 0o775)

    # copy the python stubs
    log.debug("Copying python stubs...")
    tank_proxy = os.path.join(core_api_root, "setup", "tank_api_proxy")
    filesystem.copy_folder(
        tank_proxy,
        os.path.join(config_location_curr_os, "install", "core", "python")
    )

    # specify the parent files in install/core/core_PLATFORM.cfg
    log.debug("Creating core redirection config files...")
    setup_params.report_progress_from_installer("Writing configuration files...")

    core_path = os.path.join(config_location_curr_os, "install", "core", "core_Darwin.cfg")
    core_location = setup_params.get_associated_core_path("darwin")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    core_path = os.path.join(config_location_curr_os, "install", "core", "core_Linux.cfg")
    core_location = setup_params.get_associated_core_path("linux2")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    core_path = os.path.join(config_location_curr_os, "install", "core", "core_Windows.cfg")
    core_location = setup_params.get_associated_core_path("win32")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    # write the install_location file for our new setup
    sg_code_location = os.path.join(config_location_curr_os, "config", "core", "install_location.yml")

    # if we are basing our setup on an existing project setup, make sure we can write to the file.
    if os.path.exists(sg_code_location):
        os.chmod(sg_code_location, 0o666)

    fh = open(sg_code_location, "wt")
    fh.write("# Shotgun Pipeline Toolkit configuration file\n")
    fh.write("# This file was automatically created by setup_project\n")
    fh.write("# This file reflects the paths in the primary pipeline\n")
    fh.write("# configuration defined for this project.\n")
    fh.write("\n")
    fh.write("Windows: '%s'\n" % config_location_win)
    fh.write("Darwin: '%s'\n" % config_location_mac)
    fh.write("Linux: '%s'\n" % config_location_linux)
    fh.write("\n")
    fh.write("# End of file.\n")
    fh.close()

    # write the roots.yml file in the config to match our settings
    roots_data = {}
    default_storage_name = setup_params.default_storage_name
    for storage_name in setup_params.get_required_storages():

        roots_data[storage_name] = {
            "windows_path": setup_params.get_storage_path(storage_name, "win32"),
            "linux_path": setup_params.get_storage_path(storage_name, "linux2"),
            "mac_path": setup_params.get_storage_path(storage_name, "darwin")
        }

        # if this is the default storage, ensure it is explicitly marked in the
        # roots file
        if default_storage_name and storage_name == default_storage_name:
            roots_data[storage_name]["default"] = True

        # if there is a SG local storage associated with this root, make sure
        # it is explicit in the the roots file. this allows roots to exist that
        # are not named the same as the storage in SG
        sg_storage_id = setup_params.get_storage_shotgun_id(storage_name)
        if sg_storage_id is not None:
            roots_data[storage_name]["shotgun_storage_id"] = sg_storage_id

    storage_roots = StorageRoots.from_metadata(roots_data)
    config_folder = os.path.join(config_location_curr_os, "config")
    storage_roots.write(sg, config_folder, storage_roots)

    # now ensure there is a tank folder in every storage
    setup_params.report_progress_from_installer("Setting up project storage folders...")
    for storage_name in setup_params.get_required_storages():

        log.info("Setting up %s storage..." % storage_name )

        # get the project path for this storage
        current_os_path = setup_params.get_project_path(storage_name, sys.platform)
        log.debug("Project path: %s" % current_os_path )


    # Create Project.tank_name and PipelineConfiguration records in Shotgun
    #
    # This logic has some special complexity when the auto_path mode is in use.

    setup_params.report_progress_from_installer("Registering in Shotgun...")

    if setup_params.get_auto_path_mode():
        # first, check the project name. If there is no project name in Shotgun, populate it
        # with the project name which is specified via the project name parameter.
        # if there isn't yet an entry, create it.
        # This is consistent with the anticipated future behaviour we expect when we
        # switch from auto_path to a zip file based approach.

        project_name = setup_params.get_project_disk_name()

        # Site configs are not associated to a project, so no need to look for tank_name on a project
        # that doesn't exist.
        if project_id is not None:
            data = sg.find_one("Project", [["id", "is", project_id]], ["tank_name"])
            if data["tank_name"] is None:
                log.info("Registering project in Shotgun...")
                log.debug("Shotgun: Setting Project.tank_name to %s" % project_name)
                sg.update("Project", project_id, {"tank_name": project_name})

            else:
                # there is already a name. Check that it matches the name in the project params
                # if not, then use the existing name and issue a warning!
                if data["tank_name"] != project_name:
                    log.warning("You have supplied the project disk name '%s' as part of the project setup "
                                "parameters, however the name '%s' has already been registered in Shotgun for "
                                "this project. This name will be used instead of the suggested disk "
                                "name." % (project_name, data["tank_name"]) )
                    project_name = data["tank_name"]

        log.info("Creating Pipeline Configuration in Shotgun...")
        # this is an auto-path project, meaning that shotgun doesn't store the location
        # to the pipeline configuration. Because an auto-path location is often set up
        # on multiple machines, check first if the entry exists and in that case skip creation
        data = sg.find_one(constants.PIPELINE_CONFIGURATION_ENTITY,
                           [["code", "is", constants.PRIMARY_PIPELINE_CONFIG_NAME],
                            ["project", "is", sg_project_link]],
                           ["id"])

        if data is None:
            log.info("Creating Pipeline Configuration in Shotgun...")
            data = {"project": sg_project_link, "code": constants.PRIMARY_PIPELINE_CONFIG_NAME}
            pc_entity = sg.create(constants.PIPELINE_CONFIGURATION_ENTITY, data)
            pipeline_config_id = pc_entity["id"]
            log.debug("Created data: %s" % pc_entity)
        else:
            pipeline_config_id = data["id"]

    else:
        # normal mode.
        if project_id:
            log.info("Registering project in Shotgun...")
            project_name = setup_params.get_project_disk_name()
            log.debug("Shotgun: Setting Project.tank_name to %s" % project_name)
            sg.update("Project", project_id, {"tank_name": project_name})

        log.info("Creating Pipeline Configuration in Shotgun...")
        data = {"project": sg_project_link,
                "linux_path": config_location_linux,
                "windows_path": config_location_win,
                "mac_path": config_location_mac,
                "code": constants.PRIMARY_PIPELINE_CONFIG_NAME}

        # create pipeline configuration record
        pc_entity = sg.create(constants.PIPELINE_CONFIGURATION_ENTITY, data)
        pipeline_config_id = pc_entity["id"]
        log.debug("Created data: %s" % pc_entity)

    # write the record to disk
    pipe_config_sg_id_path = os.path.join(
        config_location_curr_os,
        "config",
        "core",
        constants.PIPELINECONFIG_FILE
    )
    log.debug("Writing to pc cache file %s" % pipe_config_sg_id_path)

    # determine the entity type to use for Published Files:
    pf_entity_type = _get_published_file_entity_type(log, sg)

    data = {}
    data["project_name"] = project_name
    data["pc_id"] = pipeline_config_id
    data["project_id"] = project_id
    data["pc_name"] = constants.PRIMARY_PIPELINE_CONFIG_NAME
    data["published_file_entity_type"] = pf_entity_type

    # all 0.15+ projects are pushing folders to Shotgun by default
    data["use_shotgun_path_cache"] = True

    try:
        fh = open(pipe_config_sg_id_path, "wt")
        # using safe_dump instead of dump ensures that we
        # don't serialize any non-std yaml content. In particular,
        # this causes issues if a unicode object containing a 7-bit
        # ascii string is passed as part of the data. in this case,
        # dump will write out a special format which is later on
        # *loaded in* as a unicode object, even if the content doesn't
        # need unicode handling. And this causes issues down the line
        # in toolkit code, assuming strings:
        #
        # >>> yaml.dump({"foo": u"bar"})
        # "{foo: !!python/unicode 'bar'}\n"
        # >>> yaml.safe_dump({"foo": u"bar"})
        # '{foo: bar}\n'
        #
        yaml.safe_dump(data, fh)
        fh.close()
    except Exception as exp:
        raise TankError("Could not write to pipeline configuration cache file %s. "
                        "Error reported: %s" % (pipe_config_sg_id_path, exp))


    ##########################################################################################
    # install apps

    # We now have a fully functional tank setup! Time to start it up...
    tk = sgtk_from_path(config_location_curr_os)
    log.debug("Instantiated tk instance: %s" % tk)
    pc = tk.pipeline_configuration

    # each entry in the config template contains instructions about which version of the app
    # to use. First loop over all environments and gather all descriptors we should download,
    # then go ahead and download and post-install them

    log.info("Downloading and installing apps...")

    # pass 1 - populate list of all descriptors
    descriptors = []
    for env_name in pc.get_environments():

        env_obj = pc.get_environment(env_name)

        for engine in env_obj.get_engines():
            descriptors.append( env_obj.get_engine_descriptor(engine) )

            for app in env_obj.get_apps(engine):
                descriptors.append( env_obj.get_app_descriptor(engine, app) )

        for framework in env_obj.get_frameworks():
            descriptors.append( env_obj.get_framework_descriptor(framework) )

    # pass 2 - download all apps
    num_descriptors = len(descriptors)
    for idx, descriptor in enumerate(descriptors):

        # note that we push percentages here to the progress bar callback
        # going from 0 to 100
        progress = int(float(idx) / num_descriptors * 100)
        setup_params.report_progress_from_installer("Downloading apps...", progress)

        if not descriptor.exists_local():
            log.info("Downloading %s to the local Toolkit install location..." % descriptor)
            descriptor.download_local()

        else:
            log.info("Item %s is already locally installed." % descriptor)

    # create required shotgun fields
    setup_params.report_progress_from_installer("Running post install...")
    for descriptor in descriptors:
        descriptor.ensure_shotgun_fields_exist(tk)
        # run post install hook
        descriptor.run_post_install(tk)


    ##########################################################################################
    # post processing of the install

    # run after project create script if it exists
    setup_params.report_progress_from_installer("Running post-setup scripts...")
    after_script_path = os.path.join(config_location_curr_os, "config", "after_project_create.py")
    if os.path.exists(after_script_path):
        log.info("Found a post-install script %s" % after_script_path)
        log.info("Executing post-install commands...")
        sys.path.insert(0, os.path.dirname(after_script_path))
        try:
            import after_project_create
            after_project_create.create(sg=sg, project_id=project_id, log=log)
        except Exception as e:
            if ("API read() invalid/missing string entity" in e.__str__()
                and "\"type\"=>\"TankType\"" in e.__str__()):
                # Handle a specific case where an old version of the
                # after_project_create script set up TankType entities which
                # are now disabled following the migration to the
                # new PublishedFileType entity
                log.info("")
                log.warning("The post install script failed to complete.  This is most likely because it "
                            "is from an old configuration that is attempting to create 'TankType' entities "
                            "which are now disabled in Shotgun.")
            else:
                log.info("")
                log.error("The post install script failed to complete: %s" % e)
        else:
            log.info("Post install phase complete!")
        finally:
            sys.path.pop(0)

    log.info("")
    log.info("Your Toolkit Project has been fully set up.")
    log.info("")
Example #32
def run_project_setup(log, sg, setup_params):
    """
    Execute the project setup.
    No validation is happening at this point - ensure that you have run the necessary validation
    methods in the parameters object.

    :param log: python logger object
    :param sg: shotgun api connection to the associated site
    :param setup_params: Parameters object which holds gathered project settings
    """
    log.info("")
    log.info("Starting project setup.")

    # get the location of the configuration
    config_location_curr_os = setup_params.get_configuration_location(sys.platform)
    config_location_mac = setup_params.get_configuration_location("darwin")
    config_location_linux = setup_params.get_configuration_location("linux2")
    config_location_win = setup_params.get_configuration_location("win32")

    # project id
    project_id = setup_params.get_project_id()
    if project_id:
        sg_project_link = {"id": project_id, "type": "Project"}
    else:
        sg_project_link = None

    # get all existing pipeline configurations
    setup_params.report_progress_from_installer("Checking Pipeline Configurations...")

    pcs = sg.find(constants.PIPELINE_CONFIGURATION_ENTITY,
                  [["project", "is", sg_project_link]],
                  ["code", "linux_path", "windows_path", "mac_path"])

    if len(pcs) > 0:
        if setup_params.get_force_setup():
            # if we have the force flag enabled, remove any pipeline configurations
            for x in pcs:
                log.warning("Force mode: Deleting old pipeline configuration %s..." % x["code"])
                sg.delete(constants.PIPELINE_CONFIGURATION_ENTITY, x["id"])

        elif not setup_params.get_auto_path_mode():
            # this is a normal setup, e.g. not with the force flag on
            # nor an auto-path where each machine effectively manages its own config
            # for this case, we don't allow the process to proceed if a config exists
            raise TankError("Cannot set up this project! Pipeline configuration entries already exist in Shotgun.")

        else:
            # auto path mode
            # make sure that all PCs have empty paths set, either None values or ""
            for x in pcs:
                if x["linux_path"] or x["windows_path"] or x["mac_path"]:
                    raise TankError("Cannot set up this project! Non-auto-path style pipeline "
                                    "configuration entries already exist in Shotgun.")

    # first do disk structure setup, this is most likely to fail.
    setup_params.report_progress_from_installer("Creating main folder structure...")
    log.info("Installing configuration into '%s'..." % config_location_curr_os )
    if not os.path.exists(config_location_curr_os):
        # note that we have already validated that creation is possible
        os.makedirs(config_location_curr_os, 0o775)

    # create pipeline config base folder structure
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "cache"), 0777)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "config"), 0775)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install"), 0775)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install", "core"), 0777)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install", "core", "python"), 0777)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install", "core.backup"), 0777, True)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install", "engines"), 0777, True)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install", "apps"), 0777, True)
    filesystem.ensure_folder_exists(os.path.join(config_location_curr_os, "install", "frameworks"), 0777, True)

    # copy the configuration into place
    setup_params.report_progress_from_installer("Setting up template configuration...")
    setup_params.create_configuration(os.path.join(config_location_curr_os, "config"))

    # copy the tank binaries to the top of the config
    setup_params.report_progress_from_installer("Copying binaries and API proxies...")
    log.debug("Copying Toolkit binaries...")
    core_api_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "..", ".."))
    root_binaries_folder = os.path.join(core_api_root, "setup", "root_binaries")
    for file_name in os.listdir(root_binaries_folder):
        src_file = os.path.join(root_binaries_folder, file_name)
        tgt_file = os.path.join(config_location_curr_os, file_name)
        shutil.copy(src_file, tgt_file)
        os.chmod(tgt_file, 0o775)

    # copy the python stubs
    log.debug("Copying python stubs...")
    tank_proxy = os.path.join(core_api_root, "setup", "tank_api_proxy")
    filesystem.copy_folder(
        tank_proxy,
        os.path.join(config_location_curr_os, "install", "core", "python")
    )

    # specify the parent files in install/core/core_PLATFORM.cfg
    log.debug("Creating core redirection config files...")
    setup_params.report_progress_from_installer("Writing configuration files...")

    core_path = os.path.join(config_location_curr_os, "install", "core", "core_Darwin.cfg")
    core_location = setup_params.get_associated_core_path("darwin")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    core_path = os.path.join(config_location_curr_os, "install", "core", "core_Linux.cfg")
    core_location = setup_params.get_associated_core_path("linux2")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    core_path = os.path.join(config_location_curr_os, "install", "core", "core_Windows.cfg")
    core_location = setup_params.get_associated_core_path("win32")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    # write the install_location file for our new setup
    sg_code_location = os.path.join(config_location_curr_os, "config", "core", "install_location.yml")

    # if we are basing our setup on an existing project setup, make sure we can write to the file.
    if os.path.exists(sg_code_location):
        os.chmod(sg_code_location, 0o666)

    fh = open(sg_code_location, "wt")
    fh.write("# Shotgun Pipeline Toolkit configuration file\n")
    fh.write("# This file was automatically created by setup_project\n")
    fh.write("# This file reflects the paths in the primary pipeline\n")
    fh.write("# configuration defined for this project.\n")
    fh.write("\n")
    fh.write("Windows: '%s'\n" % config_location_win)
    fh.write("Darwin: '%s'\n" % config_location_mac)
    fh.write("Linux: '%s'\n" % config_location_linux)
    fh.write("\n")
    fh.write("# End of file.\n")
    fh.close()

    # update the roots.yml file in the config to match our settings
    # reshuffle list of associated local storages to be a dict keyed by storage name
    # and with keys mac_path/windows_path/linux_path

    log.debug("Writing %s..." % constants.STORAGE_ROOTS_FILE)
    roots_path = os.path.join(config_location_curr_os, "config", "core", constants.STORAGE_ROOTS_FILE)

    roots_data = {}
    for storage_name in setup_params.get_required_storages():

        roots_data[storage_name] = {"windows_path": setup_params.get_storage_path(storage_name, "win32"),
                                    "linux_path": setup_params.get_storage_path(storage_name, "linux2"),
                                    "mac_path": setup_params.get_storage_path(storage_name, "darwin")}

    try:
        fh = open(roots_path, "wt")
        # using safe_dump instead of dump ensures that we
        # don't serialize any non-std yaml content. In particular,
        # this causes issues if a unicode object containing a 7-bit
        # ascii string is passed as part of the data. in this case,
        # dump will write out a special format which is later on
        # *loaded in* as a unicode object, even if the content doesn't
        # need unicode handling. And this causes issues down the line
        # in toolkit code, assuming strings:
        #
        # >>> yaml.dump({"foo": u"bar"})
        # "{foo: !!python/unicode 'bar'}\n"
        # >>> yaml.safe_dump({"foo": u"bar"})
        # '{foo: bar}\n'
        #
        yaml.safe_dump(roots_data, fh)
        fh.close()
    except Exception as exp:
        raise TankError("Could not write to roots file %s. "
                        "Error reported: %s" % (roots_path, exp))
Example #33
def run_project_setup(log, sg, setup_params):
    """
    Execute the project setup.
    No validation is happening at this point - ensure that you have run the necessary validation
    methods in the parameters object.

    :param log: python logger object
    :param sg: shotgun api connection to the associated site
    :param setup_params: Parameters object which holds gathered project settings
    """
    log.info("")
    log.info("Starting project setup.")

    # get the location of the configuration
    config_location_curr_os = setup_params.get_configuration_location(
        sys.platform)
    config_location_mac = setup_params.get_configuration_location("darwin")
    config_location_linux = setup_params.get_configuration_location("linux2")
    config_location_win = setup_params.get_configuration_location("win32")

    # project id
    project_id = setup_params.get_project_id()
    if project_id:
        sg_project_link = {"id": project_id, "type": "Project"}
    else:
        sg_project_link = None

    # get all existing pipeline configurations
    setup_params.report_progress_from_installer(
        "Checking Pipeline Configurations...")

    pcs = sg.find(constants.PIPELINE_CONFIGURATION_ENTITY,
                  [["project", "is", sg_project_link]],
                  ["code", "linux_path", "windows_path", "mac_path"])

    if len(pcs) > 0:
        if setup_params.get_force_setup():
            # if we have the force flag enabled, remove any pipeline configurations
            for x in pcs:
                log.warning(
                    "Force mode: Deleting old pipeline configuration %s..." %
                    x["code"])
                sg.delete(constants.PIPELINE_CONFIGURATION_ENTITY, x["id"])

        elif not setup_params.get_auto_path_mode():
            # this is a normal setup, e.g. not with the force flag on
            # nor an auto-path where each machine effectively manages its own config
            # for this case, we don't allow the process to proceed if a config exists
            raise TankError(
                "Cannot set up this project! Pipeline configuration entries already exist in Shotgun."
            )

        else:
            # auto path mode
            # make sure that all PCs have empty paths set, either None values or ""
            for x in pcs:
                if x["linux_path"] or x["windows_path"] or x["mac_path"]:
                    raise TankError(
                        "Cannot set up this project! Non-auto-path style pipeline "
                        "configuration entries already exist in Shotgun.")

    # first do disk structure setup, this is most likely to fail.
    setup_params.report_progress_from_installer(
        "Creating main folder structure...")
    log.info("Installing configuration into '%s'..." % config_location_curr_os)
    if not os.path.exists(config_location_curr_os):
        # note that we have already validated that creation is possible
        os.makedirs(config_location_curr_os, 0o775)

    # create pipeline config base folder structure
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "cache"), 0o777)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "config"), 0o775)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install"), 0o775)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install", "core"), 0o777)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install", "core", "python"),
        0o777)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install", "core.backup"), 0o777,
        True)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install", "engines"), 0o777,
        True)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install", "apps"), 0o777, True)
    filesystem.ensure_folder_exists(
        os.path.join(config_location_curr_os, "install", "frameworks"), 0o777,
        True)

    # copy the configuration into place
    setup_params.report_progress_from_installer(
        "Setting up template configuration...")
    setup_params.create_configuration(
        os.path.join(config_location_curr_os, "config"))

    # copy the tank binaries to the top of the config
    setup_params.report_progress_from_installer(
        "Copying binaries and API proxies...")
    log.debug("Copying Toolkit binaries...")
    core_api_root = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "..", "..", ".."))
    root_binaries_folder = os.path.join(core_api_root, "setup",
                                        "root_binaries")
    for file_name in os.listdir(root_binaries_folder):
        src_file = os.path.join(root_binaries_folder, file_name)
        tgt_file = os.path.join(config_location_curr_os, file_name)
        shutil.copy(src_file, tgt_file)
        os.chmod(tgt_file, 0o775)

    # copy the python stubs
    log.debug("Copying python stubs...")
    tank_proxy = os.path.join(core_api_root, "setup", "tank_api_proxy")
    filesystem.copy_folder(
        tank_proxy,
        os.path.join(config_location_curr_os, "install", "core", "python"))

    # specify the parent files in install/core/core_PLATFORM.cfg
    log.debug("Creating core redirection config files...")
    setup_params.report_progress_from_installer(
        "Writing configuration files...")

    core_path = os.path.join(config_location_curr_os, "install", "core",
                             "core_Darwin.cfg")
    core_location = setup_params.get_associated_core_path("darwin")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    core_path = os.path.join(config_location_curr_os, "install", "core",
                             "core_Linux.cfg")
    core_location = setup_params.get_associated_core_path("linux2")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    core_path = os.path.join(config_location_curr_os, "install", "core",
                             "core_Windows.cfg")
    core_location = setup_params.get_associated_core_path("win32")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    # write the install_location file for our new setup
    sg_code_location = os.path.join(config_location_curr_os, "config", "core",
                                    "install_location.yml")

    # if we are basing our setup on an existing project setup, make sure we can write to the file.
    if os.path.exists(sg_code_location):
        os.chmod(sg_code_location, 0o666)

    fh = open(sg_code_location, "wt")
    fh.write("# Shotgun Pipeline Toolkit configuration file\n")
    fh.write("# This file was automatically created by setup_project\n")
    fh.write("# This file reflects the paths in the primary pipeline\n")
    fh.write("# configuration defined for this project.\n")
    fh.write("\n")
    fh.write("Windows: '%s'\n" % config_location_win)
    fh.write("Darwin: '%s'\n" % config_location_mac)
    fh.write("Linux: '%s'\n" % config_location_linux)
    fh.write("\n")
    fh.write("# End of file.\n")
    fh.close()

    # update the roots.yml file in the config to match our settings
    # reshuffle list of associated local storages to be a dict keyed by storage name
    # and with keys mac_path/windows_path/linux_path

    log.debug("Writing %s..." % constants.STORAGE_ROOTS_FILE)
    roots_path = os.path.join(config_location_curr_os, "config", "core",
                              constants.STORAGE_ROOTS_FILE)

    roots_data = {}
    for storage_name in setup_params.get_required_storages():

        roots_data[storage_name] = {
            "windows_path":
            setup_params.get_storage_path(storage_name, "win32"),
            "linux_path":
            setup_params.get_storage_path(storage_name, "linux2"),
            "mac_path": setup_params.get_storage_path(storage_name, "darwin")
        }

    try:
        fh = open(roots_path, "wt")
        # using safe_dump instead of dump ensures that we
        # don't serialize any non-std yaml content. In particular,
        # this causes issues if a unicode object containing a 7-bit
        # ascii string is passed as part of the data. in this case,
        # dump will write out a special format which is later on
        # *loaded in* as a unicode object, even if the content doesn't
        # need unicode handling. And this causes issues down the line
        # in toolkit code, assuming strings:
        #
        # >>> yaml.dump({"foo": u"bar"})
        # "{foo: !!python/unicode 'bar'}\n"
        # >>> yaml.safe_dump({"foo": u"bar"})
        # '{foo: bar}\n'
        #
        yaml.safe_dump(roots_data, fh)
        fh.close()
    except Exception as exp:
        raise TankError("Could not write to roots file %s. "
                        "Error reported: %s" % (roots_path, exp))
Example #34
    def write(cls, sg_connection, config_folder, storage_roots):
        """
        Given a ``StorageRoots`` object, write its metadata to the standard
        roots location within the supplied config folder. The method will write
        the corresponding local storage paths to the file as defined in Shotgun.
        This action will overwrite any existing storage roots file defined by
        the configuration.

        :param sg_connection: An existing SG connection, used to query local
            storage entities to ensure paths are up-to-date when the file is
            written.
        :param config_folder: The configuration folder under which the required
            roots file is written.
        :param storage_roots: A ``StorageRoots`` object instance that defines
            the required roots.
        """

        (local_storage_lookup, unmapped_roots) = \
            storage_roots.get_local_storages(sg_connection)

        roots_file = os.path.join(config_folder, cls.STORAGE_ROOTS_FILE_PATH)

        log.debug("Writing storage roots to: %s" % (roots_file,))

        # raise an error if there are any roots that can not be mapped to SG
        # local storage entries
        if unmapped_roots:
            raise TankError(
                "The following storages are defined by %s but can not be "
                "mapped to a local storage in Shotgun: %s" % (
                    roots_file,
                    ", ".join(unmapped_roots)
                )
            )

        if os.path.exists(roots_file):
            # warn if this file already exists
            log.warning(
                "The file '%s' exists in the configuration "
                "but will be overwritten with an auto generated file." %
                (roots_file,)
            )

        # build up a new metadata dict
        roots_metadata = storage_roots.metadata

        for root_name, root_info in storage_roots:

            # get the cached SG storage dict
            sg_local_storage = local_storage_lookup[root_name]

            # get the local storage as a ShotgunPath object
            storage_sg_path = ShotgunPath.from_shotgun_dict(sg_local_storage)

            # update the root's metadata with the dictionary of all
            # sys.platform-style paths
            root_info.update(storage_sg_path.as_shotgun_dict())

        log.debug("Writing storage roots metadata: %s" % (roots_metadata,))

        # write the new metadata to disk
        with filesystem.auto_created_yml(roots_file) as fh:
            yaml.safe_dump(roots_metadata, fh, default_flow_style=False)

        log.debug("Finished writing storage roots file: %s" % (roots_file,))
Example #35

                #
                # using safe_dump instead of dump ensures that we
                # don't serialize any non-std yaml content. In particular,
                # this causes issues if a unicode object containing a 7-bit
                # ascii string is passed as part of the data. in this case,
                # dump will write out a special format which is later on
                # *loaded in* as a unicode object, even if the content doesn't
                # need unicode handling. And this causes issues down the line
                # in toolkit code, assuming strings:
                #
                # >>> yaml.dump({"foo": u"bar"})
                # "{foo: !!python/unicode 'bar'}\n"
                # >>> yaml.safe_dump({"foo": u"bar"})
                # '{foo: bar}\n'
                #
                yaml.safe_dump(data, fh)

        except Exception as e:
            raise TankError("Could not write to environment file '%s'. "
                            "Error reported: %s" % (path, e))
        finally:
            fh.close()

    def set_yaml_preserve_mode(self, val):
        """
        If set to True, the ruamel parser will be used instead of the
        traditional pyyaml one. This parser preserves structure and
        comments and generally tries to update the yaml content more
        gracefully.

        :param val: True to enable the new parser, False to disable it.
        """
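The "preserve mode" described in that docstring corresponds to ruamel.yaml's round-trip mode, which keeps comments and key order intact across a load/modify/dump cycle. A minimal sketch of the idea using ruamel.yaml directly (the file name and the keys being edited are illustrative):

from ruamel.yaml import YAML

yaml_rt = YAML()  # typ="rt" (round-trip) is the default: comments survive

with open("shot_step.yml") as fh:
    data = yaml_rt.load(fh)

data["engines"]["tk-maya"]["debug_logging"] = True  # hypothetical edit

with open("shot_step.yml", "w") as fh:
    yaml_rt.dump(data, fh)  # comments and ordering are preserved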
Example #36
def _do_clone(log, tk, source_pc_id, user_id, new_name, target_linux,
              target_mac, target_win):
    """
    Clones the current configuration
    """

    curr_os = ShotgunPath.get_shotgun_storage_key()
    source_pc = tk.shotgun.find_one(
        constants.PIPELINE_CONFIGURATION_ENTITY,
        [["id", "is", source_pc_id]],
        ["code", "project", "linux_path", "windows_path", "mac_path"],
    )
    source_folder = source_pc.get(curr_os)

    target_folder = None
    if is_windows():
        target_folder = target_win
    elif is_macos():
        target_folder = target_mac
    elif is_linux():
        target_folder = target_linux

    log.debug("Cloning %s -> %s" % (source_folder, target_folder))

    if not os.path.exists(source_folder):
        raise TankError("Cannot clone! Source folder '%s' does not exist!" %
                        source_folder)

    if os.path.exists(target_folder):
        raise TankError("Cannot clone! Target folder '%s' already exists!" %
                        target_folder)

    # Register the new entity in Shotgun. This is being done first, because one
    # common problem is the user's permissions not being sufficient to allow them
    # to create the PC entity. In this situation, we want to fail quickly and not
    # leave garbage files on disk. As such, we do this first, then copy the config
    # on disk.
    data = {
        "linux_path": target_linux,
        "windows_path": target_win,
        "mac_path": target_mac,
        "code": new_name,
        "project": source_pc["project"],
        "users": [{
            "type": "HumanUser",
            "id": user_id
        }],
    }
    log.debug("Create sg: %s" % str(data))
    pc_entity = tk.shotgun.create(constants.PIPELINE_CONFIGURATION_ENTITY,
                                  data)
    log.debug("Created in SG: %s" % str(pc_entity))

    # copy files and folders across
    try:
        os.mkdir(target_folder, 0o777)
        os.mkdir(os.path.join(target_folder, "cache"), 0o777)
        filesystem.copy_folder(
            os.path.join(source_folder, "config"),
            os.path.join(target_folder, "config"),
            skip_list=[],
        )
        filesystem.copy_folder(
            os.path.join(source_folder, "install"),
            os.path.join(target_folder, "install"),
        )
        shutil.copy(os.path.join(source_folder, "tank"),
                    os.path.join(target_folder, "tank"))
        shutil.copy(
            os.path.join(source_folder, "tank.bat"),
            os.path.join(target_folder, "tank.bat"),
        )
        os.chmod(os.path.join(target_folder, "tank.bat"), 0o777)
        os.chmod(os.path.join(target_folder, "tank"), 0o777)

        sg_code_location = os.path.join(target_folder, "config", "core",
                                        "install_location.yml")
        if os.path.exists(sg_code_location):
            os.chmod(sg_code_location, 0o666)
            os.remove(sg_code_location)
        fh = open(sg_code_location, "wt")
        fh.write("# Shotgun Pipeline Toolkit configuration file\n")
        fh.write("# This file was automatically created by tank clone\n")
        fh.write(
            "# This file reflects the paths in the pipeline configuration\n")
        fh.write("# entity which is associated with this location (%s).\n" %
                 new_name)
        fh.write("\n")
        fh.write("Windows: '%s'\n" % target_win)
        fh.write("Darwin: '%s'\n" % target_mac)
        fh.write("Linux: '%s'\n" % target_linux)
        fh.write("\n")
        fh.write("# End of file.\n")
        fh.close()

    except Exception as e:
        raise TankError("Could not create file system structure: %s" % e)

    # lastly, update the pipeline_configuration.yml file
    try:

        sg_pc_location = os.path.join(target_folder, "config", "core",
                                      constants.PIPELINECONFIG_FILE)

        # read the file first
        fh = open(sg_pc_location, "rt")
        try:
            # safe_load is sufficient here: the file is plain YAML, and it
            # avoids executing arbitrary python tags
            data = yaml.safe_load(fh)
        finally:
            fh.close()

        # now delete it
        if os.path.exists(sg_pc_location):
            os.chmod(sg_pc_location, 0o666)
            os.remove(sg_pc_location)

        # now update some fields
        data["pc_id"] = pc_entity["id"]
        data["pc_name"] = new_name

        # and write the new file
        fh = open(sg_pc_location, "wt")

        # using safe_dump instead of dump ensures that we
        # don't serialize any non-std yaml content. In particular,
        # this causes issues if a unicode object containing a 7-bit
        # ascii string is passed as part of the data. in this case,
        # dump will write out a special format which is later on
        # *loaded in* as a unicode object, even if the content doesn't
        # need unicode handling. And this causes issues down the line
        # in toolkit code, assuming strings:
        #
        # >>> yaml.dump({"foo": u"bar"})
        # "{foo: !!python/unicode 'bar'}\n"
        # >>> yaml.safe_dump({"foo": u"bar"})
        # '{foo: bar}\n'
        #
        yaml.safe_dump(data, fh)
        fh.close()

    except Exception as e:
        raise TankError(
            "Could not update pipeline_configuration.yml file: %s" % e)

    return {
        "source": source_folder,
        "target": target_folder,
        "id": pc_entity["id"]
    }
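A hedged example of driving the function above. All ids and paths are invented; in practice this logic is reached through tank's clone-configuration command rather than called directly:

# Illustrative only: `log` is a logger and `tk` an initialized toolkit API
# instance; every id and path below is made up.
result = _do_clone(
    log,
    tk,
    source_pc_id=123,
    user_id=42,
    new_name="dev_jane",
    target_linux="/mnt/configs/dev_jane",
    target_mac="/Volumes/configs/dev_jane",
    target_win=r"P:\configs\dev_jane",
)
log.info(
    "Cloned %s -> %s (new pipeline configuration id %s)"
    % (result["source"], result["target"], result["id"])
)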
Example #37
    def write_pipeline_config_file(self, pipeline_config_id, project_id, plugin_id, bundle_cache_fallback_paths):
        """
        Writes out the pipeline configuration file config/core/pipeline_configuration.yml

        This will populate all relevant parameters required for a toolkit runtime setup.
        Project and pipeline configuration names will be resolved from Shotgun.

        :param pipeline_config_id: Pipeline config id or None for an unmanaged config.
        :param project_id: Project id or None for the site config or for a baked config.
        :param plugin_id: Plugin id string to identify the scope for a particular plugin
                          or integration. For more information,
                          see :meth:`~sgtk.bootstrap.ToolkitManager.plugin_id`. For
                          non-plugin based toolkit projects, this value is None.
        :param bundle_cache_fallback_paths: List of bundle cache fallback paths.
        """
        # the pipeline config metadata
        # resolve project name and pipeline config name from shotgun.
        if pipeline_config_id:
            # look up pipeline config name and project name via the pc
            log.debug("Checking pipeline config in Shotgun...")

            sg_data = self._sg_connection.find_one(
                constants.PIPELINE_CONFIGURATION_ENTITY_TYPE,
                [["id", "is", pipeline_config_id]],
                ["code", "project.Project.tank_name"]
            )

            project_name = sg_data["project.Project.tank_name"] or constants.UNNAMED_PROJECT_NAME
            pipeline_config_name = sg_data["code"] or constants.UNMANAGED_PIPELINE_CONFIG_NAME

        elif project_id:
            # no pc. look up the project name via the project id
            log.debug("Checking project in Shotgun...")

            sg_data = self._sg_connection.find_one(
                "Project",
                [["id", "is", project_id]],
                ["tank_name"]
            )

            # When the given project id cannot be found, raise a meaningful exception.
            if not sg_data:
                msg = "Unknown project id %s" % project_id
                log.debug("Raising ValueError('%s')" % msg)
                raise ValueError(msg)

            project_name = sg_data["tank_name"] or constants.UNNAMED_PROJECT_NAME
            pipeline_config_name = constants.UNMANAGED_PIPELINE_CONFIG_NAME

        else:
            # this is either a site config or a baked config.
            # in the latter case, the project name will be overridden at
            # runtime (along with many other parameters).
            project_name = "Site"
            pipeline_config_name = constants.UNMANAGED_PIPELINE_CONFIG_NAME

        pipeline_config_content = {
            "pc_id": pipeline_config_id,
            "pc_name": pipeline_config_name,
            "project_id": project_id,
            "project_name": project_name,
            "plugin_id": plugin_id,
            "published_file_entity_type": "PublishedFile",
            "use_bundle_cache": True,
            "bundle_cache_fallback_roots": bundle_cache_fallback_paths,
            "use_shotgun_path_cache": True
        }

        # write pipeline_configuration.yml
        pipeline_config_path = os.path.join(
            self._path.current_os,
            "config",
            "core",
            constants.PIPELINECONFIG_FILE
        )

        with self._open_auto_created_yml(pipeline_config_path) as fh:
            yaml.safe_dump(pipeline_config_content, fh)
            fh.write("\n")
            fh.write("# End of file.\n")
Example #38
        fh = open(pipe_config_sg_id_path, "wt")
        # using safe_dump instead of dump ensures that we
        # don't serialize any non-std yaml content. In particular,
        # this causes issues if a unicode object containing a 7-bit
        # ascii string is passed as part of the data. in this case,
        # dump will write out a special format which is later on
        # *loaded in* as a unicode object, even if the content doesn't
        # need unicode handling. And this causes issues down the line
        # in toolkit code, assuming strings:
        #
        # >>> yaml.dump({"foo": u"bar"})
        # "{foo: !!python/unicode 'bar'}\n"
        # >>> yaml.safe_dump({"foo": u"bar"})
        # '{foo: bar}\n'
        #
        yaml.safe_dump(data, fh)
        fh.close()
    except Exception as exp:
        raise TankError(
            "Could not write to pipeline configuration cache file %s. "
            "Error reported: %s" % (pipe_config_sg_id_path, exp))

    ##########################################################################################
    # install apps

    # We now have a fully functional tank setup! Time to start it up...
    tk = sgtk_from_path(config_location_curr_os)
    log.debug("Instantiated tk instance: %s" % tk)
    pc = tk.pipeline_configuration

    # each entry in the config template contains instructions about which version of the app