Esempio n. 1
0
    def _update_deploy_file(self,
                            generation=None,
                            descriptor=None,
                            corrupt=False):
        """
        Rewrites the deploy metadata file on disk.

        :param generation: If set, the deploy generation number is replaced
            with this value.
        :param descriptor: If set, the config descriptor is replaced with
            this value.
        :param corrupt: If True, the file content is replaced with an
            invalid payload instead.
        """
        path = self._cached_config._config_writer.get_descriptor_metadata_file(
        )
        if not corrupt:
            # Load the current metadata and patch in the requested values.
            with open(path, "rt") as fh:
                data = yaml.load(fh, Loader=yaml.FullLoader)
            if generation is not None:
                data["deploy_generation"] = generation
            if descriptor is not None:
                data["config_descriptor"] = descriptor
        else:
            data = "corrupted"

        with open(path, "wt") as fh:
            yaml.dump(data, fh)
Esempio n. 2
0
    def test_flexible_primary(self):
        """
        Tests getting storage paths back from a pipeline config without a 'primary'
        storage.
        """

        # take one path out and mark as undefined
        new_roots = copy.deepcopy(self.roots)
        new_roots["master"] = new_roots.pop("primary")
        # Context manager guarantees the handle is closed.
        with open(self.root_file_path, "w") as root_file:
            root_file.write(yaml.dump(new_roots))
        # We should get a TankError if we don't have a primary storage in a
        # multi-roots file.
        # assertRaisesRegexp is deprecated; assertRaisesRegex is the
        # supported spelling (consistent with the sibling version of this test).
        with self.assertRaisesRegex(TankError, "Could not identify a default storage"):
            pc = tank.pipelineconfig_factory.from_path(self.project_root)
        # Only keep the master storage
        del new_roots["publish"]
        del new_roots["render"]
        with open(self.root_file_path, "w") as root_file:
            root_file.write(yaml.dump(new_roots))
        pc = tank.pipelineconfig_factory.from_path(self.project_root)
        # dict views do not compare equal to lists on Python 3; convert first.
        self.assertEqual(list(pc.get_all_platform_data_roots().keys()), ["master"])
        self.assertEqual(list(pc.get_data_roots().keys()), ["master"])
        self.assertEqual(self.project_root, pc.get_primary_data_root())
Esempio n. 3
0
    def turn_on_shotgun_path_cache(self):
        """
        Updates the pipeline configuration settings to have the shotgun based (v0.15+)
        path cache functionality enabled.

        Note that you need to force a full path sync once this command has been executed.

        :raises TankError: If the path cache is already enabled or the
            settings file cannot be written.
        """

        if self.get_shotgun_path_cache_enabled():
            raise TankError("Shotgun based path cache already turned on!")

        # get current settings
        curr_settings = pipelineconfig_utils.get_metadata(self._pc_root)

        # add path cache setting
        curr_settings["use_shotgun_path_cache"] = True

        # write the record to disk
        pipe_config_sg_id_path = os.path.join(self._pc_root, "config", "core",
                                              "pipeline_configuration.yml")

        old_umask = os.umask(0)
        try:
            # 0o666: world read/write; 0o-prefixed octal works on Python 2.6+ and 3.
            os.chmod(pipe_config_sg_id_path, 0o666)
            # write the new file; the context manager guarantees the handle is
            # closed even if yaml.dump raises (the original leaked it).
            with open(pipe_config_sg_id_path, "wt") as fh:
                yaml.dump(curr_settings, fh)
        except Exception as exp:
            raise TankError(
                "Could not write to pipeline configuration settings file %s. "
                "Error reported: %s" % (pipe_config_sg_id_path, exp))
        finally:
            # restore the process-wide umask (the original left it at 0).
            os.umask(old_umask)
 def turn_on_shotgun_path_cache(self):
     """
     Updates the pipeline configuration settings to have the shotgun based (v0.15+)
     path cache functionality enabled.

     Note that you need to force a full path sync once this command has been executed.

     :raises TankError: If the path cache is already enabled or the
         settings file cannot be written.
     """

     if self.get_shotgun_path_cache_enabled():
         raise TankError("Shotgun based path cache already turned on!")

     # get current settings
     curr_settings = pipelineconfig_utils.get_metadata(self._pc_root)

     # add path cache setting
     curr_settings["use_shotgun_path_cache"] = True

     # write the record to disk
     pipe_config_sg_id_path = os.path.join(self._pc_root, "config", "core", "pipeline_configuration.yml")

     old_umask = os.umask(0)
     try:
         # 0o666: world read/write; 0o-prefixed octal works on Python 2.6+ and 3.
         os.chmod(pipe_config_sg_id_path, 0o666)
         # write the new file; the context manager guarantees the handle is
         # closed even if yaml.dump raises (the original leaked it).
         with open(pipe_config_sg_id_path, "wt") as fh:
             yaml.dump(curr_settings, fh)
     except Exception as exp:
         raise TankError("Could not write to pipeline configuration settings file %s. "
                         "Error reported: %s" % (pipe_config_sg_id_path, exp))
     finally:
         # restore the process-wide umask (the original left it at 0).
         os.umask(old_umask)
Esempio n. 5
0
    def test_flexible_primary(self):
        """
        Tests getting storage paths back from a pipeline config without a 'primary'
        storage.
        """

        # take one path out and mark as undefined
        new_roots = copy.deepcopy(self.roots)
        new_roots["master"] = new_roots.pop("primary")
        # Context manager guarantees the handle is closed.
        with open(self.root_file_path, "w") as root_file:
            root_file.write(yaml.dump(new_roots))
        # We should get a TankError if we don't have a primary storage in a
        # multi-roots file.
        with self.assertRaisesRegex(TankError,
                                    "Could not identify a default storage"):
            pc = tank.pipelineconfig_factory.from_path(self.project_root)
        # Only keep the master storage
        del new_roots["publish"]
        del new_roots["render"]
        with open(self.root_file_path, "w") as root_file:
            root_file.write(yaml.dump(new_roots))
        pc = tank.pipelineconfig_factory.from_path(self.project_root)
        # dict views do not compare equal to lists on Python 3; convert first.
        self.assertEqual(list(pc.get_all_platform_data_roots().keys()), ["master"])
        self.assertEqual(list(pc.get_data_roots().keys()), ["master"])
        self.assertEqual(self.project_root, pc.get_primary_data_root())
Esempio n. 6
0
    def _update_pipeline_config(self, updates):
        """
        Updates the pipeline configuration on disk with the passed in values.

        :param updates: Dictionary of values to update in the pipeline configuration
        :raises TankError: If the settings file cannot be written.
        """
        # get current settings
        curr_settings = pipelineconfig_utils.get_metadata(self._pc_root)

        # merge in the requested updates
        curr_settings.update(updates)

        # write the record to disk
        pipe_config_sg_id_path = os.path.join(self._pc_root, "config", "core",
                                              "pipeline_configuration.yml")

        old_umask = os.umask(0)
        try:
            # 0o666: world read/write; 0o-prefixed octal works on Python 2.6+ and 3.
            os.chmod(pipe_config_sg_id_path, 0o666)
            # write the new file; the context manager guarantees the handle is
            # closed even if yaml.dump raises (the original leaked it).
            with open(pipe_config_sg_id_path, "wt") as fh:
                yaml.dump(curr_settings, fh)
        except Exception as exp:
            raise TankError(
                "Could not write to pipeline configuration settings file %s. "
                "Error reported: %s" % (pipe_config_sg_id_path, exp))
        finally:
            # restore the process-wide umask (the original left it at 0).
            os.umask(old_umask)
Esempio n. 7
0
 def __write_data(self, path, data):
     """
     Writes the main data to disk, raw form.

     :param path: Path of the environment file to write.
     :param data: Data structure to serialize as yaml.
     :raises TankError: If the file could not be written.
     """
     try:
         # Context manager guarantees the handle is closed (the original
         # leaked it); 'except X as e' replaces the Python-2-only syntax.
         with open(path, "wt") as env_file:
             yaml.dump(data, env_file)
     except Exception as exp:
         raise TankError("Could not write environment file %s. Error reported: %s" % (path, exp))
Esempio n. 8
0
 def __write_data(self, path, data):
     """
     Writes the main data to disk, raw form.

     :param path: Path of the environment file to write.
     :param data: Data structure to serialize as yaml.
     :raises TankError: If the file could not be written.
     """
     try:
         # Context manager guarantees the handle is closed (the original
         # leaked it); 'except X as e' replaces the Python-2-only syntax.
         with open(path, "wt") as env_file:
             yaml.dump(data, env_file)
     except Exception as exp:
         raise TankError(
             "Could not write environment file %s. Error reported: %s" %
             (path, exp))
Esempio n. 9
0
def _write_yaml_file(file_path, users_data):
    """
    Writes the yaml file at a given location.

    :param file_path: Where to write the users data
    :param users_data: Dictionary to write to disk.
    """
    # 0o077 masks out all group/other permissions so the file is only
    # accessible by the owner. The bare 0077 literal is a syntax error on
    # Python 3; the 0o prefix works on Python 2.6+ as well.
    old_umask = os.umask(0o077)
    try:
        with open(file_path, "w") as users_file:
            yaml.dump(users_data, users_file)
    finally:
        # Always restore the process-wide umask.
        os.umask(old_umask)
Esempio n. 10
0
def _write_yaml_file(file_path, users_data):
    """
    Writes the yaml file at a given location.

    :param file_path: Where to write the users data
    :param users_data: Dictionary to write to disk.
    """
    # 0o077 masks out all group/other permissions so the file is only
    # accessible by the owner. The bare 0077 literal is a syntax error on
    # Python 3; the 0o prefix works on Python 2.6+ as well.
    old_umask = os.umask(0o077)
    try:
        with open(file_path, "w") as users_file:
            yaml.dump(users_data, users_file)
    finally:
        # Always restore the process-wide umask.
        os.umask(old_umask)
Esempio n. 11
0
    def _add_snapshot_comment(self, snapshot_file_path, comment):
        """
        Add a comment to the comment file for a snapshot file.  The comments are stored
        in the following format:

        {<snapshot file name> : {
            comment:    String - comment to store
            sg_user:    Shotgun entity dictionary representing the user that created the snapshot
            }
         ...
        }

        :param str snapshot_file_path: path to the snapshot file.
        :param str comment: comment string to save.

        """
        # validate to make sure path is sane
        if not self._snapshot_template.validate(snapshot_file_path):
            self._app.log_warning("Could not add comment to "
                                  "invalid snapshot path %s!" %
                                  snapshot_file_path)
            return

        # get comments file path:
        comments_file_path = self._get_comments_file_path(snapshot_file_path)
        self._app.log_debug("Snapshot: Adding comment to file %s" %
                            comments_file_path)

        # load yml file
        comments = {}
        if os.path.exists(comments_file_path):
            with open(comments_file_path, "r") as fp:
                # Pass an explicit Loader: yaml.load without one is deprecated
                # and unsafe; FullLoader matches the rest of the code base.
                comments = yaml.load(fp, Loader=yaml.FullLoader) or {}

        # comment is now a dictionary so that we can also include the user:
        comments_value = {
            "comment": comment,
            "sg_user": self._app.context.user
        }

        # add entry for snapshot file:
        comments_key = os.path.basename(snapshot_file_path)
        comments[comments_key] = comments_value

        # and save yml file
        old_umask = os.umask(0)
        try:
            with open(comments_file_path, "w") as fp:
                yaml.dump(comments, fp)
        finally:
            os.umask(old_umask)
def write_location(descriptor):
    """
    Writes the descriptor dictionary to disk in the BUNDLE_ROOT/resources/location.yml file.

    :param descriptor: Descriptor whose location dictionary is serialized.
    """
    # Local import since sgtk is lazily loaded.
    from tank_vendor import yaml
    # 0o077 restricts the file to the owner. The bare 0077 literal is a
    # syntax error on Python 3; 0o works on Python 2.6+ as well.
    old_umask = os.umask(0o077)
    try:
        # Write the toolkit descriptor information to disk.
        # Note: the original wrapped this in a single-argument os.path.join,
        # which is a no-op and has been dropped.
        location_yml_path = _get_location_yaml_location(descriptor.get_path())
        with open(location_yml_path, "w") as location_yml:
            location_yml.write(copyright)
            yaml.dump(descriptor.get_location(), location_yml)
    finally:
        os.umask(old_umask)
Esempio n. 13
0
def write_location(descriptor):
    """
    Writes the descriptor dictionary to disk in the BUNDLE_ROOT/resources/location.yml file.

    :param descriptor: Descriptor whose location dictionary is serialized.
    """
    # Local import since sgtk is lazily loaded.
    from tank_vendor import yaml
    # 0o077 restricts the file to the owner. The bare 0077 literal is a
    # syntax error on Python 3; 0o works on Python 2.6+ as well.
    old_umask = os.umask(0o077)
    try:
        # Write the toolkit descriptor information to disk.
        # Note: the original wrapped this in a single-argument os.path.join,
        # which is a no-op and has been dropped.
        location_yml_path = _get_location_yaml_location(descriptor.get_path())
        with open(location_yml_path, "w") as location_yml:
            location_yml.write(copyright)
            yaml.dump(descriptor.get_location(), location_yml)
    finally:
        os.umask(old_umask)
Esempio n. 14
0
    def test_installed_config_manifest(self):
        """
        Ensures the manifest is read correctly.
        """
        # Write a manifest file for the pipeline configuration.
        manifest_path = os.path.join(self.pipeline_config_root, "config", "info.yml")
        manifest = {
            "display_name": "Unit Test Configuration",
            "requires_shotgun_version": "v6.3.0",
            "requires_core_version": "HEAD",
        }
        with open(manifest_path, "w") as fh:
            fh.write(yaml.dump(manifest))

        self.assertEqual(
            self.tk.configuration_descriptor.display_name,
            "Unit Test Configuration",
        )
        # No description in the manifest, so the default text is reported.
        self.assertEqual(
            self.tk.configuration_descriptor.description,
            "No description available.",
        )
        self.assertEqual(
            self.tk.configuration_descriptor.version_constraints["min_sg"],
            "v6.3.0",
        )
        self.assertEqual(
            self.tk.configuration_descriptor.version_constraints["min_core"],
            "HEAD",
        )
Esempio n. 15
0
    def test_all_paths(self):
        """
        Tests getting storage paths back from a pipeline config.
        """

        # take one path out and mark as undefined
        new_roots = copy.deepcopy(self.roots)
        new_roots["render"]["linux_path"] = None

        # Context manager guarantees the handle is closed (original leaked it).
        with open(self.root_file_path, "w") as root_file:
            root_file.write(yaml.dump(new_roots))

        pc = tank.pipelineconfig_factory.from_path(self.project_root)
        result = pc.get_all_platform_data_roots()

        # maps sys.platform style names to shotgun storage fields
        platform_lookup = {"win32": "windows_path", "darwin": "mac_path", "linux2": "linux_path"}

        project_name = os.path.basename(self.project_root)
        for root_name, platform_paths in result.items():
            for platform in platform_paths:
                root_path = platform_paths[platform]
                shotgun_path_key = platform_lookup[platform]
                if new_roots[root_name][shotgun_path_key] is None:
                    # storage undefined for this platform -> no path expected
                    expected_path = None
                elif platform == "win32":
                    expected_path = "%s\\%s" % (new_roots[root_name][shotgun_path_key], project_name)
                else:
                    expected_path = "%s/%s" % (new_roots[root_name][shotgun_path_key], project_name)
                self.assertEqual(expected_path, root_path)
Esempio n. 16
0
    def test_self_contained_config_core_descriptor(self):
        """
        Ensures that a configuration with a local bundle cache can return a core
        descriptor that points inside the configuration if the core is cached there.
        """
        config_root = os.path.join(self.tank_temp, "self_contained_config")
        core_location = os.path.join(
            config_root, "bundle_cache", "app_store", "tk-core", "v0.18.133"
        )
        # Fake a cached core bundle and a core_api.yml pointing at it.
        self.create_file(os.path.join(core_location, "info.yml"), "")
        core_api_payload = {"location": {"type": "app_store", "name": "tk-core", "version": "v0.18.133"}}
        self.create_file(
            os.path.join(config_root, "core", "core_api.yml"),
            yaml.dump(core_api_payload)
        )

        config_desc = create_descriptor(
            self.mockgun,
            Descriptor.CONFIG,
            "sgtk:descriptor:path?path={0}".format(config_root)
        )
        # The resolved core must live inside the config's bundle cache.
        core_desc = config_desc.resolve_core_descriptor()
        self.assertEqual(core_desc.get_path(), core_location)
Esempio n. 17
0
    def test_installed_config_manifest(self):
        """
        Ensures the manifest is read correctly.
        """
        # Create a manifest file for the pipeline configuration.
        info_yml = os.path.join(self.pipeline_config_root, "config", "info.yml")
        with open(info_yml, "w") as fh:
            fh.write(yaml.dump({
                "display_name": "Unit Test Configuration",
                "requires_shotgun_version": "v6.3.0",
                "requires_core_version": "HEAD",
            }))

        self.assertEqual(
            self.tk.configuration_descriptor.display_name,
            "Unit Test Configuration",
        )
        # The manifest carries no description, so a default is expected.
        self.assertEqual(
            self.tk.configuration_descriptor.description,
            "No description available.",
        )
        self.assertEqual(
            self.tk.configuration_descriptor.version_constraints["min_sg"],
            "v6.3.0",
        )
        self.assertEqual(
            self.tk.configuration_descriptor.version_constraints["min_core"],
            "HEAD",
        )
Esempio n. 18
0
    def create_storage_root(self,
                            project_name,
                            root_name,
                            update_roots_file=True):
        """
        Convenience method to create a storage root (e.g. an alternate project root)

        :param project_name: Name of the project folder under the storage root.
        :param root_name: Name of the storage root to create.
        :param update_roots_file: If True, config/core/roots.yml is rewritten
            to include the new root.
        :returns: Tuple of (storage root path, storage entity dictionary).
        """

        storage_root_name = os.path.join(self.tank_temp, root_name,
                                         project_name)
        storage = {
            "type": "LocalStorage",
            "code": root_name,
            "windows_path": os.path.join(self.tank_temp, root_name),
            "linux_path": os.path.join(self.tank_temp, root_name),
            "mac_path": os.path.join(self.tank_temp, root_name),
        }
        self.add_to_sg_mock_db(storage)

        if self._do_io and update_roots_file:
            if not self.roots:
                self.roots = {}

            self.roots[root_name] = {}
            for os_name in ["windows_path", "linux_path", "mac_path"]:
                self.roots[root_name][os_name] = os.path.dirname(
                    storage_root_name)

            roots_path = os.path.join(self.pipeline_config_root, "config",
                                      "core", "roots.yml")
            # Context manager guarantees the handle is closed (the original
            # leaked it if write() raised).
            with open(roots_path, "w") as roots_file:
                roots_file.write(yaml.dump(self.roots))

        return (storage_root_name, storage)
Esempio n. 19
0
    def test_self_contained_config_core_descriptor(self):
        """
        Ensures that a configuration with a local bundle cache can return a core
        descriptor that points inside the configuration if the core is cached there.
        """
        config_root = os.path.join(self.tank_temp, "self_contained_config")
        core_location = os.path.join(config_root, "bundle_cache", "app_store",
                                     "tk-core", "v0.18.133")
        # Fake a cached core and the core_api.yml that references it.
        self.create_file(os.path.join(core_location, "info.yml"), "")
        core_api_payload = yaml.dump({
            "location": {
                "type": "app_store",
                "name": "tk-core",
                "version": "v0.18.133",
            }
        })
        self.create_file(os.path.join(config_root, "core", "core_api.yml"),
                         core_api_payload)

        descriptor_uri = "sgtk:descriptor:path?path={0}".format(config_root)
        config_desc = create_descriptor(self.mockgun, Descriptor.CONFIG,
                                        descriptor_uri)
        # The resolved core must live inside the config's bundle cache.
        core_desc = config_desc.resolve_core_descriptor()
        self.assertEqual(core_desc.get_path(), core_location)
    def test_project_root_mismatch(self):
        """
        Case that root name specified in projects yml file does not exist in roots file.
        """
        # remove root name from the roots file
        self.setup_multi_root_fixtures()

        # should be fine
        folder.configuration.FolderConfiguration(self.tk, self.schema_location)

        roots_file = os.path.join(
            self.tk.pipeline_configuration.get_path(),
            "config",
            "core",
            "schema",
            "alternate_1.yml",
        )

        # Context managers guarantee the handles are closed (the originals
        # leaked on exceptions between open() and close()).
        with open(roots_file, "r") as fh:
            data = yaml.load(fh, Loader=yaml.FullLoader)
        data["root_name"] = "some_bogus_Data"
        with open(roots_file, "w") as fh:
            fh.write(yaml.dump(data))

        self.assertRaises(
            TankError,
            folder.configuration.FolderConfiguration,
            self.tk,
            self.schema_location,
        )
Esempio n. 21
0
    def test_project_root_mismatch(self):
        """
        Case that root name specified in projects yml file does not exist in roots file.
        """
        # remove root name from the roots file
        self.setup_multi_root_fixtures()
        self.tk = tank.Tank(self.project_root)

        # should be fine
        folder.configuration.FolderConfiguration(self.tk, self.schema_location)

        roots_file = os.path.join(self.tk.pipeline_configuration.get_path(), "config", "core", "schema", "alternate_1.yml")

        # Context managers guarantee the handles are closed; yaml.load without
        # an explicit Loader is deprecated (FullLoader matches the sibling test).
        with open(roots_file, "r") as fh:
            data = yaml.load(fh, Loader=yaml.FullLoader)
        data["root_name"] = "some_bogus_Data"
        with open(roots_file, "w") as fh:
            fh.write(yaml.dump(data))

        self.tk = tank.Tank(self.project_root)

        self.assertRaises(TankError,
                          folder.configuration.FolderConfiguration,
                          self.tk,
                          self.schema_location)
    def setUp(self):
        """
        Sets up a project named after self.PROJECT_NAME, patches the primary
        storage to self.STORAGE_ROOT and rewrites roots.yml accordingly.
        """
        # set up a project named temp, so that it will end up in c:\temp
        super(TestTankFromPathWindowsNoSlash,
              self).setUp(parameters={"project_tank_name": self.PROJECT_NAME})

        # set up std fixtures
        self.setup_fixtures()

        # patch primary local storage def
        self.primary_storage["windows_path"] = self.STORAGE_ROOT
        # re-add it
        self.add_to_sg_mock_db(self.primary_storage)

        # now re-write roots.yml
        roots = {"primary": {}}
        for os_name in ["windows_path", "linux_path", "mac_path"]:
            # TODO make os specific roots
            roots["primary"][os_name] = self.sg_pc_entity[os_name]
        roots_path = os.path.join(self.pipeline_config_root, "config", "core",
                                  "roots.yml")
        # Context manager guarantees the handle is closed (original leaked it).
        with open(roots_path, "w") as roots_file:
            roots_file.write(yaml.dump(roots))

        # need a new pipeline config object that is
        # using the new roots def file we just created
        self.pipeline_configuration = sgtk.pipelineconfig_factory.from_path(
            self.pipeline_config_root)
        # push this new pipeline config into the tk api
        self.tk._Tank__pipeline_config = self.pipeline_configuration
        # force reload templates
        self.tk.reload_templates()
Esempio n. 23
0
    def setUp(self):
        """
        Sets up a project named after self.PROJECT_NAME, patches the primary
        storage to self.STORAGE_ROOT and rewrites roots.yml accordingly.
        """
        # set up a project named temp, so that it will end up in c:\temp

        super(TestTankFromPathWindowsNoSlash, self).setUp(project_tank_name=self.PROJECT_NAME)

        # set up std fixtures
        self.setup_fixtures()

        # patch primary local storage def
        self.primary_storage["windows_path"] = self.STORAGE_ROOT
        # re-add it
        self.add_to_sg_mock_db(self.primary_storage)

        # now re-write roots.yml
        roots = {"primary": {}}
        for os_name in ["windows_path", "linux_path", "mac_path"]:
            # TODO make os specific roots
            roots["primary"][os_name] = self.sg_pc_entity[os_name]
        roots_path = os.path.join(self.pipeline_config_root,
                                  "config",
                                  "core",
                                  "roots.yml")
        # Context manager guarantees the handle is closed (original leaked it).
        with open(roots_path, "w") as roots_file:
            roots_file.write(yaml.dump(roots))

        # need a new PC object that is using the new roots def file we just created
        self.pipeline_configuration = sgtk.pipelineconfig_factory.from_path(self.pipeline_config_root)
        # push this new PC into the tk api
        self.tk._Tank__pipeline_config = self.pipeline_configuration
        # force reload templates
        self.tk.reload_templates()
Esempio n. 24
0
    def test_all_paths(self):
        """
        Tests getting storage paths back from a pipeline config.
        """

        # take one path out and mark as undefined
        new_roots = copy.deepcopy(self.roots)
        new_roots["render"]["linux_path"] = None

        # Context manager guarantees the handle is closed (original leaked it).
        with open(self.root_file_path, "w") as root_file:
            root_file.write(yaml.dump(new_roots))

        pc = tank.pipelineconfig_factory.from_path(self.project_root)
        result = pc.get_all_platform_data_roots()

        # maps sys.platform style names to shotgun storage fields
        platform_lookup = {"win32": "windows_path", "darwin": "mac_path", "linux2": "linux_path"}

        project_name = os.path.basename(self.project_root)
        for root_name, platform_paths in result.items():
            for platform in platform_paths:
                root_path = platform_paths[platform]
                shotgun_path_key = platform_lookup[platform]
                if new_roots[root_name][shotgun_path_key] is None:
                    # storage undefined for this platform -> no path expected
                    expected_path = None
                elif platform == "win32":
                    expected_path = "%s\\%s" % (new_roots[root_name][shotgun_path_key], project_name)
                else:
                    expected_path = "%s/%s" % (new_roots[root_name][shotgun_path_key], project_name)
                self.assertEqual(expected_path, root_path)
    def save_comments(self, file_path, comment):
        """
        Add a comment to the comment file for the saved file.  The comments
        are stored in the following format:

        {<file name> : {
            comment:    String - comment to store
            sg_user:    Shotgun entity dictionary representing the user that created the snapshot
            }
         ...
        }

        :param str file_path: path to the snapshot file.
        :param str comment: comment string to save.
        """

        # cleanse the comment string: replace characters that would break
        # the yaml serialization with underscores
        orig_comment = comment
        comment = ""
        for c in orig_comment:
            if c in ['\n', ';', '\'', '}', '{', '`', '~', ':', '@', '<', '>', '\\']:
                comment += '_'
            else:
                comment += c

        # get comments file path:
        comments_file_path = self._get_comments_file_path(file_path)
        self._app.log_debug("Save_As: Adding comment to file %s" % comments_file_path)

        # load yml file
        comments = {}
        if os.path.exists(comments_file_path):
            # Context manager closes the handle (the original never did);
            # explicit Loader because bare yaml.load is deprecated; 'or {}'
            # guards against an empty file yielding None, which would make
            # the assignment below crash.
            with open(comments_file_path, "r") as fp:
                comments = yaml.load(fp, Loader=yaml.FullLoader) or {}

        # comment is now a dictionary so that we can also include the user:
        comments_value = {"comment": comment, "sg_user": self._app.context.user}

        # add entry for snapshot file:
        comments_key = os.path.basename(file_path)
        comments[comments_key] = comments_value

        # and save yml file
        old_umask = os.umask(0)
        try:
            with open(comments_file_path, "w") as fp:
                yaml.dump(comments, fp)
        finally:
            os.umask(old_umask)
Esempio n. 26
0
    def setUp(self):
        """
        Sets up two overlapping storages and two projects, then rewrites
        roots.yml so the pipeline configuration uses the overlapping roots.
        """
        # set up two storages and two projects
        super(TestTankFromPathOverlapStorage, self).setUp(project_tank_name="foo")

        # add second project
        self.project_2 = {"type": "Project",
                          "id": 2345,
                          "tank_name": "bar",
                          "name": "project_name"}

        # define entity for pipeline configuration
        self.project_2_pc = {"type": "PipelineConfiguration",
                             "code": "Primary",
                             "id": 123456,
                             "project": self.project_2,
                             "windows_path": "F:\\temp\\bar_pc",
                             "mac_path": "/tmp/bar_pc",
                             "linux_path": "/tmp/bar_pc"}

        self.add_to_sg_mock_db(self.project_2)
        self.add_to_sg_mock_db(self.project_2_pc)

        # set up std fixtures
        self.setup_multi_root_fixtures()

        # patch storages so that storage 2 is nested inside storage 1
        self.alt_storage_1["windows_path"] = "C:\\temp"
        self.alt_storage_1["mac_path"] = "/tmp"
        self.alt_storage_1["linux_path"] = "/tmp"

        self.alt_storage_2["windows_path"] = "C:\\temp\\foo"
        self.alt_storage_2["mac_path"] = "/tmp/foo"
        self.alt_storage_2["linux_path"] = "/tmp/foo"

        self.add_to_sg_mock_db(self.alt_storage_1)
        self.add_to_sg_mock_db(self.alt_storage_2)

        # Write roots file
        roots = {"primary": {}, "alternate_1": {}, "alternate_2": {}}
        for os_name in ["windows_path", "linux_path", "mac_path"]:
            roots["primary"][os_name] = os.path.dirname(self.project_root)
            roots["alternate_1"][os_name] = self.alt_storage_1[os_name]
            roots["alternate_2"][os_name] = self.alt_storage_2[os_name]
        roots_path = os.path.join(self.pipeline_config_root, "config", "core", "roots.yml")
        # Context manager guarantees the handle is closed (original leaked it).
        with open(roots_path, "w") as roots_file:
            roots_file.write(yaml.dump(roots))

        # need a new PC object that is using the new roots def file we just created
        self.pipeline_configuration = sgtk.pipelineconfig_factory.from_path(self.pipeline_config_root)
        # push this new PC into the tk api
        self.tk._Tank__pipeline_config = self.pipeline_configuration
        # force reload templates
        self.tk.reload_templates()
Esempio n. 27
0
    def _setup_fixtures(self, name="config", parameters=None):
        """
        Sets up the fixtures config for a test.

        :param name: Name of the fixtures config folder to use. Defaults to "config".
        :param parameters: Optional dictionary tweaking the setup. Keys read by
            this method: "core" (relative path to an alternate core folder
            inside the config), "installed_config" (bool, forces the fixture
            files to be copied) and "skip_template_reload" (bool, skips the
            final template reload).
        """

        parameters = parameters or {}

        # figure out root point of fixtures config
        config_root = os.path.join(self.fixtures_root, name)

        # first figure out core location
        if "core" in parameters:
            # This config is not simple, as it is pieced together from an env and
            # hooks folder in one location and the core from another.
            simple_config = False
            # convert slashes to be windows friendly
            core_path_suffix = parameters["core"].replace("/", os.sep)
            core_source = os.path.join(config_root, core_path_suffix)
        else:
            # This config is simple, as it is based on a config that is layed out into a single folder.
            simple_config = True
            # use the default core fixture
            core_source = os.path.join(config_root, "core")

        # Check if the tests wants the files to be copied.
        installed_config = parameters.get("installed_config", False)

        # If the config is not simple, or the tests want the files to be copied
        if not simple_config or installed_config:
            # copy core over to target
            core_target = os.path.join(self.project_config, "core")
            self._copy_folder(core_source, core_target)
            # now copy the rest of the config fixtures
            for config_folder in ["env", "hooks", "bundles"]:
                config_source = os.path.join(config_root, config_folder)
                if os.path.exists(config_source):
                    config_target = os.path.join(self.project_config, config_folder)
                    self._copy_folder(config_source, config_target)
        else:
            # We're going to be using a cached configuration, so set up the source_descriptor.
            pc_yml_location = os.path.join(self.pipeline_config_root, "config", "core", "pipeline_configuration.yml")
            with open(pc_yml_location, "r") as fh:
                pc_data = yaml.safe_load(fh)
            pc_data["source_descriptor"] = {"path": config_root, "type": "path"}
            with open(pc_yml_location, "w") as fh:
                fh.write(yaml.dump(pc_data))

            # Update where the config root variable points to.
            self.project_config = config_root

        # need to reload the pipeline config to respect the config data from
        # the fixtures
        self.reload_pipeline_config()

        if not ("skip_template_reload" in parameters and parameters["skip_template_reload"]):
            # no skip_template_reload flag set to true. So go ahead and reload
            self.tk.reload_templates()
Esempio n. 28
0
    def setUp(self):
        """
        Sets up two overlapping storages and two projects for the tests.
        """
        super(TestTankFromPathOverlapStorage, self).setUp(project_tank_name="foo")

        # Second project, rooted at "bar".
        self.project_2 = {
            "type": "Project",
            "id": 2345,
            "tank_name": "bar",
            "name": "project_name",
        }

        # Pipeline configuration entity attached to the second project.
        self.project_2_pc = {
            "type": "PipelineConfiguration",
            "code": "Primary",
            "id": 123456,
            "project": self.project_2,
            "windows_path": "F:\\temp\\bar_pc",
            "mac_path": "/tmp/bar_pc",
            "linux_path": "/tmp/bar_pc",
        }

        self.add_to_sg_mock_db(self.project_2)
        self.add_to_sg_mock_db(self.project_2_pc)

        # set up std fixtures
        self.setup_multi_root_fixtures()

        # Re-point the alternate storages so they overlap with /tmp and
        # /tmp/foo, then register them in the mock database.
        overlapping_paths = [
            (self.alt_storage_1,
             {"windows_path": "C:\\temp", "mac_path": "/tmp", "linux_path": "/tmp"}),
            (self.alt_storage_2,
             {"windows_path": "C:\\temp\\foo", "mac_path": "/tmp/foo", "linux_path": "/tmp/foo"}),
        ]
        for storage, paths in overlapping_paths:
            storage.update(paths)

        self.add_to_sg_mock_db(self.alt_storage_1)
        self.add_to_sg_mock_db(self.alt_storage_2)

        # Write a roots file covering all three storages.
        roots = {"primary": {}, "alternate_1": {}, "alternate_2": {}}
        for os_key in ["windows_path", "linux_path", "mac_path"]:
            roots["primary"][os_key] = os.path.dirname(self.project_root)
            roots["alternate_1"][os_key] = self.alt_storage_1[os_key]
            roots["alternate_2"][os_key] = self.alt_storage_2[os_key]
        roots_path = os.path.join(self.pipeline_config_root, "config", "core", "roots.yml")
        with open(roots_path, "w") as roots_fh:
            roots_fh.write(yaml.dump(roots))

        # Build a new PC object from the roots file we just wrote, push it
        # into the tk API and force a template reload.
        self.pipeline_configuration = sgtk.pipelineconfig_factory.from_path(self.pipeline_config_root)
        self.tk._Tank__pipeline_config = self.pipeline_configuration
        self.tk.reload_templates()
Esempio n. 29
0
    def _setup_fixtures(self, name="config", parameters=None):
        """
        See doc for setup fixtures.

        :param name: Name of the fixture configuration folder to use.
        :param parameters: Optional dict with keys "core" (relative path to a
            core override), "installed_config" (force file copies) and
            "skip_template_reload" (skip the final template reload).
        """

        parameters = parameters or {}

        # figure out root point of fixtures config
        config_root = os.path.join(self.fixtures_root, name)

        # first figure out core location
        if "core" in parameters:
            # This config is not simple, as it is pieced together from an env and
            # hooks folder in one location and the core from another.
            simple_config = False
            # convert slashes to be windows friendly
            core_path_suffix = parameters["core"].replace("/", os.sep)
            core_source = os.path.join(config_root, core_path_suffix)
        else:
            # This config is simple, as it is based on a config that is laid out in a single folder.
            simple_config = True
            # use the default core fixture
            core_source = os.path.join(config_root, "core")

        # Check if the test wants the files to be copied.
        installed_config = parameters.get("installed_config", False)

        # If the config is not simple or the test wants the files to be copied
        if not simple_config or installed_config:
            # copy core over to target
            core_target = os.path.join(self.project_config, "core")
            self._copy_folder(core_source, core_target)
            # now copy the rest of the config fixtures
            for config_folder in ["env", "hooks", "bundles"]:
                config_source = os.path.join(config_root, config_folder)
                if os.path.exists(config_source):
                    config_target = os.path.join(self.project_config, config_folder)
                    self._copy_folder(config_source, config_target)
        else:
            # We're going to be using a cached configuration, so set up the source_descriptor.
            pc_yml_location = os.path.join(self.pipeline_config_root, "config", "core", "pipeline_configuration.yml")
            with open(pc_yml_location, "r") as fh:
                pc_data = yaml.safe_load(fh)
            pc_data["source_descriptor"] = {"path": config_root, "type": "path"}
            with open(pc_yml_location, "w") as fh:
                fh.write(yaml.dump(pc_data))

            # Update where the config root variable points to.
            self.project_config = config_root

        # need to reload the pipeline config to respect the config data from
        # the fixtures
        self.reload_pipeline_config()

        # Idiomatic form of the original "key in dict and dict[key]" check.
        if not parameters.get("skip_template_reload", False):
            # no skip_template_reload flag set to true. So go ahead and reload
            self.tk.reload_templates()
Esempio n. 30
0
    def _add_snapshot_comment(self, snapshot_file_path, comment):
        """
        Add a comment to the comment file for a snapshot file.  The comments are stored
        in the following format:

        {<snapshot file name> : {
            comment:    String - comment to store
            sg_user:    Shotgun entity dictionary representing the user that created the snapshot
            }
         ...
        }

        :param str snapshot_file_path: path to the snapshot file.
        :param str comment: comment string to save.
        """
        # validate to make sure path is sane
        if not self._snapshot_template.validate(snapshot_file_path):
            self._app.log_warning("Could not add comment to "
                                         "invalid snapshot path %s!" % snapshot_file_path)
            return

        # get comments file path:
        comments_file_path = self._get_comments_file_path(snapshot_file_path)
        self._app.log_debug("Snapshot: Adding comment to file %s" % comments_file_path)

        # load yml file - use a context manager so the handle is always closed
        # (the original leaked it) and safe_load so no arbitrary Python objects
        # can be instantiated from the file.
        comments = {}
        if os.path.exists(comments_file_path):
            with open(comments_file_path, "r") as fh:
                # an empty comments file loads as None - fall back to {}
                comments = yaml.safe_load(fh) or {}

        # comment is now a dictionary so that we can also include the user:
        comments_value = {"comment": comment, "sg_user": self._app.context.user}

        # add entry for snapshot file:
        comments_key = os.path.basename(snapshot_file_path)
        comments[comments_key] = comments_value

        # and save yml file - widen the umask so the file is group/world
        # writable, and always restore it afterwards
        old_umask = os.umask(0)
        try:
            with open(comments_file_path, "w") as fh:
                yaml.dump(comments, fh)
        finally:
            os.umask(old_umask)
Esempio n. 31
0
    def setup_multi_root_fixtures(self):
        """
        Helper method which sets up a standard multi-root set of fixtures
        """
        self.setup_fixtures(parameters={"core": "core.override/multi_root_core",
                                        "skip_template_reload": True})

        # Compute the two alternate project roots.
        project_folder = os.path.basename(self.project_root)
        self.alt_root_1 = os.path.join(self.tank_temp, "alternate_1", project_folder)
        self.alt_root_2 = os.path.join(self.tank_temp, "alternate_2", project_folder)

        # Register LocalStorage entities representing the alternate root points.
        alt_1_base = os.path.join(self.tank_temp, "alternate_1")
        self.alt_storage_1 = {
            "type": "LocalStorage",
            "id": 7778,
            "code": "alternate_1",
            "windows_path": alt_1_base,
            "linux_path": alt_1_base,
            "mac_path": alt_1_base,
        }
        self.add_to_sg_mock_db(self.alt_storage_1)

        alt_2_base = os.path.join(self.tank_temp, "alternate_2")
        self.alt_storage_2 = {
            "type": "LocalStorage",
            "id": 7779,
            "code": "alternate_2",
            "windows_path": alt_2_base,
            "linux_path": alt_2_base,
            "mac_path": alt_2_base,
        }
        self.add_to_sg_mock_db(self.alt_storage_2)

        # Write roots file
        #TODO make os specific roots
        roots = {}
        for storage_name, storage_path in [("primary", self.project_root),
                                           ("alternate_1", self.alt_root_1),
                                           ("alternate_2", self.alt_root_2)]:
            parent_dir = os.path.dirname(storage_path)
            roots[storage_name] = {"windows_path": parent_dir,
                                   "linux_path": parent_dir,
                                   "mac_path": parent_dir}
        roots_path = os.path.join(self.pipeline_config_root, "config", "core", "roots.yml")
        with open(roots_path, "w") as roots_fh:
            roots_fh.write(yaml.dump(roots))

        # need a new pipeline config object that is using the
        # new roots def file we just created
        self.pipeline_configuration = sgtk.pipelineconfig_factory.from_path(self.pipeline_config_root)
        # push this new pipeline config into the tk api
        self.tk._Sgtk__pipeline_config = self.pipeline_configuration

        # force reload templates
        self.tk.reload_templates()

        # add project root folders
        # primary path was already added in base setUp
        self.add_production_path(self.alt_root_1, self.project)
        self.add_production_path(self.alt_root_2, self.project)

        self.tk.create_filesystem_structure("Project", self.project["id"])
Esempio n. 32
0
    def setup_multi_root_fixtures(self):
        """
        Helper method which sets up a standard multi-root set of fixtures
        """
        self.setup_fixtures(parameters={"core": "core.override/multi_root_core",
                                        "skip_template_reload": True})

        # Locations of the two extra project roots.
        project_folder = os.path.basename(self.project_root)
        self.alt_root_1 = os.path.join(self.tank_temp, "alternate_1", project_folder)
        self.alt_root_2 = os.path.join(self.tank_temp, "alternate_2", project_folder)

        # Register one LocalStorage entity per alternate root point.
        def _register_storage(storage_id, code):
            base = os.path.join(self.tank_temp, code)
            storage = {"type": "LocalStorage",
                       "id": storage_id,
                       "code": code,
                       "windows_path": base,
                       "linux_path": base,
                       "mac_path": base}
            self.add_to_sg_mock_db(storage)
            return storage

        self.alt_storage_1 = _register_storage(7778, "alternate_1")
        self.alt_storage_2 = _register_storage(7779, "alternate_2")

        # Write roots file
        #TODO make os specific roots
        roots = {"primary": {}, "alternate_1": {}, "alternate_2": {}}
        for os_key in ("windows_path", "linux_path", "mac_path"):
            roots["primary"][os_key] = os.path.dirname(self.project_root)
            roots["alternate_1"][os_key] = os.path.dirname(self.alt_root_1)
            roots["alternate_2"][os_key] = os.path.dirname(self.alt_root_2)
        roots_path = os.path.join(self.pipeline_config_root, "config", "core", "roots.yml")
        with open(roots_path, "w") as roots_fh:
            roots_fh.write(yaml.dump(roots))

        # need to reload the pipeline config object to respect the
        # new roots definition file we just created
        self.reload_pipeline_config()

        # force reload templates
        self.tk.reload_templates()

        # add project root folders
        # primary path was already added in base setUp
        self.add_production_path(self.alt_root_1, self.project)
        self.add_production_path(self.alt_root_2, self.project)

        self.tk.create_filesystem_structure("Project", self.project["id"])
    def _update_deploy_file(self, generation=None, descriptor=None, corrupt=False):
        """
        Updates the deploy file.

        :param generation: If set, will update the generation number of the config.
        :param descriptor: If set, will update the descriptor of the config.
        :param corrupt: If set, will corrupt the configuration file.
        """
        path = self._cached_config._config_writer.get_descriptor_metadata_file()
        if corrupt:
            data = "corrupted"
        else:
            with open(path, "rt") as fh:
                # Explicit loader: bare yaml.load without a Loader argument is
                # deprecated and unsafe in modern PyYAML.
                data = yaml.load(fh, Loader=yaml.FullLoader)
                if generation is not None:
                    data["deploy_generation"] = generation
                if descriptor is not None:
                    data["config_descriptor"] = descriptor

        with open(path, "wt") as fh:
            yaml.dump(data, fh)
Esempio n. 34
0
    def _update_pipeline_config(self, updates):
        """
        Updates the pipeline configuration on disk with the passed in values.

        :param updates: Dictionary of values to update in the pipeline configuration

        :raises TankError: If the configuration file could not be written.
        """
        # get current settings
        curr_settings = pipelineconfig_utils.get_metadata(self._pc_root)

        # add path cache setting
        curr_settings.update(updates)

        # write the record to disk
        pipe_config_sg_id_path = os.path.join(self._pc_root, "config", "core", "pipeline_configuration.yml")

        old_umask = os.umask(0)
        try:
            # make the file world-writable before rewriting it
            os.chmod(pipe_config_sg_id_path, 0o666)
            # and write the new file, making sure the handle is closed
            # (the original leaked the file handle)
            with open(pipe_config_sg_id_path, "wt") as fh:
                yaml.dump(curr_settings, fh)
        except Exception as exp:
            raise TankError("Could not write to pipeline configuration settings file %s. "
                            "Error reported: %s" % (pipe_config_sg_id_path, exp))
        finally:
            # the original never restored the process umask - always put it back
            os.umask(old_umask)
    def _write_mock_config(self, shotgun_yml_data=None):
        """
        Creates a fake config with the provided shotgun.yml data.

        :param shotgun_yml_data: Optional dict dumped into core/shotgun.yml.
        :returns: A "dev" descriptor pointing at the mock config on disk.
        """
        mock_config_root = os.path.join(self.tank_temp, "template", self.id())
        # Make sure the bundle "exists" on disk.
        os.makedirs(mock_config_root)

        if shotgun_yml_data:
            shotgun_yml_path = os.path.join(mock_config_root, "core", "shotgun.yml")
            self.create_file(shotgun_yml_path, yaml.dump(shotgun_yml_data))

        descriptor_dict = dict(type="dev", path=mock_config_root)
        return sgtk.descriptor.create_descriptor(
            self.mockgun,
            sgtk.descriptor.Descriptor.CONFIG,
            descriptor_dict)
Esempio n. 36
0
    def _write_mock_config(self, shotgun_yml_data=None):
        """
        Creates a fake config with the provided shotgun.yml data.

        :param shotgun_yml_data: Optional dict dumped into core/shotgun.yml.
        :returns: A "dev" descriptor pointing at the mock config on disk.
        """
        # Keep the folder name short or we'll run into file length issues on Windows.
        mock_config_root = os.path.join(
            self.tank_temp, "template", "%s" % self.short_test_name)
        # Make sure the bundle "exists" on disk.
        os.makedirs(mock_config_root)

        if shotgun_yml_data:
            target_path = os.path.join(mock_config_root, "core", "shotgun.yml")
            self.create_file(target_path, yaml.dump(shotgun_yml_data))

        descriptor_dict = dict(type="dev", path=mock_config_root)
        return sgtk.descriptor.create_descriptor(
            self.mockgun,
            sgtk.descriptor.Descriptor.CONFIG,
            descriptor_dict)
Esempio n. 37
0
    def setup_multi_root_fixtures(self):
        """
        Sets up fixtures with a multi-root core: two alternate storages with
        backlink files plus a roots.yml describing all three storages.
        """
        self.setup_fixtures(core_config="multi_root_core")
        # Add multiple project roots
        project_folder = os.path.basename(self.project_root)
        self.alt_root_1 = os.path.join(self.tank_temp, "alternate_1", project_folder)
        self.alt_root_2 = os.path.join(self.tank_temp, "alternate_2", project_folder)

        # add backlink files to storage
        tank_code = os.path.join(self.project_root, "tank")
        backlink_data = "- {darwin: '%s', linux2: '%s', win32: '%s'}" % (
            tank_code, tank_code, tank_code)
        for alt_root in (self.alt_root_1, self.alt_root_2):
            self.create_file(
                os.path.join(alt_root, "tank", "config", "tank_configs.yml"),
                backlink_data)

        # Write roots file
        #TODO make os specific roots
        roots = {}
        for storage_name, storage_path in (("primary", self.project_root),
                                           ("alternate_1", self.alt_root_1),
                                           ("alternate_2", self.alt_root_2)):
            parent_dir = os.path.dirname(storage_path)
            roots[storage_name] = {"windows_path": parent_dir,
                                   "linux_path": parent_dir,
                                   "mac_path": parent_dir}
        roots_path = os.path.join(self.project_root, "tank", "config", "core",
                                  "roots.yml")
        with open(roots_path, "w") as roots_fh:
            roots_fh.write(yaml.dump(roots))

        # need a new PC object that is using the new roots def file we just created
        self.pipeline_configuration = sgtk.pipelineconfig.from_path(
            os.path.join(self.project_root, "tank"))

        # add project root folders
        # primary path was already added in base setUp
        self.add_production_path(self.alt_root_1, self.project)
        self.add_production_path(self.alt_root_2, self.project)
        # use Tank object to write project info
        tk = sgtk.Sgtk(self.project_root)
        tk.create_filesystem_structure("Project", self.project["id"])
    def _write_mock_config(self, shotgun_yml_data=None):
        """
        Creates a fake config with the provided shotgun.yml data.

        :param shotgun_yml_data: Optional dict dumped into core/shotgun.yml.
        :returns: A "dev" descriptor pointing at the mock config on disk.
        """
        mock_config_root = os.path.join(self.tank_temp, "template", self.id())
        # Make sure the bundle "exists" on disk.
        os.makedirs(mock_config_root)

        if shotgun_yml_data:
            yml_target = os.path.join(mock_config_root, "core", "shotgun.yml")
            self.create_file(yml_target, yaml.dump(shotgun_yml_data))

        return sgtk.descriptor.create_descriptor(
            self.mockgun,
            sgtk.descriptor.Descriptor.CONFIG,
            dict(type="dev", path=mock_config_root))
    def _write_mock_config(self, shotgun_yml_data=None):
        """
        Creates a fake config with the provided shotgun.yml data.

        :param shotgun_yml_data: Optional dict dumped into core/shotgun.yml.
        :returns: A "dev" descriptor pointing at the mock config on disk.
        """
        # Keep the name not too long or we'll run into file length issues on Windows.
        mock_config_root = os.path.join(self.tank_temp, "template", "%s" % self.short_test_name)
        # Make sure the bundle "exists" on disk.
        os.makedirs(mock_config_root)

        if shotgun_yml_data:
            yml_target = os.path.join(mock_config_root, "core", "shotgun.yml")
            self.create_file(yml_target, yaml.dump(shotgun_yml_data))

        return sgtk.descriptor.create_descriptor(
            self.mockgun,
            sgtk.descriptor.Descriptor.CONFIG,
            dict(type="dev", path=mock_config_root))
Esempio n. 40
0
    def test_paths(self):
        """Test paths match those in roots for current os."""
        with open(self.root_file_path, "w") as root_file:
            root_file.write(yaml.dump(self.roots))

        pc = tank.pipelineconfig_factory.from_path(self.project_root)
        result = pc.get_data_roots()

        # Map the current operating system onto the matching roots key.
        if is_macos():
            platform_key = "mac_path"
        elif is_linux():
            platform_key = "linux_path"
        elif is_windows():
            platform_key = "windows_path"

        project_name = os.path.basename(self.project_root)
        for root_name, root_path in result.items():
            expected_path = os.path.join(
                self.roots[root_name][platform_key], project_name)
            self.assertEqual(expected_path, root_path)
Esempio n. 41
0
    def test_paths(self):
        """Test paths match those in roots for current os."""
        with open(self.root_file_path, "w") as root_file:
            root_file.write(yaml.dump(self.roots))

        pc = tank.pipelineconfig.from_path(self.project_root)
        result = pc.get_data_roots()

        # Map sys.platform onto the corresponding roots key.
        system = sys.platform.lower()
        if system == "darwin":
            platform_key = "mac_path"
        elif system.startswith("linux"):
            platform_key = "linux_path"
        elif system == "win32":
            platform_key = "windows_path"

        project_name = os.path.basename(self.project_root)
        for root_name, root_path in result.items():
            expected_path = os.path.join(
                self.roots[root_name][platform_key], project_name)
            self.assertEqual(expected_path, root_path)
Esempio n. 42
0
    def setup_multi_root_fixtures(self):
        """
        Sets up multi-root fixtures: two alternate storages with backlink
        files plus a roots.yml describing all three storages.
        """
        self.setup_fixtures(core_config="multi_root_core")
        # Add multiple project roots
        project_folder = os.path.basename(self.project_root)
        self.alt_root_1 = os.path.join(self.tank_temp, "alternate_1", project_folder)
        self.alt_root_2 = os.path.join(self.tank_temp, "alternate_2", project_folder)

        # add backlink files to storage
        tank_code = os.path.join(self.project_root, "tank")
        backlink = "- {darwin: '%s', linux2: '%s', win32: '%s'}" % (tank_code, tank_code, tank_code)
        self.create_file(os.path.join(self.alt_root_1, "tank", "config", "tank_configs.yml"), backlink)
        self.create_file(os.path.join(self.alt_root_2, "tank", "config", "tank_configs.yml"), backlink)

        # Write roots file
        #TODO make os specific roots
        roots = {"primary": {}, "alternate_1": {}, "alternate_2": {}}
        for os_key in ("windows_path", "linux_path", "mac_path"):
            roots["primary"][os_key] = os.path.dirname(self.project_root)
            roots["alternate_1"][os_key] = os.path.dirname(self.alt_root_1)
            roots["alternate_2"][os_key] = os.path.dirname(self.alt_root_2)
        roots_path = os.path.join(self.project_root, "tank", "config", "core", "roots.yml")
        with open(roots_path, "w") as roots_fh:
            roots_fh.write(yaml.dump(roots))

        # need a new PC object that is using the new roots def file we just created
        self.pipeline_configuration = sgtk.pipelineconfig.from_path(os.path.join(self.project_root, "tank"))

        # add project root folders
        # primary path was already added in base setUp
        self.add_production_path(self.alt_root_1, self.project)
        self.add_production_path(self.alt_root_2, self.project)
        # use Tank object to write project info
        tk = sgtk.Sgtk(self.project_root)
        tk.create_filesystem_structure("Project", self.project["id"])
Esempio n. 43
0
    def setup_multi_root_fixtures(self):
        """
        Helper method which sets up a standard multi-root set of fixtures

        Registers four alternate local storages in the mock database, writes
        a multi-root roots.yml in which alternate_3 and alternate_4 are
        deliberately cross-mapped (paths and shotgun_storage_ids swapped),
        then reloads the pipeline configuration and templates and creates the
        project filesystem structure.
        """
        # The primary storage needs to be named "primary" in multi-root mode.
        if self.primary_root_name != "primary":
            self.primary_root_name = "primary"
            self.primary_storage = {"type": "LocalStorage",
                                    "id": 8888,
                                    "code": self.primary_root_name,
                                    "windows_path": self.tank_temp,
                                    "linux_path": self.tank_temp,
                                    "mac_path": self.tank_temp}

            self.add_to_sg_mock_db(self.primary_storage)

        self._setup_fixtures(parameters={"core": "core.override/multi_root_core",
                                         "skip_template_reload": True})

        # Add multiple project roots
        project_name = os.path.basename(self.project_root)
        self.alt_root_1 = os.path.join(self.tank_temp, "alternate_1", project_name)
        self.alt_root_2 = os.path.join(self.tank_temp, "alternate_2", project_name)
        self.alt_root_3 = os.path.join(self.tank_temp, "alternate_3", project_name)
        self.alt_root_4 = os.path.join(self.tank_temp, "alternate_4", project_name)

        # add local storages to represent the alternate root points
        self.alt_storage_1 = {"type": "LocalStorage",
                              "id": 7778,
                              "code": "alternate_1",
                              "windows_path": os.path.join(self.tank_temp, "alternate_1"),
                              "linux_path": os.path.join(self.tank_temp, "alternate_1"),
                              "mac_path": os.path.join(self.tank_temp, "alternate_1")}
        self.add_to_sg_mock_db(self.alt_storage_1)

        self.alt_storage_2 = {"type": "LocalStorage",
                              "id": 7779,
                              "code": "alternate_2",
                              "windows_path": os.path.join(self.tank_temp, "alternate_2"),
                              "linux_path": os.path.join(self.tank_temp, "alternate_2"),
                              "mac_path": os.path.join(self.tank_temp, "alternate_2")}
        self.add_to_sg_mock_db(self.alt_storage_2)

        self.alt_storage_3 = {"type": "LocalStorage",
                              "id": 7780,
                              "code": "alternate_3",
                              "windows_path": os.path.join(self.tank_temp, "alternate_3"),
                              "linux_path": os.path.join(self.tank_temp, "alternate_3"),
                              "mac_path": os.path.join(self.tank_temp, "alternate_3")}
        self.add_to_sg_mock_db(self.alt_storage_3)

        self.alt_storage_4 = {"type": "LocalStorage",
                              "id": 7781,
                              "code": "alternate_4",
                              "windows_path": os.path.join(self.tank_temp, "alternate_4"),
                              "linux_path": os.path.join(self.tank_temp, "alternate_4"),
                              "mac_path": os.path.join(self.tank_temp, "alternate_4")}
        self.add_to_sg_mock_db(self.alt_storage_4)

        # Write roots file
        roots = {"primary": {}, "alternate_1": {}, "alternate_2": {}, "alternate_3": {}, "alternate_4": {}}
        for os_name in ["windows_path", "linux_path", "mac_path"]:
            # TODO make os specific roots
            roots["primary"][os_name]     = os.path.dirname(self.project_root)
            roots["alternate_1"][os_name] = os.path.dirname(self.alt_root_1)
            roots["alternate_2"][os_name] = os.path.dirname(self.alt_root_2)

            # NOTE: swap the mapped roots
            # alternate_3's root entry points at alternate_4's path and vice
            # versa - this mismatch is intentional fixture data.
            roots["alternate_3"][os_name] = os.path.dirname(self.alt_root_4)
            roots["alternate_4"][os_name] = os.path.dirname(self.alt_root_3)

        # swap the mapped storage ids - again a deliberate mismatch: each
        # entry carries the *other* storage's LocalStorage id.
        roots["alternate_3"]["shotgun_storage_id"] = 7781  # local storage 4
        roots["alternate_4"]["shotgun_storage_id"] = 7780  # local storage 3

        roots_path = os.path.join(self.pipeline_config_root, "config", "core", "roots.yml")
        roots_file = open(roots_path, "w")
        roots_file.write(yaml.dump(roots))
        roots_file.close()

        # need to reload the pipeline config object that to respect the
        # new roots definition file we just created
        self.reload_pipeline_config()

        # force reload templates
        self.tk.reload_templates()

        # add project root folders
        # primary path was already added in base setUp
        # (alternate_3/alternate_4 are intentionally not registered here)
        self.add_production_path(self.alt_root_1, self.project)
        self.add_production_path(self.alt_root_2, self.project)

        self.tk.create_filesystem_structure("Project", self.project["id"])
Esempio n. 44
0
        # NOTE(review): this is a fragment - the enclosing def (and the
        # definition of snapshot_path) was lost upstream of this view.
        #remove the comment from the yml
        comments_file_path = self._get_comments_file_path(snapshot_path)

        self._app.log_debug("Snapshot: Deleting comment from file %s" % comments_file_path)

        try:
            if os.path.exists(comments_file_path):
                # NOTE(review): yaml.load on an open() without a context
                # manager leaks the handle and uses the unsafe full loader;
                # also crashes if the file is empty (load returns None).
                comments = yaml.load(open(comments_file_path, "r"))
                # entries are keyed by snapshot file basename
                comments_key = os.path.basename(snapshot_path)
                del comments[comments_key]

                # and save yml file
                # umask widened so the rewritten file stays world-writable
                old_umask = os.umask(0)
                try:
                    yaml.dump(comments, open(comments_file_path, "w"))
                finally:
                    os.umask(old_umask)
        except TankError, e:
           # best-effort delete: log and bail out rather than propagate
           self._app.log_exception("Snapshot Delete Failed!")
           return









Esempio n. 45
0
    # NOTE(review): fragment of a larger project-setup function - names such
    # as log, sg, project_name, pipeline_config_id, project_id,
    # pipe_config_sg_id_path, sg_app_store and sg_app_store_script_user are
    # defined in the enclosing (unseen) scope.

    # determine the entity type to use for Published Files:
    pf_entity_type = _get_published_file_entity_type(log, sg)

    # assemble the pipeline configuration metadata record
    data = {}
    data["project_name"] = project_name
    data["pc_id"] = pipeline_config_id
    data["project_id"] = project_id
    data["pc_name"] = constants.PRIMARY_PIPELINE_CONFIG_NAME
    data["published_file_entity_type"] = pf_entity_type

    # all 0.15+ projects are pushing folders to Shotgun by default
    data["use_shotgun_path_cache"] = True

    # write the record to disk (NOTE(review): fh is leaked if yaml.dump
    # raises - a with-statement would be safer)
    try:
        fh = open(pipe_config_sg_id_path, "wt")
        yaml.dump(data, fh)
        fh.close()
    except Exception, exp:
        raise TankError(
            "Could not write to pipeline configuration cache file %s. "
            "Error reported: %s" % (pipe_config_sg_id_path, exp))

    if sg_app_store:
        # we have an app store connection
        # write a custom event to the shotgun event log
        log.debug("Writing app store stats...")
        data = {}
        data[
            "description"] = "%s: An Toolkit Project was created" % sg.base_url
        data["event_type"] = "TankAppStore_Project_Created"
        data["user"] = sg_app_store_script_user
Esempio n. 46
0
    def setUp(self, project_tank_name="project_code"):
        """
        Creates and registers test project.

        Builds a project and a pipeline configuration scaffold on disk under
        TANK_TEMP, writes the core config files (pipeline_configuration.yml,
        install_location.yml and roots.yml), creates a Tank API instance and
        routes all Shotgun connection calls through a Mockgun instance.

        :param project_tank_name: tank_name to give the test project; also
                                  used as the project's folder name on disk.
        """
        self.tank_temp = TANK_TEMP
        self.tank_source_path = TANK_SOURCE_PATH

        # redirect the pipelineconfig factory's init cache into the test
        # sandbox so tests never read/write a real user-level cache
        self.init_cache_location = os.path.join(self.tank_temp,
                                                "init_cache.cache")

        def _get_cache_location_mock():
            return self.init_cache_location

        # NOTE(review): module-level monkey-patch, never restored - assumes
        # the test process is disposable
        tank.pipelineconfig_factory._get_cache_location = _get_cache_location_mock

        # define entity for test project
        self.project = {
            "type": "Project",
            "id": 1,
            "tank_name": project_tank_name,
            "name": "project_name"
        }

        # tank_name may contain "/" to model sub-folder projects; map it to
        # the platform separator when building the disk path
        self.project_root = os.path.join(
            self.tank_temp,
            self.project["tank_name"].replace("/", os.path.sep))

        self.pipeline_config_root = os.path.join(self.tank_temp,
                                                 "pipeline_configuration")

        # move away previous data
        self._move_project_data()

        # create new structure
        os.makedirs(self.project_root)
        os.makedirs(self.pipeline_config_root)

        # project level config directories
        self.project_config = os.path.join(self.pipeline_config_root, "config")

        # create project cache directory
        project_cache_dir = os.path.join(self.pipeline_config_root, "cache")
        os.mkdir(project_cache_dir)

        # define entity for pipeline configuration
        self.sg_pc_entity = {
            "type": "PipelineConfiguration",
            "code": "Primary",
            "id": 123,
            "project": self.project,
            "windows_path": self.pipeline_config_root,
            "mac_path": self.pipeline_config_root,
            "linux_path": self.pipeline_config_root
        }

        # add files needed by the pipeline config
        pc_yml = os.path.join(self.pipeline_config_root, "config", "core",
                              "pipeline_configuration.yml")
        pc_yml_data = (
            "{ project_name: %s, use_shotgun_path_cache: true, pc_id: %d, "
            "project_id: %d, pc_name: %s}\n\n" %
            (self.project["tank_name"], self.sg_pc_entity["id"],
             self.project["id"], self.sg_pc_entity["code"]))
        self.create_file(pc_yml, pc_yml_data)

        loc_yml = os.path.join(self.pipeline_config_root, "config", "core",
                               "install_location.yml")
        loc_yml_data = "Windows: '%s'\nDarwin: '%s'\nLinux: '%s'" % (
            self.pipeline_config_root, self.pipeline_config_root,
            self.pipeline_config_root)
        self.create_file(loc_yml, loc_yml_data)

        # single "primary" storage pointing at the sandbox root on all
        # three platforms
        roots = {"primary": {}}
        for os_name in ["windows_path", "linux_path", "mac_path"]:
            #TODO make os specific roots
            roots["primary"][os_name] = self.tank_temp
        roots_path = os.path.join(self.pipeline_config_root, "config", "core",
                                  "roots.yml")
        roots_file = open(roots_path, "w")
        roots_file.write(yaml.dump(roots))
        roots_file.close()

        # the config files written above must exist before the factory can
        # resolve the pipeline configuration from its path
        self.pipeline_configuration = sgtk.pipelineconfig_factory.from_path(
            self.pipeline_config_root)
        self.tk = tank.Tank(self.pipeline_configuration)

        # set up mockgun and make sure shotgun connection calls route via mockgun

        self.mockgun = MockGun_Shotgun("http://unit_test_mock_sg", "mock_user",
                                       "mock_key")

        def get_associated_sg_base_url_mocker():
            return "http://unit_test_mock_sg"

        def create_sg_connection_mocker():
            return self.mockgun

        # NOTE(review): more module-level monkey-patches, never restored
        tank.util.shotgun.get_associated_sg_base_url = get_associated_sg_base_url_mocker
        tank.util.shotgun.create_sg_connection = create_sg_connection_mocker

        # add project to mock sg and path cache db
        self.add_production_path(self.project_root, self.project)

        # add pipeline configuration
        self.add_to_sg_mock_db(self.sg_pc_entity)

        # add local storage
        self.primary_storage = {
            "type": "LocalStorage",
            "id": 7777,
            "code": "primary",
            "windows_path": self.tank_temp,
            "linux_path": self.tank_temp,
            "mac_path": self.tank_temp
        }

        self.add_to_sg_mock_db(self.primary_storage)
Esempio n. 47
0
    def test_get_valid_path(self):
        """
        Test that the correct data is retrieved when a valid path is
        provided to YamlCache.get().

        Also ensure that the data returned from the cache is a copy of the
        cached data (to ensure it is clean and hasn't been accidentally
        overwritten by the calling code) and that the cache correctly reloads
        the data from the file when it has been modified.
        """
        yaml_path = os.path.join(self.tank_temp, "test_data.yml")

        # deliberately heterogeneous/nested structure so that the cache's
        # copy-on-read behaviour is exercised on non-trivial data:
        test_data = [
            1,
            "two",
            {
                "three": "A",
                "four": [5, 6, 7],
                8: {
                    9: "nine",
                    "ten": 10
                }
            },
            [11, "twelve"],
            {
                13: 13,
                "fourteen": "fourteen"
            },
        ]

        modified_test_data = copy.deepcopy(test_data)
        modified_test_data.append({15: [16, "seventeen"]})

        # 1. Check that the cache loads the data correctly
        #

        # write out the test data (the with-block guarantees the file is
        # flushed and closed before the cache reads it back):
        with open(yaml_path, "w") as yaml_file:
            yaml_file.write(yaml.dump(test_data))

        # create a yaml cache instance and get the data from the file:
        yaml_cache = YamlCache()
        read_data = yaml_cache.get(yaml_path)

        # check the read data matches the input data:
        self.assertEqual(read_data, test_data)

        # 2. Ensure that the data returned is a copy of the cached data
        #

        # inspect the cache itself and make sure that the data returned is a copy
        # of the internal cached data and not the internal cached data itself:
        self.assertEqual(len(yaml_cache._cache), 1)
        self.assertEqual(list(yaml_cache._cache.keys())[0], yaml_path)
        self.assertEqual(
            list(yaml_cache._cache.values())[0]["data"], read_data)
        cached_data_id = id(list(yaml_cache._cache.values())[0]["data"])
        self.assertNotEqual(cached_data_id, id(read_data))

        # 3. Check that the data doesn't get reloaded if it hasn't changed
        #

        # ask for the data again...
        read_data = yaml_cache.get(yaml_path)

        # ...and check that the cached data is exactly the same (has the same id):
        self.assertEqual(len(yaml_cache._cache), 1)
        new_cached_data_id = id(list(yaml_cache._cache.values())[0]["data"])
        self.assertEqual(cached_data_id, new_cached_data_id)

        # 4. Check that the data does get reloaded if it has changed:
        #

        # update the data in the file (NOTE(review): presumably the cache
        # detects this via the file's mtime changing - confirm against
        # YamlCache's implementation):
        with open(yaml_path, "w") as yaml_file:
            yaml_file.write(yaml.dump(modified_test_data))

        # ask for the data again...
        read_data = yaml_cache.get(yaml_path)

        # ...and check that the data in the cache has been updated:
        self.assertEqual(read_data, modified_test_data)
Esempio n. 48
0
    def init_engine(self):
        """
        Engine entry point, called at startup before any apps are loaded.

        Validates that the current context contains a project, that this is
        not the Nuke PLE and that the Nuke version is at least 6.3v5, then
        stores bootstrap data in environment variables so that new Nuke
        processes (file->new / file->open) can re-initialize the engine.
        """
        # note! not using the import as this confuses nuke's callback system
        # (several of the key scene callbacks are in the main init file...)
        import tk_nuke

        self.log_debug("%s: Initializing..." % self)

        # now check that there is a location on disk which corresponds to the context
        if self.context.project is None:
            # must have at least a project in the context to even start!
            raise tank.TankError("The nuke engine needs at least a project in the context "
                                 "in order to start! Your context: %s" % self.context)

        # make sure we are not running the nuke PLE - it is not supported
        if nuke.env.get("ple") == True:
            self.log_error("The Nuke Engine does not work with the Nuke PLE!")
            return

        # make sure that nuke has a higher version than 6.3v5
        # this is because of pyside
        nuke_version = (nuke.env.get("NukeVersionMajor"),
                        nuke.env.get("NukeVersionMinor"),
                        nuke.env.get("NukeVersionRelease"))

        # tuples compare lexicographically, so this single test covers the
        # major, minor and release comparisons in one go
        if nuke_version < (6, 3, 5):
            self.log_error("Nuke 6.3v5 is the minimum version supported!")
            return

        # keep track of if a UI exists
        self._ui_enabled = nuke.env.get("gui")

        # versions > 7.x have not yet been tested so show a message to that effect:
        if nuke_version[0] > 7:
            # this is an untested version of Nuke
            msg = ("The Shotgun Pipeline Toolkit has not yet been fully tested with Nuke %d.%dv%d. "
                   "You can continue to use the Toolkit but you may experience bugs or "
                   "instability.  Please report any issues you see to [email protected]" 
                   % (nuke_version[0], nuke_version[1], nuke_version[2]))

            # show nuke message if in UI mode, this is the first time the engine has been started
            # and the warning dialog isn't overridden by the config:
            if (self._ui_enabled
                and "TANK_NUKE_ENGINE_INIT_NAME" not in os.environ
                and nuke_version[0] >= self.get_setting("compatibility_dialog_min_version", 8)):
                nuke.message("Warning - Shotgun Pipeline Toolkit!\n\n%s" % msg)

            # and log the warning
            self.log_warning(msg)

        # now prepare tank so that it will be picked up by any new processes
        # created by file->new or file->open.

        # Store data needed for bootstrapping Tank in env vars. Used in startup/menu.py
        os.environ["TANK_NUKE_ENGINE_INIT_NAME"] = self.instance_name
        os.environ["TANK_NUKE_ENGINE_INIT_CONTEXT"] = yaml.dump(self.context)
        os.environ["TANK_NUKE_ENGINE_INIT_PROJECT_ROOT"] = self.tank.project_path

        # add our startup path to the nuke init path
        startup_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "startup"))
        tank.util.append_path_to_env_var("NUKE_PATH", startup_path)

        # we also need to pass the path to the python folder down to the init script
        # because nuke python does not have a __file__ attribute for that file
        local_python_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "python"))
        os.environ["TANK_NUKE_ENGINE_MOD_PATH"] = local_python_path

        # make sure callbacks tracking the context switching are active
        tk_nuke.tank_ensure_callbacks_registered()
Esempio n. 49
0
    def setUp(self):
        """
        Creates and registers test project.

        Older-style scaffold: the pipeline configuration lives inside the
        project folder (<project>/tank) rather than in a separate location,
        and Shotgun data is mocked via a plain dictionary plus a patched
        create_sg_connection.
        """
        self.tank_temp = TANK_TEMP
        self.tank_source_path = TANK_SOURCE_PATH

        # mocking shotgun data (see add_to_sg_mock)
        self._sg_mock_db = {}

        # define entity for test project
        self.project = {
            "type": "Project",
            "id": 1,
            "tank_name": "project_code",
            "name": "project_name"
        }

        self.project_root = os.path.join(self.tank_temp,
                                         self.project["tank_name"])

        # create project directory
        self._move_project_data()
        os.mkdir(self.project_root)
        project_tank = os.path.join(self.project_root, "tank")
        os.mkdir(project_tank)

        # project level config directories
        self.project_config = os.path.join(project_tank, "config")

        # create project cache directory
        project_cache_dir = os.path.join(project_tank, "cache")
        os.mkdir(project_cache_dir)

        # create back-link file from project storage
        # (one entry per sys.platform name pointing at the pipeline config)
        data = "- {darwin: '%s', linux2: '%s', win32: '%s'}" % (
            project_tank, project_tank, project_tank)
        self.create_file(
            os.path.join(project_tank, "config", "tank_configs.yml"), data)

        # add files needed by the pipeline config

        pc_yml = os.path.join(project_tank, "config", "core",
                              "pipeline_configuration.yml")
        pc_yml_data = "{ project_name: %s, pc_id: 123, project_id: 12345, pc_name: Primary}\n\n" % self.project[
            "tank_name"]
        self.create_file(pc_yml, pc_yml_data)

        loc_yml = os.path.join(project_tank, "config", "core",
                               "install_location.yml")
        loc_yml_data = "Windows: '%s'\nDarwin: '%s'\nLinux: '%s'" % (
            project_tank, project_tank, project_tank)
        self.create_file(loc_yml, loc_yml_data)

        # single "primary" storage rooted at the parent of the project folder
        roots = {"primary": {}}
        for os_name in ["windows_path", "linux_path", "mac_path"]:
            #TODO make os specific roots
            roots["primary"][os_name] = os.path.dirname(self.project_root)
        roots_path = os.path.join(project_tank, "config", "core", "roots.yml")
        roots_file = open(roots_path, "w")
        roots_file.write(yaml.dump(roots))
        roots_file.close()

        self.pipeline_configuration = sgtk.pipelineconfig.from_path(
            project_tank)

        # add project to mock sg and path cache db
        self.add_production_path(self.project_root, self.project)

        # change to return our shotgun object
        # (NOTE(review): module-level monkey-patch, never restored)
        def return_sg(*args, **kws):
            return self.sg_mock

        sgtk.util.shotgun.create_sg_connection = return_sg
        sg_pc_location = os.path.join(target_folder, "config", "core", "pipeline_configuration.yml")

        # read the file first
        fh = open(sg_pc_location, "rt")
        try:
            data = yaml.load(fh)
        finally:
            fh.close()

        # now delete it
        if os.path.exists(sg_pc_location):
            os.chmod(sg_pc_location, 0666)
            os.remove(sg_pc_location)

        # now update some fields
        data["pc_id"] = pc_entity["id"]
        data["pc_name"] = new_name

        # and write the new file
        fh = open(sg_pc_location, "wt")
        yaml.dump(data, fh)
        fh.close()

    except Exception, e:
        raise TankError("Could not update pipeline_configuration.yml file: %s" % e)

    finally:
        os.umask(old_umask)

    return {"source": source_folder, "target": target_folder, "id": pc_entity["id"]}
Esempio n. 51
0
 def test_equal_yml(self):
     """
     Serializing a Context to YAML and loading it back should produce an
     object that compares equal to the original.
     """
     context_1 = context.Context(**self.kws)
     serialized = yaml.dump(context_1)
     # NOTE(review): yaml.load without an explicit Loader relies on the
     # legacy PyYAML default; needed here to reconstruct the Context object.
     context_2 = yaml.load(serialized)
     self.assertTrue(context_1 == context_2)
    def execute(self, local_path, publish_data, p4, **kwargs):
        """
        Store the specified publish data so that it can be retrieved lated by
        the corresponding load_publish_data hook
        
        :param local_path:      String
                                Local path to the file being published

        :param p4:              P4 instance
                                The Perforce connection to use if needed.
                        
        :param publish_data:    Dictionary
                                Dictionary of data to store for the published file.  This data will match the
                                parameters expected by the 'sgtk.util.register_publish()' function.
        """

        # The default implementation stores the publish data in a p4 attribute so
        # that it lives with the file:
        #
        #    shotgun_metadata - store a yaml version of all metadata
        #
        # If a thumbnail is specified in the publish_data then this is uploaded to
        # Shotgun as an attachment to the current project.
        if not local_path or not publish_data:
            return

        sg_metadata = copy.deepcopy(publish_data)

        p4_fw = self.parent
        from P4 import P4Exception

        # make sure we have a Perforce connection:
        p4 = p4 if p4 else p4_fw.connection.connect()

        # convert dependencies from local to depot paths:
        dependency_paths = sg_metadata.get("dependency_paths", [])
        if dependency_paths:
            depot_dependency_paths = p4_fw.util.client_to_depot_paths(p4, dependency_paths)
            depot_dependency_paths = [dp for dp in depot_dependency_paths if dp]
            sg_metadata["dependency_paths"] = depot_dependency_paths

        # replace context with a serialized version:
        ctx = sg_metadata.get("context")
        if ctx:
            ctx_str = sgtk.context.serialize(ctx)
            sg_metadata["context"] = ctx_str

        # store thumbnail as Project attachment in Shotgun:
        thumbnail_path = sg_metadata.get("thumbnail_path")
        if thumbnail_path and os.path.exists(thumbnail_path):
            attachment_id = self.__upload_file_to_sg(thumbnail_path)
            sg_metadata["thumbnail_path"] = (thumbnail_path, attachment_id)

        # format as yaml data:
        sg_metadata_str = yaml.dump(sg_metadata)

        # set the 'shotgun_metadata' attribute on the file in Perforce:
        try:
            # use '-p' to create a propogating attribute that will propogate with the file
            # when the file is opened for add, edit or delete.  This will ensure subsequent
            # changes to the file retain this information unless it's modified by a future
            # publish
            p4.run_attribute("-p", "-n", StorePublishData.PUBLISH_ATTRIB_NAME, "-v", sg_metadata_str, local_path)
        except P4Exception, e:
            raise TankError("Failed to store publish data in Perforce attribute for file '%s'" % local_path)
Esempio n. 53
0
    def setUp(self, parameters=None):
        """
        Sets up a Shotgun Mockgun instance with a project and a basic project scaffold on
        disk.

        :param parameters: Dictionary with additional parameters to control the setup.
                           The method currently supports the following parameters:

                           - 'project_tank_name': 'name' - Set the tank_name of the project to
                                                  something explicit. If not specified, this
                                                  will default to 'project_code'

                           - 'mockgun_schema_path': '/path/to/file' - Pass a specific schema to use with mockgun.
                                                    If not specified, the tk-core fixture schema
                                                    will be used.

                           - 'mockgun_schema_entity_path': '/path/to/file' - Pass a specific entity schema to use with
                                                           mockgun. If not specified, the tk-core fixture schema
                                                           will be used.
                           - 'primary_root_name': 'name' - Set the primary root name, default to 'unit_tests'.

                           - 'do_io': False - Skip all filesystem scaffolding and only set up
                                              the in-memory/Mockgun state. Defaults to True.


        """
        self.addCleanup(self._assert_teardown_called)
        # Override SHOTGUN_HOME so that unit tests can be sandboxed.
        self._old_shotgun_home = os.environ.get(self.SHOTGUN_HOME)
        os.environ[self.SHOTGUN_HOME] = TANK_TEMP

        # Make sure the global settings instance has been reset so anything from a previous test doesn't
        # leak into the next one.
        UserSettings.clear_singleton()

        parameters = parameters or {}

        # when False, every filesystem operation below is skipped
        self._do_io = parameters.get("do_io", True)

        if "project_tank_name" in parameters:
            project_tank_name = parameters["project_tank_name"]
        else:
            # default project name
            project_tank_name = "project_code"

        # now figure out mockgun location
        # 1. see if we have it explicitly specified in the parameters
        # 2. if not, check if the fixtures location has a mockgun folder
        # 3. if not, fall back on built in mockgun fixtures

        if "mockgun_schema_path" in parameters:
            mockgun_schema_path = parameters["mockgun_schema_path"]

        elif os.path.exists(os.path.join(self.fixtures_root, "mockgun")):
            mockgun_schema_path = os.path.join(
                self.fixtures_root,
                "mockgun",
                "schema.pickle"
            )

        else:
            # use the std core fixtures
            mockgun_schema_path = os.path.join(
                self.tank_source_path,
                "tests",
                "fixtures",
                "mockgun",
                "schema.pickle"
            )

        if "mockgun_schema_entity_path" in parameters:
            mockgun_schema_entity_path = parameters["mockgun_schema_entity_path"]

        elif os.path.exists(os.path.join(self.fixtures_root, "mockgun")):
            mockgun_schema_entity_path = os.path.join(
                self.fixtures_root,
                "mockgun",
                "schema_entity.pickle"
            )

        else:
            # use the std core fixtures
            mockgun_schema_entity_path = os.path.join(
                self.tank_source_path,
                "tests",
                "fixtures",
                "mockgun",
                "schema_entity.pickle"
            )

        # The name to use for our primary storage
        self.primary_root_name = parameters.get("primary_root_name", "unit_tests")

        # set up mockgun to use our schema
        mockgun.Shotgun.set_schema_paths(mockgun_schema_path, mockgun_schema_entity_path)

        self.tank_temp = TANK_TEMP

        self.cache_root = os.path.join(self.tank_temp, "cache_root")

        # Mock this so that the authentication manager works even though we are not in a config.
        # If we don't mock it then the path cache calling get_current_user will fail.
        self._mock_return_value(
            "tank.util.shotgun.connection.get_associated_sg_config_data",
            {"host": "https://somewhere.shotgunstudio.com"}
        )

        # define entity for test project
        self.project = {
            "type": "Project",
            "id": 1,
            "tank_name": project_tank_name,
            "name": "project_name",
            "archived": False,
        }

        # tank_name may contain "/" to model sub-folder projects; map it to
        # the platform separator when building the disk path
        self.project_root = os.path.join(self.tank_temp, self.project["tank_name"].replace("/", os.path.sep))

        self.pipeline_config_root = os.path.join(self.tank_temp, "pipeline_configuration")

        if self._do_io:
            # move away previous data
            self._move_project_data()

            # create new structure
            os.makedirs(self.project_root)
            os.makedirs(self.pipeline_config_root)

            # # copy tank util scripts
            shutil.copy(
                os.path.join(self.tank_source_path, "setup", "root_binaries", "tank"),
                os.path.join(self.pipeline_config_root, "tank")
            )
            shutil.copy(
                os.path.join(self.tank_source_path, "setup", "root_binaries", "tank.bat"),
                os.path.join(self.pipeline_config_root, "tank.bat")
            )

        # project level config directories
        self.project_config = os.path.join(self.pipeline_config_root, "config")

        # create project cache directory
        project_cache_dir = os.path.join(self.pipeline_config_root, "cache")
        if self._do_io:
            os.mkdir(project_cache_dir)

        # define entity for pipeline configuration
        self.sg_pc_entity = {"type": "PipelineConfiguration",
                             "code": "Primary",
                             "id": 123,
                             "project": self.project,
                             "windows_path": self.pipeline_config_root,
                             "mac_path": self.pipeline_config_root,
                             "linux_path": self.pipeline_config_root}

        # add files needed by the pipeline config
        pc_yml = os.path.join(self.pipeline_config_root, "config", "core", "pipeline_configuration.yml")
        pc_yml_data = ("{ project_name: %s, use_shotgun_path_cache: true, pc_id: %d, "
                       "project_id: %d, pc_name: %s}\n\n" % (self.project["tank_name"],
                                                             self.sg_pc_entity["id"],
                                                             self.project["id"],
                                                             self.sg_pc_entity["code"]))
        if self._do_io:
            self.create_file(pc_yml, pc_yml_data)

        loc_yml = os.path.join(self.pipeline_config_root, "config", "core", "install_location.yml")
        loc_yml_data = "Windows: '%s'\nDarwin: '%s'\nLinux: '%s'" % (
            self.pipeline_config_root, self.pipeline_config_root, self.pipeline_config_root
        )
        if self._do_io:
            self.create_file(loc_yml, loc_yml_data)

        # inject this file which toolkit is probing for to determine
        # if an installation has been localized.
        localize_token_file = os.path.join(self.pipeline_config_root, "install", "core", "_core_upgrader.py")
        if self._do_io:
            self.create_file(localize_token_file, "foo bar")

        # single primary storage pointing at the sandbox root on all
        # three platforms
        roots = {self.primary_root_name: {}}
        for os_name in ["windows_path", "linux_path", "mac_path"]:
            # TODO make os specific roots
            roots[self.primary_root_name][os_name] = self.tank_temp

        if self._do_io:
            roots_path = os.path.join(self.pipeline_config_root, "config", "core", "roots.yml")
            roots_file = open(roots_path, "w")
            roots_file.write(yaml.dump(roots))
            roots_file.close()

        # clear bundle in-memory cache
        sgtk.descriptor.io_descriptor.factory.g_cached_instances = {}

        if self._do_io:
            # the config files written above must exist before the factory
            # can resolve the pipeline configuration from its path
            self.pipeline_configuration = sgtk.pipelineconfig_factory.from_path(self.pipeline_config_root)
            self.tk = tank.Tank(self.pipeline_configuration)

        # set up mockgun and make sure shotgun connection calls route via mockgun
        self.mockgun = mockgun.Shotgun("http://unit_test_mock_sg", "mock_user", "mock_key")
        # fake a version response from the server
        self.mockgun.server_info = {"version": (7, 0, 0)}

        # patch both the connection-submodule and the legacy locations of
        # these functions so all callers get the mock
        self._mock_return_value("tank.util.shotgun.connection.get_associated_sg_base_url", "http://unit_test_mock_sg")
        self._mock_return_value("tank.util.shotgun.connection.create_sg_connection", self.mockgun)
        self._mock_return_value("tank.util.shotgun.get_associated_sg_base_url", "http://unit_test_mock_sg")
        self._mock_return_value("tank.util.shotgun.create_sg_connection", self.mockgun)

        # add project to mock sg and path cache db
        if self._do_io:
            self.add_production_path(self.project_root, self.project)

        # add pipeline configuration
        self.add_to_sg_mock_db(self.project)
        self.add_to_sg_mock_db(self.sg_pc_entity)

        # add local storage
        self.primary_storage = {"type": "LocalStorage",
                                "id": 7777,
                                "code": self.primary_root_name,
                                "windows_path": self.tank_temp,
                                "linux_path": self.tank_temp,
                                "mac_path": self.tank_temp}

        self.add_to_sg_mock_db(self.primary_storage)

        # back up the authenticated user in case a unit test doesn't clean up correctly.
        self._authenticated_user = sgtk.get_authenticated_user()
Esempio n. 54
0
    def setUp(self, parameters=None):
        """
        Sets up a Shotgun Mockgun instance with a project and a basic project scaffold on
        disk.

        :param parameters: Dictionary with additional parameters to control the setup.
                           The method currently supports the following parameters:

                           - 'project_tank_name': 'name' - Set the tank_name of the project to
                                                  something explicit. If not specified, this
                                                  will default to 'project_code'

                           - 'mockgun_schema_path': '/path/to/file' - Pass a specific schema to use with mockgun.
                                                    If not specified, the tk-core fixture schema
                                                    will be used.

                           - 'mockgun_schema_entity_path': '/path/to/file' - Pass a specific entity schema to use with
                                                           mockgun. If not specified, the tk-core fixture schema
                                                           will be used.


        """
        # Override SHOTGUN_HOME so that unit tests can be sandboxed.
        self._old_shotgun_home = os.environ.get(self.SHOTGUN_HOME)
        os.environ[self.SHOTGUN_HOME] = TANK_TEMP

        # Make sure the global settings instance has been reset so anything from a previous test doesn't
        # leak into the next one.
        UserSettings.clear_singleton()

        parameters = parameters or {}

        if "project_tank_name" in parameters:
            project_tank_name = parameters["project_tank_name"]
        else:
            # default project name
            project_tank_name = "project_code"

        # now figure out mockgun location
        # 1. see if we have it explicitly specified in the parameters
        # 2. if not, check if the fixtures location has a mockgun folder
        # 3. if not, fall back on built in mockgun fixtures

        if "mockgun_schema_path" in parameters:
            mockgun_schema_path = parameters["mockgun_schema_path"]

        elif os.path.exists(os.path.join(self.fixtures_root, "mockgun")):
            mockgun_schema_path = os.path.join(
                self.fixtures_root,
                "mockgun",
                "schema.pickle"
            )

        else:
            # use the std core fixtures
            mockgun_schema_path = os.path.join(
                self.tank_source_path,
                "tests",
                "fixtures",
                "mockgun",
                "schema.pickle"
            )


        if "mockgun_schema_entity_path" in parameters:
            mockgun_schema_entity_path = parameters["mockgun_schema_entity_path"]

        elif os.path.exists(os.path.join(self.fixtures_root, "mockgun")):
            mockgun_schema_entity_path = os.path.join(
                self.fixtures_root,
                "mockgun",
                "schema_entity.pickle"
            )

        else:
            # use the std core fixtures
            mockgun_schema_entity_path = os.path.join(
                self.tank_source_path,
                "tests",
                "fixtures",
                "mockgun",
                "schema_entity.pickle"
            )

        # set up mockgun to use our schema
        mockgun.Shotgun.set_schema_paths(mockgun_schema_path, mockgun_schema_entity_path)

        self.tank_temp = TANK_TEMP

        self.cache_root = os.path.join(self.tank_temp, "cache_root")

        # Mock this so that authentication manager works even tough we are not in a config.
        # If we don't mock it than the path cache calling get_current_user will fail.
        self._mock_return_value(
            "tank.util.shotgun.get_associated_sg_config_data",
            {"host": "https://somewhere.shotguntudio.com"}
        )

        # define entity for test project
        self.project = {"type": "Project",
                        "id": 1,
                        "tank_name": project_tank_name,
                        "name": "project_name"}

        self.project_root = os.path.join(self.tank_temp, self.project["tank_name"].replace("/", os.path.sep) )

        self.pipeline_config_root = os.path.join(self.tank_temp, "pipeline_configuration")

        # move away previous data
        self._move_project_data()

        # create new structure
        os.makedirs(self.project_root)
        os.makedirs(self.pipeline_config_root)

        # project level config directories
        self.project_config = os.path.join(self.pipeline_config_root, "config")

        # create project cache directory
        project_cache_dir = os.path.join(self.pipeline_config_root, "cache")
        os.mkdir(project_cache_dir)

        # define entity for pipeline configuration
        self.sg_pc_entity = {"type": "PipelineConfiguration",
                             "code": "Primary",
                             "id": 123,
                             "project": self.project,
                             "windows_path": self.pipeline_config_root,
                             "mac_path": self.pipeline_config_root,
                             "linux_path": self.pipeline_config_root}



        # add files needed by the pipeline config
        pc_yml = os.path.join(self.pipeline_config_root, "config", "core", "pipeline_configuration.yml")
        pc_yml_data = ("{ project_name: %s, use_shotgun_path_cache: true, pc_id: %d, "
                       "project_id: %d, pc_name: %s}\n\n" % (self.project["tank_name"],
                                                             self.sg_pc_entity["id"],
                                                             self.project["id"],
                                                             self.sg_pc_entity["code"]))
        self.create_file(pc_yml, pc_yml_data)

        loc_yml = os.path.join(self.pipeline_config_root, "config", "core", "install_location.yml")
        loc_yml_data = "Windows: '%s'\nDarwin: '%s'\nLinux: '%s'" % (self.pipeline_config_root, self.pipeline_config_root, self.pipeline_config_root)
        self.create_file(loc_yml, loc_yml_data)

        # inject this file which toolkit is probing for to determine
        # if an installation has been localized.
        localize_token_file = os.path.join(self.pipeline_config_root, "install", "core", "_core_upgrader.py")
        self.create_file(localize_token_file, "foo bar")

        roots = {"primary": {}}
        for os_name in ["windows_path", "linux_path", "mac_path"]:
            #TODO make os specific roots
            roots["primary"][os_name] = self.tank_temp
        roots_path = os.path.join(self.pipeline_config_root, "config", "core", "roots.yml")
        roots_file = open(roots_path, "w")
        roots_file.write(yaml.dump(roots))
        roots_file.close()

        # clear bundle in-memory cache
        sgtk.descriptor.io_descriptor.factory.g_cached_instances = {}

        self.pipeline_configuration = sgtk.pipelineconfig_factory.from_path(self.pipeline_config_root)
        self.tk = tank.Tank(self.pipeline_configuration)

        # set up mockgun and make sure shotgun connection calls route via mockgun
        self.mockgun = mockgun.Shotgun("http://unit_test_mock_sg", "mock_user", "mock_key")
        # fake a version response from the server
        self.mockgun.server_info = {"version": (7, 0, 0)}

        self._mock_return_value("tank.util.shotgun.get_associated_sg_base_url", "http://unit_test_mock_sg")
        self._mock_return_value("tank.util.shotgun.create_sg_connection", self.mockgun)

        # add project to mock sg and path cache db
        self.add_production_path(self.project_root, self.project)

        # add pipeline configuration
        self.add_to_sg_mock_db(self.sg_pc_entity)

        # add local storage
        self.primary_storage = {"type": "LocalStorage",
                                "id": 7777,
                                "code": "primary",
                                "windows_path": self.tank_temp,
                                "linux_path": self.tank_temp,
                                "mac_path": self.tank_temp }

        self.add_to_sg_mock_db(self.primary_storage)

        # back up the authenticated user in case a unit test doesn't clean up correctly.
        self._authenticated_user = sgtk.get_authenticated_user()
Esempio n. 55
0
 def test_equal_yml(self):
     """
     Ensures a Context survives a YAML dump/load round trip unchanged.
     """
     context_1 = context.Context(**self.kws)
     serialized = yaml.dump(context_1)
     # yaml.load without an explicit Loader is deprecated since PyYAML 5.1
     # and a TypeError in PyYAML 6. yaml.Loader (not a safe loader) is
     # required here because dumping a Context emits python/object tags
     # that the safe loaders refuse to construct.
     context_2 = yaml.load(serialized, Loader=yaml.Loader)
     # assertEqual still compares with ==, but reports both values on failure.
     self.assertEqual(context_1, context_2)
Esempio n. 56
0
    def init_engine(self):
        """
        Engine setup, run before any apps are initialized.

        Validates the runtime environment (a project must be present in the
        context, Nuke must not be the PLE edition and must be at least
        6.3v5), warns once about untested Nuke releases, then seeds the
        environment variables used by startup/menu.py to bootstrap Toolkit
        in child Nuke processes (file->new / file->open).

        :raises tank.TankError: if the current context has no project.
        """
        # note! not using the import as this confuses nuke's calback system
        # (several of the key scene callbacks are in the main init file...)
        import tk_nuke

        self.log_debug("%s: Initializing..." % self)

        # now check that there is a location on disk which corresponds to the context
        if self.context.project is None:
            # must have at least a project in the context to even start!
            raise tank.TankError(
                "The nuke engine needs at least a project in the context "
                "in order to start! Your context: %s" % self.context)

        # make sure we are not running that bloody nuke PLE!
        if nuke.env.get("ple") == True:
            self.log_error("The Nuke Engine does not work with the Nuke PLE!")
            return

        # make sure that nuke has a higher version than 6.3v5
        # this is because of pyside
        nuke_version = (nuke.env.get("NukeVersionMajor"),
                        nuke.env.get("NukeVersionMinor"),
                        nuke.env.get("NukeVersionRelease"))

        # tuples compare element-wise, so one comparison replaces the
        # separate major/minor/release checks.
        if nuke_version < (6, 3, 5):
            self.log_error("Nuke 6.3v5 is the minimum version supported!")
            return

        # keep track of if a UI exists
        self._ui_enabled = nuke.env.get("gui")

        # versions > 8.0 have not yet been tested so show a message to that effect:
        if nuke_version[:2] > (8, 0):
            # this is an untested version of Nuke
            msg = (
                "The Shotgun Pipeline Toolkit has not yet been fully tested with Nuke %d.%dv%d. "
                "You can continue to use the Toolkit but you may experience bugs or "
                "instability.  Please report any issues you see to [email protected]"
                % (nuke_version[0], nuke_version[1], nuke_version[2]))

            # show nuke message if in UI mode, this is the first time the engine has been started
            # and the warning dialog isn't overriden by the config:
            if (self._ui_enabled
                    and "TANK_NUKE_ENGINE_INIT_NAME" not in os.environ
                    and nuke_version[0] >= self.get_setting(
                        "compatibility_dialog_min_version", 9)):
                nuke.message("Warning - Shotgun Pipeline Toolkit!\n\n%s" % msg)

            # and log the warning
            self.log_warning(msg)

        # now prepare tank so that it will be picked up by any new processes
        # created by file->new or file->open.

        # Store data needed for bootstrapping Tank in env vars. Used in startup/menu.py
        os.environ["TANK_NUKE_ENGINE_INIT_NAME"] = self.instance_name
        os.environ["TANK_NUKE_ENGINE_INIT_CONTEXT"] = yaml.dump(self.context)
        os.environ[
            "TANK_NUKE_ENGINE_INIT_PROJECT_ROOT"] = self.tank.project_path

        # add our startup path to the nuke init path
        startup_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "startup"))
        tank.util.append_path_to_env_var("NUKE_PATH", startup_path)

        # we also need to pass the path to the python folder down to the init script
        # because nuke python does not have a __file__ attribute for that file
        local_python_path = os.path.abspath(
            os.path.join(os.path.dirname(__file__), "python"))
        os.environ["TANK_NUKE_ENGINE_MOD_PATH"] = local_python_path

        # make sure callbacks tracking the context switching are active
        tk_nuke.tank_ensure_callbacks_registered()
Esempio n. 57
0
    def setUp(self, project_tank_name="project_code"):
        """
        Creates and registers test project.

        Builds a sandboxed project and pipeline configuration under the
        temp area, points the factory init cache at a temp file, writes
        the yml files toolkit probes for, routes shotgun access through
        mockgun and registers the project, pipeline configuration and
        primary local storage in the mock database.

        :param project_tank_name: tank_name (folder name on disk) to use
            for the test project.
        """
        self.tank_temp = TANK_TEMP
        self.tank_source_path = TANK_SOURCE_PATH

        self.init_cache_location = os.path.join(self.tank_temp, "init_cache.cache")

        # route the factory's init cache into the temp area so tests don't
        # read or pollute any real user cache.
        def _get_cache_location_mock():
            return self.init_cache_location

        tank.pipelineconfig_factory._get_cache_location = _get_cache_location_mock

        # define entity for test project
        self.project = {"type": "Project",
                        "id": 1,
                        "tank_name": project_tank_name,
                        "name": "project_name"}

        self.project_root = os.path.join(self.tank_temp, self.project["tank_name"].replace("/", os.path.sep))

        self.pipeline_config_root = os.path.join(self.tank_temp, "pipeline_configuration")

        # move away previous data
        self._move_project_data()

        # create new structure
        os.makedirs(self.project_root)
        os.makedirs(self.pipeline_config_root)

        # project level config directories
        self.project_config = os.path.join(self.pipeline_config_root, "config")

        # create project cache directory
        project_cache_dir = os.path.join(self.pipeline_config_root, "cache")
        os.mkdir(project_cache_dir)

        # define entity for pipeline configuration
        self.sg_pc_entity = {"type": "PipelineConfiguration",
                             "code": "Primary",
                             "id": 123,
                             "project": self.project,
                             "windows_path": self.pipeline_config_root,
                             "mac_path": self.pipeline_config_root,
                             "linux_path": self.pipeline_config_root}

        # add files needed by the pipeline config
        pc_yml = os.path.join(self.pipeline_config_root, "config", "core", "pipeline_configuration.yml")
        pc_yml_data = ("{ project_name: %s, use_shotgun_path_cache: true, pc_id: %d, "
                       "project_id: %d, pc_name: %s}\n\n" % (self.project["tank_name"],
                                                             self.sg_pc_entity["id"],
                                                             self.project["id"],
                                                             self.sg_pc_entity["code"]))
        self.create_file(pc_yml, pc_yml_data)

        loc_yml = os.path.join(self.pipeline_config_root, "config", "core", "install_location.yml")
        loc_yml_data = "Windows: '%s'\nDarwin: '%s'\nLinux: '%s'" % (self.pipeline_config_root, self.pipeline_config_root, self.pipeline_config_root)
        self.create_file(loc_yml, loc_yml_data)

        roots = {"primary": {}}
        for os_name in ["windows_path", "linux_path", "mac_path"]:
            #TODO make os specific roots
            roots["primary"][os_name] = self.tank_temp
        roots_path = os.path.join(self.pipeline_config_root, "config", "core", "roots.yml")
        # context manager closes the handle even if the dump/write fails
        with open(roots_path, "w") as roots_file:
            roots_file.write(yaml.dump(roots))

        self.pipeline_configuration = sgtk.pipelineconfig_factory.from_path(self.pipeline_config_root)
        self.tk = tank.Tank(self.pipeline_configuration)

        # set up mockgun and make sure shotgun connection calls route via mockgun
        self.mockgun = MockGun_Shotgun("http://unit_test_mock_sg", "mock_user", "mock_key")

        def get_associated_sg_base_url_mocker():
            return "http://unit_test_mock_sg"

        def create_sg_connection_mocker():
            return self.mockgun

        tank.util.shotgun.get_associated_sg_base_url = get_associated_sg_base_url_mocker
        tank.util.shotgun.create_sg_connection = create_sg_connection_mocker

        # add project to mock sg and path cache db
        self.add_production_path(self.project_root, self.project)

        # add pipeline configuration
        self.add_to_sg_mock_db(self.sg_pc_entity)

        # add local storage
        self.primary_storage = {"type": "LocalStorage",
                                "id": 7777,
                                "code": "primary",
                                "windows_path": self.tank_temp,
                                "linux_path": self.tank_temp,
                                "mac_path": self.tank_temp}

        self.add_to_sg_mock_db(self.primary_storage)
Esempio n. 58
0
def _project_setup_internal(log, sg, sg_app_store, sg_app_store_script_user,
                            setup_params):
    """
    Project setup, internal method.

    :param log: python logger object
    :param sg: shotgun api connection to the associated site
    :param sg_app_store: toolkit app store sg connection
    :param sg_app_store_script_user: The script user used to connect to the app store, as a shotgun link-dict
    :param setup_params: Parameters object which holds gathered project settings
    """

    log.info("")
    log.info("Starting project setup.")

    # get the location of the configuration
    config_location_curr_os = setup_params.get_configuration_location(
        sys.platform)
    config_location_mac = setup_params.get_configuration_location("darwin")
    config_location_linux = setup_params.get_configuration_location("linux2")
    config_location_win = setup_params.get_configuration_location("win32")

    # project id
    project_id = setup_params.get_project_id()

    # get all existing pipeline configurations
    setup_params.report_progress_from_installer(
        "Checking Pipeline Configurations...")

    pcs = sg.find(constants.PIPELINE_CONFIGURATION_ENTITY,
                  [["project", "is", {
                      "id": project_id,
                      "type": "Project"
                  }]], ["code", "linux_path", "windows_path", "mac_path"])

    if len(pcs) > 0:
        if setup_params.get_force_setup():
            # if we have the force flag enabled, remove any pipeline configurations
            for x in pcs:
                log.warning(
                    "Force mode: Deleting old pipeline configuration %s..." %
                    x["code"])
                sg.delete(constants.PIPELINE_CONFIGURATION_ENTITY, x["id"])

        elif not setup_params.get_auto_path_mode():
            # this is a normal setup, e.g. not with the force flag on
            # nor an auto-path where each machine effectively manages its own config
            # for this case, we don't allow the process to proceed if a config exists
            raise TankError(
                "Cannot set up this project! Pipeline configuration entries already exist in Shotgun."
            )

        else:
            # auto path mode
            # make sure that all PCs have empty paths set, either None values or ""
            for x in pcs:
                if x["linux_path"] or x["windows_path"] or x["mac_path"]:
                    raise TankError(
                        "Cannot set up this project! Non-auto-path style pipeline "
                        "configuration entries already exist in Shotgun.")

    # first do disk structure setup, this is most likely to fail.
    setup_params.report_progress_from_installer(
        "Creating main folder structure...")
    log.info("Installing configuration into '%s'..." % config_location_curr_os)
    if not os.path.exists(config_location_curr_os):
        # note that we have already validated that creation is possible
        os.makedirs(config_location_curr_os, 0775)

    # create pipeline config base folder structure
    _make_folder(log, os.path.join(config_location_curr_os, "cache"), 0777)
    _make_folder(log, os.path.join(config_location_curr_os, "config"), 0775)
    _make_folder(log, os.path.join(config_location_curr_os, "install"), 0775)
    _make_folder(log, os.path.join(config_location_curr_os, "install", "core"),
                 0777)
    _make_folder(
        log, os.path.join(config_location_curr_os, "install", "core",
                          "python"), 0777)
    _make_folder(
        log, os.path.join(config_location_curr_os, "install", "core.backup"),
        0777, True)
    _make_folder(log,
                 os.path.join(config_location_curr_os, "install", "engines"),
                 0777, True)
    _make_folder(log, os.path.join(config_location_curr_os, "install", "apps"),
                 0777, True)
    _make_folder(
        log, os.path.join(config_location_curr_os, "install", "frameworks"),
        0777, True)

    # copy the configuration into place
    setup_params.report_progress_from_installer(
        "Setting up template configuration...")
    setup_params.create_configuration(
        os.path.join(config_location_curr_os, "config"))

    # copy the tank binaries to the top of the config
    setup_params.report_progress_from_installer(
        "Copying binaries and API proxies...")
    log.debug("Copying Toolkit binaries...")
    core_api_root = os.path.abspath(
        os.path.join(os.path.dirname(__file__), "..", "..", "..", ".."))
    root_binaries_folder = os.path.join(core_api_root, "setup",
                                        "root_binaries")
    for file_name in os.listdir(root_binaries_folder):
        src_file = os.path.join(root_binaries_folder, file_name)
        tgt_file = os.path.join(config_location_curr_os, file_name)
        shutil.copy(src_file, tgt_file)
        os.chmod(tgt_file, 0775)

    # copy the python stubs
    log.debug("Copying python stubs...")
    tank_proxy = os.path.join(core_api_root, "setup", "tank_api_proxy")
    _copy_folder(
        log, tank_proxy,
        os.path.join(config_location_curr_os, "install", "core", "python"))

    # specify the parent files in install/core/core_PLATFORM.cfg
    log.debug("Creating core redirection config files...")
    setup_params.report_progress_from_installer(
        "Writing configuration files...")

    core_path = os.path.join(config_location_curr_os, "install", "core",
                             "core_Darwin.cfg")
    core_location = setup_params.get_associated_core_path("darwin")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    core_path = os.path.join(config_location_curr_os, "install", "core",
                             "core_Linux.cfg")
    core_location = setup_params.get_associated_core_path("linux2")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    core_path = os.path.join(config_location_curr_os, "install", "core",
                             "core_Windows.cfg")
    core_location = setup_params.get_associated_core_path("win32")
    fh = open(core_path, "wt")
    fh.write(core_location if core_location else "undefined")
    fh.close()

    # write a file location file for our new setup
    sg_code_location = os.path.join(config_location_curr_os, "config", "core",
                                    "install_location.yml")

    # if we are basing our setup on an existing project setup, make sure we can write to the file.
    if os.path.exists(sg_code_location):
        os.chmod(sg_code_location, 0666)

    fh = open(sg_code_location, "wt")
    fh.write("# Shotgun Pipeline Toolkit configuration file\n")
    fh.write("# This file was automatically created by setup_project\n")
    fh.write("# This file reflects the paths in the primary pipeline\n")
    fh.write("# configuration defined for this project.\n")
    fh.write("\n")
    fh.write("Windows: '%s'\n" % config_location_win)
    fh.write("Darwin: '%s'\n" % config_location_mac)
    fh.write("Linux: '%s'\n" % config_location_linux)
    fh.write("\n")
    fh.write("# End of file.\n")
    fh.close()

    # update the roots.yml file in the config to match our settings
    # resuffle list of associated local storages to be a dict keyed by storage name
    # and with keys mac_path/windows_path/linux_path

    log.debug("Writing %s..." % constants.STORAGE_ROOTS_FILE)
    roots_path = os.path.join(config_location_curr_os, "config", "core",
                              constants.STORAGE_ROOTS_FILE)

    roots_data = {}
    for storage_name in setup_params.get_required_storages():

        roots_data[storage_name] = {
            "windows_path":
            setup_params.get_storage_path(storage_name, "win32"),
            "linux_path":
            setup_params.get_storage_path(storage_name, "linux2"),
            "mac_path": setup_params.get_storage_path(storage_name, "darwin")
        }

    try:
        fh = open(roots_path, "wt")
        yaml.dump(roots_data, fh)
        fh.close()
    except Exception, exp:
        raise TankError("Could not write to roots file %s. "
                        "Error reported: %s" % (roots_path, exp))
Esempio n. 59
0
    def setUp(self, project_tank_name = "project_code"):
        """Creates and registers test project."""
        self.tank_temp = TANK_TEMP
        self.tank_source_path = TANK_SOURCE_PATH

        # mocking shotgun data (see add_to_sg_mock)
        self._sg_mock_db = {}

        # define entity for test project
        self.project = {"type": "Project",
                        "id": 1,
                        "tank_name": project_tank_name,
                        "name": "project_name"}

        self.project_root = os.path.join(self.tank_temp, self.project["tank_name"].replace("/", os.path.sep) )
          
        # create project directory
        self._move_project_data()
        
        os.makedirs(self.project_root)
        
        project_tank = os.path.join(self.project_root, "tank")
        os.mkdir(project_tank)

        # project level config directories
        self.project_config = os.path.join(project_tank, "config")

        # create project cache directory
        project_cache_dir = os.path.join(project_tank, "cache")
        os.mkdir(project_cache_dir)

        # create back-link file from project storage
        data = "- {darwin: '%s', linux2: '%s', win32: '%s'}" % (project_tank, project_tank, project_tank) 
        self.create_file(os.path.join(project_tank, "config", "tank_configs.yml"), data)

        # add files needed by the pipeline config
        
        pc_yml = os.path.join(project_tank, "config", "core", "pipeline_configuration.yml")
        pc_yml_data = "{ project_name: %s, pc_id: 123, project_id: 12345, pc_name: Primary}\n\n" % self.project["tank_name"]        
        self.create_file(pc_yml, pc_yml_data)
        
        loc_yml = os.path.join(project_tank, "config", "core", "install_location.yml")
        loc_yml_data = "Windows: '%s'\nDarwin: '%s'\nLinux: '%s'" % (project_tank, project_tank, project_tank)
        self.create_file(loc_yml, loc_yml_data)
        
        roots = {"primary": {}}
        for os_name in ["windows_path", "linux_path", "mac_path"]:
            #TODO make os specific roots
            roots["primary"][os_name] = self.tank_temp        
        roots_path = os.path.join(project_tank, "config", "core", "roots.yml")
        roots_file = open(roots_path, "w") 
        roots_file.write(yaml.dump(roots))
        roots_file.close()        
        
        self.pipeline_configuration = sgtk.pipelineconfig.from_path(project_tank)        

        # add project to mock sg and path cache db
        self.add_production_path(self.project_root, self.project)
        
        # change to return our shotgun object
        def return_sg(*args, **kws):
            return self.sg_mock

        sgtk.util.shotgun.create_sg_connection = return_sg