Example #1
    def _copy_work_to_publish(self, settings, item):
        # Validate templates
        work_template = item.properties.get("work_template")
        if not work_template:
            self.logger.debug("No work template set on the item. "
                              "Skipping copy file to publish location.")
            return

        publish_template = item.properties.get("publish_template")
        if not publish_template:
            self.logger.debug("No publish template set on the item. "
                              "Skipping copying file to publish location.")
            return

        # Source path
        source_path = item.properties["path"]
        target_path = self._get_target_path(item)

        try:
            publish_folder = os.path.dirname(target_path)
            ensure_folder_exists(publish_folder)
            self._translate_file(source_path, target_path, item)
        except Exception:
            raise Exception("Failed to copy work file from '%s' to '%s'.\n%s" %
                            (source_path, target_path, traceback.format_exc()))

        self.logger.debug("Copied work file '%s' to publish file '%s'." %
                          (source_path, target_path))
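
Every example on this page funnels through Toolkit's ensure_folder_exists helper, imported either directly or via the sgtk.util.filesystem module. A minimal sketch of the behavior the helper provides, using only the standard library; the real implementation also accepts a permissions argument and handles races between processes:

import os
import errno

def ensure_folder_exists(folder):
    # Create the folder (and any missing parents) if it does not exist yet.
    try:
        os.makedirs(folder)
    except OSError as e:
        # Another process may have created it in the meantime; that is fine.
        if e.errno != errno.EEXIST:
            raise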
Example #2
    def publish_files(self, task_settings, item, publish_path):
        """
        Overrides the inherited method for copying the work file to the publish location
        to instead export out the scene geometry to the publish_path location.

        :param task_settings: Dictionary of Settings. The keys are strings, matching
            the keys returned in the settings property. The values are `Setting`
            instances.
        :param item: Item to process
        :param publish_path: The output path to publish files to
        """
        # Create a temporary file; the Alembic exported from Maya is written
        # here first and is later renamed to the publish_path location
        publish_file_temp = tempfile.NamedTemporaryFile(mode='w+b',
                                                        suffix='.abc')

        publish_path_temp = publish_file_temp.name.replace("\\", "/")

        # Decide which type of Alembic to export, based on the current plugin name
        current_plugin = self.plugin.name

        # ensure the publish folder exists:
        publish_folder = os.path.dirname(publish_path)
        ensure_folder_exists(publish_folder)

        try:
            if current_plugin == "Publish Geometry":
                # Exporting alembic to a temp location.
                # This will later be renamed and written to the publish_path
                self._export_abc_cache(task_settings=task_settings,
                                       item=item,
                                       export_path=publish_path_temp)

            elif current_plugin == "Publish GPU Alembic Cache":
                current_lod_item = item.get_property("lod_full_name")

                # Clear the selection and select only the group that
                # needs to be exported for the GPU cache
                cmds.select(clear=True)
                cmds.select(current_lod_item)

                self._export_gpu_abc_cache(task_settings=task_settings,
                                           item=item,
                                           export_path=publish_path_temp)

            # Rename the top group in the exported Alembic to the asset name.
            asset_name = item.context.entity["name"]
            self._rename_abc_top_group(publish_path_temp, str(publish_path),
                                       asset_name)
        except Exception as e:
            raise Exception("Failed to export Geometry: %s" % e)

        self.logger.debug("Exported group %s to '%s'." %
                          (item.properties.fields["node"], publish_path))

        # Finally destroying the temporary file
        publish_file_temp.close()

        return [publish_path]
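
A note on the NamedTemporaryFile usage above: the object is used only to mint a unique scratch path, and the exporters then write to that name while the Python file object is still open. On Windows an open temporary file cannot be re-opened by name, so a more portable variant mints the path with mkstemp and closes the OS handle first. A hedged sketch; the helper name is illustrative:

import os
import tempfile

def make_scratch_path(suffix=".abc"):
    # mkstemp returns an open OS-level handle plus the path; close the
    # handle right away so any exporter can write to the path by name.
    handle, path = tempfile.mkstemp(suffix=suffix)
    os.close(handle)
    return path.replace("\\", "/")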
Example #3
def _save_session(path):
    """
    Save the current session to the supplied path.
    """
    # max won't ensure that the folder is created when saving, so we must make sure it exists
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)
    MaxPlus.FileManager.Save(path)
Example #4
def _save_session(path):
    """
    Save the current session to the supplied path.
    """
    # Motionbuilder won't ensure that the folder is created when saving, so we must make sure it exists
    ensure_folder_exists(os.path.dirname(path))

    mb_app.FileSave(path)
Example #6
def _save_session(path):
    """
    Save the current session to the supplied path.
    """
    # Nuke won't ensure that the folder is created when saving, so we must make sure it exists
    ensure_folder_exists(os.path.dirname(path))

    nuke.scriptSaveAs(path, True)
Example #7
def _save_session(path):
    """
    Save the current session to the supplied path.
    """
    # max won't ensure that the folder is created when saving, so we must make sure it exists
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)
    MaxPlus.FileManager.Save(path)
Example #8
def _save_session(path):
    """
    Save the current session to the supplied path.
    """
    # max won't ensure that the folder is created when saving, so we must make sure it exists
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)
    pymxs.runtime.saveMaxFile(path)
Example #9
    def _save_session(self, path, version, item):
        """
        Save the current session to the supplied path.
        """
        ensure_folder_exists(os.path.dirname(path))
        nuke.scriptSaveAs(path, True)

        # Save the updated property
        item.properties.path = path
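
The _save_session variants above and below all share one shape: resolve the parent folder of the target path, ensure it exists, then hand the path to the DCC's native save call. A DCC-agnostic sketch of that pattern; the callable argument is an illustrative assumption:

import os

def save_session(path, dcc_save):
    """Ensure the parent folder exists, then delegate to a DCC save callable."""
    folder = os.path.dirname(path)
    if not os.path.isdir(folder):
        os.makedirs(folder)
    dcc_save(path)

# e.g. for Nuke:  save_session(path, lambda p: nuke.scriptSaveAs(p, True))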
Example #10
    def _export(self, settings, item, path):
        node_full_document_name = item.properties.get("node_full_document_name")
        active_document = rumba.active_document()
        if active_document:
            node = active_document.child(node_full_document_name)
            if node:
                folder = os.path.dirname(path)
                ensure_folder_exists(folder)
                node.write(path)
Example #11
    def execute(self, path, bundle_obj, **kwargs):
        """
        Handle folder creation issued from an app, framework or engine.

        :param path: path to create
        :param bundle_obj: object requesting the creation. This is a legacy
                           parameter and we recommend using self.parent instead.
        """
        filesystem.ensure_folder_exists(path, permissions=0o777)
Example #12
    def _save_session(self, path, version, item):
        """
        Save the current session to the supplied path.
        """
        ensure_folder_exists(os.path.dirname(path))
        tde4.saveProject(path)

        # Save the updated property
        item.properties.path = path
Example #13
    def execute(self, path, bundle_obj, **kwargs):
        """
        Handle folder creation issued from an app, framework or engine.

        :param path: path to create
        :param bundle_obj: object requesting the creation. This is a legacy
                           parameter and we recommend using self.parent instead.
        """
        filesystem.ensure_folder_exists(path, permissions=0o777)
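
For context on the execute hooks above: app, engine, and framework code does not normally call this core hook directly. A bundle requests folder creation through its own ensure_folder_exists method, which routes to the hook. A hedged sketch; the path is illustrative:

import sgtk

class MyApp(sgtk.platform.Application):
    def init_app(self):
        # Routed through the core ensure_folder_exists hook shown above.
        self.ensure_folder_exists("/projects/demo/publish/renders")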
Example #14
def _save_session(path):
    """
    Save the current session to the supplied path.
    """

    # Ensure that the folder is created when saving
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)

    bpy.ops.wm.save_mainfile(filepath=path)
Example #15
    def _save_session(self, path, version, item):
        """
        Save the current session to the supplied path.
        """
        # Nuke Studio won't ensure that the folder is created when saving, so we must make sure it exists
        ensure_folder_exists(os.path.dirname(path))
        item.properties.project.saveAs(path)

        # Save the updated property
        item.properties.path = path
Example #16
    def publish_files(self, task_settings, item, publish_path):
        """
        Overrides the inherited method for copying the work file to the publish location
        to instead export out the scene geometry to the publish_path location.

        :param task_settings: Dictionary of Settings. The keys are strings, matching
            the keys returned in the settings property. The values are `Setting`
            instances.
        :param item: Item to process
        :param publish_path: The output path to publish files to
        """
        publisher = self.parent

        # set the alembic args that make the most sense when working with Mari.
        # These flags will ensure the export of an Alembic file that contains
        # all visible geometry from the current scene together with UV's and
        # face sets for use in Mari.
        alembic_args = [
            # only renderable objects (visible and not templated)
            "-renderableOnly",
            # write shading group set assignments (Maya 2015+)
            "-writeFaceSets",
            # write uv's (only the current uv set gets written)
            "-uvWrite"
        ]

        # find the animated frame range to use:
        start_frame, end_frame = _find_scene_animation_range()
        if start_frame and end_frame:
            alembic_args.append("-fr %d %d" % (start_frame, end_frame))

        # Set the output path:
        # Note: The AbcExport command expects forward slashes!
        alembic_args.append("-file %s" % publish_path.replace("\\", "/"))

        # build the export command.  Note, use AbcExport -help in Maya for
        # more detailed Alembic export help
        abc_export_cmd = ("AbcExport -j \"%s\"" % " ".join(alembic_args))

        # ...and execute it:
        try:
            # ensure the publish folder exists:
            publish_folder = os.path.dirname(publish_path)
            ensure_folder_exists(publish_folder)

            publisher.log_debug("Executing command: %s" % abc_export_cmd)
            cmds.refresh(suspend=True)
            try:
                mel.eval(abc_export_cmd)
            finally:
                # always resume viewport refresh, even if the export fails
                cmds.refresh(suspend=False)
        except Exception as e:
            raise Exception("Failed to export Geometry: %s" % e)

        self.logger.debug("Exported scene geometry to '%s'." % publish_path)

        return [publish_path]
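
_find_scene_animation_range() is referenced above but not shown. In the classic Maya publish hooks this helper checks the scene for animation curves and, if any exist, returns the playback range. A plausible reconstruction, offered as an assumption rather than the original:

import maya.cmds as cmds

def _find_scene_animation_range():
    """Return (start, end) frames if the scene is animated, else (0, 0)."""
    # No animation curves means there is no frame range worth exporting.
    if not cmds.ls(type="animCurve"):
        return 0, 0
    start = int(cmds.playbackOptions(q=True, min=True))
    end = int(cmds.playbackOptions(q=True, max=True))
    return start, end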
Example #17
def _save_session(path):
    """
    Save the current session to the supplied path.
    """

    # Ensure that the folder is created when saving
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)

    engine = sgtk.platform.current_engine()
    engine.app.save_project_as(path)
Example #18
def _save_session(path, project):
    """
    Save the current session to the supplied path.
    :param path: str path to save the file
    :param project: Nuke Studio Project obj
    :return: None
    """

    # Nuke Studio won't ensure that the folder is created when saving, so we must make sure it exists
    ensure_folder_exists(os.path.dirname(path))
    project.saveAs(path)
Example #19
def _save_session(path):
    """
    Save the current session to the supplied path.
    """

    # Ensure that the folder is created when saving
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)

    comp = fusion.GetCurrentComp()
    comp.Save(path)
Example #20
def _save_session(path):
    """
    Save the current session to the supplied path.
    """

    # Ensure that the folder is created when saving
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)

    with disabled_updates():
        ix.application.save_project(path)
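
disabled_updates() above is a context manager that is not part of the snippet. One plausible implementation, assuming Clarisse's ix.application.disable()/enable() pair suspends evaluation while the project saves; verify against the Clarisse API in use:

from contextlib import contextmanager

import ix  # Clarisse scripting module

@contextmanager
def disabled_updates():
    # Assumed API: pause Clarisse evaluation/UI updates for the duration.
    ix.application.disable()
    try:
        yield
    finally:
        ix.application.enable()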
Example #21
def _save_session(path):
    """
    Save the current session to the supplied path.
    """

    # Ensure that the folder is created when saving
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)

    engine = sgtk.platform.current_engine()
    engine.app.save_project_as(path)
Example #22
def _save_session(path):
    """
    Save the current session to the supplied path.
    """

    # Ensure that the folder is created when saving
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)

    natron_app = NatronGui.natron.getActiveInstance()
    natron_app.saveProjectAs(path)
Example #23
def _save_session(path, project):
    """
    Save the current session to the supplied path.
    :param path: str path to save the file
    :param project: Nuke Studio Project obj
    :return: None
    """

    # Nuke Studio won't ensure that the folder is created when saving, so we must make sure it exists
    ensure_folder_exists(os.path.dirname(path))
    project.saveAs(path)
Example #24
    def _symlink_files(self, item):
        """
        This method handles symlinking an item's publish_path to
        publish_symlink_path, assuming publish_symlink_path is already populated.

        If the item has "sequence_paths" set, it will attempt to symlink all paths
        assuming they meet the required criteria.
        """

        publisher = self.parent

        source_path = item.properties["publish_path"]
        dest_path = item.properties["publish_symlink_path"]

        # ---- get a list of files to be symlinked
        if item.properties["is_sequence"]:
            work_files = item.properties.get("sequence_paths", [])
        else:
            work_files = [item.properties["path"]]

        # ---- symlink the publish files to the publish symlink path
        processed_files = []
        for work_file in work_files:

            if item.properties["is_sequence"]:
                frame_num = publisher.util.get_frame_number(work_file)
                source_file = publisher.util.get_path_for_frame(source_path, frame_num)
                dest_file = publisher.util.get_path_for_frame(dest_path, frame_num)
            else:
                source_file = source_path
                dest_file = dest_path

            # If the symlink paths and publish path are the same, skip...
            if dest_file == source_file:
                continue

            # symlink the file
            try:
                dest_folder = os.path.dirname(dest_file)
                ensure_folder_exists(dest_folder)
                symlink_file(source_file, dest_file)
            except Exception:
                raise Exception(
                    "Failed to symlink published file from '%s' to '%s'.\n%s" %
                    (source_file, dest_file, traceback.format_exc())
                )

            self.logger.debug(
                "Symlinked published file '%s' to '%s'." % (source_file, dest_file)
            )
            processed_files.append(dest_file)

        return processed_files
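
symlink_file is imported from elsewhere in that hook. A minimal sketch of what such a helper might do; the real one may also manage permissions or logging:

import os

def symlink_file(source_file, dest_file):
    # Re-point an existing link rather than failing on it.
    if os.path.islink(dest_file):
        os.remove(dest_file)
    os.symlink(source_file, dest_file)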
Example #25
    def _copy_outsource_to_work(self, settings, item):

        work_template = item.properties.get("work_template")
        if not work_template:
            self.logger.debug("No work template set on the item. "
                              "Skipping copy file to publish location.")
            return

        # by default, the path that was collected for publishing
        outsource_files = [item.properties.path]
        # ---- copy the outsource files to the work location

        if not item.properties.fields:
            self.logger.debug("No item fields supplied from collector."
                              "Required to resolve template paths.")
            return

        for outsource_file in outsource_files:

            # if not work_template.validate(outsource_file):
            #     self.logger.warning(
            #         "Work file '%s' did not match work template '%s'. "
            #         "Publishing in place." % (outsource_file, work_template)
            #     )
            #     return

            # work_fields = work_template.get_fields(outsource_file)

            # missing_keys = work_template.missing_keys(work_fields)

            # if missing_keys:
            #     self.logger.warning(
            #         "Work file '%s' missing keys required for the publish "
            #         "template: %s" % (outsource_file, missing_keys)
            #     )
            #     return

            work_file = work_template.apply_fields(item.properties.fields)
            self.logger.debug(">>>>> work_file: %s" % str(work_file))
            self.logger.debug(">>>>> outsource_file: %s" % str(outsource_file))

            # copy the file
            try:
                work_folder = os.path.dirname(work_file)
                ensure_folder_exists(work_folder)
                copy_file(outsource_file, work_file)
            except Exception:
                raise Exception(
                    "Failed to copy outsource file from '%s' to '%s'.\n%s" %
                    (outsource_file, work_file, traceback.format_exc()))

            self.logger.debug("Copied work file '%s' to work file '%s'." %
                              (outsource_file, work_file))
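
The commented-out block above sketches the usual Toolkit template round trip: validate a path against the work template, extract its fields, then apply them to produce a new path. Condensed, using the standard Template API:

# Condensed form of the validation the comments describe.
if work_template.validate(outsource_file):
    fields = work_template.get_fields(outsource_file)    # path -> fields
    if not work_template.missing_keys(fields):
        work_file = work_template.apply_fields(fields)   # fields -> path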
Example #26
def _save_session(path):
    """
    Save the current session to the supplied path.
    """

    # Ensure that the folder is created when saving
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)

    doc = c4d.documents.GetActiveDocument()
    c4d.documents.SaveDocument(doc, str(path), c4d.SAVEDOCUMENTFLAGS_NONE, c4d.FORMAT_C4DEXPORT)
    c4d.documents.LoadFile(path)
Example #27
    def _copy_work_to_publish(self, settings, item):
        """
        This method handles exporting a layer and copying it to a designated
        publish location.
        This method requires a "publish_template" be set on the supplied item.
        The method will not attempt to copy files if any of the above
        requirements are not met. If the requirements are met, the file will
        ensure the publish path folder exists and then copy the file to that
        location.
        """

        krita_app = Krita.instance()

        node = item.properties["node"]
        session_path = item.properties["session_path"]
        active_doc = item.properties.get("session_document")

        export_path = self.get_export_path(settings, item)
        export_path_folder = os.path.dirname(export_path)
        ensure_folder_exists(export_path_folder)

        # this is so the publisher picks the right location for this layer
        item.properties.path = export_path

        # export the layer
        with _batch_mode(True):
            self._export_layer(node, export_path, active_doc)

        publish_path = self.get_publish_path(settings, item)

        # if the publish path is different from where the layer was exported,
        # copy the file over
        if not os.path.normpath(publish_path) == os.path.normpath(export_path):
            publish_folder = os.path.dirname(publish_path)
            ensure_folder_exists(publish_folder)

            # copy the file to the publish location
            try:
                copy_file(export_path, publish_path)
                self.logger.debug(
                    "Copied exported files '%s' to publish folder '%s'." %
                    (export_path, publish_path))
            except Exception:
                raise TankError(
                    "Failed to copy exported file from '%s' to '%s'.\n%s" %
                    (export_path, publish_path, traceback.format_exc()))
        else:
            self.logger.debug("Skipping copy file to publish location.")

        # this is so the publisher picks the right location for this layer
        item.properties.path = publish_path
        item.set_thumbnail_from_path(publish_path)
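
_batch_mode above is a helper context manager not shown in the snippet. A plausible implementation on top of Krita's batchmode()/setBatchmode() API, hedged since the original may differ:

from contextlib import contextmanager

from krita import Krita

@contextmanager
def _batch_mode(enabled):
    # Batch mode suppresses Krita's interactive dialogs during export.
    app = Krita.instance()
    previous = app.batchmode()
    app.setBatchmode(enabled)
    try:
        yield
    finally:
        app.setBatchmode(previous)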
Example #28
def _save_session(path):
    """
    Save the current session to the supplied path.
    """

    # Ensure that the folder is created when saving
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)

    active_doc = rumba.active_document()
    if active_doc:
        main_window = rumbapy.widget("MainWindow")
        main_window.save_at(path)
Example #29
    def _copy_files(self, dest_path, item):
        """
        Overriding this method to process CDL files instead of simply copying them to the publish location.

        This method handles copying an item's path(s) to a designated location.
        """
        cc_dict = item.properties["cc_data"]

        # ensure that the folder actually exists!
        dest_folder = os.path.dirname(dest_path)
        ensure_folder_exists(dest_folder)

        self.write_cc(cc_path=dest_path, **cc_dict)
Example #30
    def copy_folder(self, src_folders, dest_folder, seal_folder=False):
        """
        This method handles copying an item's folder(s) to a designated location.

        """

        publisher = self.parent

        logger = publisher.logger

        # ---- copy the src folders to the dest location
        processed_folders = []
        for src_folder in src_folders:

            if not os.path.isdir(src_folder):
                continue

            # If the folder paths are the same, lock permissions
            if src_folder == dest_folder:
                filesystem.freeze_permissions(dest_folder)
                continue

            # copy the folder
            try:
                filesystem.ensure_folder_exists(dest_folder)
                filesystem.copy_folder(src_folder,
                                       dest_folder,
                                       folder_permissions=stat.S_IRUSR
                                       | stat.S_IRGRP | stat.S_IROTH)

                if seal_folder:
                    try:
                        filesystem.seal_file(dest_folder)
                    except Exception as e:
                        # primary function is to copy. Do not raise exception if sealing fails.
                        self.logger.warning(
                            "File '%s' could not be sealed, skipping: %s" %
                            (dest_folder, e))
                        self.logger.warning(traceback.format_exc())

            except Exception:
                raise Exception(
                    "Failed to copy folder from '%s' to '%s'.\n%s" %
                    (src_folder, dest_folder, traceback.format_exc()))

            logger.debug("Copied folder '%s' to '%s'." %
                         (src_folder, dest_folder))
            processed_folders.append(dest_folder)

        return processed_folders
Example #31
def _save_session(path):
    """
    Save the current session to the supplied path.
    """

    # Ensure that the folder is created when saving
    folder, filename = os.path.split(path)
    ensure_folder_exists(folder)

    doc = c4d.documents.GetActiveDocument()

    doc.SetDocumentName(filename)
    doc.SetDocumentPath(folder)
    c4d.documents.SaveDocument(doc, path, c4d.SAVEDOCUMENTFLAGS_NONE,
                               c4d.FORMAT_C4DEXPORT)
Example #32
def _save_session(path):
    """
    Save the current session to the supplied path.
    """

    # Ensure that the folder is created when saving
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)

    active_doc = _session_document()
    success = active_doc.saveAs(path)
    active_doc.waitForDone()

    if success:
        active_doc.setFileName(path)
Example #33
    def copy_files(self,
                   src_files,
                   dest_path,
                   seal_files=False,
                   is_sequence=False):
        """
        This method handles copying an item's path(s) to a designated location.

        If the item has "sequence_paths" set, it will attempt to copy all paths
        assuming they meet the required criteria.
        """

        publisher = self.parent

        logger = publisher.logger

        # ---- copy the src files to the dest location
        processed_files = []
        for src_file in src_files:

            if is_sequence:
                frame_num = self.get_frame_number(src_file)
                dest_file = self.get_path_for_frame(dest_path, frame_num)
            else:
                dest_file = dest_path

            # If the file paths are the same, lock permissions
            if src_file == dest_file:
                filesystem.freeze_permissions(dest_file)
                continue

            # copy the file
            try:
                dest_folder = os.path.dirname(dest_file)
                filesystem.ensure_folder_exists(dest_folder)
                filesystem.copy_file(src_file,
                                     dest_file,
                                     permissions=stat.S_IRUSR | stat.S_IRGRP
                                     | stat.S_IROTH,
                                     seal=seal_files)
            except Exception:
                raise Exception("Failed to copy file from '%s' to '%s'.\n%s" %
                                (src_file, dest_file, traceback.format_exc()))

            logger.debug("Copied file '%s' to '%s'." % (src_file, dest_file))
            processed_files.append(dest_file)

        return processed_files
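
A hypothetical call for the method above, copying a two-frame sequence to a sealed, read-only publish location. All paths are illustrative, and the publisher.util route is an assumption based on how the other examples reach their util hooks:

published = publisher.util.copy_files(
    src_files=["/work/sh010_v003.1001.exr", "/work/sh010_v003.1002.exr"],
    dest_path="/publish/sh010_v003.%04d.exr",
    seal_files=True,
    is_sequence=True,
)
# -> ["/publish/sh010_v003.1001.exr", "/publish/sh010_v003.1002.exr"]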
Example #34
    def _copy_files(self, dest_path, item):
        """
        This method handles copying an item's path(s) to a designated location.

        If the item has "sequence_paths" set, it will attempt to copy all paths
        assuming they meet the required criteria.
        """

        publisher = self.parent

        # ---- get a list of files to be copied
        if item.properties["is_sequence"]:
            work_files = item.properties.get("sequence_paths", [])
        else:
            work_files = [item.properties["path"]]

        # ---- copy the work files to the publish location
        processed_files = []
        for work_file in work_files:

            if item.properties["is_sequence"]:
                frame_num = publisher.util.get_frame_number(work_file)
                dest_file = publisher.util.get_path_for_frame(dest_path, frame_num)
            else:
                dest_file = dest_path

            # If the file paths are the same, skip...
            if work_file == dest_file:
                continue

            # copy the file
            try:
                dest_folder = os.path.dirname(dest_file)
                ensure_folder_exists(dest_folder)
                copy_file(work_file, dest_file,
                          permissions=stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IROTH)
            except Exception:
                raise Exception(
                    "Failed to copy work file from '%s' to '%s'.\n%s" %
                    (work_file, dest_file, traceback.format_exc())
                )

            self.logger.debug(
                "Copied work file '%s' to '%s'." % (work_file, dest_file)
            )
            processed_files.append(dest_file)

        return processed_files
Example #35
    def _save_session(self, path, version, item):
        """
        Save the current session to the supplied path.
        """

        ensure_folder_exists(os.path.dirname(path))
        active_project = fx.activeProject()
        if path != active_project.path:
            save_path = self.parent.engine.utils.get_stripped_project_path(
                path)
            active_project.save(save_path)
        else:
            active_project.save()

        # Save the updated property
        item.properties.path = path
Example #36
    def execute(self, path, bundle_obj, **kwargs):
        """
        Creates folders on disk.

        Toolkit bundles call this method when they want to ensure that
        a leaf-level folder structure exists on disk. In the case where customization
        is required, the hook is passed the bundle that issued the original request.
        This should allow for some sophisticated introspection inside the hook.

        The default implementation creates these folders with read/write
        permissions for everyone.

        :param str path: path to create
        :param bundle_obj: Object requesting the creation. This is a legacy
                              parameter and we recommend using self.parent instead.
        :type bundle_obj: :class:`~sgtk.platform.Engine`, :class:`~sgtk.platform.Framework`
            or :class:`~sgtk.platform.Application`
        """
        filesystem.ensure_folder_exists(path, permissions=0o777)
Example #37
def _save_session(path):
    """
    Save the current session to the supplied path.
    """

    # Maya can choose the wrong file type so we should set it here
    # explicitly based on the extension
    maya_file_type = None
    if path.lower().endswith(".ma"):
        maya_file_type = "mayaAscii"
    elif path.lower().endswith(".mb"):
        maya_file_type = "mayaBinary"

    # Maya won't ensure that the folder is created when saving, so we must make sure it exists
    folder = os.path.dirname(path)
    ensure_folder_exists(folder)

    cmds.file(rename=path)

    # save the scene:
    if maya_file_type:
        cmds.file(save=True, force=True, type=maya_file_type)
    else:
        cmds.file(save=True, force=True)
Example #38
    def get_path_cache_path(self, project_id, plugin_id, pipeline_configuration_id):
        """
        Establish a location for the path cache database file.

        This hook method was introduced in Toolkit v0.18 and replaces the previous
        ``path_cache`` method. If you already have implemented ``path_cache``,
        this will be detected and called instead, however we strongly recommend
        that you tweak your hook.

        Overriding this method in a hook allows a user to change the location on disk where
        the path cache file is located. The path cache file holds a temporary cache representation
        of the ``FilesystemLocation`` entities stored in Shotgun for a project.

        The default implementation will create a folder inside the user's home folder or
        under ``SHOTGUN_HOME``.

        :param int project_id: The Shotgun id of the project to store caches for. None if
                               the configuration is a site configuration.
        :param str plugin_id: Unique string to identify the scope for a particular plugin
                              or integration. For more information,
                              see :meth:`~sgtk.bootstrap.ToolkitManager.plugin_id`. For
                              non-plugin based toolkit projects, this value is None.
        :param int pipeline_configuration_id: The Shotgun pipeline configuration id to store caches
                                              for. If the pipeline configuration is unmanaged, it
                                              will be ``None``
        :returns: The path to a path cache file. This file should exist when this method returns.
        :rtype: str
        """
        # backwards compatibility with custom hooks created before 0.18
        if hasattr(self, "path_cache") and callable(getattr(self, "path_cache")):
            # there is a custom version of the legacy hook path_cache
            log.warning(
                "Detected old core cache hook implementation. "
                "It is strongly recommended that this is upgraded."
            )

            # call legacy hook to make sure we call the custom
            # implementation that is provided by the user.
            # this implementation expects project id 0 for
            # the site config, so ensure that's the case too
            if project_id is None:
                project_id = 0

            return self.path_cache(project_id, pipeline_configuration_id)

        cache_filename = "path_cache.db"

        tk = self.parent

        cache_root = LocalFileStorageManager.get_configuration_root(
            tk.shotgun_url,
            project_id,
            plugin_id,
            pipeline_configuration_id,
            LocalFileStorageManager.CACHE
        )

        target_path = os.path.join(cache_root, cache_filename)

        if os.path.exists(target_path):
            # new style path cache file exists, return it
            return target_path

        # The target path does not exist. This could be because it just hasn't
        # been created yet, or it could be because of a core upgrade where the
        # cache root directory structure has changed (such is the case with
        # v0.17.x -> v0.18.x). To account for this scenario, see if the target
        # exists in an old location first, and if so, return that path instead.
        legacy_cache_root = LocalFileStorageManager.get_configuration_root(
            tk.shotgun_url,
            project_id,
            plugin_id,
            pipeline_configuration_id,
            LocalFileStorageManager.CACHE,
            generation=LocalFileStorageManager.CORE_V17
        )

        legacy_target_path = os.path.join(legacy_cache_root, cache_filename)

        if os.path.exists(legacy_target_path):
            # legacy path cache file exists, return it
            return legacy_target_path

        # neither the new-style nor the legacy path cache exists; use the new style
        filesystem.ensure_folder_exists(cache_root)
        filesystem.touch_file(target_path)

        return target_path
Example #39
    def validatePage(self):
        """The 'next' button was pushed. See if the mappings are valid."""

        logger.debug("Validating the storage mappings page...")

        # the wizard instance and its UI
        wiz = self.wizard()
        ui = wiz.ui

        # clear any errors
        ui.storage_errors.setText("")

        # get the path key for the current os
        current_os_key = ShotgunPath.get_shotgun_storage_key()

        logger.debug("Current OS storage path key: %s" % (current_os_key,))

        # temp lists of widgets that need attention
        invalid_widgets = []
        not_on_disk_widgets = []

        # keep track of the first invalid widget so we can ensure it is visible
        # to the user in the list.
        first_invalid_widget = None

        logger.debug("Checking all map widgets...")

        # see if each of the mappings is valid
        for map_widget in self._map_widgets:

            logger.debug(
                "Checking mapping for root: %s" %
                (map_widget.root_name,)
            )

            if not map_widget.mapping_is_valid():
                # something is wrong with this widget's mapping
                invalid_widgets.append(map_widget)
                if first_invalid_widget is None:
                    first_invalid_widget = map_widget

            storage = map_widget.local_storage or {}
            current_os_path = storage.get(current_os_key)

            if current_os_path and not os.path.exists(current_os_path):
                # the current os path for this widget doesn't exist on disk
                not_on_disk_widgets.append(map_widget)

        if invalid_widgets:
            # tell the user which roots don't have valid mappings
            root_names = [w.root_name for w in invalid_widgets]
            logger.debug("Invalid mappings for roots: %s" % (root_names))
            ui.storage_errors.setText(
                "The mappings for these roots are invalid: <b>%s</b>" %
                (", ".join(root_names),)
            )
            if first_invalid_widget:
                ui.storage_map_area.ensureWidgetVisible(first_invalid_widget)
            return False

        if not_on_disk_widgets:

            # try to create the folders for current OS if they don't exist
            failed_to_create = []
            for widget in not_on_disk_widgets:

                storage = widget.local_storage
                folder = storage[current_os_key]

                logger.debug(
                    "Ensuring folder on disk for storage '%s': %s" %
                    (storage["code"], folder)
                )

                # try to create the missing path for the current OS. this will
                # help ensure the storage specified in SG is valid and the
                # project data can be written to this root.
                try:
                    ensure_folder_exists(folder)
                except Exception:
                    logger.error("Failed to create folder: %s" % (folder,))
                    logger.error(traceback.format_exc())
                    failed_to_create.append(storage["code"])

            if failed_to_create:
                # some folders weren't created. let the user know.
                ui.storage_errors.setText(
                    "Unable to create folders on disk for these storages: %s. "
                    "Please check to make sure you have permission to create "
                    "these folders. See the tk-desktop log for more info." %
                    (", ".join(failed_to_create),)
                )

        # ---- now we've mapped the roots, and they're all valid, we need to
        #      update the root information on the core wizard

        for map_widget in self._map_widgets:

            root_name = map_widget.root_name
            root_info = map_widget.root_info
            storage_data = map_widget.local_storage

            # populate the data defined prior to mapping
            updated_storage_data = root_info

            # update the mapped shotgun data
            updated_storage_data["shotgun_storage_id"] = storage_data["id"]
            updated_storage_data["linux_path"] = str(storage_data["linux_path"])
            updated_storage_data["mac_path"] = str(storage_data["mac_path"])
            updated_storage_data["windows_path"] = str(
                storage_data["windows_path"])

            # now update the core wizard's root info
            wiz.core_wizard.update_storage_root(
                self._uri,
                root_name,
                updated_storage_data
            )

            # store the fact that we've mapped this root name with this
            # storage name. we can use this information to make better
            # guesses next time this user is mapping storages.
            self._historical_mappings[root_name] = storage_data["code"]
            self._settings.store(
                self.HISTORICAL_MAPPING_KEY,
                self._historical_mappings
            )

        logger.debug("Storage mappings are valid.")

        # if we made it here, then we should be valid.
        try:
            wiz.core_wizard.set_config_uri(self._uri)
        except Exception as e:
            error = (
                "Unknown error when setting the configuration uri:\n%s" %
                str(e)
            )
            logger.error(error)
            logger.error(traceback.format_exc())
            ui.storage_errors.setText(error)
            return False

        return True
Example #40
    def get_bundle_data_cache_path(self, project_id, plugin_id, pipeline_configuration_id, bundle):
        """
        Establish a cache folder for an app, engine or framework.

        This hook method was introduced in Toolkit v0.18 and replaces the previous ``bundle_cache``
        method. If you already have implemented ``bundle_cache``, this will be detected and called
        instead, however we strongly recommend that you tweak your hook.

        Apps, Engines or Frameworks commonly cache data on disk. This can be
        small files, Shotgun queries, thumbnails, etc. This method implements the
        logic which defines this location on disk. The cache should be organized in
        a way so that all instances of the app can re-use the same data. Bundles
        which need to cache things per-instance can implement this using a sub
        folder inside the bundle cache location.

        It is possible to omit some components of the path by explicitly passing
        a ``None`` value for them. Only the bundle name is required. For example,
        with ``project_id=None``, a site level cache path will be returned.
        Omitting the ``project_id`` can be used to cache data for the site
        configuration, or to share data across all projects belonging to a
        common site.

        The default implementation will create a folder inside the user's home folder or
        under ``SHOTGUN_HOME``.

        :param int project_id: The Shotgun id of the project to store caches for, or None.
        :param str plugin_id: Unique string to identify the scope for a particular plugin
                              or integration, or None. For more information,
                              see :meth:`~sgtk.bootstrap.ToolkitManager.plugin_id`. For
                              non-plugin based toolkit projects, this value is None.
        :param int pipeline_configuration_id: The Shotgun pipeline config id to store caches for
                                              or ``None`` if the pipeline configuration is unmanaged.
        :param bundle: The app, engine or framework object which is requesting the cache folder.
        :type bundle: :class:`~sgtk.platform.Engine`, :class:`~sgtk.platform.Framework` or
                      :class:`~sgtk.platform.Application`
        :returns: The path to a folder which should exist on disk.
        :rtype: str
        """
        # backwards compatibility with custom hooks created before 0.18
        if hasattr(self, "bundle_cache") and callable(getattr(self, "bundle_cache")):
        # there is a custom version of the legacy hook bundle_cache
            log.warning(
                "Detected old core cache hook implementation. "
                "It is strongly recommended that this is upgraded."
            )

            # call legacy hook to make sure we call the custom
            # implementation that is provided by the user.
            # this implementation expects project id 0 for
            # the site config, so ensure that's the case too
            if project_id is None:
                project_id = 0

            return self.bundle_cache(project_id, pipeline_configuration_id, bundle)

        tk = self.parent
        cache_root = LocalFileStorageManager.get_configuration_root(
            tk.shotgun_url,
            project_id,
            plugin_id,
            pipeline_configuration_id,
            LocalFileStorageManager.CACHE
        )

        # in the interest of trying to minimize path lengths (to avoid
        # the MAX_PATH limit on windows), we apply some shortcuts

        # if the bundle is a framework, we shorten it:
        # tk-framework-shotgunutils --> fw-shotgunutils
        # if the bundle is a multi-app, we shorten it:
        # tk-multi-workfiles2 --> tm-workfiles2
        bundle_name = bundle.name
        bundle_name = bundle_name.replace("tk-framework-", "fw-")
        bundle_name = bundle_name.replace("tk-multi-", "tm-")

        target_path = os.path.join(cache_root, bundle_name)

        if os.path.exists(target_path):
            # new style cache bundle folder exists, return it
            return target_path

        # The target path does not exist. This could be because it just hasn't
        # been created yet, or it could be because of a core upgrade where the
        # cache root directory structure has changed (such is the case with
        # v0.17.x -> v0.18.x). To account for this scenario, see if the target
        # exists in an old location first, and if so, return that path instead.
        legacy_cache_root = LocalFileStorageManager.get_configuration_root(
            tk.shotgun_url,
            project_id,
            plugin_id,
            pipeline_configuration_id,
            LocalFileStorageManager.CACHE,
            generation=LocalFileStorageManager.CORE_V17
        )
        legacy_target_path = os.path.join(legacy_cache_root, bundle.name)

        if os.path.exists(legacy_target_path):
            # legacy cache bundle folder exists, return it
            return legacy_target_path

        # neither the new-style nor the legacy cache folder exists; use the new style
        filesystem.ensure_folder_exists(target_path)

        return target_path
Example #41
    def publish(self, settings, item):
        """
        Executes the publish logic for the given item and settings.

        :param settings: Dictionary of Settings. The keys are strings, matching
                         the keys returned in the settings property.
                         The values are `Setting` instances.
        :param item: Item to process
        """

        publisher = self.parent

        publish_template = item.properties["publish_template"]
        publish_type = item.properties["publish_type"]

        # Get fields from the current context
        fields = {}
        ctx_fields = self.parent.context.as_template_fields(publish_template)
        fields.update(ctx_fields)

        context_entity_type = self.parent.context.entity['type']
        publish_name = context_entity_type + "_textures"

        existing_publishes = self._find_publishes(self.parent.context, publish_name, publish_type)
        version = max([p["version_number"] for p in existing_publishes] or [0]) + 1
        fields["version"] = version

        publish_path = publish_template.apply_fields(fields)
        publish_path = sgtk.util.ShotgunPath.normalize(publish_path)

        # make sure destination folder exists
        ensure_folder_exists(publish_path)

        textures = item.properties["textures"]

        for src in textures:
            _, filename = os.path.split(src)
            dst = os.path.join(publish_path, filename)
            sgtk.util.filesystem.copy_file(src, dst)

        self.logger.info("A Publish will be created in Shotgun and linked to:")
        self.logger.info("  %s" % (publish_path,))

        # arguments for publish registration

        # add dependencies
        dependency_paths = []
        if "sg_publish_path" in item.parent.properties:
            self.logger.debug("Added dependency: %s" % item.parent.properties.sg_publish_path)
            dependency_paths.append(item.parent.properties.sg_publish_path)

        self.logger.info("Registering publish...")

        publish_data = {
            "tk": publisher.sgtk,
            "context": item.context,
            "comment": item.description,
            "path": publish_path,
            "name": publish_name,
            "version_number": version,
            "thumbnail_path": item.get_thumbnail_as_path(),
            "published_file_type": publish_type,
            "dependency_paths": dependency_paths,
        }

        # log the publish data for debugging
        self.logger.debug(
            "Populated Publish data...",
            extra={
                "action_show_more_info": {
                    "label": "Publish Data",
                    "tooltip": "Show the complete Publish data dictionary",
                    "text": "<pre>%s</pre>" % (pprint.pformat(publish_data),)
                }
            }
        )

        # create the publish and stash it in the item properties for other
        # plugins to use.
        item.properties["sg_publish_data"] = sgtk.util.register_publish(
            **publish_data)

        # inject the publish path such that children can refer to it when
        # updating dependency information
        item.properties["sg_publish_path"] = publish_path

        self.logger.info("Publish registered!")

        # Now that we've published, keep a handle on the path that was published
        item.properties["path"] = publish_path
Example #42
def _build_bundle_cache(sg_connection, target_path, config_descriptor_uri):
    """
    Perform a build of the bundle cache.

    This will build the bundle cache for a given config descriptor.

    :param sg_connection: Shotgun connection
    :param target_path: Path to build
    :param config_descriptor_uri: Descriptor of the configuration to cache.
    """
    logger.info("The build will generated into '%s'" % target_path)

    bundle_cache_root = os.path.join(target_path, BUNDLE_CACHE_ROOT_FOLDER_NAME)

    # try to create target path
    logger.info("Creating bundle cache folder...")
    filesystem.ensure_folder_exists(bundle_cache_root)

    # Resolve the configuration
    cfg_descriptor = create_descriptor(
        sg_connection,
        Descriptor.CONFIG,
        config_descriptor_uri,
        # If the user hasn't specified the version to retrieve, resolve the latest from Shotgun.
        resolve_latest=is_descriptor_version_missing(config_descriptor_uri)
    )

    logger.info("Resolved config %r" % cfg_descriptor)
    logger.info("Runtime config descriptor uri will be %s" % config_descriptor_uri)

    # cache config in bundle cache
    logger.info("Downloading and caching config...")

    cfg_descriptor.ensure_local()

    # copy the config payload across to the plugin bundle cache
    cfg_descriptor.clone_cache(bundle_cache_root)

    # cache all apps, engines and frameworks
    cache_apps(sg_connection, cfg_descriptor, bundle_cache_root)

    if cfg_descriptor.associated_core_descriptor:
        logger.info("Config is specifying a custom core in config/core/core_api.yml.")
        logger.info("This will be used when the config is executing.")
        logger.info("Ensuring this core (%s) is cached..." % cfg_descriptor.associated_core_descriptor)
        bootstrap_core_desc = create_descriptor(
            sg_connection,
            Descriptor.CORE,
            cfg_descriptor.associated_core_descriptor,
            bundle_cache_root_override=bundle_cache_root
        )
        # cache it
        bootstrap_core_desc.ensure_local()
        bootstrap_core_desc.clone_cache(bundle_cache_root)

    cleanup_bundle_cache(bundle_cache_root)

    logger.info("")
    logger.info("Build complete!")
    logger.info("")
    logger.info("- Your bundle cache is ready in '%s'" % target_path)
    logger.info("- All dependencies have been baked out into the bundle_cache folder")
    logger.info("")