    def _show_area_in_fs(self, file, environment, template):
        """
        """
        # build fields starting with the context:
        fields = environment.context.as_template_fields(template)
        if file:
            # add any additional fields that we can extract from the work or publish paths
            file_fields = {}
            if file.is_local and environment.work_template:
                try:
                    file_fields = environment.work_template.get_fields(
                        file.path)
                except TankError:
                    # unable to extract fields from the work path - ignore
                    pass
            elif file.is_published and environment.publish_template:
                try:
                    file_fields = environment.publish_template.get_fields(
                        file.publish_path)
                except TankError:
                    # unable to extract fields from the publish path - ignore
                    pass
            # combine with the context fields, preferring the file fields
            fields = dict(
                chain(six.iteritems(fields), six.iteritems(file_fields)))

        # try to build a path from the template with these fields:
        while template and template.missing_keys(fields):
            template = template.parent
        if not template:
            # failed to find a template with no missing keys!
            return
        path = template.apply_fields(fields)

        # finally, show the path:
        self._show_in_fs(path)
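The parent-template fallback above can be exercised on its own. A minimal sketch, assuming an sgtk template object with the standard missing_keys()/parent/apply_fields() API (the field names shown are illustrative):

def resolve_deepest_area(template, fields):
    # walk up the template hierarchy until every key can be resolved
    while template and template.missing_keys(fields):
        template = template.parent
    return template.apply_fields(fields) if template else None

# e.g. {"Sequence": "seq01", "Shot": "sh010"} may only resolve the shot-level
# parent of a work-file template that also requires "name" and "version".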
Example #2
    def __create_output_node(self, path):
        """
        Create the Nuke output node for the movie.

        :param str path:           Path of the output movie

        :returns:               Pre-configured Write node
        :rtype:                 Nuke node
        """
        # get the Write node settings we'll use for generating the Quicktime
        wn_settings = self.__get_quicktime_settings()

        node = nuke.nodes.Write(file_type=wn_settings.get("file_type"))

        # apply any additional knob settings provided by the hook. Now that the knob has been
        # created, we can be sure specific file_type settings will be valid.
        for knob_name, knob_value in six.iteritems(wn_settings):
            if knob_name != "file_type":
                node.knob(knob_name).setValue(knob_value)

        # Don't fail if we're in proxy mode. The default Nuke publish will fail if
        # you try and publish while in proxy mode. But in earlier versions of
        # tk-multi-publish (< v0.6.9) if there is no proxy template set, it falls
        # back on the full-res version and will succeed. This handles that case
        # and any custom cases where you may want to send your proxy render to
        # screening room.
        root_node = nuke.root()
        is_proxy = root_node["proxy"].value()
        if is_proxy:
            self.__app.log_info("Proxy mode is ON. Rendering proxy.")
            node["proxy"].setValue(path.replace(os.sep, "/"))
        else:
            node["file"].setValue(path.replace(os.sep, "/"))

        return node
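For reference, a minimal sketch of the settings dictionary consumed above. Apart from "file_type", the knob names are assumptions and must correspond to knobs that actually exist on the Write node for the chosen file type:

import sys

def _example_quicktime_settings():
    # A minimal, hypothetical settings dict for the Write node above.
    settings = {"file_type": "mov"}
    if sys.platform in ("win32", "darwin"):
        settings["mov64_codec"] = "jpeg"        # assumed codec knob name
        settings["mov64_quality_max"] = "3"     # assumed quality knob name
    return settings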
    def __init__(self, file, file_versions, environment):
        """
        """
        all_versions = [v for v, f in six.iteritems(file_versions)]
        max_version = max(all_versions) if all_versions else 0

        sandbox_user = None
        if (environment and environment.contains_user_sandboxes
                and environment.context and environment.context.user
                and g_user_cache.current_user
                and environment.context.user["id"] !=
                g_user_cache.current_user["id"]):
            sandbox_user = environment.context.user.get(
                "name", "Unknown").split(" ")[0]

        label = ""
        if file.version == max_version:
            label = "Open"
        else:
            label = "Open v%03d" % file.version
        if not file.editable:
            label = "%s (Read-only)" % label
        if sandbox_user is not None:
            label = "%s from %s's Sandbox" % (label, sandbox_user)

        OpenFileAction.__init__(self, label, file, file_versions, environment)
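For illustration, a few of the labels this constructor produces (the versions and sandbox user are hypothetical):

# file.version == max_version, editable              -> "Open"
# file.version == 2, max_version == 5, editable      -> "Open v002"
# same, but not editable                             -> "Open v002 (Read-only)"
# same, in another user's sandbox ("Alice Smith")    -> "Open v002 (Read-only) from Alice's Sandbox"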
    def find_file_versions(self, work_area, file_key, clean_only=False):
        """
        Find all file versions for the specified file key and context.

        :param work_area:       The work area to find the file versions for
        :param file_key:        A unique file key that can be used to locate all versions of a single file
        :param clean_only:      If False then dirty cache entries will be included in the returned results.  If
                                True then they will be omitted. Defaults to False.
        :returns:               A dictionary {version:FileItem} of all file versions found, or None if there
                                is no (clean) cache entry for this work area.
        """
        _, entry = self._find_entry(work_area)
        if not entry:
            # return None as we don't have a cached result for this context!
            return None

        if clean_only and entry.is_dirty:
            return None

        file_info = entry.file_info.get(file_key)
        if not file_info:
            # although we have a cache entry, we don't have any files for the key!
            return {}

        # return a dictionary of version:FileItem entries:
        return dict([(v, f) for v, f in six.iteritems(file_info.versions)])
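A short usage sketch distinguishing the three possible results; the cache, work_area and file_key names are assumptions:

versions = cache.find_file_versions(work_area, file_key, clean_only=True)
if versions is None:
    # no (clean) cache entry for this work area - fall back to a fresh scan
    pass
elif versions:
    latest_item = versions[max(versions)]   # FileItem for the highest version number
else:
    pass  # cached, but no versions exist for this file key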
Example #5
        def minimize(self):
            """
            Minimize the size of the cache by removing any entries that are no longer valid
            """
            if not self.enabled:
                return

            self._cache = dict([(k, v) for k, v in six.iteritems(self._cache)
                                if v[0].isValid()])
    def __init__(self, label, file, file_versions, environment):
        """
        Construct the action, computing the next available version number (one
        above the highest known version) and appending it to the label.

        :param label: Base label for the action.
        :param file: The file item the action will operate on.
        :param file_versions: A dictionary of {version:FileItem} for all versions of the file.
        :param environment: The work area environment for the file.
        """
        # Q. should the next version include the current version?
        all_versions = [v for v, f in six.iteritems(file_versions)] + [file.version]
        max_version = max(all_versions)
        self._version = max_version + 1
        label = "%s (as v%03d)" % (label, self._version)
        OpenFileAction.__init__(self, label, file, file_versions, environment)
Example #7
def sanitize_qt(val):
    """
    Converts a value to a tk friendly and consistent representation.
    - QVariants are converted to native python structures
    - QStrings are converted to utf-8 encoded strs
    - unicode objects are converted to utf-8 encoded strs

    :param val: input object
    :returns: cleaned up data
    """

    # test things in order of probable occurrence for speed
    if val is None:
        return None

    elif six.PY2 and isinstance(val, unicode):
        return val.encode("UTF-8")

    elif HAS_QSTRING and isinstance(val, QtCore.QString):
        # convert any QStrings to utf-8 encoded strings
        # note the cast to str because pyqt returns a QByteArray
        return str(val.toUtf8())

    elif HAS_QBYTEARRAY and isinstance(val, QtCore.QByteArray):
        # convert byte arrays to strs
        return str(val)

    elif HAS_QVARIANT and isinstance(val, QtCore.QVariant):
        # convert any QVariant to their python native equivalents
        val = val.toPyObject()
        # and then sanitize this
        return sanitize_qt(val)

    elif isinstance(val, list):
        return [sanitize_qt(d) for d in val]

    elif isinstance(val, dict):
        new_val = {}
        for (k, v) in six.iteritems(val):
            # both keys and values can be bad
            safe_key = sanitize_qt(k)
            safe_val = sanitize_qt(v)
            new_val[safe_key] = safe_val
        return new_val

    # QT Version: 5.9.5
    # PySide Version: 5.9.0a1
    # The value should be `int` but it is `long`.
    # longs do not exist in Python 3, so we need to cast those.
    elif six.PY2 and isinstance(val, long):
        val = int(val)
        return val
    else:
        return val
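A hedged illustration of sanitize_qt() on nested data under Python 2 semantics (plain values pass through, unicode becomes a UTF-8 encoded str, and any QString/QVariant wrappers would be unwrapped first):

data = {u"name": u"sh010", "ids": [1, 2, 3], "nested": {u"step": u"Anim"}}
clean = sanitize_qt(data)
# -> {"name": "sh010", "ids": [1, 2, 3], "nested": {"step": "Anim"}}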
Example #8
    def execute(self, parent_ui):
        """
        """
        publish_versions = [
            v for v, f in six.iteritems(self.file_versions) if f.is_published
        ]
        if not publish_versions:
            return

        max_publish_version = max(publish_versions)
        self._open_url_for_published_file(
            self.file_versions[max_publish_version])
    def _create_local_file_actions(self, file_item, file_versions):
        """
        Creates a list of actions if the file item is a local file.

        :param file_item: File item to generate actions for.
        :param file_versions: Filtered list of file versions for the current user.

        :returns: List of actions.
        """

        actions = []

        if not file_item.is_local:
            return actions

        # all actions available when selection is a work file
        # ------------------------------------------------------------------
        actions.append(SeparatorAction())

        # add the general open action - this just opens the file in-place.
        actions.append(
            OpenWorkfileAction(file_item, file_versions, self._work_area))

        if self._in_other_users_sandbox:
            # file is in another user sandbox so add appropriate actions:
            actions.append(
                ContinueFromWorkFileAction(file_item, file_versions,
                                           self._work_area))

            if self._change_work_area and self._can_copy_to_work_area:
                actions.append(
                    CopyAndOpenFileInCurrentWorkAreaAction(
                        file_item, file_versions, self._work_area))

        else:
            # file isn't in a different sandbox so add regular open actions:
            if file_item.editable:
                # determine if this version is the latest:
                all_versions = [v for v, f in six.iteritems(file_versions)]
                max_version = max(all_versions) if all_versions else 0
                if file_item.version != max_version:
                    actions.append(
                        ContinueFromWorkFileAction(file_item, file_versions,
                                                   self._work_area))

            if self._change_work_area and self._can_copy_to_work_area:
                actions.append(
                    CopyAndOpenFileInCurrentWorkAreaAction(
                        file_item, file_versions, self._work_area))

        return actions
    def _create_show_in_actions(self, file_item, file_versions):
        """
        Creates a list of actions to show the file item in Shotgun or on the file system.

        :param file_item: File item to generate actions for.
        :param file_versions: Filtered list of file versions for the current user.

        :returns: List of actions.
        """
        show_in_actions = []
        if file_item.is_local:
            show_in_actions.append(
                ShowWorkFileInFileSystemAction(file_item, file_versions,
                                               self._work_area))
        else:
            if self._work_area.work_area_template:
                show_in_actions.append(
                    ShowWorkAreaInFileSystemAction(file_item, file_versions,
                                                   self._work_area))

        if file_item.is_published:
            show_in_actions.append(
                ShowPublishInFileSystemAction(file_item, file_versions,
                                              self._work_area))
            show_in_actions.append(
                ShowPublishInShotgunAction(file_item, file_versions,
                                           self._work_area))
        else:
            if self._work_area.publish_area_template:
                show_in_actions.append(
                    ShowPublishAreaInFileSystemAction(file_item, file_versions,
                                                      self._work_area))

            # see if we have any publishes:
            publish_versions = [
                v for v, f in six.iteritems(file_versions) if f.is_published
            ]
            if publish_versions:
                show_in_actions.append(
                    ShowLatestPublishInShotgunAction(file_item, file_versions,
                                                     self._work_area))

        actions = []
        if show_in_actions:
            # ------------------------------------------------------------------
            actions.append(SeparatorAction())
            actions.extend(show_in_actions)

        return actions
Example #11
def sanitize_for_qt_model(val):
    """
    Useful when you have shotgun (or other) data and want to
    prepare it for storage as role data in a model.

    Qt/pyside/pyqt automatically changes the data to be unicode
    according to internal rules of its own, sometimes resulting in
    unicode errors. A safe strategy for storing unicode data inside
    Qt model roles is therefore to ensure everything is converted to
    unicode prior to insertion into the model. This method ensures
    that. All string values will be converted to unicode. UTF-8
    is assumed for all strings:

    in:  {"a":"aaa", "b": 123, "c": {"x":"y", "z":"aa"}, "d": [ {"x":"y", "z":"aa"} ] }
    out: {'a': u'aaa', 'c': {'x': u'y', 'z': u'aa'}, 'b': 123, 'd': [{'x': u'y', 'z': u'aa'}]}

    This method is the counterpart to sanitize_qt(), which performs the
    reciprocal operation. When working with Qt models and shotgun data,
    we recommend the following best practices:

    - when sg data is inserted into a role in a model, run it through
      sanitize_for_qt_model() first
    - When taking it back out again, run it through sanitize_qt()

    :param val: value to convert
    :returns: sanitized data
    """

    if isinstance(val, list):
        return [sanitize_for_qt_model(d) for d in val]

    elif isinstance(val, dict):
        new_val = {}
        for (k, v) in six.iteritems(val):
            # go through dictionary and convert each value separately
            new_val[k] = sanitize_for_qt_model(v)
        return new_val

    elif six.PY2 and isinstance(val, str):
        return val.decode("UTF-8")

    # for everything else, just pass through
    return val
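A minimal round-trip sketch of the recommended practice, assuming a PySide-style QStandardItem is available via sgtk.platform.qt; the custom role is an assumption:

from sgtk.platform.qt import QtCore, QtGui

SG_DATA_ROLE = QtCore.Qt.UserRole + 1          # hypothetical custom role

sg_data = {"type": "Shot", "id": 123, "code": "sh010"}
item = QtGui.QStandardItem(sg_data["code"])
# going into the model: force all strings to unicode first
item.setData(sanitize_for_qt_model(sg_data), SG_DATA_ROLE)
# coming back out: strip any QVariant/QString wrappers again
data = sanitize_qt(item.data(SG_DATA_ROLE))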
Example #12
    def _sg_data_matches_r(self, sg_data, compare_fields, reg_exp):
        """
        """
        if isinstance(compare_fields, list):
            # e.g. ["one", "two", {"three":"four", "five":["six", "seven"]}]
            for cf in compare_fields:
                if isinstance(cf, dict):
                    # e.g. {"three":"four", "five":["six", "seven"]}
                    for key, value in six.iteritems(cf):
                        data = sg_data.get(key)
                        if data:
                            if self._sg_data_matches_r(data, value, reg_exp):
                                return True
                else:
                    # e.g. "one"
                    if self._sg_data_matches_r(sg_data, cf, reg_exp):
                        return True
        else:
            # e.g. "one"
            val = sg_data.get(compare_fields)
            if val is not None and reg_exp.indexIn(str(val)) != -1:
                return True

        return False
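A hedged example of the compare_fields specification this method walks; the field names and pattern are illustrative only:

from sgtk.platform.qt import QtCore

# match against the file name, the linked entity's name or code, or the task content
compare_fields = ["name", {"entity": ["name", "code"], "task": "content"}]
reg_exp = QtCore.QRegExp("anim", QtCore.Qt.CaseInsensitive)
sg_data = {"name": "sh010_anim_v003", "entity": {"name": "sh010"}, "task": {"content": "Animation"}}
# self._sg_data_matches_r(sg_data, compare_fields, reg_exp)  # -> True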
Example #13
    def execute(self, parent_ui):
        """
        """
        if not self.file:
            return False

        # this is the smart action where all the logic tries to decide what the actual
        # action should be!
        # print "Opening file '%s' which is in user sandbox '%s'" % (self.file.path, self.environment.context.user["name"])

        # get information about the max local & publish versions:
        local_versions = [
            v for v, f in six.iteritems(self.file_versions) if f.is_local
        ]
        publish_versions = [
            v for v, f in six.iteritems(self.file_versions) if f.is_published
        ]
        max_local_version = max(local_versions) if local_versions else None
        max_publish_version = max(
            publish_versions) if publish_versions else None
        max_version = max(0, max_local_version or 0, max_publish_version or 0)

        if (self._publishes_visible and self.file.is_published
                and (not self._workfiles_visible or not self.file.is_local)):
            # opening a publish and either not showing work files or the file isn't local
            if self.file.version < max_publish_version:
                # opening an old version of a publish!
                return self._open_previous_publish(self.file, self.environment,
                                                   parent_ui)
            else:
                # opening the most recent version of a publish!
                latest_work_file = None
                if max_local_version is not None:
                    latest_work_file = self.file_versions[max_local_version]
                return self._open_publish_with_check(
                    self.file,
                    latest_work_file,
                    self.environment,
                    max_version + 1,
                    parent_ui,
                )

        elif self._workfiles_visible and self.file.is_local:
            # opening a workfile and either not showing publishes or the file hasn't been published
            # OR
            # opening a file that is both local and published and both are visible in the view!
            # (is this the right thing to do when a file is both local and a publish??)
            if self.file.version < max_local_version:
                # opening an old version of work file:
                return self._open_previous_workfile(self.file,
                                                    self.environment,
                                                    parent_ui)
            else:
                # opening the most recent version of a work file!
                latest_publish = None
                if max_publish_version is not None:
                    latest_publish = self.file_versions[max_publish_version]
                return self._open_workfile_with_check(
                    self.file,
                    latest_publish,
                    self.environment,
                    max_version + 1,
                    parent_ui,
                )
        else:
            # this shouldn't happen and is in here primarily for debug purposes!
            raise NotImplementedError(
                "Unsure what action to take when opening this file!")

        # didn't do anything!
        return False
    def _process_publish_files(
        self,
        sg_publishes,
        publish_template,
        work_template,
        context,
        name_map,
        version_compare_ignore_fields,
        filter_file_key=None,
    ):
        """
        """
        files = {}

        # and add in publish details:
        ctx_fields = context.as_template_fields(work_template)

        for sg_publish in sg_publishes:
            file_details = {}

            # always have a path:
            publish_path = sg_publish["path"]

            # determine the work path fields from the publish fields + ctx fields:
            # The order is important as it ensures that the user is correct if the
            # publish file is in a user sandbox, but we also need to be careful not
            # to overwrite fields that are being ignored when comparing work files.
            publish_fields = publish_template.get_fields(publish_path)
            wp_fields = publish_fields.copy()
            # publishes use "pub_name" whereas work files use "name"
            if "pub_name" in publish_fields and publish_fields["pub_name"]:
                wp_fields["name"] = publish_fields["pub_name"]
            for k, v in ctx_fields.items():
                if k not in version_compare_ignore_fields:
                    wp_fields[k] = v

            # build the unique file key for the publish path.  All files that share the same key are considered
            # to be different versions of the same file.
            file_key = FileItem.build_file_key(
                wp_fields, work_template, version_compare_ignore_fields
            )
            if filter_file_key and file_key != filter_file_key:
                # we can ignore this file completely!
                continue

            # resolve the work path:
            work_path = ""
            try:
                work_path = work_template.apply_fields(wp_fields)
            except TankError:
                # unable to generate a work path - this means we are probably missing a field, so it's
                # going to be a problem matching this publish up with its corresponding work file!
                work_path = ""

            # copy common fields from sg_publish:
            #
            file_details = dict(
                [(k, v) for k, v in six.iteritems(sg_publish) if k != "path"]
            )

            # get version from fields if not specified in publish file:
            if file_details["version"] == None:
                file_details["version"] = publish_fields.get("version", 0)

            # entity
            file_details["entity"] = context.entity

            # local file modified details:
            if os.path.exists(publish_path):
                try:
                    modified_at = os.path.getmtime(publish_path)
                    file_details["modified_at"] = datetime.fromtimestamp(
                        modified_at, tz=sg_timezone.local
                    )
                except OSError:
                    # ignore OSErrors as it's probably a permissions thing!
                    pass
                file_details["modified_by"] = g_user_cache.get_file_last_modified_user(
                    publish_path
                )
            else:
                # just use the publish info
                file_details["modified_at"] = sg_publish.get("published_at")
                file_details["modified_by"] = sg_publish.get("published_by")

            if not file_details["name"]:
                # make sure all files with the same key have the same name:
                file_details["name"] = name_map.get_name(
                    file_key, publish_path, publish_template, publish_fields
                )

            # add new file item for this publish.  Note that we also keep track of the
            # work path even though we don't know if this publish has a corresponding
            # work file.
            files[(file_key, file_details["version"])] = {
                "key": file_key,
                "work_path": work_path,
                "is_published": True,
                "publish_path": publish_path,
                "publish_details": file_details,
            }
        return files
    def _process_work_files(
        self,
        work_files,
        work_template,
        context,
        name_map,
        version_compare_ignore_fields,
        filter_file_key=None,
    ):
        """
        :param work_files: A list of dictionaries with file details.
        :param work_template: The template which was used to generate the files list.
        :param context: The context for which the files are retrieved.
        :param name_map: A :class:`_FileNameMap` instance.
        :param version_compare_ignore_fields: A list of template fields to ignore
                                              when building a key for the file.
        :param filter_file_key: A unique file 'key' that, if specified, will limit
                                the returned list of files to just those that match.
        :returns: A dictionary where keys are (file key, version number) tuples
                  and values are dictionaries which can be used to instantiate
                  :class:`FileItem`.
        """
        files = {}

        for work_file in work_files:

            # always have the work path:
            work_path = work_file["path"]

            # get fields for work file:
            wf_fields = work_template.get_fields(work_path)
            wf_ctx = None

            # Build the unique file key for the work path.
            # All files that share the same key are considered
            # to be different versions of the same file.
            #
            file_key = FileItem.build_file_key(
                wf_fields, work_template, version_compare_ignore_fields
            )
            if filter_file_key and file_key != filter_file_key:
                # we can ignore this file completely!
                continue

            # copy common fields from work_file:
            #
            file_details = dict(
                [(k, v) for k, v in six.iteritems(work_file) if k != "path"]
            )

            # get version from fields if not specified in work file:
            if not file_details["version"]:
                file_details["version"] = wf_fields.get("version", 0)

            # if no task try to determine from context or path:
            if not file_details["task"]:
                if context.task:
                    file_details["task"] = context.task
                else:
                    # try to create a context from the path and see if that contains a task:
                    wf_ctx = self._app.sgtk.context_from_path(work_path, context)
                    if wf_ctx and wf_ctx.task:
                        file_details["task"] = wf_ctx.task

            # Add additional fields:
            #

            # Entity:
            file_details["entity"] = context.entity

            # File modified details:
            if not file_details["modified_at"]:
                try:
                    modified_at = os.path.getmtime(work_path)
                    file_details["modified_at"] = datetime.fromtimestamp(
                        modified_at, tz=sg_timezone.local
                    )
                except OSError:
                    # ignore OSErrors as it's probably a permissions thing!
                    pass

            if not file_details["modified_by"]:
                file_details["modified_by"] = g_user_cache.get_file_last_modified_user(
                    work_path
                )

            if not file_details["name"]:
                # make sure all files with the same key have the same name:
                file_details["name"] = name_map.get_name(
                    file_key, work_path, work_template, wf_fields
                )

            # add to the list of files
            files[(file_key, file_details["version"])] = {
                "key": file_key,
                "is_work_file": True,
                "work_path": work_path,
                "work_details": file_details,
            }

        return files
    def find_files(
        self, work_template, publish_template, context, filter_file_key=None
    ):
        """
        Find files using the specified context, work and publish templates

        :param work_template:       The template to use when searching for work files
        :param publish_template:    The template to use when searching for publish files
        :param context:             The context to search for files with
        :param filter_file_key:     A unique file 'key' that if specified will limit the returned list of files to just
                                    those that match.  This 'key' should be generated using the FileItem.build_file_key()
                                    method.
        :returns:                   A list of FileItem instances, one for each unique version of a file found in either
                                    the work or publish areas
        """
        # can't find anything without a work template!
        if not work_template:
            return []

        # determine the publish filters to use from the context:
        publish_filters = [["entity", "is", context.entity or context.project]]
        if context.task:
            publish_filters.append(["task", "is", context.task])
        else:
            publish_filters.append(["task", "is", None])

        # get the list of valid file extensions if set:
        valid_file_extensions = [
            ".%s" % ext if not ext.startswith(".") else ext
            for ext in self._app.get_setting("file_extensions", [])
        ]

        # get list of fields that should be ignored when comparing work files:
        version_compare_ignore_fields = self._app.get_setting(
            "version_compare_ignore_fields", []
        )

        # find all work & publish files and filter out any that should be ignored:
        work_files = self._find_work_files(
            context, work_template, version_compare_ignore_fields
        )
        filtered_work_files = self._filter_work_files(work_files, valid_file_extensions)

        published_files = self._find_publishes(publish_filters)
        filtered_published_files = self._filter_publishes(
            published_files, publish_template, valid_file_extensions
        )

        # turn these into FileItem instances:
        name_map = FileFinder._FileNameMap()
        work_file_item_details = self._process_work_files(
            filtered_work_files,
            work_template,
            context,
            name_map,
            version_compare_ignore_fields,
            filter_file_key,
        )
        work_file_items = dict(
            [
                (k, FileItem(**kwargs))
                for k, kwargs in six.iteritems(work_file_item_details)
            ]
        )

        publish_item_details = self._process_publish_files(
            filtered_published_files,
            publish_template,
            work_template,
            context,
            name_map,
            version_compare_ignore_fields,
            filter_file_key,
        )
        publish_items = dict(
            [
                (k, FileItem(**kwargs))
                for k, kwargs in six.iteritems(publish_item_details)
            ]
        )

        # and aggregate the results:
        file_items = list(work_file_items.values())
        for file_key_and_version, publish in six.iteritems(publish_items):
            work_file = work_file_items.get(file_key_and_version)
            if not work_file:
                file_items.append(publish)
                continue

            # merge with work file:
            work_file.update_from_publish(publish)

        return file_items
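A hedged usage sketch of the finder; the constructor arguments, setting names and FileItem attributes referenced below are assumptions:

finder = FileFinder()  # constructor arguments, if any, are omitted here
file_items = finder.find_files(
    work_template=app.get_template("template_work"),        # hypothetical setting names
    publish_template=app.get_template("template_publish"),
    context=app.context,
)
# keep only the highest version per unique file key (attribute names assumed)
latest = {}
for f in file_items:
    if f.key not in latest or f.version > latest[f.key].version:
        latest[f.key] = f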