    def _create_previous_versions_actions_menus(self, file_item):
        """
        Creates a list of previous-version actions menus if the file item has
        previous versions.

        :param file_item: File item to generate actions for.

        :returns: List of actions.
        """
        actions = []
        # ------------------------------------------------------------------
        actions.append(SeparatorAction())

        actions.extend(
            self._create_previous_versions_actions_menu(
                "Previous Work Files",
                [
                    item for item in six.itervalues(file_item.versions)
                    if file_item.version > item.version and item.is_local
                ],
            ))

        actions.extend(
            self._create_previous_versions_actions_menu(
                "Previous Publishes",
                [
                    item for item in six.itervalues(file_item.versions)
                    if file_item.version > item.version and item.is_published
                ],
            ))

        return actions
    def destroy(self):
        """
        Destroy this model and any deferred models attached to it.
        """
        for deferred_model in six.itervalues(self._deferred_models):
            deferred_model.destroy()
        self._deferred_models = {}
        super(ShotgunDeferredEntityModel, self).destroy()

    def clear(self):
        """
        Clear the data we hold.
        """
        self._deferred_cache = ShotgunDataHandlerCache()
        for deferred_model in six.itervalues(self._deferred_models):
            deferred_model.clear()
        self._deferred_models = {}
        super(ShotgunDeferredEntityModel, self).clear()
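
For reference, the two list comprehensions in _create_previous_versions_actions_menus above simply partition the file item's versions dictionary by the current version number and the is_local / is_published flags. Below is a minimal standalone sketch of that filtering, using a hypothetical FileVersion namedtuple and made-up data instead of the toolkit's real file item class:

from collections import namedtuple

# Hypothetical stand-in for the toolkit's file item versions; only the fields
# used by the filtering logic are modelled here.
FileVersion = namedtuple("FileVersion", ["version", "is_local", "is_published"])

versions = {
    1: FileVersion(1, True, False),
    2: FileVersion(2, True, True),
    3: FileVersion(3, False, True),
}
current_version = 3

# previous local work files, mirroring the first comprehension above
previous_work_files = [
    v for v in versions.values()
    if current_version > v.version and v.is_local
]
# previous publishes, mirroring the second comprehension above
previous_publishes = [
    v for v in versions.values()
    if current_version > v.version and v.is_published
]

print([v.version for v in previous_work_files])  # [1, 2]
print([v.version for v in previous_publishes])   # [2]
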
Example 4
    def get_children(self, parent_uid):
        """
        Generator that yields all children of the given item.

        :param parent_uid: unique id for cache item
        :returns: :class:`ShotgunItemData` instances
        """
        from .data_item import ShotgunItemData  # local import to avoid cycles

        if parent_uid is None:
            # this is the root
            cache_node = self._cache
        else:
            # resolve cache node from uid
            cache_node = self._cache[self.CACHE_BY_UID].get(parent_uid)

        if cache_node:
            for item in six.itervalues(cache_node[self.CACHE_CHILDREN]):
                data_item = ShotgunItemData(item)
                yield data_item
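
get_children walks one level of a nested cache dictionary and wraps each raw entry before yielding it. The following is a rough, self-contained sketch of the same pattern, with a plain dict standing in for ShotgunDataHandlerCache and a trivial ItemData class standing in for ShotgunItemData (both are stand-ins, not the real classes):

CACHE_CHILDREN = "children"
CACHE_BY_UID = "by_uid"

class ItemData(object):
    # hypothetical stand-in for ShotgunItemData: just wraps a raw cache entry
    def __init__(self, raw):
        self.raw = raw

# a plain dict standing in for the cache internals
cache = {
    CACHE_CHILDREN: {"a": {"name": "Asset A"}, "b": {"name": "Asset B"}},
    CACHE_BY_UID: {"a": {CACHE_CHILDREN: {"a/1": {"name": "Task 1"}}}},
}

def get_children(cache, parent_uid):
    # None means the root of the cache; otherwise look the node up by uid
    node = cache if parent_uid is None else cache[CACHE_BY_UID].get(parent_uid)
    if node:
        for raw in node[CACHE_CHILDREN].values():
            yield ItemData(raw)

print([c.raw["name"] for c in get_children(cache, None)])  # ['Asset A', 'Asset B']
print([c.raw["name"] for c in get_children(cache, "a")])   # ['Task 1']
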
Example 5
    def test_data_path(self):
        """
        Test project/site data paths.
        """
        app = self.engine.apps["test_app"]
        project_data_cache_path = app.cache_location
        # We should have the project id in the path
        self.assertTrue("%sp%d" %
                        (os.path.sep,
                         app.context.project["id"]) in project_data_cache_path)
        site_data_cache_path = app.site_cache_location
        # We should not have the project id in the path
        self.assertFalse(
            "%sp%d" %
            (os.path.sep, app.context.project["id"]) in site_data_cache_path)
        # The path should end with "/site/<bundle name>"
        self.assertTrue(
            site_data_cache_path.endswith(
                "%ssite%s%s" % (os.path.sep, os.path.sep, app.name)))
        # Test frameworks
        for fw in six.itervalues(app.frameworks):
            fw_data_cache_path = fw.cache_location
            # We should have the project id in the path
            self.assertTrue(
                "%sp%d" %
                (os.path.sep, app.context.project["id"]) in fw_data_cache_path)
            fw_data_cache_path = fw.site_cache_location
            # We should not have the project id in the path
            self.assertFalse(
                "%sp%d" %
                (os.path.sep, app.context.project["id"]) in fw_data_cache_path)

            # The path should end with "/site/<bundle name>"
            self.assertTrue(
                fw_data_cache_path.endswith(
                    "%ssite%s%s" % (os.path.sep, os.path.sep, fw.name)))
Example 6
    def format_tooltip(self):
        """
        Format text for a tooltip containing all useful information about
        the file item.  Tooltips look something like this:

            Name, version
            This is the latest version of this file
            ---------------------------------------
            Updated by:
            name, date, time
            (or) ----------------------------------
            Published by:
            name, date, time
            description
            (optional) ----------------------------
            This file is not editable
            not editable reason

        :returns:   Formatted rich-text string that can be used in a Tooltip for the file
                    item
        """
        tooltip = ""

        # figure out the latest version and the latest publish version:
        latest_version = self
        latest_publish_version = self if self.is_published else None
        if self.versions:
            max_version = max(self.versions)
            if max_version > latest_version.version:
                latest_version = self.versions[max_version]

            publish_versions = [
                f.version for f in six.itervalues(self.versions)
                if f.is_published
            ]
            if publish_versions:
                max_pub_version = max(publish_versions)
                if (not latest_publish_version
                        or max_pub_version > latest_publish_version.version):
                    latest_publish_version = self.versions[max_pub_version]

        # add the file name and version:
        tooltip += "<b>%s, v%03d</b><br>" % (self.name, self.version)

        # add in some text describing if this is the latest version or not.
        if latest_version == self:
            tooltip += "<i>This is the latest version of this file</i>"
        else:
            if latest_version.is_published:
                tooltip += (
                    "<i>A more recent Publish (v%03d) is available</i>" %
                    latest_version.version)
            elif latest_version.is_local:
                tooltip += (
                    "<i>A more recent Work File (v%03d) is available</i>" %
                    latest_version.version)

        if self.is_published:
            # add in published info
            tooltip += "<hr>"
            tooltip += "<b>Published by:</b><br>"
            tooltip += self.format_published_by_details(single_line=True)
            tooltip += "<br>"
            tooltip += "<i>%s</i>" % self.format_publish_description()
        elif self.is_local:
            # add in local info:
            tooltip += "<hr>"
            tooltip += "<b>Modified by:</b><br>"
            tooltip += self.format_modified_by_details(single_line=True)

            if latest_publish_version:
                # also add some information about the most recent publish:
                tooltip += "<hr>"
                tooltip += ("<b>Last Published as v%03d by:</b><br>" %
                            latest_publish_version.version)
                tooltip += latest_publish_version.format_published_by_details(
                    single_line=True)
                tooltip += "<br>"
                tooltip += (
                    "<i>%s</i>" %
                    latest_publish_version.format_publish_description())

        # if the file isn't editable then add this to the tooltip as well
        if not self.editable:
            tooltip += "<hr>"
            tooltip += "<b>The file is not editable</b><br>"
            tooltip += self.not_editable_reason

        return tooltip
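
The core of format_tooltip is picking the newest version overall and the newest published version out of the versions dictionary, which is keyed by version number. A stripped-down sketch of that selection logic, using a hypothetical namedtuple in place of the real file item class:

from collections import namedtuple

# hypothetical stand-in for the file item; only version/is_published matter here
FileVersion = namedtuple("FileVersion", ["version", "is_published"])

versions = {
    1: FileVersion(1, True),
    2: FileVersion(2, False),
    3: FileVersion(3, True),
    4: FileVersion(4, False),
}

# latest version overall is simply the entry with the highest key
latest = versions[max(versions)]

# latest published version is the highest version among published entries
publish_versions = [v.version for v in versions.values() if v.is_published]
latest_publish = versions[max(publish_versions)] if publish_versions else None

print(latest.version)          # 4
print(latest_publish.version)  # 3
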
Example 7
def do_localize(log, sg_connection, target_config_path, interaction_interface):
    """
    Perform the actual localize command.

    :param log: logging object
    :param sg_connection: An open shotgun connection
    :param str target_config_path: Path to the config that should be localized.
    :param interaction_interface: Interface to use to interact with the user
    """

    # the configuration to localize
    target_pipeline_config = pipelineconfig_factory.from_path(
        target_config_path)

    # the core install location for the current config. this will be resolved to
    # a linked config or the config where the running core lives
    source_config_path = target_pipeline_config.get_install_location()

    log.info("")
    if target_pipeline_config.is_localized():
        # if we're here, there's already a core in the config's install folder
        raise TankError(
            "Looks like your current pipeline configuration already has a "
            "local install of the core!")

    # if a core descriptor is supplied, ensure it is cached locally and use it
    # as the core to localize.
    if pipelineconfig_utils.has_core_descriptor(target_config_path):
        core_descriptor = pipelineconfig_utils.get_core_descriptor(
            target_config_path, sg_connection)
        core_descriptor.ensure_local()
        source_core_path = core_descriptor.get_path()
        source_core_version = core_descriptor.get_version()

        log.info("Core descriptor %s, specified in core/core_api.yml, "
                 "will be installed." % (core_descriptor.get_uri()))

    else:
        # fall back to using the core that exists in the source config
        source_core_path = os.path.join(source_config_path, "install", "core")

        # resolve the version of core
        source_core_version = target_pipeline_config.get_associated_core_version()

        log.info("This will copy the Core API in %s \n"
                 "into the Pipeline configuration %s." %
                 (source_core_path, target_config_path))

    log.info("")
    # check with the user whether they want to continue
    if not interaction_interface.ask_yn_question("Do you want to proceed"):
        # user says no!
        log.info("Operation cancelled.")
        return

    log.debug("About to localize '%s'" % target_config_path)
    log.debug("Associated core is '%s', version %s" %
              (source_core_path, source_core_version))
    log.debug("The version of core running this code is %s" %
              pipelineconfig_utils.get_currently_running_api_version())

    # proceed with setup
    log.info("")

    # define the install paths for the source and target configs
    source_install_path = os.path.join(source_config_path, "install")
    target_install_path = os.path.join(target_config_path, "install")

    try:

        # ---- Step 1: Localize all bundles...

        if is_version_older(source_core_version, "v0.18.0"):
            # if we are localizing a pre-0.18 core, it means we are using
            # modern (post-0.18) core code to copy a 0.17 core across into the
            # configuration. In this case, the old storage logic for
            # descriptors applies. We handle this by brute-forcing it and
            # copying all items across in the install folder.

            log.debug(
                "Using a 0.18 core to localize a 0.17 core. Falling back on "
                "blanket copy of install.")

            # copy all the contents of the install location across except for
            # the contents in the core and core.backup folders - these are
            # handled explicitly later on

            for name in os.listdir(source_install_path):

                if name in ["core", "core.backup"]:
                    # skip now and handle separately
                    continue

                if name.startswith(".") or name.startswith("_"):
                    # skip system directories such as __MACOSX and .DS_Store
                    continue

                source = os.path.join(source_install_path, name)
                target = os.path.join(target_install_path, name)
                log.info("Localizing the %s folder..." % name)
                filesystem.copy_folder(source, target)

        else:
            # 0.18 descriptor based API implementation

            # First get a list of all bundle descriptors.
            # Key by descriptor uri, which ensures no repetition.
            descriptors = {}
            for env_name in target_pipeline_config.get_environments():

                env_obj = target_pipeline_config.get_environment(env_name)

                for engine in env_obj.get_engines():
                    descriptor = env_obj.get_engine_descriptor(engine)
                    descriptors[descriptor.get_uri()] = descriptor

                    for app in env_obj.get_apps(engine):
                        descriptor = env_obj.get_app_descriptor(engine, app)
                        descriptors[descriptor.get_uri()] = descriptor

                for framework in env_obj.get_frameworks():
                    descriptor = env_obj.get_framework_descriptor(framework)
                    descriptors[descriptor.get_uri()] = descriptor

            for idx, descriptor in enumerate(six.itervalues(descriptors)):
                # print one-based indices for more human-friendly output
                log.info("%s/%s: Copying %s..." %
                         (idx + 1, len(descriptors), descriptor))
                descriptor.clone_cache(target_install_path)

        # ---- Step 2: Backup the target core and copy the new core across...

        # construct paths to the installed "core" and "core.backup" folders in
        # the target config
        target_core_path = os.path.join(target_install_path, "core")
        target_core_backup_path = os.path.join(target_install_path,
                                               "core.backup")

        log.info("Backing up existing Core API...")

        # timestamped folder name in "core.backup"
        target_core_backup_folder_name = datetime.datetime.now().strftime(
            "%Y%m%d_%H%M%S")

        # full path to the core backup folder (including timestamped folder)
        target_core_backup_folder_path = os.path.join(
            target_core_backup_path, target_core_backup_folder_name)

        # do the actual copy of whatever's currently in "install/core" to the
        # timestamped backup folder
        src_files = filesystem.copy_folder(target_core_path,
                                           target_core_backup_folder_path)

        # clean out the "core" folder
        log.debug("Clearing out core target location...")
        for f in src_files:
            filesystem.safe_delete_file(f)

        log.info("Copying Core %s \nto %s" %
                 (source_core_path, target_core_path))
        filesystem.copy_folder(source_core_path, target_core_path)

        # ---- Step 3: Copy some core config files across...
        log.info("Copying Core configuration files...")
        for fn in CORE_FILES_FOR_LOCALIZE:
            src = os.path.join(source_config_path, "config", "core", fn)
            tgt = os.path.join(target_config_path, "config", "core", fn)
            log.debug("Copy %s -> %s" % (src, tgt))

            # Every file other than app_store.yml is mandatory; app_store.yml
            # is only copied if it exists. This is because when you are
            # localizing a core, app_store.yml may or may not be present,
            # depending on whether you are migrating a core configured against
            # a pre-Shotgun 6 or post-Shotgun 6 site. In the latter case,
            # AppStore credentials can be retrieved using a session token, so
            # they don't need to be saved on disk.
            if fn != "app_store.yml" or os.path.exists(src):
                filesystem.copy_file(src, tgt, permissions=0o666)

    except Exception as e:
        log.exception("Could not localize Toolkit API.")
        raise TankError("Could not localize Toolkit API: %s" % e)

    log.info("The Core API was successfully localized.")

    log.info("")
    log.info("Localize complete! "
             "This pipeline configuration now has an independent API.")
    log.info("")
    log.info("")