Example #1
    def create_node_layer_item(
        self,
        settings,
        parent_item,
        node,
        display_name=None,
        icon_name=None,
        is_header=False,
    ):
        """
        Creates a publish item for the given Krita node.

        :param settings: Configured settings for this collector
        :param parent_item: Parent Item instance
        :param node: The Krita node to create a publish item for
        :param display_name: Optional display name, defaults to the node name
        :param icon_name: Optional icon file name, defaults to krita_layer.png
        :param is_header: Whether the item represents a header layer

        :returns: Item of type krita.layer
        """
        publisher = self.parent

        if display_name is None:
            display_name = node.name()

        # create the layers item for the publish hierarchy
        layer_item = parent_item.create_item("krita.layer", "Krita Layer",
                                             display_name)

        # get the icon path to display for this item
        if icon_name is None:
            icon_name = "krita_layer.png"

        icon_path = os.path.join(self.disk_location, os.pardir, "icons",
                                 icon_name)

        layer_item.set_icon_from_path(icon_path)
        layer_item.properties["node"] = node
        layer_item.properties["node_name"] = create_valid_filename(node.name())
        layer_item.properties["publish_name"] = create_valid_filename(
            display_name)
        layer_item.properties["publish_type"] = "Krita Layer"
        layer_item.properties["is_header"] = is_header

        if is_header:
            layer_item.type_spec = "krita.layer.header"

        # if a work template is defined, add it to the item properties so
        # that it can be used by attached publish plugins
        work_template = None
        work_template_setting = settings.get("Work Template")

        if work_template_setting:
            work_template = publisher.engine.get_template_by_name(
                work_template_setting.value)
            if not work_template:
                raise TankError("Missing Work Template in templates.yml: %s " %
                                work_template_setting.value)

        layer_item.properties["work_template"] = work_template

        return layer_item
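
For context, a collector hook would typically call this helper while walking the
active Krita document's node tree. The sketch below is hypothetical and not part
of the example above: the collect_layers name is invented, and the Krita API
calls (Krita.instance(), activeDocument(), rootNode(), childNodes()) are
assumptions based on Krita's public Python scripting API.

    # Hypothetical caller sketch, assuming Krita's scripting API is available
    # inside the publish collector. Not part of the example above.
    def collect_layers(self, settings, parent_item):
        import krita

        doc = krita.Krita.instance().activeDocument()
        if not doc:
            # no document is open, so there is nothing to collect
            return

        # create one publish item per top-level layer/group under the root
        for node in doc.rootNode().childNodes():
            self.create_node_layer_item(settings, parent_item, node)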
Example #2
def _validate_args(args):
    """
    Validate the parsed args. Will raise if there are errors.

    Sets up the logger if core can be imported.

    Adds some additional values based on supplied args including the engine
    directory, plugin path, etc.

    Returns a dictionary of the parsed arguments of the following form:

        {
            'core': '/path/to/tk-core',
            'extension_name': 'extension.name.here',
            'bundle_cache': True,
            'plugin_name': 'plugin_name',
            'sign': ['/path/to/ZXPSignCmd', '/path/to/cert', 'cert_password'],
            'version': 'v1.0.0',
            'output_dir': '/path/to/output/dir',
            'engine_dir': '/path/to/the/engine/repo',
            'plugin_dir': '/path/to/the/engine/plugin',
        }
    """

    # convert the args namespace to a dict
    args = vars(args)

    # ensure core path exists and build script is there
    if not os.path.exists(args["core"]):
        raise Exception(
            "Supplied core path does not exist: %s" % (args["core"],)
        )

    # make sure we can import core
    try:
        sgtk_dir = os.path.join(args["core"], "python")
        sys.path.insert(0, sgtk_dir)  # make sure this one is found first
        import sgtk as imported_sgtk
        global sgtk
        sgtk = imported_sgtk
    except Exception as e:
        raise Exception("Error import supplied core: %s" % (e,))

    # setup the logger for use from here on out
    try:
        # set up std toolkit logging to file
        sgtk.LogManager().initialize_base_file_handler("build_plugin")

        # set up output of all sgtk log messages to stdout
        sgtk.LogManager().initialize_custom_handler()

        global logger
        logger = sgtk.LogManager.get_logger("build_extension")

    except Exception as e:
        raise Exception("Error creating toolkit logger: %s" % (e,))

    logger.info("Validating command line arguments...")

    # ensure the core plugin build script exists
    logger.info("Finding plugin build script...")
    build_script = os.path.join(args["core"], CORE_BUILD_SCRIPT)
    if not os.path.exists(build_script):
        raise Exception(
            "Could not find plugin build script in supplied core: %s" %
            (build_script,)
        )

    # ensure the extension name is valid
    logger.info("Ensuring valid plugin & extension build names...")
    from sgtk.util.filesystem import create_valid_filename
    args["extension_name"] = create_valid_filename(args["extension_name"])
    logger.info("Extension name: %s" % (args["extension_name"]))

    # make sure version is valid
    logger.info("Verifying supplied version...")
    if args["version"]:
        if not re.match(r"^v\d+\.\d+\.\d+$", args["version"]):
            raise Exception(
                "Supplied version doesn't match the format 'v#.#.#'. Supplied: %s" %
                (args["version"],)
            )
    else:
        args["version"] = "dev"

    # if signing requested, validate those args
    if args["sign"]:

        logger.info("Verifying 'ZXPSignCmd` path...")
        if not os.path.exists(args["sign"][0]):
            raise Exception(
                "The supplied 'ZXPSignCmd' does not exist. Supplied path: %s " %
                (args["sign"][0],)
            )

        logger.info("Verifying certificate path...")
        if not os.path.exists(args["sign"][1]):
            raise Exception(
                "The supplied certificate does not exist. Supplied path: %s " %
                (args["sign"][1],)
            )

    # get the full path to the engine repo
    logger.info("Populating the engine directory...")
    args["engine_dir"] = os.path.abspath(
        os.path.join(
            os.path.dirname(__file__),
            os.pardir
        )
    )

    # ensure the plugin can be found in the engine
    logger.info("Validating plugin name...")
    plugin_dir = os.path.join(
        args["engine_dir"],
        "cep"
    )
    if not os.path.exists(plugin_dir):
        raise Exception(
            "Could not find plugin '%s' in engine." % (args["plugin_name"],)
        )
    args["plugin_dir"] = plugin_dir

    # if output dir defined, ensure it exists. populate args with engine dir
    # if not.
    logger.info("Determining output directory...")
    if args["output_dir"]:
        if not os.path.exists(args["output_dir"]):
            from sgtk.util.filesystem import ensure_folder_exists
            ensure_folder_exists(args["output_dir"])
    else:
        args["output_dir"] = args["engine_dir"]

    # return the validated args
    logger.info("Command line arguments validated.")
    return args
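
For reference, here is a minimal, hypothetical driver for _validate_args(). The
option names below are assumptions that simply mirror the keys of the dictionary
documented in the docstring above; the actual build script may define its
command line differently.

# Hypothetical driver sketch; flag names mirror the documented return dict and
# are not taken from the real build script.
def main():
    import argparse

    parser = argparse.ArgumentParser(description="Build the extension.")
    parser.add_argument("--core", required=True, help="Path to tk-core.")
    parser.add_argument("--extension_name", required=True)
    parser.add_argument("--plugin_name", required=True)
    parser.add_argument("--version", default=None)
    parser.add_argument("--output_dir", default=None)
    parser.add_argument("--bundle_cache", action="store_true")
    parser.add_argument(
        "--sign",
        nargs=3,
        metavar=("ZXPSIGNCMD", "CERTIFICATE", "PASSWORD"),
        default=None,
    )

    # _validate_args() converts the namespace to a dict and fills in
    # engine_dir, plugin_dir and output_dir
    return _validate_args(parser.parse_args())


if __name__ == "__main__":
    main()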
Example #3
    def collect_rumba_selection_as_nodes(self, settings, parent_item):
        """
        Creates an item that represents the currently selected Rumba node to
        publish.

        :param settings: Configured settings for this collector
        :param parent_item: Parent Item instance

        :returns: Item of type rumba.node
        """

        publisher = self.parent

        # get the path to the current file
        path = _session_path()

        if not path:
            # no document is active, so nothing to see here!
            return

        selection = rumba.selection()
        if not selection:
            return

        # this is the same behaviour that happens when Save Node is used from
        # the File menu: if multiple nodes are selected, only the first one is
        # used
        node = selection[0]
        if node.is_referenced():
            self.logger.warning(
                "Exporting referenced nodes is not supported in Rumba at the moment. The selected node `%s` won't be added for publishing."
                % node.name()
            )
            return None

        node_name = create_valid_filename(node.name())
        display_name = "Node: `%s`" % node.name()

        # create the session item for the publish hierarchy
        item = parent_item.create_item("rumba.node", "Rumba Node", display_name)

        # get the icon path to display for this item
        icon_path = os.path.join(
            self.disk_location, os.pardir, "icons", "rumbanode.png"
        )
        item.set_icon_from_path(icon_path)

        # if a work template is defined, add it to the item properties so
        # that it can be used by attached publish plugins
        work_template_setting = settings.get("Work Template")
        if work_template_setting:
            work_template = publisher.engine.get_template_by_name(
                work_template_setting.value
            )

            # store the template on the item for use by publish plugins. we
            # can't evaluate the fields here because there's no guarantee the
            # current session path won't change once the item has been created.
            # the attached publish plugins will need to resolve the fields at
            # execution time.
            item.properties["work_template"] = work_template
            item.properties["publish_type"] = "Rumba Node"
            item.properties["node_full_document_name"] = node.full_document_name()
            item.properties["extra_fields"] = {"rumba.node.name": node_name}

        self.logger.info("Collected the selected Rumba node for publishing.")

        return item
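
All three examples funnel user-facing names through create_valid_filename()
before storing them on the publish item, so downstream publish plugins can
safely embed those values in file paths and template fields. The snippet below
only illustrates that call pattern; the exact sanitisation rules live in
tk-core's sgtk.util.filesystem module and may differ between versions.

# Illustration of the shared call pattern only; the exact output of
# create_valid_filename() depends on the tk-core version in use and is not
# asserted here.
from sgtk.util.filesystem import create_valid_filename

raw_name = "Background Layer (final v2)"
publish_name = create_valid_filename(raw_name)

# the sanitised name is what the collectors above store as their
# "publish_name" / "node_name" item properties
print(publish_name)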