Example 1
def write_file(path, contents, overwrite=False):
    """
    Write contents to a file. If overwrite is False, an existing file will not be overwritten.
    Will also create any necessary directories.
    
    :param path: location to write file at
    :type path: string
    :param contents: text contents to write to file
    :type contents: string
    :param overwrite: if True will overwrite existing files
    :type overwrite: boolean
    
    """
    log = logging.get_logger()
    
    if os.path.exists(path) and not overwrite:
        return False
    
    directory = os.path.dirname(path)
    if not os.path.isdir(directory):
        os.makedirs(directory)
        log.info("Created missing directory %s" % directory)
        
    fd = open(path, "w")
    fd.write(contents)
    fd.close()
    
    return True
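
A minimal usage sketch for the helper above. The snippet does not name its module, so the import path below is an assumption; adjust it to wherever write_file is actually defined.

# Hypothetical usage; the module path is assumed, not confirmed by the snippet.
from miki.autodoc import write_file

path = "/tmp/example_docs/overview.rst"
if not write_file(path, "Overview\n========\n"):
    # write_file returned False, so the file already existed; force the update.
    write_file(path, "Overview\n========\n", overwrite=True)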
Example 2
def generate(output_directory, source_directory, update=False):
    """
    Automatically generate autodoc templates for all files in sources
        
    :param output_directory: documentation directory to create autodoc structure under (e.g. docs/technical)
    :param source_directory: directory containing source files for autodoc parsing
    :param update: if True will overwrite existing files

    .. note::
    
        Does not remove files or directories.
        
    """
    log = logging.get_logger()
    
    if not os.path.isdir(source_directory):
        log.error("Could not autodoc - source path is not a valid directory: %s" % source_directory)
        return False
    
    log.info("Output directory set to: %s" % output_directory)
    log.info("Update existing files set to: %s" % update)

    try:
        os.makedirs(output_directory)
    except Exception:
        if not os.path.isdir(output_directory):
            log.error("Could not autodoc - output directory did not exist and could not be created")
            return False 
    
    log.info("Scanning for source files in %s" % source_directory)

    # recursively generate files
    _generate(source_directory, output_directory, update=update, log=log)
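
A usage sketch matching how the CLI in the later examples calls this function (the directory paths here are illustrative):

from miki import autodoc

# Mirror of the CLI's autodoc mode: emit .rst reference stubs for every module
# found under sources/, placing them beneath docs/technical.
autodoc.generate("/path/to/project/docs/technical",
                 "/path/to/project/sources",
                 update=False)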
Example 3
def validate(project_directory):
    """
    Validate structure at project_directory
    
    :param project_directory: the project root directory containing docs directory structure
    
    :returns: True if structure valid else False
    
    """
    log = logging.get_logger()
    log.info("Validating docs structure under %s" % project_directory)

    destination = get_docs_directory(project_directory)
    source = config.template_path
    valid = True
    
    for path, dirs, files in os.walk(source):
        # ignore hidden directories (.svn etc); prune the walk in place rather
        # than removing entries while iterating over the same list
        dirs[:] = [d for d in dirs if not d.startswith(".")]
                
        path_parts = path.replace(source, "").split(os.sep)
        path_parts.insert(0, destination)
        dest_path = os.path.join(*path_parts)
        if os.path.isdir(dest_path):
            log.info("Found required directory %s" % (dest_path))
        else:
            valid = False
            log.warn("Missing directory %s" % (dest_path))
            
        for file in files:
            if file.startswith("."):
                continue
            
            file_path = os.path.join(dest_path, file)
            if os.path.isfile(file_path):
                log.info("Found required file %s" % (file_path))
            else:
                valid = False
                log.warn("Missing file %s" % (file_path))
    
    if valid:
        log.info("Structure was valid.")
    else:
        log.warn("Structure was invalid.")
        
    return valid
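
A usage sketch; structure.validate is called exactly this way by the CLI in the later examples, and the project path here is illustrative:

from miki import structure

if not structure.validate("/path/to/project"):
    raise SystemExit("docs structure is missing required files or directories")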
Example 4
def setup():
    """
    Setup fixtures
    """
    log = logging.get_logger()
    log.info("Generating test project fixture...")
    
    test_project = os.path.join(os.path.dirname(__file__), "fixtures", "test_project")
    from miki import structure
    docs_directory = structure.get_docs_directory(test_project)
    build_directory = structure.get_build_directory(docs_directory)
    
    if os.path.exists(build_directory):
        log.warn("Clearing existing build directory at %s" % build_directory)
        shutil.rmtree(build_directory)
    structure.create(test_project, force=True)
Example 5
def setup():
    """
    Setup fixtures
    """
    log = logging.get_logger()
    log.info("Generating test project fixture...")

    test_project = os.path.join(os.path.dirname(__file__), "fixtures",
                                "test_project")
    from miki import structure
    docs_directory = structure.get_docs_directory(test_project)
    build_directory = structure.get_build_directory(docs_directory)

    if os.path.exists(build_directory):
        log.warn("Clearing existing build directory at %s" % build_directory)
        shutil.rmtree(build_directory)
    structure.create(test_project, force=True)
Example 6
def generate(project_directory, update=False):
    """
    Generate appropriate documentation structure under project_directory
    
    :param project_directory: the project root directory to create docs directory under
    :param update: if True will overwrite existing files

    .. note::
    
        Does not remove files or directories.
    
    """
    log = logging.get_logger()
    destination = get_docs_directory(project_directory)
    source = config.template_path
    
    log.info("Creating structure at %s" % destination)
    if update:
        log.info("Update flag set - will overwrite existing files.")
    _copytree(source, destination, log, overwrite=update)
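
A usage sketch mirroring the CLI's structure mode in the later examples (the project path is illustrative):

from miki import structure

# First run: create the docs tree from the template.
structure.generate("/path/to/project", update=False)

# Later runs: refresh template files in place; nothing is removed.
structure.generate("/path/to/project", update=True)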
Example 7
def main(arguments=None):
    """
    Miki main entry point.
    
    :param arguments: if specified denotes list of arguments to use, else retrieves argument list from sys.argv
    
    """
    from rodin import logging
    from rodin.parsers.argument import ArgumentParser
    
    from miki import builder, structure, autodoc, errors
    
    if arguments is None:
        arguments = sys.argv[1:]
    
    log = logging.get_logger()
    return_code = 0
    
    # handle arguments
    parser = ArgumentParser(description="Documentation Assistant")
    
    common = ArgumentParser(add_help=False)
    common.add_argument("-p", "--project-directory", help="Project directory. ", default=os.path.abspath("."))
    common.add_argument("-s", "--source-directory", help="Source directory. Can be relative to project directory, default is 'sources'.", action="append")
    common.add_argument("-l", "--loud", help="Output more information during operations.", default=False, action="store_true")
    
    subparsers = parser.add_subparsers(dest='mode', help="Use mode -h/--help for mode specific help")
        
    build_parser = subparsers.add_parser("build", help="Build documentation output.", parents=[common])
    build_parser.add_argument("-n", "--project-name", help="Project name.", default=None)
    build_parser.add_argument("-r", "--project-release", help="Project release string, such as 2.0.3.", default=None)
    build_parser.add_argument("-v", "--project-version", help="Project version string, such as 2.0.", default=None)
    
    build_parser.add_argument("-d", "--doxyfile", help="Location of doxyfile. Can be relative to project directory.", default="Doxyfile")
    build_parser.add_argument("-c", "--clean", help="Remove existing target output before build.", default=False, action="store_true")
    build_parser.add_argument("-t", "--target", action="append", choices=builder.VALID_TARGETS, help="A target to build. Can be specified multiple times.")
    
    structure_parser = subparsers.add_parser("structure", help="Manage documentation structure.", parents=[common])
    structure_group = structure_parser.add_mutually_exclusive_group(required=True)
    structure_group.add_argument("-c", "--create", help="Create documentation structure from template.", default=False, action="store_true")
    structure_group.add_argument("-u", "--update", help="Update existing structure to match template. Will overwrite, but not remove existing files.", default=False, action="store_true")
    structure_group.add_argument("-v", "--validate", help="Validate existing structure.", default=False, action="store_true")
    
    create_parser = subparsers.add_parser("autodoc", help="Automatically generate reference files for all sources.", parents=[common])
    create_parser.add_argument("-d", "--output-directory", help="Directory to output generated files in. Can be relative to docs directory.", default='technical')
    create_parser.add_argument("-u", "--update", help="Overwrite existing files.", default=False, action="store_true")
    
    args = parser.parse_args(arguments)

    # sort out common arguments
    # are we in docs directory by accident
    if os.path.basename(args.project_directory) ==  "docs":
        log.warn("Project directory appears to be a documentation directory - switching up a level.")
        args.project_directory = os.path.dirname(args.project_directory)

    if not args.source_directory:
        args.source_directory = ["sources"]
    
    log.info("Project directory is: %s" % args.project_directory)
    docs_directory = structure.get_docs_directory(args.project_directory)

    # now specialise
    if args.mode == "structure":
        if args.create:
            structure.generate(args.project_directory, update=False)
        elif args.update:
            structure.generate(args.project_directory, update=True)
        elif args.validate:
            structure.validate(args.project_directory)
            
    elif args.mode == "autodoc":
        output_directory_root = os.path.join(docs_directory, args.output_directory)
        for source_directory in args.source_directory:
            source_directory = os.path.join(args.project_directory, source_directory)
            output_directory = output_directory_root
            if len(args.source_directory) > 1:
                output_directory = os.path.join(output_directory, os.path.dirname(source_directory))
            autodoc.generate(output_directory, source_directory, update=args.update)
    
    elif args.mode == "build":
        
        if not os.path.isdir(docs_directory):
            log.error("Docs directory does not exist - cannot build documentation! (Expected location: %s)" % docs_directory)
            return 1
                
        defaults = {}
        
        # guess project name if not set
        if args.project_name:
            defaults["project"] = args.project_name
        else:
            defaults["project"] = os.path.basename(args.project_directory)
            # Check for release build which has project name one level higher with an intermediate release directory
            match = re.search("v\d\d_\d\d_\d\d", defaults["project"])
            if match:
                defaults["project"] = os.path.basename(os.path.dirname(args.project_directory))

        log.info("Project name is: %s" % defaults["project"])
            
        if args.project_release:
            defaults["release"] = args.project_release
        else:
            # guess
            release_folder = os.path.basename(args.project_directory)
            match = re.search("v\d\d_\d\d_\d\d", release_folder)
            if match:
                defaults["release"] = release_folder[1:]
            
        if args.project_version:
            defaults["version"] = args.project_version
            
        # auto add discovered Python packages to PYTHONPATH
        defaults["modindex_common_prefix"] = []
        for source_directory in args.source_directory:
            source_directory = os.path.join(args.project_directory, source_directory)
                
            if not os.path.isdir(source_directory):
                log.warn("Source path is not a valid directory: %s" % source_directory )
            else:
                log.info("Scanning source directory: %s" % source_directory)
                python_package_paths = []
                doxyfile = None
                for path, dirs, files in os.walk(source_directory, topdown=True):
                    if "__init__.py" in files:
                        python_package_paths.append(path)
                        # don't descend any further
                        del dirs[:]
                            
                log.info("Looking for Python packages...")
                python_package_paths.sort()
                for path in python_package_paths:
                    path, entry = os.path.split(path)
                    defaults["modindex_common_prefix"].append("%s." % entry)
                    if not path in sys.path:
                        sys.path.insert(0, path)
                        log.info("Added source path to PYTHONPATH: %s" % path)
        
        for entry in defaults["modindex_common_prefix"]:
            log.info("Added package name '%s' to common prefixes." % entry)
        
        # Check for doxyfile
        if not "/" in args.doxyfile:
            args.doxyfile = os.path.join(args.project_directory, args.doxyfile)
        args.doxyfile = os.path.abspath(args.doxyfile)
        if os.path.isfile(args.doxyfile):
            log.info("Found Doxyfile: %s" % args.doxyfile)
        else:
            args.doxyfile = None
            
        # set default target to html
        if not args.target:
            args.target = ["html"]
              
        # Build docs
        build_directory = structure.get_build_directory(docs_directory)
        
        try:
            builder.build(docs_directory, 
                          build_directory, 
                          defaults=defaults, 
                          targets=args.target,
                          verbose=args.loud,
                          doxyfile=args.doxyfile,
                          clean=args.clean)
        except errors.BuilderError, error:
            log.error("Build failed: %s" % error)
            return_code = 1
        except Exception, error:
            log.exception("Build failed")
            return_code = 1
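
Because main() accepts an explicit argument list, the same entry point can be driven from Python as well as from a shell. A hedged sketch follows; the snippet does not name the module that defines main(), so the import below is illustrative only.

# Illustrative only: replace miki_cli with whichever module defines main() above.
import miki_cli

# Create the documentation structure, then build HTML output for the project.
miki_cli.main(["structure", "--create", "--project-directory", "/path/to/project"])
miki_cli.main(["build", "--target", "html", "--project-directory", "/path/to/project"])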
Example 8
import os
import traceback
#import hou
import napalm.core as nap_core
import node_curves as node_curves
import kip.kip_reader as kip_reader
reload(node_curves)
reload(kip_reader)
from rodin import logging
from kip.kip_curve_class import *
from kip.kip_napalm_class import *
from kip.utils.kipError import *
from kip.template import *


rodin_logger = logging.get_logger('kipHoudini')
napalm_func = Napalm()

GLOBAL_FPS = 24
GLOBAL_TIME = 1


class HoudiniWriter(object):

    """
    Creating houdini curve writer class

    *Parents:*

        None
Example 9


import math
import traceback
import os
import maya.cmds as cmds
import node_curves as node_curves
import kip.kip_reader as kip_reader
reload(node_curves)
from rodin import logging
from kip.kip_curve_class import *
from kip.kip_napalm_class import *
from kip.utils.kipError import *

rodin_logger = logging.get_logger('kipMaya')
napalm_func = Napalm()

class MayaWriter(object):
    """
    Creating maya curve writer class

    *Parents:*

        None

    *Children:*

        * :func:`writeOutCurves`

    """
Example 10
def main(arguments=None):
    """
    Miki main entry point.
    
    :param arguments: if specified denotes list of arguments to use, else retrieves argument list from sys.argv
    
    """
    from rodin import logging
    from rodin.parsers.argument import ArgumentParser

    from miki import builder, structure, autodoc, errors

    if arguments is None:
        arguments = sys.argv[1:]

    log = logging.get_logger()
    return_code = 0

    # handle arguments
    parser = ArgumentParser(description="Documentation Assistant")

    common = ArgumentParser(add_help=False)
    common.add_argument("-p",
                        "--project-directory",
                        help="Project directory. ",
                        default=os.path.abspath("."))
    common.add_argument(
        "-s",
        "--source-directory",
        help=
        "Source directory. Can be relative to project directory, default is 'sources'.",
        action="append")
    common.add_argument("-l",
                        "--loud",
                        help="Output more information during operations.",
                        default=False,
                        action="store_true")

    subparsers = parser.add_subparsers(
        dest='mode', help="Use mode -h/--help for mode specific help")

    build_parser = subparsers.add_parser("build",
                                         help="Build documentation output.",
                                         parents=[common])
    build_parser.add_argument("-n",
                              "--project-name",
                              help="Project name.",
                              default=None)
    build_parser.add_argument("-r",
                              "--project-release",
                              help="Project release string, such as 2.0.3.",
                              default=None)
    build_parser.add_argument("-v",
                              "--project-version",
                              help="Project version string, such as 2.0.",
                              default=None)

    build_parser.add_argument(
        "-d",
        "--doxyfile",
        help="Location of doxyfile. Can be relative to project directory.",
        default="Doxyfile")
    build_parser.add_argument(
        "-c",
        "--clean",
        help="Remove existing target output before build.",
        default=False,
        action="store_true")
    build_parser.add_argument(
        "-t",
        "--target",
        action="append",
        choices=builder.VALID_TARGETS,
        help="A target to build. Can be specified multiple times.")

    structure_parser = subparsers.add_parser(
        "structure", help="Manage documentation structure.", parents=[common])
    structure_group = structure_parser.add_mutually_exclusive_group(
        required=True)
    structure_group.add_argument(
        "-c",
        "--create",
        help="Create documentation structure from template.",
        default=False,
        action="store_true")
    structure_group.add_argument(
        "-u",
        "--update",
        help=
        "Update existing structure to match template. Will overwrite, but not remove existing files.",
        default=False,
        action="store_true")
    structure_group.add_argument("-v",
                                 "--validate",
                                 help="Validate existing structure.",
                                 default=False,
                                 action="store_true")

    create_parser = subparsers.add_parser(
        "autodoc",
        help="Automatically generate reference files for all sources.",
        parents=[common])
    create_parser.add_argument(
        "-d",
        "--output-directory",
        help=
        "Directory to output generated files in. Can be relative to docs directory.",
        default='technical')
    create_parser.add_argument("-u",
                               "--update",
                               help="Overwrite existing files.",
                               default=False,
                               action="store_true")

    args = parser.parse_args(arguments)

    # sort out common arguments
    # are we in docs directory by accident
    if os.path.basename(args.project_directory) == "docs":
        log.warn(
            "Project directory appears to be a documentation directory - switching up a level."
        )
        args.project_directory = os.path.dirname(args.project_directory)

    if not args.source_directory:
        args.source_directory = ["sources"]

    log.info("Project directory is: %s" % args.project_directory)
    docs_directory = structure.get_docs_directory(args.project_directory)

    # now specialise
    if args.mode == "structure":
        if args.create:
            structure.generate(args.project_directory, update=False)
        elif args.update:
            structure.generate(args.project_directory, update=True)
        elif args.validate:
            structure.validate(args.project_directory)

    elif args.mode == "autodoc":
        output_directory_root = os.path.join(docs_directory,
                                             args.output_directory)
        for source_directory in args.source_directory:
            source_directory = os.path.join(args.project_directory,
                                            source_directory)
            output_directory = output_directory_root
            if len(args.source_directory) > 1:
                output_directory = os.path.join(
                    output_directory, os.path.dirname(source_directory))
            autodoc.generate(output_directory,
                             source_directory,
                             update=args.update)

    elif args.mode == "build":

        if not os.path.isdir(docs_directory):
            log.error(
                "Docs directory does not exist - cannot build documentation! (Expected location: %s)"
                % docs_directory)
            return 1

        defaults = {}

        # guess project name if not set
        if args.project_name:
            defaults["project"] = args.project_name
        else:
            defaults["project"] = os.path.basename(args.project_directory)
            # Check for release build which has project name one level higher with an intermediate release directory
            match = re.search("v\d\d_\d\d_\d\d", defaults["project"])
            if match:
                defaults["project"] = os.path.basename(
                    os.path.dirname(args.project_directory))

        log.info("Project name is: %s" % defaults["project"])

        if args.project_release:
            defaults["release"] = args.project_release
        else:
            # guess
            release_folder = os.path.basename(args.project_directory)
            match = re.search("v\d\d_\d\d_\d\d", release_folder)
            if match:
                defaults["release"] = release_folder[1:]

        if args.project_version:
            defaults["version"] = args.project_version

        # auto add discovered Python packages to PYTHONPATH
        defaults["modindex_common_prefix"] = []
        for source_directory in args.source_directory:
            source_directory = os.path.join(args.project_directory,
                                            source_directory)

            if not os.path.isdir(source_directory):
                log.warn("Source path is not a valid directory: %s" %
                         source_directory)
            else:
                log.info("Scanning source directory: %s" % source_directory)
                python_package_paths = []
                doxyfile = None
                for path, dirs, files in os.walk(source_directory,
                                                 topdown=True):
                    if "__init__.py" in files:
                        python_package_paths.append(path)
                        # don't descend any further
                        del dirs[:]

                log.info("Looking for Python packages...")
                python_package_paths.sort()
                for path in python_package_paths:
                    path, entry = os.path.split(path)
                    defaults["modindex_common_prefix"].append("%s." % entry)
                    if not path in sys.path:
                        sys.path.insert(0, path)
                        log.info("Added source path to PYTHONPATH: %s" % path)

        for entry in defaults["modindex_common_prefix"]:
            log.info("Added package name '%s' to common prefixes." % entry)

        # Check for doxyfile
        if not "/" in args.doxyfile:
            args.doxyfile = os.path.join(args.project_directory, args.doxyfile)
        args.doxyfile = os.path.abspath(args.doxyfile)
        if os.path.isfile(args.doxyfile):
            log.info("Found Doxyfile: %s" % args.doxyfile)
        else:
            args.doxyfile = None

        # set default target to html
        if not args.target:
            args.target = ["html"]

        # Build docs
        build_directory = structure.get_build_directory(docs_directory)

        try:
            builder.build(docs_directory,
                          build_directory,
                          defaults=defaults,
                          targets=args.target,
                          verbose=args.loud,
                          doxyfile=args.doxyfile,
                          clean=args.clean)
        except errors.BuilderError, error:
            log.error("Build failed: %s" % error)
            return_code = 1
        except Exception, error:
            log.exception("Build failed")
            return_code = 1
Example 11
# -----------------------------------------------------------------------------
import datetime
import os
import types
from PyQt4 import QtGui, QtCore, uic
from rodin import logging

log = logging.get_logger("grind.concorde.ui")

# -----------------------------------------------------------------------------
def ui_load(name):
    template = uic.loadUiType(os.path.join(os.path.split(__file__)[0], "ui", name))[0]
    return template


# -----------------------------------------------------------------------------
class ConcordeWindow(ui_load("Concorde.ui"), QtGui.QMainWindow):
    def __init__(self):
        QtGui.QMainWindow.__init__(self)
        self.setupUi(self)


# -----------------------------------------------------------------------------
class AttribBase:

    # -----------------------------------------------------------------------------
    def setRanges(self, value):
        if value < self.valueSB.minimum():
            self.valueSB.setMinimum(value)
        if value > self.valueSB.maximum():
            self.valueSB.setMaximum(value)
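
If appended to the module above, a minimal launch sketch for the window class would look like this (it assumes the Concorde.ui file that ui_load expects is present next to the module):

import sys
from PyQt4 import QtGui

# Standard PyQt4 bootstrapping: one QApplication, show the window, enter the loop.
app = QtGui.QApplication(sys.argv)
window = ConcordeWindow()
window.show()
sys.exit(app.exec_())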
Example 12
def build(source,
          destination,
          defaults=None,
          targets=None,
          verbose=False,
          doxyfile=None,
          clean=False):
    """
    Build documentation in source outputting to destination directory.
    
    :param source: directory - the docs directory containing conf.py
    :param destination: directory to build in (e.g. _build)
    :param defaults: dictionary of default values to pass to Sphinx configuration (e.g. project name).
    :param targets: list of output targets to build (html, pdf, latex etc). If None a default set is used.  
    :param verbose: if True will output more information as build proceeds
    :param doxyfile: if set will attempt to build doxygen documentation sources first using doxyfile
    :param clean: if True will remove each target's output directory before build
    :raises BuilderError: on fail
    
    .. note::
        
        Sub-folders per documentation type will be made under the destination directory.
            
    .. versionchanged:: 1.0
       Miki now reports all warnings and errors regardless of whether verbose argument is True or False.
        
    """
    log = logging.get_logger()
    important_message_reg = re.compile("(WARNING|ERROR|FAILED)")

    # Sphinx likes elements to be interpreted as relative to conf.py so change directory for build to source
    # wrap in a try,finally block to ensure directory gets changed back even if there are errors
    current_directory = os.getcwd()
    os.chdir(source)
    try:
        config_file = structure.get_config_file(source)
        if not os.path.isfile(config_file):
            raise errors.BuilderError(
                "Cannot build - required config file not found at expected location: %s"
                % config_file)

        # update configuration with passed arguments
        if defaults:
            log.info("Adding defaults to configuration: %s" % defaults)
            sphinx_config.__dict__.update(defaults)
            sphinx_config.__dict__.update(
                sphinx_config.compute_derived_values(**defaults))

        # build targets
        if targets is None:
            targets = ["html", "pdf"]
        if doxyfile:
            targets.insert(0, "doxygen")

        for target in targets:
            if target not in VALID_TARGETS:
                raise errors.BuilderError(
                    "Invalid target '%s' specified - must be one of %s." %
                    (target, ", ".join(VALID_TARGETS)))

            output = os.path.join(destination, target)

            if clean and os.path.exists(output):
                log.info("Cleaning existing output for %s target %s" %
                         (target, output))
                shutil.rmtree(output)

            log.info("Building %s in %s" % (target, output))
            try:
                os.makedirs(output)
            except OSError, e:
                if e.errno == errno.EEXIST:
                    pass
                else:
                    raise

            # At present the pdf builder breaks the standard code-block directive
            # so it is added in on a need only basis
            if target == "pdf":
                sphinx_config.extensions.append("rst2pdf.pdfbuilder")
            else:
                if "rst2pdf.pdfbuilder" in sphinx_config.extensions:
                    sphinx_config.extensions.remove("rst2pdf.pdfbuilder")

            if target == "doxygen":
                # read doxyfile contents
                fd = open(doxyfile, "r")
                contents = fd.read()
                fd.close()

                # doxygen will take the last specified argument as the main one when encountering the same argument
                # more than once, so can just append the overrides
                contents = contents.split("\n")
                contents.append("PROJECT_NAME = %s" %
                                defaults.get("project", "Unknown"))
                contents.append("OUTPUT_DIRECTORY = %s" % output)
                contents.append("GENERATE_XML = YES")
                contents.append("XML_OUTPUT = xml")
                contents.append("CREATE_SUBDIRS = NO")
                contents = "\n".join(contents)

                # now run doxygen in a subprocess
                p = subprocess.Popen(["doxygen", "-"],
                                     stdin=subprocess.PIPE,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.STDOUT,
                                     cwd=os.path.dirname(doxyfile))
                output = p.communicate(input=contents)
                if verbose:
                    print output[0]
                if p.returncode != 0:
                    raise errors.BuilderError("Doxygen build failed: %s" %
                                              output[0])
            else:
                # Sphinx build
                args = ["sphinx-build"]
                args.extend(["-b", target])
                args.append(source)
                args.append(output)

                # redirect output if not verbose
                saved_out = sys.stdout
                saved_err = sys.stderr
                logging.silence("rst2pdf")
                stdout = StringIO.StringIO()
                stderr = stdout
                sys.stdout = stdout
                sys.stderr = stderr

                try:
                    result = sphinx_main(args)
                except Exception, error:
                    pass
                finally:
                    sys.stdout = saved_out
                    sys.stderr = saved_err

                output = stdout.getvalue()

                # parse output for errors and warnings
                failed = False
                if "Exception" in output:
                    log.exception(output)
                else:
                    lines = output.split(os.linesep)
                    for line in lines:
                        match = important_message_reg.search(line)
                        if match:
                            if match.group(1) == 'WARNING':
                                log.warning(line)
                            elif match.group(1) in ('ERROR', 'FAILED'):
                                log.error(line)
                                failed = True
                        elif verbose:
                            log.info(line)

                # handle errors
                if failed:
                    raise errors.BuilderError(
                        "Errors occurred during build. Use -l/--loud as build argument for full output."
                    )
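
The Sphinx branch above silences sphinx-build by swapping sys.stdout/sys.stderr for an in-memory buffer and then scanning the captured text for WARNING/ERROR lines. A self-contained sketch of that capture pattern (the names here are illustrative, not part of miki):

import sys

try:
    from StringIO import StringIO  # Python 2, matching the snippet above
except ImportError:
    from io import StringIO  # Python 3 fallback


def capture_output(func, *args, **kwargs):
    """Run func while capturing everything it prints and return the text."""
    saved_out, saved_err = sys.stdout, sys.stderr
    buffer = StringIO()
    sys.stdout = sys.stderr = buffer
    try:
        func(*args, **kwargs)
    finally:
        # Always restore the real streams, even if func raised.
        sys.stdout, sys.stderr = saved_out, saved_err
    return buffer.getvalue()


def demo():
    print("WARNING: demo message")


for line in capture_output(demo).splitlines():
    if "WARNING" in line or "ERROR" in line:
        sys.stderr.write("important: %s\n" % line)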
Example 13
import sys
import math
from pimath import *
from PyQt4 import QtCore, QtGui, QtOpenGL
from camera import Camera

import grind

#-----------------------------------------------------------------------------
from rodin import logging
log = logging.get_logger('grind.mangle.gl_widget')

try:
    from OpenGL.GL import *
    from OpenGL.GLU import *
except ImportError:
    app = QtGui.QApplication(sys.argv)
    QtGui.QMessageBox.critical(None, "mangle", "PyOpenGL must be installed to run this example.")
    sys.exit(1)


class GLWidget(QtOpenGL.QGLWidget):
    xRotationChanged = QtCore.pyqtSignal(int)
    yRotationChanged = QtCore.pyqtSignal(int)
    zRotationChanged = QtCore.pyqtSignal(int)

    def __init__(self, parent=None):
        super(GLWidget, self).__init__(parent)

        self.renderable = None
Example 14
import os
import traceback
#import hou
import napalm.core as nap_core
import node_curves as node_curves
import kip.kip_reader as kip_reader
reload(node_curves)
reload(kip_reader)
from rodin import logging
from kip.kip_curve_class import *
from kip.kip_napalm_class import *
from kip.utils.kipError import *
from kip.template import *

rodin_logger = logging.get_logger('kipHoudini')
napalm_func = Napalm()

GLOBAL_FPS = 24
GLOBAL_TIME = 1


class HoudiniWriter(object):
    """
    Creating houdini curve writer class

    *Parents:*

        None

    *Children:*
Example 15
def main(arguments=None):
    """
    Main entry point.
    
    .. versionadded:: v00_01_00
    .. versionchanged:: v00_02_00
        Changed to use vastly improved model implementation, added update support,
        added rodin logging, applied general cleanup.
    .. versionchanged:: v00_03_00
        Updated to use Shotgun connection utility class.
    .. versionchanged:: v00_04_00
        Updated to use config file for mappings and commands settings.
    .. versionchanged:: v00_08_01
        Added support for result sorting to read().
    .. versionchanged:: 1.4.0
        Sort the list of entities that get passed into the '-e' argument parser choices for easier reading.
    
    .. todo::
        Consider the use of rodin subparsers for each mode which is supported by the CLI; allows arguments for each
        mode to be specified more explicitly (and help make things more robust).
    .. todo::
        Implement stronger argument format/value validation.
    .. todo::
        Return some kind of status code on successful/unsuccessful completion (?).
    """
    from grenade.common.model import find, FIND_ONE, ModelBuilder
    from grenade.config import commands, mappings

    from rodin import logging
    from rodin.parsers.argument import ArgumentParser

    if arguments is None:
        arguments = sys.argv[1:]

    # parse command line options
    entities = mappings.keys()
    entities.sort(
    )  # sort the list of available entities for easy to read command-line usage documentation

    parser = ArgumentParser(description='Grenade CLI.')

    parser.add_argument('-m',
                        '--mode',
                        help='Command mode',
                        choices=commands,
                        default='read')
    parser.add_argument('-e',
                        '--entity',
                        help='Shotgun entity to operate on.',
                        choices=entities,
                        default=None,
                        required=True)
    parser.add_argument('-a',
                        '--args',
                        help='Command arguments (depends on selected mode)',
                        default='\"\"',
                        required=True)
    parser.add_argument('-v',
                        '--verbose',
                        help='Set verbosity level',
                        choices='01',
                        default=0)

    args = parser.parse_args(arguments)

    params = args.args

    verbose = False
    if int(args.verbose) > 0:
        logging.set_level(logging.INFO)
    else:
        logging.set_level(logging.WARNING)

    entity = args.entity
    command = args.mode

    # create a grenade logger
    log = logging.get_logger('grenade')

    # create a shotgun session
    from .utils.connection import Connection

    env = get_environment()

    shotgun = Connection(env['SG_HOST'], env['SG_USER'], env['SG_SKEY'])
    session = shotgun.connect(log)

    if not session:
        sys.exit()  # no point going on if we failed to connect!

    # go ahead and run the command
    translator = mappings[entity]['translator']

    if command == 'create':
        try:
            log.info('Creating %s where %s\n' % (entity, params))

            instance = ModelBuilder()(session, entity, translator(session),
                                      **eval(params)).create()
            print instance['id']
        except Exception, e:
            log.error(str(e))
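
The -a/--args value is passed through eval() and unpacked as keyword arguments (see the create branch above), so it is supplied as a Python dict literal in string form. A hedged invocation sketch; the module exposing this main(), the entity name, and the field names are all illustrative:

# Illustrative only: grenade_cli, the entity and the field names are assumptions.
import grenade_cli

grenade_cli.main([
    "--mode", "create",
    "--entity", "Shot",
    "--args", "{'code': 'ABC_010', 'description': 'demo shot'}",
])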
Example 16
__contact__ = "*****@*****.**"
__status__ = "Development"

import math
import traceback
import os
import maya.cmds as cmds
import node_curves as node_curves
import kip.kip_reader as kip_reader
reload(node_curves)
from rodin import logging
from kip.kip_curve_class import *
from kip.kip_napalm_class import *
from kip.utils.kipError import *

rodin_logger = logging.get_logger('kipMaya')
napalm_func = Napalm()


class MayaWriter(object):
    """
    Creating maya curve writer class

    *Parents:*

        None

    *Children:*

        * :func:`writeOutCurves`
Example 17
def build(source, destination, defaults=None, targets=None, verbose=False, doxyfile=None, clean=False):
    """
    Build documentation in source outputting to destination directory.
    
    :param source: directory - the docs directory containing conf.py
    :param destination: directory to build in (e.g. _build)
    :param defaults: dictionary of default values to pass to Sphinx configuration (e.g. project name).
    :param targets: list of output targets to build (html, pdf, latex etc). If None a default set is used.  
    :param verbose: if True will output more information as build proceeds
    :param doxyfile: if set will attempt to build doxygen documentation sources first using doxyfile
    :param clean: if True will remove each target's output directory before build
    :raises BuilderError: on fail
    
    .. note::
        
        Sub-folders per documentation type will be made under the destination directory.
            
    .. versionchanged:: 1.0
       Miki now reports all warnings and errors regardless of whether verbose argument is True or False.
        
    """
    log = logging.get_logger()
    important_message_reg = re.compile("(WARNING|ERROR|FAILED)")

    # Sphinx likes elements to be interpreted as relative to conf.py so change directory for build to source
    # wrap in a try,finally block to ensure directory gets changed back even if there are errors
    current_directory = os.getcwd()
    os.chdir(source)
    try:
        config_file = structure.get_config_file(source)
        if not os.path.isfile(config_file):
            raise errors.BuilderError(
                "Cannot build - required config file not found at expected location: %s" % config_file
            )

        # update configuration with passed arguments
        if defaults:
            log.info("Adding defaults to configuration: %s" % defaults)
            sphinx_config.__dict__.update(defaults)
            sphinx_config.__dict__.update(sphinx_config.compute_derived_values(**defaults))

        # build targets
        if targets is None:
            targets = ["html", "pdf"]
        if doxyfile:
            targets.insert(0, "doxygen")

        for target in targets:
            if target not in VALID_TARGETS:
                raise errors.BuilderError(
                    "Invalid target '%s' specified - must be one of %s." % (target, ", ".join(VALID_TARGETS))
                )

            output = os.path.join(destination, target)

            if clean and os.path.exists(output):
                log.info("Cleaning existing output for %s target %s" % (target, output))
                shutil.rmtree(output)

            log.info("Building %s in %s" % (target, output))
            try:
                os.makedirs(output)
            except OSError, e:
                if e.errno == errno.EEXIST:
                    pass
                else:
                    raise

            # At present the pdf builder breaks the standard code-block directive
            # so it is added in on a need only basis
            if target == "pdf":
                sphinx_config.extensions.append("rst2pdf.pdfbuilder")
            else:
                if "rst2pdf.pdfbuilder" in sphinx_config.extensions:
                    sphinx_config.extensions.remove("rst2pdf.pdfbuilder")

            if target == "doxygen":
                # read doxyfile contents
                fd = open(doxyfile, "r")
                contents = fd.read()
                fd.close()

                # doxygen will take the last specified argument as the main one when encountering the same argument
                # more than once, so can just append the overrides
                contents = contents.split("\n")
                contents.append("PROJECT_NAME = %s" % defaults.get("project", "Unknown"))
                contents.append("OUTPUT_DIRECTORY = %s" % output)
                contents.append("GENERATE_XML = YES")
                contents.append("XML_OUTPUT = xml")
                contents.append("CREATE_SUBDIRS = NO")
                contents = "\n".join(contents)

                # now run doxygen in a subprocess
                p = subprocess.Popen(
                    ["doxygen", "-"],
                    stdin=subprocess.PIPE,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                    cwd=os.path.dirname(doxyfile),
                )
                output = p.communicate(input=contents)
                if verbose:
                    print output[0]
                if p.returncode != 0:
                    raise errors.BuilderError("Doxygen build failed: %s" % output[0])
            else:
                # Sphinx build
                args = ["sphinx-build"]
                args.extend(["-b", target])
                args.append(source)
                args.append(output)

                # redirect output if not verbose
                saved_out = sys.stdout
                saved_err = sys.stderr
                logging.silence("rst2pdf")
                stdout = StringIO.StringIO()
                stderr = stdout
                sys.stdout = stdout
                sys.stderr = stderr

                try:
                    result = sphinx_main(args)
                except Exception, error:
                    pass
                finally:
                    sys.stdout = saved_out
                    sys.stderr = saved_err

                output = stdout.getvalue()

                # parse output for errors and warnings
                failed = False
                if "Exception" in output:
                    log.exception(output)
                else:
                    lines = output.split(os.linesep)
                    for line in lines:
                        match = important_message_reg.search(line)
                        if match:
                            if match.group(1) == "WARNING":
                                log.warning(line)
                            elif match.group(1) in ("ERROR", "FAILED"):
                                log.error(line)
                                failed = True
                        elif verbose:
                            log.info(line)

                # handle errors
                if failed:
                    raise errors.BuilderError(
                        "Errors occurred during build. Use -l/--loud as build argument for full output."
                    )
Example 18
__authors__ = ["kurian.os"]
__version__ = "$Revision: 104960 $".split()[1]
__revision__ = __version__
__date__ = "$Date:  July 19, 2011 12:00:00 PM$".split()[1]
__copyright__ = "2011"
__license__ = "Copyright 2011 Dr D Studios Pty Limited"
__contact__ = "*****@*****.**"
__status__ = "Development"

import os
import napalm.core as nap_core
from rodin import logging
from kip.kip_napalm_class import *
from kip.kip_curve_class import *

rodin_logger = logging.get_logger('kip')
napalm_func = Napalm()


class ReadCurve():
    """

    This class will read a napalm channel file and a map file, then try to find the channel data based on the map file. That data will be reformatted as a curve class object.

    *Parents:*

        None

    *Children:*

        * :func:`getCurves`
Example 19
import sys
import math
from pimath import *
from PyQt4 import QtCore, QtGui, QtOpenGL
from camera import Camera

import grind

#-----------------------------------------------------------------------------
from rodin import logging
log = logging.get_logger('grind.mangle.gl_widget')

try:
    from OpenGL.GL import *
    from OpenGL.GLU import *
except ImportError:
    app = QtGui.QApplication(sys.argv)
    QtGui.QMessageBox.critical(
        None, "mangle", "PyOpenGL must be installed to run this example.")
    sys.exit(1)


class GLWidget(QtOpenGL.QGLWidget):
    xRotationChanged = QtCore.pyqtSignal(int)
    yRotationChanged = QtCore.pyqtSignal(int)
    zRotationChanged = QtCore.pyqtSignal(int)

    def __init__(self, parent=None):
        super(GLWidget, self).__init__(parent)

        self.renderable = None
Example 20
__revision__ = __version__
__date__ = "$Date:  July 19, 2011 12:00:00 PM$".split()[1]
__copyright__ = "2011"
__license__ = "Copyright 2011 Dr D Studios Pty Limited"
__contact__ = "*****@*****.**"
__status__ = "Development"


import math
import traceback
from rodin import logging
from kip.kip_curve_class import *
from kip.utils.kipError import *
import maya.cmds as cmds

rodin_logger = logging.get_logger("kipMaya")


class NodeCurves(object):
    """
    Creating maya curve node class

    *Parents:*
        None

    *Children:*

        * :func:`getCurves`

    """
Example 21
__status__ = "Development"



import os
import traceback
import re
#import nuke
from rodin import logging
import kip.kip_reader as kip_reader
import node_curves as knob_curves
from kip.kip_napalm_class import *
from kip.template import *
from kip.utils.kipError import *

rodin_logger = logging.get_logger('kipNuke')
napalm_func = Napalm()

class NukeWriter():
    """
    Creating nuke curve writer class

    *Parents:*

        None

    *Children:*

        * :func:`writeOutCurves`

    """
Example 22
__version__ = "$Revision: 104960 $".split()[1]
__revision__ = __version__
__date__ = "$Date:  July 19, 2011 12:00:00 PM$".split()[1]
__copyright__ = "2011"
__license__ = "Copyright 2011 Dr D Studios Pty Limited"
__contact__ = "*****@*****.**"
__status__ = "Development"

import os
import traceback
import time
import napalm.core as nap_core
from rodin import logging
from rodin.decorators import deprecated

rodin_logger = logging.get_logger('kip')

class Napalm():
    """
    Main napalm class for writing a napalm or xml file. This module will output a napalm table
    structure with subtables based on the data received from the curve class object. If the writer
    fails to write the napalm file it will return False.

    *Parents:*

        None

    *Children:*

        * :func:`writeNapalm`
Example 23
#-----------------------------------------------------------------------------
import datetime
import os
import types
from PyQt4 import QtGui, QtCore, uic
from rodin import logging
log = logging.get_logger('grind.concorde.ui')


#-----------------------------------------------------------------------------
def ui_load(name):
    template = uic.loadUiType(
        os.path.join(os.path.split(__file__)[0], "ui", name))[0]
    return template


#-----------------------------------------------------------------------------
class ConcordeWindow(ui_load("Concorde.ui"), QtGui.QMainWindow):
    def __init__(self):
        QtGui.QMainWindow.__init__(self)
        self.setupUi(self)


#-----------------------------------------------------------------------------
class AttribBase:

    #-----------------------------------------------------------------------------
    def setRanges(self, value):
        if value < self.valueSB.minimum():
            self.valueSB.setMinimum(value)
        if value > self.valueSB.maximum():
Example 24
__version__ = "$Revision: 104963 $".split()[1]
__revision__ = __version__
__date__ = "$Date:  July 19, 2011 12:00:00 PM$".split()[1]
__copyright__ = "2011"
__license__ = "Copyright 2011 Dr D Studios Pty Limited"
__contact__ = "*****@*****.**"
__status__ = "Development"


import math
#import nuke
from rodin import logging
from kip.kip_curve_class import *
from kip.utils.kipError import *

rodin_logger = logging.get_logger('kipNuke')


class NodeCurves():
    """
    Creating nuke curve node class

    *Parents:*

        None

    *Children:*

        * :func:`getCurves`

    """
Example 25
def _generate(source, target, package=None, log=None, update=False):
    """
    Walk the tree top-down from source, generating necessary documents.
    
    Generates:
    * A folder for each folder encountered.
    * A Python module autodoc reference file for each Python module encountered
    * An index file in each auto generated folder that acts as table of contents for folder.
    
    :param source: directory containing source files
    :type source: string
    :param target: directory to place generated files and new folders in
    :type target: string
    :param package: package parts for current Python package
    :type package: list
    :param log: logger instance to use for log messages (will generate one if not specified)
    :type log: logger
    :param update: will overwrite existing files if set to True
    :type update: boolean
    
    """
    if not log:
        log = logging.get_logger()
        
    name = os.path.basename(source)
    ext = ".rst"
    index_name = "index"
    toc = []
    if package is None:
        package = []
    
    # get items
    items = os.listdir(source)
    
    # is this path a package
    if "__init__.py" in items:
        package.append(name)
    else:
        package = []
    
    for item in items:
        
        # ignore certain items (such as hidden ones)
        if item[0] in ("."):
            continue
        
        item_path = os.path.join(source, item)
                
        if os.path.isdir(item_path):
            # continue walking tree
            _generate(item_path, os.path.join(target, item), package=package[:], log=log, update=update)
            
            # add to table of contents as item/index
            toc.append("%s/%s" % (item, index_name))
            
        elif os.path.isfile(item_path):
            # only interested in Python files
            if item_path.endswith(".py"):
                
                # only include __init__.py if it contains interesting code
                if item == "__init__.py" and not contains_code(item_path):
                    log.info("Excluded package file because it contained no code contents: %s" % item_path)
                    continue
                
                # generate contents for module
                package.append(item[:-3])
                module = ".".join(package)
                package.pop()
                
                title = item[:-3]
                if title == "__init__":
                    title = package[-1]
                title = title.replace("_", " ").strip().title()
                
                # is there code in the file - if so include a code listing summary
                with_listing = False
                if contains_code(item_path):
                    with_listing = True
                                    
                # is there a class definition in the file - if so include inheritance diagrams 
                with_inheritance_diagram = False
                if contains_class(item_path):
                    with_inheritance_diagram = True
                    
                contents = generate_module_contents(title, 
                                                    module, 
                                                    with_inheritance_diagram=with_inheritance_diagram,
                                                    with_listing=with_listing)
                                
                # write the file 
                destination = os.path.join(source.replace(source, target), "%s%s" % (item[:-3], ext)) 
                if write_file(destination, contents):
                    log.info("Generated module file %s" % destination)
                elif update:
                    if write_file(destination, contents, overwrite=update):
                        log.info("Updated module file %s" % destination)
                else:
                    log.warn("Path exists - will not overwrite %s" % destination)
                
                # add to table of contents
                toc.append(item[:-3])
    
    # create index file
    toc.sort()
    title = "Contents"
    if package:
        title = " ".join([w.title() for w in package])
    contents = generate_index_contents(title, toc)
    
    # write the file 
    destination = os.path.join(source.replace(source, target), "index%s" % (ext)) 
    if write_file(destination, contents):
        log.info("Generated index file %s" % destination)
    elif update:
        if write_file(destination, contents, overwrite=update):
            log.info("Updated index file %s" % destination)
    else:
        log.warn("Path exists - will not overwrite %s" % destination)