Beispiel #1
0
def runFix(jobconf, classesObj):
    """Normalize whitespace and line endings of a job's class files.

    Reads the "fix-files" job config (must be a map, otherwise this is a
    no-op), converts every class file to the configured eol-style ("LF",
    "CR" or "CRLF"), expands tabs, strips trailing spaces, and removes a
    UTF-8 BOM if present.
    """

    def fixPng():
        # stub -- PNG fixing is not implemented yet
        return

    def removeBOM(fpath):
        # Requires binary mode, hence separate from the textual fixes.
        with open(fpath, "rb") as fobj:
            content = fobj.read()
        if content.startswith(codecs.BOM_UTF8):
            # fix: log this function's argument; previously logged the
            # enclosing loop variable 'filePath' instead of 'fpath'
            console.debug("removing BOM: %s" % fpath)
            with open(fpath, "wb") as fobj:
                fobj.write(content[len(codecs.BOM_UTF8):])
        return

    # - Main ---------------------------------------------------------------

    # "fix-files" has to be a config map; any other value disables the run
    if not isinstance(jobconf.get("fix-files", False), types.DictType):
        return

    console = Context.console
    classes = classesObj.keys()
    fixsettings = ExtMap(jobconf.get("fix-files"))

    # Fixing JS source files
    console.info("Fixing whitespace in source files...")
    console.indent()

    console.info("Fixing files: ", False)
    numClasses = len(classes)
    eolStyle = fixsettings.get("eol-style", "LF")
    tabWidth = fixsettings.get("tab-width", 2)
    for pos, classId in enumerate(classes):
        console.progress(pos+1, numClasses)
        classEntry   = classesObj[classId]
        filePath     = classEntry.path
        fileEncoding = classEntry.encoding
        fileContent  = filetool.read(filePath, fileEncoding)
        # Caveat: as filetool.read already calls any2Unix, converting to LF will
        # not work as the file content appears unchanged to this function
        if eolStyle == "CR":
            fixedContent = textutil.any2Mac(fileContent)
        elif eolStyle == "CRLF":
            fixedContent = textutil.any2Dos(fileContent)
        else:
            fixedContent = textutil.any2Unix(fileContent)
        fixedContent = textutil.normalizeWhiteSpace(textutil.removeTrailingSpaces(textutil.tab2Space(fixedContent, tabWidth)))
        if fixedContent != fileContent:
            console.debug("modifying file: %s" % filePath)
            filetool.save(filePath, fixedContent, fileEncoding)
        # this has to go separate, as it requires binary operation
        removeBOM(filePath)

    console.outdent()

    # Fixing PNG files -- currently just a stub!
    if fixsettings.get("fix-png", False):
        console.info("Fixing PNGs...")
        console.indent()
        fixPng()
        console.outdent()

    return
Beispiel #2
0
    def createResourceStruct(resources, formatAsTree=False, updateOnlyExistingSprites=False):
        """Build a map of resource id -> resource info from *resources*.

        Skips .meta/.py files, wires embedded images up with their combined
        (sprite) image, and flattens every entry via toResinfo().  With
        formatAsTree=True the result is returned as nested maps.
        """
        skip_re = re.compile(r'\.(meta|py)$', re.I)
        struct = ExtMap() if formatAsTree else {}

        # Drop unwanted files, key everything else by its resource id
        for resource in resources:
            if not skip_re.search(resource.path):
                struct[resource.id] = resource

        # Attach combined (sprite) images to the simple images they embed
        combined_images = [r for r in struct.values() if isinstance(r, CombinedImage)]
        for comb in combined_images:
            for embedded in comb.embeds:
                if embedded.id in struct:
                    struct[embedded.id].attachCombinedImage(comb)
                elif not updateOnlyExistingSprites:
                    embedded.attachCombinedImage(comb)
                    struct[embedded.id] = embedded

        # Replace resource objects by their serializable representation
        for res_id, resource in struct.items():
            struct[res_id] = resource.toResinfo()

        # ExtMap carries nested maps internally
        if formatAsTree:
            struct = struct.getData()

        return struct
Beispiel #3
0
    def generateResourceInfoCode(self, script, settings, libraries, format=False):
        """Compute resource info for the script's classes.

        Attaches a per-package resource struct to each package of *script*
        and returns the resource struct for all mapped classes.
        """

        def addResourceInfoToPackages(script):
            # Attach a resource struct to every package of the script.
            for package in script.packages:
                # TODO: the next is a hack, since package.classes are still id's
                clazzes = [c for c in script.classesObj if c.id in package.classes]
                pkg_resources = []
                for clazz in clazzes:
                    pkg_resources.extend(clazz.resources)
                package.data.resources = ResourceHandler.createResourceStruct(
                    pkg_resources,
                    formatAsTree=resources_tree,
                    updateOnlyExistingSprites=True)
            return


        # -- main --------------------------------------------------------------

        compConf       = ExtMap(self._job.get("compile-options"))
        resources_tree = compConf.get("code/resources-tree", False)

        classes = ResourceHandler.mapResourcesToClasses(
            libraries, script.classesObj, self._job.get("asset-let", {}))
        allResources = []
        for clazz in classes:
            allResources.extend(clazz.resources)
        resdata = ResourceHandler.createResourceStruct(
            allResources,
            formatAsTree=resources_tree,
            updateOnlyExistingSprites=True)
        # add resource info to packages
        addResourceInfoToPackages(script)

        return resdata # end: generateResourceInfoCode()
Beispiel #4
0
    def generateResourceInfoCode(self, script, settings, libraries, format=False):
        """Compute resource info for the script's classes.

        Attaches a per-package resource struct to each package of *script*
        and returns the resource struct for all mapped classes.  The
        'settings' and 'format' parameters are unused here.
        """

        def addResourceInfoToPackages(script):
            # Attach a resource struct to every package of the script.
            for package in script.packages:
                package_resources = []
                # TODO: the next is a hack, since package.classes are still id's
                package_classes   = [x for x in script.classesObj if x.id in package.classes]
                for clazz in package_classes:
                    package_resources.extend(clazz.resources)
                package.data.resources = rh.createResourceStruct(package_resources, formatAsTree=resources_tree,
                                                             updateOnlyExistingSprites=True)
            return


        # -- main --------------------------------------------------------------

        compConf       = self._job.get ("compile-options")
        compConf       = ExtMap (compConf)
        resources_tree = compConf.get ("code/resources-tree", False)
        rh             = self._resourceHandler

        classes = rh.mapResourcesToClasses (libraries, script.classesObj)
        filteredResources = []
        for clazz in classes:
            filteredResources.extend(clazz.resources)
        resdata = rh.createResourceStruct (filteredResources, formatAsTree=resources_tree,
                                           updateOnlyExistingSprites=True)
        # add resource info to packages
        addResourceInfoToPackages(script)

        return resdata # end: generateResourceInfoCode()
Beispiel #5
0
    def createResourceStruct(resources,
                             formatAsTree=False,
                             updateOnlyExistingSprites=False):
        """Build a map of resource id -> resource info from *resources*.

        Skips .meta/.py files, links embedded images with their combined
        (sprite) image, and flattens every entry via toResinfo().  With
        formatAsTree=True the result is returned as nested maps.
        """
        skippatt = re.compile(r'\.(meta|py)$', re.I)
        result = {}
        if formatAsTree:
            result = ExtMap()

        # Filter unwanted files
        for res in resources:
            if skippatt.search(res.path):
                continue
            result[res.id] = res

        # Update simple images: attach their combined (sprite) image
        for combImg in (x for x in result.values()
                        if isinstance(x, CombinedImage)):
            for embImg in combImg.embeds:
                if embImg.id in result:
                    result[embImg.id].attachCombinedImage(combImg)
                elif not updateOnlyExistingSprites:
                    # unknown embedded image -- register it as well
                    embImg.attachCombinedImage(combImg)
                    result[embImg.id] = embImg

        # Flatten out the resource representation
        for resid, res in result.items():
            result[resid] = res.toResinfo()

        # ExtMap returns nested maps
        if formatAsTree:
            result = result.getData()

        return result
Beispiel #6
0
    def createResourceStruct(resources,
                             formatAsTree=False,
                             updateOnlyExistingSprites=False):
        """Build a map of resource id -> resource info from *resources*.

        Skips .meta/.py files, links embedded images with their combined
        (sprite) image, flattens entries via toResinfo(), and registers an
        "@alias/glyph" entry per FontMap glyph.  With formatAsTree=True the
        result is returned as nested maps.
        """
        skippatt = re.compile(r'\.(meta|py)$', re.I)
        result = {}
        if formatAsTree:
            result = ExtMap()

        # Filter unwanted files
        for res in resources:
            if skippatt.search(res.path):
                continue
            result[res.id] = res

        # Update simple images
        for combImg in (x for x in result.values()
                        if isinstance(x, CombinedImage)):
            for embImg in combImg.embeds:
                if embImg.id in result:
                    result[embImg.id].attachCombinedImage(combImg)
                elif not updateOnlyExistingSprites:
                    embImg.attachCombinedImage(combImg)
                    result[embImg.id] = embImg

        # Flatten out the resource representation
        for resid, res in result.items():
            result[resid] = res.toResinfo()

            # Unify font map aliases
            if isinstance(res, FontMap):
                for glyphname, code in res.mapping.iteritems():
                    fdsc = "@%s/%s" % (res.alias, glyphname)
                    if fdsc not in result:
                        # best effort -- skip glyphs whose resinfo entry is
                        # incomplete or whose size ratio cannot be computed
                        try:
                            result[fdsc] = [
                                result[resid][1],
                                round(result[resid][2] / code[1]), code[0]
                            ]
                        # fix: was a bare 'except:' that also swallowed
                        # KeyboardInterrupt/SystemExit and genuine bugs
                        except (KeyError, IndexError, TypeError, ZeroDivisionError):
                            pass
                del result[resid][4]

        # ExtMap returns nested maps
        if formatAsTree:
            result = result.getData()

        return result
 def manifest_from_url(self, url):
     """Download a contrib catalog Manifest from *url* and return it as ExtMap.

     Raises IOError if the server does not answer with HTTP 200.  This is an
     explicit raise instead of the former 'assert', which is stripped when
     Python runs with -O and would then silently accept any status code.
     """
     urlobj = urllib.urlopen(url)  # urllib does handle https
     try:
         if urlobj.getcode() != 200:
             raise IOError("Could not access the contrib catalog URL: %s" % url)
         manifest = urlobj.read()
     finally:
         urlobj.close()  # fix: don't leak the connection object
     manifest = ExtMap(json.loads(manifest))
     return manifest
Beispiel #8
0
 def includeGlobalDefaults(self):
     """Merge the config's top-level 'let' and config-warnings defaults into this job."""
     defaults = {
         Key.LET_KEY: self._config.get(Key.LET_KEY, {}),
         Key.CONFIG_WARNINGS: self._config.get(Key.CONFIG_WARNINGS, {}),
     }
     # wrap in ExtMap so it can be passed to mergeJob like a real Job
     self.mergeJob(ExtMap(defaults))
Beispiel #9
0
 def __init__(self, classesObj, cache, console, require, use, context):
     """Store the collaborators this object works with.

     'context' is a map that may carry a 'jobconf' entry; an empty ExtMap
     is used as fallback when it does not.  The remaining arguments are
     kept verbatim on the instance.
     """
     self._classesObj = classesObj
     self._cache   = cache
     self._console = console
     self._context = context
     self._jobconf = context.get('jobconf', ExtMap())  # job config with empty fallback
     self._require = require
     self._use     = use
     self.counter  = 0  # running counter, starts at zero
Beispiel #10
0
    def applyToConfig(config, confpy):
        """Apply all transformations of one conf.py migration file to every job
        of *config*; return True if anything changed."""
        confMig = loadConfpy(confpy)
        changed = False

        for key, action in confMig["transformations"].items():
            if key.startswith("/"):
                # top-level keys are not handled here, only job-level ones
                continue
            for job in config.get("jobs").values():
                changed = handleMap(ExtMap(job), key, action) or changed
        return changed
Beispiel #11
0
    def createResourceStruct(resources, formatAsTree=False, updateOnlyExistingSprites=False):
        """Build a map of resource id -> resource info from *resources*.

        Skips .meta/.py files, links embedded images with their combined
        (sprite) image, flattens entries via toResinfo(), and registers an
        "@alias/glyph" entry per FontMap glyph.  With formatAsTree=True the
        result is returned as nested maps.
        """
        skippatt = re.compile(r'\.(meta|py)$', re.I)
        result = {}
        if formatAsTree:
            result = ExtMap()

        # Filter unwanted files
        for res in resources:
            if skippatt.search(res.path):
                continue
            result[res.id] = res

        # Update simple images
        for combImg in (x for x in result.values() if isinstance(x, CombinedImage)):
            for embImg in combImg.embeds:
                if embImg.id in result:
                    result[embImg.id].attachCombinedImage(combImg)
                elif not updateOnlyExistingSprites:
                    embImg.attachCombinedImage(combImg)
                    result[embImg.id] = embImg

        # Flatten out the resource representation
        for resid, res in result.items():
            result[resid] = res.toResinfo()

            # Unify font map aliases
            # (fix: normalized the irregular 2-space indentation of this block)
            if isinstance(res, FontMap):
                for glyphname, code in res.mapping.iteritems():
                    fdsc = "@%s/%s" % (res.alias, glyphname)
                    if fdsc not in result:
                        # guard the size computation like the sibling variant of
                        # this function does; skip glyphs with incomplete info
                        try:
                            result[fdsc] = [result[resid][1], round(result[resid][2] / code[1]), code[0]]
                        except (KeyError, IndexError, TypeError, ZeroDivisionError):
                            pass
                del result[resid][4]

        # ExtMap returns nested maps
        if formatAsTree:
            result = result.getData()

        return result
Beispiel #12
0
    def runPrettyPrinting(self, classesObj):
        """Pretty-print the source of all given classes in place.

        Reads the "pretty-print" job config (must be a map, otherwise this
        is a no-op), maps its keys onto pretty-printer options, regenerates
        each class's source from its syntax tree and writes it back.
        """
        if not isinstance(self._job.get("pretty-print", False), types.DictType):
            return

        self._console.info("Pretty-printing code...")
        self._console.indent()
        ppsettings = ExtMap(self._job.get("pretty-print"))  # get the pretty-print config settings

        # init options: a plain function object serves as attribute container
        def options(): pass
        pretty.defaultOptions(options)

        # modify according to config
        if 'general/indent-string' in ppsettings:
            options.prettypIndentString = ppsettings.get('general/indent-string')
        if 'comments/block/add' in ppsettings:
            # fix: read the key that was tested; this used to fetch
            # 'comments/trailing/keep-column' (copy-paste error)
            options.prettypCommentsBlockAdd = ppsettings.get('comments/block/add')
        if 'comments/trailing/keep-column' in ppsettings:
            options.prettypCommentsTrailingKeepColumn = ppsettings.get('comments/trailing/keep-column')
        if 'comments/trailing/comment-cols' in ppsettings:
            options.prettypCommentsTrailingCommentCols = ppsettings.get('comments/trailing/comment-cols')
        if 'comments/trailing/padding' in ppsettings:
            options.prettypCommentsInlinePadding = ppsettings.get('comments/trailing/padding')
        if 'code/align-with-curlies' in ppsettings:
            options.prettypAlignBlockWithCurlies = ppsettings.get('code/align-with-curlies')
        if 'code/open-curly/newline-before' in ppsettings:
            options.prettypOpenCurlyNewlineBefore = ppsettings.get('code/open-curly/newline-before')
        if 'code/open-curly/indent-before' in ppsettings:
            options.prettypOpenCurlyIndentBefore = ppsettings.get('code/open-curly/indent-before')

        self._console.info("Pretty-printing files: ", False)
        numClasses = len(classesObj)
        for pos, classId in enumerate(classesObj):
            self._console.progress(pos+1, numClasses)
            tree = classesObj[classId].tree()
            result = [u'']
            result = pretty.prettyNode(tree, options, result)
            compiled = u''.join(result)
            # NOTE(review): path is looked up in self._classes rather than in
            # classesObj -- presumably the same id->class mapping; verify
            filetool.save(self._classes[classId].path, compiled)

        self._console.outdent()

        return
Beispiel #13
0
def do_lint(file_, popup):
    if popup:
        logger = util.PopupLogger()
    else:
        logger = util.TextMateLogger()

    logger.printHeader("qooxdoo JavaScript lint", "qooxdoo JavaScript lint")
    try:
        opts = lint.defaultOptions()
        opts.allowed_globals = ['qx', 'qxWeb', 'q']

        tree_ = treegenerator.createFileTree_from_string(
            codecs.open(file_, "r", "utf-8").read())
        tree_ = scopes.create_scopes(tree_)
        if not getattr(context, 'console', None):
            context.console = Log()
        if not getattr(context, 'jobconf', None):
            context.jobconf = ExtMap()
        context.jobconf.set("lint-check/warn-unknown-jsdoc-keys", True)
        lint.lint_check(tree_, "", opts)

    except treegenerator.SyntaxException, e:
        logger.log(file_, 0, 0, str(e))
Beispiel #14
0
def runFix(jobconf, classesObj):
    """Normalize whitespace and line endings of a job's class files.

    Reads the "fix-files" job config (must be a map, otherwise this is a
    no-op), converts every class file to the configured eol-style ("LF",
    "CR" or "CRLF"), expands tabs, strips trailing spaces, and removes a
    UTF-8 BOM if present.
    """
    def fixPng():
        # stub -- PNG fixing is not implemented yet
        return

    def removeBOM(fpath):
        # Requires binary mode, hence separate from the textual fixes.
        with open(fpath, "rb") as fobj:
            content = fobj.read()
        if content.startswith(codecs.BOM_UTF8):
            # fix: log this function's argument; previously logged the
            # enclosing loop variable 'filePath' instead of 'fpath'
            console.debug("removing BOM: %s" % fpath)
            with open(fpath, "wb") as fobj:
                fobj.write(content[len(codecs.BOM_UTF8):])
        return

    # - Main ---------------------------------------------------------------

    # "fix-files" has to be a config map; any other value disables the run
    if not isinstance(jobconf.get("fix-files", False), types.DictType):
        return

    console = Context.console
    classes = classesObj.keys()
    fixsettings = ExtMap(jobconf.get("fix-files"))

    # Fixing JS source files
    console.info("Fixing whitespace in source files...")
    console.indent()

    console.info("Fixing files: ", False)
    numClasses = len(classes)
    eolStyle = fixsettings.get("eol-style", "LF")
    tabWidth = fixsettings.get("tab-width", 2)
    for pos, classId in enumerate(classes):
        console.progress(pos + 1, numClasses)
        classEntry = classesObj[classId]
        filePath = classEntry.path
        fileEncoding = classEntry.encoding
        fileContent = filetool.read(filePath, fileEncoding)
        # Caveat: as filetool.read already calls any2Unix, converting to LF will
        # not work as the file content appears unchanged to this function
        if eolStyle == "CR":
            fixedContent = textutil.any2Mac(fileContent)
        elif eolStyle == "CRLF":
            fixedContent = textutil.any2Dos(fileContent)
        else:
            fixedContent = textutil.any2Unix(fileContent)
        fixedContent = textutil.normalizeWhiteSpace(
            textutil.removeTrailingSpaces(
                textutil.tab2Space(fixedContent, tabWidth)))
        if fixedContent != fileContent:
            console.debug("modifying file: %s" % filePath)
            filetool.save(filePath, fixedContent, fileEncoding)
        # this has to go separate, as it requires binary operation
        removeBOM(filePath)

    console.outdent()

    # Fixing PNG files -- currently just a stub!
    if fixsettings.get("fix-png", False):
        console.info("Fixing PNGs...")
        console.indent()
        fixPng()
        console.outdent()

    return
Beispiel #15
0
def patchConfig(configPath, migVersions):
    """Migrate the JSON config file at *configPath* through *migVersions*.

    For each version a 'config.py' patch script is looked up in the patch
    directory; its "transformations" map (key -> rename string or callable)
    is applied to every job of the config.  If anything changed, the old
    config is backed up once as '<configPath>.orig' and the migrated config
    is written back.
    """

    def loadConfpy(confpyPath):
        # Execute the migration script; its module namespace is the result.
        namespace = {}
        execfile(confpyPath, namespace)
        return namespace

    def handleMap(extmap, key, action):
        # Apply one transformation 'action' to 'key' of a job map.
        changed = False
        # string-value action
        if key in extmap:
            if isinstance(action, types.StringTypes): # it's a rename
                if action == "": # it's a delete
                    extmap.delete(key)
                else:
                    extmap.rename(key, action)
            # function-value action
            elif isinstance(action, types.FunctionType):
                currval = extmap.get(key)
                action(extmap, key, currval)
            changed = True
        return changed

    def applyToConfig(config, confpy):
        # load conf.py
        confMig = loadConfpy(confpy)
        changed = False

        # keys starting with "/" (top-level keys) are not handled here
        for key, action in confMig["transformations"].items():
            if not key.startswith("/"):  # job-level key
                for job in config.get("jobs").values():
                    job = ExtMap(job)
                    changed = handleMap(job, key, action) or changed
        return changed


    def write_new(config):
        # Serialize the migrated config and write it back to configPath.
        conf_str = json.dumpsPretty(config.getData())
        filetool.save(configPath, conf_str)
        return

    def write_backup(configPath):
        # Keep a one-time ".orig" copy of the untouched config.
        import shutil
        configPathBackup = configPath + ".orig"
        if not os.path.exists(configPathBackup):
            shutil.copy(configPath, configPathBackup)
        return

    def get_confpy(vers):
        # Return the path of the version's config.py patch script, or None.
        res = None
        versionPatchPath = os.path.join(getPatchDirectory(), vers)
        # require a potential config.py in the root directory
        if os.path.exists(versionPatchPath + '/' + "config.py"):
            res = os.path.join(versionPatchPath, "config.py")
        return res

    # --------------------------------------------------------------------------

    # get current config
    config = json.loadStripComments(configPath)
    config = ExtMap(config)

    # apply migration files
    changed = False
    for vers in migVersions:
        confpy = get_confpy(vers)
        if confpy:
            changed = applyToConfig(config, confpy) or changed

    # write new config
    if changed:
        # backup old config
        write_backup(configPath)
        # write new config
        write_new(config)

    return
    def runCompiled(self, script, treeCompiler):
        """Run the "compile-dist" job: compile all packages to output files.

        Compiles each package of *script* with *treeCompiler*, generates a
        boot script, and writes the resulting files (optionally gzipped).
        No-op unless the "compile-dist" job config is present.
        """

        def getAppName(memo={}):
            # NOTE: the mutable default is deliberate -- it memoizes the
            # application name across calls.
            if not 'appname' in memo:
                appname = self._job.get("let/APPLICATION")
                if not appname:
                    raise RuntimeError, "Need an application name in config (key let/APPLICATION)"
                else:
                    memo['appname'] = appname
            return memo['appname']

        def getOutputFile():
            # Configured output path, or "build/script/<app>.js" as default.
            filePath = compConf.get("paths/file")
            if not filePath:
                filePath = os.path.join("build", "script", getAppName() + ".js")
            return filePath

        def getFileUri(scriptUri):
            # URI of the output file below the script URI prefix.
            appfile = os.path.basename(fileRelPath)
            fileUri = os.path.join(scriptUri, appfile)  # make complete with file name
            fileUri = Path.posifyPath(fileUri)
            return fileUri

        def generateBootScript(bootPackage=""):
            # Assemble the boot script content (global codes + boot code).

            def packagesOfFiles(fileUri, packages):
                # returns list of lists, each containing destination file name of the corresp. part
                # npackages = [['script/gui-0.js'], ['script/gui-1.js'],...]
                npackages = []
                file = os.path.basename(fileUri)
                for packageId in range(len(packages)):
                    packageFileName = self._resolveFileName(file, variants, settings, packageId)
                    npackages.append((packageFileName,))
                return npackages

            self._console.info("Generating boot script...")
            bootBlocks = []

            # For resource list
            resourceUri = compConf.get('uris/resource', 'resource')
            resourceUri = Path.posifyPath(resourceUri)

            globalCodes = self.generateGlobalCodes(libs, translationMaps, settings, variants, format, resourceUri, scriptUri)

            filesPackages = packagesOfFiles(fileUri, packages)
            bootBlocks.append(self.generateBootCode(parts, filesPackages, boot, variants, settings, bootPackage, globalCodes, "build", format))

            if format:
                bootContent = "\n\n".join(bootBlocks)
            else:
                bootContent = "".join(bootBlocks)

            return bootContent

        def writePackages(compiledPackages, startId=0):
            # Write each compiled package, numbering from startId.
            for packageId, content in enumerate(compiledPackages):
                writePackage(content, startId + packageId)
            return

        def writePackage(content, packageId=""):
            # Construct file name
            resolvedFilePath = self._resolveFileName(filePath, variants, settings, packageId)

            # Save result file
            filetool.save(resolvedFilePath, content)

            if compConf.get("paths/gzip"):
                filetool.gzip(resolvedFilePath, content)

            self._console.debug("Done: %s" % self._computeContentSize(content))
            self._console.debug("")

            return

        # ----------------------------------------------------------------------

        if not self._job.get("compile-dist", False):
            return

        packages   = script.packages
        parts      = script.parts
        boot       = script.boot
        variants   = script.variants
        classList  = script.classes

        self._treeCompiler = treeCompiler
        self._classList    = classList

        compConf = ExtMap(self._job.get("compile-dist"))

        # Read in base file name
        fileRelPath = getOutputFile()
        filePath    = self._config.absPath(fileRelPath)

        # Read in uri prefixes
        scriptUri = compConf.get('uris/script', 'script')
        scriptUri = Path.posifyPath(scriptUri)
        fileUri = getFileUri(scriptUri)

        # Read in compiler options
        optimize = compConf.get("code/optimize", [])
        self._treeCompiler.setOptimize(optimize)

        # Whether the code should be formatted
        format = compConf.get("code/format", False)

        # Read in settings
        settings = self.getSettings()

        # Get translation maps
        locales = compConf.get("code/locales", [])
        translationMaps = self.getTranslationMaps(packages, variants, locales)

        libs = self._job.get("library", [])

        # Generating packages
        self._console.info("Generating packages...")
        self._console.indent()

        bootPackage = ""
        compiledPackages = []
        for packageId, classes in enumerate(packages):
            self._console.info("Compiling package #%s:" % packageId, False)
            self._console.indent()

            # Compile file content
            compiledContent = self._treeCompiler.compileClasses(classes, variants, optimize, format)
            compiledPackages.append(compiledContent)

            self._console.debug("Done: %s" % self._computeContentSize(compiledContent))
            self._console.outdent()

        self._console.outdent()

        # Generating boot script
        if not len(compiledPackages):
            raise RuntimeError("No valid boot package generated.")

        # Either the boot code is merged into the first package, or written
        # as its own file -- depends on "packages/loader-with-boot".
        if self._job.get("packages/loader-with-boot", True):
            content = generateBootScript(compiledPackages[0])
            writePackage(content)
            writePackages(compiledPackages[1:], 1)
        else:
            content = generateBootScript()
            writePackage(content)
            writePackages(compiledPackages)

        return
Beispiel #17
0
    def runCompiled(self, script, treeCompiler, version="build"):

        def getOutputFile(compileType):
            # Configured output path, or "<compileType>/script/<ns>.js" as default.
            filePath = compConf.get("paths/file")
            if not filePath:
                filePath = os.path.join(compileType, "script", script.namespace + ".js")
            return filePath

        def getFileUri(scriptUri):
            # URI of the output file below the script URI prefix.
            appfile = os.path.basename(fileRelPath)
            fileUri = os.path.join(scriptUri, appfile)  # make complete with file name
            fileUri = Path.posifyPath(fileUri)
            return fileUri

        ##
        # returns the Javascript code for the initial ("boot") script as a string,
        #  using the loader.tmpl template and filling its placeholders
        def generateBootCode(parts, packages, boot, script, compConf, variants, settings, bootCode, globalCodes, version="source", decodeUrisFile=None, format=False):
            """Return the JavaScript source of the initial ("boot") script.

            Fills the placeholders of the loader.tmpl template with part,
            package, URI and hash information.  Returns "" when there are no
            parts.
            """

            ##
            # create a map with part names as key and array of package id's and
            # return as string
            def partsMap(script):
                partData = {}
                packages = script.packagesSortedSimple()
                #print "packages: %r" % packages
                for part in script.parts:
                    partData[part] = script.parts[part].packagesAsIndices(packages)
                    #print "part '%s': %r" % (part, script.parts[part].packages)
                partData = json.dumpsCode(partData)

                return partData

            def fillTemplate(vals, template):
                # Fill the code template with various vals 
                templ  = MyTemplate(template)
                result = templ.safe_substitute(vals)

                return result

            def packageUrisToJS1(packages, version, namespace=None):
                # Translate URI data to JavaScript
                # (older variant, kept for reference; see packageUrisToJS)
                
                allUris = []
                for packageId, package in enumerate(packages):
                    packageUris = []
                    for fileId in package:

                        if version == "build":
                            # TODO: gosh, the next is an ugly hack!
                            #namespace  = self._resourceHandler._genobj._namespaces[0]  # all name spaces point to the same paths in the libinfo struct, so any of them will do
                            if not namespace:
                                namespace  = script.namespace  # all name spaces point to the same paths in the libinfo struct, so any of them will do
                            relpath    = OsPath(fileId)
                        else:
                            namespace  = self._classes[fileId]["namespace"]
                            relpath    = OsPath(self._classes[fileId]["relpath"])

                        shortUri = Uri(relpath.toUri())
                        packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
                    allUris.append(packageUris)

                return allUris

            ##
            # Translate URI data to JavaScript
            # using Package objects
            def packageUrisToJS(packages, version):

                allUris = []
                for packageId, package in enumerate(packages):
                    packageUris = []
                    if package.file: # build
                        namespace = "__out__"
                        fileId    = package.file
                        relpath    = OsPath(fileId)
                        shortUri   = Uri(relpath.toUri())
                        packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
                    else: # "source" :
                        for clazz in package.classes:
                            namespace  = self._classes[clazz]["namespace"]
                            relpath    = OsPath(self._classes[clazz]["relpath"])
                            shortUri   = Uri(relpath.toUri())
                            packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
                    allUris.append(packageUris)

                return allUris


            def loadTemplate(bootCode):
                # try custom loader templates
                loaderFile = compConf.get("paths/loader-template", None)
                if not loaderFile:
                    # use default templates
                    if version=="build":
                        #loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader-build.tmpl.js")
                        # TODO: test-wise using generic template
                        loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader.tmpl.js")
                    else:
                        #loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader-source.tmpl.js")
                        loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader.tmpl.js")
                
                template = filetool.read(loaderFile)

                return template

            # ---------------------------------------------------------------

            if not parts:
                return ""

            result           = ""
            vals             = {}
            packages         = script.packagesSortedSimple()
            loader_with_boot = self._job.get("packages/loader-with-boot", True)

            # stringify data in globalCodes
            for entry in globalCodes:
                globalCodes[entry] = json.dumpsCode(globalCodes[entry])
                # undo damage done by simplejson to raw strings with escapes \\ -> \
                globalCodes[entry] = globalCodes[entry].replace('\\\\\\', '\\').replace(r'\\', '\\')  # " gets tripple escaped, therefore the first .replace()

            vals.update(globalCodes)

            if version=="build":
                vals["Resources"] = json.dumpsCode({})  # TODO: undo Resources from globalCodes!!!
            vals["Boot"] = '"%s"' % boot
            if version == "build":
                vals["BootPart"] = bootCode
            else:
                vals["BootPart"] = ""
                # fake package data
                for key, package in enumerate(packages): 
                    vals["BootPart"] += "qx.$$packageData['%d']={};\n" % key

            # Translate part information to JavaScript
            vals["Parts"] = partsMap(script)

            # Translate URI data to JavaScript
            #vals["Uris"] = packageUrisToJS1(packages, version)
            vals["Uris"] = packageUrisToJS(packages, version)
            vals["Uris"] = json.dumpsCode(vals["Uris"])

            # Add potential extra scripts
            vals["UrisBefore"] = []
            if self._job.get("add-script", False):
                additional_scripts = self._job.get("add-script",[])
                for additional_script in additional_scripts:
                    vals["UrisBefore"].append(additional_script["uri"])
            vals["UrisBefore"] = json.dumpsCode(vals["UrisBefore"])

            # Whether boot package is inline
            if version == "source":
                vals["BootIsInline"] = json.dumpsCode(False)
            else:
                vals["BootIsInline"] = json.dumpsCode(loader_with_boot)
                
            # Closure package information
            cParts = {}
            if version == "build":
                for part in script.parts:
                    if not loader_with_boot or part != "boot":
                        cParts[part] = True
            vals["ClosureParts"] = json.dumpsCode(cParts)

            # Package Hashes
            vals["PackageHashes"] = {}
            for key, package in enumerate(packages):
                if package.hash:
                    vals["PackageHashes"][key] = package.hash
                else:
                    vals["PackageHashes"][key] = "%d" % key  # fake code package hashes in source ver.
            vals["PackageHashes"] = json.dumpsCode(vals["PackageHashes"])

            # Script hook for qx.$$loader.decodeUris() function
            vals["DecodeUrisPlug"] = ""
            if decodeUrisFile:
                plugCode = filetool.read(self._config.absPath(decodeUrisFile))  # let it bomb if file can't be read
                vals["DecodeUrisPlug"] = plugCode.strip()
            
            # Enable "?nocache=...." for script loading?
            vals["NoCacheParam"] = "true" if self._job.get("compile-options/uris/add-nocache-param", True) else "false"

            # Locate and load loader basic script
            template = loadTemplate(bootCode)

            # Fill template gives result
            result = fillTemplate(vals, template)

            return result


        ##
        # Thin wrapper around generateBootCode(), and its only client.
        # Strips I18N data out of the loader whenever it is carried by the
        # packages instead, then delegates to generateBootCode() with the
        # argument set appropriate for the compile type.
        def generateBootScript(globalCodes, script, bootPackage="", compileType="build"):

            self._console.info("Generating boot script...")

            # I18N info stays in the loader only for a source run with
            # "packages/i18n-with-boot"; in every other case it travels with
            # the packages and must not be duplicated in the loader.
            keepI18nInLoader = (self._job.get("packages/i18n-with-boot", True)
                                and compileType != "build")
            if not keepI18nInLoader:
                globalCodes["Translations"] = {}
                globalCodes["Locales"]      = {}

            plugCodeFile = compConf.get("code/decode-uris-plug", False)
            if compileType == "build":
                # build: packages are referenced by their script file names
                filepackages = [(x.file,) for x in packages]
                bootContent  = generateBootCode(parts, filepackages, boot, script, compConf, variants, settings, bootPackage, globalCodes, compileType, plugCodeFile, format)
            else:
                # source: packages are referenced by their class lists
                filepackages = [x.classes for x in packages]
                bootContent  = generateBootCode(parts, filepackages, boot, script, compConf, variants={}, settings={}, bootCode=None, globalCodes=globalCodes, version=compileType, decodeUrisFile=plugCodeFile, format=format)

            return bootContent


        ##
        # Serialize a package's resource/translation/locale information as a
        # JavaScript code snippet (terminated with ';\n').
        def getPackageData(package):
            payload = {
                "resources":    package.data.resources,
                "translations": package.data.translations,
                "locales":      package.data.locales,
            }
            return json.dumpsCode(payload) + ';\n'


        ##
        # Compile the classes of a single package and prepend its package
        # data; non-boot packages are wrapped so their code only runs when
        # the part loader notifies them.
        def compilePackage(packageIndex, package):
            self._console.info("Compiling package #%s:" % packageIndex, False)
            self._console.indent()

            # compile class code, serialize package data, fingerprint both
            pkgCode = self._treeCompiler.compileClasses(package.classes, variants, optimize, format)
            pkgData = getPackageData(package)
            pkgHash = sha.getHash(pkgData + pkgCode)[:12]  # first 12 chars should be enough

            if packageIndex == 0:
                # boot package: data and code execute immediately
                compiledContent = ("qx.$$packageData['%s']=" % pkgHash) + pkgData + pkgCode
            else:
                # other packages: defer code execution to part loading
                compiledContent  = u'''qx.$$packageData['%s']=%s\n''' % (pkgHash, pkgData)
                compiledContent += u'''qx.Part.$$notifyLoad("%s", function() {\n%s\n});''' % (pkgHash, pkgCode)

            # expose hash to fill qx.$$loader.packageHashes in generateBootScript()
            package.hash = pkgHash

            self._console.debug("Done: %s" % self._computeContentSize(compiledContent))
            self._console.outdent()

            return compiledContent


        ##
        # takes an array of (po-data, locale-data) dict pairs;
        # folds all po data into a single dict and all cldr data into another
        def mergeTranslationMaps(transMaps):
            poData   = {}
            cldrData = {}

            for po_part, cldr_part in transMaps:
                # later entries update (and may override) earlier ones per locale
                for locale, entries in po_part.items():
                    poData.setdefault(locale, {}).update(entries)
                for locale, entries in cldr_part.items():
                    cldrData.setdefault(locale, {}).update(entries)

            return (poData, cldrData)


        # -- Main - runCompiled ------------------------------------------------

        # Early return
        compileType = self._job.get("compile/type", "")
        if compileType not in ("build", "source"):
            return

        packages   = script.packagesSortedSimple()
        parts      = script.parts
        boot       = script.boot
        variants   = script.variants
        libraries  = script.libraries

        self._treeCompiler = treeCompiler
        self._variants     = variants
        self._script       = script

        self._console.info("Generate %s version..." % compileType)
        self._console.indent()

        # - Evaluate job config ---------------------
        # Compile config
        compConf = self._job.get("compile-options")
        compConf = ExtMap(compConf)

        # Whether the code should be formatted
        format = compConf.get("code/format", False)
        script.scriptCompress = compConf.get("paths/gzip", False)

        # Read in settings
        settings = self.getSettings()
        script.settings = settings

        # Read libraries
        libs = self._job.get("library", [])

        # Get translation maps
        locales = compConf.get("code/locales", [])
        translationMaps = self.getTranslationMaps(packages, variants, locales)

        # Read in base file name
        fileRelPath = getOutputFile(compileType)
        filePath    = self._config.absPath(fileRelPath)
        script.baseScriptPath = filePath

        if compileType == "build":
            # read in uri prefixes
            scriptUri = compConf.get('uris/script', 'script')
            scriptUri = Path.posifyPath(scriptUri)
            fileUri   = getFileUri(scriptUri)
            # for resource list
            resourceUri = compConf.get('uris/resource', 'resource')
            resourceUri = Path.posifyPath(resourceUri)
        else:
            # source version needs place where the app HTML ("index.html") lives
            self.approot = self._config.absPath(compConf.get("paths/app-root", ""))
            resourceUri = None
            scriptUri   = None

        # Get global script data (like qxlibraries, qxresources,...)
        globalCodes                = {}
        globalCodes["Settings"]    = settings
        globalCodes["Variants"]    = self.generateVariantsCode(variants)
        globalCodes["Libinfo"]     = self.generateLibInfoCode(libs, format, resourceUri, scriptUri)
        # add synthetic output lib
        if scriptUri: out_sourceUri= scriptUri
        else:
            out_sourceUri = self._computeResourceUri({'class': ".", 'path': os.path.dirname(script.baseScriptPath)}, OsPath(""), rType="class", appRoot=self.approot)
            out_sourceUri = os.path.normpath(out_sourceUri.encodedValue())
        globalCodes["Libinfo"]['__out__'] = { 'sourceUri': out_sourceUri }
        globalCodes["Resources"]    = self.generateResourceInfoCode(script, settings, libraries, format)
        globalCodes["Translations"],\
        globalCodes["Locales"]      = mergeTranslationMaps(translationMaps)

        # Potentally create dedicated I18N packages
        i18n_as_parts = not self._job.get("packages/i18n-with-boot", True)
        if i18n_as_parts:
            script = self.generateI18NParts(script, globalCodes)
            self.writePackages([p for p in script.packages if getattr(p, "__localeflag", False)], script)

        if compileType == "build":

            # - Specific job config ---------------------
            # read in compiler options
            optimize = compConf.get("code/optimize", [])
            self._treeCompiler.setOptimize(optimize)

            # - Generating packages ---------------------
            self._console.info("Generating packages...")
            self._console.indent()

            bootPackage = ""
            for packageIndex, package in enumerate(packages):
                package.compiled = compilePackage(packageIndex, package)

            self._console.outdent()
            if not len(packages):
                raise RuntimeError("No valid boot package generated.")

            # - Put loader and packages together -------
            loader_with_boot = self._job.get("packages/loader-with-boot", True)
            # handle loader and boot package
            if not loader_with_boot:
                loadPackage = Package(0)            # make a dummy Package for the loader
                packages.insert(0, loadPackage)

            # attach file names (do this before calling generateBootScript)
            for package, fileName in zip(packages, self.packagesFileNames(script.baseScriptPath, len(packages))):
                package.file = os.path.basename(fileName)
                if self._job.get("compile-options/paths/scripts-add-hash", False):
                    package.file = self._fileNameWithHash(package.file, package.hash)

            # generate and integrate boot code
            if loader_with_boot:
                # merge loader code with first package
                bootCode = generateBootScript(globalCodes, script, packages[0].compiled)
                packages[0].compiled = bootCode
            else:
                loaderCode = generateBootScript(globalCodes, script)
                packages[0].compiled = loaderCode

            # write packages
            self.writePackages(packages, script)

        # ---- 'source' version ------------------------------------------------
        else:

            sourceContent = generateBootScript(globalCodes, script, bootPackage="", compileType=compileType)

            # Construct file name
            resolvedFilePath = self._resolveFileName(filePath, variants, settings)

            # Save result file
            filetool.save(resolvedFilePath, sourceContent)

            if compConf.get("paths/gzip"):
                filetool.gzip(resolvedFilePath, sourceContent)

            self._console.outdent()
            self._console.debug("Done: %s" % self._computeContentSize(sourceContent))
            self._console.outdent()

        self._console.outdent()

        return  # runCompiled()
# Beispiel #18
# 0
def runLogDependencies(jobconf, script):

    ##
    # A generator to yield all using dependencies of classes in packages;
    # emits (packageId, classId, depName, 'load'|'run') tuples.
    def lookupUsingDeps(packages, includeTransitives, forceFreshDeps=False):

        ##
        # has classId been yielded?
        # NOTE(review): reads classDeps and ignored_names from the current
        # iteration of the enclosing loop (late-binding closure); its classId
        # parameter is unused.
        def hasVisibleDeps(classId):
            # judged from the contents of its deps arrays
            load_names = [x.name for x in classDeps["load"]]
            run_names = [x.name for x in classDeps["run"]]
            return set(load_names).union(run_names).difference(ignored_names)

        for packageId, package in enumerate(packages):
            for classObj in package.classes:
                classId = classObj.id
                # combined load/run/ignore dependencies of this class
                classDeps, _ = classObj.getCombinedDeps(
                    script.classesAll,
                    variants,
                    script.jobconfig,
                    projectClassNames=False,
                    force=forceFreshDeps,
                    tree=classObj._tmp_tree)
                ignored_names = [x.name for x in classDeps["ignore"]]
                loads = classDeps["load"]
                runs = classDeps["run"]

                # strip transitive dependencies
                if not includeTransitives:
                    loads1, loads = loads[:], []
                    for dep in loads1:
                        # if the .requestor is different from classId, it must have been
                        # included through a transitive analysis
                        if dep.requestor == classId:
                            loads.append(dep)

                # project class names (de-duplicate by dep.name, keeping the first)
                loads1, loads = loads[:], []
                for dep in loads1:
                    if dep.name not in (x.name for x in loads):
                        loads.append(dep)
                runs1, runs = runs[:], []
                for dep in runs1:
                    if dep.name not in (x.name for x in runs):
                        runs.append(dep)

                # yield dependencies
                for dep in loads:
                    if dep.name not in ignored_names:
                        yield (packageId, classId, dep.name, 'load')

                # run deps that are already load deps are not repeated
                load_names = [x.name for x in loads]
                for dep in runs:
                    if dep.name not in ignored_names and dep.name not in load_names:
                        yield (packageId, classId, dep.name, 'run')

                if not hasVisibleDeps(classId):
                    # yield two empty relations, so that classId is at least visible to consumer
                    yield (packageId, classId, None, 'load')
                    yield (packageId, classId, None, 'run')

        return

    ##
    # A generator to yield all used-by dependencies of classes in packages;
    # will report used-by relations of a specific class in sequence;
    # emits (packageId, depId, userClassId, 'load'|'run') tuples.
    def lookupUsedByDeps(packages, includeTransitives, forceFreshDeps=False):

        depsMap = {}

        # build up depsMap {"classId" : ("packageId", [<load_deps>,...], [<run_deps>, ...]) }
        for packageId, package in enumerate(packages):
            for classObj in package.classes:
                classId = classObj.id
                if classId not in depsMap:
                    depsMap[classId] = (packageId, [], [])
                # combined load/run/ignore dependencies of this class
                classDeps, _ = classObj.getCombinedDeps(
                    script.classesAll,
                    variants,
                    script.jobconfig,
                    projectClassNames=False,
                    force=forceFreshDeps)
                ignored_names = [x.name for x in classDeps["ignore"]]
                loads = classDeps["load"]
                runs = classDeps["run"]

                # strip transitive dependencies
                if not includeTransitives:
                    loads1, loads = loads[:], []
                    for dep in loads1:
                        # if the .requestor is different from classId, it must be included
                        # through a transitive analysis
                        if dep.requestor == classId:
                            loads.append(dep)

                # project class names (de-duplicate by dep.name, keeping the first)
                loads1, loads = loads[:], []
                for dep in loads1:
                    if dep.name not in (x.name for x in loads):
                        loads.append(dep)
                runs1, runs = runs[:], []
                for dep in runs1:
                    if dep.name not in (x.name for x in runs):
                        runs.append(dep)

                # collect dependencies (inverted: dep.name is used by classId)
                for dep in loads:
                    if dep.name not in ignored_names:
                        if dep.name not in depsMap:
                            depsMap[dep.name] = (
                                packageId, [], []
                            )  # the packageId is bogus here
                        depsMap[dep.name][1].append(classId)
                for dep in runs:
                    if dep.name not in ignored_names:
                        if dep.name not in depsMap:
                            depsMap[dep.name] = (packageId, [], [])
                        depsMap[dep.name][2].append(classId)

        # yield depsMap
        for depId, depVal in depsMap.items():
            packageId = depVal[0]
            usedByLoad = depVal[1]
            usedByRun = depVal[2]

            for classId in usedByLoad:
                yield (packageId, depId, classId, 'load')

            for classId in usedByRun:
                yield (packageId, depId, classId, 'run')

            if not usedByLoad + usedByRun:  # this class isn't used at all
                # yield two empty relations, so that classId is at least visible to consumer
                yield (packageId, depId, None, 'load')
                yield (packageId, depId, None, 'run')

        return

    ##
    # Write the class dependency relation to a JSON file.
    #
    # classDepsIter -- iterator of (packageId, classId, depId, loadOrRun) tuples
    # depsLogConf   -- config map for the "log/dependencies" job key
    def depsToJsonFile(classDepsIter, depsLogConf):
        # group dependencies per class, split by load-time vs. run-time
        data = {}
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if classId not in data:
                data[classId] = {"load": [], "run": []}
            data[classId][loadOrRun].append(depId)

        # "file" would shadow the builtin, hence "file_" (as in depsToProviderFormat)
        file_ = depsLogConf.get('json/file', "deps.json")
        console.info("Writing dependency data to file: %s" % file_)
        pretty = depsLogConf.get('json/pretty', None)
        if pretty:
            indent = 2
            separators = (', ', ': ')
        else:
            indent = None
            separators = (',', ':')
        filetool.save(
            file_,
            json.dumps(data,
                       sort_keys=True,
                       indent=indent,
                       separators=separators))

        return

    ##
    # Write class dependencies in "provider" format: per class, load/run deps
    # plus resource, translation and CLDR pseudo-dependencies, optionally with
    # dots replaced by slashes in keys and/or values, saved as JSON.
    def depsToProviderFormat(classDepsIter, depsLogConf):
        ##
        # duplicates CodeProvider.passesOutputFilter
        def passesOutputFilter(resId):
            # must match some include expressions
            # NOTE: Py2 filter() returns a list; truthiness = "any match"
            if not filter(None, [
                    x.search(resId) for x in inclregexps
            ]):  # [None, None, _sre.match, None, _sre.match, ...]
                return False
            # must not match any exclude expressions
            if filter(None, [x.search(resId) for x in exclregexps]):
                return False
            return True

        # ---------------------------------------

        # compile include/exclude globs into regexps (Py2 map() returns lists)
        inclregexps = jobconf.get("provider/include", ["*"])
        exclregexps = jobconf.get("provider/exclude", [])
        inclregexps = map(textutil.toRegExp, inclregexps)
        exclregexps = map(textutil.toRegExp, exclregexps)
        replace_dots = depsLogConf.get("json/replace-dots-in", [])
        slashes_keys = 'keys' in replace_dots
        slashes_vals = 'values' in replace_dots

        classToDeps = {}
        # Class deps
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if passesOutputFilter(classId):
                if classId not in classToDeps:
                    classToDeps[classId] = {}
                    classToDeps[classId]["load"] = []
                    classToDeps[classId]["run"] = []
                if depId != None:
                    classToDeps[classId][loadOrRun].append(depId)

        if slashes_vals:
            # transform dep items ("qx.Class" -> "qx/Class")
            for key, val in classToDeps.items():
                newval = []
                for ldep in val["load"]:
                    newdep = ldep.replace(".", "/")
                    newval.append(newdep)
                val["load"] = newval
                newval = []
                for ldep in val["run"]:
                    newdep = ldep.replace(".", "/")
                    newval.append(newdep)
                val["run"] = newval

        # Resource deps
        # class list
        classObjs = [
            x for x in script.classesObj if x.id in classToDeps.keys()
        ]
        # map resources to class.resources
        classObjs = Class.mapResourcesToClasses(script.libraries, classObjs,
                                                jobconf.get("asset-let", {}))

        # add each class's resources as pseudo run-time dependencies
        for clazz in classObjs:
            reskeys = ["/resource/resources#" + x.id for x in clazz.resources]
            classToDeps[clazz.id]["run"].extend(reskeys)

        # Message key deps
        for classId in classToDeps:
            #classKeys, _ = Locale.getTranslation(classId, {})
            classKeys, _ = script.classesAll[classId].messageStrings({})
            transIds = set(x['id']
                           for x in classKeys)  # get the msgid's, uniquely
            transIds.update(x['plural'] for x in classKeys
                            if 'plural' in x)  # add plural keys
            transKeys = ["/translation/i18n-${lang}#" + x for x in transIds]
            classToDeps[classId]["run"].extend(transKeys)

        # CLDR dependency
        for classId in classToDeps:
            if script.classesAll[classId].getHints("cldr"):
                classToDeps[classId]["run"].append(
                    "/locale/locale-${lang}#cldr")

        if slashes_keys:
            # transform dep keys ("qx.Class" -> "qx/Class.js")
            # NOTE: deleting while iterating is safe here only because Py2
            # .items() materializes a list first
            for key, val in classToDeps.items():
                if key.find(".") > -1:
                    newkey = key.replace(".", "/")
                    classToDeps[newkey] = classToDeps[key]
                    del classToDeps[key]

        # sort information for each class (for stable output)
        for classvals in classToDeps.values():
            for key in classvals:
                classvals[key] = sorted(classvals[key], reverse=True)

        # write to file
        file_ = depsLogConf.get('json/file', "deps.json")
        console.info("Writing dependency data to file: %s" % file_)
        pretty = depsLogConf.get('json/pretty', None)
        if pretty:
            indent = 2
            separators = (', ', ': ')
        else:
            indent = None
            separators = (',', ':')
        filetool.save(
            file_,
            json.dumps(classToDeps,
                       sort_keys=True,
                       indent=indent,
                       separators=separators))

        return

    ##
    # Write dependency data in "flare" JSON format (one record per class with
    # name/size/imports, suitable for dependency-graph visualizations).
    def depsToFlareFile(classDepsIter, depsLogConf):
        data = {}
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if classId not in data:
                data[classId] = {}
                data[classId]['name'] = classId
                data[classId]["size"] = 1000  # fixed fake size; the format requires one
                data[classId]["imports"] = []

            # only load-time dependencies become graph edges here
            if loadOrRun == 'load':
                data[classId]['imports'].append(depId)

        output = list(data.values())

        # "file" would shadow the builtin, hence "file_" (as in depsToProviderFormat)
        file_ = depsLogConf.get('flare/file', "flare.json")
        console.info("Writing dependency data to file: %s" % file_)
        pretty = depsLogConf.get('flare/pretty', None)
        if pretty:
            indent = 2
            separators = (', ', ': ')
        else:
            indent = None
            separators = (',', ':')
        filetool.save(
            file_,
            json.dumps(output,
                       sort_keys=True,
                       indent=indent,
                       separators=separators))

        return

    def depsToDotFile(classDepsIter, depsLogConf):
        ##
        # Compute dot node attributes (color, fontsize) from a class's size:
        # big classes red, medium green, small blue; unknown classes get none.
        def getNodeAttribs(classId, useCompiledSize=False, optimize=[]):
            attribs = []
            color = fontsize = None
            # (big-threshold, medium-threshold) per size mode
            sizes = {
                'compiled': (8000, 2000),
                'source': (20000, 5000)
            }
            compOptions = CompileOptions()
            compOptions.optimize = optimize
            compOptions.variantset = variants
            compOptions.format = True  # guess it's most likely
            if classId in script.classesAll:
                classObj = script.classesAll[classId]
                if useCompiledSize:
                    fsize = classObj.getCompiledSize(
                        compOptions, featuremap=script._featureMap)
                    mode = 'compiled'
                else:
                    fsize = classObj.size
                    mode = 'source'

                bigLimit, mediumLimit = sizes[mode]
                if fsize > bigLimit:
                    color, fontsize = "red", 15
                elif fsize > mediumLimit:
                    color, fontsize = "green", 13
                else:
                    color, fontsize = "blue", 10

            if fontsize:
                attribs.append(("fontsize", fontsize))
            if color:
                attribs.append(("color", color))
            return attribs

        ##
        # Copy spanning-tree edges from gr into the printer graph gr1, keeping
        # edge attributes (colors) where the edge exists in gr; unless mode is
        # "span-tree-only", also add all gr edges between spanning-tree nodes.
        # st is presumably the {node: predecessor} map from
        # breadth_first_search -- TODO confirm against pygraph docs.
        def addEdges(gr, gr1, st, st_nodes, mode):
            # rather gr.add_spanning_tree(st), go through individual edges for coloring
            for v in st.iteritems():
                if None in v:  # drop edges with a None node
                    continue
                # (node, predecessor) pair -> edge runs predecessor -> node
                v2, v1 = v
                if gr.has_edge(v1, v2):
                    gr1.add_edge(v1, v2, attrs=gr.get_edge_attributes(v1, v2))
                else:
                    gr1.add_edge(
                        v1,
                        v2,
                    )
            if not mode or not mode == "span-tree-only":  # add additional dependencies
                for v1 in st_nodes:  # that are not covered by the span tree
                    for v2 in st_nodes:
                        if None in (v1, v2):
                            continue
                        if gr.has_edge(v1, v2):
                            gr1.add_edge(v1,
                                         v2,
                                         attrs=gr.get_edge_attributes(v1, v2))
            return

        ##
        # Add a colored/sized node to gr for every (non-None) id in st_nodes.
        def addNodes(gr, st_nodes):
            # rather gr.add_nodes(st), go through individual nodes for coloring
            useCompiledSize = depsLogConf.get("dot/compiled-class-size", True)
            optimize = jobconf.get("compile-options/code/optimize", [])
            for nodeId in st_nodes:
                if nodeId is None:  # None entries are introduced by the spanning tree
                    continue
                gr.add_node(nodeId, attrs=getNodeAttribs(nodeId, useCompiledSize, optimize))
            return

        ##
        # Render the printer graph in dot format and save it to the configured file.
        def writeDotFile(gr1, depsLogConf):
            # "file" would shadow the builtin, hence "file_"
            file_ = depsLogConf.get('dot/file', "deps.dot")
            dot = gr1.write(fmt='dotwt')
            console.info("Writing dependency graph to file: %s" % file_)
            filetool.save(file_, dot)
            return

        ##
        # Read output options from config; format is currently unused (always
        # None), mode becomes "span-tree-only" when dot/span-tree-only is set.
        def getFormatMode(depsLogConf):
            format = None
            mode = depsLogConf.get('dot/span-tree-only', None)
            if mode:
                mode = "span-tree-only"
            return format, mode

        ##
        # Build a helper digraph for output: a spanning tree from the
        # configured root node (optionally limited by a search radius), plus
        # any extra gr edges between its nodes (unless span-tree-only mode).
        def createPrinterGraph(gr, depsLogConf):
            format, mode = getFormatMode(depsLogConf)
            searchRoot = depsLogConf.get(
                'dot/root')  # get the root node for the spanning tree
            searchRadius = depsLogConf.get('dot/radius', None)
            # "filter" would shadow the builtin, hence "searchFilter"
            if searchRadius:
                searchFilter = graph.filters.radius(searchRadius)
            else:
                searchFilter = graph.filters.null()
            st, op = gr.breadth_first_search(
                root=searchRoot, filter=searchFilter)  # get the spanning tree
            gr1 = graph.digraph()
            st_nodes = set(st.keys() + st.values())  # Py2: list concatenation
            addNodes(gr1, st_nodes)
            addEdges(gr, gr1, st, st_nodes, mode)
            return gr1

        # -- Main (depsToDotFile) ------------------------------------------

        # build the full dependency graph, derive a (possibly pruned)
        # printer graph from it, and write that out in dot format
        phase = depsLogConf.get('phase', None)
        gr = graph.digraph()
        #graphAddNodes(gr, script.classes)
        graphAddEdges(classDepsIter, gr, phase)
        gr1 = createPrinterGraph(gr, depsLogConf)
        writeDotFile(gr1, depsLogConf)
        return

    ##
    # Dump dependencies as Prolog-style depends/3 terms on the console.
    def depsToTerms(classDepsIter):

        # bucket dependencies per class, split into load-time and run-time
        depends = {}
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            bucket = depends.setdefault(classId, {'load': [], 'run': []})
            bucket[loadOrRun].append(depId)

        for classId, classDeps in depends.items():
            console.info("depends(%r, %r, %r)" %
                         (classId, classDeps['load'], classDeps['run']))

        return

    ##
    # Re-group a stream of (packageId, classId, depId, loadOrRun) tuples into
    # {classId: (packageId, [load deps], [run deps])}; the packageId of the
    # first tuple seen for a class wins.
    def collectDispersedDependencies(classDepsIter):
        depsMap = {}
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if classId not in depsMap:
                depsMap[classId] = (packageId, [], [])
            entry = depsMap[classId]
            if loadOrRun == "load":
                entry[1].append(depId)
            elif loadOrRun == "run":
                entry[2].append(depId)
        return depsMap

    ##
    # Print a human-readable dependency listing on the console, grouped by
    # package (with the parts each package belongs to), then by class.
    def depsToConsole(classDepsIter, type):
        currentPackageId = ''
        console.indent()
        console.indent()
        relstring = "Uses" if type == "using" else "Used by"
        depsMap = collectDispersedDependencies(classDepsIter)

        for classId in sorted(depsMap.keys()):
            packageId, depsLoad, depsRun = depsMap[classId]

            # emit a package header whenever the package changes
            if packageId != currentPackageId:
                currentPackageId = packageId
                console.outdent()
                console.info("Package %s" % packageId)
                console.indent()
                for partId in parts:
                    if packageId in (x.id for x in parts[partId].packages):
                        console.info("Part %s" % partId)

            console.info("Class: %s" % classId)

            console.indent()
            for depId in sorted(depsLoad):
                console.info("%s: %s (load)" % (relstring, depId))
            for depId in sorted(depsRun):
                console.info("%s: %s (run)" % (relstring, depId))
            console.outdent()

        console.outdent()
        console.outdent()
        return

    ##
    # Add one edge per dependency tuple to graph gr; load edges are colored
    # red. pLoadOrRun may restrict the phase ("loadtime" or "runtime").
    def graphAddEdges(classDepsIter, gr, pLoadOrRun):

        loadAttrs = [('color', 'red')]
        runAttrs = []

        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            # make sure both endpoints exist before wiring them up
            for nodeId in (classId, depId):
                if not gr.has_node(nodeId):
                    graphAddNode(gr, nodeId)
            if loadOrRun == 'load' and pLoadOrRun != "runtime":
                gr.add_edge(classId, depId, attrs=loadAttrs)
            if loadOrRun == 'run' and pLoadOrRun != "loadtime":
                gr.add_edge(classId, depId, attrs=runAttrs)

        return

    ##
    # Add a graph node for every class id in clsList.
    def graphAddNodes(gr, clsList):
        for classId in clsList:
            graphAddNode(gr, classId)

    ##
    # Add a single node, colored by the class's source size:
    # red > 20000, green > 5000, blue otherwise (and for unknown classes).
    def graphAddNode(gr, cid):
        color = "blue"
        if cid in script.classesAll:
            fsize = script.classesAll[cid].size
            if fsize > 20000:
                color = "red"
            elif fsize > 5000:
                color = "green"
        gr.add_node(cid, attrs=[("color", color)])
        return

    ##
    # Run the configured dependency log: pick the relation direction
    # ("using" vs. "used-by") and dispatch to the configured output format.
    def logDeps(depsLogConf, type):

        mainformat = depsLogConf.get('format', None)
        includeTransitives = depsLogConf.get('include-transitive-load-deps',
                                             True)
        forceFreshDeps = depsLogConf.get('force-fresh-deps', False)

        # (a leftover no-op debug loop over all package classes was removed here)

        if type == "using":
            classDepsIter = lookupUsingDeps(packages, includeTransitives,
                                            forceFreshDeps)
        else:
            classDepsIter = lookupUsedByDeps(packages, includeTransitives,
                                             forceFreshDeps)

        if mainformat == 'dot':
            depsToDotFile(classDepsIter, depsLogConf)
        elif mainformat == 'json':
            depsToJsonFile(classDepsIter, depsLogConf)
        elif mainformat == 'flare':
            depsToFlareFile(classDepsIter, depsLogConf)
        elif mainformat == 'term':
            depsToTerms(classDepsIter)
        elif mainformat == 'provider':
            depsToProviderFormat(classDepsIter, depsLogConf)
        else:
            depsToConsole(classDepsIter, type)

        return

    # -- Main (runLogDependencies) ------------------

    # Only act when the job config actually requests dependency logging.
    depsLogConf = jobconf.get("log/dependencies", False)
    if not depsLogConf:
        return

    console = Context.console
    console.info("Dependency logging  ", feed=False)
    console.indent()

    packages = script.packagesSorted()
    parts = script.parts
    variants = script.variants
    # Wrap the raw config map so nested keys can be read with path syntax.
    depsLogConf = ExtMap(depsLogConf)

    # 'type' selects the lookup direction: "using" (what a class requires)
    # vs. "used-by" (which classes require it); anything else is rejected.
    type = depsLogConf.get('type', None)
    if type in ("used-by", "using"):
        logDeps(depsLogConf, type)
    else:
        console.error(
            'Dependency log type "%s" not in ["using", "used-by"]; skipping...'
            % type)

    console.outdent()
    console.dotclear()
    return
Beispiel #19
0
    def runCompiled(self, script, treeCompiler, version="build"):
        """Generate the compiled output of the application.

        Depending on the job's "compile/type" setting this produces either
        the optimized "build" version or the "source" version, compiles and
        writes the script packages, and generates the boot/loader script.
        NOTE(review): the 'version' parameter appears unused here -- the
        compile type is read from the job config below; confirm callers.
        """

        def getOutputFile(compileType):
            # Output file from config, or default <type>/script/<appname>.js.
            filePath = compConf.get("paths/file")
            if not filePath:
                filePath = os.path.join(compileType, "script", self.getAppName() + ".js")
            return filePath

        def getFileUri(scriptUri):
            # Join the script uri prefix with the output file's base name,
            # normalized to posix-style separators.
            appfile = os.path.basename(fileRelPath)
            fileUri = os.path.join(scriptUri, appfile)  # make complete with file name
            fileUri = Path.posifyPath(fileUri)
            return fileUri

        def generateBootScript(globalCodes, script, bootPackage="", compileType="build"):
            """Return the loader/boot script code as a string.

            For the "build" compile type the packages' destination file names
            are computed (and attached to the Package objects); for other
            compile types the packages' class lists are passed through to
            self.generateBootCode().
            """

            def packagesOfFiles(fileUri, packages):
                # returns list of lists, each containing destination file name of the corresp. part
                # npackages = [['script/gui-0.js'], ['script/gui-1.js'],...]
                npackages = []
                file = os.path.basename(fileUri)
                if self._job.get("packages/loader-with-boot", True):
                    totalLen = len(packages)
                else:
                    # a separate loader file is generated, so one more name is needed
                    totalLen = len(packages) + 1
                for packageId, packageFileName in enumerate(self.packagesFileNames(file, totalLen, classPackagesOnly=True)):
                    npackages.append((packageFileName,))
                    packages[packageId].file = packageFileName  # TODO: very unnice to fix this here
                return npackages

            # besser: fixPackagesFiles()
            # NOTE(review): packagesOfFilesX is never called in this method --
            # apparently dead code kept for reference; consider removing.
            def packagesOfFilesX(fileUri, packages):
                # returns list of lists, each containing destination file name of the corresp. package
                # npackages = [['script/gui-0.js'], ['script/gui-1.js'],...]
                file = os.path.basename(fileUri)
                loader_with_boot = self._job.get("packages/loader-with-boot", True)
                for packageId, package in enumerate(packages):
                    if loader_with_boot:
                        suffix = packageId - 1
                        if suffix < 0:
                            suffix = ""
                    else:
                        suffix = packageId
                    packageFileName = self._resolveFileName(file, self._variants, self._settings, suffix)
                    package.file = packageFileName

                return packages

            # ----------------------------------------------------------------------------
            self._console.info("Generating boot script...")

            if not self._job.get("packages/i18n-with-boot", True):
                globalCodes = self.writeI18NFiles(globalCodes, script)
                # remove I18N info from globalCodes, so they don't go into the loader
                globalCodes["Translations"] = {}
                globalCodes["Locales"]      = {}
            else:
                if compileType == "build":
                    # also remove them here, as this info is now with the packages
                    globalCodes["Translations"] = {}
                    globalCodes["Locales"]      = {}

            plugCodeFile = compConf.get("code/decode-uris-plug", False)
            if compileType == "build":
                filepackages = packagesOfFiles(fileUri, packages)
                bootContent = self.generateBootCode(parts, filepackages, boot, script, compConf, variants, settings, bootPackage, globalCodes, compileType, plugCodeFile, format)
            else:
                # source version: pass through each package's class list
                filepackages = [x.classes for x in packages]
                bootContent = self.generateBootCode(parts, filepackages, boot, script, compConf, variants={}, settings={}, bootCode=None, globalCodes=globalCodes, version=compileType, decodeUrisFile=plugCodeFile, format=format)


            return bootContent


        def getPackageData(package):
            data = {}
            data["resources"]    = package.data.resources
            data["translations"] = package.data.translations
            data["locales"]      = package.data.locales
            data = json.dumpsCode(data)
            data += ';\n'
            return data

        def compilePackage(packageIndex, package):
            """Compile one package's classes and wrap them with its meta data.

            The boot package (index 0) is emitted plainly; every other
            package is wrapped in qx.Part.$$notifyLoad() so the part loader
            is notified once the package file has been loaded.
            """
            self._console.info("Compiling package #%s:" % packageIndex, False)
            self._console.indent()

            # Compile file content
            pkgCode = self._treeCompiler.compileClasses(package.classes, variants, optimize, format)
            pkgData = getPackageData(package)
            hash    = sha.getHash(pkgData + pkgCode)[:12]  # first 12 chars should be enough

            isBootPackage = packageIndex == 0
            if isBootPackage:
                compiledContent = ("qx.$$packageData['%s']=" % hash) + pkgData + pkgCode
            else:
                compiledContent  = u'''qx.$$packageData['%s']=%s\n''' % (hash, pkgData)
                compiledContent += u'''qx.Part.$$notifyLoad("%s", function() {\n%s\n});''' % (hash, pkgCode)

            #
            package.hash = hash  # to fill qx.$$loader.packageHashes in generateBootScript()

            self._console.debug("Done: %s" % self._computeContentSize(compiledContent))
            self._console.outdent()

            return compiledContent

        # -- Main --------------------------------------------------------------

        # Early return: only "build" and "source" compile types are handled.
        compileType = self._job.get("compile/type", "")
        if compileType not in ("build", "source"):
            return

        packages   = script.packagesSortedSimple()
        parts      = script.parts
        boot       = script.boot
        variants   = script.variants

        self._classList    = script.classes
        self._treeCompiler = treeCompiler
        self._variants     = variants

        self._console.info("Generate %s version..." % compileType)
        self._console.indent()

        # - Evaluate job config ---------------------
        # Compile config
        compConf = self._job.get("compile-options")
        compConf = ExtMap(compConf)

        # Whether the code should be formatted
        format = compConf.get("code/format", False)
        script.scriptCompress = compConf.get("paths/gzip", False)

        # Read in settings
        settings = self.getSettings()
        script.settings = settings

        # Read libraries
        libs = self._job.get("library", [])

        # Get translation maps
        locales = compConf.get("code/locales", [])
        translationMaps = self.getTranslationMaps(packages, variants, locales)

        # Read in base file name
        fileRelPath = getOutputFile(compileType)
        filePath    = self._config.absPath(fileRelPath)
        script.baseScriptPath = filePath

        if compileType == "build":
            # read in uri prefixes
            scriptUri = compConf.get('uris/script', 'script')
            scriptUri = Path.posifyPath(scriptUri)
            fileUri   = getFileUri(scriptUri)
            # for resource list
            resourceUri = compConf.get('uris/resource', 'resource')
            resourceUri = Path.posifyPath(resourceUri)
        else:
            # source version needs place where the app HTML ("index.html") lives
            self.approot = self._config.absPath(compConf.get("paths/app-root", ""))
            resourceUri = None
            scriptUri   = None

        # Get global script data (like qxlibraries, qxresources,...)
        globalCodes = self.generateGlobalCodes(script, libs, translationMaps, settings, variants, format, resourceUri, scriptUri)

        if compileType == "build":

            # - Specific job config ---------------------
            # read in compiler options
            optimize = compConf.get("code/optimize", [])
            self._treeCompiler.setOptimize(optimize)

            # - Generating packages ---------------------
            self._console.info("Generating packages...")
            self._console.indent()

            # NOTE(review): bootPackage is assigned here but never used in
            # this branch -- looks like a leftover; confirm before removing.
            bootPackage = ""
            for packageIndex, package in enumerate(packages):
                package.compiled = compilePackage(packageIndex, package)

            self._console.outdent()
            if not len(packages):
                raise RuntimeError("No valid boot package generated.")

            # - Put loader and packages together -------
            loader_with_boot = self._job.get("packages/loader-with-boot", True)
            # handle loader and boot package
            if loader_with_boot:
                # merge the loader code into the first (boot) package
                bootCode = generateBootScript(globalCodes, script, packages[0].compiled)
                packages[0].compiled = bootCode
            else:
                loaderCode = generateBootScript(globalCodes, script)
                loadPackage = Package(0)            # make a dummy Package for the loader
                loadPackage.compiled = loaderCode
                packages.insert(0, loadPackage)

            # attach file names
            for package, fileName in zip(packages, self.packagesFileNames(script.baseScriptPath, len(packages))):
                package.file = fileName

            # write packages
            self.writePackages(packages, script)

        # ---- 'source' version ------------------------------------------------
        else:

            sourceContent = generateBootScript(globalCodes, script, bootPackage="", compileType=compileType)

            # Construct file name
            resolvedFilePath = self._resolveFileName(filePath, variants, settings)

            # Save result file
            filetool.save(resolvedFilePath, sourceContent)

            if compConf.get("paths/gzip"):
                filetool.gzip(resolvedFilePath, sourceContent)

            self._console.outdent()
            self._console.debug("Done: %s" % self._computeContentSize(sourceContent))
            self._console.outdent()

        self._console.outdent()

        return  # runCompiled()
Beispiel #20
0
    def runCompiled(self, script, treeCompiler, version="build"):
        """Generate the compiled ("build" or "source") version of the app.

        Compiles and writes the class packages and generates the boot/loader
        script; the effective compile type is taken from the job's
        "compile/type" key.
        NOTE(review): the 'version' parameter appears unused -- confirm callers.
        """

        def getOutputFile(compileType):
            # Output file from config, or default <type>/script/<namespace>.js.
            filePath = compConf.get("paths/file")
            if not filePath:
                filePath = os.path.join(compileType, "script", script.namespace + ".js")
            return filePath

        def getFileUri(scriptUri):
            # Join the script uri prefix with the output file's base name,
            # normalized to posix-style separators.
            appfile = os.path.basename(fileRelPath)
            fileUri = os.path.join(scriptUri, appfile)  # make complete with file name
            fileUri = Path.posifyPath(fileUri)
            return fileUri

        ##
        # returns the Javascript code for the initial ("boot") script as a string,
        #  using the loader.tmpl template and filling its placeholders
        def generateBootCode(parts, packages, boot, script, compConf, variants, settings, bootCode, globalCodes, version="source", decodeUrisFile=None, format=False):

            ##
            # create a map with part names as key and array of package id's and
            # return as string
            def partsMap(script):
                partData = {}
                packages = script.packagesSortedSimple()
                #print "packages: %r" % packages
                for part in script.parts:
                    # each part maps to the indices of its packages within the
                    # sorted package list
                    partData[part] = script.parts[part].packagesAsIndices(packages)
                    #print "part '%s': %r" % (part, script.parts[part].packages)
                partData = json.dumpsCode(partData)

                return partData

            def fillTemplate(vals, template):
                # Substitute the placeholders of the loader template with the
                # computed values; unresolved placeholders are left untouched.
                return MyTemplate(template).safe_substitute(vals)

            def packageUrisToJS1(packages, version, namespace=None):
                # Translate URI data to JavaScript
                # NOTE(review): superseded by packageUrisToJS() below; its only
                # call site (further down) is commented out -- dead code.

                allUris = []
                for packageId, package in enumerate(packages):
                    packageUris = []
                    for fileId in package:

                        if version == "build":
                            # TODO: gosh, the next is an ugly hack!
                            #namespace  = self._resourceHandler._genobj._namespaces[0]  # all name spaces point to the same paths in the libinfo struct, so any of them will do
                            if not namespace:
                                namespace  = script.namespace  # all name spaces point to the same paths in the libinfo struct, so any of them will do
                            relpath    = OsPath(fileId)
                        else:
                            namespace  = self._classes[fileId]["namespace"]
                            relpath    = OsPath(self._classes[fileId]["relpath"])

                        shortUri = Uri(relpath.toUri())
                        packageUris.append("%s:%s" % (namespace, shortUri.encodedValue()))
                    allUris.append(packageUris)

                return allUris

            ##
            # Translate the packages' URI data into the nested-list form the
            # loader template expects, using Package objects: one inner list
            # per package, each entry "namespace:encoded-uri".
            def packageUrisToJS(packages, version):

                allUris = []
                for package in packages:
                    uris = []
                    if package.file:  # "build": the package has one output file
                        path = OsPath(package.file)
                        uris.append("%s:%s" % ("__out__", Uri(path.toUri()).encodedValue()))
                    else:  # "source": one uri per class file of the package
                        for clazz in package.classes:
                            namespace = self._classes[clazz]["namespace"]
                            path = OsPath(self._classes[clazz]["relpath"])
                            uris.append("%s:%s" % (namespace, Uri(path.toUri()).encodedValue()))
                    allUris.append(uris)

                return allUris


            def loadTemplate(bootCode):
                # Return the loader template text: a custom template from the
                # job config if given, otherwise the generic default template.
                # (The bootCode parameter is not used in this function.)
                loaderFile = compConf.get("paths/loader-template", None)
                if not loaderFile:
                    # use default templates
                    if version=="build":
                        #loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader-build.tmpl.js")
                        # TODO: test-wise using generic template
                        loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader.tmpl.js")
                    else:
                        #loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader-source.tmpl.js")
                        loaderFile = os.path.join(filetool.root(), os.pardir, "data", "generator", "loader.tmpl.js")

                template = filetool.read(loaderFile)

                return template

            # ---------------------------------------------------------------

            # no parts -> no application code -> nothing to generate
            if not parts:
                return ""

            result           = ""
            vals             = {}
            packages         = script.packagesSortedSimple()
            loader_with_boot = self._job.get("packages/loader-with-boot", True)

            # stringify data in globalCodes
            for entry in globalCodes:
                globalCodes[entry] = json.dumpsCode(globalCodes[entry])
                # undo damage done by simplejson to raw strings with escapes \\ -> \
                globalCodes[entry] = globalCodes[entry].replace('\\\\\\', '\\').replace(r'\\', '\\')  # " gets tripple escaped, therefore the first .replace()

            vals.update(globalCodes)

            if version=="build":
                vals["Resources"] = json.dumpsCode({})  # TODO: undo Resources from globalCodes!!!
            vals["Boot"] = '"%s"' % boot
            if version == "build":
                vals["BootPart"] = bootCode
            else:
                vals["BootPart"] = ""
                # fake package data
                for key, package in enumerate(packages):
                    vals["BootPart"] += "qx.$$packageData['%d']={};\n" % key

            # Translate part information to JavaScript
            vals["Parts"] = partsMap(script)

            # Translate URI data to JavaScript
            #vals["Uris"] = packageUrisToJS1(packages, version)
            vals["Uris"] = packageUrisToJS(packages, version)
            vals["Uris"] = json.dumpsCode(vals["Uris"])

            # Add potential extra scripts
            vals["UrisBefore"] = []
            if self._job.get("add-script", False):
                additional_scripts = self._job.get("add-script",[])
                for additional_script in additional_scripts:
                    vals["UrisBefore"].append(additional_script["uri"])
            vals["UrisBefore"] = json.dumpsCode(vals["UrisBefore"])

            # Whether boot package is inline
            if version == "source":
                vals["BootIsInline"] = json.dumpsCode(False)
            else:
                vals["BootIsInline"] = json.dumpsCode(loader_with_boot)

            # Closure package information
            cParts = {}
            if version == "build":
                for part in script.parts:
                    # the boot part is only a closure part when it is not
                    # merged into the loader
                    if not loader_with_boot or part != "boot":
                        cParts[part] = True
            vals["ClosureParts"] = json.dumpsCode(cParts)

            # Package Hashes
            vals["PackageHashes"] = {}
            for key, package in enumerate(packages):
                if package.hash:
                    vals["PackageHashes"][key] = package.hash
                else:
                    vals["PackageHashes"][key] = "%d" % key  # fake code package hashes in source ver.
            vals["PackageHashes"] = json.dumpsCode(vals["PackageHashes"])

            # Script hook for qx.$$loader.decodeUris() function
            vals["DecodeUrisPlug"] = ""
            if decodeUrisFile:
                plugCode = filetool.read(self._config.absPath(decodeUrisFile))  # let it bomb if file can't be read
                vals["DecodeUrisPlug"] = plugCode.strip()

            # Enable "?nocache=...." for script loading?
            vals["NoCacheParam"] = "true" if self._job.get("compile-options/uris/add-nocache-param", True) else "false"

            # Add build details
            vals["Build"] = int(time.time()*1000)
            vals["Type"] = version

            # Locate and load loader basic script
            template = loadTemplate(bootCode)

            # Fill template gives result
            result = fillTemplate(vals, template)

            return result


        ##
        # shallow layer above generateBootCode(), and its only client
        def generateBootScript(globalCodes, script, bootPackage="", compileType="build"):
            # Prepare the arguments for generateBootCode() depending on the
            # compile type and return the finished loader script code.

            self._console.info("Generating boot script...")

            if not self._job.get("packages/i18n-with-boot", True):
                # remove I18N info from globalCodes, so they don't go into the loader
                globalCodes["Translations"] = {}
                globalCodes["Locales"]      = {}
            else:
                if compileType == "build":
                    # also remove them here, as this info is now with the packages
                    globalCodes["Translations"] = {}
                    globalCodes["Locales"]      = {}

            plugCodeFile = compConf.get("code/decode-uris-plug", False)
            if compileType == "build":
                # one (file name,) tuple per package
                filepackages = [(x.file,) for x in packages]
                bootContent  = generateBootCode(parts, filepackages, boot, script, compConf, variants, settings, bootPackage, globalCodes, compileType, plugCodeFile, format)
            else:
                # source version: pass through each package's class list
                filepackages = [x.classes for x in packages]
                bootContent  = generateBootCode(parts, filepackages, boot, script, compConf, variants={}, settings={}, bootCode=None, globalCodes=globalCodes, version=compileType, decodeUrisFile=plugCodeFile, format=format)


            return bootContent


        def getPackageData(package):
            # Collect the package's meta data and serialize it as a JS code
            # snippet, terminated by ';\n'.
            meta = {
                "resources":    package.data.resources,
                "translations": package.data.translations,
                "locales":      package.data.locales,
            }
            return json.dumpsCode(meta) + ';\n'


        def compilePackage(packageIndex, package):
            """Compile one package's classes and wrap them with its meta data.

            The boot package (index 0) is emitted plainly; every other
            package is wrapped in qx.Part.$$notifyLoad() so the part loader
            is notified once the package file has been loaded.
            """
            self._console.info("Compiling package #%s:" % packageIndex, False)
            self._console.indent()

            # Compile file content
            pkgCode = self._treeCompiler.compileClasses(package.classes, variants, optimize, format)
            pkgData = getPackageData(package)
            hash    = sha.getHash(pkgData + pkgCode)[:12]  # first 12 chars should be enough

            isBootPackage = packageIndex == 0
            if isBootPackage:
                compiledContent = ("qx.$$packageData['%s']=" % hash) + pkgData + pkgCode
            else:
                compiledContent  = u'''qx.$$packageData['%s']=%s\n''' % (hash, pkgData)
                compiledContent += u'''qx.Part.$$notifyLoad("%s", function() {\n%s\n});''' % (hash, pkgCode)

            #
            package.hash = hash  # to fill qx.$$loader.packageHashes in generateBootScript()

            self._console.debug("Done: %s" % self._computeContentSize(compiledContent))
            self._console.outdent()

            return compiledContent


        ##
        # takes an array of (po-data, locale-data) dict pairs
        # merge all po data and all cldr data in a single dict each
        def mergeTranslationMaps(transMaps):
            mergedPo = {}
            mergedCldr = {}

            for poMap, cldrMap in transMaps:
                # later entries win within a locale, matching dict.update()
                for locale, entries in poMap.items():
                    mergedPo.setdefault(locale, {}).update(entries)
                for locale, entries in cldrMap.items():
                    mergedCldr.setdefault(locale, {}).update(entries)

            return (mergedPo, mergedCldr)


        # -- Main - runCompiled ------------------------------------------------

        # Early return: only "build" and "source" compile types are handled.
        compileType = self._job.get("compile/type", "")
        if compileType not in ("build", "source"):
            return

        packages   = script.packagesSortedSimple()
        parts      = script.parts
        boot       = script.boot
        variants   = script.variants
        libraries  = script.libraries

        self._treeCompiler = treeCompiler
        self._variants     = variants
        self._script       = script

        self._console.info("Generate %s version..." % compileType)
        self._console.indent()

        # - Evaluate job config ---------------------
        # Compile config
        compConf = self._job.get("compile-options")
        compConf = ExtMap(compConf)

        # Whether the code should be formatted
        format = compConf.get("code/format", False)
        script.scriptCompress = compConf.get("paths/gzip", False)

        # Read in settings
        settings = self.getSettings()
        script.settings = settings

        # Read libraries
        libs = self._job.get("library", [])

        # Get translation maps
        locales = compConf.get("code/locales", [])
        translationMaps = self.getTranslationMaps(packages, variants, locales)

        # Read in base file name
        fileRelPath = getOutputFile(compileType)
        filePath    = self._config.absPath(fileRelPath)
        script.baseScriptPath = filePath

        if compileType == "build":
            # read in uri prefixes
            scriptUri = compConf.get('uris/script', 'script')
            scriptUri = Path.posifyPath(scriptUri)
            fileUri   = getFileUri(scriptUri)
            # for resource list
            resourceUri = compConf.get('uris/resource', 'resource')
            resourceUri = Path.posifyPath(resourceUri)
        else:
            # source version needs place where the app HTML ("index.html") lives
            self.approot = self._config.absPath(compConf.get("paths/app-root", ""))
            resourceUri = None
            scriptUri   = None

        # Get global script data (like qxlibraries, qxresources,...)
        globalCodes                = {}
        globalCodes["Settings"]    = settings
        globalCodes["Variants"]    = self.generateVariantsCode(variants)
        globalCodes["Libinfo"]     = self.generateLibInfoCode(libs, format, resourceUri, scriptUri)
        # add synthetic output lib
        if scriptUri: out_sourceUri= scriptUri
        else:
            out_sourceUri = self._computeResourceUri({'class': ".", 'path': os.path.dirname(script.baseScriptPath)}, OsPath(""), rType="class", appRoot=self.approot)
            out_sourceUri = os.path.normpath(out_sourceUri.encodedValue())
        globalCodes["Libinfo"]['__out__'] = { 'sourceUri': out_sourceUri }
        globalCodes["Resources"]    = self.generateResourceInfoCode(script, settings, libraries, format)
        globalCodes["Translations"],\
        globalCodes["Locales"]      = mergeTranslationMaps(translationMaps)

        # Potentally create dedicated I18N packages
        i18n_as_parts = not self._job.get("packages/i18n-with-boot", True)
        if i18n_as_parts:
            script = self.generateI18NParts(script, globalCodes)
            self.writePackages([p for p in script.packages if getattr(p, "__localeflag", False)], script)

        if compileType == "build":

            # - Specific job config ---------------------
            # read in compiler options
            optimize = compConf.get("code/optimize", [])
            self._treeCompiler.setOptimize(optimize)

            # - Generating packages ---------------------
            self._console.info("Generating packages...")
            self._console.indent()

            # NOTE(review): bootPackage is assigned here but never used in
            # this branch -- looks like a leftover; confirm before removing.
            bootPackage = ""
            for packageIndex, package in enumerate(packages):
                package.compiled = compilePackage(packageIndex, package)

            self._console.outdent()
            if not len(packages):
                raise RuntimeError("No valid boot package generated.")

            # - Put loader and packages together -------
            loader_with_boot = self._job.get("packages/loader-with-boot", True)
            # handle loader and boot package
            if not loader_with_boot:
                loadPackage = Package(0)            # make a dummy Package for the loader
                packages.insert(0, loadPackage)

            # attach file names (do this before calling generateBootScript)
            for package, fileName in zip(packages, self.packagesFileNames(script.baseScriptPath, len(packages))):
                package.file = os.path.basename(fileName)
                if self._job.get("compile-options/paths/scripts-add-hash", False):
                    package.file = self._fileNameWithHash(package.file, package.hash)

            # generate and integrate boot code
            if loader_with_boot:
                # merge loader code with first package
                bootCode = generateBootScript(globalCodes, script, packages[0].compiled)
                packages[0].compiled = bootCode
            else:
                loaderCode = generateBootScript(globalCodes, script)
                packages[0].compiled = loaderCode

            # write packages
            self.writePackages(packages, script)

        # ---- 'source' version ------------------------------------------------
        else:

            sourceContent = generateBootScript(globalCodes, script, bootPackage="", compileType=compileType)

            # Construct file name
            resolvedFilePath = self._resolveFileName(filePath, variants, settings)

            # Save result file
            filetool.save(resolvedFilePath, sourceContent)

            if compConf.get("paths/gzip"):
                filetool.gzip(resolvedFilePath, sourceContent)

            self._console.outdent()
            self._console.debug("Done: %s" % self._computeContentSize(sourceContent))
            self._console.outdent()

        self._console.outdent()

        return  # runCompiled()
Beispiel #21
0
    def runPrettyPrinting(self, classes, classesObj):
        "Gather all relevant config settings and pass them to the compiler"

        # Only act when the job provides a "pretty-print" config map.
        if not isinstance(self._job.get("pretty-print", False), types.DictType):
            return

        self._console.info("Pretty-printing code...")
        self._console.indent()
        ppsettings = ExtMap(self._job.get("pretty-print"))  # get the pretty-print config settings

        # init options
        parser = optparse.OptionParser()
        compiler.addCommandLineOptions(parser)
        (options, args) = parser.parse_args([])

        # modify according to config
        options.prettyPrint = True  # turn on pretty-printing
        # config key -> corresponding compiler option attribute
        optionMapping = (
            ('general/indent-string',            'prettypIndentString'),
            ('comments/trailing/keep-column',    'prettypCommentsTrailingKeepColumn'),
            ('comments/trailing/comment-cols',   'prettypCommentsTrailingCommentCols'),
            ('comments/trailing/padding',        'prettypCommentsInlinePadding'),
            ('blocks/align-with-curlies',        'prettypAlignBlockWithCurlies'),
            ('blocks/open-curly/newline-before', 'prettypOpenCurlyNewlineBefore'),
            ('blocks/open-curly/indent-before',  'prettypOpenCurlyIndentBefore'),
        )
        for configKey, optionName in optionMapping:
            if ppsettings.get(configKey, False):
                setattr(options, optionName, ppsettings.get(configKey))

        self._console.info("Pretty-printing files: ", False)
        numClasses = len(classes)
        for pos, classId in enumerate(classes):
            self._console.progress(pos+1, numClasses)
            tree = classesObj[classId].tree()
            compiled = compiler.compile(tree, options)
            filetool.save(self._classes[classId]['path'], compiled)

        self._console.outdent()

        return
Beispiel #22
0
 def getFeature(self, feature, default=None):
     """Look up *feature* in this object's data, returning *default* if absent."""
     return ExtMap(self._data).get(feature, default)
Beispiel #23
0
 def getFeature(self, feature, default=None):
     """Return the value stored under *feature*, or *default* when not set."""
     lookup = ExtMap(self._data)
     return lookup.get(feature, default)
Beispiel #24
0
    def generateResourceInfoCode(self, script, settings, libs, format=False):
        """Collect asset (image and other resource) information from *libs* and
        distribute it to the packages of *script* that use it.

        Returns the resource data as a plain dict {resourceId: value}, or as the
        nested dict of an ExtMap when the "code/resources-tree" compile option
        is set.
        """

        # some helper functions
        def extractAssetPart(libresuri, imguri):
            pre,libsfx,imgsfx = Path.getCommonPrefix(libresuri, imguri) # split libresuri from imguri
            if imgsfx[0] == os.sep: imgsfx = imgsfx[1:]  # strip leading '/'
            return imgsfx                # use the bare img suffix as its asset Id

        ##
        # finds the package that needs this resource <assetId> and adds it
        # (polymorphic in the 4th param, use either simpleResVal *or* combImgObj as kwarg)
        # TODO: this might be very expensive (lots of lookup's)
        def addResourceToPackages(script, classToResourceMap, assetId, simpleResVal=None, combImgObj=None):

            ##
            # match an asset id or a combined image object
            def assetRexMatchItem(assetRex, item):
                if combImgObj:
                    # combined image = object(used:True/False, embeds: {id:ImgInfoFmt}, info:ImgInfoFmt)
                    for embId in item.embeds:
                        if assetRex.match(embId):
                            return True
                    return False
                else:
                    # assetId
                    return assetRex.match(item)

            # --------------------------------------------------------
            if combImgObj:
                resvalue = combImgObj.info.flatten()
                checkval = combImgObj
            else:
                resvalue = simpleResVal
                checkval = assetId

            # a class "uses" the resource if any of its asset regexes matches it
            classesUsing = set(())
            for clazz, assetSet in classToResourceMap.items():
                for assetRex in assetSet:
                    if assetRexMatchItem(assetRex, checkval):
                        classesUsing.add(clazz)
                        break
            # register the resource with every package containing a using class
            for package in script.packages:
                if classesUsing.intersection(set(package.classes)):
                    package.data.resources[assetId] = resvalue
            return

        ##
        # return the (potentially empty) list of embedded image id's of a
        # combined image that are in filteredResources
        def requiredEmbeds(combImg, filteredResourceIds):  # combImg = {info: ImgInfoFmt, embeds: {id:ImgInfoFmt}}
            return (x for x in combImg.embeds if x in filteredResourceIds)


        ##
        # create the final form of the data to be returned by generateResourceInfoCode
        def serialize(filteredResources, combinedImages, resdata):
            for resId, resval in filteredResources.items():
                # build up resdata
                if isinstance(resval, ImgInfoFmt):
                    resvalue = resval.flatten()
                else:  # handle other resources
                    resvalue = resval
                resdata[resId] = resvalue
            return resdata


        ##
        # loop through resources, invoking addResourceToPackages
        def addResourcesToPackages(resdata, combinedImages, classToResourceMap):
            for resId, resvalue in resdata.items():
                # register the resource with the package needing it
                addResourceToPackages(script, classToResourceMap, resId, simpleResVal=resvalue)

            # for combined images, we have to check their embedded images against the packages
            for combId, combImg in combinedImages.items():
                if combId in resdata:
                    addResourceToPackages(script, classToResourceMap, combId, combImgObj=combImg)

            # handle tree structure of resource info
            # NOTE(review): this only rebinds the local name; the caller ignores
            # this function's return value and unwraps resdata itself in the
            # main part -- confirm whether this branch is actually needed
            if resources_tree:
                resdata = resdata._data

            return resdata


        ##
        # get the resource Id and resource value
        def analyseResource(resource, lib):
            ##
            # compute the resource value of an image for the script
            def imgResVal(resource):
                imgId = resource

                # Cache or generate
                # (only trust the cached entry if it is newer than the file on disk)
                if (imgId  in imgLookupTable and
                    imgLookupTable[imgId ]["time"] > os.stat(imgId ).st_mtime):
                    imageInfo = imgLookupTable[imgId ]["content"]
                else:
                    imageInfo = self._imageInfo.getImageInfo(imgId , assetId)
                    imgLookupTable[imgId ] = {"time": time.time(), "content": imageInfo}

                # Now process the image information
                # use an ImgInfoFmt object, to abstract from flat format
                imgfmt = ImgInfoFmt()
                imgfmt.lib = lib['namespace']
                # (Python 2 raise syntax)
                if not 'type' in imageInfo:
                    raise RuntimeError, "Unable to get image info from file: %s" % imgId
                imgfmt.type = imageInfo['type']

                # Add this image
                # imageInfo = {width, height, filetype}
                if not 'width' in imageInfo or not 'height' in imageInfo:
                    raise RuntimeError, "Unable to get image info from file: %s" % imgId
                imgfmt.width  = imageInfo['width']
                imgfmt.height = imageInfo['height']
                imgfmt.type   = imageInfo['type']

                return imgfmt

            # ----------------------------------------------------------
            imgpatt = re.compile(r'\.(png|jpeg|jpg|gif)$', re.I)
            librespath = os.path.normpath(os.path.join(lib['path'], lib['resource']))
            assetId = extractAssetPart(librespath, resource)
            assetId = Path.posifyPath(assetId)
            if imgpatt.search(resource): # handle images
                resvalue = imgResVal(resource)
            else:  # handle other resources
                resvalue = lib['namespace']
            return assetId, resvalue


        ##
        # collect resources from the libs and put them in suitable data structures
        def registerResources(libs, filteredResources, combinedImages):
            skippatt = re.compile(r'\.(meta|py)$', re.I)
            for lib in libs:
                resourceList = self._resourceHandler.findAllResources([lib], None)
                # resourceList = [file1,file2,...]
                for resource in resourceList:
                    if skippatt.search(resource):
                        continue
                    if assetFilter(resource):  # add those anyway
                        resId, resVal            = analyseResource(resource, lib)
                        filteredResources[resId] = resVal
                    if self._resourceHandler.isCombinedImage(resource):  # register those for later evaluation
                        combId, combImgFmt     = analyseResource(resource, lib)
                        combObj                = CombinedImage(resource) # this parses also the .meta file
                        combObj.info           = combImgFmt
                        combinedImages[combId] = combObj
            return filteredResources, combinedImages

        ##
        # apply combined image info to the simple images, improving and extending
        # filteredResources
        def incorporateCombinedImages(filteredResources, combinedImages):
            for combId, combImg in combinedImages.items():  # combImg.embeds = {resId : ImgFmt}
                filteredResourceIds = filteredResources.keys()
                for embId in requiredEmbeds(combImg, filteredResourceIds):
                    # patch simle image info
                    lib = filteredResources[embId].lib                    # keep lib info
                    filteredResources[embId]      = combImg.embeds[embId] # replace info with combined info
                    filteredResources[embId].lib  = lib                   # restore original lib
                    # add combined image
                    if combId not in filteredResourceIds:
                        filteredResources[combId] = combImg.info
            return filteredResources


        # -- main --------------------------------------------------------------

        self._console.info("Analyzing assets...")
        self._console.indent()

        compConf       = self._job.get("compile-options")
        compConf       = ExtMap(compConf)
        resources_tree = compConf.get("code/resources-tree", False)
        resdata        = {}
        if resources_tree:
            resdata = ExtMap()

        self._imageInfo                = ImageInfo(self._console, self._cache)
        assetFilter, classToResourceMap= self._resourceHandler.getResourceFilterByAssets(self._classList)

        # read img cache file
        # NOTE(review): reads through the module-level 'cache', not self._cache -- verify
        cacheId = "imginfo-%s" % self._config._fname
        imgLookupTable = cache.read(cacheId, None)
        if imgLookupTable == None:
            imgLookupTable = {}

        filteredResources = {}          # type {resId : ImgInfoFmt|string}
        combinedImages    = {}          # type {imgId : CombinedImage}
        # 1st pass gathering relevant images and other resources from the libraries
        filteredResources, combinedImages = registerResources(libs, filteredResources,
                                                              combinedImages)
        # 2nd pass patching simple image infos with combined info
        filteredResources = incorporateCombinedImages(filteredResources, combinedImages)

        # 3rd pass consume the info from filteredResources in various ways
        resdata     = serialize(filteredResources, combinedImages, resdata)
        addResourcesToPackages(resdata, combinedImages, classToResourceMap)
        if resources_tree:
            resdata = resdata.getData()

        # write img cache file
        cache.write(cacheId, imgLookupTable)
        self._console.outdent()

        return resdata
Beispiel #25
0
def runLogDependencies(jobconf, script):
    """Log class dependency information of *script*, in the format and to the
    target configured under the job's "log/dependencies" key (json, dot, flare,
    provider, term, or plain console output).
    """

    ##
    # A generator to yield all using dependencies of classes in packages;
    def lookupUsingDeps(packages, includeTransitives, forceFreshDeps=False):

        ##
        # has classId been yielded?
        def hasVisibleDeps(classId):
            # judged from the contents of its deps arrays
            load_names = [x.name for x in classDeps["load"]]
            run_names  = [x.name for x in classDeps["run"]]
            return set(load_names).union(run_names).difference(ignored_names)

        for packageId, package in enumerate(packages):
            for classObj in package.classes:
                classId = classObj.id
                classDeps, _ = classObj.getCombinedDeps(script.classesAll, variants, script.jobconfig, projectClassNames=False, force=forceFreshDeps, tree=classObj._tmp_tree)
                ignored_names = [x.name for x in classDeps["ignore"]]
                loads = classDeps["load"]
                runs = classDeps["run"]

                # strip transitive dependencies
                if not includeTransitives:
                    loads1, loads = loads[:], []
                    for dep in loads1:
                        # if the .requestor is different from classId, it must have been
                        # included through a transitive analysis
                        if dep.requestor == classId:
                            loads.append(dep)

                # project class names
                loads1, loads = loads[:], []
                for dep in loads1:
                    if dep.name not in (x.name for x in loads):
                        loads.append(dep)
                runs1, runs = runs[:], []
                for dep in runs1:
                    if dep.name not in (x.name for x in runs):
                        runs.append(dep)

                # yield dependencies
                for dep in loads:
                    if dep.name not in ignored_names:
                        yield (packageId, classId, dep.name, 'load')

                load_names = [x.name for x in loads]
                for dep in runs:
                    if dep.name not in ignored_names and dep.name not in load_names:
                        yield (packageId, classId, dep.name, 'run')

                if not hasVisibleDeps(classId):
                    # yield two empty relations, so that classId is at least visible to consumer
                    yield (packageId, classId, None, 'load')
                    yield (packageId, classId, None, 'run')

        return


    ##
    # A generator to yield all used-by dependencies of classes in packages;
    # will report used-by relations of a specific class in sequence
    def lookupUsedByDeps(packages, includeTransitives, forceFreshDeps=False):

        depsMap = {}

        # build up depsMap {"classId" : ("packageId", [<load_deps>,...], [<run_deps>, ...]) }
        for packageId, package in enumerate(packages):
            for classObj in package.classes:
                classId = classObj.id
                if classId not in depsMap:
                    depsMap[classId] = (packageId, [], [])
                classDeps, _ = classObj.getCombinedDeps(script.classesAll, variants, script.jobconfig, projectClassNames=False, force=forceFreshDeps)
                ignored_names = [x.name for x in classDeps["ignore"]]
                loads = classDeps["load"]
                runs  = classDeps["run"]

                # strip transitive dependencies
                if not includeTransitives:
                    loads1, loads = loads[:], []
                    for dep in loads1:
                        # if the .requestor is different from classId, it must be included
                        # through a transitive analysis
                        if dep.requestor == classId:
                            loads.append(dep)

                # project class names
                loads1, loads = loads[:], []
                for dep in loads1:
                    if dep.name not in (x.name for x in loads):
                        loads.append(dep)
                runs1, runs = runs[:], []
                for dep in runs1:
                    if dep.name not in (x.name for x in runs):
                        runs.append(dep)

                # collect dependencies
                for dep in loads:
                    if dep.name not in ignored_names:
                        if dep.name not in depsMap:
                            depsMap[dep.name] = (packageId, [], [])  # the packageId is bogus here
                        depsMap[dep.name][1].append(classId)
                for dep in runs:
                    if dep.name not in ignored_names:
                        if dep.name not in depsMap:
                            depsMap[dep.name] = (packageId, [], [])
                        depsMap[dep.name][2].append(classId)

        # yield depsMap
        for depId, depVal in depsMap.items():
            packageId   = depVal[0]
            usedByLoad  = depVal[1]
            usedByRun   = depVal[2]

            for classId in usedByLoad:
                yield (packageId, depId, classId, 'load')

            for classId in usedByRun:
                yield (packageId, depId, classId, 'run')

            if not usedByLoad + usedByRun: # this class isn't used at all
                # yield two empty relations, so that classId is at least visible to consumer
                yield (packageId, depId, None, 'load')
                yield (packageId, depId, None, 'run')

        return


    ##
    # write dependencies as a {classId: {"load":[...], "run":[...]}} JSON file
    def depsToJsonFile(classDepsIter, depsLogConf):
        data = {}
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if classId not in data:
                data[classId] = {}
                data[classId]["load"] = []
                data[classId]["run"] = []

            data[classId][loadOrRun].append(depId)

        file = depsLogConf.get('json/file', "deps.json")
        console.info("Writing dependency data to file: %s" % file)
        pretty = depsLogConf.get('json/pretty', None)
        if pretty:
            indent     = 2
            separators = (', ', ': ')
        else:
            indent     = None
            separators = (',', ':')
        filetool.save(file, json.dumps(data, sort_keys=True, indent=indent, separators=separators))

        return


    ##
    # write dependencies in the format used by the CodeProvider, including
    # resource, translation and CLDR dependencies of the classes
    def depsToProviderFormat(classDepsIter, depsLogConf):
        ##
        # duplicates CodeProvider.passesOutputFilter
        def passesOutputFilter(resId):
            # must match some include expressions
            # NOTE(review): relies on Python 2 filter() returning a list;
            # a Python 3 filter object would always be truthy here
            if not filter(None, [x.search(resId) for x in inclregexps]):  # [None, None, _sre.match, None, _sre.match, ...]
                return False
            # must not match any exclude expressions
            if filter(None, [x.search(resId) for x in exclregexps]):
                return False
            return True

        # ---------------------------------------

        inclregexps = jobconf.get("provider/include", ["*"])
        exclregexps = jobconf.get("provider/exclude", [])
        inclregexps = map(textutil.toRegExp, inclregexps)
        exclregexps = map(textutil.toRegExp, exclregexps)
        replace_dots = depsLogConf.get("json/replace-dots-in", [])
        slashes_keys = 'keys' in replace_dots
        slashes_vals = 'values' in replace_dots

        classToDeps = {}
        # Class deps
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if passesOutputFilter(classId):
                if classId not in classToDeps:
                    classToDeps[classId] = {}
                    classToDeps[classId]["load"] = []
                    classToDeps[classId]["run"] = []
                if depId != None:
                    classToDeps[classId][loadOrRun].append(depId)

        if slashes_vals:
            # transform dep items
            for key, val in classToDeps.items():
                newval = []
                for ldep in val["load"]:
                    newdep = ldep.replace(".", "/")
                    newval.append(newdep)
                val["load"] = newval
                newval = []
                for ldep in val["run"]:
                    newdep = ldep.replace(".", "/")
                    newval.append(newdep)
                val["run"] = newval

        # Resource deps
        # class list
        classObjs = [x for x in script.classesObj if x.id in classToDeps.keys()]
        # map resources to class.resources
        classObjs = Class.mapResourcesToClasses(script.libraries, classObjs, jobconf.get("asset-let", {}))

        for clazz in classObjs:
            reskeys = ["/resource/resources#"+x.id for x in clazz.resources]
            classToDeps[clazz.id]["run"].extend(reskeys)

        # Message key deps
        for classId in classToDeps:
            #classKeys, _ = Locale.getTranslation(classId, {})
            classKeys, _ = script.classesAll[classId].messageStrings({})
            transIds  = set(x['id'] for x in classKeys) # get the msgid's, uniquely
            transIds.update(x['plural'] for x in classKeys if 'plural' in x) # add plural keys
            transKeys = ["/translation/i18n-${lang}#" + x for x in transIds]
            classToDeps[classId]["run"].extend(transKeys)

        # CLDR dependency
        for classId in classToDeps:
            if script.classesAll[classId].getHints("cldr"):
                classToDeps[classId]["run"].append("/locale/locale-${lang}#cldr")

        if slashes_keys:
            # transform dep keys ("qx.Class" -> "qx/Class.js")
            for key, val in classToDeps.items():
                if key.find(".")>-1:
                    newkey = key.replace(".", "/")
                    classToDeps[newkey] = classToDeps[key]
                    del classToDeps[key]

        # sort information for each class (for stable output)
        for classvals in classToDeps.values():
            for key in classvals:
                classvals[key] = sorted(classvals[key], reverse=True)

        # write to file
        file_ = depsLogConf.get('json/file', "deps.json")
        console.info("Writing dependency data to file: %s" % file_)
        pretty = depsLogConf.get('json/pretty', None)
        if pretty:
            indent     = 2
            separators = (', ', ': ')
        else:
            indent     = None
            separators = (',', ':')
        filetool.save(file_, json.dumps(classToDeps, sort_keys=True, indent=indent, separators=separators))

        return


    ##
    # write dependencies as a flat list of {name, size, imports} objects, as
    # consumed by "flare" dependency visualizations
    def depsToFlareFile(classDepsIter, depsLogConf):
        data = {}
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if classId not in data:
                data[classId] = {}
                data[classId]['name'] = classId
                data[classId]["size"] = 1000
                data[classId]["imports"] = []

            if loadOrRun == 'load':
                data[classId]['imports'].append(depId)

        output = []
        for cid in data.keys():
            output.append(data[cid])

        file = depsLogConf.get('flare/file', "flare.json")
        console.info("Writing dependency data to file: %s" % file)
        pretty = depsLogConf.get('flare/pretty', None)
        if pretty:
            indent = 2
            separators = (', ', ': ')
        else:
            indent = None
            separators = (',', ':')
        filetool.save(file, json.dumps(output, sort_keys=True, indent=indent, separators=separators))

        return

    ##
    # render the dependency graph to a Graphviz .dot file
    def depsToDotFile(classDepsIter, depsLogConf):

        ##
        # derive node attributes (color, font size) from the class's size
        def getNodeAttribs(classId, useCompiledSize=False, optimize=[]):
            # return color according to size
            attribs = []
            color = fontsize = None
            sizes = {      # (big-threshold, medium-threshold)
                'compiled' : (8000, 2000),
                'source'   : (20000, 5000)
            }
            compOptions = CompileOptions()
            compOptions.optimize = optimize
            compOptions.variantset = variants
            compOptions.format = True # guess it's most likely
            if classId in script.classesAll:
                if useCompiledSize:
                    fsize = script.classesAll[classId].getCompiledSize(compOptions, featuremap=script._featureMap)
                    mode  = 'compiled'
                else:
                    fsize = script.classesAll[classId].size
                    mode  = 'source'

                if fsize > sizes[mode][0]:
                    color = "red"
                    fontsize = 15
                elif fsize > sizes[mode][1]:
                    color = "green"
                    fontsize = 13
                else:
                    color = "blue"
                    fontsize = 10

            if fontsize:
                attribs.append(("fontsize",fontsize))
            if color:
                attribs.append(("color",color))
            return attribs

        def addEdges(gr, gr1, st, st_nodes, mode):
            # rather gr.add_spanning_tree(st), go through individual edges for coloring
            for v in st.iteritems():
                if None in v:  # drop edges with a None node
                    continue
                v2, v1 = v
                if gr.has_edge(v1,v2):
                    gr1.add_edge(v1, v2, attrs=gr.get_edge_attributes(v1, v2))
                else:
                    gr1.add_edge(v1, v2, )
            if not mode or not mode == "span-tree-only":  # add additional dependencies
                for v1 in st_nodes:                       # that are not covered by the span tree
                    for v2 in st_nodes:
                        if None in (v1, v2):
                            continue
                        if gr.has_edge(v1, v2):
                            gr1.add_edge(v1, v2, attrs=gr.get_edge_attributes(v1, v2))
            return

        def addNodes(gr, st_nodes):
            # rather gr.add_nodes(st), go through indiviudal nodes for coloring
            useCompiledSize = depsLogConf.get("dot/compiled-class-size", True)
            optimize        = jobconf.get("compile-options/code/optimize", [])
            for cid in st_nodes:
                if cid == None:  # None is introduced in st
                    continue
                attribs = getNodeAttribs(cid, useCompiledSize, optimize)
                gr.add_node(cid, attrs=attribs)
            return

        def writeDotFile(gr1, depsLogConf):
            file = depsLogConf.get('dot/file', "deps.dot")
            dot = gr1.write(fmt='dotwt')
            console.info("Writing dependency graph to file: %s" % file)
            filetool.save(file, dot)
            return

        def getFormatMode(depsLogConf):
            format = mode = None
            mode = depsLogConf.get('dot/span-tree-only', None)
            if mode:
                mode = "span-tree-only"
            return format, mode

        def createPrinterGraph(gr, depsLogConf):
            # create a helper graph for output
            format, mode = getFormatMode(depsLogConf)
            searchRoot   = depsLogConf.get('dot/root')  # get the root node for the spanning tree
            searchRadius = depsLogConf.get('dot/radius', None)
            if searchRadius:
                filter    = graph.filters.radius(searchRadius)
            else:
                filter    = graph.filters.null()
            st, op = gr.breadth_first_search(root=searchRoot, filter=filter) # get the spanning tree
            gr1 = graph.digraph()
            # NOTE(review): Python 2 idiom -- keys()/values() must be lists for '+'
            st_nodes = set(st.keys() + st.values())
            addNodes(gr1, st_nodes)
            addEdges(gr, gr1, st, st_nodes, mode)
            return gr1

        # -- Main (depsToDotFile) ------------------------------------------

        phase = depsLogConf.get('phase', None)
        gr    = graph.digraph()
        #graphAddNodes(gr, script.classes)
        graphAddEdges(classDepsIter, gr, phase)
        gr1   = createPrinterGraph(gr, depsLogConf)
        writeDotFile(gr1, depsLogConf)
        return


    ##
    # print dependencies as Prolog-style depends(Class, Loads, Runs) terms
    def depsToTerms(classDepsIter):

        depends = {}
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if classId not in depends:
                depends[classId]         = {}
                depends[classId]['load'] = []
                depends[classId]['run']  = []
            depends[classId][loadOrRun].append(depId)

        for classId, classDeps in depends.items():
            console.info("depends(%r, %r, %r)" % (classId, classDeps['load'], classDeps['run']))

        return


    ##
    # fold the flat (package, class, dep, kind) tuples into a per-class map
    def collectDispersedDependencies(classDepsIter):
        depsMap = {}
        # collect relations of a single class
        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if classId not in depsMap:
                depsMap[classId] = (packageId, [], [])
            if loadOrRun == "load":
                depsMap[classId][1].append(depId)
            elif loadOrRun == "run":
                depsMap[classId][2].append(depId)
        return depsMap


    ##
    # print dependencies to the console, grouped by package and class
    def depsToConsole(classDepsIter, type):
        oPackageId = ''
        console.indent()
        console.indent()
        relstring = "Uses" if type == "using" else "Used by"
        depsMap = collectDispersedDependencies(classDepsIter)

        for classId in sorted(depsMap.keys()):
            classVals = depsMap[classId]
            packageId = classVals[0]
            depsLoad  = classVals[1]
            depsRun   = classVals[2]

            if packageId != oPackageId:
                oPackageId = packageId
                console.outdent()
                console.info("Package %s" % packageId)
                console.indent()
                for partId in parts:
                    if packageId in (x.id for x in parts[partId].packages):
                        console.info("Part %s" % partId)

            console.info("Class: %s" % classId)

            console.indent()
            for depId in sorted(depsLoad):
                console.info("%s: %s (load)" % (relstring, depId))
            for depId in sorted(depsRun):
                console.info("%s: %s (run)"  % (relstring, depId))
            console.outdent()

        console.outdent()
        console.outdent()
        return


    ##
    # add one edge per dependency tuple to the graph; load deps get colored red
    def graphAddEdges(classDepsIter, gr, pLoadOrRun):

        loadAttrs = [('color','red')]
        runAttrs  = []

        for (packageId, classId, depId, loadOrRun) in classDepsIter:
            if not gr.has_node(classId):
                graphAddNode(gr, classId)
            if not gr.has_node(depId):
                graphAddNode(gr, depId)
            if loadOrRun == 'load' and pLoadOrRun != "runtime":
                gr.add_edge(classId, depId, attrs = loadAttrs)
            if loadOrRun == 'run' and pLoadOrRun != "loadtime":
                gr.add_edge(classId, depId, attrs = runAttrs)

        return


    def graphAddNodes(gr, clsList):
        for cid in clsList:
            graphAddNode(gr, cid)


    ##
    # add a node to the graph, colored by the class's source size
    def graphAddNode(gr, cid):
        if cid in script.classesAll:
            fsize = script.classesAll[cid].size
            if fsize > 20000:
                color = "red"
            elif fsize > 5000:
                color = "green"
            else:
                color = "blue"
        else:
            color = "blue"
        gr.add_node(cid, attrs=[("color", color)])
        return


    ##
    # dispatch to the output backend selected by depsLogConf's 'format' key
    def logDeps(depsLogConf, type):

        mainformat = depsLogConf.get('format', None)
        includeTransitives = depsLogConf.get('include-transitive-load-deps', True)
        forceFreshDeps = depsLogConf.get('force-fresh-deps', False)

        # TODO: debug
        for cls in (c for p in packages for c in p.classes):
            #print cls.id
            pass

        if type == "using":
            classDepsIter = lookupUsingDeps(packages, includeTransitives, forceFreshDeps)
        else:
            classDepsIter = lookupUsedByDeps(packages, includeTransitives, forceFreshDeps)

        if mainformat == 'dot':
            depsToDotFile(classDepsIter, depsLogConf)
        elif mainformat == 'json':
            depsToJsonFile(classDepsIter, depsLogConf)
        elif mainformat == 'flare':
            depsToFlareFile(classDepsIter, depsLogConf)
        elif mainformat == 'term':
            depsToTerms(classDepsIter)
        elif mainformat == 'provider':
            depsToProviderFormat(classDepsIter, depsLogConf)
        else:
            depsToConsole(classDepsIter, type)

        return

    # -- Main (runLogDependencies) ------------------

    depsLogConf = jobconf.get("log/dependencies", False)
    if not depsLogConf:
       return

    console = Context.console
    console.info("Dependency logging  ", feed=False)
    console.indent()

    packages   = script.packagesSorted()
    parts      = script.parts
    variants   = script.variants
    depsLogConf = ExtMap(depsLogConf)

    type = depsLogConf.get('type', None)
    if type in ("used-by", "using"):
        logDeps(depsLogConf, type)
    else:
        console.error('Dependency log type "%s" not in ["using", "used-by"]; skipping...' % type)

    console.outdent()
    console.dotclear()
    return
Beispiel #26
0
    def runPrettyPrinting(self, classes, classesObj):
        """Gather all relevant config settings and pass them to the compiler.

        Reads the 'pretty-print' section of the job config, maps its keys
        onto the compiler's command-line options, then re-serializes each
        class tree with pretty-printing enabled and saves the result back
        to the class's source path.

        :param classes: iterable of class ids to pretty-print
        :param classesObj: mapping class id -> class object providing .tree()
        """
        # Only act when 'pretty-print' is configured as a settings map.
        if not isinstance(self._job.get("pretty-print", False), types.DictType):
            return

        self._console.info("Pretty-printing code...")
        self._console.indent()
        ppsettings = ExtMap(self._job.get("pretty-print"))  # get the pretty-print config settings

        # Init options with the compiler's own defaults.
        parser  = optparse.OptionParser()
        compiler.addCommandLineOptions(parser)
        (options, args) = parser.parse_args([])

        # Turn on pretty-printing, then overlay configured settings.
        setattr(options, 'prettyPrint', True)

        # Map config paths to compiler option attributes; only truthy config
        # values override the compiler defaults (same semantics as the
        # previous per-key if/setattr chain, with one lookup per key).
        optionMapping = [
            ('general/indent-string',            'prettypIndentString'),
            ('comments/trailing/keep-column',    'prettypCommentsTrailingKeepColumn'),
            ('comments/trailing/comment-cols',   'prettypCommentsTrailingCommentCols'),
            ('comments/trailing/padding',        'prettypCommentsInlinePadding'),
            ('blocks/align-with-curlies',        'prettypAlignBlockWithCurlies'),
            ('blocks/open-curly/newline-before', 'prettypOpenCurlyNewlineBefore'),
            ('blocks/open-curly/indent-before',  'prettypOpenCurlyIndentBefore'),
        ]
        for configPath, optionName in optionMapping:
            value = ppsettings.get(configPath, False)
            if value:
                setattr(options, optionName, value)

        self._console.info("Pretty-printing files: ", False)
        numClasses = len(classes)
        for pos, classId in enumerate(classes):
            self._console.progress(pos+1, numClasses)
            # Re-serialize the class's syntax tree with the assembled options.
            tree = classesObj[classId].tree()
            compiled = compiler.compile(tree, options)
            filetool.save(self._classes[classId]['path'], compiled)

        self._console.outdent()

        return