Code Example #1
File: loader.py Project: emtee40/testingazuan
def getTree(fileDb, fileId, options):
    if not fileDb[fileId].has_key("tree"):
        if options.verbose:
            print "    - Generating tree for %s..." % fileId

        useCache = False
        loadCache = False

        fileEntry = fileDb[fileId]
        filePath = fileEntry["path"]

        if options.cacheDirectory != None:
            cachePath = os.path.join(filetool.normalize(options.cacheDirectory), fileId + "-tree.pcl")
            useCache = True

            if not filetool.checkCache(filePath, cachePath):
                loadCache = True

        if loadCache:
            tree = filetool.readCache(cachePath)
        else:
            tree = treegenerator.createSyntaxTree(getTokens(fileDb, fileId, options))

            if useCache:
                if options.verbose:
                    print "    - Caching tree for %s..." % fileId

                filetool.storeCache(cachePath, tree)

        fileDb[fileId]["tree"] = tree

    return fileDb[fileId]["tree"]
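Each of the loader helpers shown in these examples follows the same memoization pattern: first look for an already-computed entry in fileDb, otherwise try a pickled disk cache under the configured cache directory, otherwise compute the value and write it back to the cache. Below is a minimal, self-contained sketch of that pattern using plain pickle and file modification times instead of the qooxdoo filetool helpers (get_cached and compute_value are illustrative names, not part of the toolchain):

import os
import pickle

def get_cached(file_path, cache_dir, compute_value):
    # No cache directory configured: always recompute.
    if cache_dir is None:
        return compute_value(file_path)

    cache_path = os.path.join(cache_dir, os.path.basename(file_path) + ".pcl")

    # Reuse the cached value only if it is at least as new as the source file.
    if os.path.exists(cache_path) and os.path.getmtime(cache_path) >= os.path.getmtime(file_path):
        with open(cache_path, "rb") as handle:
            return pickle.load(handle)

    value = compute_value(file_path)
    with open(cache_path, "wb") as handle:
        pickle.dump(value, handle)
    return value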
Code Example #2
File: loader.py Project: eean/webrok
def getTree(fileDb, fileId, options):
    if not fileDb[fileId].has_key("tree"):
        if options.verbose:
            print "    - Generating tree for %s..." % fileId

        useCache = False
        loadCache = False

        fileEntry = fileDb[fileId]
        filePath = fileEntry["path"]

        if options.cacheDirectory != None:
            cachePath = os.path.join(filetool.normalize(options.cacheDirectory), fileId + "-tree.pcl")
            useCache = True

            if not filetool.checkCache(filePath, cachePath):
                loadCache = True

        if loadCache:
            tree = filetool.readCache(cachePath)
        else:
            tree = treegenerator.createSyntaxTree(getTokens(fileDb, fileId, options))

            if useCache:
                if options.verbose:
                    print "    - Caching tree for %s..." % fileId

                filetool.storeCache(cachePath, tree)

        fileDb[fileId]["tree"] = tree

    return fileDb[fileId]["tree"]
Code Example #3
File: loader.py Project: technosaurus/samba4-GPL2
def getStrings(fileDb, fileId, options):
  if not fileDb[fileId].has_key("strings"):
    if options.verbose:
      print "    - Searching for strings in %s..." % fileId

    useCache = False
    loadCache = False

    fileEntry = fileDb[fileId]
    filePath = fileEntry["path"]

    if options.cacheDirectory != None:
      cachePath = os.path.join(filetool.normalize(options.cacheDirectory), fileId + "-strings.pcl")
      useCache = True

      if not filetool.checkCache(filePath, cachePath, getInternalModTime(options)):
        loadCache = True

    if loadCache:
      strings = filetool.readCache(cachePath)
    else:
      strings = stringoptimizer.search(getTree(fileDb, fileId, options), options.verbose)

      if useCache:
        if options.verbose:
          print "    - Caching strings for %s..." % fileId

        filetool.storeCache(cachePath, strings)

    fileDb[fileId]["strings"] = strings

  return fileDb[fileId]["strings"]
Code Example #4
File: loader.py Project: technosaurus/samba4-GPL2
def getTokens(fileDb, fileId, options):
  if not fileDb[fileId].has_key("tokens"):
    if options.verbose:
      print "    - Generating tokens for %s..." % fileId

    useCache = False
    loadCache = False

    fileEntry = fileDb[fileId]

    filePath = fileEntry["path"]
    fileEncoding = fileEntry["encoding"]

    if options.cacheDirectory != None:
      cachePath = os.path.join(filetool.normalize(options.cacheDirectory), fileId + "-tokens.pcl")
      useCache = True

      if not filetool.checkCache(filePath, cachePath, getInternalModTime(options)):
        loadCache = True

    if loadCache:
      tokens = filetool.readCache(cachePath)
    else:
      fileContent = filetool.read(filePath, fileEncoding)
      tokens = tokenizer.parseStream(fileContent, fileId)

      if useCache:
        if options.verbose:
          print "    - Caching tokens for %s..." % fileId

        filetool.storeCache(cachePath, tokens)

    fileDb[fileId]["tokens"] = tokens

  return fileDb[fileId]["tokens"]
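Judging from the call sites, filetool.checkCache seems to return a truthy value when the cached file is stale relative to the source file (and, where passed, the generator's own modification time from getInternalModTime), so that `not checkCache(...)` means the cache may be loaded. The following is an illustrative modification-time check with that assumed contract; it is not the actual filetool implementation:

import os

def check_cache(file_path, cache_path, internal_mod_time=None):
    # Assumed contract: return True when the cache must be regenerated.
    if not os.path.exists(cache_path):
        return True

    cache_time = os.path.getmtime(cache_path)

    # Stale if the source file changed after the cache was written...
    if os.path.getmtime(file_path) > cache_time:
        return True

    # ...or if the generator itself is newer than the cached result.
    if internal_mod_time is not None and internal_mod_time > cache_time:
        return True

    return False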
Code Example #5
File: loader.py Project: eean/webrok
def indexClassPath(classPath, listIndex, options, fileDb={}, moduleDb={}):
    classPath = filetool.normalize(classPath)
    counter = 0

    # Search for other indexed lists
    if len(options.classEncoding) > listIndex:
        classEncoding = options.classEncoding[listIndex]
    else:
        classEncoding = "utf-8"

    if len(options.classUri) > listIndex:
        classUri = options.classUri[listIndex]
    else:
        classUri = None

    if len(options.resourceInput) > listIndex:
        resourceInput = options.resourceInput[listIndex]
    else:
        resourceInput = None

    if len(options.resourceOutput) > listIndex:
        resourceOutput = options.resourceOutput[listIndex]
    else:
        resourceOutput = None

    for root, dirs, files in os.walk(classPath):

        # Filter ignored directories
        for ignoredDir in config.DIRIGNORE:
            if ignoredDir in dirs:
                dirs.remove(ignoredDir)

        # Searching for files
        for fileName in files:
            if os.path.splitext(fileName)[1] == config.JSEXT and not fileName.startswith("."):
                filePath = os.path.join(root, fileName)
                filePathId = filePath.replace(classPath + os.sep, "").replace(config.JSEXT, "").replace(os.sep, ".")

                indexFile(
                    filePath,
                    filePathId,
                    classPath,
                    listIndex,
                    classEncoding,
                    classUri,
                    resourceInput,
                    resourceOutput,
                    options,
                    fileDb,
                    moduleDb,
                )
                counter += 1

    return counter
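The filePathId computed inside the loop above turns a file system path below classPath into a dotted class id: strip the class path prefix, drop the .js extension (config.JSEXT), and replace path separators with dots. The same transformation as a small standalone sketch:

import os

def path_to_class_id(file_path, class_path, js_ext=".js"):
    # e.g. class_path="/src", file_path="/src/qx/ui/core/Widget.js" -> "qx.ui.core.Widget"
    relative = file_path.replace(class_path + os.sep, "")
    return relative.replace(js_ext, "").replace(os.sep, ".")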
Code Example #6
def indexSingleScriptInput(classPath,
                           listIndex,
                           options,
                           fileDb={},
                           moduleDb={}):
    classPath = filetool.normalize(classPath)
    counter = 0

    # Search for other indexed lists
    if len(options.classEncoding) > listIndex:
        classEncoding = options.classEncoding[listIndex]
    else:
        classEncoding = "utf-8"

    if len(options.classUri) > listIndex:
        classUri = options.classUri[listIndex]
    else:
        classUri = None

    if len(options.resourceInput) > listIndex:
        resourceInput = options.resourceInput[listIndex]
    else:
        resourceInput = None

    if len(options.resourceOutput) > listIndex:
        resourceOutput = options.resourceOutput[listIndex]
    else:
        resourceOutput = None

    for root, dirs, files in os.walk(classPath):

        # Filter ignored directories
        for ignoredDir in config.DIRIGNORE:
            if ignoredDir in dirs:
                dirs.remove(ignoredDir)

        # Searching for files
        for fileName in files:
            if os.path.splitext(fileName)[1] == config.JSEXT:
                filePath = os.path.join(root, fileName)
                filePathId = filePath.replace(classPath + os.sep, "").replace(
                    config.JSEXT, "").replace(os.sep, ".")

                indexFile(filePath, filePathId, classPath, listIndex,
                          classEncoding, classUri, resourceInput,
                          resourceOutput, options, fileDb, moduleDb)
                counter += 1

    return counter
Code Example #7
File: loader.py Project: eean/webrok
def getTokens(fileDb, fileId, options):
    if not fileDb[fileId].has_key("tokens"):
        if options.verbose:
            print "    - Generating tokens for %s..." % fileId

        useCache = False
        loadCache = False

        fileEntry = fileDb[fileId]

        filePath = fileEntry["path"]
        fileEncoding = fileEntry["encoding"]

        if options.cacheDirectory != None:
            cachePath = os.path.join(filetool.normalize(options.cacheDirectory), fileId + "-tokens.pcl")
            useCache = True

            if not filetool.checkCache(filePath, cachePath):
                loadCache = True

        if loadCache:
            tokens = filetool.readCache(cachePath)
        else:
            fileContent = filetool.read(filePath, fileEncoding)

            # TODO: This hack is necessary because the current parser cannot handle comments
            #       without a context.
            if fileDb[fileId]["meta"]:
                fileContent += "\n(function() {})()"

            tokens = tokenizer.parseStream(fileContent, fileId)

            if useCache:
                if options.verbose:
                    print "    - Caching tokens for %s..." % fileId

                filetool.storeCache(cachePath, tokens)

        fileDb[fileId]["tokens"] = tokens

    return fileDb[fileId]["tokens"]
Code Example #8
File: loader.py Project: emtee40/testingazuan
def getTokens(fileDb, fileId, options):
    if not fileDb[fileId].has_key("tokens"):
        if options.verbose:
            print "    - Generating tokens for %s..." % fileId

        useCache = False
        loadCache = False

        fileEntry = fileDb[fileId]

        filePath = fileEntry["path"]
        fileEncoding = fileEntry["encoding"]

        if options.cacheDirectory != None:
            cachePath = os.path.join(filetool.normalize(options.cacheDirectory), fileId + "-tokens.pcl")
            useCache = True

            if not filetool.checkCache(filePath, cachePath):
                loadCache = True

        if loadCache:
            tokens = filetool.readCache(cachePath)
        else:
            fileContent = filetool.read(filePath, fileEncoding)

            # TODO: This hack is necessary because the current parser cannot handle comments
            #       without a context.
            if fileDb[fileId]["meta"]:
                fileContent += "\n(function() {})()"

            tokens = tokenizer.parseStream(fileContent, fileId)

            if useCache:
                if options.verbose:
                    print "    - Caching tokens for %s..." % fileId

                filetool.storeCache(cachePath, tokens)

        fileDb[fileId]["tokens"] = tokens

    return fileDb[fileId]["tokens"]
Code Example #9
def getTokens(fileDb, fileId, options):
    if not fileDb[fileId].has_key("tokens"):
        if options.verbose:
            print "    - Generating tokens for %s..." % fileId

        useCache = False
        loadCache = False

        fileEntry = fileDb[fileId]

        filePath = fileEntry["path"]
        fileEncoding = fileEntry["encoding"]

        if options.cacheDirectory != None:
            cachePath = os.path.join(
                filetool.normalize(options.cacheDirectory),
                fileId + "-tokens.pcl")
            useCache = True

            if not filetool.checkCache(filePath, cachePath,
                                       getInternalModTime(options)):
                loadCache = True

        if loadCache:
            tokens = filetool.readCache(cachePath)
        else:
            fileContent = filetool.read(filePath, fileEncoding)
            tokens = tokenizer.parseStream(fileContent, fileId)

            if useCache:
                if options.verbose:
                    print "    - Caching tokens for %s..." % fileId

                filetool.storeCache(cachePath, tokens)

        fileDb[fileId]["tokens"] = tokens

    return fileDb[fileId]["tokens"]
Code Example #10
def getStrings(fileDb, fileId, options):
    if not fileDb[fileId].has_key("strings"):
        if options.verbose:
            print "    - Searching for strings in %s..." % fileId

        useCache = False
        loadCache = False

        fileEntry = fileDb[fileId]
        filePath = fileEntry["path"]

        if options.cacheDirectory != None:
            cachePath = os.path.join(
                filetool.normalize(options.cacheDirectory),
                fileId + "-strings.pcl")
            useCache = True

            if not filetool.checkCache(filePath, cachePath,
                                       getInternalModTime(options)):
                loadCache = True

        if loadCache:
            strings = filetool.readCache(cachePath)
        else:
            strings = stringoptimizer.search(getTree(fileDb, fileId, options),
                                             options.verbose)

            if useCache:
                if options.verbose:
                    print "    - Caching strings for %s..." % fileId

                filetool.storeCache(cachePath, strings)

        fileDb[fileId]["strings"] = strings

    return fileDb[fileId]["strings"]
Code Example #11
File: loader.py Project: eean/webrok
def indexFile(
    filePath,
    filePathId,
    classPath,
    listIndex,
    classEncoding,
    classUri,
    resourceInput,
    resourceOutput,
    options,
    fileDb={},
    moduleDb={},
):

    ########################################
    # Checking cache
    ########################################

    useCache = False
    loadCache = False
    cachePath = None

    if options.cacheDirectory != None:
        cachePath = os.path.join(filetool.normalize(options.cacheDirectory), filePathId + "-entry.pcl")
        useCache = True

        if not filetool.checkCache(filePath, cachePath):
            loadCache = True

    ########################################
    # Loading file content / cache
    ########################################

    if loadCache:
        fileEntry = filetool.readCache(cachePath)
        fileId = filePathId

    else:
        fileContent = filetool.read(filePath, classEncoding)

        # Extract ID
        fileContentId = extractFileContentId(fileContent)

        # Search for valid ID
        if fileContentId == None:
            if not filePathId.endswith("__init__"):
                print "    - Could not extract ID from file: %s. Fallback to path %s!" % (filePath, filePathId)
            fileId = filePathId

        else:
            fileId = fileContentId

        if fileId != filePathId:
            print "    - ID mismatch: CONTENT=%s != PATH=%s" % (fileContentId, filePathId)
            if not options.migrateSource:
                sys.exit(1)

        fileEntry = {
            "autoDependencies": False,
            "cached": False,
            "cachePath": cachePath,
            "meta": fileId.endswith("__init__"),
            "ignoreDeps": extractIgnore(fileContent, fileId),
            "optionalDeps": extractOptional(fileContent, fileId),
            "loadtimeDeps": extractLoadtimeDeps(fileContent, fileId),
            "runtimeDeps": extractRuntimeDeps(fileContent, fileId),
            "resources": extractResources(fileContent, fileId),
            "embeds": extractEmbeds(fileContent, fileId),
            "modules": extractModules(fileContent, fileId),
        }

    ########################################
    # Additional data
    ########################################

    # We don't want to cache these items
    fileEntry["path"] = filePath
    fileEntry["pathId"] = filePathId
    fileEntry["encoding"] = classEncoding
    fileEntry["resourceInput"] = resourceInput
    fileEntry["resourceOutput"] = resourceOutput
    fileEntry["classUri"] = classUri
    fileEntry["listIndex"] = listIndex
    fileEntry["classPath"] = classPath

    ########################################
    # Registering file
    ########################################

    # Register to file database
    fileDb[fileId] = fileEntry

    # Register to module database
    for moduleId in fileEntry["modules"]:
        if moduleDb.has_key(moduleId):
            moduleDb[moduleId].append(fileId)
        else:
            moduleDb[moduleId] = [fileId]
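Note that indexFile (like indexClassPath above) declares mutable default arguments (fileDb={}, moduleDb={}); any callers that omit those arguments would share the same dictionaries across calls. The conventional way to express an optional, per-call dictionary is the None default, sketched here with the function body elided (index_file is an illustrative name, not the project's function):

def index_file(file_path, options, file_db=None, module_db=None):
    # Create fresh databases per call instead of reusing one shared default instance.
    if file_db is None:
        file_db = {}
    if module_db is None:
        module_db = {}
    # ... indexing work as in the examples above ...
    return file_db, module_db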
Code Example #12
File: loader.py Project: emtee40/testingazuan
def indexFile(filePath, filePathId, classPath, listIndex, classEncoding, classUri, resourceInput, resourceOutput, options, fileDb={}, moduleDb={}):

    ########################################
    # Checking cache
    ########################################

    useCache = False
    loadCache = False
    cachePath = None

    if options.cacheDirectory != None:
        cachePath = os.path.join(filetool.normalize(options.cacheDirectory), filePathId + "-entry.pcl")
        useCache = True

        if not filetool.checkCache(filePath, cachePath):
            loadCache = True



    ########################################
    # Loading file content / cache
    ########################################

    if loadCache:
        fileEntry = filetool.readCache(cachePath)
        fileId = filePathId

    else:
        fileContent = filetool.read(filePath, classEncoding)

        # Extract ID
        fileContentId = extractFileContentId(fileContent)

        # Search for valid ID
        if fileContentId == None:
            if not filePathId.endswith("__init__"):
                print "    - Could not extract ID from file: %s. Fallback to path %s!" % (filePath, filePathId)
            fileId = filePathId

        else:
            fileId = fileContentId

        if fileId != filePathId:
            print "    - ID mismatch: CONTENT=%s != PATH=%s" % (fileContentId, filePathId)
            if not options.migrateSource:
                sys.exit(1)

        fileEntry = {
            "autoDependencies" : False,
            "cached" : False,
            "cachePath" : cachePath,
            "meta" : fileId.endswith("__init__"),
            "ignoreDeps" : extractIgnore(fileContent, fileId),
            "optionalDeps" : extractOptional(fileContent, fileId),
            "loadtimeDeps" : extractLoadtimeDeps(fileContent, fileId),
            "runtimeDeps" : extractRuntimeDeps(fileContent, fileId),
            "resources" : extractResources(fileContent, fileId),
            "embeds" : extractEmbeds(fileContent, fileId),
            "modules" : extractModules(fileContent, fileId)
        }



    ########################################
    # Additional data
    ########################################

    # We don't want to cache these items
    fileEntry["path"] = filePath
    fileEntry["pathId"] = filePathId
    fileEntry["encoding"] = classEncoding
    fileEntry["resourceInput"] = resourceInput
    fileEntry["resourceOutput"] = resourceOutput
    fileEntry["classUri"] = classUri
    fileEntry["listIndex"] = listIndex
    fileEntry["classPath"] = classPath


    ########################################
    # Registering file
    ########################################

    # Register to file database
    fileDb[fileId] = fileEntry

    # Register to module database
    for moduleId in fileEntry["modules"]:
        if moduleDb.has_key(moduleId):
            moduleDb[moduleId].append(fileId)
        else:
            moduleDb[moduleId] = [fileId]
Code Example #13
def execute(fileDb, moduleDb, options, pkgid="", names=[]):

    additionalOutput = []

    ######################################################################
    #  SORT OF INCLUDE LIST
    ######################################################################

    print
    print "  SORT OF INCLUDE LIST:"
    print "----------------------------------------------------------------------------"

    if options.verbose:
        print "  * Include (with dependencies): %s" % options.includeWithDeps
        print "  * Include (without dependencies): %s" % options.includeWithoutDeps
        print "  * Exclude (with dependencies): %s" % options.excludeWithDeps
        print "  * Exclude (without dependencies): %s" % options.excludeWithoutDeps

    print "  * Sorting %s classes..." % len(fileDb)

    sortedIncludeList = loader.getSortedList(options, fileDb, moduleDb)

    if len(sortedIncludeList) == 0:
        print "    - No class files to include. Exciting!"
        sys.exit(1)
    else:
        print "    - Including %s classes" % len(sortedIncludeList)

    if options.printIncludes:
        print
        print "  PRINT OF INCLUDE ORDER:"
        print "----------------------------------------------------------------------------"
        print "  * The files will be included in this order:"
        for fileId in sortedIncludeList:
            print "    - %s" % fileId

    if options.printDeps:
        print
        print "  OUTPUT OF DEPENDENCIES:"
        print "----------------------------------------------------------------------------"
        print "  * These are all included files with their dependencies:"
        for fileId in sortedIncludeList:
            print "    - %s" % fileId
            if len(fileDb[fileId]["loadtimeDeps"]) > 0:
                print "      - Loadtime: "
                for depEntry in fileDb[fileId]["loadtimeDeps"]:
                    print "        - %s" % depEntry

            if len(fileDb[fileId]["afterDeps"]) > 0:
                print "      - After: "
                for depEntry in fileDb[fileId]["afterDeps"]:
                    print "        - %s" % depEntry

            if len(fileDb[fileId]["runtimeDeps"]) > 0:
                print "      - Runtime: "
                for depEntry in fileDb[fileId]["runtimeDeps"]:
                    print "        - %s" % depEntry

            if len(fileDb[fileId]["loadDeps"]) > 0:
                print "      - Load: "
                for depEntry in fileDb[fileId]["loadDeps"]:
                    print "        - %s" % depEntry

            if len(fileDb[fileId]["optionalDeps"]) > 0:
                print "      - Optional: "
                for depEntry in fileDb[fileId]["optionalDeps"]:
                    print "        - %s" % depEntry

    ######################################################################
    #  GRAPHVIZ OUTPUT
    ######################################################################

    if options.depDotFile:
        graph.store(fileDb, sortedIncludeList, options)

    ######################################################################
    #  SOURCE MIGRATION
    ######################################################################

    if options.migrateSource:
        print
        print "  SOURCE MIGRATION:"
        print "----------------------------------------------------------------------------"

        print "  * Migrate Source Code..."

        migrator.handle(sortedIncludeList, fileDb, options)

        # Return after migration: Ignore other jobs
        return

    ######################################################################
    #  GENERATION OF PRETTY PRINTED CODE
    ######################################################################

    if options.fixSource:
        print
        print "  FIX SOURCE CODE:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Fixing code..."
        else:
            print "  * Fixing code: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - Reading %s" % fileId

            fileEntry = fileDb[fileId]

            filePath = fileEntry["path"]
            fileEncoding = fileEntry["encoding"]

            fileContent = filetool.read(filePath, fileEncoding)
            fixedContent = textutil.removeTrailingSpaces(
                textutil.tab2Space(textutil.any2Unix(fileContent), 2))

            if fixedContent != fileContent:
                if options.verbose:
                    print "      - Storing modifications..."
                else:
                    sys.stdout.write("!")
                    sys.stdout.flush()

                filetool.save(filePath, fixedContent, fileEncoding)

            elif not options.verbose:
                sys.stdout.write(".")
                sys.stdout.flush()

        if not options.verbose:
            print

        # Return after fixing: Ignore other jobs
        return

    ######################################################################
    #  GENERATION OF PRETTY PRINTED CODE
    ######################################################################

    if options.prettyPrint:
        print
        print "  GENERATION OF PRETTY PRINTED CODE:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Pretty printing..."
        else:
            print "  * Pretty printing: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - Compiling %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            prettyFileContent = compiler.compile(
                loader.getTree(fileDb, fileId, options), True)

            if not prettyFileContent.endswith("\n"):
                prettyFileContent += "\n"

            filetool.save(fileDb[fileId]["path"], prettyFileContent)

        if not options.verbose:
            print

        # Return after pretty print: Ignore other jobs
        return

    ######################################################################
    #  STRING OPTIMIZATION
    ######################################################################

    if options.optimizeStrings:
        print
        print "  STRING OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Searching strings..."
        else:
            print "  * Searching strings: ",

        stringMap = {}

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            localMap = loader.getStrings(fileDb, fileId, options)

            for value in localMap:
                if value in stringMap:
                    stringMap[value] += localMap[value]
                else:
                    stringMap[value] = localMap[value]

        if not options.verbose:
            print

        counter = 0
        for value in stringMap:
            counter += stringMap[value]

        stringList = stringoptimizer.sort(stringMap)

        print "  * Found %s strings (used %s times)" % (len(stringMap),
                                                        counter)

        if options.verbose:
            print "  * Replacing strings..."
        else:
            print "  * Replacing strings: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            stringoptimizer.replace(loader.getTree(fileDb, fileId, options),
                                    stringList, "$" + pkgid, options.verbose)

        if not options.verbose:
            print

        print "  * Generating replacement..."
        additionalOutput.append(
            stringoptimizer.replacement(stringList, "$" + pkgid))

    ######################################################################
    #  LOCAL VARIABLE OPTIMIZATION
    ######################################################################

    if options.optimizeVariables:
        print
        print "  LOCAL VARIABLE OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Optimizing variables..."
        else:
            print "  * Optimizing variables: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            variableoptimizer.search(
                loader.getTree(fileDb, fileId, options), [],
                0,
                "$",
                skipPrefix=options.optimizeVariablesSkipPrefix,
                debug=options.enableDebug)

        if not options.verbose:
            print

    ######################################################################
    #  NAME OBFUSCATION
    ######################################################################

    if options.obfuscateIdentifiers:
        print
        print "  OBFUSCATE IDENTIFIERS:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Obfuscating identifiers..."
        else:
            print "  * Obfuscating identifiers: ",

        counter = 0

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            counter += obfuscator.update(
                loader.getTree(fileDb, fileId, options), names, "$$")

        if not options.verbose:
            print

        print "  * Updated %s names" % counter

    ######################################################################
    #  TOKEN STORAGE
    ######################################################################

    if options.storeTokens:
        print
        print "  TOKEN STORAGE:"
        print "----------------------------------------------------------------------------"

        if options.tokenOutputDirectory == None:
            print "  * You must define the token output directory!"
            sys.exit(1)

        if options.verbose:
            print "  * Storing tokens..."
        else:
            print "  * Storing tokens: ",

        for fileId in sortedIncludeList:
            tokenString = tokenizer.convertTokensToString(
                loader.getTokens(fileDb, fileId, options))

            if options.verbose:
                print "    * writing tokens for %s (%s KB)..." % (
                    fileId, len(tokenString) / 1000.0)
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            filetool.save(
                os.path.join(filetool.normalize(options.tokenOutputDirectory),
                             fileId + config.TOKENEXT), tokenString)

        if not options.verbose:
            print

    ######################################################################
    #  TREE STORAGE
    ######################################################################

    if options.storeTree:
        print
        print "  TREE STORAGE:"
        print "----------------------------------------------------------------------------"

        if options.treeOutputDirectory == None:
            print "  * You must define the tree output directory!"
            sys.exit(1)

        if options.verbose:
            print "  * Storing tree..."
        else:
            print "  * Storing tree: ",

        for fileId in sortedIncludeList:
            treeString = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n" + tree.nodeToXmlString(
                loader.getTree(fileDb, fileId, options))

            if options.verbose:
                print "    * writing tree for %s (%s KB)..." % (
                    fileId, len(treeString) / 1000.0)
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            filetool.save(
                os.path.join(filetool.normalize(options.treeOutputDirectory),
                             fileId + config.XMLEXT), treeString)

        if not options.verbose:
            print

    ######################################################################
    #  GENERATION OF API
    ######################################################################

    if options.generateApiDocumentation:
        print
        print "  GENERATION OF API:"
        print "----------------------------------------------------------------------------"

        if options.apiDocumentationJsonFile == None and options.apiDocumentationXmlFile == None:
            print "  * You must define one of JSON or XML API documentation file!"

        docTree = None

        if options.verbose:
            print "  * Generating API tree..."
        else:
            print "  * Generating API tree: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            docTree = api.createDoc(loader.getTree(fileDb, fileId, options),
                                    docTree)

        if not options.verbose:
            print

        if docTree:
            print "  * Finalizing tree..."
            api.postWorkPackage(docTree, docTree)

        if options.apiDocumentationXmlFile != None:
            print "  * Writing XML API file to %s" % options.apiDocumentationXmlFile

            xmlContent = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n"

            if options.addNewLines:
                xmlContent += "\n" + tree.nodeToXmlString(docTree)
            else:
                xmlContent += tree.nodeToXmlString(docTree, "", "", "")

            filetool.save(options.apiDocumentationXmlFile, xmlContent,
                          options.xmlOutputEncoding)

        if options.apiDocumentationJsonFile != None:
            print "  * Writing JSON API file to %s" % options.apiDocumentationJsonFile

            if options.addNewLines:
                jsonContent = tree.nodeToJsonString(docTree)
            else:
                jsonContent = tree.nodeToJsonString(docTree, "", "", "")

            filetool.save(options.apiDocumentationJsonFile, jsonContent,
                          options.scriptOutputEncoding)

    ######################################################################
    #  CREATE COPY OF RESOURCES
    ######################################################################

    if options.copyResources:

        print
        print "  CREATE COPY OF RESOURCES:"
        print "----------------------------------------------------------------------------"

        resources.copy(options, sortedIncludeList, fileDb)

    ######################################################################
    #  GENERATION OF SETTINGS
    ######################################################################

    if options.generateSourceScript or options.generateCompiledScript:
        settingsStr = ""

        if len(options.defineRuntimeSetting) != 0:
            print
            print "  GENERATION OF SETTINGS:"
            print "----------------------------------------------------------------------------"

            print "  * Processing input data..."
            settingsStr = settings.generate(options)

            if options.settingsScriptFile:
                print "   * Storing result to %s" % options.settingsScriptFile
                filetool.save(options.settingsScriptFile, settingsStr)

                # clear settings for build and source
                settingsStr = ""

    ######################################################################
    #  GENERATION OF SOURCE VERSION
    ######################################################################

    if options.generateSourceScript:
        print
        print "  GENERATION OF SOURCE SCRIPT:"
        print "----------------------------------------------------------------------------"

        if options.sourceScriptFile == None and (
                options.sourceTemplateInputFile == None
                or options.sourceTemplateOutputFile == None):
            print "  * You must define at least one source script file or template input/output."
            sys.exit(1)

        if options.sourceScriptFile:
            options.sourceScriptFile = os.path.normpath(
                options.sourceScriptFile)

        if options.sourceTemplateInputFile:
            options.sourceTemplateInputFile = os.path.normpath(
                options.sourceTemplateInputFile)

        if options.sourceTemplateOutputFile:
            options.sourceTemplateOutputFile = os.path.normpath(
                options.sourceTemplateOutputFile)

        print "  * Generating script block..."

        # Handling line feed setting
        sourceLineFeed = ""
        if options.addNewLines:
            sourceLineFeed = "\n"

        # Generating inline code...
        inlineCode = ""
        inlineCode += settingsStr + sourceLineFeed
        inlineCode += "qx.IS_SOURCE=true;%s" % sourceLineFeed
        inlineCode += "qx.VERSION=\"%s\";%s" % (options.version,
                                                sourceLineFeed)
        inlineCode += "".join(additionalOutput)

        # Generating script block
        scriptBlocks = ""
        scriptBlocks += '<script type="text/javascript">%s</script>' % inlineCode
        for fileId in sortedIncludeList:
            if fileDb[fileId]["classUri"] == None:
                print "  * Missing class URI definition for class path %s." % fileDb[
                    fileId]["classPath"]
                sys.exit(1)

            scriptBlocks += '<script type="text/javascript" src="%s%s"></script>' % (
                os.path.join(fileDb[fileId]["classUri"],
                             fileDb[fileId]["pathId"].replace(
                                 ".", os.sep)), config.JSEXT)
            scriptBlocks += sourceLineFeed

        if options.sourceScriptFile != None:
            print "  * Storing includer as %s..." % options.sourceScriptFile
            sourceScript = "document.write('%s');" % scriptBlocks.replace(
                "'", "\\'")
            if options.addNewLines:
                sourceScript = sourceScript.replace("\n", "\\\n")
            filetool.save(options.sourceScriptFile, sourceScript,
                          options.scriptOutputEncoding)

        if options.sourceTemplateInputFile != None and options.sourceTemplateOutputFile != None:
            print "  * Patching template: %s => %s" % (
                options.sourceTemplateInputFile,
                options.sourceTemplateOutputFile)
            tmpl = filetool.read(options.sourceTemplateInputFile)
            res = tmpl.replace(options.sourceTemplateReplace, scriptBlocks)
            filetool.save(options.sourceTemplateOutputFile, res,
                          options.scriptOutputEncoding)

    ######################################################################
    #  GENERATION OF COMPILED VERSION
    ######################################################################

    if options.generateCompiledScript:
        print
        print "  GENERATION OF COMPILED SCRIPT:"
        print "----------------------------------------------------------------------------"

        buildLineFeed = ""
        if options.addNewLines:
            buildLineFeed = "\n"

        inlineCode = ""
        inlineCode += settingsStr + buildLineFeed
        inlineCode += "qx.IS_SOURCE=false;%s" % buildLineFeed
        inlineCode += "qx.VERSION=\"%s\";%s" % (options.version, buildLineFeed)
        inlineCode += "".join(additionalOutput)

        compiledOutput = inlineCode

        if options.compiledScriptFile == None:
            print "  * You must define the compiled script file!"
            sys.exit(1)

        if options.verbose:
            print "  * Compiling..."
        else:
            print "  * Compiling: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - Compiling %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            compiledFileContent = compiler.compile(
                loader.getTree(fileDb, fileId, options), False,
                options.addNewLines, options.enableDebug)

            if options.addFileIds:
                compiledOutput += "\n\n\n/* ID: " + fileId + " */\n" + compiledFileContent + "\n"
            else:
                compiledOutput += compiledFileContent

            if not compiledOutput.endswith(
                    ";") and not compiledOutput.endswith("\n"):
                compiledOutput += ";"

        if not options.verbose:
            print

        print "  * Storing output as %s..." % options.compiledScriptFile
        filetool.save(options.compiledScriptFile, compiledOutput,
                      options.scriptOutputEncoding)
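In the string-optimization step of the example above, per-file string counts returned by loader.getStrings are merged into a single global map by hand. Assuming getStrings returns a dictionary mapping each string literal to its occurrence count, the same merge can be expressed with collections.Counter, for example:

from collections import Counter

def merge_string_maps(per_file_maps):
    # per_file_maps: iterable of {string: occurrence_count} dicts, one per class file.
    total = Counter()
    for local_map in per_file_maps:
        total.update(local_map)
    return total

# Overall usage count, as reported by the generator:
#   total = merge_string_maps(loader.getStrings(fileDb, fid, options) for fid in sortedIncludeList)
#   used = sum(total.values())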
Code Example #14
File: generator.py Project: technosaurus/samba4-GPL2
def execute(fileDb, moduleDb, options, pkgid="", names=[]):

  additionalOutput = []


  ######################################################################
  #  SORT OF INCLUDE LIST
  ######################################################################

  print
  print "  SORT OF INCLUDE LIST:"
  print "----------------------------------------------------------------------------"

  if options.verbose:
    print "  * Include (with dependencies): %s" % options.includeWithDeps
    print "  * Include (without dependencies): %s" % options.includeWithoutDeps
    print "  * Exclude (with dependencies): %s" % options.excludeWithDeps
    print "  * Exclude (without dependencies): %s" % options.excludeWithoutDeps

  print "  * Sorting %s classes..." % len(fileDb)

  sortedIncludeList = loader.getSortedList(options, fileDb, moduleDb)

  if len(sortedIncludeList) == 0:
  	print "    - No class files to include. Exciting!"
  	sys.exit(1)
  else:
  	print "    - Including %s classes" % len(sortedIncludeList)

  if options.printIncludes:
    print
    print "  PRINT OF INCLUDE ORDER:"
    print "----------------------------------------------------------------------------"
    print "  * The files will be included in this order:"
    for fileId in sortedIncludeList:
      print "    - %s" % fileId

  if options.printDeps:
    print
    print "  OUTPUT OF DEPENDENCIES:"
    print "----------------------------------------------------------------------------"
    print "  * These are all included files with their dependencies:"
    for fileId in sortedIncludeList:
      print "    - %s" % fileId
      if len(fileDb[fileId]["loadtimeDeps"]) > 0:
        print "      - Loadtime: "
        for depEntry in fileDb[fileId]["loadtimeDeps"]:
          print "        - %s" % depEntry

      if len(fileDb[fileId]["afterDeps"]) > 0:
        print "      - After: "
        for depEntry in fileDb[fileId]["afterDeps"]:
          print "        - %s" % depEntry

      if len(fileDb[fileId]["runtimeDeps"]) > 0:
        print "      - Runtime: "
        for depEntry in fileDb[fileId]["runtimeDeps"]:
          print "        - %s" % depEntry

      if len(fileDb[fileId]["loadDeps"]) > 0:
        print "      - Load: "
        for depEntry in fileDb[fileId]["loadDeps"]:
          print "        - %s" % depEntry

      if len(fileDb[fileId]["optionalDeps"]) > 0:
        print "      - Optional: "
        for depEntry in fileDb[fileId]["optionalDeps"]:
          print "        - %s" % depEntry





  ######################################################################
  #  GRAPHVIZ OUTPUT
  ######################################################################

  if options.depDotFile:
    graph.store(fileDb, sortedIncludeList, options)






  ######################################################################
  #  SOURCE MIGRATION
  ######################################################################

  if options.migrateSource:
    print
    print "  SOURCE MIGRATION:"
    print "----------------------------------------------------------------------------"

    print "  * Migrate Source Code..."

    migrator.handle(sortedIncludeList, fileDb, options)

    # Return after migration: Ignore other jobs
    return





  ######################################################################
  #  GENERATION OF PRETTY PRINTED CODE
  ######################################################################

  if options.fixSource:
    print
    print "  FIX SOURCE CODE:"
    print "----------------------------------------------------------------------------"

    if options.verbose:
      print "  * Fixing code..."
    else:
      print "  * Fixing code: ",

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - Reading %s" % fileId

      fileEntry = fileDb[fileId]

      filePath = fileEntry["path"]
      fileEncoding = fileEntry["encoding"]

      fileContent = filetool.read(filePath, fileEncoding)
      fixedContent = textutil.removeTrailingSpaces(textutil.tab2Space(textutil.any2Unix(fileContent), 2))

      if fixedContent != fileContent:
        if options.verbose:
          print "      - Storing modifications..."
        else:
          sys.stdout.write("!")
          sys.stdout.flush()

        filetool.save(filePath, fixedContent, fileEncoding)

      elif not options.verbose:
        sys.stdout.write(".")
        sys.stdout.flush()

    if not options.verbose:
      print

    # Return after fixing: Ignore other jobs
    return






  ######################################################################
  #  GENERATION OF PRETTY PRINTED CODE
  ######################################################################

  if options.prettyPrint:
    print
    print "  GENERATION OF PRETTY PRINTED CODE:"
    print "----------------------------------------------------------------------------"

    if options.verbose:
      print "  * Pretty printing..."
    else:
      print "  * Pretty printing: ",

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - Compiling %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      prettyFileContent = compiler.compile(loader.getTree(fileDb, fileId, options), True)

      if not prettyFileContent.endswith("\n"):
        prettyFileContent += "\n"

      filetool.save(fileDb[fileId]["path"], prettyFileContent)

    if not options.verbose:
      print

    # Return after pretty print: Ignore other jobs
    return



  ######################################################################
  #  STRING OPTIMIZATION
  ######################################################################

  if options.optimizeStrings:
    print
    print "  STRING OPTIMIZATION:"
    print "----------------------------------------------------------------------------"

    if options.verbose:
      print "  * Searching strings..."
    else:
      print "  * Searching strings: ",

    stringMap = {}

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      localMap = loader.getStrings(fileDb, fileId, options)

      for value in localMap:
        if value in stringMap:
          stringMap[value] += localMap[value]
        else:
          stringMap[value] = localMap[value]

    if not options.verbose:
      print

    counter = 0
    for value in stringMap:
      counter += stringMap[value]

    stringList = stringoptimizer.sort(stringMap)

    print "  * Found %s strings (used %s times)" % (len(stringMap), counter)

    if options.verbose:
      print "  * Replacing strings..."
    else:
      print "  * Replacing strings: ",

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      stringoptimizer.replace(loader.getTree(fileDb, fileId, options), stringList, "$" + pkgid, options.verbose)

    if not options.verbose:
      print

    print "  * Generating replacement..."
    additionalOutput.append(stringoptimizer.replacement(stringList, "$" + pkgid))






  ######################################################################
  #  LOCAL VARIABLE OPTIMIZATION
  ######################################################################

  if options.optimizeVariables:
    print
    print "  LOCAL VARIABLE OPTIMIZATION:"
    print "----------------------------------------------------------------------------"

    if options.verbose:
      print "  * Optimizing variables..."
    else:
      print "  * Optimizing variables: ",

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      variableoptimizer.search(loader.getTree(fileDb, fileId, options), [], 0, "$", skipPrefix = options.optimizeVariablesSkipPrefix, debug = options.enableDebug)

    if not options.verbose:
      print






  ######################################################################
  #  NAME OBFUSCATION
  ######################################################################

  if options.obfuscateIdentifiers:
    print
    print "  OBFUSCATE IDENTIFIERS:"
    print "----------------------------------------------------------------------------"

    if options.verbose:
      print "  * Obfuscating identifiers..."
    else:
      print "  * Obfuscating identifiers: ",

    counter = 0

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      counter += obfuscator.update(loader.getTree(fileDb, fileId, options), names, "$$")

    if not options.verbose:
      print

    print "  * Updated %s names" % counter






  ######################################################################
  #  TOKEN STORAGE
  ######################################################################

  if options.storeTokens:
    print
    print "  TOKEN STORAGE:"
    print "----------------------------------------------------------------------------"

    if options.tokenOutputDirectory == None:
      print "  * You must define the token output directory!"
      sys.exit(1)

    if options.verbose:
      print "  * Storing tokens..."
    else:
      print "  * Storing tokens: ",

    for fileId in sortedIncludeList:
      tokenString = tokenizer.convertTokensToString(loader.getTokens(fileDb, fileId, options))

      if options.verbose:
        print "    * writing tokens for %s (%s KB)..." % (fileIdm, len(tokenString) / 1000.0)
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      filetool.save(os.path.join(filetool.normalize(options.tokenOutputDirectory), fileId + config.TOKENEXT), tokenString)

    if not options.verbose:
      print




  ######################################################################
  #  TREE STORAGE
  ######################################################################

  if options.storeTree:
    print
    print "  TREE STORAGE:"
    print "----------------------------------------------------------------------------"

    if options.treeOutputDirectory == None:
      print "  * You must define the tree output directory!"
      sys.exit(1)

    if options.verbose:
      print "  * Storing tree..."
    else:
      print "  * Storing tree: ",

    for fileId in sortedIncludeList:
      treeString = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n" + tree.nodeToXmlString(loader.getTree(fileDb, fileId, options))

      if options.verbose:
        print "    * writing tree for %s (%s KB)..." % (fileId, len(treeString) / 1000.0)
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      filetool.save(os.path.join(filetool.normalize(options.treeOutputDirectory), fileId + config.XMLEXT), treeString)

    if not options.verbose:
      print





  ######################################################################
  #  GENERATION OF API
  ######################################################################

  if options.generateApiDocumentation:
    print
    print "  GENERATION OF API:"
    print "----------------------------------------------------------------------------"

    if options.apiDocumentationJsonFile == None and options.apiDocumentationXmlFile == None:
      print "  * You must define one of JSON or XML API documentation file!"

    docTree = None

    if options.verbose:
      print "  * Generating API tree..."
    else:
      print "  * Generating API tree: ",

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      docTree = api.createDoc(loader.getTree(fileDb, fileId, options), docTree)

    if not options.verbose:
      print

    if docTree:
      print "  * Finalizing tree..."
      api.postWorkPackage(docTree, docTree)

    if options.apiDocumentationXmlFile != None:
      print "  * Writing XML API file to %s" % options.apiDocumentationXmlFile

      xmlContent = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n"

      if options.addNewLines:
        xmlContent += "\n" + tree.nodeToXmlString(docTree)
      else:
        xmlContent += tree.nodeToXmlString(docTree, "", "", "")

      filetool.save(options.apiDocumentationXmlFile, xmlContent, options.xmlOutputEncoding)

    if options.apiDocumentationJsonFile != None:
      print "  * Writing JSON API file to %s" % options.apiDocumentationJsonFile

      if options.addNewLines:
        jsonContent = tree.nodeToJsonString(docTree)
      else:
        jsonContent = tree.nodeToJsonString(docTree, "", "", "")

      filetool.save(options.apiDocumentationJsonFile, jsonContent, options.scriptOutputEncoding)





  ######################################################################
  #  CREATE COPY OF RESOURCES
  ######################################################################

  if options.copyResources:

    print
    print "  CREATE COPY OF RESOURCES:"
    print "----------------------------------------------------------------------------"

    resources.copy(options, sortedIncludeList, fileDb)






  ######################################################################
  #  GENERATION OF SETTINGS
  ######################################################################

  if options.generateSourceScript or options.generateCompiledScript:
    settingsStr = ""

    if len(options.defineRuntimeSetting) != 0:
      print
      print "  GENERATION OF SETTINGS:"
      print "----------------------------------------------------------------------------"

      print "  * Processing input data..."
      settingsStr = settings.generate(options)

      if options.settingsScriptFile:
        print "   * Storing result to %s" % options.settingsScriptFile
        filetool.save(options.settingsScriptFile, settingsStr)

        # clear settings for build and source
        settingsStr = ""





  ######################################################################
  #  GENERATION OF SOURCE VERSION
  ######################################################################

  if options.generateSourceScript:
    print
    print "  GENERATION OF SOURCE SCRIPT:"
    print "----------------------------------------------------------------------------"

    if options.sourceScriptFile == None and (options.sourceTemplateInputFile == None or options.sourceTemplateOutputFile == None):
      print "  * You must define at least one source script file or template input/output."
      sys.exit(1)

    if options.sourceScriptFile:
      options.sourceScriptFile = os.path.normpath(options.sourceScriptFile)

    if options.sourceTemplateInputFile:
      options.sourceTemplateInputFile = os.path.normpath(options.sourceTemplateInputFile)

    if options.sourceTemplateOutputFile:
      options.sourceTemplateOutputFile = os.path.normpath(options.sourceTemplateOutputFile)


    print "  * Generating script block..."

    # Handling line feed setting
    sourceLineFeed = "";
    if options.addNewLines:
      sourceLineFeed = "\n";


    # Generating inline code...
    inlineCode = ""
    inlineCode += settingsStr + sourceLineFeed
    inlineCode += "qx.IS_SOURCE=true;%s" % sourceLineFeed
    inlineCode += "qx.VERSION=\"%s\";%s" % (options.version, sourceLineFeed)
    inlineCode += "".join(additionalOutput)


    # Generating script block
    scriptBlocks = ""
    scriptBlocks += '<script type="text/javascript">%s</script>' % inlineCode
    for fileId in sortedIncludeList:
      if fileDb[fileId]["classUri"] == None:
        print "  * Missing class URI definition for class path %s." % fileDb[fileId]["classPath"]
        sys.exit(1)

      scriptBlocks += '<script type="text/javascript" src="%s%s"></script>' % (os.path.join(fileDb[fileId]["classUri"], fileDb[fileId]["pathId"].replace(".", os.sep)), config.JSEXT)
      scriptBlocks += sourceLineFeed



    if options.sourceScriptFile != None:
      print "  * Storing includer as %s..." % options.sourceScriptFile
      sourceScript = "document.write('%s');" % scriptBlocks.replace("'", "\\'")
      if options.addNewLines:
        sourceScript = sourceScript.replace("\n", "\\\n")
      filetool.save(options.sourceScriptFile, sourceScript, options.scriptOutputEncoding)

    if options.sourceTemplateInputFile != None and options.sourceTemplateOutputFile != None:
      print "  * Patching template: %s => %s" % (options.sourceTemplateInputFile, options.sourceTemplateOutputFile)
      tmpl = filetool.read(options.sourceTemplateInputFile)
      res = tmpl.replace(options.sourceTemplateReplace, scriptBlocks)
      filetool.save(options.sourceTemplateOutputFile, res, options.scriptOutputEncoding)





  ######################################################################
  #  GENERATION OF COMPILED VERSION
  ######################################################################

  if options.generateCompiledScript:
    print
    print "  GENERATION OF COMPILED SCRIPT:"
    print "----------------------------------------------------------------------------"

    buildLineFeed = "";
    if options.addNewLines:
      buildLineFeed = "\n";

    inlineCode = ""
    inlineCode += settingsStr + buildLineFeed
    inlineCode += "qx.IS_SOURCE=false;%s" % buildLineFeed
    inlineCode += "qx.VERSION=\"%s\";%s" % (options.version, buildLineFeed)
    inlineCode += "".join(additionalOutput)

    compiledOutput = inlineCode

    if options.compiledScriptFile == None:
      print "  * You must define the compiled script file!"
      sys.exit(1)

    if options.verbose:
      print "  * Compiling..."
    else:
      print "  * Compiling: ",

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - Compiling %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      compiledFileContent = compiler.compile(loader.getTree(fileDb, fileId, options), False, options.addNewLines, options.enableDebug)

      if options.addFileIds:
        compiledOutput += "\n\n\n/* ID: " + fileId + " */\n" + compiledFileContent + "\n"
      else:
        compiledOutput += compiledFileContent

      if not compiledOutput.endswith(";") and not compiledOutput.endswith("\n"):
        compiledOutput += ";"

    if not options.verbose:
      print

    print "  * Storing output as %s..." % options.compiledScriptFile
    filetool.save(options.compiledScriptFile, compiledOutput, options.scriptOutputEncoding)
コード例 #15
0
def execute(fileDb, moduleDb, options, pkgid="", names=[]):


    ######################################################################
    #  SORT OF INCLUDE LIST
    ######################################################################

    print
    print "  GENERATE CLASS INCLUDE LIST:"
    print "----------------------------------------------------------------------------"

    sortedIncludeList = loader.getSortedList(options, fileDb, moduleDb)

    if options.printIncludes:
        print
        print "  PRINT OF INCLUDE ORDER:"
        print "----------------------------------------------------------------------------"
        print "  * The files will be included in this order:"
        for fileId in sortedIncludeList:
            print "    - %s" % fileId


    if options.printIncludesFile:
        includeFile = open(options.printIncludesFile, "w")
        for fileId in sortedIncludeList:
            includeFile.write(fileId + "\n")
        includeFile.close()


    if options.printDependencies:
        print
        print "  OUTPUT OF DEPENDENCIES:"
        print "----------------------------------------------------------------------------"
        print "  * These are all included files with their dependencies:"
        for fileId in sortedIncludeList:
            print "    - %s" % fileId
            if len(fileDb[fileId]["loadtimeDeps"]) > 0:
                print "      - Loadtime: "
                for depEntry in fileDb[fileId]["loadtimeDeps"]:
                    print "        - %s" % depEntry

            if len(fileDb[fileId]["runtimeDeps"]) > 0:
                print "      - Runtime: "
                for depEntry in fileDb[fileId]["runtimeDeps"]:
                    print "        - %s" % depEntry

            if len(fileDb[fileId]["optionalDeps"]) > 0:
                print "      - Optional: "
                for depEntry in fileDb[fileId]["optionalDeps"]:
                    print "        - %s" % depEntry





    ######################################################################
    #  GRAPHVIZ OUTPUT
    ######################################################################

    if options.depDotFile:
        graph.store(fileDb, sortedIncludeList, options)






    ######################################################################
    #  SOURCE MIGRATION
    ######################################################################

    if options.migrateSource:
        print
        print "  SOURCE MIGRATION:"
        print "----------------------------------------------------------------------------"

        print "  * Migrate Source Code..."

        migrator.handle(fileDb, options, options.migrationTarget,
                        options.migrationInput, options.verbose)

        # Return after migration: Ignore other jobs
        return





    ######################################################################
    #  GENERATION OF FIXED CODE
    ######################################################################

    if options.fixSource:
        print
        print "  FIX SOURCE CODE:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Fixing code..."
        else:
            print "  * Fixing code: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - Reading %s" % fileId

            fileEntry = fileDb[fileId]

            filePath = fileEntry["path"]
            fileEncoding = fileEntry["encoding"]

            fileContent = filetool.read(filePath, fileEncoding)
            fixedContent = textutil.removeTrailingSpaces(textutil.tab2Space(textutil.any2Unix(fileContent), 2))

            if fixedContent != fileContent:
                if options.verbose:
                    print "      - Storing modifications..."
                else:
                    sys.stdout.write("!")
                    sys.stdout.flush()

                filetool.save(filePath, fixedContent, fileEncoding)

            elif not options.verbose:
                sys.stdout.write(".")
                sys.stdout.flush()

        if not options.verbose:
            print

        # Return after fixing: Ignore other jobs
        return






    ######################################################################
    #  GENERATION OF PRETTY PRINTED CODE
    ######################################################################

    if options.prettyPrint:
        print
        print "  GENERATION OF PRETTY PRINTED CODE:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Pretty printing..."
        else:
            print "  * Pretty printing: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - Compiling %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            prettyFileContent = compiler.compile(loader.getTree(fileDb, fileId, options), options)

            if not prettyFileContent.endswith("\n"):
                prettyFileContent += "\n"

            filetool.save(fileDb[fileId]["path"], prettyFileContent)

        if not options.verbose:
            print

        # Return after pretty print: Ignore other jobs
        return





    ######################################################################
    #  INLINE CODE
    ######################################################################

    inlineSourceCode = []
    inlineCompiledCode = []







    ######################################################################
    #  SUPPORT FOR VARIANTS
    ######################################################################

    if len(options.useVariant) > 0 and not options.generateSourceScript:
        print
        print "  VARIANT OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        variantMap = {}
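        # Each entry of options.useVariant is expected as "key:value"; collect them into a map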
        for variant in options.useVariant:
            keyValue = variant.split(":")
            if len(keyValue) != 2:
                print "  * Error: Variants must be specified as key value pair separated by ':'!"
                sys.exit(1)

            variantMap[keyValue[0]] = keyValue[1]

        if options.verbose:
            print "  * Optimizing for variant setup..."
        else:
            print "  * Optimizing for variant setup: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            if variantoptimizer.search(loader.getTree(fileDb, fileId, options), variantMap, fileId, options.verbose):
                if options.verbose:
                    print "      - Modified!"

        if not options.verbose:
            print









    ######################################################################
    #  BASE CALL OPTIMIZATION
    ######################################################################

    if options.optimizeBaseCall:
        print
        print "  BASE CALL OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Optimizing this.base calls..."
        else:
            print "  * Optimizing this.base calls: ",

        counter = 0
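        # counter sums the number of this.base calls patched across all classes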

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            counter += basecalloptimizer.patch(loader.getTree(fileDb, fileId, options))

        if not options.verbose:
            print

        print "  * Optimized %s calls" % counter





    ######################################################################
    #  ACCESSOR OBFUSCATION
    ######################################################################

    if options.obfuscateAccessors:
        print
        print "  ACCESSOR OBFUSCATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Obfuscating..."
        else:
            print "  * Obfuscating: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            accessorobfuscator.process(loader.getTree(fileDb, fileId, options), options.verbose)

        if not options.verbose:
            print




    ######################################################################
    #  STRING OPTIMIZATION
    ######################################################################

    if options.optimizeStrings:
        print
        print "  STRING OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Searching strings..."
        else:
            print "  * Searching strings: ",

        stringMap = {}
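        # Merge per-file string occurrence counts into one global frequency map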

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            localMap = stringoptimizer.search(loader.getTree(fileDb, fileId, options), options.verbose)

            for value in localMap:
                if value in stringMap:
                    stringMap[value] += localMap[value]
                else:
                    stringMap[value] = localMap[value]

        if not options.verbose:
            print

        counter = 0
        for value in stringMap:
            counter += stringMap[value]

        stringList = stringoptimizer.sort(stringMap)

        print "  * Found %s string instances (%s unique)" % (counter, len(stringMap))

        if options.verbose:
            print "  * Replacing strings..."
        else:
            print "  * Replacing strings: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            stringoptimizer.replace(loader.getTree(fileDb, fileId, options), stringList, "$" + pkgid, options.verbose)

        if not options.verbose:
            print

        print "  * Generating replacement..."
        inlineCompiledCode.append(stringoptimizer.replacement(stringList, "$" + pkgid))









    ######################################################################
    #  LOCAL VARIABLE OPTIMIZATION
    ######################################################################

    if options.optimizeVariables or options.obfuscate:
        print
        print "  LOCAL VARIABLE OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Optimizing variables..."
        else:
            print "  * Optimizing variables: ",

        counter = 0
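        # counter tracks how many local variables get optimized across all classes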

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            counter += variableoptimizer.search(
                loader.getTree(fileDb, fileId, options),
                [], 0, 0, "$",
                skipPrefix = options.optimizeVariablesSkipPrefix,
                verbose = options.verbose
            )

        if not options.verbose:
            print

        print "  * Optimized %s variables" % counter





    ######################################################################
    #  PRIVATE MEMBER OPTIMIZATION
    ######################################################################

    if options.optimizePrivate:
        print
        print "  PRIVATE MEMBER OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Optimizing private members..."
        else:
            print "  * Optimizing private members: ",

        counter = 0

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            counter += privateoptimizer.patch("",loader.getTree(fileDb, fileId, options), {}, "$", options.verbose)

        if not options.verbose:
            print

        print "  * Optimized %s keys" % counter





    ######################################################################
    #  OBFUSCATION
    ######################################################################

    if options.obfuscate:
        print
        print "  OBFUSCATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Searching for assignments..."
        else:
            print "  * Searching for assignments: ",

        identifiers = {}
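        # First pass (search) collects assigned identifiers, second pass (patch) rewrites them in every tree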

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            obfuscator.search(loader.getTree(fileDb, fileId, options), identifiers, options.verbose)

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            obfuscator.patch(loader.getTree(fileDb, fileId, options), identifiers, options.verbose)

        if not options.verbose:
            print





    ######################################################################
    #  TOKEN STORAGE
    ######################################################################

    if options.storeTokens:
        print
        print "  TOKEN STORAGE:"
        print "----------------------------------------------------------------------------"

        if options.tokenOutputDirectory == None:
            print "  * You must define the token output directory!"
            sys.exit(1)

        if options.verbose:
            print "  * Storing tokens..."
        else:
            print "  * Storing tokens: ",

        for fileId in sortedIncludeList:
            tokenString = tokenizer.convertTokensToString(loader.getTokens(fileDb, fileId, options))

            if options.verbose:
                print "    * Writing tokens for %s (%s KB)..." % (fileIdm, len(tokenString) / 1000.0)
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            filetool.save(os.path.join(filetool.normalize(options.tokenOutputDirectory), fileId + config.TOKENEXT), tokenString)

        if not options.verbose:
            print







    ######################################################################
    #  TREE STORAGE
    ######################################################################

    if options.storeTree:
        print
        print "  TREE STORAGE:"
        print "----------------------------------------------------------------------------"

        if options.treeOutputDirectory == None:
            print "  * You must define the tree output directory!"
            sys.exit(1)

        if options.verbose:
            print "  * Storing tree..."
        else:
            print "  * Storing tree: ",

        for fileId in sortedIncludeList:
            treeString = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n" + tree.nodeToXmlString(loader.getTree(fileDb, fileId, options))

            if options.verbose:
                print "    * Writing tree for %s (%s KB)..." % (fileId, len(treeString) / 1000.0)
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            filetool.save(os.path.join(filetool.normalize(options.treeOutputDirectory), fileId + config.XMLEXT), treeString)

        if not options.verbose:
            print








    ######################################################################
    #  GENERATION OF API
    ######################################################################

    if options.generateApiDocumentation:
        print
        print "  GENERATION OF API:"
        print "----------------------------------------------------------------------------"

        if options.apiDocumentationJsonFile == None and options.apiDocumentationXmlFile == None:
            print "  * You must define one of JSON or XML API documentation file!"

        docTree = None

        if options.verbose:
            print "  * Generating API tree..."
        else:
            print "  * Generating API tree: ",

        hasDocError = False
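        # Merge the documentation of every class into a single doc tree and remember whether any file failed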
        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()


            (docTree, error) = api.createDoc(loader.getTree(fileDb, fileId, options), docTree)
            hasDocError = hasDocError or error

        if hasDocError:
            print
            print "  * Building API failed!!"
            sys.exit(0)

        if not options.verbose:
            print

        if docTree:
            print "  * Finalizing tree..."
            api.postWorkPackage(docTree, docTree)

        if options.addNewLines:
            childPrefix = "  "
            newLine = "\n"
        else:
            childPrefix = ""
            newLine = ""


        if options.apiDocumentationXmlFile != None:
            print "  * Writing XML API file to %s" % options.apiDocumentationXmlFile

            if options.apiSeparateFiles:
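                # Write the package index to the main file and each class to its own XML file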
                packages = api.packagesToXmlString(docTree, "", childPrefix, newLine)
                filetool.save(options.apiDocumentationXmlFile, packages, options.scriptOutputEncoding)

                for cls in api.classNodeIterator(docTree):
                    classContent = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n"
                    classContent += tree.nodeToXmlString(cls, "", childPrefix, newLine)
                    fileName = os.path.join(os.path.dirname(options.apiDocumentationXmlFile), cls.get("fullName") + ".xml")
                    filetool.save(fileName, classContent, options.xmlOutputEncoding)

            else:
                xmlContent = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n"
                xmlContent += tree.nodeToXmlString(docTree, "", childPrefix, newLine)
                filetool.save(options.apiDocumentationXmlFile, xmlContent, options.xmlOutputEncoding)



        if options.apiDocumentationJsonFile != None:
            print "  * Writing JSON API file to %s" % options.apiDocumentationJsonFile

            if options.apiSeparateFiles:
                packages = api.packagesToJsonString(docTree, "", childPrefix, newLine)
                filetool.save(options.apiDocumentationJsonFile, packages, options.scriptOutputEncoding)

                for cls in api.classNodeIterator(docTree):
                    classContent = tree.nodeToJsonString(cls, "", childPrefix, newLine)
                    fileName = os.path.join(os.path.dirname(options.apiDocumentationJsonFile), cls.get("fullName") + ".js")
                    filetool.save(fileName, classContent, options.scriptOutputEncoding)

            else:
                jsonContent = tree.nodeToJsonString(docTree, "", childPrefix, newLine)
                filetool.save(options.apiDocumentationJsonFile, jsonContent, options.scriptOutputEncoding)

        # create search index
        if options.apiDocumentationIndexFile != None:
            print "  * Generating API index..."
            jsonContent = tree.nodeToIndexString(docTree, "", childPrefix, newLine)
            print "  * Writing API search index to %s" % options.apiDocumentationIndexFile
            filetool.save(options.apiDocumentationIndexFile, jsonContent, options.scriptOutputEncoding)






    ######################################################################
    #  CREATE COPY OF RESOURCES
    ######################################################################

    if options.copyResources:

        print
        print "  CREATE COPY OF RESOURCES:"
        print "----------------------------------------------------------------------------"

        resources.copy(options, sortedIncludeList, fileDb)






    ######################################################################
    #  GENERATION OF SETTINGS
    ######################################################################

    if options.generateSourceScript or options.generateCompiledScript:
        print
        print "  INCLUSION OF SETTINGS:"
        print "----------------------------------------------------------------------------"

        if options.generateSourceScript:
            print "  * Processing settings of source version..."

            settingsArr = options.useSetting[:]
            settingsArr.append("qx.version:%s" % options.version)
            settingsArr.append("qx.isSource:true")
            inlineSourceCode.append(settings.generate(settingsArr, options.addNewLines))

        if options.generateCompiledScript:
            print "  * Processing settings of compiled version..."

            settingsArr = options.useSetting[:]
            settingsArr.append("qx.version:%s" % options.version)
            settingsArr.append("qx.isSource:false")
            inlineCompiledCode.append(settings.generate(settingsArr, options.addNewLines))






    ######################################################################
    #  GENERATION OF VARIANTS
    ######################################################################

    if options.generateSourceScript or options.generateCompiledScript:
        print
        print "  INCLUSION OF VARIANTS:"
        print "----------------------------------------------------------------------------"

        if options.generateSourceScript:
            print "  * Processing variants of source version..."

            variantsArr = options.useVariant[:]
            inlineSourceCode.append(variants.generate(variantsArr, options.addNewLines))

        if options.generateCompiledScript:
            print "  * Processing variants of compiled version..."

            variantsArr = options.useVariant[:]
            inlineCompiledCode.append(variants.generate(variantsArr, options.addNewLines))







    ######################################################################
    #  GENERATION OF SOURCE VERSION
    ######################################################################

    if options.generateSourceScript:
        print
        print "  GENERATION OF SOURCE SCRIPT:"
        print "----------------------------------------------------------------------------"

        if options.sourceScriptFile == None and (options.sourceTemplateInputFile == None or options.sourceTemplateOutputFile == None):
            print "  * You must define at least one source script file or template input/output."
            sys.exit(1)

        if options.sourceScriptFile:
            options.sourceScriptFile = os.path.normpath(options.sourceScriptFile)

        if options.sourceTemplateInputFile:
            options.sourceTemplateInputFile = os.path.normpath(options.sourceTemplateInputFile)

        if options.sourceTemplateOutputFile:
            options.sourceTemplateOutputFile = os.path.normpath(options.sourceTemplateOutputFile)


        print "  * Generating script block..."

        # Handling line feed setting
        sourceLineFeed = "";
        if options.addNewLines:
            sourceLineFeed = "\n";

        # Generating script block
        scriptBlocks = ""
        scriptBlocks += '<script type="text/javascript">%s</script>' % "".join(inlineSourceCode)
        for fileId in sortedIncludeList:
            if fileDb[fileId]["meta"]:
              continue

            if fileDb[fileId]["classUri"] == None:
                print "  * Missing class URI definition for class path %s." % fileDb[fileId]["classPath"]
                sys.exit(1)

            scriptBlocks += '<script type="text/javascript" src="%s%s"></script>' % (os.path.join(fileDb[fileId]["classUri"], fileDb[fileId]["pathId"].replace(".", '/')), config.JSEXT)
            scriptBlocks += sourceLineFeed

        # Writing includer
        if options.sourceScriptFile != None:
            print "  * Storing includer as %s..." % options.sourceScriptFile
            sourceScript = "document.write('%s');" % scriptBlocks.replace("'", "\\'")
            if options.addNewLines:
                sourceScript = sourceScript.replace("\n", "\\\n")
            filetool.save(options.sourceScriptFile, sourceScript, options.scriptOutputEncoding)

        # Patching template
        if options.sourceTemplateInputFile != None and options.sourceTemplateOutputFile != None:
            print "  * Patching template: %s => %s" % (options.sourceTemplateInputFile, options.sourceTemplateOutputFile)
            tmpl = filetool.read(options.sourceTemplateInputFile)
            res = tmpl.replace(options.sourceTemplateReplace, scriptBlocks)
            filetool.save(options.sourceTemplateOutputFile, res, options.scriptOutputEncoding)





    ######################################################################
    #  GENERATION OF COMPILED VERSION
    ######################################################################

    if options.generateCompiledScript:
        print
        print "  GENERATION OF COMPILED SCRIPT:"
        print "----------------------------------------------------------------------------"

        buildLineFeed = "";
        if options.addNewLines:
            buildLineFeed = "\n";

        compiledOutput = "".join(inlineCompiledCode)

        if options.compiledScriptFile == None:
            print "  * You must define the compiled script file!"
            sys.exit(1)

        if options.verbose:
            print "  * Compiling..."
        else:
            print "  * Compiling: ",

        for fileId in sortedIncludeList:
            if fileDb[fileId]["meta"]:
              continue

            if options.verbose:
                print "    - Compiling %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            if options.prettyPrint:
                options.prettyPrint = False  # make sure it's disabled

            compiledFileContent = compiler.compile(loader.getTree(fileDb, fileId, options), options, options.addNewLines, options.verbose)

            if options.addFileIds:
                compiledOutput += "\n\n\n/* ID: " + fileId + " */\n" + compiledFileContent + "\n"
            else:
                compiledOutput += compiledFileContent

            if not compiledOutput.endswith(";") and not compiledOutput.endswith("\n"):
                compiledOutput += ";"

        if not options.verbose:
            print

        print "  * Storing output as %s..." % options.compiledScriptFile
        filetool.save(options.compiledScriptFile, compiledOutput, options.scriptOutputEncoding)