def migrateFile(
                filePath, compiledPatches, compiledInfos,
                hasPatchModule=False, options=None, encoding="UTF-8"):
    """
    Run tree-based and regex-based migration patches over a single file
    and store the result only when the content actually changed.
    """

    logging.info("  - File: %s" % filePath)

    # Load the file as it currently exists on disk
    originalContent = filetool.read(filePath, encoding)
    fileId = loader.extractFileContentId(originalContent)

    patchedContent = originalContent

    # Tree-based patching needs both a patch module and a content id
    if hasPatchModule and fileId is not None:
        import patch

        syntaxTree = treegenerator.createSyntaxTree(tokenizer.parseStream(originalContent))

        if patch.patch(fileId, syntaxTree):
            # The patcher changed the tree, so serialize it back to source
            options.prettyPrint = True  # make sure it's set
            patchedContent = compiler.compile(syntaxTree, options)

    # Regex-driven patches (flag True), then info-only expressions (flag False)
    patchedContent = regtool(patchedContent, compiledPatches, True, filePath)
    patchedContent = regtool(patchedContent, compiledInfos, False, filePath)

    # Persist only when something was actually modified
    if patchedContent != originalContent:
        logging.info("    - %s has been modified. Storing modifications ..." % filePath)
        filetool.save(filePath, patchedContent, encoding)
Exemple #2
0
def main():
  allowed = [ "any2Dos", "any2Mac", "any2Unix", "convertDos2Mac", "convertDos2Unix", "convertMac2Dos", "convertMac2Unix", "convertUnix2Dos", "convertUnix2Mac", "spaces2Tab", "tab2Space" ]
  
  parser = optparse.OptionParser()

  parser.add_option("-q", "--quiet", action="store_false", dest="verbose", default=False, help="Quiet output mode.")
  parser.add_option("-v", "--verbose", action="store_true", dest="verbose", help="Verbose output mode.")
  parser.add_option("-c", "--command", dest="command", default="normalize", help="Normalize a file")
  parser.add_option("--encoding", dest="encoding", default="utf-8", metavar="ENCODING", help="Defines the encoding expected for input files.")

  (options, args) = parser.parse_args()
  
  if not options.command in allowed:
    print "Unallowed command: %s" % options.command
    sys.exit(1)

  if len(args) == 0:
    print "Needs one or more arguments (files) to modify!"
    sys.exit(1)
    
  for fileName in args:
    if options.verbose:
      print "  * Running %s on: %s" % (options.command, fileName)
    
    origFileContent = filetool.read(fileName, options.encoding)
    patchedFileContent = eval(options.command + "(origFileContent)")
    
    if patchedFileContent != origFileContent:
      filetool.save(fileName, patchedFileContent, options.encoding)
Exemple #3
0
def getTokens(fileDb, fileId, options):
  if not fileDb[fileId].has_key("tokens"):
    if options.verbose:
      print "    - Generating tokens for %s..." % fileId

    useCache = False
    loadCache = False

    fileEntry = fileDb[fileId]

    filePath = fileEntry["path"]
    fileEncoding = fileEntry["encoding"]

    if options.cacheDirectory != None:
      cachePath = os.path.join(filetool.normalize(options.cacheDirectory), fileId + "-tokens.pcl")
      useCache = True

      if not filetool.checkCache(filePath, cachePath, getInternalModTime(options)):
        loadCache = True

    if loadCache:
      tokens = filetool.readCache(cachePath)
    else:
      fileContent = filetool.read(filePath, fileEncoding)
      tokens = tokenizer.parseStream(fileContent, fileId)

      if useCache:
        if options.verbose:
          print "    - Caching tokens for %s..." % fileId

        filetool.storeCache(cachePath, tokens)

    fileDb[fileId]["tokens"] = tokens

  return fileDb[fileId]["tokens"]
Exemple #4
0
def main():
    allowed = [
        "any2Dos", "any2Mac", "any2Unix", "convertDos2Mac", "convertDos2Unix",
        "convertMac2Dos", "convertMac2Unix", "convertUnix2Dos",
        "convertUnix2Mac", "spaces2Tab", "tab2Space"
    ]

    parser = optparse.OptionParser()

    parser.add_option("-q",
                      "--quiet",
                      action="store_false",
                      dest="verbose",
                      default=False,
                      help="Quiet output mode.")
    parser.add_option("-v",
                      "--verbose",
                      action="store_true",
                      dest="verbose",
                      help="Verbose output mode.")
    parser.add_option("-c",
                      "--command",
                      dest="command",
                      default="normalize",
                      help="Normalize a file")
    parser.add_option("--encoding",
                      dest="encoding",
                      default="utf-8",
                      metavar="ENCODING",
                      help="Defines the encoding expected for input files.")

    (options, args) = parser.parse_args()

    if not options.command in allowed:
        print "Unallowed command: %s" % options.command
        sys.exit(1)

    if len(args) == 0:
        print "Needs one or more arguments (files) to modify!"
        sys.exit(1)

    for fileName in args:
        if options.verbose:
            print "  * Running %s on: %s" % (options.command, fileName)

        origFileContent = filetool.read(fileName, options.encoding)
        patchedFileContent = eval(options.command + "(origFileContent)")

        if patchedFileContent != origFileContent:
            filetool.save(fileName, patchedFileContent, options.encoding)
Exemple #5
0
def query(path):
  """Extract (revision, folder) from a .svn/entries file below path.

  Returns (None, None) when path or the entries file does not exist or
  the entries file does not match DIRINFO.
  """
  if not os.path.exists(path):
    return None, None

  entries = os.path.join(path, ".svn", "entries")
  if not os.path.exists(entries):
    return None, None

  match = DIRINFO.search(filetool.read(entries))
  if not match:
    return None, None

  revision = match.group(1)
  folder = match.group(2)
  # For tags/branches checkouts the real name is one match group further
  if folder in ("tags", "branches"):
    folder = match.group(3)

  return revision, folder
Exemple #6
0
def readPatchInfoFiles(baseDir):
    """
    Reads all patch/info files from a directory and compiles the containing
    regular expressions.
    Returns a list of compiled REs (the output of entryCompiler).
    """
    patchList = []
    # raw string: "\s" in a plain literal is an invalid escape sequence
    emptyLine = re.compile(r"^\s*$")

    for root, dirs, files in os.walk(baseDir):

        # Filter ignored directories (in place, so os.walk skips them)
        for ignoredDir in config.DIRIGNORE:
            if ignoredDir in dirs:
                dirs.remove(ignoredDir)

        # Collect every file with its content split into lines
        for fileName in files:
            filePath = os.path.join(root, fileName)

            fileContent = textutil.any2Unix(filetool.read(filePath, "utf-8"))
            patchList.append({
                "path": filePath,
                "content": fileContent.split("\n")
            })

            logging.debug("    - %s" % filePath)

    logging.debug("    - Compiling expressions...")

    compiledPatches = []

    for patchFile in patchList:
        logging.debug("      - %s" % os.path.basename(patchFile["path"]))
        for line in patchFile["content"]:
            # Skip blank lines and '#' / '//' comment lines
            if emptyLine.match(line) or line.startswith(
                    "#") or line.startswith("//"):
                continue

            compiled = entryCompiler(line)
            if compiled != None:
                compiledPatches.append(compiled)

    return compiledPatches
Exemple #7
0
def getTokens(fileDb, fileId, options):
    """
    Return the token stream for ``fileId``, generating it on demand and
    memoizing it in ``fileDb[fileId]["tokens"]``.

    When ``options.cacheDirectory`` is set, tokens are additionally
    persisted to disk through filetool's cache helpers.
    """
    if not fileDb[fileId].has_key("tokens"):
        if options.verbose:
            print "    - Generating tokens for %s..." % fileId

        useCache = False   # whether a disk cache may be written
        loadCache = False  # whether a cached entry can be read instead

        fileEntry = fileDb[fileId]

        filePath = fileEntry["path"]
        fileEncoding = fileEntry["encoding"]

        if options.cacheDirectory != None:
            cachePath = os.path.join(filetool.normalize(options.cacheDirectory), fileId + "-tokens.pcl")
            useCache = True

            # NOTE(review): checkCache() returning falsy appears to mean
            # "cached entry still valid" -- confirm against filetool.
            if not filetool.checkCache(filePath, cachePath):
                loadCache = True

        if loadCache:
            tokens = filetool.readCache(cachePath)
        else:
            fileContent = filetool.read(filePath, fileEncoding)

            # TODO: This hack is necessary because the current parser cannot
            #       handle comments without a context.
            if fileDb[fileId]["meta"]:
                fileContent += "\n(function() {})()"

            tokens = tokenizer.parseStream(fileContent, fileId)

            if useCache:
                if options.verbose:
                    print "    - Caching tokens for %s..." % fileId

                filetool.storeCache(cachePath, tokens)

        fileDb[fileId]["tokens"] = tokens

    return fileDb[fileId]["tokens"]
Exemple #8
0
def getTokens(fileDb, fileId, options):
    """
    Return the token stream for ``fileId``, generating it on demand and
    memoizing it in ``fileDb[fileId]["tokens"]``.

    When ``options.cacheDirectory`` is set, tokens are additionally
    persisted to disk through filetool's cache helpers.
    """
    if not fileDb[fileId].has_key("tokens"):
        if options.verbose:
            print "    - Generating tokens for %s..." % fileId

        useCache = False   # whether a disk cache may be written
        loadCache = False  # whether a cached entry can be read instead

        fileEntry = fileDb[fileId]

        filePath = fileEntry["path"]
        fileEncoding = fileEntry["encoding"]

        if options.cacheDirectory != None:
            cachePath = os.path.join(filetool.normalize(options.cacheDirectory), fileId + "-tokens.pcl")
            useCache = True

            # NOTE(review): checkCache() returning falsy appears to mean
            # "cached entry still valid" -- confirm against filetool.
            if not filetool.checkCache(filePath, cachePath):
                loadCache = True

        if loadCache:
            tokens = filetool.readCache(cachePath)
        else:
            fileContent = filetool.read(filePath, fileEncoding)

            # TODO: This hack is necessary because the current parser cannot
            #       handle comments without a context.
            if fileDb[fileId]["meta"]:
                fileContent += "\n(function() {})()"

            tokens = tokenizer.parseStream(fileContent, fileId)

            if useCache:
                if options.verbose:
                    print "    - Caching tokens for %s..." % fileId

                filetool.storeCache(cachePath, tokens)

        fileDb[fileId]["tokens"] = tokens

    return fileDb[fileId]["tokens"]
def readPatchInfoFiles(baseDir):
    """
    Reads all patch/info files from a directory and compiles the containing
    regular expressions.
    Returns a list of compiled REs (the output of entryCompiler).
    """
    patchList = []
    # raw string: "\s" in a plain literal is an invalid escape sequence
    emptyLine = re.compile(r"^\s*$")

    for root, dirs, files in os.walk(baseDir):

        # Filter ignored directories (in place, so os.walk skips them)
        for ignoredDir in config.DIRIGNORE:
            if ignoredDir in dirs:
                dirs.remove(ignoredDir)

        # Collect every file with its content split into lines
        for fileName in files:
            filePath = os.path.join(root, fileName)

            fileContent = textutil.any2Unix(filetool.read(filePath, "utf-8"))
            patchList.append({"path":filePath, "content":fileContent.split("\n")})

            logging.debug("    - %s" % filePath)

    logging.debug("    - Compiling expressions...")

    compiledPatches = []

    for patchFile in patchList:
        logging.debug("      - %s" % os.path.basename(patchFile["path"]))
        for line in patchFile["content"]:
            # Skip blank lines and '#' / '//' comment lines
            if emptyLine.match(line) or line.startswith("#") or line.startswith("//"):
                continue

            compiled = entryCompiler(line)
            if compiled != None:
                compiledPatches.append(compiled)

    return compiledPatches
Exemple #10
0
def getTokens(fileDb, fileId, options):
    if not fileDb[fileId].has_key("tokens"):
        if options.verbose:
            print "    - Generating tokens for %s..." % fileId

        useCache = False
        loadCache = False

        fileEntry = fileDb[fileId]

        filePath = fileEntry["path"]
        fileEncoding = fileEntry["encoding"]

        if options.cacheDirectory != None:
            cachePath = os.path.join(
                filetool.normalize(options.cacheDirectory),
                fileId + "-tokens.pcl")
            useCache = True

            if not filetool.checkCache(filePath, cachePath,
                                       getInternalModTime(options)):
                loadCache = True

        if loadCache:
            tokens = filetool.readCache(cachePath)
        else:
            fileContent = filetool.read(filePath, fileEncoding)
            tokens = tokenizer.parseStream(fileContent, fileId)

            if useCache:
                if options.verbose:
                    print "    - Caching tokens for %s..." % fileId

                filetool.storeCache(cachePath, tokens)

        fileDb[fileId]["tokens"] = tokens

    return fileDb[fileId]["tokens"]
Exemple #11
0
def migrateFile(filePath,
                compiledPatches,
                compiledInfos,
                hasPatchModule=False,
                options=None,
                encoding="UTF-8"):
    """
    Apply migration patches to a single file and write it back only when
    its content changed.

    :param filePath: path of the file to migrate
    :param compiledPatches: compiled regex entries, applied via regtool with flag True
    :param compiledInfos: compiled regex entries, applied via regtool with flag False
    :param hasPatchModule: whether a version-specific ``patch`` module is available
    :param options: compiler options; dereferenced (prettyPrint set) when the
        patch module modifies the tree, so it must not be None in that case
    :param encoding: encoding used to read and write the file
    """

    logging.info("  - File: %s" % filePath)

    # Read in original content
    fileContent = filetool.read(filePath, encoding)

    fileId = loader.extractFileContentId(fileContent)

    # Apply patches
    patchedContent = fileContent

    if hasPatchModule and fileId is not None:

        import patch
        tree = treegenerator.createSyntaxTree(
            tokenizer.parseStream(fileContent))

        # If there were any changes, compile the result
        if patch.patch(fileId, tree):
            options.prettyPrint = True  # make sure it's set
            patchedContent = compiler.compile(tree, options)

    # apply RE patches
    patchedContent = regtool(patchedContent, compiledPatches, True, filePath)
    patchedContent = regtool(patchedContent, compiledInfos, False, filePath)

    # Write file only when something was modified
    if patchedContent != fileContent:
        logging.info("    - %s has been modified. Storing modifications ..." %
                     filePath)
        filetool.save(filePath, patchedContent, encoding)
Exemple #12
0
def query(path):
    """
    Return (revision, folder) for the svn checkout at ``path``.

    Tries the old svn 1.3 XML entries format first, then the svn >= 1.4
    flat-file format. Returns (None, None) when nothing can be extracted.
    """
    if os.path.exists(path):
        entries = os.path.join(path, ".svn", "entries")

        if os.path.exists(entries):

            # old (svn 1.3) XML style format
            try:
                tree = ElementTree.parse(entries)
                for entry in tree.findall("{svn:}entry"):
                    revision = entry.get("revision")
                    url = entry.get("url")
                    if revision != None and url != None:
                        url = url.split("/")

                        folder = url[5]
                        if folder in ["tags", "branches"]:
                            folder = url[6]

                        return revision, folder
            except Exception:
                # Not the XML format (or malformed) -- fall through to the
                # flat-file format used by newer svn versions.
                pass

            # new (svn 1.4) file format
            content = filetool.read(entries)

            mtch = DIRINFO.search(content)
            if mtch:
                folder = mtch.group(2)
                if folder in ["tags", "branches"]:
                    folder = mtch.group(3)

                revision = mtch.group(1)

                return revision, folder

    # Bug fix: previously fell off the end returning a bare None while all
    # success paths return a 2-tuple; make the fallback unpackable.
    return None, None
def query(path):
    """
    Return (revision, folder) for the svn checkout at ``path``.

    Tries the old svn 1.3 XML entries format first, then the svn >= 1.4
    flat-file format. Returns (None, None) when nothing can be extracted.
    """
    if os.path.exists(path):
        entries = os.path.join(path, ".svn", "entries")

        if os.path.exists(entries):

            # old (svn 1.3) XML style format
            try:
                tree = ElementTree.parse(entries)
                for entry in tree.findall("{svn:}entry"):
                    revision = entry.get("revision")
                    url = entry.get("url")
                    if revision != None and url != None:
                        url = url.split("/")

                        folder = url[5]
                        if folder in ["tags", "branches"]:
                            folder = url[6]

                        return revision, folder
            except Exception:
                # Not the XML format (or malformed) -- fall through to the
                # flat-file format used by newer svn versions.
                pass

            # new (svn 1.4) file format
            content = filetool.read(entries)

            mtch = DIRINFO.search(content)
            if mtch:
                folder = mtch.group(2)
                if folder in ["tags", "branches"]:
                    folder = mtch.group(3)

                revision = mtch.group(1)

                return revision, folder

    # Bug fix: previously fell off the end returning a bare None while all
    # success paths return a 2-tuple; make the fallback unpackable.
    return None, None
Exemple #14
0
def handle(fileList, fileDb, options):
  """
  Drive a source migration: locate the version-specific patch module and
  the info/patch expression files for ``options.migrationTarget``, compile
  the expressions, then apply everything to all script and HTML inputs.

  :param fileList: ids of the script files to process (keys into fileDb)
  :param fileDb: per-file metadata; each entry supplies "path" and "encoding"
  :param options: parsed command line options (migrationTarget, verbose, ...)
  """
  # Configuration lives in <script-dir>/migration/<migrationTarget>
  confPath = os.path.join(os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), "migration"), options.migrationTarget)

  infoPath = os.path.join(confPath, "info")
  patchPath = os.path.join(confPath, "patches")

  importedModule = False
  infoList = []
  patchList = []
  htmlList = getHtmlList(options)




  print "  * Number of script input files: %s" % len(fileList)
  print "  * Number of HTML input files: %s" % len(htmlList)
  print "  * Update to version: %s" % options.migrationTarget



  # --- Locate and import the optional tree-patching module (patch.py) ---
  print "  * Searching for patch module..."

  for root, dirs, files in os.walk(confPath):

    # Filter ignored directories
    for ignoredDir in config.DIRIGNORE:
      if ignoredDir in dirs:
        dirs.remove(ignoredDir)

    # Searching for files
    for fileName in files:
      filePath = os.path.join(root, fileName)

      if os.path.splitext(fileName)[1] != config.PYEXT:
        continue

      if fileName == "patch.py":
        print "    - Importing..."

        # Make the module importable, then remember that we have it
        if not root in sys.path:
          sys.path.insert(0, root)

        import patch
        importedModule = True







  emptyLine = re.compile("^\s*$")



  # --- Collect and compile "info" expressions ---
  print "  * Searching for info expression data..."

  for root, dirs, files in os.walk(infoPath):

    # Filter ignored directories
    for ignoredDir in config.DIRIGNORE:
      if ignoredDir in dirs:
        dirs.remove(ignoredDir)

    # Searching for files
    for fileName in files:
      filePath = os.path.join(root, fileName)

      fileContent = textutil.any2Unix(filetool.read(filePath, "utf-8"))
      infoList.append({"path":filePath, "content":fileContent.split("\n")})

      if options.verbose:
        print "    - %s" % filePath

  print "    - Number of info files: %s" % len(infoList)

  print "    - Compiling expressions..."

  compiledInfos = []

  for infoFile in infoList:
    print "      - %s" % os.path.basename(infoFile["path"])
    for line in infoFile["content"]:
      # Skip blank lines and '#' / '//' comment lines
      if emptyLine.match(line) or line.startswith("#") or line.startswith("//"):
        continue

      compiled = entryCompiler(line)
      if compiled != None:
        compiledInfos.append(compiled)

  print "    - Number of infos: %s" % len(compiledInfos)




  # --- Collect and compile "patch" expressions (same scheme as infos) ---
  print "  * Searching for patch expression data..."

  for root, dirs, files in os.walk(patchPath):

    # Filter ignored directories
    for ignoredDir in config.DIRIGNORE:
      if ignoredDir in dirs:
        dirs.remove(ignoredDir)

    # Searching for files
    for fileName in files:
      filePath = os.path.join(root, fileName)

      fileContent = textutil.any2Unix(filetool.read(filePath, "utf-8"))
      patchList.append({"path":filePath, "content":fileContent.split("\n")})

      if options.verbose:
        print "    - %s" % filePath

  print "    - Number of patch files: %s" % len(patchList)

  print "    - Compiling expressions..."

  compiledPatches = []

  for patchFile in patchList:
    print "      - %s" % os.path.basename(patchFile["path"])
    for line in patchFile["content"]:
      # Skip blank lines and '#' / '//' comment lines
      if emptyLine.match(line) or line.startswith("#") or line.startswith("//"):
        continue

      compiled = entryCompiler(line)
      if compiled != None:
        compiledPatches.append(compiled)

  print "    - Number of patches: %s" % len(compiledPatches)








  # --- Apply tree patches and compiled expressions to every input file ---
  print
  print "  FILE PROCESSING:"
  print "----------------------------------------------------------------------------"

  if len(fileList) > 0:
    print "  * Processing script files:"

    for fileId in fileList:
      fileEntry = fileDb[fileId]

      filePath = fileEntry["path"]
      fileEncoding = fileEntry["encoding"]

      print "    - %s" % fileId

      # Read in original content
      fileContent = filetool.read(filePath, fileEncoding)
      patchedContent = fileContent

      # Apply patches (tree-based, only when patch.py was imported above)
      if importedModule:
        tree = treegenerator.createSyntaxTree(tokenizer.parseStream(patchedContent))

        # If there were any changes, compile the result
        if patch.patch(fileId, tree):
          patchedContent = compiler.compile(tree, True)

      patchedContent = regtool(patchedContent, compiledPatches, True, options)
      patchedContent = regtool(patchedContent, compiledInfos, False, options)

      # Write file only when something was modified
      if patchedContent != fileContent:
        print "      - Store modifications..."
        filetool.save(filePath, patchedContent, fileEncoding)

    print "  * Done"



  # HTML files only get the regex-based treatment (no syntax tree)
  if len(htmlList) > 0:
    print "  * Processing HTML files:"

    for filePath in htmlList:
      print "    - %s" % filePath

      # Read in original content
      fileContent = filetool.read(filePath)

      patchedContent = fileContent
      patchedContent = regtool(patchedContent, compiledPatches, True, options)
      patchedContent = regtool(patchedContent, compiledInfos, False, options)

      # Write file only when something was modified
      if patchedContent != fileContent:
        print "      - Store modifications..."
        filetool.save(filePath, patchedContent)

    print "  * Done"
Exemple #15
0
def indexFile(filePath, filePathId, classPath, listIndex, classEncoding, classUri, resourceInput, resourceOutput, options, fileDb={}, moduleDb={}):
    """
    Parse one class file into a fileDb entry and register it in ``fileDb``
    and ``moduleDb``.

    NOTE(review): ``fileDb={}`` / ``moduleDb={}`` are mutable default
    arguments shared across calls. Callers appear to pass both explicitly;
    relying on the defaults would accumulate state between calls -- confirm
    before using the defaults.
    """

    ########################################
    # Checking cache
    ########################################

    useCache = False   # whether a disk cache may be written
    loadCache = False  # whether a cached entry can be read instead
    cachePath = None

    if options.cacheDirectory != None:
        cachePath = os.path.join(filetool.normalize(options.cacheDirectory), filePathId + "-entry.pcl")
        useCache = True

        # NOTE(review): checkCache() returning falsy appears to mean
        # "cached entry still valid" -- confirm against filetool.
        if not filetool.checkCache(filePath, cachePath):
            loadCache = True



    ########################################
    # Loading file content / cache
    ########################################

    if loadCache:
        fileEntry = filetool.readCache(cachePath)
        fileId = filePathId

    else:
        fileContent = filetool.read(filePath, classEncoding)

        # Extract ID
        fileContentId = extractFileContentId(fileContent)

        # Search for valid ID; fall back to the path-derived id if the
        # content does not declare one
        if fileContentId == None:
            if not filePathId.endswith("__init__"):
                print "    - Could not extract ID from file: %s. Fallback to path %s!" % (filePath, filePathId)
            fileId = filePathId

        else:
            fileId = fileContentId

        # A mismatch between declared and path-derived id is fatal unless
        # we are migrating sources
        if fileId != filePathId:
            print "    - ID mismatch: CONTENT=%s != PATH=%s" % (fileContentId, filePathId)
            if not options.migrateSource:
                sys.exit(1)

        fileEntry = {
            "autoDependencies" : False,
            "cached" : False,
            "cachePath" : cachePath,
            "meta" : fileId.endswith("__init__"),
            "ignoreDeps" : extractIgnore(fileContent, fileId),
            "optionalDeps" : extractOptional(fileContent, fileId),
            "loadtimeDeps" : extractLoadtimeDeps(fileContent, fileId),
            "runtimeDeps" : extractRuntimeDeps(fileContent, fileId),
            "resources" : extractResources(fileContent, fileId),
            "embeds" : extractEmbeds(fileContent, fileId),
            "modules" : extractModules(fileContent, fileId)
        }



    ########################################
    # Additional data
    ########################################

    # We don't want to cache these items
    fileEntry["path"] = filePath
    fileEntry["pathId"] = filePathId
    fileEntry["encoding"] = classEncoding
    fileEntry["resourceInput"] = resourceInput
    fileEntry["resourceOutput"] = resourceOutput
    fileEntry["classUri"] = classUri
    fileEntry["listIndex"] = listIndex
    fileEntry["classPath"] = classPath


    ########################################
    # Registering file
    ########################################

    # Register to file database
    fileDb[fileId] = fileEntry

    # Register to module database (a module maps to a list of file ids)
    for moduleId in fileEntry["modules"]:
        if moduleDb.has_key(moduleId):
            moduleDb[moduleId].append(fileId)
        else:
            moduleDb[moduleId] = [fileId]
def execute(fileDb, moduleDb, options, pkgid="", names=[]):


    ######################################################################
    #  SORT OF INCLUDE LIST
    ######################################################################

    print
    print "  GENERATE CLASS INCLUDE LIST:"
    print "----------------------------------------------------------------------------"

    sortedIncludeList = loader.getSortedList(options, fileDb, moduleDb)

    if options.printIncludes:
        print
        print "  PRINT OF INCLUDE ORDER:"
        print "----------------------------------------------------------------------------"
        print "  * The files will be included in this order:"
        for fileId in sortedIncludeList:
            print "    - %s" % fileId


    if options.printIncludesFile:
        includeFile = open(options.printIncludesFile, "w")
        for fileId in sortedIncludeList:
            includeFile.write(fileId + "\n")
        includeFile.close()


    if options.printDependencies:
        print
        print "  OUTPUT OF DEPENDENCIES:"
        print "----------------------------------------------------------------------------"
        print "  * These are all included files with their dependencies:"
        for fileId in sortedIncludeList:
            print "    - %s" % fileId
            if len(fileDb[fileId]["loadtimeDeps"]) > 0:
                print "      - Loadtime: "
                for depEntry in fileDb[fileId]["loadtimeDeps"]:
                    print "        - %s" % depEntry

            if len(fileDb[fileId]["runtimeDeps"]) > 0:
                print "      - Runtime: "
                for depEntry in fileDb[fileId]["runtimeDeps"]:
                    print "        - %s" % depEntry

            if len(fileDb[fileId]["optionalDeps"]) > 0:
                print "      - Optional: "
                for depEntry in fileDb[fileId]["optionalDeps"]:
                    print "        - %s" % depEntry





    ######################################################################
    #  GRAPHVIZ OUTPUT
    ######################################################################

    if options.depDotFile:
        graph.store(fileDb, sortedIncludeList, options)






    ######################################################################
    #  SOURCE MIGRATION
    ######################################################################

    if options.migrateSource:
        print
        print "  SOURCE MIGRATION:"
        print "----------------------------------------------------------------------------"

        print "  * Migrate Source Code..."

        migrator.handle(fileDb, options, options.migrationTarget,
                        options.migrationInput, options.verbose)

        # Return after migration: Ignore other jobs
        return





    ######################################################################
    #  GENERATION OF FIXED CODE
    ######################################################################

    if options.fixSource:
        print
        print "  FIX SOURCE CODE:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Fixing code..."
        else:
            print "  * Fixing code: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - Reading %s" % fileId

            fileEntry = fileDb[fileId]

            filePath = fileEntry["path"]
            fileEncoding = fileEntry["encoding"]

            fileContent = filetool.read(filePath, fileEncoding)
            fixedContent = textutil.removeTrailingSpaces(textutil.tab2Space(textutil.any2Unix(fileContent), 2))

            if fixedContent != fileContent:
                if options.verbose:
                    print "      - Storing modifications..."
                else:
                    sys.stdout.write("!")
                    sys.stdout.flush()

                filetool.save(filePath, fixedContent, fileEncoding)

            elif not options.verbose:
                sys.stdout.write(".")
                sys.stdout.flush()

        if not options.verbose:
            print

        # Return after fixing: Ignore other jobs
        return






    ######################################################################
    #  GENERATION OF PRETTY PRINTED CODE
    ######################################################################

    if options.prettyPrint:
        print
        print "  GENERATION OF PRETTY PRINTED CODE:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Pretty printing..."
        else:
            print "  * Pretty printing: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - Compiling %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            prettyFileContent = compiler.compile(loader.getTree(fileDb, fileId, options), options)

            if not prettyFileContent.endswith("\n"):
                prettyFileContent += "\n"

            filetool.save(fileDb[fileId]["path"], prettyFileContent)

        if not options.verbose:
            print

        # Return after pretty print: Ignore other jobs
        return





    ######################################################################
    #  INLINE CODE
    ######################################################################

    inlineSourceCode = []
    inlineCompiledCode = []







    ######################################################################
    #  SUPPORT FOR VARIANTS
    ######################################################################

    if len(options.useVariant) > 0 and not options.generateSourceScript:
        print
        print "  VARIANT OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        variantMap = {}
        for variant in options.useVariant:
            keyValue = variant.split(":")
            if len(keyValue) != 2:
                print "  * Error: Variants must be specified as key value pair separated by ':'!"
                sys.exit(1)

            variantMap[keyValue[0]] = keyValue[1]

        if options.verbose:
            print "  * Optimizing for variant setup..."
        else:
            print "  * Optimizing for variant setup: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            if variantoptimizer.search(loader.getTree(fileDb, fileId, options), variantMap, fileId, options.verbose):
                if options.verbose:
                    print "      - Modified!"

        if not options.verbose:
            print









    ######################################################################
    #  BASE CALL OPTIMIZATION
    ######################################################################

    if options.optimizeBaseCall:
        print
        print "  BASE CALL OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Optimizing this.base calls..."
        else:
            print "  * Optimizing this.base calls: ",

        counter = 0

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            counter += basecalloptimizer.patch(loader.getTree(fileDb, fileId, options))

        if not options.verbose:
            print

        print "  * Optimized %s calls" % counter





    ######################################################################
    #  ACCESSOR OBFUSCATION
    ######################################################################

    # Run the accessor obfuscator over every included class tree in place;
    # the trees are modified via the fileDb cache, no value is collected here.
    if options.obfuscateAccessors:
        print
        print "  ACCESSOR OBFUSCATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Obfuscating..."
        else:
            print "  * Obfuscating: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            accessorobfuscator.process(loader.getTree(fileDb, fileId, options), options.verbose)

        if not options.verbose:
            print




    ######################################################################
    #  STRING OPTIMIZATION
    ######################################################################

    # Two-phase string optimization: (1) collect per-file string usage counts
    # into a global map, (2) rewrite each tree to reference entries of a
    # shared string table, then emit the table itself into the compiled output.
    if options.optimizeStrings:
        print
        print "  STRING OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Searching strings..."
        else:
            print "  * Searching strings: ",

        # string value -> total occurrence count across all included classes
        stringMap = {}

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            localMap = stringoptimizer.search(loader.getTree(fileDb, fileId, options), options.verbose)

            # Merge this file's counts into the global map.
            for value in localMap:
                if value in stringMap:
                    stringMap[value] += localMap[value]
                else:
                    stringMap[value] = localMap[value]

        if not options.verbose:
            print

        counter = 0  # total string instances (sum of all counts)
        for value in stringMap:
            counter += stringMap[value]

        # Sorted table; index order defines the replacement identifiers.
        stringList = stringoptimizer.sort(stringMap)

        print "  * Found %s string instances (%s unique)" % (counter, len(stringMap))

        if options.verbose:
            print "  * Replacing strings..."
        else:
            print "  * Replacing strings: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            # "$" + pkgid namespaces the generated table per package.
            stringoptimizer.replace(loader.getTree(fileDb, fileId, options), stringList, "$" + pkgid, options.verbose)

        if not options.verbose:
            print

        print "  * Generating replacement..."
        # The table definition must precede the compiled classes in the output.
        inlineCompiledCode.append(stringoptimizer.replacement(stringList, "$" + pkgid))









    ######################################################################
    #  LOCAL VARIABLE OPTIMIZATION
    ######################################################################

    # Shorten local variable names in every class tree. Also runs when full
    # obfuscation is requested, since the obfuscator builds on it.
    if options.optimizeVariables or options.obfuscate:
        print
        print "  LOCAL VARIABLE OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Optimizing variables..."
        else:
            print "  * Optimizing variables: ",

        counter = 0  # total number of renamed variables

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            # "$" is the prefix for generated names; skipPrefix protects
            # user-designated variables from renaming.
            counter += variableoptimizer.search(
                loader.getTree(fileDb, fileId, options),
                [], 0, 0, "$",
                skipPrefix = options.optimizeVariablesSkipPrefix,
                verbose = options.verbose
            )

        if not options.verbose:
            print

        print "  * Optimized %s variables" % counter





    ######################################################################
    #  PRIVATE MEMBER OPTIMIZATION
    ######################################################################

    # Shorten private member names ("__"-style keys) across all class trees,
    # sharing one (initially empty) rename map so renames stay consistent.
    if options.optimizePrivate:
        print
        print "  PRIVATE MEMBER OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Optimizing private members..."
        else:
            print "  * Optimizing private members: ",

        counter = 0  # total number of optimized keys

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            counter += privateoptimizer.patch("",loader.getTree(fileDb, fileId, options), {}, "$", options.verbose)

        if not options.verbose:
            print

        print "  * Optimized %s keys" % counter





    ######################################################################
    #  OBFUSCATION
    ######################################################################

    # Whole-program obfuscation in two passes over the same include list:
    # first collect every assigned identifier, then patch all trees using the
    # completed map (patching cannot start before the scan has seen all files).
    if options.obfuscate:
        print
        print "  OBFUSCATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Searching for assignments..."
        else:
            print "  * Searching for assignments: ",

        identifiers = {}  # identifier -> obfuscation data, filled by search()

        # Pass 1: scan.
        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            obfuscator.search(loader.getTree(fileDb, fileId, options), identifiers, options.verbose)

        # Pass 2: rewrite.
        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            obfuscator.patch(loader.getTree(fileDb, fileId, options), identifiers, options.verbose)

        if not options.verbose:
            print





    ######################################################################
    #  TOKEN STORAGE
    ######################################################################

    if options.storeTokens:
        print
        print "  TOKEN STORAGE:"
        print "----------------------------------------------------------------------------"

        if options.tokenOutputDirectory == None:
            print "  * You must define the token output directory!"
            sys.exit(1)

        if options.verbose:
            print "  * Storing tokens..."
        else:
            print "  * Storing tokens: ",

        for fileId in sortedIncludeList:
            tokenString = tokenizer.convertTokensToString(loader.getTokens(fileDb, fileId, options))

            if options.verbose:
                print "    * Writing tokens for %s (%s KB)..." % (fileIdm, len(tokenString) / 1000.0)
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            filetool.save(os.path.join(filetool.normalize(options.tokenOutputDirectory), fileId + config.TOKENEXT), tokenString)

        if not options.verbose:
            print







    ######################################################################
    #  TREE STORAGE
    ######################################################################

    # Serialize each included class's syntax tree as XML to
    # <treeOutputDirectory>/<fileId><XMLEXT>.
    if options.storeTree:
        print
        print "  TREE STORAGE:"
        print "----------------------------------------------------------------------------"

        if options.treeOutputDirectory == None:
            print "  * You must define the tree output directory!"
            sys.exit(1)

        if options.verbose:
            print "  * Storing tree..."
        else:
            print "  * Storing tree: ",

        for fileId in sortedIncludeList:
            # XML declaration uses the configured output encoding.
            treeString = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n" + tree.nodeToXmlString(loader.getTree(fileDb, fileId, options))

            if options.verbose:
                print "    * Writing tree for %s (%s KB)..." % (fileId, len(treeString) / 1000.0)
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            filetool.save(os.path.join(filetool.normalize(options.treeOutputDirectory), fileId + config.XMLEXT), treeString)

        if not options.verbose:
            print








    ######################################################################
    #  GENERATION OF API
    ######################################################################

    if options.generateApiDocumentation:
        print
        print "  GENERATION OF API:"
        print "----------------------------------------------------------------------------"

        if options.apiDocumentationJsonFile == None and options.apiDocumentationXmlFile == None:
            print "  * You must define one of JSON or XML API documentation file!"

        docTree = None

        if options.verbose:
            print "  * Generating API tree..."
        else:
            print "  * Generating API tree: ",

        hasDocError = False
        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()


            (docTree, error) = api.createDoc(loader.getTree(fileDb, fileId, options), docTree)
            hasDocError = hasDocError or error

        if hasDocError:
            print
            print "  * Building API failed!!"
            sys.exit(0)

        if not options.verbose:
            print

        if docTree:
            print "  * Finalizing tree..."
            api.postWorkPackage(docTree, docTree)

        if options.addNewLines:
            childPrefix = "  "
            newLine = "\n"
        else:
            childPrefix = ""
            newLine = ""


        if options.apiDocumentationXmlFile != None:
            print "  * Writing XML API file to %s" % options.apiDocumentationXmlFile

            if options.apiSeparateFiles:
                packages = api.packagesToXmlString(docTree, "", childPrefix, newLine)
                filetool.save(options.apiDocumentationXmlFile, packages, options.scriptOutputEncoding)

                for cls in api.classNodeIterator(docTree):
                    classContent = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n"
                    classContent += tree.nodeToXmlString(cls, "", childPrefix, newLine)
                    fileName = os.path.join(os.path.dirname(options.apiDocumentationXmlFile), cls.get("fullName") + ".xml")
                    filetool.save(fileName, classContent, options.xmlOutputEncoding)

            else:
                xmlContent = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n"
                xmlContent += tree.nodeToXmlString(docTree, "", childPrefix, newLine)
                filetool.save(options.apiDocumentationXmlFile, xmlContent, options.xmlOutputEncoding)



        if options.apiDocumentationJsonFile != None:
            print "  * Writing JSON API file to %s" % options.apiDocumentationJsonFile

            if options.apiSeparateFiles:
                packages = api.packagesToJsonString(docTree, "", childPrefix, newLine)
                filetool.save(options.apiDocumentationJsonFile, packages, options.scriptOutputEncoding)

                for cls in api.classNodeIterator(docTree):
                    classContent = tree.nodeToJsonString(cls, "", childPrefix, newLine)
                    fileName = os.path.join(os.path.dirname(options.apiDocumentationJsonFile), cls.get("fullName") + ".js")
                    filetool.save(fileName, classContent, options.scriptOutputEncoding)

            else:
                jsonContent = tree.nodeToJsonString(docTree, "", childPrefix, newLine)
                filetool.save(options.apiDocumentationJsonFile, jsonContent, options.scriptOutputEncoding)

        # create search index
        if options.apiDocumentationIndexFile != None:
            print "  * Generating API index..."
            jsonContent = tree.nodeToIndexString(docTree, "", childPrefix, newLine)
            print "  * Writing API search index to %s" % options.apiDocumentationIndexFile
            filetool.save(options.apiDocumentationIndexFile, jsonContent, options.scriptOutputEncoding)






    ######################################################################
    #  CREATE COPY OF RESOURCES
    ######################################################################

    # Copy the static resources of the included classes; the actual copy
    # logic lives in resources.copy().
    if options.copyResources:

        print
        print "  CREATE COPY OF RESOURCES:"
        print "----------------------------------------------------------------------------"

        resources.copy(options, sortedIncludeList, fileDb)






    ######################################################################
    #  GENERATION OF SETTINGS
    ######################################################################

    if options.generateSourceScript or options.generateCompiledScript:
        print
        print "  INCLUSION OF SETTINGS:"
        print "----------------------------------------------------------------------------"

        if options.generateSourceScript:
            print "  * Processing settings of source version..."

            settingsArr = options.useSetting[:]
            settingsArr.append("qx.version:%s" % options.version)
            settingsArr.append("qx.isSource:true")
            inlineSourceCode.append(settings.generate(settingsArr, options.addNewLines))

        if options.generateCompiledScript:
            print "  * Processing settings of compiled version..."

            settingsArr = options.useSetting[:]
            settingsArr.append("qx.version:%s" % options.version)
            settingsArr.append("qx.isSource:false")
            inlineCompiledCode.append(settings.generate(settingsArr, options.addNewLines))






    ######################################################################
    #  GENERATION OF VARIANTS
    ######################################################################

    if options.generateSourceScript or options.generateCompiledScript:
        print
        print "  INCLUSION OF VARIANTS:"
        print "----------------------------------------------------------------------------"

        if options.generateSourceScript:
            print "  * Processing variants of source version..."

            variantsArr = options.useVariant[:]
            inlineSourceCode.append(variants.generate(variantsArr, options.addNewLines))

        if options.generateCompiledScript:
            print "  * Processing variants of compiled version..."

            variantsArr = options.useVariant[:]
            inlineCompiledCode.append(variants.generate(variantsArr, options.addNewLines))







    ######################################################################
    #  GENERATION OF SOURCE VERSION
    ######################################################################

    # Generate the "source" loader: a sequence of <script> tags referencing
    # the uncompiled class files, written either as a document.write()
    # includer script and/or spliced into an HTML template.
    if options.generateSourceScript:
        print
        print "  GENERATION OF SOURCE SCRIPT:"
        print "----------------------------------------------------------------------------"

        if options.sourceScriptFile == None and (options.sourceTemplateInputFile == None or options.sourceTemplateOutputFile == None):
            print "  * You must define at least one source script file or template input/output."
            sys.exit(1)

        if options.sourceScriptFile:
            options.sourceScriptFile = os.path.normpath(options.sourceScriptFile)

        if options.sourceTemplateInputFile:
            options.sourceTemplateInputFile = os.path.normpath(options.sourceTemplateInputFile)

        if options.sourceTemplateOutputFile:
            options.sourceTemplateOutputFile = os.path.normpath(options.sourceTemplateOutputFile)


        print "  * Generating script block..."

        # Handling line feed setting
        sourceLineFeed = "";
        if options.addNewLines:
            sourceLineFeed = "\n";

        # Generating script block
        # First an inline block with the collected bootstrap code, then one
        # <script src=...> tag per included (non-meta) class.
        scriptBlocks = ""
        scriptBlocks += '<script type="text/javascript">%s</script>' % "".join(inlineSourceCode)
        for fileId in sortedIncludeList:
            # Meta classes have no class file of their own.
            if fileDb[fileId]["meta"]:
              continue

            if fileDb[fileId]["classUri"] == None:
                print "  * Missing class URI definition for class path %s." % fileDb[fileId]["classPath"]
                sys.exit(1)

            scriptBlocks += '<script type="text/javascript" src="%s%s"></script>' % (os.path.join(fileDb[fileId]["classUri"], fileDb[fileId]["pathId"].replace(".", '/')), config.JSEXT)
            scriptBlocks += sourceLineFeed

        # Writing includer
        if options.sourceScriptFile != None:
            print "  * Storing includer as %s..." % options.sourceScriptFile
            # Single quotes and newlines must be escaped to survive inside
            # the document.write('...') literal.
            sourceScript = "document.write('%s');" % scriptBlocks.replace("'", "\\'")
            if options.addNewLines:
                sourceScript = sourceScript.replace("\n", "\\\n")
            filetool.save(options.sourceScriptFile, sourceScript, options.scriptOutputEncoding)

        # Patching template
        if options.sourceTemplateInputFile != None and options.sourceTemplateOutputFile != None:
            print "  * Patching template: %s => %s" % (options.sourceTemplateInputFile, options.sourceTemplateOutputFile)
            tmpl = filetool.read(options.sourceTemplateInputFile)
            res = tmpl.replace(options.sourceTemplateReplace, scriptBlocks)
            filetool.save(options.sourceTemplateOutputFile, res, options.scriptOutputEncoding)





    ######################################################################
    #  GENERATION OF COMPILED VERSION
    ######################################################################

    # Compile every included (non-meta) class and concatenate the results,
    # prefixed by the collected inline bootstrap code, into one script file.
    if options.generateCompiledScript:
        print
        print "  GENERATION OF COMPILED SCRIPT:"
        print "----------------------------------------------------------------------------"

        # NOTE(review): buildLineFeed is computed here but never referenced
        # below -- looks vestigial; confirm before removing.
        buildLineFeed = "";
        if options.addNewLines:
            buildLineFeed = "\n";

        compiledOutput = "".join(inlineCompiledCode)

        if options.compiledScriptFile == None:
            print "  * You must define the compiled script file!"
            sys.exit(1)

        if options.verbose:
            print "  * Compiling..."
        else:
            print "  * Compiling: ",

        for fileId in sortedIncludeList:
            # Meta classes have no compilable source of their own.
            if fileDb[fileId]["meta"]:
              continue

            if options.verbose:
                print "    - Compiling %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            # Pretty printing would defeat compression; force it off (the
            # migration path above may have switched it on).
            if options.prettyPrint:
                options.prettyPrint = False  # make sure it's disabled

            compiledFileContent = compiler.compile(loader.getTree(fileDb, fileId, options), options, options.addNewLines, options.verbose)

            if options.addFileIds:
                compiledOutput += "\n\n\n/* ID: " + fileId + " */\n" + compiledFileContent + "\n"
            else:
                compiledOutput += compiledFileContent

            # Guarantee statement separation between concatenated classes.
            if not compiledOutput.endswith(";") and not compiledOutput.endswith("\n"):
                compiledOutput += ";"

        if not options.verbose:
            print

        print "  * Storing output as %s..." % options.compiledScriptFile
        filetool.save(options.compiledScriptFile, compiledOutput, options.scriptOutputEncoding)
# Exemple #17  (sample separator left over from the code-example corpus this
# file was scraped from; the stray "0" below it was a vote count. Kept as a
# comment so the residue cannot be mistaken for executable code.)
# 0
def execute(fileDb, moduleDb, options, pkgid="", names=[]):

  additionalOutput = []


  ######################################################################
  #  SORT OF INCLUDE LIST
  ######################################################################

  print
  print "  SORT OF INCLUDE LIST:"
  print "----------------------------------------------------------------------------"

  if options.verbose:
    print "  * Include (with dependencies): %s" % options.includeWithDeps
    print "  * Include (without dependencies): %s" % options.includeWithoutDeps
    print "  * Exclude (with dependencies): %s" % options.excludeWithDeps
    print "  * Exclude (without dependencies): %s" % options.excludeWithoutDeps

  print "  * Sorting %s classes..." % len(fileDb)

  sortedIncludeList = loader.getSortedList(options, fileDb, moduleDb)

  if len(sortedIncludeList) == 0:
  	print "    - No class files to include. Exciting!"
  	sys.exit(1)
  else:
  	print "    - Including %s classes" % len(sortedIncludeList)

  if options.printIncludes:
    print
    print "  PRINT OF INCLUDE ORDER:"
    print "----------------------------------------------------------------------------"
    print "  * The files will be included in this order:"
    for fileId in sortedIncludeList:
      print "    - %s" % fileId

  if options.printDeps:
    print
    print "  OUTPUT OF DEPENDENCIES:"
    print "----------------------------------------------------------------------------"
    print "  * These are all included files with their dependencies:"
    for fileId in sortedIncludeList:
      print "    - %s" % fileId
      if len(fileDb[fileId]["loadtimeDeps"]) > 0:
        print "      - Loadtime: "
        for depEntry in fileDb[fileId]["loadtimeDeps"]:
          print "        - %s" % depEntry

      if len(fileDb[fileId]["afterDeps"]) > 0:
        print "      - After: "
        for depEntry in fileDb[fileId]["afterDeps"]:
          print "        - %s" % depEntry

      if len(fileDb[fileId]["runtimeDeps"]) > 0:
        print "      - Runtime: "
        for depEntry in fileDb[fileId]["runtimeDeps"]:
          print "        - %s" % depEntry

      if len(fileDb[fileId]["loadDeps"]) > 0:
        print "      - Load: "
        for depEntry in fileDb[fileId]["loadDeps"]:
          print "        - %s" % depEntry

      if len(fileDb[fileId]["optionalDeps"]) > 0:
        print "      - Optional: "
        for depEntry in fileDb[fileId]["optionalDeps"]:
          print "        - %s" % depEntry





  ######################################################################
  #  GRAPHVIZ OUTPUT
  ######################################################################

  # Optionally dump the class dependency graph to a Graphviz "dot" file.
  if options.depDotFile:
    graph.store(fileDb, sortedIncludeList, options)






  ######################################################################
  #  SOURCE MIGRATION
  ######################################################################

  # Rewrite the source files to the current framework API. Migration is an
  # exclusive job: nothing else runs in the same invocation.
  if options.migrateSource:
    print
    print "  SOURCE MIGRATION:"
    print "----------------------------------------------------------------------------"

    print "  * Migrate Source Code..."

    migrator.handle(sortedIncludeList, fileDb, options)

    # Return after migration: Ignore other jobs
    return





  ######################################################################
  #  GENERATION OF PRETTY PRINTED CODE
  ######################################################################

  # Normalize whitespace of every included source file in place: Unix line
  # endings, tabs expanded to 2 spaces, trailing spaces stripped. Files are
  # only rewritten when the normalization changed something. Exclusive job.
  if options.fixSource:
    print
    print "  FIX SOURCE CODE:"
    print "----------------------------------------------------------------------------"

    if options.verbose:
      print "  * Fixing code..."
    else:
      print "  * Fixing code: ",

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - Reading %s" % fileId

      fileEntry = fileDb[fileId]

      filePath = fileEntry["path"]
      fileEncoding = fileEntry["encoding"]

      fileContent = filetool.read(filePath, fileEncoding)
      fixedContent = textutil.removeTrailingSpaces(textutil.tab2Space(textutil.any2Unix(fileContent), 2))

      if fixedContent != fileContent:
        if options.verbose:
          print "      - Storing modifications..."
        else:
          # "!" marks a rewritten file, "." an untouched one.
          sys.stdout.write("!")
          sys.stdout.flush()

        filetool.save(filePath, fixedContent, fileEncoding)

      elif not options.verbose:
        sys.stdout.write(".")
        sys.stdout.flush()

    if not options.verbose:
      print

    # Return after fixing: Ignore other jobs
    return






  ######################################################################
  #  GENERATION OF PRETTY PRINTED CODE
  ######################################################################

  # Recompile every included class with pretty printing and write the result
  # back over the original source file. Exclusive job.
  if options.prettyPrint:
    print
    print "  GENERATION OF PRETTY PRINTED CODE:"
    print "----------------------------------------------------------------------------"

    if options.verbose:
      print "  * Pretty printing..."
    else:
      print "  * Pretty printing: ",

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - Compiling %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      # NOTE(review): True is passed positionally in the slot that receives
      # an options object elsewhere in this file -- presumably an older
      # compile() signature's pretty-print flag; confirm against compiler.py.
      prettyFileContent = compiler.compile(loader.getTree(fileDb, fileId, options), True)

      if not prettyFileContent.endswith("\n"):
        prettyFileContent += "\n"

      filetool.save(fileDb[fileId]["path"], prettyFileContent)

    if not options.verbose:
      print

    # Return after pretty print: Ignore other jobs
    return



  ######################################################################
  #  STRING OPTIMIZATION
  ######################################################################

  # Two-phase string optimization (execute() variant): collect usage counts
  # via the loader's cache, rewrite the trees against a shared string table,
  # then queue the table definition in additionalOutput.
  if options.optimizeStrings:
    print
    print "  STRING OPTIMIZATION:"
    print "----------------------------------------------------------------------------"

    if options.verbose:
      print "  * Searching strings..."
    else:
      print "  * Searching strings: ",

    # string value -> total occurrence count across all included classes
    stringMap = {}

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      localMap = loader.getStrings(fileDb, fileId, options)

      # Merge this file's counts into the global map.
      for value in localMap:
        if value in stringMap:
          stringMap[value] += localMap[value]
        else:
          stringMap[value] = localMap[value]

    if not options.verbose:
      print

    counter = 0  # total string instances (sum of all counts)
    for value in stringMap:
      counter += stringMap[value]

    # Sorted table; index order defines the replacement identifiers.
    stringList = stringoptimizer.sort(stringMap)

    print "  * Found %s strings (used %s times)" % (len(stringMap), counter)

    if options.verbose:
      print "  * Replacing strings..."
    else:
      print "  * Replacing strings: ",

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      # "$" + pkgid namespaces the generated table per package.
      stringoptimizer.replace(loader.getTree(fileDb, fileId, options), stringList, "$" + pkgid, options.verbose)

    if not options.verbose:
      print

    print "  * Generating replacement..."
    additionalOutput.append(stringoptimizer.replacement(stringList, "$" + pkgid))






  ######################################################################
  #  LOCAL VARIABLE OPTIMIZATION
  ######################################################################

  # Shorten local variable names in every class tree (execute() variant;
  # unlike the other generator path, no per-file counter is collected).
  if options.optimizeVariables:
    print
    print "  LOCAL VARIABLE OPTIMIZATION:"
    print "----------------------------------------------------------------------------"

    if options.verbose:
      print "  * Optimizing variables..."
    else:
      print "  * Optimizing variables: ",

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      # "$" is the prefix for generated names; skipPrefix protects
      # user-designated variables from renaming.
      variableoptimizer.search(loader.getTree(fileDb, fileId, options), [], 0, "$", skipPrefix = options.optimizeVariablesSkipPrefix, debug = options.enableDebug)

    if not options.verbose:
      print






  ######################################################################
  #  NAME OBFUSCATION
  ######################################################################

  # Rename the identifiers listed in `names` (a parameter of execute())
  # throughout every class tree, using "$$" as the replacement prefix.
  if options.obfuscateIdentifiers:
    print
    print "  OBFUSCATE IDENTIFIERS:"
    print "----------------------------------------------------------------------------"

    if options.verbose:
      print "  * Obfuscating identifiers..."
    else:
      print "  * Obfuscating identifiers: ",

    counter = 0  # total number of updated names

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      counter += obfuscator.update(loader.getTree(fileDb, fileId, options), names, "$$")

    if not options.verbose:
      print

    print "  * Updated %s names" % counter






  ######################################################################
  #  TOKEN STORAGE
  ######################################################################

  if options.storeTokens:
    print
    print "  TOKEN STORAGE:"
    print "----------------------------------------------------------------------------"

    if options.tokenOutputDirectory == None:
      print "  * You must define the token output directory!"
      sys.exit(1)

    if options.verbose:
      print "  * Storing tokens..."
    else:
      print "  * Storing tokens: ",

    for fileId in sortedIncludeList:
      tokenString = tokenizer.convertTokensToString(loader.getTokens(fileDb, fileId, options))

      if options.verbose:
        print "    * writing tokens for %s (%s KB)..." % (fileIdm, len(tokenString) / 1000.0)
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      filetool.save(os.path.join(filetool.normalize(options.tokenOutputDirectory), fileId + config.TOKENEXT), tokenString)

    if not options.verbose:
      print




  ######################################################################
  #  TREE STORAGE
  ######################################################################

  # Serialize each included class's syntax tree as XML to
  # <treeOutputDirectory>/<fileId><XMLEXT>.
  if options.storeTree:
    print
    print "  TREE STORAGE:"
    print "----------------------------------------------------------------------------"

    if options.treeOutputDirectory == None:
      print "  * You must define the tree output directory!"
      sys.exit(1)

    if options.verbose:
      print "  * Storing tree..."
    else:
      print "  * Storing tree: ",

    for fileId in sortedIncludeList:
      # XML declaration uses the configured output encoding.
      treeString = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n" + tree.nodeToXmlString(loader.getTree(fileDb, fileId, options))

      if options.verbose:
        print "    * writing tree for %s (%s KB)..." % (fileId, len(treeString) / 1000.0)
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      filetool.save(os.path.join(filetool.normalize(options.treeOutputDirectory), fileId + config.XMLEXT), treeString)

    if not options.verbose:
      print





  ######################################################################
  #  GENERATION OF API
  ######################################################################

  # Build the API documentation tree and write it as monolithic XML and/or
  # JSON. NOTE(review): here api.createDoc() returns the tree directly,
  # unlike the other generator path in this file where it returns a
  # (tree, error) tuple -- different api module versions; confirm.
  if options.generateApiDocumentation:
    print
    print "  GENERATION OF API:"
    print "----------------------------------------------------------------------------"

    # Only warns (no exit): with both targets unset, the writing steps
    # below are simply skipped by their != None guards.
    if options.apiDocumentationJsonFile == None and options.apiDocumentationXmlFile == None:
      print "  * You must define one of JSON or XML API documentation file!"

    docTree = None

    if options.verbose:
      print "  * Generating API tree..."
    else:
      print "  * Generating API tree: ",

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      docTree = api.createDoc(loader.getTree(fileDb, fileId, options), docTree)

    if not options.verbose:
      print

    if docTree:
      print "  * Finalizing tree..."
      api.postWorkPackage(docTree, docTree)

    if options.apiDocumentationXmlFile != None:
      print "  * Writing XML API file to %s" % options.apiDocumentationXmlFile

      xmlContent = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n"

      # addNewLines toggles pretty-printed vs. fully collapsed output.
      if options.addNewLines:
        xmlContent += "\n" + tree.nodeToXmlString(docTree)
      else:
        xmlContent += tree.nodeToXmlString(docTree, "", "", "")

      filetool.save(options.apiDocumentationXmlFile, xmlContent, options.xmlOutputEncoding)

    if options.apiDocumentationJsonFile != None:
      print "  * Writing JSON API file to %s" % options.apiDocumentationJsonFile

      if options.addNewLines:
        jsonContent = tree.nodeToJsonString(docTree)
      else:
        jsonContent = tree.nodeToJsonString(docTree, "", "", "")

      filetool.save(options.apiDocumentationJsonFile, jsonContent, options.scriptOutputEncoding)





  ######################################################################
  #  CREATE COPY OF RESOURCES
  ######################################################################

  if options.copyResources:

    print
    print "  CREATE COPY OF RESOURCES:"
    print "----------------------------------------------------------------------------"

    resources.copy(options, sortedIncludeList, fileDb)






  ######################################################################
  #  GENERATION OF SETTINGS
  ######################################################################

  if options.generateSourceScript or options.generateCompiledScript:
    settingsStr = ""

    if len(options.defineRuntimeSetting) != 0:
      print
      print "  GENERATION OF SETTINGS:"
      print "----------------------------------------------------------------------------"

      print "  * Processing input data..."
      settingsStr = settings.generate(options)

      if options.settingsScriptFile:
        print "   * Storing result to %s" % options.settingsScriptFile
        filetool.save(options.settingsScriptFile, settingsStr)

        # clear settings for build and source
        settingsStr = ""





  ######################################################################
  #  GENERATION OF SOURCE VERSION
  ######################################################################

  if options.generateSourceScript:
    print
    print "  GENERATION OF SOURCE SCRIPT:"
    print "----------------------------------------------------------------------------"

    if options.sourceScriptFile == None and (options.sourceTemplateInputFile == None or options.sourceTemplateOutputFile == None):
      print "  * You must define at least one source script file or template input/output."
      sys.exit(1)

    if options.sourceScriptFile:
      options.sourceScriptFile = os.path.normpath(options.sourceScriptFile)

    if options.sourceTemplateInputFile:
      options.sourceTemplateInputFile = os.path.normpath(options.sourceTemplateInputFile)

    if options.sourceTemplateOutputFile:
      options.sourceTemplateOutputFile = os.path.normpath(options.sourceTemplateOutputFile)


    print "  * Generating script block..."

    # Handling line feed setting
    sourceLineFeed = "";
    if options.addNewLines:
      sourceLineFeed = "\n";


    # Generating inline code...
    inlineCode = ""
    inlineCode += settingsStr + sourceLineFeed
    inlineCode += "qx.IS_SOURCE=true;%s" % sourceLineFeed
    inlineCode += "qx.VERSION=\"%s\";%s" % (options.version, sourceLineFeed)
    inlineCode += "".join(additionalOutput)


    # Generating script block
    scriptBlocks = ""
    scriptBlocks += '<script type="text/javascript">%s</script>' % inlineCode
    for fileId in sortedIncludeList:
      if fileDb[fileId]["classUri"] == None:
        print "  * Missing class URI definition for class path %s." % fileDb[fileId]["classPath"]
        sys.exit(1)

      scriptBlocks += '<script type="text/javascript" src="%s%s"></script>' % (os.path.join(fileDb[fileId]["classUri"], fileDb[fileId]["pathId"].replace(".", os.sep)), config.JSEXT)
      scriptBlocks += sourceLineFeed



    if options.sourceScriptFile != None:
      print "  * Storing includer as %s..." % options.sourceScriptFile
      sourceScript = "document.write('%s');" % scriptBlocks.replace("'", "\\'")
      if options.addNewLines:
        sourceScript = sourceScript.replace("\n", "\\\n")
      filetool.save(options.sourceScriptFile, sourceScript, options.scriptOutputEncoding)

    if options.sourceTemplateInputFile != None and options.sourceTemplateOutputFile != None:
      print "  * Patching template: %s => %s" % (options.sourceTemplateInputFile, options.sourceTemplateOutputFile)
      tmpl = filetool.read(options.sourceTemplateInputFile)
      res = tmpl.replace(options.sourceTemplateReplace, scriptBlocks)
      filetool.save(options.sourceTemplateOutputFile, res, options.scriptOutputEncoding)





  ######################################################################
  #  GENERATION OF COMPILED VERSION
  ######################################################################

  if options.generateCompiledScript:
    print
    print "  GENERATION OF COMPILED SCRIPT:"
    print "----------------------------------------------------------------------------"

    buildLineFeed = "";
    if options.addNewLines:
      buildLineFeed = "\n";

    inlineCode = ""
    inlineCode += settingsStr + buildLineFeed
    inlineCode += "qx.IS_SOURCE=false;%s" % buildLineFeed
    inlineCode += "qx.VERSION=\"%s\";%s" % (options.version, buildLineFeed)
    inlineCode += "".join(additionalOutput)

    compiledOutput = inlineCode

    if options.compiledScriptFile == None:
      print "  * You must define the compiled script file!"
      sys.exit(1)

    if options.verbose:
      print "  * Compiling..."
    else:
      print "  * Compiling: ",

    for fileId in sortedIncludeList:
      if options.verbose:
        print "    - Compiling %s" % fileId
      else:
        sys.stdout.write(".")
        sys.stdout.flush()

      compiledFileContent = compiler.compile(loader.getTree(fileDb, fileId, options), False, options.addNewLines, options.enableDebug)

      if options.addFileIds:
        compiledOutput += "\n\n\n/* ID: " + fileId + " */\n" + compiledFileContent + "\n"
      else:
        compiledOutput += compiledFileContent

      if not compiledOutput.endswith(";") and not compiledOutput.endswith("\n"):
        compiledOutput += ";"

    if not options.verbose:
      print

    print "  * Storing output as %s..." % options.compiledScriptFile
    filetool.save(options.compiledScriptFile, compiledOutput, options.scriptOutputEncoding)
Exemple #18
0
def parseFile(fileName, uniqueId="", encoding="utf-8"):
    """Read the file at *fileName* (decoded with *encoding*) and tokenize it.

    The decoded content is passed to parseStream() together with
    *uniqueId* and the resulting token stream is returned.
    """
    content = filetool.read(fileName, encoding)
    return parseStream(content, uniqueId)
Exemple #19
0
def execute(fileDb, moduleDb, options, pkgid="", names=[]):

    additionalOutput = []

    ######################################################################
    #  SORT OF INCLUDE LIST
    ######################################################################

    print
    print "  SORT OF INCLUDE LIST:"
    print "----------------------------------------------------------------------------"

    if options.verbose:
        print "  * Include (with dependencies): %s" % options.includeWithDeps
        print "  * Include (without dependencies): %s" % options.includeWithoutDeps
        print "  * Exclude (with dependencies): %s" % options.excludeWithDeps
        print "  * Exclude (without dependencies): %s" % options.excludeWithoutDeps

    print "  * Sorting %s classes..." % len(fileDb)

    sortedIncludeList = loader.getSortedList(options, fileDb, moduleDb)

    if len(sortedIncludeList) == 0:
        print "    - No class files to include. Exciting!"
        sys.exit(1)
    else:
        print "    - Including %s classes" % len(sortedIncludeList)

    if options.printIncludes:
        print
        print "  PRINT OF INCLUDE ORDER:"
        print "----------------------------------------------------------------------------"
        print "  * The files will be included in this order:"
        for fileId in sortedIncludeList:
            print "    - %s" % fileId

    if options.printDeps:
        print
        print "  OUTPUT OF DEPENDENCIES:"
        print "----------------------------------------------------------------------------"
        print "  * These are all included files with their dependencies:"
        for fileId in sortedIncludeList:
            print "    - %s" % fileId
            if len(fileDb[fileId]["loadtimeDeps"]) > 0:
                print "      - Loadtime: "
                for depEntry in fileDb[fileId]["loadtimeDeps"]:
                    print "        - %s" % depEntry

            if len(fileDb[fileId]["afterDeps"]) > 0:
                print "      - After: "
                for depEntry in fileDb[fileId]["afterDeps"]:
                    print "        - %s" % depEntry

            if len(fileDb[fileId]["runtimeDeps"]) > 0:
                print "      - Runtime: "
                for depEntry in fileDb[fileId]["runtimeDeps"]:
                    print "        - %s" % depEntry

            if len(fileDb[fileId]["loadDeps"]) > 0:
                print "      - Load: "
                for depEntry in fileDb[fileId]["loadDeps"]:
                    print "        - %s" % depEntry

            if len(fileDb[fileId]["optionalDeps"]) > 0:
                print "      - Optional: "
                for depEntry in fileDb[fileId]["optionalDeps"]:
                    print "        - %s" % depEntry

    ######################################################################
    #  GRAPHVIZ OUTPUT
    ######################################################################

    if options.depDotFile:
        graph.store(fileDb, sortedIncludeList, options)

    ######################################################################
    #  SOURCE MIGRATION
    ######################################################################

    if options.migrateSource:
        print
        print "  SOURCE MIGRATION:"
        print "----------------------------------------------------------------------------"

        print "  * Migrate Source Code..."

        migrator.handle(sortedIncludeList, fileDb, options)

        # Return after migration: Ignore other jobs
        return

    ######################################################################
    #  GENERATION OF PRETTY PRINTED CODE
    ######################################################################

    if options.fixSource:
        print
        print "  FIX SOURCE CODE:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Fixing code..."
        else:
            print "  * Fixing code: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - Reading %s" % fileId

            fileEntry = fileDb[fileId]

            filePath = fileEntry["path"]
            fileEncoding = fileEntry["encoding"]

            fileContent = filetool.read(filePath, fileEncoding)
            fixedContent = textutil.removeTrailingSpaces(
                textutil.tab2Space(textutil.any2Unix(fileContent), 2))

            if fixedContent != fileContent:
                if options.verbose:
                    print "      - Storing modifications..."
                else:
                    sys.stdout.write("!")
                    sys.stdout.flush()

                filetool.save(filePath, fixedContent, fileEncoding)

            elif not options.verbose:
                sys.stdout.write(".")
                sys.stdout.flush()

        if not options.verbose:
            print

        # Return after fixing: Ignore other jobs
        return

    ######################################################################
    #  GENERATION OF PRETTY PRINTED CODE
    ######################################################################

    if options.prettyPrint:
        print
        print "  GENERATION OF PRETTY PRINTED CODE:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Pretty printing..."
        else:
            print "  * Pretty printing: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - Compiling %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            prettyFileContent = compiler.compile(
                loader.getTree(fileDb, fileId, options), True)

            if not prettyFileContent.endswith("\n"):
                prettyFileContent += "\n"

            filetool.save(fileDb[fileId]["path"], prettyFileContent)

        if not options.verbose:
            print

        # Return after pretty print: Ignore other jobs
        return

    ######################################################################
    #  STRING OPTIMIZATION
    ######################################################################

    if options.optimizeStrings:
        print
        print "  STRING OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Searching strings..."
        else:
            print "  * Searching strings: ",

        stringMap = {}

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            localMap = loader.getStrings(fileDb, fileId, options)

            for value in localMap:
                if value in stringMap:
                    stringMap[value] += localMap[value]
                else:
                    stringMap[value] = localMap[value]

        if not options.verbose:
            print

        counter = 0
        for value in stringMap:
            counter += stringMap[value]

        stringList = stringoptimizer.sort(stringMap)

        print "  * Found %s strings (used %s times)" % (len(stringMap),
                                                        counter)

        if options.verbose:
            print "  * Replacing strings..."
        else:
            print "  * Replacing strings: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            stringoptimizer.replace(loader.getTree(fileDb, fileId, options),
                                    stringList, "$" + pkgid, options.verbose)

        if not options.verbose:
            print

        print "  * Generating replacement..."
        additionalOutput.append(
            stringoptimizer.replacement(stringList, "$" + pkgid))

    ######################################################################
    #  LOCAL VARIABLE OPTIMIZATION
    ######################################################################

    if options.optimizeVariables:
        print
        print "  LOCAL VARIABLE OPTIMIZATION:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Optimizing variables..."
        else:
            print "  * Optimizing variables: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            variableoptimizer.search(
                loader.getTree(fileDb, fileId, options), [],
                0,
                "$",
                skipPrefix=options.optimizeVariablesSkipPrefix,
                debug=options.enableDebug)

        if not options.verbose:
            print

    ######################################################################
    #  NAME OBFUSCATION
    ######################################################################

    if options.obfuscateIdentifiers:
        print
        print "  OBFUSCATE IDENTIFIERS:"
        print "----------------------------------------------------------------------------"

        if options.verbose:
            print "  * Obfuscating identifiers..."
        else:
            print "  * Obfuscating identifiers: ",

        counter = 0

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            counter += obfuscator.update(
                loader.getTree(fileDb, fileId, options), names, "$$")

        if not options.verbose:
            print

        print "  * Updated %s names" % counter

    ######################################################################
    #  TOKEN STORAGE
    ######################################################################

    if options.storeTokens:
        print
        print "  TOKEN STORAGE:"
        print "----------------------------------------------------------------------------"

        if options.tokenOutputDirectory == None:
            print "  * You must define the token output directory!"
            sys.exit(1)

        if options.verbose:
            print "  * Storing tokens..."
        else:
            print "  * Storing tokens: ",

        for fileId in sortedIncludeList:
            tokenString = tokenizer.convertTokensToString(
                loader.getTokens(fileDb, fileId, options))

            if options.verbose:
                print "    * writing tokens for %s (%s KB)..." % (
                    fileIdm, len(tokenString) / 1000.0)
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            filetool.save(
                os.path.join(filetool.normalize(options.tokenOutputDirectory),
                             fileId + config.TOKENEXT), tokenString)

        if not options.verbose:
            print

    ######################################################################
    #  TREE STORAGE
    ######################################################################

    if options.storeTree:
        print
        print "  TREE STORAGE:"
        print "----------------------------------------------------------------------------"

        if options.treeOutputDirectory == None:
            print "  * You must define the tree output directory!"
            sys.exit(1)

        if options.verbose:
            print "  * Storing tree..."
        else:
            print "  * Storing tree: ",

        for fileId in sortedIncludeList:
            treeString = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n" + tree.nodeToXmlString(
                loader.getTree(fileDb, fileId, options))

            if options.verbose:
                print "    * writing tree for %s (%s KB)..." % (
                    fileId, len(treeString) / 1000.0)
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            filetool.save(
                os.path.join(filetool.normalize(options.treeOutputDirectory),
                             fileId + config.XMLEXT), treeString)

        if not options.verbose:
            print

    ######################################################################
    #  GENERATION OF API
    ######################################################################

    if options.generateApiDocumentation:
        print
        print "  GENERATION OF API:"
        print "----------------------------------------------------------------------------"

        if options.apiDocumentationJsonFile == None and options.apiDocumentationXmlFile == None:
            print "  * You must define one of JSON or XML API documentation file!"

        docTree = None

        if options.verbose:
            print "  * Generating API tree..."
        else:
            print "  * Generating API tree: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            docTree = api.createDoc(loader.getTree(fileDb, fileId, options),
                                    docTree)

        if not options.verbose:
            print

        if docTree:
            print "  * Finalizing tree..."
            api.postWorkPackage(docTree, docTree)

        if options.apiDocumentationXmlFile != None:
            print "  * Writing XML API file to %s" % options.apiDocumentationXmlFile

            xmlContent = "<?xml version=\"1.0\" encoding=\"" + options.xmlOutputEncoding + "\"?>\n"

            if options.addNewLines:
                xmlContent += "\n" + tree.nodeToXmlString(docTree)
            else:
                xmlContent += tree.nodeToXmlString(docTree, "", "", "")

            filetool.save(options.apiDocumentationXmlFile, xmlContent,
                          options.xmlOutputEncoding)

        if options.apiDocumentationJsonFile != None:
            print "  * Writing JSON API file to %s" % options.apiDocumentationJsonFile

            if options.addNewLines:
                jsonContent = tree.nodeToJsonString(docTree)
            else:
                jsonContent = tree.nodeToJsonString(docTree, "", "", "")

            filetool.save(options.apiDocumentationJsonFile, jsonContent,
                          options.scriptOutputEncoding)

    ######################################################################
    #  CREATE COPY OF RESOURCES
    ######################################################################

    if options.copyResources:

        print
        print "  CREATE COPY OF RESOURCES:"
        print "----------------------------------------------------------------------------"

        resources.copy(options, sortedIncludeList, fileDb)

    ######################################################################
    #  GENERATION OF SETTINGS
    ######################################################################

    if options.generateSourceScript or options.generateCompiledScript:
        settingsStr = ""

        if len(options.defineRuntimeSetting) != 0:
            print
            print "  GENERATION OF SETTINGS:"
            print "----------------------------------------------------------------------------"

            print "  * Processing input data..."
            settingsStr = settings.generate(options)

            if options.settingsScriptFile:
                print "   * Storing result to %s" % options.settingsScriptFile
                filetool.save(options.settingsScriptFile, settingsStr)

                # clear settings for build and source
                settingsStr = ""

    ######################################################################
    #  GENERATION OF SOURCE VERSION
    ######################################################################

    if options.generateSourceScript:
        print
        print "  GENERATION OF SOURCE SCRIPT:"
        print "----------------------------------------------------------------------------"

        if options.sourceScriptFile == None and (
                options.sourceTemplateInputFile == None
                or options.sourceTemplateOutputFile == None):
            print "  * You must define at least one source script file or template input/output."
            sys.exit(1)

        if options.sourceScriptFile:
            options.sourceScriptFile = os.path.normpath(
                options.sourceScriptFile)

        if options.sourceTemplateInputFile:
            options.sourceTemplateInputFile = os.path.normpath(
                options.sourceTemplateInputFile)

        if options.sourceTemplateOutputFile:
            options.sourceTemplateOutputFile = os.path.normpath(
                options.sourceTemplateOutputFile)

        print "  * Generating script block..."

        # Handling line feed setting
        sourceLineFeed = ""
        if options.addNewLines:
            sourceLineFeed = "\n"

        # Generating inline code...
        inlineCode = ""
        inlineCode += settingsStr + sourceLineFeed
        inlineCode += "qx.IS_SOURCE=true;%s" % sourceLineFeed
        inlineCode += "qx.VERSION=\"%s\";%s" % (options.version,
                                                sourceLineFeed)
        inlineCode += "".join(additionalOutput)

        # Generating script block
        scriptBlocks = ""
        scriptBlocks += '<script type="text/javascript">%s</script>' % inlineCode
        for fileId in sortedIncludeList:
            if fileDb[fileId]["classUri"] == None:
                print "  * Missing class URI definition for class path %s." % fileDb[
                    fileId]["classPath"]
                sys.exit(1)

            scriptBlocks += '<script type="text/javascript" src="%s%s"></script>' % (
                os.path.join(fileDb[fileId]["classUri"],
                             fileDb[fileId]["pathId"].replace(
                                 ".", os.sep)), config.JSEXT)
            scriptBlocks += sourceLineFeed

        if options.sourceScriptFile != None:
            print "  * Storing includer as %s..." % options.sourceScriptFile
            sourceScript = "document.write('%s');" % scriptBlocks.replace(
                "'", "\\'")
            if options.addNewLines:
                sourceScript = sourceScript.replace("\n", "\\\n")
            filetool.save(options.sourceScriptFile, sourceScript,
                          options.scriptOutputEncoding)

        if options.sourceTemplateInputFile != None and options.sourceTemplateOutputFile != None:
            print "  * Patching template: %s => %s" % (
                options.sourceTemplateInputFile,
                options.sourceTemplateOutputFile)
            tmpl = filetool.read(options.sourceTemplateInputFile)
            res = tmpl.replace(options.sourceTemplateReplace, scriptBlocks)
            filetool.save(options.sourceTemplateOutputFile, res,
                          options.scriptOutputEncoding)

    ######################################################################
    #  GENERATION OF COMPILED VERSION
    ######################################################################

    if options.generateCompiledScript:
        print
        print "  GENERATION OF COMPILED SCRIPT:"
        print "----------------------------------------------------------------------------"

        buildLineFeed = ""
        if options.addNewLines:
            buildLineFeed = "\n"

        inlineCode = ""
        inlineCode += settingsStr + buildLineFeed
        inlineCode += "qx.IS_SOURCE=false;%s" % buildLineFeed
        inlineCode += "qx.VERSION=\"%s\";%s" % (options.version, buildLineFeed)
        inlineCode += "".join(additionalOutput)

        compiledOutput = inlineCode

        if options.compiledScriptFile == None:
            print "  * You must define the compiled script file!"
            sys.exit(1)

        if options.verbose:
            print "  * Compiling..."
        else:
            print "  * Compiling: ",

        for fileId in sortedIncludeList:
            if options.verbose:
                print "    - Compiling %s" % fileId
            else:
                sys.stdout.write(".")
                sys.stdout.flush()

            compiledFileContent = compiler.compile(
                loader.getTree(fileDb, fileId, options), False,
                options.addNewLines, options.enableDebug)

            if options.addFileIds:
                compiledOutput += "\n\n\n/* ID: " + fileId + " */\n" + compiledFileContent + "\n"
            else:
                compiledOutput += compiledFileContent

            if not compiledOutput.endswith(
                    ";") and not compiledOutput.endswith("\n"):
                compiledOutput += ";"

        if not options.verbose:
            print

        print "  * Storing output as %s..." % options.compiledScriptFile
        filetool.save(options.compiledScriptFile, compiledOutput,
                      options.scriptOutputEncoding)
Exemple #20
0
def indexFile(
    filePath,
    filePathId,
    classPath,
    listIndex,
    classEncoding,
    classUri,
    resourceInput,
    resourceOutput,
    options,
    fileDb={},
    moduleDb={},
):

    ########################################
    # Checking cache
    ########################################

    useCache = False
    loadCache = False
    cachePath = None

    if options.cacheDirectory != None:
        cachePath = os.path.join(filetool.normalize(options.cacheDirectory), filePathId + "-entry.pcl")
        useCache = True

        if not filetool.checkCache(filePath, cachePath):
            loadCache = True

    ########################################
    # Loading file content / cache
    ########################################

    if loadCache:
        fileEntry = filetool.readCache(cachePath)
        fileId = filePathId

    else:
        fileContent = filetool.read(filePath, classEncoding)

        # Extract ID
        fileContentId = extractFileContentId(fileContent)

        # Search for valid ID
        if fileContentId == None:
            if not filePathId.endswith("__init__"):
                print "    - Could not extract ID from file: %s. Fallback to path %s!" % (filePath, filePathId)
            fileId = filePathId

        else:
            fileId = fileContentId

        if fileId != filePathId:
            print "    - ID mismatch: CONTENT=%s != PATH=%s" % (fileContentId, filePathId)
            if not options.migrateSource:
                sys.exit(1)

        fileEntry = {
            "autoDependencies": False,
            "cached": False,
            "cachePath": cachePath,
            "meta": fileId.endswith("__init__"),
            "ignoreDeps": extractIgnore(fileContent, fileId),
            "optionalDeps": extractOptional(fileContent, fileId),
            "loadtimeDeps": extractLoadtimeDeps(fileContent, fileId),
            "runtimeDeps": extractRuntimeDeps(fileContent, fileId),
            "resources": extractResources(fileContent, fileId),
            "embeds": extractEmbeds(fileContent, fileId),
            "modules": extractModules(fileContent, fileId),
        }

    ########################################
    # Additional data
    ########################################

    # We don't want to cache these items
    fileEntry["path"] = filePath
    fileEntry["pathId"] = filePathId
    fileEntry["encoding"] = classEncoding
    fileEntry["resourceInput"] = resourceInput
    fileEntry["resourceOutput"] = resourceOutput
    fileEntry["classUri"] = classUri
    fileEntry["listIndex"] = listIndex
    fileEntry["classPath"] = classPath

    ########################################
    # Registering file
    ########################################

    # Register to file database
    fileDb[fileId] = fileEntry

    # Register to module database
    for moduleId in fileEntry["modules"]:
        if moduleDb.has_key(moduleId):
            moduleDb[moduleId].append(fileId)
        else:
            moduleDb[moduleId] = [fileId]
Exemple #21
0
def parseFile(fileName, uniqueId="", encoding="utf-8"):
  """Tokenize the contents of *fileName*.

  The file is decoded using *encoding* and handed to parseStream()
  together with *uniqueId*; the resulting token stream is returned.
  """
  fileContent = filetool.read(fileName, encoding)
  return parseStream(fileContent, uniqueId)