Example #1
def migrateFile(
                filePath, compiledPatches, compiledInfos,
                hasPatchModule=False, options=None, encoding="UTF-8"):

    logging.info("  - File: %s" % filePath)

    # Read in original content
    fileContent = filetool.read(filePath, encoding)

    fileId = loader.extractFileContentId(fileContent)

    # Apply patches
    patchedContent = fileContent

    if hasPatchModule and fileId is not None:

        import patch
        tree = treegenerator.createSyntaxTree(tokenizer.parseStream(fileContent))

        # If there were any changes, compile the result
        if patch.patch(fileId, tree):
            options.prettyPrint = True  # make sure it's set
            patchedContent = compiler.compile(tree, options)

    # apply RE patches
    patchedContent = regtool(patchedContent, compiledPatches, True, filePath)
    patchedContent = regtool(patchedContent, compiledInfos, False, filePath)

    # Write file
    if patchedContent != fileContent:
        logging.info("    - %s has been modified. Storing modifications ..." % filePath)
        filetool.save(filePath, patchedContent, encoding)
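
A hypothetical call site for migrateFile (an illustration, not part of the original source): the patch and info expression lists are shown as empty placeholders; they are normally built by entryCompiler, as in the handle() example further down. Note that options must provide a prettyPrint attribute whenever hasPatchModule is True, because migrateFile assigns to it.

compiledPatches = []  # placeholder: normally compiled by entryCompiler
compiledInfos = []    # placeholder: normally compiled by entryCompiler

migrateFile("source/class/my/Application.js",  # hypothetical path
            compiledPatches, compiledInfos,
            hasPatchModule=False,  # options may stay None only in this case
            options=None,
            encoding="UTF-8")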
Example #2
def getTokens(fileDb, fileId, options):
  if "tokens" not in fileDb[fileId]:
    if options.verbose:
      print "    - Generating tokens for %s..." % fileId

    useCache = False
    loadCache = False

    fileEntry = fileDb[fileId]

    filePath = fileEntry["path"]
    fileEncoding = fileEntry["encoding"]

    if options.cacheDirectory is not None:
      cachePath = os.path.join(filetool.normalize(options.cacheDirectory), fileId + "-tokens.pcl")
      useCache = True

      if not filetool.checkCache(filePath, cachePath, getInternalModTime(options)):
        loadCache = True

    if loadCache:
      tokens = filetool.readCache(cachePath)
    else:
      fileContent = filetool.read(filePath, fileEncoding)
      tokens = tokenizer.parseStream(fileContent, fileId)

      if useCache:
        if options.verbose:
          print "    - Caching tokens for %s..." % fileId

        filetool.storeCache(cachePath, tokens)

    fileDb[fileId]["tokens"] = tokens

  return fileDb[fileId]["tokens"]
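
getTokens implements an mtime-checked pickle cache around the tokenizer: a fresh "-tokens.pcl" file is read back instead of re-tokenizing the source. A minimal self-contained sketch of the same pattern, assuming filetool.readCache and filetool.storeCache are thin wrappers around pickle (an assumption; the real helpers may add locking or versioning):

import os
import pickle

def cached_result(file_path, cache_path, compute):
    # Reuse the cache only while it is at least as new as the source file,
    # which is what filetool.checkCache presumably verifies.
    if (os.path.exists(cache_path)
            and os.path.getmtime(cache_path) >= os.path.getmtime(file_path)):
        with open(cache_path, "rb") as f:
            return pickle.load(f)

    result = compute(file_path)

    with open(cache_path, "wb") as f:
        pickle.dump(result, f)

    return result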
Example #3
File: loader.py Project: eean/webrok
def getTokens(fileDb, fileId, options):
    if "tokens" not in fileDb[fileId]:
        if options.verbose:
            print "    - Generating tokens for %s..." % fileId

        useCache = False
        loadCache = False

        fileEntry = fileDb[fileId]

        filePath = fileEntry["path"]
        fileEncoding = fileEntry["encoding"]

        if options.cacheDirectory is not None:
            cachePath = os.path.join(filetool.normalize(options.cacheDirectory), fileId + "-tokens.pcl")
            useCache = True

            if not filetool.checkCache(filePath, cachePath):
                loadCache = True

        if loadCache:
            tokens = filetool.readCache(cachePath)
        else:
            fileContent = filetool.read(filePath, fileEncoding)

            # TODO: This hack is necessary because the current parser cannot
            #       handle comments without a context.
            if fileDb[fileId]["meta"]:
                fileContent += "\n(function() {})()"

            tokens = tokenizer.parseStream(fileContent, fileId)

            if useCache:
                if options.verbose:
                    print "    - Caching tokens for %s..." % fileId

                filetool.storeCache(cachePath, tokens)

        fileDb[fileId]["tokens"] = tokens

    return fileDb[fileId]["tokens"]
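
The TODO above works around a tokenizer limitation: a file ending in a bare comment gives the parser nothing to attach that comment to, so a no-op function expression is appended purely as an anchor. An illustrative input (hypothetical):

fileContent = "// a trailing comment with no statement after it"
fileContent += "\n(function() {})()"  # no-op the comment can attach to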
Example #4
def getTokens(fileDb, fileId, options):
    if "tokens" not in fileDb[fileId]:
        if options.verbose:
            print "    - Generating tokens for %s..." % fileId

        useCache = False
        loadCache = False

        fileEntry = fileDb[fileId]

        filePath = fileEntry["path"]
        fileEncoding = fileEntry["encoding"]

        if options.cacheDirectory is not None:
            cachePath = os.path.join(
                filetool.normalize(options.cacheDirectory),
                fileId + "-tokens.pcl")
            useCache = True

            if not filetool.checkCache(filePath, cachePath,
                                       getInternalModTime(options)):
                loadCache = True

        if loadCache:
            tokens = filetool.readCache(cachePath)
        else:
            fileContent = filetool.read(filePath, fileEncoding)
            tokens = tokenizer.parseStream(fileContent, fileId)

            if useCache:
                if options.verbose:
                    print "    - Caching tokens for %s..." % fileId

                filetool.storeCache(cachePath, tokens)

        fileDb[fileId]["tokens"] = tokens

    return fileDb[fileId]["tokens"]
Example #5
def migrateFile(filePath,
                compiledPatches,
                compiledInfos,
                hasPatchModule=False,
                options=None,
                encoding="UTF-8"):

    logging.info("  - File: %s" % filePath)

    # Read in original content
    fileContent = filetool.read(filePath, encoding)

    fileId = loader.extractFileContentId(fileContent)

    # Apply patches
    patchedContent = fileContent

    if hasPatchModule and fileId is not None:

        import patch
        tree = treegenerator.createSyntaxTree(
            tokenizer.parseStream(fileContent))

        # If there were any changes, compile the result
        if patch.patch(fileId, tree):
            options.prettyPrint = True  # make sure it's set
            patchedContent = compiler.compile(tree, options)

    # apply RE patches
    patchedContent = regtool(patchedContent, compiledPatches, True, filePath)
    patchedContent = regtool(patchedContent, compiledInfos, False, filePath)

    # Write file
    if patchedContent != fileContent:
        logging.info("    - %s has been modified. Storing modifications ..." %
                     filePath)
        filetool.save(filePath, patchedContent, encoding)
Example #6
def compileString(jsString, uniqueId=""):
    """
    Compile a string containing a JavaScript fragment into a syntax tree.
    """
    return treegenerator.createSyntaxTree(
        tokenizer.parseStream(jsString, uniqueId)).getFirstChild()
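
Hypothetical usage of the helper above; the returned node is the fragment's own subtree, i.e. the first child of the file-level root node:

node = compileString("var answer = 42;", uniqueId="inline-snippet")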
Example #7
def handle(fileList, fileDb, options):
  confPath = os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])),
                          "migration", options.migrationTarget)

  infoPath = os.path.join(confPath, "info")
  patchPath = os.path.join(confPath, "patches")

  importedModule = False
  infoList = []
  patchList = []
  htmlList = getHtmlList(options)

  print "  * Number of script input files: %s" % len(fileList)
  print "  * Number of HTML input files: %s" % len(htmlList)
  print "  * Update to version: %s" % options.migrationTarget

  print "  * Searching for patch module..."

  for root, dirs, files in os.walk(confPath):

    # Filter ignored directories
    for ignoredDir in config.DIRIGNORE:
      if ignoredDir in dirs:
        dirs.remove(ignoredDir)

    # Searching for files
    for fileName in files:
      filePath = os.path.join(root, fileName)

      if os.path.splitext(fileName)[1] != config.PYEXT:
        continue

      if fileName == "patch.py":
        print "    - Importing..."

        if not root in sys.path:
          sys.path.insert(0, root)

        import patch
        importedModule = True

  emptyLine = re.compile(r"^\s*$")

  print "  * Searching for info expression data..."

  for root, dirs, files in os.walk(infoPath):

    # Filter ignored directories
    for ignoredDir in config.DIRIGNORE:
      if ignoredDir in dirs:
        dirs.remove(ignoredDir)

    # Searching for files
    for fileName in files:
      filePath = os.path.join(root, fileName)

      fileContent = textutil.any2Unix(filetool.read(filePath, "utf-8"))
      infoList.append({"path": filePath, "content": fileContent.split("\n")})

      if options.verbose:
        print "    - %s" % filePath

  print "    - Number of info files: %s" % len(infoList)

  print "    - Compiling expressions..."

  compiledInfos = []

  for infoFile in infoList:
    print "      - %s" % os.path.basename(infoFile["path"])
    for line in infoFile["content"]:
      if emptyLine.match(line) or line.startswith("#") or line.startswith("//"):
        continue

      compiled = entryCompiler(line)
      if compiled is not None:
        compiledInfos.append(compiled)

  print "    - Number of infos: %s" % len(compiledInfos)

  print "  * Searching for patch expression data..."

  for root, dirs, files in os.walk(patchPath):

    # Filter ignored directories
    for ignoredDir in config.DIRIGNORE:
      if ignoredDir in dirs:
        dirs.remove(ignoredDir)

    # Searching for files
    for fileName in files:
      filePath = os.path.join(root, fileName)

      fileContent = textutil.any2Unix(filetool.read(filePath, "utf-8"))
      patchList.append({"path": filePath, "content": fileContent.split("\n")})

      if options.verbose:
        print "    - %s" % filePath

  print "    - Number of patch files: %s" % len(patchList)

  print "    - Compiling expressions..."

  compiledPatches = []

  for patchFile in patchList:
    print "      - %s" % os.path.basename(patchFile["path"])
    for line in patchFile["content"]:
      if emptyLine.match(line) or line.startswith("#") or line.startswith("//"):
        continue

      compiled = entryCompiler(line)
      if compiled is not None:
        compiledPatches.append(compiled)

  print "    - Number of patches: %s" % len(compiledPatches)

  print
  print "  FILE PROCESSING:"
  print "----------------------------------------------------------------------------"

  if len(fileList) > 0:
    print "  * Processing script files:"

    for fileId in fileList:
      fileEntry = fileDb[fileId]

      filePath = fileEntry["path"]
      fileEncoding = fileEntry["encoding"]

      print "    - %s" % fileId

      # Read in original content
      fileContent = filetool.read(filePath, fileEncoding)
      patchedContent = fileContent

      # Apply patches
      if importedModule:
        tree = treegenerator.createSyntaxTree(tokenizer.parseStream(patchedContent))

        # If there were any changes, compile the result
        if patch.patch(fileId, tree):
          patchedContent = compiler.compile(tree, True)

      patchedContent = regtool(patchedContent, compiledPatches, True, options)
      patchedContent = regtool(patchedContent, compiledInfos, False, options)

      # Write file
      if patchedContent != fileContent:
        print "      - Store modifications..."
        filetool.save(filePath, patchedContent, fileEncoding)

    print "  * Done"

  if len(htmlList) > 0:
    print "  * Processing HTML files:"

    for filePath in htmlList:
      print "    - %s" % filePath

      # Read in original content
      fileContent = filetool.read(filePath)

      patchedContent = fileContent
      patchedContent = regtool(patchedContent, compiledPatches, True, options)
      patchedContent = regtool(patchedContent, compiledInfos, False, options)

      # Write file
      if patchedContent != fileContent:
        print "      - Store modifications..."
        filetool.save(filePath, patchedContent)

    print "  * Done"
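
All three os.walk loops in handle() share one pattern worth noting: removing ignored directories from dirs in place prunes the walk, so those subtrees are never visited. A self-contained sketch of that pattern (the DIRIGNORE entries here are assumptions; the real list lives in config.DIRIGNORE):

import os

DIRIGNORE = [".svn", "CVS"]  # assumed entries; see config.DIRIGNORE

def findFiles(top, extension):
    """Yield paths below top with the given extension, skipping ignored dirs."""
    for root, dirs, files in os.walk(top):
        # Mutating dirs in place stops os.walk from descending into them.
        for ignored in DIRIGNORE:
            if ignored in dirs:
                dirs.remove(ignored)
        for name in files:
            if os.path.splitext(name)[1] == extension:
                yield os.path.join(root, name)

for path in findFiles(".", ".py"):
    print path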