Example #1
 def addPackageDependency(self, dependency, depPath=None):
     if not dependency:
         return
     if isinstance(dependency, str):
         framework = self.loadFramework(dependency)
         if not framework:
             return
     else:
         framework = dependency
         if depPath:
             dependency = depPath
         else:
             dependency = os.path.dirname(dependency.__file__)
     self.dependencies[dependency] = new_md5(
         cPickle.dumps(framework)).hexdigest()
     self.logPrint('Added configure dependency from ' + dependency + '(' +
                   str(self.dependencies[dependency]) + ')')
     for child in framework.childGraph.vertices:
         child.argDB = self.argDB
         child.showHelp = 0
         child.logName = self.logName
         child.setup()
         self.childGraph.replaceVertex(self.require(child.__module__, None),
                                       child)
     return
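The example above fingerprints a configure framework by pickling it and hashing the resulting bytes, then records the digest under the dependency's path. Below is a minimal sketch of that idiom, assuming new_md5 is simply a thin wrapper around hashlib.md5 and using an ordinary dictionary as a hypothetical stand-in for the framework object:

    # Sketch of the pickle-and-digest idiom, under the assumption that
    # new_md5 behaves like hashlib.md5. The framework object here is a
    # hypothetical stand-in; the real code pickles a configure framework.
    import hashlib
    import pickle

    def new_md5(data=b''):
        # assumed equivalent of the new_md5 helper used in the examples
        return hashlib.md5(data)

    framework = {'packages': ['mpi', 'blas'], 'options': {'debug': True}}  # hypothetical
    digest = new_md5(pickle.dumps(framework)).hexdigest()
    print('Added configure dependency (' + digest + ')')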
Example #2
    def processFileSetModule(self, set):
        '''Compile all the files in "set" using a module directly'''
        if not len(set): return self.output
        import nargs
        import sourceDatabase

        # Check for cached output
        #   We could of course hash this big key again
        #   These keys could be local, but we can do that if they proliferate too much. It would mean
        #     that each project would have to compile the SIDL once
        flags = self.getFlags(set)
        cacheKey = 'cacheKey' + ''.join(
            [sourceDatabase.SourceDB.getChecksum(f)
             for f in set] + [new_md5(''.join(flags)).hexdigest()])
        if set.tag.startswith('old') and cacheKey in self.argDB:
            self.debugPrint(
                'Loading ' + str(set) + ' for a ' + self.language + ' ' +
                self.action + ' from argument database (' + cacheKey + ')', 3,
                'compile')
            outputFiles = cPickle.loads(self.argDB[cacheKey])
        else:
            # Save targets so that they do not interfere with Scandal
            target = self.argDB.target
            self.argDB.target = []
            # Run compiler and reporter
            compiler = self.getCompilerModule().Scandal(flags + set)
            if not set.tag.startswith('old'):
                self.debugPrint(
                    'Compiling ' + str(set) + ' into a ' + self.language +
                    ' ' + self.action, 3, 'compile')
                self.debugPrint('  with flags ' + str(flags), 4, 'compile')
                compiler.run()
            else:
                self.debugPrint(
                    'Reporting on ' + str(set) + ' for a ' + self.language +
                    ' ' + self.action, 3, 'compile')
                self.debugPrint('  with flags ' + str(flags), 4, 'compile')
                compiler.report()
            outputFiles = compiler.outputFiles
            self.argDB[cacheKey] = cPickle.dumps(outputFiles)
            # Restore targets and remove flags
            self.argDB.target = target
            for flag in flags:
                del self.argDB[nargs.Arg.parseArgument(flag)[0]]
        # Construct output
        tag = self.outputTag
        if self.isServer:
            (package, ext) = os.path.splitext(os.path.basename(set[0]))
            tag += ' ' + package
        self.output.children.append(
            build.fileset.RootedFileSet(self.usingSIDL.project.getUrl(),
                                        outputFiles,
                                        tag=tag))
        return self.output
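The comments in this example explain that the cache key combines the checksum of every file in the set with an MD5 of the concatenated flags, so the compile step is repeated only when a source file or a flag changes. A hedged sketch of that key construction follows, with hashlib.md5 standing in for new_md5 and hypothetical file names and flags; the checksum helper hashes a string rather than real file contents, purely to keep the sketch self-contained:

    # Sketch of the cache-key construction from processFileSetModule.
    import hashlib

    def checksum_stand_in(name):
        # hypothetical stand-in for sourceDatabase.SourceDB.getChecksum,
        # which hashes a file's contents; here we hash the name string
        return hashlib.md5(name.encode()).hexdigest()

    files = ['a.sidl', 'b.sidl']            # hypothetical file set
    flags = ['-serverLanguages=[Cxx]']      # hypothetical compiler flags
    cacheKey = 'cacheKey' + ''.join(
        [checksum_stand_in(f) for f in files]
        + [hashlib.md5(''.join(flags).encode()).hexdigest()])
    print(cacheKey)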
Example #3
 def getChecksum(source, chunkSize = 1024*1024):
   '''Return the md5 checksum for a given file, which may also be specified by its filename
      - The chunkSize argument specifies the size of blocks read from the file'''
   if isinstance(source, file):
     f = source
   else:
     f = file(source)
   m = new_md5()
   size = chunkSize
   buf  = f.read(size)
   while buf:
     m.update(buf)
     buf = f.read(size)
   f.close()
   return m.hexdigest()
Example #4
 def getChecksum(source, chunkSize=1024 * 1024):
     '''Return the md5 checksum for a given file, which may also be specified by its filename
    - The chunkSize argument specifies the size of blocks read from the file'''
     if isinstance(source, file):
         f = source
     else:
         f = file(source)
     m = new_md5()
     size = chunkSize
     buf = f.read(size)
     while buf:
         m.update(buf)
         buf = f.read(size)
     f.close()
     return m.hexdigest()
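The docstring in Examples #3 and #4 describes reading the file in fixed-size chunks so large files can be hashed without loading them entirely into memory. Both snippets rely on the Python 2 file built-in; a minimal modern equivalent, assuming only the standard library, could look like this:

    # Hedged modern equivalent of getChecksum: the same chunked-MD5 idea,
    # written for Python 3 with hashlib and a context manager.
    import hashlib

    def get_checksum(path, chunk_size=1024 * 1024):
        '''Return the md5 hex digest of the file at path, read in chunk_size blocks.'''
        m = hashlib.md5()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(chunk_size), b''):
                m.update(chunk)
        return m.hexdigest()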
Example #5
 def updatePackageDependencies(self):
   for dependency, digest in self.dependencies.items():
     framework = self.loadFramework(dependency)
     if digest == new_md5(cPickle.dumps(framework)).hexdigest():
       continue
     self.logPrint('Configure dependency from '+dependency+' has changed. Reloading...')
     for child in framework.childGraph.vertices:
       self.childGraph.replaceVertex(self.require(child.__module__, None), child)
       self.logPrint('  Reloaded '+child.__module__)
     self.updateDependencies()
     for child in framework.childGraph.vertices:
       for depChild in self.childGraph.depthFirstVisit(child, outEdges = 0):
         if hasattr(depChild, '_configured'):
           del depChild._configured
       self.logPrint('  Will reconfigure subtree for '+child.__module__)
   return
Example #6
 def updatePackageDependencies(self):
   for dependency, digest in self.dependencies.items():
     framework = self.loadFramework(dependency)
     if digest == new_md5(pickle.dumps(framework)).hexdigest():
       continue
     self.logPrint('Configure dependency from '+dependency+' has changed. Reloading...')
     for child in framework.childGraph.vertices:
       self.childGraph.replaceVertex(self.require(child.__module__, None), child)
       self.logPrint('  Reloaded '+child.__module__)
     self.updateDependencies()
     for child in framework.childGraph.vertices:
       for depChild in self.childGraph.depthFirstVisit(child, outEdges = 0):
         if hasattr(depChild, '_configured'):
           del depChild._configured
       self.logPrint('  Will reconfigure subtree for '+child.__module__)
   return
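In Examples #5 and #6, updatePackageDependencies recomputes the digest of each pickled framework and compares it against the stored value, so only dependencies whose pickled state has changed trigger a reload and reconfiguration. A minimal sketch of that change-detection loop, with hashlib.md5 standing in for new_md5 and a hypothetical loader in place of loadFramework:

    # Sketch of the change-detection loop; all names here are stand-ins.
    import hashlib
    import pickle

    def load_framework(path):
        # hypothetical loader; the real code reloads a saved configure framework
        return {'path': path, 'packages': ['mpi']}

    dependencies = {'/opt/petsc/conf': 'stale-digest'}   # hypothetical stored digests
    for dependency, digest in dependencies.items():
        framework = load_framework(dependency)
        if digest == hashlib.md5(pickle.dumps(framework)).hexdigest():
            continue                                     # digest unchanged: skip
        print('Configure dependency from ' + dependency + ' has changed. Reloading...')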
Example #7
 def addPackageDependency(self, dependency, depPath = None):
   if not dependency:
     return
   if isinstance(dependency, str):
     framework = self.loadFramework(dependency)
     if not framework:
       return
   else:
     framework = dependency
     if depPath:
       dependency = depPath
     else:
       dependency = os.path.dirname(dependency.__file__)
   self.dependencies[dependency] = new_md5(cPickle.dumps(framework)).hexdigest()
   self.logPrint('Added configure dependency from '+dependency+'('+str(self.dependencies[dependency])+')')
   for child in framework.childGraph.vertices:
     child.argDB = self.argDB
     child.showHelp = 0
     child.logName  = self.logName
     child.setup()
     self.childGraph.replaceVertex(self.require(child.__module__, None), child)
   return
Example #8
  def processFileSetModule(self, set):
    '''Compile all the files in "set" using a module directly'''
    if not len(set): return self.output
    import nargs
    import sourceDatabase

    # Check for cached output
    #   We could of course hash this big key again
    #   These keys could be local, but we can do that if they proliferate too much. It would mean
    #     that each project would have to compile the SIDL once
    flags    = self.getFlags(set)
    cacheKey = 'cacheKey'+''.join([sourceDatabase.SourceDB.getChecksum(f) for f in set]+[new_md5(''.join(flags)).hexdigest()])
    if set.tag.startswith('old') and cacheKey in self.argDB:
      self.debugPrint('Loading '+str(set)+' for a '+self.language+' '+self.action+' from argument database ('+cacheKey+')', 3, 'compile')
      outputFiles = cPickle.loads(self.argDB[cacheKey])
    else:
      # Save targets so that they do not interfere with Scandal
      target            = self.argDB.target
      self.argDB.target = []
      # Run compiler and reporter
      compiler = self.getCompilerModule().Scandal(flags+set)
      if not set.tag.startswith('old'):
        self.debugPrint('Compiling '+str(set)+' into a '+self.language+' '+self.action, 3, 'compile')
        self.debugPrint('  with flags '+str(flags), 4, 'compile')
        compiler.run()
      else:
        self.debugPrint('Reporting on '+str(set)+' for a '+self.language+' '+self.action, 3, 'compile')
        self.debugPrint('  with flags '+str(flags), 4, 'compile')
        compiler.report()
      outputFiles          = compiler.outputFiles
      self.argDB[cacheKey] = cPickle.dumps(outputFiles)
      # Restore targets and remove flags
      self.argDB.target = target
      for flag in flags:
        del self.argDB[nargs.Arg.parseArgument(flag)[0]]
    # Construct output
    tag = self.outputTag
    if self.isServer:
      (package, ext) = os.path.splitext(os.path.basename(set[0]))
      tag           += ' '+package
    self.output.children.append(build.fileset.RootedFileSet(self.usingSIDL.project.getUrl(), outputFiles, tag = tag))
    return self.output