def parallelRun(self):
    '''
    Runs the Sampler in parallel and collects solutions.
    Input: none
    Output: none
    '''
    self.outq=sque() #to dump (nRun,soln) in
    ps=[]
    wantprocs = self.input_file('Problem/numprocs',1)
    self.numprocs = min(wantprocs,multiprocessing.cpu_count())
    trackThousand = 0
    trials = int(self.sampler.totalSamples)
    print '\nRunning %1.0e samples in parallel...' %trials
    ps=[]
    self.numPs=0
    self.done=False
    self.histories={}
    self.histories['varNames']=self.varDict.keys()
    self.histories['vars']=self.varDict.values()
    self.histories['varPaths']=[]
    self.histories['varVals']=[]
    for var in self.varDict.values():
      #need to preset these so they can be filled with nRun?
      self.histories['varPaths'].append(var.path)
    print '  ...uncertain variables:',self.histories['varNames'],'...'
    if not self.restart:
      self.total_runs = -1
      trialsLeft = trials
      trialsAtRestart = 0
      #tempOutFile = file('solns.out','w')
    else: #start from restart
      print '\nStarting from restart...'
      print '  Starting after run',self.total_runs
      trialsAtRestart = self.total_runs
      trialsLeft = trials - self.total_runs
    print '  ...using',self.numprocs,'processors...'

    printFreq = self.input_file('Sampler/MC/printfreq',1000)
    print '  ...print frequency is',printFreq,'...'

    trialsPerProc = int(trials/float(self.numprocs))
    trialsPerProc = min(trialsPerProc,int(ceil(printFreq/4.0)))
    trialsPerProc = min(trialsPerProc,1000)
    mesh_size = self.input_file('Problem/mesh_factor',1)

    self.done=False
    runDict={}
    starttime=time.time()
    rcvProc=0
    lastPrint=0
    doAPrint = False
    thrown=0
    print '\nFinished Run | Time Elapsed | Est. Remaining',
    print '| Number Discarded Solutions'
    while not self.done:
      #remove dead processes (iterate over a copy so removal is safe)
      for proc in ps[:]:
        if not proc.is_alive():
          proc.join()
          ps.remove(proc)
          rcvProc+=1
          while not self.outq.empty():
            slns,newthrown = list(self.outq.get())
            thrown+=newthrown
            lastPrint+=len(slns)
            if lastPrint >= printFreq:
              self.backends['PDF'].addToBins(slns,True)
              doAPrint=True
              lastPrint=0
            else:
              self.backends['PDF'].addToBins(slns)
          self.savestate(self.backends['PDF'].savedict())
          if rcvProc==self.numprocs:
            rcvProc=0
            if doAPrint:
              doAPrint = False
              lastPrint=0
              #print progress
              #FIXME fix for restart case
              finished = trials-trialsLeft
              totDone = finished + trialsAtRestart
              if self.restart:
                finished -= trialsAtRestart
              elapTime = time.time()-starttime
              dpdt = float(finished)/float(elapTime)
              toGo = dt.timedelta(seconds=(int(trialsLeft/dpdt)))
              elapTime = dt.timedelta(seconds=int(elapTime))
              print '%12i | %12s | %12s | %9i' %(totDone,elapTime,toGo,thrown),
              print '                      \r',
      if trialsLeft > 0:
        while len(ps)<self.numprocs and not self.done:
          if trialsLeft > trialsPerProc: newtrials = trialsPerProc
          else: newtrials = trialsLeft
          trialsLeft -= newtrials
          self.numPs+=1
          runDict['fileChange']={}
          runDict['fileChange']['Mesh/nx_per_reg']=mesh_size
          runDict['fileChange']['Mesh/ny_per_reg']=mesh_size
          ps.append(multiprocessing.Process(
                      target=self.runSample,
                      args=(trackThousand,newtrials,runDict)))
          ps[-1].start()
          trackThousand+=1
      self.done = self.outq.empty() and len(ps)==0
    print '\n'
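For context, the collection loop above relies on two pieces not shown in this example: a queue factory `sque` (presumably a `multiprocessing` queue) and a worker method `runSample` that pushes one `(solutions, discarded_count)` tuple onto `self.outq` per batch. The following is a minimal, self-contained sketch of that producer/consumer contract; `evaluate` is a hypothetical stand-in for the real per-sample model run, and the queue is passed explicitly instead of through `self`.

# Hedged sketch of the worker contract assumed by the loop above.
# Assumptions: sque() behaves like multiprocessing.Queue(); evaluate() is a
# hypothetical stand-in for one model evaluation that can be discarded.
import multiprocessing
import random

def evaluate(seed):
  random.seed(seed)
  value = random.random()
  return value if value > 0.05 else None  # None marks a discarded solution

def runSample(outq, trackThousand, newtrials):
  # one batch per worker: report (list_of_solutions, number_discarded)
  slns, thrown = [], 0
  for i in range(newtrials):
    soln = evaluate(trackThousand*1000 + i)
    if soln is None:
      thrown += 1
    else:
      slns.append(soln)
  outq.put((slns, thrown))

if __name__ == '__main__':
  outq = multiprocessing.Queue()
  ps = [multiprocessing.Process(target=runSample, args=(outq, k, 100))
        for k in range(4)]
  for p in ps:
    p.start()
  for _ in ps:
    slns, thrown = outq.get()  # one tuple per worker, as in the loop above
    print('received %i solutions, %i discarded' % (len(slns), thrown))
  for p in ps:
    p.join()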
Example 2
 def __init__(self,ords,case):
   self.expOrds = ords
   self.case = case
   self.coeffs={}
   self.outq=sque()
 def parallelRun(self):
   '''
   Runs the Sampler in parallel and collects solutions.
   Input: none
   Output: none
   '''
   self.outq=sque() #to dump (nRun,soln) in
   ps=[]
   wantprocs = self.input_file('Problem/numprocs',1)
   self.numprocs = min(wantprocs,multiprocessing.cpu_count())
   if not self.restart:
     print '\nRunning samples in parallel...'
     self.total_runs = -1
     ps=[]
     self.done=False
     self.histories={}
     self.histories['varNames']=self.varDict.keys()
     self.histories['vars']=self.varDict.values()
     self.histories['varPaths']=[]
     self.histories['varVals']=[]
     self.histories['nRun']=[]
     self.histories['soln']=[]
     for var in self.varDict.values():
       #need to preset these so they can be filled with nRun?
       self.histories['varPaths'].append(var.path)
     print '  ...uncertain variables:',self.histories['varNames']
     tempOutFile = file('solns.out','w')
   else: #start from restart
     print '\nStarting from restart...'
     print '  Starting after run',self.total_runs
     trialsAtRestart = self.total_runs
   print '  ...using',self.numprocs,'processors...'
   self.numPs=0
   finished=0
   while not self.done:
     #remove dead processes (iterate over a copy so removal is safe)
     for proc in ps[:]:
       if not proc.is_alive():
         proc.join()
         while not self.outq.empty():
           n,sln = self.outq.get()
           self.histories['soln'][n]=sln
           finished+=1
         print 'Runs finished:',finished,
         ps.remove(proc)
         #save state
     if not self.sampler.converged:
       while len(ps)<self.numprocs and not self.sampler.converged:
         self.numPs+=1
         self.total_runs+=1
         #TODO print single line, started/finished runs
         print 'Runs started:',self.total_runs,'\r',
         try:
           tot1 = self.backends['PDF'].tot1
           tot2 = self.backends['PDF'].tot2
           N = self.backends['PDF'].N
           runDict = self.sampler.giveSample([N,tot1,tot2])
         except KeyError:
           runDict = self.sampler.giveSample()
         self.histories['nRun'].append(self.total_runs)
         self.histories['soln'].append(0) #placeholder
         #print self.sampler.type
         for key in runDict.keys():
           try: self.histories[key].append(runDict[key])
           except KeyError:
             self.histories[key]=[runDict[key]]
         #  print '  ',key,self.histories[key][-1]
         #add flux output identifier to change dict
         #TODO this expects the same output block for everything!
         runDict['fileChange']={}
         outFileName='run.out'+str(self.total_runs)
         runDict['fileChange']['Output/file']=outFileName
         mesh_size = self.input_file('Problem/mesh_factor',1)
         runDict['fileChange']['Mesh/nx_per_reg']=mesh_size
         runDict['fileChange']['Mesh/ny_per_reg']=mesh_size
         inp_file = self.ie.writeInput(self.templateFile,
                              self.inputDir,
                              self.histories['varPaths'],
                              runDict['varVals'],
                              runDict['fileChange'],
                              self.total_runs)
         ps.append(multiprocessing.Process(
                     target=self.runSample,
                     args=(self.total_runs,inp_file,outFileName)))
         ps[-1].start()
     self.done = (len(ps)==0) and self.sampler.converged
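This variant assumes a different `runSample` contract: each worker handles a single input file and pushes one `(nRun, soln)` pair onto `self.outq`, which the parent slots into the matching placeholder in `self.histories['soln']`. Below is a minimal, self-contained sketch of that contract; writing and re-reading a dummy scalar stands in for running the solver on `inp_file` and parsing `outFileName`, `inp_file` is kept only to mirror the real signature, and the queue is passed explicitly rather than through `self`.

# Hedged sketch of the per-run worker assumed above. The real runSample
# would execute the solver on inp_file and parse the solution out of
# outFileName; here a dummy scalar is written and read back instead.
import multiprocessing

def runSample(outq, nRun, inp_file, outFileName):
  # inp_file unused in this sketch; retained to mirror the real signature
  with open(outFileName, 'w') as f:
    f.write(str(2.0*nRun))          # stand-in for the solver writing output
  with open(outFileName, 'r') as f:
    soln = float(f.read())          # stand-in for parsing the output file
  outq.put((nRun, soln))

if __name__ == '__main__':
  outq = multiprocessing.Queue()    # assumption: sque() wraps this
  histories = {'soln': [0, 0, 0]}   # placeholders, as appended in the loop
  ps = []
  for run in range(3):
    ps.append(multiprocessing.Process(target=runSample,
                args=(outq, run, 'run.inp'+str(run), 'run.out'+str(run))))
    ps[-1].start()
  for _ in ps:
    n, sln = outq.get()             # fill the placeholder for run n
    histories['soln'][n] = sln
  for p in ps:
    p.join()
  print(histories['soln'])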
Example 7
 def __init__(self,ords):
   self.coeffs={}
   self.ords = ords
   self.outq=sque()
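`sque` is not defined in any of these snippets; presumably it is an alias for a multiprocessing queue imported elsewhere in the module, for example (an assumption, not shown in the source):

# Assumed elsewhere in the module (not shown in these examples):
from multiprocessing import Queue as sque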