Example 1
  # requires "import numpy as np"; "utils" is RAVEN's utils module providing find_le_index
  def _computeWeightedPercentile(self, arrayIn, pbWeight, percent=0.5):
    """
      Method to compute the weighted percentile of an array of data
      @ In, arrayIn, list/numpy.array, the array of values from which the percentile needs to be estimated
      @ In, pbWeight, list/numpy.array, the reliability weights that correspond to the values in 'arrayIn'
      @ In, percent, float, the percentile that needs to be computed (between 0.01 and 1.0)
      @ Out, result, float, the percentile
    """
    arrayIn  = np.asarray(arrayIn)
    pbWeight = np.asarray(pbWeight)
    idxs     = np.argsort(arrayIn)
    # Inserting [0.0, arrayIn[idxs[0]]] is needed when few samples are generated and
    # a percentile smaller than the first pb weight is requested. Otherwise the median
    # is returned (and that is wrong).
    sortedWeightsAndPoints = np.insert(np.column_stack((pbWeight[idxs], arrayIn[idxs])), 0, [0.0, arrayIn[idxs[0]]], axis=0)
    weightsCDF             = np.cumsum(sortedWeightsAndPoints[:, 0])
    try:
      index  = utils.find_le_index(weightsCDF, percent)
      result = sortedWeightsAndPoints[index, 1]
    except ValueError:
      result = np.median(arrayIn)
    return result
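
For orientation, here is a minimal standalone sketch of the same weighted-percentile idea, assuming plain NumPy and using np.searchsorted as a stand-in for RAVEN's utils.find_le_index; the function name, data, and weights below are illustrative only, not part of the original code.

import numpy as np

def weighted_percentile(values, weights, percent=0.5):
  # Sort the samples by value and carry the matching weights along.
  values  = np.asarray(values, dtype=float)
  weights = np.asarray(weights, dtype=float)
  order   = np.argsort(values)
  sortedValues  = values[order]
  sortedWeights = weights[order]
  # Prepend a zero-weight copy of the smallest value, mirroring the insert above,
  # so that a percent below the first cumulative weight still maps to a sample.
  sortedValues  = np.insert(sortedValues, 0, sortedValues[0])
  sortedWeights = np.insert(sortedWeights, 0, 0.0)
  weightsCDF = np.cumsum(sortedWeights)
  # Index of the last cumulative weight <= percent (the role of utils.find_le_index).
  index = np.searchsorted(weightsCDF, percent, side='right') - 1
  return sortedValues[max(index, 0)]

# Illustrative call: with equal weights, the 0.75 percentile of [1, 2, 3, 4] is 3.0,
# i.e. the largest sample whose cumulative weight does not exceed 0.75.
print(weighted_percentile([3.0, 1.0, 2.0, 4.0], [0.25, 0.25, 0.25, 0.25], 0.75))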
Example 2
  def localGenerateInput(self,model,myInput):
    """
      Function to select the next most informative point for refining the limit
      surface search.
      After this method is called, the self.inputInfo should be ready to be sent
      to the model
      @ In, model, model instance, an instance of a model
      @ In, myInput, list, a list of the original needed inputs for the model (e.g. list of files, etc.)
      @ Out, None
    """
    if self.startAdaptive and self.adaptiveReady:
      LimitSurfaceSearch.localGenerateInput(self,model,myInput)
      # the adaptive sampler created the sampled vars for the next point
      # find the closest branch
      if self.hybridDETstrategy is not None:
        closestBranch, cdfValues, treer = self._checkClosestBranch()
      else:
        closestBranch, cdfValues = self._checkClosestBranch()
      if closestBranch is None:
        self.raiseADebug('A usable branch for the next candidate has not been found => create a parallel branch!')
      # add pb thresholds to the grid
      investigatedPoint = {}
      for key,value in cdfValues.items():
        ind = utils.find_le_index(self.branchProbabilities[key],value)
        if not ind: ind = 0
        if value not in self.branchProbabilities[key]:
          self.branchProbabilities[key].insert(ind,value)
          self.branchValues[key].insert(ind,self.distDict[key].ppf(value))
        investigatedPoint[key] = value
      # collect investigated point
      self.investigatedPoints.append(investigatedPoint)

      if closestBranch:
        info = self._retrieveBranchInfo(closestBranch)
        self._constructEndInfoFromBranch(model, myInput, info, cdfValues)
      else:
        # create a new tree, since there are no branches that are close enough to the adaptive request
        elm = ETS.HierarchicalNode(self.messageHandler,self.name + '_' + str(len(self.TreeInfo.keys())+1))
        elm.add('name', self.name + '_'+ str(len(self.TreeInfo.keys())+1))
        elm.add('startTime', 0.0)
        # Initialize the endTime to be equal to the start time...
        # It will be modified at the end of each branch
        elm.add('endTime', 0.0)
        elm.add('runEnded',False)
        elm.add('running',True)
        elm.add('queue',False)
        elm.add('completedHistory', False)
        branchedLevel = {}
        for key, value in cdfValues.items():
          branchedLevel[key] = utils.index(self.branchProbabilities[key], value)
        # The dictionary branchedLevel is stored in the xml tree too. That's because
        # the advancement of the thresholds must follow the tree structure
        elm.add('branchedLevel', branchedLevel)
        if self.hybridDETstrategy is not None and not self.foundEpistemicTree:
          # adaptive hybrid DET and not found a tree in the epistemic space
          # take the first tree and modify the hybridsamplerCoordinate
          hybridSampled = copy.deepcopy(list(self.TreeInfo.values())[0].getrootnode().get('hybridsamplerCoordinate'))
          for hybridStrategy in hybridSampled:
            for key in self.epistemicVariables.keys():
              if key in hybridStrategy['SampledVars'].keys():
                self.raiseADebug("epistemic var " + str(key)+" value = "+str(self.values[key]))
                hybridStrategy['SampledVars'][key]   = copy.copy(self.values[key])
                hybridStrategy['SampledVarsPb'][key] = self.distDict[key].pdf(self.values[key])
                hybridStrategy['prefix'] = len(self.TreeInfo.values())+1
            # TODO: find a strategy to recompute the probability weight here (for now == PointProbability)
            # note: in Python 3, 'reduce' must be imported from functools (and 'mul' from operator)
            hybridStrategy['PointProbability']  = reduce(mul, self.inputInfo['SampledVarsPb'].values())
            hybridStrategy['ProbabilityWeight'] = reduce(mul, self.inputInfo['SampledVarsPb'].values())
          elm.add('hybridsamplerCoordinate', hybridSampled)
        # All the info regarding the DET is stored here => we create the info for all the branchings and store them
        self.TreeInfo[self.name + '_' + str(len(self.TreeInfo.keys())+1)] = ETS.HierarchicalTree(self.messageHandler,elm)
        self._createRunningQueueBeginOne(self.TreeInfo[self.name + '_' + str(len(self.TreeInfo.keys()))],branchedLevel, model,myInput)
    return DynamicEventTree.localGenerateInput(self,model,myInput)
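
As a side note on the threshold bookkeeping above (the update of self.branchProbabilities and self.branchValues), here is a minimal sketch of the same insert-by-CDF pattern outside RAVEN. It assumes bisect.bisect_right as a stand-in for utils.find_le_index and a SciPy normal distribution in place of self.distDict[key]; all names and data are illustrative.

import bisect
from scipy.stats import norm

# Hypothetical per-variable bookkeeping: sorted CDF thresholds and matching values.
branchProbabilities = {'x': [0.25, 0.5, 0.75]}
branchValues        = {'x': [norm.ppf(p) for p in branchProbabilities['x']]}

def registerCdfValue(key, cdfValue, dist=norm):
  """Insert a newly investigated CDF value, and the physical value obtained via
     the inverse CDF (ppf), into the sorted threshold lists, keeping them aligned."""
  probs = branchProbabilities[key]
  if cdfValue not in probs:
    # Insertion index that keeps the thresholds sorted (the role of utils.find_le_index above).
    ind = bisect.bisect_right(probs, cdfValue)
    probs.insert(ind, cdfValue)
    branchValues[key].insert(ind, dist.ppf(cdfValue))

registerCdfValue('x', 0.6)
print(branchProbabilities['x'])  # [0.25, 0.5, 0.6, 0.75]
print(branchValues['x'])         # corresponding quantiles of the standard normal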