Example #1
File: Optimizer.py  Project: dylanjm/raven
  def initialize(self, externalSeeding=None, solutionExport=None):
    """
      This function should be called every time a clean optimizer is needed. Called before takeAstep in <Step>
      @ In, externalSeeding, int, optional, external seed
      @ In, solutionExport, DataObject, optional, a PointSet to hold the solution
      @ Out, None
    """
    AdaptiveSampler.initialize(self, externalSeeding=externalSeeding, solutionExport=solutionExport)
    # functional constraints
    for entry in self.assemblerDict.get('Constraint', []):
      self._constraintFunctions.append(entry[3])

    for entry in self.assemblerDict.get('ImplicitConstraint', []):
      self._impConstraintFunctions.append(entry[3])
    # sampler
    self._initializeInitSampler(externalSeeding)
    # seed
    if self._seed is not None:
      randomUtils.randomSeed(self._seed)
    # variable bounds
    self._variableBounds = {}
    for var in self.toBeSampled:
      dist = self.distDict[var]
      lower = dist.lowerBound if dist.lowerBound is not None else -np.inf
      upper = dist.upperBound if dist.upperBound is not None else np.inf
      self._variableBounds[var] = [lower, upper]
      self.raiseADebug('Set bounds for opt var "{}" to {}'.format(var, self._variableBounds[var]))
    # trajectory initialization
    for i, init in enumerate(self._initialValues):
      self._initialValues[i] = self.normalizeData(init)
      self.initializeTrajectory()
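
The bounds loop above is a small self-contained pattern: a missing distribution bound defaults to +/-inf, so downstream code can clip against the bounds dictionary uniformly. A minimal standalone sketch of that pattern, with a plain stand-in class in place of RAVEN's distribution objects:

import numpy as np

class Dist:
  # stand-in for a RAVEN distribution; only the bound attributes are assumed here
  def __init__(self, lowerBound=None, upperBound=None):
    self.lowerBound = lowerBound
    self.upperBound = upperBound

def collectBounds(distDict):
  # map each variable to [lower, upper], substituting +/-inf for missing bounds
  bounds = {}
  for var, dist in distDict.items():
    lower = dist.lowerBound if dist.lowerBound is not None else -np.inf
    upper = dist.upperBound if dist.upperBound is not None else np.inf
    bounds[var] = [lower, upper]
  return bounds

print(collectBounds({'x': Dist(0.0, 1.0), 'y': Dist(upperBound=5.0)}))
# {'x': [0.0, 1.0], 'y': [-inf, 5.0]}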
Example #2
  def __init__(self):
    """
      Default Constructor that will initialize member variables with reasonable
      defaults or empty lists/dictionaries where applicable.
      @ In, None
      @ Out, None
    """
    AdaptiveSampler.__init__(self)
    self._initialValues = {}  # dict stores the user-provided initial values, i.e. {var: val}
    self._updateValues = {}   # dict stores input variable values for the current MCMC iteration, i.e. {var: val}
    self._proposal = {}       # dict stores the proposal distributions for input variables, i.e. {var: dist}
    self._priorFuns = {}      # dict stores the prior functions for input variables, i.e. {var: fun}
    self._burnIn = 0          # number of initial samples that will be discarded
    self._likelihood = None   # stores the output of the likelihood
    self._logLikelihood = False  # True if the user-provided likelihood is in log format
    self._availProposal = {'normal': Distributions.Normal(0.0, 1.0),
                           'uniform': Distributions.Uniform(-1.0, 1.0)}  # available proposal distributions
    self._acceptDist = Distributions.Uniform(0.0, 1.0)  # uniform distribution used for the accept/reject step
    self.toBeCalibrated = {}  # parameters that will be calibrated
    # assembler objects
    self.addAssemblerObject('proposal', InputData.Quantity.zero_to_infinity)
    self.addAssemblerObject('probabilityFunction', InputData.Quantity.zero_to_infinity)
Example #3
File: Optimizer.py  Project: dylanjm/raven
  def __init__(self):
    """
      Constructor.
      @ In, None
      @ Out, None
    """
    AdaptiveSampler.__init__(self)
    ## Instance Variable Initialization
    # public
    # _protected
    self._seed = None           # random seed to apply
    self._minMax = 'min'        # maximization or minimization?
    self._activeTraj = []       # tracks live trajectories
    self._cancelledTraj = {}    # tracks cancelled trajectories, and reasons
    self._convergedTraj = {}    # tracks converged trajectories, and values obtained
    self._numRepeatSamples = 1  # number of times to repeat sampling (e.g. denoising)
    self._objectiveVar = None   # objective variable for optimization
    self._initialValues = None  # initial variable values (trajectory starting locations), list of dicts
    self._variableBounds = None # dictionary of upper/lower bounds for each variable (may be inf?)
    self._trajCounter = 0       # tracks numbers to assign to trajectories
    self._initSampler = None    # sampler to use for picking initial seeds
    self._constraintFunctions = [] # list of constraint functions
    self._impConstraintFunctions = [] # list of implicit constraint functions
    # __private
    # additional methods
    self.addAssemblerObject('Constraint', '-1')      # Explicit (input-based) constraints
    self.addAssemblerObject('ImplicitConstraint', '-1')      # Implicit constraints
    self.addAssemblerObject('Sampler', '-1')          # This Sampler can be used to initialize the optimization initial points (e.g. partially replace the <initial> blocks for some variables)

    # register adaptive sample identification criteria
    self.registerIdentifier('traj') # the trajectory of interest
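
The trajectory containers initialized above follow a simple lifecycle: a counter hands out ids, live ids sit in _activeTraj, and finished ids move into _cancelledTraj or _convergedTraj along with a reason or a result. A hypothetical standalone sketch of that bookkeeping (illustrative only, not RAVEN's actual trajectory methods):

class TrajectoryBook:
  # minimal sketch of the trajectory bookkeeping pattern used above
  def __init__(self):
    self._trajCounter = 0
    self._activeTraj = []
    self._cancelledTraj = {}
    self._convergedTraj = {}

  def start(self):
    traj = self._trajCounter
    self._trajCounter += 1
    self._activeTraj.append(traj)
    return traj

  def cancel(self, traj, reason):
    self._activeTraj.remove(traj)
    self._cancelledTraj[traj] = reason

  def converge(self, traj, value):
    self._activeTraj.remove(traj)
    self._convergedTraj[traj] = value

book = TrajectoryBook()
t = book.start()
book.converge(t, {'ans': 42.0})
print(book._activeTraj, book._convergedTraj)  # [] {0: {'ans': 42.0}}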
Example #4
  def initialize(self, externalSeeding=None, solutionExport=None):
    """
      This function should be called every time a clean MCMC is needed. Called before takeAstep in <Step>
      @ In, externalSeeding, int, optional, external seed
      @ In, solutionExport, DataObject, optional, a PointSet to hold the solution
      @ Out, None
    """
    self._acceptDist.initializeDistribution()
    AdaptiveSampler.initialize(self, externalSeeding=externalSeeding, solutionExport=solutionExport)
    ## TODO: currently AdaptiveSampler still uses self.assemblerDict to retrieve the target evaluation.
    # We should change it to use the following method:
    # retrieve target evaluation
    # self._targetEvaluation = self.retrieveObjectFromAssemblerDict('TargetEvaluation', self._targetEvaluation)
    for var, priorFun in self._priorFuns.items():
      self._priorFuns[var] = self.retrieveObjectFromAssemblerDict('probabilityFunction', priorFun)
      if "pdf" not in self._priorFuns[var].availableMethods():
        self.raiseAnError(IOError,'Function', self._priorFuns[var], 'does not contain a method named "pdf". \
          It must be present if this needs to be used in a MCMC Sampler!')
        if not self._initialValues[var]:
          self.raiseAnError(IOError, '"initial" is required when using "probabilityFunction", but not found \
            for variable "{}"'.format(var))
      # initialize the input variable values
    for var, dist in self.distDict.items():
      distType = dist.getDistType()
      if distType != 'Continuous':
        self.raiseAnError(IOError, 'variable "{}" requires continuous distribution, but "{}" is provided!'.format(var, distType))

    meta = ['LogPosterior', 'AcceptRate']
    self.addMetaKeys(meta)
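
The prior-function check in this method is a duck-typing probe: the external function object must advertise a "pdf" method before it can serve as a prior. A minimal sketch of the same probe, using a hypothetical availableMethods stand-in for RAVEN's external-function interface:

class PriorFun:
  # stand-in for a RAVEN external function; availableMethods is assumed to
  # list the user-defined callables the object exposes
  def pdf(self, x):
    return 1.0  # flat prior, for illustration only

  def availableMethods(self):
    return [m for m in dir(self) if not m.startswith('_') and m != 'availableMethods']

fun = PriorFun()
if "pdf" not in fun.availableMethods():
  raise IOError('Function does not contain a method named "pdf".')
print(fun.pdf(0.3))  # 1.0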
Example #5
  def localFinalizeActualSampling(self, jobObject, model, myInput):
    """
      General function (available to all samplers) that finalizes the sampling
      calculation just ended. In this case, the function checks whether
      all the batch calculations have been performed.
      @ In, jobObject, instance, an instance of a JobHandler
      @ In, model, model instance, the instance of a RAVEN model
      @ In, myInput, list, the generating input
      @ Out, None
    """
    AdaptiveSampler.localFinalizeActualSampling(self, jobObject, model, myInput)
Example #6
File: Optimizer.py  Project: dylanjm/raven
 def _localGenerateAssembler(self, initDict):
   """
      It is used to send the objects that have been requested through the "whatDoINeed" method to the instantiated class that implements this method
     Overloads the base Sampler class since optimizer has different requirements
     @ In, initDict, dict, dictionary ({'mainClassName(e.g., Databases):{specializedObjectName(e.g.,DatabaseForSystemCodeNamedWolf):ObjectInstance}'})
     @ Out, None
   """
   AdaptiveSampler._localGenerateAssembler(self, initDict)
   # functions and distributions already collected
   self.assemblerDict['DataObjects'] = []
   self.assemblerDict['Distributions'] = []
   self.assemblerDict['Functions'] = []
   for mainClass in ['DataObjects', 'Distributions', 'Functions']:
     for funct in initDict[mainClass]:
       self.assemblerDict[mainClass].append([mainClass,
                                             initDict[mainClass][funct].type,
                                             funct,
                                             initDict[mainClass][funct]])
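
Each entry appended here is a 4-item list [mainClass, type, name, instance], which is why Example #1 retrieves the usable object with entry[3]. A tiny illustration with placeholder values:

entry = ['Functions', 'External', 'myConstraint', object()]  # [mainClass, type, name, instance]
mainClass, objType, name, instance = entry
assert entry[3] is instance  # index 3 carries the object itself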
Example #7
  def localStillReady(self, ready):
    """
      Determines whether the sampler is prepared to provide another input. If not, and
      if the jobHandler is finished, this will end sampling.
      @ In, ready, bool, a boolean representing whether the caller is prepared for another input
      @ Out, ready, bool, a boolean representing whether the sampler is prepared to provide another input
    """
    ready = AdaptiveSampler.localStillReady(self, ready)
    return ready
Example #8
 def _formatSolutionExportVariableNames(self, acceptable):
   """
     Does magic formatting for variables, based on this class's needs.
     Extend in inheritors as needed.
     @ In, acceptable, set, set of acceptable entries for solution export for this entity
     @ Out, acceptable, set, modified set of acceptable variables with all formatting complete
   """
   acceptable = AdaptiveSampler._formatSolutionExportVariableNames(self, acceptable)
   return acceptable
Example #9
File: Optimizer.py  Project: dylanjm/raven
 def _localWhatDoINeed(self):
   """
     This method is a local mirror of the general whatDoINeed method.
     It is implemented by the optimizers that need to request special objects
     @ In, None
     @ Out, needDict, dict, list of objects needed
   """
    needDict = AdaptiveSampler._localWhatDoINeed(self)
   needDict['Functions'] = [(None, 'all')]
   return needDict
Example #10
 def localFinalizeActualSampling(self, jobObject, model, myInput):
   """
      General function (available to all samplers) that finalizes the sampling
      calculation just ended. In this case, the function checks whether
      all the batch calculations have been performed
     @ In, jobObject, instance, an instance of a JobHandler
     @ In, model, model instance, it is the instance of a RAVEN model
     @ In, myInput, list, the generating input
     @ Out, None
   """
   self._localReady = True
   AdaptiveSampler.localFinalizeActualSampling(self, jobObject, model, myInput)
   prefix = jobObject.getMetadata()['prefix']
   full = self._targetEvaluation.realization(index=self.counter-1)
   rlz = dict((var, full[var]) for var in (list(self.toBeCalibrated.keys()) + [self._likelihood] + list(self.dependentSample.keys())))
   rlz['traceID'] = self.counter
   rlz['LogPosterior'] = self.inputInfo['LogPosterior']
   rlz['AcceptRate'] = self.inputInfo['AcceptRate']
   if self.counter == 1:
     self._addToSolutionExport(rlz)
     self._currentRlz = rlz
   if self.counter > 1:
     alpha = self._useRealization(rlz, self._currentRlz)
     self.netLogPosterior = alpha
     self._accepted = self._checkAcceptance(alpha)
     if self._accepted:
       self._currentRlz = rlz
       self._addToSolutionExport(rlz)
       self._updateValues = dict((var, rlz[var]) for var in self._updateValues)
     else:
       self._currentRlz.update({'traceID':self.counter, 'LogPosterior': self.inputInfo['LogPosterior'], 'AcceptRate':self.inputInfo['AcceptRate']})
       self._addToSolutionExport(self._currentRlz)
       self._updateValues = dict((var, self._currentRlz[var]) for var in self._updateValues)
   if self._tune:
      if self._accepted:
        self._acceptInTune += 1
     self._countsUntilTune -= 1
   ## tune scaling parameter
   if not self._countsUntilTune and self._tune:
     ### tune
     self._scaling = self.tuneScalingParam(self._scaling, self._acceptInTune/float(self._tuneInterval))
     ### reset counter
     self._countsUntilTune = self._tuneInterval
     self._acceptInTune = 0
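
This method leans on two helpers that are not shown in the listing, _checkAcceptance and tuneScalingParam. A minimal sketch of the textbook Metropolis accept/reject step and a common acceptance-rate tuning heuristic (the actual RAVEN implementations may differ):

import math
import random

def checkAcceptance(netLogPosterior):
  # Metropolis criterion: accept with probability min(1, exp(netLogPosterior))
  u = random.random()
  return u < math.exp(min(0.0, netLogPosterior))

def tuneScalingParam(scaling, acceptRate):
  # shrink the proposal when acceptance is too rare, widen it when too frequent;
  # the 0.2/0.5 targets are a common heuristic, not necessarily RAVEN's values
  if acceptRate < 0.2:
    scaling *= 0.9
  elif acceptRate > 0.5:
    scaling *= 1.1
  return scaling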
Example #11
File: Optimizer.py  Project: dylanjm/raven
 def amIreadyToProvideAnInput(self):
   """
      This method should be called by any user of the optimizer before requesting the generation of a new input.
      It acts as a "traffic light" for generating a new input.
      Reasons for not being ready include, for example, exceeding the number of model evaluations or meeting the convergence criteria.
     @ In, None
     @ Out, ready, bool, indicating the readiness of the optimizer to generate a new input.
   """
   # if any trajectories are still active, we're ready to provide an input
   ready = AdaptiveSampler.amIreadyToProvideAnInput(self)
   if not self._activeTraj:
     self.raiseADebug(' ... No active optimization trajectories.')
     ready = False
   return ready
Example #12
 def __init__(self):
   """
     Default Constructor that will initialize member variables with reasonable
     defaults or empty lists/dictionaries where applicable.
     @ In, None
     @ Out, None
   """
   AdaptiveSampler.__init__(self)
   self.onlySampleAfterCollecting = True
   self._initialValues = {} # dict stores the user provided initial values, i.e. {var: val}
   self._updateValues = {} # dict stores input variables values for the current MCMC iteration, i.e. {var:val}
   self._proposal = {} # dict stores the proposal distributions for input variables, i.e. {var:dist}
    self._proposalDist = {} # dict stores the input variables for each proposal distribution, i.e. {distName: [(var, dim)]}
    self._priorFuns = {} # dict stores the prior functions for input variables, i.e. {var:fun}
    self._burnIn = 0      # number of initial samples that will be discarded
   self._likelihood = None # stores the output from the likelihood
   self._logLikelihood = False # True if the user provided likelihood is in log format
   self._availProposal = {'normal': Distributions.Normal,
                          'multivariateNormal': Distributions.MultivariateNormal} # available proposal distributions
   self._acceptDist = Distributions.Uniform(0.0, 1.0) # uniform distribution for accept/rejection purpose
   self.toBeCalibrated = {} # parameters that will be calibrated
   self._correlated = False # True if input variables are correlated else False
    self.netLogPosterior = 0.0 # net change in log-posterior between the current and proposed samples
   self._localReady = True # True if the submitted job finished
   self._currentRlz = None # dict stores the current realizations, i.e. {var: val}
   self._acceptRate = 1. # The accept rate for MCMC
   self._acceptCount = 1 # The total number of accepted samples
   self._tune = True # Tune the scaling parameter if True
   self._tuneInterval = 100 # the number of sample steps for each tuning of scaling parameter
   self._scaling = 1.0 # The initial scaling parameter
    self._countsUntilTune = self._tuneInterval # remaining number of sample steps until the next tuning
    self._acceptInTune = 0 # number of accepted samples within the current tuning interval
    self._accepted = False # True if the current sample was accepted, False otherwise
    self._stdProposalDefault = 0.2 # initial scaling of the std of the proposal distribution (only applies to the default proposal)
   # assembler objects
   self.addAssemblerObject('proposal', InputData.Quantity.zero_to_infinity)
   self.addAssemblerObject('probabilityFunction', InputData.Quantity.zero_to_infinity)
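
The tuning counters declared above work as a simple countdown: every sample decrements _countsUntilTune, accepted samples increment _acceptInTune, and when the countdown hits zero the observed acceptance rate retunes _scaling and the counters reset (this is the block at the end of Example #10). A standalone sketch of that loop, reusing the checkAcceptance and tuneScalingParam sketched after Example #10:

tuneInterval = 100
countsUntilTune = tuneInterval
acceptInTune = 0
scaling = 1.0

for step in range(1000):
  accepted = checkAcceptance(-0.5)  # stand-in for the real accept/reject outcome
  if accepted:
    acceptInTune += 1
  countsUntilTune -= 1
  if not countsUntilTune:
    # retune on the acceptance rate observed over the last interval, then reset
    scaling = tuneScalingParam(scaling, acceptInTune / float(tuneInterval))
    countsUntilTune = tuneInterval
    acceptInTune = 0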
Example #13
  def initialize(self, externalSeeding=None, solutionExport=None):
    """
      This function should be called every time a clean MCMC is needed. Called before takeAstep in <Step>
      @ In, externalSeeding, int, optional, external seed
      @ In, solutionExport, DataObject, optional, a PointSet to hold the solution
      @ Out, None
    """
    # TODO: currently, we only consider the uncorrelated case
    # initialize distributions
    for _, dist in self._availProposal.items():
      dist.initializeDistribution()
    self._acceptDist.initializeDistribution()
    for var in self._updateValues:
      if var in self._proposal:
        self._proposal[var] = self.retrieveObjectFromAssemblerDict('proposal', self._proposal[var])
        distType = self._proposal[var].getDistType()
        if distType != 'Continuous':
          self.raiseAnError(IOError, 'variable "{}" requires a continuous proposal distribution, but "{}" is provided!'.format(var, distType))
      else:
        self._proposal[var] = self._availProposal['normal']

    AdaptiveSampler.initialize(self, externalSeeding=externalSeeding, solutionExport=solutionExport)
    ## TODO: currently AdaptiveSampler still uses self.assemblerDict to retrieve the target evaluation.
    # We should change it to use the following method:
    # retrieve target evaluation
    # self._targetEvaluation = self.retrieveObjectFromAssemblerDict('TargetEvaluation', self._targetEvaluation)

    for var, priorFun in self._priorFuns.items():
      self._priorFuns[var] = self.retrieveObjectFromAssemblerDict('probabilityFunction', priorFun)
      if "pdf" not in self._priorFuns[var].availableMethods():
        self.raiseAnError(IOError, 'Function', self._priorFuns[var], 'does not contain a method named "pdf". '
                          'It must be present if this function is to be used by an MCMC Sampler!')
      if not self._initialValues[var]:
        self.raiseAnError(IOError, '"initial" is required when using "probabilityFunction", but it was not found '
                          'for variable "{}"'.format(var))
    # initialize the input variable values
    for var, dist in self.distDict.items():
      totDim = self.variables2distributionsMapping[var]['totDim']
      distType = dist.getDistType()
      if distType != 'Continuous':
        self.raiseAnError(IOError, 'variable "{}" requires a continuous distribution, but "{}" is provided!'.format(var, distType))
      if totDim != 1:
        self.raiseAnError(IOError, 'Total dimension for given distribution {} should be 1'.format(dist.type))
      if self._updateValues[var] is None:
        value = dist.rvs()
        self._updateValues[var] = value
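
The final loop draws a starting point from the variable's own distribution whenever the user supplied no initial value. The same fallback in isolation, with SciPy distributions standing in for RAVEN's distribution API:

from scipy import stats

updateValues = {'a': 0.5, 'b': None}  # 'b' has no user-provided initial value
dists = {'a': stats.norm(0, 1), 'b': stats.uniform(0, 1)}

for var, dist in dists.items():
  if updateValues[var] is None:
    updateValues[var] = float(dist.rvs())  # fall back to a random draw
print(updateValues)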