Example #1
0
    def cost(x, axis=None, samples=Ns):
        """upper bound on expected model output

    Inputs:
        x: list of model hyperparameters
        axis: int, the index of y on which to find bound (all, by default)
        samples: int, number of samples, for a non-deterministic OUQ model

    Returns:
        upper bound on expected value of model output
        """
        # build a model, F(x|a), and tune "a" for optimal F.
        toy_ = wrap(d=x[0], e=x[1])(toy)  #NOTE: reduces nx by 2
        #print('building model F(x|a)...')
        model = WrapModel('model', model=toy_, nx=nx, ny=ny, rnd=False)

        # sample only when the model is stochastic
        rnd = samples if model.rnd else None  #FIX: was Ns, which ignored the `samples` argument
        #print('building UQ objective of expected model output...')
        b = ExpectedValue(model,
                          bnd,
                          constraint=scons,
                          cvalid=is_cons,
                          samples=rnd)
        i = counter.count()
        #print('solving for upper bound on expected model output...')
        solver = b.upper_bound(axis=axis, id=i, **param)
        # upper_bound returns a bare solver for a single axis; normalize to a tuple
        if not isinstance(solver, tuple):
            solver = (solver, )  #FIXME: save solver to DB (or pkl)
        if axis is None:
            results = tuple(-s.bestEnergy for s in solver)  #NOTE: -1 for LUB
            #print('[id: %s] %s' % (i, tuple(s.bestSolution for s in solver)))
        else:
            results = -solver[axis].bestEnergy  #NOTE: -1 for LUB
            #print('[id: %s] %s' % (i, solver[axis].bestSolution))
        return results
    def cost(x, axis=None, samples=Ns):
        """upper bound on expected model error, for surrogate and 'truth'

    Inputs:
        x: list of model hyperparameters
        axis: int, the index of y on which to find bound (all, by default)
        samples: int, number of samples, for a non-deterministic OUQ model

    Returns:
        upper bound on expected value of model error
        """
        # CASE 0: |F(x|a) - F'(x|a')|, no G. Tune "a" for optimal F, a = x[-2:]
        toy_ = wrap(d=x[0], e=x[1])(toy)
        #print('building model F(x|a) of truth...')
        model = WrapModel('model', model=toy_, nx=nx, ny=ny, rnd=False)

        #print('building UQ model of model error...')
        error = ErrorModel('error', model=truth, surrogate=model)

        # sample only when the error model is stochastic
        rnd = samples if error.rnd else None  #FIX: was Ns, which ignored the `samples` argument
        #print('building UQ objective of expected model error...')
        b = ExpectedValue(error,
                          bnd,
                          constraint=scons,
                          cvalid=is_cons,
                          samples=rnd)
        i = counter.count()
        #print('solving for upper bound on expected model error...')
        solver = b.upper_bound(axis=axis, id=i, **param)
        # upper_bound returns a bare solver for a single axis; normalize to a tuple
        if not isinstance(solver, tuple):
            solver = (solver, )  #FIXME: save solver to DB (or pkl)
        if axis is None:
            results = tuple(-s.bestEnergy for s in solver)  #NOTE: -1 for LUB
            #print('[id: %s] %s' % (i, tuple(s.bestSolution for s in solver)))
        else:
            results = -solver[axis].bestEnergy  #NOTE: -1 for LUB
            #print('[id: %s] %s' % (i, solver[axis].bestSolution))
        return results
# *) F(x|a) "is callable". Update "d" in G(d), then actively update G(x|A).

# CASE 0: |F(x|a) - F'(x|a')|, no G. Tune "a" for optimal F, a = x[-2:]

if __name__ == '__main__':

    # Select the toy model under study. The commented alternates differ in
    # nx (number of model inputs) and ny (number of outputs; None appears to
    # mean a scalar-valued model — confirm against the toys module).
    #from toys import cost5x3 as toy; nx = 5; ny = 3
    #from toys import function5x3 as toy; nx = 5; ny = 3
    #from toys import cost5x1 as toy; nx = 5; ny = 1
    #from toys import function5x1 as toy; nx = 5; ny = 1
    #from toys import cost5 as toy; nx = 5; ny = None
    from toys import function5 as toy
    nx = 5
    ny = None
    from toys import wrap
    # presumably wrap fixes the hyperparameters d and e at 10, removing two
    # free inputs from the model — verify against toys.wrap
    toy3 = wrap(d=10, e=10)(toy)
    nx = nx - 2  #NOTE: reduces nx by 2

    # update 'inner-loop' optimization parameters
    from misc import param, npts, wlb, wub, is_cons, scons
    from ouq import ExpectedValue
    from mystic.monitors import VerboseLoggingMonitor, Monitor, VerboseMonitor
    from mystic.termination import VTRChangeOverGeneration as VTRCOG
    from mystic.termination import Or, VTR, ChangeOverGeneration as COG
    # stop when the change over 100 generations falls below 1e-10
    param['opts']['termination'] = COG(1e-10, 100)  #NOTE: short stop?
    param['npop'] = 160  #NOTE: increase if results.txt is not monotonic
    # progress monitor: log to 'log.txt', reporting every 20 generations
    param['stepmon'] = VerboseLoggingMonitor(1,
                                             20,
                                             filename='log.txt',
                                             label='output')