Example #1
0
def testPlot4(trials=40, maxsteps=512):
    """Compare vSGD and vSGDfd on a high-noise stochastic quadratic.

    Draws a shared log-scale loss-trace plot in the bottom half of the
    figure and one per-algorithm heatmap in the top row, then shows the
    figure.

    :param trials: number of independent trials (also the problem dimension).
    :param maxsteps: number of optimization steps per trial.
    """
    fun = StochQuad(noiseLevel=100., curvature=1)
    recorder = FunctionWrapper(trials, fun, record_samples=True)
    recorder.nextSamples(100000)
    # Replay the exact recorded sample sequence for every algorithm so
    # both optimizers see identical noise.
    fwrap = DataFunctionWrapper(recorder._seen, fun, shuffling=False)

    algos = [(vSGD, {'batch_size': 1}),
             (vSGDfd, {'batch_size': 1})]
    for idx, (algo, params) in enumerate(algos):
        # All loss traces share the bottom subplot.
        pylab.subplot(2, 1, 2)
        fwrap.reset()
        traces = lossTraces(fwrap=fwrap, aclass=algo, dim=trials,
                            maxsteps=maxsteps, algoparams=params)
        plotWithPercentiles(traces, algo_colors[algo], algo.__name__)
        pylab.semilogy()
        pylab.xlim(0, maxsteps)
        pylab.legend()

        # Each algorithm gets its own heatmap in the top row.
        pylab.subplot(2, 2, idx + 1)
        fwrap.reset()
        plotHeatmap(fwrap, algo, params, trials, maxsteps)

    pylab.show()
Example #2
0
def testPlot2(trials=51, maxsteps=5000):
    """Plot percentile loss traces for several averaging/oracle algorithms
    on a noisy quadratic whose optimum jumps at step 1000.

    :param trials: number of independent trials (also the problem dimension).
    :param maxsteps: number of optimization steps per trial.
    """
    jumping = OptimumJumper(StochQuad(noiseLevel=10, curvature=1),
                            jumptime=1000, jumpdist_std=1)
    f = FunctionWrapper(trials, jumping)
    # NOTE(review): other configurations (SGD, AveragingSGD with various
    # learning rates/decays, AdaGrad, Amari, RMSProp, vSGD, vSGDfd) were
    # present but disabled in the original experiment.
    algorithms = [
        (AveragingOracle, {}),
        (AveragingOracle, {"fixedDecay": 0.1}),
        (AdaptivelyAveragingOracle, {}),
        (OracleSGD, {}),
    ]
    for algo, params in algorithms:
        traces = lossTraces(fwrap=f, aclass=algo, dim=trials,
                            maxsteps=maxsteps, algoparams=params)
        plotWithPercentiles(traces, algo_colors[algo], algo.__name__)
    pylab.semilogy()
    pylab.xlim(0, maxsteps)
    pylab.legend()
    pylab.show()
Example #3
0
def testPlot4(trials=40, maxsteps=512):
    """Side-by-side comparison of vSGD and vSGDfd on a noisy quadratic.

    The bottom half of the figure accumulates log-scale loss traces for
    both algorithms; the top row holds one heatmap per algorithm.

    :param trials: number of independent trials (also the problem dimension).
    :param maxsteps: number of optimization steps per trial.
    """
    fun = StochQuad(noiseLevel=100., curvature=1)
    sampler = FunctionWrapper(trials, fun, record_samples=True)
    sampler.nextSamples(100000)
    # Freeze the recorded samples so each algorithm replays the same data.
    fwrap = DataFunctionWrapper(sampler._seen, fun, shuffling=False)

    for position, (algoclass, algoparams) in enumerate(
            [(vSGD, {'batch_size': 1}),
             (vSGDfd, {'batch_size': 1})], start=1):
        # Shared loss-trace panel (bottom half).
        pylab.subplot(2, 1, 2)
        fwrap.reset()
        losses = lossTraces(fwrap=fwrap, aclass=algoclass, dim=trials,
                            maxsteps=maxsteps, algoparams=algoparams)
        plotWithPercentiles(losses, algo_colors[algoclass], algoclass.__name__)
        pylab.semilogy()
        pylab.xlim(0, maxsteps)
        pylab.legend()

        # Per-algorithm heatmap panel (top row, one column each).
        pylab.subplot(2, 2, position)
        fwrap.reset()
        plotHeatmap(fwrap, algoclass, algoparams, trials, maxsteps)

    pylab.show()
Example #4
0
def testPlot2(trials=51, maxsteps=5000):
    """Loss-trace comparison of averaging/oracle optimizers on a noisy
    quadratic with a single optimum jump at step 1000.

    :param trials: number of independent trials (also the problem dimension).
    :param maxsteps: number of optimization steps per trial.
    """
    base = StochQuad(noiseLevel=10, curvature=1)
    f = FunctionWrapper(trials,
                        OptimumJumper(base, jumptime=1000, jumpdist_std=1))
    # NOTE(review): the original listed many disabled alternatives here
    # (SGD, AveragingSGD variants, AdaGrad, Amari, RMSProp, vSGD, vSGDfd).
    for algo, params in ((AveragingOracle, {}),
                         (AveragingOracle, {"fixedDecay": 0.1}),
                         (AdaptivelyAveragingOracle, {}),
                         (OracleSGD, {})):
        curves = lossTraces(fwrap=f, aclass=algo, dim=trials,
                            maxsteps=maxsteps, algoparams=params)
        plotWithPercentiles(curves, algo_colors[algo], algo.__name__)
    pylab.semilogy()
    pylab.xlim(0, maxsteps)
    pylab.legend()
    pylab.show()