fpar, succ = least_sq_fit(single_gamma_hrf, [5, 5, 1], a)
"""
With these parameters we can compute high-resolution curves for the estimated
time course and plot them together with the "true" time course and the data:
"""

x = np.linspace(0, 20)
curves = [(x, single_gamma_hrf(x, 6, 7, 1)), (x, single_gamma_hrf(x, *fpar))]

# plot data (with error bars) and both curves
plot_err_line(a, curves=curves, linestyle='-')

# add legend to plot
pl.legend(('original', 'fit'))
pl.title('True and estimated BOLD response')
"""

.. image:: ../pics/ex_curvefitting_bold.*
   :align: center
   :alt: BOLD response fitting example


Searchlight accuracy distributions
----------------------------------

When doing a searchlight analysis one might have the idea that the
resulting accuracies are actually sampled from two distributions: one
caused by an actual signal source and one being the chance distribution.
Let's assume that these two distributions can each be approximated by a
Gaussian, and take a look at a toy example of how we could explore the data.
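A quick toy sketch of this idea (in plain NumPy/pylab rather than any
particular PyMVPA helper; all mixture parameters below are made up for
illustration): draw "accuracies" from a mixture of a chance distribution
centered at 0.5 and a signal distribution centered at 0.75, and look at
the pooled histogram, where both modes should be visible.
"""

# hypothetical toy data: 900 chance and 100 signal searchlights
n_chance, n_signal = 900, 100
acc = np.concatenate((np.random.normal(0.50, 0.05, size=n_chance),
                      np.random.normal(0.75, 0.05, size=n_signal)))

# inspect the empirical accuracy distribution
pl.figure()
pl.hist(acc, bins=50, density=True)
pl.xlabel('accuracy')
pl.title('Toy searchlight accuracy distribution')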
ndatasets = len(datasets)
nmappers = len(mappers)

pl.figure(figsize=(8, 8))
fig = 1

for ds in datasets:
    for mname, mapper in mappers.items():
        # determine the mapping (e.g. a projection matrix) from this dataset
        mapper.train(ds)

        # project the data and keep the projection itself for plotting
        dproj = mapper.forward(ds.samples)
        mproj = mapper.proj
        pl.subplot(ndatasets, nmappers, fig)
        if fig <= 3:
            pl.title(mname)
        pl.axis('equal')

        pl.scatter(ds.samples[:, 0] - center[0],
                   ds.samples[:, 1] - center[1],
                   s=30,
                   c=(ds.sa.targets) * 200)
        plot_proj_dir(mproj)
        fig += 1

if cfg.getboolean('examples', 'interactive', True):
    pl.show()
"""
Output of the example:

.. image:: ../pics/ex_projections.*

# In the following we draw some 2D functions at random from the
# distribution N(0, kernel) defined by each available kernel and
# plot them. These plots show the flexibility of a given kernel
# (with default parameters) when doing interpolation. The choice
# of a kernel defines a prior probability over the function space
# used for regression/classification with GPR/GPC.
count = 1
for k in kernel_dictionary.keys():
    pl.subplot(3, 4, count)
    # X = np.random.rand(size)*12.0-6.0
    # X.sort()
    X = np.arange(-1, 1, .02)
    X = X[:, np.newaxis]
    # instantiate the kernel and compute the kernel matrix for the grid X
    ker = kernel_dictionary[k]()
    ker.compute(X, X)
    print(k)
    K = np.asarray(ker)
    for i in range(10):
        f = np.random.multivariate_normal(np.zeros(X.shape[0]), K)
        pl.plot(X[:, 0], f, "b-")

    pl.title(k)
    pl.axis('tight')
    count += 1

if cfg.getboolean('examples', 'interactive', True):
    # show all the cool figures
    pl.show()