def deconvolve(fluor, pos, prctile=10, A0=0.15, lamb0=0.15, do_plot=True):
    """Remove optical scattering from fluorescence traces.

    Parameters
    ----------
    fluor : (nc, nt) array of fluorescence traces, one row per cell
    pos : array of cell positions, one row per cell
    prctile : percentile used to restrict correlations to small
        fluorescence values
    A0, lamb0 : initial guesses for the amplitude and length scale of the
        Gaussian blur fit
    do_plot : if True, plot the distance/correlation fit before and after
        deconvolution

    Returns
    -------
    deconv : (nc, nt) array of deconvolved fluorescence traces
    """

    nc, nt = fluor.shape

    # Euclidean distances between all pairs of cells
    dist = all_distances(pos)
    ij, distvec = submission.adjacency2vec(dist)

    # Pearson correlation coefficients for small fluorescence values
    corr = threshold_corr(fluor, prctile)
    ij, corrvec = submission.adjacency2vec(corr)

    # fit a Gaussian blur profile to correlation as a function of distance;
    # the default initial guesses (A0 = lamb0 = 0.15) follow Stetter et al.
    # 2012
    A, lamb = fit_gauss_blur(distvec, corrvec, A0, lamb0)

    # convolution matrix (nc x nc)
    C = gauss((A / 2., lamb), dist)   # why divide by 2?
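    # (a plausible reading, not confirmed here: the fitted amplitude A
    # describes the symmetric correlation of a cell pair, while C applies the
    # blur in each direction, so each directed entry carries half of A)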

    # # we set the diagonal to zero, since we don't consider a cell's own
    # # fluorescence
    # C[np.diag_indices(nc)] = 0

    # scattering model: the observed fluorescence F_sc is the true signal F
    # plus a blurred copy of it,
    #   F + CF    = F_sc
    #   (I + C)F  = F_sc
    # so we recover F by solving this linear system
    deconv = np.linalg.solve(np.eye(nc) + C, fluor)

    if do_plot:

        corr2 = threshold_corr(deconv, prctile)
        ij, corrvec2 = submission.adjacency2vec(corr2)
        A2, lamb2 = fit_gauss_blur(distvec, corrvec2, A0, lamb0)

        fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True, sharey=True,
                                       figsize=(8, 8))

        plot_hist_fit(distvec, corrvec, (A, lamb), ax=ax1)
        plot_hist_fit(distvec, corrvec2, (A2, lamb2), ax=ax2)

        ax1.set_title('Original', fontsize=18)
        ax2.set_title('Deconvolved', fontsize=18)

        ax2.set_xlabel('Distance (mm)', fontsize=14)
        ax1.set_ylabel('Correlation coefficient', fontsize=14)
        ax2.set_ylabel('Correlation coefficient', fontsize=14)

        cax, kw = colorbar.make_axes((ax1, ax2))
        ax2.images[0].set_clim(ax1.images[0].get_clim())
        cb = plt.colorbar(ax1.images[0], cax=cax, **kw)
        cb.set_label('Density')

        plt.show()

    return deconv
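

# A minimal, self-contained check of the scattering model used above
# (hypothetical demo, not part of the pipeline; uses the module's numpy
# import as np): build a small random scattering matrix C, blur a known
# fluorescence matrix F via F_sc = F + C.dot(F), and confirm that solving
# (I + C)F = F_sc recovers F up to floating-point precision.
def _demo_deconvolve_solve(nc=50, nt=200, seed=0):
    rng = np.random.RandomState(seed)
    F = rng.rand(nc, nt)              # "true" fluorescence traces
    C = 0.01 * rng.rand(nc, nc)       # weak random scattering kernel
    C = 0.5 * (C + C.T)               # symmetric, like a distance-based kernel
    C[np.diag_indices(nc)] = 0.       # a cell does not scatter onto itself
    F_sc = F + C.dot(F)               # forward model: true + scattered light
    F_hat = np.linalg.solve(np.eye(nc) + C, F_sc)
    assert np.allclose(F, F_hat)
    return F_hat
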
def run(f, name, network=None, network_pos=None, ncells=None, sim_hours=1,
        overwrite=False):
    """Generate a fake dataset and store every stage under the group '/name'.

    Runs the full pipeline (connectivity -> NEST spiking simulation ->
    calcium dynamics -> dye saturation -> noise -> optical blur), flushing
    each intermediate array to the PyTables file `f` along the way.
    """

    try:

        try:
            print ">> Creating new group: '/%s'" % name
            g = f.create_group(f.root, name)

        except tables.NodeError as e:
            if overwrite:
                # recursively remove existing group
                f.remove_node('/' + name, recursive=True)
                g = f.create_group(f.root, name)
            else:
                # re-raise without losing the original traceback
                raise

        if network is None:
            print ">> Creating new connectivity matrix"

            # create an adjacency matrix for ncells, with default params
            adjacency = fake_network.create_network(ncells)

            # reshape the adjacency matrix into a vector form that is
            # compatible with submission.run_auc()
            ij, connected = submission.adjacency2vec(adjacency)
            network = np.vstack((ij, connected)).T
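            # each row of `network` is one (i, j, connected) triple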
            del ij, connected

            f.create_carray(g, 'network', obj=network, filters=FILTERS)
            f.flush()

        else:
            # convert existing network to an adjacency matrix
            ij, connected = submission.real2dense(network, ncells)
            adjacency = submission.vec2adjacency(ij, connected)
            f.create_carray(g, 'network', obj=network[:], filters=FILTERS)

        if network_pos is None:
            print ">> Generating fake cell positions"
            network_pos = fake_blur.fake_positions(ncells)
            f.create_carray(g, 'network_pos', obj=network_pos, filters=FILTERS)
            f.flush()

        else:
            f.create_carray(g, 'network_pos', obj=network_pos[:],
                            filters=FILTERS)

        print ">> Running spiking network simulation"
        # run a NEST simulation using this adjacency matrix. the synaptic
        # scaling parameter will be adjusted automatically to achieve a target
        # burst rate according to default params
        simtime = sim_hours * 60 * 60 * 1000    # time in ms
        (spike_times, cell_indices,
            resampled_spikes) = fake_spikes.run_simulation(adjacency,
                                                           simtime=simtime,
                                                           verbose=True)

        f.create_carray(g, 'spike_times', obj=spike_times, filters=FILTERS)
        f.create_carray(g, 'spike_cell_indices', obj=cell_indices,
                        filters=FILTERS)
        f.create_carray(g, 'resampled_spikes', obj=resampled_spikes,
                        filters=FILTERS)
        f.flush()
        del spike_times, cell_indices

        print ">> Modelling calcium dynamics"
        calcium = fake_ca.fast_ca_from_spikes(resampled_spikes)
        f.create_carray(g, 'calcium', obj=calcium, filters=FILTERS)
        f.flush()
        del resampled_spikes

        print ">> Modelling dye saturation"
        no_noise_fluor = fake_ca.fluor_from_ca(calcium)
        f.create_carray(g, 'no_noise_fluor', obj=no_noise_fluor,
                        filters=FILTERS)
        f.flush()
        del calcium

        print ">> Modelling fluorescence noise"
        noisy_fluor = fake_ca.noisy_from_no_noise(no_noise_fluor)
        f.create_carray(g, 'noisy_fluor', obj=noisy_fluor, filters=FILTERS)
        f.flush()
        del no_noise_fluor

        print ">> Modelling optical blurring"
        fluor = fake_blur.apply_gaussian_blur(network_pos, noisy_fluor)
        f.create_carray(g, 'fluorescence', obj=fluor, filters=FILTERS)
        f.flush()

        print ">> Done."

    except Exception:

        import traceback
        # print the traceback for the exception
        traceback.print_exc()

        # make sure the file is closed, otherwise we won't have access
        # to the object and we won't be able to close it without
        # restarting ipython. close() calls flush() first, so no need to
        # call it explicitly.
        print 'Closing file "%s"' % f.filename
        f.close()
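
# Hypothetical usage (file and group names assumed): generate a 100-cell
# dataset with one hour of simulated activity, overwriting any existing
# '/demo' group:
#
#     f = tables.open_file('fake_data.h5', mode='a')
#     run(f, 'demo', ncells=100, sim_hours=1, overwrite=True)
#     f.close()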