Example 1
def write_pajek_hvcRA_coord(dirname, trial_number):
    """
    Create a .net file with the locations of HVC-RA neurons in the array.
    Training neurons are colored green, mature neurons black, and all others yellow.
    """
    file_RA_xy = os.path.join(dirname, "RA_xy_" + str(trial_number) + ".bin")

    file_training = os.path.join(dirname, "training_neurons.bin")
    file_pajek = os.path.join(dirname, "network_" + str(trial_number) + ".net")
    fileMature = os.path.join(dirname, "mature_" + str(trial_number) + ".bin")

    coord_RA = reading.read_coordinates(file_RA_xy)
    training_neurons = reading.read_training_neurons(file_training)
    (_, _, mature_indicators) = reading.read_mature_indicators(fileMature)

    mature_neurons = np.where(mature_indicators == 1)[0]
    num_neurons = coord_RA.shape[0]
    # sort arrays of training and mature neurons
    training_neurons.sort()
    mature_neurons.sort()

    with open(file_pajek, 'w') as f:
        f.write("*Vertices {0}\n".format(num_neurons))

        for i in range(num_neurons):
            if i in training_neurons:
                f.write('{0} "{1}" {2} {3} {4} ic Green\n'.format(
                    i + 1, i, coord_RA[i][0], coord_RA[i][1], coord_RA[i][2]))
            elif i in mature_neurons:
                f.write('{0} "{1}" {2} {3} {4} ic Black\n'.format(
                    i + 1, i, coord_RA[i][0], coord_RA[i][1], coord_RA[i][2]))
            else:
                f.write('{0} "{1}" {2} {3} {4} ic Yellow\n'.format(
                    i + 1, i, coord_RA[i][0], coord_RA[i][1], coord_RA[i][2]))
Example 2
def write_pajek_network_topology(dirname):
    """
    Create a .net file with the locations of and connections between HVC-RA and HVC-I neurons
    """
    file_RA_xy = os.path.join(dirname, "RA_xy.bin")
    file_I_xy = os.path.join(dirname, "I_xy.bin")

    RA2I = os.path.join(dirname, "RA_I_connections.bin")
    I2RA = os.path.join(dirname, "I_RA_connections.bin")

    file_training = os.path.join(dirname, "training_neurons_clustered.bin")
    file_pajek = os.path.join(dirname, "network_topology_clustered.net")

    (N_RA, RA_targets, RA_targets_G, _, _) = reading.read_connections(RA2I)
    (N_I, I_targets, I_targets_G, _, _) = reading.read_connections(I2RA)

    coord_RA = reading.read_coordinates(file_RA_xy)
    coord_I = reading.read_coordinates(file_I_xy)

    #print targets_ID
    #print targets_G
    # use the training neurons file only if it actually exists
    if os.path.exists(file_training):
        training_neurons = reading.read_training_neurons(file_training)
    else:
        training_neurons = []

    print "Training neurons: ", training_neurons

    with open(file_pajek, 'w') as f:
        f.write("*Vertices {0}\n".format(N_RA + N_I))

        for i in range(N_RA):
            if i in training_neurons:
                f.write('{0} "{1}" {2} {3} {4} ic Green\n'.format(
                    i + 1, i, coord_RA[i][0], coord_RA[i][1], coord_RA[i][2]))
            else:
                f.write('{0} "{1}" {2} {3} {4} ic Yellow\n'.format(
                    i + 1, i, coord_RA[i][0], coord_RA[i][1], coord_RA[i][2]))

        for i in range(N_RA, N_RA + N_I):
            f.write('{0} "{1}" {2} {3} {4} ic Red\n'.format(
                i + 1, i, coord_I[i - N_RA][0], coord_I[i - N_RA][1],
                coord_I[i - N_RA][2]))

        f.write("*Arcs\n".format(N_RA))

        # write targets of HVC(RA) neurons
        for i, targets in enumerate(RA_targets):
            for j, target in enumerate(targets):
                f.write('{0} {1} {2} c Green\n'.format(i + 1,
                                                       N_RA + target + 1,
                                                       RA_targets_G[i][j]))

        # write targets of HVC(I) neurons
        for i, targets in enumerate(I_targets):
            for j, target in enumerate(targets):
                f.write('{0} {1} {2} c Red\n'.format(N_RA + i + 1, target + 1,
                                                     I_targets_G[i][j]))
Example 3
def write_pajek_neurons_connected_by_supersynapses(dirname, trial_number):
    """
    Create a .net file with the locations of and supersynaptic connections between
    HVC-RA neurons that are connected by supersynapses
    """
    file_RA_xy = os.path.join(dirname, "RA_xy_" + str(trial_number) + ".bin")

    file_training = os.path.join(dirname, "training_neurons.bin")
    file_pajek = os.path.join(dirname, "network_" + str(trial_number) + ".net")
    fileSuperSynapses = os.path.join(
        dirname, "RA_RA_super_connections_" + str(trial_number) + ".bin")
    fileWeights = os.path.join(dirname,
                               "weights_" + str(trial_number) + ".bin")

    coord_RA = reading.read_coordinates(file_RA_xy)
    training_neurons = reading.read_training_neurons(file_training)
    (N_RA, _, super_synapses) = reading.read_synapses(fileSuperSynapses)
    (N_RA, _, weights) = reading.read_weights(fileWeights)

    network_neurons = set(training_neurons)

    for i in range(N_RA):
        for target in super_synapses[i]:
            network_neurons.add(target)

    network_neurons = sorted(list(network_neurons))

    num_neurons = len(network_neurons)
    # sort array of training neurons
    training_neurons.sort()

    with open(file_pajek, 'w') as f:
        f.write("*Vertices {0}\n".format(num_neurons))

        for i, neuron_id in enumerate(network_neurons):
            if neuron_id in training_neurons:
                f.write('{0} "{1}" {2} {3} {4} ic Green\n'.format(
                    i + 1, neuron_id, coord_RA[neuron_id][0],
                    coord_RA[neuron_id][1], coord_RA[neuron_id][2]))
            else:
                f.write('{0} "{1}" {2} {3} {4} ic Yellow\n'.format(
                    i + 1, neuron_id, coord_RA[neuron_id][0],
                    coord_RA[neuron_id][1], coord_RA[neuron_id][2]))

        f.write("*Arcs\n")

        # write targets of HVC(RA) neurons
        for i, source_id in enumerate(network_neurons):
            for target_id in super_synapses[source_id]:
                try:
                    ind = utils.index(network_neurons, target_id)
                    f.write('{0} {1} {2} c Green\n'.format(
                        i + 1, ind + 1, weights[source_id][target_id]))
                except ValueError:
                    continue
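
utils.index is not shown in these excerpts; from its use here it is assumed to return the position of target_id in the sorted network_neurons list and to raise ValueError when the target is absent. A minimal stand-in under that assumption:

import bisect

def index(sorted_list, value):
    """Return the position of value in sorted_list or raise ValueError (assumed behaviour of utils.index)."""
    pos = bisect.bisect_left(sorted_list, value)
    if pos < len(sorted_list) and sorted_list[pos] == value:
        return pos
    raise ValueError("{0} is not in list".format(value))
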
Example 4
def write_pajek_neurons(dirname, trial_number):
    """
    Create a .net file with the locations of and supersynaptic connections between mature HVC-RA neurons in the array
    """
    file_RA_xy = os.path.join(dirname, "RA_xy_" + str(trial_number) + ".bin")

    file_training = os.path.join(dirname, "training_neurons.bin")
    file_pajek = os.path.join(dirname, "network_" + str(trial_number) + ".net")
    fileMature = os.path.join(dirname, "mature_" + str(trial_number) + ".bin")
    fileSuperSynapses = os.path.join(
        dirname, "RA_RA_super_connections_" + str(trial_number) + ".bin")
    fileWeights = os.path.join(dirname,
                               "weights_" + str(trial_number) + ".bin")

    coord_RA = reading.read_coordinates(file_RA_xy)
    training_neurons = reading.read_training_neurons(file_training)
    (N_RA, _, weights) = reading.read_weights(fileWeights)
    (_, _, mature_indicators) = reading.read_mature_indicators(fileMature)
    (_, _, super_synapses) = reading.read_synapses(fileSuperSynapses)

    mature_neurons = np.where(mature_indicators == 1)[0]
    #print list(mature_neurons)
    #mature_neurons = range(N_RA)
    num_neurons = len(mature_neurons)
    # sort arrays of training and mature neurons
    training_neurons.sort()
    mature_neurons.sort()

    with open(file_pajek, 'w') as f:
        f.write("*Vertices {0}\n".format(num_neurons))

        for i, neuron_id in enumerate(mature_neurons):
            if neuron_id in training_neurons:
                f.write('{0} "{1}" {2} {3} {4} ic Green\n'.format(
                    i + 1, neuron_id, coord_RA[neuron_id][0],
                    coord_RA[neuron_id][1], coord_RA[neuron_id][2]))
            else:
                f.write('{0} "{1}" {2} {3} {4} ic Yellow\n'.format(
                    i + 1, neuron_id, coord_RA[neuron_id][0],
                    coord_RA[neuron_id][1], coord_RA[neuron_id][2]))

        f.write("*Arcs\n".format(num_neurons))

        # write targets of HVC(RA) neurons
        for i, source_id in enumerate(mature_neurons):
            for target_id in super_synapses[source_id]:
                try:
                    ind = utils.index(mature_neurons, target_id)
                    f.write('{0} {1} {2} c Green\n'.format(
                        i + 1, ind + 1, weights[source_id][target_id]))
                except ValueError:
                    continue
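
A short usage sketch (the path and trial number are hypothetical). Only neurons whose mature indicator equals 1 end up in the file, and only supersynapses between two mature neurons are written as arcs:

dirname = "/path/to/simulation/output"   # hypothetical
trial_number = 23800
write_pajek_neurons(dirname, trial_number)
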
Example 5
ax1.set_title('Somatic spikes')

ax2 = f.add_subplot(122)
utils.plotSpikes(spike_times_d, neuron_id_d, ax2)
ax2.set_ylabel('id')
ax2.set_xlabel('Time (ms)')
ax2.set_xlim([-5, TRIAL_DURATION])
ax2.set_ylim([-25, N_RA + 25])
ax2.set_title('Dendritic spikes')

##############################################
# plot burst density
##############################################
training_spread = reading.read_training_spread(
    os.path.join(dataDir, "training_spread.bin"))
training_neurons = reading.read_training_neurons(
    os.path.join(dataDir, "training_neurons.bin"))

N_TR = len(training_neurons)

print "Number of training neurons: ", N_TR

bursts = utils.getBursts(spike_times_s, BURST_DURATION)

first_spikes_in_bursts = [[] for i in range(N_RA)]

for burstsNeuron, id in zip(bursts, neuron_id_s):
    for burst in burstsNeuron:
        first_spikes_in_bursts[id[0]].append(burst[0])

all_first_spikes_in_burst = []
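
The snippet stops after collecting the first spike of each burst. A hedged continuation that turns first_spikes_in_bursts into the burst-density histogram announced by the comment above (the 1 ms bin width is an assumption; numpy and matplotlib.pyplot are assumed to be imported as np and plt, as in the surrounding script):

for first_spikes in first_spikes_in_bursts:
    all_first_spikes_in_burst.extend(first_spikes)

BIN_WIDTH = 1.0  # ms, assumed bin width
bins = np.arange(0.0, TRIAL_DURATION + BIN_WIDTH, BIN_WIDTH)
burst_density, _ = np.histogram(all_first_spikes_in_burst, bins=bins)

plt.figure()
plt.step(bins[:-1], burst_density / BIN_WIDTH, where='post')
plt.xlabel('Time (ms)')
plt.ylabel('Burst density (bursts/ms)')
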
Example 6
fileTraining = os.path.join(dirname, "training_neurons.bin")
fileMature = os.path.join(dirname, "mature_" + str(trial_number) + ".bin")
fileAxonalDelaysRA2RA = os.path.join(
    dirname, "axonal_delays_RA2RA_" + str(trial_number) + ".bin")

#fileActive = "/home/eugene/Output/networks/chainGrowth/testGrowthDelays3/RA_RA_active_connections_5300.bin"

#(_, _, active_synapses) = reading.read_synapses(fileActive)

(_, _, axonal_delays_RA2RA) = reading.read_axonal_delays(fileAxonalDelaysRA2RA)
(_, _, active_synapses) = reading.read_synapses(fileActiveSynapses)
(_, _, super_synapses) = reading.read_synapses(fileSuperSynapses)
(_, _, weights) = reading.read_weights(fileWeights)
(_, _, mature_indicators) = reading.read_remodeled_indicators(fileMature)

training_neurons = reading.read_training_neurons(fileTraining)

print "Mature neurons: ", [
    i for i in np.where(mature_indicators == 1)[0] if i not in training_neurons
]
print "Training neurons: ", training_neurons
for i in training_neurons:
    print "Training neuron {0} has {1} supersynapses : {2}".format(
        i, len(super_synapses[i]), super_synapses[i])

print axonal_delays_RA2RA[228][231]
print weights[228][231]

print axonal_delays_RA2RA[726][231]
print weights[726][231]
Example 7
def write_pajek_network_subset(dirname, trial_number, N, fileSpikes):
    """
    Create a .net file with the locations of and supersynaptic connections between mature HVC-RA neurons.
    Only the first N mature neurons that spiked are included.
    """
    file_RA_xy = os.path.join(dirname, "RA_xy_" + str(trial_number) + ".bin")

    file_training = os.path.join(dirname, "training_neurons.bin")
    file_pajek = os.path.join(dirname,
                              "network_subset_" + str(trial_number) + ".net")
    fileMature = os.path.join(dirname, "mature_" + str(trial_number) + ".bin")
    fileSuperSynapses = os.path.join(
        dirname, "RA_RA_super_connections_" + str(trial_number) + ".bin")
    fileWeights = os.path.join(dirname,
                               "weights_" + str(trial_number) + ".bin")

    coord_RA = reading.read_coordinates(file_RA_xy)
    training_neurons = reading.read_training_neurons(file_training)
    (N_RA, _, weights) = reading.read_weights(fileWeights)
    (_, _, mature_indicators) = reading.read_mature_indicators(fileMature)
    (_, _, super_synapses) = reading.read_synapses(fileSuperSynapses)

    #print list(mature_neurons)
    #mature_neurons = range(N_RA)

    # sort array of training neurons
    training_neurons.sort()

    #fileDend = "/home/eugene/Output/networks/chainGrowth/passiveDendrite/test/noImmatureOut4/test_spike_times_dend_5.bin"
    #fileSoma = "/home/eugene/Output/networks/chainGrowth/passiveDendrite/test/noImmatureOut4/test_spike_times_soma_5.bin"

    (_, _, spike_times_soma,
     neuron_fired_soma) = reading.read_time_info(fileSpikes)

    ordered_soma_spikes_raw, ordered_soma_raw = zip(
        *sorted(zip(spike_times_soma, neuron_fired_soma)))

    first_mature_spiked = []

    for spikes, neuron_ids in zip(ordered_soma_spikes_raw, ordered_soma_raw):
        if len(first_mature_spiked) >= N:
            break

        if mature_indicators[neuron_ids[0]] == 1:
            first_mature_spiked.append(neuron_ids[0])

    first_mature_spiked.sort()

    num_neurons = len(first_mature_spiked)

    with open(file_pajek, 'w') as f:
        f.write("*Vertices {0}\n".format(num_neurons))

        for i, neuron_id in enumerate(first_mature_spiked):
            if neuron_id in training_neurons:
                f.write('{0} "{1}" {2} {3} {4} ic Green\n'.format(
                    i + 1, neuron_id, coord_RA[neuron_id][0],
                    coord_RA[neuron_id][1], coord_RA[neuron_id][2]))
            else:
                f.write('{0} "{1}" {2} {3} {4} ic Yellow\n'.format(
                    i + 1, neuron_id, coord_RA[neuron_id][0],
                    coord_RA[neuron_id][1], coord_RA[neuron_id][2]))

        f.write("*Arcs\n".format(num_neurons))

        # write targets of HVC(RA) neurons
        for i, source_id in enumerate(first_mature_spiked):
            for target_id in super_synapses[source_id]:
                try:
                    ind = utils.index(first_mature_spiked, target_id)
                    f.write('{0} {1} {2} c Green\n'.format(
                        i + 1, ind + 1, weights[source_id][target_id]))
                except ValueError:
                    continue
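
A hedged usage sketch; compared with write_pajek_neurons, the extra inputs are the number of neurons N and a somatic spike-time file from a testing trial (all values below are hypothetical):

dirname = "/path/to/simulation/output"                                # hypothetical
trial_number = 23800
fileSpikes = os.path.join(dirname, "test_spike_times_soma_10.bin")    # hypothetical test file
write_pajek_network_subset(dirname, trial_number, N=200, fileSpikes=fileSpikes)
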
Example 8

if __name__ == "__main__":
    #dirname = "/home/eugene/Output/networks/chainGrowth/passiveDendrite/maturationTransition4/"

    dirname = "/mnt/hodgkin/eugene/results/immature/clusters/matTrans62/"
    trial_number = 23800

    #dirname = "/mnt/hodgkin/eugene/results/immature/clusters/matTrans62/"
    #trial_number = 23800

    # fileSpikes = "/home/eugene/results/immature/clusters/test/matTrans29/test_spike_times_soma_10.bin"

    dirname = "/mnt/hodgkin/eugene/Output/networks/chainGrowth/network200RA55I"
    fileTraining = "/mnt/hodgkin/eugene/Output/networks/chainGrowth/network200RA55I/training_neurons_random.bin"

    training_neurons = set(reading.read_training_neurons(fileTraining))

    coord_HVCRA = reading.read_coordinates(os.path.join(dirname, "RA_xy.bin"))
    coord_HVCI = reading.read_coordinates(os.path.join(dirname, "I_xy.bin"))

    #print training_neurons
    #print coord_RA

    #write_pajek_neurons_connected_by_supersynapses(dirname, trial_number)
    write_allCoords(training_neurons, coord_HVCRA, coord_HVCI,
                    os.path.join(dirname, "pajek.net"))
    #write_pajek_neurons(dirname, trial_number)
    #write_pajek_network_subset(dirname, trial_number, N, fileSpikes)
    #write_pajek_hvcRA_coord(dirname, trial_number)
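
write_allCoords is not included in these excerpts. A minimal stand-in, consistent with the call above and with the vertex format used by the other write_pajek_* functions (the coloring scheme is an assumption), could be:

def write_allCoords(training_neurons, coord_HVCRA, coord_HVCI, filename):
    """Write all HVC(RA) and HVC(I) coordinates to a Pajek .net file (hypothetical stand-in)."""
    N_RA = coord_HVCRA.shape[0]
    N_I = coord_HVCI.shape[0]

    with open(filename, 'w') as f:
        f.write("*Vertices {0}\n".format(N_RA + N_I))

        for i in range(N_RA):
            color = "Green" if i in training_neurons else "Yellow"
            f.write('{0} "{1}" {2} {3} {4} ic {5}\n'.format(
                i + 1, i, coord_HVCRA[i][0], coord_HVCRA[i][1],
                coord_HVCRA[i][2], color))

        for i in range(N_I):
            f.write('{0} "{1}" {2} {3} {4} ic Red\n'.format(
                N_RA + i + 1, N_RA + i, coord_HVCI[i][0], coord_HVCI[i][1],
                coord_HVCI[i][2]))
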
Example 9
def computeLockingInfo(dataDir, testDir):
    """
    Compute syllable locking significance and spike pattern properties for HVC-RA neurons from testing trials
    """
    N_RA, _ = reading.read_num_neurons(os.path.join(dataDir,
                                                    "num_neurons.bin"))
    training_neurons = reading.read_training_neurons(
        os.path.join(dataDir, "training_neurons.bin"))

    files = os.listdir(testDir)

    somatic_spikes = [[] for i in range(N_RA)]

    for f in files:
        if "test_spike_times_soma" in f:
            spike_times_s, neuron_id_s, ordered_spikes_s, neuron_ordered_id_s = utils.getSpikes(
                os.path.join(testDir, f))

            # find earliest first spike time of training neurons to align spikes to syllable onsets
            minTrainingFirstSpike = 1e6  # earliest first spike time of training neuron

            for spikes, neuronId in zip(spike_times_s, neuron_id_s):
                if neuronId[0] in training_neurons:
                    if spikes[0] < minTrainingFirstSpike:
                        minTrainingFirstSpike = spikes[0]

            #print "earliest first spike time of training neurons: ",minTrainingFirstSpike
            print minTrainingFirstSpike
            for spikes, neuronId in zip(spike_times_s, neuron_id_s):
                somatic_spikes[neuronId[0]].append(
                    [spike - minTrainingFirstSpike for spike in spikes])

    #print somatic_spikes

    earliestLocking = -500  # earliest locking to syllable onset in ms
    latestLocking = 500  # latest locking to syllable onset in ms
    nbootstrap = 1000  # number of bootstrap samples
    interBurstGap = 30.0  # gap between bursts in ms

    if not os.path.isdir(os.path.join(testDir, 'figures')):
        os.mkdir(os.path.join(testDir, 'figures'))

    plt.ioff()

    pvalues = []

    meanFsi = []
    medianFsi = []
    meanIsi = []
    medianIsi = []

    syllableLockingTIme = []

    for nid in range(N_RA):
        if len(somatic_spikes[nid]) == 0:
            pvalues.append(1.0)
            syllableLockingTIme.append(np.nan)
            meanFsi.append(np.nan)
            medianFsi.append(np.nan)
            meanIsi.append(np.nan)
            medianIsi.append(np.nan)

            continue

        #print somatic_spikes[nid]
        binCenters, averageFiringRate = calculateAverageFiringRate(
            somatic_spikes[nid], earliestLocking, latestLocking, 1.0)

        smoothWindow = 21

        smoothedFiringRate = moving_average(averageFiringRate, smoothWindow)

        # estimate significance of peak in firing rate
        peakFiringRate = np.max(smoothedFiringRate[smoothWindow:-smoothWindow])
        peakFiringRateTime = np.array(binCenters)[smoothWindow:-smoothWindow][
            np.argmax(smoothedFiringRate[smoothWindow:-smoothWindow])]

        bootstrapOffset = 500.0  # offset of spikes in bootstrap samples in ms

        bootstrapPeakFiringRates = np.empty(nbootstrap, np.float32)

        for i in range(nbootstrap):
            spikesShifted = []

            for spikes in somatic_spikes[nid]:
                spikesShifted.append(
                    shiftSpikes(spikes, bootstrapOffset, earliestLocking,
                                latestLocking))

            binCentersBootstrap, averageFiringRateBootstrap = calculateAverageFiringRate(
                spikesShifted, earliestLocking, latestLocking, 1.0)

            smoothedFiringRateBootstrap = moving_average(
                averageFiringRateBootstrap, smoothWindow)

            bootstrapPeakFiringRates[i] = np.max(
                smoothedFiringRateBootstrap[smoothWindow:-smoothWindow])

        isi = []
        fsi = []

        for spikes in somatic_spikes[nid]:
            #print spikes
            if len(spikes) > 1:
                bursts = utils.getBurstsForNeuron(spikes, interBurstGap)
                #print bursts
                for burst in bursts:
                    if len(burst) > 1:
                        isi.extend(np.diff(burst))
                        fsi.append(burst[1] - burst[0])

        print len(fsi)
        print len(somatic_spikes[nid])
        #print isi
        #print fsi

        # plot example of bootstrap spikes
        #==============================================================================
        #         if i == 0:
        #             f2 = plt.figure()
        #             plt.suptitle("Bootstrap example")
        #             ax1_2 = f2.add_subplot(211)
        #
        #             plotSpikes(spikesShifted, ax1_2)
        #
        #             ax1_2.set_xlim([earliestLocking - 100, latestLocking + 100])
        #
        #             ax2_2 = f2.add_subplot(212)
        #             ax2_2.step(binCenters, averageFiringRate, where='mid')
        #             ax2_2.step(binCenters, smoothedFiringRate, where='mid', linewidth=3.0)
        #             ax2_2.set_xlim([earliestLocking - 100, latestLocking + 100])
        #             ax2_2.set_xlabel('Time relative to syllable onset (ms)')
        #             ax2_2.set_ylabel('Firing rate (1/ms)')
        #
        #             plt.savefig(neuronDir + "/" + 'bootstrapEx.png', bbox_inches='tight')
        #             plt.close(f2)
        #
        #==============================================================================

        pvalue = float(
            len(np.where(bootstrapPeakFiringRates >= peakFiringRate)
                [0])) / float(nbootstrap)
        print "p-value = ", pvalue
        print "Syllable locking time = ", peakFiringRateTime

        f1 = plt.figure()
        plt.suptitle('Neuron {0} with p = {1}'.format(nid, pvalue))
        ax1 = f1.add_subplot(211)

        plotSpikes(somatic_spikes[nid], ax1)

        ax1.set_xlim([earliestLocking - 100, latestLocking + 100])

        ax2 = f1.add_subplot(212)
        ax2.step(binCenters,
                 averageFiringRate,
                 where='mid',
                 linewidth=3.0,
                 zorder=2,
                 color='b',
                 label='firing rate')

        ax2.step(binCenters,
                 smoothedFiringRate,
                 where='mid',
                 linewidth=3.0,
                 zorder=2,
                 color='r',
                 label='smooth firing rate')
        ax2.set_xlim([earliestLocking - 100, latestLocking + 100])

        #ax1.set_xlim([-100, 300])
        #ax2.set_xlim([-100, 300])

        ax2.set_xlabel('Time relative to syllable onset (ms)')
        ax2.set_ylabel('Firing rate (1/ms)')
        plt.legend()

        plt.savefig(os.path.join(testDir,
                                 'figures/firingRate{0}.png'.format(nid)),
                    bbox_inches='tight')
        plt.close(f1)

        pvalues.append(pvalue)
        syllableLockingTIme.append(peakFiringRateTime)

        if len(fsi) > 0:
            meanFsi.append(np.mean(fsi))
            medianFsi.append(np.median(fsi))
            meanIsi.append(np.mean(isi))
            medianIsi.append(np.median(isi))
        else:
            meanFsi.append(np.nan)
            medianFsi.append(np.nan)
            meanIsi.append(np.nan)
            medianIsi.append(np.nan)

        np.savez(os.path.join(testDir, "lockingInfo.npz"),
                 syllableLockingTIme=syllableLockingTIme,
                 pvalues=pvalues,
                 meanFsi=meanFsi,
                 medianFsi=medianFsi,
                 meanIsi=meanIsi,
                 medianIsi=medianIsi)
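
moving_average, calculateAverageFiringRate, shiftSpikes and plotSpikes are assumed to be defined elsewhere in this module. For moving_average in particular, the slicing smoothedFiringRate[smoothWindow:-smoothWindow] implies a smoother that returns an array of the same length as its input; a minimal sketch under that assumption (np is numpy, as used throughout):

def moving_average(x, window):
    """Centered moving average returning an array of the same length as x (assumed behaviour)."""
    kernel = np.ones(window, dtype=np.float32) / float(window)
    return np.convolve(x, kernel, mode='same')
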
Example 10
def compare_networkAndPoolConductance(dataDir, testDataDir, outFigureDir,
                                      simName, trial):
    """
    Read conductances of all neurons in the network, compute average total conductances,
    and compute conductances aligned to average bursting times
    """
    N_RA, N_I = reading.read_num_neurons(
        os.path.join(dataDir, "num_neurons.bin"))
    training_neurons = reading.read_training_neurons(
        os.path.join(dataDir, "training_neurons.bin"))
    N_TR = len(training_neurons)

    _, numTestTrials, \
        probability_soma_spike, average_num_soma_spikes_in_trial, mean_first_soma_spike_time, std_first_soma_spike_time,\
        probability_dend_spike, average_num_dend_spikes_in_trial, mean_first_dend_spike_time, std_first_dend_spike_time = reading.read_jitter(os.path.join(testDataDir, "jitter.bin"))

    neuronsWithRobustDendriticSpike = np.where(
        probability_dend_spike >= 0.75)[0]
    meanFirstSomaSpikeOfNeuronsWithRoburstDendriticSpike = mean_first_soma_spike_time[
        neuronsWithRobustDendriticSpike]

    t, Vs, Vd, Gexc_d, Ginh_d, n, h, r, c, Ca = reading.read_hh2_buffer_full(
        os.path.join(testDataDir, "RA/RA0_trial0.bin"))

    Ginh = np.zeros((N_RA, len(t)), np.float32)
    Gexc = np.zeros((N_RA, len(t)), np.float32)

    numOtherNeurons = N_RA - 1

    GinhSumAll = np.zeros(len(t), np.float32)
    GexcSumAll = np.zeros(len(t), np.float32)

    for testTrial in range(numTestTrials):
        print "Test trial: ", testTrial
        #if testTrial == 1:
        #  break
        for neuronId in range(N_RA):
            t, Vs, Vd, Gexc_d, Ginh_d, n, h, r, c, Ca = reading.read_hh2_buffer_full(
                os.path.join(
                    testDataDir, "RA/RA" + str(neuronId) + "_trial" +
                    str(testTrial) + ".bin"))

            Ginh[neuronId] += Ginh_d
            Gexc[neuronId] += Gexc_d

            # sum of conductances for all neurons excluding training
            if neuronId not in training_neurons:
                GinhSumAll += Ginh_d
                GexcSumAll += Gexc_d

    # average conductances over test trials
    for neuronId in range(N_RA):
        Ginh[neuronId] /= float(numTestTrials)
        Gexc[neuronId] /= float(numTestTrials)

    GinhSumAll = GinhSumAll / (float(numTestTrials) * float(N_RA - N_TR))
    GexcSumAll = GexcSumAll / (float(numTestTrials) * float(N_RA - N_TR))

    #print np.max(spike_times_d)
    #print np.max(t)

    #window = 100.0 # window size in ms
    window = 50.0

    Gbursted = None  # conductance aligned to bursting time
    Gother = None  # conductance of neurons that did not burst

    dt = t[1] - t[0]
    window_t = [
        float(i) * dt - window / 2. for i in range(int(window / dt) - 1)
    ]

    GburstedInh_window = np.empty(
        (len(neuronsWithRobustDendriticSpike) - N_TR, int(window / dt) - 1),
        np.float32)  # conductances of all burst neurons aligned to burst time
    GburstedExc_window = np.empty(
        (len(neuronsWithRobustDendriticSpike) - N_TR, int(window / dt) - 1),
        np.float32)  # conductances of all burst neurons aligned to burst time

    # plot conductances for several random bursted neurons
    np.random.seed(1991)

    nid_toPlot = np.random.choice(neuronsWithRobustDendriticSpike,
                                  16,
                                  replace=False)

    nrows = 4
    ncols = 4

    fInh, axarrInh = plt.subplots(nrows=nrows, ncols=ncols)
    fExc, axarrExc = plt.subplots(nrows=nrows, ncols=ncols)

    neuronPlotCounter = 0
    neuronSavedCounter = 0

    for nid, meanFirstSpikeTime in zip(
            neuronsWithRobustDendriticSpike,
            meanFirstSomaSpikeOfNeuronsWithRoburstDendriticSpike):
        if nid in training_neurons:
            continue

        meanFirstSpikeTime = round(int(meanFirstSpikeTime / dt) * dt, 2)

        GburstedInh_window[neuronSavedCounter] = Ginh[nid][
            (t > meanFirstSpikeTime - window / 2.)
            & (t < meanFirstSpikeTime + window / 2.)]
        GburstedExc_window[neuronSavedCounter] = Gexc[nid][
            (t > meanFirstSpikeTime - window / 2.)
            & (t < meanFirstSpikeTime + window / 2.)]

        # normalize conductance by max value
        #Gbursted_window[neuronSavedCounter] /= np.max(Gbursted_window[neuronSavedCounter])

        # normalize to zero mean and unit variance
        #Gbursted_window[neuronSavedCounter] = sklearn.preprocessing.scale(Gbursted_window[neuronSavedCounter], axis=0, with_mean=True, with_std=True, copy=True)

        if nid in nid_toPlot:
            row = neuronPlotCounter // 4
            col = neuronPlotCounter % 4
            axarrInh[row, col].plot(window_t,
                                    GburstedInh_window[neuronSavedCounter])
            axarrExc[row, col].plot(window_t,
                                    GburstedExc_window[neuronSavedCounter])
            #axarr[row, col].vlines(meanFirstSpikeTime, 0, np.max(Ginh[nid]))

            if row == 3:
                axarrInh[row, col].set_xlabel('Time (ms)')
                axarrExc[row, col].set_xlabel('Time (ms)')

            if col == 0:
                axarrInh[row, col].set_ylabel('Ginh (mS/cm^2)')
                axarrExc[row, col].set_ylabel('Gexc (mS/cm^2)')

            neuronPlotCounter += 1

        neuronSavedCounter += 1

        #if Gbursted == None:
        #  Gbursted = Ginh[nid[0]][(t > dend_spike_time[0] - window/2.)&(t < dend_spike_time[0] + window/2.)]
        #else:
        #  Gbursted += Ginh[nid[0]][(t > dend_spike_time[0] - window/2.)&(t < dend_spike_time[0] + window/2.)]

        #if Gother == None:
        #  Gother = (GsumAll-Ginh[nid[0]])[(t > dend_spike_time[0] - window/2.)&(t < dend_spike_time[0] + window/2.)]
        #else:
        #  Gother += (GsumAll-Ginh[nid[0]])[(t > dend_spike_time[0] - window/2.)&(t < dend_spike_time[0] + window/2.)]

        #plt.figure()
        #plt.plot(t, Gexc_d)
        #plt.vlines(dend_spike_time[0], 0, np.max(Gexc_d))

        #plt.figure()
        #plt.plot(t, G)
        #plt.vlines(dend_spike_time[0], 0, np.max(G))

    w, h = maximize_figure(fInh.number)
    fInh.savefig(outFigureDir + simName + "_trial" + str(trial) +
                 '_Ginh_examples.png',
                 bbox_inches='tight')
    plt.close(fInh)

    w, h = maximize_figure(fExc.number)
    fExc.savefig(outFigureDir + simName + "_trial" + str(trial) +
                 '_Gexc_examples.png',
                 bbox_inches='tight')
    plt.close(fExc)

    GburstedInh = np.sum(
        GburstedInh_window,
        axis=0) / float(len(neuronsWithRobustDendriticSpike) - N_TR)
    std_GburstedInh = np.std(GburstedInh_window, axis=0)

    GburstedExc = np.sum(
        GburstedExc_window,
        axis=0) / float(len(neuronsWithRobustDendriticSpike) - N_TR)
    std_GburstedExc = np.std(GburstedExc_window, axis=0)

    f = plt.figure()
    plt.plot(window_t, GburstedInh, label='bursted neurons')
    plt.plot(
        window_t, GburstedInh +
        std_GburstedInh / np.sqrt(len(neuronsWithRobustDendriticSpike) - N_TR))
    plt.plot(
        window_t, GburstedInh -
        std_GburstedInh / np.sqrt(len(neuronsWithRobustDendriticSpike) - N_TR))
    plt.xlabel('Time (ms)')
    plt.ylabel('average Ginh (mS/cm^2)')

    f.savefig(outFigureDir + simName + "_trial" + str(trial) +
              '_average_Ginh.png',
              bbox_inches='tight')
    plt.close(f)

    #plt.plot(window_t, Gother / float((N_RA - N_TR - 1)*numNeuronsWithDendSpike), label='other neurons')
    #plt.legend()

    f = plt.figure()
    plt.plot(t, GinhSumAll)
    plt.xlabel('Time (ms)')
    plt.ylabel('total Ginh (mS/cm^2)')
    f.savefig(outFigureDir + simName + "_trial" + str(trial) +
              '_total_Ginh.png',
              bbox_inches='tight')
    plt.close(f)

    f = plt.figure()
    plt.plot(window_t, GburstedExc, label='bursted neurons')
    plt.plot(
        window_t, GburstedExc +
        std_GburstedExc / np.sqrt(len(neuronsWithRobustDendriticSpike) - N_TR))
    plt.plot(
        window_t, GburstedExc -
        std_GburstedExc / np.sqrt(len(neuronsWithRobustDendriticSpike) - N_TR))
    plt.xlabel('Time (ms)')
    plt.ylabel('average Gexc (mS/cm^2)')
    f.savefig(outFigureDir + simName + "_trial" + str(trial) +
              '_average_Gexc.png',
              bbox_inches='tight')
    plt.close(f)

    #plt.plot(window_t, Gother / float((N_RA - N_TR - 1)*numNeuronsWithDendSpike), label='other neurons')
    #plt.legend()

    f = plt.figure()
    plt.plot(t, GexcSumAll)
    plt.xlabel('Time (ms)')
    plt.ylabel('total Gexc (mS/cm^2)')
    f.savefig(outFigureDir + simName + "_trial" + str(trial) +
              '_total_Gexc.png',
              bbox_inches='tight')
    plt.close(f)
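
A hedged usage sketch (all paths and the simulation name are hypothetical; testDataDir must contain jitter.bin and the per-trial RA/RA<id>_trial<k>.bin buffers, and outFigureDir is concatenated with plain string addition, so it needs a trailing slash):

dataDir = "/path/to/simulation/data"       # hypothetical
testDataDir = "/path/to/testing/trials"    # hypothetical
outFigureDir = "/path/to/figures/"         # hypothetical, trailing slash required
compare_networkAndPoolConductance(dataDir, testDataDir, outFigureDir,
                                  simName="matTrans62", trial=23800)
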
Example 11
def analyze_inhibitory_input_history(dataDir, testDataDir, trial):
    """
    Compare the inhibitory conductance of recruited neurons with the
    conductances of neurons that burst later
    """
    N_RA, N_I = reading.read_num_neurons(
        os.path.join(dataDir, "num_neurons.bin"))
    training_neurons = set(
        reading.read_training_neurons(
            os.path.join(dataDir, "training_neurons.bin")))

    _, _, activity_history = reading.read_activity_history(
        os.path.join(dataDir, "activity_history_" + str(trial) + ".bin"))

    smoothWindowSize = 25
    smooth_activity_history = np.apply_along_axis(moving_average, 1,
                                                  activity_history,
                                                  smoothWindowSize)

    #print np.any(smooth_activity_history[4] >= 1.0)
    recruited_candidates = np.where(
        np.any(smooth_activity_history[:, smoothWindowSize:] >= 1, axis=1))[0]

    recruited = [c for c in recruited_candidates if c not in training_neurons]
    print training_neurons

    smooth_activity_history = smooth_activity_history[:, trial:
                                                      smoothWindowSize:-1]

    recruitment_time = []
    for r in recruited:
        recruitment_time.append(
            np.where(smooth_activity_history[r] >= 1)[0][0])

    #print recruitment_time



    _, _, \
        _, _, mean_first_soma_spike_time, _,\
        _, _, _, _ = reading.read_jitter(os.path.join(testDataDir, "jitter.bin"))

    recruitment_time, recruited = zip(
        *sorted(zip(recruitment_time, recruited)))

    first_soma_spike_time_recruited = mean_first_soma_spike_time[recruited]

    print zip(recruited, recruitment_time, first_soma_spike_time_recruited)

    trialsBefore = 95
    laterSpiking = 20.0

    for rid, rtime, rfirstSpikeTime in zip(recruited, recruitment_time,
                                           first_soma_spike_time_recruited):
        laterSpikedNeurons = set(
            np.array(recruited)[first_soma_spike_time_recruited >=
                                rfirstSpikeTime + laterSpiking])

        for trialNum in range(rtime - trialsBefore, rtime):
            t, Ginh_d = reading.read_inhibitory_conductance_during_trial(
                os.path.join(dataDir, "Ginh_trial_" + str(trialNum) + ".bin"))

            for nid in laterSpikedNeurons:
                pass  # analysis of the later-spiking neurons is not implemented in this excerpt

        break
Example 12
def compare_conductanceOfNetworkNeurons(Ginh, dataDir, testDataDir,
                                        outFigureDir, simName, trial):
    """
    Compare the inhibitory conductance of recruited neurons with the
    conductances of neurons that burst later
    """
    plt.ioff()

    N_RA, N_I = reading.read_num_neurons(
        os.path.join(dataDir, "num_neurons.bin"))
    training_neurons = reading.read_training_neurons(
        os.path.join(dataDir, "training_neurons.bin"))
    N_TR = len(training_neurons)

    _, numTestTrials, \
        probability_soma_spike, average_num_soma_spikes_in_trial, mean_first_soma_spike_time, std_first_soma_spike_time,\
        probability_dend_spike, average_num_dend_spikes_in_trial, mean_first_dend_spike_time, std_first_dend_spike_time = reading.read_jitter(os.path.join(testDataDir, "jitter.bin"))

    neuronsWithRobustDendriticSpike = np.where(
        probability_dend_spike >= 0.75)[0]
    meanFirstSomaSpikeOfNeuronsWithRoburstDendriticSpike = mean_first_soma_spike_time[
        neuronsWithRobustDendriticSpike]

    print "Robustly firing neurons: ", neuronsWithRobustDendriticSpike
    print "First soma spikes of robust neurons: ", meanFirstSomaSpikeOfNeuronsWithRoburstDendriticSpike

    window = 20.0
    dt = t[1] - t[0]  # NOTE: t (the time axis of the Ginh traces) is assumed to be defined in an enclosing scope
    window_t = [
        float(i) * dt - window / 2. for i in range(int(window / dt) - 1)
    ]

    #GburstedInh_window = np.empty((len(neuronsWithRobustDendriticSpike) - N_TR, int(window / dt)-1), np.float32) # conductances of all burst neurons aligned to burst time

    # plot conductances for several random bursted neurons
    #np.random.seed(1991)

    #nid_toPlot = np.random.choice(neuronsWithRobustDendriticSpike, 16, replace=False)

    #nrows = 4
    #ncols = 4

    #fInh, axarrInh = plt.subplots(nrows=nrows, ncols=ncols)
    #fExc, axarrExc = plt.subplots(nrows=nrows, ncols=ncols)

    #neuronPlotCounter = 0
    #neuronSavedCounter = 0

    integralConductanceOfAllSpiked = []
    integralConductanceOfAllOther = []

    averageInhConductanceOfRecruitedBeforeBurstTime = []
    averageInhConductanceOfLaterRecruitedBeforeBurstTime = []

    neuronCounter = 0

    for nid, meanFirstSpikeTime in zip(
            neuronsWithRobustDendriticSpike,
            meanFirstSomaSpikeOfNeuronsWithRoburstDendriticSpike):
        if nid in training_neurons:
            continue

        #print "counter = {0}; mean first spike time = {1}".format(neuronCounter, meanFirstSpikeTime)

        meanFirstSpikeTime = round(int(meanFirstSpikeTime / dt) * dt, 2)

        #GburstedInh_window[neuronSavedCounter] = Ginh[nid][(t >meanFirstSpikeTime - window/2.)&(t < meanFirstSpikeTime + window/2.)]

        GburstedInh = Ginh[nid][(t > meanFirstSpikeTime - window / 2.)
                                & (t < meanFirstSpikeTime + window / 2.)]

        #plt.figure()
        #plt.title('Neuron {0} with first spike time {1}'.format(nid, meanFirstSpikeTime))
        #plt.plot(window_t, GburstedInh)
        #ax  = f.add_subplot(511)

        integralConductanceOfSpiked = integral(GburstedInh, dt)
        integralConductanceOfAllSpiked.append(integralConductanceOfSpiked)
        minInhibition = np.min(GburstedInh)

        averageInhConductanceOfRecruitedBeforeBurstTime = np.mean(
            Ginh[nid][t < meanFirstSpikeTime + window / 2.])

        #ax.plot(window_t, GburstedInh)

        # neurons that spike later:
        integralConductanceOfOther = []
        minInhibitionLater = []

        laterSpikedNeurons = np.where((probability_dend_spike >= 0.75) & (
            mean_first_soma_spike_time > meanFirstSpikeTime + window))[0]
        #print "ind of neurons that spiked later: ",laterSpikedNeurons
        print "first spike times of neurons that spiked later: ", mean_first_soma_spike_time[
            laterSpikedNeurons]

        numToPlot = 0
        plotCounter = 0

        for nid_later in laterSpikedNeurons:
            #if i >= numToPlot:
            # break
            GInhLater = Ginh[nid_later][(t > meanFirstSpikeTime - window / 2.)
                                        &
                                        (t < meanFirstSpikeTime + window / 2.)]
            integralConductanceOfOther.append(integral(GInhLater, dt))
            minInhibitionLater.append(np.min(GInhLater))

            if plotCounter < numToPlot:
                plt.figure()
                plt.plot(window_t, GInhLater)
                plt.title('Neuron {0} with later first spike time {1}'.format(
                    nid_later, mean_first_soma_spike_time[nid_later]))

                plotCounter += 1

            averageInhConductanceOfLaterRecruitedBeforeBurstTime.append(
                np.mean(Ginh[nid_later][t < meanFirstSpikeTime + window / 2.]))

        integralConductanceOfAllOther.extend(integralConductanceOfOther)

        f = plt.figure()
        plt.title('Min conductance near burst time of recruited')
        plt.hist(minInhibitionLater,
                 fill=False,
                 edgecolor='r',
                 label='spiked later')
        ylim = plt.ylim()
        plt.vlines(minInhibition, 0, ylim[1])
        plt.legend()
        f.savefig(outFigureDir + simName + "_neuron" + str(nid) +
                  '_min_Ginh.png',
                  bbox_inches='tight')
        plt.close(f)

        f = plt.figure()
        plt.title('Integral of conductance near burst time of recruited')
        plt.hist(integralConductanceOfOther,
                 fill=False,
                 edgecolor='r',
                 label='spiked later')
        ylim = plt.ylim()
        plt.vlines(integralConductanceOfSpiked, 0, ylim[1])
        plt.legend()
        f.savefig(outFigureDir + simName + "_neuron" + str(nid) +
                  '_integral_Ginh.png',
                  bbox_inches='tight')
        plt.close(f)

        f = plt.figure()
        plt.title('Comparison of average conductance before burst time')
        plt.hist(averageInhConductanceOfLaterRecruitedBeforeBurstTime,
                 fill=False,
                 edgecolor='r',
                 label='spiked later')
        ylim = plt.ylim()
        plt.vlines(averageInhConductanceOfRecruitedBeforeBurstTime, 0, ylim[1])
        plt.legend()
        f.savefig(outFigureDir + simName + "_neuron" + str(nid) +
                  '_average_Ginh.png',
                  bbox_inches='tight')
        plt.close(f)

        #plt.figure()
        #plt.hist(integralConductanceOfOther, fill=False, edgecolor='r', label='spiked later')
        #ylim=plt.ylim()
        #plt.vlines(integralConductanceOfSpiked, 0, ylim[1])
        #plt.legend()

        if neuronCounter == 5:
            break
        neuronCounter += 1
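
integral is not defined in these excerpts; from its use on a uniformly sampled conductance trace it is assumed to be a plain numerical integral. A minimal stand-in using the trapezoidal rule (np is numpy, as used throughout):

def integral(y, dx):
    """Integrate uniformly sampled values y with sample spacing dx (trapezoidal rule, assumed)."""
    return np.trapz(y, dx=dx)
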
Example 13
def compare_inhibitory_weights(dataDir, outFigureDir, simName, trial):
    """
    Compare inhibitory inputs to network and pool neurons
    """
    N_RA, N_I = reading.read_num_neurons(
        os.path.join(dataDir, "num_neurons.bin"))
    training_neurons = reading.read_training_neurons(
        os.path.join(dataDir, "training_neurons.bin"))
    N_TR = len(training_neurons)

    _, numTestTrials, \
        probability_soma_spike, average_num_soma_spikes_in_trial, mean_first_soma_spike_time, std_first_soma_spike_time,\
        probability_dend_spike, average_num_dend_spikes_in_trial, mean_first_dend_spike_time, std_first_dend_spike_time = reading.read_jitter(os.path.join(testDataDir, "jitter.bin"))  # NOTE: testDataDir is not a parameter of this function; it is assumed to be defined at module scope

    neuronsWithRobustDendriticSpike = np.where(
        probability_dend_spike >= 0.75)[0]

    # analyze inhibitory weights to neurons
    (_, targets_ID, weights_I2RA, syn_lengths,
     axonal_delays) = reading.read_connections(
         os.path.join(dataDir, "I_RA_connections_" + str(trial) + ".bin"))

    inhibition_weights_on_network_neurons = []
    inhibition_weights_on_pool_neurons = []

    total_inhibition_on_network_neurons = {}
    total_inhibition_on_pool_neurons = {}

    set_neuronsWithRobustDendriticSpike = set(neuronsWithRobustDendriticSpike)

    for i in range(N_I):
        for j, target in enumerate(targets_ID[i]):
            if target not in training_neurons:
                if target in set_neuronsWithRobustDendriticSpike:
                    if target in total_inhibition_on_network_neurons:
                        total_inhibition_on_network_neurons[
                            target] += weights_I2RA[i][j]
                    else:
                        total_inhibition_on_network_neurons[
                            target] = weights_I2RA[i][j]

                    inhibition_weights_on_network_neurons.append(
                        weights_I2RA[i][j])
                else:
                    if target in total_inhibition_on_pool_neurons:
                        total_inhibition_on_pool_neurons[
                            target] += weights_I2RA[i][j]
                    else:
                        total_inhibition_on_pool_neurons[
                            target] = weights_I2RA[i][j]
                    inhibition_weights_on_pool_neurons.append(
                        weights_I2RA[i][j])

    totalInhId, totalInhW = total_inhibition_on_network_neurons.keys(
    ), total_inhibition_on_network_neurons.values()

    f = plt.figure()
    plt.scatter(mean_first_soma_spike_time[totalInhId], totalInhW)
    plt.xlabel('Mean first spike time (ms)')
    plt.ylabel('Total inhibitory input (mS/cm^2)')
    f.savefig(outFigureDir + simName + "_trial" + str(trial) +
              '_total_Ginh_vs_burstTime.png',
              bbox_inches='tight')
    plt.close(f)

    nbin = 20
    hist_on_network, bin_edges_on_network = np.histogram(
        inhibition_weights_on_network_neurons, bins=nbin)
    hist_on_network = hist_on_network.astype(float) / float(
        len(neuronsWithRobustDendriticSpike) - N_TR)
    bin_centers_on_network = bin_edges_on_network[:-1] + (
        bin_edges_on_network[1] - bin_edges_on_network[0]) / 2.

    hist_on_pool, bin_edges_on_pool = np.histogram(
        inhibition_weights_on_pool_neurons, bins=nbin)
    hist_on_pool = hist_on_pool.astype(float) / float(
        N_RA - len(neuronsWithRobustDendriticSpike))
    bin_centers_on_pool = bin_edges_on_pool[:-1] + (
        bin_edges_on_pool[1] - bin_edges_on_pool[0]) / 2.

    f = plt.figure()
    plt.xlabel('Inhibitory input weight (mS/cm^2)')
    plt.ylabel('# of inputs per neuron')
    #plt.hist( , fill=False, label='network neurons', edgecolor='r')
    #plt.hist(np.array(inhibition_weights_on_pool_neurons) / float(N_RA - len(neuronsWithRobustDendriticSpike) - N_TR), fill=False, label='pool neurons', edgecolor='b')
    plt.bar(bin_centers_on_network,
            hist_on_network,
            align='center',
            fill=False,
            edgecolor='b',
            width=bin_edges_on_network[1] - bin_edges_on_network[0],
            label='network neurons')
    plt.bar(bin_centers_on_pool,
            hist_on_pool,
            align='center',
            fill=False,
            edgecolor='r',
            width=bin_edges_on_pool[1] - bin_edges_on_pool[0],
            label='pool neurons')
    plt.legend(loc=4)
    f.savefig(outFigureDir + simName + "_trial" + str(trial) +
              '_Ginh_weights_comparison.png',
              bbox_inches='tight')
    plt.close(f)

    nbin = 20
    hist_on_network_total, bin_edges_on_network_total = np.histogram(
        total_inhibition_on_network_neurons.values(), bins=nbin)
    hist_on_network_total = hist_on_network_total.astype(float) / float(
        len(neuronsWithRobustDendriticSpike) - N_TR)
    bin_centers_on_network_total = bin_edges_on_network_total[:-1] + (
        bin_edges_on_network_total[1] - bin_edges_on_network_total[0]) / 2.

    hist_on_pool_total, bin_edges_on_pool_total = np.histogram(
        total_inhibition_on_pool_neurons.values(), bins=nbin)
    hist_on_pool_total = hist_on_pool_total.astype(float) / float(
        N_RA - len(neuronsWithRobustDendriticSpike))
    bin_centers_on_pool_total = bin_edges_on_pool_total[:-1] + (
        bin_edges_on_pool_total[1] - bin_edges_on_pool_total[0]) / 2.

    f = plt.figure()
    plt.xlabel('Total inhibitory input weight (mS/cm^2)')
    plt.ylabel('Norm. # of neurons')
    #plt.hist( , fill=False, label='network neurons', edgecolor='r')
    #plt.hist(np.array(inhibition_weights_on_pool_neurons) / float(N_RA - len(neuronsWithRobustDendriticSpike) - N_TR), fill=False, label='pool neurons', edgecolor='b')
    plt.bar(bin_centers_on_network_total,
            hist_on_network_total,
            align='center',
            fill=False,
            edgecolor='b',
            width=bin_edges_on_network_total[1] -
            bin_edges_on_network_total[0],
            label='network neurons')
    plt.bar(bin_centers_on_pool_total,
            hist_on_pool_total,
            align='center',
            fill=False,
            edgecolor='r',
            width=bin_edges_on_pool_total[1] - bin_edges_on_pool_total[0],
            label='pool neurons')
    plt.legend(loc=1)
    f.savefig(outFigureDir + simName + "_trial" + str(trial) +
              '_total_Ginh_comparison.png',
              bbox_inches='tight')
    plt.close(f)

    f = plt.figure()
    plt.bar([1, 2], [
        np.mean(total_inhibition_on_pool_neurons.values()),
        np.mean(total_inhibition_on_network_neurons.values())
    ],
            align='center',
            width=0.1,
            yerr=[
                np.std(total_inhibition_on_pool_neurons.values()) /
                np.sqrt(float(N_RA - len(neuronsWithRobustDendriticSpike))),
                np.std(total_inhibition_on_network_neurons.values()) /
                np.sqrt(float(len(neuronsWithRobustDendriticSpike) - N_TR))
            ])
    plt.xticks([1, 2], ['pool', 'network'])
    plt.ylabel('Mean inhibitory input (mS/cm^2)')
    f.savefig(outFigureDir + simName + "_trial" + str(trial) +
              '_mean_Ginh_comparison.png',
              bbox_inches='tight')
    plt.close(f)

    from scipy.stats import ranksums
    print ranksums(total_inhibition_on_pool_neurons.values(),
                   total_inhibition_on_network_neurons.values())

    plt.show()
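
scipy.stats.ranksums returns a (statistic, p-value) pair; if the p-value is needed programmatically rather than only printed, it can be unpacked as in this short sketch:

from scipy.stats import ranksums

statistic, pvalue = ranksums(total_inhibition_on_pool_neurons.values(),
                             total_inhibition_on_network_neurons.values())
print("Wilcoxon rank-sum statistic = {0}, p-value = {1}".format(statistic, pvalue))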