Example #1
def draw_histograms(data):
    base_hist = "%s/histogram" % output_base
    base_box = "%s/boxplot" % output_base
    plots = [
        ('time', True),
        ('blocks', False),
        ('blocks executed', False),
        ('host instructions', False),
        ('source instructions', False),
        ('host instructions executed', False),
        ('source instructions emulated', False),
        ('mips', True),
        ('hotness', False),
        ('host block size', False),
        ('source block size', False),
        ('compilation inefficiency', False),
        ('execution inefficiency', False)
    ]

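    # Each entry pairs a metric name with a flag; when the flag is False,
    # only the 'JIT' series is kept, otherwise all series are plotted.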
    for name, keep_all in plots:
        processed = data
        if not keep_all:
            processed = {'JIT': data['JIT']}

        plot.histogram(processed, name, '%s/%s.png' % (base_hist, name))
        plot.boxplot(processed, name, '%s/%s.png' % (base_box, name))
Example #2
def _test_default_inhibitory_isi_distribution(info):
    """
    The test creates the default inhibitory ISI distribution and
    uses it to generate 100000 events (ISIs). A plot of the ISI
    histogram is then saved.
    """
    assert isinstance(info, TestInfo)
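    # Histogram the 100000 generated ISIs (bin width 0.00025, origin 0.0)
    # and save the plot.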
    plot.histogram(
        datalgo.make_histogram(
            distribution.default_inhibitory_isi_distribution().generate(100000),
            0.00025,
            0.0,
            ),
        os.path.join(info.output_dir, "default_inhibitory_isi_distribution.png"),
        normalised=False,
        title="default_inhibitory_isi_distribution " + plot.get_title_placeholder()
        )
    return 0
Example #3
def _test_default_inhibitory_isi_distribution(info):
    """
    The test creates the default inhibitory ISI distribution and
    uses it to generate 100000 events (ISIs). A plot of the ISI
    histogram is then saved.
    """
    assert isinstance(info, TestInfo)
    plot.histogram(datalgo.make_histogram(
        distribution.default_inhibitory_isi_distribution().generate(100000),
        0.00025,
        0.0,
    ),
                   os.path.join(info.output_dir,
                                "default_inhibitory_isi_distribution.png"),
                   normalised=False,
                   title="default_inhibitory_isi_distribution " +
                   plot.get_title_placeholder())
    return 0
Example #4
def do_histogram_OLD():
  # obs = {n: c for n,c in db.runquery('select expname, count(obs) from expr e, obs where e.expid=obs.expid group by expname')}
  data = OrderedDict()
  raw = OrderedDict()
  raw['Serial']     = obs2tw(getobs('serial'))
  raw['Parallel']   = obs2tw(getobs('parallel')[:600000])
  raw['Uniform']    = obs2tw(getobs('uniform3')[50000:650000])
  raw['Biased']     = obs2tw(getobs('biased4')[50000:650000])
  raw['RW-Explore'] = obs2tw(getobs('reweight5_epr')[50000:650000])
  raw['RW-Exploit'] = obs2tw(getobs('reweight5_xlt')[50000:650000])
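  # Tally how many times each value occurs in every series.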
  for key, obslist in raw.items():
    groupby = {}
    for i in obslist:
      if i not in groupby:
        groupby[i] = 0
      groupby[i] += 1
    data[key] = groupby
  P.histogram(data, 'Histogram_Event_Counts', ylim=(0,200000))
Example #5
def main(name, songs, graph):
    '''
    Mustat - Music Statistics.
    CLI program to generate statistics about music artists.
    '''
    try:
        art = Artist(name, songs)
        click.echo("Artist data downloaded!\n")
        click.echo("**** Summary Artist Data ****")
        click.echo("Artist name: %s" % art.name)
        click.echo("Attempted song downloads %d" % songs)
        click.echo("Successful song downloads %d" % len(art.songs))
        click.echo("Average word length = %.2f" % art.averageWordLength)
        click.echo("Standard deviation of word length = %.2f" %
                   art.stdevWordLength)
        click.echo("Variance of word length = %.2f" % art.varianceWordLength)
        click.echo("Most common word: '%s'" % art.mostCommonword)

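        # Optionally render a word-length histogram and a top-10 words bar chart.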
        if graph:
            histogram(art.wordLengths,
                      x_label="Word Lengths",
                      y_label="Frequency",
                      title="{} Word Lengths".format(art.name))

            wordBarPlot(art.words,
                        x_label="Words",
                        y_label="Frequency",
                        title="{}'s Top 10 Words".format(art.name))
            show()
    except APINearlyFound as e:
        click.echo("Did you mean %s?" % e.args)
    except APINotFound:
        click.echo(
            "Could not find any artists or songs under the artist name '%s'" %
            name)
    except APIFormatError:
        click.echo("Unexpected API error")
    except Exception as e:
        click.echo("**** An unexpected error occurred ****")
        raise e
Example #6
def do_histogram():
  global nex, ntr
  ordex = ['Serial', 'Parallel', 'Uniform', 'Biased', 'MVNN', 'Reweight']
  binlist = [(a,b) for a in range(5) for b in range(5)]
  hist = {k: {ab : 0 for ab in binlist} for k in ordex}
  obs  = {k: [] for k in ordex}
  total = {k: 0 for k in ordex} 
  for k in ordex:
    print(k)
    for rmslist in nex[k].rmsd_cw.values():
      for i, rms in enumerate(rmslist):
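        # A is the closest state, B the runner-up; if the RMSD gap is small
        # the frame is ambiguous, so it is attributed to the runner-up.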
        A, B = np.argsort(rms)[:2]
        delta = np.abs(rms[B] - rms[A])
        if delta < 0.12:
          sub_state = B
        else:
          sub_state = A
        obs[k].append((A, sub_state))
        total[k] += 1

  for k in ordex:
    for o in obs[k]:
      hist[k][o] += 1
  for k in ordex:
    for o in sorted(hist[k].keys()):
      print(k, o, hist[k][o])
      # Rescale every bin so each series totals 500000 observations.
      hist[k][o] = int(hist[k][o] * 500000 / total[k])

  cnt = {e: {k: 0 for k in ['Well-2', 'Well-3', 'Well-4',
                        'Tran-0', 'Tran-1', 'Tran-2', 'Tran-3', 'Tran-4']} for e in ordex}
  for k in ordex:
    for a in range(5):
      for b in range(5):
        if a == b:
          if a not in [0, 1]:
            cnt[k]['Well-%d'%a] = hist[k][(a,b)]
        else:
          cnt[k]['Tran-%d'%a] += hist[k][(a,b)]

  P.histogram(cnt, 'Total Observations for each state Well / Transitions')
Example #7
def _test_hermit_distribution(info):
    """
    The test builds a hermit distribution for each of several
    peak values. Curves, histograms, and normalised histograms are
    built, and the corresponding plots are saved.
    """
    assert isinstance(info, TestInfo)
    print("  Generating graph " + os.path.join(info.output_dir, "ns_curve.png"))
    plot.curve(
        distribution.make_points_of_normal_distribution(
            num_points=100,
            nu=0.0,
            sigma=1.0,
            normalise=True
            ),
        os.path.join(info.output_dir, "ns_curve.png")
        )
    for peek_x in numpy.arange(0.1, 0.95, 0.1, float):
        print("  Computing points of hermit cubic at peek " + str(peek_x) + ".")
        points = datalgo.move_scale_curve_points(
                    distribution.make_points_of_hermit_cubic_approximation_of_normal_distribution(
                        peek_x=peek_x,
                        mult_m01=1.0,
                        mult_mx=1.0,
                        num_points=100
                        ),
                    scale_x=0.298,
                    scale_y=10,
                    pow_y=1.5,
                    shift_x=0.002
                    )
        print("  Saving " + os.path.join(info.output_dir, "ns_curve_hermit_adapted_" + format(peek_x, ".2f") + ".png"))
        plot.curve(
            points,
            os.path.join(info.output_dir, "ns_curve_hermit_adapted_" + format(peek_x, ".2f") + ".png")
            )
        print("  Computing histogram from the hermit cubic.")
        hist = datalgo.make_histogram_from_points(points)
        print("  Saving " + os.path.join(info.output_dir, "ns_hist_adapted_" + format(peek_x, ".2f") + ".png"))
        plot.histogram(
            hist,
            os.path.join(info.output_dir, "ns_hist_adapted_" + format(peek_x, ".2f") + ".png"),
            normalised=False
            )
        print("  Saving " + os.path.join(info.output_dir, "ns_hist_normalised_adapted_" +
                                         format(peek_x, ".2f") + ".png"))
        plot.histogram(
            hist,
            os.path.join(info.output_dir, "ns_hist_normalised_adapted_" + format(peek_x, ".2f") + ".png"),
            normalised=True
            )
        print("  Saving " + os.path.join(info.output_dir, "hd_" + format(peek_x, ".2f") + ".png"))
        plot.histogram(
            distribution.hermit_distribution(peek_x),
            os.path.join(info.output_dir, "hd_" + format(peek_x, ".2f") + ".png"),
            )

    return 0
Example #8
    def graph_histogram(self, values, output, title, xlabel, num_bins=20, xmax=None, ymax=None):
        """
            Plot a histogram.

            Args:
            - values : The data to plot. An array of integers.
            - output : Location to save output, relative to self.output_dir.
            - title : Figure title.
            - xlabel : Figure x label.
            Options:
            - num_bins : The number of histogram bins to divide the data among.
            - xmax : Manually set the x-axis maximum. (Used to standardize widths.)
            - ymax : Manually set the y-axis maximum.
        """
        return plot.histogram(values,
                              title,
                              xlabel,
                              "Count",
                              num_bins,
                              xmax or max(values),
                              ymax or None,
                              "%s/%s" % (self.output_dir, output))
Example #9
def _test_alignment_of_aligned_spike_trains(info):
    """
    Checks the alignment of spike trains that are all aligned to a
    pivot spike train. The test is conducted for several alignment
    coefficients.
    """
    assert isinstance(info, TestInfo)

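    # Hand out a distinct deterministic seed to every train so the test is reproducible.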
    seed = 0

    def get_next_seed():
        nonlocal seed
        seed += 1
        return seed

    numpy.random.seed(get_next_seed())

    def make_spike_train(is_excitatory=True, mean_frequency=None, percentage_of_regularity_phases=None, seed=None):
        if mean_frequency is None:
            mean_frequency = 15.0 if is_excitatory else 60.0
        if percentage_of_regularity_phases is None:
            percentage_of_regularity_phases = 0.0
        return spike_train.create(
                    distribution.hermit_distribution_with_desired_mean(1.0 / mean_frequency, 0.003, 0.3, 0.0001, pow_y=2, seed=seed)
                        if is_excitatory
                        else distribution.hermit_distribution_with_desired_mean(1.0 / mean_frequency, 0.001, 0.08, 0.0001, bin_size=0.0002, pow_y=2, seed=seed),
                    percentage_of_regularity_phases
                    )

    pivot_trains = [
        (make_spike_train(True, seed=get_next_seed()), "epivot"),
        (make_spike_train(False, seed=get_next_seed()), "ipivot"),
        ]

    nsteps = 1000000
    dt = 0.0001
    start_time = 0.0

    def simulate_trains(trains):
        t = start_time
        for step in range(nsteps):
            utility.print_progress_string(step, nsteps)
            for train, _ in trains:
                train.on_time_step(t, dt)
            t += dt

    print("Simulating pivot trains")
    simulate_trains(pivot_trains)

    def make_aligned_train(
            pivot, alignment_coefficient, dispersion_fn,
            is_excitatory=True, mean_frequency=None, seed=None
            ):
        assert isinstance(pivot, spike_train.SpikeTrain)
        train = make_spike_train(is_excitatory, mean_frequency, 0.0, seed)
        train.set_spike_history_alignment(pivot.get_spikes_history(), alignment_coefficient, dispersion_fn)
        return train

    dispersion_fn = datalgo.AlignmentDispersionToSpikesHistory(2.0)

    other_trains = [[(make_aligned_train(pivot, coef, dispersion_fn, kind, seed=get_next_seed()),
                      kname + str(idx) + "_" + format(coef, ".2f"))
                        for kind, kname in [(True, "e"), (False, "i")]
                        for idx in range(2)
                        for coef in [-1.0, -0.75, -0.5, -0.25, 0.0, 0.25, 0.5, 0.75, 1.0]
                        ] for pivot, _ in pivot_trains]

    print("Simulating other trains")
    simulate_trains([train for pivot_trains in other_trains for train in pivot_trains])

    print("Saving results:")
    for pivot_desc_pair, pivot_trains in zip(pivot_trains, other_trains):
        pivot = pivot_desc_pair[0]
        pivot_desc = pivot_desc_pair[1]
        outdir = os.path.join(info.output_dir, pivot_desc)
        os.makedirs(outdir, exist_ok=True)
        print("    Saving statistics of spike train: " + pivot_desc)
        with open(os.path.join(outdir, "stats_" + pivot_desc + ".json"), "w") as ofile:
            ofile.write(json.dumps({"configuration": pivot.get_configuration(), "statistics": pivot.get_statistics()},
                                   sort_keys=True,
                                   indent=4))
        print("    Saving spike event distributions of spike train: " + pivot_desc)
        plot.histogram(
            datalgo.make_histogram(
                datalgo.make_difference_events(pivot.get_spikes_history()),
                dt,
                start_time
                ),
            os.path.join(outdir, "distrib_" + pivot_desc + ".png"),
            normalised=False
            )
        for i in range(len(pivot_trains)):
            train_i, desc_i = pivot_trains[i]
            print("    Saving statistics of spike train: " + desc_i)
            with open(os.path.join(outdir, "stats_" + desc_i + ".json"), "w") as ofile:
                ofile.write(json.dumps({"configuration": train_i.get_configuration(),
                                        "statistics": train_i.get_statistics()},
                                       sort_keys=True,
                                       indent=4))
            print("    Saving spike event distributions of spike train: " + desc_i)
            plot.histogram(
                datalgo.make_histogram(
                    datalgo.make_difference_events(train_i.get_spikes_history()),
                    dt,
                    start_time
                    ),
                os.path.join(outdir, "distrib_" + desc_i + ".png"),
                normalised=False
                )

            print("    Saving alignment histogram: " + desc_i + " VS " + pivot_desc)
            hist_pair = datalgo.make_alignment_histograms_of_spike_histories(
                                train_i.get_spikes_history(),
                                pivot.get_spikes_history(),
                                0.005,
                                dt / 2.0
                                )
            plot.histogram(
                hist_pair[0],
                os.path.join(outdir, "hist_" + desc_i + "_vs_" + pivot_desc + ".png"),
                normalised=False
                )

            for j in range(i + 1, len(pivot_trains)):
                train_j, desc_j = pivot_trains[j]

                print("    Saving alignment histogram: " + desc_i + " VS " + desc_j)
                hist_pair = datalgo.make_alignment_histograms_of_spike_histories(
                                    train_i.get_spikes_history(),
                                    train_j.get_spikes_history(),
                                    0.005,
                                    dt / 2.0
                                    )
                # plot.histogram(
                #     hist_pair[0],
                #     os.path.join(outdir, "hist_" + desc_i + "_vs_" + desc_j + ".png"),
                #     normalised=False
                #     )
                # plot.histogram(
                #     hist_pair[1],
                #     os.path.join(outdir, "hist_" + desc_j + "_vs_" + desc_i + ".png")
                #     )

                print("    Saving alignment curve: " + desc_i + " VS " + desc_j)
                with plot.Plot(os.path.join(outdir, "curve_" + desc_i + "_vs_" + desc_j + ".png")) as plt:
                    plt.curve(
                        datalgo.interpolate_discrete_function(
                            datalgo.approximate_discrete_function(
                                distribution.Distribution(hist_pair[0]).get_probability_points()
                                )
                            ),
                        legend=desc_i + " -> " + desc_j
                        )
                    plt.curve(
                        datalgo.interpolate_discrete_function(
                            datalgo.approximate_discrete_function(
                                distribution.Distribution(hist_pair[1]).get_probability_points()
                                )
                            ),
                        legend=desc_j + " -> " + desc_i
                        )
        plot.event_board_per_partes(
            [pivot.get_spikes_history()] + [train.get_spikes_history() for train, _ in pivot_trains],
            os.path.join(outdir, "spikes_board.png"),
            start_time,
            start_time + nsteps * dt,
            1.0,
            5,
            lambda p: print("    Saving spikes board part: " + os.path.basename(p)),
            [[plot.get_random_rgb_colour()] * len(pivot.get_spikes_history())] +
                [[plot.get_random_rgb_colour()] * len(train.get_spikes_history())
                 for train, _ in pivot_trains],
            " " + plot.get_title_placeholder() + " SPIKES BOARD"
            )

    return 0    # No failures
Example #10
def _test_aligned_spike_trains(info):
    """
    Checks alignment of spike trains for different alignment coefficients.
    """
    assert isinstance(info, TestInfo)


    # def make_alignment_histogram(name, lo_event, hi_event, dt, num_events=None):
    #     if num_events is None:
    #         num_events = int((hi_event - lo_event) / dt) + 1
    #     raw_events = [lo_event + (i / float(num_events - 1)) * (hi_event - lo_event) for i in range(0, num_events)]
    #     events = []
    #     for e in raw_events:
    #         t = 0.0
    #         while e > t + 1.5 * dt:
    #             t += dt
    #         events.append(t)
    #     half_epsilon = 0.5 * (dt / 10)
    #     coefs = []
    #     for i in range(len(events)):
    #         for j in range(i + 1, len(events)):
    #             dist = events[j] - events[i]
    #             shift = dt
    #             while shift < dist:
    #                 if not (shift < half_epsilon or shift > dist - half_epsilon):
    #                     # print(format(shift, ".3f") + " / " + format(dist, ".3f") + " = " + format(shift/dist, ".3f") +
    #                     #       "  --->  " +
    #                     #       format(shift/dt, ".3f") + " / " + format(dist / dt, ".3f") + " = " + format((shift/dt)/(dist/dt), ".3f"))
    #                     coef = max(-1.0, min(2.0 * shift / dist - 1.0, 1.0))
    #                     coefs.append(coef)
    #                 shift += dt
    #     coefs = sorted(coefs)
    #     hist = datalgo.make_histogram(coefs, 0.005, 0.0, 1)
    #     plot.histogram(
    #         hist,
    #         os.path.join(info.output_dir, name + ".png"),
    #         normalised=False
    #         )
    #     return 0
    # make_alignment_histogram("xhist", 0.003, 0.030, 0.0001)
    # make_alignment_histogram("yhist", 0.003, 0.130, 0.0001)
    # return 0

    seed = 0

    def get_next_seed():
        nonlocal seed
        seed += 1
        return seed

    numpy.random.seed(get_next_seed())

    def make_spike_train(is_excitatory=True, mean_frequency=None, percentage_of_regularity_phases=None, seed=None):
        # min_event = 0.003
        # max_event = 0.030
        # num_events = 28
        # return spike_train.create(
        #             distribution.Distribution(
        #                 {(min_event + (i / float(num_events - 1)) * (max_event - min_event)): 1.0 / float(num_events)
        #                  for i in range(0, num_events)},
        #                 seed
        #                 ),
        #             0.0
        #             )
        if mean_frequency is None:
            mean_frequency = 15.0 if is_excitatory else 60.0
        if percentage_of_regularity_phases is None:
            percentage_of_regularity_phases = 0.0
        return spike_train.create(
                    distribution.hermit_distribution_with_desired_mean(1.0 / mean_frequency, 0.003, 0.3, 0.0001, pow_y=2, seed=seed)
                        if is_excitatory
                        else distribution.hermit_distribution_with_desired_mean(1.0 / mean_frequency, 0.001, 0.08, 0.0001, bin_size=0.0002, pow_y=2, seed=seed),
                    percentage_of_regularity_phases
                    )

    pivot_trains = [
        (make_spike_train(True, seed=get_next_seed()), "epivot"),
        (make_spike_train(False, seed=get_next_seed()), "ipivot"),
        ]

    nsteps = 1000000
    dt = 0.0001
    start_time = 0.0

    def simulate_trains(trains):
        t = start_time
        for step in range(nsteps):
            utility.print_progress_string(step, nsteps)
            for train, _ in trains:
                train.on_time_step(t, dt)
            t += dt

    print("Simulating pivot trains")
    simulate_trains(pivot_trains)

    def make_aligned_train(
            pivot, alignment_coefficient, dispersion_fn,
            is_excitatory=True, mean_frequency=None, seed=None
            ):
        assert isinstance(pivot, spike_train.SpikeTrain)
        train = make_spike_train(is_excitatory, mean_frequency, 0.0, seed)
        train.set_spike_history_alignment(pivot.get_spikes_history(), alignment_coefficient, dispersion_fn)
        return train

    dispersion_fn = datalgo.AlignmentDispersionToSpikesHistory(2.0)

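    # Two unaligned control trains, plus one aligned train for every
    # (pivot, excitatory/inhibitory kind, alignment coefficient) combination.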
    other_trains = [
        (make_spike_train(True, seed=get_next_seed()), "eunaligned"),
        (make_spike_train(False, seed=get_next_seed()), "iunaligned"),
        ] + [
        (make_aligned_train(pivot, coef, dispersion_fn, kind, seed=get_next_seed()),
         kname + "aligned(" + desc + "," + format(coef, ".2f") + ")")
            for pivot, desc in pivot_trains
            for kind, kname in [(True, "e"), (False, "i")]
            for coef in [-1.0, -0.75, -0.5, -0.25, 0.0, 0.25, 0.5, 0.75, 1.0]
        ]

    print("Simulating other trains")
    simulate_trains(other_trains)

    print("Saving results:")
    for pivot, pivot_desc in pivot_trains:
        print("    Saving statistics of spike train: " + pivot_desc)
        with open(os.path.join(info.output_dir, "stats_" + pivot_desc + ".json"), "w") as ofile:
            ofile.write(json.dumps({"configuration": pivot.get_configuration(), "statistics": pivot.get_statistics()},
                                   sort_keys=True,
                                   indent=4))
        print("    Saving spike event distributions of spike train: " + pivot_desc)
        plot.histogram(
            datalgo.make_histogram(
                datalgo.make_difference_events(pivot.get_spikes_history()),
                dt,
                start_time
                ),
            os.path.join(info.output_dir, "distrib_" + pivot_desc + ".png"),
            normalised=False
            )
    for other, other_desc in other_trains:
        print("    Saving statistics of spike train: " + other_desc)
        with open(os.path.join(info.output_dir, "stats_" + other_desc + ".json"), "w") as ofile:
            ofile.write(json.dumps({"configuration": other.get_configuration(), "statistics": other.get_statistics()},
                                   sort_keys=True,
                                   indent=4))
        print("    Saving spike event distributions of spike train: " + other_desc)
        plot.histogram(
            datalgo.make_histogram(
                datalgo.make_difference_events(other.get_spikes_history()),
                dt,
                start_time
                ),
            os.path.join(info.output_dir, "distrib_" + other_desc + ".png"),
            normalised=False
            )
    for pivot, pivot_desc in pivot_trains:
        for other, other_desc in other_trains:
            print("    Saving alignment histograms: " + pivot_desc + " VS " + other_desc)
            hist_pair = datalgo.make_alignment_histograms_of_spike_histories(
                                pivot.get_spikes_history(),
                                other.get_spikes_history(),
                                0.005,
                                dt / 2.0
                                )
            plot.histogram(
                hist_pair[0],
                os.path.join(info.output_dir, "hist_" + pivot_desc + "_vs_" + other_desc + ".png"),
                normalised=False
                )
            plot.histogram(
                hist_pair[1],
                os.path.join(info.output_dir, "hist_" + pivot_desc + "_vs_" + other_desc + "_inv.png")
                )
            print("    Saving alignment curve: " + pivot_desc + " VS " + other_desc)
            with plot.Plot(os.path.join(info.output_dir, "curve_" + pivot_desc + "_vs_" + other_desc + ".png")) as plt:
                plt.curve(
                    datalgo.interpolate_discrete_function(
                        datalgo.approximate_discrete_function(
                            distribution.Distribution(hist_pair[0]).get_probability_points()
                            )
                        ),
                    legend=pivot_desc + " -> " + other_desc
                    )
                plt.curve(
                    datalgo.interpolate_discrete_function(
                        datalgo.approximate_discrete_function(
                            distribution.Distribution(hist_pair[1]).get_probability_points()
                            )
                        ),
                    legend=other_desc + " -> " + pivot_desc
                    )
        plot.event_board_per_partes(
            [pivot.get_spikes_history()] + [other.get_spikes_history() for other, _ in other_trains],
            os.path.join(info.output_dir, "spikes_board_" + pivot_desc + ".png"),
            start_time,
            start_time + nsteps * dt,
            1.0,
            5,
            lambda p: print("    Saving spikes board part: " + os.path.basename(p)),
            [[plot.get_random_rgb_colour()] * len(pivot.get_spikes_history())] +
                [[plot.get_random_rgb_colour()] * len(other.get_spikes_history())
                 for other, _ in other_trains],
            " " + plot.get_title_placeholder() + " SPIKES BOARD"
            )

    return 0    # No failures
Example #11
def _test_spike_trains(info):
    """
    The test generates several excitatory and inhibitory spike trains.
    Each excitatory/inhibitory spike train differs from the others in
    the level of noise in the time intervals between individual spikes.
    Nevertheless, the spiking distribution is preserved for each spike
    train at any chosen noise level.
    """
    assert isinstance(info, TestInfo)

    start_time = 0.0
    dt = 0.001
    nsteps = 5 * 60 * 1000
    num_spikers_per_kind = 11

    trains = [spike_train.create(distribution.default_excitatory_isi_distribution(), 10.0 * i)
              for i in range(num_spikers_per_kind)] +\
             [spike_train.create(distribution.default_inhibitory_isi_distribution(), 10.0 * i)
              for i in range(num_spikers_per_kind)]

    t = start_time
    for step in range(nsteps):
        utility.print_progress_string(step, nsteps)
        for train in trains:
            train.on_time_step(t, dt)
        t += dt

    print("  Saving results.")

    for i, train in enumerate(trains):
        if i < num_spikers_per_kind:
            train_id = "excitatory[" + str(i) + "]"
            colour = plot.get_colour_pre_excitatory(0.75)
        else:
            train_id = "inhibitory[" + str(i - num_spikers_per_kind) + "]"
            colour = plot.get_colour_pre_inhibitory(0.75)

        file_name = train_id + "_info.json"
        pathname = os.path.join(info.output_dir, file_name)
        print("    Saving info " + pathname)
        with open(pathname, "w") as ofile:
            ofile.write(json.dumps({"configuration": train.get_configuration(), "statistics": train.get_statistics()},
                                   sort_keys=True,
                                   indent=4))

        file_name = train_id + "_isi_histogram.png"
        pathname = os.path.join(info.output_dir, file_name)
        print("    Saving plot " + pathname)
        plot.histogram(
            datalgo.make_histogram(
                datalgo.make_difference_events(
                    train.get_spikes_history()
                    ),
                dt,
                start_time
                ),
            pathname,
            False,
            colour,
            plot.get_title_placeholder()
            )

        # file_name = train_id + "_histogram_reguatory_lengths.png"
        # pathname = os.path.join(info.output_dir, file_name)
        # print("    Saving plot " + pathname)
        # plot.histogram(
        #     train.get_regularity_length_distribution(),
        #     pathname,
        #     False,
        #     colour,
        #     plot.get_title_placeholder()
        #     )
        #
        # file_name = train_id + "_histogram_noise_lengths.png"
        # pathname = os.path.join(info.output_dir, file_name)
        # print("    Saving plot " + pathname)
        # plot.histogram(
        #     train.get_noise_length_distribution(),
        #     pathname,
        #     False,
        #     colour,
        #     plot.get_title_placeholder()
        #     )

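        # Second difference of the spike history: how the ISI changes
        # from one spike to the next.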
        isi_delta =\
            datalgo.make_function_from_events(
                datalgo.make_difference_events(
                    datalgo.make_difference_events(
                        train.get_spikes_history()
                        )
                    )
                )
        plot.curve_per_partes(
            isi_delta,
            os.path.join(info.output_dir, train_id + "_isi_delta_curve.png"),
            0,
            len(isi_delta),
            1000,
            None,
            lambda p: print("    Saving plot " + p),
            colour,
            plot.get_title_placeholder()
            )

    return 0
Example #12
def _test_hermit_distribution(info):
    """
    The test builds a hermit distribution for each of several
    peak values. Curves, histograms, and normalised histograms are
    built, and the corresponding plots are saved.
    """
    assert isinstance(info, TestInfo)
    print("  Generating graph " +
          os.path.join(info.output_dir, "ns_curve.png"))
    plot.curve(
        distribution.make_points_of_normal_distribution(num_points=100,
                                                        nu=0.0,
                                                        sigma=1.0,
                                                        normalise=True),
        os.path.join(info.output_dir, "ns_curve.png"))
    for peek_x in numpy.arange(0.1, 0.95, 0.1, float):
        print("  Computing points of hermit cubic at peek " + str(peek_x) +
              ".")
        points = datalgo.move_scale_curve_points(
            distribution.
            make_points_of_hermit_cubic_approximation_of_normal_distribution(
                peek_x=peek_x, mult_m01=1.0, mult_mx=1.0, num_points=100),
            scale_x=0.298,
            scale_y=10,
            pow_y=1.5,
            shift_x=0.002)
        print("  Saving " + os.path.join(
            info.output_dir, "ns_curve_hermit_adapted_" +
            format(peek_x, ".2f") + ".png"))
        plot.curve(
            points,
            os.path.join(
                info.output_dir,
                "ns_curve_hermit_adapted_" + format(peek_x, ".2f") + ".png"))
        print("  Computing histogram from the hermit cubic.")
        hist = datalgo.make_histogram_from_points(points)
        print("  Saving " +
              os.path.join(info.output_dir, "ns_hist_adapted_" +
                           format(peek_x, ".2f") + ".png"))
        plot.histogram(
            hist,
            os.path.join(info.output_dir,
                         "ns_hist_adapted_" + format(peek_x, ".2f") + ".png"),
            normalised=False)
        print("  Saving " + os.path.join(
            info.output_dir, "ns_hist_normalised_adapted_" +
            format(peek_x, ".2f") + ".png"))
        plot.histogram(hist,
                       os.path.join(
                           info.output_dir, "ns_hist_normalised_adapted_" +
                           format(peek_x, ".2f") + ".png"),
                       normalised=True)
        print("  Saving " +
              os.path.join(info.output_dir, "hd_" + format(peek_x, ".2f") +
                           ".png"))
        plot.histogram(
            distribution.hermit_distribution(peek_x),
            os.path.join(info.output_dir,
                         "hd_" + format(peek_x, ".2f") + ".png"),
        )

    return 0
Example #13
def _test_alignment_of_aligned_spike_trains(info):
    """
    Checks the alignment of spike trains that are all aligned to a
    pivot spike train. The test is conducted for several alignment
    coefficients.
    """
    assert isinstance(info, TestInfo)

    seed = 0

    def get_next_seed():
        nonlocal seed
        seed += 1
        return seed

    numpy.random.seed(get_next_seed())

    def make_spike_train(is_excitatory=True,
                         mean_frequency=None,
                         percentage_of_regularity_phases=None,
                         seed=None):
        if mean_frequency is None:
            mean_frequency = 15.0 if is_excitatory else 60.0
        if percentage_of_regularity_phases is None:
            percentage_of_regularity_phases = 0.0
        return spike_train.create(
            distribution.hermit_distribution_with_desired_mean(
                1.0 / mean_frequency, 0.003, 0.3, 0.0001, pow_y=2, seed=seed)
            if is_excitatory else
            distribution.hermit_distribution_with_desired_mean(1.0 /
                                                               mean_frequency,
                                                               0.001,
                                                               0.08,
                                                               0.0001,
                                                               bin_size=0.0002,
                                                               pow_y=2,
                                                               seed=seed),
            percentage_of_regularity_phases)

    pivot_trains = [
        (make_spike_train(True, seed=get_next_seed()), "epivot"),
        (make_spike_train(False, seed=get_next_seed()), "ipivot"),
    ]

    nsteps = 1000000
    dt = 0.0001
    start_time = 0.0

    def simulate_trains(trains):
        t = start_time
        for step in range(nsteps):
            utility.print_progress_string(step, nsteps)
            for train, _ in trains:
                train.on_time_step(t, dt)
            t += dt

    print("Simulating pivot trains")
    simulate_trains(pivot_trains)

    def make_aligned_train(pivot,
                           alignment_coefficient,
                           dispersion_fn,
                           is_excitatory=True,
                           mean_frequency=None,
                           seed=None):
        assert isinstance(pivot, spike_train.SpikeTrain)
        train = make_spike_train(is_excitatory, mean_frequency, 0.0, seed)
        train.set_spike_history_alignment(pivot.get_spikes_history(),
                                          alignment_coefficient, dispersion_fn)
        return train

    dispersion_fn = datalgo.AlignmentDispersionToSpikesHistory(2.0)

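    # For each pivot: one aligned train per (kind, instance, coefficient)
    # triple, i.e. 2 x 2 x 9 = 36 trains per pivot.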
    other_trains = [[
        (make_aligned_train(pivot,
                            coef,
                            dispersion_fn,
                            kind,
                            seed=get_next_seed()),
         kname + str(idx) + "_" + format(coef, ".2f"))
        for kind, kname in [(True, "e"), (False, "i")] for idx in range(2)
        for coef in [-1.0, -0.75, -0.5, -0.25, 0.0, 0.25, 0.5, 0.75, 1.0]
    ] for pivot, _ in pivot_trains]

    print("Simulating other trains")
    simulate_trains(
        [train for pivot_trains in other_trains for train in pivot_trains])

    print("Saving results:")
    for pivot_desc_pair, pivot_trains in zip(pivot_trains, other_trains):
        pivot = pivot_desc_pair[0]
        pivot_desc = pivot_desc_pair[1]
        outdir = os.path.join(info.output_dir, pivot_desc)
        os.makedirs(outdir, exist_ok=True)
        print("    Saving statistics of spike train: " + pivot_desc)
        with open(os.path.join(outdir, "stats_" + pivot_desc + ".json"),
                  "w") as ofile:
            ofile.write(
                json.dumps(
                    {
                        "configuration": pivot.get_configuration(),
                        "statistics": pivot.get_statistics()
                    },
                    sort_keys=True,
                    indent=4))
        print("    Saving spike event distributions of spike train: " +
              pivot_desc)
        plot.histogram(datalgo.make_histogram(
            datalgo.make_difference_events(pivot.get_spikes_history()), dt,
            start_time),
                       os.path.join(outdir, "distrib_" + pivot_desc + ".png"),
                       normalised=False)
        for i in range(len(pivot_trains)):
            train_i, desc_i = pivot_trains[i]
            print("    Saving statistics of spike train: " + desc_i)
            with open(os.path.join(outdir, "stats_" + desc_i + ".json"),
                      "w") as ofile:
                ofile.write(
                    json.dumps(
                        {
                            "configuration": train_i.get_configuration(),
                            "statistics": train_i.get_statistics()
                        },
                        sort_keys=True,
                        indent=4))
            print("    Saving spike event distributions of spike train: " +
                  desc_i)
            plot.histogram(datalgo.make_histogram(
                datalgo.make_difference_events(train_i.get_spikes_history()),
                dt, start_time),
                           os.path.join(outdir, "distrib_" + desc_i + ".png"),
                           normalised=False)

            print("    Saving alignment histogram: " + desc_i + " VS " +
                  pivot_desc)
            hist_pair = datalgo.make_alignment_histograms_of_spike_histories(
                train_i.get_spikes_history(), pivot.get_spikes_history(),
                0.005, dt / 2.0)
            plot.histogram(
                hist_pair[0],
                os.path.join(outdir,
                             "hist_" + desc_i + "_vs_" + pivot_desc + ".png"),
                normalised=False)

            for j in range(i + 1, len(pivot_trains)):
                train_j, desc_j = pivot_trains[j]

                print("    Saving alignment histogram: " + desc_i + " VS " +
                      desc_j)
                hist_pair = datalgo.make_alignment_histograms_of_spike_histories(
                    train_i.get_spikes_history(), train_j.get_spikes_history(),
                    0.005, dt / 2.0)
                # plot.histogram(
                #     hist_pair[0],
                #     os.path.join(outdir, "hist_" + desc_i + "_vs_" + desc_j + ".png"),
                #     normalised=False
                #     )
                # plot.histogram(
                #     hist_pair[1],
                #     os.path.join(outdir, "hist_" + desc_j + "_vs_" + desc_i + ".png")
                #     )

                print("    Saving alignment curve: " + desc_i + " VS " +
                      desc_j)
                with plot.Plot(
                        os.path.join(
                            outdir, "curve_" + desc_i + "_vs_" + desc_j +
                            ".png")) as plt:
                    plt.curve(datalgo.interpolate_discrete_function(
                        datalgo.approximate_discrete_function(
                            distribution.Distribution(
                                hist_pair[0]).get_probability_points())),
                              legend=desc_i + " -> " + desc_j)
                    plt.curve(datalgo.interpolate_discrete_function(
                        datalgo.approximate_discrete_function(
                            distribution.Distribution(
                                hist_pair[1]).get_probability_points())),
                              legend=desc_j + " -> " + desc_i)
        plot.event_board_per_partes(
            [pivot.get_spikes_history()] +
            [train.get_spikes_history() for train, _ in pivot_trains],
            os.path.join(outdir, "spikes_board.png"), start_time,
            start_time + nsteps * dt, 1.0, 5, lambda p: print(
                "    Saving spikes board part: " + os.path.basename(p)),
            [[plot.get_random_rgb_colour()] * len(pivot.get_spikes_history())]
            + [[plot.get_random_rgb_colour()] * len(train.get_spikes_history())
               for train, _ in pivot_trains],
            " " + plot.get_title_placeholder() + " SPIKES BOARD")

    return 0  # No failures
Example #14
def _test_aligned_spike_trains(info):
    """
    Checks alignment of spike trains for different alignment coefficients.
    """
    assert isinstance(info, TestInfo)

    # def make_alignment_histogram(name, lo_event, hi_event, dt, num_events=None):
    #     if num_events is None:
    #         num_events = int((hi_event - lo_event) / dt) + 1
    #     raw_events = [lo_event + (i / float(num_events - 1)) * (hi_event - lo_event) for i in range(0, num_events)]
    #     events = []
    #     for e in raw_events:
    #         t = 0.0
    #         while e > t + 1.5 * dt:
    #             t += dt
    #         events.append(t)
    #     half_epsilon = 0.5 * (dt / 10)
    #     coefs = []
    #     for i in range(len(events)):
    #         for j in range(i + 1, len(events)):
    #             dist = events[j] - events[i]
    #             shift = dt
    #             while shift < dist:
    #                 if not (shift < half_epsilon or shift > dist - half_epsilon):
    #                     # print(format(shift, ".3f") + " / " + format(dist, ".3f") + " = " + format(shift/dist, ".3f") +
    #                     #       "  --->  " +
    #                     #       format(shift/dt, ".3f") + " / " + format(dist / dt, ".3f") + " = " + format((shift/dt)/(dist/dt), ".3f"))
    #                     coef = max(-1.0, min(2.0 * shift / dist - 1.0, 1.0))
    #                     coefs.append(coef)
    #                 shift += dt
    #     coefs = sorted(coefs)
    #     hist = datalgo.make_histogram(coefs, 0.005, 0.0, 1)
    #     plot.histogram(
    #         hist,
    #         os.path.join(info.output_dir, name + ".png"),
    #         normalised=False
    #         )
    #     return 0
    # make_alignment_histogram("xhist", 0.003, 0.030, 0.0001)
    # make_alignment_histogram("yhist", 0.003, 0.130, 0.0001)
    # return 0

    seed = 0

    def get_next_seed():
        nonlocal seed
        seed += 1
        return seed

    numpy.random.seed(get_next_seed())

    def make_spike_train(is_excitatory=True,
                         mean_frequency=None,
                         percentage_of_regularity_phases=None,
                         seed=None):
        # min_event = 0.003
        # max_event = 0.030
        # num_events = 28
        # return spike_train.create(
        #             distribution.Distribution(
        #                 {(min_event + (i / float(num_events - 1)) * (max_event - min_event)): 1.0 / float(num_events)
        #                  for i in range(0, num_events)},
        #                 seed
        #                 ),
        #             0.0
        #             )
        if mean_frequency is None:
            mean_frequency = 15.0 if is_excitatory else 60.0
        if percentage_of_regularity_phases is None:
            percentage_of_regularity_phases = 0.0
        return spike_train.create(
            distribution.hermit_distribution_with_desired_mean(
                1.0 / mean_frequency, 0.003, 0.3, 0.0001, pow_y=2, seed=seed)
            if is_excitatory else
            distribution.hermit_distribution_with_desired_mean(1.0 /
                                                               mean_frequency,
                                                               0.001,
                                                               0.08,
                                                               0.0001,
                                                               bin_size=0.0002,
                                                               pow_y=2,
                                                               seed=seed),
            percentage_of_regularity_phases)

    pivot_trains = [
        (make_spike_train(True, seed=get_next_seed()), "epivot"),
        (make_spike_train(False, seed=get_next_seed()), "ipivot"),
    ]

    nsteps = 1000000
    dt = 0.0001
    start_time = 0.0

    def simulate_trains(trains):
        t = start_time
        for step in range(nsteps):
            utility.print_progress_string(step, nsteps)
            for train, _ in trains:
                train.on_time_step(t, dt)
            t += dt

    print("Simulating pivot trains")
    simulate_trains(pivot_trains)

    def make_aligned_train(pivot,
                           alignment_coefficient,
                           dispersion_fn,
                           is_excitatory=True,
                           mean_frequency=None,
                           seed=None):
        assert isinstance(pivot, spike_train.SpikeTrain)
        train = make_spike_train(is_excitatory, mean_frequency, 0.0, seed)
        train.set_spike_history_alignment(pivot.get_spikes_history(),
                                          alignment_coefficient, dispersion_fn)
        return train

    dispersion_fn = datalgo.AlignmentDispersionToSpikesHistory(2.0)

    other_trains = [
        (make_spike_train(True, seed=get_next_seed()), "eunaligned"),
        (make_spike_train(False, seed=get_next_seed()), "iunaligned"),
    ] + [(make_aligned_train(
        pivot, coef, dispersion_fn, kind, seed=get_next_seed()),
          kname + "aligned(" + desc + "," + format(coef, ".2f") + ")")
         for pivot, desc in pivot_trains
         for kind, kname in [(True, "e"), (False, "i")]
         for coef in [-1.0, -0.75, -0.5, -0.25, 0.0, 0.25, 0.5, 0.75, 1.0]]

    print("Simulating other trains")
    simulate_trains(other_trains)

    print("Saving results:")
    for pivot, pivot_desc in pivot_trains:
        print("    Saving statistics of spike train: " + pivot_desc)
        with open(
                os.path.join(info.output_dir, "stats_" + pivot_desc + ".json"),
                "w") as ofile:
            ofile.write(
                json.dumps(
                    {
                        "configuration": pivot.get_configuration(),
                        "statistics": pivot.get_statistics()
                    },
                    sort_keys=True,
                    indent=4))
        print("    Saving spike event distributions of spike train: " +
              pivot_desc)
        plot.histogram(datalgo.make_histogram(
            datalgo.make_difference_events(pivot.get_spikes_history()), dt,
            start_time),
                       os.path.join(info.output_dir,
                                    "distrib_" + pivot_desc + ".png"),
                       normalised=False)
    for other, other_desc in other_trains:
        print("    Saving statistics of spike train: " + other_desc)
        with open(
                os.path.join(info.output_dir, "stats_" + other_desc + ".json"),
                "w") as ofile:
            ofile.write(
                json.dumps(
                    {
                        "configuration": other.get_configuration(),
                        "statistics": other.get_statistics()
                    },
                    sort_keys=True,
                    indent=4))
        print("    Saving spike event distributions of spike train: " +
              other_desc)
        plot.histogram(datalgo.make_histogram(
            datalgo.make_difference_events(other.get_spikes_history()), dt,
            start_time),
                       os.path.join(info.output_dir,
                                    "distrib_" + other_desc + ".png"),
                       normalised=False)
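    # Cross-compare every pivot with every other train: forward and inverse
    # alignment histograms plus interpolated probability curves.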
    for pivot, pivot_desc in pivot_trains:
        for other, other_desc in other_trains:
            print("    Saving alignment histograms: " + pivot_desc + " VS " +
                  other_desc)
            hist_pair = datalgo.make_alignment_histograms_of_spike_histories(
                pivot.get_spikes_history(), other.get_spikes_history(), 0.005,
                dt / 2.0)
            plot.histogram(hist_pair[0],
                           os.path.join(
                               info.output_dir, "hist_" + pivot_desc + "_vs_" +
                               other_desc + ".png"),
                           normalised=False)
            plot.histogram(
                hist_pair[1],
                os.path.join(
                    info.output_dir,
                    "hist_" + pivot_desc + "_vs_" + other_desc + "_inv.png"))
            print("    Saving alignment curve: " + pivot_desc + " VS " +
                  other_desc)
            with plot.Plot(
                    os.path.join(
                        info.output_dir, "curve_" + pivot_desc + "_vs_" +
                        other_desc + ".png")) as plt:
                plt.curve(datalgo.interpolate_discrete_function(
                    datalgo.approximate_discrete_function(
                        distribution.Distribution(
                            hist_pair[0]).get_probability_points())),
                          legend=pivot_desc + " -> " + other_desc)
                plt.curve(datalgo.interpolate_discrete_function(
                    datalgo.approximate_discrete_function(
                        distribution.Distribution(
                            hist_pair[1]).get_probability_points())),
                          legend=other_desc + " -> " + pivot_desc)
        plot.event_board_per_partes(
            [pivot.get_spikes_history()] +
            [other.get_spikes_history() for other, _ in other_trains],
            os.path.join(info.output_dir,
                         "spikes_board_" + pivot_desc + ".png"), start_time,
            start_time + nsteps * dt, 1.0, 5, lambda p: print(
                "    Saving spikes board part: " + os.path.basename(p)),
            [[plot.get_random_rgb_colour()] * len(pivot.get_spikes_history())]
            + [[plot.get_random_rgb_colour()] * len(other.get_spikes_history())
               for other, _ in other_trains],
            " " + plot.get_title_placeholder() + " SPIKES BOARD")

    return 0  # No failures
Example #15
def _test_spike_trains(info):
    """
    The test generates several excitatory and inhibitory spike trains.
    Each excitatory/inhibitory spike train differs from the others in
    the level of noise in the time intervals between individual spikes.
    Nevertheless, the spiking distribution is preserved for each spike
    train at any chosen noise level.
    """
    assert isinstance(info, TestInfo)

    start_time = 0.0
    dt = 0.001
    nsteps = 5 * 60 * 1000
    num_spikers_per_kind = 11

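    # One train per noise level: the second argument (10.0 * i) sweeps
    # the regularity-phase percentage from 0 to 100.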
    trains = [spike_train.create(distribution.default_excitatory_isi_distribution(), 10.0 * i)
              for i in range(num_spikers_per_kind)] +\
             [spike_train.create(distribution.default_inhibitory_isi_distribution(), 10.0 * i)
              for i in range(num_spikers_per_kind)]

    t = start_time
    for step in range(nsteps):
        utility.print_progress_string(step, nsteps)
        for train in trains:
            train.on_time_step(t, dt)
        t += dt

    print("  Saving results.")

    for i, train in enumerate(trains):
        if i < num_spikers_per_kind:
            train_id = "excitatory[" + str(i) + "]"
            colour = plot.get_colour_pre_excitatory(0.75)
        else:
            train_id = "inhibitory[" + str(i - num_spikers_per_kind) + "]"
            colour = plot.get_colour_pre_inhibitory(0.75)

        file_name = train_id + "_info.json"
        pathname = os.path.join(info.output_dir, file_name)
        print("    Saving info " + pathname)
        with open(pathname, "w") as ofile:
            ofile.write(
                json.dumps(
                    {
                        "configuration": train.get_configuration(),
                        "statistics": train.get_statistics()
                    },
                    sort_keys=True,
                    indent=4))

        file_name = train_id + "_isi_histogram.png"
        pathname = os.path.join(info.output_dir, file_name)
        print("    Saving plot " + pathname)
        plot.histogram(
            datalgo.make_histogram(
                datalgo.make_difference_events(train.get_spikes_history()), dt,
                start_time), pathname, False, colour,
            plot.get_title_placeholder())

        # file_name = train_id + "_histogram_reguatory_lengths.png"
        # pathname = os.path.join(info.output_dir, file_name)
        # print("    Saving plot " + pathname)
        # plot.histogram(
        #     train.get_regularity_length_distribution(),
        #     pathname,
        #     False,
        #     colour,
        #     plot.get_title_placeholder()
        #     )
        #
        # file_name = train_id + "_histogram_noise_lengths.png"
        # pathname = os.path.join(info.output_dir, file_name)
        # print("    Saving plot " + pathname)
        # plot.histogram(
        #     train.get_noise_length_distribution(),
        #     pathname,
        #     False,
        #     colour,
        #     plot.get_title_placeholder()
        #     )

        isi_delta =\
            datalgo.make_function_from_events(
                datalgo.make_difference_events(
                    datalgo.make_difference_events(
                        train.get_spikes_history()
                        )
                    )
                )
        plot.curve_per_partes(
            isi_delta,
            os.path.join(info.output_dir,
                         train_id + "_isi_delta_curve.png"), 0, len(isi_delta),
            1000, None, lambda p: print("    Saving plot " + p), colour,
            plot.get_title_placeholder())

    return 0
Example #16
        negatives_data = parse_data(args.negatives_data)
        main(baseline_data, positives_data, negatives_data, args.roc)
    elif args.which == 'utils':
        if args.minmax > 0:
            minmax = utils.minmax([row[1] for row in baseline_data],
                                  args.minmax)

            print('Minimal {} values:'.format(args.minmax))
            for n in minmax['mins']:
                print('\t{}'.format(n))

            print('Maximal {} values:'.format(args.minmax))
            for n in minmax['maxs']:
                print('\t{}'.format(n))
        if args.stats:
            stats = utils.stats([row[1] for row in baseline_data])

            print('Mean: {}\nStd. Deviation: {}'.format(
                stats['mean'], stats['std_dev']))
    elif args.which == 'plot':
        plt_counter = 1
        if args.time_series:
            plot.time_series(baseline_data, plt_counter)
            plt_counter += 1
        if args.histogram:
            plot.histogram([row[1] for row in baseline_data], plt_counter)
            plt_counter += 1
        if args.base_slot_histogram:
            plot.base_slot_histogram(baseline_data, plt_counter)
            plt_counter += 1