Example #1
import numpy as np
import matplotlib.pyplot as plt
import homcloud.interface as hc  # HomCloud's high-level interface


def PlotPersist(dtsetfile, plotdir, grpname):
    dtset = np.load(dtsetfile, allow_pickle=True)
    # allow_pickle is required because of the loading behavior change in NumPy 1.16.3 and later
    dts = dtset['dataset']
    dataset0 = []

    for dt in dts:
        dt0 = dt['inputs/0']
        dataset0.append(dt0)

    dim0 = len(dataset0)
    dim1 = len(dataset0[0])
    dim2 = len(dataset0[0][0])

    PCdata = []
    for dt64 in dataset0:
        for dt in dt64:
            PCdata.append([dt[0], dt[1], dt[2]])

    #Plot persistent diagram
    hc.PDList.from_alpha_filtration(PCdata,
                                    save_to="pointcloud.idiagram",
                                    save_boundary_map=True)
    pdlist = hc.PDList("pointcloud.idiagram")
    pd = pdlist.dth_diagram(1)
    pd.histogram().plot(colorbar={"type": "log"})
    plotfile1 = plotdir + grpname + "-pers.png"
    plt.savefig(plotfile1)
    plt.close()

    print(
        f'Plot PC1/PC2/PC3 data of {grpname}: Shape= {dim0} x {dim1} x {dim2}')
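A hypothetical smoke test for the function above (not part of the original project): it builds a tiny dataset in the layout PlotPersist expects, i.e. an object array of dicts whose 'inputs/0' entry holds an (M, 3) block of points, saves it with numpy, and plots its H1 diagram. The file name, group name, and random data are placeholders.

import numpy as np

toy = [{'inputs/0': np.random.rand(64, 3)} for _ in range(4)]  # 4 groups of 64 random 3-D points
np.savez("toy_dataset.npz", dataset=np.array(toy, dtype=object))
PlotPersist("toy_dataset.npz", "./", "toy")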
Example #2
import math

import homcloud.interface as hc


def make_PD(pointcloud):
    hc.PDList.from_alpha_filtration(pointcloud,
                                    save_to="pointcloud.idiagram",
                                    save_boundary_map=True)

    pdlist = hc.PDList("pointcloud.idiagram")

    pd = [pdlist.dth_diagram(i) for i in range(3)]

    for j in range(len(pd)):
        lengths = [
            math.sqrt(pd[j].deaths[i]**2 + pd[j].births[i]**2)
            for i in range(len(pd[j].deaths))
        ]
        length_maximum = max(lengths)

        # rescale so the farthest (birth, death) point lies at distance sqrt(2) from the origin
        for i in range(len(pd[j].deaths)):
            pd[j].births[i] = math.sqrt(2) * pd[j].births[i] / length_maximum
            pd[j].deaths[i] = math.sqrt(2) * pd[j].deaths[i] / length_maximum

    return pd
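A sketch of how this make_PD might be driven, assuming a synthetic 3-D point cloud; the array size is an arbitrary illustration value.

import numpy as np

pointcloud = np.random.rand(200, 3)   # synthetic 3-D point cloud
pds = make_PD(pointcloud)             # rescaled H0, H1, H2 diagrams
for dim, diagram in enumerate(pds):
    print("H{}: {} birth-death pairs".format(dim, len(diagram.births)))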
Example #3
import homcloud.interface as hc


def make_PD(pointcloud):
    hc.PDList.from_alpha_filtration(pointcloud,
                                    save_to="pointcloud.idiagram",
                                    save_boundary_map=True)

    pdlist = hc.PDList("pointcloud.idiagram")

    PD = []

    for j in range(3):
        pd = pdlist.dth_diagram(j)

        death_max = max(pd.deaths)
        birth_max = max(pd.births)

        # normalize births and deaths to the [0, 1] interval
        for i in range(len(pd.deaths)):
            pd.births[i] = pd.births[i] / birth_max if pd.births[i] != 0 else 0
            pd.deaths[i] = pd.deaths[i] / death_max if pd.deaths[i] != 0 else 0

        PD.append(pd)

    return PD
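Because this variant rescales births and deaths into [0, 1], the returned diagrams are easy to inspect with a plain matplotlib scatter. The snippet below is an illustrative add-on rather than part of the original script; pointcloud stands for any (N, 3) array.

import matplotlib.pyplot as plt

PD = make_PD(pointcloud)
for dim, diagram in enumerate(PD):
    plt.scatter(diagram.births, diagram.deaths, s=5, label="H{}".format(dim))
plt.plot([0, 1], [0, 1], linewidth=0.5)  # diagonal: birth == death
plt.xlabel("birth (normalized)")
plt.ylabel("death (normalized)")
plt.legend()
plt.savefig("normalized_pd.png")
plt.close()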
Example #4
# main execution section

data = data_processing(path)
death_birth = []

for i in range(len(data_ranges)):
    #    for channel,pic_name,txt_name in zip(channels,picture_names,output_text_name):

    data = data_processing(path)

    pointcloud = delay(data, 'channel2', data_ranges[i])

    # compute and save the persistence diagrams
    hc.PDList.from_alpha_filtration(pointcloud,
                                    save_to="pointcloud.idiagram",
                                    save_boundary_map=True)

    pdlist = hc.PDList("pointcloud.idiagram")

    pd = [pdlist.dth_diagram(i) for i in range(3)]  # persistence diagrams for dimensions 0-2
    pd = normalization(pd)  # normalize the diagrams
    betti_sequence = generating_betti_sequence(pd)

    #        output_betti_picture(betti_sequence)
    output_betti_text(betti_sequence, "test{0}.txt".format(i + 1))

#        death_birth = calc_death_birth(pd,death_birth,i)
#        output_picture(pd,picture_names[i])
#        output_text(pd)

print("Done")
Example #5
    #compute R-wave peak coordinates
    rpeaks = signal.find_peaks(ekg[:, 1], height=0.5, distance=10)
    r_peak_xcs, r_peak_xc_idx = get_rpeak_xcs(rpeaks, ekg)
    rr_int_avg, rr_int_sd = get_rr_interval(r_peak_xcs)

    #trim signal to begin and end with an R-wave peak
    ekg = trim(ekg, r_peak_xcs)

    #compute persistent homology of processed signal
    output = hc.PDList.from_alpha_filtration(ekg,
                                             no_squared=True,
                                             save_boundary_map=True,
                                             save_phtrees=True,
                                             save_to="pointcloud.pdgm")
    pd1 = hc.PDList("pointcloud.pdgm").dth_diagram(1)
    persist = np.asarray(pd1.deaths - pd1.births)
    births = np.asarray(pd1.births)
    deaths = np.asarray(pd1.deaths)

    #compute optimal cycle representatives
    #cycle_xcs,cycle_ycs,bps=get_vol_opt_cycle_centroid_coords(persist,pd1)
    cycle_xcs, cycle_ycs, bps = get_card_opt_cycle_centroid_coords(
        persist, pd1)
    #cycle_xcs,cycle_ycs,bps=get_stab_vol_cycle_centroid_coords(persist,pd1)
    #cycle_xcs,cycle_ycs,bps=get_stab_subvol_cycle_centroid_coords(persist,pd1)

    #measure intervals of interest and compute the index locations of P,Q,S, and T-waves within the 1d array persist
    int_tda[i, :], idx_p, idx_q, idx_s, idx_t = get_intervals_and_H1wave_idxs(
        ekg, r_peak_xcs, rr_int_avg, persist, births, cycle_xcs, cycle_ycs,
        bps)
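    # The helpers above (get_rpeak_xcs, trim, the cycle-centroid routines, and
    # get_intervals_and_H1wave_idxs) are not part of this excerpt. As a generic,
    # hypothetical illustration of how the persist array is typically used
    # (not the author's helper), the most persistent H1 classes can be ranked directly:
    top_idx = np.argsort(persist)[::-1][:5]                  # five longest-lived H1 classes
    top_pairs = list(zip(births[top_idx], deaths[top_idx]))  # their (birth, death) coordinates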