Example #1
def BoxPlot(x, path='none', title='none', label='none'):
    """
    x: numpy array [particles, id]
    """

    sns.set_style("dark")

    # numpy -> list
    x_tuple = tuple(x[:, i].tolist() for i in np.arange(x.shape[1]))

    # x-tick labels, one per column (particle id)
    idx = [str(i + 1) for i in np.arange(x.shape[1])]

    fig, ax = plt.subplots()
    ax.boxplot(x_tuple,
               sym='d',
               patch_artist=True,
               boxprops=dict(facecolor='lightblue', color='gray'),
               medianprops=dict(color='gray'))
    ax.set_xticklabels(idx)

    myData.isDirectory(path)
    plt.savefig(os.path.join(path, f"{label}.png"))
    plt.close()
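
# Usage sketch (illustrative, not part of the original source): assumes numpy,
# seaborn and matplotlib.pyplot are imported as np, sns and plt, and that
# myData.isDirectory(path) creates the directory if it is missing.
x_demo = np.random.randn(100, 5)                    # [particles, id]
BoxPlot(x_demo, path="results/box", label="demo")   # -> results/box/demo.png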
Example #2
def scatter3D_heatmap(x,
                      y,
                      z,
                      var,
                      rangeP,
                      path="none",
                      title="none",
                      label="none"):
    """
    Args
        val: variable for heatmap
    """

    sns.set_style("dark")

    # normalize var into [0, 1]
    nlVar = list((var - np.min(var)) / (np.max(var) - np.min(var)))

    # red (low) -> yellow (high)
    colors = plt.cm.autumn(nlVar)

    fig = plt.figure()
    # note: Axes3D(fig) no longer auto-attaches in recent Matplotlib versions
    ax = fig.add_subplot(projection="3d")

    ax.scatter(x, y, z, c=colors, marker="o", alpha=0.5)
    """
    xmin = np.min([np.min(x[0]),np.min(x[1])])
    xmax = np.max([np.max(x[0]),np.max(x[1])])
    
    ymin = np.min([np.min(y[0]),np.min(y[1])])
    ymax = np.max([np.max(y[0]),np.max(y[1])])
    
    zmin = np.min([np.min(z[0]),np.min(z[1])])
    zmax = np.max([np.max(z[0]),np.max(z[1])])
    
    ax.set_xlim(xmin,xmax)
    ax.set_ylim(ymin,ymax)
    ax.set_zlim(zmin,zmax)
    """
    ax.set_xlim(rangeP[0][ntI], rangeP[1][ntI])
    ax.set_ylim(rangeP[0][tntI], rangeP[1][tntI])
    ax.set_zlim(rangeP[0][ttI], rangeP[1][ttI])

    ax.set_title(f"{title}")

    myData.isDirectory(path)
    plt.savefig(os.path.join(path, f"{label}.png"))
    plt.close()
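
# Usage sketch (illustrative): assumes the module-level cell indices ntI, tntI,
# ttI are 0, 1, 2 and rangeP = [lower bounds, upper bounds], one value per axis.
n = 200
xs, ys, zs = np.random.rand(n), np.random.rand(n), np.random.rand(n)
rates = np.random.rand(n)   # the variable mapped onto the autumn colormap
scatter3D_heatmap(xs, ys, zs, rates, rangeP=[np.zeros(3), np.ones(3)],
                  path="results/heatmap", title="demo", label="demo")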
Example #3
def HistLikelihood(weights, path, label="auto", color="black"):

    # mean & variance, shown in the figure title
    lhMean = np.mean(weights, 0)
    lhVar = np.var(weights, 0)

    sns.set_style("dark")
    # distplot was removed from recent seaborn; histplot is its replacement
    sns.histplot(weights, kde=False, color=color)

    plt.suptitle(f"mean:{lhMean}\n var:{lhVar}")
    myData.isDirectory(path)
    plt.savefig(os.path.join(path, f"{label}.png"))
    plt.close()
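
# Usage sketch (illustrative): weights is a 1-D array of particle likelihoods;
# the figure title reports their mean and variance.
w_demo = np.random.beta(2, 5, size=500)
HistLikelihood(w_demo, path="results/lh", label="lh_demo")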
Example #4
def scatter3D(x, y, z, rangeP, path="none", title="none", label="none"):

    sns.set_style("dark")

    fig = plt.figure()
    # note: Axes3D(fig) no longer auto-attaches in recent Matplotlib versions
    ax = fig.add_subplot(projection="3d")
    ax.scatter(x, y, z, c="black", marker="o", alpha=0.5, linewidths=0.5)

    ax.set_xlabel("nk")
    ax.set_ylabel("tnk")
    ax.set_zlabel("tk")

    ax.set_xlim(rangeP[0][ntI], rangeP[1][ntI])
    ax.set_ylim(rangeP[0][tntI], rangeP[1][tntI])
    ax.set_zlim(rangeP[0][ttI], rangeP[1][ttI])

    ax.set_title(f"{title}")

    myData.isDirectory(path)
    plt.savefig(os.path.join(path, f"{label}.png"))
    plt.close()
Example #5
def NumberLine(gt, pred, path, label="auto"):
    """
    発生年数がどうなってるかを確認したくって
    Args
        gt: 真値t list[nk,tnk,tk]
        pred: 予測値t [perticles,cells]
    """
    for cell in np.arange(3):
        # predicted years [particles,]
        x = gt[cell]
        xhat = pred[:, cell]
        y = [0]  # ground truth is plotted at y = 0
        yhat = [0] * xhat.shape[0]

        # number line ----------------------------------------------------------
        fig, ax = plt2.subplots(figsize=(10, 10))  # figure size
        fig.set_figheight(1)  # flatten the figure
        ax.tick_params(labelbottom=True, bottom=False)  # x-axis: labels only
        ax.tick_params(labelleft=False, left=False)  # y-axis: hidden
        # ---------------------------------------------------------------------

        # plot styling ---------------------------------------------------------
        xMin, xMax = 0, 1400
        plt2.tight_layout()  # auto-adjust the layout
        plt2.hlines(y=0, xmin=xMin, xmax=xMax, color="silver")  # horizontal axis
        pylab.box(False)  # remove the frame
        # -----------------------------------------------------------------

        # scatter plot -----------------------------------------------------
        plt2.scatter(xhat, yhat, c='skyblue')  # predictions
        plt2.scatter(x, y[0], c='coral')  # ground truth
        plt2.title(f"min:{int(np.min(xhat))} max:{int(np.max(xhat))}")
        # -----------------------------------------------------------------
        myData.isDirectory(path)
        plt2.savefig(os.path.join(path, f"{label}_{cellname[cell]}.png"),
                     bbox_inches="tight")

        plt2.close()
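
# Usage sketch (illustrative): assumes the module defines cellname = ["nk",
# "tnk", "tk"], as the save path suggests; gt holds one ground-truth year per
# cell and pred one row of predicted years per particle.
gt_demo = [84, 287, 496]
pred_demo = np.random.randint(0, 1400, size=(100, 3))   # [particles, cells]
NumberLine(gt_demo, pred_demo, path="results/numline", label="demo")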
Example #6
#------------------------------------------------------------------------------
if __name__ == "__main__":

    # 1: earthquakes that are certain to have occurred
    # 190: all earthquakes occurred
    for tfID in [190]:

        print("-----------------------------------")
        print("------ {} historical eq data ------".format(tfID))

        # directory path for each log
        dirPath = f"{tfID}"
        # full path for each log
        filePath = os.path.join(logsPath, dirPath, fileName)
        # create the directories if they do not exist
        myData.isDirectory(os.path.join(logsPath, dirPath))
        myData.isDirectory(os.path.join('parFile', dirPath))

        # ----------------- true Nankai Trough megaquake history V ------------ #
        with open(os.path.join(featuresPath, "nankairireki.pkl"), "rb") as fp:
            nkfiles = pickle.load(fp)

        # occurrence years & slip velocity (all 30)
        gtV = nkfiles[tfID, :, :]

        # ground-truth earthquakes without duplicates
        gtJ_nk = np.where(
            gtV[:, ntI] > 0)[0]  # [84,287,499,761,898,1005,1107,1254,1346]
        gtJ_tnk = np.where(
            gtV[:, tntI] > 0)[0]  # [84,287,496,761,898,1005,1107,1254,1344]
Example #7
        # updated param b
        updateBs = np.array(standB)
        # plot
        meanB, medianB = np.mean(updateBs, 0), np.median(updateBs, 0)
        # number of particles, used in the label
        numBs = updateBs.shape[0]
        # indices of the highest-rate particles
        maxBsind = [i for i, x in enumerate(ratebs) if x == max(ratebs)]
        # highest-rate particles
        maxBs = np.array([np.array(updateBs[ind]) for ind in maxBsind])
        # rate of the highest-rate particle
        maxrate = int(np.max(ratebs[maxBsind]))

        # save parameters (highest-rate particles)
        maxbpath = os.path.join(savetxtPath, f'maxB_{mode}')
        myData.isDirectory(maxbpath)
        np.savetxt(os.path.join(maxbpath, f'maxB_{iS+1}_{maxrate}.txt'),
                   maxBs * 1000000,
                   fmt='%d',
                   delimiter=',')
        # plot 3D heatmap scatter
        s3hpath = os.path.join(imgPath, f'PF_{mode}')
        myPlot.scatter3D_heatmap(updateBs[:, ntI],
                                 updateBs[:, tntI],
                                 updateBs[:, ttI],
                                 ratebs,
                                 rangeP=[minB, maxB],
                                 path=s3hpath,
                                 title=f'mean:{meanB}\n median:{medianB}',
                                 label=f'Bheatmap_{iS+1}_{numBs}')
    # -------------------------------------------------------------------------
Example #8
def simulate(features,y,x,mode=0,t=0,pTime=0,nP=0,nCell=3,isSavetxt=False,isPlot=False):
    """
    Args
        features: システムモデル値xt. th[1400,perticles,3], V[1400,perticles,3], b[perticles,3]
        y: 観測モデル値yt. [eq.years,]
        x: 地震年数(1400年). [(eq.years zero padding),perticles]
    """
    # 1. initialization --------------------------------------------------------
    # state vector theta, v, year (NOTE: may break for a single cell)
    # feature vector after resampling
    xResampled = np.zeros((nP,nCell))
    # all norm-likelihood
    maxgW = np.zeros((nP))
    maxpW = np.zeros((nP))
    # weight for eq. year in each cell + penalty
    gw = np.zeros((nP,nCell+1))
    # weight for eq. times
    pw = np.zeros((nP,nCell+1))
    # -------------------------------------------------------------------------
    # -------------------------------------------------------------------------
    flag = False
    for i in np.arange(nP): # loop over the ensemble
        # =====================================================================
        #         likelihood computation
        # =====================================================================
        # strip the zero-padding from pred, keeping only the predicted years [variable eq. years,]
        # predicted eq.year
        yhat_nk = (x[x[:,i,ntI]>0,i,ntI]).astype(int)
        yhat_tnk = (x[x[:,i,tntI]>0,i,tntI]).astype(int)
        yhat_tk = (x[x[:,i,ttI]>0,i,ttI]).astype(int)
        yhat = [yhat_nk,yhat_tnk,yhat_tk]
        
        # the likelihood is over earthquake years; the weight multiplies the state vector
        # 2.c & 2.d likelihood and weight of each particle -----------------------
        # ground truth eq.year (time=t)
        standYs = [y[ntI][t],y[tntI][t],y[ttI][t]]
       
        # nearest -----
        if mode == 100:
            weight, maxweight, years = norm_likelihood.norm_likelihood_nearest(y,yhat,standYs=standYs,time=t)
        
            gw[i] = weight
            maxgW[i] = maxweight
        
        if mode == 13:
            weight, maxweight, years = norm_likelihood.norm_likelihood_nearest_penalty(y,yhat,standYs=standYs,time=t)
        
            gw[i] = weight
            maxgW[i] = maxweight
        
        if mode == 101:
            weight, maxweight, years = norm_likelihood.norm_likelihood_nearest_safetypenalty(y,yhat,standYs=standYs,time=t)
                 
            gw[i] = weight
            maxgW[i] = maxweight
        
        if mode == 102:
            gweight, gmaxweight, years = norm_likelihood.norm_likelihood_nearest_safetypenalty(y,yhat,standYs=standYs,time=t)
            pweight = norm_likelihood.norm_likelihood_alltimes(y,yhat)
        
            gw[i] = gweight
            pw[i] = pweight
            
            maxgW[i] = gmaxweight
            maxpW[i] = pweight 
        # ---------------------------------------------------------------------
        
        # for plot ------------------------------------------------------------
        if not flag:
            yearInds = years
            flag = True
        else:
            # [particles, 3]
            yearInds = np.vstack([yearInds,years])
        # ---------------------------------------------------------------------
        
    # normalization ------------------------------------------------------------
    # only eq.years
    if mode == 100 or mode == 101:
        # when every cell matches exactly (distance 0)
        if any(maxgW==0):
            zeroind = np.where(maxgW==0)[0].tolist()
            maxgW[zeroind] = -1
        
        tmpgW = 1/-maxgW
        wNorm = tmpgW/np.sum(tmpgW)
        
    # eq.years & eq.times
    elif mode == 102:    
        if any(maxgW==0):
            zeroind = np.where(maxgW==0)[0].tolist()
            maxgW[zeroind] = -1
        
        tmpgW = 1/-maxgW
        
        maxW = tmpgW + maxpW 
        wNorm = maxW/np.sum(maxW)
    
    print(wNorm)
    # -------------------------------------------------------------------------
    # =========================================================================
    #         resampling
    # =========================================================================
    initU = np.random.uniform(0,1/nP)

    # NOTE: the same combination of b is selected for all 3 cells
    # index for resampling
    k = resampling(initU,wNorm,nP=nP)
    
    # variant that does not update b ----
    if mode == 90:
        xResampled = features[k] 
    
    # simple variant ----
    if mode == 100:
        # system noise --------------------------------------------------------
        # NOTE: the sum with the original value may still be negative
        # [cell, particles]; noise keeps the V & theta combinations from collapsing to identical values
        bnoise = np.array([np.random.normal(0,0.01*np.mean(features[:,cell]),nP) for cell in np.arange(nCell)])
        # ---------------------------------------------------------------------
    
        # Add noise
        xResampled = features[k] + np.abs(bnoise).T
    
    # likelihood-tweaked variant ----
    if mode == 101 or mode == 102:
        # index of the highest normalized-likelihood particle
        muind = np.argmax(wNorm)
        # its b (scaled) gives the mean for the next b distribution
        muB = features[bInd][muind] * 1000000
        # covariance matrix (xy, yz, zx); the zero diagonal is not positive
        # semi-definite, so np.random.multivariate_normal may warn
        Bin = 10
        sigmaB = [[0,Bin,Bin],[Bin,0,Bin],[Bin,Bin,0]]
        
        # center the distribution of the next b on the highest-likelihood particle
        # system noise --------------------------------------------------------
        # [particles, cells]
        bnoise = np.random.multivariate_normal(muB,sigmaB,nP)
        # ---------------------------------------------------------------------
        # [particles, cells]
        xResampled = np.abs(bnoise)*0.000001
        xResampled[0] = features[muind]
    
    
    print(f"---- 【{t}】 times ----\n")
    # occurrence-year plot ------------------------------------------------------
    if isPlot:
        nlpath = os.path.join(imgPath,f'numlines_{mode}')
        myData.isDirectory(nlpath)
        myPlot.NumberLine(standYs,yearInds,path=nlpath,label=f"best_years_{t}")
    # -------------------------------------------------------------------------
    
    # save year & likelihood txt ----------------------------------------------
    if isSavetxt:
     
        # nearest ----
        lhpath = os.path.join(savetxtPath,f"lh_{mode}")
        myData.isDirectory(lhpath)
        if mode == 100 or mode == 101:
            np.savetxt(os.path.join(lhpath,f"lh_{t}.txt"),gw)
            np.savetxt(os.path.join(lhpath,f"sum_lh_{t}.txt"),maxgW)
        else:
            np.savetxt(os.path.join(lhpath,f"lh_g_{t}.txt"),gw)
            np.savetxt(os.path.join(lhpath,f"lh_p_{t}.txt"),pw)
            np.savetxt(os.path.join(lhpath,f"sum_lh_g_{t}.txt"),maxgW)
            np.savetxt(os.path.join(lhpath,f"sum_lh_p_{t}.txt"),maxpW)
        
        np.savetxt(os.path.join(lhpath,f"w_{t}.txt"),wNorm)
        
        # Save param b
        xt = features[k]
        bpath = os.path.join(savetxtPath,f'B_{mode}')
        myData.isDirectory(bpath)
        np.savetxt(os.path.join(bpath,f'{t}.txt'),xt,fmt='%6f')    
    # -------------------------------------------------------------------------
    
    return xResampled, k
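
# The resampling() helper called above is not shown in this listing. A minimal
# sketch of systematic resampling that matches how it is called here -- initU
# drawn from U(0, 1/nP) and wNorm summing to 1 -- might look like this (an
# assumption, not the original implementation):
def resampling(initU, wNorm, nP):
    """Return nP indices; particle i is drawn once per pointer landing in its weight bin."""
    pointers = initU + np.arange(nP) / nP   # evenly spaced pointers in [0, 1)
    cumW = np.cumsum(wNorm)                 # cumulative-weight bin edges
    return np.searchsorted(cumW, pointers)  # bin index each pointer falls into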
Example #9
def Rireki(gt,
           pred,
           path='none',
           title="none",
           label="none",
           isShare=False,
           isSeparate=True,
           isResearch=False,
           iseach=False):
    """
    Args
        gt: ground-truth earthquakes (best years). [1400, 3]
    """

    if isResearch:
        if iseach:
            dists = myData.eachMAEyear(gt, pred)
        else:
            # degree of similarity
            dists = myData.MAEyear(gt, pred)

        title = dists

    sns.set_style("dark")
    # share gt & pred
    if isShare:
        fig, figInds = plt.subplots(nrows=3, sharex=True)
        for figInd in np.arange(len(figInds)):
            figInds[figInd].plot(np.arange(pred.shape[0]),
                                 pred[:, figInd],
                                 color="skyblue")
            figInds[figInd].plot(np.arange(gt.shape[0]),
                                 gt[:, figInd],
                                 color="coral")

    if isSeparate:
        colors = ["coral", "skyblue", "coral", "skyblue", "coral", "skyblue"]

        # scaled variant (event years rasterized to 0/5)
        predV, gtV = np.zeros([1400, 3]), np.zeros([1400, 3])

        # drop zero entries (zero padding)
        pred_nk = [s for s in pred[ntI].tolist() if s != 0]
        pred_tnk = [s for s in pred[tntI].tolist() if s != 0]
        pred_tk = [s for s in pred[ttI].tolist() if s != 0]

        gt_tk = [s for s in gt[ttI].tolist() if s != 0]

        predV[pred_nk, ntI] = 5
        predV[pred_tnk, tntI] = 5
        predV[pred_tk, ttI] = 5

        gtV[gt[ntI].tolist(), ntI] = 5
        gtV[gt[tntI].tolist(), tntI] = 5
        gtV[gt_tk, ttI] = 5

        # [1400,3]
        plot_data = [
            gtV[:, ntI], predV[:, ntI], gtV[:, tntI], predV[:, tntI],
            gtV[:, ttI], predV[:, ttI]
        ]
        # unscaled variant [1400,3]
        #plot_data = [gt[:,ntI],pred[:,ntI],gt[:,tntI],pred[:,tntI],gt[:,ttI],pred[:,ttI]]

        fig, axes = plt.subplots(nrows=6, sharex="col")
        for row, (color, data) in enumerate(zip(colors, plot_data)):
            axes[row].plot(np.arange(1400), data, color=color)

    plt.suptitle(f"{title}", fontsize=8)

    myData.isDirectory(path)
    plt.savefig(os.path.join(path, f"{label}.png"))
    plt.close()

    if isResearch:
        return int(np.sum(dists))


# -----------------------------------------------------------------------------
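
# Usage sketch (illustrative, default isSeparate=True): assumes ntI, tntI, ttI
# are 0, 1, 2; gt and pred are per-cell arrays of event years that get
# rasterized onto the 1400-year timeline before plotting.
gt_demo = [np.array([84, 287, 499]), np.array([84, 287, 496]), np.array([287, 496])]
pred_demo = [np.array([90, 300, 510]), np.array([88, 290, 500]), np.array([295, 505])]
Rireki(gt_demo, pred_demo, path="results/rireki", label="demo")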
Example #10
def simulate(features,
             y,
             x,
             ssYears,
             mode=0,
             t=0,
             pTime=0,
             sy=0,
             nP=0,
             nCell=3,
             isSavetxt=False,
             isPlot=False):
    """
    Args
        features: システムモデル値xt. th[1400,perticles,3], V[1400,perticles,3], b[perticles,3]
        y: 観測モデル値yt. [eq.years,]
        x: 地震年数(1400年). [(eq.years zero padding),perticles]
        sy: start of assimilation for perticles.
    """
    # 1. initialization --------------------------------------------------------
    # state vector theta, v, year (NOTE: may break for a single cell)
    ThVec = np.zeros((nP, nCell))
    VVec = np.zeros((nP, nCell))
    # feature vector after resampling
    xResampled = np.zeros((nParam, nP, nCell))
    # all norm-likelihood
    maxgW = np.zeros((nP))
    maxpW = np.zeros((nP))
    # weight for eq. year in each cell + penalty
    gw = np.zeros((nP, nCell + 1))
    # weight for eq. times
    pw = np.zeros((nP, nCell + 1))
    # weight
    wNorm = np.zeros((nP))
    # -------------------------------------------------------------------------
    # -------------------------------------------------------------------------
    flag = False
    for i in np.arange(nP):  # loop over the ensemble
        # =====================================================================
        #         likelihood computation
        # =====================================================================
        # strip the zero-padding from pred, keeping only the predicted years [variable eq. years,]
        yhat_nk = (x[x[:, i, ntI] > 0, i, ntI]).astype(int)
        yhat_tnk = (x[x[:, i, tntI] > 0, i, tntI]).astype(int)
        yhat_tk = (x[x[:, i, ttI] > 0, i, ttI]).astype(int)

        if t > 0:
            # 2000 years plus the years assimilated so far
            standInds = ssYears[i] + state_Year
            # shift back onto the 1400-year scale
            yhat_nk = yhat_nk - standInds
            yhat_tnk = yhat_tnk - standInds
            yhat_tk = yhat_tk - standInds

        yhat = [yhat_nk, yhat_tnk, yhat_tk]
        # the likelihood is over earthquake years; the weight multiplies the state vector
        # 2.c & 2.d likelihood and weight of each particle -----------------------
        standYs = [y[ntI][t], y[tntI][t], y[ttI][t]]

        # nearest -----
        # if mode == 'near' or mode == 'simple' or mode == 'b_near'
        if mode == 0:
            weight, maxweight, years = norm_likelihood.norm_likelihood_nearest(
                y, yhat, standYs=standYs, time=t)

            gw[i] = weight
            maxgW[i] = maxweight

        # if mode == 'sp_time_near' or mode == 'b_sp_time_near'
        elif mode == 3 or mode == 2:
            gweight, gmaxweight, years = norm_likelihood.norm_likelihood_nearest_safetypenalty(
                y, yhat, standYs=standYs, time=t)
            pweight = norm_likelihood.norm_likelihood_alltimes(y, yhat)

            gw[i] = gweight
            pw[i] = pweight

            maxgW[i] = gmaxweight
            maxpW[i] = pweight

        elif mode == 4:
            weight, maxweight, years = norm_likelihood.norm_likelihood_eachnearest(
                y, yhat, standYs=standYs, time=t)

            gw[i] = weight
            maxgW[i] = maxweight

        elif mode == 5 or mode == 6:
            weight, maxweight, years = norm_likelihood.norm_likelihood_eachnearest_penalty(
                y, yhat, standYs=standYs, time=t)

            gw[i] = weight
            maxgW[i] = maxweight

        elif mode == 7:
            pweight = norm_likelihood.norm_likelihood_alltimes(y, yhat)
            gweight, gmaxweight, years = norm_likelihood.norm_likelihood_eachnearest_penalty(
                y, yhat, standYs=standYs, time=t)

            gw[i] = gweight
            pw[i] = pweight

            maxgW[i] = gmaxweight
            maxpW[i] = pweight

        # ---------------------------------------------------------------------
        for indY, indC in zip(years, [ntI, tntI, ttI]):
            # align each cell to its highest-likelihood year: 1400 -> 1
            # NOTE: cells end up with different assimilation timings
            # NOTE: if no earthquake occurred, fall back on the tonankai year
            # NOTE: different years can carry the same value
            if int(indY) == 0:  # for tk
                ThVec[i, indC] = features[0][int(years[tntI]), i, indC]
                VVec[i, indC] = features[1][int(years[tntI]), i, indC]
            else:
                ThVec[i, indC] = features[0][int(years[indC]), i, indC]
                VVec[i, indC] = features[1][int(years[indC]), i, indC]

        if not flag:
            yearInds = years
            flag = True
        else:
            # [particles, 3]
            yearInds = np.vstack([yearInds, years])
    # normalization ------------------------------------------------------------
    # only eq. years
    if mode in (0, 4, 5, 6):
        # when every cell matches exactly (distance 0)
        if any(maxgW == 0):
            zeroind = np.where(maxgW == 0)[0].tolist()
            maxgW[zeroind] = 1

        tmpgW = 1 / maxgW
        wNorm = tmpgW / np.sum(tmpgW)

    # eq.years & eq.times
    elif mode in (2, 3, 7):
        if any(maxgW == 0):
            zeroind = np.where(maxgW == 0)[0].tolist()
            maxgW[zeroind] = 1

        tmpgW = 1 / maxgW

        maxW = tmpgW + maxpW
        wNorm = maxW / np.sum(maxW)

    # -------------------------------------------------------------------------
    # =========================================================================
    #         resampling
    # =========================================================================
    initU = np.random.uniform(0, 1 / nP)

    # NOTE: the same combination of b is selected for all 3 cells
    # index for resampling
    k = resampling(initU, wNorm, nP=nP)

    # simple variant ----
    if mode == 0:
        # system noise --------------------------------------------------------
        # NOTE: the sum with the original value may still be negative
        # [cell, particles]; noise keeps the V & theta combinations from collapsing to identical values
        Thnoise = np.array([
            np.random.normal(0, 0.01 * np.mean(ThVec[:, cell]), nP)
            for cell in np.arange(nCell)
        ])
        Vnoise = np.array([
            np.random.normal(0, 0.01 * np.mean(VVec[:, cell]), nP)
            for cell in np.arange(nCell)
        ])
        bnoise = np.array([
            np.random.normal(0, 0.01 * np.mean(features[bInd][:, cell]), nP)
            for cell in np.arange(nCell)
        ])
        # ---------------------------------------------------------------------

        xResampled[thInd] = ThVec[k] + np.abs(Thnoise).T
        xResampled[vInd] = VVec[k] + np.abs(Vnoise).T
        # Add noise
        xResampled[bInd] = features[bInd][k] + np.abs(bnoise).T

        updatesy = sy[k]

    # likelihood-tweaked variant ----
    # if mode == 'sp_alltime' or mode == 'b_sp_nl' or mode == 'b_sp_time_nl':
    elif mode in (3, 4, 5, 7):
        # index of the highest normalized-likelihood particle
        muind = np.argmax(wNorm)
        # its b (scaled) gives the mean for the next b distribution
        muB = features[bInd][muind] * 1000000
        # covariance matrix (xy, yz, zx); the zero diagonal is not positive
        # semi-definite, so np.random.multivariate_normal may warn
        Bin = 10
        sigmaB = [[0, Bin, Bin], [Bin, 0, Bin], [Bin, Bin, 0]]

        # center the distribution of the next b on the highest-likelihood particle
        # system noise --------------------------------------------------------
        # [cells, particles]
        Thnoise = np.array([
            np.random.normal(0, 0.01 * np.mean(ThVec[:, cell]), nP)
            for cell in np.arange(nCell)
        ])
        Vnoise = np.array([
            np.random.normal(0, 0.01 * np.mean(VVec[:, cell]), nP)
            for cell in np.arange(nCell)
        ])
        # [particles, cells]
        bnoise = np.random.multivariate_normal(muB, sigmaB, nP)
        # ---------------------------------------------------------------------
        # [particles, cells]
        xResampled[thInd] = ThVec[k] + np.abs(Thnoise).T
        xResampled[vInd] = VVec[k] + np.abs(Vnoise).T
        xResampled[bInd] = np.abs(bnoise) * 0.000001
        xResampled[bInd][0] = features[bInd][muind]
        # align to the years of the highest-likelihood particle
        updatesy = np.array(sy[muind].tolist() * nP)[:, np.newaxis]

    # variant that does not update b ----
    elif mode == 2 or mode == 6:
        # system noise --------------------------------------------------------
        # NOTE: the sum with the original value may still be negative
        # [cell, particles]; noise keeps the V & theta combinations from collapsing to identical values
        Thnoise = np.array([
            np.random.normal(0, 0.01 * np.mean(ThVec[:, cell]), nP)
            for cell in np.arange(nCell)
        ])
        Vnoise = np.array([
            np.random.normal(0, 0.01 * np.mean(VVec[:, cell]), nP)
            for cell in np.arange(nCell)
        ])
        # ---------------------------------------------------------------------

        xResampled[thInd] = ThVec[k] + np.abs(Thnoise).T
        xResampled[vInd] = VVec[k] + np.abs(Vnoise).T
        xResampled[bInd] = features[bInd][k]

        updatesy = sy[k]


    print(f"---- 【{t}】 times ----\n")
    # occurrence-year plot ------------------------------------------------------
    if isPlot:
        nlpath = os.path.join(imgPath, f'numlines_{mode}')
        myData.isDirectory(nlpath)
        myPlot.NumberLine(standYs,
                          yearInds,
                          path=nlpath,
                          label=f"best_years_{t}")
    # -------------------------------------------------------------------------

    # save year & likelihood txt ----------------------------------------------
    if isSavetxt:

        # nearest ----
        lhpath = os.path.join(savetxtPath, f"lh_{mode}")
        myData.isDirectory(lhpath)
        if mode in (0, 4, 5, 6):
            np.savetxt(os.path.join(lhpath, f"lh_{t}.txt"), gw)
            np.savetxt(os.path.join(lhpath, f"sum_lh_{t}.txt"), maxgW)
        else:
            np.savetxt(os.path.join(lhpath, f"lh_g_{t}.txt"), gw)
            np.savetxt(os.path.join(lhpath, f"lh_p_{t}.txt"), pw)
            np.savetxt(os.path.join(lhpath, f"sum_lh_g_{t}.txt"), maxgW)
            np.savetxt(os.path.join(lhpath, f"sum_lh_p_{t}.txt"), maxpW)

        np.savetxt(os.path.join(lhpath, f"w_{t}.txt"), wNorm)

        # Save param b
        xt = features[bInd][k]
        bpath = os.path.join(savetxtPath, f'B_{mode}')
        myData.isDirectory(bpath)
        np.savetxt(os.path.join(bpath, f'{t}.txt'), xt, fmt='%6f')
    # -------------------------------------------------------------------------

    return xResampled, yearInds.astype(int), updatesy, k
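
# norm_likelihood.norm_likelihood_nearest is referenced above but not shown. A
# hedged sketch consistent with the call sites in this listing (per-cell gaps
# plus a penalty slot, a scalar whose reciprocal becomes the particle weight,
# and the matched years); every name and the distance measure are assumptions,
# not the original implementation:
def norm_likelihood_nearest(y, yhat, standYs, time=0):
    """Score one particle by the gap between each ground-truth year and the
    nearest predicted year in the same cell (smaller total gap = better)."""
    gaps = np.zeros(4)    # one gap per cell + a penalty slot (kept 0 here)
    years = np.zeros(3)
    for c in range(3):
        if standYs[c] <= 0 or yhat[c].size == 0:
            continue      # no event to match in this cell
        nearest = yhat[c][np.argmin(np.abs(yhat[c] - standYs[c]))]
        gaps[c] = abs(nearest - standYs[c])
        years[c] = nearest
    return gaps, np.sum(gaps), years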