Exemplo n.º 1
0
                                       train_idx[0],
                                       npp_params,
                                       clean=True,
                                       physical=True,
                                       downsample=True)
            # NOTE(review): the call that produced x_train/y_train begins
            # above this chunk and is not visible here.
            # Load the remaining training indices and stack each one onto
            # the first along the sample axis.
            for i in train_idx[1:]:
                _, x_i, y_i = load(data_name,
                                   i,
                                   npp_params,
                                   clean=True,
                                   physical=True,
                                   downsample=True)
                x_train = np.concatenate((x_train, x_i), axis=0)
                y_train = np.concatenate((y_train, y_i), axis=0)

            # Hold out 20% of the clean pool as a validation split.
            x_train, y_train, x_validation, y_validation = utils.split_data(
                [x_train, y_train], split=0.8, shuffle=True)

            # create poison data
            # (clean=False presumably loads the poisoned variant, and
            # downsample=False keeps all trials available — TODO confirm
            # against the load() implementation.)
            _, x_p, y_p = load(data_name,
                               s_id[0],
                               npp_params,
                               clean=False,
                               physical=True,
                               downsample=False)

            # Randomly sample poison_rate * len(x_train) poisoned trials
            # (and their labels) via a shuffled index permutation.
            idx = utils.shuffle_data(len(x_p))
            x_poison, y_poison = x_p[idx[:int(poison_rate *
                                              len(x_train))]], y_p[
                                                  idx[:int(poison_rate *
                                                           len(x_train))]]
Exemplo n.º 2
0
# Collapse singleton dimensions so every label array is a flat vector.
y_validation, y_poison, y_test, y_test_poison = (
    np.squeeze(labels)
    for labels in (y_validation, y_poison, y_test, y_test_poison))

# Overwrite the poison labels with all ones — this decides whether the
# injected samples' labels are changed (the backdoor target class).
y_poison = np.ones(shape=y_poison.shape)

# Shrink the clean pools so clean data does not swamp the poisoned samples
# during retraining: keep 20% of the training set, 50% of the validation set.
x_train, y_train, _, _ = utils.split_data([x_train, y_train],
                                          split=0.2,
                                          shuffle=True)
x_validation, y_validation, _, _ = utils.split_data(
    [x_validation, y_validation], split=0.5, shuffle=True)

# Baseline runs train on clean data only; otherwise append the poisoned
# (backdoor) samples to the training set.
if not baseline:
    x_train = np.concatenate((x_train, x_poison), axis=0)
    y_train = np.concatenate((y_train, y_poison), axis=0)

# Re-shuffle so clean and poisoned samples are interleaved.
data_size = y_train.shape[0]
shuffle_index = utils.shuffle_data(data_size)
x_train = x_train[shuffle_index]
Exemplo n.º 3
0
# Fix the class-imbalance problem: keep the same number of poisoned samples
# from each class.
x1 = X_po[np.where(Y_po == 0)]
x2 = X_po[np.where(Y_po == 1)]
sample_num = min(len(x1), len(x2))
# BUG FIX: this definition was commented out, so idx1/idx2 were undefined
# and the next line raised NameError. Restore the per-class permutations.
idx1, idx2 = utils.shuffle_data(len(x1)), utils.shuffle_data(len(x2))
X_po = np.concatenate([x1[idx1[:sample_num]], x2[idx2[:sample_num]]], axis=0)
Y_po = np.concatenate(
    [np.zeros(shape=[sample_num]),
     np.ones(shape=[sample_num])], axis=0)

# Insert a channel axis: (N, H, W) -> (N, 1, H, W).
X_cl = X_cl[:, np.newaxis, :, :]
X_po = X_po[:, np.newaxis, :, :]
leng = len(X_cl)
idx_al = np.arange(leng)  # index array over all clean samples

# Split the sample indices: 86% for the clean training pool, and the
# remaining 14% halved between poison-train and test indices.
idx_cl, _, idx_po, _ = utils.split_data([idx_al, idx_al],
                                        split=0.86,
                                        shuffle=True)
idx_po, _, idx_test_po, _ = utils.split_data([idx_po, idx_po],
                                             split=0.5,
                                             shuffle=True)
x_train = X_cl[idx_cl]
y_train = Y_cl[idx_cl]
x_poison = X_po[idx_po]
y_poison = Y_po[idx_po]
# NOTE(review): the clean test set is indexed with idx_test_po so the clean
# and poisoned test sets cover the same trials — confirm this is intended.
x_test = X_cl[idx_test_po]
y_test = Y_cl[idx_test_po]
x_test_poison = X_po[idx_test_po]
y_test_poison = Y_po[idx_test_po]

# Hold out 20% of the clean training data for validation.
x_train, y_train, x_validation, y_validation = utils.split_data(
    [x_train, y_train], split=0.8, shuffle=True)
Exemplo n.º 4
0
import numpy as np
from scipy.io import loadmat
import scipy.io as io
import lib.utils as utils

# NPP parameters selecting which poisoned .mat file to load.
npp_params = [1.5, 5, 0.1]
path = 'EEG_Data/MI/'
data = loadmat(
    path +
    'data2-{}-{}-{}.mat'.format(npp_params[0], npp_params[1], npp_params[2]))
signal = data['x_train']
y_gusspoi = np.squeeze(data['y_train'])

# Keep a random 20% subset of the trials.
signal, y_gusspoi, _, _ = utils.split_data([signal, y_gusspoi],
                                           split=0.2,
                                           shuffle=True)

# Add zero-mean Gaussian noise at a fixed target SNR (in dB) to every trial.
signal_noise = np.zeros(signal.shape)
SNR = 0.1
for i in range(len(signal)):
    # Draw N(0, 1) noise with the same per-trial shape as the signal.
    noise = np.random.randn(signal.shape[1], signal.shape[2],
                            signal.shape[3])
    noise -= np.mean(noise)  # force exactly zero mean
    a = signal[i].size  # NOTE(review): unused here — kept in case later code reads it
    # Mean power of this trial around its mean (i.e. its variance, std**2).
    signal_power = np.linalg.norm(
        signal[i] - signal[i].mean())**2 / signal[i].size
    # Target noise power from the SNR: P_noise = P_signal / 10^(SNR/10).
    noise_variance = signal_power / np.power(10, (SNR / 10))
    # Rescale the unit-variance noise to the target standard deviation.
    noise = (np.sqrt(noise_variance) / np.std(noise)) * noise
    signal_noise[i] = noise + signal[i]

# Realised SNR over the whole data set, for verification.
Ps = (np.linalg.norm(signal - signal.mean()))**2  # signal power
Pn = (np.linalg.norm(signal - signal_noise))**2  # noise power
snr = 10 * np.log10(Ps / Pn)