Ejemplo n.º 1
0
def handleWindowData(data):
    """Classify one raw sensor window and print the predicted label.

    Converts the raw window into accelerometer/gyroscope arrays, forces
    both to exactly 300 samples (the 3 s window used throughout this
    project), preprocesses them and feeds the result to the global
    ``model``.

    Parameters
    ----------
    data : raw window accepted by ``GetDataUtil.getAandG``
        (presumably the 9900-byte TCP window -- see the receiver loop).

    Side effects: prints a timestamped prediction; returns nothing.
    """
    # 1. Conversion: split the raw window into acceleration (a) and
    # angular velocity (w), each of shape (3, n_samples).
    a, w = GetDataUtil.getAandG(data)
    # Normalize both signals to exactly 300 columns: truncate long
    # windows, edge-pad short ones by repeating the last sample.
    # np.pad(mode="edge") produces the same columns as the original
    # one-column-at-a-time concatenate loop, without the O(n^2) copies.
    n = a.shape[1]
    if n > 300:
        a = a[:, :300]
        w = w[:, :300]
    elif n < 300:
        a = np.pad(a, ((0, 0), (0, 300 - n)), mode="edge")
        w = np.pad(w, ((0, 0), (0, 300 - n)), mode="edge")
    # Stack into a single (6, 300) sample: rows 0-2 acc, rows 3-5 gyr.
    X = np.concatenate((a, w), axis=0)
    # 2. Preprocessing: dataProcess expects a leading batch axis ...
    X = X[np.newaxis, :, :]
    X = GetDataUtil.dataProcess(X, logPrint=False)
    # ... and the CNN expects dim = 4: (batch, channel, rows, cols).
    X = X[:, np.newaxis, :, :]
    # 3. Prediction: argmax over class scores, printed with a timestamp.
    label = model.predict(X)
    t = time.strftime('%Y-%m-%d %H:%M:%S')
    print(t + "  预测结果:" + Labels[np.argmax(label)] + '\n')
 def readfile(
         self,
         filename="./rawdataset2/RandomCrop_NPAWF_Noise_orgin_all_10000.npy"
 ):
     """Load a saved .npy dataset and return (data, zero-based labels)."""
     data, labels = GetDataUtil.getData(filename)
     # Labels are stored 1-based; shift them to start at 0.
     return data, labels - 1
def DataArgument_3(rawData,
                   savePath="../DataSet_NPSave/RandomCropAugmentatedData"):
    """Random-crop augmentation: cut a random 150-sample window out of
    each record, then re-interpolate the crops back to 300 samples.

    For "collision" samples (Label == 2) the crop start is re-drawn until
    the window still contains the global minimum of the x-acceleration,
    so the impact spike is never cropped away.

    Parameters
    ----------
    rawData : iterable of dicts with keys "Acc", "Gyr", "Label"
    savePath : path handed to GetDataUtil.interpolation for saving.

    Returns the interpolated augmented dataset.
    """
    augmented = []
    print("正在生成数据,请稍后...")
    for sample in rawData:
        cropped = sample.copy()
        start = np.random.randint(150, size=1)
        if sample["Label"] == 2:  # collision sample
            # Re-draw until the x-acc global minimum falls inside
            # [start, start + 150).
            while np.min(cropped["Acc"][0]) != np.min(
                    cropped["Acc"][0, start[0]:start[0] + 150]):
                start = np.random.randint(150, size=1)
        cropped["Acc"] = cropped["Acc"][:, start[0]:start[0] + 150]
        cropped["Gyr"] = cropped["Gyr"][:, start[0]:start[0] + 150]
        augmented.append(cropped)
    print("已完毕,共生成%d个新数据。" % (len(augmented)))
    # Interpolate back to 300 samples and save.
    return GetDataUtil.interpolation(np.array(augmented),
                                     sample=300,
                                     kind="cubic",
                                     savePath=savePath)
 def readfile(self, filename="./DataSet_NPSave/JustifiedData.npy"):
     """Load the aligned dataset and return (data, zero-based labels)."""
     data, labels = GetDataUtil.getData(filename)
     # Labels are stored 1-based; convert to 0-based.
     return data, labels - 1
Ejemplo n.º 5
0
# Shift the x-acceleration of every "accelerate" sample by a constant
# offset (mutates the arrays in place).
# NOTE(review): the code adds 0.05, but both the printed message and the
# "ACC_01" filename below say 0.1 -- confirm which value is intended.
for data in accelerate_data:
    data["Acc"][0] += 0.05
    count += 1
print("已完成%d个样本增加0.1" % (count))

# Concatenate the five per-action arrays into one dataset and save it.
AllData_Acc_01 = np.concatenate(
    (accelerate_data, collision_data, uniform_speed_data, left_turn_data,
     right_turn_data))
np.save("DataSet_NPSave/RandomCrop_NPAWF_Noise_orgin_all_ACC_01_10000.npy",
        AllData_Acc_01)

#%%
import numpy as np

# Build a stacked (acc rows over gyr rows) signal from a stationary
# recording, to inspect as noise.
noiseData = np.concatenate(GetDataUtil.getAandG(
    GetDataUtil.readFile("DataSet/静止/2017-12-23-匀速")),
                           axis=0)
# Pick a random 300-sample window inside the recording and print row 2
# of it shifted by -1.
# NOTE(review): the trailing "- 1" looks ad hoc -- confirm intentional.
idx = np.random.randint(noiseData.shape[1] - 300, size=1)
print(noiseData[2, idx[0]:idx[0] + 300] - 1)
#%%
#dataSet = np.load("DataSet_NPSave/RandomCrop_NPAWF_Noise_orgin_all_10000.npy")
dataSet = np.load("DataSet_NPSave/JustifiedData.npy")
# Count (and report) samples whose gyroscope peak exceeds 200.
count = 0
for data in dataSet:
    if np.max(data["Gyr"]) > 200:
        print("存在%d" % (np.max(data["Gyr"])))
        count += 1
print(count)
#%%
import numpy as np
import GetDataUtil
Ejemplo n.º 6
0
Created on Wed Jan 17 10:51:07 2018
模型工具类,提供模型训练,模型保存,模型导入操作
@author: John Kwok
"""
import GetDataUtil
import numpy as np
#%%
'''
# 声明训练集,数据集
X_train = []
X_test = []
y_train = []
y_test = []
'''
# Fetch the train/test split from the data utility.
X_train, X_test, y_train, y_test = GetDataUtil.dataInit()
#%%
# One-hot encode the labels: stored 1..5, shifted to 0..4 for 5 classes.
from keras.utils import np_utils
y_train = np_utils.to_categorical(y_train-1,5)
y_test = np_utils.to_categorical(y_test-1,5)
#%%
# The CNN input needs dim = 4: insert a channel axis after the batch axis.
X_train = X_train[:,np.newaxis,:,:]
X_test = X_test[:,np.newaxis,:,:]
print(X_train.shape)
#%%
import time
import sklearn
from keras.utils import plot_model
from keras.models import Model
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 26 16:06:04 2018
数据增强程序
@author: John Kwok
"""

#%%
'''第一步:初始化,数据准备'''
import GetDataUtil
import numpy as np

# Step 1: gather the raw recordings into one .npy file, then interpolate
# every record to a fixed 300-sample length (3 s window).
originData = GetDataUtil.saveDataToNP("../DataSet/trim",
                                      savePath="../DataSet_NPSave/DataSet.npy")
GetDataUtil.interpolation(originData,
                          sample=300,
                          kind="cubic",
                          savePath="../DataSet_NPSave/JustifiedData.npy")
# Load the aligned data back from disk.
justifiedData = np.load("../DataSet_NPSave/JustifiedData.npy")
print(len(justifiedData))
accelerate_data = []
collision_data = []
uniform_speed_data = []
left_turn_data = []
right_turn_data = []
# Split the samples into one list per action label
# (1 = accelerate, 2 = collision, ...; remaining branches are truncated
# in this chunk).
for data in justifiedData:
    if data["Label"] == 1:
        accelerate_data.append(data)
    elif data["Label"] == 2:
Ejemplo n.º 8
0
# Fix the TensorFlow RNG so runs are reproducible.
from tensorflow import set_random_seed
set_random_seed(2)    

import GetDataUtil
#import SparseFilter
import numpy as np
#%%
'''
# 声明训练集,数据集
X_train = []
X_test = []
y_train = []
y_test = []
'''
# Fetch the train/test split from the data utility.
X_train, X_test, y_train, y_test = GetDataUtil.dataInit()
#%%
# One-hot encode the labels: stored 1..5, shifted to 0..4 for 5 classes.
from keras.utils import np_utils
y_train = np_utils.to_categorical(y_train-1,5)
y_test = np_utils.to_categorical(y_test-1,5)
#%%
# The CNN input needs dim = 4: insert a channel axis after the batch axis.
X_train = X_train[:,np.newaxis,:,:]
X_test = X_test[:,np.newaxis,:,:]
print(X_train.shape)
#%%
import time
import sklearn
from keras.optimizers import Adam
from sklearn.utils import shuffle
Ejemplo n.º 9
0
    acc_y_subfig.figure.canvas.draw()
    acc_z_subfig.figure.canvas.draw()
    gyr_x_subfig.figure.canvas.draw()
    gyr_y_subfig.figure.canvas.draw()
    gyr_z_subfig.figure.canvas.draw()


# Sender side samples at 100 Hz, producing 33 bytes per sample;
# with a 3 s time window that is 9900 bytes per prediction window.
while True:
    recv = sk.recv(10240)
    data += recv
    raw_data += recv
    # Live plot of the six-axis data, one chunk (3300 bytes) at a time.
    if len(raw_data) >= 3300:
        a, w = GetDataUtil.getAandG(raw_data[:3300])
        raw_data = raw_data[3300:]
        # Redraw the figure with the fresh acc/gyr data.
        p(a, w)
        plt.ioff()
        plt.tight_layout()
        plt.show()

    # Classification: run a prediction for every full 9900-byte window.
    if len(data) >= 9900:
        # lenth_before = len(data)
        handleWindowData(data[:9900])
        data = data[9900:]
        # lenth_after = len(data)
        # print("Before:%d  after:%d  差值:%d"%(lenth_before,lenth_after,lenth_before-lenth_after))
    # t = time.strftime('%Y-%m-%d %H:%M:%S')
Ejemplo n.º 10
0
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 10 20:33:20 2018

@author: gyhu
"""

#%%
import numpy as np
import keras
import GetDataUtil

#%%
'''1. Get the train and test dataset.'''
# Load the augmented dataset and hold out 10% of it for testing.
X_train_origin, X_test_origin, y_train, y_test = GetDataUtil.getTrainTestSet(
    dataPath=
    "../DataSet_NPSave/RandomCrop_NPAWF_Noise_orgin_all_ACC_005_10000.npy",
    test_size=0.1)
#X_train_origin, X_test_origin, y_train, y_test = GetDataUtil.getTrainTestSet(dataPath = "../DataSet_NPSave/JustifiedData.npy",test_size = 0.1)

#%%
'''2. Data preprocessing'''


def DataPreprocess(data):
    print("Data Preprocessing,Please wait...")
    '''
    # 加速度归一化 0~1
    
    data[:,:3,:] /= 16
    data[:,:3,:] *= 32768.0
    data[:,:3,:] += 32768.0
Ejemplo n.º 11
0
from keras.models import Model
from keras.layers import Input, Dense
from keras.layers.convolutional import Conv2D
from keras.layers.normalization import BatchNormalization
from keras.initializers import TruncatedNormal
from keras import regularizers
from keras.layers.core import Activation, Flatten, Lambda
from keras.layers.pooling import MaxPooling2D, AveragePooling2D
from keras.layers.core import Dropout
from keras import backend as k

# Use Theano-style image ordering: (channels, rows, cols).
k.set_image_dim_ordering('th')
'''1. Get the train and test dataset.'''
# After data augmentation:
# X_train_origin, y_train = GetDataUtil.splitDataAndLabel(dataPath = "../DataSet_NPSave/RandomCrop_NPAWF_Noise_orgin_all_10000.npy")
X_train, y_train = GetDataUtil.splitDataAndLabel(
    dataPath="../DataSet_NPSave/Train_Data_Orig.npy")
X_test, y_test = GetDataUtil.splitDataAndLabel(
    dataPath="../DataSet_NPSave/Test_Data_Orig.npy")

# Before data augmentation:
#X_train_origin, X_test_origin, y_train, y_test = GetDataUtil.getTrainTestSet(dataPath = "../DataSet_NPSave/JustifiedData.npy",test_size = 0.1)
'''2. Data preprocessing'''


def DataPreprocess(data):
    print("Data Preprocessing,Please wait...")
    '''
    # 加速度归一化 0~1
    
    data[:,:3,:] /= 16
    data[:,:3,:] *= 32768.0
# Randomly pick 20 samples from each action class for visualisation.
accelerate_data = np.random.choice(accelerate_data, size=20)
collision_data = np.random.choice(collision_data, size=20)
uniform_speed_data = np.random.choice(uniform_speed_data, size=20)
left_turn_data = np.random.choice(left_turn_data, size=20)
right_turn_data = np.random.choice(right_turn_data, size=20)

print(accelerate_data.shape)
print(collision_data.shape)
print(uniform_speed_data.shape)
print(left_turn_data.shape)
print(right_turn_data.shape)
# Concatenate the five selections into a single array.
selectedData = np.concatenate(
    (accelerate_data, collision_data, uniform_speed_data, left_turn_data,
     right_turn_data))
# (100,)
print(selectedData.shape)
# Render one picture per selected sample.
GetDataUtil.generatePic(selectedData, picSavePath="Pic_All_ACC_01_100")

#%%
'''此部分为生成滤波后的波形图'''
# 3rd-order Butterworth low-pass; normalized cutoff 0.02 = 1 Hz at a
# 100 Hz sampling rate (Wn is a fraction of the Nyquist frequency).
b, a = signal.butter(3, 0.02, 'low')

# Zero-phase (filtfilt) filter every selected sample in place.
for i in range(len(selectedData)):
    selectedData[i]["Acc"] = signal.filtfilt(b, a, selectedData[i]["Acc"])
    selectedData[i]["Gyr"] = signal.filtfilt(b, a, selectedData[i]["Gyr"])
# Regenerate the pictures from the filtered waveforms.
GetDataUtil.generatePic(selectedData, picSavePath="Pic_lowPass_100")
Ejemplo n.º 13
0
def convertData(filePath = "DataSet/trim/右转/1513994245"):
    """Read one raw recording and return its (acc, gyr) array pair."""
    raw = GetDataUtil.readFile(filePath)
    return GetDataUtil.getAandG(raw)
Ejemplo n.º 14
0
"""
Created on Fri Jan 12 17:51:24 2018

@author: John Kwok
"""

import GetDataUtil
import numpy as np
from sklearn.metrics import classification_report
from sklearn import preprocessing

#%%
'''
第一步:将原始数据文件整合保存在npy数组文件中。得到“DataSet.npy”
'''
# Step 1: collect the raw recordings into a single "DataSet.npy".
GetDataUtil.saveDataToNP("DataSet/trim")

#%%
'''
第二步:使用插值法,使原始数据长度对齐,默认采用300为目标长度(即时间窗口3s)。
生成文件“JustifiedData.npy”,并且返回data
[data]:字典集合,{"Acc":A,"Gyr":G,"Label":label}
'''
# Step 2: interpolate every record to the default 300-sample length and
# get back the list of {"Acc": A, "Gyr": G, "Label": label} dicts.
data = GetDataUtil.interpolation(np.load("DataSet.npy"))

#%%
'''第三步:获取数据,并且归一化、标准化'''
X_train, X_test, y_train, y_test = GetDataUtil.getTrainTestSet()
# Preprocessing: only scaling/standardization plus flatten here;
# 1800 = 6 axes x 300 samples per record.
# Future work could add: 1. filtering 2. sparse filtering 3. feature
# expansion, etc.
X_train = preprocessing.scale(X_train.reshape(-1,1800))