Example #1
    # dataset_fft = fft_transformer(dataset_dense, 100)
    # dataset = np.hstack((dataset_4feature, dataset_fft, label))
    # dataset_guiyi_1 = guiyi(dataset)
    # print(dataset_guiyi_1.shape)
    # print(Counter(dataset_guiyi_1[:, -1]))
    # print('One-hot encoding result:')
    # one_hot = onehot(dataset_guiyi_1)
    # print(one_hot.shape)
    # print(np.sum(one_hot[:, -3:], axis=0))
    # print(np.min(dataset_guiyi_1, axis=0))
    # SaveFile(data=dataset_guiyi_1, savepickle_p=p1)

    data_all2 = making2(p=p, argchoice_r=argchoice_r2)
    dataset_4feature = data_all2[:, :4]
    dataset_dense = data_all2[:, 4:-1]
    label = data_all2[:, -1][:, np.newaxis]
    dataset_fft = fft_transformer(dataset_dense, 100)
    dataset = np.hstack((dataset_4feature, dataset_fft, label))
    dataset_guiyi_2 = guiyi(dataset)
    print(dataset_guiyi_2.shape)
    print('One-hot encoding result:')
    one_hot2 = onehot(dataset_guiyi_2)
    print(one_hot2.shape)
    print(np.sum(one_hot2[:, -11:], axis=0))
    # print(np.min(dataset_guiyi_2, axis=0))
    SaveFile(data=dataset_guiyi_2, savepickle_p=p2)
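
The helpers guiyi (pinyin for 归一化, "normalize") and onehot are defined elsewhere in this project. Below is a minimal sketch of plausible implementations, assuming guiyi min-max scales every column except the trailing label and onehot swaps that integer label column for one-hot columns; both behaviors are inferred from how the results are used above, not confirmed by the source.

import numpy as np

def guiyi(dataset):
    # Assumed: min-max scale all columns except the last (the label).
    features, label = dataset[:, :-1], dataset[:, -1][:, np.newaxis]
    mins = features.min(axis=0)
    spans = features.max(axis=0) - mins
    spans[spans == 0] = 1  # guard against constant columns
    return np.hstack(((features - mins) / spans, label))

def onehot(dataset):
    # Assumed: replace the integer label column with one-hot columns.
    features, label = dataset[:, :-1], dataset[:, -1].astype(int)
    return np.hstack((features, np.eye(label.max() + 1)[label]))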
Example #2
def session(dataset_path, train_path='', test_path=''):
    '''
    Connect the graph nodes and run the training session
    :param dataset_path: path to the dataset
    :param train_path: path to the training data, empty by default
    :param test_path: path to the test data, empty by default
    :return: None
    '''
    # load the dataset
    dataset = LoadFile(p=dataset_path)
    # dataset = guiyi(dataset)
    dataset = onehot(dataset)

    g = tf.Graph()
    with g.as_default():
        with tf.name_scope('placeholder'):
            x_f = tf.placeholder(dtype=tf.float32, shape=[None, 4], name='x_f')
            x_l = tf.placeholder(dtype=tf.float32,
                                 shape=[None, 100],
                                 name='x_l')
            y = tf.placeholder(dtype=tf.float32, shape=[None, 3], name='y')
            learning_rate = tf.placeholder(dtype=tf.float32, name='lr')
            is_training = tf.placeholder(dtype=tf.bool, name='is_training')
        output = layers(x_f=x_f, x_l=x_l, is_training=is_training)
        with tf.name_scope('prediction'):
            # loss = -tf.reduce_mean(y * tf.log(output), name='loss')
            loss = tf.reduce_mean(
                tf.keras.losses.categorical_crossentropy(y_true=y,
                                                         y_pred=output))
            opt = tf.train.GradientDescentOptimizer(
                learning_rate=learning_rate).minimize(loss)
            acc = tf.reduce_mean(tf.cast(
                tf.equal(tf.keras.backend.argmax(output, axis=1),
                         tf.keras.backend.argmax(y, axis=1)), tf.float32),
                                 name='pred')
        with tf.name_scope('etc'):
            init = tf.global_variables_initializer()
            gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.333)
    with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options),
                    graph=g) as sess:
        sess.run(init)
        # split into the training and test sets
        train_data, test_data = spliting(dataset, 3000)
        for i in range(6000):  # training epochs
            for data in input(dataset=train_data, batch_size=500):  # user-defined batch generator (shadows the builtin input)
                _ = sess.run(opt,
                             feed_dict={
                                 x_f: data[:, :4],
                                 x_l: data[:, 4:-3],
                                 y: data[:, -3:],
                                 learning_rate: 1e-2,
                                 is_training: False  # kept False as in the original; set True if layers() uses dropout/batch norm
                             })
                if i % 100 == 0:
                    loss_ = sess.run(loss,
                                     feed_dict={
                                         x_f: data[:, :4],
                                         x_l: data[:, 4:-3],
                                         y: data[:, -3:],
                                         is_training: False
                                     })
                    acc_1 = sess.run(acc,
                                     feed_dict={
                                         x_f: data[:, :4],
                                         x_l: data[:, 4:-3],
                                         y: data[:, -3:],
                                         is_training: False
                                     })
            if i % 100 == 0:
                acc_2 = sess.run(acc,
                                 feed_dict={
                                     x_f: test_data[:, :4],
                                     x_l: test_data[:, 4:-3],
                                     y: test_data[:, -3:],
                                     is_training: False
                                 })
                print('Epoch %s  training loss: %s  training accuracy: %s  test accuracy: %s' %
                      (i, loss_, acc_1, acc_2))

        tf.summary.FileWriter('log/first_graph', sess.graph)
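
The loop above leans on two project helpers, spliting and input. A minimal sketch under assumptions: spliting shuffles and holds out a fixed number of rows as the test set (whether 3000 counts test or training rows is a guess), and input is a plain mini-batch generator whose name shadows the Python builtin.

import numpy as np

def spliting(dataset, test_size):
    # Assumed: shuffle rows, then carve off `test_size` rows as the test set.
    shuffled = np.random.permutation(dataset)
    return shuffled[test_size:], shuffled[:test_size]

def input(dataset, batch_size):
    # Assumed: yield successive mini-batches of `batch_size` rows.
    for start in range(0, dataset.shape[0], batch_size):
        yield dataset[start:start + batch_size]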
Example #3
    return fft_abs


if __name__ == '__main__':
    p = r'/home/xiaosong/桌面/OLDENBURG_all.pickle'
    dataset = LoadFile(p)
    # each pair appears to be [sample_count, class_label], judging by checkclassifier below
    nums_cl = [[6557, 0], [611, 2], [101, 2], [13, 2], [554, 2], [155, 2],
               [100, 2], [1165, 1], [1993, 1], [947, 2], [1133, 2], [1152, 1],
               [542, 2], [754, 2], [2163, 1]]
    dataset_output = making(nums_cl=nums_cl, dataset=dataset)
    print(dataset_output.shape)
    checkclassifier(dataset_output[:, -1])
    # SaveFile(dataset_output, savepickle_p=r'/home/xiaosong/桌面/OLDENBURG_3cl.pickle')
    dataset_4feature = dataset_output[:, :4]
    dataset_dense = dataset_output[:, 4:-1]
    label = dataset_output[:, -1][:, np.newaxis]
    dataset_fft = fft_transformer(dataset_dense, 100)
    dataset = np.hstack((dataset_4feature, dataset_fft, label))
    dataset_guiyi = guiyi(dataset)
    print(dataset_guiyi.shape)
    # print(np.min(dataset_guiyi, axis=0))
    SaveFile(data=dataset_guiyi,
             savepickle_p=r'/home/xiaosong/桌面/OLDENBURG_3cl.pickle')
    dataset_onehot = onehot(dataset_guiyi)
    print(np.sum(dataset_onehot[:, -3:], axis=0))
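
Only the tail of fft_transformer (the return fft_abs at the top of this example) survives in the snippet, and LoadFile/SaveFile are imported from elsewhere. A minimal reconstruction under assumptions: the pickle helpers are thin wrappers, and fft_transformer keeps the magnitudes of the first `keep` FFT bins of each row, which would match the 100-column x_l placeholder in Example #2.

import pickle
import numpy as np

def LoadFile(p):
    # Assumed: unpickle an array from disk.
    with open(p, 'rb') as f:
        return pickle.load(f)

def SaveFile(data, savepickle_p):
    # Assumed: pickle an array to disk.
    with open(savepickle_p, 'wb') as f:
        pickle.dump(data, f)

def fft_transformer(dataset, keep):
    # Assumed: FFT each row, keep magnitudes of the first `keep` bins.
    fft_abs = np.abs(np.fft.fft(dataset, axis=1))[:, :keep]
    return fft_abs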