Ejemplo n.º 1
0
    # Register currentPath under its traffic type: "U" marks an update to an
    # existing bucket, "I" an insert of a new one.
    # NOTE(review): currentType/currentPath are assigned by enclosing loop
    # code not visible here — confirm against the full file.
    if currentType in sourceFiles:
        print("U [%s:%s]" % (currentType, currentPath))
        sourceFiles[currentType].append(currentPath)
    else:
        print("I [%s:%s]" % (currentType, currentPath))
        sourceFiles[currentType] = [currentPath]

# Abort when the scan above collected no capture files at all.
if not sourceFiles:
    print("No files were added. Exiting.")
    sys.exit(1)

# Size the network from the payload vector length and the output mapping.
# NOTE(review): vectorSize is called on Payload but read as a plain
# attribute on OutputMapper — confirm that asymmetry is intentional.
nn = NN(Payload.vectorSize(), OutputMapper.vectorSize)

# Resume from previously saved weights when a checkpoint file exists.
if os.path.isfile(weightsPath):
    nn.load(weightsPath)

# Turn every packet of every capture file into a training sample tagged
# with the traffic type it was filed under.
samples = TrainingSample.TrainingSampleBatch()
for trafficType, filePaths in sourceFiles.items():
    for filePath in filePaths:
        cap = pyshark.FileCapture(filePath)
        try:
            for packet in cap:
                try:
                    sample = TrainingSample.TrainingSample(
                        Payload.fromPySharkCapture(packet), trafficType)
                    samples.add(sample)
                except AttributeError as ae:
                    # Best effort: skip packets whose layers lack the
                    # attributes Payload expects, but report them.
                    print(ae)
        finally:
            # FileCapture keeps a tshark subprocess and open handles
            # alive; close it explicitly so one is not leaked per file.
            cap.close()

samples.shuffle()
vectors = samples.toVectors()
Ejemplo n.º 2
0
# --- train/validation split: first 70% of the sample columns train,
# --- the remainder validates.
m = y.shape[1]
m_train = int(0.7 * m)
x_train = x[:, :m_train]
y_train = y[:, :m_train]
x_val = x[:, m_train:]
y_val = y[:, m_train:]

# Architecture and hyper-parameters.
Layers = (17, 8, 8, 4, 1)   # layer widths: 17 inputs ... 1 output
maxIteration = 5000         # number of epochs
alpha = 0.03                # learning rate
reg = 0.0                   # regularization strength (disabled)
batch_size = 200

nn = NN(Layers, alpha, reg)
nn.load('config')

# Mini-batch training.  n_batch uses floor division, so a final partial
# batch is dropped — NOTE(review): confirm that is intentional.
n_batch = m_train // batch_size
for epoch in range(maxIteration):
    for i in range(n_batch):
        start = i * batch_size
        end = min(start + batch_size, m_train)
        nn.train_once(x_train[:, start:end], y_train[:, start:end])
    # np.asscalar was deprecated in NumPy 1.16 and removed in 1.23;
    # ndarray.item() is the supported replacement.
    print(epoch, " train cost J: ", nn.J(nn.predict(x), y).item())
    print(epoch, " test cost J: ", nn.J(nn.predict(x_val), y_val).item())

# Final metrics on the held-out validation split.
nn.test(x_val, y_val)
print(" accuracy: ", nn.accuracy)
print(" precision: ", nn.precision)
print(" recall: ", nn.recall)
print(" fScore: ", nn.fscore)
Ejemplo n.º 3
0

if __name__ == '__main__':
    # Load network hyper-parameters from the YAML config file.
    # NOTE(review): bare yaml.load without an explicit Loader is deprecated
    # (PyYAML >= 5.1) and unsafe on untrusted input — prefer
    # yaml.safe_load, after confirming the config uses no custom tags.
    with open('../NN_config.yaml') as f:
        NN_config = yaml.load(f)

    # Build the network from the configured layer sizes, the activation
    # function resolved by name from the `af` module, and the learning rate.
    nn = NN(
        inputsLength=NN_config['lengths']['inputs'],
        outputsLength=NN_config['lengths']['outputs'],
        hiddenLayersLength=NN_config['lengths']['hiddenLayers'],
        thicknessLength=NN_config['lengths']['neuronsPerHiddenLayer'],
        activation=getattr(af, NN_config['activationFunction']),
        learningRate=NN_config['learningRate']
    )
    # Resume from the saved brain unless the user asked for a fresh start.
    if not '--reset' in sys.argv:
        nn.load('../brain.json')

    print('Fetching digits data...')
    # First CLI argument = number of training samples to fetch.
    epoches = int(sys.argv[1])
    digits = {
        'train': get_digits(epoches, 'train'),
        'test': get_digits(1000, 'test')
    }
    print('Done fetching! Time for training.')

    print('Training...')
    losses = np.array([], dtype='float64')
    for n, dig in enumerate(digits['train']):
        # Flatten the pixel grid and scale intensities into [0, 1].
        inputs = np.array(dig['pixels']).flatten() * 1.0
        inputs /= 255.0
        # All-zeros length-10 vector; presumably one-hot-filled by code
        # past the end of this view — confirm against the full file.
        target = np.arange(10)*0.0