Example #1
import cv2
import numpy as np

from Model.Model import Model
from Dataset.Dataset import Dataset
# layer imports assumed to follow the Layers.<Name> layout used by this project
from Layers.Conv import Conv
from Layers.Dense import Dense
from Layers.Flatten import Flatten
from Layers.BatchNorm import BatchNorm

# file_names is assumed to be a list of image paths whose parent directory is
# the class id (0-9); build a one-hot label row for each file
labels = np.zeros((len(file_names), 10))
for i, name in enumerate(file_names):
    labels[i] = np.eye(10)[int(name.split('/')[-2])]


def read(addresses):
    # load each image as a normalized grayscale array of shape (H, W, 1)
    X = []
    for address in addresses:
        im = cv2.cvtColor(cv2.imread(address), cv2.COLOR_BGR2GRAY)
        im = im[:, :, np.newaxis]  # add a channel axis
        im = im / 255.0            # scale pixel values to [0, 1]
        # im = im.reshape(-1)
        X.append(im)

    return np.array(X)


# batches of 8 shuffled file paths, decoded on the fly by read()
train_ds = Dataset(file_names, labels, 8, True, read)

# LeNet-style network for 28x28x1 (MNIST-sized) grayscale inputs
nn = Model((28, 28, 1))
nn.add(Conv(5, 6, 1, 'same', 'relu'))   # 5x5 kernel, 6 filters, stride 1
nn.add(BatchNorm(0.9))
# nn.add(MaxPool(2))
nn.add(Conv(5, 16, 1, 'same', 'relu'))  # 5x5 kernel, 16 filters, stride 1

nn.add(Flatten())
nn.add(Dense(84, 'relu'))
nn.add(BatchNorm(0.9))
nn.add(Dense(10, 'linear'))             # raw logits for the 10 classes

# lr 1e-4, 10 epochs, softmax cross-entropy computed from the logits
nn.train(1e-4, train_ds, 10, 'softmax_cross_entropy_with_logits')
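
For reference, the 'softmax_cross_entropy_with_logits' loss selected in the train() call pairs the raw outputs of Dense(10, 'linear') with the one-hot targets built above. The sketch below writes the standard formula in plain NumPy; it is a generic reference, not the library's own implementation.

import numpy as np

def softmax_cross_entropy_with_logits(logits, one_hot_labels):
    # shift by the row-wise max for numerical stability
    shifted = logits - logits.max(axis=1, keepdims=True)
    log_probs = shifted - np.log(np.exp(shifted).sum(axis=1, keepdims=True))
    # mean negative log-likelihood of the true class over the batch
    return -np.mean(np.sum(one_hot_labels * log_probs, axis=1))

# e.g. a batch of 8 samples and 10 classes, as produced by Dense(10, 'linear')
logits = np.random.randn(8, 10)
targets = np.eye(10)[np.random.randint(0, 10, size=8)]
print(softmax_cross_entropy_with_logits(logits, targets))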
Example #2
# Assumes nn = Model(input_shape) was already constructed, e.g.
# Model((28, 28, 1)) as in Example #1; alternative layer stacks are left
# commented out.

# nn.add(BatchNorm(0.9))
# nn.add(MaxPool(2))

# nn.add(Conv(5, 128, 1, 'same', 'relu'))
# nn.add(BatchNorm(0.9))
# nn.add(Conv(3, 128, 1, 'same', 'relu'))
# nn.add(BatchNorm(0.9))
# nn.add(MaxPool(2))

# nn.add(Conv(1, 32, 2, 'valid', 'relu'))

# nn.add(Flatten())
# nn.add(Dense(1024, 'relu'))
# nn.add(Dense(1, 'sigmoid'))

# Active stack: LeNet-style features with a single sigmoid output for
# binary classification.
nn.add(Conv(5, 6, 1, 'same', 'leaky_relu'))
# # nn.add(BatchNorm(0.9))
# nn.add(Conv(3, 2, 1, 'same', 'leaky_relu'))
# # nn.add(BatchNorm(0.9))
nn.add(MaxPool(2))
nn.add(Conv(5, 16, 1, 'same', 'leaky_relu'))
# nn.add(Conv(1, 2, 2, 'valid', 'leaky_relu'))

nn.add(Flatten())
nn.add(Dense(84, 'relu'))
# nn.add(Dense(256, 'relu'))
# nn.add(BatchNorm(0.9))
nn.add(Dense(1, 'sigmoid'))

# nn.add(Conv(5, 8, 2, 'valid', 'leaky_relu'))
# nn.add(BatchNorm(0.9))
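
The snippet above toggles layers with different padding modes and strides; the spatial size after each stage follows standard convolution arithmetic. Below is a small stand-alone helper (not part of the library) that checks those sizes, assuming a 28x28 input as in Example #1 and a MaxPool(2) whose stride equals its window.

import math

def conv_output_size(n, kernel, stride, padding):
    # spatial output size of a conv/pool layer on an n x n input
    if padding == 'same':
        return math.ceil(n / stride)
    if padding == 'valid':
        return (n - kernel) // stride + 1
    raise ValueError(padding)

n = 28
n = conv_output_size(n, 5, 1, 'same')   # Conv(5, 6, 1, 'same')  -> 28
n = conv_output_size(n, 2, 2, 'valid')  # MaxPool(2)             -> 14
n = conv_output_size(n, 5, 1, 'same')   # Conv(5, 16, 1, 'same') -> 14
print(n * n * 16)  # 3136 values reach Flatten() and feed Dense(84)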
Example #3
File: iris.py  Project: Atom-101/NumPyML
import sys
import os

# make the Custom_CNN_Lib package importable; '~' must be expanded explicitly
sys.path.append(os.path.expanduser('~/common_data/Projects/Custom_CNN_Lib'))

import pandas as pd
import numpy as np

from Model.Model import Model
from Layers.Dense import Dense
from Layers.Conv import Conv
from Dataset.Dataset import Dataset

df = pd.read_csv('Iris.csv')  # Iris CSV: Id column, 4 feature columns, Species
# print(df.head())

# encode the Species strings as integer class ids 0-2
df['Species'] = pd.factorize(df['Species'], sort=True)[0]
# print(df.head())
train = df.iloc[:135, :]
valid = df.iloc[135:, :]  # held-out rows (unused below)

# one-hot encode the class ids
Y = train.iloc[:, -1].values
Y = np.eye(np.max(Y) + 1)[Y]

# drop the Id column (first) and the label (last); one full batch of 135 rows
train_dataset = Dataset(train.iloc[:, 1:-1].values, Y, 135)
# print(train_dataset.next())

# 4 input features -> 6 ReLU units -> 3 sigmoid outputs
nn = Model(4)
nn.add(Dense(6, 'relu'))
nn.add(Dense(3, 'sigmoid'))

# lr 1e-2, 8000 full-batch epochs, binary cross-entropy against the one-hot
# targets; the softmax-with-logits variant is kept commented below
nn.train(1e-2, train_dataset, 8000, 'binary_cross_entropy', None)
# nn.train(1e-2, train_dataset, 8000, 'softmax_cross_entropy_with_logits', None)
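
The two label-encoding steps used above, pd.factorize followed by indexing an identity matrix, can be checked in isolation on a toy Species column:

import numpy as np
import pandas as pd

species = pd.Series(['Iris-setosa', 'Iris-virginica',
                     'Iris-versicolor', 'Iris-setosa'])

# factorize(sort=True) maps the sorted unique strings to integer codes 0, 1, 2
codes, uniques = pd.factorize(species, sort=True)
print(codes)    # [0 2 1 0]
print(uniques)  # sorted labels: Iris-setosa, Iris-versicolor, Iris-virginica

# indexing an identity matrix with the codes yields one-hot rows
one_hot = np.eye(codes.max() + 1)[codes]
print(one_hot)  # [[1. 0. 0.], [0. 0. 1.], [0. 1. 0.], [1. 0. 0.]]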