Code example #1
def testDataloader():
    from src import dataloader
    import cv2

    # Load the training set and split it into training and validation subsets.
    data, labels, classes = dataloader.loadData("data_part1/train/")
    td, tl, vd, vl = dataloader.splitValidation(data, labels, 10)

    # Print the first training label and display the corresponding image.
    print(tl[0])

    cv2.imshow('Image', td[0])
    cv2.waitKey(0)
    cv2.destroyAllWindows()
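
For reference, a rough sketch of what dataloader.splitValidation is assumed to do here (hold part of the data out for validation); the real helper in src/dataloader.py may differ, and split_validation_sketch is a hypothetical name used only for illustration.

import numpy as np

def split_validation_sketch(data, labels, percent):
    # Hypothetical stand-in for dataloader.splitValidation: reserve roughly
    # `percent` percent of the samples as a validation set and return
    # (train_data, train_labels, val_data, val_labels).
    data, labels = np.asarray(data), np.asarray(labels)
    n_val = max(1, len(data) * percent // 100)
    return data[n_val:], labels[n_val:], data[:n_val], labels[:n_val]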
Code example #2
    return b1, w1, loss  # tail of the training function; its body is not shown in this fragment


def validate(x, y, w0, b0):
    # Accuracy: fraction of samples whose arg-max prediction matches the one-hot label.
    ok = 0
    for i in range(len(x)):
        y_ = sigmoid(np.dot(x[i], w0) + b0)
        shot = np.argmax(y_)
        if y[i][shot] == 1:
            ok += 1

    return ok / len(x)


if __name__ == '__main__':  # entry point: train and validate a single-layer sigmoid classifier
    data, labels, classes = dataloader.loadData("data_part1/train/")
    td, tl, vd, vl = dataloader.splitValidation(data, labels, 10)

    # flatten the 77x71 images and normalize pixel values to [0, 1]
    td = np.reshape(td, (len(td), 77 * 71))
    vd = np.reshape(vd, (len(vd), 77 * 71))
    td = td / 255.
    vd = vd / 255.

    epoch = 130
    learning_rate = 0.1
    batch_size = 20

    # weights with 10 output dimensions (one per class)
    w = np.random.uniform(-0.1, 0.1, (len(td[0]), 10))
    b = np.zeros(10)
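
The snippet stops before the training loop itself. For context, here is a minimal sketch of the two pieces it uses but does not show: the sigmoid activation and a mini-batch training step that would return b1, w1, loss as in the return statement above. The name train and its update rule (squared error through the sigmoid) are assumptions for illustration, not the original implementation.

import numpy as np

def sigmoid(z):
    # Element-wise logistic function.
    return 1.0 / (1.0 + np.exp(-z))

def train(x, y, w0, b0, learning_rate, batch_size):
    # Hypothetical single-epoch pass of mini-batch gradient descent on the
    # squared error of the sigmoid outputs; shapes follow the main block above
    # (x: (N, features), y: (N, 10) one-hot, w0: (features, 10), b0: (10,)).
    w1, b1 = w0.copy(), b0.copy()
    loss = 0.0
    for start in range(0, len(x), batch_size):
        xb, yb = x[start:start + batch_size], y[start:start + batch_size]
        y_ = sigmoid(np.dot(xb, w1) + b1)      # (batch, 10) predictions
        err = y_ - yb                          # prediction error
        grad = err * y_ * (1.0 - y_)           # chain rule through the sigmoid
        w1 -= learning_rate * np.dot(xb.T, grad) / len(xb)
        b1 -= learning_rate * grad.mean(axis=0)
        loss += np.mean(err ** 2)
    return b1, w1, loss

With these definitions, the main block could run for epoch iterations, calling train once per epoch and validate(vd, vl, w, b) to track accuracy on the held-out split.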
Code example #3
import cv2
import numpy as np
import tensorflow as tf  # TF1-style graph API; under TF2, use tf.compat.v1

from src import dataloader

TRAIN_PATH = "data_part1/train/"

IMAGE_HEIGHT = 64  # height of the image
IMAGE_WIDTH = 64   # width of the image
NUM_CHANNELS = 1   # number of channels of the image

NUM_EPOCHS_FULL = 200
S_LEARNING_RATE_FULL = 0.001
F_LEARNING_RATE_FULL = 0.001
BATCH_SIZE = 64

data, labels, classes = dataloader.loadData(TRAIN_PATH)

resized = dataloader.resize(data, IMAGE_WIDTH, IMAGE_HEIGHT)
# cv2.imshow('resized', resized[10])
# cv2.waitKey(0)
# exit()
td, tl, vd, vl = dataloader.splitValidation(resized, labels, 10)

# reshape to NHWC tensors and normalize pixel values to [0, 1]
td = np.reshape(td, (len(td), IMAGE_HEIGHT, IMAGE_WIDTH, NUM_CHANNELS)) / 255.0
vd = np.reshape(vd, (len(vd), IMAGE_HEIGHT, IMAGE_WIDTH, NUM_CHANNELS)) / 255.0

discClasses = np.array(['fake', 'true'])

graph = tf.Graph()
with graph.as_default():
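    # --- Hypothetical sketch only: the original graph definition is truncated
    # here and not shown. Everything below is an assumption in the TF1 graph
    # style implied above; the real model may differ. ---
    x_ph = tf.placeholder(tf.float32,
                          [None, IMAGE_HEIGHT, IMAGE_WIDTH, NUM_CHANNELS], name='x')
    y_ph = tf.placeholder(tf.float32, [None, len(discClasses)], name='y')

    # Minimal stand-in classifier over the 'fake'/'true' classes.
    logits = tf.layers.dense(tf.layers.flatten(x_ph), len(discClasses))
    loss = tf.losses.softmax_cross_entropy(onehot_labels=y_ph, logits=logits)
    train_op = tf.train.AdamOptimizer(S_LEARNING_RATE_FULL).minimize(loss)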