Example #1
  # map each activity-label string to a numeric class id
  elif label=='jogging':
    key = 4.
  elif label=='running':
    key = 5.
  elif label=='walking':
    key = 6.
  else:
    print "[ERROR] WRONG LABEL VALUE  :  ", label
  temp = np.hstack((temp, key))
Yte=temp
del temp

print "[INFO] TRAIN DATA SHAPE : ", Xtr.shape
print "[INFO] TEST DATA SHAPE  : ", Xte.shape

train_set_x, train_set_y=ds.shared_dataset((Xtr, Ytr))
test_set_x, test_set_y=ds.shared_dataset((Xte, Yte))

del Xtr, Xte, Ytr, Yte

n_train_batches=train_set_x.get_value(borrow=True).shape[0]
n_train_batches=int(np.ceil(n_train_batches/float(batch_size)))  # float division so the ceil keeps the last partial batch
n_test_batches=test_set_x.get_value(borrow=True).shape[0]
n_test_batches=int(np.ceil(n_test_batches/float(batch_size)))

print "[INFO] TRAIN/TEST BATCHES : ", n_train_batches, n_test_batches
print "[MESSAGE] The data is loaded"
 
################################## LAYERWISE MODEL #######################################

X=T.matrix("data")
from scae_destin.cost import categorical_cross_entropy_cost
from scae_destin.cost import L2_regularization

start_time = time.time()
n_epochs = 100
batch_size = 100
nkerns = 100

Xtr, Ytr, Xte, Yte = ds.load_CIFAR10("../cifar-10-batches-py/")

Xtr = np.mean(Xtr, 3)
Xte = np.mean(Xte, 3)
Xtrain = Xtr.reshape(Xtr.shape[0], Xtr.shape[1] * Xtr.shape[2]) / 255.0
Xtest = Xte.reshape(Xte.shape[0], Xte.shape[1] * Xte.shape[2]) / 255.0
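
# Quick sanity check of the preprocessing above (assumes ds.load_CIFAR10
# returns images as (N, 32, 32, 3) arrays, the usual CIFAR-10 layout):
# averaging over axis 3 gives (N, 32, 32) grayscale maps, and the reshape
# flattens them to (N, 1024) rows scaled into [0, 1].
import numpy as np
_dummy = np.random.randint(0, 256, size=(10, 32, 32, 3)).astype("float64")
_gray = np.mean(_dummy, 3)
_flat = _gray.reshape(_gray.shape[0], _gray.shape[1] * _gray.shape[2]) / 255.0
assert _flat.shape == (10, 1024) and _flat.max() <= 1.0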

train_set_x, train_set_y = ds.shared_dataset((Xtrain, Ytr))
test_set_x, test_set_y = ds.shared_dataset((Xtest, Yte))

n_train_batches = train_set_x.get_value(borrow=True).shape[0] / batch_size
n_test_batches = test_set_x.get_value(borrow=True).shape[0] / batch_size

print "[MESSAGE] The data is loaded"

################################## FIRST LAYER #######################################

X = T.matrix("data")
y = T.ivector("label")
idx = T.lscalar()
corruption_level = T.fscalar()

images = X.reshape((batch_size, 1, 32, 32))
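
# How these symbolic pieces are normally wired together (hedged illustration,
# not the scae_destin API): `idx` selects a mini-batch of the shared training
# matrix through `givens` when compiling a theano.function. The toy function
# below just returns the batch mean; the real training function for this layer
# would output the layer's cost and updates instead.
import theano
_fetch_batch_mean = theano.function(
    inputs=[idx],
    outputs=X.mean(),
    givens={X: train_set_x[idx * batch_size:(idx + 1) * batch_size]})
print "[INFO] mean of mini-batch 0 :", _fetch_batch_mean(0)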
Example #3
from scae_destin.cost import L2_regularization

#def conv():

n_epochs=1
batch_size=100
nkerns=100

Xtr, Ytr, Xte, Yte=ds.load_CIFAR10("../cifar-10-batches-py/")

Xtr=np.mean(Xtr, 3)
Xte=np.mean(Xte, 3)
Xtrain=Xtr.reshape(Xtr.shape[0], Xtr.shape[1]*Xtr.shape[2])/255.0
Xtest=Xte.reshape(Xte.shape[0], Xte.shape[1]*Xte.shape[2])/255.0

train_set_x, train_set_y=ds.shared_dataset((Xtrain, Ytr))
test_set_x, test_set_y=ds.shared_dataset((Xtest, Yte))

n_train_batches=train_set_x.get_value(borrow=True).shape[0]/batch_size
n_test_batches=test_set_x.get_value(borrow=True).shape[0]/batch_size

print "[MESSAGE] The data is loaded"

################################## FIRST LAYER #######################################

X=T.matrix("data")
y=T.ivector("label")
idx=T.lscalar()
corruption_level=T.fscalar()

images=X.reshape((batch_size, 1, 32, 32))