Example 1
	def build_fclayer(self, layer, previous_layer, last_output, **kwargs):
		"""Instantiate a fully connected layer and wire it to the previous layer.

		Sets ``kwargs['n_in']`` from the previous layer's output shape
		(flattening spatial feature maps when the previous layer was a
		Pool or Convolution), records the new layer's output shape on
		``layer``, and returns ``(layer, entity, last_output)``.
		"""
		prev_shape = previous_layer['output_shape']
		if previous_layer['name'] in ('Pool', 'Convolution'):
			# Spatial output: collapse (channels, height, width) into one
			# feature vector per sample before feeding the dense layer.
			flat_features = prev_shape[1] * prev_shape[2] * prev_shape[3]
			last_output = last_output.reshape((prev_shape[0], flat_features))
			kwargs['n_in'] = flat_features
		else:
			# Previous layer is assumed fully connected: shape is (batch, units).
			kwargs['n_in'] = prev_shape[1]

		# FCLayer accepts the remaining construction options directly.
		entity = FCLayer(**kwargs)
		layer['output_shape'] = entity.output_shape()

		### Logging ###
		self.logger.info("fclayer output shape")
		self.logger.info(layer['output_shape'])

		return (layer, entity, last_output)
Example 2
# Second convolutional stage: nkernels[0] input maps -> nkernels[1] output
# maps, 4x4 filters applied to 12x12 feature maps.
print("####")
print((nkernels[1], nkernels[0], 4, 4))
print((batch_size, nkernels[0], 12, 12))
conv1 = Convolution.withoutFilters(
    filter_shape=(nkernels[1], nkernels[0], 4, 4),
    image_shape=(batch_size, nkernels[0], 12, 12),
)

fm1 = conv1.get_output(pool_out0)

# 2x2 pooling, then flatten to (batch, features) for the dense layers.
pool1 = Pool((2, 2))
pool_out1 = pool1.get_output(fm1).flatten(2)

print("FCLayer 0")
fc0 = FCLayer(n_in=320, n_out=500)
fc0_out = fc0.get_output(pool_out1)

print("Get softmax output")
soft0 = FCLayer(500, 62, activation=T.nnet.softmax)
output = soft0.get_output(fc0_out)

# Gather every trainable parameter and build plain SGD updates
# (fixed learning rate of 0.001).
params = soft0.params + fc0.params + conv1.params + conv0.params

cost = T.mean(T.nnet.binary_crossentropy(y, output))
grads = T.grad(cost, params)

updates = [(p, p - .001 * g) for p, g in zip(params, grads)]
Example 3
# Second convolutional stage: 4x4 filters over 12x12 feature maps.
print((nkernels[1], nkernels[0], 4, 4))
print((batch_size, nkernels[0], 12, 12))
conv1 = Convolution.withoutFilters(
	filter_shape=(nkernels[1], nkernels[0], 4, 4),
	image_shape=(batch_size, nkernels[0], 12, 12),
)

fm1 = conv1.get_output(pool_out0)

# 2x2 pooling followed by a flatten to (batch, features).
pool1 = Pool((2, 2))
pool_out1 = pool1.get_output(fm1).flatten(2)

print("FCLayer 0")
fc0 = FCLayer(n_in=320, n_out=500)
fc0_out = fc0.get_output(pool_out1)

print("Get softmax output")
soft0 = FCLayer(500, 62, activation=T.nnet.softmax)
output = soft0.get_output(fc0_out)

# Collect all trainable parameters and define the training objective.
params = soft0.params + fc0.params + conv1.params + conv0.params

cost = T.mean(T.nnet.binary_crossentropy(y, output))