# Optionally reduce the dataset to speed up gradient checking.
# (The debug flag guarding this block is an assumption; it is off by default.)
debug = False
if debug:
  input_size = 8
  # only 100 datapoints
  input_data = input_data[:, :100]
  labels = labels[:100]
  # only top input_size most-varying input elements (pixels)
  indices = input_data.var(1).argsort()[-input_size:]
  input_data = np.asfortranarray(input_data[indices, :])

# Randomly initialise theta
theta = 0.005 * np.random.randn(num_classes * input_size, 1)

# === Step 2: Implement softmaxCost ===
#
# Implement softmaxCost in [softmax.cost()](softmax.html#section-1).

cost, grad = softmax.cost(theta, num_classes, input_size, lamb, input_data, labels)
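
# The sketch below is not this project's softmax.cost; it is a minimal
# reference implementation of what the call above is assumed to compute:
# softmax cross-entropy plus L2 weight decay, and the matching gradient.
# Assumed conventions (not taken from this repository): theta unrolls to
# (num_classes, input_size), input_data holds one example per column, and
# labels are integer class indices in [0, num_classes).
def softmax_cost_sketch(theta, num_classes, input_size, lamb, input_data, labels):
  m = input_data.shape[1]
  theta = theta.reshape(num_classes, input_size)

  # Class scores, shifted per example for numerical stability
  scores = theta.dot(input_data)
  scores -= scores.max(axis=0, keepdims=True)
  probs = np.exp(scores)
  probs /= probs.sum(axis=0, keepdims=True)

  # Indicator matrix: ground_truth[j, i] == 1 iff example i has label j
  ground_truth = np.zeros((num_classes, m))
  ground_truth[np.asarray(labels, dtype=int).ravel(), np.arange(m)] = 1

  # Average cross-entropy plus weight decay, and its gradient w.r.t. theta
  cost = -np.sum(ground_truth * np.log(probs)) / m + 0.5 * lamb * np.sum(theta ** 2)
  grad = -(ground_truth - probs).dot(input_data.T) / m + lamb * theta
  return cost, grad.reshape(-1, 1)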

# === Step 3: Gradient checking ===
#
#  As with any learning algorithm, always check that the gradients are
#  correct before learning the parameters.

# Cost-only wrapper around softmax.cost, for use with numerical gradient checking
def cost_func(x):
  return softmax.cost(x, num_classes, input_size, lamb, input_data, labels)[0]

# For testing: change False to True to run the (slow) numerical gradient check
if False:
  num_grad = util.compute_numerical_gradient(cost_func, theta)
  num_grad = num_grad.ravel('F')
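
# A hedged illustration of the check that compute_numerical_gradient enables
# (the helper below is illustrative, not the project's util implementation):
# estimate each partial derivative with central differences and compare the
# result against the analytic gradient.
def numerical_gradient_sketch(func, theta, epsilon=1e-4):
  num_grad = np.zeros_like(theta, dtype=float)
  perturb = np.zeros_like(theta, dtype=float)
  for i in range(theta.size):
    perturb.flat[i] = epsilon
    num_grad.flat[i] = (func(theta + perturb) - func(theta - perturb)) / (2 * epsilon)
    perturb.flat[i] = 0.0
  return num_grad

# Example comparison (best run on the reduced debug dataset above, since the
# loop evaluates the cost twice per parameter):
#   num_grad = numerical_gradient_sketch(cost_func, theta)
#   diff = np.linalg.norm(num_grad - grad) / np.linalg.norm(num_grad + grad)
#   print(diff)  # should be very small, e.g. on the order of 1e-9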