def evaluate(self, x, y):
  """Returns the number of correct predictions.

  Args:
    x: 2-d array of size batch_size x image_size.
    y: 2-d array of size batch_size x num_classes.

  Returns:
    A scalar, the number of correct predictions.
  """
  # Labels are the argmax over the class axis, for both targets and outputs.
  predicted_labels = np.argmax(self.forward(x), axis=1)
  true_labels = np.argmax(y, axis=1)
  matches = np.array(true_labels == predicted_labels)
  return int(np.sum(matches))
def conv_shape_tuple(lhs_shape, rhs_shape, strides, pads, batch_group_count=1):
  """Compute the shape tuple of a conv given input shapes in canonical order."""
  if isinstance(pads, str):
    # Resolve a named padding scheme to explicit (lo, hi) pairs per spatial dim.
    pads = padtype_to_pads(lhs_shape[2:], rhs_shape[2:], strides, pads)
  if len(pads) != len(lhs_shape) - 2:
    msg = 'Wrong number of explicit pads for convolution: expected {}, got {}.'
    raise TypeError(msg.format(len(lhs_shape) - 2, len(pads)))
  # Total padding per spatial dimension (lo + hi).
  pad_totals = np.sum(np.array(pads).reshape(-1, 2), axis=1)
  lhs_padded = onp.add(lhs_shape[2:], pad_totals)
  # Standard conv output-size formula, clamped at zero.
  out_space = np.floor_divide(np.subtract(lhs_padded, rhs_shape[2:]), strides) + 1
  out_space = np.maximum(0, out_space)
  assert lhs_shape[0] % batch_group_count == 0
  out_shape = (lhs_shape[0] // batch_group_count, rhs_shape[0])
  return tuple(out_shape + tuple(out_space))
def conv_transpose_shape_tuple(lhs_shape, rhs_shape, window_strides, padding, dimension_numbers):
  """Compute the output shape tuple of a transposed convolution."""
  lhs_perm, rhs_perm, out_perm = conv_general_permutations(dimension_numbers)
  # Bring both operand shapes into canonical (batch/feature-first) order.
  lhs_trans = np.take(lhs_shape, lhs_perm)
  rhs_trans = np.take(rhs_shape, rhs_perm)
  if isinstance(padding, str):
    # Named padding scheme: derive explicit pads from kernel size and stride.
    padding = [_conv_transpose_padding(k, s, padding)
               for k, s in zip(rhs_trans[2:], window_strides)]
  pad_totals = list(map(np.sum, padding))
  # Output size before padding for each spatial dimension.
  unpadded = [(i - 1) * s - k + 2
              for i, k, s in zip(lhs_trans[2:], rhs_trans[2:], window_strides)]
  spatial = np.sum([unpadded, pad_totals], axis=0).tolist()
  transposed = tuple((lhs_trans[0], rhs_trans[0]) + tuple(spatial))
  # Undo the output permutation to return the shape in caller order.
  return tuple(np.take(transposed, np.argsort(out_perm)))
def f(x, y):
  """Return the sum of all elements of ``x + y``."""
  total = x + y
  return np.sum(total)
def testTFInterop(self):
  """Checks that np ops accept TF tensors and match the pure-NumPy result."""
  # Broadcasting a (1, 2) np array against a (2, 1) TF tensor, then reducing.
  mixed = np.ones([1, 2]) + tf.ones([2, 1])
  x_np = np.sum(mixed)
  x_onp = onp.sum(onp.ones([1, 2]) + onp.ones([2, 1]))
  self.assertAllClose(x_onp, x_np)
def mean_squared_error(self, y_out, y):
  """Mean squared error loss.

  Args:
    y_out: predicted values.
    y: target values, broadcast-compatible with y_out.

  Returns:
    The mean of the squared differences between y and y_out.
  """
  # BUG FIX: the original returned np.sum((y - y_out)**2) — the *sum* of
  # squared errors — despite the name and docstring saying "mean squared
  # error". np.mean matches the stated contract and keeps the loss scale
  # independent of batch size.
  return np.mean((y - y_out) ** 2)
def schedule(i):
  """Piecewise-constant schedule: returns the value for step ``i``."""
  # The number of boundaries already crossed selects the entry of `values`.
  segment = np.sum(i > boundaries)
  return values[segment]
def _assertAllClose(self, x, y, rtol):
  """Asserts the relative L1 difference between pytrees x and y is below rtol."""
  flat_x = ravel_pytree(x)[0]
  flat_y = ravel_pytree(y)[0]
  # Symmetric relative difference; the 1e-4 term guards against a zero
  # denominator when both inputs are (near-)zero.
  denom = np.sum(np.abs(flat_x)) + np.sum(np.abs(flat_y)) + 1e-4
  diff = 2 * np.sum(np.abs(flat_x - flat_y)) / denom
  self.assertLess(diff, rtol)