def testRunOperatorOnce(self):
    self.assertEqual(
        workspace.RunOperatorOnce(
            self.net.Proto().op[0].SerializeToString()),
        True)
    self.assertEqual(workspace.HasBlob("testblob"), True)
    blobs = workspace.Blobs()
    self.assertEqual(len(blobs), 1)
    self.assertEqual(blobs[0], "testblob")
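# For context: the test above assumes a setUp() that builds self.net with a
# single op whose output blob is named "testblob". A minimal sketch of such a
# fixture, assuming core.Net and the ConstantFill operator; the net name and
# tensor shape here are illustrative, not taken from the source.
import unittest

import numpy as np
from caffe2.python import core, workspace


class TestWorkspace(unittest.TestCase):
    def setUp(self):
        # A one-op net: fill a constant tensor into "testblob".
        self.net = core.Net("test-net")
        self.net.ConstantFill([], "testblob", shape=[1, 2, 3, 4], value=1.0)
        workspace.ResetWorkspace()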
def GetLossAndGrad(self, op, grad_ops, x, input_name, outputs_with_grads):
    # First, feed in the current input. Note that we are not changing
    # anything else, so we do not need to re-feed the other inputs.
    workspace.FeedBlob(input_name, x, self._device_option)
    # Run the forward operator.
    workspace.RunOperatorOnce(op)
    loss = 0.
    # Compute the sum-of-squares loss and feed in the output gradients.
    for idx in outputs_with_grads:
        name = op.output[idx]
        arr = workspace.FetchBlob(name)
        loss += (arr**2).sum()
        workspace.FeedBlob(core.GetGradientName(name), arr,
                           self._device_option)
    loss /= 2.
    # Run the gradient ops.
    workspace.RunOperatorsOnce(grad_ops)
    # Fetch the gradient of the loss with respect to the input being checked.
    grad = workspace.FetchBlob(core.GetGradientName(input_name))
    return loss, grad
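# GetLossAndGrad returns the sum-of-squares loss over the selected outputs and
# the analytical gradient for one input, which makes a numerical gradient
# check straightforward. A minimal sketch of such a driver, meant to live on
# the same checker class: the method name is hypothetical, and self._stepsize
# and self._threshold are assumed to be set in the constructor (neither
# attribute is shown in the source).
def _CheckGradientSketch(self, op, grad_ops, x, input_name,
                         outputs_with_grads):
    # Analytical gradient at x.
    _, grad = self.GetLossAndGrad(op, grad_ops, x, input_name,
                                  outputs_with_grads)
    grad_estimate = np.zeros_like(x)
    for dim in range(x.size):
        # Perturb one dimension in each direction and take the central
        # difference of the losses. x is re-fed on every call.
        x.flat[dim] += self._stepsize
        pos_loss, _ = self.GetLossAndGrad(op, grad_ops, x, input_name,
                                          outputs_with_grads)
        x.flat[dim] -= 2 * self._stepsize
        neg_loss, _ = self.GetLossAndGrad(op, grad_ops, x, input_name,
                                          outputs_with_grads)
        x.flat[dim] += self._stepsize  # restore the original value
        grad_estimate.flat[dim] = (pos_loss - neg_loss) / (2 * self._stepsize)
    # The analytical gradient should match the numerical estimate.
    return np.allclose(grad, grad_estimate, atol=self._threshold)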
def CheckSimple(self, op, inputs, outputs_to_check):
    """Checks the operator with different device implementations.

    Inputs:
      op: the operator to be checked.
      inputs: the input data in numpy arrays.
      outputs_to_check: the outputs to check between devices.
    Outputs:
      boolean: True if it passes, False if it does not pass.
    """
    # Enter the checker workspace so we do not pollute the caller's blobs.
    old_ws_name = workspace.CurrentWorkspace()
    results = []
    workspace.SwitchWorkspace("_device_check_", True)
    for device_option in self._device_options:
        for i, arr in enumerate(inputs):
            workspace.FeedBlob(op.input[i], arr, device_option)
        op.device_option.CopyFrom(device_option)
        workspace.RunOperatorOnce(op)
        results.append([
            workspace.FetchBlob(op.output[idx]) for idx in outputs_to_check
        ])
        # Everything is done; reset the workspace before the next device.
        workspace.ResetWorkspace()
    # After running on all devices, check every result against the first
    # (reference) device.
    success = True
    for i in range(1, len(self._device_options)):
        for j in range(len(outputs_to_check)):
            x = results[i][j]
            y = results[0][j]
            if np.any(np.abs(x - y) > self._threshold):
                print('Failure in checking device option', i, 'and output',
                      op.output[outputs_to_check[j]], '- the outputs are:')
                print(x.flatten())
                print(y.flatten())
                success = False
    workspace.SwitchWorkspace(old_ws_name)
    return success
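# Example usage of the device check above. A minimal sketch, assuming the
# method lives on a DeviceChecker class whose constructor takes a threshold
# and a list of device options (the class name and constructor signature are
# assumptions, not shown in the source), and that the Relu operator is
# registered for both devices.
import numpy as np
from caffe2.proto import caffe2_pb2
from caffe2.python import core


def _RunDeviceCheckSketch():
    cpu_option = caffe2_pb2.DeviceOption()  # defaults to CPU
    gpu_option = caffe2_pb2.DeviceOption()
    gpu_option.device_type = caffe2_pb2.CUDA
    checker = DeviceChecker(threshold=1e-4,
                            device_options=[cpu_option, gpu_option])
    op = core.CreateOperator("Relu", ["X"], ["Y"])
    X = np.random.randn(2, 3).astype(np.float32)
    # Check that output 0 matches between the CPU and GPU implementations.
    assert checker.CheckSimple(op, [X], [0])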