def batchNorm(self, node, makeEquations):
    """Function to generate equations for a BatchNormalization

    Args:
        node (node): ONNX node representing the BatchNormalization operation
        makeEquations (bool): True if we need to create new variables and add new equations

    :meta private
    """
    nodeName = node.output[0]
    inputName = node.input[0]
    # BatchNormalization is elementwise, so the output shape equals the input shape
    self.shapeMap[nodeName] = self.shapeMap[inputName]

    # Get attributes.
    # ONNX declares epsilon as optional with default 1e-05; initializing to None
    # (as before) crashed with a TypeError in the sqrt below whenever the
    # attribute was omitted from the node.
    epsilon = 1e-05
    for attr in node.attribute:
        if attr.name == "epsilon":
            epsilon = get_attribute_value(attr)

    # Get inputs: per-channel scale (gamma), bias (beta), running mean and variance
    scales = self.constantMap[node.input[1]].reshape(-1)
    biases = self.constantMap[node.input[2]].reshape(-1)
    input_means = self.constantMap[node.input[3]].reshape(-1)
    input_variances = self.constantMap[node.input[4]].reshape(-1)

    if not makeEquations:
        return

    numChannels = len(scales)

    # Get variables, grouped so that row i holds all elements of channel i.
    # NOTE(review): this reshape assumes the batch dimension is 1 (or that the
    # layout is channel-major) — with batch > 1 in NCHW the grouping would mix
    # channels; verify against callers.
    inputVars = self.varMap[inputName].reshape(numChannels, -1)
    outputVars = self.makeNewVariables(nodeName).reshape(numChannels, -1)
    assert (inputVars.shape == outputVars.shape)

    numInputs = inputVars.shape[1]

    for i in range(numChannels):
        for j in range(numInputs):
            # Add equation encoding
            #   output = scale * (input - mean) / sqrt(variance + epsilon) + bias
            # rearranged into Marabou's linear form
            #   (scale / sqrt(var + eps)) * input - output = scale * mean / sqrt(var + eps) - bias
            # To know this computation,
            # refer to https://github.com/onnx/onnx/blob/master/docs/Operators.md#batchnormalization.
            e = MarabouUtils.Equation()
            e.addAddend(-1, outputVars[i][j])
            e.addAddend(
                1 / np.sqrt(input_variances[i] + epsilon) * scales[i],
                inputVars[i][j])
            e.setScalar(
                input_means[i] / np.sqrt(input_variances[i] + epsilon) * scales[i]
                - biases[i])
            self.addEquation(e)