def __call__(self, *xs: "variable.Variable"):
    y_axes = []
    y_shape_dict = AxisKeyDict()

    # Check variables in descending order of the number of dimensions.
    # Without this step, if x0.order=C and x1.order=NC, the output order would be CN; the expected result is NC.
    xs_order = [(i, x) for i, x in enumerate(xs)]
    xs_order.sort(key=lambda d: d[1].ndim, reverse=True)

    for i, x in xs_order:
        for axis in x.order.axes:
            if axis in y_axes:
                if y_shape_dict[axis] == 1:
                    # broadcast
                    y_shape_dict[axis] = x.shape_dict[axis]
            else:
                y_axes.append(axis)
                y_shape_dict[axis] = x.shape_dict[axis]

            if Placeholder.check_resolved(x.shape_dict[axis]):
                if Placeholder.check_resolved(y_shape_dict[axis]):
                    assert y_shape_dict[axis] == x.shape_dict[axis] or x.shape_dict[axis] == 1, \
                        "All input variables of elementwise operator should be the same shape: " \
                        f"y.shape_dict[{axis}]={y_shape_dict[axis]}, " \
                        f"x{i}.shape_dict[{axis}]={x.shape_dict[axis]}"
                else:
                    y_shape_dict[axis] = x.shape_dict[axis]

    y = variable.Variable([y_shape_dict[axis] for axis in y_axes], Order(y_axes))
    ChannelMode.set(y, ChannelModeEnum.R)

    for i, x in enumerate(xs):
        self.append_input(f"x{i}", x)
    self.append_output("y", y)
    return y,
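# A minimal standalone sketch (plain Python, no WebDNN imports) of the axis-order
# resolution used above; resolve_output_axes is a hypothetical helper, not library API.
def resolve_output_axes(*orders):
    """Merge input axis orders, visiting inputs with more dimensions first."""
    y_axes = []
    for order in sorted(orders, key=len, reverse=True):  # descending ndim, as in __call__ above
        for axis in order:
            if axis not in y_axes:
                y_axes.append(axis)
    return y_axes

# x0 has order C and x1 has order NC: visiting the 2-D input first yields NC.
# Without the sort, the 1-D input would be visited first and the result would be CN.
assert resolve_output_axes(["C"], ["N", "C"]) == ["N", "C"]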
def __call__(self, *xs: "variable.Variable"):
    y_axes = []
    y_shape_dict = AxisKeyDict()

    for i, x in enumerate(xs):
        for axis in x.order.axes:
            if axis in y_axes:
                if y_shape_dict[axis] == 1:
                    # broadcast
                    y_shape_dict[axis] = x.shape_dict[axis]
            else:
                y_axes.append(axis)
                y_shape_dict[axis] = x.shape_dict[axis]

            if Placeholder.check_resolved(x.shape_dict[axis]):
                if Placeholder.check_resolved(y_shape_dict[axis]):
                    assert y_shape_dict[axis] == x.shape_dict[axis] or x.shape_dict[axis] == 1, \
                        "All input variables of elementwise operator should be the same shape: " \
                        f"y.shape_dict[{axis}]={y_shape_dict[axis]}, " \
                        f"x{i}.shape_dict[{axis}]={x.shape_dict[axis]}"
                else:
                    y_shape_dict[axis] = x.shape_dict[axis]

        self.append_input(f"x{i}", x)

    y = variable.Variable([y_shape_dict[axis] for axis in y_axes], Order(y_axes))
    self.append_output("y", y)
    return y,
def exec(self):
    reduced_axis = self.axis
    x = self.inputs["x"]

    y_axes = [axis for axis in x.order.axes if axis != reduced_axis]
    y_shape = [x.shape_dict[axis] for axis in y_axes]
    y_order = Order(y_axes)

    # Add tensorwise attributes
    for axis in y_order.axes:
        self.attributes.add(Tensorwise(self, axis))

    y = variable.Variable(y_shape, y_order)
    self.append_output("y", y)
    return y,
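# A minimal standalone sketch of the output-shape computation in exec above: the reduced
# axis is dropped entirely and the remaining axes keep their extents.
# reduce_output_shape is a hypothetical helper, not library API.
def reduce_output_shape(shape_dict, axes, reduced_axis):
    y_axes = [axis for axis in axes if axis != reduced_axis]
    return y_axes, [shape_dict[axis] for axis in y_axes]

# Reducing over C on an NCHW input leaves an NHW output.
axes, shape = reduce_output_shape({"N": 2, "C": 3, "H": 4, "W": 5}, ["N", "C", "H", "W"], "C")
assert axes == ["N", "H", "W"] and shape == [2, 4, 5]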
def __call__(self, *xs: "variable.Variable"):
    y = variable.Variable(xs[0].shape, xs[0].order)
    for i, x in enumerate(xs):
        for axis in x.order.axes:
            assert axis in y.order.axes, \
                f"All input variables of elementwise operator should be the same shape. x[{i}] does not have " \
                f"{axis}: x0.order={xs[0].order}, x{i}.order={xs[i].order}"

            if Placeholder.check_resolved(x.shape_dict[axis]) or Placeholder.check_resolved(y.shape_dict[axis]):
                assert y.shape_dict[axis] == x.shape_dict[axis], \
                    "All input variables of elementwise operator should be the same shape: " \
                    f"x0.shape_dict={xs[0].shape_dict}, x{i}.shape_dict={xs[i].shape_dict}"

        self.append_input(f"x{i}", x)
    self.append_output("y", y)
    return y,
def __call__(self, x: "variable.Variable"):
    reduced_axis = self.axis

    y_axes = list(x.order.axes)
    y_shape = [1 if axis == reduced_axis else x.shape_dict[axis] for axis in y_axes]
    y_order = Order(y_axes)
    y = variable.Variable(y_shape, y_order)

    for axis in x.order.axes:
        if axis != reduced_axis:
            self.attributes.add(Tensorwise(axis))

    self.append_input("x", x)
    self.append_output("y", y)
    return y,
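# A minimal standalone sketch of the keep-dims behaviour in __call__ above: unlike the
# exec variant earlier, the reduced axis is kept with extent 1 instead of being removed.
# reduce_keepdims_shape is a hypothetical helper, not library API.
def reduce_keepdims_shape(shape_dict, axes, reduced_axis):
    return [1 if axis == reduced_axis else shape_dict[axis] for axis in axes]

# Reducing over C on an NCHW input yields shape (N, 1, H, W).
assert reduce_keepdims_shape({"N": 2, "C": 3, "H": 4, "W": 5}, ["N", "C", "H", "W"], "C") == [2, 1, 4, 5]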
def exec(self):
    y_axes = []
    y_shape_dict = AxisKeyDict()

    # Check variables in descending order of the number of dimensions.
    # Without this step, if x0.order=C and x1.order=NC, the output order would be CN; the expected result is NC.
    xs = [self.inputs[f"x{i}"] for i in range(len(self.inputs))]
    xs_order = [(i, x) for i, x in enumerate(xs)]
    xs_order.sort(key=lambda d: d[1].ndim, reverse=True)

    for i, x in xs_order:
        for axis in x.order.axes:
            if axis in y_axes:
                if y_shape_dict[axis] == 1:
                    # broadcast
                    y_shape_dict[axis] = x.shape_dict[axis]
            else:
                y_axes.append(axis)
                y_shape_dict[axis] = x.shape_dict[axis]

            if Placeholder.check_resolved(x.shape_dict[axis]):
                if Placeholder.check_resolved(y_shape_dict[axis]):
                    assert y_shape_dict[axis] == x.shape_dict[axis] or x.shape_dict[axis] == 1, f"""
[Elementwise] All input variables of elementwise operator should be the same shape:
    (y.shape) = {[y_shape_dict[a] for a in y_axes]}
    (x{i}.shape) = {x.shape}
    (y.shape[{axis}]) = {y_shape_dict[axis]}
    (x{i}.shape[{axis}]) = {x.shape_dict[axis]}"""
                else:
                    y_shape_dict[axis] = x.shape_dict[axis]

    # Add tensorwise attributes
    for axis in y_axes:
        self.attributes.add(Tensorwise(self, axis))

    y = variable.Variable([y_shape_dict[axis] for axis in y_axes], Order(y_axes))
    self.append_output("y", y)
    return y,
def __call__(self, *xs: "variable.Variable"):
    """
    Args:
        *xs (:class:`~webdnn.graph.variable.Variable`): Input variables. All input variables must be the same shape.

    Returns:
        tuple of :class:`~webdnn.graph.variable.Variable`: Output variable. It has the same shape as the input variables.
    """
    y = variable.Variable(xs[0].shape, xs[0].order)
    for i, x in enumerate(xs):
        for axis in x.order.axes:
            assert axis in y.order.axes

            if Placeholder.check_resolved(x.shape_dict[axis]) or Placeholder.check_resolved(y.shape_dict[axis]):
                assert y.shape_dict[axis] == x.shape_dict[axis]

        self.append_input(f"x{i}", x)
    self.append_output("y", y)
    return y,
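# A minimal standalone sketch of the placeholder-aware check in __call__ above: the
# equality assertion only fires when at least one extent is already resolved to a concrete
# integer; fully symbolic extents are accepted as-is. is_resolved and check_same_extent
# are hypothetical stand-ins for Placeholder.check_resolved and the inline assert.
def is_resolved(extent):
    return isinstance(extent, int)

def check_same_extent(y_extent, x_extent):
    if is_resolved(x_extent) or is_resolved(y_extent):
        assert y_extent == x_extent, f"shape mismatch: {y_extent} != {x_extent}"

check_same_extent(3, 3)      # both resolved and equal: passes
check_same_extent("N", "N")  # both still symbolic: no check is performed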