def type_inference(self):
    """Infer the output tensor type of the reshape.

    Falls back to symbolic dimensions when the target shape is not
    fully known at compile time; otherwise delegates the concrete
    computation to ``self._get_type_val()``.
    """
    out_dtype = self.x.dtype
    # Variable-length shape tensor: not even the output rank is known.
    if any_symbolic(self.shape.shape):
        return types.tensor(out_dtype, (get_new_variadic_symbol(),))
    # Rank is fixed here, but the individual dimension values are not
    # known at compile time: emit one fresh symbol per output dim.
    if self.shape.sym_val is None:
        rank = self.shape.shape[0]
        sym_dims = tuple(get_new_symbol() for _ in range(rank))
        return types.tensor(out_dtype, sym_dims)
    # Shape fully (symbolically) determined; reuse the shared helper.
    inferred_type, _ = self._get_type_val()
    return inferred_type
def type_inference(self):
    """Infer the output tensor type of the fill-style op.

    Returns a tensor type whose dtype is that of ``self.value`` and
    whose shape is the requested ``self.shape``, substituting symbolic
    dimensions when the shape is not known at compile time.
    """
    # Bug fix: use the fill value's dtype in every branch. The two
    # symbolic branches previously hard-coded types.fp32, disagreeing
    # with the concrete branch below and yielding a wrong dtype for
    # non-fp32 fill values whenever the shape was symbolic.
    if any_symbolic(self.shape.shape):
        # We can't infer any shape if shape has variable length.
        return types.tensor(self.value.dtype, (get_new_variadic_symbol(),))
    # shape has fixed length here.
    if self.shape.sym_val is None:
        ret_shape = tuple([get_new_symbol() for _ in range(self.shape.shape[0])])
        return types.tensor(self.value.dtype, ret_shape)
    return types.tensor(self.value.dtype, tuple(self.shape.sym_val.tolist()))
def test_builder_to_backend_symbolic(self, use_cpu_only, backend):
    """Reshape with a symbolic input dim and runtime-provided shape tensors."""
    s0 = get_new_symbol()
    s_len = get_new_symbol()

    input_placeholders = {
        "x": mb.placeholder(shape=(2, s0)),
        # Shape tensors are declared int32; feed int32 values below.
        "shape": mb.placeholder(shape=(3,), dtype=types.int32),
        "shape2": mb.placeholder(shape=(s_len,), dtype=types.int32),
    }

    def build(x, shape, shape2):
        return [
            mb.reshape(x=x, shape=[2, -1]),
            mb.reshape(x=x, shape=[1, -1]),
            mb.reshape(x=x, shape=[2, 1, 1, -1]),
            mb.reshape(x=x, shape=shape),
            mb.reshape(x=x, shape=shape2),
        ]

    expected_output_types = [
        (2, s0, types.fp32),
        (1, 2 * s0, types.fp32),
        (2, 1, 1, s0, types.fp32),
        (UNK_SYM, UNK_SYM, UNK_SYM, types.fp32),
        (UNK_VARIADIC, types.fp32),
    ]
    expected_outputs = [
        np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32),
        np.array([[1, 2, 3, 4, 5, 6]], dtype=np.float32),
        np.array([[[[1.0, 2.0, 3.0]]], [[[4.0, 5.0, 6.0]]]], dtype=np.float32),
        np.array([[[1, 2, 3]], [[4, 5, 6]]], dtype=np.float32),
        np.array([[[1, 2, 3]], [[4, 5, 6]]], dtype=np.float32),
    ]
    input_values = {
        "x": np.array([[1, 2, 3], [4, 5, 6]], dtype=np.float32),
        # Bug fix: these values were np.float32, which mismatched the
        # int32 dtype declared on the placeholders above.
        "shape": np.array([2, 1, 3], dtype=np.int32),
        "shape2": np.array([2, 1, 3], dtype=np.int32),
    }

    run_compare_builder(
        build,
        input_placeholders,
        input_values,
        expected_output_types,
        expected_outputs,
        use_cpu_only=use_cpu_only,
        frontend_only=False,
        backend=backend,
    )
def type_inference(self):
    """Infer the output type of transpose: permute x's dims by perm.

    Raises:
        ValueError: if ``perm`` does not have exactly one entry per
            dimension of ``x``.
    """
    x_type = self.x.dtype
    perm = self.perm.val
    x_shape = np.array(self.x.shape)
    if len(perm) != self.x.rank:
        msg = "perm should have the same length as rank(x): {} != {}"
        raise ValueError(msg.format(len(perm), self.x.rank))
    if self.x.rank == 0:
        return self.x.sym_type  # scalar cannot be transposed
    if any_variadic(self.x.shape):
        # Bug fix: wrap the variadic symbol in a 1-tuple. A bare symbol
        # is not iterable, so tuple(ret_shape) below raised TypeError.
        # This matches how the reshape op builds its variadic shape.
        ret_shape = (get_new_variadic_symbol(),)
    else:
        # NumPy integer-array indexing applies the permutation in one step.
        ret_shape = x_shape[perm]
    return types.tensor(x_type, tuple(ret_shape))