def __init__(self, dataset, num_epochs=-1, output_numpy=False, do_copy=True):
    """Set up the C++ runtime iterator for *dataset*.

    Args:
        dataset: source dataset pipeline; an IR-tree copy is created so the
            caller's dataset object is left untouched.
        num_epochs (int): number of epochs to iterate; -1 means iterate
            indefinitely.
        output_numpy (bool): if True, items are yielded as raw numpy arrays
            instead of Tensors.
        do_copy (bool): when producing Tensors, build them with
            ``Tensor(...)`` if True, otherwise with ``Tensor.from_numpy(...)``
            (presumably avoiding a data copy — confirm against Tensor docs).
    """
    self._col_names = None

    # Work on a copy of the IR tree so the original dataset stays usable.
    self.ori_dataset = dataset
    self.ir_tree, self.dataset = dataset.create_ir_tree()

    # Stand up the C++ runtime context and attach an iterator consumer.
    self._runtime_context = cde.PythonRuntimeContext()
    self._runtime_context.Init()
    consumer = cde.PythonIteratorConsumer(num_epochs)
    consumer.Init(self.ir_tree)
    self._runtime_context.AssignConsumer(consumer)
    self._iterator = self._runtime_context.GetConsumer()

    # Choose the per-item conversion strategy exactly once, up front.
    if output_numpy:
        self._transform_tensor = lambda t: t.as_array()
    elif do_copy:
        self._transform_tensor = lambda t: Tensor(t.as_array())
    else:
        self._transform_tensor = lambda t: Tensor.from_numpy(t.as_array())

    self._index = 0

    # todo remove next when ContextManager is done
    ITERATORS_LIST.append(weakref.ref(self))
    _unset_iterator_cleanup()
# x = Tensor.from_numpy(np.random.random([Batch,Seq,Heads*Dim_head]).astype(np.float32)) # mask = Tensor.from_numpy(np.ones(x.shape).astype(np.float32)) # model = Performer(depth =2, dim=Heads*Dim_head, heads=Heads, causal=True) # out = model(x,mask) # print(out) # print(out.shape) # print(model) # test for PerformerLM np.random.seed(777) Batch, Seq, Dim, Heads = 2, 10, 8, 2 #[2,10] input_ids = Tensor.from_numpy( np.random.random([ Batch, Seq, ]).astype(np.int)) mask_np = np.ones(input_ids.shape).astype(np.float32) mask_np[0, 2:] = 0. #[2,10,8] x = Tensor.from_numpy( np.random.random([Batch, Seq, Dim]).astype(np.float32)) mask = Tensor.from_numpy(mask_np) # model = Performer(depth =2, dim=Dim, heads=Heads, causal=True) # print(model) # out = model(x,mask) # model = SelfAttention(dim=Dim, heads=Heads, dim_head=Dim//Heads, causal = False, nb_features = None, qr_uniform_q = False, dropout = 0.9) # out = model(x,mask) model = PerformerLM(num_tokens=100, max_seq_len=Seq,