def test_gather(self):
    """Gather layer selects rows or individual elements from its input."""
    data = np.random.uniform(size=(5, 4)).astype(np.float32)
    with self.session() as sess:
        # Indices baked into the layer at construction: gather whole rows 2 and 3.
        gathered = Gather(indices=[[2], [3]])(data).eval()
        assert np.array_equal([data[2], data[3]], gathered)
        # Indices passed as a second input tensor: gather single elements.
        gathered = Gather()(data, np.array([[1, 1], [0, 3]])).eval()
        assert np.array_equal([data[1, 1], data[0, 3]], gathered)
def test_Gather_pickle():
    """A TensorGraph containing a Gather layer builds and saves without error."""
    graph = TensorGraph()
    inp = Feature(shape=(graph.batch_size, 1))
    gather = Gather(indices=[[0], [2], [3]], in_layers=inp)
    graph.add_output(gather)
    graph.set_loss(gather)
    graph.build()
    graph.save()
def create_layers(self, state, **kwargs):
    """Build the policy ('action_prob') and value ('value') heads.

    state[0] is a 1-based product index; state[1] is the queue observation
    vector. One Dense head per product is stacked, then Gather picks the
    head matching each sample's product index.
    """
    # Convert the 1-based product index to 0-based and pair it with the
    # batch position so it can drive a Gather over the stacked heads.
    idx = Reshape(in_layers=[state[0]], shape=(-1, 1))
    idx = AddConstant(-1, in_layers=[idx])
    idx = InsertBatchIndex(in_layers=[idx])
    # shape(idx) = (batch_size, 1)

    queue = Reshape(in_layers=[state[1]], shape=(-1, self.n_queue_obs))
    # shape(queue) = (batch_size, n_queue_obs)
    #queue = Dense(16, in_layers=[queue], activation_fn=tensorflow.nn.relu)
    ## shape(queue) = (batch_size, 16)

    hidden = queue
    if not self.single_layer:
        # Single hidden layer whose output is concatenated with the raw
        # queue observations (a residual-style skip connection).
        for _ in range(1):
            dense = Dense(8, in_layers=[hidden], activation_fn=tensorflow.nn.relu)
            hidden = Concat(in_layers=[queue, dense])
    # single_layer: shape(hidden) = (batch_size, n_queue_obs)
    # otherwise:    shape(hidden) = (batch_size, n_queue_obs + 8)

    # NOTE(review): n_actions is resolved from an enclosing scope, not self —
    # confirm it is defined where this method runs.
    action_heads = [
        Dense(n_actions, in_layers=[hidden]) for _ in range(self.n_products)
    ]
    probs = Stack(in_layers=action_heads, axis=1)
    # shape(probs) = (batch_size, n_products, n_actions)
    probs = Gather(in_layers=[probs, idx])
    # shape(probs) = (batch_size, n_actions)
    probs = SoftMax(in_layers=[probs])

    value_heads = [Dense(1, in_layers=[hidden]) for _ in range(self.n_products)]
    value = Stack(in_layers=value_heads, axis=1)
    # shape(value) = (batch_size, n_products, 1)
    value = Gather(in_layers=[value, idx])
    # shape(value) = (batch_size, 1)
    return {'action_prob': probs, 'value': value}