Example #1
0
# Example: inspecting a TFMapping (Gaussian) object and evaluating its
# tensors on a data object.
# NOTE(review): `data[...] >= tensor` appears to be a DSL overload that
# evaluates the given tensor on the selected data slice — confirm against
# the data-object implementation.

# the concatenated parameter vector can be accessed by
gaussianOther.params

# we can also access the tensor variables with
gaussianOther.tv_final_b
# All tensor variables of one object can be found by
gaussianOther.tv_variables_list

# KL divergence between the two Gaussian mappings, as a TF tensor
tf_klDiv = gaussian.klDivergence(gaussianOther)

# evaluate the KL-divergence tensor on the data
# NOTE(review): klDiff_tf is not used again below — presumably kept for
# illustration; verify before removing.
klDiff_tf = data[...] >= tf_klDiv

# We can directly view the output of different tensors, i.e., the mean
data[...] >= gaussian.mean

# Or the different layers

#data[...] >= gaussian.layers[0]


# We can also generate TFMappings such as Gaussians which get tensors as input:

stateTensor = dataManager.createTensorForEntry('states')
stateTensor = stateTensor * 2  # arbitrary transformation of the input tensor

# clone the mapping under a new name, presumably rewired to read from
# stateTensor instead of its default input — confirm clone() semantics
gaussianTensor = gaussianOther.clone('clonedGaussian', stateTensor)
data[...] >= gaussianTensor.mean

Example #2
0
# Restrict TF to a single thread for inter- and intra-op parallelism and
# install a global default session (TF1-style API) so tensors can be
# evaluated by the data-object operators below.
num_cpu = 1
tf_config = tf.ConfigProto(inter_op_parallelism_threads=num_cpu, intra_op_parallelism_threads=num_cpu)
session = tf.Session(config=tf_config)
# NOTE(review): __enter__() is never paired with __exit__(); the session
# deliberately stays the default for the remainder of the script.
session.__enter__()


# define our mapping class. A mapping is a callable object, where the call function is implemented by the MappingMethod decorator

# Create a dataManager that can handle the input (X) and output (Y) of a 1 dimensional
# function
# NOTE(review): the comment above says "1 dimensional" but every entry is
# created with 2 columns — confirm which is intended.
dataManager = DataManager('values')
dataManager.addDataEntry('X', 2)  # entry 'X' with 2 columns
dataManager.addDataEntry('Y', 2)
dataManager.addDataEntry('Z', 2)

# tensors bound to the 'X' and 'Y' data entries
x = dataManager.createTensorForEntry('X')
y = dataManager.createTensorForEntry('Y')

# data object holding 10 elements
data = dataManager.createDataObject([10])

# initialize both entries with ones, shape (10, 2)
data[...].X = np.ones((10,2))
data[...].Y = np.ones((10,2))

# derived tensors built from the entry tensors
z = x + y
y1 = x - z

# associate the computed tensors with output data entries
dataManager.connectTensorToEntry(z, 'Z')
dataManager.connectTensorToEntry(y1, 'Y')

# NOTE(review): `data[...] >> (z, y1) >> data` appears to evaluate both
# tensors on the data slice and write the results back into the connected
# entries — confirm against the data-object DSL.
data[...] >> (z, y1) >> data