def __init__(self, var_set, pt_tensor=None, discrete_pgm=None, name=None):
    """Construct a factor over `var_set` within a DiscretePGM.

    Parameters
    ----------
    var_set : iterable
        Variable names this factor ranges over.
    pt_tensor : optional
        Either an existing tensor-like value, one of the keyword strings
        "ones" / "zeros" / "shared" (to build a constant or shared tensor
        of the factor's shape), or None to create a fresh symbolic variable.
    discrete_pgm : DiscretePGM, optional
        Model to attach to; defaults to the current context.
    name : str, optional
        Name for the symbolic tensor variable (only used when
        pt_tensor is None).

    Raises
    ------
    Exception
        If no DiscretePGM is given and none is active as context.
    ValueError
        If pt_tensor is an unrecognized keyword string.
    """
    if discrete_pgm is None:
        discrete_pgm = DiscretePGM.get_context()
    if discrete_pgm is None:
        raise Exception("No DiscretePGM specified, neither explicit nor as current context")
    self.discrete_pgm = discrete_pgm
    self.var_set = frozenset(var_set)
    self.scope = discrete_pgm.map_var_set(self.var_set)
    # Axes of the factor's tensor are ordered by the variables' global index.
    var_indices = sorted(discrete_pgm.var_index(v) for v in var_set)
    self.var_indices = var_indices
    # Map global variable index -> local axis position in this factor's tensor.
    self.var_idx_map = {vidx: axis for axis, vidx in enumerate(var_indices)}
    shp = [discrete_pgm.cardinalities[vidx] for vidx in var_indices]
    self.shape = shp
    self.is_shared = False
    # Only interpret pt_tensor as a keyword when it actually is a string:
    # comparing an ndarray/tensor against a str with == can produce
    # elementwise comparisons or FutureWarnings instead of a plain bool.
    if isinstance(pt_tensor, str):
        if pt_tensor == "ones":
            pt_tensor = T.ones(shp, dtype=theano.config.floatX)
        elif pt_tensor == "zeros":
            pt_tensor = T.zeros(shp, dtype=theano.config.floatX)
        elif pt_tensor == "shared":
            pt_tensor = theano.shared(np.zeros(shp, dtype=theano.config.floatX))
            self.is_shared = True
        else:
            raise ValueError("Unknown pt_tensor keyword: %r (expected 'ones', 'zeros' or 'shared')" % (pt_tensor,))
    if pt_tensor is None:
        # No value given: create a fresh non-broadcastable symbolic variable
        # with one axis per variable in the factor's scope.
        bcast = [False] * len(self.var_set)
        tensor_type = T.TensorType(dtype=theano.config.floatX, broadcastable=bcast)
        self.pt_tensor = T.TensorVariable(type=tensor_type, name=name)
    else:
        self.pt_tensor = T.as_tensor_variable(pt_tensor)
def compact_shape(ndarr, var_set, discrete_pgm=None):
    """Reshape `ndarr` from expanded format into the compact format
    whose axes are the cardinalities of `var_set`'s variables, ordered
    by their index in the model.

    Raises an Exception when no DiscretePGM is supplied and none is
    active as the current context.
    """
    pgm = discrete_pgm if discrete_pgm is not None else DiscretePGM.get_context()
    if pgm is None:
        raise Exception("No DiscretePGM specified, neither explicit nor as current context")
    mapped_scope = pgm.map_var_set(var_set)
    target_shape = [pgm.cardinalities[idx] for idx in sorted(mapped_scope)]
    return np.reshape(ndarr, target_shape)