def __init__(self, data, batch_size, shuffle):
    """Set up an endless minibatch iterator over aligned arrays.

    Args:
        data: list of numpy.arrays; every array must have the same
            length along its leading dimension.
        batch_size: number of data points returned per minibatch; must
            be a positive integer.
        shuffle: if True, the data is visited in a random order that
            differs on every pass.

    Raises:
        ValueError: if batch_size is not a positive int, or if the
            arrays in data disagree on their leading dimension.
    """
    if not isinstance(batch_size, int) or batch_size < 1:
        raise ValueError(
            "batch_size should be a positive integer, %r given" % batch_size)
    self._data = data
    self._batch_size = batch_size
    self._shuffle = shuffle
    # Total number of data points; established by the first array and
    # checked against every subsequent one.
    self._N = None
    for datum in self._data:
        n = len(datum)
        if self._N is None:
            self._N = n
        elif self._N != n:
            raise ValueError(
                "data have different leading dimensions: %d and %d" %
                (self._N, n))
    if self._shuffle:
        # Seeded RNG so shuffled iteration order is reproducible.
        self._fixed_random = util.FixedRandom()
    # Pending iteration order; refilled once a pass is exhausted.
    self._indices = np.array([], dtype=int)
def initializeParticles(self):
    """Initialize particles to be consistent with a uniform prior.

    Each particle is a tuple of ghost positions — one joint assignment
    of all ghosts to legal board positions, where ghosts may occupy the
    same square. The full set of assignments is the Cartesian product of
    self.legalPositions with itself self.numGhosts times.

    self.particles must be a plain list of (unweighted) position tuples;
    a Counter/dict of weighted positions is incorrect here.
    """
    "*** YOUR CODE HERE ***"
    # list(...) instead of an identity comprehension, and a name that
    # does not shadow itertools.product.
    assignments = list(
        itertools.product(self.legalPositions, repeat=self.numGhosts))
    # itertools.product yields tuples in a fixed lexicographic order;
    # shuffle (with the fixed seed) so particles are spread evenly
    # across the board.
    util.FixedRandom().random.shuffle(assignments)
    # Cycle through the shuffled assignments until numParticles are drawn.
    self.particles = [
        assignments[i % len(assignments)]
        for i in range(self.numParticles)
    ]
def __init__(self, input_ph=None, prediction_tensor=None,
             max_eval_batch_size=500):
    """Store the prediction graph handles and evaluation settings.

    Args:
        input_ph: input placeholder tensor (presumably fed with data at
            evaluation time — confirm against callers).
        prediction_tensor: tensor holding the model's predictions.
        max_eval_batch_size: largest number of points evaluated in one
            batch.
    """
    self.input_ph = input_ph
    self.prediction_tensor = prediction_tensor
    self._max_eval_batch_size = max_eval_batch_size
    # Parameter variables keyed by name, preserving insertion order.
    self._param_vars = OrderedDict()
    # Seeded RNG so weights are initialized deterministically.
    self._fixed_random = util.FixedRandom()
def solveProblem(self, moduleDict):
    """Run the configured inference algorithm on the stored problem.

    Args:
        moduleDict: mapping of module names to loaded modules; the
            'inference' module must expose the per-algorithm helper
            factories used below.

    Returns:
        The factor computed by the student's inference routine.

    Raises:
        ValueError: if self.alg is not one of the supported algorithms.
    """
    inferenceModule = moduleDict['inference']
    if self.alg == 'inferenceByVariableElimination':
        # Exact inference; the call-tracking wrapper records the calls
        # the student code makes into self.callTrackingList.
        studentComputationWithCallTracking = getattr(
            inferenceModule, self.alg + 'WithCallTracking')
        studentComputation = studentComputationWithCallTracking(
            self.callTrackingList)
        solvedFactor = studentComputation(
            self.problemBayesNet, self.queryVariables, self.evidenceDict,
            self.variableEliminationOrder)
    elif self.alg == 'inferenceByLikelihoodWeightingSampling':
        # Sampling-based inference; inject a fixed random source so the
        # sampled result is reproducible.
        randomSource = util.FixedRandom().random
        studentComputationRandomSource = getattr(
            inferenceModule, self.alg + 'RandomSource')
        studentComputation = studentComputationRandomSource(randomSource)
        solvedFactor = studentComputation(
            self.problemBayesNet, self.queryVariables, self.evidenceDict,
            self.numSamples)
    else:
        # Previously an unrecognized algorithm fell through to the
        # return and raised UnboundLocalError; fail loudly instead.
        raise ValueError("unsupported inference algorithm: %r" % self.alg)
    return solvedFactor
def get_fixed_random():
    """Return the shared util.FixedRandom instance, creating it lazily."""
    global _RANDOM
    # Guard clause: reuse the singleton once it exists.
    if _RANDOM is not None:
        return _RANDOM
    _RANDOM = util.FixedRandom()
    return _RANDOM