Example #1
    def makeConsistent(self):
        weight = 0
        for node, val in self.unpropagatedObservations.iteritems():
            appNode = self.getConstrainableNode(node)
            # print "PROPAGATE", node, appNode
            scaffold = constructScaffold(self, [OrderedSet([appNode])])
            rhoWeight, _ = detachAndExtract(self, scaffold)
            scaffold.lkernels[appNode] = DeterministicLKernel(
                self.pspAt(appNode), val)
            xiWeight = regenAndAttach(self, scaffold, False, OmegaDB(),
                                      OrderedDict())
            # If xiWeight is -inf, we are in an impossible state, but that
            # might be ok.  Finish constraining anyway, to avoid downstream
            # invariant violations.
            node.observe(val)
            constrain(self, appNode, node.observedValue)
            weight += xiWeight
            weight -= rhoWeight
        self.unpropagatedObservations.clear()
        if not math.isnan(weight):
            # Note: +inf weight is possible at spikes in density against
            # Lebesgue measure.
            return weight
        else:
            # How could we get a NaN here?  If one observation made the
            # state inconsistent, the rhoWeight of another might conceivably
            # be infinite, possibly leading to a NaN weight.  We normalize
            # these to indicate that the resulting state is impossible.
            return float("-inf")
Example #2
    def likelihood_weight(self):
        # TODO This is a different control path from primitive_infer
        # because it needs to return the weight, and primitive_infer
        # didn't return values when this was written.
        scaffold = BlockScaffoldIndexer("default", "all").sampleIndex(self)
        (_rhoWeight, rhoDB) = detachAndExtract(self, scaffold)
        xiWeight = regenAndAttach(self, scaffold, False, rhoDB, OrderedDict())
        # Always "accept"
        return xiWeight
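
The value returned here is a single log importance weight. When likelihood weighting is run many times, such weights are commonly combined into a marginal-likelihood estimate with a numerically stable log-mean-exp; a minimal sketch, with a helper name of my own (not part of this codebase):

import math

def log_mean_exp(log_weights):
    # Shift by the max before exponentiating to avoid overflow.
    m = max(log_weights)
    return m + math.log(
        sum(math.exp(w - m) for w in log_weights) / len(log_weights))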
Example #3
    def detach_for_proposal(self, scaffold):
        pnodes = scaffold.getPrincipalNodes()
        currentValues = infer.getCurrentValues(self, pnodes)
        infer.registerDeterministicLKernels(self, scaffold, pnodes,
                                            currentValues)
        rhoWeight, rhoDB = detachAndExtract(self, scaffold)
        # De-mutate the scaffold in case it is used for subsequent operations.
        infer.unregisterDeterministicLKernels(self, scaffold, pnodes)
        return rhoWeight, rhoDB
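
For context, the rhoWeight computed here supplies the reverse half of a Metropolis-Hastings log acceptance ratio. A generic sketch of the accept test such a weight typically feeds into (illustrative only, not this codebase's operator code):

import math
import random

def mh_accept(xiWeight, rhoWeight):
    # Accept with probability min(1, exp(xiWeight - rhoWeight)).
    alpha = xiWeight - rhoWeight
    return alpha >= 0 or random.random() < math.exp(alpha)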
Example #4
def log_likelihood_at(trace, args):
  (scaffolders, transitions, _) = dispatch_arguments(trace, ("bogon",) + args)
  if transitions > 0:
    # Don't want to think about the interaction between 'each' and the
    # value this is supposed to return.
    assert len(scaffolders) == 1, "log_likelihood_at doesn't support 'each'"
    scaffold = scaffolders[0].sampleIndex(trace)
    (_rhoWeight, rhoDB) = detachAndExtract(trace, scaffold)
    xiWeight = regenAndAttach(trace, scaffold, True, rhoDB, OrderedDict())
    # Old state restored, don't need to do anything else
    return xiWeight
  else:
    return 0.0
Example #5
def log_joint_at(trace, args):
  (scaffolders, transitions, _) = dispatch_arguments(trace, ("bogon",) + args)
  if transitions > 0:
    # Don't want to think about the interaction between 'each' and the
    # value this is supposed to return.
    assert len(scaffolders) == 1, "log_joint_at doesn't support 'each'"
    scaffold = scaffolders[0].sampleIndex(trace)
    pnodes = scaffold.getPrincipalNodes()
    currentValues = getCurrentValues(trace, pnodes)
    registerDeterministicLKernels(trace, scaffold, pnodes, currentValues,
      unconditional=True)
    (_rhoWeight, rhoDB) = detachAndExtract(trace, scaffold)
    xiWeight = regenAndAttach(trace, scaffold, True, rhoDB, OrderedDict())
    # Old state restored, don't need to do anything else
    return xiWeight
  else:
    return 0.0
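
The difference from log_likelihood_at in Example #4 is the unconditional=True kernel registration, which makes the returned weight also count the density of the principal nodes' current values, i.e. a log joint rather than a log likelihood. A toy analogue in plain Python (the model and numbers are made up for illustration):

import math

def log_normal(z, mu, sigma):
    # Log density of Normal(mu, sigma) at z.
    return -0.5 * math.log(2 * math.pi * sigma ** 2) \
        - (z - mu) ** 2 / (2 * sigma ** 2)

x, y = 0.3, 1.2                                 # latent value and observation
log_like = log_normal(y, x, 1.0)                # analogue of log_likelihood_at
log_joint = log_normal(x, 0.0, 1.0) + log_like  # analogue of log_joint_at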
Example #6
    def __call__(self, trace, scaffolder):
        # CONSIDER how to unify this code with EnumerativeGibbsOperator.
        # Problems:
        # - a torus cannot be copied by copy_trace
        # - a particle cannot be copied by copy_trace either
        # - copy_trace undoes incorporation (on Lite traces)

        scaffold = scaffolder.sampleIndex(trace)
        assertTrace(trace, scaffold)

        pnodes = scaffold.getPrincipalNodes()
        allSetsOfValues = \
            getCartesianProductOfEnumeratedValuesWithAddresses(trace, pnodes)

        xiWeights = []
        xiParticles = []

        for newValuesWithAddresses in allSetsOfValues:
            xiParticle = self.copy_trace(trace)
            # CONSIDER what to do with the weight from this
            xiParticle.makeConsistent()
            # Impossible original state is probably fine
            # ASSUME the scaffolder is deterministic. Have to make the
            # scaffold again b/c detach mutates it, and b/c it may not work
            # across copies of the trace.
            scaffold = scaffolder.sampleIndex(xiParticle)
            (rhoWeight, _) = detachAndExtract(xiParticle, scaffold)
            assertTorus(scaffold)
            registerDeterministicLKernelsByAddress(xiParticle, scaffold,
                                                   newValuesWithAddresses)
            xiWeight = regenAndAttach(xiParticle, scaffold, False, OmegaDB(),
                                      OrderedDict())
            xiParticles.append(xiParticle)
            # CONSIDER What to do with the rhoWeight.  Subtract off the
            # likelihood?  Subtract off the prior and the likelihood?  Do
            # nothing?  Subtracting off the likelihood makes
            # hmm-approx-filter.vnt from ppaml-cps/cp4/p3_hmm be
            # deterministic (except roundoff effects), but that may be an
            # artifact of the way that program is written.
            xiWeights.append(xiWeight - rhoWeight)
        return (xiParticles, xiWeights)
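
A caller presumably turns these log weights into a categorical choice among the particles. A numerically stable generic sketch (the helper name is mine):

import math
import random

def sample_index_by_log_weight(log_weights):
    # Exponentiate relative to the max so large-magnitude logs don't overflow.
    m = max(log_weights)
    probs = [math.exp(w - m) for w in log_weights]
    r = random.uniform(0, sum(probs))
    acc = 0.0
    for i, p in enumerate(probs):
        acc += p
        if r <= acc:
            return i
    return len(probs) - 1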
Example #7
    def compute_particles(self, trace, scaffold):
        assertTrace(trace, scaffold)

        pnodes = scaffold.getPrincipalNodes()
        currentValues = getCurrentValues(trace, pnodes)

        registerDeterministicLKernels(trace, scaffold, pnodes, currentValues)

        rhoWeight, self.rhoDB = detachAndExtract(trace, scaffold)
        xiWeights = []
        xiParticles = []

        allSetsOfValues = getCartesianProductOfEnumeratedValues(trace, pnodes)

        for newValues in allSetsOfValues:
            if newValues == currentValues:
                # If there are random choices downstream, keep their current values.
                # This follows the auxiliary variable method in Neal 2000,
                # "Markov Chain Sampling Methods for Dirichlet Process Models"
                # (Algorithm 8 with m = 1).
                # Otherwise, we may target the wrong stationary distribution.
                # See testEnumerativeGibbsBrushRandomness in
                # test/inference_language/test_enumerative_gibbs.py for an
                # example.
                shouldRestore = True
                omegaDB = self.rhoDB
            else:
                shouldRestore = False
                omegaDB = OmegaDB()
            xiParticle = self.copy_trace(trace)
            assertTorus(scaffold)
            registerDeterministicLKernels(trace, scaffold, pnodes, newValues)
            xiParticles.append(xiParticle)
            xiWeights.append(
                regenAndAttach(xiParticle, scaffold, shouldRestore, omegaDB,
                               OrderedDict()))
            # if shouldRestore:
            #   assert_almost_equal(xiWeights[-1], rhoWeight)
        return (xiParticles, xiWeights)
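
A hedged sketch of how such an operator's output might be consumed, reusing sample_index_by_log_weight from the sketch after Example #6 (the surrounding spellings are hypothetical):

particles, weights = operator.compute_particles(trace, scaffold)
chosen = sample_index_by_log_weight(weights)
trace = particles[chosen]  # adopt the selected particle as the new state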
Example #8
    def checkInvariants(self):
        # print "Begin invariant check"
        assert len(self.unpropagatedObservations) == 0, \
          "Don't checkInvariants with unpropagated observations"
        rcs = copy.copy(self.rcs)
        ccs = copy.copy(self.ccs)
        aes = copy.copy(self.aes)
        scopes = OrderedDict()
        for (scope_name, scope) in self.scopes.iteritems():
            new_scope = SamplableMap()
            for (block_name, block) in scope.iteritems():
                new_scope[block_name] = copy.copy(block)
            scopes[scope_name] = new_scope

        scaffold = BlockScaffoldIndexer("default", "all").sampleIndex(self)
        rhoWeight, rhoDB = detachAndExtract(self, scaffold)

        assert len(self.rcs) == 0, \
            "Global detach left random choices registered"
        # TODO What if an observed random choice had registered an
        # AEKernel?  Will that be left here?
        assert len(self.aes) == 0, "Global detach left AEKernels registered"
        assert len(self.scopes) == 1, \
            "Global detach left random choices in non-default scope %s" \
            % self.scopes
        assert len(self.scopes['default']) == 0, \
            "Global detach left random choices in default scope %s" \
            % self.scopes['default']

        xiWeight = regenAndAttach(self, scaffold, True, rhoDB, OrderedDict())

        # XXX Apparently detach/regen sometimes has the effect of changing
        # the order of rcs.
        assert set(rcs) == set(self.rcs), \
            "Global detach/restore changed the registered random choices " \
            "from %s to %s" % (rcs, self.rcs)
        assert ccs == self.ccs, \
            "Global detach/restore changed the registered constrained " \
            "choices from %s to %s" % (ccs, self.ccs)
        assert aes == self.aes, \
            "Global detach/restore changed the registered AEKernels " \
            "from %s to %s" % (aes, self.aes)

        for scope_name in OrderedSet(scopes.keys()).union(self.scopes.keys()):
            if scope_name in scopes and scope_name not in self.scopes:
                assert False, \
                    "Global detach/restore destroyed scope %s with blocks %s" \
                    % (scope_name, scopes[scope_name])
            if scope_name not in scopes and scope_name in self.scopes:
                assert False, \
                    "Global detach/restore created scope %s with blocks %s" \
                    % (scope_name, self.scopes[scope_name])
            scope = scopes[scope_name]
            new_scope = self.scopes[scope_name]
            for block_name in OrderedSet(scope.keys()).union(new_scope.keys()):
                if block_name in scope and block_name not in new_scope:
                    assert False, \
                        "Global detach/restore destroyed block %s, %s " \
                        "with nodes %s" % (scope_name, block_name,
                                           scope[block_name])
                if block_name not in scope and block_name in new_scope:
                    assert False, \
                        "Global detach/restore created block %s, %s " \
                        "with nodes %s" % (scope_name, block_name,
                                           new_scope[block_name])
                assert scope[block_name] == new_scope[block_name], \
                    "Global detach/restore changed the addresses in " \
                    "block %s, %s from %s to %s" \
                    % (scope_name, block_name, scope[block_name],
                       new_scope[block_name])

        assert_allclose(
            rhoWeight,
            xiWeight,
            err_msg="Global restore gave different weight from detach")
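
A hedged sketch of where a check like this fits: in a test or debugging harness, after observations have been propagated, since the first assertion refuses to run otherwise:

trace.makeConsistent()   # propagate pending observations first (see Example #1)
trace.checkInvariants()  # global detach + restore must round-trip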
Example #9
    def just_detach(self, scaffold):
        return detachAndExtract(self, scaffold)
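
A hedged sketch of the round trip this helper participates in, following the pattern of Examples #4, #5, and #8: a detach paired with a restoring regen should leave the trace unchanged and reproduce the weight up to floating-point error:

rhoWeight, rhoDB = trace.just_detach(scaffold)
xiWeight = regenAndAttach(trace, scaffold, True, rhoDB, OrderedDict())
assert abs(rhoWeight - xiWeight) < 1e-10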