Example #1
File: Tables.py  Project: amgregoi/School
def proveSequent(C6, bfactlist, bgoal) :
    """attempts to prove proposition  bgoal  from  sequence of propositions
       bfactlist.  
  
       params: C6; bfactlist - sequence of btrees; bgoal - a btree

       Approach:  places  bgoal into cnf; 
                   places conjunction of bfactlist into dnf;
                  for each subgoal in cnf, for each sublist in dnf,
                    attempts to prove  sublist |- subgoal.
                  If all are accomplished, then goal is proved.
       returns True, if bgoal proved; returns False otherwise
    """
    #if bgoal[0] == "forall" :
    #    success = proveUniversal(C6, bfactlist, bgoal)  # sorry...
    #    return success

    # place  bfactlist  into  dnf to compute all possible proof contexts:
    bigfact = NF.conjunctionOf(bfactlist)
    dfacts = NF.dnf(bigfact)   # all possible cases in bfactlist

    subgoals = NF.cnf(bgoal)   # must prove all subgoal conjuncts

    for subg in subgoals :  # must prove each subgoal,  subg
        if ["True"] in subg :   # each subg is a disjunct...
            pass  # it's proved
        else :
            for premiselist in dfacts :  # must prove each  premiselist |- subg
                #print "READY TO PROVE:", premiselist
                #print "|-", subg
                #print
                if ["False"] in premiselist :
                    pass # proved -- the premises are self-contradictory
                else : # rats, time to do a proof:
                    # since subg is a disjunction, use  premiselist
                    # to try to prove _one_ of the disjuncts:
                    proved_subg = False
                    for prim in subg :
                        proved_subg = verifyRelation(C6, premiselist, prim)
                        if proved_subg : break  
                    if not(proved_subg) : return False  # we failed

    return True  # we made it this far; it means all subgoals were proved
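
# A minimal, self-contained sketch (not part of the project) of the CNF/DNF
# strategy described in the docstring above.  Formulas are pre-flattened by
# hand: goal_cnf is a list of clauses (each a list of disjoined literals) and
# facts_dnf is a list of cases (each a list of conjoined literals).  The
# verifyRelation call is replaced by simple literal membership, so this only
# illustrates the control flow, not the relational reasoning:
def prove_sequent_sketch(facts_dnf, goal_cnf):
    for clause in goal_cnf:            # every goal clause must be proved...
        for case in facts_dnf:         # ...from every possible fact case
            if "False" in case:
                continue               # contradictory premises prove anything
            if not any(lit == "True" or lit in case for lit in clause):
                return False           # no literal of the clause holds in this case
    return True

# prove_sequent_sketch([["p", "q"], ["r"]], [["p", "r"]])   # True:  (p and q) or r |- p or r
# prove_sequent_sketch([["p", "q"], ["r"]], [["q"]])        # False: the r-case cannot prove q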
Example #2
File: Tables.py  Project: amgregoi/School
def insertRelation(C6, btree):
    """extracts all [RELOP, e1, e2]-relations asserted within  btree 
       and places them into the  rels  table in  C6

       params: C6 table  and  btree 
    """

    #sigma = C6["store"]
    # eval all facts in the bfactlist:
    cnffact = NF.cnf(btree)  # returns a list of disjunctive clauses
    # save any disjunctive clause that is exactly [[RELOP, b1, b2]]:
    for clause in cnffact :
        if len(clause) == 1  and  clause[0][0] in RELOPS :
            relop = clause[0][0]
            pe1 = PE.evall(C6, clause[0][1])
            pe2 = PE.evall(C6, clause[0][2])
            if pe1 != {} and pe2 != {} :
                newrel = [relop, pe1, pe2]
                if newrel not in C6["rels"] :
                    C6["rels"] = [newrel] + C6["rels"]  # new fact at front
#Define sigmoidal activation function and inverse synaptic time constant
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
fmax=1
v0=1.8
varsigma=0.56
act_fun=ActivationFunction.ActivationFunction(v0,fmax,varsigma)
#inverse synaptic time constant
zeta=100
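#Hedged sketch: the ActivationFunction class is assumed (not confirmed here) to
#implement the standard sigmoidal firing-rate curve used in neural field models,
#f(v) = fmax / (1 + exp(varsigma*(v0 - v))), with firing threshold v0, slope
#varsigma and maximum firing rate fmax.  A plain-NumPy version with the values above:
import numpy as np
def sigmoid_activation(v, v0=1.8, fmax=1.0, varsigma=0.56):
    return fmax / (1.0 + np.exp(varsigma * (v0 - v)))
#sigmoid_activation(1.8) == 0.5*fmax; the curve saturates towards fmax for large v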
#define field initialisation and number of iterations for estimation
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
mean=[0]*len(Phi)
P0=10*pb.eye(len(mean))
x0=pb.multivariate_normal(mean,P0,[1]).T
number_of_iterations=10
#ignore the first 100 observations, allowing the model's initial transients to die out
First_n_observations=100
#populate the model
NF_model=NF(NF_Connectivity_kernel,sensor_kernel,obs_locns,gamma,gamma_weight,Sigma_varepsilon,act_fun,zeta,Ts,field_space_x_y,spacestep)
IDE_model=IDE(IDE_Connectivity_kernel,field,sensor_kernel,obs_locns,gamma,gamma_weight,Sigma_varepsilon,act_fun,x0,P0,zeta,Ts,field_space_x_y,spacestep)
#generate Neural Field model
NF_model.gen_ssmodel()
V,Y=NF_model.simulate(T)
#generate the reduced model (state space model)
IDE_model.gen_ssmodel()
#estimate the states, the connectivity kernel parameters and the synaptic dynamics
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ps_estimate=para_state_estimation(IDE_model)
ps_estimate.itrerative_state_parameter_estimation(Y[First_n_observations:],number_of_iterations)

sensor_center=pb.matrix([[0],[0]])
sensor_width=0.9**2 
sensor_kernel=basis(sensor_center,sensor_width,dimension)
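#Hedged sketch: basis is assumed (not confirmed here) to build an isotropic
#Gaussian basis function centred at sensor_center with squared width 0.9**2,
#roughly b(s) = exp(-(s - c)^T (s - c) / width).  A plain-NumPy illustration:
import numpy as np
def gaussian_basis(s, center, width):
    d = np.asarray(s, dtype=float).ravel() - np.asarray(center, dtype=float).ravel()
    return np.exp(-np.dot(d, d) / width)
#gaussian_basis([0, 0], [0, 0], 0.9**2) == 1.0 at the centre; the value decays with distance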

#Define sigmoidal activation function and inverse synaptic time constant
fmax=10
v0=2
varsigma=0.8
act_fun=ActivationFunction(v0,fmax,varsigma)
#inverse synaptic time constant
zeta=100
#define field initialisation and number of iterations for estimation
mean=[0]*len(Phi)
P0=10*pb.eye(len(mean))
x0=pb.matrix(pb.multivariate_normal(mean,P0,[1])).T
number_of_iterations=10
#ignore the first 100 observations, allowing the model's initial transients to die out
First_n_observations=100

#populate the model
NF_model=NF(NF_Connectivity_kernel,sensor_kernel,obs_locns,observation_locs_mm,gamma,gamma_weight,Sigma_varepsilon,act_fun,zeta,Ts,field_space,spacestep)
IDE_model=IDE(IDE_Connectivity_kernel,IDE_field,sensor_kernel,obs_locns,gamma,gamma_weight,Sigma_varepsilon,act_fun,x0,P0,zeta,Ts,field_space,spacestep)
#generate the Neural Field model
NF_model.gen_ssmodel()
#V,Y=NF_model.simulate(T)
#generate the reduced model (state space model)
IDE_model.gen_ssmodel()
#estimate the states, the connectivity kernel parameters and the synaptic dynamics
#ps_estimate=para_state_estimation(IDE_model)
#ps_estimate.itrerative_state_parameter_estimation(Y[First_n_observations:],number_of_iterations)