Example #1
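    # Jython branch of an if/else that selects the Values implementation
    # (the guard condition is not shown in this excerpt): delegate to the
    # Java Values class and register the Python-side hooks with it.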
    from com.sri.ai.jspark.values import Values as _Values
    value_str = _Values.value_str
    append_value_str = _Values.append_value_str
    inverse_eval = _Values.inverse_eval
    setUnpickleFunctions = _Values.setUnpickleFunctions
    VALUE_CONSTRUCTOR = _Values.VALUE_CONSTRUCTOR
    # Set the reg_append_value_str and reg_inverse_eval functions
    import spark.internal.parse.set_methods as _sm
    _Values.setRegFunctions(_sm.reg_append_value_str, _sm.reg_inverse_eval)
    # set the methods for Python instances of Value
    from spark.internal.parse.value import Value as _Value
    def _Value_append_value_str(x, buf):
        return x.append_value_str(buf)
    def _Value_inverse_eval(x):
        return x.inverse_eval()
    _sm.set_methods(_Value, _Value_append_value_str, _Value_inverse_eval)
else:
    # Python implementation
    from spark.internal.parse.values_python import \
         Symbol, isSymbol, Variable, isVariable, Structure, isStructure, \
         value_str, append_value_str, inverse_eval, \
         setUnpickleFunctions, VALUE_CONSTRUCTOR
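
# Hypothetical usage of the names imported above (a sketch, not from the
# original source; both branches are assumed to expose the same API):
#
#     sym = Symbol("point")            # interned symbol
#     assert isSymbol(sym)
#     s = Structure(sym, [1, 2])       # functor plus argument list
#     assert isStructure(s)
#     text = value_str(s)              # render the value as SPARK text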



################################################################
# Specify functions for unpickling types that are not standard Python.
# These must all be top-level functions for pickling to work.
# For pickling to be compatible across implementations, these
# functions must be defined in the same module in each implementation.
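
# A minimal illustration (not part of the original listing) of the
# top-level requirement: pickle stores functions by module-qualified
# name, so only importable module-level functions round-trip.

import pickle

def top_level_hook(name):             # module-level: pickled by qualified name
    return name

pickle.dumps(top_level_hook)          # works; the stream records module + name

def outer():
    def nested_hook(name):            # local function: no importable name
        return name
    return pickle.dumps(nested_hook)  # fails to pickle when called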
Example #2
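        # Tail of what appears to be a Value subclass's
        # append_value_str(self, buf) method: emit a re-readable
        # (idObject <id>) term when persisting state, otherwise a
        # human-readable <category:name:id> tag.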
        idnum = self.objectIdSave()
        #NEWPM.recordLocation("Calling value_str")
        from spark.internal.init import is_persist_state
        if is_persist_state():
            return buf.append(",(idObject %d)" % idnum)
        return buf.append("<%s:%s:%d>" % (self.cvcategory,
                                          (self.cvname() or self.__class__.__name__),
                                          idnum))

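    # inverse_eval: build an (idObject <id>) term that presumably
    # re-resolves to this object by its saved id when evaluated.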
    def inverse_eval(self):
        idnum = self.objectIdSave()
        return SYM_idObject.structure(idnum)

################################################################
# Failures

from spark.internal.exception import Failure, deconstructFailure

def failure_append_value_str(failure, buf):
    buf.append(",")
    append_value_str(failure_inverse_eval(failure), buf)
    return buf

def failure_inverse_eval(failure):
    return Structure(Symbol("failure"),
                     [inverse_eval(x) for x in deconstructFailure(failure)])

set_methods(Failure,
            append_value_str=failure_append_value_str,
            inverse_eval=failure_inverse_eval)
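
# The same registration pattern, sketched for a hypothetical exception
# type (MyError and deconstructMyError are illustrative, not part of
# the SPARK source):
#
#     def myerror_inverse_eval(err):
#         return Structure(Symbol("myError"),
#                          [inverse_eval(x) for x in deconstructMyError(err)])
#
#     def myerror_append_value_str(err, buf):
#         buf.append(",")
#         append_value_str(myerror_inverse_eval(err), buf)
#         return buf
#
#     set_methods(MyError,
#                 append_value_str=myerror_append_value_str,
#                 inverse_eval=myerror_inverse_eval)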


###############################################################