Example #1
def masterConfig():
    import logging

    from tensorlog import bpcompiler
    from tensorlog import config
    from tensorlog import dataset
    from tensorlog import funs
    from tensorlog import learn
    from tensorlog import matrixdb
    from tensorlog import mutil
    from tensorlog import ops
    from tensorlog import program
    from tensorlog import xcomp

    master = config.Config()
    master.bpcompiler = bpcompiler.conf
    master.help.bpcompiler = 'config for tensorlog.bpcompiler'
    master.dataset = dataset.conf
    master.help.dataset = 'config for tensorlog.dataset'
    master.funs = funs.conf
    master.help.funs = 'config for tensorlog.funs'
    master.learn = learn.conf
    master.help.learn = 'config for tensorlog.learn'
    master.matrixdb = matrixdb.conf
    master.help.matrixdb = 'config for tensorlog.matrixdb'
    master.mutil = mutil.conf
    master.help.mutil = 'config for tensorlog.mutil'
    master.ops = ops.conf
    master.help.ops = 'config for tensorlog.ops'
    master.program = program.conf
    master.help.program = 'config for tensorlog.program'
    master.xcomp = xcomp.conf
    master.help.xcomp = 'config for tensorlog.xcomp'
    try:
        from tensorlog import debug
        master.debug = debug.conf
        master.help.debug = 'config for tensorlog.debug'
    except ImportError:
        logging.warning('debug module not imported')
    return master
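
# Minimal usage sketch (hypothetical driver code, not part of the original
# example): masterConfig() returns one Config object whose attributes are
# the per-module Config objects registered above, so any option can be
# read or overridden through plain attribute access.
if __name__ == "__main__":
    master = masterConfig()
    # 'trace' is assumed here purely for illustration; substitute any
    # option actually defined on tensorlog.ops.conf
    master.ops.trace = True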
Example #2
import time
import math
import numpy as NP
import scipy.sparse as SS
import collections

from tensorlog import config
from tensorlog import dataset
from tensorlog import declare
from tensorlog import funs
from tensorlog import mutil
from tensorlog import opfunutil

# clip to avoid exploding gradients

conf = config.Config()
conf.minGradient = -100;   conf.help.minGradient = "Clip gradients smaller than this to minGradient"
conf.maxGradient = +100;   conf.help.maxGradient = "Clip gradients larger than this to maxGradient"
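
# A hypothetical helper (not in the original module) showing the usual way
# these two options are applied: clamp every entry of a dense gradient into
# [conf.minGradient, conf.maxGradient] with numpy's clip.
def _clipGradient(grad):
    """Return grad with entries outside [minGradient, maxGradient] clamped."""
    return NP.clip(grad, conf.minGradient, conf.maxGradient)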

##############################################################################
# helper classes
##############################################################################

class GradAccumulator(object):
    """Accumulate the sum gradients for perhaps many parameters, indexing
    them by parameter name.  Also maintains 'counter' statistics,
    which are simply floats indexed by a counter name.  Counters are
    mostly updated by the Tracer functions.  The only required counter
    is the counter 'n', which is the size of the minibatch the
    gradient was computed on.
    """