Example #1
import theano


def get_profiler(profiling, lang='c'):
    """Figure out which profiling mode/linker to use."""
    profmode = None
    if profiling:
        if lang == 'c':
            # Profile with the op-wise C linker.
            profmode = theano.ProfileMode(optimizer='fast_run',
                                          linker=theano.gof.OpWiseCLinker())
        elif lang == 'py':
            # Profile with the pure-Python linker.
            profmode = theano.ProfileMode(optimizer='fast_run',
                                          linker=theano.gof.PerformLinker())
        else:
            raise ValueError("lang must be 'c' or 'py', got %r" % (lang,))
    return profmode
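
# Hedged usage sketch (not from the original module): the toy graph below is
# an illustrative assumption showing how the mode returned by get_profiler()
# could be passed to theano.function, and how the per-op timings it collects
# could then be printed.
import theano.tensor as T

x = T.dvector('x')
y = (x ** 2).sum()

profmode = get_profiler(profiling=True, lang='c')
f = theano.function([x], y, mode=profmode)
f([1.0, 2.0, 3.0])
if profmode is not None:
    profmode.print_summary()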
Example #2
def test_module_advanced_example():
    # A plain profiling mode; immediately replaced below by the verbose
    # PrintEverythingMode defined elsewhere in this test module.
    # profmode = theano.ProfileMode(optimizer='fast_run',
    #                               linker=theano.gof.OpWiseCLinker())
    profmode = PrintEverythingMode(theano.gof.OpWiseCLinker(), 'fast_run')

    data_x = N.random.randn(4, 10)
    data_y = [[int(x)] for x in (N.random.randn(4) > 0)]

    model = SpecifiedRegressionLayer(regularize=False).make(input_size=10,
                                                            target_size=1,
                                                            stepsize=0.1,
                                                            mode=profmode)

    for i in range(1000):
        xe, gw, gb, ga = model.update(data_x, data_y)
        if i % 100 == 0:
            print(i, xe)
        # for inputs, targets in my_training_set():
        #     print("cost:", model.update(inputs, targets))

    print("final weights:", model.w)
    print("final biases:", model.b)

    profmode.print_summary()
Example #3
    def __init__(self,
                 network,
                 profile=False,
                 lr=0.3,
                 L1=0,
                 momentum=0.4,
                 L2=0.00005,
                 compile=True):
        self.profile = profile

        self.network = network

        self.lr = np.float32(lr)
        self.momentum = np.float32(momentum)

        self.params = network.params

        # Total cost = network cost + L2 and L1 penalties on all parameters.
        self.cost = network.cost + np.float32(L2) * T.sum([
            (p**2).sum() for p in self.params
        ]) + np.float32(L1) * T.sum([abs(p).sum() for p in self.params])
        self.grads = T.grad(self.cost, self.params)

        # Expressions (and their names) evaluated during training.
        self.cost_exprs = [self.cost, network.cost]
        self.cost_names = ['L2 cost', 'Network cost']
        for name, monitor in network.monitors:
            self.cost_names.append(name)
            self.cost_exprs.append(monitor)

        self.shapes = [p.get_value(borrow=True).shape for p in self.params]
        self.counts = [np.prod(s) for s in self.shapes]
        self.starts = np.cumsum([0] + self.counts)[:-1]
        self.dtype = self.params[0].get_value().dtype

        self.best_cost = 1e100
        self.best_iter = 0
        self.best_params = [p.get_value().copy() for p in self.params]

        mode = "FAST_RUN"
        if self.profile:
            mode = theano.ProfileMode(optimizer='fast_run',
                                      linker=theano.gof.OpWiseCLinker())

        if compile:
            self.f_eval = theano.function(network.inputs, network.output)

            self.f_learn = theano.function(network.inputs + [network.target],
                                           self.cost_exprs,
                                           updates=list(
                                               self.learning_updates()),
                                           mode=mode)

            self.f_cost = theano.function(network.inputs + [network.target],
                                          self.cost_exprs)
Example #4
import numpy as np

import gzip, cPickle

import matplotlib.pyplot as plt

plt.ion()

from utils import generate_data, get_context

# DEBUGGING

import theano
from theano import ProfileMode

mode = theano.ProfileMode(optimizer='fast_run',
                          linker=theano.gof.OpWiseCLinker())
# mode = theano.compile.DebugMode(check_py_code=False, require_matching_strides=False)
# mode = None

# load data
print ">> Loading dataset..."

f = gzip.open('datasets/mnist.pkl.gz', 'rb')
train_set, valid_set, test_set = cPickle.load(f)
f.close()

train_set_x, train_set_y = train_set
valid_set_x, valid_set_y = valid_set
test_set_x, test_set_y = test_set

# TODO DEBUG
def get_profile_mode():
    profmode = theano.ProfileMode(optimizer='fast_run',
                                  linker=theano.gof.OpWiseCLinker())
    return profmode
Example #6
import numpy as np
import theano
import theano.tensor as T
from nn.math import softmax, sigmoid, make_onehot
from theano import pp
import pandas as pd
from data_utils import utils as du
from theano import ProfileMode
import theano.tensor.nnet as nn
from misc import *
from nn.base import NNBase

profmode = theano.ProfileMode(optimizer='fast_compile')

dtype = theano.config.floatX
N_ASPECTS = 5
N_SENTIMENTS = 3

#n_hidden = 50

# Squashing the gate pre-activations should yield values between 0 and 1,
# so we use the logistic (sigmoid) function.
sigma = lambda x: 1 / (1 + T.exp(-x))

# For the other activations we use tanh.
act = T.tanh


def one_lstm_step(seq, seq2, h_tm1, h_tm2, c_tm1, c_tm2, coeff, coeff_rev,
                  W_hy, b_y, W_xi, W_xi2, U_hi, U_hi2, b_i, b_i2, W_xf, W_xf2,
                  U_hf, U_hf2, b_f, b_f2, W_xc, W_xc2, U_hc, U_hc2, b_c, b_c2,