Code Example #1
File: model.py Project: mansooru/daliy.practice.unet
    def build_model(self):
        self.X = tf.placeholder("float", [None, self.width, self.height])
        if self.is_train:
            self.trX, self.trY = read_mat(
                './data/' + self.data_set + '.mat', True)
            self.Y = tf.placeholder("float", [None, self.width, self.height])
        else:
            self.trX = read_mat('./data/' + self.test_set + '.mat', False)
        self.num_of_data = len(self.trX)

        if self.is_train:
            loss_tmp, grad_tmp = self.loss_and_grad()
            with tf.device('/cpu:0'):
                self.cost = tf.reduce_mean(loss_tmp)
                grad = average_gradients(grad_tmp)
                tf.summary.scalar("cost", self.cost)
        else:
            with tf.variable_scope(tf.get_variable_scope()):
                self.logit = self.inference(self.X)

        if self.is_train:
            # Build the train op only in training mode; `grad` is undefined
            # during inference and would raise a NameError otherwise.
            update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
            with tf.device('/cpu:0'):
                with tf.control_dependencies(update_ops):
                    self.train_op = self.optimizer.apply_gradients(grad)

        self.summary = tf.summary.merge_all()

        self.saver = tf.train.Saver()
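
Note: read_mat here is a project-local helper, not a library function; each project on this page ships its own. A minimal sketch of the idea, built on scipy.io.loadmat; the two-argument signature mirrors how these snippets call it, while the key names 'X' and 'Y' are hypothetical:

import scipy.io

def read_mat(path, with_labels):
    # Load a MATLAB .mat file into a dict of numpy arrays.
    data = scipy.io.loadmat(path)
    if with_labels:
        return data['X'], data['Y']  # hypothetical variable names inside the .mat
    return data['X']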
Code Example #2
File: tests.py Project: jvermander/machinelearning
def test5():
    print("\n\nTest 5 - Algorithm Tweaks (Bias & Variance)")
    print("Expected / Actual:")

    print("\nRegularized Linear Regression: ")
    X, y = ut.read_mat('mat/ex5data1.mat')
    X = ut.create_design(X)
    theta = np.array([1, 1])
    print("303.993 / ", alg.SSD(theta, X, y, 1))
    grad = alg.SSD_gradient(theta, X, y, 1)
    print("-15.30 / ", grad[0])
    print("598.250 / ", grad[1])

    print("\nLearning Curve:")
    raw = ut.read_mat_raw('mat/ex5data1.mat')
    X = raw['X']
    y = raw['y'].reshape(-1)

    Xval = raw['Xval']
    yval = raw['yval'].reshape(-1)
    print("Check plot")
    # pt.plot_learning_curve(ut.create_design(X), y, ut.create_design(Xval), yval, 0)

    print("\nFitting polynomial regression:")
    p = 8
    X_poly = ut.poly_features(X, p)
    X_poly, mu, sigma = ut.normalize_features(X_poly)
    X_poly = ut.create_design(X_poly)

    Xval = ut.poly_features(Xval, p)
    Xval -= mu
    Xval /= sigma
    Xval = ut.create_design(Xval)

    l = 0.01
    theta = alg.parametrize_linear(X_poly, y, l)

    print("Check plot, l =", l)
    pt.fit_plot(X, y, mu, sigma, theta, p)
    pt.plot_learning_curve(X_poly, y, Xval, yval, l)

    print("\nOptimize regularization:")
    print("Check plot")

    l = pt.plot_validation_curve(X_poly, y, Xval, yval)

    Xtest = raw['Xtest']
    ytest = raw['ytest'].reshape(-1)
    Xtest = ut.poly_features(Xtest, p)
    Xtest -= mu
    Xtest /= sigma
    Xtest = ut.create_design(Xtest)

    theta = alg.parametrize_linear(X_poly, y, l)
    print("3.8599 / ", alg.SSD(theta, Xtest, ytest, 0))

    print("\nRandomized learning curve:")
    print("Check plot")
    pt.plot_randomized_learning_curve(X_poly, y, Xval, yval, 0.01)
    return
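
The tests above lean on helpers from ut. A plausible sketch of ut.create_design, assuming it simply prepends a bias column of ones to form the design matrix (a guess based on usage, not the project's actual code):

import numpy as np

def create_design(X):
    # Assumes X has shape (m, n); the leading column of ones lets the
    # first parameter act as the intercept term.
    return np.hstack([np.ones((X.shape[0], 1)), X])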
Code Example #3
def bruteforce(slot_inicial, nslots):

    # Initialization and data import
    mat_file = utils.read_mat("final.mat")

    s2 = mat_file["s2"]
    P = mat_file["P"]
    PI = mat_file["PI"]

    n_maquinas = mat_file["n_maquinas"]

    pesos = mat_file["pesos"]

    x_teste = mat_file["x_teste"]
    Consumo_total = x_teste[slot_inicial:slot_inicial + nslots]

    [lista, combinacoes] = utils.init_markov(nslots, n_maquinas, pesos)

    # The search loop is an empty stub in the original snippet; break
    # immediately so it terminates instead of spinning forever.
    while True:
        cost = 0
        break

    return
Code Example #4
File: tests.py Project: jvermander/machinelearning
def test3():
    print("\n\nTest 3 - Multiclass Logistic Regression & Neural Networks")
    print("Expected / Actual:")

    print("\nMulticlass LR:")
    X, y = ut.read_mat('mat/ex3data1.mat')
    y[y == 10] = 0  # remap label 10 back to digit 0

    theta = np.array([-2, -1, 1, 2])
    X_t = ut.create_design(np.arange(1, 16, 1).reshape(3, 5).T / 10)
    y_t = np.array(([1, 0, 1, 0, 1]))
    l_t = 3
    cost = alg.cross_ent(theta, X_t, y_t, l_t)
    grad = alg.cross_ent_gradient(theta, X_t, y_t, l_t)
    print("2.534819 / %f" % cost)
    print("0.146561 / %f" % grad[0])
    print("-0.548558 / %f" % grad[1])
    print("0.724722 / %f" % grad[2])
    print("1.398003 / %f" % grad[3])

    degree = 10
    l = 0.1
    theta = alg.multiclass_logreg(X, y, l, degree)
    p = ut.multiclass_prediction(theta, X)
    print(">= 95 / %f" % (np.mean(p == y) * 100))

    print("\nNeural Networks (Forward Propagation): ")
    data = ut.read_mat_raw('mat/ex3weights.mat')
    theta1 = data['Theta1']
    theta2 = data['Theta2']

    X, y = ut.read_mat('mat/ex3data1.mat')
    p = test3neuralnet(theta1, theta2, X)
    print("Predicted: ", p)
    print("Actual: ", y)
    print("Expected vs. Actual Accuracy: 97.52 / %f" % (np.mean(p == y) * 100))
    return
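
For reference, ut.multiclass_prediction presumably picks, for each example, the class whose one-vs-all classifier scores highest. A sketch under that assumption; the shapes are guesses from usage, not the project's actual code:

import numpy as np

def multiclass_prediction(theta, X):
    # theta: (num_classes, n_features); X: (m, n_features).
    # The argmax over raw scores equals the argmax over sigmoid scores,
    # since the sigmoid is monotonic.
    return np.argmax(X @ theta.T, axis=1)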
Code Example #5
def main(configs):

    # read mat data from file
    input_data = utl.read_mat(configs['DATA_PATH'])

    # data preprocessing
    input_data, proc_mask = utl.data_preprocessing(
        input_data, configs['MONTH_SELECTION'])

    # generate feature vectors
    feats, labels = generate_features(input_data)

    # backup feats and labels
    feats_backup = feats
    labels_backup = labels

    # weather classification
    feats, labels, masks = weather_classification(feats, configs['MODE'],
                                                  labels)

    if configs['MODE'] == 'grid search':

        grid_search_wrapper(feats, labels, configs)

    elif configs['MODE'] == 'holdout training':

        holdout_train_wrapper(feats, labels, configs, masks)

    elif configs['MODE'] == 'weather prediction':

        preds = weather_prediction(feats, labels, configs, masks)

        # compare predicted irradiance drop
        utl.plot_irradiance_drop(feats_backup[:, 5] - preds,
                                 feats_backup[:, 5] - labels_backup)
        utl.plot_irradiance_drop(preds, labels_backup)
        # regroup the data
        preds_cube, labels_cube = utl.regroup_data(preds, labels_backup,
                                                   proc_mask)
        utl.compare_daily_mean(preds_cube, labels_cube, sensor_selection=24)
        plt.show()
Code Example #6
File: tests.py Project: jvermander/machinelearning
def test4():
    print("\n\nTest 4 - Neural Networks")
    print("Expected / Actual:")

    print("\nForward Propagation & Cost: ")
    X, y = ut.read_mat('mat/ex4data1.mat')
    data = io.loadmat('mat/ex4weights.mat')
    w1 = data['Theta1'][:, 1:]
    b1 = data['Theta1'][:, 0]
    w2 = data['Theta2'][:, 1:]
    b2 = data['Theta2'][:, 0]

    layers = np.array([400, 25, 10])
    y = nn.Neural.binarize_ground_truth(y, 10)
    net = nn.Neural(layers, X, y)
    net.weight = np.concatenate([w1.flatten(), w2.flatten()])
    net.bias = np.concatenate([b1.flatten(), b2.flatten()])
    result = net.fp().T

    print("0.00011266 / %.8f" % result[0, 0])
    print("0.9907 / %.4f" % result[2665, 4])
    print("0.000047972 / %.9f" % result[321, 0])
    print("0.0819 / %.4f" % result[-1, -1])
    print("0.287629 / %.6f" % net.cost())

    print("\nRegularized Cost:")
    net.l = 1
    print("0.383770 / %.6f" % net.cost())

    print("\nSigmoid Derivative:")
    print("0.25 / ", net.sigmoid_deriv(net.sigmoid(0)))

    net.l = 0
    print("\nBackpropagation: ")
    grad = net.bp()
    print("(10285,) /", grad.shape)
    print("0.0000015972 /%.10f" % grad[5])
    print("0.00015668 / %.8f" % grad[666])
    print("-0.0011 / %.4f" % grad[-(net.bias.shape[0] + 55)])
    print("0.00077333 / %.8f" % grad[-(net.bias.shape[0] + 1)])

    print("0.000061871 / %.9f" % grad[-(net.bias.shape[0])])
    print("-0.000037065 / %.9f" % grad[-(net.bias.shape[0] - 15)])
    print("0.00024755 / %.8f" % grad[-1])
    print("< 1e-9 / ", nn.Neural.debug_bp())

    print("\nBackpropagation, with regularization:")
    net.l = 3
    print("0.576051 / %f" % net.binary_cross_entropy())
    net.fp()
    net.bp()
    print("< 1e-9 /", nn.Neural.debug_bp())

    print("\nGradient descent: ")
    net = nn.Neural(layers, X, y)
    net.l = 300
    net.parametrize(1000)
    p = net.predict(X)
    print("Training accuracy: ", np.mean(p == y) * 100)

    return
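
nn.Neural.binarize_ground_truth is presumably a one-hot encoder for the 1-to-10 digit labels stored in ex4data1.mat. A sketch under that assumption (the 1-based labelling follows the original MATLAB exercises):

import numpy as np

def binarize_ground_truth(y, num_classes):
    # Map integer labels 1..num_classes to one-hot rows.
    y = np.asarray(y).ravel().astype(int)
    Y = np.zeros((y.shape[0], num_classes))
    Y[np.arange(y.shape[0]), y - 1] = 1
    return Y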
Code Example #7
import tensorflow as tf
import numpy as np, sys, os
from sklearn.utils import shuffle
from scipy.ndimage import imread  # removed in newer SciPy; imageio.imread is the usual replacement
from scipy.misc import imresize  # removed in newer SciPy; PIL.Image.resize is the usual replacement
import matplotlib.pyplot as plt
from utils import read_mat

np.random.seed(678)
tf.set_random_seed(1400)

# load the dataset and min-max normalize images and labels to [0, 1]
train_X, train_label = read_mat('./data/data_set.mat', True)
test_X, test_label = read_mat('./data/test_set.mat', True)
train_images = np.expand_dims(train_X[0:1400, :, :], axis=3)
train_labels = np.expand_dims(train_label[0:1400, :, :], axis=3)
train_images = (train_images - train_images.min()) / (train_images.max() -
                                                      train_images.min())
train_labels = (train_labels - train_labels.min()) / (train_labels.max() -
                                                      train_labels.min())

test_images = np.expand_dims(test_X[0:400, :, :], axis=3)
test_labels = np.expand_dims(test_label[0:400, :, :], axis=3)
test_images = (test_images - test_images.min()) / (test_images.max() -
                                                   test_images.min())
test_labels = (test_labels - test_labels.min()) / (test_labels.max() -
                                                   test_labels.min())


def tf_relu(x):
    return tf.nn.relu(x)
Code Example #8
import utils
import scipy
import numpy

# Initialization and data import
print("initializing and loading data")

#mat_file = utils.read_mat("final.mat")
mat_file = utils.read_mat("Final35.mat")

# Variance
s2 = mat_file["s2"]

# Probability matrix
matriz_probabilidades = mat_file["probab"]

# Total consumption
Consumo_total = mat_file["x_teste"]

# Vector with the number of sub-machines per machine
n_maquinas = mat_file["n_maquinas"][0].tolist()

# Vector with the consumption of each sub-machine
pesos = mat_file["pesos"].tolist()[0]

# Auxiliary variables
l = numpy.shape(pesos)[0]
t = numpy.shape(Consumo_total)[0]

# Matrix storing the results of each subproblem
Code Example #9
import os, glob
from PIL import Image
from utils import read_mat

DATA_DIR = os.path.abspath(
    os.path.join(__file__, os.path.pardir, os.path.pardir))
OUTPUT_DIR = os.path.join(DATA_DIR, 'processed', 'annotations')

os.makedirs(OUTPUT_DIR, exist_ok=True)

annotation_files = glob.glob(
    os.path.join(DATA_DIR, 'raw', 'clothing-co-parsing', 'annotations',
                 'pixel-level', '*.mat'))

for f in annotation_files:
    image_name = os.path.splitext(os.path.basename(f))[0]

    # Each .mat stores the pixel-level mask under 'groundtruth'.
    annotation = read_mat(f)['groundtruth']
    annotation = Image.fromarray(annotation)
    annotation.save(os.path.join(OUTPUT_DIR, image_name + '.png'))
Code Example #10
import os
import numpy as np
from utils import read_mat

DATA_DIR = os.path.abspath(
    os.path.join(__file__, os.path.pardir, os.path.pardir))
OUTPUT_DIR = os.path.join(DATA_DIR, 'metadata')
os.makedirs(OUTPUT_DIR, exist_ok=True)  # make sure the output directory exists

labels_filepath = os.path.join(DATA_DIR, 'raw', 'clothing-co-parsing',
                               'label_list.mat')

# label_list is a MATLAB cell array; unwrap each cell to a plain string.
labels = read_mat(labels_filepath)['label_list'][0]
labels = np.array([l[0] for l in labels])
labels = np.stack((range(len(labels)), labels), axis=1)  # (index, name) pairs

np.savetxt(os.path.join(OUTPUT_DIR, 'labels.txt'),
           labels,
           fmt='%s',
           delimiter=',')
Code Example #11
import utils
import scipy
import numpy
import unfold

# Initialization and data import
print("initializing and loading data")

mat_file = utils.read_mat("final.mat")

# Variance
s2 = mat_file["s2"]

# Probability matrix
matriz_probabilidades = mat_file["probab"]

# Total consumption
Consumo_total = mat_file["x_teste"]

# Vector with the number of sub-machines per machine
n_maquinas = mat_file["n_maquinas"]

# Vector with the consumption of each sub-machine
pesos = mat_file["pesos"].tolist()[0]

# Auxiliary variables
l = numpy.shape(pesos)[0]
t = numpy.shape(Consumo_total)[0]

# Matrix storing the results of each subproblem