# Example no. 1 (score: 0)
from tools import data_loader, model_io, render, data_analysis
import matplotlib.pyplot as plt
import numpy as np
import settings
import math
import mpl_toolkits.axes_grid1 as axes_grid1

# load trained neural network (nn)
# NOTE(review): model path comes from the project-local `settings` module;
# presumably settings.modelPath ends with a path separator — confirm.
nnName = 'nn_Linear_4096_4_Rect_Linear_4_2_SoftMax_(batchsize_10_number_iterations_20000).txt'
# nnName = 'nn_Linear_1024_2_Rect_Linear_2_2_SoftMax_(batchsize_10_number_iterations_10000).txt'
nn = model_io.read(settings.modelPath + nnName)

# I do not want to load the data every time, therefore the if statement
# (useful when re-running this script in an interactive session where X/Y
# survive between runs; in a fresh interpreter the branch always executes)
if 'X' not in locals():
    # load data
    X, Y = data_loader.load_data()

# choose some test data
# NOTE(review): indexing with [[idx]] (list of one index) presumably keeps
# the leading batch dimension of a numpy array — confirm X['train'] dtype.
idx = 0
x = X['train'][[idx]]
y = Y['train'][[idx]]
nnPred = nn.forward(x)
# print nnPred
# lrpScores = nn.lrp(nnPred, 'alphabeta', 2)
# print np.sum(lrpScores)
#plt.matshow(render.vec2im(x[0] + innerCircleSq))

# inspect first linear layer
# --------------------------

W1 = nn.modules[0].W
# Example no. 2 (score: 0)
class original_db:
    """Namespace class holding the raw train/test split.

    The `load_data` call below runs once, as a side effect of the class
    statement being executed (i.e. at module import time); the results
    are shared class attributes, not per-instance data.
    """

    # populated at class-definition time; both are class-level attributes
    train_data, test_data = load_data(DATA_PATH)
# Example no. 3 (score: 0)
# load data
from tools.data_loader import load_data
PROJECT_NAME = '003_digit'
# NOTE(review): Windows-style backslash path; will not resolve on POSIX.
DATA_PATH = 'datasets\\{}\\'.format(PROJECT_NAME)

data_train, data_test = load_data(DATA_PATH)

#-----------------------------------------------------------

# split the target column off from the feature columns
Y_train = data_train["label"]
X_train = data_train.drop(labels=["label"], axis=1)
del data_train  # free the original frame; it is not needed past this point

# ----------------------------------------------------------
from time import time

import pandas as pd
from pandas import Series, DataFrame
import numpy as np

import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import seaborn as sns

# fixed seed so any numpy-driven shuffles/splits below are reproducible
np.random.seed(2)

from sklearn.model_selection import train_test_split
from sklearn.metrics import confusion_matrix
import itertools

from keras.utils.np_utils import to_categorical  # convert to one-hot-encoding
    f['pred_k'] = pred_k
    f['cFrm'] = cFrm
    f['idx'] = te_idx

    f.close()


if __name__ == '__main__':

    dataset_testing = 'SumMe'  # testing dataset: SumMe or TVSum
    model_type = 2  # 1 for vsLSTM and 2 for dppLSTM, please refer to the readme file for more detail
    # use the idiomatic str() builtin instead of invoking __str__ directly
    model_idx = 'dppLSTM_' + dataset_testing + '_' + str(model_type)

    # load data
    print('... loading data')
    train_set, val_set, val_idx, test_set, te_idx = data_loader.load_data(
        data_dir='../data/',
        dataset_testing=dataset_testing,
        model_type=model_type)
    model_file = '../models/model_trained_' + dataset_testing

    # train on the training split, validating against the validation split;
    # trained weights are saved to model_file
    train(model_idx=model_idx,
          train_set=train_set,
          val_set=val_set,
          model_saved=model_file)

    # run inference with the saved weights; results go under ./res_LSTM/
    inference(model_file=model_file,
              model_idx=model_idx,
              test_set=test_set,
              test_dir='./res_LSTM/',
              te_idx=te_idx)
# -*- coding: utf-8 -*-
import requests as req
from time import time

PROJECT_NAME = '005_landmarks_retrieval'
# NOTE(review): Windows-style backslash path; will not resolve on POSIX.
DATA_PATH = 'datasets\\{}\\'.format(PROJECT_NAME)

from tools.data_loader import load_data
train_data, test_data = load_data(DATA_PATH)

# quick sanity check of the loaded frame
print(train_data.columns)
print(train_data.shape)

# -----------------------------------------------
# primary analysis

# missing value
from tools.pandas_extend import NA_refiner
nar = NA_refiner(train_data)
nar.show()
# comments: there is no missing value

# how much images for one landmark
# (summary statistics of the per-landmark_id sample counts)
print(train_data.landmark_id.value_counts().describe())
# comments: very less

# ------------------------------------------------------
# read the image
from skimage import io

# start timing the image-reading section (continues past this excerpt)
t0 = time()
# Example no. 6 (score: 0)
# load data
from tools.data_loader import load_data
PROJECT_NAME = '002_house_price'
# NOTE(review): Windows-style backslash path; will not resolve on POSIX.
DATA_PATH = 'datasets\\{}\\'.format(PROJECT_NAME)

train, test = load_data(DATA_PATH)

#---------------------------------------------------

import pandas as pd 
# pd.set_option('display.float_format', lambda x: '{:.3f}'.format(x)) #Limiting floats output to 3 decimal points

import numpy as np
from pandas import Series,DataFrame

# stack train and test so later feature engineering treats both uniformly;
# ntrain records where to split the combined frame apart again
all_data = pd.concat((train, test)).reset_index(drop=True)
ntrain = train.shape[0]
# ntest = test.shape[0]
y_train = train.SalePrice.values


# ----------------------------------------------------------------------------

# quick structural overview of both frames (column dtypes, null counts, shape)
train.info()
train.shape
test.info()
test.shape

import seaborn as sns
color = sns.color_palette()
# Example no. 7 (score: 0)
from networks.dpp_lstm import DPPLSTM
from networks.loss import LOSS
from torch.autograd import Variable
import numpy as np
import torch

import pdb

if __name__ == '__main__':

    # load data
    data_dir = '../data/'
    test_data_name = 'SumMe'
    model_type = 2  # 1 for vsLSTM, 2 for dppLSTM
    print('...loading data')
    train_set, valid_set, test_set, test_idx = load_data(
        data_dir, test_data_name, model_type)

    one_sample = train_set[0][0]
    one_bin_label = train_set[1][
        0]  # binary value: 0 - non keyframe, 1 - keyfram
    one_idx_label = train_set[2][0]  # keyframe index

    seq_len = one_sample.shape[0]
    input_size = 1024
    hidden_size = 256
    output_size = 256
    c_mlp_output_size = 1
    k_mlp_output_size = 256
    num_layers = 1
    batch_size = 1