Example #1
import torch
import torch.nn as nn
import torch.backends.cudnn as cudnn
import torchvision.transforms as trn
import scipy.io as sio

# cfg, build_network, and data_loader are project-local modules that are
# not shown in this snippet.


def main():
    arch = cfg.arch

    model = build_network()
    if torch.cuda.device_count() > 1:
        model = nn.DataParallel(model)
    print("Now using %d GPUs" % torch.cuda.device_count())
    model.cuda()
    print(model)

    # load the model
    print("=> Loading Network %s" % cfg.resume)
    checkpoint = torch.load(cfg.resume)
    model.load_state_dict(checkpoint['state_dict'])
 
    print("=> loaded checkpoint '{}' (epoch {})"
          .format(cfg.resume, checkpoint['epoch']))
                                 
    cudnn.benchmark = False

    test_loader = data_loader(BatchSize=cfg.batch_size,
                              NumWorkers=cfg.num_workers).test_loader
    print("test data_loader is ready!")
    
    # test mode
    model.eval()

    # test an image
    # load the image transformer
    centre_crop = trn.Compose([
        trn.Resize((224,224)),
        trn.CenterCrop(224),
        trn.ToTensor(),
        trn.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
    ])
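    # Note: Resize((224, 224)) already produces the crop size, so the
    # CenterCrop(224) above is effectively a no-op. Also, centre_crop is
    # never applied in the batch loop below; a minimal single-image sketch
    # (hypothetical path, PIL import assumed):
    #     from PIL import Image
    #     img = Image.open('test.jpg').convert('RGB')
    #     input_batch = centre_crop(img).unsqueeze(0).cuda()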

    # collect the fully connected layer output of every test batch
    FC = []
    with torch.no_grad():  # replaces the deprecated volatile=True flag
        for batch_idx, test_data in enumerate(test_loader):
            # landmarks u and v are still fed to the network; the labels
            # are not used for any loss at test time
            test_inputs, test_labels, u, v = test_data
            test_inputs = test_inputs.cuda().float()
            test_labels = test_labels.cuda().float()
            u, v = u.cuda(), v.cuda()

            model_FC = model(test_inputs, u, v)
            if batch_idx % 100 == 0:
                print(model_FC.size())
                print(model_FC)
                sio.savemat('FC.mat', {'FC': FC})  # periodic checkpoint

            FC.append(model_FC.data.cpu().numpy())

    sio.savemat('FC.mat', {'FC':FC})
    print("Fully Connected Layers are saved as FC.mat ")
Example #2
import keras
import numpy as np
import keras.backend as K
from keras.optimizers import Adam
from keras.callbacks import Callback

from model.seq2seq import Seq2Seq
from data.data_loader import data_loader
import utils.config as config


data = data_loader('./data/Sogo2008.dat', config)
chars = data.get_vocab('./data/vocab.json')


def gen_titles(s, topk=3):
    """Beam-search decoding with beam width `topk`.

    `model` is the Seq2Seq instance built in the part of the file that is
    not shown here.
    """
    xid = np.array([data.str2id(s)] * topk)  # repeat the input once per beam
    yid = np.array([[2]] * topk)             # 2 is the start-of-sequence id
    scores = [0] * topk                      # accumulated log-probabilities
    for i in range(50):                      # decode at most 50 steps
        # next-token distribution; ids 0-2 are reserved for special tokens
        proba = model.predict([xid, yid])[:, i, 3:]
        log_proba = np.log(proba + 1e-6)
        # the topk most probable next tokens for each beam
        arg_topk = log_proba.argsort(axis=1)[:, -topk:]
        _yid = []
        _scores = []
        if i == 0:
            # first step: all beams are identical, so expand only beam 0
            for j in range(topk):
                _yid.append(list(yid[j]) + [arg_topk[0][j] + 3])
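        # (The source is truncated here. For steps i > 0 a standard beam
        # search would extend every beam with each of its topk candidate
        # tokens, add the candidate log-probability to that beam's score,
        # and keep only the topk best extended sequences. This description
        # is an assumption about the missing code, not the author's text.)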
Example #3
# Plan A: a big change, no final solution yet

from data.data_loader import data_loader
from preprocessing.up2down import up2down
from sklearn.preprocessing import scale
import numpy as np

prices = data_loader('../data/')

# ------- Config -------
u2d_error = 0.02        # tolerance passed to the up2down segmenter
u2d_split_length = 10   # number of turning points per sliding window
# ----------------------

for price in prices:
    # column 5 holds the adjusted close; standardize it before segmenting
    adj_close = price.iloc[:, 5].values
    adj_close = scale(adj_close)
    # indices of the up/down turning points found by up2down
    u2d_adj_close_index = up2down(adj_close, u2d_error)

    # slide a window of u2d_split_length turning points across the series
    barrel = []
    for index_ in range(len(u2d_adj_close_index) - u2d_split_length):
        feature_point_index = u2d_adj_close_index[index_:index_ +
                                                  u2d_split_length]
        feature_point_values = [adj_close[i_] for i_ in feature_point_index]
        barrel.append((feature_point_index, feature_point_values))

    barrel_length = len(barrel)
    # np.float was removed in NumPy 1.24; use the builtin float instead
    nn_distance = np.zeros([barrel_length, barrel_length], dtype=float)
    for i_ in range(barrel_length - 1):
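        # (The source is truncated here. The loop presumably fills the
        # pairwise distance matrix nn_distance between the windows stored
        # in barrel; the distance metric itself is not shown.)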
Example #4
def run():
    data = data_loader(DATA_SOURCE)
    result = evaluator(data)
    print(result)
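# DATA_SOURCE, data_loader, and evaluator come from the surrounding project
# and are not shown. A typical entry point would be (an assumption, not in
# the source):
#
#     if __name__ == '__main__':
#         run()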
Example #5
#!/usr/bin/python
# -*- coding: utf-8 -*-

from data.data_loader import data_loader
from sklearn.naive_bayes import MultinomialNB, BernoulliNB
from model_op import Save_Model, result_metrics, Save_Weight
from sklearn import svm
from sklearn.svm import SVC
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier
from sklearn import datasets, metrics

filename = r'E:\zflPro\data\getfeature-resnet-1-dsn-flatten0.mat'
# filename = r'getfeature-resnet-1-dsn-flatten0.mat'
TrainSample, Trainlabel, TestSample, Testlabel = data_loader(filename)

model_dir = r'E:\zflPro\MLmodel'
model_TDlist = ["SVM", "RF", "ADB"]
C_range = range(1, 20, 1)
gama_max = 0.001
max_range = range(500, 1000, 10)
nesti_ran = range(500)
depth_range = range(30)
rate_range = 10
njob_range = range(20)

# svm.SVC


def svmtrain(TrainSample, Trainlabel, Testlabel):
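    # The source is truncated at this definition. Note that the signature
    # omits TestSample, so it is presumably taken from module scope. A
    # minimal, hypothetical sketch of a sweep over the C_range defined
    # above (an assumption, not the author's code):
    #
    #     best_acc, best_clf = 0.0, None
    #     for C in C_range:
    #         clf = SVC(C=C, gamma=gama_max)
    #         clf.fit(TrainSample, Trainlabel.ravel())
    #         acc = clf.score(TestSample, Testlabel.ravel())
    #         if acc > best_acc:
    #             best_acc, best_clf = acc, clf
    #     return best_clf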