def inference():
    # build a training/inference handler from the parsed command-line arguments
    thandler = trainer.handler(args.process_command())

    # load the run-time data description and use it as the test loader
    rt_data = rt()
    data = trainer.load_data(rt_data.data, data_type=rt_data.data_type)
    test_loader = data

    # MLP with a 300-dimensional input and two output classes
    model_ = models.MLP(300, classes=2)
    total = sum(p.numel() for p in model_.parameters() if p.requires_grad)
    print('# of trainable parameters: {}'.format(total))

    model_name = 'MLP.pt'

    # restore the saved weights and run prediction over the test loader
    predicted = thandler.predict(model_, test_loader, model_name)

    # report the argmax class index for each prediction vector
    print([np.argmax(np.array(i)) for i in predicted])
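The torch_model.MLP class itself is not shown in this excerpt. A minimal sketch of what a two-class classifier with a 300-dimensional input could look like in PyTorch (the hidden size and layer layout here are assumptions, not the project's actual MLP):

import torch.nn as nn

class MLP(nn.Module):
    # hypothetical stand-in for torch_model.MLP(300, classes=2)
    def __init__(self, input_dim, classes=2, hidden_dim=128):
        super().__init__()
        self.net = nn.Sequential(
            nn.Linear(input_dim, hidden_dim),
            nn.ReLU(),
            nn.Linear(hidden_dim, classes),
        )

    def forward(self, x):
        return self.net(x)

With this sketch, the count printed above would be 300*128 + 128 + 128*2 + 2 = 38,786 trainable parameters.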
Example #2
import args
import chainer.links as L
import chainer
import data_handler as dh
import model as cntn
import numpy as np
from chainer import Chain, optimizers, serializers, Variable
from util import key2value
import json
from getkp import getkp, getSingleAndMoreKP

### load arguments
arg = args.process_command()
testing_url = arg.predict
doc_len = arg.dlen
word_len = arg.wlen
word_dim = arg.wdim
n_units = arg.hdim
n_label = arg.label
filter_length = arg.flen
filter_width = word_len
filter_height = word_dim
output_channel = arg.channel
batch_size = arg.batch
n_epoch = arg.epoch
model_url = arg.model
topk = 10


def loadLine(line, doc_len, word_len):
    # build the (doc_len x word_len) corpus representation of a single raw input line
    dataset = dh.load_corpus(line, doc_len, word_len)
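The args.process_command helper used throughout these examples is not shown. Judging from the attributes read above, it is most likely a thin argparse wrapper along these lines (the flag names and defaults below are guesses for illustration, not the project's actual definitions):

import argparse

def process_command():
    # hypothetical reconstruction of the command-line interface read above
    parser = argparse.ArgumentParser()
    parser.add_argument('--predict', help='input to run prediction on')
    parser.add_argument('--model', help='path of the saved model')
    parser.add_argument('--dlen', type=int, default=50, help='document length')
    parser.add_argument('--wlen', type=int, default=20, help='word length')
    parser.add_argument('--wdim', type=int, default=300, help='word-embedding dimension')
    parser.add_argument('--hdim', type=int, default=200, help='hidden units')
    parser.add_argument('--label', type=int, default=2, help='number of labels')
    parser.add_argument('--flen', type=int, default=3, help='convolution filter length')
    parser.add_argument('--channel', type=int, default=64, help='output channels')
    parser.add_argument('--batch', type=int, default=32, help='batch size')
    parser.add_argument('--epoch', type=int, default=10, help='number of epochs')
    return parser.parse_args()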
Example #3
# this script is for running the whole task
# usage: python main.py -g use_gpu -e epochs -b batch_size -lr learning_rate -wd weight_decay

import pickle
import torch
import torch.nn as nn
import torch.optim as optim
import numpy as np
from tqdm import tqdm
from torch.utils.data import DataLoader
from args import process_command
from data import RFMDataset
from model import LogisticReg

# hyperparameters
arguments = process_command()
epochs        = arguments.epoch
batch_size    = arguments.batch
learning_rate = arguments.lr
weight_decay  = arguments.wd
use_gpu = torch.cuda.is_available()


if __name__ == '__main__':
    # read preprocessed data
    print( 'preparing data...' )
    with open( './data/preprocess-RFM.pickle', 'rb' ) as f:
        X, y, test_X, test_y, idx_to_label_dict, header = pickle.load( f )

    cut = int( len( X ) * 0.1 )  # 10% of the training examples
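The script is truncated here. Given the 10% cut and the DataLoader import above, a plausible continuation splits off a validation slice and wraps the rest in a DataLoader; the sketch below is one way to do that under those assumptions, not the script's actual code:

from torch.utils.data import TensorDataset

# hold out the first `cut` examples for validation, train on the remainder
valid_X, valid_y = X[:cut], y[:cut]
train_X, train_y = X[cut:], y[cut:]

train_set = TensorDataset(torch.tensor(np.array(train_X), dtype=torch.float32),
                          torch.tensor(np.array(train_y), dtype=torch.long))
train_loader = DataLoader(train_set, batch_size=batch_size, shuffle=True)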
Example #4
import args
import rt_data as rt
import sys
import torch
import torch_model as models
import training_handler
import util

from tensorflow import keras
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score

thandler = training_handler.handler(args.process_command())


def RUN_SVC(data):
    # SVM baseline: fit an SVC on the padded training sequences and report test accuracy
    print('SVC')
    (train_data, train_labels), (test_data, test_labels) = data

    clf = SVC(C=0.1, gamma='auto')
    clf.fit(util.padding(train_data), train_labels)
    y_pred = clf.predict(util.padding(test_data))
    print('Accuracy: {}'.format(accuracy_score(test_labels, y_pred)))
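
# Note: util.padding is not included in this excerpt. A minimal sketch of such a
# helper (hypothetical, assuming variable-length integer sequences that are
# zero-padded to the length of the longest one so SVC receives a fixed-width
# 2-D array of features):
def padding_sketch(sequences, pad_value=0):
    max_len = max(len(seq) for seq in sequences)
    return [list(seq) + [pad_value] * (max_len - len(seq)) for seq in sequences]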


def data_loader(data_, data_type=[torch.LongTensor, torch.LongTensor]):
    (train_data, train_labels), (test_data, test_labels) = data_

    # split off the first 10% of the training examples (used as a validation set)
    train_size = int(len(train_data) * 0.1)

    valid_data = train_data[:train_size]