Example #1
def main():
    parameters = {}
    parameters['axes'] = parameter_indices_to_plot
    assert len(parameters['axes']) == 2, \
        'Plot_3D can only plot over 2 parameters.'
    assert all(0 <= p < config.P for p in parameters['axes']), \
        'Provided parameters do not coincide with those in config.'
    parameters['sliders'] = list(set(range(config.P))-set(parameters['axes']))
        
    # Store references to plots, otherwise the widgets become unresponsive due
    # to garbage collector. https://stackoverflow.com/a/42884505
    plots = {}

    scaler = utils.load_scaler()
    for component in config.components:
        model_constructor = utils.models[model_key]
        model = model_constructor()
        model.load(utils.model_dir, component)
        
        # Initialize and load data structures
        features, targets, outputs = {}, {}, {}
        for dataset in ['train', 'test']:
            features[dataset] = utils.load_features(dataset)
            targets[dataset] = utils.load_targets(dataset, component)
            outputs[dataset] = None
        # Create the interactive 3D plot
        plots[component] = Plot_3D(component, outputs, targets, features, model, parameters, scaler)
    pyplot.show()
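
The example above does not show how utils.load_scaler() is implemented. A minimal sketch, assuming the scaler is a fitted scikit-learn object that was serialized with joblib at training time (the 'scaler.joblib' path and the save_scaler helper are illustrative, not part of the example):

import joblib
from sklearn.preprocessing import StandardScaler

def save_scaler(scaler, path='scaler.joblib'):
    # Persist the scaler fitted on the training data so that plotting and
    # inference code can apply the exact same transformation later.
    joblib.dump(scaler, path)

def load_scaler(path='scaler.joblib'):
    # Reload the previously fitted scaler; no refitting happens here.
    return joblib.load(path)

# Typical lifecycle: fit once during training, save, reload everywhere else.
# save_scaler(StandardScaler().fit(train_features))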
Example #2
    def __init__(self, name, load=False):
        # Model Training Configuration
        self.day_in = 50
        self.step = 2  # i.e., use the previous 50 days to predict 2 days ahead
        self.day_want = 10  # number of days to predict
        self.num_features = 6

        if load:
            self.name = name
            self.model = load_checkpoint(filename=self.name)
            self.scaler = load_scaler(filename=self.name)
        else:
            self.name = name
            self.model = self.build_model()
            self.scaler = MinMaxScaler(feature_range=(-1, 1))
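
This constructor relies on load_checkpoint and load_scaler helpers that are not shown. A plausible sketch of the scaler half, assuming pickle serialization and a '<name>_scaler.pkl' naming scheme (both are assumptions for illustration):

import pickle
from sklearn.preprocessing import MinMaxScaler

def save_scaler(scaler, filename):
    # Hypothetical naming scheme: one scaler file per model name.
    with open(f'{filename}_scaler.pkl', 'wb') as f:
        pickle.dump(scaler, f)

def load_scaler(filename):
    with open(f'{filename}_scaler.pkl', 'rb') as f:
        return pickle.load(f)

Saving the fitted MinMaxScaler alongside the checkpoint matters because the feature_range=(-1, 1) mapping learned on the training data must be reused verbatim at prediction time.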
Example #3
    def __init__(self, name, load=False):
        # Model Training Configuration
        self.day_in = 50  # number of input days
        self.day_out = 5  # number of output days
        # self.day_want = 10  # this is the number of days you want to predict
        self.num_features = 1

        if load:
            self.name = name
            self.model = load_checkpoint(filename=self.name)
            self.scaler = load_scaler(filename=self.name)
        else:
            self.name = name
            self.model = self.build_model()
            # self.scaler = MinMaxScaler(feature_range=(-1, 1))
            self.scaler = {}
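
Note the change from a single MinMaxScaler to self.scaler = {}: one plausible reading is that this variant keeps a separate scaler per column, fitted during training and looked up by name at inference. A sketch under that assumption (the column names and data are made up):

import numpy as np
from sklearn.preprocessing import MinMaxScaler

data = {'close': np.random.rand(100, 1), 'volume': np.random.rand(100, 1)}

scalers = {}
for column, values in data.items():
    # Fit one scaler per column so each series is normalized independently.
    scalers[column] = MinMaxScaler(feature_range=(-1, 1)).fit(values)

scaled = scalers['close'].transform(data['close'])
restored = scalers['close'].inverse_transform(scaled)  # undo scaling on predictions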
Example #4
import sys
import numpy as np

import utils
import models

X_TEST_PATH = sys.argv[1]
ANS_PATH = sys.argv[2]
MODEL_PATH = './model_sqr.npy'
SCAL_PATH = './scaler_sqr.npy'

b, w = utils.load_model(MODEL_PATH)
x_test = utils.load_data(X_TEST_PATH)
x_test = np.concatenate((x_test, x_test[:, 0:6]**2), axis=1)  # add squared terms as features

x_max, x_min = utils.load_scaler(SCAL_PATH)  # renamed to avoid shadowing the max/min builtins
x_test = utils.scaling(x_test, x_max, x_min)

y_pred = models.predict(x_test, b, w)

utils.save_ans_dir(y_pred, ANS_PATH)
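
Here load_scaler returns a (max, min) pair rather than a scaler object, which suggests plain min-max normalization in NumPy. A sketch of what utils.scaling and its save/load companions might look like under that reading (the epsilon guard is an addition, not from the example):

import numpy as np

def save_scaler(x_max, x_min, path):
    # Store both column-wise extrema in a single .npy file, matching the
    # single scaler_sqr.npy path used above.
    np.save(path, np.stack([x_max, x_min]))

def load_scaler(path):
    x_max, x_min = np.load(path)
    return x_max, x_min

def scaling(x, x_max, x_min, eps=1e-8):
    # Min-max normalization; eps avoids division by zero on constant columns.
    return (x - x_min) / (x_max - x_min + eps)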
Example #5
import sys
import os
import pandas as pd
import numpy as np

import utils
import linear_model as lm

test_path = sys.argv[1]
output_path = sys.argv[2]
# test_path = os.path.join(os.path.dirname(__file__), "./data/test.csv")
# output_path = os.path.join(os.path.dirname(__file__), "./ans_sqr_test.csv")

model_path = os.path.join(os.path.dirname(__file__), "./model_sqr.npy")
scaler_path = os.path.join(os.path.dirname(__file__), './scaler_sqr.npy')

fea_select, y_pos = (0, 4, 5, 6, 7, 8, 9, 16), 70

b, w = utils.load_model(model_path)
# print(w.shape)
x_max, x_min = utils.load_scaler(scaler_path)  # avoid shadowing the max/min builtins
x_test = utils.load(test_path, mode='test', fea_select=fea_select, y_pos=y_pos)
x_test = np.concatenate((x_test, x_test**2), axis=1)
x_test = utils.scaling(x_test, x_max, x_min)

predicted = lm.predict(x_test, b, w)
print('>>> Predicted Result :\n', predicted)

utils.save_ans(predicted, output_path)
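
For completeness, a sketch of how scaler_sqr.npy could plausibly be produced on the training side, mirroring the squared-feature step above (the random matrix stands in for the real training data loaded through utils.load):

import numpy as np

x_train = np.random.rand(240, 8)                         # placeholder features
x_train = np.concatenate((x_train, x_train**2), axis=1)  # same squared features
x_max, x_min = x_train.max(axis=0), x_train.min(axis=0)  # column-wise extrema
np.save('./scaler_sqr.npy', np.stack([x_max, x_min]))    # reloaded by load_scaler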
Example #6
print(tabulate(table, headers=headers, tablefmt='psql'))
print()

# GENERATING VALIDATION SAMPLE AND LOADING PRE-TRAINED WEIGHTS
print('CLASSIFIER: loading valid sample',
      args.n_valid,
      end=' ... ',
      flush=True)
func_args = data_file, all_var, args.n_valid, args.n_tracks, args.n_classes, args.cuts
valid_sample, valid_labels = make_sample(*func_args)
#sample_analysis(valid_sample, valid_labels, scalars, scaler_file); sys.exit()
if args.cross_valid == 'OFF' and args.checkpoint != '':
    print('CLASSIFIER: loading pre-trained weights from', args.checkpoint, '\n')
    model.load_weights(args.checkpoint)
    if args.scaling:
        valid_sample = load_scaler(valid_sample, scalars, scaler_file)

# TRAINING LOOP
if args.cross_valid == 'OFF' and args.n_epochs >= 1:
    print('CLASSIFIER: train sample:',
          format(args.n_train[1] - args.n_train[0], '8.0f'), 'e')
    print('CLASSIFIER: valid sample:',
          format(args.n_valid[1] - args.n_valid[0], '8.0f'), 'e')
    print('\nCLASSIFIER: using TensorFlow', tf.__version__)
    print('CLASSIFIER: using', n_gpus, 'GPU(s)')
    print('\nCLASSIFIER: using', args.NN_type, 'architecture with', end=' ')
    print([group for group in train_var if train_var[group] != []])
    print('\nCLASSIFIER: loading train sample',
          args.n_train,
          end=' ... ',
          flush=True)
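
In this example load_scaler has a different job: it appears to load a saved scaler and apply it to the selected scalar variables of a sample. A plausible sketch, assuming the sample is a dict of per-variable arrays and the file holds a pickled, already-fitted scikit-learn scaler:

import pickle
import numpy as np

def load_scaler(sample, scalars, scaler_file):
    with open(scaler_file, 'rb') as f:
        scaler = pickle.load(f)
    # Stack the scalar variables column-wise, transform, and write them back.
    stacked = np.stack([sample[var] for var in scalars], axis=1)
    scaled = scaler.transform(stacked)
    for idx, var in enumerate(scalars):
        sample[var] = scaled[:, idx]
    return sample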
Example #7
headers = [          key  for key in train_var if train_var[key]!=[]]
table   = [train_var[key] for key in train_var if train_var[key]!=[]]
length  = max([len(n) for n in table])
table   = list(map(list, zip(*[n+(length-len(n))*[''] for n in table])))
print(tabulate(table, headers=headers, tablefmt='psql')); print()


# GENERATING VALIDATION SAMPLE AND LOADING PRE-TRAINED WEIGHTS
print('CLASSIFIER: loading valid sample', args.n_valid, end=' ... ', flush=True)
func_args = (data_file, total_var, args.n_valid, args.n_tracks, args.n_classes, args.valid_cuts)
valid_sample, valid_labels = make_sample(*func_args)
#sample_analysis(valid_sample, valid_labels, scalars, args.scaler_in, args.output_dir); sys.exit()
if args.cross_valid == 'OFF' and args.model_in != '':
    print('CLASSIFIER: loading pre-trained weights from', args.output_dir+'/'+args.model_in, '\n')
    model.load_weights(args.output_dir+'/'+args.model_in)
    if args.scaling: valid_sample = load_scaler(valid_sample, scalars, args.output_dir+'/'+args.scaler_in)


# TRAINING LOOP
if args.cross_valid == 'OFF' and args.n_epochs >= 1:
    print(  'CLASSIFIER: train sample:'   , format(args.n_train[1] -args.n_train[0], '8.0f'), 'e')
    print(  'CLASSIFIER: valid sample:'   , format(args.n_valid[1] -args.n_valid[0], '8.0f'), 'e')
    print('\nCLASSIFIER: using TensorFlow', tf.__version__ )
    print(  'CLASSIFIER: using'           , n_gpus, 'GPU(s)')
    print('\nCLASSIFIER: using'           , args.NN_type, 'architecture with', end=' ')
    print([group for group in train_var if train_var[group] != [ ]])
    print('\nCLASSIFIER: loading train sample', args.n_train, end=' ... ', flush=True)
    func_args = (data_file, total_var, args.n_train, args.n_tracks, args.n_classes, args.train_cuts)
    train_sample, train_labels = make_sample(*func_args); sample_composition(train_sample)
    if args.resampling == 'ON': train_sample, train_labels = balance_sample(train_sample, train_labels)
    if args.scaling and args.model_in != '':