# Example #1 (votes: 0)
 def test_estimate(self):
     """Estimator.estimate should report an error strictly between 0 and 1
     for both supported metrics on a tiny two-user / two-item rating set."""
     config = {
         "n_attributes": 2,
         "learning_rate": 0.1,
         "batch_size": 1,
     }
     # Columns 0-1 are the (id1, id2) attribute pairs; column 2 is the rating target.
     data_set = DataFrame({'id1': [2, 2, 1, 1, 1, ], 'id2': [1, 1, 2, 2, 2, ], 'rating': [2, 2, 3, 3, 3, ]})
     # DataFrame.ix was removed in pandas 1.0 -- use positional .iloc instead.
     estimator = Estimator(data_set.iloc[:, :2].values, config, 2, 2)
     y = data_set.iloc[:, 2].values
     for metric in ['MSE', 'MAE']:
         # 0.1 = test split fraction, 8 = presumably epochs/steps -- TODO confirm against Estimator.estimate.
         error = estimator.estimate(y, config['batch_size'], 0.1, metric, 8)
         assert 0 < error < 1
# Example #2 (votes: 0)
def evaluate(data_set_name, layer_size, n_hidden_layers):
    """Load the named data set per its JSON spec and evaluate an Estimator on it.

    Args:
        data_set_name: basename of the spec file in ../specs (without '.json').
        layer_size: width of each hidden layer passed to Estimator.
        n_hidden_layers: number of hidden layers passed to Estimator.

    Returns:
        The error produced by Estimator.estimate using the spec's metric.
    """
    with open(os.path.join('../specs', data_set_name + '.json')) as specs_file:
        specs = json.load(specs_file)
    data_set = pandas.read_csv(os.path.join('../resources', specs['file']), sep=specs['separator'],
                               engine=specs['engine'])
    print(data_set.head())
    with open(os.path.join(os.path.dirname(__file__), 'neural-net.json')) as config_file:
        config = json.load(config_file)
    # DataFrame.ix was removed in pandas 1.0 -- use positional .iloc instead.
    x = data_set.iloc[:, :2].values
    estimator = Estimator(x, config, layer_size, n_hidden_layers)
    # Column 2 is the target; reshape to a column vector for the scaler below.
    y = data_set.iloc[:, 2].values.reshape(-1, 1)
    if specs['scaling']:
        # Log-transform then scale into [-1, 1]; assumes all targets are > 0 -- TODO confirm.
        y = sklearn.preprocessing.MaxAbsScaler().fit_transform(numpy.log(y))
    return estimator.estimate(y, config['batch_size'], specs['test_size'], specs['metric'])
# Example #3 (votes: 0) -- NOTE(review): byte-identical duplicate of Example #2.
def evaluate(data_set_name, layer_size, n_hidden_layers):
    """Load the named data set per its JSON spec and evaluate an Estimator on it.

    Args:
        data_set_name: basename of the spec file in ../specs (without '.json').
        layer_size: width of each hidden layer passed to Estimator.
        n_hidden_layers: number of hidden layers passed to Estimator.

    Returns:
        The error produced by Estimator.estimate using the spec's metric.
    """
    with open(os.path.join('../specs', data_set_name + '.json')) as specs_file:
        specs = json.load(specs_file)
    data_set = pandas.read_csv(os.path.join('../resources', specs['file']), sep=specs['separator'],
                               engine=specs['engine'])
    print(data_set.head())
    with open(os.path.join(os.path.dirname(__file__), 'neural-net.json')) as config_file:
        config = json.load(config_file)
    # DataFrame.ix was removed in pandas 1.0 -- use positional .iloc instead.
    x = data_set.iloc[:, :2].values
    estimator = Estimator(x, config, layer_size, n_hidden_layers)
    # Column 2 is the target; reshape to a column vector for the scaler below.
    y = data_set.iloc[:, 2].values.reshape(-1, 1)
    if specs['scaling']:
        # Log-transform then scale into [-1, 1]; assumes all targets are > 0 -- TODO confirm.
        y = sklearn.preprocessing.MaxAbsScaler().fit_transform(numpy.log(y))
    return estimator.estimate(y, config['batch_size'], specs['test_size'], specs['metric'])