Example #1
from sys import argv

def main_():
    if len(argv) < 2:
        print('The input format is\nmain train/test [tcp/b] [filename]')
        return 0
    t = argv[1]
    mode = 'tcp'
    data = []
    if len(argv) == 4:
        mode = argv[2]
    if mode == 'tcp':
        getdata = getdata_tcp
    else:
        getdata = getdata_b
    if t == 'test':
        if len(argv) == 2:
            test()
        else:
            fname = argv[3]
            data = getdata_file(fname)
            data = filter_xyz(data)
            data14 = cluster(data, 14)
            simulate(data14)
    elif t == 'train':
        train()
    else:
        while True:
            data = getdata()
            data = filter_xyz(data)
            data14 = cluster(data, 14)
            simulate(data14)
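A minimal entry point for this dispatcher, assuming the script is saved as main.py (the file name is hypothetical). Note that the filename branch reads argv[3], so the mode argument has to be supplied as well:

if __name__ == '__main__':
    # python main.py train
    # python main.py test
    # python main.py test tcp recording.txt   (recording.txt is a placeholder)
    main_()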
def main(P, mate, mutate):
    """Run this experiment"""
    training_ints = initialize_instances('./../data/x_train_val.csv')
    testing_ints = initialize_instances('./../data/x_test.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    acti = HyperbolicTangentSigmoid()
    rule = RPROPUpdateRule()
    oa_name = "GA_{}_{}_{}".format(P, mate, mutate)
    FILE = OUTFILE.replace('XXX', oa_name)
    with open(FILE, 'w') as f:
        f.write('{},{},{},{},{},{},{},{},{}\n'.format('iteration', 'MSE_trg',
                                                      'MSE_tst', 'acc_trg',
                                                      'acc_tst', 'f1_trg',
                                                      'f1_tst', 'train_time',
                                                      'pred_time'))
    classification_network = factory.createClassificationNetwork([
        INPUT_LAYER, HIDDEN_LAYER1, HIDDEN_LAYER2, HIDDEN_LAYER3,
        HIDDEN_LAYER4, OUTPUT_LAYER
    ], acti)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = StandardGeneticAlgorithm(P, mate, mutate, nnop)
    train(oa, classification_network, oa_name, training_ints, testing_ints,
          measure, TRAINING_ITERATIONS, FILE)
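Because this main() is parameterized, it can drive a grid sweep over the genetic-algorithm settings. A sketch; the value grids below are hypothetical, not taken from the original experiment:

for P in [50, 100, 200]:         # population sizes (hypothetical)
    for mate in [10, 20]:        # matings per generation (hypothetical)
        for mutate in [10, 20]:  # mutations per generation (hypothetical)
            main(P, mate, mutate)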
Example #3
def main():
    """Run this experiment"""
    training_ints = initialize_instances('./../data/x_train_val.csv')
    testing_ints = initialize_instances('./../data/x_test.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    acti = HyperbolicTangentSigmoid()
    rule = RPROPUpdateRule()
    classification_network = factory.createClassificationNetwork([
        INPUT_LAYER, HIDDEN_LAYER1, HIDDEN_LAYER2, HIDDEN_LAYER3,
        HIDDEN_LAYER4, OUTPUT_LAYER
    ], acti)
    nnop = NeuralNetworkOptimizationProblem(data_set, classification_network,
                                            measure)
    oa = RandomizedHillClimbing(nnop)
    train(oa, classification_network, 'RHC', training_ints, testing_ints,
          measure, TRAINING_ITERATIONS, OUTFILE)
def main():
    """Run this experiment"""
    training_ints = initialize_instances('./../data/x_train_val.csv')
    testing_ints = initialize_instances('./../data/x_test.csv')
    factory = BackPropagationNetworkFactory()
    measure = SumOfSquaresError()
    data_set = DataSet(training_ints)
    acti = HyperbolicTangentSigmoid()
    rule = RPROPUpdateRule()
    # oa_names = ["Backprop"]
    classification_network = factory.createClassificationNetwork([
        INPUT_LAYER, HIDDEN_LAYER1, HIDDEN_LAYER2, HIDDEN_LAYER3,
        HIDDEN_LAYER4, OUTPUT_LAYER
    ], acti)
    train(
        BatchBackPropagationTrainer(data_set, classification_network, measure,
                                    rule), classification_network, 'Backprop',
        training_ints, testing_ints, measure, TRAINING_ITERATIONS, OUTFILE)
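All three main() variants rely on module-level constants defined elsewhere in the script. A plausible setup is sketched below; every value is hypothetical, except that the 'XXX' placeholder in OUTFILE is implied by the GA variant's OUTFILE.replace('XXX', oa_name) call:

INPUT_LAYER = 20            # hypothetical: number of input features
HIDDEN_LAYER1 = 50          # hypothetical hidden-layer widths
HIDDEN_LAYER2 = 50
HIDDEN_LAYER3 = 50
HIDDEN_LAYER4 = 50
OUTPUT_LAYER = 1            # hypothetical: single output unit
TRAINING_ITERATIONS = 5001  # hypothetical iteration budget
OUTFILE = './output/XXX_LOG.csv'  # 'XXX' is replaced by the algorithm name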
Example #6
import re
import random
import chardet
import parse_old
from urllib.request import urlopen  # urllib2.urlopen in the original Python 2 code


def train_from_rss(feeds):
    parser = parse_old.Parser()
    all_lines = []
    for link, content in feeds:
        text_clean = re.sub('<[^<]+?>', '', content)  # strip HTML tags
        raw = urlopen(link).read()
        encoding = chardet.detect(raw)['encoding']
        raw_uni = raw.decode(encoding)
        line_list = parser.parserByDensity(raw_uni)
        line_list = [line[:4] for line in line_list]
        line_list = [line + [(1 if line[0][:20] in text_clean else 0)]
                     for line in line_list]
        # Only meaningful with more than 2 lines.
        # Find the last line labelled 1, so every judgement up to it is known
        # to be correct.
        lines = []
        start = 0
        last = 0
        for index, val in enumerate(line_list):
            if val[-1]:
                last = index
            if val[-1] and not start:
                start = index
#        for index in range(last):
#            print(str(line_list[index][-1]) + str(line_list[index][1])[:4] + line_list[index][0])

        if len(line_list) > 2:
            fake_line = ['', 0, 0, 0, 0]
            line_list = [fake_line] + line_list + [fake_line]
            lines = [[line_list[i][1], line_list[i - 1][1],
                      line_list[i + 1][1], line_list[i][-1]]
                     for i in range(1, len(line_list) - 1)]

        all_lines += lines
    # Balance positive samples and negative samples:
    positive = [line for line in all_lines if line[-1] > 0]
    negative = [line for line in all_lines if line[-1] <= 0]
    random.shuffle(positive)
    random.shuffle(negative)
    min_len = min(len(positive), len(negative), 100)  # Too many samples may not be a good thing.
    all_lines = positive[:min_len] + negative[:min_len]
    random.shuffle(all_lines)
    train(all_lines)
    with open('train.csv', 'w') as f:
        for line in all_lines:
            print(line)
            f.write(','.join(map(str, (line[0], line[-1]))) + '\n')
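train_from_rss expects an iterable of (link, content) pairs. One way to build that input, assuming the feedparser package is available and that entry summaries can stand in for the content (both are assumptions, not part of the original):

import feedparser

d = feedparser.parse('http://example.com/feed.rss')  # hypothetical feed URL
feeds = [(entry.link, entry.summary) for entry in d.entries]
train_from_rss(feeds)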
def train_bayes(params):
    """
    Wrapper around train function to serve as objective function for Gaussian
    optimization in scikit-optimize routine gp_minimize.

    Arguments:
    ----------
        params: list, shape=[2,]
        Point in the search space sampled by gp_minimize: params[0] is
        log10 of the learning rate, params[1] is log2 of the mini batch
        size (the layer sizes are fixed below).

    Returns:
    --------
        best_error: float
        Best validation error reported by train; gp_minimize minimizes
        this value.

    """
    # Create Hyperdash hd_experiment
    hd_exp = Experiment(project_name)

    # Translate params into format understood by train function
    # n_layer = 4
    # layer_sizes = hd_exp.param('layer_sizes', (2**np.array(params[:n_layer])).tolist())
    # learning_rate = hd_exp.param('learning rate', 10**params[n_layer])
    # mini_batch_size = hd_exp.param('mini batch size', int(2**params[n_layer + 1]))
    # pkeep = hd_exp.param('dropout prob', 1)
    # hyper_params = [layer_sizes, learning_rate, mini_batch_size, pkeep]
    # hyper_param_str = make_hyper_param_str(hyper_params)

    layer_sizes = [4096] * 4
    learning_rate = hd_exp.param('learning rate', 10**params[0])
    mini_batch_size = hd_exp.param('mini batch size', int(2**params[1]))
    pkeep = hd_exp.param('dropout prob', 1)
    hyper_params = [layer_sizes, learning_rate, mini_batch_size, pkeep]
    hyper_param_str = make_hyper_param_str(hyper_params)

    # Call train function
    tic = time.time()
    logger.info('Start training for ' + hyper_param_str)
    log_df, best_error = train(train_tuple, validation_tuple, hyper_params,
                               nb_epochs, random_seed, hd_exp, project_dir)
    elapsed_time = time.time() - tic
    logger.info('Finished training in {} s.'.format(elapsed_time))

    # Writing Pandas log file to csv file on disk.
    logger.info('Writing pandas DF log to disk.')
    log_df.to_csv(project_dir + '/' + hyper_param_str + '/data_df.csv')

    # Finish Hyperdash Experiment
    hd_exp.end()

    return best_error
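Since train_bayes takes a params list and returns a scalar error, it plugs straight into gp_minimize. A sketch of the driving call; the bounds and call count are assumptions, chosen only to match the 10**params[0] and 2**params[1] transformations above:

from skopt import gp_minimize
from skopt.space import Real, Integer

space = [Real(-6.0, -1.0),  # log10 of the learning rate (assumed bounds)
         Integer(4, 10)]    # log2 of the mini batch size (assumed bounds)
result = gp_minimize(train_bayes, space, n_calls=20, random_state=0)
print(result.x, result.fun)  # best hyperparameters and best validation error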
Example #8
import ann
from sklearn.metrics import classification_report


def my_solution(X_train, X_test, y_train, y_test):
    # train with our own ANN implementation
    dimensions = [X_train.shape[1], 3, y_train.shape[1]]
    params = ann.train(X_train,
                       y_train,
                       dimensions,
                       alpha=0.1,
                       batch_size=50,
                       epoch=3472,
                       momentum=0.04,
                       tol=1e-10)
    y_pred = ann.predict(X_test, params, dimensions)

    y_pred = y_pred.argmax(axis=1)
    y_test = y_test.argmax(axis=1)

    # evaluate
    print('My implementation:')
    print(classification_report(y_test, y_pred))

    return params, y_pred, y_test
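A usage sketch for my_solution on a standard dataset. Everything except the ann module is an assumption about how the data was prepared; the argmax calls show that the function expects one-hot labels:

from sklearn.datasets import load_digits
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelBinarizer

X, y = load_digits(return_X_y=True)
y = LabelBinarizer().fit_transform(y)  # one-hot encode the labels
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2,
                                                    random_state=0)
params, y_pred, y_true = my_solution(X_train, X_test, y_train, y_test)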
Example #9
import ann

ann.initialize()

ann.restore()

ann.train(0.015, 1000000, 10000)
# Assumed: the training set is loaded the same way (train_filename is a
# hypothetical name; this line is missing from the excerpt).
train_tuple = load_labeled_csv(train_filename, feature_cols, label_cols)
validation_tuple = load_labeled_csv(validation_filename, feature_cols,
                                    label_cols)

# Normalize training and validation data by training statistics
train_mean = np.mean(train_tuple.features, axis=0)
train_std = np.std(train_tuple.features, axis=0)
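# Hypothetical guard (not in the original): a constant feature column makes
# train_std zero here, which would turn the divisions below into NaNs.
train_std[train_std == 0.0] = 1.0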

train_tuple.features -= train_mean
train_tuple.features /= train_std

validation_tuple.features -= train_mean
validation_tuple.features /= train_std

logger.info('Finished importing and normalization of input data.')

# ------------------------ Training --------------------------------------- #

hd_exp = Experiment(hyper_param_str)

# Run backpropagation training.
df, best_error = train(train_tuple, validation_tuple, hyper_params, nb_epochs,
                       random_seed, hd_exp, deep_cal_dir + '/code/')

logger.info('Writing log dataframe to csv on disk.')
df.to_csv(hyper_param_str + '/log_file.csv')

# Finish Hyperdash experiment.
hd_exp.end()

logger.info("PROGRAM END.")
Example #11
#! /usr/bin/python

import ann

traindata = ann.read_dataset('train.list')
trainlabel = ann.read_label('trainlabel.list')

hidden = 100
nn = ann.create_neural_net(960, hidden, 20)

testdata = ann.read_dataset('test1.list')
testlabel = ann.read_label('testlabel1.list')

epoch = 1000
learningrate = 0.2
momentum = 0.75

ann.train(nn, traindata, trainlabel, testdata, testlabel,
          epoch, learningrate, momentum, 1,
          'classified.txt', 'classified2.txt')