示例#1
0
    # Free the raw data now that it has been consumed.
    del data1

    # Resume from a checkpoint if one exists; otherwise persist the
    # current (v, z, models) state as the initial checkpoint.
    data2 = load_state('model_predicts')
    if data2 == None:  # NOTE(review): prefer `data2 is None`
        save_state('model_predicts', (v, z, models))
    else:
        v, z, models = data2
    del data2

    # Run each base model once, checkpointing after every completion so a
    # crash can resume without re-running finished models. `models` is used
    # as a set of completed model names (`in` test + `.add`).
    # NOTE(review): 'keras_mlp2' appears twice — the second entry is a
    # no-op (already in `models`); possibly 'keras_mlp3' was intended.
    for part in ['xgb1', 'xgb2', 'xgb3',
                 'keras_mlp1', 'keras_mlp2', 'keras_mlp2']:
        if part in models: continue
        print('running', part)
        # Look up the runner function by name; it is expected to write its
        # predictions into v and z (passed by reference).
        f = globals()[part]
        f(train2, y, test2, v, z)
        models.add(part)
        save_state('model_predicts', (v, z, models))

    # Feature matrices for the level-2 blender: every column except id/y,
    # passed through prestore().
    v1 = prestore(v.drop(['id', 'y'], axis=1).values)
    z1 = prestore(z.drop(['id', 'y'], axis=1).values)

    # Level-2 blender: Bayesian ridge over the base-model predictions,
    # evaluated with 10-fold CV on log-loss.
    lr = linear_model.BayesianRidge()
    cv = model_selection.cross_val_score(lr, v1, v.y, cv=10, scoring=metrics.make_scorer(metrics.log_loss))
    print(cv, cv.mean(), cv.std())

    # Fit on all rows of v, then clip the predictions for z away from
    # exactly 0/1 so a downstream log-loss cannot blow up.
    lr.fit(v1, v['y'])
    print(lr.coef_, np.sum(lr.coef_))
    z['y'] = np.clip(lr.predict(z1), 1e-5, 1-1e-5)

#    z.y = (z.keras_resnet1 + z.keras_mlp1 + z.xgb1 + z.xgb2 + z.xgb3) / 5
#    z.y = prestore(z.y)
示例#2
0
def network2(feature_dimension):
    """Build and compile a 3-class MLP classifier.

    Architecture: a 256-unit L2-regularized input layer followed by PReLU
    and batch normalization, two ReLU hidden layers (64 and 32 units) with
    dropout between every stage, and a 3-way softmax output.

    Args:
        feature_dimension: number of input features per sample.

    Returns:
        A compiled Keras Sequential model (Adagrad optimizer, categorical
        cross-entropy loss, accuracy metric).
    """
    layer_stack = [
        Dense(256, input_dim=feature_dimension, init='uniform', W_regularizer=l2(0.0001)),
        PReLU(),
        BatchNormalization(),
        Dropout(0.8),
        Dense(64, activation='relu'),
        Dropout(0.3),
        Dense(32, activation='relu'),
        Dropout(0.2),
        Dense(3, activation='softmax'),
    ]

    net = Sequential()
    for layer in layer_stack:
        net.add(layer)

    net.compile(optimizer=optimizers.Adagrad(lr=0.015),
                loss='categorical_crossentropy',
                metrics=['accuracy'])
    return net
示例#3
0
# Progress markers around the slow vectorization / casting steps.
print('1')
# NOTE(review): x_train is used below but not built in this chunk —
# presumably vectorized earlier in the file; confirm against prior lines.
x_test = vectorize_sequence(test_data)
print('2')
# Cast the label arrays to float16.
y_train = np.asarray(train_label).astype('float16')
print('3')
y_test = np.asarray(test_label).astype('float16')
print('4')

# Hold out the first 10,000 samples for validation; train on the rest.
x_val = x_train[:10000]
partial_x_train = x_train[10000:]

y_val = y_train[:10000]
partial_y_train = y_train[10000:]

# Binary classifier: two 16-unit ReLU layers and a single sigmoid output.
# NOTE(review): rebinding `models` to the Sequential instance shadows the
# imported keras `models` module from this point on.
models = models.Sequential()
models.add(layers.Dense(16, activation='relu', input_shape=(10000, )))
models.add(layers.Dense(16, activation='relu'))
models.add(layers.Dense(1, activation='sigmoid'))

models.compile(optimizer='rmsprop',
               loss='binary_crossentropy',
               metrics=['accuracy'])

# Train for 20 epochs, validating on the held-out split each epoch.
history = models.fit(partial_x_train,
                     partial_y_train,
                     epochs=20,
                     batch_size=50,
                     validation_data=(x_val, y_val))

# Per-epoch training metrics recorded by Keras.
history_dict = history.history
loss_values = history_dict['loss']
# Hyper-parameters for a small convnet over 28x28x1 (MNIST-style) images.
num_filters = 32  # Number of conv filters
max_pool_size = (2, 2)  # shape of MaxPool
conv_kernel_size = (3, 3)  # conv kernel shape
imag_shape = (28, 28, 1)
num_classes = 10
drop_prob = 0.5  # fraction to drop (0-1.0)

# Define the model type
model = Sequential()

# Define layers in the NN
# Define the 1st convolution layer.  We use border_mode= and input_shape only on first layer
# border_mode=value restricts convolution to only where the input and the filter fully overlap (ie. not partial overlap)
# NOTE(review): Convolution2D with positional kernel dims and border_mode=
# is the Keras 1.x API; modern Keras spells this Conv2D(filters, (h, w), padding=).
model.add(
    Convolution2D(num_filters,
                  conv_kernel_size[0],
                  conv_kernel_size[1],
                  border_mode='valid',
                  input_shape=imag_shape))
# push through RELU activation
model.add(Activation('relu'))
# take results and run through max_pool
model.add(MaxPooling2D(pool_size=max_pool_size))

# 2nd Convolution layer
model.add(Convolution2D(num_filters, conv_kernel_size[0], conv_kernel_size[1]))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=max_pool_size))

# Fully Connected Layer
model.add(Flatten())
model.add(Dense(128))  # Fully connected layer in Keras
# NOTE(review): num_classes and drop_prob are defined above but unused in the
# visible lines — the dropout/output layers presumably follow this chunk.
示例#5
0
    # Set the one-hot flag at each sample's label index.
    for i, label in enumerate(labels):
        results[i, label] = 1.
    return results


one_hot_train_labels = to_one_hot(train_labels)
one_hot_test_labels = to_one_hot(test_labels)
'''keras内置方法实现标签向量化
from keras.utils.np_utils import to_categorical
one_hot_train_labels = to_categorical(train_labels)
one_hot_test_labels = to_categorical(test_labels)
'''

# Build the network: two 64-unit ReLU layers and a 46-way softmax output.
# NOTE(review): rebinding `models` to the Sequential instance shadows the
# keras `models` module from this point on.
models = models.Sequential()
models.add(layers.Dense(64, activation='relu', input_shape=(10000, )))
models.add(layers.Dense(64, activation='relu'))
models.add(layers.Dense(46, activation='softmax'))

models.compile(optimizer='rmsprop',
               loss='categorical_crossentropy',
               metrics=['acc'])

# Split off the first 1,000 samples as a validation set.
x_val = x_train[0:1000]
pratial_x_train = x_train[1000:]  # NOTE(review): 'pratial' is a typo for 'partial'

y_val = one_hot_train_labels[0:1000]
pratial_y_train = one_hot_train_labels[1000:]

# Start training (the fit call follows outside this chunk).
    # Tail of the image-copy loop (header outside this view): copy each
    # file `fname` from `src` into the test-dogs directory.
    dst = os.path.join(test_dogs_dir, fname)
    shutil.copyfile(src, dst)

# Sanity check: confirm the images were divided and copied properly.

print('total training cat images:', len(os.listdir(train_cats_dir)))
print('total training dog images:', len(os.listdir(train_dogs_dir)))
print('total validation cat images:', len(os.listdir(validation_cats_dir)))
print('total validation dog images:', len(os.listdir(validation_dogs_dir)))
print('total test cat images:', len(os.listdir(test_cats_dir)))
print('total test dog images:', len(os.listdir(test_dogs_dir)))

# Build Model: a small convnet over 150x150 RGB images with a single
# sigmoid output for binary (cat vs. dog) classification.

model = models.Sequential()
# BUG FIX: these layers were previously added via `models.add(...)`, i.e.
# onto the imported keras `models` module instead of the `model` instance
# created above — that raises AttributeError and leaves `model` empty.
model.add(layers.Conv2D(32, (3, 3), activation='relu', input_shape=(150, 150, 3)))
model.add(layers.MaxPooling2D(2, 2))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D(2, 2))
model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D(2, 2))
model.add(layers.Conv2D(128, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D(2, 2))
model.add(layers.Flatten())
model.add(layers.Dense(512, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))

# Compile Network

# RMSprop optimizer, as usual. Because you ended the network with a single sigmoid unit,
#  you’ll use binary crossentropy as the loss
示例#7
0
文件: shendu.py 项目: 155black/second
        # Tail of a vectorizing helper (its definition starts outside this view).
        return results


# Multi-hot encode the input sequences and cast labels to float32.
x_train = vectorize_sequences(train_data)
x_test = vectorize_sequences(test_data)
y_train = np.asarray(train_labels).astype('float32')
y_test = np.asarray(test_labels).astype('float32')
# NOTE(review): stray illustrative line — relu/dot/W/input/b are not defined
# in this chunk, so this raises NameError unless they are defined earlier in
# the file; confirm or remove.
output = relu(dot(W, input) + b)

from keras import models
from keras import layers

# Build an L2-regularized binary classifier over 10,000-dim inputs.
# NOTE(review): rebinding `models` to the Sequential instance shadows the
# keras `models` module from this point on.
models = models.Sequential()
models.add(
    layers.Dense(16,
                 kernel_regularizer=regularizers.l2(0.001),
                 activation='relu',
                 input_shape=(10000, )))
models.add(
    layers.Dense(16,
                 kernel_regularizer=regularizers.l2(0.001),
                 activation='relu'))
models.add(layers.Dense(1, activation='sigmoid'))

from keras import optimizers
from keras import losses
from keras import metrics

# BUG FIX: this previously called `model.compile(...)` on a name `model`
# that is never defined in this fragment, leaving the `models` network
# built above uncompiled. Compile the object we actually constructed.
models.compile(optimizer='rmsprop',
               loss='binary_crossentropy',
               metrics=['accuracy'])
示例#8
0
from keras import models
from keras import layers


# Minimal Sequential example: two 32-unit Dense layers over 784-dim input.
# NOTE(review): rebinding `models` to the Sequential instance shadows the
# keras `models` module imported above for the rest of the file.
models = models.Sequential()
models.add(layers.Dense(32, input_shape=(784,)))
models.add(layers.Dense(32))