def init_cnn():
    """Build the mini-segment CNN classifier and load its trained weights.

    Returns:
        tuple: ``(net, y_mapping)`` where ``net`` is a nolearn ``NeuralNet``
        with parameters restored from ``CNN_WEIGHTS_PATH`` and ``y_mapping``
        is the label mapping unpickled from ``CNN_Y_MAPPING_PATH``.

    NOTE(review): the original source had all newlines stripped, which made
    the inline ``#`` comments swallow the rest of the line; this is the
    reconstructed, conventionally formatted version of the same code.
    """
    net = NeuralNet(
        layers=[
            # input layer: (batch, channels, segment length, mel data points)
            (layers.InputLayer, {
                'shape': (None,
                          bastdm5.classification.settings.CHANNELS,
                          bastdm5.classification.settings.MINI_SEGMENT_LENGTH,
                          bastdm5.classification.settings.MEL_DATA_POINTS)
            }),
            # convolution layers 1
            (layers.Conv2DLayer, {'num_filters': 32, 'filter_size': (8, 1)}),
            (layers.MaxPool2DLayer, {'pool_size': (4, 1), 'stride': (2, 1)}),
            # convolution layers 2
            (layers.Conv2DLayer, {'num_filters': 32, 'filter_size': (8, 1)}),
            (layers.MaxPool2DLayer, {'pool_size': (4, 1), 'stride': (2, 1)}),
            # dense layer
            (layers.DenseLayer, {'num_units': 100}),
            (layers.DropoutLayer, {}),
            (layers.DenseLayer, {'num_units': 50}),
            # output layer: 6 classes, softmax probabilities
            (layers.DenseLayer, {
                'num_units': 6,
                'nonlinearity': nonlinearities.softmax
            })
        ],
        # learning rate parameters
        update_learning_rate=0.001,
        update_momentum=0.9,
        regression=False,
        max_epochs=999,
        verbose=1,
    )
    # Replace the default test iterator so prediction batches match the
    # mini-segment batching used elsewhere in the project.
    net.batch_iterator_test = TestSegmentBatchIterator(
        batch_size=bastdm5.classification.settings.MINI_BATCH_SIZE)
    y_mapping = utils.load_from_pickle(CNN_Y_MAPPING_PATH)
    # Restore previously trained weights; the net is used for inference only.
    net.load_params_from(CNN_WEIGHTS_PATH)
    return net, y_mapping
def create_clf(cls, data=None):
    """Create the neural-network classifier.

    Args:
        cls: class carrying the hyper-parameters (``default_bits``,
            ``input_dropout_rate``, ``hidden_num_units``, etc.).
        data: optional training array; when given, its second dimension
            sets the input width and its row count sizes the batches.

    Returns:
        A configured nolearn ``NeuralNet`` (binary output, softmax-style
        two-unit final layer).

    NOTE(review): the original source had all newlines stripped; this is
    the reconstructed, conventionally formatted version of the same code.
    """
    # EAFP: any object without ``.shape`` (including None) falls back to
    # the class default input width.
    try:
        bits = data.shape[1]
    except AttributeError:
        bits = cls.default_bits
    net_params = {
        "layers": [("input", InputLayer),
                   ("inputdrop", DropoutLayer),
                   ("hidden", DenseLayer),
                   ("hiddendrop", DropoutLayer),
                   ("output", DenseLayer)],
        "input_shape": (None, bits),
        "inputdrop_p": cls.input_dropout_rate,
        "hidden_num_units": cls.hidden_num_units,
        "hidden_nonlinearity": cls.hidden_nonlinearity,
        "hiddendrop_p": cls.hidden_dropout_rate,
        "output_num_units": 2,
        "output_nonlinearity": cls.output_nonlinearity,
        # NOTE(review): the learning rate is sourced from ``cls.leakiness``,
        # which reads like a leaky-rectifier parameter, not a learning rate
        # — verify this attribute is intentional.
        "update_learning_rate": cls.leakiness,
        "max_epochs": cls.max_epochs,
        "on_epoch_finished": EarlyStopping(patience=cls.patience)
    }
    clf = NeuralNet(**net_params)
    if data is not None:
        # Batch size is a percentage of the data set, capped at the class
        # maximum; the same balanced iterator is used for train and test.
        batch_size = min(
            cls.max_batch_size,
            int(cls.min_percent_data_in_batch * data.shape[0]))
        clf.batch_iterator_train = BalancedClassIterator(
            batch_size=batch_size)
        clf.batch_iterator_test = BalancedClassIterator(
            batch_size=batch_size)
    return clf
# NOTE(review): this physical line is a whitespace-mangled paste. It fuses
# (a) the tail of an unseen batch-iterator transform (the `X_new[i, j] = ...
# return X_new, yb` fragment — its enclosing function and loop nesting are
# not visible here, so it is left untouched) with (b) a top-level script
# that sweeps 4 crop-x offsets x 4 crop-y offsets x 4 rotations = 64
# augmentation configurations, collects `net.predict_proba` outputs per
# configuration into columns of `y_pred_valid`, fits a BMC model combiner
# on the validation predictions, then loads the SDSS test images/labels.
# TODO: restore the original line breaks/indentation before this can run.
X_new[i, j] = np.rot90(Xb[i, j, sx, sy] + noise[j], k=nrotate) return X_new, yb count = 0 print("Starting model combination...") for startx in range(4): for starty in range(4): for rotate in range(4): net.batch_iterator_test = AugmentedBatchIterator(batch_size=128, crop_size=4, testing=True, startx=startx, starty=starty, rotate=rotate) y_pred_valid[:, count] = net.predict_proba(X_valid)[:, 1] count += 1 print("Iteration: {} / 64".format(count)) combine = bmc.BMC() combine.fit(y_pred_valid, y_valid) print("Validation set done.") X_test = np.load("../data/sdss_test_images.npy") y_test = np.load("../data/sdss_test_labels.npy")
# NOTE(review): near-duplicate of the preceding mangled chunk (same 64-way
# augmentation sweep feeding a BMC model combiner), differing only in
# call-argument formatting and missing the final test-label load — likely
# two pasted revisions of the same script; consider deleting one after
# confirming which revision is current. As above, the leading
# `X_new[i, j] = ... return X_new, yb` fragment is the tail of a function
# whose body is not visible here, so the line is left byte-identical.
# TODO: restore the original line breaks/indentation before this can run.
X_new[i, j] = np.rot90(Xb[i, j, sx, sy] + noise[j], k=nrotate) return X_new, yb count = 0 print("Starting model combination...") for startx in range(4): for starty in range(4): for rotate in range(4): net.batch_iterator_test=AugmentedBatchIterator( batch_size=128, crop_size=4, testing=True, startx=startx, starty=starty, rotate=rotate ) y_pred_valid[:, count] = net.predict_proba(X_valid)[:, 1] count += 1 print("Iteration: {} / 64".format(count)) combine = bmc.BMC() combine.fit(y_pred_valid, y_valid) print("Validation set done.") X_test = np.load("../data/sdss_test_images.npy")