# NOTE(review): whitespace-mangled paste — many statements collapsed onto one physical
# line; as written this is not valid Python and needs its original line breaks restored.
# Fragment contents (in order):
#   1. Tail of an embedding-model builder: BatchNormalization -> Flatten ->
#      Dense(1200, relu) -> BatchNormalization, returning
#      Model(inputs=_input, outputs=x, name='embedding'). The enclosing `def`
#      (and the definition of `_input`) is outside this view.
#   2. Script-level experiment setup: reads features via read.read(), takes subject
#      ids from the dict keys and activity labels from the first subject's keys.
#   3. A leave-one-subject-out / leave-one-activity-out double loop: for each
#      (test_id, a_label) it drops `a_label` from the training classes
#      (read.remove_class), carves a k-shot support set out of the test split
#      (read.support_set_split), flattens features, and maps the remaining label
#      names to integer ids via `activity_id_dict` before one-hot encoding.
# The fragment is cut off mid-call: `np_utils.to_categorical(_train_labels_,` is
# missing its second argument (presumably the class count) and closing paren.
# NOTE(review): `split(...)` and `flatten(...)` are called bare here, while the
# sibling chunks call `read.split`/`read.flatten` — verify these are imported
# names and not an inconsistency. `k_shot` and `np_utils` are defined elsewhere.
x = BatchNormalization()(x) x = Flatten()(x) x = Dense(1200, activation='relu')(x) x = BatchNormalization()(x) return Model(inputs=_input, outputs=x, name='embedding') all_features = read.read() test_ids = list(all_features.keys()) all_labels = list(all_features[test_ids[0]].keys()) for test_id in test_ids: for a_label in all_labels: train_labels = [a for a in all_labels if a != a_label] _train_features, _test_features = split(all_features, test_id) _train_features = read.remove_class(_train_features, [a_label]) _support_features, _test_features = read.support_set_split( _test_features, k_shot) _train_features, _train_labels = flatten(_train_features) _support_features, _support_labels = flatten(_support_features) id_list = range(len(train_labels)) activity_id_dict = dict(zip(train_labels, id_list)) _train_labels_ = [] for item in _train_labels: _train_labels_.append(activity_id_dict.get(item)) _train_labels_ = np_utils.to_categorical(_train_labels_,
# NOTE(review): whitespace-mangled paste — many statements collapsed onto one physical
# line; not valid Python as written, original line breaks must be restored.
# Fragment contents (in order):
#   1. Tail of an MLP embedding builder: Input((input_shape,)) -> Dense(1200, relu),
#      wrapped as Model(..., name='embedding') and returned. The enclosing `def`
#      line (and the `input_shape` parameter binding) is outside this view —
#      presumably this is the `build_mlp_model` called further on; confirm.
#   2. Script-level experiment setup mirroring the sibling chunks: read features,
#      enumerate subject ids and activity labels, then a leave-one-subject-out /
#      leave-one-activity-out double loop using read.split, read.remove_class,
#      read.support_set_split(k_shot) and read.flatten, converting train/support
#      data and labels to numpy arrays.
#   3. Builds the base network via build_mlp_model(feature_length) and starts
#      constructing a siamese-style input: input_a = Input(shape=(feature_length,)).
# The loop body is cut off here — pairing/compile/fit code presumably follows.
# NOTE(review): `k_shot` and `feature_length` are defined elsewhere in the file.
base_input = Input((input_shape, )) x = Dense(1200, activation='relu')(base_input) embedding_model = Model(base_input, x, name='embedding') return embedding_model feature_data = read.read() test_ids = list(feature_data.keys()) all_labels = list(feature_data[test_ids[0]].keys()) for test_id in test_ids: for a_label in all_labels: train_labels = [a for a in all_labels if a != a_label] _train_data, _test_data = read.split(feature_data, test_id) _train_data = read.remove_class(_train_data, [a_label]) _support_data, _test_data = read.support_set_split(_test_data, k_shot) _train_data, _train_labels = read.flatten(_train_data) _support_data, _support_labels = read.flatten(_support_data) _train_data = np.array(_train_data) _support_data = np.array(_support_data) _train_labels = np.array(_train_labels) _support_labels = np.array(_support_labels) base_network = build_mlp_model(feature_length) input_a = Input(shape=(feature_length, ))
# NOTE(review): whitespace-mangled paste — a loop body collapsed onto one physical
# line; not valid Python as written, original line breaks must be restored.
# Fragment contents: for each test subject, 5 repetitions (`_int` is an unused
# repetition counter) of a random class split:
#   - np.random.choice(len(all_labels), num_test_classes, False) draws
#     `num_test_classes` distinct label indices (third positional arg is
#     replace=False); those labels become the held-out test classes, the rest
#     become training classes (both printed for logging).
#   - read.split / read.remove_class / read.support_set_split(k_shot) /
#     read.flatten then produce train and k-shot support sets, as in the
#     sibling chunks.
#   - np.expand_dims(..., 3) appends an axis at position 3 to train and support
#     data — presumably a channel axis for the conv model; confirm input rank.
#   - base_network = build_conv_model() builds the base model (defined elsewhere).
# The loop body is cut off here; the fragment also relies on `test_ids`,
# `all_labels`, `num_test_classes` and `k_shot` defined outside this view.
for test_id in test_ids: for _int in range(5): test_labels_indices = np.random.choice(len(all_labels), num_test_classes, False) test_labels = [ a for ii, a in enumerate(all_labels) if ii in test_labels_indices ] print(test_labels) train_labels = [ a for ii, a in enumerate(all_labels) if ii not in test_labels_indices ] print(train_labels) _train_data, _test_data = read.split(feature_data, test_id) _train_data = read.remove_class(_train_data, test_labels) _support_data, _test_data = read.support_set_split(_test_data, k_shot) _train_data, _train_labels = read.flatten(_train_data) _support_data, _support_labels = read.flatten(_support_data) _train_data = np.array(_train_data) _train_data = np.expand_dims(_train_data, 3) _support_data = np.array(_support_data) _support_data = np.expand_dims(_support_data, 3) _train_labels = np.array(_train_labels) _support_labels = np.array(_support_labels) base_network = build_conv_model()