Example #1
from keras.layers import (Input, Dense, Convolution2D, AveragePooling2D,
                          GlobalMaxPooling2D, concatenate, dot)
from keras.models import Model


def create_model(MAX_QRY_LENGTH=50,
                 MAX_DOC_LENGTH=2900,
                 NUM_OF_FEATS=10,
                 PSGS_SIZE=[(50, 1)],
                 NUM_OF_FILTERS=5,
                 tau=1):
    alpha_size = len(PSGS_SIZE)
    psgMat = Input(shape=(
        MAX_QRY_LENGTH,
        MAX_DOC_LENGTH,
        1,
    ), name="passage")
    homoMat = Input(shape=(NUM_OF_FEATS, ), name="h_feats")
    # Conv2D, mean pooling and max pooling over each passage size.
    M, K, r = [], [], []
    for idx, PSG_SIZE in enumerate(PSGS_SIZE):
        tau = PSG_SIZE[0] // 2  # integer stride for the convolution
        pool_size = (MAX_QRY_LENGTH - PSG_SIZE[0]) // tau + 1
        # Convolution
        m_1 = Convolution2D(filters=NUM_OF_FILTERS,
                            kernel_size=PSG_SIZE,
                            strides=tau,
                            padding='valid',
                            name="pConv2D_" + str(idx))(psgMat)
        M.append(m_1)
        # Mean pooling
        k_1 = AveragePooling2D(pool_size=(pool_size, 1),
                               strides=1,
                               name="pAvePool_" + str(idx))(M[idx])
        K.append(k_1)
        # Max Pooling
        r_1 = GlobalMaxPooling2D(name="pMaxPool_" + str(idx))(K[idx])
        r.append(r_1)
    concat_r = concatenate(r) if len(r) > 1 else r[0]  # Concatenate needs >= 2 inputs
    # Fusion Matrix and predict relevance
    # get h(q, d)
    # MLP(DENSE(len(r(q,d))))
    phi_h = Dense(alpha_size, activation="softmax", name="TrainMat")(homoMat)
    dot_prod = dot([concat_r, phi_h], axes=1, name="rel_dot")
    # tanh(dot(r.transpose * h))
    #pred = Activation("tanh", name="activation_tanh")(dot_prod)
    pred = Dense(1, activation="sigmoid", name="activation_sigmoid")(dot_prod)

    # We now have everything we need to define our model.
    model = Model(inputs=[psgMat, homoMat], outputs=pred)
    model.summary()
    '''
    from keras.utils import plot_model
    plot_model(model, to_file='model.png')
    '''
    return model
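
For reference, the pool_size arithmetic above is the standard 'valid'-padding output-length formula; a minimal sketch (plain Python; the helper name is illustrative, not from the original):

def conv_output_length(input_len, kernel_len, stride):
    # 'valid' padding: floor((input - kernel) / stride) + 1
    return (input_len - kernel_len) // stride + 1

# With the defaults above: a kernel of 50 over a 50-token query, stride tau = 25
assert conv_output_length(50, 50, 25) == 1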
Example #2
def testaugmentation(X_test, y_test, no_visit=5):
    """Keep only the last `no_visit` visits for each patient with at least that many."""
    new_patients_vec = []
    new_patients_label = []
    for i in range(len(X_test)):  ## patient-wise
        T = X_test[i]
        #T  = T[y_test[i][:,2]==0]
        P = y_test[i]
        #P = P[y_test[i][:,2]==0]
        if len(T) >= no_visit:
            K = []
            L = []
            for j in range(len(T) - no_visit, len(T)):
                K.append(T[j])
                L.append(P[j])
            new_patients_vec.append(K)
            new_patients_label.append(L)
    return new_patients_vec, new_patients_label
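
A small usage sketch (synthetic shapes, illustrative only): records with at least no_visit visits are truncated to their last no_visit entries, and shorter records are dropped.

import numpy as np

X_demo = [np.zeros((7, 3)), np.zeros((4, 3))]  # two patients: 7 and 4 visits
y_demo = [np.zeros((7, 2)), np.zeros((4, 2))]
vecs, labels = testaugmentation(X_demo, y_demo, no_visit=5)
print(len(vecs))     # 1 -- the 4-visit patient is dropped
print(len(vecs[0]))  # 5 -- only the last five visits are kept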
Example #3
import numpy
import pandas
import cv2
from keras import backend as K

K.set_image_dim_ordering('th')

# fix random seed for reproducibility
seed = 7
numpy.random.seed(seed)

images = []  # renamed from K, which would shadow the Keras backend alias
# load the dataset
dataframe = pandas.read_csv('roadseg_dataset_newest.csv', header=1)
dataset = dataframe.values
data = dataset[20000:40000,:]

for i in data[:, 0]:
    img = cv2.imread(i)
    img = cv2.resize(img, (28, 28))
    images.append(img)
X = numpy.array(images)

look_back = 30
# Y = numpy.array(data[:,2:4])
Y = numpy.empty_like(data[:,2:4])

# MA filter: moving average over N samples, reading N extra rows on each side
# of the 20000:40000 window so the smoothed series keeps the same length.
N = 500
imu_p_smooth_ma = dataset[20000 - N:40000 + N, 2]
imu_p_smooth_t = numpy.convolve(imu_p_smooth_ma, numpy.ones((N,)) / N, mode='valid')
# trim the half-window tails so the result aligns with data's 20000 rows
imu_p_smooth_t = imu_p_smooth_t[int(N / 2):int(len(imu_p_smooth_t) - (N / 2)) - 1]


Y[:,0] = imu_p_smooth_t
Y[:,1] = data[:,3]
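
The pad-then-trim trick above keeps the smoothed series the same length as the 20000-row window; a minimal sketch of the same centered moving average on toy data (assumed values, not from the original):

import numpy as np

N = 4                                  # even filter length, as in the snippet
raw = np.arange(20.0)                  # toy signal
padded = np.pad(raw, N, mode='edge')   # stands in for the N extra rows on each side
smooth = np.convolve(padded, np.ones(N) / N, mode='valid')
smooth = smooth[N // 2:len(smooth) - N // 2 - 1]
assert len(smooth) == len(raw)         # aligned with the original samples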
Example #4
import numpy
import pandas
import cv2
from sklearn.preprocessing import LabelEncoder
from keras.utils import np_utils
from keras import backend as K

K.set_image_dim_ordering('th')

# fix random seed for reproducibility
seed = 7
numpy.random.seed(seed)

images = []  # renamed from K, which would shadow the Keras backend alias
# load the dataset
dataframe = pandas.read_csv('dataset/face.csv', header=None)
dataset = dataframe.values

X_old = dataset[:, 0]
for i in X_old:
    images.append(cv2.imread(i))
X = numpy.array(images)

Y = dataset[:, 1]
encoder = LabelEncoder()
encoder.fit(Y)
encoder_Y = encoder.transform(Y)
print(Y)
print(encoder_Y)
dummy_y = np_utils.to_categorical(encoder_Y)
# split into train and test sets
train_size = int(len(dataset) * 0.67)
test_size = len(dataset) - train_size
X_train = X[0:train_size]
X_test = X[train_size:len(dataset)]
Y_train = encoder_Y[0:train_size]
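
For reference, a tiny sketch of the LabelEncoder-to-one-hot step used above (toy labels; the np_utils path matches the older Keras import in this snippet):

from sklearn.preprocessing import LabelEncoder
from keras.utils import np_utils

toy = ['cat', 'dog', 'cat']             # illustrative labels only
enc = LabelEncoder().fit(toy)
idx = enc.transform(toy)                # array([0, 1, 0])
one_hot = np_utils.to_categorical(idx)  # shape (3, 2), one column per class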