# Imports needed by the model builders in this section (tf.keras assumed; standalone Keras
# exposes the same names).  The module-level names maxL, FpLen, FpLen1, my_init and the
# permutational-layer helpers `pl` (PairwiseModel, PermutationalEncoder, PermutationalLayer1,
# repeat_layers) are expected to be defined elsewhere in this file.
from tensorflow.keras.layers import Input, Dense, average, maximum
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.utils import plot_model


def MT_model(perm):
    """Single-input-type builder: maxL 256-dimensional inputs, one permutational layer,
    average pooling over its outputs, and a 17-class softmax head."""

    def funx1(i, maxL):
        # One 256-dimensional input per component.
        if i < maxL:
            return Input(shape=(256,), name=f"MoR_{i}")

    x2 = [funx1(i, maxL) for i in range(maxL)]

    # Wire the inputs into the permutational layer in the order given by `perm`
    # (assumes maxL == 3, matching the three indices used here).
    xs = [x2[perm[0]], x2[perm[1]], x2[perm[2]]]

    # Hidden-layer widths of the pairwise sub-network (an alternative that was tried:
    # [100, 58, 58, 58]).
    list1 = [100, 100, 100, 100]

    pairwise_model = pl.PairwiseModel(
        (256,),
        pl.repeat_layers(Dense, list1, name="hidden", activation='relu'),
        name="pairwise_model")
    perm_encoder = pl.PermutationalEncoder(pairwise_model, maxL,
                                           name="permutational_encoder")
    perm_layer = pl.PermutationalLayer1(perm_encoder, name="permutational_layer")

    outputs = perm_layer.model(xs)
    outputs = average(outputs)   # pool over positions; maximum(outputs) was also tried

    output_51 = Dense(100, activation='relu', kernel_initializer=my_init)(outputs)
    # Extra Dense(200, activation='relu') blocks and a Dropout(0.5) were tried here but are
    # disabled in this configuration.
    output_Loss = Dense(17, name='Loss_output', activation='softmax',
                        kernel_initializer=my_init)(output_51)
    # output_Loss = Dense(17, name='Loss_output', activation='linear',
    #                     kernel_initializer=my_init)(output_51)

    model = Model(inputs=x2, outputs=output_Loss)
    model.compile(
        loss={'Loss_output': 'categorical_crossentropy'},
        optimizer=Adam(lr=0.00276, beta_1=0.9, beta_2=0.999, epsilon=1e-8),
        # optimizer=Adam(lr=0.00005, beta_1=0.9, beta_2=0.999, epsilon=1e-8),
        loss_weights={'Loss_output': 1.},
        metrics=['accuracy'])
    model.summary()
    plot_model(model, 'Config3Mod.png', show_shapes=True)
    return model
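# --- Illustrative usage sketch (not from the original pipeline) --------------------------
# Shows how the `perm` argument above is meant to be used: build one model per ordering of
# the three 256-dim inputs and average the class probabilities over all orderings.  The
# dummy data, batch size, epoch count and the helper name are assumptions for demonstration;
# only the builder itself comes from the code above.  `build_model=MT_model` is bound as a
# default argument so this helper keeps pointing at the single-input-type builder even
# though the name MT_model is redefined further down.
def _demo_average_over_orderings(build_model=MT_model, n_samples=32):
    from itertools import permutations
    import numpy as np

    # Dummy 256-dimensional inputs keyed by input-layer name, plus one-hot labels for the
    # 17-way softmax head.
    x_dummy = {f"MoR_{i}": np.random.rand(n_samples, 256).astype("float32") for i in range(3)}
    y_dummy = np.eye(17, dtype="float32")[np.random.randint(0, 17, size=n_samples)]

    preds = []
    for perm in permutations(range(3)):
        m = build_model(list(perm))
        m.fit(x_dummy, {'Loss_output': y_dummy}, epochs=1, batch_size=8, verbose=0)
        preds.append(m.predict(x_dummy, verbose=0))

    # Average the predicted class probabilities over the six input orderings.
    return np.mean(preds, axis=0)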
def MT_model(perm):
    """Two-input-type builder: maxL fingerprint inputs (length FpLen) plus maxL sequence
    inputs (length FpLen1), two stacked permutational layers, max pooling, and a 17-class
    softmax head.  Note: this redefines MT_model, so if both builders are kept in the same
    module this later definition shadows the single-input-type one above."""

    def funx1(i, maxL):
        if i < maxL:
            return Input(shape=(FpLen,), name=f"Mol_{i}")
        elif maxL <= i < 2 * maxL:
            return Input(shape=(FpLen1,), name=f"S2q_{i}")
        else:
            return Input(shape=(767,), name=f"Mfp2q_{i}")  # not reached for range(2 * maxL)

    x2 = [funx1(i, maxL) for i in range(2 * maxL)]

    # Interleave the two feature types so that the pair belonging to component perm[i]
    # enters the permutational layer together.
    xs = []
    for i in range(maxL):
        xs.append(x2[perm[i]])
        xs.append(x2[maxL + perm[i]])

    # Hidden-layer widths of the pairwise sub-network.  Its input shape is (256,), so this
    # configuration assumes FpLen == FpLen1 == 256.
    list1 = [112, 112, 112, 112]
    pairwise_model = pl.PairwiseModel(
        (256,),
        pl.repeat_layers(Dense, list1, name="hidden", activation='relu'),
        name="pairwise_model")
    perm_encoder = pl.PermutationalEncoder(pairwise_model, 2 * maxL,
                                           name="permutational_encoder")
    perm_layer = pl.PermutationalLayer1(perm_encoder, name="permutational_layer")
    outputs = perm_layer.model(xs)

    # Second permutational layer: a linear Dense(256) pairwise model over the 112-dim
    # outputs of the first layer.
    perm_layer4 = pl.PermutationalLayer1(
        pl.PermutationalEncoder(
            pl.PairwiseModel((112,), pl.repeat_layers(Dense, [256], activation="linear")),
            2 * maxL),
        name="permutational_layer4",
    )
    outputs = perm_layer4.model(outputs)

    outputs = maximum(outputs)   # pool over positions; Add() and average() were also tried

    output_3 = Dense(100, activation='relu', kernel_initializer=my_init)(outputs)
    output_Loss = Dense(17, name='Loss_output', activation='softmax',
                        kernel_initializer=my_init)(output_3)
    # output_Loss = Dense(17, name='Loss_output', activation='sigmoid',
    #                     kernel_initializer=my_init)(output_3)

    model = Model(inputs=x2, outputs=output_Loss)
    model.compile(
        loss={'Loss_output': 'categorical_crossentropy'},
        # loss={'Loss_output': 'binary_crossentropy'},  # pairs with the sigmoid output above
        optimizer=Adam(lr=0.00276, beta_1=0.9, beta_2=0.999, epsilon=1e-8),
        # optimizer=Adam(lr=0.00005, beta_1=0.9, beta_2=0.999, epsilon=1e-8),
        loss_weights={'Loss_output': 1.},
        metrics=['accuracy'])
    model.summary()
    plot_model(model, 'Config3Mod.png', show_shapes=True)
    return model
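# --- Illustrative feed-dict sketch for the two-input-type builder ------------------------
# Shows the input layout the builder above expects: Mol_0 .. Mol_{maxL-1} followed by
# S2q_{maxL} .. S2q_{2*maxL-1}, keyed by input-layer name.  The random data, n_samples,
# maxL == 3 and FpLen == FpLen1 == 256 (required by the (256,) pairwise model) are
# assumptions for demonstration only.
def _demo_feed_dict_two_types(n_samples=16, maxL=3, FpLen=256, FpLen1=256):
    import numpy as np

    feed = {}
    for i in range(maxL):
        feed[f"Mol_{i}"] = np.random.rand(n_samples, FpLen).astype("float32")
        feed[f"S2q_{maxL + i}"] = np.random.rand(n_samples, FpLen1).astype("float32")
    labels = {'Loss_output': np.eye(17, dtype="float32")[np.random.randint(0, 17, size=n_samples)]}
    return feed, labels

# Example call (assumed, not part of the original script):
#   model = MT_model([0, 1, 2])
#   x, y = _demo_feed_dict_two_types()
#   model.fit(x, y, epochs=1, batch_size=8)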