Example #1
def test_phagraphNN_generateFromSmiles_file():
    import phagraphnn.utilities as ut
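    # CDPLmolFromSmiles() builds a CDPL molecule from a SMILES input; when the second
    # argument is True a 3D conformation is generated, which adds explicit hydrogens
    # (hence the higher atom count below)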
    path = "./tests/data/twoD.smi"
    mol = ut.CDPLmolFromSmiles(path,False)
    assert(len(mol.atoms) == 22) #2D
    mol_conf = ut.CDPLmolFromSmiles(path,True)
    assert(len(mol_conf.atoms) == 37) #3D
Example #2
def test_phagraphNN_generateFromSmiles_smile():
    import phagraphnn.utilities as ut

    data = ut.readChemblXls("./tests/data/CHE_3.xls")
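    # each row returned by readChemblXls() holds the SMILES string in column 1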
    mol = ut.CDPLmolFromSmiles(data[2][1],False) # same SMILES as in twoD.smi
    mol_conf = ut.CDPLmolFromSmiles(data[2][1],True) # same SMILES as in twoD.smi

    assert(len(mol.atoms) == 22) #2D
    assert(len(mol_conf.atoms) == 37) #3D with Hydrogens
Example #3
def test_PhaGat2_classification():
    import phagraphnn.utilities as ut
    from phagraphnn.PhaGraph import PhaGraph,PhaNode

    data = ut.readChemblXls("./tests/data/CHE_3.xls")
    graph_list = []
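    # build one ligand-only pharmacophore graph per molecule and tag each with a fixed (0, 1) class label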
    for i in range(0,len(data)):
        graph = PhaGraph()
        mol = ut.CDPLmolFromSmiles(data[i][1],True)
        graph(ut.CDPLphaGenerator(None,mol,"lig_only"))
        graph.setProperty("ic50",(0,1))
        graph_list.append(graph)
    from phagraphnn.DataPreparer import DataPreparer
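    # the DataPreparer batches the graphs (the second argument is presumably the batch size)
    # and extracts the "ic50" property as the training target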
    loader = DataPreparer(graph_list,3,property_string="ic50",mpn="gru",is_path=False)

    from phagraphnn.PhaGatModel2 import PhaGatModel2 as gat
    import tensorflow as tf
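    # MLP head appended after the graph encoding: expects a 128-dim input and outputs two class logits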

    seq = tf.keras.Sequential([
        tf.keras.layers.Dense(16, activation='relu', input_shape=(128,), name="first_layer"),
        tf.keras.layers.Dense(8, activation='relu', name="second_layer"),
        tf.keras.layers.Dense(2, activation=None)], name="output_NN")

    model = gat(hidden_dim=32, output_nn=seq, regression=False)
    lr = 0.001
    model.compile(loss=tf.keras.losses.BinaryCrossentropy(),
                  optimizer=tf.keras.optimizers.Adam(lr))
    rec = tf.keras.metrics.MeanAbsoluteError()  # metric instance (not used in the loop below)
    for batch in loader:
        inputs, af, other = batch
        model(inputs)  # one forward pass to build the model's weights
        for epoch in range(0, 10):
            pred, loss = model.train(inputs=inputs, outputs=af, learning_rate=lr)
Example #4
def test_PhaGru():
    import phagraphnn.utilities as ut
    from phagraphnn.PhaGraph import PhaGraph,PhaNode

    data = ut.readChemblXls("./tests/data/CHE_3.xls")
    graph_list = []
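    # build one ligand-only pharmacophore graph per molecule and attach its measured
    # ic50 value (column 2 of the spreadsheet) as a property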
    for i in range(0,len(data)):
        graph = PhaGraph()
        mol = ut.CDPLmolFromSmiles(data[i][1],True)
        graph(ut.CDPLphaGenerator(None,mol,"lig_only"))
        graph.setProperty("ic50",data[i][2])
        graph_list.append(graph)
    from phagraphnn.DataPreparer import DataPreparer
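    # the DataPreparer batches the graphs and uses the measured ic50 values as regression targets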
    loader = DataPreparer(graph_list,3,property_string="ic50",mpn="gru")

    from phagraphnn.PhaGruMPN import PhaGruMPN as gru
    from phagraphnn.PhaGruMPN2 import PhaGruMPN2 as gru2
    from phagraphnn.PhaGruMPN3 import PhaGruMPN3 as gru3
    import tensorflow as tf
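    # three identical regression heads (one per GRU variant), each ending in a single linear unit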

    seq = tf.keras.Sequential([
        tf.keras.layers.Dense(16, activation='relu', input_shape=(32,), name="first_layer"),
        tf.keras.layers.Dense(8, activation='relu', name="second_layer"),
        tf.keras.layers.Dense(1, activation=None)], name="output_NN")

    seq2 = tf.keras.Sequential([
        tf.keras.layers.Dense(16, activation='relu', input_shape=(32,), name="first_layer"),
        tf.keras.layers.Dense(8, activation='relu', name="second_layer"),
        tf.keras.layers.Dense(1, activation=None)], name="output_NN")

    seq3 = tf.keras.Sequential([
        tf.keras.layers.Dense(16, activation='relu', input_shape=(32,), name="first_layer"),
        tf.keras.layers.Dense(8, activation='relu', name="second_layer"),
        tf.keras.layers.Dense(1, activation=None)], name="output_NN")

    model = gru(32, 3, seq)
    model2 = gru2(32, 3, seq2)
    model3 = gru3(32, 3, seq3)

    lr = 0.001
    model.compile(loss=tf.keras.losses.mse,
                  optimizer=tf.keras.optimizers.Adam(lr))

    model2.compile(loss=tf.keras.losses.mse,
                   optimizer=tf.keras.optimizers.Adam(lr))

    model3.compile(loss=tf.keras.losses.mse,
                   optimizer=tf.keras.optimizers.Adam(lr))
    rec = tf.keras.metrics.MeanAbsoluteError()  # metric instance (not used in the loop below)
    for batch in loader:
        inputs, af, other = batch
        model(inputs)  # forward passes to build the models' weights
        model2(inputs)
        model3(inputs)
        for epoch in range(0, 10):
            pred, loss = model.train(inputs=inputs, outputs=af, learning_rate=lr)
            pred, loss = model2.train(inputs=inputs, outputs=af, learning_rate=lr)
            pred, loss = model3.train(inputs=inputs, outputs=af, learning_rate=lr)
Example #5
import tensorflow as tf
import phagraphnn.utilities as ut
from phagraphnn.PhaGraph import PhaGraph, PhaNode
from phagraphnn.DataPreparer import DataPreparer
from phagraphnn.PhaGatModel import PhaGatModel as gat
from phagraphnn.PhaGatModel2 import PhaGatModel2 as gat2
from phagraphnn.PhaGruMPN import PhaGruMPN as gru
from phagraphnn.PhaGruMPN2 import PhaGruMPN2 as gru2
from phagraphnn.PhaGruMPN3 import PhaGruMPN3 as gru3

#%%
### load data
data = ut.readChemblXls("./tests/data/CHE_3.xls")
graph_list = []
for i in range(0, len(data)):
    graph = PhaGraph()
    mol = ut.CDPLmolFromSmiles(data[i][1], True)
    graph(ut.CDPLphaGenerator(None, mol, "lig_only"))
    graph.setProperty("ic50", data[i][2])
    graph_list.append(graph)
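# the DataPreparer batches the graphs; mpn="gat" prepares them for the graph-attention models imported above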
loader = DataPreparer(graph_list,
                      3,
                      property_string="ic50",
                      mpn="gat",
                      is_path=False)

#%%
### generate network

# this is the MLP part; it is appended after the "encoding" of the graph
seq_gat = tf.keras.Sequential([
    tf.keras.layers.Dense(