Code example #1
def test_two_hop():
    assert TwoHop().__repr__() == 'TwoHop()'

    edge_index = torch.tensor([[0, 0, 0, 1, 2, 3], [1, 2, 3, 0, 0, 0]])
    edge_attr = torch.tensor([1, 2, 3, 1, 2, 3], dtype=torch.float)
    data = Data(edge_index=edge_index, edge_attr=edge_attr)

    data = TwoHop()(data)
    edge_index, edge_attr = data.edge_index, data.edge_attr

    assert edge_index.tolist() == [[0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3],
                                   [1, 2, 3, 0, 2, 3, 0, 1, 3, 0, 1, 2]]
    assert edge_attr.tolist() == [1, 2, 3, 1, 0, 0, 2, 0, 0, 3, 0, 0]
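The test above shows the behavior of PyTorch Geometric's TwoHop transform: it inserts an edge between every pair of nodes that are two hops apart and zero-fills edge_attr for the newly created edges. To run the test on its own, roughly the following imports are needed (a minimal sketch assuming the standard torch_geometric package layout):

import torch
from torch_geometric.data import Data
from torch_geometric.transforms import TwoHop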
Code example #2
File: test_two_hop.py  Project: LONG-9621/SplineCNN
def test_two_hop():
    assert TwoHop().__repr__() == 'TwoHop()'

    edge_index = torch.tensor([[0, 0, 0, 1, 2, 3], [1, 2, 3, 0, 0, 0]])
    edge_attr = torch.tensor([1, 2, 3, 1, 2, 3], dtype=torch.float)

    data = Data(edge_index=edge_index, edge_attr=edge_attr, num_nodes=4)
    data = TwoHop()(data)
    assert len(data) == 2
    assert data.edge_index.tolist() == [[0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3],
                                        [1, 2, 3, 0, 2, 3, 0, 1, 3, 0, 1, 2]]
    assert data.edge_attr.tolist() == [1, 2, 3, 1, 0, 0, 2, 0, 0, 3, 0, 0]

    data = Data(edge_index=edge_index, num_nodes=4)
    data = TwoHop()(data)
    assert len(data) == 1
    assert data.edge_index.tolist() == [[0, 0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3],
                                        [1, 2, 3, 0, 2, 3, 0, 1, 3, 0, 1, 2]]
Code example #3
File: utils.py  Project: dcoukos/masif-tools
def apply_pretransforms(pre_transforms=None):
    from dataset import Structures
    # Structures should already check whether these pre_transforms have been computed.
    if pre_transforms is None:
        trainset = Structures(root='./datasets/{}_train/'.format(p.dataset),
                              pre_transform=Compose(
                                  (FaceAttributes(), NodeCurvature(),
                                   FaceToEdge(), TwoHop())))
        testset = Structures(root='./datasets/{}_test/'.format(p.dataset),
                             pre_transform=Compose(
                                 (FaceAttributes(), NodeCurvature(),
                                  FaceToEdge(), TwoHop())))
    else:
        trainset = Structures(root='./datasets/{}_train/'.format(p.dataset),
                              pre_transform=pre_transforms)
        testset = Structures(root='./datasets/{}_test/'.format(p.dataset),
                             pre_transform=pre_transforms)
    return trainset, testset
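A hypothetical call site for this helper; the print statement is only illustrative, but the function itself returns the train and test datasets with the pre-transforms applied:

# Pre-transforms are applied once; Structures presumably caches the processed data
# (see the comment inside apply_pretransforms above).
trainset, testset = apply_pretransforms()
print(len(trainset), len(testset))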
Code example #4
File: 3hops.py  Project: dcoukos/masif-tools
np.random.seed(p.random_seed)
learn_rate = p.learn_rate
modelpath = make_model_directory()

if str(device) == 'cuda:0':
    epochs = p.epochs
else:
    epochs = 20

# ---- Importing and structuring Datasets and Model ----
print('Importing structures.')
# Remember!!! The shape index can only be computed locally. Add other transforms after the
# pre_transform step so as not to contaminate the data.
trainset = Structures(root='./datasets/masif_site_train/',
                      pre_transform=Compose((FaceAttributes(), NodeCurvature(),
                                             FaceToEdge(), TwoHop())))
# Define the transform inside the epoch loop, so that rotation occurs around a different axis every time.
validset = Structures(root='./datasets/masif_site_test/',
                      pre_transform=Compose((FaceAttributes(), NodeCurvature(),
                                             FaceToEdge(), TwoHop())))

if p.shuffle_dataset:
    trainset = trainset.shuffle()
n_features = trainset.get(0).x.shape[1]

# ---- Import previous model to allow deep network to train -------------

model = p.model_type(9, heads=p.heads).to(cpu)

model.to(device)
optimizer = torch.optim.Adam(model.parameters(),
Code example #5
np.random.seed(p.random_seed)
learn_rate = p.learn_rate
modelpath = make_model_directory()

if str(device) == 'cuda:0':
    epochs = p.epochs
else:
    epochs = 20

# ---- Importing and structuring Datasets and Model ----
print('Importing structures.')
# Remember!!! The shape index can only be computed locally. Add other transforms after the
# pre_transform step so as not to contaminate the data.
dataset = Structures(root='./datasets/{}_train/'.format(p.dataset),
                     pre_transform=Compose((FaceAttributes(), NodeCurvature(),
                                            FaceToEdge(), TwoHop())),
                     transform=AddShapeIndex())

samples = len(dataset)
assert (p.validation_split < 0.3)
cutoff = int(np.floor(samples * (1 - p.validation_split)))
trainset = dataset[:cutoff]
validset = dataset[cutoff:]
maskedset = validset[:int(len(validset) / 2)]
validset = validset[int(len(validset) / 2):]

if p.shuffle_dataset:
    trainset = trainset.shuffle()
n_features = trainset.get(0).x.shape[1]
print('Setting up model...')
models = [
Code example #6
File: utils.py  Project: dcoukos/masif-tools
def generate_surface(model_type,
                     model_path,
                     pdb_code,
                     use_structural_data=False):
    '''
        Save the surface prediction for a particular structure.
    '''
    from dataset import read_ply
    '''
    converter = Compose((Center(), FaceAttributes(),
                        NodeCurvature(), FaceToEdge(),
                        TwoHop(), AddShapeIndex()))
    '''
    converter = Compose((Center(), FaceAttributes(), NodeCurvature(),
                         FaceToEdge(), TwoHop(), AddShapeIndex()))
    path = glob('./structures/test/{}.ply'.format(pdb_code))[0]
    name = path.split('/')[-1]
    structure = read_ply(path)

    face = structure.face
    structure = converter(structure)

    device = torch.device('cpu')
    structure.x.shape[1]
    if p.heads is not None:
        model = model_type(structure.x.shape[1], heads=p.heads)
    else:
        model = model_type(structure.x.shape[1])
    model.load_state_dict(torch.load(model_path, map_location=device))
    model.eval()

    prediction = model(structure)
    rounded = prediction.round()

    # ---- Make directory ---
    dir = model_path.split('models/', 1)[1].split('.')[0]
    full_path = os.path.expanduser(
        '~/Desktop/Drawer/LPDI/masif-tools/surfaces/' + dir)
    folder = full_path.rsplit('/', 1)[0]
    if not os.path.exists(folder):
        os.mkdir(folder)
    if not os.path.exists(full_path):
        os.mkdir(full_path)

    save_ply(filename='./surfaces/{}/{}'.format(dir, name),
             vertices=structure.pos.detach().numpy(),
             normals=structure.norm.detach().numpy(),
             faces=face.t().detach().numpy(),
             charges=structure.x[:, 0].reshape(-1, 1).detach().numpy(),
             hbond=structure.x[:, 1].reshape(-1, 1).detach().numpy(),
             hphob=structure.x[:, 2].reshape(-1, 1).detach().numpy(),
             iface=prediction.detach().numpy())

    save_ply(filename='./surfaces/{}/r_{}'.format(dir, name),
             vertices=structure.pos.detach().numpy(),
             normals=structure.norm.detach().numpy(),
             faces=face.t().detach().numpy(),
             charges=structure.x[:, 0].reshape(-1, 1).detach().numpy(),
             hbond=structure.x[:, 1].reshape(-1, 1).detach().numpy(),
             hphob=structure.x[:, 2].reshape(-1, 1).detach().numpy(),
             iface=rounded.detach().numpy())
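A hypothetical invocation of generate_surface; the model path and PDB code below are placeholders rather than values taken from the project:

generate_surface(model_type=p.model_type,
                 model_path='models/example_model.pt',
                 pdb_code='1abc')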
Code example #7
File: 4hops.py  Project: dcoukos/masif-tools
np.random.seed(p.random_seed)
learn_rate = p.learn_rate
modelpath = make_model_directory()

if str(device) == 'cuda:0':
    epochs = p.epochs
else:
    epochs = 20

# ---- Importing and structuring Datasets and Model ----
print('Importing structures.')
# Remember!!! The shape index can only be computed locally. Add other transforms after the
# pre_transform step so as not to contaminate the data.
trainset = Structures(root='./datasets/masif_site_train/',
                      pre_transform=Compose((FaceAttributes(), NodeCurvature(),
                                             FaceToEdge(), TwoHop())))
# Define the transform inside the epoch loop, so that rotation occurs around a different axis every time.
validset = Structures(root='./datasets/masif_site_test/',
                      pre_transform=Compose((FaceAttributes(), NodeCurvature(),
                                             FaceToEdge(), TwoHop())))

if p.shuffle_dataset:
    trainset = trainset.shuffle()
n_features = trainset.get(0).x.shape[1]

# ---- Import previous model to allow deep network to train -------------

model = p.model_type(9, heads=p.heads).to(cpu)

model.to(device)
optimizer = torch.optim.Adam(model.parameters(),
Code example #8
learn_rate = p.learn_rate
modelpath = make_model_directory()

if str(device) == 'cuda:0':
    epochs = p.epochs
else:
    epochs = 20

# ---- Importing and structuring Datasets and Model ----
print('Importing structures.')
# Remember!!! The shape index can only be computed locally. Add other transforms after the
# pre_transform step so as not to contaminate the data.
dataset = Structures(root='./datasets/{}_train/'.format(p.dataset),
                      pre_transform=Compose((FaceAttributes(),
                                             NodeCurvature(), FaceToEdge(),
                                             TwoHop())),
                      transform=Compose((AddShapeIndex(), Center(), AddPositionalData())))


samples = len(dataset)
assert(p.validation_split < 0.3)
cutoff = int(np.floor(samples*(1-p.validation_split)))
trainset = dataset[:cutoff]
validset = dataset[cutoff:]
maskedset = validset[:int(len(validset)/2)]
validset = validset[int(len(validset)/2):]


if p.shuffle_dataset:
    trainset = trainset.shuffle()
n_features = trainset.get(0).x.shape[1]
Code example #9
File: transforms.py  Project: dcoukos/masif-tools
def __init__(self, hops):
    super(MultiHop, self).__init__()
    self.nhops = hops
    self.converter = TwoHop()
    assert hops > 1
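The excerpt only shows the constructor of MultiHop; its __call__ is not included. A minimal sketch of how the stored TwoHop converter might be applied repeatedly (an assumption, not the project's actual implementation):

def __call__(self, data):
    # Each pass of TwoHop connects nodes that are within two hops
    # of each other in the current, already densified edge set.
    for _ in range(self.nhops - 1):
        data = self.converter(data)
    return data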
Code example #10
dataset

# Applying pretransformation once and saving datasets!
# New strategy... load the data first, without pretransforms,
# then load into a dataloader and batch apply the transforms.

# Why is it not using gpu?
from utils import apply_pretransforms
from dataset import Structures_SI, Structures_SI_mem, Structures
from torch_geometric.transforms import *
from transforms import *

dataset = Structures_SI(root='./datasets/thous_train/',
                        pre_transform=Compose(
                            (Center(), FaceAttributes(), NodeCurvature(),
                             FaceToEdge(), TwoHop(), AddShapeIndex())))
dataset = Structures_SI_mem(root='./datasets/thous_train/',
                            pre_transform=Compose(
                                (Center(), FaceAttributes(), NodeCurvature(),
                                 FaceToEdge(), TwoHop(), AddShapeIndex())))

dataset[0]

dataset[0]

apply_pretransforms()

# Checking why my transformations keep crashing...
import torch
from torch_geometric.transforms import Compose, FaceToEdge, TwoHop, Center
from transforms import *
Code example #11
device = torch.device('cuda:0')
# reproducibility
torch.manual_seed(p.random_seed)
np.random.seed(p.random_seed)
learn_rate = p.learn_rate
modelpath = make_model_directory()

if str(device) == 'cuda:0':
    epochs = p.epochs
else:
    epochs = 20

# ---- Importing and structuring Datasets and Model ----
if p.twohop is True:
    print("Adding two-hop edges to data graphs")
    converter = TwoHop()
else:
    converter = None

print('Importing structures.')
trainset = Structures(root='./datasets/{}_train/'.format(p.dataset),
                      prefix=p.dataset)
samples = len(trainset)
cutoff = int(np.floor(samples*(1-p.validation_split)))
validset = trainset[cutoff:]
trainset = trainset[:cutoff]


if p.shuffle_dataset:
    trainset = trainset.shuffle()
n_features = trainset.get(0).x.shape[1]
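The snippet stops before the converter is used. One plausible way to apply it per sample during training, assuming the rest of the loop (which is not shown in the excerpt):

for data in trainset:
    if converter is not None:
        data = converter(data)  # add two-hop edges before the forward pass
    # ... forward pass, loss computation, optimizer step ...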