Example #1
def apply_pretransforms(pre_transforms=None):
    from torch_geometric.transforms import Compose, FaceToEdge, TwoHop
    from transforms import FaceAttributes, NodeCurvature
    import params as p
    from dataset import Structures
    # Structures should already check whether these pre_transforms have been computed.
    if pre_transforms is None:
        trainset = Structures(root='./datasets/{}_train/'.format(p.dataset),
                              pre_transform=Compose(
                                  (FaceAttributes(), NodeCurvature(),
                                   FaceToEdge(), TwoHop())))
        testset = Structures(root='./datasets/{}_test/'.format(p.dataset),
                             pre_transform=Compose(
                                 (FaceAttributes(), NodeCurvature(),
                                  FaceToEdge(), TwoHop())))
    else:
        trainset = Structures(root='./datasets/{}_train/'.format(p.dataset),
                              pre_transform=pre_transforms)
        testset = Structures(root='./datasets/{}_test/'.format(p.dataset),
                             pre_transform=pre_transforms)
    return trainset, testset
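
# Note on the caching comment above: this is standard PyTorch Geometric
# behaviour. An InMemoryDataset runs its pre_transform once inside process()
# and stores the result under root/processed/, so re-instantiating with the
# same root skips the expensive transforms. A minimal sketch of that
# mechanism (TinyStructures and its placeholder graphs are hypothetical,
# not the project's Structures class):
import torch
from torch_geometric.data import Data, InMemoryDataset

class TinyStructures(InMemoryDataset):
    def __init__(self, root, transform=None, pre_transform=None):
        super().__init__(root, transform, pre_transform)
        # data.pt exists after the first run, so process() is then skipped.
        self.data, self.slices = torch.load(self.processed_paths[0])

    @property
    def raw_file_names(self):
        return []  # nothing to download in this sketch

    @property
    def processed_file_names(self):
        return ['data.pt']

    def download(self):
        pass

    def process(self):
        # Placeholder graphs; a real dataset would read meshes from disk.
        data_list = [Data(x=torch.randn(5, 3)) for _ in range(10)]
        if self.pre_transform is not None:
            data_list = [self.pre_transform(d) for d in data_list]
        torch.save(self.collate(data_list), self.processed_paths[0])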
Example #2
torch.manual_seed(p.random_seed)
np.random.seed(p.random_seed)
learn_rate = p.learn_rate
modelpath = make_model_directory()

if str(device) == 'cuda:0':
    epochs = p.epochs
else:
    epochs = 20

# ---- Importing and structuring Datasets and Model ----
print('Importing structures.')
# Remember: the shape index can only be computed locally. Add other transforms
# after the pre_transform step so as not to contaminate the data.
trainset = Structures(root='./datasets/masif_site_train/',
                      pre_transform=Compose((FaceAttributes(), NodeCurvature(),
                                             FaceToEdge(), TwoHop())))
# Define the transform per epoch, so that rotation occurs around a different axis each time (sketched below).
validset = Structures(root='./datasets/masif_site_test/',
                      pre_transform=Compose((FaceAttributes(), NodeCurvature(),
                                             FaceToEdge(), TwoHop())))

if p.shuffle_dataset:
    trainset = trainset.shuffle()
n_features = trainset.get(0).x.shape[1]

# ---- Import previous model to allow deep network to train -------------

model = p.model_type(9, heads=p.heads).to(device)
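
# The rotation comment above relies on the transform argument, which PyG
# applies lazily on every access rather than once at preprocessing time.
# A hedged sketch of such a per-epoch random rotation using the stock
# RandomRotate transform (the degree range and axes are assumptions):
from torch_geometric.transforms import Compose, RandomRotate

random_rotate = Compose((RandomRotate(degrees=180, axis=0),
                         RandomRotate(degrees=180, axis=1),
                         RandomRotate(degrees=180, axis=2)))
trainset.transform = random_rotate  # re-applied on every __getitem__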
Example #3
# Checking why my transformations keep crashing...
import torch
from torch_geometric.transforms import Compose, FaceToEdge, TwoHop, Center
from transforms import *
import params as p
from dataset import Structures

dataset = Structures(pre_transform=Compose((Center(), FaceAttributes(),
                                            NodeCurvature(), FaceToEdge(),
                                            TwoHop())))
dataset[0]

apply_pretransforms()

# Re-run the pre-transform chain by hand on individual samples to find the crash.
data = Structures(root='./datasets/thous/')[234]
pre_transform = Compose(
    (Center(), FaceAttributes(), NodeCurvature(), FaceToEdge(), TwoHop()))
data1 = dataset[233]
data2 = dataset[234]
data3 = dataset[235]
data2

data1 = pre_transform(data1)
data2 = pre_transform(data2)
data3 = pre_transform(data3)

data2
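
# One way to localize the crash hunted above: apply the stages one at a
# time instead of through Compose, so the traceback names the offending
# transform. This assumes the './datasets/thous/' samples are still
# untransformed:
sample = Structures(root='./datasets/thous/')[234]
for stage in (Center(), FaceAttributes(), NodeCurvature(), FaceToEdge(),
              TwoHop()):
    sample = stage(sample)
    print(type(stage).__name__, 'passed')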
Example #4
torch.manual_seed(p.random_seed)
np.random.seed(p.random_seed)
learn_rate = p.learn_rate
modelpath = make_model_directory()

if str(device) == 'cuda:0':
    epochs = p.epochs
else:
    epochs = 20

# ---- Importing and structuring Datasets and Model ----
print('Importing structures.')
# Remember: the shape index can only be computed locally. Add other transforms
# after the pre_transform step so as not to contaminate the data.
dataset = Structures(root='./datasets/{}_train/'.format(p.dataset),
                     pre_transform=Compose((FaceAttributes(), NodeCurvature(),
                                            FaceToEdge(), TwoHop())),
                     transform=AddShapeIndex())

samples = len(dataset)
assert p.validation_split < 0.3
cutoff = int(np.floor(samples * (1 - p.validation_split)))
trainset = dataset[:cutoff]
validset = dataset[cutoff:]
maskedset = validset[:int(len(validset) / 2)]
validset = validset[int(len(validset) / 2):]

if p.shuffle_dataset:
    trainset = trainset.shuffle()
n_features = trainset.get(0).x.shape[1]
print('Setting up model...')
models = [
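
# A quick worked check of the split arithmetic above (the sample count is
# hypothetical): with 1000 graphs and validation_split = 0.1, cutoff is
# 900, so trainset gets 900 graphs and the remaining 100 are halved into
# maskedset (50) and validset (50).
samples, validation_split = 1000, 0.1
cutoff = int(np.floor(samples * (1 - validation_split)))
assert cutoff == 900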
Example #5
torch.manual_seed(p.random_seed)
np.random.seed(p.random_seed)
learn_rate = p.learn_rate
modelpath = make_model_directory()

if str(device) == 'cuda:0':
    epochs = p.epochs
else:
    epochs = 20

# ---- Importing and structuring Datasets and Model ----
print('Importing structures.')
# Remember: the shape index can only be computed locally. Add other transforms
# after the pre_transform step so as not to contaminate the data.
trainset = Structures(root='./datasets/{}_train/'.format(p.dataset),
                      pre_transform=Compose((FaceAttributes(), NodeCurvature(),
                                             FaceToEdge(), TwoHop())),
                      transform=AddShapeIndex())
trainset[0].x.shape
torch.cat((trainset[0].x, trainset[0].pos, trainset[0].norm), dim=1).shape
samples = len(trainset)

cutoff = int(np.floor(samples * (1 - p.validation_split)))
validset = trainset[cutoff:]
trainset = trainset[:cutoff]

if p.shuffle_dataset:
    trainset = trainset.shuffle()
n_features = trainset.get(0).x.shape[1]
print('Setting up model...')
paths = [
    './models/Feb16_14:09_20b/best_0.pt', './models/Feb16_14:09_20b/best_1.pt',
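
# The paths list is cut off here; it presumably enumerates ensemble
# checkpoints. A loose sketch of loading them, assuming each .pt file
# holds a state_dict for p.model_type (the constructor arguments mirror
# the other snippets but are assumptions):
models = []
for path in paths:
    m = p.model_type(n_features, heads=p.heads)
    m.load_state_dict(torch.load(path, map_location=device))
    models.append(m.to(device).eval())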
Example #6
np.random.seed(p.random_seed)
learn_rate = p.learn_rate
modelpath = make_model_directory()

if str(device) == 'cuda:0':
    epochs = p.epochs
else:
    epochs = 20

# ---- Importing and structuring Datasets and Model ----
print('Importing structures.')
# Remember: the shape index can only be computed locally. Add other transforms
# after the pre_transform step so as not to contaminate the data.
trainset = Structures(root='./datasets/masif_site_train/',
                      pre_transform=Compose((FaceAttributes(), NodeCurvature(),
                                             FaceToEdge(), TwoHop())))
# Define the transform per epoch, so that rotation occurs around a different axis each time.
validset = Structures(root='./datasets/masif_site_test/',
                      pre_transform=Compose((FaceAttributes(), NodeCurvature(),
                                             FaceToEdge(), TwoHop())))

if p.shuffle_dataset:
    trainset = trainset.shuffle()
n_features = trainset.get(0).x.shape[1]

# ---- Import previous model to allow deep network to train -------------
model = p.model_type(3).to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=learn_rate, weight_decay=p.weight_decay)

writer = SummaryWriter(comment='model:{}_lr:{}_shuffle:{}_seed:{}'.format(
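
# The SummaryWriter call above is cut off mid-format. Once constructed, the
# writer is typically used like this (tag names and values here are
# placeholders, not from the source):
for epoch in range(epochs):
    writer.add_scalar('loss/train', 0.0, epoch)  # placeholder loss value
writer.close()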
Example #7
modelpath = make_model_directory()

if str(device) == 'cuda:0':
    epochs = p.epochs
else:
    epochs = 20

# ---- Importing and structuring Datasets and Model ----
print('Importing structures.')
# Remember: the shape index can only be computed locally. Add other transforms
# after the pre_transform step so as not to contaminate the data.
dataset = Structures(root='./datasets/{}_train/'.format(p.dataset),
                     pre_transform=Compose((FaceAttributes(), NodeCurvature(),
                                            FaceToEdge(), TwoHop())),
                     transform=Compose(
                         (AddShapeIndex(),
                          BlockModelApply((4, 4, 4), [
                              './models/Feb16_14:09_20b/best_0.pt',
                              './models/Feb16_14:09_20b/best_1.pt',
                              './models/Feb16_14:09_20b/best_2.pt'
                          ]))))

samples = len(dataset)
assert p.validation_split < 0.3
cutoff = int(np.floor(samples * (1 - p.validation_split)))
trainset = dataset[:cutoff]
validset = dataset[cutoff:]
maskedset = validset[:int(len(validset) / 2)]
validset = validset[int(len(validset) / 2):]

if p.shuffle_dataset:
    trainset = trainset.shuffle()
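
# BlockModelApply looks like a project-specific transform that runs the
# listed checkpoints over each graph; its implementation is not shown in
# the source. As a loose illustration only (every name and the assumed
# model signature below are hypothetical, NOT the project's class), a
# transform that appends a frozen model's node-wise output as extra
# features could be structured like this:
import torch

class FrozenModelFeatures:
    def __init__(self, model):
        self.model = model.eval()

    @torch.no_grad()
    def __call__(self, data):
        out = self.model(data)  # assumed: the model consumes a Data object
        data.x = torch.cat((data.x, out), dim=1)
        return data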
Example #8
modelpath = make_model_directory()

if str(device) == 'cuda:0':
    epochs = p.epochs
else:
    epochs = 20

# ---- Importing and structuring Datasets and Model ----
if p.twohop:
    print("Adding two-hop edges to data graphs")
    converter = TwoHop()
else:
    converter = None

print('Importing structures.')
trainset = Structures(root='./datasets/{}_train/'.format(p.dataset),
                      prefix=p.dataset)
samples = len(trainset)
cutoff = int(np.floor(samples*(1-p.validation_split)))
validset = trainset[cutoff:]
trainset = trainset[:cutoff]


if p.shuffle_dataset:
    trainset = trainset.shuffle()
n_features = trainset.get(0).x.shape[1]
print('Setting up model...')
model = p.model_type(6, heads=p.heads)
model = DataParallel(model).to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=learn_rate, weight_decay=p.weight_decay)
# scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min',
#                                                       factor=p.lr_decay,
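
# Worth noting for the DataParallel wrapper above: PyTorch Geometric's
# DataParallel consumes Python lists of Data objects rather than collated
# batches, so it is paired with DataListLoader instead of the usual
# DataLoader. A minimal sketch (the batch size is an assumption):
from torch_geometric.data import DataListLoader

train_loader = DataListLoader(trainset, batch_size=8, shuffle=True)
for data_list in train_loader:
    out = model(data_list)  # DataParallel scatters the list across GPUs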
Example #9
np.random.seed(p.random_seed)
learn_rate = p.learn_rate
modelpath = make_model_directory()
coverage = p.coverage
hops = p.hops

if str(device) == 'cuda:0':
    epochs = p.epochs
else:
    epochs = 20

# ---- Importing and structuring Datasets and Model ----
print('Importing structures.')
# Remember: the shape index can only be computed locally. Add other transforms
# after the pre_transform step so as not to contaminate the data.
trainset = Structures(root='./datasets/masif_site_train/',
                      transform=AddShapeIndex())
validset = Structures(root='./datasets/masif_site_test/',
                      transform=AddShapeIndex())

# What exactly is a Structures dataset again?
trainset[0]

if p.shuffle_dataset:
    trainset = trainset.shuffle()
n_features = trainset.get(0).x.shape[1]
print('Setting up model...')
model = p.model_type(4, 4)
optimizer = torch.optim.Adam(model.parameters(),
                             lr=learn_rate,
                             weight_decay=p.weight_decay)
# scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min',
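
# The scheduler line is commented out and cut off here. For reference, a
# typical ReduceLROnPlateau setup consistent with the visible fragment
# would be (patience is an assumption; p.lr_decay appears in the source):
# scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer,
#                                                        mode='min',
#                                                        factor=p.lr_decay,
#                                                        patience=10)
# ...then after each validation pass: scheduler.step(val_loss)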