Example #1
    def setup(self, stage: Optional[str] = None):
        # load the SideChainNet dataloaders for the configured CASP version
        dataloaders = sidechainnet.load(
            casp_version=self.casp_version,
            scn_dir=self.scn_dir,
            with_pytorch="dataloaders",
        )
        print(dataloaders.keys())  # ['train', 'train_eval', 'valid-10', ..., 'valid-90', 'test']

        # wrap each split's underlying dataset; the second argument is the
        # maximum sequence length used for that split
        self.train = ScnDataset(dataloaders["train"].dataset,
                                self.train_max_len)
        self.val = ScnDataset(dataloaders["valid-90"].dataset,
                              self.eval_max_len)
        self.test = ScnDataset(dataloaders["test"].dataset, self.eval_max_len)
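
# ScnDataset is defined elsewhere in the project. For orientation only, a
# minimal wrapper with the same constructor signature might look like the
# sketch below (an assumption about its behavior, not the actual class):
# it holds the underlying SideChainNet dataset together with the maximum
# sequence length configured for that split.
from torch.utils.data import Dataset

class ScnDataset(Dataset):
    def __init__(self, dataset, max_len):
        self.dataset = dataset
        self.max_len = max_len

    def __len__(self):
        return len(self.dataset)

    def __getitem__(self, idx):
        return self.dataset[idx]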
Example #2
    # pairwise Euclidean distances between all positions
    distances = torch.cdist(coords, coords, p=2)
    # evenly spaced bucket boundaries between 2 and 20 angstroms
    boundaries = torch.linspace(2,
                                20,
                                steps=DISTOGRAM_BUCKETS,
                                device=coords.device)
    # map each distance to a bucket index; dropping the last boundary keeps
    # exactly DISTOGRAM_BUCKETS classes, with distances past the top of the
    # range folded into the final bucket
    discretized_distances = torch.bucketize(distances, boundaries[:-1])
    # ignore any pair that involves a masked-out position
    discretized_distances.masked_fill_(~(mask[:, :, None] & mask[:, None, :]),
                                       IGNORE_INDEX)
    return discretized_distances


# get data

data = scn.load(casp_version=12,
                thinning=30,
                with_pytorch='dataloaders',
                batch_size=1,
                dynamic_batching=False,
                return_masks=True)

data = iter(data['train'])
data_cond = lambda t: t[1].shape[1] < THRESHOLD_LENGTH
dl = cycle(data, data_cond)
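
# `cycle` is defined elsewhere in the project. For orientation only, a minimal
# helper with this call signature might look like the sketch below (an
# assumption, not the project's exact implementation): iterate over the loader
# indefinitely and yield only the batches that satisfy `cond`.
def cycle(loader, cond=lambda x: True):
    while True:
        for batch in loader:
            if cond(batch):
                yield batch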

# model

model = Alphafold2(dim=256, depth=1, heads=8, dim_head=64).to(DEVICE)

# optimizer

optim = Adam(model.parameters(), lr=LEARNING_RATE)
Example #3
    depth=2,
    attend_self=True,
    input_degrees=1,
    output_degrees=2,
    reduce_dim_out=True,
    differentiable_coors=True,
    num_neighbors=0,
    attend_sparse_neighbors=True,
    num_adj_degrees=2,
    adj_dim=4,
    num_degrees=2,
)

data = scn.load(casp_version=12,
                thinning=30,
                with_pytorch='dataloaders',
                batch_size=BATCH_SIZE,
                dynamic_batching=False)
# add Gaussian noise to the coordinates to test the refinement algorithm

dl = cycle(data['train'])
transformer = transformer.cuda()
optim = Adam(transformer.parameters(), lr=1e-4)

for _ in range(10000):
    for _ in range(GRADIENT_ACCUMULATE_EVERY):
        batch = next(dl)
        seqs, coords, masks = batch.seqs, batch.crds, batch.msks

        seqs = seqs.cuda().argmax(dim=-1)
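
        # sketch, not from this excerpt: one plausible way to realize the
        # "add Gaussian noise to the coords" step noted above -- perturb the
        # ground-truth coordinates (unit-variance noise is an arbitrary choice)
        # so the transformer can be trained to refine them back; assumes
        # `torch` is imported at the top of the script
        coords = coords.cuda()
        noised_coords = coords + torch.randn_like(coords)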