def hide_non_minimal_complexes(pdb_object):
    """Move all but the first-listed minimized complex into a hidden folder.

    The Rosetta scores file is parsed, the ``hidden_complexes`` directory is
    recreated from scratch, and every complex except the first entry of the
    score table is relocated into it.

    pdb_object : PDBObject
        PDB structure to be handled
    """
    score_table = rosetta.parse_scores(pdb_object.minimized.scores.read())
    destination = pdb_object.minimized.hidden_complexes.path
    # Start from an empty hidden folder so stale entries do not accumulate.
    destination.delete()
    storage.make_directory(destination)
    # Skip the first score-table entry; hide the rest.
    for entry in list(score_table.keys())[1:]:
        source_path = pdb_object.minimized.complex.pdb[entry].path
        storage.move(source_path, destination, no_fail=True)
def hide_non_minimal_complexes(pdb_object):
    """Hide all but the first-listed minimized complex.

    Parses the Rosetta scores file and moves every complex except the
    first entry of the score table into the ``hidden_complexes`` folder.

    Parameters
    ----------
    pdb_object : PDBObject
        PDB structure to be handled
    """
    # NOTE(review): a duplicate definition of this function exists in this
    # module (one variant clears the hidden folder first) — consolidate.
    scores = rosetta.parse_scores(pdb_object.minimized.scores.read())
    hidden_folder = pdb_object.minimized.hidden_complexes.path
    storage.make_directory(hidden_folder)
    # Skip the first score-table entry; every remaining complex is hidden.
    for number in list(scores.keys())[1:]:
        complex_path = pdb_object.minimized.complex.pdb[number].path
        print("Hiding ", complex_path)
        storage.move(complex_path, hidden_folder, no_fail=True)
def generate_tfrecords(dataset_object):
    """Generate TFRecords from a dataset's combined images.

    Images are split into size-based chunks, the TFRecords directory is
    reset, and each chunk is written out with its per-PDB labels.

    Parameters
    ----------
    dataset_object : DatasetObject
        Dataset of the images.
    """
    files = dataset_object.images
    chunks = chunk_by_size(files)
    # Reset the output directory so old records are not mixed with new ones.
    storage.clear_directory(dataset_object.tfrecords, no_fail=True)
    storage.make_directory(dataset_object.tfrecords, no_fail=True)
    # Labels file format: one record per line — "<pdb_id> <label> <label> ..."
    lines = dataset_object.labels.read().splitlines()
    pdb_labels = {
        fields[0]: fields[1:]
        for fields in (line.split(" ") for line in lines)
    }
    for i, chunk in enumerate(chunks):
        write_tfrecords(chunk, dataset_object, i, pdb_labels)
def combine_maps(pdb_object):
    """Postprocessing step for combining the feature map images into one image

    Parameters
    ----------
    pdb_object : PDBObject
        PDB object which images will be combined
    """
    image = pdb_object.image
    # Stack the three feature maps along the channel (last) axis.
    combined_grid = np.concatenate(
        [
            image.htmd.read(),
            image.electronegativity.read(),
            image.rosetta.read(),
        ],
        axis=-1,
    )
    storage.make_directory(image.combined.path.parent)
    image.combined.write(combined_grid)
def __init__(self, model_object, dataset_object, channels, action, seed):
    """Wire up the TF1 graph: input pipeline, model ops, and checkpointing.

    Parameters
    ----------
    model_object
        Provides ``load_fn()`` returning a model-building function.
    dataset_object
        Provides ``model(...)`` for the checkpoint folder; passed to the
        input pipeline.
    channels
        Input channels forwarded to the input and checkpoint-path builders.
    action
        Mode flag forwarded to both the input function and the model
        function (presumably train/eval — TODO confirm).
    seed
        Used only to select the checkpoint folder.
    """
    self.dataset_object = dataset_object
    # NOTE(review): self.results is never assigned in this method — it is
    # presumably a property/attribute defined elsewhere on the class; confirm.
    # Create an empty results file on first use.
    if not self.results.path.exists():
        self.results.write("")
    self.model_object = model_object
    self.channels = channels
    # Input pipeline (TF1 graph mode): dataset -> initializable iterator.
    input_fn = input.load_fn(dataset_object, channels, settings.rotate, action)
    self.iterator = input_fn().make_initializable_iterator()
    model_fn = model_object.load_fn()
    self.id, self.X, self.y = self.iterator.get_next()
    # Dynamic batch size of the current batch (first dimension of labels).
    self.shape = tf.shape(self.y)[0]
    model = model_fn(self.X, self.y, action, settings.rotate)
    self.op = model.train_op
    self.loss = model.loss
    self.predictions = model.predictions
    # Checkpoints are stored per (model, channels, seed) combination.
    checkpoint_folder = self.dataset_object.model(self.model_object, self.channels, seed)
    storage.make_directory(checkpoint_folder)
    self.save_path = checkpoint_folder / "model.ckpt"
    self.saver = tf.train.Saver()