def __init__(self, df, predict_ahead, fare, predictant, p, d, q):
    """Set up the time-series model and fit it immediately.

    Args:
        df: Input data frame handed to ``DataProcessing``.
        predict_ahead: Forecast horizon.
        fare: Fare selector forwarded to ``DataProcessing``.
        predictant: Name of the target column to forecast.
        p, d, q: Model orders (presumably ARIMA(p, d, q) — confirm
            against ``self.model``, which lives outside this view).

    Side effects: stores train/test splits and the fit metrics
    (``self.rmse``, ``self.mape``) on the instance.
    """
    DataProcessing.__init__(self, df, fare)
    self.predict_ahead = predict_ahead
    self.predictant = predictant
    # Model orders are plain attributes read later by self.model().
    self.p = p
    self.d = d
    self.q = q
    # Split first, then fit: model() consumes self.train / self.test.
    self.train, self.test = self.train_test_split()
    self.rmse, self.mape = self.model()
def __init__(self, df, predict_ahead, fare, predictant, NFILTERS, BATCH_SIZE, NB_EPOCHS):
    """Set up the neural forecasting model and train it immediately.

    Args:
        df: Input data frame handed to ``DataProcessing``.
        predict_ahead: Forecast horizon.
        fare: Fare selector forwarded to ``DataProcessing``.
        predictant: Name of the target column to forecast.
        NFILTERS: Number of filters (network width hyperparameter).
        BATCH_SIZE: Training batch size.
        NB_EPOCHS: Number of training epochs.

    Side effects: stores train/test splits, the fitted scaler, and the
    fit metrics (``self.rmse``, ``self.mape``) on the instance.
    """
    DataProcessing.__init__(self, df, fare)
    self.predict_ahead = predict_ahead
    self.predictant = predictant
    # Training hyperparameters, read later by self.model().
    self.NFILTERS = NFILTERS
    self.BATCH_SIZE = BATCH_SIZE
    self.NB_EPOCHS = NB_EPOCHS
    # Unlike the ARIMA variant, the split here also yields the scaler
    # used to normalise inputs before training.
    self.train, self.test, self.scaler = self.train_test_split()
    self.rmse, self.mape = self.model()
class Scanner(Thread):
    """Background thread that reads pixels from a scanner (serial port or
    recorded file) and paints them into an SDL sprite, pushing an SDL user
    event after every pixel so the UI can refresh.
    """

    def __init__(self, args, event, factory, name=None):
        """Choose the pixel source, build the processing pipeline, and
        allocate the destination sprite.

        Args:
            args: Parsed CLI namespace (from_file, raw_out, gamma,
                invert_gamma, integ_cycles, track).
            event: SDL event pushed once per decoded pixel.
            factory: SDL sprite factory used to create the drawing surface.
            name: Optional thread name.
        """
        super().__init__(name=name, daemon=True)
        self.args = args
        self.event = event
        self.event.user.code = SCANNER_NEW_PIXEL
        if args.from_file is not None:
            # Replay mode: read raw pixels from a file, no raw dump.
            output = None
            inp = args.from_file
        else:
            # Live mode: dump raw bytes to args.raw_out while scanning.
            output = open(args.raw_out, "wb")
            inp = Serial("/dev/ttyACM0", 9600, timeout=0.5)
            # NOTE(review): pySerial's write() expects bytes-like data;
            # passing the int 1 here — confirm the device protocol.
            inp.write(1)
        if args.invert_gamma:
            # NOTE(review): this DataProcessing instance is built with the
            # un-inverted gamma and then discarded (only self.dp below is
            # kept). Looks like a leftover — confirm whether its
            # construction side effects are intentional.
            dp = DataProcessing(inp, args.gamma, args.integ_cycles, output, args.track)
            args.gamma = 1 / args.gamma
        self.dp = DataProcessing(inp, args.gamma, args.integ_cycles, output, args.track)
        self.size = self.dp.get_size()
        self.sprite = factory.create_sprite(size=self.size)
        # Start with an empty (zero-height) completed region.
        self.completed_part = self.sprite.subsprite(
            (0, 0, self.sprite.size[0], 0))

    def run(self):
        """Main loop: read one pixel per sprite cell until the image is
        full or join() clears self.running."""
        self.running = True
        array = sdl2.ext.pixels2d(self.sprite)
        for i in range(array.size):
            # Retry loop: a BlockingIOError from the source means "no data
            # yet" — spin until a pixel arrives or we are told to stop.
            while self.running:
                try:
                    pix = self.dp.read_pixel()
                    # Pack the RGB triple into one big-endian int pixel.
                    array.flat[i] = int.from_bytes(
                        pack("3B", *self.dp.raw_to_rgb(pix, 'absolute')), "big")
                    # Wake the UI so it can redraw the new pixel.
                    sdl2.SDL_PushEvent(ctypes.byref(self.event))
                except io.BlockingIOError:
                    continue
                break
            # Expose the fully-scanned rows (i / width) to the renderer.
            self.completed_part = self.sprite.subsprite(
                (0, 0, int(i / self.sprite.size[0]), self.sprite.size[1]))
            if self.running is False:
                break

    def join(self):
        """Stop the scan loop, wait for the thread, and release the raw
        output file and serial port."""
        self.running = False
        super().join()
        if self.dp.output is not None:
            self.dp.output.close()
        # NOTE(review): in replay (from_file) mode self.dp.ser may not be a
        # serial port — confirm DataProcessing tolerates close() then.
        self.dp.ser.close()
def main(argv):
    """Entry point of the YAML config generator.

    Parses CLI arguments, validates them, and runs the processing
    pipeline. Exits with status 2 on a parse error; raises ValueError
    when the parsed parameters fail validation.
    """
    Logger.write_log('YAML Config Generator has started!')

    try:
        params: dict = ArgParser.parse(argv=argv)
    except (ArgumentError, ArgumentModeError) as err:
        print(err.what())
        sys.exit(2)

    # Guard clause: bail out loudly on invalid parameters.
    if not ArgParser.params_are_valid(params=params):
        Logger.write_error_log(''.join(argv))
        raise ValueError("Invalid arguments provided! See help for valid inputs")

    Logger.write_debug_log('Params validated, processing...')
    DataProcessing(params=params).process()
    Logger.write_log('Generator is done :)')
def __init__(self, prefixes, num_models=10, num_solutions=10):
    """Index docking decoys and chain files for a set of dataset prefixes.

    Builds three per-PDB lookup tables:
      - self.data: decoy name -> decoy score data,
      - self.data_path: decoy name -> (receptor file, ligand file),
      - self.data_chains: list of (receptor, ligand) chain-file pairs.

    Args:
        prefixes: Dataset prefixes to load via DataProcessing.
        num_models: Number of docking models scanned per target.
        num_solutions: Number of solutions scanned per model.
    """
    self.num_models = num_models
    self.num_solutions = num_solutions
    self.data = {}
    self.data_path = {}
    self.prefixes = prefixes
    data_dir = os.path.join(DATA_DIR, 'Docking')
    # Pass 1: collect decoy data and complex-chain file paths.
    for prefix in prefixes:
        dataset = DataProcessing(data_dir=data_dir, prefix=prefix)
        for pdb_name in dataset.pdb_chains.keys():
            decoys = {}
            decoys_path = {}
            # Convention in this dataset: index 0 = receptor, 1 = ligand.
            receptor_chain = dataset.pdb_chains[pdb_name][0]
            ligand_chain = dataset.pdb_chains[pdb_name][1]
            # NOTE(review): this pass iterates models 0..num_models-1 while
            # the chain pass below uses 1..num_models — confirm which
            # numbering the decoy files actually use.
            for model_num in range(num_models):
                for solution_num in range(num_solutions):
                    decoy_name = dataset.dirs.get_complex_decoy_prefix(pdb_name, receptor_chain, ligand_chain, model_num, solution_num)
                    # Only keep decoys that actually have score data.
                    if decoy_name in dataset.decoy_data.keys():
                        receptor_file = dataset.dirs.get_complex_chain_file(pdb_name, receptor_chain, model_num, solution_num)
                        ligand_file = dataset.dirs.get_complex_chain_file(pdb_name, ligand_chain, model_num, solution_num)
                        decoys[decoy_name] = dataset.decoy_data[decoy_name]
                        decoys_path[decoy_name] = (receptor_file, ligand_file)
            if not pdb_name in self.data.keys():
                self.data[pdb_name] = {}
            if not pdb_name in self.data_path.keys():
                self.data_path[pdb_name] = {}
            # Merge rather than overwrite: the same pdb_name can appear
            # under several prefixes.
            self.data[pdb_name] = {**self.data[pdb_name], **decoys}
            self.data_path[pdb_name] = {**self.data_path[pdb_name], **decoys_path}
    # Pass 2: collect per-model protein chain file pairs.
    self.data_chains = {}
    for prefix in prefixes:
        dataset = DataProcessing(data_dir=data_dir, prefix=prefix)
        for pdb_name in dataset.pdb_chains.keys():
            decoys = []
            receptor_chain = dataset.pdb_chains[pdb_name][0]
            ligand_chain = dataset.pdb_chains[pdb_name][1]
            # 1-based model numbering here (see NOTE above).
            for model_num in range(1, num_models+1):
                receptor_file = dataset.dirs.get_protein_chains_file(pdb_name, receptor_chain, model_num)
                ligand_file = dataset.dirs.get_protein_chains_file(pdb_name, ligand_chain, model_num)
                decoys.append((receptor_file, ligand_file))
            if not pdb_name in self.data_chains.keys():
                self.data_chains[pdb_name] = []
            self.data_chains[pdb_name] += decoys
def __init__(self, args, event, factory, name=None):
    """Build the scanner thread: pick a pixel source, wire up the
    data-processing pipeline, and allocate the destination sprite.

    Args:
        args: Parsed CLI namespace (from_file, raw_out, gamma,
            invert_gamma, integ_cycles, track).
        event: SDL event pushed once per decoded pixel.
        factory: SDL sprite factory used to create the drawing surface.
        name: Optional thread name.
    """
    super().__init__(name=name, daemon=True)
    self.args = args
    self.event = event
    self.event.user.code = SCANNER_NEW_PIXEL

    # Pixel source: live serial scanner (with raw dump) or a replay file.
    if args.from_file is None:
        output = open(args.raw_out, "wb")
        inp = Serial("/dev/ttyACM0", 9600, timeout=0.5)
        inp.write(1)
    else:
        output = None
        inp = args.from_file

    if args.invert_gamma:
        # NOTE(review): this instance is constructed and immediately
        # discarded before gamma is inverted — preserved as-is because
        # DataProcessing's constructor may have side effects; confirm.
        _probe = DataProcessing(inp, args.gamma, args.integ_cycles, output, args.track)
        args.gamma = 1 / args.gamma

    self.dp = DataProcessing(inp, args.gamma, args.integ_cycles, output, args.track)
    self.size = self.dp.get_size()
    self.sprite = factory.create_sprite(size=self.size)
    # Completed region starts empty (zero-height strip).
    self.completed_part = self.sprite.subsprite((0, 0, self.sprite.size[0], 0))
def get_chain_bounding_boxes(self, axis, threshold=100.0, rewrite=False):
    """Compute (or load cached) receptor/ligand bounding-box sizes, plot a
    histogram of them, and return the targets exceeding the size threshold.

    Args:
        axis: Matplotlib axis the histogram is drawn on.
        threshold: Max allowed bbox size; larger targets are excluded.
        rewrite: Force recomputation even if the pickle cache exists.

    Returns:
        List of pdb names whose receptor or ligand bbox exceeds threshold.
    """
    pdb_sizes = {}
    if (not os.path.exists('chains_boxes.pkl')) or rewrite:
        for prefix in self.prefixes:
            dataset = DataProcessing(prefix)
            for pdb_name in tqdm(dataset.pdb_chains.keys()):
                pdb_file = dataset.dirs.get_structure_file(pdb_name)
                # Index 0 = receptor chain, 1 = ligand chain.
                receptor_chain = _get_chain(pdb_file, dataset.pdb_chains[pdb_name][0], do_center=False)
                ligand_chain = _get_chain(pdb_file, dataset.pdb_chains[pdb_name][1], do_center=False)
                ligand_a, ligand_b = _get_bbox(ligand_chain)
                receptor_a, receptor_b = _get_bbox(receptor_chain)
                # Size = largest axis-aligned extent of the bounding box.
                ligand_size = np.max(np.abs(ligand_b - ligand_a))
                receptor_size = np.max(np.abs(receptor_b - receptor_a))
                pdb_sizes[pdb_name] = (receptor_size, ligand_size)
                # Bug fix: a stray `break` here (debug leftover) stopped
                # after the first structure per prefix, so the cache and
                # the exclusion list covered almost nothing.
        with open('chains_boxes.pkl', 'wb') as fout:
            pkl.dump(pdb_sizes, fout)
    else:
        with open('chains_boxes.pkl', 'rb') as fin:
            pdb_sizes = pkl.load(fin)
    sizes = []
    exclusion_list = []
    for pdb_name in pdb_sizes.keys():
        sizes += list(pdb_sizes[pdb_name])
        # Exclude a target if either of its chains is oversized.
        if max(pdb_sizes[pdb_name][0], pdb_sizes[pdb_name][1]) > threshold:
            exclusion_list.append(pdb_name)
    axis.hist(sizes, bins=40, alpha=0.7)
    axis.set_xlabel('BBox size')
    axis.set_ylabel('Num targets')
    return exclusion_list
def compute_alignments(self, axis, seq_list, threshold=0.9, prefix='_nearnative', rewrite=False, num_processes=10):
    """Compute (or load cached) sequence-identity alignments between every
    dataset target and a list of reference sequences, plot the identity
    matrix, and return the targets that align above the threshold.

    Args:
        axis: Matplotlib axis used for the imshow of the identity matrix.
        seq_list: Iterables of (receptor_path, receptor_seq, ligand_path,
            ligand_seq) reference entries.
        threshold: Sequence-identity cutoff for exclusion.
        prefix: Dataset prefix passed to DataProcessing.
        rewrite: Force recomputation even if per-target pickles exist.
        num_processes: Worker count for the alignment pool.

    Returns:
        Set of pdb names with identity above the threshold to any target.
    """
    dataset = DataProcessing(prefix)
    all_alignments = {}
    for n, pdb_name in enumerate(dataset.pdb_chains.keys()):
        # Per-target cache: Alignments/<pdb_name>.pkl
        if (not os.path.exists('Alignments/%s.pkl'%pdb_name)) or rewrite:
            print('Processing ', pdb_name, ' %d/%d'%(n, len(dataset.pdb_chains.keys())))
            receptor_chain = dataset.pdb_chains[pdb_name][0]
            ligand_chain = dataset.pdb_chains[pdb_name][1]
            # Find the first (model, solution) pair that has decoy data;
            # only its chain files are used to extract the sequences.
            model_num = None
            solution_num = None
            for model in range(1,11):
                if not model_num is None:
                    break
                for solution in range(1,11):
                    decoy_name = dataset.dirs.get_complex_decoy_prefix(pdb_name, receptor_chain, ligand_chain, model, solution)
                    if decoy_name in dataset.decoy_data:
                        model_num=model
                        solution_num=solution
                        break
            if model_num is None:
                # No decoys at all for this target: skip it entirely
                # (it will also be absent from all_alignments).
                continue
            receptor_file = dataset.dirs.get_complex_chain_file(pdb_name, receptor_chain, model_num, solution_num)
            ligand_file = dataset.dirs.get_complex_chain_file(pdb_name, ligand_chain, model_num, solution_num)
            receptor_sequence = get_pdb_seq(receptor_file)
            ligand_sequence = get_pdb_seq(ligand_file)
            # Four jobs per reference: both our chains vs both theirs.
            jobs = []
            for target_receptor_path, target_receptor_seq, target_ligand_path, target_ligand_seq in seq_list:
                jobs.append((receptor_sequence, target_receptor_seq))
                jobs.append((receptor_sequence, target_ligand_seq))
                jobs.append((ligand_sequence, target_receptor_seq))
                jobs.append((ligand_sequence, target_ligand_seq))
            pool = multiprocessing.Pool(num_processes)
            results = pool.map(get_alignment, jobs)
            pool.close()
            alignments = []
            # NOTE(review): this `n` shadows the outer enumerate index for
            # the rest of the iteration — harmless here since the outer n
            # is only read by the print above, but fragile.
            for n, target in enumerate(seq_list):
                # Best identity across the 4 chain pairings of reference n.
                max_id = max(results[4*n][1], results[4*n+1][1], results[4*n+2][1], results[4*n+3][1])
                alignments.append( (target[0], target[2], max_id) )
            with open('Alignments/%s.pkl'%pdb_name, 'wb') as fout:
                pkl.dump(alignments, fout)
        else:
            with open('Alignments/%s.pkl'%pdb_name, 'rb') as fin:
                alignments = pkl.load(fin)
        all_alignments[pdb_name] = alignments
    # Build the (targets x references) identity matrix and collect every
    # target whose best identity to any reference exceeds the threshold.
    exclusion_list = set([])
    N = len(all_alignments.keys())
    M = len(seq_list)
    mat = np.zeros( (N, M) )
    for i, key in enumerate(all_alignments.keys()):
        for j, al in enumerate(all_alignments[key]):
            mat[i, j] = al[2]
            if al[2]>threshold:
                exclusion_list.add(key)
                print(key, al[0])
    axis.imshow(mat)
    return exclusion_list
    # Tail of a binarisation helper whose `def` line is outside this view:
    # clamps every pixel of image_array to 0 or 255 in place and returns it.
    x = image_array
    for r in range(len(x)):
        for c in range(len(x[0])):
            # Anything below pure white becomes black.
            if x[r][c] < 255:
                x[r][c] = 0
            else:
                x[r][c] = 255
    return x


if __name__ == "__main__":
    # Smoke-test script: grab an image from the serial scanner, save it,
    # then run edge detection on a sample file.
    args = dp_parser.parse_args()
    ser = Serial("/dev/ttyACM0", 9600)
    dp = DataProcessing(ser)
    # NOTE(review): pySerial's write() expects bytes-like data; passing the
    # int 1 here — confirm the device protocol.
    ser.write(1)
    dp.get_size()
    print(dp.size)
    im = dp.read_into_array("image")
    im = dp.makeim(im, args.contrast, flip_odd=False)
    im.save("test.png")
    img = Image.open("emoticon.png")
    im = np.array(img, dtype="uint8")
    auto_canny(im, sigma=0.33)
    # NOTE(review): this final open's result is never used — confirm
    # whether a processing step is missing here.
    img = Image.open("refactored.png")