class ProbabilisticModel(object):
    """Abstract base class for a probabilistic model.

    Holds the priors parser, the observed data, and (after ``run``) the
    fitted model parameters and log-likelihood.  Subclasses must implement
    ``read_priors``, ``preprocess`` and ``_init_components``, and are
    expected to provide ``model_trainer_class``, ``priors`` and
    ``config_parameters`` (set up in ``_init_components`` — not visible
    here, so this is inferred from usage in ``run``).
    """

    def __init__(self, allelenumber_max):
        """
        Args:
            allelenumber_max: maximum allele number the model considers.
        """
        self.allelenumber_max = allelenumber_max
        self.priors_parser = PriorParser()
        self.data = Data()
        self._init_components()

    def read_priors(self, priors_filename):
        """Load priors from *priors_filename*.  Subclass responsibility.

        BUGFIX: the original ``raise NotImplemented`` raises a TypeError at
        call time (``NotImplemented`` is a comparison sentinel, not an
        exception); ``NotImplementedError`` is the correct exception.
        """
        raise NotImplementedError

    def read_data(self, filename_base):
        """Load observed data from files named with *filename_base*."""
        self.data.read_data(filename_base)

    def preprocess(self):
        """Preprocess the loaded data.  Subclass responsibility."""
        raise NotImplementedError

    def run(self, idx_restart, restart_parameters, max_iters, stop_value):
        """Train the model for one random restart.

        Args:
            idx_restart: index of this random restart.
            restart_parameters: restart-specific initialization parameters.
            max_iters: maximum number of training iterations.
            stop_value: convergence threshold used to stop training.

        Records the fitted parameters and final log-likelihood on self.
        """
        trainer = self.model_trainer_class(self.priors, self.data,
                                           idx_restart, restart_parameters,
                                           self.config_parameters,
                                           max_iters, stop_value)
        trainer.train()

        self.model_parameters = trainer.model_parameters
        self.log_likelihood = trainer.log_likelihood

    def write_parameters(self, filename_base):
        """Persist the fitted model parameters under *filename_base*."""
        self.model_parameters.write_parameters(filename_base)

    def _init_components(self):
        """Set up model-specific components.  Subclass responsibility."""
        raise NotImplementedError
def convert(self):
    """Extract per-segment paired allele counts and BAF counts in parallel,
    write the BAF heat map, run the tumor LOH test, and write the resulting
    data set to disk.

    Side effects: spawns a worker pool, and writes heat-map and data files
    under ``self.filename_base``.
    """
    seg_num = self.segments.num
    # Never spawn more worker processes than there are segments.
    process_num = min(self.process_num, seg_num)

    pool = Pool(processes=process_num)
    try:
        args_list = []
        for j in range(0, seg_num):
            seg_name = self.segments[j][0]
            chrom = self.segments[j][1]
            start = self.segments[j][2]
            end = self.segments[j][3]

            args_tuple = (seg_name, chrom, start, end,
                          self.normal_bam_filename,
                          self.tumor_bam_filename,
                          self.reference_genome_filename,
                          self.min_depth, self.min_bqual, self.min_mqual)
            args_list.append(args_tuple)

        counts_tuple_list = pool.map(process_by_segment, args_list)
    finally:
        # BUGFIX: the original never closed the pool, leaking worker
        # processes; release them deterministically even on error.
        pool.close()
        pool.join()

    paired_counts = []
    BAF_counts = []
    for paired_counts_j, BAF_counts_j in counts_tuple_list:
        paired_counts.append(paired_counts_j)
        BAF_counts.append(BAF_counts_j)

    BAF_heatmap = BAFHeatMap(BAF_counts)
    BAF_heatmap.write_heatmap(self.filename_base)

    data = Data(self.segments, paired_counts)
    data.tumor_LOH_test(self.WES_flag)
    data.write_data(self.filename_base)
def __init__(self, allelenumber_max):
    """Initialize the model state.

    Args:
        allelenumber_max: maximum allele number the model considers.
    """
    self.allelenumber_max = allelenumber_max
    self.priors_parser = PriorParser()  # parser for prior files
    self.data = Data()                  # container for observed data
    # NOTE(review): subclass hook; presumably reads the attributes set
    # above, so it must stay last — confirm before reordering.
    self._init_components()