def grammar_1(cursor, tokens):
    """Grammar for 'grammar_1 := grammar_2 or grammar_1 | grammar_2 and grammar_1 | grammar_2'.

    :param cursor: database cursor forwarded to the sub-grammars
    :param tokens: token list to parse
    :return: parse result of the matched production
    """
    # The production is chosen by the token right after the first grammar_2.
    connective = tokens[1] if len(tokens) > 1 else None
    if connective == 'or':
        # grammar_2 or grammar_1: split on the first occurring 'or'; the
        # returned dictionary holds the tokens before (0) and after (1) it.
        parts = SBiDer_helper.split_by(tokens, 'or')
        return grammar_or(grammar_2(cursor, parts.get(0)),
                          grammar_1(cursor, parts.get(1)))
    if connective == 'and':
        # grammar_2 and grammar_1: split on the first occurring 'and'.
        parts = SBiDer_helper.split_by(tokens, 'and')
        return grammar_and(grammar_2(cursor, parts.get(0)),
                           grammar_1(cursor, parts.get(1)))
    # Bare grammar_2 production.
    return grammar_2(cursor, tokens)
def build_indirect_sbider_path(input_dictionary, repressor_dictionary, output_dictionary, input_species_list, output_species_list, path_queue, final_operon_path_list, memory_operon, memory_species, activated_paths):
    """Fire operons that activate from the accumulated species memory alone.

    Called when the direct search's queue drains and the indirect flag is set.
    Operons producing a target output species get a backward-reconstructed
    path appended to final_operon_path_list; others are re-enqueued so the
    direct search can continue from them.

    :param input_dictionary: operon id -> required input species lists
    :param repressor_dictionary: operon id -> repressor species
    :param output_dictionary: operon id -> produced species lists
    :param input_species_list: starting species, forwarded to the direct search
    :param output_species_list: target species lists to reach
    :param path_queue: queue of (operon path, produced species), mutated in place
    :param final_operon_path_list: accumulator of complete paths, mutated in place
    :param memory_operon: already-fired operons, mutated in place
    :param memory_species: already-produced species, mutated in place
    :param activated_paths: log of fired operons and their products, mutated in place
    :return: None; results accumulate in final_operon_path_list
    """
    temp_memory_species = []
    # Only operons that have not fired yet are candidates.
    for an_operon in set(input_dictionary.keys()) - set(memory_operon):
        # Final True argument: judge activation against memory_species only
        # (indirect mode), with no freshly-produced species ([]).
        if SBiDer_helper.promoter_activation(input_dictionary, repressor_dictionary, an_operon, [], memory_species, True):
            just_produced_species = output_dictionary[an_operon]
            just_produced_unique_species = SBiDer_helper.uniquely_merge_multi_dimensional_list_of_lists(just_produced_species)
            if SBiDer_helper.match_any_list(just_produced_species, output_species_list):
                # Target reached: rebuild the path backwards from the log of
                # previously activated operons.
                if len(activated_paths) > 1:
                    ope_path_backward = build_sbider_path_memory_tree(input_dictionary, activated_paths, an_operon)
                    final_operon_path_list.extend(ope_path_backward)
            else:
                # Not a target: enqueue for another direct-search pass.
                if an_operon not in memory_operon:
                    path_queue.append(([an_operon], just_produced_unique_species))
                    memory_operon.append(an_operon)
                    # NOTE(review): this rebinds the local name only -- the
                    # caller's memory_operon list gets the append above but
                    # not the deduplication.
                    memory_operon = SBiDer_helper.remove_duplicates_within_list(memory_operon)
            temp_memory_species.extend(just_produced_unique_species)
            activated_paths.append([[an_operon], just_produced_unique_species])
    # Publish everything produced in this sweep, then deduplicate (same
    # local-rebinding caveat as above).
    memory_species.extend(temp_memory_species)
    memory_species = SBiDer_helper.remove_duplicates_within_list(memory_species)
    if len(path_queue) > 0:
        # New work was enqueued: resume the direct search in indirect mode.
        build_direct_sbider_path(input_dictionary, repressor_dictionary, output_dictionary, input_species_list, output_species_list, path_queue, final_operon_path_list, memory_operon, memory_species, activated_paths, True)
def get_sbider_path(inp_dic, rep_dic, outp_dic, inp_spe, outp_spe, indirect_flag=False):
    """Run the operon path search and return the deduplicated result paths.

    :param inp_dic: operon id -> input species lists
    :param rep_dic: operon id -> repressor species
    :param outp_dic: operon id -> output species lists
    :param inp_spe: starting species list
    :param outp_spe: target species list
    :param indirect_flag: enable the indirect (memory-based) search phase
    :return: list of operon paths leading from inp_spe to outp_spe
    """
    found_paths = []
    seed_queue = [([], inp_spe)]
    visited_operons = []
    known_species = list(inp_spe)
    fired_log = []
    build_direct_sbider_path(inp_dic, rep_dic, outp_dic, inp_spe, outp_spe,
                             seed_queue, found_paths, visited_operons,
                             known_species, fired_log, indirect_flag)
    if found_paths:
        found_paths = SBiDer_helper.remove_duplicated_lists_within_a_list_of_lists(found_paths)
    return found_paths

# End of sbider_searcher.py
def make_plasmid_species_name_dictionary(cursor, operon_id_plasmid_name_dictionary, operon_species_dictionary):
    """Map each plasmid name to the names of the species its operon involves.

    :param cursor: open database cursor used to resolve species names
    :param operon_id_plasmid_name_dictionary: operon id -> plasmid name
    :param operon_species_dictionary: operon id -> species-id list of lists
    :return: dict of plasmid name -> list of species names
    """
    plasmid_to_species_names = {}
    for ope_id, spe_id_lists in operon_species_dictionary.items():
        # Flatten the per-transition species-id lists into one unique list.
        merged_ids = SBiDer_helper.uniquely_merge_multi_dimensional_list_of_lists(spe_id_lists)
        plasmid = operon_id_plasmid_name_dictionary[ope_id]
        names = []
        for spe_id in merged_ids:
            names.append(db_get_species_name_from_id(cursor, spe_id))
        plasmid_to_species_names[plasmid] = names
    return plasmid_to_species_names
def build_indirect_sbider_path(input_dictionary, repressor_dictionary, output_dictionary, input_species_list, output_species_list, path_queue, final_operon_path_list, memory_operon, memory_species, activated_paths):
    """Fire operons that activate from the accumulated species memory alone.

    Invoked once the direct search's queue drains in indirect mode. Operons
    whose products match a target get a backward-reconstructed path; the rest
    are re-enqueued for another direct pass.

    :param input_dictionary: operon id -> required input species lists
    :param repressor_dictionary: operon id -> repressor species
    :param output_dictionary: operon id -> produced species lists
    :param input_species_list: starting species, forwarded to the direct search
    :param output_species_list: target species lists to reach
    :param path_queue: queue of (operon path, produced species), mutated in place
    :param final_operon_path_list: accumulator of complete paths, mutated in place
    :param memory_operon: already-fired operons, mutated in place
    :param memory_species: already-produced species, mutated in place
    :param activated_paths: log of fired operons and their products, mutated in place
    :return: None; results accumulate in final_operon_path_list
    """
    temp_memory_species = []
    # Skip operons that already fired.
    for an_operon in set(input_dictionary.keys()) - set(memory_operon):
        # Final True argument: indirect mode -- judge activation against the
        # species memory only, with no freshly-produced species ([]).
        if SBiDer_helper.promoter_activation(input_dictionary, repressor_dictionary, an_operon, [], memory_species, True):
            just_produced_species = output_dictionary[an_operon]
            just_produced_unique_species = SBiDer_helper.uniquely_merge_multi_dimensional_list_of_lists(
                just_produced_species)
            if SBiDer_helper.match_any_list(just_produced_species, output_species_list):
                # Target reached: rebuild the path backwards through the log
                # of previously activated operons.
                if len(activated_paths) > 1:
                    ope_path_backward = build_sbider_path_memory_tree(
                        input_dictionary, activated_paths, an_operon)
                    final_operon_path_list.extend(ope_path_backward)
            else:
                # Not a target yet: enqueue for further direct search.
                if an_operon not in memory_operon:
                    path_queue.append(
                        ([an_operon], just_produced_unique_species))
                    memory_operon.append(an_operon)
                    # NOTE(review): rebinds the local name only; the caller's
                    # memory_operon list is appended to but not deduplicated.
                    memory_operon = SBiDer_helper.remove_duplicates_within_list(
                        memory_operon)
            temp_memory_species.extend(just_produced_unique_species)
            activated_paths.append([[an_operon], just_produced_unique_species])
    # Publish this sweep's products, then deduplicate (same local-rebinding
    # caveat as above).
    memory_species.extend(temp_memory_species)
    memory_species = SBiDer_helper.remove_duplicates_within_list(
        memory_species)
    if len(path_queue) > 0:
        # New work enqueued: resume the direct search in indirect mode.
        build_direct_sbider_path(input_dictionary, repressor_dictionary, output_dictionary, input_species_list, output_species_list, path_queue, final_operon_path_list, memory_operon, memory_species, activated_paths, True)
def grammar_2(cursor, tokens):
    """Grammar for 'grammar_2 := (grammar_1) or grammar_1 | (grammar_1) and grammar_1 | (grammar_1) | interactor'.

    :param cursor: database cursor forwarded to the sub-grammars
    :param tokens: token list to parse
    :return: parse result of the matched production
    :raises ValueError: if ``tokens`` is empty
    """
    if len(tokens) <= 0:
        raise ValueError("Invalid Syntax")
    elif tokens[0] == "(":
        # (grammar_1) or grammar_1 | (grammar_1) and grammar_1 | (grammar_1)
        # Look at the token after the last occurring ')' to pick the branch.
        token_after_last_closer = SBiDer_helper.remove_parentheses(tokens)
        if token_after_last_closer == 'or':
            # Split tokens by the first occurring 'or'; the dictionary holds
            # the tokens before (0) and after (1) it.
            or_dictionary = SBiDer_helper.split_by(tokens, 'or')
            return grammar_or(grammar_1(cursor, or_dictionary.get(0)),
                              grammar_1(cursor, or_dictionary.get(1)))
        elif token_after_last_closer == 'and':
            # Split tokens by the first occurring 'and'.
            and_dictionary = SBiDer_helper.split_by(tokens, 'and')
            # BUG FIX: cursor was previously omitted from both grammar_1
            # calls here, so the token list bound to the cursor parameter and
            # the real token argument was missing -- unlike the parallel 'or'
            # branch above, which passed cursor correctly.
            return grammar_and(grammar_1(cursor, and_dictionary.get(0)),
                               grammar_1(cursor, and_dictionary.get(1)))
        else:
            # Bare (grammar_1); delegate to grammar_1.
            return grammar_1(cursor, tokens)
    else:
        # interactor; delegate to interactor.
        return interactor(cursor, tokens)
def get_all_output_transition_species(cursor, input_transition_id):
    """Obtain all species an output transition produces.

    :param cursor: open database cursor
    :param input_transition_id: output-transition id to look up (ot_id)
    :return: unique, flattened list of spe_id values for that transition
    """
    # NOTE(review): the transition id is quoted by string concatenation here;
    # if input_transition_id can come from untrusted input, this should use
    # the driver's parameter substitution instead -- depends on what
    # db_select supports.
    result_cursor = db_select(cursor, "OutputTransitionSpecies", ["spe_id"],
                              ["ot_id"], ["="],
                              ["'" + input_transition_id + "'"], [""])
    # Iterate the fetched rows directly instead of indexing by range(len(...)).
    species_list = [list(row) for row in result_cursor.fetchall()]
    return SBiDer_helper.uniquely_merge_multi_dimensional_list_of_lists(species_list)
def grammar_and(tokens1, tokens2):
    """Grammar for 'and': pairwise combination of the two token groups.

    :param tokens1: first group of token lists
    :param tokens2: second group of token lists
    :return: list with one uniquely-merged list per (tokens1, tokens2) pair
    """
    # Cartesian product: every element of tokens1 paired with every element
    # of tokens2, each pair merged into a single deduplicated list.
    return [
        SBiDer_helper.uniquely_merge_list_of_lists([first, second])
        for first in tokens1
        for second in tokens2
    ]
def grammar_0(cursor, tokens):
    """Grammar for the top-level production 'grammar_0 := grammar_1 = grammar_1'.

    :param cursor: database cursor forwarded to the sub-grammars
    :param tokens: token list to parse
    :return: result of grammar_output over the input and output halves
    :raises ValueError: if no '=' separator is present in tokens
    """
    if '=' not in tokens:
        raise ValueError("grammar_0(tokens): Usage: input = output")
    # Split on the first '='; key 0 holds the input tokens, key 1 the output.
    halves = SBiDer_helper.split_by(tokens, '=')
    return grammar_output(grammar_1(cursor, halves[0]),
                          grammar_1(cursor, halves[1]))
def build_direct_sbider_path(input_dictionary, repressor_dictionary, output_dictionary, input_species_list, output_species_list, path_queue, final_operon_path_list, memory_operon, memory_species, activated_paths, indirect_flag):
    """Breadth-first search over operons reachable from the queued species.

    Pops (visited operon path, just-produced species) tuples from path_queue
    and tries to fire every operon not already on that path; paths whose
    products match a target species list are finalized (direct mode) while
    non-matching extensions are re-enqueued.

    :param input_dictionary: operon id -> required input species lists
    :param repressor_dictionary: operon id -> repressor species
    :param output_dictionary: operon id -> produced species lists
    :param input_species_list: user-supplied starting species (forwarded to
        build_indirect_sbider_path only)
    :param output_species_list: target species lists to reach
    :param path_queue: FIFO of (operon path, produced species), mutated in place
    :param final_operon_path_list: accumulator of complete paths, mutated in place
    :param memory_operon: operons already fired, mutated in place
    :param memory_species: species already produced, mutated in place
    :param activated_paths: log of fired operons and their products, mutated in place
    :param indirect_flag: when True, do not finalize matched paths here and
        hand off to build_indirect_sbider_path once the queue drains
    :return: final_operon_path_list (also mutated in place)
    """
    while len(path_queue) != 0:
        # FIFO pop -> breadth-first exploration.
        (previously_visited_operon_list, just_previously_produced_species_list) = path_queue.pop(0)
        # Candidates: every operon not already on the current path.
        for an_operon in set(input_dictionary.keys()) - set(
                SBiDer_helper.uniquely_merge_multi_dimensional_list_of_lists(previously_visited_operon_list)):
            if an_operon not in memory_operon:
                # Final False argument: direct mode -- activation is judged
                # against the species just produced along this path.
                if SBiDer_helper.promoter_activation(input_dictionary, repressor_dictionary, an_operon,
                                                     just_previously_produced_species_list, memory_species, False):
                    visited_operon_list = previously_visited_operon_list + [an_operon]
                    just_produced_species = output_dictionary[an_operon]
                    just_produced_unique_species = SBiDer_helper.uniquely_merge_multi_dimensional_list_of_lists(
                        just_produced_species)
                    if SBiDer_helper.match_any_list(just_produced_species, output_species_list):
                        if not indirect_flag:
                            # Direct search: this path is complete.
                            final_operon_path_list.append(visited_operon_list)
                    else:
                        # Not a target yet: keep extending this path.
                        path_queue.append((visited_operon_list, just_produced_unique_species))
                    memory_operon.append(an_operon)
                    # NOTE(review): rebinds the local name only -- the
                    # caller's lists receive the appends/extends above but
                    # not the deduplication.
                    memory_operon = SBiDer_helper.remove_duplicates_within_list(memory_operon)
                    memory_species.extend(just_produced_unique_species)
                    memory_species = SBiDer_helper.remove_duplicates_within_list(memory_species)
                    activated_paths.append([[an_operon], just_produced_unique_species])
    if indirect_flag:
        # Queue drained: look for operons that fire from accumulated memory.
        build_indirect_sbider_path(input_dictionary, repressor_dictionary, output_dictionary, input_species_list,
                                   output_species_list, path_queue, final_operon_path_list, memory_operon,
                                   memory_species, activated_paths)
    return final_operon_path_list
def build_direct_sbider_path(input_dictionary, repressor_dictionary, output_dictionary, input_species_list, output_species_list, path_queue, final_operon_path_list, memory_operon, memory_species, activated_paths, indirect_flag):
    """Breadth-first search over operons reachable from the queued species.

    Consumes (visited operon path, just-produced species) tuples from
    path_queue; operons activated by those species extend the path, and a
    path producing a target species list is finalized in direct mode.

    :param input_dictionary: operon id -> required input species lists
    :param repressor_dictionary: operon id -> repressor species
    :param output_dictionary: operon id -> produced species lists
    :param input_species_list: starting species (forwarded to the indirect
        search only)
    :param output_species_list: target species lists to reach
    :param path_queue: FIFO of (operon path, produced species), mutated in place
    :param final_operon_path_list: accumulator of complete paths, mutated in place
    :param memory_operon: operons already fired, mutated in place
    :param memory_species: species already produced, mutated in place
    :param activated_paths: log of fired operons and their products, mutated in place
    :param indirect_flag: when True, matched paths are not finalized here and
        build_indirect_sbider_path runs after the queue drains
    :return: final_operon_path_list (also mutated in place)
    """
    while len(path_queue) != 0:
        # FIFO pop -> breadth-first order.
        (previously_visited_operon_list, just_previously_produced_species_list) = path_queue.pop(0)
        # Candidates: every operon not already on the current path.
        for an_operon in set(input_dictionary.keys()) - set(
                SBiDer_helper.uniquely_merge_multi_dimensional_list_of_lists(
                    previously_visited_operon_list)):
            if an_operon not in memory_operon:
                # Final False argument: direct mode -- activation is judged
                # against the species just produced along this path.
                if SBiDer_helper.promoter_activation(
                        input_dictionary, repressor_dictionary, an_operon,
                        just_previously_produced_species_list, memory_species,
                        False):
                    visited_operon_list = previously_visited_operon_list + [
                        an_operon
                    ]
                    just_produced_species = output_dictionary[an_operon]
                    just_produced_unique_species = SBiDer_helper.uniquely_merge_multi_dimensional_list_of_lists(
                        just_produced_species)
                    if SBiDer_helper.match_any_list(just_produced_species,
                                                    output_species_list):
                        if not indirect_flag:
                            # Direct search: this path is complete.
                            final_operon_path_list.append(visited_operon_list)
                    else:
                        # Not a target yet: keep extending this path.
                        path_queue.append((visited_operon_list,
                                           just_produced_unique_species))
                    memory_operon.append(an_operon)
                    # NOTE(review): rebinds the local name only -- the
                    # caller's lists receive the appends/extends but not
                    # the deduplication.
                    memory_operon = SBiDer_helper.remove_duplicates_within_list(
                        memory_operon)
                    memory_species.extend(just_produced_unique_species)
                    memory_species = SBiDer_helper.remove_duplicates_within_list(
                        memory_species)
                    activated_paths.append([[an_operon],
                                            just_produced_unique_species])
    if indirect_flag:
        # Queue drained: look for operons that fire from accumulated memory.
        build_indirect_sbider_path(input_dictionary, repressor_dictionary,
                                   output_dictionary, input_species_list,
                                   output_species_list, path_queue,
                                   final_operon_path_list, memory_operon,
                                   memory_species, activated_paths)
    return final_operon_path_list
def build_sbider_network(directory_path, user_query, indirect=False): print("** build_sbider_network") # Access database database_file = directory_path + "/SBiDer.db" conn, cur = db.db_open(database_file) # Dictionary of fragmented user inputs that satisfy user query logic_dictionary = parser.parse_logic(cur, user_query) # Dictionaries of: Operon <-> InputSpecies & Operon <-> OutputSpecies input_dictionary, output_dictionary = db.make_ope_id_spe_id_dics(cur) print("** input dictionary") SBiDer_helper.printplus(input_dictionary) print("** output dictionary") SBiDer_helper.printplus(output_dictionary) # Dictionary of: Operon <-> Repressor repressor_dictionary = db.make_ope_id_rep_spe_id_dic(cur) print("** repressor dictionary") SBiDer_helper.printplus(repressor_dictionary) # Build operon path for each fragmented user input, which satisfies user query all_operon_path = [] for input_species, output_species_list in logic_dictionary.items(): operon_path_per_start_species = [input_species] for output_species in output_species_list: operon_path_list = searcher.get_sbider_path(input_dictionary, repressor_dictionary, output_dictionary, list(input_species), output_species, indirect) operon_path_per_start_species.extend(operon_path_list) all_operon_path.append(operon_path_per_start_species) # Create JSON file needed to display the found genetic circuit path_json = grapher.create_subnetwork_json_string(cur, operon_path_per_start_species, database_file) return path_json
def make_output_ope_id_spe_id_dic(cursor):
    """Make operon output species dictionary.

    Walks the joined OperonOutputTransition/OutputTransitionSpecies rows and
    builds {ope_id: [[spe_id, ...], ...]} with one inner list per output
    transition. Assumes rows arrive grouped by ope_id and then ot_id --
    TODO confirm the query should carry an ORDER BY to guarantee that.

    :param cursor: open SQLite cursor on the SBiDer database
    :return: dict mapping operon id to a list of species-id lists
    """
    output_ope_id_spe_id_dict = {}
    merged_ope_ot_spe = cursor.execute('''SELECT OperonOutputTransition.ope_id, OperonOutputTransition.ot_id, OutputTransitionSpecies.spe_id FROM OperonOutputTransition, OutputTransitionSpecies WHERE OperonOutputTransition.ot_id = OutputTransitionSpecies.ot_id''')
    # Seed the dictionary with the first row; previous_* track the group keys.
    # NOTE(review): fetchone() returns None when the join is empty, which
    # would raise TypeError on this unpacking -- confirm the tables are
    # guaranteed non-empty.
    # previous ope_id, ot_id, and spe_id
    previous_operon, previous_output_transition, previous_species = merged_ope_ot_spe.fetchone()
    output_transition_list_idx = 0
    output_ope_id_spe_id_dict[previous_operon] = [[]]
    output_ope_id_spe_id_dict[previous_operon][output_transition_list_idx].append(previous_species.strip())
    # ope_id, ot_id, and spe_id
    for operon, output_transition, species in merged_ope_ot_spe.fetchall():
        if operon == previous_operon and not SBiDer_helper.contain_all_elements(output_ope_id_spe_id_dict[operon], [species]):
            # Same operon, new species: either extend the current transition
            # list or open a new one when the transition id changes.
            if output_transition == previous_output_transition:
                output_ope_id_spe_id_dict[operon][output_transition_list_idx].append(species.strip())
            else:
                output_transition_list_idx += 1
                output_ope_id_spe_id_dict[operon].append([])
                output_ope_id_spe_id_dict[operon][output_transition_list_idx].append(species.strip())
        else:
            # New operon (or duplicate species): start a fresh entry.
            output_transition_list_idx = 0
            output_ope_id_spe_id_dict[operon] = [[]]
            output_ope_id_spe_id_dict[operon][output_transition_list_idx].append(species.strip())
        previous_operon = operon
        previous_output_transition = output_transition
    return output_ope_id_spe_id_dict
def get_sbider_path(inp_dic, rep_dic, outp_dic, inp_spe, outp_spe, indirect_flag=False):
    """Search for operon paths from the input species to the output species.

    :param inp_dic: operon id -> input species lists
    :param rep_dic: operon id -> repressor species
    :param outp_dic: operon id -> output species lists
    :param inp_spe: starting species list
    :param outp_spe: target species list
    :param indirect_flag: enable the indirect (memory-based) search phase
    :return: deduplicated list of operon paths
    """
    # Mutable accumulators shared with the search routine.
    paths_found = []
    species_memory = []
    species_memory.extend(inp_spe)
    build_direct_sbider_path(inp_dic, rep_dic, outp_dic, inp_spe, outp_spe,
                             [([], inp_spe)],   # seed queue: empty path + inputs
                             paths_found,
                             [],                # operon memory
                             species_memory,
                             [],                # activated-paths log
                             indirect_flag)
    if len(paths_found) > 0:
        paths_found = SBiDer_helper.remove_duplicated_lists_within_a_list_of_lists(paths_found)
    return paths_found

# End of sbider_searcher.py