Example #1
def test_discard_quotes_with_as_pyasp_termset():
    answers = Answers(('a("b","d")', )).as_pyasp
    assert next(answers) == TermSet((Atom(predicate='a',
                                          args=('"b"', '"d"')), ))
    # as_pyasp takes precedence over discard_quotes, so quotes are kept.
    answers = Answers(('a("b","d")', )).as_pyasp.discard_quotes
    assert next(answers) == TermSet((Atom(predicate='a',
                                          args=('"b"', '"d"')), ))
Example #2
def test_discard_quotes_with_as_pyasp_termset_and_careful_parsing():
    answers = Answers(('a("b","d")', )).parse_args.as_pyasp.careful_parsing
    assert next(answers) == TermSet((Atom(predicate='a',
                                          args=('"b"', '"d"')), ))
    answers = Answers(
        ('a("b","d")', )).parse_args.as_pyasp.discard_quotes.careful_parsing
    assert next(answers) == TermSet(
        (Atom(predicate='a',
              args=('"b"', '"d"')), ))  # as_pyasp overrides discard_quotes
Example #3
def readSBMLseeds(filename):
    lpfacts = TermSet()
    tree = etree.parse(filename)
    sbml = tree.getroot()
    model = get_model(sbml)
    listOfSpecies = get_listOfSpecies(model)

    for e in listOfSpecies:
        if e.tag[0] == "{":
            _uri, tag = e.tag[1:].split("}")
        else:
            tag = e.tag
        if tag == "species":
            lpfacts.add(Atom('seed', ["\"" + e.attrib.get("id") + "\""]))
    return lpfacts
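A minimal usage sketch (the file name "seeds.sbml" is illustrative, and the example output in the comment is hypothetical):

facts = readSBMLseeds("seeds.sbml")
for atom in facts:
    print(atom)  # e.g. seed("M_glc__D_e")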
Example #4
def get_optimal_completions(draft, repairnet, seeds, targets, optimum, nmodels=0):

    instance = TermSet(draft.union(repairnet).union(targets).union(seeds))
    ireactions = compute_ireactions(instance)
    instance = TermSet(instance.union(ireactions))
    instance_f = utils.to_file(instance)

    prg = [minimal_completion_prg, instance_f]

    options = '--configuration=handy --opt-strategy=usc,0 --opt-mode=optN,{0}'.format(
        optimum)

    models = clyngor.solve(prg, options=options,
                           nb_model=nmodels).by_arity.discard_quotes
    opt_models = clyngor.opt_models_from_clyngor_answers(models)

    return opt_models
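Note that clingo's --opt-mode=optN enumerates all optimal models, here seeded with an already known bound. A hypothetical call, assuming draft, repairnet, seeds and targets are TermSets and optimum is the completion size computed beforehand (e.g. by get_minimal_completion_size):

for model in get_optimal_completions(draft, repairnet, seeds, targets, optimum):
    print(model)  # one cardinality-minimal completion per model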
Example #5
def get_union_of_optimal_completions(draft, repairnet, seeds, targets, optimum):

    instance = TermSet(draft.union(repairnet).union(targets).union(seeds))
    ireactions = compute_ireactions(instance)
    instance = TermSet(instance.union(ireactions))
    instance_f = utils.to_file(instance)
    prg = [minimal_completion_prg, instance_f]

    options = ('--configuration=jumpy --opt-strategy=usc,5 '
               '--enum-mode=brave --opt-mode=optN,{0}'.format(optimum))

    models = clyngor.solve(prg, options=options)
    for model in models.discard_quotes.by_arity:
        # brave enumeration converges to the union of all optimal models,
        # so only the last model is kept
        best_model = model

    os.unlink(instance_f)
    return best_model
Example #6
def compute_ireactions(instance):
    instance_f = utils.to_file(instance)
    prg = [ireaction_prg, instance_f]
    best_model = None

    models = clyngor.solve(prg)
    for model in models.discard_quotes.by_arity:
        best_model = model  # keep the last (best) model of the enumeration
    os.unlink(instance_f)

    output = TermSet()
    for pred in best_model:
        if pred == 'ireaction':
            for a in best_model[pred]:
                output.add(Atom('ireaction("' + a[0] + '","' + a[1] + '")'))

    return output
Example #7
def get_intersection_of_completions(draft, repairnet, seeds, targets):

    instance = TermSet(draft.union(repairnet).union(targets).union(seeds))
    ireactions = compute_ireactions(instance)
    instance = TermSet(instance.union(ireactions))
    instance_f = utils.to_file(instance)

    prg = [completion_prg, instance_f]
    options = '--enum-mode=cautious --opt-mode=ignore'

    best_model = None
    models = clyngor.solve(prg, options=options)
    for model in models.discard_quotes.by_arity:
        # cautious enumeration converges to the intersection of all models,
        # so only the last model is kept
        best_model = model

    os.unlink(instance_f)
    return best_model
Example #8
def get_minimal_completion_size(draft, repairnet, seeds, targets):

    instance = TermSet(draft.union(repairnet).union(targets).union(seeds))
    ireactions = compute_ireactions(instance)
    instance = TermSet(instance.union(ireactions))
    instance_f = utils.to_file(instance)

    prg = [minimal_completion_prg, instance_f]

    co = "--configuration=jumpy --opt-strategy=usc,5"

    optimum = None
    models = clyngor.solve(prg, options=co)
    for model in models.discard_quotes.by_arity:
        optimum = model  # keep the last (best) model of the optimisation

    os.unlink(instance_f)
    return optimum
Example #9
def extract_xreactions(model, return_atom=True):
    lst = set(a[0] for pred in model if pred == 'xreaction'
              for a in model[pred])
    if return_atom:
        atom = TermSet(
            Atom('xreaction("' + a[0] + '","' + a[1] + '")')
            for pred in model if pred == 'xreaction' for a in model[pred])
        return lst, atom
    else:
        return lst
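A sketch of both call forms, assuming model is an answer produced by the queries above:

names, atoms = extract_xreactions(model, return_atom=True)   # ids + Atom TermSet
names_only = extract_xreactions(model, return_atom=False)    # ids only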
Example #10
def copy_assignment(self, m):
    if m.optimality_proven:
        termset_from_model = TermSet(
            Atom.from_tuple_repr(atom)
            for atom in Parser().parse_terms(str(m)))
        flux = self.prop.assignment(m.thread_id)
        if self.unique:
            self.solutions.clear()
        self.solutions.append((termset_from_model, flux))
    return True
Example #11
def run_meneco(draft_sbml, seeds_sbml, targets_sbml, repair_sbml, enumeration):
    """Complete metabolic network by selecting reactions from a database

    Args:
        draft_sbml (str): SBML file name of metabolic network
        seeds_sbml (str): SBML file name of seeds
        targets_sbml (str): SBML file name of targets
        repair_sbml (str): SBML file name of repair database
        enumeration (bool): if True, enumerate all minimal completions
    """
    logger.info('Reading draft network from ' + draft_sbml)
    draftnet = sbml.readSBMLnetwork(draft_sbml, 'draft')
    # draftnet.to_file("draftnet.lp")

    logger.info('Reading seeds from ' + seeds_sbml)
    seeds = sbml.readSBMLseeds(seeds_sbml)
    # seeds.to_file("seeds.lp")

    logger.info('Reading targets from ' + targets_sbml)
    targets = sbml.readSBMLtargets(targets_sbml)
    # targets.to_file("targets.lp")

    logger.info('\nChecking draftnet for unproducible targets')
    model = query.get_unproducible(draftnet, seeds, targets)
    sys.stdout.flush()

    unproducible_targets_lst = extract_unprod_target(model)
    logger.info(str(len(unproducible_targets_lst)) + ' unproducible targets:')
    logger.info("\n".join(unproducible_targets_lst))

    if repair_sbml is None:
        return unproducible_targets_lst

    logger.info('\nReading repair network from ' + repair_sbml)
    repairnet = sbml.readSBMLnetwork(repair_sbml, 'repairnet')
    # repairnet.to_file("repairnet.lp")
    sys.stdout.flush()
    logger.info('done')

    all_reactions = draftnet
    all_reactions = TermSet(all_reactions.union(repairnet))
    logger.info('\nChecking draftnet + repairnet for unproducible targets')
    model = query.get_unproducible(all_reactions, seeds, targets)
    unproducible_targets = extract_unprod_target(model)

    logger.info('  still ' + str(len(unproducible_targets)) +
                ' unproducible targets:')
    logger.info("\n".join(unproducible_targets))
    never_producible = unproducible_targets  # same result as the query above

    reconstructable_targets = set()
    reconstructable_targets_atoms = TermSet()
    for t in unproducible_targets_lst:
        if t not in never_producible:
            reconstructable_targets.add(t)
            reconstructable_targets_atoms.add(Atom('target("' + t + '")'))

    logger.info('\n ' + str(len(reconstructable_targets)) +
                ' targets to reconstruct:')
    logger.info("\n".join(reconstructable_targets))

    if len(reconstructable_targets) == 0:
        utils.clean_up()
        quit()

    essential_reactions = TermSet()
    essential_reactions_to_print = set()
    essential_reactions_target = {}
    for t in reconstructable_targets:
        single_target = TermSet()
        single_target.add(Atom('target("' + t + '")'))
        logger.info('\nComputing essential reactions for ' + t)
        essentials = query.get_intersection_of_completions(
            draftnet, repairnet, seeds, single_target)

        essentials_to_print, essentials_atoms = extract_xreactions(
            essentials, True)

        essential_reactions_target[t] = essentials_to_print

        logger.info(' ' + str(len(essentials_to_print)) +
                    ' essential reactions found:')
        logger.info("\n".join(essentials_to_print))
        essential_reactions = TermSet(
            essential_reactions.union(essentials_atoms))
        essential_reactions_to_print = set(
            essential_reactions_to_print.union(essentials_to_print))

    logger.info('\nOverall ' + str(len(essential_reactions_to_print)) +
                ' essential reactions found.')
    logger.info("\n".join(essential_reactions_to_print))

    logger.info('\nAdding essential reactions to network.')
    draftnet = TermSet(draftnet.union(essential_reactions))

    utils.clean_up()

    # draftnet.to_file("draft.lp")
    # repairnet.to_file("repairnet.lp")
    # unproducible_targets.to_file("targets.lp")
    # seeds.to_file("seeds.lp")

    logger.info('\nComputing one minimal completion to produce all targets')
    one_min_sol = query.get_minimal_completion_size(
        draftnet, repairnet, seeds, reconstructable_targets_atoms)

    one_min_sol_lst = extract_xreactions(one_min_sol, False)
    optimum = len(one_min_sol_lst)
    logger.info("\n".join(one_min_sol_lst))

    logger.info('\nComputing common reactions in all completions with size ' +
                str(optimum))
    intersection_sol = query.get_intersection_of_optimal_completions(
        draftnet, repairnet, seeds, reconstructable_targets_atoms, optimum)

    intersection_sol_lst = extract_xreactions(intersection_sol, False)
    logger.info("\n".join(intersection_sol_lst))

    logger.info(
        '\nComputing union of reactions from all completions with size ' +
        str(optimum))
    union_sol = query.get_union_of_optimal_completions(
        draftnet, repairnet, seeds, reconstructable_targets_atoms, optimum)

    union_sol_lst = extract_xreactions(union_sol, False)
    logger.info("\n".join(union_sol_lst))

    if enumeration:
        logger.info('\nComputing all completions with size ' + str(optimum))
        enumeration_sol = query.get_optimal_completions(
            draftnet, repairnet, seeds, reconstructable_targets_atoms, optimum)
        count = 1
        enumeration_sol_lst = []
        for model in enumeration_sol:
            logger.info('Completion ' + str(count) + ': ')
            count += 1
            model_lst = extract_xreactions(model, False)
            enumeration_sol_lst.append(model_lst)
            logger.info("\n".join(model_lst))
        # TODO: return clean lists rather than the list version of terms
    else:
        enumeration_sol_lst = []

    return (unproducible_targets_lst, reconstructable_targets,
            essential_reactions_target, one_min_sol_lst, intersection_sol_lst,
            union_sol_lst, enumeration_sol_lst)
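A hypothetical call unpacking the seven returned values (file names are illustrative):

(unproducible, reconstructable, essentials, one_min_sol,
 intersection_sol, union_sol, enumeration_sol) = run_meneco(
    'draft.sbml', 'seeds.sbml', 'targets.sbml', 'repair.sbml', True)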
Example #12
def run_meneco(draftnet: str, seeds: str, targets: str, repairnet: str,
               enumeration: bool, json: bool):
    """Complete metabolic network by selecting reactions from a database

    Args:
        draftnet: SBML file name of metabolic network
        seeds: SBML file name of seeds
        targets: SBML file name of targets
        repairnet: SBML file name of repair database
        enumeration: if True, enumerate all minimal completions
        json: if True, suppress plain-text printing of the results
    """
    result = {}

    logger.info('Reading draft network ...')
    result['Draft network file'] = draftnet
    if not json:
        print('Draft network file: {0}'.format(draftnet))
    draftnet = sbml.readSBMLnetwork(draftnet, 'draft')
    # draftnet.to_file("draftnet.lp")

    logger.info('Reading seeds ...')
    result['Seeds file'] = seeds
    if not json:
        print('Seeds file: {0}'.format(seeds))
    seeds = sbml.readSBMLseeds(seeds)
    # seeds.to_file("seeds.lp")

    logger.info('Reading targets ...')
    result['Targets file'] = targets
    if not json:
        print('Targets file: {0}\n'.format(targets))
    targets = sbml.readSBMLtargets(targets)
    # targets.to_file("targets.lp")

    logger.info('Checking draftnet for unproducible targets')
    model = query.get_unproducible(draftnet, seeds, targets)

    unproducible_targets_lst = extract_unprod_target(model)

    result['Unproducible targets'] = list(unproducible_targets_lst)
    if not json:
        print('{0} unproducible targets:\n\t{1}\n'.format(
            len(unproducible_targets_lst),
            '\n\t'.join(unproducible_targets_lst)))

    if len(unproducible_targets_lst) == 0:
        utils.clean_up()
        return result

    if repairnet is None:
        utils.clean_up()
        return result

    logger.info('Reading repair db ...')
    result['Repair db file'] = repairnet
    if not json:
        print('Repair db file: {0}\n'.format(repairnet))
    repairnet = sbml.readSBMLnetwork(repairnet, 'repairnet')
    # repairnet.to_file("repairnet.lp")

    all_reactions = draftnet
    all_reactions = TermSet(all_reactions.union(repairnet))
    logger.info('Checking draftnet + repairnet for unproducible targets')
    model = query.get_unproducible(all_reactions, seeds, targets)
    never_producible = extract_unprod_target(model)

    result['Unreconstructable targets'] = list(never_producible)
    if not json:
        print('Still {0} unreconstructable targets:\n\t{1}\n'.format(
            len(never_producible), '\n\t'.join(never_producible)))

    reconstructable_targets = set()
    reconstructable_targets_atoms = TermSet()
    for t in unproducible_targets_lst:
        if t not in never_producible:
            reconstructable_targets.add(t)
            reconstructable_targets_atoms.add(Atom('target("' + t + '")'))

    result['Reconstructable targets'] = list(reconstructable_targets)
    if not json:
        print('{0} reconstructable targets:\n\t{1}\n'.format(
            len(reconstructable_targets),
            '\n\t'.join(reconstructable_targets)))

    if len(reconstructable_targets) == 0:
        utils.clean_up()
        return result

    essential_reactions = TermSet()
    essential_reactions_to_print = set()
    essential_reactions_target = {}
    for t in reconstructable_targets:
        single_target = TermSet()
        single_target.add(Atom('target("' + t + '")'))
        logger.info('Computing essential reactions for ' + t)
        essentials = query.get_intersection_of_completions(
            draftnet, repairnet, seeds, single_target)

        essentials_to_print, essentials_atoms = extract_xreactions(
            essentials, True)

        essential_reactions_target[t] = list(essentials_to_print)
        if not json:
            print('{0} essential reactions for target {1}:\n\t{2}\n'.format(
                len(essentials_to_print), t, '\n\t'.join(essentials_to_print)))

        essential_reactions = TermSet(
            essential_reactions.union(essentials_atoms))
        essential_reactions_to_print = set(
            essential_reactions_to_print.union(essentials_to_print))

    result['Essential reactions'] = essential_reactions_target
    if not json:
        print('Overall {0} essential reactions found:\n\t{1}\n'.format(
            len(essential_reactions_to_print),
            '\n\t'.join(essential_reactions_to_print)))

    logger.info('Adding essential reactions to network')
    draftnet = TermSet(draftnet.union(essential_reactions))

    utils.clean_up()

    # draftnet.to_file("draft.lp")
    # repairnet.to_file("repairnet.lp")
    # unproducible_targets.to_file("targets.lp")
    # seeds.to_file("seeds.lp")

    logger.info('Computing one minimal completion to produce all targets')
    one_min_sol = query.get_minimal_completion_size(
        draftnet, repairnet, seeds, reconstructable_targets_atoms)

    one_min_sol_lst = extract_xreactions(one_min_sol, False)
    optimum = len(one_min_sol_lst)

    result['One minimal completion'] = list(one_min_sol_lst)
    if not json:
        print('One minimal completion of size {0}:\n\t{1}\n'.format(
            len(one_min_sol_lst), '\n\t'.join(one_min_sol_lst)))

    logger.info(
        'Computing common reactions in all completions with size {0}'.format(
            optimum))
    intersection_sol = query.get_intersection_of_optimal_completions(
        draftnet, repairnet, seeds, reconstructable_targets_atoms, optimum)

    intersection_sol_lst = extract_xreactions(intersection_sol, False)

    result['Intersection of cardinality minimal completions'] = list(
        intersection_sol_lst)
    if not json:
        print(
            'Intersection of cardinality minimal completions:\n\t{0}\n'.format(
                '\n\t'.join(intersection_sol_lst)))

    logger.info('Computing union of reactions from all completions '
                'with size {0}'.format(optimum))
    union_sol = query.get_union_of_optimal_completions(
        draftnet, repairnet, seeds, reconstructable_targets_atoms, optimum)

    union_sol_lst = extract_xreactions(union_sol, False)

    result['Union of cardinality minimal completions'] = list(union_sol_lst)
    if not json:
        print('Union of cardinality minimal completions:\n\t{0}\n'.format(
            '\n\t'.join(union_sol_lst)))

    if enumeration:
        logger.info('Computing all completions with size {0}'.format(optimum))
        enumeration_sol = query.get_optimal_completions(
            draftnet, repairnet, seeds, reconstructable_targets_atoms, optimum)
        count = 1
        enumeration_sol_lst = []
        for model in enumeration_sol:
            model_lst = extract_xreactions(model, False)
            enumeration_sol_lst.append(list(model_lst))

            if not json:
                print('Completion {0}:\n\t{1}\n'.format(
                    count, '\n\t'.join(model_lst)))
            count += 1
        result['All cardinality minimal completions'] = enumeration_sol_lst

    return result
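Since the json parameter only suppresses the plain-text printing, a caller that wants JSON output serializes the returned dict itself. A hypothetical sketch (file names are illustrative):

import json

result = run_meneco('draft.sbml', 'seeds.sbml', 'targets.sbml', 'repair.sbml',
                    enumeration=False, json=True)
print(json.dumps(result, indent=2))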
Example #13
def readSBMLnetwork_with_score(filename, name):

    lpfacts = TermSet()
    tree = etree.parse(filename)
    sbml = tree.getroot()
    model = get_model(sbml)
    listOfReactions = get_listOfReactions(model)

    for e in listOfReactions:
        if e.tag[0] == "{":
            uri, tag = e.tag[1:].split("}")
        else:
            tag = e.tag
        if tag == "reaction":
            reactionId = e.attrib.get("id")
            lpfacts.add(
                Atom('reaction',
                     ["\"" + reactionId + "\"", "\"" + name + "\""]))
            if e.attrib.get("reversible") == "true":
                lpfacts.add(Atom('reversible', ["\"" + reactionId + "\""]))

            listOfReactants = get_listOfReactants(e)
            if listOfReactants is None:
                print("\n Warning:", reactionId, "listOfReactants=None")
            else:
                for r in listOfReactants:
                    lpfacts.add(
                        Atom('reactant', [
                            "\"" + r.attrib.get("species") + "\"",
                            "\"" + reactionId + "\"", "\"" + name + "\""
                        ]))

            listOfProducts = get_listOfProducts(e)
            if listOfProducts is None:
                print("\n Warning:", reactionId, "listOfProducts=None")
            else:
                for p in listOfProducts:
                    lpfacts.add(
                        Atom('product', [
                            "\"" + p.attrib.get("species") + "\"",
                            "\"" + reactionId + "\"", "\"" + name + "\""
                        ]))

            score = get_score(e)
            if score != 0:
                value = int(float(score) * 1000)
                lpfacts.add(
                    Atom('value', ["\"" + reactionId + "\"",
                                   str(value)]))
            else:
                # no score available for this reaction: default value 0
                lpfacts.add(Atom('value', ["\"" + reactionId + "\"", "0"]))

    if name == 'draft':
        lpfacts.add(Atom('draft', ["\"" + name + "\""]))

    return lpfacts
Example #14
def readSBMLnetwork(filename, name):

    lpfacts = TermSet()
    tree = etree.parse(filename)
    sbml = tree.getroot()
    model = get_model(sbml)
    listOfReactions = get_listOfReactions(model)

    for e in listOfReactions:
        if e.tag[0] == "{":
            _uri, tag = e.tag[1:].split("}")
        else:
            tag = e.tag
        if tag == "reaction":
            reactionId = e.attrib.get("id")
            lpfacts.add(
                Atom('reaction',
                     ["\"" + reactionId + "\"", "\"" + name + "\""]))
            if e.attrib.get("reversible") == "true":
                lpfacts.add(
                    Atom('reversible',
                         ["\"" + reactionId + "\"", "\"" + name + "\""]))

            listOfReactants = get_listOfReactants(e)
            if listOfReactants is None:
                logger.warning(
                    "Warning: {0} listOfReactants=None".format(reactionId))
            else:
                for r in listOfReactants:
                    lpfacts.add(
                        Atom('reactant', [
                            "\"" + r.attrib.get("species") + "\"",
                            "\"" + reactionId + "\"", "\"" + name + "\""
                        ]))

            listOfProducts = get_listOfProducts(e)
            if listOfProducts is None:
                logger.warning(
                    "Warning: {0} listOfProducts=None".format(reactionId))
            else:
                for p in listOfProducts:
                    lpfacts.add(
                        Atom('product', [
                            "\"" + p.attrib.get("species") + "\"",
                            "\"" + reactionId + "\"", "\"" + name + "\""
                        ]))

    if name == 'draft':
        lpfacts.add(Atom('draft', ["\"" + name + "\""]))

    return lpfacts
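A sketch of loading the two networks used by the completion queries above (file names are illustrative; passing the name 'draft' additionally emits a draft(...) fact, as the last lines of the function show):

draftnet = readSBMLnetwork('draft.sbml', 'draft')
repairnet = readSBMLnetwork('repair.sbml', 'repairnet')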
Example #15
def test_as_pyasp_termset_termset_by_predicate():
    answers = Answers(('a("b","d")', )).as_pyasp.by_predicate
    assert next(answers) == {
        'a': TermSet((Atom(predicate='a', args=('"b"', '"d"')), ))}