Пример #1
0
def assemble_model(stmts):
    """Assemble INDRA Statements into a PySB model with initial conditions.

    Targeted agents and agents with no active upstream regulator get a
    50/50 split between base and extended initial conditions, chemical
    agents get a high abundance, and all other monomers have their extended
    state zeroed out. Binding forward-rate parameters are scaled up 100x.

    Parameters
    ----------
    stmts : list[indra.statements.Statement]
        The INDRA Statements to assemble.

    Returns
    -------
    pysb.Model
        The assembled model.
    """
    pa = PysbAssembler()
    pa.add_statements(stmts)
    model = pa.make_model(policies='one_step')
    pa.add_default_initial_conditions(100.0)

    # Catch Exception rather than using a bare except so that
    # KeyboardInterrupt/SystemExit still propagate; fall back to empty
    # agent lists when classification fails.
    try:
        targeted_agents = get_targeted_agents(stmts)
        no_upstream_active_agents = get_no_upstream_active_agents(stmts)
    except Exception:
        targeted_agents = []
        no_upstream_active_agents = []
    try:
        chemical_agents = get_chemical_agents(stmts)
    except Exception:
        chemical_agents = []

    for m in model.monomers:
        try:
            if m.name in targeted_agents or m.name in no_upstream_active_agents:
                pysb_assembler.set_base_initial_condition(model,
                    model.monomers[m.name], 50.0)
                pysb_assembler.set_extended_initial_condition(model, m, 50.0)
            elif m.name in chemical_agents:
                pysb_assembler.set_base_initial_condition(model,
                    model.monomers[m.name], 10000.0)
            else:
                pysb_assembler.set_extended_initial_condition(model, m, 0)
        except Exception:
            # Default to a zeroed extended state if setting conditions fails.
            pysb_assembler.set_extended_initial_condition(model, m, 0)
    # Tweak parameters: scale binding forward rates up by 100x
    for param in model.parameters:
        if 'kf' in param.name and 'bind' in param.name:
            param.value = param.value * 100
    return model
Пример #2
0
def assemble_model(stmts):
    """Assemble INDRA Statements into a PySB model with initial conditions.

    Targeted agents and agents with no active upstream regulator get a
    50/50 split between base and extended initial conditions, chemical
    agents get a high abundance, and all other monomers have their extended
    state zeroed out. Binding forward-rate parameters are scaled up 100x.

    Parameters
    ----------
    stmts : list[indra.statements.Statement]
        The INDRA Statements to assemble.

    Returns
    -------
    pysb.Model
        The assembled model.
    """
    pa = PysbAssembler()
    pa.add_statements(stmts)
    model = pa.make_model(policies='one_step')
    pa.add_default_initial_conditions(100.0)

    # Catch Exception rather than using a bare except so that
    # KeyboardInterrupt/SystemExit still propagate; fall back to empty
    # agent lists when classification fails.
    try:
        targeted_agents = get_targeted_agents(stmts)
        no_upstream_active_agents = get_no_upstream_active_agents(stmts)
    except Exception:
        targeted_agents = []
        no_upstream_active_agents = []
    try:
        chemical_agents = get_chemical_agents(stmts)
    except Exception:
        chemical_agents = []

    for m in model.monomers:
        try:
            if m.name in targeted_agents or m.name in no_upstream_active_agents:
                pysb_assembler.set_base_initial_condition(model,
                    model.monomers[m.name], 50.0)
                pysb_assembler.set_extended_initial_condition(model, m, 50.0)
            elif m.name in chemical_agents:
                pysb_assembler.set_base_initial_condition(model,
                    model.monomers[m.name], 10000.0)
            else:
                pysb_assembler.set_extended_initial_condition(model, m, 0)
        except Exception:
            # Default to a zeroed extended state if setting conditions fails.
            pysb_assembler.set_extended_initial_condition(model, m, 0)
    # Tweak parameters: scale binding forward rates up by 100x
    for param in model.parameters:
        if 'kf' in param.name and 'bind' in param.name:
            param.value = param.value * 100
    return model
def get_subnetwork(statements, nodes):
    """Assemble a PySB model from the statements touching the given nodes.

    The statements are first filtered down to those involving the named
    nodes, then assembled into an executable model with INDRA's PySB
    Assembler.

    Parameters
    ----------
    statements : list[indra.statements.Statement]
        INDRA Statements to extract a subnetwork from.
    nodes : list[str]
        Names of the nodes defining the subnetwork.

    Returns
    -------
    model : pysb.Model
        A PySB model assembled from the filtered statements.
    """
    relevant_statements = _filter_statements(statements, nodes)
    assembler = PysbAssembler()
    assembler.add_statements(relevant_statements)
    return assembler.make_model()
Пример #4
0
 def assemble_pysb(self, mode='local', bucket=EMMAA_BUCKET_NAME):
     """Assemble the model into PySB and return the assembled model.

     Parameters
     ----------
     mode : str
         If 's3', also export the model into the configured formats and
         upload each export to S3; any other value skips exporting.
     bucket : str
         Name of the S3 bucket to upload exports to.

     Returns
     -------
     pysb.Model
         The assembled PySB model.
     """
     if not self.assembled_stmts:
         self.run_assembly()
     pa = PysbAssembler()
     pa.add_statements(self.assembled_stmts)
     pysb_model = pa.make_model()
     if mode == 's3':
         for exp_f in self.export_formats:
             if exp_f not in {
                     'sbml', 'kappa', 'kappa_im', 'kappa_cm', 'bngl',
                     'sbgn', 'pysb_flat', 'gromet'
             }:
                 continue
             elif exp_f == 'gromet':
                 # Export gromet here only if there's no separate "dynamic"
                 # pysb assembly configured.
                 if 'dynamic' in self.assembly_config:
                     # Bug fix: nothing was exported in this case, so skip
                     # the upload below instead of referencing an undefined
                     # (or stale, from a previous iteration) fname.
                     continue
                 fname = f'gromet_{self.date_str}.json'
                 pysb_to_gromet(pysb_model, self.name,
                                self.assembled_stmts, fname)
             else:
                 fname = f'{exp_f}_{self.date_str}.{exp_f}'
                 pa.export_model(exp_f, fname)
             logger.info(f'Uploading {fname}')
             client = get_s3_client(unsigned=False)
             client.upload_file(fname, bucket,
                                f'exports/{self.name}/{fname}')
     return pysb_model
Пример #5
0
 def assemble_pysb(self):
     """Run assembly if needed, then build and return the PySB model."""
     if not self.assembled_stmts:
         self.run_assembly()
     assembler = PysbAssembler()
     assembler.add_statements(self.assembled_stmts)
     return assembler.make_model()
Пример #6
0
def test_eidos_to_pysb():
    """Smoke test: Eidos statements assemble into PySB and export cleanly."""
    stmts = __get_stmts_from_remote_jsonld()
    pa = PysbAssembler()

    # Make sure these don't error
    pa.add_statements(stmts)
    pa.make_model()
    for fmt in ['kappa', 'sbml', 'sbgn']:
        exp_str = pa.export_model(fmt)
        # Fixed typo in the failure message: "psyb" -> "pysb"
        assert exp_str, "Got no exported model from eidos->pysb to %s." % fmt
    return
Пример #7
0
def processText(Text):
    """Turn a natural-language mechanism description into a PySB model."""
    assembler = PysbAssembler()
    # Extract INDRA Statements from the text via the TRIPS reader
    processor = trips.process_text(Text)
    # Load the extracted mechanisms into the assembler
    assembler.add_statements(processor.statements)
    # Build the model using two-step binding policies
    return assembler.make_model(policies='two_step')
Пример #8
0
def test_getting_started7_8():
    """Mirror documentation chunks 7 and 8: assemble, then export to SBML."""
    # Chunk 7
    stmts = gn_stmts  # Added only in this test, not in docs
    from indra.assemblers.pysb import PysbAssembler
    assembler = PysbAssembler()
    assembler.add_statements(stmts)
    assert assembler.make_model()

    # Chunk 8
    assert assembler.export_model('sbml')
Пример #9
0
def test_eidos_to_pysb():
    """Eidos statements assemble into a PySB model with rules and export."""
    stmts = __get_stmts_from_remote_jsonld()
    pa = PysbAssembler()

    # Make sure these don't error
    pa.add_statements(stmts)
    model = pa.make_model()
    assert model.rules, model.rules
    for fmt in ['kappa', 'sbml', 'sbgn']:
        exp_str = pa.export_model(fmt)
        # Fixed typo in the failure message: "psyb" -> "pysb"
        assert exp_str, "Got no exported model from eidos->pysb to %s." % fmt
    return
Пример #10
0
def test_readme_using_indra1():
    """Replicate the README example: text -> TRIPS -> PySB model."""
    from indra.sources import trips
    from indra.assemblers.pysb import PysbAssembler
    assembler = PysbAssembler()
    # Read a natural-language mechanism with the TRIPS system
    processor = trips.process_text(
        'MEK2 phosphorylates ERK1 at Thr-202 and Tyr-204')
    # Load the extracted mechanisms into the assembler
    assembler.add_statements(processor.statements)
    # Build the executable model
    assert assembler.make_model(policies='two_step')
Пример #11
0
    def post(self):
        """Assemble INDRA Statements and return PySB model string.

        Parameters
        ----------
        statements : list[indra.statements.Statement.to_json()]
            A list of INDRA Statements to assemble.

        export_format : str
            The format to export into, for instance "kappa", "bngl",
            "sbml", "matlab", "mathematica", "potterswheel". See
            http://pysb.readthedocs.io/en/latest/modules/export/index.html
            for a list of supported formats. In addition to the formats
            supported by PySB itself, this method also provides "sbgn"
            output.

        Returns
        -------
        image or model
            Assembled exported model. If export_format is kappa_im or kappa_cm,
            image is returned. Otherwise model string is returned.
        """
        args = request.json
        stmts_json = args.get('statements')
        export_format = args.get('export_format')
        stmts = stmts_from_json(stmts_json)
        pa = PysbAssembler()
        pa.add_statements(stmts)
        pa.make_model()
        try:
            # Zero out the extended (e.g. active-form) initial conditions;
            # best-effort, so failures are only logged.
            for m in pa.model.monomers:
                pysb_assembler.set_extended_initial_condition(pa.model, m, 0)
        except Exception as e:
            logger.exception(e)

        if not export_format:
            model_str = pa.print_model()
        elif export_format in ('kappa_im', 'kappa_cm'):
            # These formats render a PNG image rather than a model string.
            # Removed the unused locals `root` and `graph` from the original.
            fname = 'model_%s.png' % export_format
            pa.export_model(format=export_format, file_name=fname)
            with open(fname, 'rb') as fh:
                data = 'data:image/png;base64,%s' % \
                    base64.b64encode(fh.read()).decode()
                return {'image': data}
        else:
            try:
                model_str = pa.export_model(format=export_format)
            except Exception as e:
                logger.exception(e)
                model_str = ''
        res = {'model': model_str}
        return res
Пример #12
0
def indra_processing(statements):
    """Assemble statements into a PySB model and print its components."""
    assembler = PysbAssembler()
    assembler.add_statements(statements)
    # assembler.make_model(policies='two_step')
    assembler.make_model()
    print(10 * '=', 'Monomer', 10 * '=')
    for component in assembler.model.monomers:
        print(component)
    for component in assembler.model.rules:
        print(component)
    for component in assembler.model.parameters:
        print(component)
    return assembler
Пример #13
0
def test_check_model():
    """The RAF->MEK, active-MEK->ERK model should explain RAF activating ERK."""
    explain = Activation(raf, erk)
    mek_active = Agent('MEK', db_refs={'FPLX': 'MEK'},
                       activity=ActivityCondition('activity', True))
    model_stmts = [Activation(raf, mek), Activation(mek_active, erk)]
    # Assemble the statements into a PySB model
    assembler = PysbAssembler()
    assembler.add_statements(model_stmts)
    assembler.make_model(policies='one_step')
    diagnoser = ModelDiagnoser(model_stmts, assembler.model, explain)
    result = diagnoser.check_explanation()
    assert result['has_explanation'] is True
    # The explanation path should consist of exactly the two model statements
    path = result['explanation_path']
    assert len(path) == 2
    assert path[0] == model_stmts[0]
    assert path[1] == model_stmts[1]
Пример #14
0
 def assemble_dynamic_pysb(self, **kwargs):
     """Assemble a version of a PySB model for dynamic simulation.

     Returns
     -------
     pysb.Model or None
         The assembled model, or None when no 'dynamic' assembly steps
         are configured for this model.
     """
     # First need to run regular assembly
     if not self.assembled_stmts:
         self.run_assembly()
     if 'dynamic' in self.assembly_config:
         logger.info('Assembling dynamic PySB model')
         ap = AssemblyPipeline(self.assembly_config['dynamic'])
         # Not overwrite assembled stmts
         stmts = deepcopy(self.assembled_stmts)
         new_stmts = ap.run(stmts)
         pa = PysbAssembler()
         pa.add_statements(new_stmts)
         pysb_model = pa.make_model()
         return pysb_model
     logger.info('Did not find dynamic assembly steps')
Пример #15
0
def assemble_model(model_name, statements):
    """Assemble statements into a named, timed PySB model and save it."""
    # PySB assembly, timed for diagnostics
    assembler = PysbAssembler()
    assembler.add_statements(statements)
    start = time.time()
    model = assembler.make_model()
    elapsed = time.time() - start
    print('Assembly took %.2fs' % elapsed)
    model.name = model_name

    add_observable(model)
    set_parameters(model)

    # Persist the model to <model_name>.py and return it
    assembler.model = model
    assembler.save_model('%s.py' % model_name)
    return model
Пример #16
0
def test_check_model():
    """The RAF->MEK, active-MEK->ERK model should explain RAF activating ERK."""
    explain = Activation(raf, erk)
    # MEK agent constrained to its active state
    mek_active = Agent('MEK',
                       db_refs={'FPLX': 'MEK'},
                       activity=ActivityCondition('activity', True))
    model_stmts = [Activation(raf, mek), Activation(mek_active, erk)]
    # Build the pysb model
    pa = PysbAssembler()
    pa.add_statements(model_stmts)
    pa.make_model(policies='one_step')
    md = ModelDiagnoser(model_stmts, pa.model, explain)
    result = md.check_explanation()
    assert result['has_explanation'] is True
    # The explanation path should be exactly the two model statements in order
    path = result['explanation_path']
    assert len(path) == 2
    assert path[0] == model_stmts[0]
    assert path[1] == model_stmts[1]
Пример #17
0
def remove_kappa_dead_rules(stmts, model, dead_rules):
    """Drop statements behind Kappa-dead rules and reassemble the model."""
    # FIXME: we should probably check that a statement we remove has all its
    # generated rules recognized as dead. If it has at least one live rule
    # coming from it, we shouldn't remove it. But the dead rules should still
    # be removed somehow from the final model.
    # Map each dead rule back to its source statement UUID via the model's
    # 'from_indra_statement' annotations.
    dead_uuids = {ann.object
                  for rule in dead_rules
                  for ann in model.annotations
                  if ann.subject == rule
                  and ann.predicate == 'from_indra_statement'}
    live_uuids = {stmt.uuid for stmt in stmts} - dead_uuids
    stmts = ac.filter_uuid_list(stmts, live_uuids)
    assembler = PysbAssembler()
    assembler.add_statements(stmts)
    return stmts, assembler.make_model()
Пример #18
0
def assemble_model(model_name, statements):
    """Assemble statements into a named PySB model and save it to a file.

    Parameters
    ----------
    model_name : str
        Name assigned to the model; also used as the output file prefix.
    statements : list[indra.statements.Statement]
        The INDRA Statements to assemble.

    Returns
    -------
    pysb.Model
        The assembled model (also saved to '<model_name>.py').
    """
    # Pysb assembly, timed for diagnostics
    pa = PysbAssembler()
    pa.add_statements(statements)
    ts = time.time()
    model = pa.make_model()
    te = time.time()
    print('Assembly took %.2fs' % (te-ts))
    model.name = model_name

    add_observable(model)
    set_parameters(model)

    # Save and return model
    pa.model = model
    pa.save_model('%s.py' % model_name)
    return model
Пример #19
0
def test_nl_modeling():
    """End-to-end natural-language modeling walkthrough from the docs.

    Each chunk mirrors a code chunk in the documentation; prints in the
    docs are replaced here by asserts.
    """
    # 1 code chunk
    from indra.sources import trips
    model_text = 'MAP2K1 phosphorylates MAPK1 and DUSP6 dephosphorylates MAPK1.'
    tp = trips.process_text(model_text)

    # 2nd code chunk
    for st in tp.statements:
        assert st.evidence[0].text  # Replaces a print statement in the doc

    # 3rd code chunk
    from indra.assemblers.pysb import PysbAssembler
    pa = PysbAssembler()
    pa.add_statements(tp.statements)
    pa.make_model(policies='two_step')

    # 4th code chunk
    for monomer in pa.model.monomers:
        assert monomer  # This replaces a print statements in the doc

    # 5th code chunk
    for rule in pa.model.rules:
        assert rule  # This replaces a print statements in the doc

    # 6th code chunk
    for parameter in pa.model.parameters:
        assert parameter  # This replaces a print statements in the doc

    # 7th code chunk
    for annotation in pa.model.annotations:
        assert annotation  # This replaces a print statements in the doc

    # 8th code chunk (this code is currently in a commented out section)
    pa.set_context('A375_SKIN')
    for monomer_pattern, parameter in pa.model.initial_conditions:
        assert monomer_pattern
        assert parameter.value

    # 9th code chunk
    _ = pa.export_model('sbml')
    assert _
    _ = pa.export_model('bngl')
    assert _
Пример #20
0
def assemble_pysb(stmts, data_genes, out_file):
    """Return an assembled PySB model.

    Preprocesses the statements, writes a SIF equivalent, caches several
    statement dumps along the way, then assembles, contextualizes and
    saves the PySB model.

    Parameters
    ----------
    stmts : list[indra.statements.Statement]
        Statements to assemble.
    data_genes : list[str]
        Gene names used during statement preprocessing.
    out_file : str
        Path the assembled model is saved to; its stem is also used as the
        prefix for the various intermediate output files.

    Returns
    -------
    pysb.Model
        The assembled (and pickled) model.
    """
    base_file, _ = os.path.splitext(out_file)
    #stmts = ac.load_statements('%s.pkl' % base_file)
    stmts = preprocess_stmts(stmts, data_genes)

    # Make a SIF model equivalent to the PySB model
    # Useful for making direct comparisons in pathfinding
    sa = SifAssembler(stmts)
    sa.make_model(use_name_as_key=True, include_mods=True,
                  include_complexes=True)
    sif_str = sa.print_model(include_unsigned_edges=True)
    with open('%s_pysb.sif' % base_file, 'wt') as f:
        f.write(sif_str)

    # This is the "final" set of statements going into the assembler so it
    # makes sense to cache these.
    # This is also the point where index cards can be generated
    ac.dump_statements(stmts, '%s_before_pa.pkl' % base_file)
    assemble_index_cards(stmts, 'output/index_cards')

    # Save a version of statements with no evidence for faster loading
    for s in stmts:
        s.evidence = []
        for ss in s.supports + s.supported_by:
            ss.evidence = []
    ac.dump_statements(stmts, '%s_no_evidence.pkl' % base_file)

    # Assemble model
    pa = PysbAssembler()
    pa.add_statements(stmts)
    pa.make_model(reverse_effects=False)
    #ac.dump_statements(pa.statements, '%s_after_pa.pkl' % base_file)
    # Set context
    set_context(pa)
    # Add observables
    add_observables(pa.model)
    pa.save_model(out_file)
    with open('korkut_pysb.pkl', 'wb') as fh:
        pickle.dump(pa.model, fh)
    #pa.export_model('kappa', '%s.ka' % base_file)
    return pa.model
Пример #21
0
def get_subnetwork(statements, nodes, relevance_network=None,
                   relevance_node_lim=10):
    """Assemble a PySB model for the subnetwork around the given nodes.

    Statements are filtered to those involving the given nodes; when a
    relevance network is supplied, up to ``relevance_node_lim`` additional
    relevant nodes are included first. The filtered statements are then
    assembled into an executable model with INDRA's PySB Assembler.

    Parameters
    ----------
    statements : list[indra.statements.Statement]
        INDRA Statements to extract a subnetwork from.
    nodes : list[str]
        Names of the nodes defining the subnetwork.
    relevance_network : Optional[str]
        UUID of the NDEx network used to find nodes relevant to ``nodes``.
    relevance_node_lim : Optional[int]
        Maximum number of relevance-based nodes to add.

    Returns
    -------
    model : pysb.Model
        A PySB model assembled from the subnetwork's statements.
    """
    if relevance_network is None:
        all_nodes = nodes
    else:
        relevant = _find_relevant_nodes(nodes, relevance_network,
                                        relevance_node_lim)
        all_nodes = nodes + relevant
    subnetwork_statements = _filter_statements(statements, all_nodes)
    assembler = PysbAssembler()
    assembler.add_statements(subnetwork_statements)
    return assembler.make_model()
Пример #22
0
def test_propose_statement():
    """A missing MEK->ERK link should be proposed to connect the path."""
    jun = Agent('JUN', db_refs={'HGNC':'6204', 'UP': 'P05412'})
    explain = Activation(raf, jun)
    erk_active = Agent('ERK', db_refs={'FPLX': 'ERK'},
                       activity=ActivityCondition('activity', True))
    # Deliberately leave out the MEK-activates-ERK statement
    model_stmts = [Activation(raf, mek), Activation(erk_active, jun)]
    # Assemble the statements into a PySB model
    assembler = PysbAssembler()
    assembler.add_statements(model_stmts)
    assembler.make_model(policies='one_step')
    diagnoser = ModelDiagnoser(model_stmts, assembler.model, explain)
    result = diagnoser.check_explanation()
    assert result['has_explanation'] is False
    assert result.get('explanation_path') is None
    inf_prop = result.get('connect_rules')
    assert inf_prop == ('RAF_activates_MEK_activity',
                        'ERK_act_activates_JUN_activity'), inf_prop
    stmt_prop = result.get('connect_stmts')
    assert stmt_prop == (model_stmts[0], model_stmts[1])
    stmt_suggestions = diagnoser.suggest_statements(*stmt_prop)
Пример #23
0
def get_subnetwork(statements, nodes, relevance_network=None,
                   relevance_node_lim=10):
    """Return a PySB model based on a subset of given INDRA Statements.

    Statements are first filtered for nodes in the given list and other nodes
    are optionally added based on relevance in a given network. The filtered
    statements are then assembled into an executable model using INDRA's
    PySB Assembler.

    Parameters
    ----------
    statements : list[indra.statements.Statement]
        A list of INDRA Statements to extract a subnetwork from.
    nodes : list[str]
        The names of the nodes to extract the subnetwork for.
    relevance_network : Optional[str]
        The UUID of the NDEx network in which nodes relevant to the given
        nodes are found.
    relevance_node_lim : Optional[int]
        The maximal number of additional nodes to add to the subnetwork
        based on relevance.

    Returns
    -------
    model : pysb.Model
        A PySB model object assembled using INDRA's PySB Assembler from
        the INDRA Statements corresponding to the subnetwork.
    """
    # Optionally expand the node list with relevant nodes from NDEx
    if relevance_network is not None:
        relevant_nodes = _find_relevant_nodes(nodes, relevance_network,
                                              relevance_node_lim)
        all_nodes = nodes + relevant_nodes
    else:
        all_nodes = nodes
    # Filter to statements involving the (possibly expanded) node set,
    # then assemble them into a PySB model
    filtered_statements = _filter_statements(statements, all_nodes)
    pa = PysbAssembler()
    pa.add_statements(filtered_statements)
    model = pa.make_model()
    return model
Пример #24
0
def assemble_pysb():
    """Assemble INDRA Statements and return PySB model string."""
    if request.method == 'OPTIONS':
        return {}
    payload = json.loads(request.body.read().decode('utf-8'))
    stmts = stmts_from_json(payload.get('statements'))
    export_format = payload.get('export_format')
    assembler = PysbAssembler()
    assembler.add_statements(stmts)
    assembler.make_model()
    if export_format:
        # Exporting can fail for unsupported formats; log and return ''
        try:
            model_str = assembler.export_model(format=export_format)
        except Exception as e:
            logger.exception(e)
            model_str = ''
    else:
        model_str = assembler.print_model()
    return {'model': model_str}
Пример #25
0
def test_propose_statement():
    """A missing MEK->ERK link should be proposed to connect the path."""
    jun = Agent('JUN', db_refs={'HGNC': '6204', 'UP': 'P05412'})
    explain = Activation(raf, jun)
    erk_active = Agent('ERK',
                       db_refs={'FPLX': 'ERK'},
                       activity=ActivityCondition('activity', True))
    # Leave out MEK activates ERK
    model_stmts = [Activation(raf, mek), Activation(erk_active, jun)]
    # Build the pysb model
    pa = PysbAssembler()
    pa.add_statements(model_stmts)
    pa.make_model(policies='one_step')
    md = ModelDiagnoser(model_stmts, pa.model, explain)
    result = md.check_explanation()
    # No complete path exists, but the diagnoser should propose which rules
    # and statements to connect to create one
    assert result['has_explanation'] is False
    assert result.get('explanation_path') is None
    inf_prop = result.get('connect_rules')
    assert inf_prop == ('RAF_activates_MEK_activity',
                        'ERK_act_activates_JUN_activity'), inf_prop
    stmt_prop = result.get('connect_stmts')
    assert stmt_prop == (model_stmts[0], model_stmts[1])
    stmt_suggestions = md.suggest_statements(*stmt_prop)
Пример #26
0
    def replace_agent(self, agent_name, agent_replacement_names, model_id):
        """Replace an agent in a model with other agents.

        This is used, for instance, to expand a protein family to
        multiple specific proteins.

        Parameters
        ----------
        agent_name : str
            Name of the agent to replace.
        agent_replacement_names : list[str]
            Names of the agents to substitute in.
        model_id : int
            1-based ID of the model whose statements are edited.

        Returns
        -------
        pysb.Model
            The model reassembled from the updated statements.
        """
        # Bug fix: iterate over a snapshot of the list. The original looped
        # over the live list while removing from it (and while
        # extend_statements appended to it), which skipped the statement
        # immediately following each removal.
        for stmt in list(self.statements[model_id-1]):
            agent_key = [i for i, m in enumerate(stmt.agent_list())
                         if m is not None and m.name == agent_name]
            if agent_key:
                self.statements[model_id-1].remove(stmt)
                for p in agent_replacement_names:
                    s = copy.deepcopy(stmt)
                    if isinstance(stmt, Complex):
                        s.members[agent_key[0]].name = p
                    else:
                        # NOTE(review): indexing __dict__ by integer position
                        # looks fragile — confirm statement attribute layout
                        s.__dict__[agent_key[0]].name = p
                    self.extend_statements([s], model_id)
        pa = PysbAssembler()
        pa.add_statements(self.statements[model_id-1])
        model = pa.make_model()
        pa.add_default_initial_conditions(self.default_initial_amount)
        return model
Пример #27
0
    def replace_agent(self, agent_name, agent_replacement_names, model_id):
        """Replace an agent in a model with other agents.

        This is used, for instance, to expand a protein family to
        multiple specific proteins.

        Parameters
        ----------
        agent_name : str
            Name of the agent to replace.
        agent_replacement_names : list[str]
            Names of the agents to substitute in.
        model_id : int
            1-based ID of the model whose statements are edited.

        Returns
        -------
        pysb.Model
            The model reassembled from the updated statements.
        """
        # Bug fix: iterate over a snapshot of the list. The original looped
        # over the live list while removing from it (and while
        # extend_statements appended to it), which skipped the statement
        # immediately following each removal.
        for stmt in list(self.statements[model_id-1]):
            agent_key = [i for i, m in enumerate(stmt.agent_list())
                         if m is not None and m.name == agent_name]
            if agent_key:
                self.statements[model_id-1].remove(stmt)
                for p in agent_replacement_names:
                    s = copy.deepcopy(stmt)
                    if isinstance(stmt, Complex):
                        s.members[agent_key[0]].name = p
                    else:
                        # NOTE(review): indexing __dict__ by integer position
                        # looks fragile — confirm statement attribute layout
                        s.__dict__[agent_key[0]].name = p
                    self.extend_statements([s], model_id)
        pa = PysbAssembler()
        pa.add_statements(self.statements[model_id-1])
        model = pa.make_model()
        pa.add_default_initial_conditions(self.default_initial_amount)
        return model
Пример #28
0
 def assemble_dynamic_pysb(self, mode='local', bucket=EMMAA_BUCKET_NAME):
     """Assemble a version of a PySB model for dynamic simulation.

     Parameters
     ----------
     mode : str
         If 's3' and 'gromet' is among the export formats, the gromet
         export is uploaded to the given S3 bucket.
     bucket : str
         Name of the S3 bucket to upload the gromet export to.

     Returns
     -------
     pysb.Model or None
         The assembled model, or None when no 'dynamic' assembly steps
         are configured.
     """
     # First need to run regular assembly
     if not self.assembled_stmts:
         self.run_assembly()
     if 'dynamic' in self.assembly_config:
         logger.info('Assembling dynamic PySB model')
         ap = AssemblyPipeline(self.assembly_config['dynamic'])
         # Not overwrite assembled stmts
         stmts = deepcopy(self.assembled_stmts)
         self.dynamic_assembled_stmts = ap.run(stmts)
         pa = PysbAssembler()
         pa.add_statements(self.dynamic_assembled_stmts)
         pysb_model = pa.make_model()
         if mode == 's3' and 'gromet' in self.export_formats:
             fname = f'gromet_{self.date_str}.json'
             pysb_to_gromet(pysb_model, self.name,
                            self.dynamic_assembled_stmts, fname)
             logger.info(f'Uploading {fname}')
             client = get_s3_client(unsigned=False)
             client.upload_file(fname, bucket,
                                f'exports/{self.name}/{fname}')
         return pysb_model
     logger.info('Did not find dynamic assembly steps')
Пример #29
0
def assemble_pysb():
    """Assemble INDRA Statements and return PySB model string.

    Reads 'statements' and 'export_format' from the request body. Returns
    a dict with either a 'model' string or, for kappa_im/kappa_cm, an
    'image' base64 data URI.
    """
    if request.method == 'OPTIONS':
        return {}
    response = request.body.read().decode('utf-8')
    body = json.loads(response)
    stmts_json = body.get('statements')
    export_format = body.get('export_format')
    stmts = stmts_from_json(stmts_json)
    pa = PysbAssembler()
    pa.add_statements(stmts)
    pa.make_model()
    try:
        # Zero out the extended (e.g. active-form) initial conditions;
        # best-effort, so failures are only logged.
        for m in pa.model.monomers:
            pysb_assembler.set_extended_initial_condition(pa.model, m, 0)
    except Exception as e:
        logger.exception(e)

    if not export_format:
        model_str = pa.print_model()
    elif export_format in ('kappa_im', 'kappa_cm'):
        # These formats render a PNG image rather than a model string.
        # Removed the unused locals `root` and `graph` from the original.
        fname = 'model_%s.png' % export_format
        pa.export_model(format=export_format, file_name=fname)
        with open(fname, 'rb') as fh:
            data = 'data:image/png;base64,%s' % \
                base64.b64encode(fh.read()).decode()
            return {'image': data}
    else:
        try:
            model_str = pa.export_model(format=export_format)
        except Exception as e:
            logger.exception(e)
            model_str = ''
    res = {'model': model_str}
    return res
Пример #30
0
def assemble_pysb():
    """Assemble INDRA Statements and return PySB model string.

    Reads 'statements' and 'export_format' from the request body. Returns
    a dict with either a 'model' string or, for kappa_im/kappa_cm, an
    'image' base64 data URI.
    """
    if request.method == 'OPTIONS':
        return {}
    response = request.body.read().decode('utf-8')
    body = json.loads(response)
    stmts_json = body.get('statements')
    export_format = body.get('export_format')
    stmts = stmts_from_json(stmts_json)
    pa = PysbAssembler()
    pa.add_statements(stmts)
    pa.make_model()
    try:
        # Zero out the extended (e.g. active-form) initial conditions;
        # best-effort, so failures are only logged.
        for m in pa.model.monomers:
            pysb_assembler.set_extended_initial_condition(pa.model, m, 0)
    except Exception as e:
        logger.exception(e)

    if not export_format:
        model_str = pa.print_model()
    elif export_format in ('kappa_im', 'kappa_cm'):
        # These formats render a PNG image rather than a model string.
        # Removed the unused locals `root` and `graph` from the original.
        fname = 'model_%s.png' % export_format
        pa.export_model(format=export_format, file_name=fname)
        with open(fname, 'rb') as fh:
            data = 'data:image/png;base64,%s' % \
                base64.b64encode(fh.read()).decode()
            return {'image': data}
    else:
        try:
            model_str = pa.export_model(format=export_format)
        except Exception as e:
            logger.exception(e)
            model_str = ''
    res = {'model': model_str}
    return res
Пример #31
0
 def assemble_pysb(self, stmts):
     """Assemble statements using the default policy and initial amounts."""
     assembler = PysbAssembler()
     assembler.add_statements(stmts)
     assembler.make_model(policies=self.default_policy)
     assembler.add_default_initial_conditions(self.default_initial_amount)
     return assembler.model
Пример #32
0
        extension = (f if f != 'pysb_flat' else 'py')
        fname = 'hello_indra_model.%s' % extension
        with open(fname, 'wb') as fh:
            fh.write(model_export.encode('utf-8'))


# User defines text describing a phosphorylation mechanism
text = 'MEK1 phosphorylates ERK2 on threonine 185 and tyrosine 187.'

# Process text using TRIPS processor
tp = trips.process_text(text)

# Get the list of extracted Statements
stmts = tp.statements

# Assemble a PySB model from the statements
pa = PysbAssembler()
pa.add_statements(stmts)
pa.make_model()

# Run simulation over 300 time units
t = np.linspace(0, 300)
sol = Solver(pa.model, t)
sol.run()

# Plot the result
plot_result(pa.model, sol)

# Export model in each supported format
export_hello(pa.model, ['sbml', 'bngl', 'kappa', 'pysb_flat'])
Пример #33
0
def make_bmi_model():
    """Assemble the module-level statements and wrap the model for BMI."""
    assembler = PysbAssembler()
    assembler.add_statements(stmts)
    return BMIModel(assembler.make_model(), inputs=['rainfall'])
Пример #34
0
def make_bmi_model():
    """Assemble the module-level `stmts` into a BMI-wrapped PySB model.

    Returns
    -------
    BMIModel
        The assembled model wrapped for BMI, with 'rainfall' as its input.
    """
    pa = PysbAssembler()
    pa.add_statements(stmts)
    model = pa.make_model()
    bm = BMIModel(model, inputs=['rainfall'])
    return bm
Пример #35
0
    return mon(sdict)

def add_initial(model, pattern, value):
    """Add initial condition; if an initial condition for this pattern
    already exists, override it."""
    complex_pattern = pysb.as_complex_pattern(pattern)

    # Bug fix: iterate over a snapshot — removing from
    # model.initial_conditions while iterating it can skip entries.
    for other_cp, other_value in list(model.initial_conditions):
        if complex_pattern.is_equivalent_to(other_cp):
            model.initial_conditions.remove((other_cp, other_value))
    model.initial(complex_pattern, value)

# Generate model rules via indra from a BEL RDF dump
pa = PysbAssembler()
bp = bel.process_belrdf('RAS_combined.rdf')
pa.add_statements(bp.statements)
# Skip automatic initial conditions; components are added manually below
model = pa.make_model(initial_conditions=False)

# Useful shortcuts to access model components
m = model.monomers
p = model.parameters

# Add ligand to the model: EGF flips EGFR's Kinase site to active
model.add_component(Monomer('EGF'))
model.add_component(Parameter('kf_ee_act', 1e-6))
model.add_component(
    Rule('EGF_activates_EGFR',
         m['EGF']() + m['EGFR']({'Kinase':'inactive'}) >>
         m['EGF']() + m['EGFR']({'Kinase':'active'}),
         p['kf_ee_act']))
Пример #36
0
        extension = (f if f != 'pysb_flat' else 'py')
        fname = 'hello_indra_model.%s' % extension
        with open(fname, 'wb') as fh:
            fh.write(model_export)


# User defines text describing a phosphorylation mechanism
text = 'MEK1 phosphorylates ERK2 on threonine 185 and tyrosine 187.'

# Process text using TRIPS processor
tp = trips.process_text(text)

# Get the list of extracted Statements
stmts = tp.statements

# Assemble a PySB model from the statements
pa = PysbAssembler()
pa.add_statements(stmts)
pa.make_model()

# Run simulation over 300 time units
t = np.linspace(0, 300)
sol = Solver(pa.model, t)
sol.run()

# Plot the result
plot_result(pa.model, sol)

# Export model in each supported format
export_hello(pa.model, ['sbml', 'bngl', 'kappa', 'pysb_flat'])
Пример #37
0
def assemble_pysb(stmts, data_genes, contextualize=False):
    """Filter, link and assemble INDRA Statements into PySB models.

    The generic model and its Statements are pickled to disk; if
    *contextualize* is True, a cell-line-specific model is additionally
    assembled and pickled for each cell line.

    Parameters
    ----------
    stmts : list of indra.statements.Statement
        Input Statements to assemble.
    data_genes : list of str
        Gene names used to restrict the Statements and to contextualize
        the models.
    contextualize : bool
        If False (default), only the generic model is built and the
        function returns early.
    """
    # Filter the INDRA Statements to be put into the model
    stmts = ac.filter_by_type(stmts, Complex, invert=True)
    stmts = ac.filter_direct(stmts)
    stmts = ac.filter_belief(stmts, 0.95)
    stmts = ac.filter_top_level(stmts)
    # Strip the extraneous supports/supported by here
    strip_supports(stmts)
    stmts = ac.filter_gene_list(stmts, data_genes, 'all')
    stmts = ac.filter_enzyme_kinase(stmts)
    stmts = ac.filter_mod_nokinase(stmts)
    stmts = ac.filter_transcription_factor(stmts)
    # Simplify activity types
    ml = MechLinker(stmts)
    ml.gather_explicit_activities()
    ml.reduce_activities()
    ml.gather_modifications()
    ml.reduce_modifications()
    stmts = normalize_active_forms(ml.statements)
    # Replace activations when possible
    ml = MechLinker(stmts)
    ml.gather_explicit_activities()
    ml.replace_activations()
    # Require active forms
    ml.require_active_forms()
    num_stmts = len(ml.statements)
    # Iterate to a fixed point: each pass of the inconsequential-mod/act
    # filters can expose further removable Statements; stop once a pass
    # removes nothing.
    while True:
        # Remove inconsequential PTMs
        ml.statements = ac.filter_inconsequential_mods(ml.statements,
                                                       get_mod_whitelist())
        ml.statements = ac.filter_inconsequential_acts(ml.statements,
                                                       get_mod_whitelist())
        if num_stmts <= len(ml.statements):
            break
        num_stmts = len(ml.statements)
    stmts = ml.statements
    # Save the Statements here
    ac.dump_statements(stmts, prefixed_pkl('pysb_stmts'))


    # Add drug target Statements
    drug_target_stmts = get_drug_target_statements()
    stmts += drug_target_stmts

    # Just generate the generic model
    pa = PysbAssembler()
    pa.add_statements(stmts)
    model = pa.make_model()
    with open(prefixed_pkl('pysb_model'), 'wb') as f:
        pickle.dump(model, f)

    # Run this extra part only if contextualize is set to True
    if not contextualize:
        return

    # Cell lines with no measurement data fall back to the generic
    # (uncontextualized) Statements.
    # NOTE(review): `cell_lines` is a free variable — presumably a
    # module-level list; verify it is defined before this point.
    cell_lines_no_data = ['COLO858', 'K2', 'MMACSF', 'MZ7MEL', 'WM1552C']
    for cell_line in cell_lines:
        if cell_line not in cell_lines_no_data:
            stmtsc = contextualize_stmts(stmts, cell_line, data_genes)
        else:
            stmtsc = stmts
        pa = PysbAssembler()
        pa.add_statements(stmtsc)
        model = pa.make_model()
        if cell_line not in cell_lines_no_data:
            contextualize_model(model, cell_line, data_genes)
        ac.dump_statements(stmtsc, prefixed_pkl('pysb_stmts_%s' % cell_line))
        with open(prefixed_pkl('pysb_model_%s' % cell_line), 'wb') as f:
            pickle.dump(model, f)
Пример #38
0
def make_pysb_model(statements):
    """Assemble the given INDRA *statements* into a PySB model."""
    assembler = PysbAssembler()
    assembler.add_statements(statements)
    return assembler.make_model()
Пример #39
0
 def assemble_pysb(self, stmts):
     """Assemble *stmts* into a PySB model using this object's defaults.

     Assembly uses ``self.default_policy``; initial conditions are set to
     ``self.default_initial_amount``. Returns the assembled model.
     """
     assembler = PysbAssembler()
     assembler.add_statements(stmts)
     assembler.make_model(policies=self.default_policy)
     assembler.add_default_initial_conditions(self.default_initial_amount)
     return assembler.model
Пример #40
0
def assemble_model(model_id, reread=False):
    """Assemble and parameterize PySB model version *model_id*.

    The model text is built by applying successive patches to the base
    text in 'model1.txt'. The text is read with TRIPS (or a cached EKB
    XML file is reused unless *reread* is True), the extracted Statements
    are assembled with a two-step policy, and observables, a mutant BRAF
    initial condition and hand-tuned rate/initial parameters are added.

    Parameters
    ----------
    model_id : int
        Version of the model to assemble (1-based).
    reread : bool
        If True, reprocess the text with TRIPS even if 'model<id>.xml'
        exists.

    Returns
    -------
    pysb.Model
        The assembled model; also saved as 'model<id>.py'.
    """
    model_name = 'model%d' % model_id
    # If model has already been read, just process the cached EKB XML.
    # FIX: use context managers so file handles are closed promptly
    # (the original leaked them via open(...).read()).
    if os.path.exists(model_name + '.xml') and not reread:
        with open(model_name + '.xml') as fh:
            tp = trips.process_xml(fh.read())
    else:
        # Start with the basic model
        with open('model1.txt') as fh:
            model_txt = fh.read()
        # Apply patches one by one to get to the current model text
        for j in range(1, model_id):
            with open('model%d_from%d.txt' % (j+1, j)) as fh:
                patch_txt = fh.read()
            model_txt = apply_patch(model_txt, patch_txt)
        print('Reading model %d text:' % model_id)
        print(model_txt)
        # Process model text and save result EKB XML
        tp = trips.process_text(model_txt, model_name + '.xml')

    print('Assembling statements:')
    for i, st in enumerate(tp.statements):
        print('%d: %s' % (i, st))
    # Assemble the PySB model
    pa = PysbAssembler()
    pa.add_statements(tp.statements)
    model = pa.make_model(policies='two_step')

    # Add observables for the readouts of interest
    erk = model.monomers['ERK']
    obs = Observable('ERK_p', erk(phospho='p'))
    model.add_component(obs)
    vem = model.monomers['VEMURAFENIB']
    obs = Observable('Vem_free', vem(map3k=None))
    model.add_component(obs)
    ras = model.monomers['RAS']
    obs = Observable('RAS_active', ras(gtp=ANY))
    model.add_component(obs)
    braf = model.monomers['BRAF']
    obs = Observable('BRAF_active', braf(vemurafenib=None))
    model.add_component(obs)
    # Wild-type BRAF starts at zero; only the V600E mutant is present
    model.parameters['BRAF_0'].value = 0
    egf = model.monomers['EGF']
    obs = Observable('EGF_free', egf(erbb=None))
    model.add_component(obs)

    # Add mutated form of BRAF as initial condition: every site in its
    # ground state except V600, which carries the E mutation
    sites_dict = {}
    for site in braf.sites:
        if site in braf.site_states:
            sites_dict[site] = braf.site_states[site][0]
        else:
            sites_dict[site] = None
    sites_dict['V600'] = 'E'
    model.add_component(Parameter('BRAF_mut_0', 1e5))
    model.initial(braf(**sites_dict), model.parameters['BRAF_mut_0'])

    # Hand-tuned binding/catalysis rate parameters
    model.parameters['kf_ee_bind_1'].value = 1
    model.parameters['kr_ee_bind_1'].value = 0.1
    model.parameters['kf_ee_bind_2'].value = 1
    model.parameters['kr_ee_bind_2'].value = 0.1
    model.parameters['kf_eg_bind_1'].value = 1
    model.parameters['kr_eg_bind_1'].value = 0.1
    model.parameters['kf_gs_bind_1'].value = 1
    model.parameters['kr_gs_bind_1'].value = 0.1
    model.parameters['kf_sr_bind_1'].value = 1
    model.parameters['kr_sr_bind_1'].value = 50
    model.parameters['kf_rg_bind_1'].value = 50
    model.parameters['kr_rg_bind_1'].value = 0.5
    model.parameters['kf_rb_bind_1'].value = 1
    model.parameters['kr_rb_bind_1'].value = 0.5

    model.parameters['kf_vb_bind_1'].value = 10
    model.parameters['kr_vb_bind_1'].value = 1

    model.parameters['kf_bm_bind_1'].value = 1
    model.parameters['kr_bm_bind_1'].value = 0.1
    model.parameters['kc_bm_phosphorylation_1'].value = 3
    model.parameters['kf_pm_bind_1'].value = 1
    model.parameters['kr_pm_bind_1'].value = 0.001
    model.parameters['kc_pm_dephosphorylation_1'].value = 10
    model.parameters['kf_me_bind_1'].value = 1
    model.parameters['kr_me_bind_1'].value = 0.1
    model.parameters['kc_me_phosphorylation_1'].value = 10
    model.parameters['kf_de_bind_1'].value = 1
    model.parameters['kr_de_bind_1'].value = 0.001
    model.parameters['kc_de_dephosphorylation_1'].value = 10

    # Initial species amounts (vemurafenib starts absent)
    model.parameters['VEMURAFENIB_0'].value = 0
    model.parameters['EGF_0'].value = 1e3
    model.parameters['EGFR_0'].value = 1e5
    model.parameters['SOS_0'].value = 1e3
    model.parameters['GRB2_0'].value = 1e5
    model.parameters['RAS_0'].value = 2e5
    model.parameters['GTP_0'].value = 1e7
    model.parameters['MEK_0'].value = 1e5
    model.parameters['ERK_0'].value = 1e5
    model.parameters['DUSP6_0'].value = 1e3
    model.parameters['PPP2CA_0'].value = 1e5

    # Version-specific parameters added by later patches
    if model_id >= 2:
        model.parameters['Phosphatase_0'].value = 1e2
        model.parameters['kf_es_bind_1'].value = 1e-05
        model.parameters['kr_es_bind_1'].value = 1e-04
        model.parameters['kc_es_phosphorylation_1'].value = 1
        model.parameters['kf_ps_bind_1'].value = 1
        model.parameters['kr_ps_bind_1'].value = 0.1
        model.parameters['kc_ps_dephosphorylation_1'].value = 1e-04

    if model_id >= 3:
        model.parameters['kf_bb_bind_1'].value = 10
        model.parameters['kr_bb_bind_1'].value = 1
        model.parameters['kf_vb_bind_2'].value = 1e-04

    # Save the tweaked model as Python source for later reuse
    pa.model = model
    pa.save_model('model%d.py' % model_id)
    return model
Пример #41
0
    elif demo_idx == 2:
        out_name_maps = [{
            'atmosphere_water__rainfall_volume_flux': 'rainfall'
        }, {}]
        input_vars = [['rainfall'], ['flood']]
    else:
        out_name_maps = [{
            'atmosphere_water__rainfall_volume_flux':
            'Precipitation'
        }]
        input_vars = [['Precipitation']]
    # We now assemble PySB models from the INDRA Statements and then
    # instantiate these models as BMI-wrapped models along with a simulator
    for idx, model_stmts in enumerate(stmts):
        pa = PysbAssembler()
        pa.add_statements(model_stmts)
        model = pa.make_model()
        if demo_idx in (1, 2):
            model.name = 'indra_model%d' % idx
        else:
            model.name = 'indra_eval_model'
        bm = BMIModel(model,
                      inputs=input_vars[idx],
                      stop_time=50000,
                      outside_name_map=out_name_maps[idx])
        bmi_models.append(bm)

    # Example 1: two NL models co-simulated
    if demo_idx == 1:
        # We make the model component repository without Topoflow
        make_component_repo(bmi_models, False)