Example #1
def test_members():
    set_default_client(None)
    assert default_client() is None
    c = Client(NEUPRINT_SERVER, DATASET)
    assert c.server == f'https://{NEUPRINT_SERVER}'
    assert c.dataset == DATASET

    assert default_client() is c

    df = c.fetch_custom("MATCH (m:Meta) RETURN m.primaryRois as rois")
    assert isinstance(df, pd.DataFrame)
    assert df.columns == ['rois']
    assert len(df) == 1
    assert isinstance(df['rois'].iloc[0], list)

    assert isinstance(c.fetch_available(), list)
    assert isinstance(c.fetch_help(), str)
    assert c.fetch_server_info() is True
    assert isinstance(c.fetch_version(), str)
    assert isinstance(c.fetch_database(), dict)
    assert isinstance(c.fetch_datasets(), dict)
    assert isinstance(c.fetch_db_version(), str)
    assert isinstance(c.fetch_profile(), dict)
    assert isinstance(c.fetch_token(), str)
    assert isinstance(c.fetch_daily_type(), tuple)
    assert isinstance(c.fetch_roi_completeness(), pd.DataFrame)
    assert isinstance(c.fetch_roi_connectivity(), pd.DataFrame)
    assert isinstance(c.fetch_roi_mesh('AB(R)'), bytes)
    assert isinstance(c.fetch_skeleton(EXAMPLE_BODY), pd.DataFrame)
Example #2
import ast

from neuprint import Client
def get_body_ids_from_roi(roi,
                          server,
                          token,
                          hemibrain_neuron=True,
                          pre_threshold=0,
                          post_threshold=0,
                          total_threshold=0):
    """

    Args:
        roi (str): Neuropil abreviation from Neuprint
        server (str): Neuprint server URL
        token (str): Neuprint auth token
        hemibrain_neuron (bool): Specifies whether to search
            Hemibrain-Neuron of Hemibrain-Segment.
        pre_threshold (int): Requires bodies meet threshold of presynaptic sites
            in the ROI
        post_threshold (int): Requires bodies meet threshold of postsynaptic sites
            in the ROI
        total_threshold (int): Requires bodies meet threshold of total synaptic sites
            in the ROI

    Returns:
        Dataframe containing body IDs that innervate the ROI

    """
    query_template = (
        'MATCH (n:`{HEMIBRAIN}`)\n'
        'WHERE n.`{ROI}`\n'
        'RETURN n.bodyId AS ID, n.name AS NAME, n.roiInfo AS ROIINFO'
    )

    # Start the neuPrint python client (pass the token by keyword; the second
    # positional argument of Client is the dataset name)
    client = Client(server, token=token)
    client.fetch_version()  # quick connectivity check

    if hemibrain_neuron:
        query = query_template.format(HEMIBRAIN='hemibrain-Neuron', ROI=roi)
    else:
        query = query_template.format(HEMIBRAIN='hemibrain-Segment', ROI=roi)

    results = client.fetch_custom(query)
    results['ROIINFO'] = results['ROIINFO'].apply(ast.literal_eval)
    results['PRE'] = results['ROIINFO'].apply(lambda x: int(x[roi]['pre']))
    results['POST'] = results['ROIINFO'].apply(lambda x: int(x[roi]['post']))

    results = results[results['PRE'] + results['POST'] >= total_threshold]
    results = results[results['PRE'] >= pre_threshold]
    results = results[results['POST'] >= post_threshold]

    return results[['ID', 'NAME', 'PRE', 'POST']].reset_index(drop=True)
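
# Hypothetical usage sketch (not part of the original example); the server URL,
# token placeholder, and ROI name below are stand-ins, not values from the source.
if __name__ == '__main__':
    bodies = get_body_ids_from_roi(
        roi='EB',
        server='https://neuprint.janelia.org',
        token='<your-neuprint-token>',
        pre_threshold=5,
        post_threshold=5,
    )
    print(bodies.head())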
Example #3
SERVER = 'emdata1.int.janelia.org:11000'
np = Client(SERVER)

# find nodes in the graph
roifilter = ""
roifilterset = set()
ignorelabels = set(["location", "timeStamp", "confidence", "type"])
connections = {}
points = {}

roirent = {}
if rois is not None and len(rois) > 0:
    # traverse one ROI at a time
    query = "MATCH (m :Meta:hemibrain) RETURN m.superLevelRois"
    res = np.fetch_custom(query)
    major_rois = set(res.iloc[0, 0])

    for iter1 in range(len(rois)):
        print("Processing ROI:", rois[iter1])
        # warn if ROI is not a super level ROI
        if rois[iter1] not in major_rois:
            print("ROI not a major ROI, compartment stats might be wrong")

        # calculate pins vs compute
        totalcompute = 0
        totalpins = 0
        query = "MATCH (n :`hemibrain-Neuron`) WHERE (n.status=\"Roughly traced\" OR n.status=\"Prelim Roughly traced\" OR n.status=\"Traced\" OR n.status=\"Leaves\") AND n.`" + rois[
            iter1] + "` RETURN n.roiInfo AS roiInfo, n.bodyId AS bodyid"

        res = np.fetch_custom(query)
Example #4
class NeuPrintExecutor(Neo4jExecutor):
    """
    A NeuPrintExecutor may be used to access an existing neuPrint server.

    This class converts a DotMotif motif object into a neuPrint-compatible
    query. Not all neuPrint datatypes or query types are available, but this
    adds complete support for DotMotif motif searches by passing raw Cypher
    queries to the neuPrint server over the HTTP API.

    Note that the neuPrint default timeout is quite short, and slower motif
    queries may not run in time.

    """
    def __init__(self, host: str, dataset: str, token: str) -> None:
        """
        Create a new NeuPrintExecutor that points to a deployed neuPrint DB.

        Arguments:
            host (str): The host of the neuPrint server (for example,
                'neuprint.janelia.org')
            dataset (str): The name of the dataset to reference (for example,
                'hemibrain:v1.1')
            token (str): The user's neuPrint access token. To retrieve this
                token, go to https://[host]/account.

        Returns:
            None

        """
        self._created_container = False
        self.host = host
        self.dataset = dataset
        self.token = token
        self.client = Client(host, dataset=self.dataset, token=self.token)

    def run(self, cypher: str) -> pd.DataFrame:
        """
        Run an arbitrary cypher command.

        You should usually ignore this, and use .find() instead.

        Arguments:
            cypher (str): The command to run

        Returns:
            The result of the cypher query

        """
        return self.client.fetch_custom(cypher)

    def count(self, motif: dotmotif, limit=None) -> int:
        """
        Count a motif in a larger graph.

        Arguments:
            motif (dotmotif.dotmotif): The motif to search for
            limit (int): Optional LIMIT clause appended to the generated query

        Returns:
            int: The count of this motif in the host graph

        """
        qry = self.motif_to_cypher(motif,
                                   count_only=True,
                                   static_entity_labels=_DEFAULT_ENTITY_LABELS)
        if limit:
            qry += f" LIMIT {limit}"
        res = self.client.fetch_custom(qry)
        return int(res.iloc[0, 0])

    def find(self, motif: dotmotif, limit=None) -> pd.DataFrame:
        """
        Find a motif in a larger graph.

        Arguments:
            motif (dotmotif.dotmotif): The motif to search for
            limit (int): Optional maximum number of rows to return

        Returns:
            pd.DataFrame: The results of the search

        """
        qry = self.motif_to_cypher(motif,
                                   static_entity_labels=_DEFAULT_ENTITY_LABELS)
        if limit:
            qry += f" LIMIT {limit}"
        return self.client.fetch_custom(qry)

    @staticmethod
    def motif_to_cypher(motif: dotmotif,
                        count_only: bool = False,
                        static_entity_labels: dict = None) -> str:
        """
        Convert a motif to neuprint-flavored Cypher.

        This is currently a thin passthrough for Neo4jExecutor.motif_to_cypher.

        """
        static_entity_labels = static_entity_labels or _DEFAULT_ENTITY_LABELS
        return Neo4jExecutor.motif_to_cypher(motif, count_only,
                                             static_entity_labels)
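
# Hypothetical usage sketch (not part of the original example). The host,
# dataset name, token placeholder, and the `Motif` constructor (recent dotmotif
# releases accept the motif DSL as a string) are assumptions.
if __name__ == "__main__":
    from dotmotif import Motif

    executor = NeuPrintExecutor(
        host="neuprint.janelia.org",
        dataset="hemibrain:v1.2.1",
        token="<your-neuprint-token>",
    )

    # Two neurons with a reciprocal connection.
    motif = Motif("A -> B\nB -> A")

    print(executor.count(motif, limit=100))
    print(executor.find(motif, limit=100).head())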
Example #5
# ------------------------------------------
'''Tip: Inspect all cypher queries by enabling debug logging'''
# setup_debug_logging()

# ------------------------------------------
# Execute a custom query
# ------------------------------------------
'''This query returns all Neurons in the ROI 'AB(R)' that have more than 10 presynaptic sites.
Results are ordered by total synaptic site count (pre + post).'''
query = """\
    MATCH (n :Neuron {`AB(R)`: true})
    WHERE n.pre > 10
    RETURN n.bodyId AS bodyId, n.name AS name, n.pre AS numpre, n.post AS numpost
    ORDER BY n.pre + n.post DESC
"""
results = c.fetch_custom(query)
print(f"Found {len(results)} results")
print(results.head())

# ------------------------------------------
# ROIs
# ------------------------------------------
'''In neuprint, each neuron is annotated with the list of regions (ROIs) it intersects, along
with the synapse counts in each. The ROIs comprise a hierarchy, with smaller ROIs nested within
larger ROIs. Furthermore, primary ROIs are guaranteed not to overlap, and they roughly tile the
entire brain (with some gaps).
'''
'''For a quick overview of the ROI hierarchy, use fetch_roi_hierarchy()'''
print(
    fetch_roi_hierarchy(include_subprimary=True,
                        mark_primary=True,
                        format='text')
)
Example #6
    nest.Simulate(float(exp_length))
    #v = nest.GetStatus(voltmeter)[0]['events']['V_m']     # volts
    s = nest.GetStatus(spikedetector)[0]['events']  # spikes
    np.save(
        root + '/simulations/droso_' + stimulus_odor + '_' + simulation_id +
        '.npy', np.array(s, dtype=object))
    ntnsubstatus("Simulation name: droso_" + stimulus_odor + "_" +
                 simulation_id)


#******************************************************************************#
#Run everything
if __name__ == "__main__":
    # If we need to download data from Janelia do so
    if not args.disable_build or not args.disable_design:
        ntnstatus('Downloading neuron data from Janelia')
        with open(token_address) as token_file:
            token = token_file.readline().strip()
        c = Client('neuprint.janelia.org', dataset=dataset, token=token)
        q = """MATCH (n :Neuron )
                WHERE n.instance <>'{}'
                RETURN n.bodyId AS bodyId, n.type AS type
                ORDER BY n.bodyId ASC
            """
        results = c.fetch_custom(q)
    if not args.disable_build:
        build_matrix(results)
    if not args.disable_design:
        design_stimuli(results)
    if not args.disable_simulate:
        run_nest_simulation()
Example #7
import pandas as pd

from neuprint import Client
class Graph:
    """
    Graph used to construct, store, and transform neuron connectivity graphs.

    Attributes:
        _client (Client): Neuprint API client.
        depth (int): Depth of search to build connectivity graph.
        graph (DataFrame): Rows are edges in the connectivity graph.
        paths (DataFrame): Rows are paths in the format N_0, W_1, N_1, ..., W_n, N_n.
        sources (list of int): List of source body IDs.
        targets (list of int): List of target body IDs.
    """


    def __init__(self, sources, targets, depth, verbose=False):
        """ Initialization method.
        Args:
            sources (list of int): List of source body IDs.
            targets (list of int): List of target body IDs.
            depth (int): Depth of search to build connectivity graph.
            verbose (bool): If True, print progress messages.

        Returns:
            None
        """
        self._client = None
        self.verbose = verbose
        self.depth = int(depth)
        self.graph = pd.DataFrame(columns=['SRC', 'DES', 'WEIGHT', 'LAYER'])
        self.paths = pd.DataFrame()
        self.sources = list(sources)
        self.targets = list(targets)


    def _verbose_print(self, msg):
        """ Verbocity print statement

        Args:
            msg (str): Message to print when verbose mode is enabled.

        Returns:
            None
        """
        if self.verbose:
            print(msg)


    def start_client(self, server, token):
        """ Creates a Neuprint API Client instance.

        Args:
            server (str): URL of Neuprint server.
            token (str): Authentication token for Neuprint.

        Returns:
            None
        """
        try:
            # Pass the token by keyword; Client's second positional argument
            # is the dataset name.
            self._client = Client(server, token=token)
        except RuntimeError as error:
            raise RuntimeError(
                "Issue instantiating Neuprint client: {ERR}".format(ERR=error)
            ) from error


    def _query_downstream_partners(self, ids, threshold, hemibrain_neuron=True):
        """ Query Neuprint for downstream partners of a list of body IDs.

        Args:
            ids (list of int): List of body IDs to search from.
            threshold (int): Minimum connection strength between partners.
            hemibrain_neuron (bool): If True, search `hemibrain-Neuron` nodes;
                otherwise search `hemibrain-Segment` nodes.

        Returns:
            Dataframe containing the connection pairs between query IDs
            and downstream partners.
        """
        query_template = (
            'WITH {IDS} AS SRC\n'
            'UNWIND SRC AS ID\n'
            'MATCH (n:`{HEMIBRAIN}`)-[w:ConnectsTo]->(m:`{HEMIBRAIN}`)\n'
            'WHERE n.bodyId = ID AND w.weight >= {THRESHOLD}\n'
            'RETURN n.bodyId AS SRC, m.bodyId AS DES, w.weight AS WEIGHT'
        )

        if hemibrain_neuron:
            query = query_template.format(
                HEMIBRAIN='hemibrain-Neuron', IDS=list(ids), THRESHOLD=threshold
            )
        else:
            query = query_template.format(
                HEMIBRAIN='hemibrain-Segment', IDS=list(ids), THRESHOLD=threshold
            )
        # print(query)
        results = self._client.fetch_custom(query)
        return results


    def _prune_graph(self, graph):
        """ Removes paths that do not include target bodies

        Starting from the top-most layer, remove all nodes from a layer that
        are not target bodies and do not connect to a body in the previous layer.

        Args:
            graph (DataFrame): Rows are edges of the connectivity graph.

        Returns:
            DataFrame containing only the edges that are part of pathways to
            target bodies.
        """
        frontier = set(self.targets)
        pruned = pd.DataFrame(columns=['SRC', 'DES', 'WEIGHT', 'LAYER'])
        for layer in reversed(range(max(graph['LAYER']))):
            layer_edges = graph[graph['LAYER'] == layer + 1]
            lc1 = len(layer_edges)
            layer_edges = layer_edges[layer_edges.DES.isin(frontier)]
            pruned = pd.concat([pruned, layer_edges], ignore_index=True)
            frontier = set(layer_edges.SRC.values) | frontier
            self._verbose_print('Layer {LAYER}: {NUM_EDGES} edges'
                                .format(LAYER=layer + 1, NUM_EDGES=lc1))
            self._verbose_print('Layer {LAYER} Pruned: {NUM_EDGES} edges'
                                .format(LAYER=layer + 1, NUM_EDGES=len(layer_edges)))
        return pruned.reset_index(drop=True)


    def make_graph(self, threshold=1):
        """ Create connectivity graph of all downstream neurons

        Use breadth first search to find all bodies within N
        hops downstream of the source IDs.  Does not explore
        past nodes that have been visited or that are part of
        target set.

        Args:
            threshold (int, optional): Minimum connection weight between bodies.

        Returns:
            Dataframe containing connectivity graph edges and connection weights.
        """
        frontier = set(self.sources)
        explored = set(self.targets)
        graph = pd.DataFrame(columns=['SRC', 'DES', 'WEIGHT', 'LAYER'])

        self._verbose_print('\nBuilding graph')
        for i in range(self.depth):
            self._verbose_print('Retrieving layer {LAYER} bodies'.format(LAYER=str(i+1)))

            layer_bodies = self._query_downstream_partners(frontier, threshold)
            layer_bodies['LAYER'] = i + 1
            explored = frontier | explored
            frontier = set(layer_bodies['DES'].values) - explored
            graph = pd.concat([graph, layer_bodies], ignore_index=True)

            self._verbose_print('Layer {LAYER}: {NUM} bodies'.format(NUM=len(frontier), LAYER=i+1))

        self._verbose_print('\nPruning graph of edges that do not lead to target bodies')
        self.graph = self._prune_graph(graph)
        return self.graph

    def graph_to_csv(self, file_name):
        """ Save graph to CSV

        Args:
            file_name (str): desired destination file for graph data

        Returns:
            None
        """
        self.graph.to_csv(file_name, index=False)


    def paths_to_csv(self, file_name):
        """ Save path to CSV

        Args:
            file_name (str): desired destination file for path data

        Returns:
            None
        """
        self.paths.to_csv(file_name, index=False)


    def compute_paths(self):
        """ Get all paths from from source bodies to targets.

        Find all permutations of edges that create pathways to target bodies.
        Starting from the second deepest layer, join layer N-1 DES column
        on layer N SRC column.

        Returns:
            None. The resulting paths are stored in self.paths.
        """
        if self.graph.empty:
            raise Exception('Graph is not defined; run make_graph() first.')

        max_layer = self.graph['LAYER'].max()
        right = self.graph[self.graph['LAYER'] == max_layer][['SRC', 'WEIGHT', 'DES']]
        right = right.rename(
            index=str,
            columns={
                'SRC': 'N_{LAYER}'.format(LAYER=str(max_layer - 1)),
                'WEIGHT': 'W_{LAYER}'.format(LAYER=str(max_layer)),
                'DES': 'N_{LAYER}'.format(LAYER=str(max_layer)),
            },
        )

        for layer in reversed(range(max_layer - 1)):
            output_layer = 'N_{LAYER}'.format(LAYER=str(layer))
            connection_weight = 'W_{LAYER}'.format(LAYER=str(layer + 1))
            input_layer = 'N_{LAYER}'.format(LAYER=str(layer + 1))
            left = self.graph[self.graph['LAYER'] == layer + 1][
                ['SRC', 'WEIGHT', 'DES']
            ]
            left = left.rename(
                index=str,
                columns={
                    'SRC': output_layer,
                    'WEIGHT': connection_weight,
                    'DES': input_layer,
                },
            )
            right = pd.merge(left, right, on=input_layer, how='left')

        self.paths = right
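

# Hypothetical usage sketch (not part of the original example); the server URL,
# token placeholder, and body IDs below are stand-ins, not values from the source.
if __name__ == '__main__':
    graph = Graph(sources=[123456789], targets=[987654321], depth=3, verbose=True)
    graph.start_client('https://neuprint.janelia.org', '<your-neuprint-token>')
    graph.make_graph(threshold=5)
    graph.graph_to_csv('graph_edges.csv')
    graph.compute_paths()
    graph.paths_to_csv('paths.csv')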