Example #1
    def verify_networkx(self,
                        n_checks=1000,
                        version="2019-12-18",
                        graph_type="G"):
        from src.data import load_networkx

        raw_g = load_networkx(graph_type, version)
        _verify_networkx(self.g, raw_g, n_checks)
        _verify_networkx(raw_g, self.g, n_checks)
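# _verify_networkx is not shown in this example; the sketch below is a
# hypothetical version of the kind of spot-check it might perform, assuming it
# samples edges from one graph and confirms each also exists in the other.
import numpy as np


def _spot_check_edges(g1, g2, n_checks=1000, seed=None):
    rng = np.random.default_rng(seed)
    edges = list(g1.edges())
    for i in rng.integers(len(edges), size=n_checks):
        u, v = edges[i]
        assert g2.has_edge(u, v), f"edge ({u}, {v}) missing from the other graph"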
#%% Save

out_graphs = []
out_graphs.extend(nx_graphs_raw.values())
for key in nx_graphs_raw.keys():
    print(key)
save_names = ["Gaa", "Gad", "Gda", "Gdd"]
out_graphs.extend(nx_graphs_norm.values())
for key in nx_graphs_norm.keys():
    print(key)
save_names += ["Gaan", "Gdan", "Gadn", "Gddn"]
out_graphs.append(nx_all_raw)
save_names.append("G")
out_graphs.append(nx_all_norm)
save_names.append("Gn")

for name, graph in zip(save_names, out_graphs):
    nx.write_graphml(graph, output_path / (name + ".graphml"))

meta_data_df.to_csv(output_path / "meta_data.csv")

#%% verify things are right
for name, graph_wrote in zip(save_names, out_graphs):
    print(name)
    graph_read = nx.read_graphml(output_path / (name + ".graphml"))
    adj_read = nx.to_numpy_array(graph_read)
    adj_wrote = nx.to_numpy_array(graph_wrote)
    print(np.array_equal(adj_read, adj_wrote))
    graph_loader = load_networkx(name, version=data_date_groups)
    adj_loader = nx.to_numpy_array(graph_loader)
    print(np.array_equal(adj_wrote, adj_loader))
    print()
    Remaps integer labels so that the most frequent label becomes 0, the next
    most frequent becomes 1, and so on.
    """
    uni_labels, uni_inv, uni_counts = np.unique(labels,
                                                return_inverse=True,
                                                return_counts=True)
    # sort the unique labels from most to least frequent
    sort_inds = np.argsort(uni_counts)[::-1]
    new_labels = range(len(uni_labels))
    uni_labels_sorted = uni_labels[sort_inds]
    # map the most frequent original label to 0, the next to 1, and so on
    relabel_map = dict(zip(uni_labels_sorted, new_labels))

    new_labels = np.array(itemgetter(*labels)(relabel_map))
    return new_labels
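# Toy illustration (not from the source) of the frequency-based relabeling
# above: the most frequent original label maps to 0, the next to 1, and so on.
from operator import itemgetter  # also needed by the function above

toy_labels = np.array(["b", "a", "b", "c", "b", "a"])
toy_uni, toy_counts = np.unique(toy_labels, return_counts=True)   # ['a' 'b' 'c'], [2 3 1]
toy_map = dict(zip(toy_uni[np.argsort(toy_counts)[::-1]], range(len(toy_uni))))
print(toy_map)                                     # {'b': 0, 'a': 1, 'c': 2}
print(np.array(itemgetter(*toy_labels)(toy_map)))  # [0 1 0 2 0 1]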


#%% Load some graph info
graph = load_networkx("Gn")

filename = (
    "maggot_models/data/raw/Maggot-Brain-Connectome/4-color-matrices_Brain/" +
    "2019-09-18-v2/brain_meta-data.csv")

meta_df = pd.read_csv(filename, index_col=0)
print(meta_df.head())

pair_df = pd.read_csv(
    "maggot_models/data/raw/Maggot-Brain-Connectome/pairs/knownpairsatround5.csv"
)

print(pair_df.head())

# get the nodelist for pairing left-right
def get_paired_adj(graph_type, nodelist):
    graph = load_networkx(graph_type)
    matched_graph = graph.subgraph(nodelist)
    adj_df = nx.to_pandas_adjacency(matched_graph, nodelist=nodelist)
    adj = adj_df.values
    return adj
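# Hypothetical usage sketch of get_paired_adj (the "leftid"/"rightid" column
# names come from pair_df above; the choice of "Gad" and the assumption that
# all paired IDs are present in the graph are mine): build the nodelist from
# the known pairs so paired neurons sit in matching rows of the two halves.
left_nodes = pair_df["leftid"].values.astype(str)
right_nodes = pair_df["rightid"].values.astype(str)
nodelist = np.concatenate((left_nodes, right_nodes))
paired_adj = get_paired_adj("Gad", nodelist)
print(paired_adj.shape)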
Example #5
save_names = ["Gaa", "Gad", "Gda", "Gdd"]
out_graphs.extend(nx_graphs_norm.values())
for key in nx_graphs_norm.keys():
    print(key)
save_names += ["Gaan", "Gdan", "Gadn", "Gddn"]
out_graphs.append(nx_all_raw)
save_names.append("G")
out_graphs.append(nx_all_norm)
save_names.append("Gn")

for name, graph in zip(save_names, out_graphs):
    nx.write_graphml(graph, output_path / (name + ".graphml"))

meta.to_csv(output_path / "meta_data.csv")

#%% verify things are right
print("\n\nChecking graphs are the same when saved")
print(output_path)
for name, graph_wrote in zip(save_names, out_graphs):
    print(name)
    graph_read = nx.read_graphml(output_path / (name + ".graphml"))
    adj_read = nx.to_numpy_array(graph_read)
    adj_wrote = nx.to_numpy_array(graph_wrote)
    print(np.array_equal(adj_read, adj_wrote))
    graph_loader = load_networkx(name, version=output_name)
    adj_loader = nx.to_numpy_array(graph_loader)
    print(np.array_equal(adj_wrote, adj_loader))
    print()

print("Done!")
sys.stdout.close()
Example #6
#%% Load data
import networkx as nx
import numpy as np
import pandas as pd

from graspy.plot import degreeplot, edgeplot, gridplot, heatmap
from mpl_toolkits.axes_grid1 import make_axes_locatable

from src.data import load_networkx
from src.utils import meta_to_array, savefig

#%%
graph_type = "Gn"

graph = load_networkx(graph_type)

df_adj = nx.to_pandas_adjacency(graph)
adj = df_adj.values

classes = meta_to_array(graph, "Class")
print(np.unique(classes))

nx_ids = np.array(list(graph.nodes()), dtype=int)
df_ids = df_adj.index.values.astype(int)
df_adj.index = df_ids
df_adj.columns = df_ids
print(np.array_equal(nx_ids, df_ids))
cell_ids = df_ids

#%% Map MW classes to the indices of cells belonging to them
unique_classes, inverse_classes = np.unique(classes, return_inverse=True)
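# A minimal sketch (an assumption about the intent of the comment above): build
# a map from each class name to the indices of the cells in that class.
class_ind_map = {
    c: np.where(inverse_classes == i)[0] for i, c in enumerate(unique_classes)
}
print({c: len(inds) for c, inds in class_ind_map.items()})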
nx_all_norm = df_to_nx(df_all_norm, meta_data_dict)

#%% Save

out_graphs = []
out_graphs.extend(nx_graphs_raw.values())
for key in nx_graphs_raw.keys():
    print(key)
save_names = ["Gaa", "Gad", "Gda", "Gdd"]
out_graphs.extend(nx_graphs_norm.values())
for key in nx_graphs_norm.keys():
    print(key)
save_names += ["Gaan", "Gdan", "Gadn", "Gddn"]
out_graphs.append(nx_all_raw)
save_names.append("G")
out_graphs.append(nx_all_norm)
save_names.append("Gn")

for name, graph in zip(save_names, out_graphs):
    nx.write_graphml(graph, output_path / (name + ".graphml"))

#%% verify things are right
for name, graph_wrote in zip(save_names, out_graphs):
    print(name)
    graph_read = nx.read_graphml(output_path / (name + ".graphml"))
    adj_read = nx.to_numpy_array(graph_read)
    adj_wrote = nx.to_numpy_array(graph_wrote)
    print(np.array_equal(adj_read, adj_wrote))
    graph_loader = load_networkx(name)
    adj_loader = nx.to_numpy_array(graph_loader)
    print(np.array_equal(adj_wrote, adj_loader))
    print()
Example #8
        plt.barh(y, width=widths[i], left=starts[i], height=0.5, label=e)
        prop = 100 * widths[i] / widths.sum()
        prop = f"{prop:2.0f}%"
        ax.text(
            centers[i], 0.15, prop, ha="center", va="center", color="k", fontsize=15
        )
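# Self-contained sketch (hypothetical data, not from the source) of the
# stacked-bar-with-percentage pattern the fragment above implements: one
# horizontal segment per category, each annotated with its share of the total.
import matplotlib.pyplot as plt
import numpy as np

widths = np.array([30.0, 50.0, 20.0])
starts = np.concatenate(([0.0], np.cumsum(widths)[:-1]))
centers = starts + widths / 2
fig, ax = plt.subplots(figsize=(8, 1.5))
for i, e in enumerate(["A", "B", "C"]):
    ax.barh(0, width=widths[i], left=starts[i], height=0.5, label=e)
    prop = f"{100 * widths[i] / widths.sum():2.0f}%"
    ax.text(centers[i], 0.15, prop, ha="center", va="center", color="k", fontsize=15)
ax.legend()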


palette = "Set1"
sns.set_context("talk", font_scale=1.2)
plt.style.use("seaborn-white")
sns.set_palette(palette)
graph_type_labels = [r"A $\to$ D", r"A $\to$ A", r"D $\to$ D", r"D $\to$ A"]


graph = load_networkx("Gn")

meta_df = pd.read_csv(
    "maggot_models/data/raw/Maggot-Brain-Connectome/4-color-matrices_Brain/2019-09-18-v2/brain_meta-data.csv",
    index_col=0,
)
print(meta_df.head())

pair_df = pd.read_csv(
    "maggot_models/data/raw/Maggot-Brain-Connectome/pairs/knownpairsatround5.csv"
)

print(pair_df.head())
#%%
left_nodes = pair_df["leftid"].values.astype(str)
right_nodes = pair_df["rightid"].values.astype(str)
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import seaborn as sns

from graspy.plot import heatmap
from graspy.utils import binarize
from src.data import load_networkx
from src.utils import meta_to_array

version = "mb_2019-09-23"

plt.style.use("seaborn-white")
sns.set_palette("deep")
#%% Load and plot the full graph
graph_type = "G"
graph = load_networkx(graph_type, version=version)
classes = meta_to_array(graph, "Class")
side_labels = meta_to_array(graph, "Hemisphere")
adj_df = nx.to_pandas_adjacency(graph)
name_map = {
    "APL": "APL",
    "Gustatory PN": "PN",
    "KC 1 claw": "KC",
    "KC 2 claw": "KC",
    "KC 3 claw": "KC",
    "KC 4 claw": "KC",
    "KC 5 claw": "KC",
    "KC 6 claw": "KC",
    "KC young": "KC",
    "MBIN": "MBIN",
    "MBON": "MBON",
Example #10
# %% [markdown]
# # Imports
from src.graph import MetaGraph
from src.data import load_networkx
from graspy.utils import is_fully_connected

# %% [markdown]
# # Constants
BRAIN_VERSION = "2019-12-18"
GRAPH_TYPE = "Gad"
# %% [markdown]
# # Loads
g = load_networkx(GRAPH_TYPE, BRAIN_VERSION)
mg = MetaGraph(g)

# %% [markdown]
# # Show that getting LCC works
print(is_fully_connected(mg.g))
print(mg.n_verts)
print(mg.meta.shape)
print()
mg = mg.make_lcc()
print(is_fully_connected(mg.g))
print(mg.n_verts)
print(mg.meta.shape)
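# A minimal sketch, assuming mg.make_lcc() keeps the largest weakly connected
# component of the directed graph: the equivalent check with plain networkx.
import networkx as nx

lcc_nodes = max(nx.weakly_connected_components(g), key=len)
print(len(lcc_nodes))  # expected to match mg.n_verts after make_lcc(), if the assumption holds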

# %% [markdown]
# #
Example #11
def _unique_like(vals):
    # gives output like np.unique, but with the uniques in order of first appearance
    uniques, inds, counts = np.unique(vals,
                                      return_index=True,
                                      return_counts=True)
    inds_sort = np.argsort(inds)
    uniques = uniques[inds_sort]
    counts = counts[inds_sort]
    return uniques, counts
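# Toy check (not from the source): _unique_like returns the unique values in
# order of first appearance, along with their counts.
vals = np.array(["b", "a", "b", "c"])
print(_unique_like(vals))  # (array(['b', 'a', 'c'], dtype='<U1'), array([2, 1, 1]))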


#%%
graph_type = "Gn"

graph = load_networkx(graph_type, version="mb_2019-09-23")

df_adj = nx.to_pandas_adjacency(graph)
adj = df_adj.values

classes = meta_to_array(graph, "Class")
print(np.unique(classes))

nx_ids = np.array(list(graph.nodes()), dtype=int)
df_ids = df_adj.index.values.astype(int)
df_adj.index = df_ids
df_adj.columns = df_ids
print(np.array_equal(nx_ids, df_ids))
cell_ids = df_ids

sort_inds = _sort_inds(adj, classes, np.ones_like(classes), True)
Example #12
    )


def stashobj(obj, name, **kws):
    saveobj(obj, name, foldername=FNAME, save_on=SAVEOBJS, **kws)


# adj, class_labels, side_labels, pair_labels, skeleton_labels, = load_everything(
#     "Gadn",
#     version=BRAIN_VERSION,
#     return_keys=["Merge Class", "Hemisphere", "Pair"],
#     return_ids=True,
# )


g = load_networkx("Gn", version=BRAIN_VERSION)

g_sym = nx.to_undirected(g)
skeleton_labels = np.array(list(g_sym.nodes()))
scales = [1]
r = 0.5
out_dict = cm.best_partition(g_sym, resolution=r)
partition = np.array(itemgetter(*skeleton_labels.astype(str))(out_dict))
adj = nx.to_numpy_array(g_sym, nodelist=skeleton_labels)

part_unique, part_count = np.unique(partition, return_counts=True)
for uni, count in zip(part_unique, part_count):
    if count < 3:
        inds = np.where(partition == uni)[0]
        partition[inds] = -1
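# A hedged follow-up (assuming cm is the python-louvain "community" module, as
# cm.best_partition above suggests): report the modularity of the partition
# Louvain returned, before small communities are relabeled to -1.
modularity = cm.modularity(out_dict, g_sym)
print(f"Louvain modularity at resolution {r}: {modularity:.3f}")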
Example #13
#%%
MB_VERSION = "mb_2019-09-23"
BRAIN_VERSION = "2019-09-18-v2"
GRAPH_TYPES = ["Gad", "Gaa", "Gdd", "Gda"]
GRAPH_TYPE_LABELS = [r"A $\to$ D", r"A $\to$ A", r"D $\to$ D", r"D $\to$ A"]
N_GRAPH_TYPES = len(GRAPH_TYPES)

FNAME = os.path.basename(__file__)[:-3]
print(FNAME)

adj, class_labels, side_labels = load_everything("G",
                                                 version=BRAIN_VERSION,
                                                 return_class=True,
                                                 return_side=True)

graph = load_networkx("G", BRAIN_VERSION)
node2vec = Node2Vec(graph,
                    dimensions=6,
                    workers=12,
                    p=0.5,
                    q=0.5,
                    walk_length=100,
                    num_walks=20)

model = node2vec.fit(window=20, min_count=1, batch_words=4)
vecs = [model.wv.get_vector(n) for n in graph.nodes()]

embedding = np.array(vecs)

pairplot(embedding, labels=meta_to_array(graph, "Class"), palette="tab20")
Example #14
    rot_latent = latent.copy()  # copy so the caller's latent positions are not modified in place
    rot_latent[left_inds] = latent[left_inds] @ R
    return rot_latent, diff
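# A minimal sketch of how a rotation R like the one above could be computed,
# assuming the goal is to align left-hemisphere latent positions to their
# paired right-hemisphere positions (the left_inds/right_inds pairing here is
# an assumption, not taken from the source).
from scipy.linalg import orthogonal_procrustes


def fit_left_right_rotation(latent, left_inds, right_inds):
    R, _ = orthogonal_procrustes(latent[left_inds], latent[right_inds])
    diff = np.linalg.norm(latent[left_inds] @ R - latent[right_inds])
    return R, diff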


# %% [markdown]
# #

graph_type = "Gad"
use_spl = False
embed = "lse"
remove_pdiff = True
plus_c = True

mg = load_metagraph(graph_type, BRAIN_VERSION)
g = load_networkx(graph_type, BRAIN_VERSION)
source_bad = 3609202
target_bad = 10934438
source_ind = np.where(mg.meta.index == source_bad)[0]
target_ind = np.where(mg.meta.index == target_bad)[0]
print(mg.adj[source_ind, target_ind])
try:
    print(mg.g[source_bad])
    mg.g[source_bad][target_bad]
except KeyError:
    print("Edge not present in mg.g")

try:
    print(g[source_bad])
    g[source_bad][target_bad]
except KeyError:
    print("Edge not present in g")
#%% Save

out_graphs = []
out_graphs.extend(nx_graphs_raw.values())
for key in nx_graphs_raw.keys():
    print(key)
save_names = ["Gaa", "Gad", "Gda", "Gdd"]
out_graphs.extend(nx_graphs_norm.values())
for key in nx_graphs_norm.keys():
    print(key)
save_names += ["Gaan", "Gdan", "Gadn", "Gddn"]
out_graphs.append(nx_all_raw)
save_names.append("G")
out_graphs.append(nx_all_norm)
save_names.append("Gn")

for name, graph in zip(save_names, out_graphs):
    nx.write_graphml(graph, output_path / (name + ".graphml"))

#%% verify things are right
for name, graph_wrote in zip(save_names, out_graphs):
    print(name)
    graph_read = nx.read_graphml(output_path / (name + ".graphml"))
    adj_read = nx.to_numpy_array(graph_read)
    adj_wrote = nx.to_numpy_array(graph_wrote)
    print(np.array_equal(adj_read, adj_wrote))
    graph_loader = load_networkx(name, version="mb_2019-09-23")
    adj_loader = nx.to_numpy_array(graph_loader)
    print(np.array_equal(adj_wrote, adj_loader))
    print()
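# Optional refactor sketch (not in the source): the write/read/reload check
# repeated above, wrapped as a helper so each graph needs only one call.
def check_graphml_roundtrip(name, graph_wrote, output_path, version):
    graph_read = nx.read_graphml(output_path / (name + ".graphml"))
    same_read = np.array_equal(nx.to_numpy_array(graph_read), nx.to_numpy_array(graph_wrote))
    graph_loaded = load_networkx(name, version=version)
    same_load = np.array_equal(nx.to_numpy_array(graph_wrote), nx.to_numpy_array(graph_loaded))
    return same_read and same_load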

#%%