def load_sif(args):
    """Load a SIF interaction file (source<TAB>interaction<TAB>target) into a graph.

    Rows where either endpoint is a CHEBI compound are skipped; the remaining
    endpoints are created as "Protein" vertices and each row becomes one edge
    labeled with its interaction type.

    Args:
        args: parsed CLI namespace with .server (grip endpoint), .db (graph
            name) and .input (path to the SIF file).
    """
    conn = gripql.Connection(args.server)
    O = conn.graph(args.db)

    proteins = set()
    rows = []
    with open(args.input) as handle:
        for line in handle:
            row = line.rstrip().split("\t")
            # Guard against blank/truncated lines (e.g. trailing newline at
            # EOF), which would otherwise raise IndexError on row[2].
            if len(row) < 3:
                continue
            if not (row[0].startswith("CHEBI:") or row[2].startswith("CHEBI:")):
                rows.append(row)
                proteins.add(row[0])
                proteins.add(row[2])

    print("Loading Proteins")
    for protein in proteins:
        O.addVertex(protein, "Protein", {})

    def chunks(seq, size):
        """Yield successive size-sized chunks from seq."""
        for start in range(0, len(seq), size):
            yield seq[start:start + size]

    # Bulk-load edges in batches of 10k to limit request size.
    edge_count = 0
    for chunk in chunks(rows, 10000):
        b = O.bulkAdd()
        for row in chunk:
            b.addEdge(row[0], row[2], row[1], {})
        b.execute()
        edge_count += len(chunk)
        print("Loaded %s edges" % edge_count)
def load_matrix(args):
    """Load a tab-separated matrix as one "Sample" vertex per row.

    The first column is used as the vertex gid; every non-NaN cell in the row
    becomes a property on that vertex.

    Args:
        args: parsed CLI namespace with .server (grip endpoint), .db (graph
            name) and .input (path to the TSV matrix).
    """
    conn = gripql.Connection(args.server)
    O = conn.graph(args.db)
    matrix = pandas.read_csv(args.input, sep="\t", index_col=0)
    for name, row in matrix.iterrows():
        # Series.iteritems() was removed in pandas 2.0 — use items().
        # Keep every value except NaN floats (missing cells).
        data = {
            k: v
            for k, v in row.items()
            if not isinstance(v, float) or not math.isnan(v)
        }
        O.addVertex(name, "Sample", data)
def __init__(self):
    """Seed a credential file from BMEGA* env vars if absent, then connect.

    Connects to the public BMEG API ("rc5" graph) and stores the graph
    handle on self.O.
    """
    if not os.path.exists(credential_file):
        # First run: materialize OAuth credentials from the environment.
        creds = {
            'OauthEmail': os.environ.get('BMEGAOauthEmail', ''),
            'OauthAccessToken': os.environ.get('BMEGAOauthAccessToken', ''),
            'OauthExpires': os.environ.get('BMEGAOauthExpires', ''),
        }
        with open(credential_file, 'w') as cfile:
            json.dump(creds, cfile)
    # NOTE(review): the connection reads credentials_dir + "bmeg_credentials.json",
    # not the credential_file written above — confirm these point at the same file.
    conn = gripql.Connection(
        'https://bmeg.io/api',
        credential_file=credentials_dir + "bmeg_credentials.json")
    self.O = conn.graph("rc5")
    print("Connected to bmeg")
def render_page_content(pathname):
    """Route a URL path to its page layout; unknown paths render a 404 panel.

    "/" and "/query" map to the query view, "/facet" to the facet view.
    The list of available (non-schema) graphs is passed to either view.
    """
    client = gripql.Connection(conn.GRIP, credential_file=conn.CRED)
    # Hide grip's internal "__schema__" graphs from the UI.
    graphs = [g for g in client.listGraphs() if not g.endswith("__schema__")]
    if pathname in ("/", "/query"):
        return query_view.setup(graphs)
    if pathname == "/facet":
        return facet_view.setup(graphs)
    return dbc.Jumbotron([
        html.H1("404: Not found", className="text-danger"),
        html.Hr(),
        html.P(f"The pathname {pathname} was not recognised..."),
    ])
import pandas as pd
import json
import gripql
import datetime
import dash_core_components as dcc
import dash_html_components as html
from app import app
import dash
import plotly.express as px
import plotly.graph_objects as go

# https://towardsdatascience.com/build-an-interactive-choropleth-map-with-plotly-and-dash-1de0de00dce0

conn = gripql.Connection("http://localhost:8201")
G = conn.graph("covid")

# County boundary polygons keyed by FIPS code; keep only Oregon counties
# (STATE fips "41") for the choropleth.
with open("geojson-counties-fips.json") as handle:
    counties = json.load(handle)
countiesSub = {
    "type": "FeatureCollection",
    "features": [c for c in counties['features']
                 if c['properties']['STATE'] == "41"],
}

curDate = "2020-04-14 23:33:31"
# Per-county summary for Oregon at curDate:
# gid, confirmed, deaths, recovered.
q = G.query().V().hasLabel("SummaryLocation").has(
    gripql.eq("province_state", "OR")).as_("a")
q = q.out("summary_reports").has(gripql.eq("date", curDate)).as_("b")
q = q.render(["$a._gid", "$b.confirmed", "$b.deaths", "$b.recovered"])
def __init__(self):
    """Connect to the public BMEG API and keep a handle to the bmeg_rc1_2 graph on self.O."""
    connection = gripql.Connection(
        'https://bmeg.io/api',
        credential_file=credentials_dir + "bmeg_credentials.json")
    self.O = connection.graph("bmeg_rc1_2")
    print("Connected to bmeg")
def load_matrix(args):
    """Load a delimited matrix file into a grip graph.

    Two modes, selected by args.connect:
      * connect mode: each column and row becomes a vertex (unless
        args.no_vertex) and every non-NaN cell becomes an edge
        row -> column carrying the cell value under args.edge_prop.
      * vertex mode: each row becomes one vertex whose properties are the
        row's non-NaN, include/exclude-filtered cells; optional edges and
        destination vertices are derived from format templates in
        args.edge / args.dst_vertex.

    When args.dump is set, nothing is written to the server; instead
    JSON-lines vertex/edge files are written to args.dump + ".vertex" /
    ".edge".

    Args:
        args: parsed CLI namespace (server, db, input, sep, index_col,
            columns, skiprows, transpose, dump, connect, no_vertex,
            row/col prefix+label, edge_label, edge_prop, col_regex,
            column_include, column_exclude, regex, exclude, edge,
            dst_vertex).
    """
    conn = gripql.Connection(args.server)
    if args.db not in list(conn.listGraphs()):
        conn.addGraph(args.db)
    O = conn.graph(args.db)

    if args.columns is not None:
        matrix = pandas.read_csv(args.input, sep=args.sep,
                                 index_col=args.index_col, header=None,
                                 names=args.columns, skiprows=args.skiprows)
    else:
        matrix = pandas.read_csv(args.input, sep=args.sep,
                                 index_col=args.index_col,
                                 skiprows=args.skiprows)
    if args.transpose:
        matrix = matrix.transpose()

    if args.dump is not None:
        dump_vertex_file = open(args.dump + ".vertex", "w")
        dump_edge_file = open(args.dump + ".edge", "w")

    def dump_vertex(gid, label, data):
        """Write one vertex record to the JSON-lines dump file."""
        print("Add Vertex: %s" % (gid))
        dump_vertex_file.write(
            json.dumps({"gid": gid, "label": label, "data": data}) + "\n")

    def dump_edge(src, dst, label, data):
        """Write one edge record to the JSON-lines dump file."""
        print("Add Edge: %s %s" % (src, dst))
        dump_edge_file.write(
            json.dumps({"from": src, "to": dst, "label": label,
                        "data": data}) + "\n")

    if args.connect:
        if not args.no_vertex:
            # Every row x col creates an edge with the weight value, so seed
            # the endpoint vertices first.
            # BUGFIX: vertices were previously queried/created under the
            # unprefixed ids (c / r) while edges below use the prefixed
            # cname / rname, leaving dangling edges whenever a prefix is set.
            for c in matrix.columns:
                cname = "%s%s" % (args.col_prefix, c)
                if list(O.query().V(cname).count())[0]['count'] == 0:
                    if args.dump is not None:
                        dump_vertex(cname, args.col_label, {})
                    else:
                        O.addVertex(cname, args.col_label, {})
            for r in matrix.index:
                rname = "%s%s" % (args.row_prefix, r)
                if list(O.query().V(rname).count())[0]['count'] == 0:
                    if args.dump is not None:
                        dump_vertex(rname, args.row_label, {})
                    else:
                        O.addVertex(rname, args.row_label, {})
        for name, row in matrix.iterrows():
            rname = "%s%s" % (args.row_prefix, name)
            print("Loading: %s" % (rname))
            b = O.bulkAdd()
            for c in matrix.columns:
                cname = "%s%s" % (args.col_prefix, c)
                v = row[c]
                if not math.isnan(v):
                    if args.dump is not None:
                        dump_edge(rname, cname, args.edge_label,
                                  {args.edge_prop: v})
                    else:
                        b.addEdge(rname, cname, args.edge_label,
                                  {args.edge_prop: v})
            b.execute()
    else:
        if args.col_regex is not None:
            # Rename columns via (pattern, replacement) before loading.
            col_map = {col: re.sub(args.col_regex[0], args.col_regex[1], col)
                       for col in matrix.columns}
            matrix = matrix.rename(columns=col_map)
        for name, row in matrix.iterrows():
            rname = "%s%s" % (args.row_prefix, name)
            print("Loading: %s" % (rname))
            data = {}
            for c in matrix.columns:
                v = row[c]
                if args.column_include is None or c in args.column_include:
                    if c not in args.column_exclude:
                        # Keep everything except NaN floats (missing cells).
                        if not isinstance(v, float) or not math.isnan(v):
                            data[c] = v
            for col, reg, rep in args.regex:
                # Guard: the target column may have been dropped as NaN above.
                if col in data:
                    data[col] = re.sub(reg, rep, data[col])
            if not args.no_vertex and rname not in args.exclude:
                if args.dump is not None:
                    dump_vertex(rname, args.row_label, data)
                else:
                    O.addVertex(rname, args.row_label, data)
            # Expose the raw row name and gid to the format templates below.
            data["_rowname"] = name
            data["_gid"] = rname
            for dst, edge in args.edge:
                try:
                    dstFmt = dst.format(**data)
                except KeyError:
                    print("Formatting Error")
                    dstFmt = None
                if dstFmt is not None:
                    if args.dump is not None:
                        dump_edge(rname, dstFmt, edge, {})
                    else:
                        O.addEdge(rname, dstFmt, edge)
            for dst, label in args.dst_vertex:
                try:
                    dstFmt = dst.format(**data)
                except KeyError:
                    dstFmt = None
                if dstFmt is not None:
                    if list(O.query().V(dstFmt).count())[0]['count'] == 0:
                        if args.dump is not None:
                            dump_vertex(dstFmt, label, {})
                        else:
                            O.addVertex(dstFmt, label, {})

    if args.dump is not None:
        dump_vertex_file.close()
        dump_edge_file.close()
nargs="+", default=[], help="Exclude test suite(s)") args = parser.parse_args() server = args.server if len(args.tests) > 0: tests = ["ot_" + t for t in args.tests] else: tests = [ os.path.basename(a)[:-3] for a in glob(os.path.join(TESTS, "ot_*.py")) ] # filter out excluded tests tests = [t for t in tests if t[3:] not in args.exclude] conn = gripql.Connection(server) correct = 0 total = 0 for name in tests: mod = imp.load_source('test.%s' % name, os.path.join(TESTS, name + ".py")) for f in dir(mod): if f.startswith("test_"): func = getattr(mod, f) if callable(func): try: print("Running: %s %s " % (name, f[5:])) GRAPH = "test_graph_" + id_generator() conn.addGraph(GRAPH) e = func(conn.graph(GRAPH))
def __init__(self, bmeg_url, bmeg_graph, bmeg_credential_file):
    """Open a BMEG connection and store the graph handle on self.O.

    Args:
        bmeg_url: grip server endpoint.
        bmeg_graph: name of the graph to open.
        bmeg_credential_file: path to the credential file for the connection.
    """
    connection = gripql.Connection(bmeg_url,
                                   credential_file=bmeg_credential_file)
    self.O = connection.graph(bmeg_graph)  # noqa E741
def connect():
    """Build and return a gripql connection from the module-level GRIP endpoint and CRED credential file."""
    connection = gripql.Connection(GRIP, credential_file=CRED)
    return connection