def _initialize_graph_connection(config):
    """Open a strict OGM Graph for ``config`` and bind the database schema.

    Builds fresh declarative node/relationship bases, derives the class
    mapping from the schema already present in the database (pluralising
    broker names), registers it on the connection, and returns the graph.
    """
    connection = Graph(config, strict=True)
    node_base = declarative_node()
    relationship_base = declarative_relationship()
    schema_mapping = connection.build_mapping(node_base,
                                              relationship_base,
                                              auto_plural=True)
    connection.include(schema_mapping)
    return connection
def _initialize_graph_connection(config, initial_drop=False):
    """Open a strict OGM Graph, optionally dropping the database first.

    The configuration is deep-copied before ``initial_drop`` is applied so
    the caller's config object is never mutated.  The schema mapping is
    rebuilt from the live database and registered on the returned graph.
    """
    local_config = deepcopy(config)
    local_config.initial_drop = initial_drop
    connection = Graph(local_config, strict=True)
    schema_mapping = connection.build_mapping(declarative_node(),
                                              declarative_relationship(),
                                              auto_plural=True)
    connection.include(schema_mapping)
    return connection
def get_pyorient_client():
    """Connect to the configured OrientDB database over ``plocal``.

    Host, database name and credentials come from ``server_config``.  The
    OGM mapping is rebuilt from the live schema without pluralising class
    names, then registered on the returned graph.
    """
    db_url = 'plocal://{0}:2424/{1}'.format(server_config.ORIENTDB_HOST,
                                            server_config.ORIENTDB_DB)
    client = Graph(Config.from_url(db_url,
                                   server_config.ORIENTDB_USER,
                                   server_config.ORIENTDB_PASSWORD),
                   strict=True)
    schema_mapping = client.build_mapping(declarative_node(),
                                          declarative_relationship(),
                                          auto_plural=False)
    client.include(schema_mapping)
    return client
def db_setup(HOST: str, USER: str, dbname: str, PASSW: str, PORT: str,
             keep: bool):
    """Connect to (and optionally re-create) the OrientDB database.

    :param HOST: database host — currently unused; the host is embedded in
        ``dbname``'s URL (kept for interface compatibility)
    :param USER: database user name
    :param dbname: database URL passed to ``Config.from_url``
    :param PASSW: database password
    :param PORT: database port — currently unused (kept for interface
        compatibility)
    :param keep: when False, interactively ask whether to drop and
        re-create the database before connecting
    :returns: the bound OGM ``Graph``
    """
    print('(connecting to db)')
    # Only prompt when the caller did not ask to keep the database; any
    # answer other than an explicit "yes" keeps the existing data.
    drop_database = False
    if not keep:
        answer = input('Are you sure you want to delete the database? (Y/N)')
        drop_database = answer in ['Y', 'y', 'Yes', 'yes', 'YES']
    if drop_database:
        print('(dropping database)')
        g = Graph(Config.from_url(dbname, USER, PASSW, initial_drop=True))
        # Fresh database: create schema classes from the declarative
        # registries.
        g.create_all(Node.registry)
        g.create_all(Relationships.registry)
    else:
        print('(keeping database)')
        g = Graph(Config.from_url(dbname, USER, PASSW, initial_drop=False))
        # Existing database: rebuild the OGM mapping from the schema that
        # is already stored in it.
        SchemaNode = declarative_node()
        SchemaRelationship = declarative_relationship()
        classes_from_schema = g.build_mapping(SchemaNode,
                                              SchemaRelationship,
                                              auto_plural=True)
        g.include(classes_from_schema)
    # Store datetimes in ISO-8601 with milliseconds and a literal 'Z'.
    g.client.command(
        "ALTER DATABASE DATETIMEFORMAT \"yyyy-MM-dd'T'HH:mm:ss.SSS'Z'\"")
    print('our db g', g)
    return g
""" import logging import sys import numpy as np from pyorient.ogm import Graph, Config import neuroarch.models as models import neuroarch.query as query import neuroarch.nxtools as nxtools from cx_config import cx_db graph = Graph(Config.from_url(cx_db, 'admin', 'admin', initial_drop=False)) graph.include(models.Node.registry) graph.include(models.Relationship.registry) logging.basicConfig(level=logging.DEBUG, stream=sys.stdout, format='%(asctime)s %(name)s %(levelname)s %(message)s') logger = logging.getLogger('cx') def leaky_iaf_params(lpu, extern): """ Generate LeakyIAF params. """ k = 1000 assert isinstance(extern, bool) if lpu == 'BU' or lpu == 'bu': return {'extern': extern, 'initV': -0.06 * k,
class neuroarch_server(object):
    """Methods to process neuroarch json tasks.

    Wraps a pyorient OGM ``Graph`` plus a ``query_processor`` and serialises
    task handling through the ``_busy`` flag (one task at a time).
    """

    def __init__(self,
                 database='/na_server',
                 username='******',
                 password='******',
                 user=None):
        # Prefer the binary serialiser; fall back to the default when the
        # installed pyorient rejects the serialization_type flag.
        try:
            self.graph = Graph(
                Config.from_url(database, username, password,
                                initial_drop=False,
                                serialization_type=OrientSerialization.Binary))
        except:
            #print "WARNING: Serialisation flag ignored"
            self.graph = Graph(
                Config.from_url(database, username, password,
                                initial_drop=False))
        # Register the declarative schema classes on the connection.
        self.graph.include(Node.registry)
        self.graph.include(Relationship.registry)
        self.user = user
        self.query_processor = query_processor(self.graph)
        # Crude mutual-exclusion flag polled by receive_task().
        self._busy = False

    def retrieve_neuron(self, nid):
        """Return record ``nid`` as JSON via QueryWrapper, or {} if absent."""
        # WIP: Currently retrieves all information for the get_as method, this will be refined when we know what data we want to store and pull out here
        try:
            n = self.graph.get_element(nid)
            if n == None:
                return {}
            else:
                output = QueryWrapper.from_objs(self.graph, [n])
                return output.get_as()[0].to_json()
        except Exception as e:
            raise e

    def process_query(self, task):
        """ configure a task processing, and format the results as desired """
        # WIP: Expand type of information that can be retrieved
        assert 'query' in task.keys()
        try:
            self.query_processor.process(task['query'], self.user)
            return True
        except Exception as e:
            print e
            return False

    @staticmethod
    def process_verb(output, user, verb):
        """Combine ``output`` with the user's previous state per ``verb``.

        'add' unions into the latest state, 'keep' intersects, 'remove'
        subtracts; any other verb intersects and then undoes the last state
        change via ``user.process_command``.
        """
        if verb == 'add':
            assert (len(user.state) >= 2)
            user.state[-1] = output + user.state[-2]
        elif verb == 'keep':
            assert (len(user.state) >= 2)
            user.state[-1] = output & user.state[-2]
            output = user.state[-2] - user.state[-1]
        elif verb == 'remove':
            assert (len(user.state) >= 2)
            user.state[-1] = user.state[-2] - output
        else:
            # Unrecognised verb: intersect, then roll the state back.
            assert (len(user.state) >= 2)
            cmd = {'undo': {'states': 1}}
            output = output & user.state[-2]
            user.process_command(cmd)
        return output

    def receive_task(self, task, threshold=None, query_results=True):
        """ process a task of form {'query':...} or {'command': ...} update the user states, and query neuroarch This is the default access route """
        # Busy-wait until the previous task releases the flag.
        while (self._busy):
            time.sleep(1)
        try:
            self._busy = True
            if not type(task) == dict:
                task = json.loads(task)
            task = byteify(task)
            if 'format' not in task:
                task['format'] = 'morphology'
            assert 'query' in task or 'command' in task
            user = self.user
            if 'command' in task:
                output = user.process_command(task['command'])
                if 'verb' in task and not task['verb'] == 'show':
                    try:
                        output = self.process_verb(output, user, task['verb'])
                    except Exception as e:
                        print e
                    if not task['verb'] == 'add':
                        if task['format'] == 'morphology':
                            output = output.get_data_rids(cls='MorphologyData')
                        else:
                            output = output._records_to_list(output.nodes)
                        self._busy = False
                        return (output, True)
                if isinstance(output, QueryWrapper):
                    #print output._records_to_list(output.nodes)
                    # Shape the query result according to task['format'].
                    if task['format'] == 'morphology':
                        #df = output.get_data(cls='MorphologyData')[0]
                        try:
                            #output= df[['sample','identifier','x','y','z','r','parent','name']].to_dict(orient='index')
                            #output= df.to_dict(orient='index')
                            output = output.get_data(cls='MorphologyData',
                                                     as_type='nx').node
                        except KeyError:
                            output = {}
                    elif task['format'] == 'no_result':
                        output = {}
                    elif task['format'] == 'get_data':
                        if 'cls' in task:
                            output = output.get_data(
                                cls=task['cls'])[0].to_dict(orient='index')
                        else:
                            output = output.get_data()[0].to_dict(
                                orient='index')
                    elif task['format'] == 'nx':
                        nx_graph = output.get_as('nx')
                        output = {
                            'nodes': nx_graph.node,
                            'edges': nx_graph.edge
                        }
                    elif task['format'] == 'nk':
                        # Per-LPU / per-Pattern node-and-edge dictionaries.
                        output = output.traverse_owned_by_get_toplevel()
                        for x in output['LPU']:
                            g = output['LPU'][x].get_as('nx')
                            output['LPU'][x] = {
                                'nodes': g.node,
                                'edges': g.edge
                            }
                        for x in output['Pattern']:
                            g = output['Pattern'][x].get_as('nx')
                            output['Pattern'][x] = {
                                'nodes': g.node,
                                'edges': g.edge
                            }
                    elif task['format'] == 'df':
                        dfs = output.get_as()
                        output = {}
                        if 'node_cols' in task:
                            output['nodes'] = dfs[0][
                                task['node_cols']].to_dict(orient='index')
                        else:
                            output['nodes'] = dfs[0].to_dict(orient='index')
                        if 'edge_cols' in task:
                            output['edges'] = dfs[1][
                                task['edge_cols']].to_dict(orient='index')
                        else:
                            output['edges'] = dfs[1].to_dict(orient='index')
                    elif task['format'] == 'qw':
                        pass
                    # Default to nodes and edges df
                    else:
                        dfs = output.get_as()
                        output = {
                            'nodes': dfs[0].to_dict(orient='index'),
                            'edges': dfs[1].to_dict(orient='index')
                        }
                else:
                    output = str(output)
                # Optionally split a large dict result into chunks.
                if threshold and isinstance(output, dict):
                    chunked_output = []
                    for c in chunks(output, threshold):
                        chunked_output.append(c)
                    output = chunked_output
                self._busy = False
                return (output, True)
            elif 'query' in task:
                succ = self.process_query(task)
                if query_results:
                    # Re-enter with a retrieve command to fetch the results
                    # of the query that was just processed.
                    task['command'] = {"retrieve": {"state": 0}}
                    output = (None, )
                    try:
                        self._busy = False
                        output = self.receive_task(task, threshold)
                        if output[0] == None:
                            succ = False
                    except Exception as e:
                        print e
                        succ = False
                    self._busy = False
                    if 'temp' in task and task['temp'] and len(
                            user.state) >= 2:
                        user.process_command({'undo': {'states': 1}})
                    return (output[0], succ)
                self._busy = False
                return succ
        except Exception as e:
            print e
            self._busy = False
'deebeetee'))
# NOTE(review): the line above is the tail of a Graph(Config.from_url(...))
# call whose opening lines are outside this view.

# Declarative bases; all schema classes below derive from these and are
# collected into Node.registry / Relationship.registry.
Node = declarative_node()
Relationship = declarative_relationship()


class TransactionsRel(Relationship):
    # OGM broker/class names for this edge class.
    element_plural = 'tx'
    element_type = 'tx'
    since = DateTime()  # presumably when the transaction occurred — TODO confirm
    tx = Float()  # presumably the transaction amount — TODO confirm


class Person(Node):
    element_plural = 'person'
    element_type = 'person'
    uid = String(indexed=True)
    name = String(indexed=True)
    credit_balance = Float(default=0.0, indexed=True)
    debit_balance = Float(default=0.0, indexed=True)
    balance = Float(default=0.0, indexed=True)


#graph.create_all(Node.registry)
#graph.create_all(Relationship.registry)

# Bind Schema
graph.include(Node.registry)
graph.include(Relationship.registry)
class neuroarch_server(object):
    """Methods to process neuroarch json tasks.

    Wraps a pyorient OGM ``Graph`` and a ``query_processor``; tasks are
    dicts (or JSON strings) with a 'query' or 'command' key.
    """

    def __init__(self,
                 database='/na_server',
                 username='******',
                 password='******',
                 user=None):
        # Prefer the binary serialiser; fall back to the default when the
        # installed pyorient rejects the serialization_type flag.
        try:
            self.graph = Graph(
                Config.from_url(database, username, password,
                                initial_drop=False,
                                serialization_type=OrientSerialization.Binary))
        except:
            print "WARNING: Serialisation flag ignored"
            self.graph = Graph(
                Config.from_url(database, username, password,
                                initial_drop=False))
        # Register the declarative schema classes on the connection.
        self.graph.include(Node.registry)
        self.graph.include(Relationship.registry)
        self.user = user
        self.query_processor = query_processor(self.graph)

    def retrieve_neuron(self, nid):
        """Return record ``nid`` as JSON via QueryWrapper, or {} if absent."""
        # WIP: Currently retrieves all information for the get_as method, this will be refined when we know what data we want to store and pull out here
        try:
            n = self.graph.get_element(nid)
            if n == None:
                return {}
            else:
                output = QueryWrapper.from_objs(self.graph, [n])
                return output.get_as()[0].to_json()
        except Exception as e:
            raise e

    # Hackathon 2017
    def retrieve_by_id(self, task, threshold):
        """Look up one object by 'na' or 'vfb' id and return its morphology.

        Returns ``(data, True)`` on success or ``({}, False)`` when no
        matching element is found; large results are chunked by
        ``threshold``.
        """
        # Retrieve an object by ID, in order to allow direct addressing to na objects or vfb neurons.
        # WIP: Test thresholding and chunking with larger NA 'tags'
        key_types = ['na', 'vfb']  #,'fc'] # A list of valid ids
        # na is a direct neuroarch ID minus the #
        # vfb is virtual fly brain with tag vib_id
        # fc will be fly circuit, currently in name
        if not type(task) == dict:
            task = json.loads(task)
        task = byteify(task)
        user = self.user
        key_type = task["key_type"]
        key = task["key"]
        assert key_type in key_types
        if key_type == 'na':
            try:
                n = self.graph.get_element('#' + key)
            except Exception as e:
                raise e
        elif key_type == 'vfb':
            n = self.graph.Neurons.query(vfb_id=key).first()
        else:
            pass
        if n == None:
            return ({}, False)
        else:
            output = QueryWrapper.from_objs(self.graph, [n])
            # Add hook into user system
            user.append(output)
            df = output.get_data(cls='MorphologyData')[0]
            output = df[[
                'sample', 'identifier', 'x', 'y', 'z', 'r', 'parent', 'name'
            ]].to_dict(orient='index')
            # Optionally split a large dict result into chunks.
            if threshold and isinstance(output, dict):
                chunked_output = []
                for c in chunks(output, threshold):
                    chunked_output.append(c)
                output = chunked_output
            return (output, True)

    def process_query(self, task):
        """ configure a task processing, and format the results as desired """
        # WIP: Expand type of information that can be retrieved
        assert 'query' in task.keys()
        try:
            self.query_processor.process(task['query'], self.user)
            return True
        except Exception as e:
            print e
            return False

    @staticmethod
    def process_verb(output, user, verb):
        """Combine ``output`` with the user's previous state per ``verb``.

        'add' unions into the latest state, 'keep' intersects, 'remove'
        subtracts; any other verb intersects and then undoes the last state
        change via ``user.process_command``.
        """
        if verb == 'add':
            assert (len(user.state) >= 2)
            user.state[-1] = output + user.state[-2]
        elif verb == 'keep':
            assert (len(user.state) >= 2)
            user.state[-1] = output & user.state[-2]
            output = user.state[-2] - user.state[-1]
        elif verb == 'remove':
            assert (len(user.state) >= 2)
            user.state[-1] = user.state[-2] - output
        else:
            # Unrecognised verb: intersect, then roll the state back.
            assert (len(user.state) >= 2)
            cmd = {'undo': {'states': 1}}
            output = output & user.state[-2]
            user.process_command(cmd)
        return output

    def receive_task(self, task, threshold=None, query_results=True):
        """ process a task of form {'query':...} or {'command': ...} update the user states, and query neuroarch This is the default access route """
        if not type(task) == dict:
            task = json.loads(task)
        task = byteify(task)
        if 'format' not in task:
            task['format'] = 'morphology'
        assert 'query' in task or 'command' in task
        user = self.user
        if 'command' in task:
            output = user.process_command(task['command'])
            if 'verb' in task and not task['verb'] == 'show':
                try:
                    output = self.process_verb(output, user, task['verb'])
                except Exception as e:
                    print e
                if not task['verb'] == 'add':
                    if task['format'] == 'morphology':
                        output = output.get_data_rids(cls='MorphologyData')
                    else:
                        output = output._records_to_list(output.nodes)
                    return (output, True)
            if isinstance(output, QueryWrapper):
                #print output._records_to_list(output.nodes)
                # Shape the query result according to task['format'].
                if task['format'] == 'morphology':
                    df = output.get_data(cls='MorphologyData')[0]
                    try:
                        output = df[[
                            'sample', 'identifier', 'x', 'y', 'z', 'r',
                            'parent', 'name'
                        ]].to_dict(orient='index')
                    except KeyError:
                        output = {}
                elif task['format'] == 'no_result':
                    output = {}
                elif task['format'] == 'get_data':
                    if 'cls' in task:
                        output = output.get_data(cls=task['cls'])[0].to_dict(
                            orient='index')
                    else:
                        output = output.get_data()[0].to_dict(orient='index')
                elif task['format'] == 'nx':
                    nx_graph = output.get_as('nx')
                    output = {'nodes': nx_graph.node, 'edges': nx_graph.edge}
                elif task['format'] == 'nk':
                    # Per-LPU / per-Pattern node-and-edge dictionaries.
                    output = output.traverse_owned_by_get_toplevel()
                    for x in output['LPU']:
                        g = output['LPU'][x].get_as('nx')
                        output['LPU'][x] = {'nodes': g.node, 'edges': g.edge}
                    for x in output['Pattern']:
                        g = output['Pattern'][x].get_as('nx')
                        output['Pattern'][x] = {
                            'nodes': g.node,
                            'edges': g.edge
                        }
                elif task['format'] == 'df':
                    dfs = output.get_as()
                    output = {}
                    if 'node_cols' in task:
                        output['nodes'] = dfs[0][task['node_cols']].to_dict(
                            orient='index')
                    else:
                        output['nodes'] = dfs[0].to_dict(orient='index')
                    if 'edge_cols' in task:
                        output['edges'] = dfs[1][task['edge_cols']].to_dict(
                            orient='index')
                    else:
                        output['edges'] = dfs[1].to_dict(orient='index')
                # Default to nodes and edges df
                else:
                    dfs = output.get_as()
                    output = {
                        'nodes': dfs[0].to_dict(orient='index'),
                        'edges': dfs[1].to_dict(orient='index')
                    }
            else:
                output = str(output)
            # Optionally split a large dict result into chunks.
            if threshold and isinstance(output, dict):
                chunked_output = []
                for c in chunks(output, threshold):
                    chunked_output.append(c)
                output = chunked_output
            return (output, True)
        elif 'query' in task:
            succ = self.process_query(task)
            if query_results:
                # Re-enter with a retrieve command to fetch the results of
                # the query that was just processed.
                task['command'] = {"retrieve": {"state": 0}}
                output = (None, )
                try:
                    output = self.receive_task(task, threshold)
                    if output[0] == None:
                        succ = False
                except Exception as e:
                    print e
                    succ = False
                return (output[0], succ)
            return succ