def __enter__(self):
    """Open a session pool and patch ``connection.execute_query`` so that
    subsequent queries re-use this pool's session.

    :returns: the created connection pool
    """
    # Keep the unpatched execute_query() so the shadow below (and the
    # context exit) can delegate to the original implementation.
    self.original_execute_query = connection.execute_query
    # Create a pool with a session so server-side state persists
    # between queries issued through it.
    self.pool = connection.CONNECTION_POOL_TYPE(pool_size=self.pool_size, with_session=True, **connection.HOST_PARAMS)
    # Seed optional binding variables into the session via a no-op query.
    if self.bindings:
        connection.execute_query(
            'g', bindings=self.bindings, isolate=False, pool=self.pool)
    # Patch execute_query only when running non-concurrently.
    if connection.CONNECTION_TYPE == Connection:

        # Shadow execute_query with this pool as the default.
        # NOTE(review): pool=self.pool is evaluated at def time; fine
        # here since the pool already exists, but re-entering the
        # context creates a new closure each time — confirm intended.
        def execute_in_pool(query, params=None, transaction=True,
                            isolate=True, pool=self.pool, *args, **kwargs):
            params = params or {}
            return self.original_execute_query(
                query, bindings=params, transaction=transaction,
                isolate=isolate, pool=pool, *args, **kwargs
            )

        # Patch execute_query to re-use the pool with session.
        connection.execute_query = execute_in_pool
    return self.pool
def __enter__(self):
    """Open a session pool and patch ``connection.execute_query`` so that
    subsequent queries re-use this pool's session.

    :returns: the created connection pool
    """
    # Keep the unpatched execute_query() so the shadow below (and the
    # context exit) can delegate to the original implementation.
    self.original_execute_query = connection.execute_query
    # Create a pool with a session so server-side state persists
    # between queries issued through it.
    self.pool = connection.CONNECTION_POOL_TYPE(pool_size=self.pool_size, with_session=True, **connection.HOST_PARAMS)
    # Seed optional binding variables into the session via a no-op query.
    if self.bindings:
        connection.execute_query('g', params=self.bindings, isolate=False, pool=self.pool)
    # Patch execute_query only when running non-concurrently.
    if connection.CONNECTION_TYPE == Connection:

        # Shadow execute_query with this pool as the default.
        # NOTE(review): pool=self.pool is evaluated at def time; fine
        # here since the pool already exists by this point.
        def execute_in_pool(query, params=None, transaction=True,
                            isolate=True, pool=self.pool, *args, **kwargs):
            params = params or {}
            return self.original_execute_query(
                query, params=params, transaction=transaction,
                isolate=isolate, pool=pool, *args, **kwargs
            )

        # Patch execute_query to re-use the pool with session.
        connection.execute_query = execute_in_pool
    return self.pool
def __exit__(self, exc, message, traceback):
    """Unwrap ``g`` back to the base graph, then defer to the parent exit."""
    # Undo the blueprints wrapping before the session is torn down.
    connection.execute_query(
        "g = g.baseGraph",
        transaction=False,
        isolate=False,
        pool=self.pool,
    )
    parent = super(BlueprintsWrapper, self)
    return parent.__exit__(exc, message, traceback)
def test_blueprints_wrapper(self):
    """Bindings supplied via the wrapper config are visible to scripts."""
    value = 10
    config = dict(class_name="ReadOnlyGraph", bindings={'k': value})
    with BlueprintsWrapper(**config):
        interpolated = connection.execute_query('"powers of ${k}"')
        self.assertEqual(interpolated, "powers of {}".format(value))
        squared = connection.execute_query("k * k")
        self.assertEqual(squared, value * value)
def get_existing_indices():
    """Return futures for the vertex and edge labels defined in the graph."""
    label_scripts = (
        'mgmt = graph.openManagement(); mgmt.getVertexLabels().collect{it.name()}',
        'mgmt = graph.openManagement(); mgmt.getRelationTypes(EdgeLabel).collect{it.name()}',
    )
    vertices, edges = (connection.execute_query(s) for s in label_scripts)
    return vertices, edges
def test_blueprints_wrapper(self):
    """A binding passed through the wrapper config is usable in queries."""
    bound = 10
    with BlueprintsWrapper(class_name="ReadOnlyGraph",
                           bindings={'k': bound}):
        from_gremlin = connection.execute_query('"powers of ${k}"')
        from_python = "powers of {}".format(bound)
        self.assertEqual(from_gremlin, from_python)
        product = connection.execute_query("k * k")
        self.assertEqual(product, bound * bound)
def _reload_values(self, *args, **kwargs):
    """
    Method for reloading the current vertex by reading its current
    values from the database.

    Returns a future resolving to a dict of the vertex's reloaded
    property values (keyed by property name, plus ``id``).
    """
    reloaded_values = {}
    # Future handed back to the caller; resolved in the callbacks below.
    future = connection.get_future(kwargs)
    future_result = connection.execute_query(
        'g.V(vid)', {'vid': self._id}, **kwargs)

    def on_read(f2):
        # Stage 2: the stream read completed (or failed).
        try:
            result = f2.result()
            result = result.data[0]
        except Exception as e:
            future.set_exception(e)
        else:
            # del result['type'] # don't think I need this
            reloaded_values['id'] = result['id']
            for name, value in result.get('properties', {}).items():
                # This is a hack until decide how to deal with props
                reloaded_values[name] = value[0]['value']
            future.set_result(reloaded_values)

    def on_reload_values(f):
        # Stage 1: the query was dispatched; start reading the stream.
        try:
            stream = f.result()
        except Exception as e:
            future.set_exception(e)
        else:
            future_read = stream.read()
            future_read.add_done_callback(on_read)

    future_result.add_done_callback(on_reload_values)
    return future
def _reload_values(self, *args, **kwargs):
    """
    Method for reloading the current vertex by reading its current
    values from the database.

    Returns a future resolving to a dict of the vertex's reloaded
    property values (keyed by property name, plus ``id``).
    """
    reloaded_values = {}
    # Future handed back to the caller; resolved in the callbacks below.
    future = connection.future_class()
    future_result = connection.execute_query(
        'g.V(vid)', {'vid': self._id}, **kwargs)

    def on_read(f2):
        # Stage 2: the stream read completed (or failed).
        try:
            result = f2.result()
            result = result.data[0]
        except Exception as e:
            future.set_exception(e)
        else:
            # del result['type'] # don't think I need this
            reloaded_values['id'] = result['id']
            for name, value in result.get('properties', {}).items():
                # This is a hack until decide how to deal with props
                reloaded_values[name] = value[0]['value']
            future.set_result(reloaded_values)

    def on_reload_values(f):
        # Stage 1: the query was dispatched; start reading the stream.
        try:
            stream = f.result()
        except Exception as e:
            future.set_exception(e)
        else:
            future_read = stream.read()
            future_read.add_done_callback(on_read)

    future_result.add_done_callback(on_reload_values)
    return future
def __enter__(self):
    """Enter the parent session context, wrap ``g``, and run setup scripts.

    :returns: the session pool created by the parent context manager
    """
    pool = super(BlueprintsWrapper, self).__enter__()
    # Execute the g_assignment script that wraps the graph; only the
    # session side effect matters, so the response is discarded
    # (previously bound to an unused local).
    connection.execute_query(
        self.g_assignment, transaction=False, isolate=False, pool=pool)
    # Provide a dummy stopTransaction() on non-transactional graphs so
    # later code may call it unconditionally. Use the same `pool` local
    # throughout (it is the pool the parent just created).
    connection.execute_query(
        "if (!g.metaClass.respondsTo(g, 'stopTransaction')) { g.metaClass.stopTransaction = {null} }",
        transaction=False, isolate=False, pool=pool
    )
    # Execute any additional wrapper setup statements.
    for statement in self.setup:
        connection.execute_query(
            statement, transaction=False, isolate=False, pool=pool)
    return pool
def test_violate_manual_schema_short(self):
    """Writing a string into a Short-typed key must raise RuntimeError."""
    prop_key = IntegerTestVertex.get_property_by_name('test_val3')
    vertex_label = IntegerTestVertex.get_label()
    yield create_key(prop_key, 'Short')
    bad_bindings = {'l0': vertex_label, 'k0': prop_key, 'v0': 'hello'}
    with self.assertRaises(RuntimeError):
        stream = yield connection.execute_query(
            "graph.addVertex(label, l0, k0, v0)", bindings=bad_bindings)
        yield stream.read()
def test_gather_existing_indices(self):
    """Make sure existing vertex and edge types can be gathered."""
    # A fresh graph holds no labels at all.
    vertex_future, edge_future = get_existing_indices()
    vertex_labels = (yield (yield vertex_future).read()).data
    edge_labels = (yield (yield edge_future).read()).data
    self.assertEqual(len(vertex_labels), 0)
    self.assertEqual(len(edge_labels), 0)
    # Create one vertex label and one edge label.
    yield connection.execute_query(
        'mgmt = graph.openManagement(); mgmt.makeVertexLabel(name).make(); mgmt.commit()',
        params={'name': 'testvertexindex'})
    yield connection.execute_query(
        'mgmt = graph.openManagement(); mgmt.makeEdgeLabel(name).make(); mgmt.commit()',
        params={'name': 'testedgeindex'})
    # Both should now be discoverable.
    vertex_future, edge_future = get_existing_indices()
    vertex_labels = (yield (yield vertex_future).read()).data
    edge_labels = (yield (yield edge_future).read()).data
    self.assertEqual(len(vertex_labels), 1)
    self.assertEqual(len(edge_labels), 1)
def test_manual_schema(self):
    """An integer stored through a manually created Integer key survives."""
    prop_key = IntegerTestVertex.get_property_by_name('test_val')
    vertex_label = IntegerTestVertex.get_label()
    yield create_key(prop_key, 'Integer')
    stream = yield connection.execute_query(
        "graph.addVertex(label, l0, k0, v0)",
        bindings={'l0': vertex_label, 'k0': prop_key, 'v0': 101})
    resp = yield stream.read()
    stored = resp.data[0]['properties'][prop_key][0]['value']
    self.assertEqual(stored, 101)
def test_gather_existing_indices(self):
    """Make sure existing vertex and edge types can be gathered."""
    v_future, e_future = get_existing_indices()
    v_names = (yield (yield v_future).read()).data
    e_names = (yield (yield e_future).read()).data
    # Nothing has been defined yet.
    self.assertEqual(len(v_names), 0)
    self.assertEqual(len(e_names), 0)
    # Define a vertex label and an edge label, then re-gather.
    yield connection.execute_query(
        'mgmt = graph.openManagement(); mgmt.makeVertexLabel(name).make(); mgmt.commit()',
        params={'name': 'testvertexindex'})
    yield connection.execute_query(
        'mgmt = graph.openManagement(); mgmt.makeEdgeLabel(name).make(); mgmt.commit()',
        params={'name': 'testedgeindex'})
    v_future, e_future = get_existing_indices()
    v_names = (yield (yield v_future).read()).data
    e_names = (yield (yield e_future).read()).data
    self.assertEqual(len(v_names), 1)
    self.assertEqual(len(e_names), 1)
def _get_stream(self, script, deserialize, **kwargs):
    """Execute *script* with this query's bindings and return the future.

    :param deserialize: when truthy, map raw results through
        Element.deserialize before handing them to the caller
    """
    def _handler(raw):
        # Normalise a falsy payload to an empty list.
        items = raw if raw else []
        if deserialize:
            return [Element.deserialize(item) for item in items]
        return items

    return connection.execute_query(
        script, bindings=self._bindings, handler=_handler, **kwargs)
def _simple_traversal(self, operation, *args, **kwargs):
    """
    Perform a simple traversal starting from the current edge returning
    a list of results.

    :param operation: The operation to be performed
    :type operation: str
    :rtype: list
    """
    script = 'g.e(id).%s()' % operation
    raw_results = connection.execute_query(script, {'id': self.id}, **kwargs)
    return [Element.deserialize(item) for item in raw_results]
def test_violate_manual_schema_double(self):
    """Storing a string into a Double-typed key must raise RuntimeError."""
    key = DoubleTestVertex.get_property_by_name('test_val')
    label = DoubleTestVertex.get_label()
    yield create_key(key, 'Double')
    with self.assertRaises(RuntimeError):
        stream = yield connection.execute_query(
            "graph.addVertex(label, l0, k0, v0)",
            bindings={
                'l0': label,
                'k0': key,
                'v0': 'somestring'
            })
        # Reading the stream is what surfaces the server-side type
        # violation. (Removed a leftover debug print of the response.)
        yield stream.read()
def get(self, deserialize=True, *args, **kwargs):
    """Execute the accumulated traversal rooted at this query's vertex.

    :param deserialize: when True, convert raw results into Elements
    :returns: a future yielding the (optionally deserialized) results
    """
    script = "g.V(vid).{}".format(self._get())
    self._bindings.update({"vid": self._vertex._id})

    def _process(raw):
        if not raw:
            return []
        if deserialize:
            return [Element.deserialize(entry) for entry in raw]
        return raw

    return connection.execute_query(
        script, params=self._bindings, handler=_process, **kwargs)
def isolation_query(scope):
    """Run several sleeping queries inside one wrapped session and collect
    the session-bound ``scope`` value each returns."""
    config = dict(class_name="ReadOnlyGraph",
                  bindings={'scope': scope},
                  pool_size=5)
    observed = []
    with BlueprintsWrapper(**config) as pool:
        for step in range(7):
            # Alternate long/short sleeps, offset by the scope id.
            delay = 100 * (step % 2 + 1) - scope * 10
            observed.append(connection.execute_query(
                "sleep sleep_length\nreturn scope",
                params={'sleep_length': delay},
                pool=pool))
    return scope, observed
def get(self, deserialize=True, *args, **kwargs):
    """Execute the accumulated traversal rooted at this query's vertex.

    :param deserialize: when True, convert raw results into Elements
    :returns: a future yielding the (optionally deserialized) results
    """
    script = "g.V(vid).{}".format(self._get())
    self._bindings.update({"vid": self._vertex._id})

    def _process(raw):
        if not raw:
            return []
        if deserialize:
            return [Element.deserialize(entry) for entry in raw]
        return raw

    return connection.execute_query(
        script, bindings=self._bindings, handler=_process, **kwargs)
def isolation_query(scope):
    """Issue several sleep-then-return queries in one wrapped session,
    collecting the bound ``scope`` value from each."""
    settings = {
        'class_name': "ReadOnlyGraph",
        'bindings': {'scope': scope},
        'pool_size': 5,
    }
    results = []
    with BlueprintsWrapper(**settings) as pool:
        for attempt in range(7):
            pause = 100 * (attempt % 2 + 1) - scope * 10
            value = connection.execute_query(
                "sleep sleep_length\nreturn scope",
                params={'sleep_length': pause},
                pool=pool)
            results.append(value)
    return scope, results
def all(cls, ids, as_dict=False, *args, **kwargs):
    """
    Load all edges with the given edge_ids from the graph. By default
    this will return a list of edges but if as_dict is True then it
    will return a dictionary containing edge_ids as keys and edges
    found as values.

    :param ids: A list of titan IDs
    :type ids: list
    :param as_dict: Toggle whether to return a dictionary or list
    :type as_dict: boolean

    :rtype: dict | list
    """
    if not isinstance(ids, array_types):
        raise GoblinQueryError("ids must be of type list or tuple")

    def edge_handler(results):
        # Drop null placeholders returned for missing edges.
        try:
            results = list(filter(None, results))
        except TypeError:
            raise cls.DoesNotExist
        if len(results) != len(ids):
            raise GoblinQueryError(
                "the number of results don't match the number of edge " +
                "ids requested")
        objects = []
        for r in results:
            try:
                objects.append(Element.deserialize(r))
            except KeyError:  # pragma: no cover
                # Include the offending label; this message was
                # previously formatted with an empty string, hiding
                # which edge type was unknown.
                raise GoblinQueryError(
                    'Edge type "%s" is unknown' % r.get('label', ''))
        if as_dict:  # pragma: no cover
            return {e._id: e for e in objects}
        return objects

    return connection.execute_query(
        "g.E(*eids)", params={'eids': ids}, handler=edge_handler, **kwargs)
def all(cls, ids, as_dict=False, *args, **kwargs):
    """
    Load all edges with the given edge_ids from the graph. By default
    this will return a list of edges but if as_dict is True then it
    will return a dictionary containing edge_ids as keys and edges
    found as values.

    :param ids: A list of titan IDs
    :type ids: list
    :param as_dict: Toggle whether to return a dictionary or list
    :type as_dict: boolean

    :rtype: dict | list
    """
    if not isinstance(ids, array_types):
        raise GoblinQueryError("ids must be of type list or tuple")

    def edge_handler(results):
        # Drop null placeholders returned for missing edges.
        try:
            results = list(filter(None, results))
        except TypeError:
            raise cls.DoesNotExist
        if len(results) != len(ids):
            raise GoblinQueryError(
                "the number of results don't match the number of edge " +
                "ids requested")
        objects = []
        for r in results:
            try:
                objects.append(Element.deserialize(r))
            except KeyError:  # pragma: no cover
                # Include the offending label; this message was
                # previously formatted with an empty string, hiding
                # which edge type was unknown.
                raise GoblinQueryError(
                    'Edge type "%s" is unknown' % r.get('label', ''))
        if as_dict:  # pragma: no cover
            return {e._id: e for e in objects}
        return objects

    return connection.execute_query(
        "g.E(*eids)", bindings={'eids': ids}, handler=edge_handler, **kwargs)
def _simple_traversal(self, operation, *args, **kwargs):
    """
    Perform a simple traversal starting from the current edge returning
    a list of results.

    :param operation: The operation to be performed
    :type operation: str

    :returns: a future yielding the (optionally deserialized) results
    """
    deserialize = kwargs.pop('deserialize', True)

    def edge_traversal_handler(data):
        if deserialize:
            data = [Element.deserialize(d) for d in data]
        return data

    future_results = connection.execute_query(
        'g.e(id).%s()' % operation, {'id': self.id},
        handler=edge_traversal_handler, **kwargs)
    # BUG FIX: a bare ``return`` previously discarded the future, so
    # callers always received None instead of the query results.
    return future_results
def _property_handler(script, graph_name, **kwargs):
    """Run a property/schema *script* against *graph_name* and return a
    future that resolves with the fully-read response.

    NOTE(review): **kwargs is consumed only by get_future() and is not
    forwarded to execute_query — confirm that is intentional.
    """
    future = connection.get_future(kwargs)
    future_response = connection.execute_query(script, graph_name=graph_name)

    def on_read(f2):
        # Stage 2: resolve with the read result, or propagate the error.
        try:
            result = f2.result()
        except Exception as e:
            future.set_exception(e)
        else:
            future.set_result(result)

    def on_key(f):
        # Stage 1: the query was dispatched; start reading the stream.
        try:
            stream = f.result()
        except Exception as e:
            future.set_exception(e)
        else:
            future_read = stream.read()
            future_read.add_done_callback(on_read)

    future_response.add_done_callback(on_key)
    return future
def _reload_values(self, *args, **kwargs):
    """
    Re-read the values for this edge from the graph database.

    Returns a future resolving to a dict of the reloaded values, or an
    empty dict when the edge produced no result.
    """
    reloaded_values = {}
    # Future handed back to the caller; resolved in the callbacks below.
    future = connection.get_future(kwargs)
    future_result = connection.execute_query('g.E(eid)', {'eid': self._id}, **kwargs)

    def on_read(f2):
        # Stage 2: the stream read completed (or failed).
        try:
            result = f2.result()
            result = result.data[0]
        except Exception as e:
            future.set_exception(e)
        else:
            if result:
                # del result['type']
                reloaded_values['id'] = result['id']
                for name, value in result.get('properties', {}).items():
                    reloaded_values[name] = value
                if result['id']:
                    # Refresh the local id in case the server changed it.
                    setattr(self, 'id', result['id'])
                future.set_result(reloaded_values)
            else:
                future.set_result({})

    def on_reload(f):
        # Stage 1: the query was dispatched; start reading the stream.
        try:
            stream = f.result()
        except Exception as e:
            future.set_exception(e)
        else:
            future_read = stream.read()
            future_read.add_done_callback(on_read)

    future_result.add_done_callback(on_reload)
    return future
def _reload_values(self, *args, **kwargs):
    """
    Re-read the values for this edge from the graph database.

    Returns a future resolving to a dict of the reloaded values, or an
    empty dict when the edge produced no result.
    """
    reloaded_values = {}
    # Future handed back to the caller; resolved in the callbacks below.
    future = connection.future_class()
    future_result = connection.execute_query(
        'g.E(eid)', {'eid': self._id}, **kwargs)

    def on_read(f2):
        # Stage 2: the stream read completed (or failed).
        try:
            result = f2.result()
            result = result.data[0]
        except Exception as e:
            future.set_exception(e)
        else:
            if result:
                # del result['type']
                reloaded_values['id'] = result['id']
                for name, value in result.get('properties', {}).items():
                    reloaded_values[name] = value
                if result['id']:
                    # Refresh the local id in case the server changed it.
                    setattr(self, 'id', result['id'])
                future.set_result(reloaded_values)
            else:
                future.set_result({})

    def on_reload(f):
        # Stage 1: the query was dispatched; start reading the stream.
        try:
            stream = f.result()
        except Exception as e:
            future.set_exception(e)
        else:
            future_read = stream.read()
            future_read.add_done_callback(on_read)

    future_result.add_done_callback(on_reload)
    return future
def all(cls, ids=None, as_dict=False, match_length=True, *args, **kwargs):
    """
    Load all vertices with the given ids from the graph. By default
    this will return a list of vertices but if as_dict is True then it
    will return a dictionary containing ids as keys and vertices found
    as values.

    :param ids: A list of titan ids; when omitted, all vertices with
        this element's label are fetched
    :type ids: list | None
    :param as_dict: Toggle whether to return a dictionary or list
    :type as_dict: boolean

    :rtype: dict | list
    """
    # The old default ([]) was a shared mutable default argument; use
    # None as the sentinel and normalise it here (backward compatible).
    if ids is None:
        ids = []
    if not isinstance(ids, array_types):
        raise GoblinQueryError("ids must be of type list or tuple")

    handlers = []
    future = connection.future_class()
    if len(ids) == 0:
        # No ids given: fetch every vertex carrying this label.
        future_results = connection.execute_query(
            'g.V.hasLabel(x)', params={"x": cls.get_label()}, **kwargs)
    else:
        strids = [str(i) for i in ids]
        # Need to test sending complex bindings with client
        vids = ", ".join(strids)
        future_results = connection.execute_query(
            'g.V(%s)' % vids, **kwargs)

    def id_handler(results):
        # Strip null results; optionally require every id to be found.
        try:
            results = list(filter(None, results))
        except TypeError:
            raise cls.DoesNotExist
        if len(results) != len(ids) and match_length:
            raise GoblinQueryError(
                "the number of results don't match the number of " +
                "ids requested")
        return results

    handlers.append(id_handler)

    def result_handler(results):
        objects = []
        for r in results:
            try:
                objects.append(Element.deserialize(r))
            except KeyError:  # pragma: no cover
                raise GoblinQueryError(
                    'Vertex type "%s" is unknown' % r.get('label', ''))
        if as_dict:  # pragma: no cover
            return {v._id: v for v in objects}
        return objects

    handlers.append(result_handler)

    def on_all(f):
        # Attach both handlers to the stream, then resolve with it.
        try:
            stream = f.result()
        except Exception as e:
            future.set_exception(e)
        else:
            for h in handlers:
                stream.add_handler(h)
            future.set_result(stream)

    future_results.add_done_callback(on_all)
    return future
def __call__(self, instance, *args, **kwargs):
    """
    Intercept attempts to call the GremlinMethod attribute and
    perform a gremlin query returning the results.

    :param instance: The class instance the method was called on
    :param pool: The RexPro connection pool to execute the query with
        (optional)
    :type instance: object
    """
    self._setup()
    # Pop the optional execute_query arguments from kwargs so they are
    # not treated as Gremlin parameters below.
    query_kwargs = connection.pop_execute_query_kwargs(kwargs)
    query_kwargs['transaction'] = (query_kwargs.get('transaction') or
                                   self.transaction)
    args = list(args)
    # Instance methods implicitly receive the element id as first arg.
    if not self.classmethod:
        args = [instance._id] + args
    params = self.defaults.copy()
    if len(args + list(kwargs.values())) > len(self.arg_list):  # pragma: no cover
        raise TypeError(
            '%s() takes %s args, %s given' % (
                self.attr_name, len(self.arg_list), len(args)))
    # Check for and calculate callable defaults.
    for k, v in params.items():
        if callable(v):
            params[k] = v()
    # Bind positional args to parameter names in declaration order.
    arglist = self.arg_list[:]
    for arg in args:
        params[arglist.pop(0)] = arg
    # Bind keyword args; duplicates and unknown names are rejected
    # (both branches of the inner if raise, so the pop below only runs
    # for names still present in arglist).
    for k, v in kwargs.items():
        if k not in arglist:
            an = self.attr_name
            if k in params:  # pragma: no cover
                raise TypeError(
                    "%s() got multiple values for keyword argument '%s'"
                    % (an, k))
            else:  # pragma: no cover
                raise TypeError(
                    "%s() got an unexpected keyword argument '%s'"
                    % (an, k))
        arglist.pop(arglist.index(k))
        params[k] = v
    params = self.transform_params_to_database(params)
    # Prepend any Gremlin import statements to the function body.
    import_list = []
    for imp in self.imports + self.extra_imports:
        if imp is not None:
            for import_string in imp.import_list:
                import_list.append(import_string)
    import_string = '\n'.join(import_list)
    script = '\n'.join([import_string, self.function_body])
    # Figure out new method to set context for logging...
    # try:
    # if hasattr(instance, 'get_element_type'):
    #     context = "vertices.{}".format(instance.get_element_type())
    # elif hasattr(instance, 'get_label'):
    #     context = "edges.{}".format(instance.get_label())
    # else:
    #     context = "other"
    context = "TODO"
    context = "{}.{}".format(context, self.method_name)
    return connection.execute_query(script, bindings=params,
                                    context=context, **query_kwargs)
def get_existing_indices():
    """Return a (vertex_labels, edge_labels) pair of query futures."""
    vertex_script = ('mgmt = graph.openManagement(); '
                     'mgmt.getVertexLabels().collect{it.name()}')
    edge_script = ('mgmt = graph.openManagement(); '
                   'mgmt.getRelationTypes(EdgeLabel).collect{it.name()}')
    return (connection.execute_query(vertex_script),
            connection.execute_query(edge_script))
def __call__(self, instance, *args, **kwargs):
    """
    Intercept attempts to call the GremlinMethod attribute and
    perform a gremlin query returning the results.

    :param instance: The class instance the method was called on
    :param pool: The RexPro connection pool to execute the query with
        (optional)
    :type instance: object
    """
    self._setup()
    # Pop the optional execute_query arguments from kwargs so they are
    # not treated as Gremlin parameters below.
    query_kwargs = connection.pop_execute_query_kwargs(kwargs)
    query_kwargs['transaction'] = (query_kwargs.get('transaction') or
                                   self.transaction)
    args = list(args)
    # Instance methods implicitly receive the element id as first arg.
    if not self.classmethod:
        args = [instance._id] + args
    params = self.defaults.copy()
    if len(args + list(kwargs.values())) > len(self.arg_list):  # pragma: no cover
        raise TypeError(
            '%s() takes %s args, %s given' % (
                self.attr_name, len(self.arg_list), len(args)))
    # Check for and calculate callable defaults.
    for k, v in params.items():
        if callable(v):
            params[k] = v()
    # Bind positional args to parameter names in declaration order.
    arglist = self.arg_list[:]
    for arg in args:
        params[arglist.pop(0)] = arg
    # Bind keyword args; duplicates and unknown names are rejected
    # (both branches of the inner if raise, so the pop below only runs
    # for names still present in arglist).
    for k, v in kwargs.items():
        if k not in arglist:
            an = self.attr_name
            if k in params:  # pragma: no cover
                raise TypeError(
                    "%s() got multiple values for keyword argument '%s'"
                    % (an, k))
            else:  # pragma: no cover
                raise TypeError(
                    "%s() got an unexpected keyword argument '%s'"
                    % (an, k))
        arglist.pop(arglist.index(k))
        params[k] = v
    params = self.transform_params_to_database(params)
    # Prepend any Gremlin import statements to the function body.
    import_list = []
    for imp in self.imports + self.extra_imports:
        if imp is not None:
            for import_string in imp.import_list:
                import_list.append(import_string)
    import_string = '\n'.join(import_list)
    script = '\n'.join([import_string, self.function_body])
    # Figure out new method to set context for logging...
    # try:
    # if hasattr(instance, 'get_element_type'):
    #     context = "vertices.{}".format(instance.get_element_type())
    # elif hasattr(instance, 'get_label'):
    #     context = "edges.{}".format(instance.get_label())
    # else:
    #     context = "other"
    context = "TODO"
    context = "{}.{}".format(context, self.method_name)
    tmp = connection.execute_query(script, params, context=context,
                                   **query_kwargs)
    # Temporary hack
    # except GoblinQueryError as pqe:  # pragma: no cover
    #     import pprint
    #     msg = "Error while executing Gremlin method\n\n"
    #     msg += "[Method]\n{}\n\n".format(self.method_name)
    #     msg += "[Params]\n{}\n\n".format(pprint.pformat(params))
    #     msg += "[Function Body]\n{}\n".format(self.function_body)
    #     msg += "[Imports]\n{}\n".format(import_string)
    #     msg += "\n[Error]\n{}\n".format(pqe)
    #     if hasattr(pqe, 'raw_response'):
    #         msg += "\n[Raw Response]\n{}\n".format(pqe.raw_response)
    #     raise GoblinGremlinException(msg)
    return tmp
def test_wrapper_isolation(self):
    # NOTE(review): this issues a single bare query for the binding "k"
    # outside any wrapper and asserts nothing — it looks truncated.
    # Confirm whether the isolation assertions were lost.
    connection.execute_query("k")
def get_existing_indices():
    """Return futures for the keys indexed on vertices and on edges."""
    index_queries = (
        'g.getIndexedKeys(Vertex.class)',
        'g.getIndexedKeys(Edge.class)',
    )
    vertex_keys, edge_keys = (connection.execute_query(q)
                              for q in index_queries)
    return vertex_keys, edge_keys
def all(cls, ids=None, as_dict=False, match_length=True, *args, **kwargs):
    """
    Load all vertices with the given ids from the graph. By default
    this will return a list of vertices but if as_dict is True then it
    will return a dictionary containing ids as keys and vertices found
    as values.

    :param ids: A list of titan ids; when omitted, all vertices with
        this element's label are fetched
    :type ids: list | None
    :param as_dict: Toggle whether to return a dictionary or list
    :type as_dict: boolean

    :rtype: dict | list
    """
    # The old default ([]) was a shared mutable default argument; use
    # None as the sentinel and normalise it here (backward compatible).
    if ids is None:
        ids = []
    if not isinstance(ids, array_types):
        raise GoblinQueryError("ids must be of type list or tuple")

    deserialize = kwargs.pop('deserialize', True)
    handlers = []
    future = connection.get_future(kwargs)
    if len(ids) == 0:
        # No ids given: fetch every vertex carrying this label.
        future_results = connection.execute_query(
            'g.V.hasLabel(x)', bindings={"x": cls.get_label()}, **kwargs)
    else:
        strids = [str(i) for i in ids]
        # Need to test sending complex bindings with client
        vids = ", ".join(strids)
        future_results = connection.execute_query(
            'g.V(%s)' % vids, **kwargs)

    def id_handler(results):
        # Strip null results; optionally require every id to be found.
        try:
            results = list(filter(None, results))
        except TypeError:
            raise cls.DoesNotExist
        if len(results) != len(ids) and match_length:
            raise GoblinQueryError(
                "the number of results don't match the number of " +
                "ids requested")
        return results

    handlers.append(id_handler)

    def result_handler(results):
        # Previously the non-deserialize branch reassigned
        # ``objects = results`` on every loop iteration; hoist it out.
        if not deserialize:
            objects = list(results)
        else:
            objects = []
            for r in results:
                try:
                    objects.append(Element.deserialize(r))
                except KeyError:  # pragma: no cover
                    raise GoblinQueryError(
                        'Vertex type "%s" is unknown' % r.get('label', ''))
        if as_dict:  # pragma: no cover
            return {v._id: v for v in objects}
        return objects

    handlers.append(result_handler)

    def on_all(f):
        # Attach both handlers to the stream, then resolve with it.
        try:
            stream = f.result()
        except Exception as e:
            future.set_exception(e)
        else:
            for h in handlers:
                stream.add_handler(h)
            future.set_result(stream)

    future_results.add_done_callback(on_all)
    return future