def test_transaction_isolation(self):
    """ Tests that operations between 2 transactions are isolated """
    conn1 = self.get_connection()
    conn2 = self.get_connection()
    # Only meaningful against a backend that actually supports transactions.
    if conn1.graph_features['supportsTransactions']:
        # Seed three vertices inside a committed transaction.
        with conn1.transaction():
            v1, v2, v3 = conn1.execute("""
            def v1 = g.addVertex([val:1, str:"vertex 1"])
            def v2 = g.addVertex([val:2, str:"vertex 2"])
            def v3 = g.addVertex([val:3, str:"vertex 3"])
            [v1, v2, v3]
            """)
        print_("{}, {}, {}".format(v1, v2, v3))
        # Open two independent transactions.
        conn1.open_transaction()
        conn2.open_transaction()
        # Mutate v1 on conn1's (uncommitted) transaction...
        v1_1 = conn1.execute("""
        def v1 = g.v(eid)
        v1.setProperty("str", "v1")
        v1
        """, params={'eid': v1['_id']})
        # ...and read the same vertex through conn2's transaction.
        v1_2 = conn2.execute("""
        g.v(eid)
        """, params={'eid': v1['_id']})
        # conn2 must still see the committed value, not conn1's dirty write.
        self.assertEqual(v1_2['_properties']['str'], 'vertex 1')
def test_bring_pool_down(self):
    """Manual smoke test for pool recovery around a long-running query.

    We use this test, plus hand-inserted time delays before and within the
    while loop of the respective connection.py file, to kill the connection
    and simulate the database going down and coming back up.
    """
    print_("Eventlet Lengthy Query")
    pool = self.get_pool()
    conn = pool.create_connection()
    print_("Calling Lengthy Query")
    # Result is intentionally unused; this test is observed manually.
    result = conn.execute(script="""g.V('element_type','measurement')""")
def test_many_network_calls(self):
    """ Test known responses on a network that should be slow, we should get them all asynchronously """
    pile = eventlet.GreenPile()
    # Fan out NUM_ITER slow queries concurrently on the green pile.
    for idx in xrange(self.NUM_ITER):
        payload = {'value': idx, idx: 'value'}
        pile.spawn(
            spawn_slow_network_and_query_slow_response,
            self,
            self.SLOW_NETWORK_QUERY,
            1,
            payload,
        )
    # Drain results as they complete; each must be a dict response.
    for outcome in pile:
        print_(outcome)
        self.assertIsInstance(outcome, dict)
def test_many_network_calls(self):
    """ Test known responses on a network that should be slow, we should get them all asynchronously """
    # NOTE(review): this redefines test_many_network_calls and shadows the
    # earlier definition of the same name in this file — confirm which copy
    # is intended to survive.
    pile = eventlet.GreenPile()
    for i in xrange(self.NUM_ITER):
        pile.spawn(spawn_slow_network_and_query_slow_response,
                   self, self.SLOW_NETWORK_QUERY, 1, {'value': i, i: 'value'})
    # Every asynchronously-gathered result should be a dict.
    for result in pile:
        print_(result)
        self.assertIsInstance(result, dict)
def test_transaction_isolation(self):
    """ Tests that operations between 2 transactions are isolated """
    # NOTE(review): this redefines test_transaction_isolation and shadows the
    # earlier definition of the same name in this file — confirm which copy
    # is intended to survive.
    conn1 = self.get_connection()
    conn2 = self.get_connection()
    if not conn1.graph_features['supportsTransactions']:
        return
    seed_script = """
    def v1 = g.addVertex([val:1, str:"vertex 1"])
    def v2 = g.addVertex([val:2, str:"vertex 2"])
    def v3 = g.addVertex([val:3, str:"vertex 3"])
    [v1, v2, v3]
    """
    update_script = """
    def v1 = g.v(eid)
    v1.setProperty("str", "v1")
    v1
    """
    read_script = """
    g.v(eid)
    """
    # Create three vertices in a committed transaction.
    with conn1.transaction():
        v1, v2, v3 = conn1.execute(seed_script)
    print_("{}, {}, {}".format(v1, v2, v3))
    # Two independent, concurrently open transactions.
    conn1.open_transaction()
    conn2.open_transaction()
    # Uncommitted write on conn1 ...
    v1_1 = conn1.execute(update_script, params={'eid': v1['_id']})
    # ... must not be visible through conn2.
    v1_2 = conn2.execute(read_script, params={'eid': v1['_id']})
    self.assertEqual(v1_2['_properties']['str'], 'vertex 1')
def start_massive_queries(script_params_pairs):
    """Fan out a batch of rexpro queries through a celery group and wait for them.

    :param script_params_pairs: iterable of ``(script, params)`` tuples, one
        celery ``rexpro_query`` task is queued per pair.
    :returns: list of task results, in task order (empty list for empty input).
    """
    num_queries = len(script_params_pairs)
    # Guard: the completion-percentage calculation below would otherwise
    # raise ZeroDivisionError for an empty batch.
    if not num_queries:
        print_("Got 0 Results of 0:")
        return []
    g = group([rexpro_query.s(script, params)
               for script, params in script_params_pairs])
    # join() blocks until every task in the group has finished.
    results = g().join()
    lresults = len(results)
    print_("Got {} Results of {}:".format(lresults, num_queries))
    for i, result in enumerate(results):
        print_("{} - {}".format(i, result))
        # Lazy %-style args: the message is only formatted if the
        # logger actually emits at INFO level.
        logger.info("%s - %s", i, result)
    print_("Got {} of {} Results ({}%):".format(
        lresults, num_queries, (lresults / float(num_queries)) * 100.0))
    return results
if __name__ == '__main__':
    import tasks
    from rexpro._compat import print_, xrange

    print_("Queuing up tasks...")

    # Gremlin script that sleeps for `sleep_length` seconds, then echoes
    # back `data` — simulates a slow network round-trip.
    SLOW_NETWORK_QUERY = """def test_slow_query(sleep_time, value) {
    sleep sleep_time
    return value
}

return test_slow_query(sleep_length, data)
"""

    NUM_QUERIES = 10000
    SLEEP_TIME = 1

    # One (script, params) pair per query; `data` tags each result with
    # its index so results can be told apart.
    script_params_pairs = [
        (SLOW_NETWORK_QUERY, {'sleep_length': SLEEP_TIME, 'data': i})
        for i in xrange(NUM_QUERIES)
    ]

    results = tasks.start_massive_queries(script_params_pairs)

    print_("Got Results:")
    print_(results)
else:
    # Bug fix: print_ was only imported inside the __main__ branch, so
    # importing this module (rather than executing it) hit a NameError here.
    from rexpro._compat import print_
    print_("This must be executed manually")