def test_reconnect_waits_by_default(self):
    """By default, reconnect() should block until outstanding noreply queries finish.

    Fires a server-side JS query that spins until its 0.5s timeout, then
    reconnects with default arguments (noreply_wait=True) and checks that
    at least the full timeout elapsed before reconnect() returned.
    """
    conn = r.connect(port=self.port)
    started = time()
    # Busy-loop on the server; the 0.5s timeout bounds how long it runs.
    r.js('while(true);', timeout=0.5).run(conn, noreply=True)
    conn.reconnect()  # default behavior: wait for noreply queries to drain
    elapsed = time() - started
    self.assertGreaterEqual(elapsed, 0.5)
def test_reconnect_does_not_wait_if_requested(self):
    """reconnect(noreply_wait=False) should return without draining noreply queries.

    Same setup as the waiting test, but the explicit noreply_wait=False
    must make reconnect() return well before the 0.5s JS timeout expires.
    """
    conn = r.connect(port=self.port)
    started = time()
    # Busy-loop on the server; the 0.5s timeout bounds how long it runs.
    r.js('while(true);', timeout=0.5).run(conn, noreply=True)
    conn.reconnect(noreply_wait=False)
    elapsed = time() - started
    self.assertLess(elapsed, 0.5)
def generate_stats(rdb_conn):
    """Build an issue-statistics report and insert it into STATS_TABLE.

    The report contains, for 'all' issues and for each milestone: the number
    of open and closed issues plus per-assignee open/closed counts. Almost
    everything here builds a single server-side ReQL expression; only the
    final .run(rdb_conn) executes it.
    """
    issues = r.table(ISSUES_TABLE)
    # NOTE: `issue['milestone'] != None` is deliberate — the ReQL driver
    # overloads `!=` to build a server-side `ne` term. Rewriting it as
    # `is not None` would evaluate locally (always True) and break the filter.
    issues_with_milestone = issues.filter(lambda issue: issue['milestone'] != None)
    milestones = issues_with_milestone.map(lambda issue: issue['milestone']['title']).distinct()
    # Generate user stats (how many issues assigned to this user have been opened and closed) for a particular set of issues
    def user_stats(issue_set):
        # Remove issues that don't have owners from the issue set
        issue_set = issue_set.filter(lambda issue: issue['assignee'] != None)
        # Get a list of users issues are assigned to
        owners = issue_set.map(lambda issue: issue['assignee']).distinct()
        # Count the issues with a given owner and state (shorthand since we reuse this)
        # NOTE: `&` (not `and`) is required — the driver overloads `&` into a ReQL `and` term.
        def count_issues(owner,state):
            return issue_set.filter(lambda issue: (issue['assignee']['login'] == owner['login']) & (issue['state'] == state)).count()
        # Return a list of documents with stats for each owner
        return owners.map(lambda owner: {
            'owner': owner['login'],
            'owner_avatar_url': owner['avatar_url'],
            'open_issues': count_issues(owner,'open'),
            'closed_issues': count_issues(owner,'closed'),
        })
    # Return owner stats for a particular milestone (filter issues to just include a milestone)
    def user_stats_by_milestone(m):
        return user_stats(issues_with_milestone.filter(lambda issue: issue['milestone']['title'] == m))
    # Return the number of issues with a particular state (and optionally a particular milestone)
    def num_issues(state, milestone=None):
        if milestone is None:
            issue_set = issues
        else:
            issue_set = issues_with_milestone.filter(lambda issue: issue['milestone']['title'] == milestone)
        return issue_set.filter(lambda issue: issue['state'] == state).count()
    # Two key things:
    # - we have to call coerce_to('array') since we get a sequence, and this will error otherwise
    # - we have to call list() on the stats to make sure we pull down all the data from a Cursor
    report = r.expr({
        # Timestamp computed server-side via JS so it matches the DB host's clock.
        'datetime': r.js('(new Date).toISOString()'),
        # First entry aggregates across every issue ('all'), followed by one
        # entry per distinct milestone title.
        'by_milestone': r.expr([{
            'milestone': 'all',
            'open_issues': num_issues('open'),
            'closed_issues': num_issues('closed'),
            'user_stats': user_stats(issues).coerce_to('array')
        }]).union(milestones.map(lambda m: {
            'milestone': m,
            'open_issues': num_issues('open', m),
            'closed_issues': num_issues('closed', m),
            'user_stats': user_stats_by_milestone(m).coerce_to('array')
        }))
    })
    # Add the generated report to the database
    print "Generating and inserting new user stats at %s" % datetime.now().strftime("%Y-%m-%d %H:%M")
    r.table(STATS_TABLE).insert(r.expr([report])).run(rdb_conn)
def node_property_map(g_id, prop_map_name, prop_map_type, func, conn):
    """Build a vertex property map for graph `g_id` and register it.

    `func` may be either a Python callable mapping a node document to a
    value, or a string of JS source that is evaluated server-side. The
    resulting map is stored under `prop_map_name` in `property_maps`.
    """
    graph = prep_pm(g_id)
    vprop = graph.new_vertex_property(prop_map_type)
    if type(func).__name__ in ('str', 'unicode'):
        # JS source: have the server pair each node id with the computed value.
        mapper = r.js("(function(node){return [node['id'], %s(node)]})" % func)
    else:
        def mapper(node):
            return node['id'], func(node)
    query = r.db(db_id(g_id)).table('nodes').map(mapper)
    for node_id, value in auto_reql(query, conn):
        vprop[node_id] = value
    property_maps[g_id][prop_map_name] = vprop
    return {'property_map': prop_map_name}
def link_property_map(g_id, prop_map_name, prop_map_type, func, conn):
    """Build an edge property map for graph `g_id` and register it.

    `func` may be either a Python callable mapping a link document to a
    value, or a string of JS source that is evaluated server-side. Link ids
    encode origin/edge-id/target joined by '_'; splitting recovers the
    triple needed to locate the edge object.
    """
    graph = prep_pm(g_id)
    eprop = graph.new_edge_property(prop_map_type)
    if type(func).__name__ in ('str', 'unicode'):
        # JS source: pair the decomposed link id with the computed value server-side.
        mapper = r.js("(function(link){return [link['id'].split('_'), %s(link)]})" % func)
    else:
        def mapper(link):
            return link['id'].split('_'), func(link)
    query = r.db(db_id(g_id)).table('links').map(mapper)
    for [origin, edge_id, target], value in auto_reql(query, conn):
        edge = get_edge(graph, origin, target, edge_id)
        eprop[edge] = value
    property_maps[g_id][prop_map_name] = eprop
    return {'property_map': prop_map_name}
import rethinkdb as r r.connect(port = 42865).repl() r.table_drop("foo").run() r.table_create("foo").run() print r.table("foo").index_create("sid", lambda x: r.js("1")).run()
import rethinkdb as r r.connect(port=42865).repl() r.table_drop("foo").run() r.table_create("foo").run() print r.table("foo").index_create("sid", lambda x: r.js("1")).run()