def __init__(self, host, port=27017, base='dmb', user=None, passwd=None):
    """Connect to MongoDB and select database *base*, storing it on self.conn.

    :param host: MongoDB server hostname.
    :param port: MongoDB server port (default 27017).
    :param base: database name to select (default 'dmb').
    :param user: accepted for interface compatibility; not used here.
    :param passwd: accepted for interface compatibility; not used here.

    Connection failures are logged at CRITICAL level, not raised.
    """
    try:
        self.conn = connection.Connection(host, port)[base]
        if self.conn:
            log.info('connected database')
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt still
    # propagate.  sys.exc_type/exc_value/exc_traceback are deprecated (and
    # gone in Python 3); sys.exc_info() is the supported, thread-safe API.
    except Exception:
        exc_type, exc_value, exc_tb = sys.exc_info()
        log.critical(
            '%d %s %s' % (exc_tb.tb_lineno, exc_type, exc_value))
#!/usr/bin/env python # -*- coding: utf-8 -*- from pymongo import connection conn = connection.Connection(host="localhost") def main(): db = conn['testdb'] collection = db['testcollection'] post = {"asdf": 3, "this": 5} collection.insert(post) cursor = collection.find({'asdf': 3}) for i in cursor: print i if __name__ == '__main__': main()
def main():
    """Benchmark pymongo: single and batch inserts, find_one, find, and
    indexed range queries, each over three document sizes."""
    connection._TIMEOUT = 60  # jack up the timeout

    c = connection.Connection()
    c.drop_database("benchmark")
    db = c.benchmark

    # (label, document) pairs driving every benchmark phase, in size order.
    datasets = (("small", small), ("medium", medium), ("large", large))

    for label, doc in datasets:
        timed("insert (%s, no index)" % label, insert,
              [db, '%s_none' % label, doc], setup_insert)

    for label, doc in datasets:
        # Build the index before timing inserts into the indexed collection.
        db['%s_index' % label].create_index("x", ASCENDING)
        timed("insert (%s, indexed)" % label, insert,
              [db, '%s_index' % label, doc])

    for label, doc in datasets:
        timed("batch insert (%s, no index)" % label, insert_batch,
              [db, '%s_bulk' % label, doc], setup_insert)

    for label, _ in datasets:
        timed("find_one (%s, no index)" % label, find_one,
              [db, '%s_none' % label, per_trial / 2])

    for label, _ in datasets:
        timed("find_one (%s, indexed)" % label, find_one,
              [db, '%s_index' % label, per_trial / 2])

    for label, _ in datasets:
        timed("find (%s, no index)" % label, find,
              [db, '%s_none' % label, per_trial / 2])

    for label, _ in datasets:
        timed("find (%s, indexed)" % label, find,
              [db, '%s_index' % label, per_trial / 2])

    # NOTE: range finds on the *unindexed* collections were commented out in
    # the original (presumably too slow); only the indexed variants are run.
    for label, _ in datasets:
        timed("find range (%s, indexed)" % label, find,
              [db, '%s_index' % label,
               {"$gt": per_trial / 2, "$lt": per_trial / 2 + batch_size}])
"--unique", dest="unique", action="store_true", default=False) (options, args) = parser.parse_args() params = { 'q': options.query, 'type': 'tweet', 'window': 'a', 'perpage': '100' } # http://code.google.com/p/otterapi/wiki/Resources if options.unique: params['nohidden'] = '0' searchurl = "http://otter.topsy.com/search.json?" c = connection.Connection() db = c.topsy def store(otterdata): if verbose: print otterdata['content'], otterdata['url'], otterdata[ 'trackback_total'] db.rawtopsy.save(otterdata) count = 1 while count <= 10: queryurl = searchurl + urlencode(params) + '&page=' + str(count) if verbose: print "* * * fetching ", queryurl