def connect(cfg):
    """Connect to a mongo database.

    :param cfg: Dictionary containing configuration for MongoDB connection
    :type cfg: dict
    :returns: a ``MotorClient`` delivered via ``gen.Return`` (tornado
        generator-coroutine style)
    """
    args = {
        'tz_aware': cfg.get('tz_aware', False),
        # Accept both spellings of the replica-set key.
        'replicaset': cfg.get('replicaset', cfg.get('replica_set')),
    }
    if 'uri' in cfg:
        args['host'] = cfg['uri']
    else:
        args['host'] = cfg.get('host', 'localhost')
        args['port'] = int(cfg.get('port', 27017))
    # BUG FIX: args is a dict of keyword arguments. The original called
    # MotorClient(*args), which unpacks only the dict KEYS as positional
    # arguments ('tz_aware', 'replicaset', ...) — it must be **args.
    client = MotorClient(**args)
    if cfg.get('user'):
        yield client[cfg['db']].authenticate(cfg['user'], cfg['password'])
    if cfg.get('read_preference'):
        read_preference = cfg['read_preference'].upper()
        client.read_preference = getattr(
            pymongo.read_preferences.ReadPreference, read_preference)
    raise gen.Return(client)
def initialize(connection_string):
    """Build the application's route table backed by a Motor database.

    :param connection_string: MongoDB connection uri for the 'experiment' db
    :returns: list of tornado ``url`` route specs
    """
    database = MotorClient(connection_string).experiment
    database.add_son_manipulator(ObjectIdAsTimestamp())
    session_status = model.SessionStatus()
    sessions = model.Sessions(database)
    seeder = model.Final()
    snippets = model.Snippets()
    emails = model.Emails(database)
    trials = model.Trials(database)
    template_path = './static/templates/'

    def static(pattern):
        # Shortcut: route a pattern to the generic page handler rooted at
        # template_path.
        return url(pattern, web.Page, dict(path=template_path))

    def template(file_, **kwargs):
        # Handler kwargs dict pointing at one template file, plus extras.
        t = dict(path=template_path + '%s' % file_)
        t.update(kwargs)
        return t

    routes = [
        url(r'/', web.Home,
            template('home.html', sessions=sessions, trials=trials)),
        static(r'/(questions)/?'),
        static(r'/(tutorial)/?'),
        static(r'/(experiment)/?'),
        url(r'/(complete)/?', web.CompletePage, dict(path=template_path)),
        url(r'/admin/?', web.Admin, template('admin.html', sessions=sessions)),
        url(r'/admin/login?', web.AdminLogin),
        url(r'/admin/logout?', web.AdminLogout),
        url(r'/admin/clear?', web.AdminClearUserCookie),
        url(r'/api/trial/?', api.Trial, dict(sessions=sessions, trials=trials)),
        url(r'/api/sessions/?', api.Sessions, dict(sessions=sessions)),
        #Test
        url(r'/api/form/?', api.Form,
            dict(sessions=sessions, session_status=session_status,
                 trials=trials)),  # post
        #url(r'/api/email/?', api.EmailForm, dict(emails=emails)),  # post
        url(r'/api/admin/db\.json', api.AdminDbDump, dict(sessions=sessions)),
        url(r'/api/admin/sessions/?', api.AdminSessions,
            dict(sessions=sessions)),
        url(r'/api/admin/emails/?', api.AdminEmails, dict(emails=emails)),
        url(r'/api/admin/sessions/([\w^/]+)?', api.AdminSessionData,
            dict(sessions=sessions)),
        url(r'/api/admin/trials/?', api.AdminTrials,
            dict(trials=trials, seeder=seeder)),
        url(r'/api/admin/trials/([\w^/]+)?', api.AdminUpdateTrials,
            dict(trials=trials)),
        url(r'/api/admin/duplicates/?', api.Duplicates,
            dict(sessions=sessions, trials=trials)),
        url(r'/api/admin/missing/?', api.Missing,
            dict(sessions=sessions, trials=trials)),
        url(r'/api/admin/snippets/?', api.AdminSnippets,
            dict(snippets=snippets)),
    ]
    return routes
def setUp(self):
    # Build the sync redis client (used for direct assertions), the async
    # redis client under test, and a default local Motor client, then run
    # the async fixture to completion before any test body executes.
    super(AsyncLRUTest, self).setUp()
    self.sr = redis.StrictRedis()   # synchronous redis, for assertions
    self.ar = AsyncStrictRedis()    # async redis client under test
    self.m = MotorClient()          # default localhost:27017
    self.c = self.m.my_test         # handle to the 'my_test' database
    self.io_loop.run_sync(self.setup_coro)
def __init__(self):
    """Wire up API routes, the Mongo database, a worker pool and an HMAC key."""
    routes = [
        (r'/api/signup', SignupHandler),
        (r'/api/token/get', TokenGetHandler),
        (r'/api/token/renew', TokenRenewHandler),
        (r'/api/key/get', GetKeyHandler),
        (r'/api/test', TestApiHandler),
    ]
    super(Application, self).__init__(routes, login_url='/login', debug=True)
    # MongoDB database handle.
    self.db = MotorClient(**MONGODB_HOST)[MONGODB_DBNAME]
    # Thread pool for long tasks such as password hashing.
    self.executor = ThreadPoolExecutor(WORKERS)
    # Random secret used to key HMAC computations.
    self.hmac_key = nacl.utils.random(size=APP_SECRETKEY_SIZE)
class RESTserver(AsyncTCPServer):
    """TCP server that hands each accepted stream to a HandlerClient.

    NOTE(review): ``db`` is created at class-definition (import) time and is
    shared by every connection — confirm this is intended for this deployment.
    """
    db = MotorClient('localhost', 27017)

    @coroutine
    def handle_stream(self, stream, address):
        # One HandlerClient per incoming connection; all share the class db.
        connection = HandlerClient(stream, address, RESTserver.db)
        yield connection.on_connect()
def __init__(self):
    """Configure routes, templates, security settings and shared services."""
    route_table = [
        (r'/', TestWebHandler),
        (r'/login', LoginHandler),
        (r'/logout', LogoutHandler),
        (r'/api/signup', SignupHandler),
        (r'/api/token/get', TokenGetHandler),
        (r'/api/token/renew', TokenRenewHandler),
        (r'/api/key/get', GetKeyHandler),
        (r'/api/test', TestApiHandler),
    ]
    super(WebApp, self).__init__(
        route_table,
        template_path=os.path.join(os.path.dirname(__file__), 'templates'),
        login_url='/login',
        debug=DEBUG,
        xsrf_cookies=True,
        cookie_secret=nacl.utils.random(size=64),
    )
    self.token_expires_time = TOKEN_EXPIRES_TIME
    # MongoDB database handle.
    self.db = MotorClient(**DBHOST)[DBNAME]
    # ThreadPoolExecutor for long tasks like password hashing.
    self.executor = ThreadPoolExecutor(WORKERS)
    # Secret key for HMAC.
    self.hmac_key = nacl.utils.random(size=64)
def setUp(self):
    '''
    Add test data to movies_test collection
    '''
    super(AsyncTest, self).setUp()
    mongo_cfg = config['mongo_connection']
    uri = 'mongodb://{user}:{password}@{host}:{port}/{name}'.format(**mongo_cfg)
    client = MotorClient(uri)
    self.collection = client[mongo_cfg['name']][MOVIES_TEST_COLLECTION]
    # Block until the async fixture has populated the collection.
    self.io_loop.run_sync(self.setup_coroutine)
def create_app(checker: BaseChecker, mongo_url: str = "mongodb://mongodb:27017") -> None:
    """Build the checker web application, bind its port and run the IO loop.

    This call blocks: ``IOLoop.current().start()`` does not return until the
    loop is stopped.
    """
    log = logging.getLogger(__name__)
    db = MotorClient(mongo_url)[checker.name]
    routes = [(r"/", EnoCheckerRequestHandler)]
    application = tornado.web.Application(
        routes, logger=log, checker=checker, mongo=db)
    application.listen(checker.checker_port)
    tornado.ioloop.IOLoop.current().start()
def setUpClass(self):
    # Replace the shared application's resources with test instances.
    # NOTE(review): unittest expects setUpClass to be a @classmethod taking
    # `cls`; this is written with `self` — confirm how the suite invokes it.
    self.my_app.db = MotorClient(**MONGODB_HOST)[MONGODB_DBNAME]
    self.my_app.executor = ThreadPoolExecutor(WORKERS)
    self.my_app.whitelist = WHITELIST
    self.my_app.hmac_key = random(size=APP_SECRETKEY_SIZE)
def setup(self):
    # Python 2 test fixture: builds the ACL collections and seed documents
    # (users, permissions, resources, groups) in a local test database.
    print "-setup"
    self.client = MotorClient()
    #print "c, ", self.client
    self.db = self.client['test_database']
    self.permissions = MotorCollection(self.db, 'acl_permissions')
    self.groups = MotorCollection(self.db, 'acl_groups')
    self.resources = MotorCollection(self.db, 'acl_resources')
    self.users = MotorCollection(self.db, 'users')
    # Insert two users; keep their ids for group membership below.
    self.user_ids = yield self.users.insert([{'name':'burger'},{'name':'paul'}])
    self.user_ids = [x for x in self.user_ids]
    self.admin_user = User("burger", self.user_ids[0])
    self.user_user = User("paul", self.user_ids[1])
    self.perm_ids = yield self.permissions.insert([x.to_primitive() for x in [
        Permission({"name":"read"}),
        Permission({"name":"write"}),
        Permission({"name":"update"}),
        Permission({"name":"delete"}),
    ]])
    self.res_ids = yield self.resources.insert([x.to_primitive() for x in [
        Resource({'name':'own data'}),
        Resource({'name':'others data'}),
        Resource({'name':'all data'})
    ]])
    # 'brugere' gets read/write on own data; 'super brugere' additionally
    # gets update on own data and read on others' data.
    self.group_ids = yield self.groups.insert([x.to_primitive() for x in [
        Group({"name":"brugere",
               "permissions":[
                   ResourcePermissionPair({"resource":"own data",
                                           "permissions":["read", "write"]})
               ],
               "members":[self.user_ids[0], self.user_ids[1]],
               }),
        Group({"name":"super brugere",
               "permissions":[
                   ResourcePermissionPair({"resource":'own data',
                                           "permissions":["read", "write", "update"]}),
                   ResourcePermissionPair({"resource":"others data",
                                           "permissions":["read"]})
               ],
               "members":[self.user_ids[0]],
               })
    ]])
    doc = yield self.groups.find_one({"name": "brugere"})
    members = doc['members']
    #print "members:", members
    #print "users", self.user_ids
    # assert self.admin_user._id in members
    assert True
def setUp(self):
    # Build redis/motor clients and patch the redis handler to use a mock
    # client; remaining patchers are retained (disabled) in the string below.
    super(FileNameSearcherTest, self).setUp()
    self.sr = redis.StrictRedis()
    self.ar = AsyncStrictRedis()
    self.m = MotorClient()
    self.c = self.m.fbt
    self.patcher1 = mock.patch('redis_handler.RedisHandler.redis_client',
                               return_value=mock_redis)
    self.patcher1.start()
    '''
    self.patcher2 = mock.patch("pymongo.MongoReplicaSetClient", return_value=sync_db)
    self.patcher3 = mock.patch("motor.MotorReplicaSetClient", return_value=db)
    #self.patcher4 = mock.patch("async_redis.asyncRedis.AsyncStrictRedis", spec=AsyncStrictRedis, return_value=AsyncStrictRedis())
    self.patcher2.start()
    self.patcher3.start()
    #self.patcher4.start()
    '''
    self.io_loop.run_sync(self.setup_coro)
def create_app():
    """Create the tornado web application with db-injected handlers."""
    client = MotorClient(DATABASE_SETTINGS['url'])
    db = client[DATABASE_SETTINGS['name']]
    routes = [
        web.url(r'/', IndexHandler, dict(db=db), name='index'),
        web.url(r'/index', IndexHandler, dict(db=db)),
        web.url(r'/success', SuccessHandler, dict(db=db)),
    ]
    return web.Application(routes, **TORNADO_SETTINGS)
def create_pool(para):
    """Create a MongoDB client (which manages its own connection pool).

    :param para: database configuration dict; must contain 'mongo_auth_url'
    :return: a MotorClient connected via the auth url
    """
    client = MotorClient(para['mongo_auth_url'])
    return client
def connect(cfg):
    """connect to mongo database

    :param cfg: Dictionary containing configuration for MongoDB connection
    :type cfg: dict
    """
    # BUG FIX: the original logged cfg['host'], which raises KeyError when
    # the host is meant to fall back to 'localhost' below.
    host = cfg.get('host', 'localhost')
    LOG.info("connecting to %s", host)
    client = MotorClient(host=host,
                         # int() for consistency with the sibling connect()
                         # helpers — config files often supply a string port.
                         port=int(cfg.get('port', 27017)),
                         tz_aware=cfg.get('tz_aware', False),
                         replicaset=cfg.get('replicaset', cfg.get('replica_set')))
    if cfg.get('user'):
        yield client[cfg['db']].authenticate(cfg['user'], cfg['password'])
    if cfg.get('read_preference'):
        read_preference = cfg['read_preference'].upper()
        client.read_preference = getattr(
            pymongo.read_preferences.ReadPreference, read_preference)
    raise gen.Return(client)
def get_db_conn(db_name='iot_simple_api'):
    """Return ``(db, conn)`` for the given database on MONGODB_URI.

    Also creates a capped 'stream' collection used to stream data.
    NOTE(review): create_collection fails if 'stream' already exists; the
    no-op callback discards that result — confirm this is intentional.
    """
    conn = MotorClient(MONGODB_URI)
    db = conn[db_name]
    # Create a capped collection to Stream data (fire-and-forget callback).
    db.create_collection('stream', capped=True,
                         size=DEFAULT_COLLECTION_SIZE,
                         callback=lambda x, y: (x, y))
    return db, conn
async def testFunc():
    # Ad-hoc manual test: append one document through Storage and print
    # the result. Credentials in the uri are redacted placeholders.
    motor = MotorClient(
        'mongodb://*****:*****@172.16.60.199:27019/IFData'
    )
    storage = Storage(motor.IFData)
    get = await storage.append('DBTest', '2020-07-25T15:37:45.318000+08:00',
                               'FetchTime', filter={'FetchTime': 1})
    print(get)
def connect(self, uri, db=None, w=1, j=True, **options):
    """Open sync and async clients for *uri* and select the database.

    :param uri: mongodb connection uri; may embed the database name
    :param db: database name override; falls back to the uri's database
    :param w: write concern (number of nodes to acknowledge writes)
    :param j: journal acknowledgement flag
    :raises ConfigurationError: when no database name can be determined
    NOTE(review): ``open_sync()``/``sync_client()`` belong to the pre-1.0
    Motor API — confirm the pinned motor version supports them.
    """
    io_loop = options.get('io_loop', None)  # read but not used further here
    self.client = MotorClient(uri, w=w, j=j, **options).open_sync()
    self.client_sync = self.client.sync_client()
    db = db or uri_parser.parse_uri(uri)['database']
    if not db:
        raise ConfigurationError('No database defined in uri')
    self.db = self.client[db]
    self.db_sync = self.client_sync[db]
def setUp(self):
    """Prepare the async Motor client and dump current docs for inspection."""
    super(MyTestCase, self).setUp()
    self.client = MotorClient()
    # wait for the setup coroutine to complete before beginning the test
    self.io_loop.run_sync(self.setup_coro)
    # Visual sanity check using a synchronous client.
    sync_client = MongoClient()
    for document in sync_client.test.collection.find():
        print(document)
def __test():
    """Smoke test: configure debug logging, register it, insert one doc."""
    import logging
    import sys
    from motor import MotorClient

    log_format = '[%(asctime)s] %(levelname)s %(module)s:%(lineno)d %(message)s'
    logging.basicConfig(stream=sys.stdout, level=logging.DEBUG,
                        format=log_format)
    register(logging)
    mongo = MotorClient()
    mongo.test.collection.insert({'message': 'hi!'})
def __init__(self):
    """Bind the withdraw-service database and its collections."""
    self.client = MotorClient()
    db = self.client.withdraw_db
    self.withdraw_db = db
    self.available_tokens = db.available_tokens
    self.withdraw_requests = db.withdraw_requests
    self.withdraw_custom_token_requests = db.withdraw_custom_token_requests
    self.create_token_requests = db.create_token_requests
    self.executed_withdraws = db.executed_withdraws
    # Contract addresses must be unique across tracked tokens.
    self.withdraw_db["tokens"].create_index("contract_address", unique=True)
class AsyncTest(AsyncTestCase):
    """Round-trip tests against the movies test collection (live MongoDB)."""

    def setUp(self):
        '''
        Add test data to movies_test collection
        '''
        super(AsyncTest, self).setUp()
        connection_string = 'mongodb://{user}:{password}@{host}:{port}/{name}'.format(**config['mongo_connection'])
        self.collection = MotorClient(connection_string)[config['mongo_connection']['name']][MOVIES_TEST_COLLECTION]
        self.io_loop.run_sync(self.setup_coroutine)

    @coroutine
    def setup_coroutine(self):
        # Reset the collection, then load the JSON fixture.
        movies = []
        with open('movies_sample.json') as json_file:
            movies = json.load(json_file)
        yield self.collection.remove()
        yield self.collection.insert(movies)

    def test_right_database(self):
        '''
        Gets the right database
        '''
        # NOTE(review): `motor_client` is not defined in this class —
        # presumably a module-level client; verify it exists at import time.
        self.assertEquals('uber', motor_client.name)

    @gen_test
    def test_operation(self):
        # A fixture document must be retrievable by title.
        movie = yield self.collection.find_one({'title': 'Copycat'})
        self.assertEqual('Copycat', movie['title'])

    @gen_test
    def tearDown(self):
        '''
        Remove test data
        '''
        yield self.collection.remove()
def __init__(self):
    # Singleton guard: instances must be obtained via the class accessor,
    # never by calling the constructor directly.
    assert not hasattr(self.__class__, '_instance'), 'Do not call constructor directly!'
    host = config.MONGODB_HOST
    port = config.MONGODB_PORT
    print("\033[1;30;46m|motor : host %s port %s \033[0m" % (config.MONGODB_HOST, config.MONGODB_PORT))
    db = config.MONGODB_NAME
    # Under docker use the host-list form; otherwise single host/port.
    self.client = MotorClient(config.MONGODB_HOST_LIST) if os.environ.get(
        "DOCKER") else MotorClient(host=host, port=port)
    self.db = self.client[db]
    self.user_coll = self.db['users']
    self.vendor_coll = self.db['vendor']
    self.seq_coll = self.db['seq']
    self.channel_coll = self.db['channel']
    self.cgoods_coll = self.db['cgoods']
    self.fgoods_coll = self.db['fgoods']
    self.worker_coll = self.db['worker']
    self.channel_titles_coll = self.db["channel_titles"]  # channel sheet headers
    self.in_channel_coll = self.db['in_channel']  # imported raw channel orders
    self.out_channel_coll = self.db['out_channel']  # orders exported from raw data to send to vendors
    self.down_channel_coll = self.db['down_channel']  # generated downloadable sheets for vendors
    self.in_vendor_coll = self.db['in_vendor']  # imported raw vendor receipts
    self.out_vendor_coll = self.db['out_vendor']  # vendor receipts split per channel
    self.down_vendor_coll = self.db['down_vendor']  # generated downloadable sheets for channels
    self.music_order_coll = self.db['music_order']  # generated downloadable AI orders
    self.channel_model_coll = self.db['channel_model']  # channel model
    self.vendor_model_coll = self.db['vendor_model']  # vendor model
    self.sys_order_coll = self.db['sys_order']  # system orders
    self.channel_form_coll = self.db["channel_form"]  # forms uploaded by channels
    self.vendor_form_coll = self.db["vendor_form"]  # forms uploaded by vendors
    self.order_statistics_coll = self.db["order_statistics"]  # order statistics
def connect(cfg):
    """Connect to a mongo database.

    :param cfg: Dictionary containing configuration for MongoDB connection
    :type cfg: dict
    :returns: a ``MotorClient`` delivered via ``gen.Return``
    """
    args = {
        'tz_aware': cfg.get('tz_aware', False),
        # Accept both spellings of the replica-set key.
        'replicaset': cfg.get('replicaset', cfg.get('replica_set')),
    }
    if 'uri' in cfg:
        args['host'] = cfg['uri']
    else:
        args['host'] = cfg.get('host', 'localhost')
        args['port'] = int(cfg.get('port', 27017))
    # BUG FIX: args holds keyword arguments; MotorClient(*args) passed the
    # dict KEYS as positional arguments. It must be **args.
    client = MotorClient(**args)
    if cfg.get('user'):
        yield client[cfg['db']].authenticate(cfg['user'], cfg['password'])
    if cfg.get('read_preference'):
        read_preference = cfg['read_preference'].upper()
        client.read_preference = getattr(pymongo.read_preferences.ReadPreference,
                                         read_preference)
    raise gen.Return(client)
def __init__(self, mongo_host, mongo_port, mongo_db_name, tornado_debug=None):
    """Initialize the REST application.

    :param mongo_host: MongoDB host
    :param mongo_port: MongoDB port
    :param mongo_db_name: database selected on the motor client
    :param tornado_debug: tornado debug flag; defaults to True when None
    """
    handlers = [
        (r"/user/(.*)", UserHandler),
        (r"/post/(.*)", PostHandler),
        (r"/posts/(.*)", PostsHandler)
    ]
    settings = dict(
        title="Test Mail",
        # BUG FIX: the original `tornado_debug or True` evaluated to True
        # even when the caller explicitly passed False, so debug mode could
        # never be disabled. Default to True only when the flag is unset.
        debug=True if tornado_debug is None else tornado_debug,
    )
    super(RestApplication, self).__init__(handlers, **settings)
    # tz_aware so datetimes read from mongo carry timezone info.
    motor = MotorClient(mongo_host, mongo_port, tz_aware=True)
    self.motor = motor[mongo_db_name]
def __init__(self, loop):
    """Set up routes, templates and shared services for the shortener app."""
    self._loop = loop
    routes = [
        (r'/', IndexHandler),
        (r'/api', ApiHandler),
        (r'/[a-zA-Z0-9]*/?', RedirectHandler),
    ]
    super(WebApp, self).__init__(
        routes,
        template_path=os.path.join(os.path.dirname(__file__), 'templates'),
        debug=True,
    )
    # TODO: move workers, db host params and domain to config
    self.db = MotorClient(host='127.0.0.1')['pushort']
    self.executor = ThreadPoolExecutor(32)
    # FIXME
    self.short_domain = 'http://127.0.0.1:8888'
def _init_connection(self):
    """Configure the MongoDB client lazily.

    Constructing the MotorClient does not open a socket; the connection is
    established on the first database operation. The trailing 'ismaster'
    ping verifies reachability (it is cheap and requires no auth).
    """
    if self._client is None:
        try:
            # uri = "mongodb://%s:%s@%s" % (
            #     quote_plus(self._user_name), quote_plus(self._password), self._host)
            self._client = MotorClient(
                host=self._host,
                port=self._port,
                username=self._user_name,
                password=self._password,
                authSource=self._db_name,
                maxPoolSize=self._max_pool_size,
                minPoolSize=self._min_pool_size,
                socketTimeoutMS=self._socket_timeout,
                connectTimeoutMS=self._conn_timeout,
                heartbeatFrequencyMS=self._heartbeat,
                serverSelectionTimeoutMS=self._server_select_timeout,
                connect=self._connect,
                appname=self._app_name)
            self._db = self._client[self._db_name]
        except Exception as e:
            # BUG FIX: traceback.print_exc() writes to stderr and returns
            # None, so the original logged the literal "None"; format_exc()
            # returns the traceback text. `e.message` does not exist on
            # Python 3 exceptions — use str(e). The three original except
            # branches (ConnectionFailure/OperationFailure/Exception) had
            # identical bodies and are collapsed into one.
            LOGGER.error(traceback.format_exc())
            LOGGER.error(message_format("mongo init error: " + str(e)))
    try:
        # The ismaster command is cheap and does not require auth.
        self._client.admin.command('ismaster')
    except Exception as e:
        LOGGER.error(traceback.format_exc())
        LOGGER.error(message_format("mongo init error: " + str(e)))
    else:
        LOGGER.info("mongo init success")
def __init__(self, database='proauth2', host='localhost', port=27017,
             user=None, pwd=None):
    '''
    initialize a mongodb connection to mongodb://user:pass@host:port
    use database
    '''
    if user and pwd:
        uri = 'mongodb://%s:%s@%s:%s' % (user, pwd, host, port)
    else:
        uri = 'mongodb://%s:%s' % (host, port)
    # Open synchronously and select the target database.
    self.db = MotorClient(uri).open_sync()[database]
def __init__(self, database='moth', host='localhost', port=27017,
             user=None, pwd=None):
    '''
    Moth uses a mongo back-end (although other data stores may be added
    later). __init__ creates a mongo connection with the passed credentials.
    '''
    # Prepend credentials only when both user and password were supplied.
    credentials = '%s:%s@' % (user, pwd) if user and pwd else ''
    connection_string = 'mongodb://%s%s:%s' % (credentials, host, port)
    self.db = MotorClient(connection_string).open_sync()[database]
async def main():
    """ Demo main updates a database via a Kew """
    # Async clients should be created inside an async scope.
    conn = MotorClient("mongodb://*****:*****@loc.test.com:12345")
    # The workers reach the database through the Kew.
    kew = MyKew(conn)
    # Feed each key/value pair into the Kew, then drain it.
    for pair in {"a": 1, "b": 2, "c": 3}.items():
        await kew.add(pair)
    await kew.finish()
def __init__(self):
    """Configure template/static paths, the flickr client and its db cache."""
    db = MotorClient(os.environ['MONGOHQ_URL']).rin_stg
    base = pathlib.Path(__file__).parent.resolve()
    settings = {
        'template_path': str(base / 'template'),
        'static_path': str(base / 'static'),
        # 'debug': True,
        'flickr': flickr.Flickr(os.environ['FLICKR_API_KEY']),
        'cache': flickr.FlickrCache(db),
    }
    routes = [
        (r'/', MainHandler),
        (r'/page/([1-9][0-9]{0,8})', MainHandler),
    ]
    super().__init__(routes, **settings)
def __init__(self):
    """Register the auth routes and shared resources for the application."""
    routes = [
        (r'/desqol-auth/?', WelcomeHandler),
        (r'/desqol-auth/api/?', WelcomeHandler),
        (r'/desqol-auth/api/registration', RegistrationHandler),
        (r'/desqol-auth/api/login', LoginHandler),
        (r'/desqol-auth/api/logout', LogoutHandler),
        (r'/desqol-auth/api/user', UserHandler),
    ]
    super(Application, self).__init__(routes)
    self.db = MotorClient(**MONGODB_HOST)[MONGODB_DBNAME]
    self.executor = ThreadPoolExecutor(WORKERS)
    self.whitelist = WHITELIST
    self.hmac_key = random(size=APP_SECRETKEY_SIZE)
def main():
    """Start the HTTP server with websocket, static and home-page routes."""
    static_dir = get_current_dir() + "/static"
    html_dir = get_current_dir() + "/html"
    app = Application(
        [url(r"/ws/(\w+)", WSHandler),
         url(r"/static/(.*)", StaticFileHandler, {'path': static_dir}),
         url(r"/new/(\w+)", HomeHandler),
         url(r"/(.+)", StaticFileHandler, {'path': html_dir}),
         url(r"/", HomeHandler)],
        debug=False)
    server = HTTPServer(app)
    cli = ArgumentParser()
    cli.add_argument("--mongo", help="Mongo hostname", default="localhost")
    cli.add_argument("--port", help="Server port", default=8888)
    cli.add_argument("--distance", help="Distance", default=1000)
    args = cli.parse_args()
    server.bind(args.port)
    server.start(1)  # single server process
    app.settings["db"] = MotorClient(args.mongo).db138
    app.settings["distance"] = args.distance
    IOLoop.current().start()
def run():
    # Build stand-in clients and a search object, then start the app with
    # every external dependency patched; the import of fbt_http happens
    # inside the patch context so module-level clients bind to the mocks.
    mock_redis = redis.StrictRedis()
    sync_db = MongoClient()
    db = MotorClient()
    mock_mapping = {
        "settings": {
            "refresh_interval": "5s",
            "number_of_shards": 2,
            "number_of_replicas": 1
        },
        "mappings": {
            "_default_": {
                "_all": {
                    "enabled": False
                }
            },
        }
    }
    # CAUTION: local search disabled!!!
    es = ESSearch(host="localhost", port=9200, index_name="test_index",
                  type_name="test_type", index_mapping=mock_mapping,
                  analyze_fields=[], none_analyze_fields=[])
    with mock.patch('redis_handler.RedisHandler.redis_client', return_value=mock_redis) as whate_ever:
        with mock.patch('redis_cluster_proxy.Redis', return_value=mock_redis) as whate_ever1:
            with mock.patch("pymongo.MongoReplicaSetClient", return_value=sync_db) as what_ever2:
                with mock.patch("motor.MotorReplicaSetClient", return_value=db) as what_ever3:
                    with mock.patch("es_search.ESSearch", return_value=es) as what_ever4:
                        from fbt_http import main
                        main()
# -*- coding: utf-8 -*- from __future__ import unicode_literals import tornado from tornado.options import define from motor import MotorClient define("port", default=8000, help="run on the given port", type=int) define("config", default=None, help="tornado config file") define("debug", default=False, help="debug mode") tornado.options.parse_command_line() uri = "mongodb://localhost:27017/pandora" client = MotorClient(uri, tz_aware=True) db = client.get_default_database() settings = { }
class AsyncLRUTest(AsyncTestCase):
    """Integration tests for the redis-backed LRU delegate over Motor/MongoDB.

    Requires live local redis and mongod instances; every test starts from a
    flushed redis and a dropped 'my_test' database (see setup_coro).
    """

    def setUp(self):
        super(AsyncLRUTest, self).setUp()
        self.sr = redis.StrictRedis()   # sync redis, used for assertions
        self.ar = AsyncStrictRedis()    # async redis client under test
        self.m = MotorClient()
        self.c = self.m.my_test
        self.io_loop.run_sync(self.setup_coro)

    @gen.coroutine
    def setup_coro(self):
        # Clean slate, then register the delegated collections.
        yield self.ar.flushdb()
        yield self.m.drop_database('my_test')
        self.redis_delegator = RedisDelegate(self.ar, self.c)
        tag = Tags()
        users = Users()
        fblog = Fblog()
        static_all_resources = StaticAllResources()
        self.redis_delegator.add_collection(tag)
        self.redis_delegator.add_collection(users)
        self.redis_delegator.add_collection(fblog)
        self.redis_delegator.add_collection(static_all_resources, 'all_resources')

    @gen_test
    def test_SetField_get_and_set(self):
        # Setting a set field marks the key modified and LRU-tracked.
        file_ids = self.redis_delegator.tags(1).file_ids
        sr = self.sr
        res = yield file_ids.get()
        self.assertEqual(res, set())
        file_ids_list = ['1', '2', '3']
        yield file_ids.set(file_ids_list)
        self.assertEqual(sr.scard(file_ids.key_name), 3)
        self.assertTrue(sr.sismember(KEYS_MODIFIED_SET, file_ids.key_name))
        self.assertTrue(sr.zrank(LRU_QUEUE, file_ids.key_name) is not None)
        res = yield file_ids.get()
        self.assertEqual(res, set(file_ids_list))
        self.stop()

    @gen_test
    def test_SetField_scard_sadd_srem_sismember(self):
        file_ids = self.redis_delegator.tags(1).file_ids
        sr = self.sr
        res = yield file_ids.scard()
        self.assertEqual(res, 0)
        res = yield file_ids.sadd('1')
        self.assertEqual(res, 1)
        res = yield file_ids.scard()
        self.assertEqual(res, 1)
        res = yield file_ids.sadd('1', '2', '3')
        self.assertEqual(res, 2)    # '1' already present: only 2 added
        res = yield file_ids.sismember('2')
        self.assertTrue(res)
        res = yield file_ids.srem('2')
        self.assertEqual(res, 1)
        res = yield file_ids.sismember('2')
        self.assertFalse(res)
        res = yield file_ids.scard()
        self.assertEqual(res, 2)
        self.stop()

    @gen_test
    def test_SetField_make_data_in_redis(self):
        # Data inserted directly into mongo is pulled through on get().
        file_ids = self.redis_delegator.tags(1).file_ids
        sr = self.sr
        file_ids_list = ['1', '2', '3', '4']
        yield self.c.tags.insert({'uid': 1, 'file_ids': file_ids_list})
        res = yield file_ids.get()
        self.assertEqual(res, set(file_ids_list))
        self.stop()

    @gen_test
    def test_ListField_get_and_set(self):
        log = self.redis_delegator.fblog(1).log
        sr = self.sr
        res = yield log.get()
        self.assertEqual(res, list())
        log_list = [{'1': [1, 2]}, {'2': {'2': 1}}, {'3': [1, 2]}]
        yield log.set(log_list)
        self.assertEqual(sr.llen(log.key_name), 3)
        self.assertTrue(sr.sismember(KEYS_MODIFIED_SET, log.key_name))
        self.assertTrue(sr.zrank(LRU_QUEUE, log.key_name) is not None)
        res = yield log.get()
        self.assertEqual(res, log_list)
        self.stop()

    @gen_test
    def test_ListField_llen_ltrim(self):
        log = self.redis_delegator.fblog(1).log
        sr = self.sr
        log_list = [{'1': [1, 2]}, {'2': {'2': 1}}, {'3': [1, 2]}, {'4': [2, 3, 4]}]
        yield log.set(log_list)
        res = yield log.llen()
        self.assertEqual(res, 4)
        res = yield log.ltrim(0, 5)    # range beyond length: no-op trim
        self.assertTrue(res)
        res = yield log.llen()
        self.assertEqual(res, 4)
        res = yield log.ltrim(0, 1)
        #self.assertEqual(res, 2)
        res = yield log.llen()
        self.assertEqual(res, 2)
        res = yield log.get()
        self.assertEqual(res, log_list[:2])
        self.stop()

    @gen_test
    def test_ListField_lindex(self):
        # lindex follows redis semantics: negative indices from the tail,
        # out-of-range returns None.
        log = self.redis_delegator.fblog(1).log
        sr = self.sr
        log_list = [{'1': [1, 2]}, {'2': {'2': 1}}, {'3': [1, 2]}, {'4': [2, 3, 4]}]
        yield log.set(log_list)
        res = yield log.lindex(0)
        self.assertEqual(res, log_list[0])
        res = yield log.lindex(1)
        self.assertEqual(res, log_list[1])
        res = yield log.lindex(-1)
        self.assertEqual(res, log_list[-1])
        res = yield log.lindex(-5)
        self.assertEqual(res, None)
        res = yield log.lindex(5)
        self.assertEqual(res, None)
        self.stop()

    @gen_test
    def test_ListField_lrem(self):
        # lrem count semantics: >0 from head, <0 from tail, 0 removes all.
        log = self.redis_delegator.fblog(1).log
        sr = self.sr
        log_list = [{'1': [1, 2]}, {'2': {'2': 1}}, {'3': [1, 2]}, {'2': {'2': 1}},
                    {'2': {'2': 1}}, {'4': [2, 3, 4]}, {'2': {'2': 1}}]
        yield log.set(log_list)
        res = yield log.lrem(1, {'2': {'2': 1}})
        res = yield log.get()
        del log_list[1]
        self.assertEqual(res, log_list)
        res = yield log.lrem(-1, {'2': {'2': 1}})
        res = yield log.get()
        del log_list[-1]
        self.assertEqual(res, log_list)
        res = yield log.lrem(0, {'2': {'2': 1}})
        res = yield log.get()
        del log_list[2:4]
        self.assertEqual(res, log_list)
        self.stop()

    @gen_test
    def test_ListField_lindex_rpush_lpop_lrange(self):
        log = self.redis_delegator.fblog(1).log
        sr = self.sr
        log_list = [{'1': [1, 2]}, {'4': [2, 3, 4]}, {'2': {'2': 1}}, {'3': [1, 2]}]
        res = yield log.rpush({'1': [1, 2]})
        self.assertEqual(res, 1)
        res = yield log.rpush({'4': [2, 3, 4]})
        self.assertEqual(res, 2)
        res = yield log.rpush({'2': {'2': 1}}, {'3': [1, 2]})
        self.assertEqual(res, 4)
        res = yield log.lrange(0, -1)
        self.assertEqual(res, log_list)
        res = yield log.lrange(0, 5)
        self.assertEqual(res, log_list)
        res = yield log.lrange(0, -5)
        self.assertEqual(res, [])
        res = yield log.lrange(0, 2)
        self.assertEqual(res, log_list[:3])
        res = yield log.lrange(1, 3)
        self.assertEqual(res, log_list[1:4])
        self.stop()

    @gen_test
    def test_ListField_make_data_in_redis(self):
        log = self.redis_delegator.fblog(1).log
        sr = self.sr
        log_list = [{'1': [1, 2]}, {'4': [2, 3, 4]}, {'2': {'2': 1}}, {'3': [1, 2]}]
        yield self.c.fblog.insert({'uid': 1, 'online_time': 1.0, 'log': log_list})
        #res = yield self.c.fblog.find_one()
        #print res
        res = yield log.get()
        self.assertEqual(res, log_list)
        self.stop()

    @gen_test
    def test_ZsetField_get_and_set(self):
        # Zset fields come back ordered by the score field ('isStar').
        friends = self.redis_delegator.users(1).friends
        sr = self.sr
        friends_list = [{'uid': 1, 'isStar': 0}, {'uid': 2, 'isStar': 0},
                        {'uid': 3, 'isStar': 1}, {'uid': 4, 'isStar': 0}]
        yield friends.set(friends_list)
        self.assertEqual(sr.zcard(friends.key_name), 4)
        self.assertTrue(sr.sismember(KEYS_MODIFIED_SET, friends.key_name))
        self.assertTrue(sr.zrank(LRU_QUEUE, friends.key_name) is not None)
        res = yield friends.get()
        self.assertEqual(res, sorted(friends_list, key = lambda x: x['isStar']))
        self.stop()

    @gen_test
    def test_ZsetField_zscore(self):
        friends = self.redis_delegator.users(1).friends
        sr = self.sr
        friends_list = [{'uid': 1, 'isStar': 5}, {'uid': 2, 'isStar': 0},
                        {'uid': 3, 'isStar': 1}, {'uid': 4, 'isStar': 0}]
        yield friends.set(friends_list)
        res = yield friends.zscore(1)
        self.assertEqual(res, 5)
        res = yield friends.zscore(3)
        self.assertEqual(res, 1)
        res = yield friends.zscore(8)    # unknown member: no score
        self.assertEqual(res, None)
        self.stop()

    @gen_test
    def test_ZsetField_zadd_zrem_zrange(self):
        friends = self.redis_delegator.users(1).friends
        sr = self.sr
        friends_list = [{'uid': 1, 'isStar': 5}, {'uid': 2, 'isStar': 0},
                        {'uid': 3, 'isStar': 1}, {'uid': 4, 'isStar': 0}]
        # zadd takes alternating score, member pairs.
        yield friends.zadd(5, 1, 0, 2, 1, 3, 0, 4)
        res = yield friends.zcard()
        self.assertEqual(res, len(friends_list))
        res = yield friends.zrange(0, -1)
        self.assertEqual(res, sorted(friends_list, key = lambda x: x['isStar']))
        res = yield friends.zrem(1)
        self.assertEqual(res, 1)
        res = yield friends.zrange(0, -1)
        self.assertEqual(res, sorted(friends_list[1:], key = lambda x: x['isStar']))
        res = yield friends.zrem(5)    # absent member: removes nothing
        self.assertEqual(res, 0)
        res = yield friends.zrange(0, -1)
        self.assertEqual(res, sorted(friends_list[1:], key = lambda x: x['isStar']))
        res = yield friends.zrem(2, 3)
        self.assertEqual(res, 2)
        res = yield friends.zrange(0, -1)
        self.assertEqual(res, sorted(friends_list[3:], key = lambda x: x['isStar']))
        self.stop()

    @gen_test
    def test_ZsetField_make_data_in_redis(self):
        friends = self.redis_delegator.users(1).friends
        sr = self.sr
        friends_list = [{'uid': 1, 'isStar': 5}, {'uid': 2, 'isStar': 0},
                        {'uid': 3, 'isStar': 1}, {'uid': 4, 'isStar': 0}]
        yield self.c.users.insert({'uid': 1, 'haslog': 1, 'test': 'xyz', 'friends': friends_list})
        #res = yield self.c.fblog.find_one()
        #print res
        res = yield friends.get()
        self.assertEqual(res, sorted(friends_list, key = lambda x: x['isStar']))
        self.stop()

    @gen_test
    def test_set_None(self):
        # Setting a plain field to None removes it from the redis hash and
        # nulls it in mongo without marking the key modified.
        users = self.redis_delegator.users(1)
        sr = self.sr
        friends_list = [{'uid': 1, 'isStar': 5}, {'uid': 2, 'isStar': 0},
                        {'uid': 3, 'isStar': 1}, {'uid': 4, 'isStar': 0}]
        yield self.c.users.insert({'uid': 1, 'haslog': 1, 'test': 'xyz', 'friends': friends_list})
        doc = yield self.c.users.find_one({'uid': 1})
        res = yield users.get('haslog')
        self.assertEqual(res, 1)
        res = yield users.get('test')
        self.assertEqual(res, 'xyz')
        res = yield users.find(1, ['test', 'xyz'])
        self.assertEqual(res, {'haslog': 1, 'test': 'xyz'})
        yield users.set('test', None)
        self.assertFalse(sr.sismember(KEYS_MODIFIED_SET, users._key))
        self.assertTrue(sr.zrank(LRU_QUEUE, users._key) is not None)
        self.assertEqual(sr.hgetall(users._key), {'haslog': '1'})
        doc1 = yield self.c.users.find_one({'uid': 1})
        doc['test'] = None
        self.assertEqual(doc, doc1)
        self.stop()

    @gen_test
    def test_common_field_get_and_set(self):
        users = self.redis_delegator.users(1)
        sr = self.sr
        friends_list = [{'uid': 1, 'isStar': 5}, {'uid': 2, 'isStar': 0},
                        {'uid': 3, 'isStar': 1}, {'uid': 4, 'isStar': 0}]
        yield self.c.users.insert({'uid': 1, 'haslog': 1, 'test': 'xyz', 'friends': friends_list})
        yield users.set('haslog', 0)
        res = yield users.get('haslog')
        self.assertEqual(res, 0)
        self.assertTrue(sr.sismember(KEYS_MODIFIED_SET, users._key))
        self.assertTrue(sr.zrank(LRU_QUEUE, users._key) is not None)
        self.stop()

    @gen_test
    def test_unset_field(self):
        # delete() removes the field from redis and from the mongo document.
        users = self.redis_delegator.users(1)
        sr = self.sr
        friends_list = [{'uid': 1, 'isStar': 5}, {'uid': 2, 'isStar': 0},
                        {'uid': 3, 'isStar': 1}, {'uid': 4, 'isStar': 0}]
        yield self.c.users.insert({'uid': 1, 'haslog': 1, 'test': 'xyz', 'friends': friends_list})
        doc = yield self.c.users.find_one({'uid': 1})
        res = yield users.get('test')
        self.assertEqual(res, 'xyz')
        yield users.delete(1, 'test')
        self.assertFalse(sr.sismember(KEYS_MODIFIED_SET, users._key))
        self.assertTrue(sr.zrank(LRU_QUEUE, users._key) is not None)
        self.assertEqual(sr.hgetall(users._key), {'haslog': '1'})
        doc1 = yield self.c.users.find_one({'uid': 1})
        doc.pop('test')
        self.assertEqual(doc, doc1)
        self.stop()

    @gen_test
    def test_static_find(self):
        # Static collections: find by single id, id list, and id list with
        # an additional filter document.
        sr = self.sr
        test_resources = [{'file_id': str(i), 'file_name': 'file' + str(i),
                           'main_type': i % 2, 'mtime': i, 'download_num': i}
                          for i in range(15)]
        test_resources_copy = copy.deepcopy(test_resources)
        for t in test_resources_copy:
            self.c.all_resources.insert(t)
        res = yield self.redis_delegator.all_resources.find('0')
        self.assertEqual(res, test_resources[0])
        self.assertTrue(sr.exists('all_resources:0'))
        res = yield self.redis_delegator.all_resources.find([str(_) for _ in range(6)])
        res = sorted(res, key=lambda x: x['file_id'])
        self.assertEqual(res, test_resources[:6])
        res = yield self.redis_delegator.all_resources.find([str(_) for _ in range(7, 12)], {'main_type': 1})
        res = sorted(res, key=lambda x: int(x['file_id']))
        self.assertEqual(res, test_resources[7:12:2])

    @gen_test
    def test_find_update(self):
        users = self.redis_delegator.users(1)
        sr = self.sr
        friends_list = [{'uid': 1, 'isStar': 5}, {'uid': 2, 'isStar': 0},
                        {'uid': 3, 'isStar': 1}, {'uid': 4, 'isStar': 0}]
        doc = {'haslog': 1, 'test': 'xyz', 'friends': sorted(friends_list, key = lambda x: x['isStar'])}
        yield self.c.users.insert({'uid': 1, 'haslog': 1, 'test': 'xyz', 'friends': sorted(friends_list, key = lambda x: x['isStar'])})
        #doc = yield self.c.users.find_one({'uid': 1})
        res = yield users.find(1)
        self.assertEqual(res, doc)
        update_doc = {'haslog': 3, 'friends': [{'uid': 2, 'isStar': 11}]}
        yield users.update(1, update_doc)
        doc.update(update_doc)
        res = yield users.find(1)
        self.assertEqual(res, doc)
        self.assertTrue(sr.sismember(KEYS_MODIFIED_SET, users._key))
        self.assertFalse(sr.zrank(LRU_QUEUE, users._key) is None)
        self.assertTrue(sr.sismember(KEYS_MODIFIED_SET, users.friends.key_name))
        self.assertFalse(sr.zrank(LRU_QUEUE, users.friends.key_name) is None)
        # Updating a field to None removes it from the cached document.
        update_doc = {'haslog': None}
        yield users.update(1, update_doc)
        #doc.update(update_doc)
        doc.pop('haslog')
        res = yield users.find(1)
        self.assertEqual(res, doc)
        update_doc = {'test': '123', 'abc': 'aaa'}
        yield users.update(1, update_doc)
        doc.update(update_doc)
        res = yield users.find(1)
        self.assertEqual(res, doc)
        self.stop()

    @gen_test
    def test_write_back(self):
        # Explicit write_back flushes cached updates into mongo.
        users = self.redis_delegator.users(1)
        sr = self.sr
        friends_list = [{'uid': 1, 'isStar': 5}, {'uid': 2, 'isStar': 0},
                        {'uid': 3, 'isStar': 1}, {'uid': 4, 'isStar': 0}]
        doc = {'haslog': 1, 'test': 'xyz', 'friends': sorted(friends_list, key = lambda x: x['isStar'])}
        yield self.c.users.insert({'uid': 1, 'haslog': 1, 'test': 'xyz', 'friends': sorted(friends_list, key = lambda x: x['isStar'])})
        #doc = yield self.c.users.find_one({'uid': 1})
        res = yield users.find(1)
        self.assertEqual(res, doc)
        update_doc = {'test': '123', 'abc': 'aaa'}
        yield users.update(1, update_doc)
        doc.update(update_doc)
        res = yield users.find(1)
        self.assertEqual(res, doc)
        #self.assertTrue(sr.sismember(KEYS_MODIFIED_SET, users._key))
        #self.assertFalse(sr.zrank(LRU_QUEUE, users._key) is None)
        yield users.write_back(1)
        #self.assertFalse(sr.sismember(KEYS_MODIFIED_SET, users._key))
        #self.assertTrue(sr.zrank(LRU_QUEUE, users._key) is None)
        doc1 = yield self.c.users.find_one({'uid': 1}, {'uid': 0, '_id': 0})
        self.assertEqual(doc, doc1)
        update_doc = {'haslog': 3, 'friends': [{'uid': 2, 'isStar': 11}]}
        yield users.update(1, update_doc)
        doc.update(update_doc)
        yield users.write_back(1, 'friends')
        yield users.write_back(1)
        doc1 = yield self.c.users.find_one({'uid': 1}, {'uid': 0, '_id': 0})
        self.assertEqual(doc, doc1)
        self.stop()

    @gen_test(timeout=10)
    def test_try_write_back(self):
        # Force a 1-second lock timeout so the scheduler's try_write_back
        # can acquire the lock after a short sleep.
        def side_effect(*args, **kwargs):
            kwargs['lock_timeout'] = 1
            return acquire_lock_with_timeout(*args, **kwargs)
        with mock.patch('redis_async_lru_scheduler.acquire_lock_with_timeout', side_effect=side_effect) as whate_ever:
            #with mock.patch('redis_async_lru_scheduler.LOCK_TIMEOUT', 1) as whate_ever:
            LOCK_TIMEOUT = 1
            users = self.redis_delegator.users(1)
            sr = self.sr
            friends_list = [{'uid': 1, 'isStar': 5}, {'uid': 2, 'isStar': 0},
                            {'uid': 3, 'isStar': 1}, {'uid': 4, 'isStar': 0}]
            doc = {'haslog': 1, 'test': 'xyz', 'friends': sorted(friends_list, key = lambda x: x['isStar'])}
            yield self.c.users.insert({'uid': 1, 'haslog': 1, 'test': 'xyz', 'friends': sorted(friends_list, key = lambda x: x['isStar'])})
            #doc = yield self.c.users.find_one({'uid': 1})
            res = yield users.find(1)
            self.assertEqual(res, doc)
            self.assertFalse(sr.sismember(KEYS_MODIFIED_SET, users._key))
            self.assertTrue(sr.zrank(LRU_QUEUE, users._key) is not None)
            update_doc = {'test': '123', 'abc': 'aaa'}
            yield users.update(1, update_doc)
            doc.update(update_doc)
            self.assertTrue(sr.sismember(KEYS_MODIFIED_SET, users._key))
            self.assertTrue(sr.zrank(LRU_QUEUE, users._key) is not None)
            time.sleep(LOCK_TIMEOUT+1)
            res = yield self.redis_delegator.try_write_back(self.ar, 'users:1')
            self.assertTrue(res)
            self.assertFalse(sr.sismember(KEYS_MODIFIED_SET, users._key))
            self.assertTrue(sr.zrank(LRU_QUEUE, users._key) is None)
            doc1 = yield self.c.users.find_one({'uid': 1}, {'uid': 0, '_id': 0})
            self.assertEqual(doc, doc1)
            update_doc = {'haslog': 3, 'friends': [{'uid': 2, 'isStar': 11}]}
            yield users.update(1, update_doc)
            doc.update(update_doc)
            self.assertTrue(sr.sismember(KEYS_MODIFIED_SET, users.friends.key_name))
            self.assertTrue(sr.zrank(LRU_QUEUE, users.friends.key_name) is not None)
            time.sleep(LOCK_TIMEOUT+1)
            res = yield self.redis_delegator.try_write_back(self.ar, 'users:1.friends')
            self.assertTrue(res)
            res = yield self.redis_delegator.try_write_back(self.ar, 'users:1')
            self.assertFalse(sr.sismember(KEYS_MODIFIED_SET, users.friends.key_name))
            self.assertTrue(sr.zrank(LRU_QUEUE, users.friends.key_name) is None)
            doc1 = yield self.c.users.find_one({'uid': 1}, {'uid': 0, '_id': 0})
            self.assertEqual(doc, doc1)
            self.stop()
def get_app(self):
    """Reset the test database and return the shared Tornado application.

    The Mongo endpoint and database name come from the DB_HOST / DB_NAME
    environment variables; DB_HOST falls back to a local mongod.
    """
    client = MotorClient(os.getenv('DB_HOST', 'mongodb://localhost:27017'))
    event_loop = asyncio.get_event_loop()
    # Each test run starts from an empty database.
    event_loop.run_until_complete(client.drop_database(os.getenv('DB_NAME')))
    return application
""" module to import all necessary modules """ # tornado modules from tornado.gen import coroutine from tornado.ioloop import IOLoop from tornado.escape import json_encode, json_decode from tornado.httpserver import HTTPServer from tornado.options import define, options from tornado.web import RequestHandler, Application, removeslash # other modules import json from os.path import join, dirname, isfile from motor import MotorClient import jwt from bson.objectid import ObjectId import os, uuid, sys from passlib.hash import pbkdf2_sha256 from datetime import datetime, timedelta secret = "gnberghnergb" db = MotorClient()["iwp"]
class FileNameSearcherTest(AsyncTestCase):
    """Integration tests for FileNameSearcher: a redis + mongo keyword index
    that maps file-name tokens to file ids (test data comes from the module
    fixtures test_filename / test_mtime / test_download_num / test_main_type)."""

    def setUp(self):
        super(FileNameSearcherTest, self).setUp()
        self.sr = redis.StrictRedis()        # sync redis, for direct assertions
        self.ar = AsyncStrictRedis()         # async redis used by the code under test
        self.m = MotorClient()
        self.c = self.m.fbt                  # 'fbt' test database
        self.patcher1 = mock.patch('redis_handler.RedisHandler.redis_client',
                                   return_value=mock_redis)
        self.patcher1.start()
        '''
        self.patcher2 = mock.patch("pymongo.MongoReplicaSetClient", return_value=sync_db)
        self.patcher3 = mock.patch("motor.MotorReplicaSetClient", return_value=db)
        #self.patcher4 = mock.patch("async_redis.asyncRedis.AsyncStrictRedis", spec=AsyncStrictRedis, return_value=AsyncStrictRedis())
        self.patcher2.start()
        self.patcher3.start()
        #self.patcher4.start()
        '''
        # Seed redis/mongo fixtures on the test IOLoop before any test runs.
        self.io_loop.run_sync(self.setup_coro)

    def tearDown(self):
        self.patcher1.stop()
        '''
        self.patcher2.stop()
        self.patcher3.stop()
        #self.patcher4.stop()
        '''

    @gen.coroutine
    def setup_coro(self):
        """Wipe redis + the fbt database, then seed users, per-user resource
        lists, and one indexed resource per fixture file name."""
        yield self.ar.flushdb()
        yield self.m.drop_database('fbt')
        yield self.ar.sadd(USER_IP_CACHE_SET_KEY, *[1, 2, 3, 4])
        yield self.c.users.insert({'uid': 1, 'friends': [{'uid': 2, 'isStar': 1},
                                                         {'uid': 3, 'isStar': 1}]})
        # uid 2/3 overlap on file ids 5-9; uid 4 is disjoint.
        yield self.c.resources_of_user.insert({'uid': 2, 'file_ids': [str(_) for _ in range(10)]})
        yield self.c.resources_of_user.insert({'uid': 3, 'file_ids': [str(_) for _ in range(5, 15)]})
        yield self.c.resources_of_user.insert({'uid': 4, 'file_ids': [str(_) for _ in range(15, 20)]})
        for i, file_name in enumerate(test_filename):
            file_id = str(i)
            resource = {'file_id': file_id, 'mtime': test_mtime[i],
                        'download_num': test_download_num[i], 'public': 1,
                        'main_type': test_main_type[i], 'file_name': file_name}
            # test_resources is a module-level list shared with the tests below.
            test_resources.append(resource)
            yield self.c.all_resources.insert(resource)
            yield FileNameSearcher().file_id_add_title(file_id, file_name)
            #yield [self.c.all_resources.insert(resource), FileNameSearcher().file_id_add_title(file_id, file_name)]

    @gen_test(timeout=3)
    def test_all(self):
        """End-to-end pass over every public query / maintenance API.

        Expected results are recomputed from the fixtures in-line; each query
        is checked as a (total_count, first_page_of_4) tuple.
        """
        # --- public keyword search, both sort orders ---
        search_key = test_keyword[3]
        res = yield FileNameSearcher().query_file_ids_by_file_name(search_key, 1, 4)
        res_list = list()
        for i, v in enumerate(test_filename):
            if search_key in v:
                # drop mongo's _id so dicts compare equal to query results
                test_resources[i].pop('_id')
                res_list.append(test_resources[i])
        res_list = sorted(res_list, key=lambda x: x["mtime"], reverse=True)
        res = yield FileNameSearcher().query_file_ids_by_file_name(search_key, 1, 4, sort="mtime")
        #print res_list[0:3], len(res_list)
        #print res, search_key
        self.assertEqual(res, (len(res_list), res_list[0:4]))
        #print res
        res_list = sorted(res_list, key=lambda x: x["download_num"], reverse=True)
        res = yield FileNameSearcher().query_file_ids_by_file_name(search_key, 1, 4, sort="download_num")
        self.assertEqual(res, (len(res_list), res_list[0:4]))
        # No fixture matches these queries -> empty result.
        res = yield FileNameSearcher().query_file_ids_by_file_name('a b', 1, 4, sort="download_num")
        self.assertEqual(res, (0, []))
        res = yield FileNameSearcher().query_file_ids_by_file_name('xxxxxxxx', 1, 4, sort="download_num")
        self.assertEqual(res, (0, []))
        # --- private (per-user) search: uid 1 reaches file ids 0-14 via friends ---
        # test for query_file_ids_by_file_name_private
        search_key = test_keyword[1]
        print 'search_key', search_key
        res_list = list()
        for i, v in enumerate(test_filename[:15]):
            if search_key in v:
                if '_id' in test_resources[i]:
                    test_resources[i].pop('_id')
                res_list.append(test_resources[i])
        res = yield FileNameSearcher().query_file_ids_by_file_name_private(1, search_key, 1, 4, sort="mtime")
        res_list = sorted(res_list, key=lambda x: x["mtime"], reverse=True)
        self.assertEqual(res, (len(res_list), res_list[0:4]))
        # --- all private resources for uid 1 ---
        # test for query_file_ids_by_file_name_private
        res_list = list()
        for i, v in enumerate(test_filename[:15]):
            if '_id' in test_resources[i]:
                test_resources[i].pop('_id')
            res_list.append(test_resources[i])
        res = yield FileNameSearcher().get_private_resources(1, 1, 4, sort="mtime")
        res_list = sorted(res_list, key=lambda x: x["mtime"], reverse=True)
        self.assertEqual(res, (15, res_list[0:4]))
        # --- private resources filtered by main_type ---
        # test for query_file_ids_by_file_name_private
        print test_main_type
        main_type = test_main_type[0]
        res_list = list()
        for i, v in enumerate(test_filename[:15]):
            if main_type == test_resources[i]['main_type']:
                if '_id' in test_resources[i]:
                    test_resources[i].pop('_id')
                res_list.append(test_resources[i])
        res_list = sorted(res_list, key=lambda x: x["download_num"], reverse=True)
        res = yield FileNameSearcher().get_private_resources_by_type(1, main_type, 1, 4, sort="download_num")
        print len(res_list)
        self.assertEqual(res, (len(res_list), res_list[0:4]))
        ##################### test_basic ###################
        # --- index maintenance: sync API adds/removes keyword -> file-id links ---
        # sync
        test_filename1 = ' '.join(test_keyword[:3])
        test_file_id = '21'
        filename_searcher = FileNameSearcher()
        rdb = filename_searcher.db          # searcher's own redis handle
        mongo = filename_searcher.mongoDB   # searcher's own mongo handle
        filename_searcher.file_id_add_title_sync(test_file_id, test_filename1)
        for k in test_keyword[:3]:
            key_name = make_key_for_keyword(k)
            self.assertTrue(rdb.sismember(key_name, test_file_id))
        for res in mongo.key_fileids.find({'key': {"$in": test_keyword[:3]}}):
            self.assertTrue(test_file_id in res['file_ids'])
        filename_searcher.remove_file_id_sync(test_file_id, test_filename1)
        for k in test_keyword[:3]:
            key_name = make_key_for_keyword(k)
            self.assertFalse(rdb.sismember(key_name, test_file_id))
        for res in mongo.key_fileids.find({'key': {"$in": test_keyword[:3]}}):
            self.assertFalse(test_file_id in res['file_ids'])
        # --- same round-trip through the async API ---
        # async
        yield filename_searcher.file_id_add_title(test_file_id, test_filename1)
        for k in test_keyword[:3]:
            key_name = make_key_for_keyword(k)
            self.assertTrue(rdb.sismember(key_name, test_file_id))
        for res in mongo.key_fileids.find({'key': {"$in": test_keyword[:3]}}):
            self.assertTrue(test_file_id in res['file_ids'])
        yield filename_searcher.remove_file_id(test_file_id, test_filename1)
        for k in test_keyword[:3]:
            key_name = make_key_for_keyword(k)
            self.assertFalse(rdb.sismember(key_name, test_file_id))
        for res in mongo.key_fileids.find({'key': {"$in": test_keyword[:3]}}):
            self.assertFalse(test_file_id in res['file_ids'])
        # --- rebuild the whole index from mongo and verify it matches a
        #     locally computed keyword -> file-id map ---
        # test init
        filename_searcher.drop()
        filename_searcher.init_from_mongo()
        key_fileids_dict = defaultdict(list)
        for i, f in enumerate(test_filename):
            i = str(i)
            for k in f.split():
                key_fileids_dict[k].append(i)
        for k in test_keyword:
            key_name = make_key_for_keyword(k)
            self.assertEqual(sorted(rdb.smembers(key_name)), sorted(key_fileids_dict[k]))
        for res in mongo.key_fileids.find():
            self.assertEqual(sorted(res['file_ids']), sorted(key_fileids_dict[res['key']]))
        # --- incremental scan: a no-op scan changes nothing; inserting a newer
        #     resource makes scan_from_mongo return the new max mtime ---
        # test add
        filename_searcher.scan_from_mongo(0)
        for k in test_keyword:
            key_name = make_key_for_keyword(k)
            self.assertEqual(sorted(rdb.smembers(key_name)), sorted(key_fileids_dict[k]))
        for res in mongo.key_fileids.find():
            self.assertEqual(sorted(res['file_ids']), sorted(key_fileids_dict[res['key']]))
        print 'add test'
        last_mtime = max(test_mtime)
        t_mtime = last_mtime + 1
        resource = {'file_id': '21', 'mtime': t_mtime, 'download_num': 11,
                    'public': 1, 'main_type': 1, 'file_name': 'test'}
        yield self.c.all_resources.insert(resource)
        res = filename_searcher.scan_from_mongo(last_mtime)
        self.assertEqual(t_mtime, res)
        self.stop()
__author__ = 'ilya'

from motor import MotorClient, Op
from tornado.web import *
import tornado.gen as gen
import tornado.ioloop
import bson.json_util as json
from bson.objectid import ObjectId
from random import randrange

client = MotorClient()
client.open_sync()
db = client['dogentine']


class CardsHandler(RequestHandler):
    """REST handler for the 'cards' collection: GET lists all cards,
    POST inserts the JSON body as a new card."""

    @gen.engine
    @asynchronous
    def get(self):
        """Return every card in the collection as a JSON array."""
        cards_cursor = db.cards.find()
        cards = yield Op(cards_cursor.to_list)
        # bson.json_util handles ObjectId / datetime serialization.
        self.write(json.dumps(cards))
        self.finish()

    @gen.engine
    @asynchronous
    def post(self):
        """Insert the request body as a new card and echo the insert result."""
        card = json.loads(self.request.body)
        card = yield Op(db.cards.insert, card)
        self.write(json.dumps(card))
        # BUG FIX: with @asynchronous the connection stays open until finish()
        # is called explicitly; without it POST requests would hang forever.
        self.finish()
class Store(object):
    """Mongo-backed pub/sub store: clients subscribe to a query and are pushed
    matching documents as they appear in a per-collection capped 'opslog'
    collection (tailed from a greenlet)."""

    KEEP_ALIVE_TIMEOUT = 60  # Seconds
    OPSLOG_SIZE = 1000000    # 1 MB

    def __init__(self):
        self.client = None
        self.db = None
        # query_key (JSON string) -> set of subscribed request objects
        self.subscriptions = {}

    def connect(self, uri, db=None, w=1, j=True, **options):
        """Open sync + async Motor clients for `uri`.

        The database name is taken from `db` or parsed out of the URI;
        raises ConfigurationError if neither provides one.
        """
        io_loop = options.get('io_loop', None)
        self.client = MotorClient(uri, w=w, j=j, **options).open_sync()
        self.client_sync = self.client.sync_client()
        db = db or uri_parser.parse_uri(uri)['database']
        if not db:
            raise ConfigurationError('No database defined in uri')
        self.db = self.client[db]
        self.db_sync = self.client_sync[db]
        #PeriodicCallback(self.client.alive, Store.KEEP_ALIVE_TIMEOUT,
        #                 io_loop=io_loop).start()

    def opslog(self, collection):
        """Return (creating if needed) the capped '<collection>.opslog'
        collection used as the change feed for `collection`."""
        collection_opslog = '{0}.opslog'.format(collection)
        try:
            defer(self.db.create_collection, collection_opslog,
                  capped=True, size=Store.OPSLOG_SIZE)
            # Prime opslog as tailable cursors die on empty collections
            defer(self.db[collection_opslog].insert, {})
        except CollectionInvalid:
            # Already exists — reuse it.
            pass
        return self.db[collection_opslog]

    def _monitor(self, collection, query_key, query):
        """Tail the opslog for `collection` and push matching ops to every
        subscriber of `query_key`; exits (and drops the subscription entry)
        when no subscribers remain or on error.  Runs inside a greenlet."""
        # TODO: Handle doc removal
        # TODO: Batch requests
        try:
            # Rewrite the user query to match the embedded 'doc' field, and
            # only consider ops newer than "now" (ObjectId encodes a timestamp).
            query = {'doc.{0}'.format(k): v for k, v in query.items()}
            query['_id'] = {'$gt': ObjectId.from_datetime(datetime.utcnow())}
            opslog = self.opslog(collection)
            cursor = opslog.find(query, tailable=True, await_data=True)
            item = tail(cursor.tail)
            while True:
                ops, err = next(item)
                if err:
                    raise err
                print(ops)
                if not ops['doc'].get('_id'):
                    _log.warn('Opslog for collection "{0}" contains a '
                              'document with no _id'.format(collection))
                    continue
                # NOTE(review): only 'insert' and 'update' ops are handled; any
                # other op would reuse the previous loop iteration's `doc`.
                if ops['op'] == 'insert':
                    doc = ops['doc']
                elif ops['op'] == 'update':
                    doc = ops['updated']
                response = json.dumps({
                    'response': 'subscribe',
                    'query': query_key,
                    'collection': collection,
                    'result': [doc],
                })
                # Iterate a copy so closed requests can be pruned mid-loop.
                for request in list(self.subscriptions[query_key]):
                    if request.is_closed:
                        self.subscriptions[query_key].remove(request)
                        continue
                    request.send(response)
                if not self.subscriptions[query_key]:
                    break
        except Exception as e:
            _log.exception(e)
        finally:
            if query_key in self.subscriptions:
                del self.subscriptions[query_key]

    def subscribe(self, request, collection, query_key):
        """Register `request` for live updates matching `query_key` (a JSON
        query string) and immediately send the current matching documents."""
        # TODO: Inject security policies/adapters/transforms here
        query = json.loads(query_key)
        if query_key in self.subscriptions:
            self.subscriptions[query_key].add(request)
        else:
            # First subscriber: start the tailing greenlet for this query.
            Greenlet(self._monitor).switch(collection, query_key, query)
            self.subscriptions[query_key] = {request}
        # Initial snapshot (up to 1000 docs) of the current query results.
        docs = defer(self.db[collection].find(query).to_list, 1000)
        request.send(json.dumps({
            'response': 'subscribe',
            'query': query_key,
            'collection': collection,
            'result': docs
        }))

    def __getattr__(self, name):
        # Attribute access falls through to item access below.
        return self[name]

    def __getitem__(self, name):
        # NOTE(review): `model` is not defined in this chunk — presumably a
        # module-level registry this Collection wraps; verify at module scope.
        return Collection(model, name)
class TestTornadoMotorAcl:
    """ACL tests: seeds users/permissions/resources/groups into Mongo, then
    checks that handler .post() yields 200 for allowed access and 403 for
    denied access ('burger' is in both groups, 'paul' only in 'brugere')."""

    io_loop = IOLoop.instance()

    @gen_test
    def setup(self):
        """Build the ACL fixture set in 'test_database' before each test."""
        print "-setup"
        self.client = MotorClient()
        #print "c, ", self.client
        self.db = self.client['test_database']
        self.permissions = MotorCollection(self.db, 'acl_permissions')
        self.groups = MotorCollection(self.db, 'acl_groups')
        self.resources = MotorCollection(self.db, 'acl_resources')
        self.users = MotorCollection(self.db, 'users')
        self.user_ids = yield self.users.insert([{'name':'burger'},{'name':'paul'}])
        self.user_ids = [x for x in self.user_ids]
        self.admin_user = User("burger", self.user_ids[0])
        self.user_user = User("paul", self.user_ids[1])
        # Four atomic permissions.
        self.perm_ids = yield self.permissions.insert([x.to_primitive() for x in [
            Permission({"name":"read"}),
            Permission({"name":"write"}),
            Permission({"name":"update"}),
            Permission({"name":"delete"}),
        ]])
        # Three protected resources.
        self.res_ids = yield self.resources.insert([x.to_primitive() for x in [
            Resource({'name':'own data'}),
            Resource({'name':'others data'}),
            Resource({'name':'all data'})
        ]])
        # 'brugere' (both users): read/write own data.
        # 'super brugere' (burger only): adds update on own data + read others.
        self.group_ids = yield self.groups.insert([x.to_primitive() for x in [
            Group({"name":"brugere",
                   "permissions":[
                       ResourcePermissionPair({"resource":"own data",
                                               "permissions":[
                                                   "read",
                                                   "write"
                                               ]
                                               })
                   ],
                   "members":[self.user_ids[0], self.user_ids[1]],
                   }),
            Group({"name":"super brugere",
                   "permissions":[
                       ResourcePermissionPair({"resource":'own data',
                                               "permissions":[
                                                   "read",
                                                   "write",
                                                   "update"
                                               ]
                                               }),
                       ResourcePermissionPair({"resource":"others data",
                                               "permissions":[
                                                   "read"
                                               ]
                                               })
                   ],
                   "members":[self.user_ids[0]],
                   })
        ]])
        doc = yield self.groups.find_one({"name": "brugere"})
        members = doc['members']
        #print "members:", members
        #print "users", self.user_ids
        # assert self.admin_user._id in members
        assert True

    @gen_test
    def teardown(self):
        # Drop the fixture database after each test.
        self.db = None
        self.client.drop_database('test_database')

    @gen_test
    def test_setup(self):
        """Sanity-check the fixture counts inserted by setup()."""
        assert len(self.user_ids) == 2
        assert len(self.perm_ids) == 4
        assert len(self.res_ids) == 3
        assert len(self.group_ids) == 2

    @gen_test
    def test_that_user_can_read_own_data(self):
        handler = OwnDataHandler(self.db, self.user_user)
        yield handler.post()
        assert handler.status == 200
        #assert handler.status == 200

    @gen_test
    def test_that_user_can_not_read_others_data(self):
        # 'paul' is only in 'brugere', which grants nothing on others' data.
        handler = OthersDataHandler(self.db, self.user_user)
        yield handler.post()
        assert handler.status == 403

    @gen_test
    def test_that_admin_can_read_own_data(self):
        handler = OwnDataHandler(self.db, self.admin_user)
        yield handler.post()
        assert handler.status == 200
        #assert handler.status == 200

    @gen_test
    def test_that_admin_can_read_others_data(self):
        # 'burger' gains read on others' data via 'super brugere'.
        handler = OthersDataHandler(self.db, self.admin_user)
        yield handler.post()
        assert handler.status == 200

    @gen_test
    def test_that_admin_can_read_others_and_own_data(self):
        handler = OwnAndOthersDataHandler(self.db, self.admin_user)
        yield handler.post()
        assert handler.status == 200