Example #1
 def __init__(self):
     self.redis_handler = RedisHandler(
         host=app.config.get('REDIS_HOST'),
         port=app.config.get('REDIS_PORT_NO'),
         password=app.config.get('REDIS_PASSWORD'),
         idle_timeout=app.config.get('REDIS_IDLE_TIMEOUT'),
     )
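     # Re-bind get_tag_resources through _cache_function (defined elsewhere in
     # the source class, presumably memoizing results via the Redis handler above).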
     self.get_tag_resources = self._cache_function(self.get_tag_resources)
Example #2
def updateFriendsById(uid):
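    # Re-cache a user's friends list: fetch only the "friends" field from
    # MongoDB and store it as JSON in the FRIENDINFO hash (a Tornado coroutine
    # in the source project, hence the yield below).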
    uid = long(uid)
    uid_tmp = str(uid)
    user = yield motorclient.fbt_realtime.users.find_one({"uid": uid}, {
        "_id": 0,
        "friends": 1
    })
    if user:
        RedisHandler.f_hset(FRIENDINFO, uid_tmp, json.dumps(user["friends"]))
Example #3
 def vcode(self, phone, *args):
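     # Request a verification code for the given phone number; clearing the
     # vcode counters first keeps rate limiting from rejecting the request.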
     r = RedisHandler()
     r.clear_all_vcode_count()
     data = [self.mode + u'/1/user/vcode', 'phone=' + str(phone)]
     for arg in args:
         data.append(arg)
     result = self.send_request_for_dx(*data)
     assert result['code'] == '0'
     return result['dc']
Example #4
def getUserByNick(nick_name):
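    # Read-through cache: try the NICKINFO hash first and fall back to MongoDB,
    # populating the hash on a miss. raise gen.Return(...) is how Tornado
    # coroutines return values on Python 2.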
    user = RedisHandler.f_hget(NICKINFO, nick_name)
    if user:
        raise gen.Return(user)
    else:
        user = yield motorclient.fbt_realtime.users.find_one({"nick_name": nick_name}, {"_id": 0, "user": 1})
        if user:
            RedisHandler.f_hset(NICKINFO, nick_name, user["user"])
            raise gen.Return(user["user"])
        else:
            raise gen.Return(None)
Example #5
def cacheUserInfo(user):
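    # Cache a profile snapshot and the friends list in Redis. A truthy gender
    # is normalized: u"男" ("male" in Chinese) -> 1, anything else -> 0.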
    uid_tmp = str(user["uid"])
    gender = user["gender"]
    if gender:
        if gender == u"男":
            gender = 1
        else:
            gender = 0
    info = json.dumps({"nick_name": user["nick_name"], "icon": user["icon"], 
                    "user": user["user"], "gender": gender, "school": user["school"]})
    RedisHandler.f_hset(FRIENDINFO, uid_tmp, json.dumps(user["friends"]))
    RedisHandler.f_hset(USERINFO, uid_tmp, info)
Example #6
def getFriendsById(uid):
    uid = long(uid)
    uid_tmp = str(uid)
    info = RedisHandler.f_hget(FRIENDINFO, uid_tmp)
    if info:
        raise gen.Return(json.loads(info))
    else:
        user = yield motorclient.fbt_realtime.users.find_one({"uid": uid}, {"_id": 0, "friends": 1})
        if user:
            RedisHandler.f_hset(FRIENDINFO, uid_tmp, json.dumps(user["friends"]))
            raise gen.Return(user["friends"])
        else:
            raise gen.Return(None)
Example #7
 def get_vcode_by_redis(self, phone, dc):
     r = RedisHandler()
     m = Mongo()
     vcode = False
     if dc.find('#') != -1:
         dc = dc.split('#')[0]
     try:
         mid = m.get_mid(dc)
         vcode = r.get_vcode(str(mid), str(phone))
     except Exception as e:
         print e
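     # The return inside finally always runs, so the caller gets a value even
     # on failure (vcode stays False when an exception was caught above).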
     finally:
         r.clear_vcode_count(self.tid)
         return vcode
Example #8
def getUserByNick(nick_name):
    user = RedisHandler.f_hget(NICKINFO, nick_name)
    if user:
        raise gen.Return(user)
    else:
        user = yield motorclient.fbt_realtime.users.find_one(
            {"nick_name": nick_name}, {
                "_id": 0,
                "user": 1
            })
        if user:
            RedisHandler.f_hset(NICKINFO, nick_name, user["user"])
            raise gen.Return(user["user"])
        else:
            raise gen.Return(None)
Example #9
def getUserInfoById(uid):
    uid = long(uid)
    uid_tmp = str(uid)
    info = RedisHandler.f_hget(USERINFO, uid_tmp)
    if info:
        raise gen.Return(json.loads(info))
    else:
        user = yield motorclient.fbt_realtime.users.find_one({"uid": uid},
                                                             {"_id": 0})
        if user:
            cacheUserInfo(user)
            gender = user["gender"]
            if gender:
                if gender == u"男":
                    gender = 1
                else:
                    gender = 0
            info = {
                "nick_name": user["nick_name"],
                "icon": user["icon"],
                "user": user["user"],
                "gender": gender,
                "school": user["school"]
            }
            raise gen.Return(info)
        else:
            raise gen.Return(None)
Example #10
def check_token(uid, token):
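    # Two-tier token check: consult the fast in-memory cache first, fall back
    # to Redis, and re-warm the cache for 7200 seconds on a Redis hit.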
    if util.MemCache.get(str(uid), LOGIN_T) != token:
        store_t = RedisHandler.f_get(str(uid), RedisHandler.type_token)
        if store_t != token:
            return False
        else:
            util.MemCache.set(str(uid), token, LOGIN_T, 7200)
    return True
Example #11
def main():
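    # Load settings from .env and feed the quiz files into Redis. Note that
    # os.getenv returns strings, so get_quiz_for_bot is presumably expected to
    # coerce MAX_NUMBER_READ_FILES itself.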
    load_dotenv()
    max_number_read_files = os.getenv('MAX_NUMBER_READ_FILES')
    files_path = os.getenv('QUIZ_FILES_PATH')
    redis_base = RedisHandler()

    get_quiz_for_bot(files_path, redis_base, max_number_read_files)
    print('Done!')
Example #12
def getFriendsById(uid):
    uid = long(uid)
    uid_tmp = str(uid)
    info = RedisHandler.f_hget(FRIENDINFO, uid_tmp)
    if info:
        raise gen.Return(json.loads(info))
    else:
        user = yield motorclient.fbt_realtime.users.find_one({"uid": uid}, {
            "_id": 0,
            "friends": 1
        })
        if user:
            RedisHandler.f_hset(FRIENDINFO, uid_tmp,
                                json.dumps(user["friends"]))
            raise gen.Return(user["friends"])
        else:
            raise gen.Return(None)
Example #13
def cacheUserInfo(user):
    uid_tmp = str(user["uid"])
    gender = user["gender"]
    if gender:
        if gender == u"男":
            gender = 1
        else:
            gender = 0
    info = json.dumps({
        "nick_name": user["nick_name"],
        "icon": user["icon"],
        "user": user["user"],
        "gender": gender,
        "school": user["school"]
    })
    RedisHandler.f_hset(FRIENDINFO, uid_tmp, json.dumps(user["friends"]))
    RedisHandler.f_hset(USERINFO, uid_tmp, info)
Example #14
 def __init__(self):
     self.index_keys = None
     self.redis_handler = RedisHandler(
         host=app.config.get('REDIS_HOST'),
         port=app.config.get('REDIS_PORT_NO'),
         password=app.config.get('REDIS_PASSWORD'),
         idle_timeout=app.config.get('REDIS_IDLE_TIMEOUT'),
     )
     self.pricing_api = AwsPricingApi()
     # AWS details
     self.apikey = app.config.get('AWS_ACCESS_KEY_ID')
     self.apisecret = app.config.get('AWS_SECRET_ACCESS_KEY')
     self.owner_id = app.config.get('AWS_OWNER_ID')
     self.regions = app.config.get('REGIONS')
     # Timeout
     self.expire = app.config.get('EXPIRE_DURATION')
     self.sync_timeout = app.config.get('SYNC_TIMEOUT')
Example #15
 def get(self):
     if self.get_argument("key", "") == "fbt":
         login_user = RedisHandler.f_hgetall(LOGIN_U)
         if not login_user:
             return
         self.render("user.html", title="login user", items=login_user, online_cnt=len(login_user))
     else:
         raise tornado.web.HTTPError(404)
Example #16
def login(driver, mode, package, phone, tid='a_imei000000000000000'):
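    # Drive the app through phone login with Appium-style element lookups,
    # fetching the verification code out of Redis via the device's 'dc' config.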
    if mode == 'mcp/dx':
        try:
            from adb import get_config_by_adb
            driver.find_element_by_id(elements_id.MENU_ICON).click()
            driver.find_element_by_id(elements_id.PHONE_LOGIN_ICON).click()
            driver.find_element_by_id(elements_id.PHONE_INPUT).send_keys(phone)
            driver.find_element_by_id(elements_id.GET_VCODE).click()
            sleep(5)
            dc = get_config_by_adb(package)['dc']
            r = RedisHandler()
            vcode = r.get_vcode_by_dc(phone, dc, tid)
            driver.find_element_by_id(elements_id.VCODE_INPUT).send_keys(vcode)
            driver.find_element_by_id(elements_id.PHONE_LOGIN_BUTTON).click()
            return True
        except Exception as e:
            print sys._getframe().f_code.co_name
            print e.__repr__()
            return False
Example #17
    def __init__(self, analyzer=jieba.cut_for_search):
        self.mongoDB = mongoclient.fbt
        self.motorDB = motorclient.fbt
        self.db = RedisHandler(
            RedisHandler.type_search
        )  #redis.Redis(host='127.0.0.1', port=PORT, db =0, password = searchRedisPassWd)
        #self.adb = AsyncStrictRedis(host=REDIS_MASTER_HOST, port=REDIS_PORT[2], password=REDIS_PWD[2])
        # for mock
        self.db.init()
        connection_kwargs = self.db.redis_client(
            RedisHandler.type_search,
            'master').connection_pool.connection_kwargs
        self.adb = AsyncStrictRedis(
            host=connection_kwargs.get('host', 'localhost'),
            port=connection_kwargs.get('port', 6379),
            password=connection_kwargs.get('password', None))
        self.redis_delegator = RedisDelegate(self.adb, self.motorDB)
        #users = Users(expire=EXPIRE_TIME)
        all_resources = StaticAllResources(expire=EXPIRE_TIME)
        resource_of_user = ResourcesOfUser(expire=EXPIRE_TIME)
        #self.redis_delegator.add_collection(users)
        self.redis_delegator.add_collection(all_resources)
        self.redis_delegator.add_collection(resource_of_user)
        # async
        self.pipeline = self.adb.pipeline()

        # local cache only; most of the time requests come from the same user
        self.global_search_key_res_dict = dict()
        self.private_search_key_res_dict = dict()
        self.search_key_heapq = list()

        # cache for friend type list
        # friend_type_dict: (uid, type): file_id list
        '''
        self.friend_type_dict = dict()
        self.friend_type_heapq = list()
        '''

        # sync
        self.pipe = self.db.pipeline()
        self.analyzer = analyzer
        self.key_fileidSetDict = dict()
Example #18
 def get(self):
     if self.get_argument("key", "") == "fbt":
         login_user = RedisHandler.f_hgetall(LOGIN_U)
         if not login_user:
             return
         self.render("user.html",
                     title="login user",
                     items=login_user,
                     online_cnt=len(login_user))
     else:
         raise tornado.web.HTTPError(404)
Example #19
def login(driver, phone, in_page=False):
    try:
        r = RedisHandler()
        r.clear_all_vcode_count()
        if not in_page:
            driver.find_element(*MY_CONFIG).click()
            driver.find_element(*PHONE_LOGIN_ICON).click()
        driver.find_element(*PHONE_INPUT).send_keys(phone)
        sleep(3)
        driver.find_element(*GET_VCODE).click()
        sleep(3)
        vcode = r.get_latest_vcode(phone)
        print vcode
        driver.find_element(*VCODE_INPUT).send_keys(vcode)
        sleep(5)
        driver.find_element(*PHONE_LOGIN_BUTTON).click()
        return True
    except Exception as e:
        print sys._getframe().f_code.co_name
        print e.__repr__()
        return False
Example #20
def login(driver, mode, package, phone, tid=DEVICE_TID, in_page=False):
    from adb import get_config_by_adb
    if not in_page:
        driver.find_element_by_id(MENU_ICON).click()
        driver.find_element_by_id(PHONE_LOGIN_ICON).click()
    driver.find_element_by_id(PHONE_INPUT).send_keys(phone)
    driver.find_element_by_id(GET_VCODE).click()
    time.sleep(5)
    r = RedisHandler()
    if is_root:
        dc = get_config_by_adb(package)['dc']
        vcode = r.get_vcode_by_dc(phone, dc, tid)
    else:
        vcode = r.get_latest_vcode(phone)
        print vcode
    if vcode:
        driver.find_element_by_id(VCODE_INPUT).send_keys(vcode)
        driver.find_element_by_id(PHONE_LOGIN_BUTTON).click()
        return True
    else:
        return False
Example #21
    def __init__(self, redis_conn=None, sync_db=None):
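        # Dependency injection (e.g. for tests): use the supplied Redis/Mongo
        # connections when given, otherwise fall back to the defaults.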
        if redis_conn:
            self.conn = redis_conn
        else:
            self.conn = RedisHandler(RedisHandler.type_lru)

        if sync_db:
            self.mongo_conn = sync_db
        else:
            self.mongo_conn = mongoclient.fbt

        self.col_name_list = list()
Example #22
def login(
    driver,
    mode,
    package,
    phone,
    tid=DEVICE_TID,
    in_page=False,
):
    try:
        from adb import get_config_by_adb
        if not in_page:
            driver.find_element_by_id(MY_CONFIG).click()
            driver.find_element_by_id(PHONE_LOGIN_ICON).click()
        driver.find_element_by_id(PHONE_INPUT).send_keys(phone)
        driver.find_element_by_id(GET_VCODE).click()
        sleep(5)
        r = RedisHandler()
        r.clear_all_vcode_count()
        if IS_ROOT:
            dc = get_config_by_adb(package)['dc']
            vcode = r.get_vcode_by_dc(phone, dc, tid)
        else:
            vcode = r.get_latest_vcode(phone)
        if vcode:
            driver.find_element_by_id(VCODE_INPUT).send_keys(vcode)
            driver.find_element_by_id(PHONE_LOGIN_BUTTON).click()
            return True
        else:
            return False
    except Exception as e:
        print sys._getframe().f_code.co_name
        print e.__repr__()
        return False
Example #23
def getUserInfoById(uid):
    uid = long(uid)
    uid_tmp = str(uid)
    info = RedisHandler.f_hget(USERINFO, uid_tmp)
    if info:
        raise gen.Return(json.loads(info))
    else:
        user = yield motorclient.fbt_realtime.users.find_one({"uid": uid}, {"_id": 0})
        if user:
            cacheUserInfo(user)
            gender = user["gender"]
            if gender:
                if gender == u"男":
                    gender = 1
                else:
                    gender = 0
            info = {"nick_name": user["nick_name"], "icon": user["icon"], 
                            "user": user["user"], "gender": gender, "school": user["school"]}
            raise gen.Return(info)
        else:
            raise gen.Return(None)
Example #24
    def __init__(self, analyzer = jieba.cut_for_search) :
        self.mongoDB = mongoclient.fbt
        self.motorDB = motorclient.fbt
        self.db = RedisHandler(RedisHandler.type_search) #redis.Redis(host='127.0.0.1', port=PORT, db =0, password = searchRedisPassWd)
        #self.adb = AsyncStrictRedis(host=REDIS_MASTER_HOST, port=REDIS_PORT[2], password=REDIS_PWD[2])
        # for mock
        self.db.init()
        connection_kwargs = self.db.redis_client(RedisHandler.type_search, 'master').connection_pool.connection_kwargs
        self.adb = AsyncStrictRedis(host=connection_kwargs.get('host', 'localhost'), port=connection_kwargs.get('port', 6379), 
                                                        password=connection_kwargs.get('password', None))
        self.redis_delegator = RedisDelegate(self.adb, self.motorDB)
        #users = Users(expire=EXPIRE_TIME)
        all_resources = StaticAllResources(expire=EXPIRE_TIME)
        resource_of_user = ResourcesOfUser(expire=EXPIRE_TIME)
        #self.redis_delegator.add_collection(users)
        self.redis_delegator.add_collection(all_resources)
        self.redis_delegator.add_collection(resource_of_user)
        # async
        self.pipeline = self.adb.pipeline()

        # local cache only; most of the time requests come from the same user
        self.global_search_key_res_dict = dict()
        self.private_search_key_res_dict = dict()
        self.search_key_heapq = list()

        # cache for friend type list
        # friend_type_dict: (uid, type): file_id list
        '''
        self.friend_type_dict = dict()
        self.friend_type_heapq = list()
        '''

        # sync
        self.pipe = self.db.pipeline()
        self.analyzer = analyzer
        self.key_fileidSetDict = dict()
Example #25
def fetch_token_in_cache(uid):
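    # Thin wrapper: read the cached login token for this uid from Redis.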
    return RedisHandler.f_get(str(uid), RedisHandler.type_token)
Example #26
class FileNameSearcher(Singleton):
    def __init__(self, analyzer=jieba.cut_for_search):
        self.mongoDB = mongoclient.fbt
        self.motorDB = motorclient.fbt
        self.db = RedisHandler(
            RedisHandler.type_search
        )  #redis.Redis(host='127.0.0.1', port=PORT, db =0, password = searchRedisPassWd)
        #self.adb = AsyncStrictRedis(host=REDIS_MASTER_HOST, port=REDIS_PORT[2], password=REDIS_PWD[2])
        # for mock
        self.db.init()
        connection_kwargs = self.db.redis_client(
            RedisHandler.type_search,
            'master').connection_pool.connection_kwargs
        self.adb = AsyncStrictRedis(
            host=connection_kwargs.get('host', 'localhost'),
            port=connection_kwargs.get('port', 6379),
            password=connection_kwargs.get('password', None))
        self.redis_delegator = RedisDelegate(self.adb, self.motorDB)
        #users = Users(expire=EXPIRE_TIME)
        all_resources = StaticAllResources(expire=EXPIRE_TIME)
        resource_of_user = ResourcesOfUser(expire=EXPIRE_TIME)
        #self.redis_delegator.add_collection(users)
        self.redis_delegator.add_collection(all_resources)
        self.redis_delegator.add_collection(resource_of_user)
        # async
        self.pipeline = self.adb.pipeline()

        # local cache only; most of the time requests come from the same user
        self.global_search_key_res_dict = dict()
        self.private_search_key_res_dict = dict()
        self.search_key_heapq = list()

        # cache for friend type list
        # friend_type_dict: (uid, type): file_id list
        '''
        self.friend_type_dict = dict()
        self.friend_type_heapq = list()
        '''

        # sync
        self.pipe = self.db.pipeline()
        self.analyzer = analyzer
        self.key_fileidSetDict = dict()

    '''
    def get_friend_type_file_ids(self, uid, tp):
        info = (uid, tp)
        res = self.friend_type_dict.get(info, None)
        if res is not None:
            for i, v in enumerate(self.friend_type_heapq):
                if info == v[1]:
                    self.friend_type_heapq[i][0] = time()
                    heapq.heapify(self.friend_type_heapq)
                    break

        while self.friend_type_heapq and self.friend_type_heapq[0][0] + EXPIRE_TIME < time():
            _time, _info = heapq.heappop(self.friend_type_heapq)
            self.friend_type_dict.pop(_info)

        return res

    def put_friend_type_file_ids(self, uid, tp, file_ids_set):
        info = (uid, tp)
        if info in self.friend_type_dict:
            for i, v in enumerate(self.friend_type_heapq):
                if info == v[1]:
                    self.friend_type_heapq[i][0] = time()
                    heapq.heapify(self.friend_type_heapq)
                    break
        else:
            heapq.heappush(self.friend_type_heapq, [time(), info])

        self.friend_type_dict[info] = file_ids_set
    '''

    def get_search_key(self, info):
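        # In-process cache with lazy expiry: search_key_heapq holds
        # [last_access, info] pairs so entries idle longer than EXPIRE_TIME
        # can be evicted below.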
        if isinstance(info[0], tuple):
            search_key_res_dict = self.private_search_key_res_dict
        else:
            search_key_res_dict = self.global_search_key_res_dict

        res = search_key_res_dict.get(info, None)
        if res is not None:
            for i, v in enumerate(self.search_key_heapq):
                if info == v[1]:
                    self.search_key_heapq[i][0] = time()
                    heapq.heapify(self.search_key_heapq)
                    break

        while self.search_key_heapq and self.search_key_heapq[0][
                0] + EXPIRE_TIME < time():
            _time, _info = heapq.heappop(self.search_key_heapq)
            search_key_res_dict.pop(_info)

        return res

    def put_search_key(self, info, key):
        if isinstance(info[0], tuple):
            search_key_res_dict = self.private_search_key_res_dict
        else:
            search_key_res_dict = self.global_search_key_res_dict

        if info in search_key_res_dict:
            for i, v in enumerate(self.search_key_heapq):
                if info == v[1]:
                    self.search_key_heapq[i][0] = time()
                    heapq.heapify(self.search_key_heapq)
                    break
        else:
            heapq.heappush(self.search_key_heapq, [time(), info])

        search_key_res_dict[info] = key

    def flush(self):
        ''' executes the pipeline, returns a list of results '''
        return self.pipe.execute()

    def drop(self):
        ''' drops the entire index '''
        return self.db.flushdb()

    @gen.coroutine
    def check_exists(self, key, expire=EXPIRE_TIME):
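        # EXPIRE doubles as an existence probe: it returns truthy only if the
        # key exists, and refreshing the TTL is the desired side effect.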
        is_exists = yield self.adb.expire(key, expire)
        raise gen.Return(is_exists)

    @gen.coroutine
    def check_multi_exists(self, key_list, expire=EXPIRE_TIME):
        for key in key_list:
            self.pipeline.expire(key, expire)
        res = yield self.pipeline.execute()
        raise gen.Return(res)

    def getKeys(self, file_name):
        return set(
            filter(lambda x: len(x) > 1 or (u'\u4e00' <= x <= u'\u9fff'),
                   self.analyzer(file_name.lower())))

    '''
    functions ending with '_sync' are just for pub_sub
    '''

    def file_id_add_title_sync(self, file_id, file_name):
        keys = self.getKeys(file_name)
        for k in keys:
            if not self.db.sismember('stop_keyword_list', k):
                key_name = make_key_for_keyword(k)
                self.db.sadd(key_name, file_id)
                self.mongoDB.key_fileids.update(
                    {"key": k}, {"$addToSet": {
                        "file_ids": file_id
                    }}, True)
                if self.db.scard(key_name) > STOP_KEYWORD_FREQUENCE:
                    self.db.sadd('stop_keyword_list', k)
                    self.db.delete(key_name)

    def remove_file_id_sync(self, file_id, file_name):
        keys = self.getKeys(file_name)
        for k in keys:
            key_name = make_key_for_keyword(k)
            self.pipe.srem(key_name, file_id)
            self.mongoDB.key_fileids.update({"key": k},
                                            {"$pull": {
                                                "file_ids": file_id
                                            }}, True)
        self.flush()

    @gen.coroutine
    def file_id_add_title(self, file_id, file_name):
        pipeline = self.pipeline
        adb = self.adb
        keys = self.getKeys(file_name)
        key_list = list()
        for k in keys:
            ismembers = yield adb.sismember('stop_keyword_list', k)
            if not ismembers:
                key_name = make_key_for_keyword(k)
                key_list.append(k)
                pipeline.sadd(key_name, file_id)
                yield self.motorDB.key_fileids.update(
                    {"key": k}, {"$addToSet": {
                        "file_ids": file_id
                    }}, True)
        yield pipeline.execute()
        for key in key_list:
            key_name = make_key_for_keyword(key)
            pipeline.scard(key_name)
        scard_list = yield pipeline.execute()
        for card, k in zip(scard_list, key_list):
            if card > STOP_KEYWORD_FREQUENCE:
                key_name = make_key_for_keyword(k)
                pipeline.sadd('stop_keyword_list', k)
                pipeline.delete(key_name)
        yield pipeline.execute()

    @gen.coroutine
    def remove_file_id(self, file_id, file_name):
        keys = self.getKeys(file_name)
        pipeline = self.pipeline
        to_yield = list()
        for k in keys:
            key_name = make_key_for_keyword(k)
            pipeline.srem(key_name, file_id)
            y = self.motorDB.key_fileids.update(
                {"key": k}, {"$pull": {
                    "file_ids": file_id
                }}, True)
            to_yield.append(y)
        to_yield.append(pipeline.execute())
        yield to_yield

    # def file_id_add_title_from_init(self, pymongoDB, file_id, file_name) :
    #     keys = self.getKeys(file_name)
    #     flag = self.db.info()['used_memory']  < MEM_LIMIT
    #     for k in keys :
    #         if not self.db.sismember('stop_keyword_list', k) :
    #             res = pymongoDB.key_fileids.find_and_modify(query={'key': k}, update= {"$addToSet": {"file_ids": file_id}}, upsert=True, full_response= True)
    #             if res and res['value'] and ('file_ids' in res['value']) and (len(res['value']['file_ids']) > STOP_KEYWORD_FREQUENCE) :
    #                 self.db.sadd('stop_keyword_list', k)
    #                 self.db.delete(k)
    #             if flag :
    #                 self.pipe.sadd(k, file_id)
    #             else :
    #                 self.db.delete(k)
    #     if flag :
    #         self.flush()

    def file_id_add_title_from_init(self, file_id, file_name):
        keys = self.getKeys(file_name)
        for k in keys:
            if k not in self.key_fileidSetDict:
                self.key_fileidSetDict[k] = set([file_id])
            else:
                self.key_fileidSetDict[k].add(file_id)

    def init_index_in_redis(self, pymongoDB):
        for k, v in self.key_fileidSetDict.iteritems():
            if len(v) <= STOP_KEYWORD_FREQUENCE:
                if self.db.info()['used_memory'] < MEM_LIMIT:
                    key_name = make_key_for_keyword(k)
                    self.db.sadd(key_name, *v)
                    #for f in v :
                    #    self.db.sadd(k, f)
                pymongoDB.key_fileids.insert({'key': k, 'file_ids': list(v)})
            else:
                self.db.sadd('stop_keyword_list', k)

    @gen.coroutine
    def query_file_ids_by_file_name(self,
                                    file_name,
                                    page,
                                    max_resources_cnt_in_page,
                                    sort=None,
                                    desc=True):
        keys = tuple(self.getKeys(file_name))
        rd = self.redis_delegator
        global_search_key = yield self._query_file_ids_by_file_name(keys)
        file_ids = yield self.adb.smembers(global_search_key)
        if sort:
            # sort all, TODO: check sufficiency
            yield rd.all_resources.find(file_ids)
            file_ids = yield self.sort(global_search_key, sort, None, None,
                                       desc)
        ret = list()
        size = len(file_ids)
        index = 0
        (start_index, end_index) = ((page - 1) * max_resources_cnt_in_page,
                                    page * max_resources_cnt_in_page)
        for file_id in file_ids:
            resource = yield rd.all_resources.find(file_id)
            if resource and (1 != resource.get(
                    "hidden", None)) and (1 == resource.get("public", None)):
                if end_index == index:
                    break
                elif index >= start_index:
                    resource['file_id'] = file_id
                    ret.append(resource)
                index += 1
        raise gen.Return((size, ret))

    @gen.coroutine
    def query_file_ids_by_file_name_private(self,
                                            my_uid,
                                            file_name,
                                            page,
                                            max_resources_cnt_in_page,
                                            sort=None,
                                            desc=True):
        keys = tuple(self.getKeys(file_name))
        (start_index, end_index) = ((page - 1) * max_resources_cnt_in_page,
                                    page * max_resources_cnt_in_page)
        rd = self.redis_delegator
        adb = self.adb

        private_search_key = self.get_search_key((keys, my_uid))
        is_exists = False
        if private_search_key:
            is_exists = yield self.check_exists(private_search_key)
        if not is_exists:
            global_search_key = yield self._query_file_ids_by_file_name(keys)
            friend_fileids_set_key = make_key_for_friend_fileids(my_uid)
            is_exists = yield self.check_exists(friend_fileids_set_key)
            #res = yield adb.smembers(global_search_key)
            #print res
            # Because an empty result is unlikely, a missing key most likely means it expired
            if not is_exists:
                res = yield self.load_private_resources(my_uid)
            #res = yield adb.smembers(friend_fileids_set_key)
            #print res
            private_search_key = make_key_for_private_search()
            size = yield adb.sinterstore(private_search_key, global_search_key,
                                         friend_fileids_set_key)
            yield adb.expire(private_search_key, EXPIRE_TIME)
            res = yield self.adb.smembers(private_search_key)
        else:
            size = yield adb.scard(private_search_key)

        if sort:
            file_ids = yield self.sort(private_search_key, sort, start_index,
                                       max_resources_cnt_in_page, desc)
        else:
            file_ids = yield self.adb.smembers(private_search_key)
            file_ids = list(file_ids)
            file_ids = file_ids[start_index:end_index]

        ret = yield rd.all_resources.find(file_ids, keep_order=bool(sort))
        self.put_search_key((keys, my_uid), private_search_key)
        raise gen.Return((size, ret))

    @gen.coroutine
    def _query_file_ids_by_file_name(self, keys):
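        # Build (or reuse) a temporary Redis set holding the intersection of
        # the per-keyword file-id sets, hydrating missing keyword sets from
        # MongoDB first.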
        ukey = make_key_for_global_search()
        if not keys:
            raise gen.Return(ukey)
        global_search_key = self.get_search_key(keys)
        if global_search_key:
            is_exists = yield self.check_exists(global_search_key)
            if is_exists:
                #res = yield self.adb.smembers(global_search_key)
                raise gen.Return(global_search_key)

        pipeline = self.pipeline
        if not keys:
            raise gen.Return([])

        key_list = list()
        for k in keys:
            ismembers = yield self.adb.sismember('stop_keyword_list', k)
            if not ismembers:
                key_name = make_key_for_keyword(k)
                key_list.append(key_name)
                is_exists = yield self.adb.exists(key_name)
                if not is_exists:
                    res = yield self.motorDB.key_fileids.find_one({'key': k}, {
                        'file_ids': 1,
                        '_id': 0
                    })
                    if res:
                        '''
                        try:
                            print 'HIT: ', k
                        except Exception, e:
                            print e
                        '''
                        pipeline.sadd(key_name, *res['file_ids'])

        pipeline.sinterstore(ukey, *key_list)
        #pipeline.smembers(ukey)
        pipeline.expire(ukey, EXPIRE_TIME)
        res = yield pipeline.execute()
        self.put_search_key(keys, ukey)
        raise gen.Return(ukey)

    @gen.coroutine
    def sort(self, key, sort, start=0, num=20, desc=True):
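        # Redis SORT with BY 'all_resources:*->field' orders set members by a
        # field of an external hash; alpha must be set for non-numeric fields.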
        alpha = sort not in ('mtime', 'download_num')
        by = "all_resources:*->" + sort
        res = yield self.adb.sort(key,
                                  by=by,
                                  alpha=alpha,
                                  desc=desc,
                                  start=start,
                                  num=num)
        raise gen.Return(res)

    @gen.coroutine
    def get_private_resources_by_type(self,
                                      my_uid,
                                      _type,
                                      page,
                                      max_resources_cnt_in_page,
                                      sort=None,
                                      desc=True):
        (start_index, end_index) = ((page - 1) * max_resources_cnt_in_page,
                                    page * max_resources_cnt_in_page)
        rd = self.redis_delegator
        size = yield self.load_private_resources(my_uid, _type)
        if 0 == size:
            raise gen.Return((0, []))
        friend_type_key = make_key_for_all_type_list(my_uid, _type)
        if sort:
            res = yield self.sort(friend_type_key, sort, start_index,
                                  max_resources_cnt_in_page, desc)
        else:
            res = yield self.adb.smembers(friend_type_key)
            res = list(res)
            res = res[start_index:end_index]

        ret = yield rd.all_resources.find(res, keep_order=bool(sort))
        raise gen.Return((size, ret))

    @gen.coroutine
    def get_private_resources(self,
                              my_uid,
                              page,
                              max_resources_cnt_in_page,
                              sort=None,
                              desc=True):
        (start_index, end_index) = ((page - 1) * max_resources_cnt_in_page,
                                    page * max_resources_cnt_in_page)
        rd = self.redis_delegator
        file_ids = yield self.load_private_resources(my_uid)
        size = len(file_ids)
        if sort:
            friend_fileids_set_key = make_key_for_friend_fileids(my_uid)
            file_ids = yield self.sort(friend_fileids_set_key, sort,
                                       start_index, max_resources_cnt_in_page,
                                       desc)
        else:
            file_ids = file_ids[start_index:end_index]

        ret = yield rd.all_resources.find(file_ids, keep_order=bool(sort))
        raise gen.Return((size, ret))

    @gen.coroutine
    def get_online_friends(self, my_uid):
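        # Online friends are the intersection of this user's friends set and
        # the global online-user set (USER_IP_CACHE_SET_KEY).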
        pipeline = self.pipeline
        online_friends_key = ':'.join(['my_friends', str(my_uid)])
        is_exists = yield self.check_exists(online_friends_key)
        if not is_exists:
            friend_uid_list = yield getFriendsById(my_uid)
            if friend_uid_list:
                pipeline.sadd(online_friends_key,
                              *[_['uid'] for _ in friend_uid_list])
                pipeline.expire(online_friends_key, EXPIRE_TIME)
                yield pipeline.execute()
        online_friends_uid_list = yield self.adb.sinter(
            online_friends_key, USER_IP_CACHE_SET_KEY)
        online_friends_uid_list = [long(_) for _ in online_friends_uid_list]
        raise gen.Return(online_friends_uid_list)

    @gen.coroutine
    def load_private_resources_helper(self, my_uid, tp=None):
        pipeline = self.pipeline
        rd = self.redis_delegator
        my_type_resource_set = set()
        my_resource_set = set()
        friend_uid_list = yield self.get_online_friends(
            my_uid)  # yield getFriendsById(my_uid)
        for uid in friend_uid_list:
            assert uid > 0
            friend_resource = yield rd.resources_of_user(uid).file_ids.get()
            if friend_resource:
                file_in_dir = {}
                fileids_of_one_friend = set()
                for f in friend_resource:
                    if '+' in f:
                        dir_id, file_id = f.split('+')
                        if dir_id in file_in_dir:
                            file_in_dir[dir_id].append(file_id)
                        else:
                            file_in_dir[dir_id] = [file_id]
                    else:
                        fileids_of_one_friend.add(f)
                if file_in_dir:
                    fileids_of_one_friend = fileids_of_one_friend.union(
                        file_in_dir.keys())
                if fileids_of_one_friend:
                    #res = yield [rd.all_resources.find(_) for _ in fileids_of_one_friend]
                    key_fileids_dict = defaultdict(list)
                    #for r, file_id in zip(res, fileids_of_one_friend):
                    resources = yield rd.all_resources.find(
                        fileids_of_one_friend)
                    for r in resources:
                        if r and 'main_type' in r:
                            _type = r['main_type']
                            file_id = r['file_id']
                            key_fileids_dict[_type].append(file_id)
                    for _type, fileids in key_fileids_dict.iteritems():
                        one_friend_type_key = make_key_for_friend_type_list(
                            uid, _type)
                        is_exists = yield self.check_exists(
                            one_friend_type_key)
                        # write when expire
                        if not is_exists:
                            pipeline.sadd(one_friend_type_key, *fileids)
                            pipeline.expire(one_friend_type_key, EXPIRE_TIME)
                        if _type == tp:
                            my_type_resource_set = my_type_resource_set.union(
                                fileids)
                    yield pipeline.execute()
                my_resource_set = my_resource_set.union(fileids_of_one_friend)

        if my_resource_set:
            friend_fileids_set_key = make_key_for_friend_fileids(my_uid)
            pipeline.sadd(friend_fileids_set_key, *my_resource_set)
            pipeline.expire(friend_fileids_set_key, EXPIRE_TIME)
            yield pipeline.execute()
        if my_type_resource_set:
            friend_type_key = make_key_for_all_type_list(my_uid, tp)
            pipeline.sadd(friend_type_key, *my_type_resource_set)
            pipeline.expire(friend_type_key, EXPIRE_TIME)
            yield pipeline.execute()
        if tp:
            raise gen.Return(my_type_resource_set)
        else:
            raise gen.Return(my_resource_set)

    @gen.coroutine
    def load_private_resources(self, my_uid, tp=None):
        adb = self.adb
        #friend_uid_list = yield rd.users(my_uid).friends.get()

        if tp is None:
            friend_fileids_set_key = make_key_for_friend_fileids(my_uid)
            is_exists = yield self.check_exists(friend_fileids_set_key)
            if is_exists:
                my_resource_set = yield adb.smembers(friend_fileids_set_key)
                raise gen.Return(list(my_resource_set))
            else:
                res = yield self.load_private_resources_helper(my_uid, tp)
                raise gen.Return(list(res))
        else:
            friend_type_key = make_key_for_all_type_list(my_uid, tp)
            is_exists = yield self.check_exists(friend_type_key)
            if is_exists:
                size = yield self.adb.scard(friend_type_key)
                raise gen.Return(size)
            else:
                friend_uid_list = yield self.get_online_friends(
                    my_uid)  # yield getFriendsById(my_uid)
                if 0 == len(friend_uid_list):
                    raise gen.Return(0)
                else:
                    exists_list = yield self.check_multi_exists([
                        make_key_for_friend_type_list(_, tp)
                        for _ in friend_uid_list
                    ])
                    if any(exists_list):
                        size = yield self.adb.sunionstore(
                            friend_type_key, *[
                                make_key_for_friend_type_list(_, tp)
                                for _ in friend_uid_list
                            ])
                        raise gen.Return(size)
                    else:
                        res = yield self.load_private_resources_helper(
                            my_uid, tp)
                        raise gen.Return(len(res))

    def init_from_mongo(self):
        db = mongoclient.fbt
        with codecs.open('fn', 'w', 'utf-8') as f:
            for record in db.all_resources.find(
                {
                    "hidden": {
                        "$ne": 1
                    },
                    "public": 1
                }, {
                    'file_id': 1,
                    'file_name': 1,
                    '_id': 0
                }):
                if 'file_id' in record and 'file_name' in record:
                    self.file_id_add_title_from_init(record['file_id'],
                                                     record['file_name'])
                    f.write("%s\t%s\n" %
                            (record['file_id'], record['file_name']))
        self.init_index_in_redis(db)
        db.key_fileids.create_index('key', background=True)

    def add_index_in_redis(self, pymongoDB):
        pipeline = self.pipe
        rdb = self.db
        key_list = list()
        for k, v in self.key_fileidSetDict.iteritems():
            ismembers = rdb.sismember('stop_keyword_list', k)
            if not ismembers:
                if self.db.info()['used_memory'] < MEM_LIMIT:
                    key_list.append(k)
                    key_name = make_key_for_keyword(k)
                    pipeline.sadd(key_name, *v)
            self.mongoDB.key_fileids.update(
                {"key": k}, {"$addToSet": {
                    "file_ids": {
                        "$each": list(v)
                    }
                }}, True)
        for key in key_list:
            key_name = make_key_for_keyword(key)
            pipeline.scard(key_name)
        scard_list = pipeline.execute()
        for card, k in zip(scard_list, key_list):
            if card > STOP_KEYWORD_FREQUENCE:
                key_name = make_key_for_keyword(k)
                pipeline.sadd('stop_keyword_list', k)
                pipeline.delete(key_name)
        pipeline.execute()

    def scan_from_mongo(self, last_mtime):
        db = mongoclient.fbt
        if 0 == last_mtime:
            records_list = db.all_resources.find(
                {
                    "hidden": {
                        "$ne": 1
                    },
                    "public": 1
                }, {
                    'file_id': 1,
                    'file_name': 1,
                    'mtime': 1,
                    '_id': 0
                })
        else:
            records_list = db.all_resources.find(
                {
                    "hidden": {
                        "$ne": 1
                    },
                    "public": 1,
                    'mtime': {
                        "$gt": last_mtime
                    }
                }, {
                    'file_id': 1,
                    'file_name': 1,
                    'mtime': 1,
                    '_id': 0
                })

        for record in records_list:
            if 'file_id' in record and 'file_name' in record:
                self.file_id_add_title_from_init(record['file_id'],
                                                 record['file_name'])
                if 'mtime' in record:
                    last_mtime = max(last_mtime, record['mtime'])
        self.add_index_in_redis(db)
        return last_mtime
Example #27
class FileNameSearcher(Singleton) :
    def __init__(self, analyzer = jieba.cut_for_search) :
        self.mongoDB = mongoclient.fbt
        self.motorDB = motorclient.fbt
        self.db = RedisHandler(RedisHandler.type_search) #redis.Redis(host='127.0.0.1', port=PORT, db =0, password = searchRedisPassWd)
        #self.adb = AsyncStrictRedis(host=REDIS_MASTER_HOST, port=REDIS_PORT[2], password=REDIS_PWD[2])
        # for mock
        self.db.init()
        connection_kwargs = self.db.redis_client(RedisHandler.type_search, 'master').connection_pool.connection_kwargs
        self.adb = AsyncStrictRedis(host=connection_kwargs.get('host', 'localhost'), port=connection_kwargs.get('port', 6379), 
                                                        password=connection_kwargs.get('password', None))
        self.redis_delegator = RedisDelegate(self.adb, self.motorDB)
        #users = Users(expire=EXPIRE_TIME)
        all_resources = StaticAllResources(expire=EXPIRE_TIME)
        resource_of_user = ResourcesOfUser(expire=EXPIRE_TIME)
        #self.redis_delegator.add_collection(users)
        self.redis_delegator.add_collection(all_resources)
        self.redis_delegator.add_collection(resource_of_user)
        # async
        self.pipeline = self.adb.pipeline()

        # local cache only; most of the time requests come from the same user
        self.global_search_key_res_dict = dict()
        self.private_search_key_res_dict = dict()
        self.search_key_heapq = list()

        # cache for friend type list
        # friend_type_dict: (uid, type): file_id list
        '''
        self.friend_type_dict = dict()
        self.friend_type_heapq = list()
        '''

        # sync
        self.pipe = self.db.pipeline()
        self.analyzer = analyzer
        self.key_fileidSetDict = dict()

    '''
    def get_friend_type_file_ids(self, uid, tp):
        info = (uid, tp)
        res = self.friend_type_dict.get(info, None)
        if res is not None:
            for i, v in enumerate(self.friend_type_heapq):
                if info == v[1]:
                    self.friend_type_heapq[i][0] = time()
                    heapq.heapify(self.friend_type_heapq)
                    break

        while self.friend_type_heapq and self.friend_type_heapq[0][0] + EXPIRE_TIME < time():
            _time, _info = heapq.heappop(self.friend_type_heapq)
            self.friend_type_dict.pop(_info)

        return res

    def put_friend_type_file_ids(self, uid, tp, file_ids_set):
        info = (uid, tp)
        if info in self.friend_type_dict:
            for i, v in enumerate(self.friend_type_heapq):
                if info == v[1]:
                    self.friend_type_heapq[i][0] = time()
                    heapq.heapify(self.friend_type_heapq)
                    break
        else:
            heapq.heappush(self.friend_type_heapq, [time(), info])

        self.friend_type_dict[info] = file_ids_set
    '''

    def get_search_key(self, info):
        if isinstance(info[0], tuple):
            search_key_res_dict = self.private_search_key_res_dict
        else:
            search_key_res_dict = self.global_search_key_res_dict

        res = search_key_res_dict.get(info, None)
        if res is not None:
            for i, v in enumerate(self.search_key_heapq):
                if info == v[1]:
                    self.search_key_heapq[i][0] = time()
                    heapq.heapify(self.search_key_heapq)
                    break

        while self.search_key_heapq and self.search_key_heapq[0][0] + EXPIRE_TIME < time():
            _time, _info = heapq.heappop(self.search_key_heapq)
            search_key_res_dict.pop(_info)

        return res

    def put_search_key(self, info, key):
        if isinstance(info[0], tuple):
            search_key_res_dict = self.private_search_key_res_dict
        else:
            search_key_res_dict = self.global_search_key_res_dict

        if info in search_key_res_dict:
            for i, v in enumerate(self.search_key_heapq):
                if info == v[1]:
                    self.search_key_heapq[i][0] = time()
                    heapq.heapify(self.search_key_heapq)
                    break
        else:
            heapq.heappush(self.search_key_heapq, [time(), info])

        search_key_res_dict[info] = key

    def flush(self):
        ''' executes the pipeline, returns a list of results '''
        return self.pipe.execute()
    
    def drop(self):
        ''' drops the entire index '''
        return self.db.flushdb()

    @gen.coroutine
    def check_exists(self, key, expire=EXPIRE_TIME):
        is_exists = yield self.adb.expire(key, expire)
        raise gen.Return(is_exists)

    @gen.coroutine
    def check_multi_exists(self, key_list, expire=EXPIRE_TIME):
        for key in key_list:
            self.pipeline.expire(key, expire)
        res = yield self.pipeline.execute()
        raise gen.Return(res)

    def getKeys(self, file_name) :
        return set(filter( lambda x : len(x) > 1 or (u'\u4e00' <= x <= u'\u9fff'), self.analyzer(file_name.lower()) ))

    '''
    functions ending with '_sync' are just for pub_sub
    '''
    def file_id_add_title_sync(self, file_id, file_name) :
        keys = self.getKeys(file_name)
        for k in keys :
            if not self.db.sismember('stop_keyword_list', k) :
                key_name = make_key_for_keyword(k)
                self.db.sadd(key_name, file_id)
                self.mongoDB.key_fileids.update({"key": k}, {"$addToSet": {"file_ids": file_id}}, True)
                if self.db.scard(key_name) > STOP_KEYWORD_FREQUENCE :
                    self.db.sadd('stop_keyword_list', k)
                    self.db.delete(key_name)

    def remove_file_id_sync(self, file_id, file_name)  :
        keys = self.getKeys(file_name)
        for k in keys:
            key_name = make_key_for_keyword(k)
            self.pipe.srem(key_name, file_id)
            self.mongoDB.key_fileids.update({"key": k}, {"$pull": {"file_ids": file_id}}, True)
        self.flush()

    @gen.coroutine
    def file_id_add_title(self, file_id, file_name):
        pipeline = self.pipeline
        adb = self.adb
        keys = self.getKeys(file_name)
        key_list = list()
        for k in keys :
            ismembers = yield adb.sismember('stop_keyword_list', k)
            if not ismembers:
                key_name = make_key_for_keyword(k)
                key_list.append(k)
                pipeline.sadd(key_name, file_id)
                yield self.motorDB.key_fileids.update({"key": k}, {"$addToSet": {"file_ids": file_id}}, True)
        yield pipeline.execute()
        for key in key_list:
            key_name = make_key_for_keyword(key)
            pipeline.scard(key_name)
        scard_list = yield pipeline.execute()
        for card, k in zip(scard_list, key_list):
            if card > STOP_KEYWORD_FREQUENCE:
                key_name = make_key_for_keyword(k)
                pipeline.sadd('stop_keyword_list', k)
                pipeline.delete(key_name)
        yield pipeline.execute()

    @gen.coroutine
    def remove_file_id(self, file_id, file_name)  :
        keys = self.getKeys(file_name)
        pipeline = self.pipeline
        to_yield = list()
        for k in keys:
            key_name = make_key_for_keyword(k)
            pipeline.srem(key_name, file_id)
            y = self.motorDB.key_fileids.update({"key": k}, {"$pull": {"file_ids": file_id}}, True)
            to_yield.append(y)
        to_yield.append(pipeline.execute())
        yield to_yield

    # def file_id_add_title_from_init(self, pymongoDB, file_id, file_name) :
    #     keys = self.getKeys(file_name)
    #     flag = self.db.info()['used_memory']  < MEM_LIMIT
    #     for k in keys :
    #         if not self.db.sismember('stop_keyword_list', k) :
    #             res = pymongoDB.key_fileids.find_and_modify(query={'key': k}, update= {"$addToSet": {"file_ids": file_id}}, upsert=True, full_response= True)
    #             if res and res['value'] and ('file_ids' in res['value']) and (len(res['value']['file_ids']) > STOP_KEYWORD_FREQUENCE) :
    #                 self.db.sadd('stop_keyword_list', k)
    #                 self.db.delete(k)
    #             if flag :
    #                 self.pipe.sadd(k, file_id)
    #             else :
    #                 self.db.delete(k)
    #     if flag :
    #         self.flush()

    def file_id_add_title_from_init(self, file_id, file_name) :
        keys = self.getKeys(file_name)
        for k in keys :
            if k not in self.key_fileidSetDict :
                self.key_fileidSetDict[k] = set([file_id])
            else :
                self.key_fileidSetDict[k].add(file_id)

    def init_index_in_redis(self, pymongoDB) :
        for k, v in self.key_fileidSetDict.iteritems() :
            if len(v) <= STOP_KEYWORD_FREQUENCE :
                if self.db.info()['used_memory']  < MEM_LIMIT :
                    key_name = make_key_for_keyword(k)
                    self.db.sadd(key_name, *v)
                    #for f in v :
                    #    self.db.sadd(k, f)
                pymongoDB.key_fileids.insert({'key': k, 'file_ids': list(v)})
            else :
                self.db.sadd('stop_keyword_list', k)

    @gen.coroutine
    def query_file_ids_by_file_name(self, file_name, page, max_resources_cnt_in_page, sort=None, desc=True):
        keys = tuple(self.getKeys(file_name))
        rd = self.redis_delegator
        global_search_key = yield self._query_file_ids_by_file_name(keys)
        file_ids = yield self.adb.smembers(global_search_key)
        if sort:
            # sort all, TODO: check sufficiency
            yield rd.all_resources.find(file_ids)
            file_ids = yield self.sort(global_search_key, sort, None, None, desc)
        ret = list()
        size = len(file_ids)
        index = 0
        (start_index, end_index) = ((page - 1) * max_resources_cnt_in_page, page * max_resources_cnt_in_page)
        for file_id in file_ids:
            resource = yield rd.all_resources.find(file_id)
            if resource and (1 != resource.get("hidden", None)) and (1 == resource.get("public", None)):
                if end_index == index:
                    break
                elif index >= start_index:
                    resource['file_id'] = file_id
                    ret.append(resource)
                index += 1
        raise gen.Return((size, ret))

    @gen.coroutine
    def query_file_ids_by_file_name_private(self, my_uid, file_name, page, max_resources_cnt_in_page, sort=None, desc=True) :
        keys = tuple(self.getKeys(file_name))
        (start_index, end_index) = ((page - 1) * max_resources_cnt_in_page, page * max_resources_cnt_in_page)
        rd = self.redis_delegator
        adb = self.adb

        private_search_key = self.get_search_key((keys, my_uid))
        is_exists = False
        if private_search_key:
            is_exists = yield self.check_exists(private_search_key)
        if not is_exists:
            global_search_key = yield self._query_file_ids_by_file_name(keys)
            friend_fileids_set_key = make_key_for_friend_fileids(my_uid)
            is_exists = yield self.check_exists(friend_fileids_set_key)
            #res = yield adb.smembers(global_search_key)
            #print res
            # Because an empty result is unlikely, a missing key most likely means it expired
            if not is_exists:
                res = yield self.load_private_resources(my_uid)
            #res = yield adb.smembers(friend_fileids_set_key)
            #print res
            private_search_key = make_key_for_private_search()
            size = yield adb.sinterstore(private_search_key, global_search_key, friend_fileids_set_key)
            yield adb.expire(private_search_key, EXPIRE_TIME)
            res = yield self.adb.smembers(private_search_key)
        else:
            size = yield adb.scard(private_search_key)

        if sort:
            file_ids = yield self.sort(private_search_key, sort, start_index, max_resources_cnt_in_page, desc)
        else:
            file_ids = yield self.adb.smembers(private_search_key)
            file_ids = list(file_ids)
            file_ids = file_ids[start_index: end_index]

        ret = yield rd.all_resources.find(file_ids, keep_order=bool(sort))
        self.put_search_key((keys, my_uid), private_search_key)
        raise gen.Return((size, ret))

    @gen.coroutine
    def _query_file_ids_by_file_name(self, keys) :
        ukey = make_key_for_global_search()
        if not keys:
            raise gen.Return(ukey)
        global_search_key = self.get_search_key(keys)
        if global_search_key:
            is_exists = yield self.check_exists(global_search_key)
            if is_exists:
                #res = yield self.adb.smembers(global_search_key)
                raise gen.Return(global_search_key)

        pipeline = self.pipeline
        if not keys :
            raise gen.Return([])

        key_list = list()
        for k in keys :
            ismembers = yield self.adb.sismember('stop_keyword_list', k)
            if not ismembers:
                key_name = make_key_for_keyword(k)
                key_list.append(key_name)
                is_exists = yield self.adb.exists(key_name)
                if not is_exists :
                    res = yield self.motorDB.key_fileids.find_one({'key': k}, {'file_ids': 1, '_id' : 0})
                    if res :
                        pipeline.sadd(key_name, *res['file_ids'])

        pipeline.sinterstore(ukey, *key_list)
        pipeline.expire(ukey, EXPIRE_TIME)
        res = yield pipeline.execute()
        self.put_search_key(keys, ukey)
        raise gen.Return(ukey)

    @gen.coroutine
    def sort(self, key, sort, start=0, num=20, desc=True):
        alpha = sort not in ('mtime', 'download_num')
        by = "all_resources:*->" + sort
        res = yield self.adb.sort(key, by=by, alpha=alpha, desc=desc, start=start, num=num)
        raise gen.Return(res)

    @gen.coroutine
    def get_private_resources_by_type(self, my_uid, _type, page, max_resources_cnt_in_page, sort=None, desc=True):
        (start_index, end_index) = ((page - 1) * max_resources_cnt_in_page, page * max_resources_cnt_in_page)
        rd = self.redis_delegator
        size = yield self.load_private_resources(my_uid, _type)
        if 0 == size:
            raise gen.Return((0, []))
        friend_type_key = make_key_for_all_type_list(my_uid, _type)
        if sort:
            res = yield self.sort(friend_type_key, sort, start_index, max_resources_cnt_in_page, desc)
        else:
            res = yield self.adb.smembers(friend_type_key)
            res = list(res)
            res = res[start_index: end_index]

        ret = yield rd.all_resources.find(res, keep_order=bool(sort))
        raise gen.Return((size, ret))

    @gen.coroutine
    def get_private_resources(self, my_uid, page, max_resources_cnt_in_page, sort=None, desc=True):
        (start_index, end_index) = ((page - 1) * max_resources_cnt_in_page, page * max_resources_cnt_in_page)
        rd = self.redis_delegator
        file_ids = yield self.load_private_resources(my_uid)
        size = len(file_ids)
        if sort:
            friend_fileids_set_key = make_key_for_friend_fileids(my_uid)
            file_ids = yield self.sort(friend_fileids_set_key, sort, start_index, max_resources_cnt_in_page, desc)
        else:
            file_ids = file_ids[start_index: end_index]

        ret = yield rd.all_resources.find(file_ids, keep_order=bool(sort))
        raise gen.Return((size, ret))

    @gen.coroutine
    def get_online_friends(self, my_uid):
        pipeline = self.pipeline
        online_friends_key = ':'.join(['my_friends', str(my_uid)])
        is_exists = yield self.check_exists(online_friends_key)
        if not is_exists:
            friend_uid_list = yield getFriendsById(my_uid)
            if friend_uid_list:
                pipeline.sadd(online_friends_key, *[_['uid'] for _ in friend_uid_list])
                pipeline.expire(online_friends_key, EXPIRE_TIME)
                yield pipeline.execute()
        online_friends_uid_list = yield self.adb.sinter(online_friends_key, USER_IP_CACHE_SET_KEY)
        online_friends_uid_list = [long(_) for _ in online_friends_uid_list]
        raise gen.Return(online_friends_uid_list)

    @gen.coroutine
    def load_private_resources_helper(self, my_uid, tp=None):
        pipeline = self.pipeline
        rd = self.redis_delegator
        my_type_resource_set = set()
        my_resource_set = set()
        friend_uid_list = yield self.get_online_friends(my_uid)  # yield getFriendsById(my_uid)
        for uid in friend_uid_list:
            assert uid > 0
            friend_resource = yield rd.resources_of_user(uid).file_ids.get()
            if friend_resource:
                file_in_dir = {}
                fileids_of_one_friend = set()
                for f in friend_resource:
                    if '+' in f:
                        dir_id, file_id = f.split('+')
                        if dir_id in file_in_dir:
                            file_in_dir[dir_id].append(file_id)
                        else:
                            file_in_dir[dir_id] = [file_id]
                    else:
                        fileids_of_one_friend.add(f)
                if file_in_dir:
                    fileids_of_one_friend = fileids_of_one_friend.union(file_in_dir.keys())
                if fileids_of_one_friend:
                    key_fileids_dict = defaultdict(list)
                    resources = yield rd.all_resources.find(fileids_of_one_friend)
                    for r in resources:
                        if r and 'main_type' in r:
                            _type = r['main_type']
                            file_id = r['file_id']
                            key_fileids_dict[_type].append(file_id)
                    for _type, fileids in key_fileids_dict.iteritems():
                        one_friend_type_key = make_key_for_friend_type_list(uid, _type)
                        is_exists = yield self.check_exists(one_friend_type_key)
                        # write when expire
                        if not is_exists:
                            pipeline.sadd(one_friend_type_key, *fileids)
                            pipeline.expire(one_friend_type_key, EXPIRE_TIME)
                        if _type == tp:
                            my_type_resource_set = my_type_resource_set.union(fileids)
                    yield pipeline.execute()
                my_resource_set = my_resource_set.union(fileids_of_one_friend)

        if my_resource_set:
            friend_fileids_set_key = make_key_for_friend_fileids(my_uid)
            pipeline.sadd(friend_fileids_set_key, *my_resource_set)
            pipeline.expire(friend_fileids_set_key, EXPIRE_TIME)
            yield pipeline.execute()
        if my_type_resource_set:
            friend_type_key = make_key_for_all_type_list(my_uid, tp)
            pipeline.sadd(friend_type_key, *my_type_resource_set)
            pipeline.expire(friend_type_key, EXPIRE_TIME)
            yield pipeline.execute()
        if tp:
            raise gen.Return(my_type_resource_set)
        else:
            raise gen.Return(my_resource_set)

    @gen.coroutine
    def load_private_resources(self, my_uid, tp=None):
        adb = self.adb

        if tp is None:
            friend_fileids_set_key = make_key_for_friend_fileids(my_uid)
            is_exists = yield self.check_exists(friend_fileids_set_key)
            if is_exists:
                my_resource_set = yield adb.smembers(friend_fileids_set_key)
                raise gen.Return(list(my_resource_set))
            else:
                res = yield self.load_private_resources_helper(my_uid, tp)
                raise gen.Return(list(res))
        else:
            friend_type_key = make_key_for_all_type_list(my_uid, tp)
            is_exists = yield self.check_exists(friend_type_key)
            if is_exists:
                size = yield self.adb.scard(friend_type_key)
                raise gen.Return(size)
            else:
                friend_uid_list = yield self.get_online_friends(my_uid)  # yield getFriendsById(my_uid)
                if 0 == len(friend_uid_list):
                    raise gen.Return(0)
                else:
                    exists_list = yield self.check_multi_exists([make_key_for_friend_type_list(_, tp) for _ in friend_uid_list])
                    if any(exists_list):
                        size = yield self.adb.sunionstore(friend_type_key, *[make_key_for_friend_type_list(_, tp) for _ in friend_uid_list])
                        raise gen.Return(size)
                    else:
                        res = yield self.load_private_resources_helper(my_uid, tp)
                        raise gen.Return(len(res))

    def init_from_mongo(self):
        db = mongoclient.fbt
        with codecs.open('fn', 'w', 'utf-8') as f:
            for record in db.all_resources.find({"hidden": {"$ne": 1}, "public": 1}, {'file_id': 1, 'file_name': 1, '_id': 0}) :
                if 'file_id' in record and 'file_name' in record :
                    self.file_id_add_title_from_init(record['file_id'], record['file_name'])
                    f.write("%s\t%s\n" % (record['file_id'], record['file_name']))
        self.init_index_in_redis(db)
        db.key_fileids.create_index('key', background=True)

    def add_index_in_redis(self, pymongoDB):
        pipeline = self.pipe
        rdb = self.db
        key_list = list()
        for k, v in self.key_fileidSetDict.iteritems():
            ismembers = rdb.sismember('stop_keyword_list', k)
            if not ismembers:
                if self.db.info()['used_memory'] < MEM_LIMIT:
                    key_list.append(k)
                    key_name = make_key_for_keyword(k)
                    pipeline.sadd(key_name, *v)
            self.mongoDB.key_fileids.update({"key": k}, {"$addToSet": {"file_ids": {"$each": list(v)}}}, True)
        for key in key_list:
            key_name = make_key_for_keyword(key)
            pipeline.scard(key_name)
        # execute() also returns the results of the SADDs queued above, so
        # keep only the trailing SCARD replies before pairing with key_list
        scard_list = pipeline.execute()[-len(key_list):]
        for card, k in zip(scard_list, key_list):
            if card > STOP_KEYWORD_FREQUENCE:
                key_name = make_key_for_keyword(k)
                pipeline.sadd('stop_keyword_list', k)
                pipeline.delete(key_name)
        pipeline.execute()

    def scan_from_mongo(self, last_mtime):
        db = mongoclient.fbt
        if 0 == last_mtime:
            records_list = db.all_resources.find({"hidden": {"$ne": 1}, "public": 1}, {'file_id': 1, 'file_name': 1, 'mtime': 1, '_id': 0})
        else:
            records_list = db.all_resources.find({"hidden": {"$ne": 1}, "public": 1, 'mtime': {"$gt": last_mtime}}, {'file_id': 1, 'file_name': 1, 'mtime': 1, '_id': 0})

        for record in records_list:
            if 'file_id' in record and 'file_name' in record :
                self.file_id_add_title_from_init(record['file_id'], record['file_name'])
                if 'mtime' in record:
                    last_mtime = max(last_mtime, record['mtime'])
        self.add_index_in_redis(db)
        return last_mtime
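
The class above implements keyword search as Redis set algebra: one set per keyword serves as an inverted index, SINTERSTORE materializes the intersection under a short-lived key, and SORT ... BY pages the result by a field of an external hash. A minimal synchronous sketch of the same pattern with plain redis-py (the key names and the 20-item page are stand-ins, not the originals):

import redis

r = redis.StrictRedis()
EXPIRE_TIME = 600  # seconds; stand-in for the constant defined elsewhere

def search_file_ids(keywords, result_key='search:tmp'):
    # One Redis set per keyword: keyword -> set of file_ids.
    key_names = ['keyword:%s' % k for k in keywords
                 if not r.sismember('stop_keyword_list', k)]
    if not key_names:
        return []
    # Intersect the keyword sets into a short-lived result key, so a repeat
    # of the same query within EXPIRE_TIME hits the cached intersection.
    pipe = r.pipeline()
    pipe.sinterstore(result_key, *key_names)
    pipe.expire(result_key, EXPIRE_TIME)
    pipe.execute()
    # Page by an external hash field, the same trick sort() uses above.
    return r.sort(result_key, by='all_resources:*->mtime',
                  desc=True, start=0, num=20)
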
 def __init__(self):
     # self._redis_cache = redis.StrictRedis(password="******", port=6382)
     self._redis_cache = RedisHandler()
Exemple #31
0
 def __getattr__(self, command):
     if self._redis is None:
         self._redis = RedisHandler(tp=self._type)
     return getattr(self._redis, command)
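
The snippet above is a lazy delegating proxy: nothing connects until the first command, and __getattr__ then forwards every attribute lookup to the real client. A self-contained sketch of the pattern, with RealClient standing in for RedisHandler:

class LazyClient(object):
    def __init__(self, tp=None):
        self._type = tp
        self._client = None  # not connected yet

    def __getattr__(self, command):
        # Only invoked for names not found normally, so the attributes set
        # in __init__ never recurse through here.
        if self._client is None:
            self._client = RealClient(self._type)
        return getattr(self._client, command)

class RealClient(object):
    def __init__(self, tp):
        self.tp = tp

    def get(self, key):
        return 'value-of-%s' % key

print(LazyClient().get('foo'))  # connects on first use, then delegates
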
Exemple #32
0
class SyncAws(object):
    def __init__(self):
        self.index_keys = None
        self.redis_handler = RedisHandler(
            host=app.config.get('REDIS_HOST'),
            port=app.config.get('REDIS_PORT_NO'),
            password=app.config.get('REDIS_PASSWORD'),
            idle_timeout=app.config.get('REDIS_IDLE_TIMEOUT'),
        )
        self.pricing_api = AwsPricingApi()
        # AWS details
        self.apikey = app.config.get('AWS_ACCESS_KEY_ID')
        self.apisecret = app.config.get('AWS_SECRET_ACCESS_KEY')
        self.owner_id = app.config.get('AWS_OWNER_ID')
        self.regions = app.config.get('REGIONS')
        # Timeout
        self.expire = app.config.get('EXPIRE_DURATION')
        self.sync_timeout = app.config.get('SYNC_TIMEOUT')


    def background_sync(self):
        gevent.spawn_raw(self.sync)


    def get_last_sync_time(self):
        last_sync_time = self.redis_handler.get_last_sync_time()
        if last_sync_time:
            return int(last_sync_time)
        return 0


    def sync(self):
        sync_lock = self.redis_handler.get_sync_lock()
        if sync_lock:
            return
        self.redis_handler.set_sync_lock(timeout=self.sync_timeout)
        self.index_keys = []
        thread_list = self.sync_ec2()
        print 'Sync Started... . . .  .  .   .     .     .'
        gevent.joinall(thread_list, timeout=self.sync_timeout)
        gevent.killall(thread_list)
        print 'Details saved. Indexing records!'
        self.index_records()
        self.redis_handler.set_last_sync_time()
        print 'Starting cleanup of stale records...'
        self.redis_handler.cleanup_keys(self.index_keys)
        self.redis_handler.set_sync_lock(timeout=0)
        print 'Complete'


    def is_sync_in_progress(self):
        sync_lock = self.redis_handler.get_sync_lock()
        if sync_lock:
            return True
        return False


    def sync_ec2(self):
        if (self.regions is None) or (self.regions == 'all'):
            region_list = get_region_list()
        else:
            region_list = [r.strip() for r in self.regions.split(',')]
        thread_list = []
        for region in region_list:
            thread = gevent.spawn(self.sync_ec2_instances, region)
            thread_list.append(thread)
            thread = gevent.spawn(self.sync_ebs_volumes, region)
            thread_list.append(thread)
            thread = gevent.spawn(self.sync_ebs_snapshots, region)
            thread_list.append(thread)
            thread = gevent.spawn(self.sync_ec2_elbs, region)
            thread_list.append(thread)
            thread = gevent.spawn(self.sync_elastic_ips, region)
            thread_list.append(thread)
        return thread_list


    def sync_ec2_instances(self, region):
        ec2_handler = Ec2Handler(self.apikey, self.apisecret, region)
        try:
            instance_list = ec2_handler.fetch_all_instances()
        except Exception as e:
            print "Exception for EC2 in Region: %s, message: %s" \
                % (region, e.message)
            return
        for instance in instance_list:
            instance_details = ec2_handler.get_instance_details(instance)
            if instance_details.get('state') == 'running':
                per_hr_cost = self.pricing_api.get_instance_per_hr_cost(
                    region, instance_details.get('instance_type'),
                    instance_details.get('platform'))
                instance_details['per_hour_cost'] = per_hr_cost
            else:
                instance_details['per_hour_cost'] = 0.0
            instance_details['timestamp'] = int(time.time())
            if instance_details.get('ebs_ids'):
                for volume_id in instance_details.get('ebs_ids').split(','):
                    ebs_details = self.redis_handler.get_ebs_volume_details(
                        region, volume_id)
                    if not ebs_details:
                        continue
                    ebs_details['instance_id'] = instance_details.get('instance_id')
                    if not instance_details.get('tag_keys'):
                        continue
                    tag_keys = set(instance_details.get('tag_keys').split(','))
                    if ebs_details.get('tag_keys'):
                        tag_keys.update(set(ebs_details.get('tag_keys').split(',')))
                    ebs_details['tag_keys'] = ','.join(tag_keys)
                    for tag_name in instance_details.get('tag_keys').split(','):
                        tag_value = set(instance_details.get('tag:%s' % tag_name, '').split(','))
                        if ebs_details.get('tag:%s' % tag_name, '').strip():
                            old_value = set(ebs_details.get('tag:%s' % tag_name).split(','))
                            tag_value.update(old_value)
                        if not tag_value:
                            continue
                        ebs_details['tag:%s' % tag_name] = ','.join(tag_value)
                    self.redis_handler.save_ebs_vol_details(ebs_details)
            hash_key, _ = self.redis_handler.save_instance_details(
                instance_details)
            self.index_keys.append(hash_key)
            if self.expire > 0:
                self.redis_handler.expire(hash_key, self.expire)
        print "Instance sync complete for ec2 region: %s" % region


    def sync_ec2_elbs(self, region):
        ec2_handler = Ec2Handler(self.apikey, self.apisecret, region)
        try:
            elb_list = ec2_handler.fetch_all_elbs()
        except Exception as e:
            print "Exception for ELB in Region: %s, message: %s" % (region,
                                                                    e.message)
            return
        for elb in elb_list:
            details, instance_id_list = ec2_handler.get_elb_details(elb)
            per_hr_cost = self.pricing_api.get_elb_per_hr_cost(region)
            details['per_hour_cost'] = per_hr_cost
            details['timestamp'] = int(time.time())
            for instance_id in instance_id_list:
                instance_elb_names = self.redis_handler.get_instance_item_value(
                    region=details.get('region'),
                    instance_id=instance_id,
                    key='instance_elb_names'
                ) or ''
                instance_elb_names = set(instance_elb_names.split(','))
                if '' in instance_elb_names:
                    instance_elb_names.remove('')
                instance_elb_names.add(elb.name)
                instance_elb_names = ','.join(instance_elb_names)
                self.redis_handler.add_instance_detail(
                    region=details.get('region'),
                    instance_id=instance_id,
                    key='instance_elb_names',
                    value=instance_elb_names,
                )
                ## Add Instance tags in elb details
                i_details = self.redis_handler.get_instance_details(
                    details.get('region'), instance_id)
                if i_details.get('tag_keys'):
                    tag_keys = set(i_details.get('tag_keys').split(','))
                    if details.get('tag_keys'):
                        old_keys = set(details.get('tag_keys').split(','))
                        tag_keys.update(old_keys)
                    details['tag_keys'] = ','.join(tag_keys)
                    for tag_name in i_details['tag_keys'].split(','):
                        tag_value = i_details.get('tag:%s' % tag_name, '').strip()
                        if not tag_value:
                            continue
                        tag_value = set(tag_value.split(','))
                        if details.get('tag:%s' % tag_name):
                            old_value = set(details.get('tag:%s' % tag_name).split(','))
                            tag_value.update(old_value)
                        details['tag:%s' % tag_name] = ','.join(tag_value)
            hash_key, _ = self.redis_handler.save_elb_details(details)
            self.index_keys.append(hash_key)
            if self.expire > 0:
                self.redis_handler.expire(hash_key, self.expire)
        print "ELB sync complete for ec2 region: %s" % region


    def sync_elastic_ips(self, region):
        ec2_handler = Ec2Handler(self.apikey, self.apisecret, region)
        try:
            elastic_ip_list = ec2_handler.fetch_elastic_ips()
        except Exception as e:
            print "Exception for Elastic IPs in Region: %s, message: %s" \
                  % (region, e.message)
            return
        for elastic_ip in elastic_ip_list:
            details = ec2_handler.get_elastic_ip_details(elastic_ip)
            details['timestamp'] = int(time.time())
            if details.get('instance_id'):
                details['per_hour_cost'] = 0.0
                ## Add Instance tags in Elastic IP details
                i_details = self.redis_handler.get_instance_details(
                    region, details.get('instance_id'))
                if i_details.get('tag_keys'):
                    details['tag_keys'] = i_details.get('tag_keys')
                    for tag_name in i_details['tag_keys'].split(','):
                        tag_value = i_details.get('tag:%s' % tag_name, '').strip()
                        if not tag_value:
                            continue
                        details['tag:%s' % tag_name] = tag_value
            else:
                per_hr_cost = self.pricing_api.get_elastic_ip_per_hr_cost(region)
                details['per_hour_cost'] = per_hr_cost
            hash_key, _ = self.redis_handler.save_elastic_ip_details(details)
            self.index_keys.append(hash_key)
            if self.expire > 0:
                self.redis_handler.expire(hash_key, self.expire)
        print "Elastic ip sync complete for ec2 region: %s" % region


    def sync_ebs_volumes(self, region):
        ec2_handler = Ec2Handler(self.apikey, self.apisecret, region)
        try:
            ebs_volume_list = ec2_handler.fetch_ebs_volumes()
        except Exception as e:
            print "Exception for EBS Volumes in Region: %s, message: %s" \
                  % (region, e.message)
            return
        for ebs_volume in ebs_volume_list:
            details = ec2_handler.get_ebs_details(ebs_volume)
            per_gbm_cost = self.pricing_api.get_ebs_volume_cost(
                region, details.get('type'), 'per_gbm')
            io_cost = 0.0  # default so io_cost is defined for unpriced volume types
            if details.get('type') == 'standard':
                per_mior_cost = self.pricing_api.get_ebs_volume_cost(
                    region, details.get('type'), 'per_mior')
                io_cost = "variable"
                details['per_mior_cost'] = per_mior_cost
            elif details.get('type') == 'gp2':
                io_cost = 0.0
            elif details.get('type') == 'io1':
                per_iops_cost = self.pricing_api.get_ebs_volume_cost(
                    region, details.get('type'), 'per_iops')
                iops_count = details.get('iops', 0)
                if iops_count and per_iops_cost:
                    io_cost = iops_count * per_iops_cost
                else:
                    io_cost = 'Not Found'
                details['per_iops_cost'] = per_iops_cost
            details['per_gbm_storage_cost'] = per_gbm_cost
            monthly_cost = per_gbm_cost * details.get('size')
            if isinstance(io_cost, int) or isinstance(io_cost, float):
                monthly_cost += io_cost
            details['monthly_cost'] = monthly_cost
            details['timestamp'] = int(time.time())
            hash_key, _ = self.redis_handler.save_ebs_vol_details(details)
            self.index_keys.append(hash_key)
            if self.expire > 0:
                self.redis_handler.expire(hash_key, self.expire)
        print "EBS volume sync complete for ec2 region: %s" % region


    def sync_ebs_snapshots(self, region):
        if not self.owner_id:
            return
        ec2_handler = Ec2Handler(self.apikey, self.apisecret, region)
        try:
            ebs_snapshot_list = ec2_handler.fetch_ebs_snapshots(
                owner_id=self.owner_id)
        except Exception as e:
            print "Exception for EBS Snapshots in Region: %s, message: %s" \
                  % (region, e.message)
            return
        for snapshot in ebs_snapshot_list:
            details = ec2_handler.get_snapshot_details(snapshot)
            per_gbm_cost = self.pricing_api.get_ebs_volume_cost(
                region, 'ebs-snapshot', 'per_gbm_stored')
            details['per_gbm_storage_cost'] = per_gbm_cost
            # Upper bound only: the actual snapshot cost depends on the data
            # stored, hence monthly_cost is reported as variable below.
            max_monthly_cost = per_gbm_cost * details.get('volume_size')
            details['monthly_cost'] = 'Variable. Depends on actual data stored'
            details['timestamp'] = int(time.time())
            ## Map parent volume tags with snapshot
            ebs_details = self.redis_handler.get_ebs_volume_details(
                    region, details.get('parent_volume_id'))
            if ebs_details:
                if ebs_details.get('instance_id'):
                    details['instance_id'] = ebs_details.get('instance_id')
                if ebs_details.get('tag_keys'):
                    tag_keys = set(ebs_details.get('tag_keys').split(','))
                    if details.get('tag_keys'):
                        tag_keys.update(set(details.get('tag_keys').split(',')))
                    details['tag_keys'] = ','.join(tag_keys)
                    for tag_name in ebs_details.get('tag_keys').split(','):
                        tag_value = set(ebs_details.get('tag:%s' % tag_name, '').split(','))
                        if details.get('tag:%s' % tag_name, '').strip():
                            old_value = set(details.get('tag:%s' % tag_name).split(','))
                            tag_value.update(old_value)
                        if not tag_value:
                            continue
                        details['tag:%s' % tag_name] = ','.join(tag_value)
            ## Save data
            hash_key, _ = self.redis_handler.save_ebs_snapshot_details(details)
            self.index_keys.append(hash_key)
            if self.expire > 0:
                self.redis_handler.expire(hash_key, self.expire)
        print "EBS snapshot sync complete for ec2 region: %s" % region


    def index_records(self):
        indexed_tags = {}
        # register every hash key under the catch-all ALL_RESOURCE_INDEX entry
        self.save_index(','.join(self.index_keys), ALL_RESOURCE_INDEX)
        for hash_key in self.index_keys:
            details = self.redis_handler.get_details(hash_key)
            tag_keys = details.get('tag_keys', '').split(',')
            if '' in tag_keys:
                tag_keys.remove('')
            for tag_name in tag_keys:
                tag_value = details.get('tag:%s' % tag_name, '').strip()
                if not tag_value:
                    continue
                tag_value = "%s:%s" % (tag_name, tag_value)
                self.save_index(hash_key, tag_value)
                if ',' in tag_value:
                    for sub_values in tag_value.split(','):
                        self.save_index(hash_key, sub_values)
                if indexed_tags.get(tag_name, None):
                    value_list = indexed_tags[tag_name].split(',')
                    value_list.append(tag_value)
                    indexed_tags[tag_name] = ','.join(set(value_list))
                else:
                    indexed_tags[tag_name] = tag_value
        if indexed_tags:
            self.redis_handler.save_indexed_tags(indexed_tags)


    def save_index(self, hash_key, value):
        index_value = self.redis_handler.get_index(value)
        if index_value:
            index_value = "%s,%s" % (index_value, hash_key)
        else:
            index_value = hash_key
        ## Remove redundant values
        indexed_keys = set(index_value.split(','))
        ## Clean stale index entries
        for k in indexed_keys.copy():
            if not self.redis_handler.exists(k):
                indexed_keys.remove(k)
        ## Save Index
        if len(indexed_keys) > 0:
            self.redis_handler.save_index(value, ','.join(indexed_keys))
            self.redis_handler.expire_index(value, self.expire)
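
sync() above serializes whole runs with a Redis lock that expires after sync_timeout, so a crashed worker cannot hold it forever; set_sync_lock(timeout=0) releases it at the end. A minimal sketch of such a lock using redis-py's SET NX EX (the key name is an assumption):

import redis

r = redis.StrictRedis()

def try_sync(timeout=900):
    # Take the lock only if nobody holds it; the TTL is the crash safety net.
    if not r.set('sync:lock', '1', nx=True, ex=timeout):
        return False  # another sync is already in progress
    try:
        pass  # ... the actual sync work goes here ...
    finally:
        r.delete('sync:lock')  # the equivalent of set_sync_lock(timeout=0)
    return True
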
Exemple #33
0
class SyncAws(object):
    def __init__(self):
        self.index_keys = None
        self.redis_handler = RedisHandler(
            host=app.config.get('REDIS_HOST'),
            port=app.config.get('REDIS_PORT_NO'),
            password=app.config.get('REDIS_PASSWORD'),
            idle_timeout=app.config.get('REDIS_IDLE_TIMEOUT'),
        )
        self.pricing_api = AwsPricingApi()
        # AWS details
        self.apikey = app.config.get('AWS_ACCESS_KEY_ID')
        self.apisecret = app.config.get('AWS_SECRET_ACCESS_KEY')
        self.owner_id = app.config.get('AWS_OWNER_ID')
        self.regions = app.config.get('REGIONS')
        # Timeout
        self.expire = app.config.get('EXPIRE_DURATION')
        self.sync_timeout = app.config.get('SYNC_TIMEOUT')

    def background_sync(self):
        gevent.spawn_raw(self.sync)

    def get_last_sync_time(self):
        last_sync_time = self.redis_handler.get_last_sync_time()
        if last_sync_time:
            return int(last_sync_time)
        return 0

    def sync(self):
        sync_lock = self.redis_handler.get_sync_lock()
        if sync_lock:
            return
        self.redis_handler.set_sync_lock(timeout=self.sync_timeout)
        self.index_keys = []
        thread_list = self.sync_ec2()
        print 'Sync Started... . . .  .  .   .     .     .'
        gevent.joinall(thread_list, timeout=self.sync_timeout)
        gevent.killall(thread_list)
        print 'Details saved. Indexing records!'
        self.index_records()
        self.redis_handler.set_last_sync_time()
        print 'Starting cleanup of stale records...'
        self.redis_handler.cleanup_keys(self.index_keys)
        self.redis_handler.set_sync_lock(timeout=0)
        print 'Complete'

    def is_sync_in_progress(self):
        sync_lock = self.redis_handler.get_sync_lock()
        if sync_lock:
            return True
        return False

    def sync_ec2(self):
        if (self.regions is None) or (self.regions == 'all'):
            region_list = get_region_list()
        else:
            region_list = [r.strip() for r in self.regions.split(',')]
        thread_list = []
        for region in region_list:
            thread = gevent.spawn(self.sync_ec2_instances, region)
            thread_list.append(thread)
            thread = gevent.spawn(self.sync_ebs_volumes, region)
            thread_list.append(thread)
            thread = gevent.spawn(self.sync_ebs_snapshots, region)
            thread_list.append(thread)
            thread = gevent.spawn(self.sync_ec2_elbs, region)
            thread_list.append(thread)
            thread = gevent.spawn(self.sync_elastic_ips, region)
            thread_list.append(thread)
        return thread_list

    def sync_ec2_instances(self, region):
        ec2_handler = Ec2Handler(self.apikey, self.apisecret, region)
        try:
            instance_list = ec2_handler.fetch_all_instances()
        except Exception as e:
            print "Exception for EC2 in Region: %s, message: %s" \
                % (region, e.message)
            return
        for instance in instance_list:
            instance_details = ec2_handler.get_instance_details(instance)
            if instance_details.get('state') == 'running':
                per_hr_cost = self.pricing_api.get_instance_per_hr_cost(
                    region, instance_details.get('instance_type'),
                    instance_details.get('platform'))
                instance_details['per_hour_cost'] = per_hr_cost
            else:
                instance_details['per_hour_cost'] = 0.0
            instance_details['timestamp'] = int(time.time())
            if instance_details.get('ebs_ids'):
                for volume_id in instance_details.get('ebs_ids').split(','):
                    ebs_details = self.redis_handler.get_ebs_volume_details(
                        region, volume_id)
                    if not ebs_details:
                        continue
                    ebs_details['instance_id'] = instance_details.get(
                        'instance_id')
                    if not instance_details.get('tag_keys'):
                        continue
                    tag_keys = set(instance_details.get('tag_keys').split(','))
                    if ebs_details.get('tag_keys'):
                        tag_keys.update(
                            set(ebs_details.get('tag_keys').split(',')))
                    ebs_details['tag_keys'] = ','.join(tag_keys)
                    for tag_name in instance_details.get('tag_keys').split(
                            ','):
                        tag_value = set(
                            instance_details.get('tag:%s' % tag_name,
                                                 '').split(','))
                        if ebs_details.get('tag:%s' % tag_name, '').strip():
                            old_value = set(
                                ebs_details.get('tag:%s' %
                                                tag_name).split(','))
                            tag_value.update(old_value)
                        if not tag_value:
                            continue
                        ebs_details['tag:%s' % tag_name] = ','.join(tag_value)
                    self.redis_handler.save_ebs_vol_details(ebs_details)
            hash_key, _ = self.redis_handler.save_instance_details(
                instance_details)
            self.index_keys.append(hash_key)
            if self.expire > 0:
                self.redis_handler.expire(hash_key, self.expire)
        print "Instance sync complete for ec2 region: %s" % region

    def sync_ec2_elbs(self, region):
        ec2_handler = Ec2Handler(self.apikey, self.apisecret, region)
        try:
            elb_list = ec2_handler.fetch_all_elbs()
        except Exception as e:
            print "Exception for ELB in Region: %s, message: %s" % (region,
                                                                    e.message)
            return
        for elb in elb_list:
            details, instance_id_list = ec2_handler.get_elb_details(elb)
            per_hr_cost = self.pricing_api.get_elb_per_hr_cost(region)
            details['per_hour_cost'] = per_hr_cost
            details['timestamp'] = int(time.time())
            for instance_id in instance_id_list:
                instance_elb_names = self.redis_handler.get_instance_item_value(
                    region=details.get('region'),
                    instance_id=instance_id,
                    key='instance_elb_names') or ''
                instance_elb_names = set(instance_elb_names.split(','))
                if '' in instance_elb_names:
                    instance_elb_names.remove('')
                instance_elb_names.add(elb.name)
                instance_elb_names = ','.join(instance_elb_names)
                self.redis_handler.add_instance_detail(
                    region=details.get('region'),
                    instance_id=instance_id,
                    key='instance_elb_names',
                    value=instance_elb_names,
                )
                ## Add Instance tags in elb details
                i_details = self.redis_handler.get_instance_details(
                    details.get('region'), instance_id)
                if i_details.get('tag_keys'):
                    tag_keys = set(i_details.get('tag_keys').split(','))
                    if details.get('tag_keys'):
                        old_keys = set(details.get('tag_keys').split(','))
                        tag_keys.update(old_keys)
                    details['tag_keys'] = ','.join(tag_keys)
                    for tag_name in i_details['tag_keys'].split(','):
                        tag_value = i_details.get('tag:%s' % tag_name,
                                                  '').strip()
                        if not tag_value:
                            continue
                        tag_value = set(tag_value.split(','))
                        if details.get('tag:%s' % tag_name):
                            old_value = set(
                                details.get('tag:%s' % tag_name).split(','))
                            tag_value.update(old_value)
                        details['tag:%s' % tag_name] = ','.join(tag_value)
            hash_key, _ = self.redis_handler.save_elb_details(details)
            self.index_keys.append(hash_key)
            if self.expire > 0:
                self.redis_handler.expire(hash_key, self.expire)
        print "ELB sync complete for ec2 region: %s" % region

    def sync_elastic_ips(self, region):
        ec2_handler = Ec2Handler(self.apikey, self.apisecret, region)
        try:
            elastic_ip_list = ec2_handler.fetch_elastic_ips()
        except Exception as e:
            print "Exception for Elastic IPs in Region: %s, message: %s" \
                  % (region, e.message)
            return
        for elastic_ip in elastic_ip_list:
            details = ec2_handler.get_elastic_ip_details(elastic_ip)
            details['timestamp'] = int(time.time())
            if details.get('instance_id'):
                details['per_hour_cost'] = 0.0
                ## Add Instance tags in Elastic IP details
                i_details = self.redis_handler.get_instance_details(
                    region, details.get('instance_id'))
                if i_details.get('tag_keys'):
                    details['tag_keys'] = i_details.get('tag_keys')
                    for tag_name in i_details['tag_keys'].split(','):
                        tag_value = i_details.get('tag:%s' % tag_name,
                                                  '').strip()
                        if not tag_value:
                            continue
                        details['tag:%s' % tag_name] = tag_value
            else:
                per_hr_cost = self.pricing_api.get_elastic_ip_per_hr_cost(
                    region)
                details['per_hour_cost'] = per_hr_cost
            hash_key, _ = self.redis_handler.save_elastic_ip_details(details)
            self.index_keys.append(hash_key)
            if self.expire > 0:
                self.redis_handler.expire(hash_key, self.expire)
        print "Elastic ip sync complete for ec2 region: %s" % region

    def sync_ebs_volumes(self, region):
        ec2_handler = Ec2Handler(self.apikey, self.apisecret, region)
        try:
            ebs_volume_list = ec2_handler.fetch_ebs_volumes()
        except Exception as e:
            print "Exception for EBS Volumes in Region: %s, message: %s" \
                  % (region, e.message)
            return
        for ebs_volume in ebs_volume_list:
            details = ec2_handler.get_ebs_details(ebs_volume)
            per_gbm_cost = self.pricing_api.get_ebs_volume_cost(
                region, details.get('type'), 'per_gbm')
            io_cost = 0.0  # default so io_cost is defined for unpriced volume types
            if details.get('type') == 'standard':
                per_mior_cost = self.pricing_api.get_ebs_volume_cost(
                    region, details.get('type'), 'per_mior')
                io_cost = "variable"
                details['per_mior_cost'] = per_mior_cost
            elif details.get('type') == 'gp2':
                io_cost = 0.0
            elif details.get('type') == 'io1':
                per_iops_cost = self.pricing_api.get_ebs_volume_cost(
                    region, details.get('type'), 'per_iops')
                iops_count = details.get('iops', 0)
                if iops_count and per_iops_cost:
                    io_cost = iops_count * per_iops_cost
                else:
                    io_cost = 'Not Found'
                details['per_iops_cost'] = per_iops_cost
            details['per_gbm_storage_cost'] = per_gbm_cost
            monthly_cost = per_gbm_cost * details.get('size')
            if isinstance(io_cost, int) or isinstance(io_cost, float):
                monthly_cost += io_cost
            details['monthly_cost'] = monthly_cost
            details['timestamp'] = int(time.time())
            hash_key, _ = self.redis_handler.save_ebs_vol_details(details)
            self.index_keys.append(hash_key)
            if self.expire > 0:
                self.redis_handler.expire(hash_key, self.expire)
        print "EBS volume sync complete for ec2 region: %s" % region

    def sync_ebs_snapshots(self, region):
        if not self.owner_id:
            return
        ec2_handler = Ec2Handler(self.apikey, self.apisecret, region)
        try:
            ebs_snapshot_list = ec2_handler.fetch_ebs_snapshots(
                owner_id=self.owner_id)
        except Exception as e:
            print "Exception for EBS Snapshots in Region: %s, message: %s" \
                  % (region, e.message)
            return
        for snapshot in ebs_snapshot_list:
            details = ec2_handler.get_snapshot_details(snapshot)
            per_gbm_cost = self.pricing_api.get_ebs_volume_cost(
                region, 'ebs-snapshot', 'per_gbm_stored')
            details['per_gbm_storage_cost'] = per_gbm_cost
            # Upper bound only: the actual snapshot cost depends on the data
            # stored, hence monthly_cost is reported as variable below.
            max_monthly_cost = per_gbm_cost * details.get('volume_size')
            details['monthly_cost'] = 'Variable. Depends on actual data stored'
            details['timestamp'] = int(time.time())
            ## Map parent volume tags with snapshot
            ebs_details = self.redis_handler.get_ebs_volume_details(
                region, details.get('parent_volume_id'))
            if ebs_details:
                if ebs_details.get('instance_id'):
                    details['instance_id'] = ebs_details.get('instance_id')
                if ebs_details.get('tag_keys'):
                    tag_keys = set(ebs_details.get('tag_keys').split(','))
                    if details.get('tag_keys'):
                        tag_keys.update(set(
                            details.get('tag_keys').split(',')))
                    details['tag_keys'] = ','.join(tag_keys)
                    for tag_name in ebs_details.get('tag_keys').split(','):
                        tag_value = set(
                            ebs_details.get('tag:%s' % tag_name,
                                            '').split(','))
                        if details.get('tag:%s' % tag_name, '').strip():
                            old_value = set(
                                details.get('tag:%s' % tag_name).split(','))
                            tag_value.update(old_value)
                        if not tag_value:
                            continue
                        details['tag:%s' % tag_name] = ','.join(tag_value)
            ## Save data
            hash_key, _ = self.redis_handler.save_ebs_snapshot_details(details)
            self.index_keys.append(hash_key)
            if self.expire > 0:
                self.redis_handler.expire(hash_key, self.expire)
        print "EBS snapshot sync complete for ec2 region: %s" % region

    def index_records(self):
        indexed_tags = {}
        # register every hash key under the catch-all ALL_RESOURCE_INDEX entry
        self.save_index(','.join(self.index_keys), ALL_RESOURCE_INDEX)
        for hash_key in self.index_keys:
            details = self.redis_handler.get_details(hash_key)
            tag_keys = details.get('tag_keys', '').split(',')
            if '' in tag_keys:
                tag_keys.remove('')
            for tag_name in tag_keys:
                tag_value = details.get('tag:%s' % tag_name, '').strip()
                if not tag_value:
                    continue
                tag_value = "%s:%s" % (tag_name, tag_value)
                self.save_index(hash_key, tag_value)
                if ',' in tag_value:
                    for sub_values in tag_value.split(','):
                        self.save_index(hash_key, sub_values)
                if indexed_tags.get(tag_name, None):
                    value_list = indexed_tags[tag_name].split(',')
                    value_list.append(tag_value)
                    indexed_tags[tag_name] = ','.join(set(value_list))
                else:
                    indexed_tags[tag_name] = tag_value
        if indexed_tags:
            self.redis_handler.save_indexed_tags(indexed_tags)

    def save_index(self, hash_key, value):
        index_value = self.redis_handler.get_index(value)
        if index_value:
            index_value = "%s,%s" % (index_value, hash_key)
        else:
            index_value = hash_key
        ## Remove redundant values
        indexed_keys = set(index_value.split(','))
        ## Clean stale index entries
        for k in indexed_keys.copy():
            if not self.redis_handler.exists(k):
                indexed_keys.remove(k)
        ## Save Index
        if len(indexed_keys) > 0:
            self.redis_handler.save_index(value, ','.join(indexed_keys))
            self.redis_handler.expire_index(value, self.expire)
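
Both copies of SyncAws repeat the same merge dance for comma-separated tag strings: split, union as sets, rejoin. A small hypothetical helper capturing that step:

def merge_csv(*values):
    merged = set()
    for value in values:
        merged.update(v for v in (value or '').split(',') if v.strip())
    return ','.join(merged)

# e.g. propagating instance tags onto an EBS volume:
#   ebs_details['tag_keys'] = merge_csv(instance_details.get('tag_keys'),
#                                       ebs_details.get('tag_keys'))
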
Exemple #34
0
def updateFriendsById(uid):
    uid = long(uid)
    uid_tmp = str(uid)
    user = yield motorclient.fbt_realtime.users.find_one({"uid": uid}, {"_id": 0, "friends": 1})
    if user:
        RedisHandler.f_hset(FRIENDINFO, uid_tmp, json.dumps(user["friends"]))
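
This write path pairs with the read-through getters seen elsewhere in this collection: check the Redis hash first, fall back to Mongo on a miss, and backfill the hash. A synchronous sketch (the client objects and the FRIENDINFO value are stand-ins):

import json
import redis

r = redis.StrictRedis()
FRIENDINFO = 'friend_info'  # hash: uid -> JSON-encoded friends list

def get_friends(uid, users_collection):
    cached = r.hget(FRIENDINFO, str(uid))
    if cached:
        return json.loads(cached)
    user = users_collection.find_one({'uid': uid}, {'_id': 0, 'friends': 1})
    if user:
        r.hset(FRIENDINFO, str(uid), json.dumps(user['friends']))
        return user['friends']
    return None
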
Exemple #35
0
        LogForUser.set_db(log_db)
        #FBCoinManager.set_update_fb_callback(SocketHandler.update_fb)
        #FBRankManager.initialize() #load rank info from file
        #FBRankTimer.set_io_loop(ioloop)
        #FBRankTimer.run() #backup the weekly and monthly rank
        
        SocketHandler.set_io_loop(ioloop)
        SocketHandler.init()
        ioloop.add_timeout(long(time()) + 3600, lambda: SocketHandler.check_on_line())
        ioloop.start()
    except Exception, e:
        print e
        print "OK. I will exit..."
    finally:
        SocketHandler.sub_user_coin.close()
        SocketHandler.sub_user_login.close()
        SocketHandler.sub_user_inform.close()
        # close sub client
        
        RedisHandler.f_save()
        util.MemCache.save(str(options.port))
        #FBRankManager.save_info_to_file() #backup rank info to file
class NullHandler(logging.Handler):
    def __init__(self, level=logging.ERROR):
        logging.Handler.__init__(self, level)

    def emit(self, record):
        pass

if __name__ == "__main__":
    main()
Exemple #36
0
def fetch_token_in_cache(uid):
    return RedisHandler.f_get(str(uid), RedisHandler.type_token)
Exemple #37
0
        SocketHandler.set_io_loop(ioloop)
        SocketHandler.init()
        ioloop.add_timeout(
            long(time()) + 3600, lambda: SocketHandler.check_on_line())
        ioloop.start()
    except Exception, e:
        print e
        print "OK. I will exit..."
    finally:
        SocketHandler.sub_user_coin.close()
        SocketHandler.sub_user_login.close()
        SocketHandler.sub_user_inform.close()
        # close sub client

        RedisHandler.f_save()
        util.MemCache.save(str(options.port))


        #FBRankManager.save_info_to_file() #backup rank info to file
class NullHandler(logging.Handler):
    def __init__(self, level=logging.ERROR):
        logging.Handler.__init__(self, level)

    def emit(self, record):
        pass


if __name__ == "__main__":
    main()
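
Note that add_timeout above fires check_on_line() exactly once, an hour after startup; a recurring check has to re-arm itself, or use Tornado's PeriodicCallback, whose interval is given in milliseconds. A sketch:

from tornado import ioloop

def check_on_line():
    pass  # stand-in for SocketHandler.check_on_line()

# 3600 * 1000 ms: run the check every hour on the current IOLoop.
ioloop.PeriodicCallback(check_on_line, 3600 * 1000).start()
ioloop.IOLoop.instance().start()
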
Exemple #38
0
import json
import logging

from telegram import (ReplyKeyboardMarkup, ReplyKeyboardRemove)
from telegram.ext import (Updater, CommandHandler, MessageHandler, Filters,
                          ConversationHandler)

from redis_handler import RedisHandler

logger = logging.getLogger(__name__)

NEW_QUESTION, ANSWER = range(2)

custom_keyboard = [['Новый вопрос', 'Сдаться'], ['Мой счет']]  # ['New question', 'Give up'], ['My score']
markup = ReplyKeyboardMarkup(custom_keyboard)

redis_base = RedisHandler()


def start(update, context):
    score_id = f'score_tg_{update.effective_chat.id}'
    redis_base.initiate_score(score_id)

    update.message.reply_text(
        'Привет! Я бот для викторин\n'          # 'Hi! I am a quiz bot'
        'Для начала нажми на "Новый вопрос"\n'  # 'To start, press "New question"'
        'Для завершения игры /cancel',          # 'To end the game, send /cancel'
        reply_markup=markup)

    return NEW_QUESTION
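
A sketch of how the start handler above plugs into the conversation, using the pre-v13 python-telegram-bot API implied by the imports; handle_new_question, handle_answer and the token are assumptions:

def handle_new_question(update, context):
    question = redis_base.get_question()  # hypothetical RedisHandler method
    update.message.reply_text(question)
    return ANSWER

def handle_answer(update, context):
    update.message.reply_text('...')  # check the answer against Redis here
    return NEW_QUESTION

def cancel(update, context):
    update.message.reply_text('Пока!',  # 'Bye!'
                              reply_markup=ReplyKeyboardRemove())
    return ConversationHandler.END

updater = Updater(token='TOKEN', use_context=True)
updater.dispatcher.add_handler(ConversationHandler(
    entry_points=[CommandHandler('start', start)],
    states={
        NEW_QUESTION: [MessageHandler(Filters.text, handle_new_question)],
        ANSWER: [MessageHandler(Filters.text, handle_answer)],
    },
    fallbacks=[CommandHandler('cancel', cancel)],
))
updater.start_polling()
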

Exemple #39
0
class TagReport(object):
    def __init__(self):
        self.redis_handler = RedisHandler(
            host=app.config.get('REDIS_HOST'),
            port=app.config.get('REDIS_PORT_NO'),
            password=app.config.get('REDIS_PASSWORD'),
            idle_timeout=app.config.get('REDIS_IDLE_TIMEOUT'),
        )
        self.get_tag_resources = self._cache_function(self.get_tag_resources)


    def _cache_function(self, f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            kwargs_keys = kwargs.keys()
            kwargs_keys.sort()
            path = "%s/%s/%s" % (f.func_name, '/'.join(args),
                '/'.join(['%s=%s' % (k, kwargs[k]) for k in kwargs_keys]))
            cached_response = self.redis_handler.get_cached_object(path)
            if cached_response:
                return json.loads(cached_response)
            response = f(*args, **kwargs)
            expire_duration = app.config.get('EXPIRE_DURATION')
            gevent.spawn_raw(self.redis_handler.set_object_cache, path,
                             json.dumps(response), expire_duration)
            return response
        return decorated_function


    def get_tags_info(self):
        tags_info = self.redis_handler.get_indexed_tags()
        for k, v in tags_info.items():
            if not v:
                tags_info[k] = []
                continue
            value_list = []
            for key_value in v.split(','):
                if key_value.startswith('%s:' % k):
                    value_list.append(key_value[(len(k) + 1):])
                else:
                    value_list.append(key_value)
            tags_info[k] = value_list
        return tags_info


    def get_tag_resources(self, tag_key, tag_value=None):
        tags_info = self.get_tags_info()
        if (not tag_value) or (tag_value.lower() == 'all'):
            process_tag_values = tags_info.get(tag_key, [])
        else:
            process_tag_values = tag_value.split(',')
        hash_set = set()
        for value in process_tag_values:
            if value.strip():
                value = "%s:%s" % (tag_key, value)
                hash_keys = self.redis_handler.get_index(value) or ''
                if hash_keys:
                    hash_set.update(hash_keys.split(','))
        # All Hash keys
        # get_index() may return None when the index is empty or expired
        all_hash_keys = self.redis_handler.get_index(ALL_RESOURCE_INDEX) or ''
        all_hash_set = set(all_hash_keys.split(','))
        # Tagged resources
        tag_resources = {
            tag_key: {
                'instance': [],
                'elb': [],
                'elastic_ip': [],
                'ebs_volumes': [],
                'ebs_snapshots': [],
            }
        }
        if (not tag_value) or (tag_value.lower() == 'all'):
            tag_resources['--NOT-TRACKED--'] = {
                'instance': [],
                'elb': [],
                'elastic_ip': [],
                'ebs_volumes': [],
                'ebs_snapshots': [],
            }
        # Get details
        for key in all_hash_set:
            if not key.strip():
                continue
            details = self.redis_handler.get_details(key)
            if not details:
                continue
            if key in hash_set:
                category = tag_key
            else:
                category = '--NOT-TRACKED--'
            if category not in tag_resources:
                continue
            details = self._cal_monthly_cost(details)
            if key.startswith(self.redis_handler.instance_hash_prefix):
                tag_resources[category]['instance'].append(details)
            elif key.startswith(self.redis_handler.elb_hash_prefix):
                tag_resources[category]['elb'].append(details)
            elif key.startswith(self.redis_handler.elastic_ip_hash_prefix):
                tag_resources[category]['elastic_ip'].append(details)
            elif key.startswith(self.redis_handler.ebs_vol_hash_prefix):
                tag_resources[category]['ebs_volumes'].append(details)
            elif key.startswith(self.redis_handler.ebs_snapshot_hash_prefix):
                tag_resources[category]['ebs_snapshots'].append(details)
            else:
                raise Exception("Unable to categorize info: %s" % str(details))
        ## Calculate total cost
        total_cost = 0.0
        for category, resources in tag_resources.items():
            for resource_type, resource_list in resources.items():
                cost = 0.0
                for details in resource_list:
                    try:
                        cost += float(details.get('monthly_cost'))
                    except (ValueError, TypeError):
                        cost = 'Undefined'
                if isinstance(cost, float):
                    tcost_key = "%s:total_cost" % resource_type
                    tag_resources[category][tcost_key] = round(cost, 3)
                    total_cost += cost
        tag_resources['sethji:estimated_monthly_bill'] = round(total_cost, 3)
        return tag_resources


    def get_instance_details(self, region, instance_id):
        details = self.redis_handler.get_instance_details(region, instance_id)
        details = self._cal_monthly_cost(details)
        return details


    def get_ebs_volume_details(self, region, volume_id):
        return self.redis_handler.get_ebs_volume_details(region, volume_id)


    def get_ebs_snapshot_details(self, region, snapshot_id):
        return self.redis_handler.get_ebs_snapshot_details(region, snapshot_id)


    def get_elb_details(self, region, elb_name):
        details = self.redis_handler.get_elb_details(region, elb_name)
        details = self._cal_monthly_cost(details)
        return details


    def get_elastic_ip_details(self, elastic_ip):
        details = self.redis_handler.get_elastic_ip_details(elastic_ip)
        details = self._cal_monthly_cost(details)
        return details


    def _cal_monthly_cost(self, details):
        try:
            if ('per_hour_cost' in details) and ('monthly_cost' not in details):
                ph_cost = float(details.get('per_hour_cost'))
                day_count = get_current_month_day_count()
                details['monthly_cost'] = ph_cost * 24 * day_count
            if 'monthly_cost' in details:
                details['monthly_cost'] = round(float(details['monthly_cost']), 3)
        except ValueError:
            return details
        return details
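
Two details worth spelling out. _cal_monthly_cost extrapolates the month as per_hour_cost * 24 * days: a $0.013/hr instance in a 30-day month comes to 0.013 * 24 * 30 = 9.36. And _cache_function keys the cache by a path built from the call, e.g. get_tag_resources/env/tag_value=prod. A dict-backed sketch of that decorator, with Redis swapped out for a plain dict:

import json
from functools import wraps

_cache = {}

def cache_function(f):
    @wraps(f)
    def decorated(*args, **kwargs):
        # Same path scheme as above; f.__name__ matches f.func_name there.
        path = '%s/%s/%s' % (f.__name__, '/'.join(args),
                             '/'.join('%s=%s' % (k, kwargs[k])
                                      for k in sorted(kwargs)))
        if path in _cache:
            return json.loads(_cache[path])
        response = f(*args, **kwargs)
        _cache[path] = json.dumps(response)
        return response
    return decorated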