class GetUsers():
    """Crawls user lists per category page and stores sec_user_ids in Redis."""

    def __init__(self):
        self.get_raw_data = GetRawData()
        self.redis_client = RedisClient()

    def get_users(self, category_id, page):
        """Fetch one page of users for a category and persist their ids.

        Logs and skips the page on any fetch error.
        """
        try:
            raw_data = self.get_raw_data.get_users(category_id, page)
        except Exception as e:
            raw_data = None
            # str(e), not e.args: concatenating the args *tuple* to a str
            # raised TypeError inside this handler in the original
            logger.error('get_users错误-' + str(e) + '-category_id-' +
                         category_id + '-page-' + page)
        if raw_data:
            sec_user_id_list = self.parse_users(raw_data)
            self.save_to_redis(sec_user_id_list)

    def parse_users(self, raw_data):
        """Extract the sec_uid of each author from the aweme_list payload."""
        sec_user_id_list = []
        data = raw_data.get('aweme_list')
        for each in data:
            sec_user_id = each.get('author').get('sec_uid')
            sec_user_id_list.append(sec_user_id)
        return sec_user_id_list

    def save_to_redis(self, sec_user_id_list):
        """Persist every collected sec_user_id."""
        for each in sec_user_id_list:
            self.redis_client.add_users(each)

    def run(self):
        """Iterate categories -1..14, crawling 100 pages each concurrently."""
        for cate in range(-1, 15):
            cate_page_list = [[cate, page] for page in range(0, 100)]
            logger.info('get_users当前爬取cate-' + str(cate))
            tasks = [gevent.spawn(self.get_users, str(cate), str(page))
                     for cate, page in cate_page_list]
            gevent.joinall(tasks)
def __init__(self, website):
    """Load per-website config from conf/<website>_website.json and open DB handles.

    :param website: site key used to locate the config file
    """
    # context manager closes the descriptor even if JSON parsing fails
    # (the original opened the file and never closed it)
    with open("conf/%s_website.json" % website, "r") as fd:
        data = json.loads(fd.read())
    self.website = data["website_name"]
    self.check_url = data["check_url"]
    self.cookies_db = RedisClient('cookies', self.website)
    self.users_db = RedisClient('users', self.website)
def POST(self):
    """Accept a JSON document, queue it in Redis, and echo it back."""
    doc = cherrypy.request.json
    # interpolate explicitly: print() does not support logging-style
    # "%s"-plus-args calls (the original printed the tuple)
    print("Received doc: '%s'" % doc)
    rc = RedisClient(redis_conf["host"], redis_conf["port"],
                     redis_conf["db"], redis_conf["collection"])
    dumped_doc = json.dumps(doc)
    rc.put(dumped_doc)
    return {"received doc": dumped_doc}
def __init__(self):
    """Set up crawler collaborators, batching state, and the name blacklist."""
    self.get_raw_data = GetRawData()
    self.redis_client = RedisClient()
    # users are processed in batches of this size
    self.batch_size = 10
    self.pre_user_list = []
    # characters/words typical of shop or company account names; accounts
    # whose names match are presumably filtered out as non-person users
    # -- TODO confirm against the caller
    self.stupid_key_words = [
        '公司', '店', '铺', '厂', '行', '鞋', '装', '市', '服', '饰', '商',
        '贸', '牌', '汇', '馆', '裤', '业', '专', '卖'
    ]
def __init__(self, proxy_type='https'):
    """Build a proxy pool for the given scheme.

    :param proxy_type: 'https' (default) or 'http'
    :raises ValueError: for any other scheme (ValueError subclasses
        Exception, so existing ``except Exception`` callers still work)
    """
    if proxy_type not in ('https', 'http'):
        raise ValueError('type must be https or http')
    self.redis_handler = RedisClient('%s_proxy' % proxy_type)
    self.proxy_type = proxy_type
    # seed the pool with freshly fuzzed proxies plus everything stored
    self.proxy_pool = set([*fuzz_all(), *self.redis_handler.get_all()])
def __init__(self):
    """Wire up data sources: raw crawler, local MySQL, and helper clients."""
    self.get_raw_data = GetRawData()
    # NOTE(review): dead commented-out remote-DB connection (with masked
    # credentials) removed; recover from VCS history if ever needed.
    self.db = pymysql.connect(host='localhost', port=3306, user='******',
                              password='******', db='bxmind',
                              charset='utf8mb4')
    self.cursor = self.db.cursor()
    self.mysql_client = MysqlClient()
    self.redis_client = RedisClient()
    self.stupid_key_words = STUPID_KEY_WORDS
    self.a_list = []
    self.b_list = []
def persist_redis(redis_conf, elastic_conf):
    """Drain the Redis collection into Elasticsearch in bulk-sized chunks."""
    client = RedisClient(redis_conf["host"], redis_conf["port"],
                         redis_conf["db"], redis_conf["collection"])
    total = client.length()
    print(total)
    bulk_size = elastic_conf["bulk_size"]
    drained = 0
    while drained < total:
        docs = client.popMany(bulk_size)
        print("Inserting %s documents" % len(docs))
        persist_elasticsearch(docs, elastic_conf)
        drained += bulk_size
class GetRooms():
    """Collects live-room ids and owner sec_uids from the live channel feed."""

    def __init__(self):
        self.get_raw_data = GetRawData()
        self.redis_client = RedisClient()
        self.sec_user_id_list = []  # sec_uids of qualifying streamers
        self.room_id_list = []      # matching live-room ids

    def get_channel(self):
        """Fetch the channel feed, parse it, then persist rooms and users.

        Returns None early when fetching or parsing fails (errors logged).
        """
        try:
            channel_raw_data = self.get_raw_data.get_channel()
        except Exception as e:
            logger.error('get_channel出错-' + e.args[0])
            return None
        try:
            self.parse_channel(channel_raw_data)
        except Exception as e:
            logger.error('parse_channel出错-' + e.args[0])
            return None
        #logger.info(json.dumps([i[-10:-1] for i in self.sec_user_id_list]))
        for each in self.room_id_list:
            self.redis_client.add_rooms(each)
        for each in self.sec_user_id_list:
            self.redis_client.add_users(each, 1)

    def parse_channel(self, channel_raw_data):
        """Keep rooms whose owner has >=10000 followers and >=1 promotion."""
        for each in channel_raw_data.get('data'):
            room_id = each.get('data').get('id_str')
            sec_user_id = each.get('data').get('owner').get('sec_uid')
            follower = each.get('data').get('owner').get('follow_info').get(
                'follower_count')
            if follower >= 10000:
                try:
                    item_list = self.get_raw_data.get_item_list(
                        sec_user_id, room_id)
                except Exception as e:
                    logger.error('get_item_list出错-' + e.args[0])
                    return None
                if len(item_list.get('promotions')) != 0:
                    self.room_id_list.append(room_id)
                    self.sec_user_id_list.append(sec_user_id)

    def run(self):
        """Run one concurrent fetch pass, log counts, then reset state."""
        tasks = [gevent.spawn(self.get_channel) for i in range(1)]
        gevent.joinall(tasks)
        # log: number of room_ids and sec_user_ids gathered this batch
        logger.info('本批次共获得room_id和sec_user_id-' +
                    str(len(self.sec_user_id_list)) + '-' +
                    str(len(self.room_id_list)))
        self.sec_user_id_list.clear()
        self.room_id_list.clear()
class GetUserDongtai():
    """Polls each stored user's feed ("dongtai") to detect active live rooms."""

    def __init__(self):
        self.get_raw_data = GetRawData()
        self.redis_client = RedisClient()
        self.room_id_list = []  # room ids of users found to be streaming

    def get_users(self):
        """Return all sec_user_ids stored in Redis."""
        users = self.redis_client.get_users()
        return users

    def save_rooms(self):
        """Persist every detected room id with status 0."""
        for each in self.room_id_list:
            self.redis_client.add_rooms(each, 0)

    def get_user_dongtai(self, sec_user_id):
        """Fetch and parse one user's feed; logs and bails out on errors."""
        try:
            raw_data = self.get_raw_data.get_user_dongtai(sec_user_id)
        except Exception as e:
            logger.error('get_user_dongtai出错-' + e.args[0] +
                         '-sec_user_id-' + sec_user_id)
            return None
        try:
            self.parse_user_dongtai(raw_data)
        except Exception as e:
            logger.error('parse_user_dongtai出错-' + e.args[0] +
                         '-sec_user_id-' + sec_user_id)

    def parse_user_dongtai(self, raw_data):
        """Record the author's room_id when it is nonzero (user is live)."""
        data = raw_data.get('dongtai_list')[0]
        room_id = data.get('aweme').get('author').get('room_id')
        if room_id != 0:
            self.room_id_list.append(str(room_id))
            logger.info('该主播已开始直播,room_id-' + str(room_id))
        else:
            logger.info('该主播尚未开始直播')

    def run(self):
        """Crawl all users' feeds concurrently, 20 at a time."""
        users = self.get_users()
        logger.info('共有users-' + str(len(users)))
        batch_size = 20  # even 20 at a time sometimes returns no data
        for batch_limit in range(0, len(users), batch_size):
            start = batch_limit
            stop = min(batch_limit + batch_size, len(users))
            logger.info('当前爬取用户序号-' + str(start + 1) + '-' + str(stop))
            tasks = [
                gevent.spawn(self.get_user_dongtai, sec_user_id)
                for sec_user_id in users[start:stop]
            ]
            gevent.joinall(tasks)
class GetCurrentRoom():
    """Checks each stored user's profile for an active live room."""

    def __init__(self):
        self.get_raw_data = GetRawData()
        self.redis_client = RedisClient()
        self.room_id_list = []  # room ids of users currently streaming

    def get_users(self):
        """Return all sec_user_ids stored in Redis."""
        users = self.redis_client.get_users()
        return users

    def save_rooms(self):
        """Persist every detected room id with status 0."""
        for each in self.room_id_list:
            self.redis_client.add_rooms(each, 0)

    def get_current_room(self, sec_user_id):
        """Fetch one user's profile; record their room id when they are live."""
        try:
            raw_data = self.get_raw_data.get_current_room(sec_user_id)
        except Exception as e:
            logger.error('get_current_room出错-' + e.args[0] +
                         '-sec_user_id-' + sec_user_id)
            return None
        try:
            # sanity probe: raises when the payload is not a real profile page
            check = raw_data.get('data').get('pay_grade').get('grade_describe')
        except Exception as e:
            logger.error('parse_current_room出错' + e.args[0] +
                         '-sec_user_id-' + sec_user_id)
            return None
        own_room = raw_data.get('data').get('own_room')
        if own_room:  # own_room present means the stream has already started
            room_id = own_room.get('room_ids_str')[0]
            self.room_id_list.append(room_id)
            logger.info(sec_user_id + '-正在直播,room_id-' + room_id)
        else:
            logger.info(sec_user_id + '-未在直播')

    def run(self):
        """Crawl all users concurrently, 20 at a time."""
        users = self.get_users()
        logger.info('共有users-' + str(len(users)))
        batch_size = 20
        for batch_limit in range(0, len(users), batch_size):
            start = batch_limit
            stop = min(batch_limit + batch_size, len(users))
            logger.info('当前爬取用户序号-' + str(start + 1) + '-' + str(stop))
            tasks = [
                gevent.spawn(self.get_current_room, sec_user_id)
                for sec_user_id in users[start:stop]
            ]
            gevent.joinall(tasks)
class GetSecUserIds():
    """Resolves stored feigua aweme ids to their authors' sec_uids in batches."""

    def __init__(self):
        self.get_raw_data = GetRawData()
        self.redis_client = RedisClient()

    def get_aweme_lists(self):
        """Chunk all stored feigua aweme ids into sublists of 20."""
        offset = 20
        aweme_lists = []
        awemes = self.redis_client.get_feigua_awemes()
        for i in range(0, len(awemes), offset):
            aweme_list = awemes[i:i+offset]
            aweme_lists.append(aweme_list)
        return aweme_lists

    def get_clips(self, aweme_list):
        """Fetch details for one id batch; store sec_uids, drop resolved ids.

        Status code 2053 means the batch has no videos.
        """
        sec_user_id_list = []
        aweme_id_list = []
        aweme_int_list = [int(aweme) for aweme in aweme_list]
        try:
            raw_data = self.get_raw_data.get_clips(aweme_int_list)
        except Exception as e:
            logger.error('get_clips出错-' + e.args[0])
            return None
        if raw_data.get('status_code') == 2053:
            logger.info('这组没有视频')
        else:
            data = raw_data.get('aweme_details')
            for each in data:
                aweme_id = each.get('aweme_id')
                sec_user_id = each.get('author').get('sec_uid')
                if sec_user_id:
                    aweme_id_list.append(aweme_id)
                    sec_user_id_list.append(sec_user_id)
        # resolved ids leave the feigua queue; found users enter state -1
        for each in aweme_id_list:
            self.redis_client.delete_feigua_awemes(each)
        for each in sec_user_id_list:
            self.redis_client.add_pre_users(each, -1)

    def run(self):
        """Process aweme batches one group at a time with gevent."""
        aweme_lists = self.get_aweme_lists()
        logger.info('共有feigua_aweme组数:' + str(len(aweme_lists)))
        batch_size = 1
        for batch_limit in range(0, len(aweme_lists), batch_size):
            start = batch_limit
            stop = min(batch_limit+batch_size, len(aweme_lists))
            logger.info('get_clips爬取当前feigua_aweme组序号-' + str(start+1) +
                        '-' + str(stop))
            tasks = [gevent.spawn(self.get_clips, aweme_list)
                     for aweme_list in aweme_lists[start:stop]]
            gevent.joinall(tasks)
class CheckQualificationByPromotion():
    """Promotes pre_users to users when their promotion page lists >10 items."""

    def __init__(self):
        self.get_raw_data = GetRawData()
        self.redis_client = RedisClient()
        self.batch_size = 50  # pre_users checked per run

    def get_pre_users(self):
        """Collect a batch of pre_users that are not yet in the users table."""
        batch = []
        while len(batch) < self.batch_size:
            pre_user = self.redis_client.get_pre_users()
            if not self.is_user(pre_user):  # keep only ids not already users
                batch.append(pre_user)
        return batch

    def count_pre_users(self):
        """Number of pending pre_users."""
        return self.redis_client.count_pre_users()

    def is_user(self, sec_user_id):
        """True when the id already exists in the users table."""
        return self.redis_client.is_user(sec_user_id)

    def check_qualification_by_promotion(self, sec_user_id):
        """Fetch a user's promotions page; add as user when >10 promotions."""
        try:
            raw_data = self.get_raw_data.get_promotions(sec_user_id)
        except Exception as e:
            logger.error('get_promotions出错-' + e.args[0] + '-sec_user_id-' +
                         sec_user_id)
            return None
        try:
            raw_data.get('columns')[0].get('name')  # proves the page really loaded
            data = raw_data.get('promotions')
            if len(data) > 10:  # page loaded and has more than 10 promotions
                self.redis_client.add_users(sec_user_id)
        except Exception as e:
            logger.error('解析promotions页面失败-sec_user_id-' + sec_user_id +
                         '-' + e.args[0])

    def run(self):
        """Check one batch concurrently; exit the process when none are left."""
        if self.count_pre_users() > 0:
            batch = self.get_pre_users()
            tasks = [
                gevent.spawn(self.check_qualification_by_promotion,
                             sec_user_id) for sec_user_id in batch
            ]
            gevent.joinall(tasks)
        else:
            logger.info('pre_users列表空了,程序退出')
            sys.exit()
def make_server(redis_client=None):
    """Build the Flask app; deck state persists in Redis between requests.

    :param redis_client: injectable client (for tests); defaults to a
        RedisClient pointing at host "redis"
    :return: the configured Flask application
    """
    app = Flask(__name__)
    CORS(app)

    # Using Redis as a simple persistence service
    if not redis_client:
        redis_client = RedisClient(host="redis")

    @app.route("/game", methods=["POST"])
    def play():
        # TODO: Maybe move redis_client to middleware
        deck_nums = redis_client.get_list("deck")
        response_data = index.play(json=request.get_json(), deck_nums=deck_nums)
        redis_client.set_list("deck", response_data["deck"])
        del response_data["deck"]
        return jsonify(data=response_data)

    @app.errorhandler(Exception)
    def handle_invalid_usage(err):
        message = getattr(err, "message", str(err))
        print("ERROR MESSAGE:", message, flush=True)
        # return 400 as the actual HTTP status; the original returned the
        # error payload with a 200 status, so clients saw success
        return jsonify({"message": message, "status_code": 400}), 400

    return app
def get_metrics(self):
    """Collect Redis INFO metrics and list lengths over one managed connection."""
    with RedisClient(self.host, self.port, self.auth) as client:
        self.log_verbose('Connected to Redis at %s:%s' % (self.host, self.port))
        self.dispatch_info(client)
        self.dispatch_list_lengths(client)
def lpush(self, key, value, **kwargs):
    """LPUSH key value -- prepend *value* to the list at *key*.

    Also records a "list:insert" change-log entry in the same pipeline so
    the write and its sync record commit together.

    :param key: redis key
    :param value: element to prepend
    :keyword obj: object whose id is logged
    :keyword baseobj: owning object for the change log
    :return: True on success, False on any error (logged)
    """
    pipe = None
    try:
        if setting.Debug:
            n = datetime.datetime.now()
        pipe = RedisClient.getInstance().redis.pipeline()
        pipe.lpush(key, value)
        self.change_log("list:insert", kwargs["obj"].id, kwargs["baseobj"], pipe)
        pipe.execute()
        if setting.Debug:
            logger.info(" lpush key: %s,use : %s" % (key, datetime.datetime.now() - n))
        return True
    except Exception as e:  # py3-compatible; was py2-only "except Exception,e"
        if pipe is not None:  # pipeline creation itself may have failed
            pipe.reset()
        logger.error(e)
        return False
def rpush(self, key, value, **kwargs):
    """Append *value* to the tail of the list at *key*, recording a
    "list:insert" change-log entry in the same pipeline.

    :keyword obj: object whose id is logged
    :keyword baseobj: owning object for the change log
    :return: True on success, False on any error (logged)
    """
    pipe = None
    try:
        pipe = RedisClient.getInstance().redis.pipeline()
        pipe.rpush(key, value)
        self.change_log("list:insert", kwargs["obj"].id, kwargs["baseobj"], pipe)
        pipe.execute()
        return True
    except Exception as e:  # py3-compatible; was py2-only "except Exception, e"
        if pipe is not None:  # pipeline creation itself may have failed
            pipe.reset()
        logger.error(e)
        return False
def zadd(self, key, member, score, **kwargs):
    """Add *member* to the sorted set at *key* with *score* (updating the
    score when the member exists) and record a "sortset:insert" change-log
    entry in the same pipeline.

    :keyword obj: object whose id is logged
    :keyword baseobj: owning object for the change log
    :return: True on success, False on any error (logged)
    """
    pipe = None
    try:
        pipe = RedisClient.getInstance().redis.pipeline()
        # NOTE(review): positional (key, member, score) is the redis-py<3
        # calling convention; redis-py>=3 expects zadd(key, {member: score})
        # -- confirm the pinned client version before upgrading.
        pipe.zadd(key, member, score)
        self.change_log("sortset:insert", kwargs["obj"].id,
                        kwargs["baseobj"], pipe, score)
        pipe.execute()
        return True
    except Exception as e:  # py3-compatible; was py2-only "except Exception,e"
        if pipe is not None:  # pipeline creation itself may have failed
            pipe.reset()
        logger.error(e)
        return False
def lpush(self, key, value, **kwargs):
    """LPUSH key value -- prepend *value* to the list at *key*.

    Records a "list:insert" change-log entry in the same pipeline; in
    Debug mode the elapsed time is logged.

    :keyword obj: object whose id is logged
    :keyword baseobj: owning object for the change log
    :return: True on success, False on any error (logged)
    """
    pipe = None
    try:
        if setting.Debug:
            n = datetime.datetime.now()
        pipe = RedisClient.getInstance().redis.pipeline()
        pipe.lpush(key, value)
        self.change_log("list:insert", kwargs["obj"].id, kwargs["baseobj"], pipe)
        pipe.execute()
        if setting.Debug:
            logger.info(" lpush key: %s,use : %s" % (key, datetime.datetime.now() - n))
        return True
    except Exception as e:  # py3-compatible; was py2-only "except Exception, e"
        if pipe is not None:  # pipeline creation itself may have failed
            pipe.reset()
        logger.error(e)
        return False
def change_log(self,oper,obj_id,baseobj,pipe=None,score=None): """ save the relation of Reference list|sortset:insert:user_posts:user_id:post_id list|sortset:delete:user_posts:user_id:post_id param: oper: the operation type is string obj_id: id of object type is integer baseobj: base object pipe: redis pipe default is None score: use rank """ #是否启用数据同步 if not setting.DATA_SYNC: return #初始化服务 dp = pipe or RedisClient.getInstance().redis #保存chang_log #String = 操作符: 主类型_引用类型s : 主类型ID: 此类型ID basetype = str(baseobj.__class__.__name__).lower() ref = self.ref.lower() if basetype == ref: ref = self.name.lower() if oper.startswith("sortset"): val = "%(oper)s:_:%(model_type)s_%(relate_type)ss:_:%(id)s:_:%(rid)s:_:%(score)s" % {"oper":oper,"model_type": basetype,"relate_type": ref,"id":baseobj.id,"rid" : obj_id ,"score":score} else: val = "%(oper)s:_:%(model_type)s_%(relate_type)ss:_:%(id)s:_:%(rid)s" % {"oper":oper,"model_type": basetype,"relate_type": ref,"id":baseobj.id,"rid" : obj_id} logger.info("sync: " + val) #保存数据dao Redis List Queue dp.lpush("change_log",val)
def __init__(self, model_class):
    """Bind this field to its model class and grab the shared redis handle.

    :param model_class: the model owning this field
    """
    self.model_class = model_class
    self._db = RedisClient.getInstance().redis
def __init__(self, model_class):
    """Bind this field to its model class and grab the shared redis handle.

    :param model_class: the model owning this field
    """
    self.model_class = model_class
    self._db = RedisClient.getInstance().redis
class Test():
    """Re-tests stored proxies against URL and adjusts their Redis scores."""

    def __init__(self):
        self.redis = RedisClient()

    def test_single(self, proxy):
        """Probe one proxy; max its score on a valid response, else decrease."""
        ip = json.loads(proxy)
        ip_type = ip['type']
        ip = ip['ip']
        try:
            response = get_response(URL, ip)
            if response.status_code in VALID_CODE:
                print(ip, ip_type, '正常')
                self.redis.max(proxy)
            else:
                print(ip, ip_type, '-1')
                self.redis.decrease(proxy)
        except Exception as e:
            print(e)
            print(ip, ip_type, '-1')
            self.redis.decrease(proxy)

    def run(self):
        """Spawn one tester thread per stored proxy.

        NOTE(review): the queue is filled and drained within the same loop
        iteration, so it never actually bounds concurrency, and the threads
        are never joined -- confirm whether throttling was intended.
        """
        proxies = self.redis.all()
        q = queue.Queue(10)
        for proxy in proxies:
            if not q.full():
                q.put(proxy)
            if not q.empty():
                proxy = q.get()
                t = threading.Thread(target=self.test_single, args=(proxy, ))
                t.start()
def _get_license_info():
    """Validate the locally stored license signature.

    :return: (license_id, [product names]) on success, or
        (error message, []) when the license is missing or invalid.
    """
    redis_client = RedisClient()
    license_sig = redis_client.get(lc_check.redis_key_license_signature,
                                   is_json=False)
    if license_sig is None or license_sig == "":
        return "本机并未指定license", []
    license_info = utils.check_license_signature_valid(
        license_sig, CERT_PATH + LICENSE_CERT)
    if license_info != {} and license_info["deviceId"] == utils.get_device_uuid():
        products = []
        for product in license_info["products"]:
            product_id = product["product"]
            products.append(license_product[product_id - 1])
        return license_info["licenseId"], products
    # .get() avoids the KeyError the original raised when validation
    # returned an empty dict (license_info == {})
    return "本机license不合法,license is: " + license_info.get("licenseId", ""), []
class Check():
    """Asynchronously re-validates proxies stored in Redis and adjusts scores."""

    def __init__(self):
        self.redis = RedisClient()

    async def check_single(self, proxy):
        """Probe one proxy against a random check URL and score it.

        Working proxies are promoted to the 50-point baseline; failing
        paid proxies (score 51) are deleted outright, others decayed.
        """
        conn = aiohttp.TCPConnector(verify_ssl=False)
        async with aiohttp.ClientSession(connector=conn) as session:
            try:
                if isinstance(proxy, bytes):
                    proxy = proxy.decode("utf-8")
                real_proxy = "http://" + proxy
                check_url = CHECK_URL[random.randint(0, len(CHECK_URL) - 1)]
                print("checking: ", real_proxy, check_url)
                async with session.get(check_url, proxy=real_proxy,
                                       timeout=10) as response:
                    score = self.redis.get_score(proxy)
                    if response.status in VALID_STATUS_CODES:
                        if score != 100:
                            # promote working proxy to the 50-point baseline
                            self.redis.max(proxy)
                            print("%s can be use 50 score" % proxy)
                    else:
                        print("1:Not valid code:%s decrease" % proxy)
                        # fixed NameError: original tested undefined "socore"
                        if score == 51:
                            self.redis.delproxy(proxy)
                        else:
                            self.redis.decrease(proxy)
            except (TimeoutError, OSError, ServerDisconnectedError,
                    ClientResponseError) as e:
                print("2:Catch Error error:%s %s decrese" % (str(e), proxy))
                self.redis.decrease(proxy)

    def check(self):
        """Check all stored proxies in CHECK_SIZE batches on one event loop."""
        try:
            proxies = self.redis.all()
            loop = asyncio.get_event_loop()
            for index in range(0, len(proxies), CHECK_SIZE):
                check_proxies = proxies[index:index + CHECK_SIZE]
                tasks = [self.check_single(proxy) for proxy in check_proxies]
                # run this batch of coroutines to completion together
                loop.run_until_complete(asyncio.wait(tasks))
        except Exception as e:
            print("check err: %s" % str(e))
def __init__(self): queues_callbacks = { CONFIG.get('rabbit', 'rabbit_mot_exchange'): self.callback_new_mot, "spf_response_exchange": self.callback_process_mot } # load geometries initially to be used for entire session self.geoms = self.read_geo_valid() # Queue on the exchange the bot are reading from # %%RABBIT_MOT_EXCHANGE%%-bot-%%SCHEDULE_MATCHING_BOT_ID%% super(ScheduleMatchingBot, self).__init__('sched_matching', CONFIG, queues_callbacks) self.redis_client = RedisClient(CONFIG.get('redis', 'redis_host'), CONFIG.get('redis', 'redis_port')) logger.info("Event bot created")
def zcard(self, key, **kwargs):
    """Return the cardinality (member count) of the sorted set at *key*.

    :param key: redis key
    :return: number of members in the sorted set
    """
    client = RedisClient.getInstance().redis
    return client.zcard(key)
class SaveIp():
    """Fetcher: crawls fresh proxies and stores them until the pool is full."""

    def __init__(self):
        self.redis = RedisClient()
        self.crawler = Crawler()

    def is_over_threshold(self):
        """Return True when the pool already holds POOL_UPPER_THRESHOLD proxies."""
        # the comparison already yields the bool; no if/else ladder needed
        return self.redis.count() >= POOL_UPPER_THRESHOLD

    def run(self):
        """Crawl and store proxies unless the pool limit has been reached."""
        print(' 获取器开始执行 ')
        if not self.is_over_threshold():
            proxies = self.crawler.run()
            for proxy in proxies:
                print(proxy, '存入')
                self.redis.add(proxy)
class ProxyManage(Resource):
    """REST resource exposing GET (fetch) and DELETE over the two proxy pools."""

    def __init__(self):
        self.http_proxy = RedisClient('http_proxy')
        self.https_proxy = RedisClient('https_proxy')
        self.parser = reqparse.RequestParser()
        self.parser.add_argument('type', type=str, required=True,
                                 help='required args of proxy type: http/https, like ?type=http')

    @error_handle
    def get(self):
        """Return one proxy (default) or all (?all=true) of the given type."""
        self.parser.add_argument('all', type=str, default='false',
                                 required=False, help='')
        get_all = self.parser.parse_args()['all']
        proxy_type = self.parser.parse_args()['type']
        if proxy_type == 'http':
            if get_all == 'true':
                return self.http_proxy.get_all()
            else:
                return self.http_proxy.get_one()
        elif proxy_type == 'https':
            if get_all == 'true':
                return self.https_proxy.get_all()
            else:
                return self.https_proxy.get_one()
        else:
            raise ParamError(msg='proxy type param error,must be http/https')

    @error_handle
    def delete(self):
        """Delete *ip* from the pool of the given type (both pools when the
        type is neither http nor https)."""
        self.parser.add_argument('ip', type=str, required=True)
        args = self.parser.parse_args()
        proxy_type = args.get('type')
        ip = args.get('ip')
        if proxy_type == 'http':
            return self.http_proxy.delete(ip)
        if proxy_type == 'https':
            return self.https_proxy.delete(ip)
        else:
            # unknown type: remove the ip from both pools
            self.http_proxy.delete(ip)
            self.https_proxy.delete(ip)
            return
class CheckQualificationByPromotion():
    """Deletes pre_users with no promotions; marks the rest as sellers (state 0)."""

    def __init__(self):
        self.get_raw_data = GetRawData()
        self.redis_client = RedisClient()

    def get_users(self):
        """Return pre_users currently in state -1 (seller status unknown)."""
        users = self.redis_client.get_pre_users(-1, -1)
        return users

    def check_commercial(self, sec_user_id):
        """Fetch the promotions page and update or delete the pre_user."""
        try:
            raw_data = self.get_raw_data.get_promotions(sec_user_id)
        except Exception as e:
            logger.error('get_promotions出错-' + e.args[0] + '-sec_user_id-' +
                         sec_user_id)
            return None
        try:
            raw_data.get('columns')[0].get('name')  # proves the page really loaded
            data = raw_data.get('promotions')
            if len(data) == 0:
                # page loaded yet has no promotions: user truly sells nothing
                logger.info('该用户不带货,将删除,sec_user_id-' + sec_user_id)
                self.redis_client.delete_pre_users(sec_user_id)
            else:
                # promotions exist, so the user does sell: move to state 0
                self.redis_client.add_pre_users(sec_user_id, 0)
        except Exception as e:
            logger.error('解析promotions页面失败-sec_user_id-' + sec_user_id +
                         '-' + e.args[0])

    def run(self):
        """Check users in batches of 50 with gevent.

        Even with async, batches around 200 crawl about as slowly as
        synchronous requests -- presumably upstream throttling.
        """
        users = self.get_users()
        logger.info('共有待确认是否带货用户数量:' + str(len(users)))
        batch_size = 50
        for batch_limit in range(0, len(users), batch_size):
            start = batch_limit
            stop = min(batch_limit + batch_size, len(users))
            logger.info('check_commercial爬取当前用户序号-' + str(start + 1) +
                        '-' + str(stop))
            tasks = [
                gevent.spawn(self.check_commercial, sec_user_id)
                for sec_user_id in users[start:stop]
            ]
            gevent.joinall(tasks)
def scard(self, key, **kwargs):
    """SCARD key -- number of elements (cardinality) of the set at *key*.

    :param key: redis key
    :return: member count
    """
    client = RedisClient.getInstance().redis
    return client.scard(key)
def llen(self, key, **kwargs):
    """LLEN key -- length of the list stored at *key*.

    :param key: redis key
    :return: integer length
    """
    client = RedisClient.getInstance().redis
    return client.llen(key)
def sismember(self, key, member_id, **kwargs):
    """SISMEMBER key member -- test whether *member_id* is in the set at *key*.

    :param key: redis key
    :param member_id: candidate member
    :return: truthy when the member is present
    """
    client = RedisClient.getInstance().redis
    return client.sismember(key, member_id)
def delete(self, key, pipe, **kwargs):
    """Delete *key*, queuing on *pipe* when one is given.

    :param key: redis key
    :param pipe: redis pipeline, or a falsy value to use the plain client
    :return: DEL result (or the pipeline when the command is queued)
    """
    # "or", not "|": the original bitwise-or attempted pipe.__or__(client),
    # which is not a fallback and fails for pipeline objects
    # (cf. change_log's "dp = pipe or RedisClient...")
    db = pipe or RedisClient.getInstance().redis
    return db.delete(key)
def zscore(self, key, member_id, **kwargs):
    """Return the score of *member_id* in the sorted set at *key*.

    :param key: redis key
    :param member_id: member to look up
    :return: the member's score
    """
    client = RedisClient.getInstance().redis
    return client.zscore(key, member_id)
def llen(self, key, **kwargs):
    """Return the length of the list stored at *key* (LLEN).

    :param key: redis key
    :return: integer length
    """
    client = RedisClient.getInstance().redis
    return client.llen(key)
def zrevrank(self, key, member_id, **kwargs):
    """Return *member_id*'s rank in the sorted set at *key*, counting from
    the highest score (rank 0 is the top scorer).

    :param key: redis key
    :param member_id: member to rank
    :return: integer rank
    """
    client = RedisClient.getInstance().redis
    return client.zrevrank(key, member_id)
def delete(self, key, pipe, **kwargs):
    """Delete the value at *key*, queuing on *pipe* when one is given.

    :param key: redis key
    :param pipe: redis pipeline, or a falsy value to use the plain client
    :return: DEL result (or the pipeline when the command is queued)
    """
    # "or", not "|": the original bitwise-or attempted pipe.__or__(client),
    # which is not a fallback and fails for pipeline objects
    db = pipe or RedisClient.getInstance().redis
    return db.delete(key)
def check_and_print_results(self):
    """Fetch each role's highstate result from Redis, log/save it, and flip
    ``is_build_successful`` on failed or unreadable results."""
    redis_client = RedisClient()
    for role in self.ROLES:
        family = self.join_items([self.family, role])
        highstate_result = redis_client.get_highstate_result(family)
        try:
            logging.info('printing results for server: {0}'.format(role))
            parsed_result = json.loads(highstate_result)
            return_results = parsed_result.pop('return')
            json_results = json.dumps(parsed_result, indent=4)
            yaml_dump = yaml.safe_dump(return_results)
            if self.SAVE_LOGS:
                self.check_for_log_dir()
                self.write_to_log_file(yaml_dump, role)
                self.write_to_log_file(json_results, role)
        except Exception as e:
            logging.info('Result:{0} Exception:{1}'.format(highstate_result, e))
        try:
            if self.highstate_failed(highstate_result):
                self.is_build_successful = False
        except Exception:
            # narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
            # still propagate; any parse failure marks the build failed
            self.is_build_successful = False
def zrevrange(self, key, start=0, end=10, select=None, **kwargs):
    """Return model objects for the members ranked from highest to lowest
    score (ZREVRANGE: the *highest* scores come first -- the original
    docstring had this reversed).

    :param key: sorted-set key
    :param start: first rank index (default 0)
    :param end: last rank index, inclusive (default 10)
    :param select: related-object selector passed to the ORM filter
    :return: filtered collection of referenced objects
    """
    pks = RedisClient.getInstance().redis.zrevrange(key, start, end)
    return self.ref_klass.objects.filter(id=tuple(pks), select=select)
def zrangebyscore(self, key, min, max, select=None, **kwargs):
    """Return referenced objects whose scores fall within [min, max].

    :param key: sorted-set key
    :param min: lower score bound
    :param max: upper score bound
    :param select: related-object selector passed to the ORM filter
    :return: filtered collection of referenced objects
    """
    client = RedisClient.getInstance().redis
    ids = client.zrangebyscore(key, min, max)
    return self.ref_klass.objects.filter(id=tuple(ids), select=select)
def spop(self, key, select=None, **kwargs):
    """SPOP: remove and return a random member of the set at *key*, resolved
    to a model object; records a "set:delete" change-log entry.

    :param key: redis key
    :param select: related-object selector passed to the ORM filter
    :keyword obj: object recorded in the change log
    :return: filtered collection for the popped id
    """
    popped = RedisClient.getInstance().redis.spop(key)
    self.change_log("set:delete", popped, kwargs["obj"])
    return self.ref_klass.objects.filter(id=popped, select=select)
def smembers(self, key, select=None, **kwargs):
    """SMEMBERS: return all members of the set at *key* resolved through the
    ORM; logs the elapsed time of the call.

    :param key: redis key
    :param select: related-object selector passed to the ORM filter
    :return: filtered collection of referenced objects
    """
    started = datetime.datetime.now()
    ids = RedisClient.getInstance().redis.smembers(key)
    logger.info("smembers key: %s,select: %s,use: %s" %
                (key, select, datetime.datetime.now() - started))
    return self.ref_klass.objects.filter(id=tuple(ids), select=select)
def zrange(self, key, start=0, end=10, select=None, **kwargs):
    """Return referenced objects ranked [start, end] in the sorted set at
    *key*, lowest scores first (ZRANGE).

    :param key: sorted-set key
    :param start: first rank index (default 0)
    :param end: last rank index, inclusive (default 10)
    :param select: related-object selector passed to the ORM filter
    :return: filtered collection of referenced objects
    """
    client = RedisClient.getInstance().redis
    ids = client.zrange(key, start, end)
    return self.ref_klass.objects.filter(id=tuple(ids), select=select)
def rpop(self, key, select=None, **kwargs):
    """RPOP: atomically remove and return the tail element of the list at
    *key*, resolved to a model object; records a "list:delete" entry.

    :param key: redis key
    :param select: related-object selector passed to the ORM filter
    :keyword obj: object recorded in the change log
    :return: filtered collection for the popped id
    """
    popped = RedisClient.getInstance().redis.rpop(key)
    self.change_log("list:delete", popped, kwargs["obj"])
    return self.ref_klass.objects.filter(id=popped, select=select)
def rpop(self, key, **kwargs):
    """RPOP: atomically remove the tail element of the list at *key* and
    return the single matching model object, or None when nothing matches.

    :param key: redis key
    :keyword obj: object recorded in the change log
    :return: the referenced object or None
    """
    popped = RedisClient.getInstance().redis.rpop(key)
    self.change_log("list:delete", popped, kwargs["obj"])
    matches = self.ref_klass.objects.filter(id=popped)
    return matches[0] if matches else None
def __init__(self, mapfile=None, cfgfile=None):
    """Load the district map file and connect to Redis per district.conf.

    :param mapfile: path to the MAP_DISTRICT resource (defaults to the
        resource dir next to this file)
    :param cfgfile: path to district.conf (same default location)
    """
    # NOTE(review): dead commented-out MySQL bootstrap (with credentials)
    # removed; recover from VCS history if ever needed.
    cur_dir = os.path.dirname(os.path.abspath(__file__)) or os.getcwd()
    if not mapfile:
        mapfile = cur_dir + "/resource/MAP_DISTRICT"
    self.init_file(mapfile)
    if not cfgfile:
        cfgfile = cur_dir + "/resource/district.conf"
    cf = ConfigParser.ConfigParser()
    cf.read(cfgfile)
    host = cf.get("redis", "host")
    port = cf.getint("redis", "port")
    db = cf.getint("redis", "db")
    self.redis_client = RedisClient(host=host, port=port, db=db)
    self.addr_latlng_util = AddrLatlngUtil()
def lrange(self, key, start=0, end=10, select=None, **kwargs):
    """Return list elements [start, end] at *key* resolved through the ORM;
    logs the elapsed time of the LRANGE call.

    :param key: redis key
    :param start: first index (default 0)
    :param end: last index, inclusive (default 10)
    :param select: related-object selector passed to the ORM filter
    :return: filtered collection of referenced objects
    """
    started = datetime.datetime.now()
    ids = RedisClient.getInstance().redis.lrange(key, start, end)
    logger.info("lrange key: %s,start: %s, end: %s ,select:%s,use : %s" %
                (key, start, end, select, datetime.datetime.now() - started))
    return self.ref_klass.objects.filter(id=tuple(ids), select=select)
def lrange(self, key, start=0, end=10, **kwargs):
    """LRANGE: return list elements [start, end] at *key*, resolved through
    find_include; in Debug mode the elapsed time is logged.

    :param key: redis key
    :param start: first index (default 0)
    :param end: last index, inclusive (default 10)
    :return: related object collection built by find_include
    """
    if setting.Debug:
        n = datetime.datetime.now()
    pks = RedisClient.getInstance().redis.lrange(key, start, end)
    if setting.Debug:
        logger.info("lrange key: %s,start: %s, end: %s ,use : %s" %
                    (key, start, end, datetime.datetime.now() - n))
    # resolve ids to the related object collection
    return find_include(self.ref_klass, pks, kwargs)
def zrem(self, key, member_id, **kwargs):
    """Remove *member_id* from the sorted set at *key* and record a
    "sortset:delete" change-log entry.

    :param key: sorted-set key
    :param member_id: member to remove
    :keyword baseobj: owning object for the change log
    :return: True on success, False on any error (logged)
    """
    pipe = None
    try:
        pipe = RedisClient.getInstance().redis.pipeline()
        pipe.zrem(key, member_id)
        # fixed NameError: the original passed the undefined name "member"
        self.change_log("sortset:delete", member_id, kwargs["baseobj"])
        pipe.execute()
        return True
    except Exception as e:  # py3-compatible; was py2-only "except Exception,e"
        if pipe is not None:  # pipeline creation itself may have failed
            pipe.reset()
        logger.error(e)
        return False
def srem(self, key, member, **kwargs):
    """SREM: remove *member* from the set at *key*, recording a change-log
    entry.

    :param key: redis key
    :param member: member to remove
    :keyword baseobj: owning object for the change log
    :return: True on success, False on any error (logged)
    """
    pipe = None
    try:
        pipe = RedisClient.getInstance().redis.pipeline()
        pipe.srem(key, member)
        # NOTE(review): tag is "list:delete" although this is a set op
        # (sadd uses "set:insert") -- confirm downstream sync expectations
        # before changing the recorded string.
        self.change_log("list:delete", member, kwargs["baseobj"])
        pipe.execute()
        return True
    except Exception as e:  # py3-compatible; was py2-only "except Exception,e"
        if pipe is not None:  # pipeline creation itself may have failed
            pipe.reset()
        logger.error(e)
        return False
def lpush(self, key, value, **kwargs):
    """LPUSH: prepend *value* to the list at *key*, recording a
    "list:insert" change-log entry in the same pipeline.

    :param key: redis key
    :param value: element to prepend
    :keyword obj: object whose id is logged
    :keyword baseobj: owning object for the change log
    :return: True on success, False on any error (logged)
    """
    pipe = None
    try:
        pipe = RedisClient.getInstance().redis.pipeline()
        pipe.lpush(key, value)
        self.change_log("list:insert", kwargs["obj"].id, kwargs["baseobj"], pipe)
        pipe.execute()
        return True
    except Exception as e:  # py3-compatible; was py2-only "except Exception,e"
        if pipe is not None:  # pipeline creation itself may have failed
            pipe.reset()
        logger.error(e)
        return False
def rpush(self, key, value, **kwargs):
    """RPUSH: append *value* to the tail of the list at *key*, recording a
    "list:insert" change-log entry in the same pipeline.

    :param key: redis key
    :param value: element to append
    :keyword obj: object whose id is logged
    :keyword baseobj: owning object for the change log
    :return: True on success, False on any error (logged)
    """
    pipe = None
    try:
        pipe = RedisClient.getInstance().redis.pipeline()
        pipe.rpush(key, value)
        self.change_log("list:insert", kwargs["obj"].id, kwargs["baseobj"], pipe)
        pipe.execute()
        return True
    except Exception as e:  # py3-compatible; was py2-only "except Exception,e"
        if pipe is not None:  # pipeline creation itself may have failed
            pipe.reset()
        logger.error(e)
        return False
def sadd(self, key, member, **kwargs):
    """SADD: add *member* to the set at *key*, recording a "set:insert"
    change-log entry in the same pipeline.

    :param key: redis key
    :param member: member to add
    :keyword obj: object whose id is logged
    :keyword baseobj: owning object for the change log
    :return: True on success, False on any error (logged)
    """
    pipe = None
    try:
        pipe = RedisClient.getInstance().redis.pipeline()
        pipe.sadd(key, member)
        self.change_log("set:insert", kwargs["obj"].id, kwargs["baseobj"], pipe)
        pipe.execute()
        return True
    except Exception as e:  # py3-compatible; was py2-only "except Exception,e"
        if pipe is not None:  # pipeline creation itself may have failed
            pipe.reset()
        logger.error(e)
        return False
class RedisCache(object):
    """Function-result cache backed by Redis."""

    def __init__(self, address, port):
        self.address = address
        self.port = port
        self.redis_client = RedisClient(self.address, self.port)

    def cache(self, **options):
        """Cache decorator.

        Options:
            signature_generator: callable building the argument signature
                (defaults to _get_signature)
            expiration: TTL in seconds; uses SETEX when present
        """
        def cache_inside(fn, **kwargs):
            def wrapper(*args, **kwargs):
                fn_name = fn.__name__
                signature_generator = options.get(
                    'signature_generator', self._get_signature
                )
                if not hasattr(signature_generator, '__call__'):
                    raise TypeError(
                        "signature_generator must be a callable function"
                    )
                signature = signature_generator(args, **kwargs)
                fn_hash = str(hash(fn_name + signature))
                cache_request = self.redis_client.get(fn_hash)
                # "==", not "is": identity comparison against a str literal
                # is interpreter-dependent (SyntaxWarning on CPython 3.8+)
                if cache_request == '':
                    # Cache miss: compute, store (with TTL when configured)
                    ret = fn(*args, **kwargs)
                    if 'expiration' in options:
                        self.redis_client.setex(
                            fn_hash, ret, options.get('expiration')
                        )
                    else:
                        self.redis_client.set(fn_hash, ret, **options)
                    return ret
                # Cache hit
                return cache_request
            return wrapper
        return cache_inside

    def _get_signature(*args, **kwargs):
        """Build "arg1,...,argn,kw1=v1,...,kwn=vn" from the wrapped call.

        Note: declared without an explicit self, so when invoked as a bound
        method args[0] is the instance and args[1] is the wrapped call's
        positional-argument tuple.
        """
        # join positional args with commas
        parsed_args = ",".join(map(lambda x: str(x), args[1]))
        # join keyword args as key=value pairs
        parsed_kwargs = ",".join(
            map(lambda x: '%s=%s' % (x, str(kwargs[x])), kwargs)
        )
        # drop whichever part is empty
        parsed = filter(
            lambda x: x != '', [parsed_args, parsed_kwargs]
        )
        return ','.join(parsed)