def input(self, ml):
    """Push every message of `ml` into the queue; log how many were actually new.

    ml: iterable of snstype.Message objects.
    """
    # _inqueue() returns True only for messages not already stored.
    count = sum(1 for m in ml if self._inqueue(m))
    logger.info("Input %d new message", count)
    self.log("Input %d new message" % count)
def select_samples(message):
    """Build a training sample set from the seen messages.

    Every message with at least one tag is a positive candidate. From the
    untagged ("null") messages, a random subset of roughly the same size is
    sampled and labelled with the reserved null tag.

    message: dict with a 'seen_list' of snstype.Message-like objects
             (each has .tags and .msg_id).
    Returns: dict mapping msg_id -> message.
    """
    candidates = {}
    null_msg = []
    for m in message['seen_list']:
        if len(m.tags) >= 1:
            candidates[m.msg_id] = m
        else:
            null_msg.append(m)
    logger.warning(str(len(message['seen_list'])))
    # Sample same number of null tag messages.
    # BUGFIX: the original divided by (len(seen_list) - len(candidates)),
    # which is 0 when every message is tagged -> ZeroDivisionError.
    if null_msg:
        prob = float(len(candidates)) / len(null_msg)
        if prob > 1.0:
            prob = 1.0
        print("Selecting null message probability: %.3f" % (prob))
        for m in null_msg:
            if random.random() < prob:
                # All tag id is greater than or equal to 1.
                # 0 is reserved as "null" tag if the user has not defined one.
                m.tags = {0: 1}
                candidates[m.msg_id] = m
    print("Total %d samples extracted" % (len(candidates)))
    return candidates
def __init__(self, snspocket = None):
    """Queue init: bind the SNSPocket, set up the platform-id map and paging state,
    and load optional conf/queue.json (falls back to {})."""
    super(SRFEQueue, self).__init__(self.SQLITE_QUEUE_CONF)
    self.sp = snspocket  # SNSPocket object
    # Numeric id per supported platform class name (used by the frontend views).
    self.msgMapping = {
        "Email": 1, "FacebookFeed": 2, "RSS": 3, "RSS2RW": 4,
        "RSSSummary": 5, "RenrenBlog": 6, "RenrenFeed": 7,
        "RenrenPhoto": 8, "RenrenShare": 9, "RenrenStatus": 10,
        "RenrenStatusDirect": 11, "SQLite": 12, "SinaWeiboBase": 13,
        "SinaWeiboStatus": 14, "SinaWeiboWapStatus": 15,
        "TencentWeiboStatus": 16, "TwitterStatus": 17,
        "InstagramFeed": 18, "DoubanFeed": 19}
    self.platform_num = 19  # should equal len(self.msgMapping)
    self.inputnum = 10      # page size used by the timeline routes
    # Paging cursor; the routes later overwrite it with a timestamp or a weight.
    self.condLast = time.time()
    try:
        # 'delimeter' (sic) is a module-level path separator defined elsewhere.
        self.queue_conf = json.loads(open('conf' + delimeter + 'queue.json', 'r').read())
    except IOError, e:
        logger.warning("No conf/queue.json, use defaults")
        self.queue_conf = {}
def config():
    """Bottle route: assemble per-channel config info plus the per-user
    message-toggle and user-toggle tables for the settings page."""
    info = {}
    (k, sp, ap, q) = token_to_user(request.get_cookie('token'))
    pr = get_preference(k)
    if sp is None:
        # Unknown/expired token: return empty structures instead of failing.
        return {"info": {}, "sp": {}, "ap": {}, "q": {}}
    for ch in sp:
        info[ch] = sp[ch].jsonconf
        info[ch]['expire_after'] = int(sp[ch].expire_after())
        info[ch]['is_authed'] = sp[ch].is_authed()
        info[ch]['need_auth'] = sp[ch].need_auth()
    # Per-user queue DB; check_same_thread=False because the web server may
    # serve requests from different threads.
    con_user = sqlite3.connect("." + delimeter + "User" + delimeter + str(k) + delimeter + "srfe_queue.db", check_same_thread = False)
    con_user.isolation_level = None  # autocommit mode
    cur_user = con_user.cursor()
    # toggle == 2 is excluded here; presumably it means "unset" -- TODO confirm.
    r = cur_user.execute("SELECT platform, toggle FROM msg_toggle WHERE toggle <> 2")
    for m in r:
        for ch in info.values():
            # NOTE(review): these warnings look like leftover debug traces.
            logger.warning(str(ch))
            logger.warning(str(m[0]))
            if ch["platform"] == m[0]:
                ch["toggle"] = m[1]
    r = cur_user.execute("SELECT id, username, platform, toggle FROM user_toggle")
    bu = {}
    for m in r:
        bu[str(m[0])] = {
            "id": m[0],
            "username": m[1],
            "platform": m[2],
            "toggle": m[3]
        }
    return {"info": info, "sp": sp, "ap": ap, "q": q, "pr": pr, "bu": bu}
def flag(self, message, fl):
    '''Set the flag column of one stored message.

    flag v.s. message: 1 <-> 1

    message: an snstype.Message (its msg_id is used) or a raw msg_id.
    fl: the new flag value (e.g. 'seen' / 'unseen').
    Returns True on success, False on any DB error.
    '''
    if isinstance(message, snstype.Message):
        #digest = message.digest_pyobj
        msg_id = message.msg_id
    else:
        msg_id = message
    cur = self.con.cursor()
    ret = False
    try:
        cur.execute(
            '''
            UPDATE msg
            SET flag=?
            WHERE id=?
            ''', (fl, msg_id))
        self.con.commit()
        ret = True
    except Exception as e:
        logger.warning("Catch exception: %s", e)
    # BUGFIX: 'ret' was computed but never returned; callers could not tell
    # whether the update succeeded.
    return ret
def wrapper_check_login(*al, **ad):
    """Inner wrapper of a login-required decorator: redirect to /login unless
    the 'token' cookie validates.

    NOTE(review): `user` and `func` come from the enclosing decorator's
    closure, which is not visible in this chunk -- confirm their origin there.
    """
    logger.warning(str(user))  # looks like a leftover debug trace
    token = request.get_cookie("token")
    if not check_token(token):
        redirect('/login')
    else:
        return func(*al, **ad)
def tag_add(self, name):
    """Insert a new tag called `name` (visible by default) and rebuild the
    in-memory tag cache."""
    self.con.cursor().execute('''
        INSERT INTO tag(name, visible)
        VALUES(?, ?)
        ''', (name, 1))
    logger.debug("Add tag %s", name)
    self.refresh_tags()
def run(self):
    """Thread main loop: pull new messages into the queue every INPUT_GAP seconds.

    Runs until self.keep_running is cleared; any exception is logged and the
    loop continues.
    """
    while (self.keep_running):
        try:
            self.queue.input()
            logger.debug("Invoke input() on queue")
            time.sleep(INPUT_GAP)  # module-level fetch interval (seconds)
        except Exception as e:
            logger.warning("Catch Exception in InputThread: %s", e)
def tag_toggle(self, tag_id):
    """Flip the visibility (0/1) of tag `tag_id` and refresh the tag cache.

    tag_id: integer id of an existing tag (must be present in self.tags_all).
    """
    cur_visible = self.tags_all[tag_id]['visible']
    cur = self.con.cursor()
    r = cur.execute('''
        UPDATE tag SET visible=?
        WHERE id=?
        ''', (1 - cur_visible, tag_id))
    logger.debug("Set tag %d to visibility %d", tag_id, 1 - cur_visible)
    # BUGFIX: the cached tag table was left stale after the DB update;
    # refresh it like tag_add() does.
    self.refresh_tags()
def __init__(self, snspocket=None):
    """Queue init: bind the SNSPocket and load optional conf/queue.json
    (defaults to an empty dict when the file is absent)."""
    super(SRFEQueue, self).__init__(self.SQLITE_QUEUE_CONF)
    self.sp = snspocket  # SNSPocket object
    try:
        self.queue_conf = json.loads(open('conf/queue.json', 'r').read())
    except IOError, e:
        logger.warning("No conf/queue.json, use defaults")
        self.queue_conf = {}
def __init__(self, snspocket = None):
    """Initialize the queue on the SQLite backend; queue.json config is
    optional and missing-file errors are tolerated."""
    super(SRFEQueue, self).__init__(self.SQLITE_QUEUE_CONF)
    self.sp = snspocket  # SNSPocket object
    try:
        self.queue_conf = json.loads(open('conf/queue.json', 'r').read())
    except IOError, e:
        logger.warning("No conf/queue.json, use defaults")
        self.queue_conf = {}
def rand_execute(prob, func):
    """Execute `func` with probability `prob`.

    prob: probability in [0, 1]; values >= 1.0 always execute.
    func: zero-argument callable.
    Returns func()'s result when executed, otherwise None.
    """
    if prob >= 1.0:
        # BUGFIX: this branch used to discard func()'s return value while the
        # probabilistic branch below returned it; return it consistently.
        return func()
    if random.random() < prob:
        logger.debug('func execute. prob: %f', prob)
        return func()
    logger.debug('func do not execute. prob: %f', prob)
class Feature(object):
    """Feature extraction registry.

    At class-definition time, reads conf/autoweight.json and dynamically
    imports the configured extractor classes from ranking.plugin.<module>,
    instantiating each one with `env`. Missing config or a missing
    'features' key is tolerated with a warning.
    """
    # Paths handed to every extractor instance.
    env = {
        "dir_conf": "./conf",
        "dir_kdb": "./kdb",
    }
    feature_extractors = []
    try:
        awjson = json.loads(open('conf/autoweight.json').read())
        features = awjson['features']
        import plugin
        for f in features:
            # Each entry is presumably a [module_name, class_name] pair -- TODO confirm.
            module_name = f[0]
            class_name = f[1]
            #print module_name
            #print class_name
            mo = __import__("ranking.plugin.%s" % module_name, fromlist=["ranking.plugin"])
            cl = getattr(mo, class_name)
            feature_extractors.append(cl(env))
            #TODO:
            #    Make the dynamic import method better (more standard ways).
            #    The current import method is borrowed from:
            #    http://stackoverflow.com/questions/301134/dynamic-module-import-in-python
            #    It just works.
            #cl = __import__("plugin.%s.%s" % (module_name, class_name), fromlist=["plugin.%s" % module_name])
            #cl = getattr(getattr(plugin, module_name), class_name)
    except IOError:
        logger.warning('No "conf/autoweight.json"!')
    except KeyError:
        logger.warning('No "features" defined"!')

    def __init__(self):
        super(Feature, self).__init__()

    @staticmethod
    def extract(msg):
        '''
        Feature extraction.

        It will extract features to a dict and store in "msg.feature".

        msg: an snstype.Message object
        '''
        if not isinstance(msg, snstype.Message):
            logger.warning(
                "Cannot extract feature for non snstype.Message object")
            return
        # Add all kinds of features
        msg.feature = {}
        for fe in Feature.feature_extractors:
            fe.add_features(msg)
def tag_add(self, name):
    """Insert a new, visible tag named `name`, then rebuild the tag cache.

    Note: no duplicate-name check is performed in this variant.
    """
    cur = self.con.cursor()
    r = cur.execute(
        '''
        INSERT INTO tag(name, visible)
        VALUES(?, ?)
        ''', (name, 1))
    logger.debug("Add tag %s", name)
    self.refresh_tags()
def config_preference_add():
    """Bottle route: append (winner, loser) preference pairs from the posted
    form to the user's preference list, then persist it to the per-user
    autoweight.json.

    Duplicate pairs and self-pairs (w == l) are skipped.
    """
    k = token_to_user_key(request.get_cookie('token'))
    for (w, l) in zip(request.forms.getall("winner"), request.forms.getall("loser")):
        # NOTE(review): lookup uses user_sp[k] but append uses user_sp[str(k)];
        # if k is not already a str these may address different entries -- confirm.
        if not [w, l] in user_sp[k]["pr"]["preference"] and w != l:
            user_sp[str(k)]["pr"]["preference"].append([w, l])
    try:
        # BUGFIX: the target path was a quote-mangled string literal, so the
        # file was written under a bogus name; build the path properly.
        fn = "." + delimeter + "User" + delimeter + str(k) + delimeter + "conf" + delimeter + "autoweight.json"
        with open(fn, "w") as f:
            json.dump(user_sp[str(k)]["pr"], f)
    except Exception as e:
        # Message corrected: this is not InputThread.
        logger.warning("Catch Exception in config_preference_add: %s", e)
    return "Preference has been added!"
def tag_toggle(self, tag_id):
    """Flip the visibility bit (0/1) of tag `tag_id`, persist it, and
    rebuild the in-memory tag cache."""
    new_visible = 1 - self.tags_all[tag_id]['visible']
    self.con.cursor().execute('''
        UPDATE tag SET visible=?
        WHERE id=?
        ''', (new_visible, tag_id))
    logger.debug("Set tag %d to visibility %d", tag_id, new_visible)
    self.refresh_tags()
def hook_new_message(q, msg):
    """New-message hook: when `msg` was authored by one of our own channels
    (matching the channel's configured user_name), hand it to `distribute`."""
    author = msg.parsed.username
    mymsg = any(
        ch.jsonconf['user_name'] == author
        for ch in q.sp.values()
        if 'user_name' in ch.jsonconf
    )
    if mymsg:
        logger.debug('distribute message: %s', msg)
        distribute.update(msg)
def load_weight(self, fn = None):
    """Load the feature-weight dict from a JSON file.

    fn: path to the weights file; defaults to 'conf/weights.json'.
    Side effects: sets self.feature_weight and self.feature_name;
    both become empty when the file is missing.
    """
    if fn is None:
        fn = 'conf/weights.json'
    try:
        self.feature_weight = json.loads(open(fn, 'r').read())
        self.feature_name = self.feature_weight.keys()
        logger.info("Loaded weights: %s", self.feature_weight)
    except IOError:
        logger.warning("No '%s' weights config file, use empty setting.", fn)
        self.feature_weight = {}
        self.feature_name = self.feature_weight.keys()
def __init__(self, snspocket = None):
    """Queue init: bind the SNSPocket; conf/queue.json is optional."""
    super(SRFEQueue, self).__init__(self.SQLITE_QUEUE_CONF)
    self.sp = snspocket  # SNSPocket object
    #self.__mount_default_home_timeline_count()
    #self.queue_conf = json.load(open('conf/queue.json', 'r'))
    #self.feature_weight = self.queue_conf['feature_weight']
    try:
        self.queue_conf = json.loads(open('conf/queue.json', 'r').read())
    except IOError, e:
        logger.warning("No conf/queue.json, use defaults")
        self.queue_conf = {}
def load_weight(self, fn=None):
    """Load feature weights from `fn` (default: conf/weights.json).

    On a missing file the weights fall back to an empty dict. Always leaves
    self.feature_weight and self.feature_name populated.
    """
    path = 'conf/weights.json' if fn is None else fn
    try:
        self.feature_weight = json.loads(open(path, 'r').read())
        logger.info("Loaded weights: %s", self.feature_weight)
    except IOError:
        logger.warning("No '%s' weights config file, use empty setting.", path)
        self.feature_weight = {}
    # Common to both paths: cache the weight names.
    self.feature_name = self.feature_weight.keys()
def run(self):
    """Per-user input thread: pull messages, then persist channel/pocket
    config under ./User/<user_id>/conf/.

    NOTE(review): os.chdir is process-global; with several of these threads
    running, the chdir/chdir-back dance can race -- confirm that only one
    such thread exists per process.
    NOTE(review): self.queue.input() sits outside the try block, so an
    exception there would kill the thread -- confirm that is intended.
    """
    #webbrowser.open("http://127.0.0.1:8080/login")
    while (self.keep_running):
        self.queue.input()
        try:
            logger.debug("Invoke input() on queue")
            os.chdir("." + delimeter + "User" + delimeter + str(self.user_id))
            p = os.getcwd()
            self.sp.save_config(fn_channel= p + delimeter + "conf" + delimeter + "channel.json",fn_pocket= p + delimeter + "conf" + delimeter + "pocket.json")
            os.chdir(".." + delimeter + ".." + delimeter)
            time.sleep(INPUT_GAP)  # fetch interval (seconds)
        except Exception as e:
            logger.warning("Catch Exception in InputThread: %s", e)
def user_toggle(self, msg_id):
    """Load the pickled message for `msg_id`.

    NOTE(review): the fetched `message` is never used and nothing is actually
    toggled -- this looks like a truncated or unfinished implementation;
    check version control history before relying on it.
    """
    cur = self.con.cursor()
    try:
        r = cur.execute('''
            SELECT pyobj FROM msg
            WHERE id=?
            ''', (msg_id, ))
        str_obj = r.next()[0]
        message = self._str2pyobj(str_obj)
    except Exception, e:
        logger.warning("Catch exception: %s", e)
        return
def get_unseen_count(self):
    """Return how many messages are still flagged 'unseen'; -1 on any error."""
    rows = self.con.cursor().execute('''
        SELECT count(*) FROM msg
        WHERE flag='unseen'
        ''')
    try:
        return next(iter(rows))[0]
    except Exception as e:
        logger.warning("Catch Exception: %s", e)
        return -1
def home_timeline():
    """Bottle route: newest-first timeline page for the current user.

    Returns template variables: sl (messages), tl, mp (channel -> platform id),
    tags, meta (unseen count) and the token cookie.
    """
    (k, sp, ap, q) = token_to_user(request.get_cookie('token'))
    (sl, tl) = q.output(q.inputnum)
    logger.warning(str(len(tl)))  # looks like a leftover debug trace
    meta = {
        "unseen_count": q.get_unseen_count()
    }
    if len(sl) > 0:
        # Remember the oldest timestamp on this page as the paging cursor.
        q.condLast = sl[-1].parsed.time
    mp = {}
    for (s, v) in sp.items():
        mp[s] = q.msgMapping[v.platform]
    return {'sl': sl, 'mp': mp, 'tl': tl, 'snsapi_utils': snsapi_utils, 'tags': q.get_tags(), 'meta': meta, "token": request.get_cookie('token')}
def reweight_all(self, younger_than = 86400):
    """Recompute the ranking weight of every message newer than
    `younger_than` seconds.

    Returns False on error.
    NOTE(review): this variant has no explicit success return (falls off the
    end -> None) and `begin` is never used; a fuller variant elsewhere logs
    elapsed time and returns True -- probably an older copy.
    """
    begin = self.time()
    cur = self.con.cursor()
    try:
        latest_time = int(self.time() - younger_than)
        r = cur.execute('''
            SELECT id from msg
            WHERE time >= ?
            ''', (latest_time, ))
        for m in r:
            self.reweight(m[0])
    except Exception, e:
        logger.warning("Catch exception: %s", e)
        return False
def input(self, channel = None):
    """Fetch the home timeline (for one channel, or for all configured
    channels) and enqueue every message; log how many were new."""
    if channel:
        ml = self._home_timeline(channel)
    else:
        ml = snstype.MessageList()
        for chn in self.sp:
            ml.extend(self._home_timeline(chn))
    # Count only the messages that were not already queued.
    count = sum(1 for m in ml if self._inqueue(m))
    logger.info("Input %d new message", count)
    self.log("Input %d new message" % count)
def sql(self, query_string):
    """Run a raw SQL query and wrap the result rows into a MessageList.

    Assumes the query selects (id, pyobj) in that column order -- TODO confirm
    against callers.
    WARNING: query_string is executed verbatim; never pass untrusted input
    (SQL injection).
    """
    cur = self.con.cursor()
    try:
        r = cur.execute(query_string)
        logger.debug("SQL query string: %s", query_string)
        message_list = snstype.MessageList()
        for m in r:
            obj = self._str2pyobj(m[1])
            obj.msg_id = m[0]
            message_list.append(obj)
        return message_list
    except Exception, e:
        logger.warning("Catch exception when executing '%s': %s", query_string, e)
        return snstype.MessageList()
def auth_second():
    """Bottle route: complete the second stage of OAuth for the user's
    currently-selected channel, then save the token under the user's dir."""
    (k, sp, ap, q) = token_to_user(request.get_cookie('token'))
    op = "auth_second for %s" % (ap.current_channel)
    qs = request.query_string
    # For compatibility with lower level interface.
    # The snsbase parses code from the whole url.
    ap.code_url = "http://snsapi.snsapi/auth/second/auth?%s" % qs
    sp[ap.current_channel].auth_second()
    logger.warning("c")  # leftover trace marker
    # save_token writes relative paths, hence the per-user chdir dance;
    # note os.chdir is process-global.
    os.chdir("." + delimeter + "User" + delimeter + str(k))
    sp[ap.current_channel].save_token()
    os.chdir(".." + delimeter + ".." + delimeter)
    logger.warning("d")  # leftover trace marker
    result = "done: %s" % qs
    return {'operation': op, 'result': result}
def reweight_all(self, younger_than=86400):
    """Reweight every message whose time is within the last `younger_than`
    seconds; returns False on error (no explicit success return in this
    variant -- see NOTE).

    NOTE(review): `begin` is unused and there is no `return True`; a fuller
    copy of this method elsewhere logs elapsed time and returns True.
    """
    begin = self.time()
    cur = self.con.cursor()
    try:
        latest_time = int(self.time() - younger_than)
        r = cur.execute(
            '''
            SELECT id from msg
            WHERE time >= ?
            ''', (latest_time, ))
        for m in r:
            self.reweight(m[0])
    except Exception, e:
        logger.warning("Catch exception: %s", e)
        return False
def input(self, channel=None):
    """Pull the home timeline (single channel or all channels) into the
    queue and report how many messages were new.

    Returns a human-readable summary string.
    """
    if channel:
        ml = self._home_timeline(channel)
    else:
        ml = snstype.MessageList()
        for chn in self.sp:
            ml.extend(self._home_timeline(chn))
    count = sum(1 for m in ml if self._inqueue(m))
    logger.info("Input %d new message", count)
    self.log("Input %d new message" % count)
    return "Input %s new messages" % count
def update_from_console(t, *al, **ad):
    '''
    A wrapper function to deal with user input from console.

    String input from console is in console encoding. We must first cast it
    to unicode, which is the standard across SNSAPI.
    '''
    if isinstance(t, snstype.Message):
        return sp.update(t, *al, **ad)
    if isinstance(t, str):
        # Console strings need decoding before entering SNSAPI.
        return sp.update(console_input(t), *al, **ad)
    logger.warning("unknown type: %s", type(t))
def ranked_timeline():
    """Bottle route: weight-ranked timeline page; remembers the lowest weight
    on the page as the paging cursor (q.condLast)."""
    (k, sp, ap, q) = token_to_user(request.get_cookie('token'))
    (sl, tl) = q.output_ranked(q.inputnum, 86400)
    meta = {
        "unseen_count": q.get_unseen_count()
    }
    logger.warning(str(q.condLast))  # leftover debug trace
    if len(sl) > 0:
        # A literal 0.0 weight is replaced by a small epsilon -- presumably so
        # the next-page "weight < condLast" condition stays usable; confirm
        # against output_ranked().
        if str(sl[-1]["weight"]) == "0.0":
            q.condLast = 0.0001
        else:
            q.condLast = sl[-1]["weight"]
    logger.warning(str(q.condLast))
    mp = {}
    for (s, v) in sp.items():
        mp[s] = q.msgMapping[v.platform]
    return {'sl': sl, 'mp': mp, 'tl': tl, 'snsapi_utils': snsapi_utils, 'tags': q.get_tags(), 'meta': meta, "token": request.get_cookie('token')}
def reply(self, msg_id, comment, channel = None):
    """Reply to the stored message `msg_id` with `comment` via SNSPocket.

    channel: optional channel name to reply through.
    Returns the SNSPocket result, or {} on any error.
    """
    cur = self.con.cursor()
    try:
        r = cur.execute('''
            SELECT pyobj FROM msg
            WHERE id=?
            ''', (msg_id, ))
        str_obj = next(r)[0]
        message = self._str2pyobj(str_obj)
        result = self.sp.reply(message, comment, channel)
        # BUGFIX: the format string had three placeholders but only two
        # arguments, raising TypeError and making every reply return {}
        # even after the reply itself succeeded.
        self.log('[reply]%s;%s;%s' % (msg_id, result, comment))
        return result
    except Exception as e:
        logger.warning("Catch exception: %s", e)
        return {}
def extract(msg):
    """
    Feature extraction.

    It will extract features to a dict and store in "msg.feature".

    msg: an snstype.Message object (silently ignored, with a warning,
    otherwise).
    """
    if not isinstance(msg, snstype.Message):
        logger.warning("Cannot extract feature for non snstype.Message object")
        return
    # Add all kinds of features; each registered extractor mutates
    # msg.feature in place.
    msg.feature = {}
    for fe in Feature.feature_extractors:
        fe.add_features(msg)
def reweight(self, msg_id):
    """Recompute and persist the ranking weight (and weight_time) of one
    stored message; errors are logged and swallowed."""
    cur = self.con.cursor()
    try:
        r = cur.execute('''
            SELECT pyobj FROM msg
            WHERE id=?
            ''', (msg_id,))
        m = self._str2pyobj(list(r)[0][0])
        w = self._weight_feature(m)  # score under the current feature weights
        t = int(self.time())
        r = cur.execute('''
            UPDATE msg
            SET weight=?,weight_time=?
            WHERE id=?
            ''', (w, t, msg_id))
    except Exception, e:
        logger.warning("Catch exception: %s", e)
def sql(self, condition):
    """Query messages joined with msg_tag under the given WHERE `condition`
    and wrap (id, pyobj) rows into a MessageList.

    WARNING: `condition` is interpolated directly into the SQL string --
    acceptable only because input is trusted (see inline comment), but this
    is SQL injection by design; never expose it to untrusted callers.
    """
    cur = self.con.cursor()
    try:
        # We trust the client string. This software is intended for personal use.
        qs = "SELECT DISTINCT msg.id,msg.pyobj FROM msg,msg_tag WHERE %s" % condition
        r = cur.execute(qs)
        logger.debug("SQL query string: %s", qs)
        message_list = snstype.MessageList()
        for m in r:
            obj = self._str2pyobj(m[1])
            obj.msg_id = m[0]
            message_list.append(obj)
        return message_list
    except Exception, e:
        logger.warning("Catch exception when executing '%s': %s", condition, e)
        return snstype.MessageList()
def extract(msg):
    '''
    Feature extraction.

    It will extract features to a dict and store in "msg.feature".

    msg: an snstype.Message object; anything else is ignored with a warning.
    '''
    if not isinstance(msg, snstype.Message):
        logger.warning(
            "Cannot extract feature for non snstype.Message object")
        return
    # Add all kinds of features; extractors mutate msg.feature in place.
    msg.feature = {}
    for fe in Feature.feature_extractors:
        fe.add_features(msg)
def forward(self, msg_id, comment):
    """Forward the stored message `msg_id` with `comment` via SNSPocket.

    Returns the SNSPocket result, or {} on any error.
    """
    cur = self.con.cursor()
    try:
        rows = cur.execute('''
            SELECT pyobj FROM msg
            WHERE id=?
            ''', (msg_id, ))
        message = self._str2pyobj(next(rows)[0])
        result = self.sp.forward(message, comment)
        self.log('[forward]%s;%s;%s' % (msg_id, result, comment))
        return result
    except Exception as e:
        logger.warning("Catch exception: %s", e)
        return {}
def unlike(self, msg_id, channel = None):
    """Undo a like on the stored message; on success also clear the local
    'like' column.

    Returns the SNSPocket result, or False on exception.
    """
    cur = self.con.cursor()
    try:
        r = cur.execute('''
            SELECT pyobj FROM msg
            WHERE id=?
            ''', (msg_id, ))
        str_obj = r.next()[0]
        message = self._str2pyobj(str_obj)
        result = self.sp.unlike(message, channel)
        self.log('[unlike]%s;%s' % (msg_id, result))
        if result:
            # Keep the local cache consistent with the remote state.
            cur.execute("UPDATE msg SET like = 0 WHERE id=?", (msg_id, ))
        return result
    except Exception, e:
        logger.warning("Catch exception: %s", e)
        return False
def tag_add(self, name):
    """Create tag `name` unless it already exists.

    Returns the new tag's id on success, or the string
    "Duplicate tag found." when a tag of that name exists.
    """
    cur = self.con.cursor()
    r = cur.execute('''
        SELECT id FROM tag
        WHERE name=?
        ''', (name, ))
    for t in cur:
        # Any integer id row means the name is already taken.
        # NOTE(review): sqlite may hand back long on Py2, which
        # 'type(t[0]) == int' would miss -- isinstance would be safer; confirm.
        if type(t[0]) == int:
            return "Duplicate tag found."
    r = cur.execute('''
        INSERT INTO tag(name, visible)
        VALUES(?, ?)
        ''', (name, 1))
    logger.debug("Add tag %s", name)
    self.refresh_tags()
    # Re-select to return the id of the row just inserted.
    r = cur.execute('''
        SELECT id FROM tag
        WHERE name=?
        ''', (name, ))
    for t in cur:
        return t[0]
def reweight(self, msg_id):
    """Recompute one message's ranking weight from its pickled object and
    store it with the current timestamp; errors are logged and swallowed."""
    cur = self.con.cursor()
    try:
        r = cur.execute(
            '''
            SELECT pyobj FROM msg
            WHERE id=?
            ''', (msg_id, ))
        m = self._str2pyobj(list(r)[0][0])
        w = self._weight_feature(m)  # score under current feature weights
        t = int(self.time())
        r = cur.execute(
            '''
            UPDATE msg
            SET weight=?,weight_time=?
            WHERE id=?
            ''', (w, t, msg_id))
    except Exception, e:
        logger.warning("Catch exception: %s", e)
def tag(self, message, tg):
    '''Attach tag `tg` to a message.

    flag v.s. message: * <-> *

    message: an snstype.Message (its msg_id is used) or a raw msg_id.
    tg: tag id to attach.
    Returns True on success, False on any DB error (e.g. duplicate pair).
    '''
    if isinstance(message, snstype.Message):
        msg_id = message.msg_id
    else:
        msg_id = message
    cur = self.con.cursor()
    ret = False
    try:
        cur.execute(
            '''
            INSERT INTO msg_tag(msg_id, tag_id)
            VALUES (?,?)
            ''', (msg_id, tg))
        self.con.commit()
        ret = True
    except Exception as e:
        logger.warning("Catch exception: %s", e)
    # BUGFIX: 'ret' was computed but never returned; callers could not tell
    # whether tagging succeeded.
    return ret
begin = self.time() cur = self.con.cursor() try: latest_time = int(self.time() - younger_than) r = cur.execute( ''' SELECT id from msg WHERE time >= ? ''', (latest_time, )) for m in r: self.reweight(m[0]) except Exception, e: logger.warning("Catch exception: %s", e) return False end = self.time() logger.info("Reweight done. Time elapsed: %.2f", end - begin) return True def _dump2pickle(self, fn_pickle): # dump all to pickle format cur = self.con.cursor() r = cur.execute(''' SELECT id,time,userid,username,text,pyobj,flag FROM msg ''') message_list = snstype.MessageList() for m in r: obj = self._str2pyobj(m[5]) obj.msg_id = m[0] obj.flag = m[6] message_list.append(obj) r = cur.execute('''
# -*- coding: utf-8 -*- import sys sys.path.append('snsapi') from functools import wraps import json import snsapi from snsapi.snspocket import SNSPocket from snsapi.snslog import SNSLog as logger from lbucket import * try: _wauto_conf = json.loads(open('conf/wauto.json').read()) except Exception, e: logger.warning("Load conf error: %s. Use default", e) _wauto_conf = {} _wauto_conf['priority'] = { 'update': 5, 'home_timeline': 4, 'forward': 3, 'reply': 3 } logger.debug("conf: %s", _wauto_conf) ''' Make the invokation from Python interpreter more convenient. Use synchronous calls. ''' def _dummy_decorator_generator(*args, **kwargs):
sp.load_config() for c in sp.values(): c.request_url = lambda url: ap.request_url(url) c.fetch_code = lambda : ap.fetch_code() c.auth() srfe = Bottle() q = SRFEQueue(sp) q.connect() q.refresh_tags() try: jsonconf = json.load(open('conf/srfe.json', 'r')) except IOError: logger.warning("Do not find conf/srfe.json, use defaults") jsonconf = { "cookie_sign_key": "You had better set your own key in the config!!!!", "username": "******", "password": "******" } INPUT_GAP = jsonconf.get('input_gap', 60 * 5) # 5 Minutes per fetch logger.debug("INPUT_GAP: %s", INPUT_GAP) class InputThread(threading.Thread): def __init__(self, queue): super(InputThread, self).__init__() self.queue = queue self.keep_running = True
def _inqueue(self, message):
    """Insert `message` into the msg table unless its digest already exists.

    Returns True when newly inserted, False on duplicate or any error.
    """
    cur = self.con.cursor()
    try:
        # Deduplicate
        # Explain the problem of the following two methods for future reference:
        #    1. digest = self._digest_pyobj(message)
        #       Python object are hashed to different values even the SNS message
        #       fields are all the same.
        #    2. digest = message.digest_parsed()
        #       I forget what is the problem.. I should have noted before.
        digest = message.digest()
        #logger.debug("message pyobj digest '%s'", digest)
        r = cur.execute(
            '''
            SELECT digest FROM msg
            WHERE digest = ?
            ''', (digest, ))
        if len(list(r)) > 0:
            #logger.debug("message '%s' already exists", digest)
            return False
        else:
            logger.debug("message '%s' is new", digest)
            #TODO:
            #    This is temporary solution for object digestion.
            #
            #    For our Message object, the following evaluates to False!!
            #    Serialize.dumps(o) == Serialize.dumps(Serialize.loads(Serialize.dumps(o)))
            #
            #    To perform deduplication and further refer to this message,
            #    we store the calculated digestion as an attribute of the message.
            #    Note however, after this operation the digest of 'message' will not
            #    be the valued stored therein! This is common problem in such mechanism,
            #    e.g. UDP checksum. Developers should have this in mind.
            message.digest_pyobj = self._digest_pyobj(message)
            cur.execute('''
                INSERT INTO msg(
                time
                , text
                , userid
                , username
                , mid
                , platform
                , digest
                , digest_parsed
                , digest_pyobj
                , parsed
                , pyobj
                , flag
                , weight
                , weight_time
                )
                VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?)
                ''', (\
                message.parsed.time,\
                message.parsed.text,\
                message.parsed.userid,\
                message.parsed.username,\
                str(message.ID),\
                message.platform,\
                message.digest(),\
                message.digest_parsed(),\
                #self._digest_pyobj(message),\
                message.digest_pyobj,\
                message.dump_parsed(),\
                self._pyobj2str(message),\
                "unseen",
                self._weight_feature(message),
                int(time.time())
                ))
            return True
    except Exception, e:
        logger.warning("failed: %s", str(e))
        #print message
        #raise e
        return False
def load_weight(self, fn=None):
    """Load the feature-weight dict from a JSON file.

    fn: path to the weights file; defaults to 'conf/weights.json'.
    Side effects: sets self.feature_weight and self.feature_name; both become
    empty when the file is missing.
    """
    if fn is None:
        fn = 'conf/weights.json'
    try:
        self.feature_weight = json.loads(open(fn, 'r').read())
        logger.info("Loaded weights: %s", self.feature_weight)
    except IOError:
        # ROBUSTNESS FIX: this variant crashed on a missing weights file;
        # fall back to an empty weight set like the other load_weight copies.
        logger.warning("No '%s' weights config file, use empty setting.", fn)
        self.feature_weight = {}
    self.feature_name = self.feature_weight.keys()
* lc = load_config * sc = save_config * lsc = list_channel * lsp = list_platform * newc = new_channel * addc = add_channel * clc = clear_channel * auth = auth * ht = home_timeline * up = update * re = reply * fwd = forward * lk = like * ulk = unlike Tutorial of SNSCLI: * https://github.com/hupili/snsapi/wiki/Tutorial-of-snscli """ if __name__ == '__main__': #==== default initialization one may like ==== print helpdoc load_config() list_channel() auth() logger.info("Ready to drop into the interactive shell of SNSCLI!") import code code.interact(local=locals())
class WeiboAutomator(object):
    '''Wrap common operations with rate limit facility
    '''
    # Most buckets are derived from Sina's offcial description [1].
    # The additional one 'wauto_snsapi' limits SNSAPI request rate globally
    # (avoid lower layer failure).
    #
    # Ref:
    #    * [1] http://open.weibo.com/wiki/Rate-limiting
    SINA_BUCKETS = [
        ('wauto_snsapi', LeakyBucket(1, 0, 0.5)),
        ('ip.hour.test_auth', cal_bucket(1000, 60 * 60)),
        ('user.hour.test_auth.total', cal_bucket(150, 60 * 60)),
        ('user.hour.test_auth.update', cal_bucket(30, 60 * 60)),
        ('user.hour.test_auth.reply', cal_bucket(60, 60 * 60)),
        ('user.hour.test_auth.follow', cal_bucket(60, 60 * 60)),
        ('user.day.test_auth.follow', cal_bucket(100, 60 * 60 * 24)),
    ]
    # Named bucket combinations, one per API category; specific groups extend
    # 'general' with the category's own per-hour/per-day buckets.
    POLICY_GROUP = {}
    POLICY_GROUP['general'] = {
        'wauto_snsapi': 1,
        'ip.hour.test_auth': 1,
        'user.hour.test_auth.total': 1
    }
    POLICY_GROUP['update'] = dict(POLICY_GROUP['general'],
                                  **{'user.hour.test_auth.update': 1})
    POLICY_GROUP['reply'] = dict(POLICY_GROUP['general'],
                                 **{'user.hour.test_auth.reply': 1})
    POLICY_GROUP['follow'] = dict(
        POLICY_GROUP['general'],
        **{
            'user.hour.test_auth.follow': 1,
            'user.day.test_auth.follow': 1
        })
    # Default completion callback for rate-limited calls: log the return value.
    _log = lambda x: logger.debug('ret: %s', x)

    def __init__(self):
        """Load and auth all configured channels; the working channel must be
        named 'automator' in the channel config."""
        super(WeiboAutomator, self).__init__()
        self.sp = SNSPocket()
        self.sp.load_config()
        self.sp.auth()
        # assign 'channel_name' as automator
        self.weibo = self.sp['automator']
        self.rlq = RateLimitQueue()
        # NOTE(review): map() is eager only on Python 2; under Python 3 this
        # lazy map would never add the buckets -- confirm target interpreter.
        map(lambda t: self.rlq.add_bucket(t[0], t[1]), self.SINA_BUCKETS)

    # This implementation of dumps and loads are too simple.
    # They do not work with some callback functions.
    # I switch to 'dill'
    #def dumps(self):
    #    r = copy.deepcopy(self.rlq)
    #    for t in r._tasks:
    #        # First arg should be 'self' if do not operate our RLQ directly.
    #        t.args = list(t.args)
    #        t.args.pop(0)
    #        t.func = marshal.dumps(t.func.func_code)
    #        t.callback = marshal.dumps(t.callback.func_code)
    #    return pickle.dumps(r)
    #def loads(self, s):
    #    r = pickle.loads(s)
    #    for t in r._tasks:
    #        t.args.insert(0, self)
    #        t.args = tuple(t.args)
    #        code_func = marshal.loads(t.func)
    #        t.func = types.FunctionType(code_func, globals())
    #        code_callback = marshal.loads(t.callback)
    #        t.callback = types.FunctionType(code_callback, globals())
    #    self.rlq = r
    #Original 'loads' 2
    #    #self.rlq._buckets = r._buckets
    #    #for t in r._tasks:
    #    #    code = marshal.loads(t.callback)
    #    #    t.callback = types.FunctionType(code, globals())
    #    #    t.args.insert(0, self)
    #    #    t.args = tuple(t.args)
    #    #    t.kwargs['callback'] = t.callback
    #    #    f = getattr(WeiboAutomator, t.func)
    #    #    # Execute the wrapped class method again to insert task
    #    #    f(*t.args, **t.kwargs)

    def run(self):
        # Execute pending rate-limited tasks (delegated to the queue).
        return self.rlq.run()

    def clear_tasks(self):
        # Drop all queued-but-unexecuted tasks.
        return self.rlq.clear_tasks()

    def _tounicode(self, text):
        # Normalize str -> unicode (Py2); SNSAPI works in unicode throughout.
        if isinstance(text, unicode):
            return text
        else:
            return text.decode('utf-8')

    def get_uid(self):
        """Return the authenticated account's uid.

        NOTE(review): self._uid is checked but never assigned anywhere in this
        class, so the cache branch looks dead and every access hits the API --
        confirm and either cache the result or drop the hasattr check.
        """
        if hasattr(self, '_uid'):
            return self._uid
        else:
            ret = self.weibo.weibo_request('account/get_uid', 'GET', {})
            return ret['uid']
    uid = property(get_uid)

    def rate_limit_status(self):
        # Remote (server-side) rate limit status, not our local buckets.
        ret = self.weibo.weibo_request('account/rate_limit_status', 'GET', {})
        return ret

    @rate_limit(buckets=POLICY_GROUP['follow'], callback=_log)
    def follow(self, uid):
        ret = self.weibo.weibo_request('friendships/create', 'POST',
                                       {'uid': uid})
        return ret

    @rate_limit(buckets=POLICY_GROUP['follow'], callback=_log)
    def follow_by_name(self, screen_name):
        ret = self.weibo.weibo_request('friendships/create', 'POST',
                                       {'screen_name': screen_name})
        return ret

    @rate_limit(buckets=POLICY_GROUP['general'], callback=_log,
                priority=_wauto_conf['priority']['home_timeline'])
    def home_timeline(self, count=20):
        return self.weibo.home_timeline(count)

    @rate_limit(buckets=POLICY_GROUP['update'], callback=_log,
                priority=_wauto_conf['priority']['update'])
    def update(self, text):
        return self.weibo.update(self._tounicode(text))

    @rate_limit(buckets=POLICY_GROUP['reply'], callback=_log,
                priority=_wauto_conf['priority']['reply'])
    def reply(self, status, text):
        # Accept either a Message object or a raw status ID.
        if isinstance(status, snsapi.snstype.Message):
            statusID = status.ID
        else:
            statusID = status
        return self.weibo.reply(statusID, self._tounicode(text))

    @rate_limit(buckets=POLICY_GROUP['general'], callback=_log,
                priority=_wauto_conf['priority']['forward'])
    def forward(self, status, text):
        return self.weibo.forward(status, self._tounicode(text))

    @rate_limit(buckets=POLICY_GROUP['general'], callback=_log)
    def show(self, uid=None, screen_name=None):
        # Look up a user by uid, by screen_name, or default to ourself.
        params = {}
        if not uid is None:
            params['uid'] = uid
        elif not screen_name is None:
            params['screen_name'] = screen_name
        else:
            params['uid'] = self.uid
        ret = self.weibo.weibo_request('users/show', 'GET', params)
        return ret

    @rate_limit(buckets=POLICY_GROUP['general'], callback=_log)
    def domain_show(self, url):
        '''Lookup user by personal url.

        We will match and remove common weibo prefix.

        :param url: e.g. 'http://weibo.com/xiena' --> url='xiena'
        '''
        import re
        pattern = re.compile('^http:\/\/.*weibo.com\/')
        url = re.sub(pattern, '', url)
        ret = self.weibo.weibo_request('users/domain_show', 'GET',
                                       {'domain': url})
        return ret

    @rate_limit(buckets=POLICY_GROUP['general'], callback=_log)
    def get_friends(self, uid=None, screen_name=None, count=200, cursor=None):
        # uid > screen_name > self; cursor pages through results.
        params = {'count': count}
        if not uid is None:
            params['uid'] = uid
        elif not screen_name is None:
            params['screen_name'] = screen_name
        else:
            params['uid'] = self.uid
        if not cursor is None:
            params['cursor'] = cursor
        ret = self.weibo.weibo_request('friendships/friends', 'GET', params)
        return ret

    @rate_limit(buckets=POLICY_GROUP['general'], callback=_log)
    def get_friends_ids(self, uid=None, screen_name=None, count=5000,
                        cursor=None):
        params = {'count': count}
        if not uid is None:
            params['uid'] = uid
        elif not screen_name is None:
            params['screen_name'] = screen_name
        else:
            params['uid'] = self.uid
        if not cursor is None:
            params['cursor'] = cursor
        ret = self.weibo.weibo_request('friendships/friends/ids', 'GET',
                                       params)
        return ret

    @rate_limit(buckets=POLICY_GROUP['general'], callback=_log)
    def get_followers(self, uid=None, screen_name=None, count=200,
                      cursor=None):
        params = {'count': count}
        if not uid is None:
            params['uid'] = uid
        elif not screen_name is None:
            params['screen_name'] = screen_name
        else:
            params['uid'] = self.uid
        if not cursor is None:
            params['cursor'] = cursor
        ret = self.weibo.weibo_request('friendships/followers', 'GET', params)
        return ret

    @rate_limit(buckets=POLICY_GROUP['general'], callback=_log)
    def get_followers_ids(self, uid=None, screen_name=None, count=5000,
                          cursor=None):
        params = {'count': count}
        if not uid is None:
            params['uid'] = uid
        elif not screen_name is None:
            params['screen_name'] = screen_name
        else:
            params['uid'] = self.uid
        if not cursor is None:
            params['cursor'] = cursor
        ret = self.weibo.weibo_request('friendships/followers/ids', 'GET',
                                       params)
        return ret

    @rate_limit(buckets=POLICY_GROUP['general'], callback=_log)
    def get_followers_active(self, uid=None, screen_name=None, count=200,
                             cursor=None):
        params = {'count': count}
        if not uid is None:
            params['uid'] = uid
        elif not screen_name is None:
            params['screen_name'] = screen_name
        else:
            params['uid'] = self.uid
        if not cursor is None:
            params['cursor'] = cursor
        ret = self.weibo.weibo_request('friendships/followers/active', 'GET',
                                       params)
        return ret

    # This Api is only for advanced app permission
    @rate_limit(buckets=POLICY_GROUP['general'], callback=_log)
    def search_topics(self, q, count=50, page=None):
        # NOTE(review): parameter 'q' is never put into params, so the query
        # text is not sent to the API -- likely missing "params['q'] = q";
        # confirm against the Weibo search/topics API before fixing.
        params = {'count': count}
        if not page is None:
            params['page'] = page
        ret = self.weibo.weibo_request('search/topics', 'GET', params)
        return ret