def get_new_key_list(rkey, keylist, get_count=20, dryrun=False):
    """Merge the first *get_count* entries of *keylist* into the Redis-cached
    key list stored under *rkey*, returning only the previously-unseen keys.

    Args:
        rkey: Redis key holding a JSON-encoded list of already-seen keys.
        keylist: candidate keys, newest first.
        get_count: how many candidates to inspect, and the maximum length of
            the merged list persisted back to Redis.
        dryrun: when True, skip writing the merged list back to Redis.

    Returns:
        List of keys from keylist[:get_count] that were not in the cache.
    """
    # FIX: dropped the unused `messages_list` local from the original.
    json_arr = redis_pool.getV(rkey)
    rkeys_list = []
    new_keys_list = []
    if json_arr:
        print("Posts Redis Cache exists for [%s]" % rkey)
        json_arr = json_arr.decode()
        rkeys_list = json.loads(json_arr)
        print("Loaded Rkeys List %s" % rkeys_list)
    for key in keylist[:get_count]:
        if key in rkeys_list:
            print("Key ID [%s] is OLD! Skip sending...." % (key))
        else:
            print("Post ID [%s] is NEW! Add to new key list...." % (key))
            new_keys_list.append(key)
    print("BEFORE Post List %s" % rkeys_list)
    # New keys go first so truncation keeps the most recent entries.
    rkeys_list = new_keys_list + rkeys_list
    print("AFTER Posts List %s" % rkeys_list)
    print("AFTER Posts List (Truncated) %s" % rkeys_list[:get_count])
    new_json_arr = json.dumps(rkeys_list[:get_count])
    if not dryrun:
        redis_pool.setV(rkey, new_json_arr)
    return new_keys_list
def get_options_code_list():
    """Load the option-code list cached in Redis under KEY.

    Returns:
        The decoded list of option codes, or [] when no cache entry exists.
    """
    json_arr = redis_pool.getV(KEY)
    code_list = []
    if json_arr:
        json_arr = json_arr.decode()
        code_list = json.loads(str(json_arr))
        print("Loaded List: %s" % code_list)
    # BUG FIX: the original built code_list but never returned it, so every
    # caller received None.
    return code_list
def is_option_code(code):
    """Return True when *code* (zero-padded to 5 characters) is one of the
    option codes cached in Redis under KEY.

    BUG FIXES vs. the original:
      * the original fell through and implicitly returned None when the
        cache was empty — now always returns a bool;
      * the original tested `code in json_arr`, i.e. a substring match
        against the raw JSON text, instead of membership in the decoded
        list — now decodes and tests exact membership.
    """
    code = code.zfill(5)
    json_arr = redis_pool.getV(KEY)
    if not json_arr:
        return False
    code_list = json.loads(json_arr.decode())
    return code in code_list
def push_posts_list(plist, tg_group, excerpt=False):
    """Broadcast unseen HomeBlogger posts to a Telegram group.

    Post ids (the trailing '=' segment of each url) are compared against a
    JSON list cached in Redis; only new posts are sent, and the merged id
    list is written back, truncated to NEW_POSTS_COUNT.
    """
    rkey = "HOMEBLOGGER:BLOGS"
    print(rkey)
    cached = redis_pool.getV(rkey)
    posts_list = []
    if cached:
        print("Posts Redis Cache exists for [%s]" % rkey)
        posts_list = json.loads(cached.decode())
        print("Loaded Posts List %s" % posts_list)
        get_count = GET_POSTS_COUNT
    else:
        get_count = NEW_POSTS_COUNT

    messages_list = []
    new_posts_list = []
    for post in plist[:get_count]:
        purl = post[0]
        ptitle = "%s" % (post[1])
        pid = purl.split("=")[-1]
        if pid in posts_list:
            print("Post ID [%s] is OLD! Skip sending...." % (pid))
            continue
        print("Post ID [%s] is NEW! Prepare for sending...." % (pid))
        new_posts_list.append(pid)
        messages_list.append(ptitle + DEL + purl)

    print("BEFORE Post List %s" % posts_list)
    posts_list = new_posts_list + posts_list
    print("AFTER Posts List %s" % posts_list)
    print("AFTER Posts List (Limited) %s" % posts_list[:NEW_POSTS_COUNT])
    redis_pool.setV(rkey, json.dumps(posts_list[:NEW_POSTS_COUNT]))

    # Only the first outgoing message carries the header banner.
    for idx, msg in enumerate(messages_list):
        if idx == 0:
            msg = u'\U0001F4F0' + " <b>HomeBlogger Posts Updates</b>" + DEL + msg
        print("Msg sent: [%s]" % msg)
        bot_sender.broadcast_list(msg, tg_group)
def push_rss_news_alerts_with_redis(url):
    """Fetch an RSS feed and return Telegram messages for unseen entries.

    Entries are deduplicated on an md5 hash of each entry title, tracked in
    a JSON list cached in Redis under "NEWS:HK01<md5(url)>".

    FIXES vs. the original:
      * removed the dead `stime = str(time.mktime(...))` computation — it
        was unconditionally overwritten by the title hash on the next line
        (and could raise AttributeError on entries missing both timestamp
        fields);
      * removed the unused `full_message` and `ftitle` locals (`ftitle`
        used an unguarded posts['feed']['title'] lookup).

    Returns:
        List of HTML-formatted messages for new entries, or None when the
        feed reports a fetch error.
    """
    print("Url: [" + url + "]")
    messages_list = []
    new_posts_list = []
    posts = feedparser.parse(url)
    url_hash = int(hashlib.md5(url.encode()).hexdigest(), 16)
    rkey = "NEWS:HK01" + str(url_hash)
    json_arr = redis_pool.getV(rkey)
    posts_list = []
    if json_arr:
        print("Posts Redis Cache exists for [%s]" % url)
        posts_list = json.loads(json_arr.decode())
        print("Loaded Posts List %s" % posts_list)
        get_count = GET_POSTS_COUNT
    else:
        get_count = NEW_POSTS_COUNT
    for post in posts.entries[:get_count]:
        stitle = post.title
        # Dedup id: md5 of the title (matches what is already stored in Redis).
        stime = str(int(hashlib.md5(stitle.encode()).hexdigest(), 16))
        if "ERROR WHILE FETCHING" in stitle:
            print(stitle)
            return
        if str(stime) in posts_list:
            print("Post created at %s is OLD! Skip sending...." % (stime))
        else:
            print("Post created at %s is NEW! Prepare for sending...." % (stime))
            new_posts_list.append(stime)
            message = u'\U0001F30F' + " <a href='%s'>%s</a>" % (post.link, stitle)
            messages_list.append(message)
    posts_list = new_posts_list + posts_list
    print("Full Posts List %s" % posts_list[:NEW_POSTS_COUNT])
    new_json_arr = json.dumps(posts_list[:NEW_POSTS_COUNT])
    redis_pool.setV(rkey, new_json_arr)
    return messages_list
def push_posts_list(group, pdict, tg_group, excerpt=False):
    """Send unseen Facebook posts to a Telegram group.

    *pdict* maps story urls to excerpt text. Seen post ids are tracked in a
    JSON list cached in Redis under "FB:<group>".
    """
    rkey = "FB:" + group
    cached = redis_pool.getV(rkey)
    posts_list = []
    if cached:
        print("Posts Redis Cache exists for [%s]" % rkey)
        posts_list = json.loads(cached.decode())
        print("Loaded Posts List %s" % posts_list)
        get_count = GET_POSTS_COUNT
    else:
        get_count = NEW_POSTS_COUNT

    messages_list = []
    new_posts_list = []
    for url in pdict:
        # e.g. 'https://m.facebook.com/story.php?story_fbid=4210050162362435&id=223783954322429'
        # -> the story_fbid value is the post id.
        pid = url.split('=')[-2].split('&')[0]
        if pid in posts_list:
            print("Post ID [%s] is OLD! Skip sending...." % (pid))
            continue
        print("Post ID [%s] is NEW! Prepare for sending...." % (pid))
        new_posts_list.append(pid)
        if excerpt:
            messages_list.append(pdict[url] + DEL + url)
        else:
            messages_list.append(url)

    posts_list = new_posts_list + posts_list
    redis_pool.setV(rkey, json.dumps(posts_list[:NEW_POSTS_COUNT]))

    # First message carries the header banner.
    for idx, msg in enumerate(messages_list):
        if idx == 0:
            msg = u'\U0001F4F0' + " <b>Latest Posts Updates</b>" + DEL + msg
        print("Msg sent: [%s]" % msg)
        bot_sender.broadcast_list(msg, tg_group)
def push_posts_list(group, plist, tg_group, excerpt=False):
    """Send unseen Facebook post ids from *plist* to a Telegram group.

    Seen ids are tracked in a JSON list cached in Redis under "FB:<group>".
    With excerpt=True each message is prefixed with the post content fetched
    via get_post_content().
    """
    rkey = "FB:" + group
    cached = redis_pool.getV(rkey)
    posts_list = []
    if cached:
        print("Posts Redis Cache exists for [%s]" % rkey)
        posts_list = json.loads(cached.decode())
        print("Loaded Posts List %s" % posts_list)
        get_count = GET_POSTS_COUNT
    else:
        get_count = NEW_POSTS_COUNT

    messages_list = []
    new_posts_list = []
    for pid in plist[:get_count]:
        if pid in posts_list:
            print("Post ID [%s] is OLD! Skip sending...." % (pid))
            continue
        print("Post ID [%s] is NEW! Prepare for sending...." % (pid))
        new_posts_list.append(pid)
        url = "https://www.facebook.com/%s/posts/%s" % (group, pid)
        if excerpt:
            messages_list.append(get_post_content(url) + DEL + url)
        else:
            messages_list.append(url)

    posts_list = new_posts_list + posts_list
    redis_pool.setV(rkey, json.dumps(posts_list[:NEW_POSTS_COUNT]))

    # First message carries the header banner.
    for idx, msg in enumerate(messages_list):
        if idx == 0:
            msg = u'\U0001F4F0' + " <b>Latest Posts Updates</b>" + DEL + msg
        print("Msg sent: [%s]" % msg)
        bot_sender.broadcast_list(msg, tg_group)
def push_posts_list(group, btitle, plist, tlist, tg_group):
    """Send unseen Weibo statuses to a Telegram group.

    *plist* holds status ids and *tlist* the parallel status texts. Seen ids
    are tracked in a JSON list cached in Redis under "WB:<group>".
    """
    rkey = "WB:" + group
    cached = redis_pool.getV(rkey)
    posts_list = []
    if cached:
        print("Posts Redis Cache exists for [%s]" % rkey)
        posts_list = json.loads(cached.decode())
        print("Loaded Posts List %s" % posts_list)
        get_count = GET_POSTS_COUNT
    else:
        get_count = NEW_POSTS_COUNT

    messages_list = []
    new_posts_list = []
    for idx, pid in enumerate(plist[:get_count]):
        if pid in posts_list:
            print("Post ID [%s] is OLD! Skip sending...." % (pid))
            continue
        print("Post ID [%s] is NEW! Prepare for sending...." % (pid))
        new_posts_list.append(pid)
        url = "https://m.weibo.cn/status/%s" % (pid)
        messages_list.append(tlist[idx] + DEL + url)

    posts_list = new_posts_list + posts_list
    redis_pool.setV(rkey, json.dumps(posts_list[:NEW_POSTS_COUNT]))

    # First message carries the header banner.
    for i, msg in enumerate(messages_list):
        if i == 0:
            msg = u'\U0001F4F0' + (" <b>Latest Weibo for</b> %s" % btitle) + DEL + msg
        print("Msg sent: [%s]" % msg)
        bot_sender.broadcast_list(msg, tg_group)
def push_posts_list(group, plist, tg_group):
    """Send unseen Zhihu posts to a Telegram group.

    Each item in *plist* is a dict with 'url' and 'title'; the trailing url
    path segment is the dedup id, tracked in Redis under "ZH:<group>".
    """
    rkey = "ZH:" + group
    cached = redis_pool.getV(rkey)
    posts_list = []
    if cached:
        print("Posts Redis Cache exists for [%s]" % rkey)
        posts_list = json.loads(cached.decode())
        print("Loaded Posts List %s" % posts_list)
        get_count = GET_POSTS_COUNT
    else:
        get_count = NEW_POSTS_COUNT

    messages_list = []
    new_posts_list = []
    for post in plist[:get_count]:
        pid = post['url'].split("/")[-1]
        if pid in posts_list:
            print("Post ID [%s] is OLD! Skip sending...." % (pid))
            continue
        print("Post ID [%s] is NEW! Prepare for sending...." % (pid))
        new_posts_list.append(pid)
        messages_list.append("%s\n%s" % (post['title'], post['url']))

    print("BEFORE Post List %s" % posts_list)
    posts_list = new_posts_list + posts_list
    print("AFTER Posts List %s" % posts_list)
    print("AFTER Posts List (Limited) %s" % posts_list[:NEW_POSTS_COUNT])
    redis_pool.setV(rkey, json.dumps(posts_list[:NEW_POSTS_COUNT]))

    # First message carries the header banner.
    for idx, msg in enumerate(messages_list):
        if idx == 0:
            msg = u'\U0001F4F0' + " <b>Latest Zhihu Updates</b>" + DEL + msg
        print("Msg sent: [%s]" % msg)
        bot_sender.broadcast_list(msg, tg_group)
def send_news(cat):
    """Fetch NowTV news for category *cat* and broadcast unseen articles to
    the "telegram-channel" group.

    Seen article ids are tracked in a JSON list cached in Redis under
    "NEWS:NOWTV:<cat>". Each new article is expanded via news_detail()
    before sending.

    FIX vs. the original: removed the unused `messages_list` local.
    """
    passages = news(cat)
    rkey = "NEWS:NOWTV:%s" % cat
    json_arr = redis_pool.getV(rkey)
    posts_list = []
    new_posts_list = []
    if json_arr:
        print("Posts Redis Cache exists for [%s]" % rkey)
        posts_list = json.loads(json_arr.decode())
        print("Loaded Posts List %s" % posts_list)
        get_count = GET_POSTS_COUNT
    else:
        get_count = NEW_POSTS_COUNT
    for passage in passages[:get_count]:
        pid = passage[0]
        purl = passage[1]
        if pid in posts_list:
            print("Post ID [%s] is OLD! Skip sending...." % (pid))
        else:
            print("Post ID [%s] is NEW! Prepare for sending...." % (pid))
            [subject, content, dtime] = news_detail(purl)
            hashtag = "#%s" % cat
            ptext = " <a href='%s'>%s</a>" % (purl, subject)
            # EL -> DEL normalizes the article body's line delimiters.
            ptext = ptext + DEL + hashtag + " " + content.replace(EL, DEL) + DEL + dtime
            new_posts_list.append(pid)
            bot_sender.broadcast_list(ptext, "telegram-channel", url_preview=False)
    posts_list = new_posts_list + posts_list
    new_json_arr = json.dumps(posts_list[:NEW_POSTS_COUNT])
    redis_pool.setV(rkey, new_json_arr)
def main(args):
    """Broadcast unseen headlines from news() to "telegram-zerohedge",
    tracking sent article ids in Redis under "NEWS:MGELONGHUI"."""
    passages = news()
    rkey = "NEWS:MGELONGHUI"
    cached = redis_pool.getV(rkey)
    posts_list = []
    new_posts_list = []
    if cached:
        print("Posts Redis Cache exists for [%s]" % rkey)
        posts_list = json.loads(cached.decode())
        print("Loaded Posts List %s" % posts_list)
        get_count = GET_POSTS_COUNT
    else:
        get_count = NEW_POSTS_COUNT
    for passage in passages[:get_count]:
        pid, purl = passage[0], passage[1]
        if pid in posts_list:
            print("Post ID [%s] is OLD! Skip sending...." % (pid))
            continue
        print("Post ID [%s] is NEW! Prepare for sending...." % (pid))
        subject = news_subject(purl)
        ptext = u'\U0001F3E0' + " <a href='%s'>%s</a>" % (purl, subject)
        new_posts_list.append(pid)
        bot_sender.broadcast_list(ptext, "telegram-zerohedge", url_preview=False)
    posts_list = new_posts_list + posts_list
    redis_pool.setV(rkey, json.dumps(posts_list[:NEW_POSTS_COUNT]))
def main(args):
    """Broadcast unseen text passages from news() to "telegram-zerohedge",
    tracking sent ids in Redis under "NEWS:GELONGHUI"."""
    passages = news()
    rkey = "NEWS:GELONGHUI"
    cached = redis_pool.getV(rkey)
    posts_list = []
    new_posts_list = []
    if cached:
        print("Posts Redis Cache exists for [%s]" % rkey)
        posts_list = json.loads(cached.decode())
        print("Loaded Posts List %s" % posts_list)
        get_count = GET_POSTS_COUNT
    else:
        get_count = NEW_POSTS_COUNT
    for passage in passages[:get_count]:
        pid, ptext = passage[0], passage[1]
        if pid in posts_list:
            print("Post ID [%s] is OLD! Skip sending...." % (pid))
            continue
        print("Post ID [%s] is NEW! Prepare for sending...." % (pid))
        print(ptext)
        new_posts_list.append(pid)
        bot_sender.broadcast_list(ptext, "telegram-zerohedge")
    posts_list = new_posts_list + posts_list
    redis_pool.setV(rkey, json.dumps(posts_list[:NEW_POSTS_COUNT]))
def main(args):
    """Broadcast unseen MONCC passages for the category given in args[1],
    tracking sent ids in Redis under "NEWS:MONCC<cat>"."""
    print("Cat:[%s]" % args[1])
    cat = args[1]
    passages = news(cat)
    print(passages)
    rkey = "NEWS:MONCC%s" % (cat)
    cached = redis_pool.getV(rkey)
    posts_list = []
    new_posts_list = []
    if cached:
        print("Posts Redis Cache exists for [%s]" % rkey)
        posts_list = json.loads(cached.decode())
        print("Loaded Posts List %s" % posts_list)
        get_count = GET_POSTS_COUNT
    else:
        get_count = NEW_POSTS_COUNT
    for passage in passages[:get_count]:
        pid, ptext = passage[0], passage[1]
        if pid in posts_list:
            print("Post ID [%s] is OLD! Skip sending...." % (pid))
            continue
        print("Post ID [%s] is NEW! Prepare for sending...." % (pid))
        print(ptext)
        new_posts_list.append(pid)
        bot_sender.broadcast(ptext, is_test=False, url_preview=False)
    posts_list = new_posts_list + posts_list
    redis_pool.setV(rkey, json.dumps(posts_list[:NEW_POSTS_COUNT]))
def main(args):
    """Check the blog feed via news() and broadcast the content to
    "telegram-ptgroup" when its hash differs from the one cached in Redis
    under "BLOG:SKYCHEUNG"."""
    rkey = "BLOG:SKYCHEUNG"
    cached = redis_pool.getV(rkey)
    if cached:
        print("Last Redis Cache exists for [%s]" % rkey)
        print("Loaded Last Hash: %s" % cached)
    # Redis returns bytes; fall back to empty on a cold cache.
    lasthash = cached.decode() if cached else ""
    newhash, content = news()
    if newhash == lasthash:
        print("Post ID [%s] is OLD! Skip sending...." % (newhash))
        return
    print("Post ID [%s] is NEW! Prepare for sending...." % (newhash))
    bot_sender.broadcast_list(content, "telegram-ptgroup", True)
    redis_pool.setV(rkey, newhash)
def get_rss_alerts_with_redis(url):
    """Build a combined alert message for unseen entries of an RSS feed.

    Entries are deduplicated on their published (or updated) timestamp,
    tracked in a JSON list cached in Redis under "RSS:<md5(url)>".

    Returns the joined HTML message, "" when nothing is new, or None when
    the feed reports a fetch error (in that case the cache is not updated).
    """
    print("Url: [" + url + "]")
    full_message = ""
    messages_list = []
    new_posts_list = []
    posts = feedparser.parse(url)
    url_hash = int(hashlib.md5(url.encode()).hexdigest(), 16)
    ftitle = posts['feed']['title'] if 'title' in posts['feed'] else ""
    rkey = "RSS:" + str(url_hash)
    cached = redis_pool.getV(rkey)
    posts_list = []
    if cached:
        print("Posts Redis Cache exists for [%s] [%s]" % (url, rkey))
        posts_list = json.loads(cached.decode())
        print("Loaded Posts List %s" % posts_list)
        get_count = GET_POSTS_COUNT
    else:
        get_count = NEW_POSTS_COUNT
    for post in posts.entries[:get_count]:
        # Prefer the published timestamp; fall back to updated.
        if 'published_parsed' in post.keys():
            stime = str(time.mktime(post.published_parsed))
        else:
            stime = str(time.mktime(post.updated_parsed))
        stitle = post.title
        if "ERROR WHILE FETCHING" in stitle:
            print(stitle)
            return
        if str(stime) in posts_list:
            print("Post created at %s is OLD! Skip sending...." % (stime))
        else:
            print("Post created at %s is NEW! Prepare for sending...." % (stime))
            new_posts_list.append(stime)
            messages_list.append("<b>" + stitle + "</b>\n" + post.link)
    posts_list = new_posts_list + posts_list
    print("Full Posts List %s" % posts_list[:NEW_POSTS_COUNT])
    redis_pool.setV(rkey, json.dumps(posts_list[:NEW_POSTS_COUNT]))
    if messages_list:
        messages_list.insert(
            0, "<pre>\n</pre>" + u'\U0001F4F0' + " <b>Latest Posts Updates</b>")
        full_message = DEL.join(messages_list)
    return full_message
def get_chart(code):
    """Return the StockCharts daily-chart URL for *code*, using the access
    token cached in Redis under "STOCKCHARTS:TOKEN"."""
    token = redis_pool.getV("STOCKCHARTS:TOKEN").decode("utf-8")
    return "http://stockcharts.com/c-sc/sc?s=%s&p=D&b=5&g=0&i=%s" % (
        code.upper(), token)
def push_tweet(name, tcount=1, test=False, group="telegram-twitter"):
    """Fetch the timeline of Twitter user *name* and broadcast unseen
    tweets to a Telegram group.

    Seen tweet ids are kept as a JSON list in Redis under
    "Twitter:<name lowercased>". New tweets are sent oldest-first with a
    sentiment tag, and $TICKER cashtags are rewritten as /qdTICKER bot
    commands.

    Args:
        name: Twitter screen name, without the leading '@'.
        tcount: unused; kept for interface compatibility.
        test: when True, send via the default broadcast list instead of
            *group*.
        group: Telegram group to broadcast to.
    """
    sname = '@%s' % name
    rkey = "Twitter:" + name.lower()
    json_arr = redis_pool.getV(rkey)
    tweet_list = []
    if json_arr:
        print("Twitter Redis Cache exists for [%s]" % name)
        json_arr = json_arr.decode()
        tweet_list = json.loads(json_arr)
        print("Loaded Tweet List %s" % tweet_list)
        get_count = GET_TWEET_COUNT
    else:
        get_count = NEW_TWEET_COUNT
    try:
        statuses = API.GetUserTimeline(screen_name=sname,
                                       include_rts=False,
                                       exclude_replies=False,
                                       count=get_count)
    # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt; narrowed to Exception.
    except Exception:
        print("User Timeline Error: [%s]" % name)
        return
    messages_list = []
    new_tweet_list = []
    # Iterate oldest-first so messages are delivered in chronological order.
    for s in reversed(statuses):
        if str(s.id) in tweet_list:
            print("%s created at %s is OLD! Skip sending...." %
                  (s.id, s.created_at))
        else:
            source = (re.sub('<[^<]+?>', '', s.source)).strip()
            #if source == "IFTTT" and not ("Facebook" in s.full_text):
            #    continue
            print("%s created at %s is NEW! Prepare for sending...." %
                  (s.id, s.created_at))
            new_tweet_list.append(str(s.id))
            url = ('https://mobile.twitter.com/i/web/status/%s' % s.id)
            created = str(s.created_at)
            # Turn $AAPL-style cashtags into /qdAAPL bot commands.
            text = re.sub(r"\$([A-Za-z]+)", r"/qd\1", s.full_text)
            analysis = get_sentiment(s.full_text)
            message = "[%s] %s\n(<a href='%s'>%s</a>)" % (
                analysis, text, url, created.split('+')[0] + "GMT")
            messages_list.append(message)
    print("BEFORE Tweet List %s" % tweet_list)
    # new_tweet_list was built oldest-first; reverse so newest ids lead,
    # then truncate when persisting.
    tweet_list = list(reversed(new_tweet_list)) + tweet_list
    print("AFTER Tweet List %s" % tweet_list)
    print("AFTER Tweet List (LIMIT) %s" % tweet_list[:NEW_TWEET_COUNT])
    new_json_arr = json.dumps(tweet_list[:NEW_TWEET_COUNT])
    redis_pool.setV(rkey, new_json_arr)
    if messages_list:
        surl = "https://mobile.twitter.com/%s" % name
        smsg = "<pre>\n</pre>" + random.choice(LOADING) + (
            "<a href='%s'>@%s</a> is Tweeting..." % (surl, name))
        bot_sender.broadcast_list(smsg, group, url_preview=False)
    # Disable URL previews for the high-volume news accounts.
    if name in ("zerohedge", "barronsonline", "xhnews"):
        up = False
    else:
        up = True
    for msg in messages_list:
        if test:
            bot_sender.broadcast_list(msg)
        else:
            bot_sender.broadcast_list(msg, group, url_preview=up)