def publish_news(url):
    """Fetch RSS news passages for *url* and broadcast each one via Telegram.

    Messages are sent live (not test mode) and with link previews disabled.
    """
    alerts = push_rss_news_alerts_with_redis(url)
    print(alerts)
    for alert in alerts:
        print(alert)
        bot_sender.broadcast(alert, is_test=False, url_preview=False)
def main(args):
    """Broadcast the latest MingPao instant-news RSS passages.

    *args* is accepted for CLI compatibility but is not used.
    """
    feed_url = 'https://news.mingpao.com/rss/ins/s00002.xml'
    items = push_rss_news_alerts_with_redis(feed_url)
    print(items)
    for item in items:
        print(item)
        bot_sender.broadcast(item, is_test=False, url_preview=False)
def generate_WEEKLY_SIG_MT(num_workers, signalType):
    """Scan every stock concurrently for weekly signals.

    Args:
        num_workers: thread-pool size for the per-stock signal checks.
        signalType: kept for interface parity with generate_SIG_MT; the
            original code mapped every value to manageWeeklyStockSignals
            (both branches of its if/else were identical), so the branch
            has been collapsed.

    Note: the final broadcast is effectively disabled — ``passage`` is never
    filled in, so ``bot_sender.broadcast`` is never reached (preserved from
    the original).
    """
    conn = sqlite3.connect("/app/hickoryStrats/hickory/db/stock_db.dat")
    try:
        conn.row_factory = sqlite3.Row
        c = conn.cursor()
        c.execute("select * from stocks order by code asc")
        rows = c.fetchall()
    finally:
        # Fix: the connection was previously never closed (resource leak).
        conn.close()

    # Every signalType uses the weekly signal manager (see docstring).
    sigFun = manageWeeklyStockSignals

    data_list = []
    # The with statement ensures worker threads are cleaned up promptly.
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=num_workers) as executor:
        # Start the per-stock checks and remember which row each future serves.
        future_to_manage = {
            executor.submit(sigFun, row["code"], 4): row
            for row in rows[:LIMIT]
        }
        for future in concurrent.futures.as_completed(future_to_manage):
            row = future_to_manage[future]
            try:
                data = future.result(timeout=15)
                data_list.append(data)
            except concurrent.futures.TimeoutError:
                logging.error("%r took too long to run..." % (row["code"]))
            except Exception:
                logging.error(" Error retrieving code: " + row["code"])
            else:
                # Identity check: only an explicit False result is reported,
                # not falsy values such as 0 or [].
                if data is False:
                    print('%r result is %s' % (row["code"], data))

    try:
        mm = "Weekly All Time High List"
        passage = ""
        for d in data_list:
            rcode = d[0]
            print(rcode)
        # send telegram messages (test mode; never fires while passage == "")
        isTest = True
        if passage:
            bot_sender.broadcast("Weekly Test", isTest)
    except Exception:
        logging.error(traceback.format_exc())
def main():
    """Broadcast the ETF statistics, then one message per HY industry."""
    bot_sender.broadcast(get_etf_stat(config.get("aastocks", "hy-url-etf")))
    for key, url in config.items("aastocks-hy-industry"):
        print("Industry to retrieve: " + key + " => " + url)
        # The industry code is the last '-'-separated token of the config key.
        bot_sender.broadcast(get_hy_stat(url, key.split("-")[-1]))
def main():
    """Broadcast the latest result announcements from aastocks.

    Profit-warning broadcasting (get_latest_reports(40, 1) for positive,
    get_latest_reports(40, 2) for negative) is currently disabled.
    """
    for announcement in result_announcement.get_latest_result_announcement():
        print(announcement)
        bot_sender.broadcast(announcement, False, False)
def main(args):
    """Fetch MONCC news for the category in args[1], broadcast only the
    posts not already recorded in Redis, then refresh the Redis cache."""
    print("Cat:[%s]" % args[1])
    cat = args[1]
    passages = news(cat)
    print(passages)

    rkey = "NEWS:MONCC%s" % (cat)
    cached = redis_pool.getV(rkey)
    seen_ids = []
    if cached:
        print("Posts Redis Cache exists for [%s]" % rkey)
        seen_ids = json.loads(cached.decode())
        print("Loaded Posts List %s" % seen_ids)
        # Cache exists: scan a larger window of posts for anything unseen.
        fetch_count = GET_POSTS_COUNT
    else:
        # Cold start: only consider the newest few posts.
        fetch_count = NEW_POSTS_COUNT

    fresh_ids = []
    for entry in passages[:fetch_count]:
        pid = entry[0]
        ptext = entry[1]
        if pid in seen_ids:
            print("Post ID [%s] is OLD! Skip sending...." % (pid))
            continue
        print("Post ID [%s] is NEW! Prepare for sending...." % (pid))
        print(ptext)
        fresh_ids.append(pid)
        bot_sender.broadcast(ptext, is_test=False, url_preview=False)

    # Newest IDs go first; keep the cache bounded to NEW_POSTS_COUNT entries.
    seen_ids = fresh_ids + seen_ids
    redis_pool.setV(rkey, json.dumps(seen_ids[:NEW_POSTS_COUNT]))
def main():
    """Print and broadcast the 52-week-high list (second broadcast arg False)."""
    message = get_week_high()
    print(message)
    bot_sender.broadcast(message, False)
def main():
    """Broadcast the aastocks calendar to the configured chat rooms."""
    # Chat room IDs are resolved inside bot_sender (via getUpdates).
    bot_sender.broadcast(get_aastocks_calendar())
def main():
    """Broadcast each of the latest reports (window of 40)."""
    for item in get_latest_reports(40):
        bot_sender.broadcast(item)
def main():
    """Print and broadcast the latest result announcements.

    Sent in test mode (second positional arg True) with previews disabled.
    """
    for announcement in get_latest_result_announcement():
        print(announcement)
        bot_sender.broadcast(announcement, True, url_preview=False)
def generate_SIG_MT(num_workers, signalType):
    """Scan every stock concurrently for daily signals, export a watcher JS
    file to the git repo, and broadcast breakout lists via Telegram.

    Args:
        num_workers: thread-pool size for the per-stock signal checks.
        signalType: "INVERSE" selects manageInverseStockSignals; any other
            value (including "NORMAL") selects manageStockSignals.
    """
    conn = sqlite3.connect("/app/hickoryStrats/hickory/db/stock_db.dat")
    try:
        conn.row_factory = sqlite3.Row
        c = conn.cursor()
        c.execute("select * from stocks order by code asc")
        rows = c.fetchall()
    finally:
        # Fix: the connection was previously never closed (resource leak).
        conn.close()

    isInv = False
    if signalType == "INVERSE":
        sigFun = manageInverseStockSignals
        isInv = True
    else:
        # "NORMAL" and any unrecognised value both map here (as before).
        sigFun = manageStockSignals

    data_list = []
    # The with statement ensures worker threads are cleaned up promptly.
    with concurrent.futures.ThreadPoolExecutor(
            max_workers=num_workers) as executor:
        # Start the per-stock checks and remember which row each future serves.
        future_to_manage = {
            executor.submit(sigFun, row["code"], 4): row
            for row in rows[:LIMIT]
        }
        for future in concurrent.futures.as_completed(future_to_manage):
            row = future_to_manage[future]
            try:
                data = future.result(timeout=15)
                data_list.append(data)
            except concurrent.futures.TimeoutError:
                logging.error("%r took too long to run..." % (row["code"]))
            except Exception:
                logging.error(" Error retrieving code: " + row["code"])
            else:
                if data == False:
                    print('%r result is %s' % (row["code"], data))

    try:
        # Message headers: turtle emoji list titles, inverse vs normal.
        if isInv:
            mm = u'\U0001F422' + u'\U0001F422' + u'\U00002198' + u'\U00002197' + " List"
        else:
            mm = u'\U0001F422' + u'\U0001F422' + u'\U0001F199' + " List"
        t1 = mm + '\n\n'
        t2 = mm + " with " + u'\U00000032' + " " + u'\U0000303D' + '\n\n'
        t3 = mm + " with " + u'\U00000033' + " " + u'\U0000303D' + '\n\n'
        p1 = p2 = p3 = ""
        list_data = []
        datalist = {}
        list_datalist = []
        masterdata = {}
        for d in data_list:
            rcode = d[0]
            # for generate watcher js
            stock = {}
            stock["code"] = rcode
            stock["label"] = stock_db.get_stock_name(rcode)
            list_data.append(stock)
            # for generate telegram messages
            if d[1] == True:
                # Flame marker for stocks also at a 52-week high.
                alt = ""
                if stock_quote.is_52weekhigh(rcode):
                    alt = u'\U0001F525'
                p1 = p1 + "/qd" + rcode + " - " + alt + stock_db.get_stock_name(
                    rcode) + " (" + stock_db.get_stock_industry(
                        rcode) + ")" + '\n'
            # NOTE(review): both checks guard with len(d) > 3 even though
            # d[2] only needs len(d) > 2 — presumably signal tuples are
            # always length 4 when extended; confirm against sigFun.
            if len(d) > 3 and d[2] == True:
                p2 = p2 + "/qd" + rcode + " - " + stock_db.get_stock_name(
                    rcode) + " (" + stock_db.get_stock_industry(
                        rcode) + ")" + '\n'
            if len(d) > 3 and d[3] == True:
                p3 = p3 + "/qd" + rcode + " - " + stock_db.get_stock_name(
                    rcode) + " (" + stock_db.get_stock_industry(
                        rcode) + ")" + '\n'
        # generate watcher js (only when at least one stock was collected)
        if list_data:
            datalist["code"] = "DAILY_BreakOutList_" + datetime.today(
            ).strftime('%y%m%d')
            datalist["list"] = list_data
            datalist["label"] = datalist["code"] + "_" + str(len(list_data))
            list_datalist.append(datalist)
            masterdata["list"] = list_datalist
            json_data = json.dumps(masterdata)
            print(json_data)
            filesub = "web/js/"
            filedir = EXPORT_REPO + filesub
            filepath = "dailyWatcherBreakoutListData.js"
            with open(filedir + filepath, 'w') as the_file:
                the_file.write("//" + str(datetime.today()) + EL)
                the_file.write("var " + filepath.replace(".js", "") + " =" +
                               json_data + ";")
            print("File extracted as " + filedir + filepath)
            git_util.commitAll(EXPORT_REPO)
            git_util.push_remote(EXPORT_REPO)
        # send telegram messages
        isTest = False
        if p1:
            bot_sender.broadcast(str(t1 + p1), isTest)
        if p2:
            bot_sender.broadcast(str(t2 + p2), isTest)
        if p3:
            bot_sender.broadcast(str(t3 + p3), isTest)
    except Exception:
        logging.error(traceback.format_exc())