def getChatMessages(phone_number, api_id, api_hash, channel_name, number_of_messages):
    """Download up to *number_of_messages* recent messages from a Telegram channel.

    Only messages whose date satisfies ``checkNumberOfDays(date) == -int(NUM_OF_DAY)``
    are kept.  Connection errors are logged, not raised; whatever was gathered
    before the failure is still returned.

    :param phone_number: Telegram account identifier (also used as session name).
    :param api_id: Telegram API id.
    :param api_hash: Telegram API hash.
    :param channel_name: channel/chat to read from.
    :param number_of_messages: maximum number of messages to fetch.
    :return: dict of parallel lists — 'message', 'time', plus 'id', 'sender',
             'replyTo' (the original collected these three but never returned
             them; they are now included as backward-compatible extra keys).
    """
    messages, stamps = [], []
    ids, senders, replies = [], [], []
    data = {'message': messages, 'time': stamps,
            'id': ids, 'sender': senders, 'replyTo': replies}
    # Telethon needs an event loop bound to this thread.
    loop = asyncio.new_event_loop()
    asyncio.set_event_loop(loop)
    client = TelegramClient(phone_number, api_id, api_hash, loop=loop)
    try:
        client.connect()
        client = checkUserAuthorization(client, phone_number)
        chats = client.get_messages(channel_name, number_of_messages)
        counter = 0
        for chat in chats:
            # keep only messages inside the NUM_OF_DAY window
            if checkNumberOfDays(chat.date) == -int(NUM_OF_DAY):
                ids.append(chat.id)
                messages.append(chat.message)
                senders.append(chat.from_id)
                replies.append(chat.reply_to_msg_id)
                stamps.append(chat.date)
                counter += 1
        log.info(f'{NUM_OF_DAY} days => total {counter} chat messages')
    except Exception as err:
        # best-effort: log and fall through to return partial data
        log.exception(err)
    finally:
        client.disconnect()
    return data
def sendEmailNotification(df):
    """Email *df* as an HTML table report via Gmail SMTP (STARTTLS).

    Reads user/password/server/port from the GMAIL config section; the account
    owner is both sender and recipient.  SMTP failures are logged, not raised.

    :param df: pandas DataFrame rendered into the mail body via build_table.
    """
    # https://www.google.com/settings/security/lesssecureapps
    # Parse the config section once instead of six separate configReader calls.
    cfg = dict(configReader(GMAIL))
    user = decode(cfg['user'])
    password = decode(cfg['password'])
    sender = user       # report is self-addressed
    recipients = user
    smtpServer = cfg['smtpserver']
    port = int(cfg['port'])

    msg = MIMEMultipart()
    msg['Subject'] = "Trading-Report-" + datetime.datetime.now().strftime("%d %B %I:%M")
    msg['From'] = sender
    msg['To'] = recipients
    body_content = build_table(df, 'grey_dark')
    msg.attach(MIMEText(body_content, "html"))
    msg_body = msg.as_string()

    context = ssl.create_default_context()
    try:
        # Context manager guarantees the connection is closed even when
        # login/sendmail raises (the original leaked the socket on error).
        with smtplib.SMTP(smtpServer, port) as server:
            server.set_debuglevel(0)
            server.ehlo()
            server.starttls(context=context)
            server.ehlo()
            server.login(user, password)
            server.sendmail(msg['From'], msg['To'], msg_body)
        log.info("Mail Sent...")
    except smtplib.SMTPException as err:
        log.exception(err)
def executeSelectQuery(sql):
    """Execute a SELECT statement against MySQL and return the rows as a DataFrame.

    :param sql: SQL text to run.
    :return: pandas DataFrame with the result set.
    :raises: whatever pandas/SQLAlchemy raises on a failed query — but the
             engine is now disposed either way (the original leaked it on error).
    """
    log.info(f'Query execution => {sql} => IN PROGRESS')
    con = getMySQLEngine()
    try:
        df = pd.read_sql(sql, con=con)
    finally:
        con.dispose()  # release pooled connections even when the query fails
    log.info(f'Query execution => {sql} => COMPLETED')
    return df
def moveFiles(src, tgt, pattern):
    """Move every file in *src* matching glob *pattern* into directory *tgt*.

    :param src: source directory.
    :param tgt: target directory.
    :param pattern: glob pattern (e.g. '*.csv') matched inside *src*.
    """
    if os.path.isdir(src) and os.path.isdir(tgt):
        fileList = glob.glob(os.path.join(src, pattern))
        if fileList:
            for file in fileList:
                # os.path.join handles a missing trailing separator on tgt
                # (the original `tgt + basename` produced a wrong path then).
                # NOTE: os.rename fails across filesystems; use shutil.move
                # if src and tgt may live on different mounts.
                os.rename(file, os.path.join(tgt, os.path.basename(file)))
            log.info('Files moved to target')
    else:
        # log.error, not log.exception: outside an except block there is no
        # traceback to attach. Message typo fixed ("exits" -> "exist").
        log.error("Path doesn't exist")
def executeUpdateQuery(sql):
    """Execute a DML/DDL statement (INSERT/UPDATE/DELETE/...) against MySQL.

    Errors are logged, not raised; the connection is always closed.

    :param sql: SQL text to execute.
    """
    conn = None
    try:
        log.info(f'Query execution => {sql} => IN PROGRESS')
        conn = getMySQLEngine().connect()
        conn.execute(sql)
        log.info(f'Query execution => {sql} => COMPLETED')
    except Exception as err:
        # Original `except conn.Exception` was broken: a SQLAlchemy Connection
        # has no .Exception attribute, and conn may still be None here.
        log.exception(err)
    finally:
        if conn is not None:
            conn.close()  # original never released the connection
def getSoupObject(url):
    """Fetch *url* (10 s timeout) and parse it into a BeautifulSoup object.

    :param url: address to download.
    :return: bs4.BeautifulSoup on success, None when the request fails
             (the failure is logged either way).
    """
    soup = None
    try:
        response = urllib.request.urlopen(url, timeout=10)
        markup = response.read().decode('utf-8')
        soup = bs4.BeautifulSoup(markup, "html.parser")
    # HTTPError is a URLError subclass, so it must be caught first.
    except urllib.error.HTTPError as http_error:
        log.error(f'HTTPError: {http_error.code} for {url}')
    except urllib.error.URLError as url_error:
        log.error(f'URLError: {url_error.reason} for {url}')
    else:
        log.info(f'{url} is up')
    return soup
def getMarketsIndianIndices():
    """Scrape the Indian-indices page and persist the BSE and NSE tables.

    Verifies the page is showing live-market data ('Live Markets' / 'Bse' /
    'Nse' markers) before delegating each exchange table to getBseNSeData.
    """
    soup = getSoupObject(url['indianIndicesURL'])
    # Extract the text between the first '>' and '</' of the time div.
    webpageTime = str(soup.find_all("div", table['webpageTime']).pop(1))
    webpageTime = webpageTime[webpageTime.find(">") + 1:webpageTime.find("</")]
    # Sibling functions use the `datetime` MODULE (datetime.datetime.now()),
    # so the classmethod must be fully qualified; bare datetime.strptime
    # raised AttributeError on the module object.
    # NOTE(review): "%d %b %I:%M" has no year/AM-PM, so the parsed value
    # defaults to year 1900 — confirm this is only used for display.
    webpageTime = datetime.datetime.strptime(webpageTime, "%d %b %I:%M")
    log.info(f'indian-indices Time - {webpageTime}')
    dom = etree.HTML(str(soup))
    liveMarket = dom.xpath(xpath['liveMarket'])[0].text
    base = dom.xpath(xpath['base'])[0].text
    nse = dom.xpath(xpath['nse'])[0].text
    bseTable = soup.find_all("div", "clearfix tab-pane fade")
    nseTable = soup.find_all("div", "clearfix tab-pane fade in active")
    if liveMarket == 'Live Markets' and base == 'Bse' and nse == 'Nse':
        getBseNSeData(bseTable, base)
        getBseNSeData(nseTable, nse)
def getBseNSeData(tableName, exchangeName):
    """Parse one exchange's HTML table into the shared column lists, build a
    DataFrame from them, log it, and append it to the database table named
    after the exchange.

    :param tableName: bs4 result set whose first element is the <table> markup.
    :param exchangeName: exchange label ('Bse' or 'Nse'); also the DB table name.
    """
    # NOTE(review): name/currentValue/change/percentageChg/opEn/high/low appear
    # to be module-level lists; they are appended to here and never cleared, so
    # consecutive calls (BSE then NSE within one run) accumulate rows from the
    # previous call into the next DataFrame — verify they are reset elsewhere.
    tableRows = tableName[0].find_all("tr")
    tableRows = tableRows[1:]  # drop the header row
    for row in tableRows:
        columns = row.find_all("td")
        name.append(columns[0].text)
        currentValue.append(columns[1].text)
        change.append(columns[2].text)
        percentageChg.append(columns[3].text)
        opEn.append(columns[4].text)
        high.append(columns[5].text)
        low.append(columns[6].text)
    df = pd.DataFrame(list(
        zip(name, currentValue, change, percentageChg, opEn, high, low)),
        columns=bseNSeColumns)
    df['time'] = pd.to_datetime('now')  # scrape timestamp column
    log.info(f'{exchangeName} => ' + os.linesep + tabulate(df, tablefmt="fancy_grid"))
    writeDataToDatabase(df, f'{exchangeName}')
def getCommodities():
    """Scrape the MCX commodities table from the base page, build a DataFrame
    from the shared column lists, log it, and append it to the 'commodities'
    database table.
    """
    soup = getSoupObject(url['baseUrl'])
    dom = etree.HTML(str(soup))
    mcx = dom.xpath(xpath['mcx'])[0].text
    mcxTable = soup.find_all('div', table['mcxTable'])
    # Strip all whitespace from the heading before comparing to 'MCX'.
    if re.sub(r"[\n\t\s]*", "", mcx) == 'MCX':
        tableRows = mcxTable[0].find_all("tr")
        tableRows = tableRows[1:]  # drop the header row
        for row in tableRows:
            columns = row.find_all("td")
            # NOTE(review): commodity/price/change/percentageChg appear to be
            # module-level lists; `change` and `percentageChg` also seem to be
            # shared with getBseNSeData and are never cleared here — confirm
            # they are reset between scrapes, or rows cross-contaminate.
            # Columns 2 is skipped deliberately? (0, 1, 3, 4 are read) — verify.
            commodity.append(re.sub(r"[\n\t]*", "", columns[0].text))
            price.append(re.sub(r"[\n\t]*", "", columns[1].text))
            change.append(re.sub(r"[\n\t]*", "", columns[3].text))
            percentageChg.append(re.sub(r"[\n\t]*", "", columns[4].text))
        df = pd.DataFrame(list(zip(commodity, price, change, percentageChg)),
                          columns=commoditiesColumns)
        df['time'] = pd.to_datetime('now')  # scrape timestamp column
        log.info('commodities => ' + os.linesep + tabulate(df, tablefmt="fancy_grid"))
        writeDataToDatabase(df, 'commodities')
def writeDataToDatabase(df, table):
    """Append *df* to the MySQL table *table* (created if absent).

    :param df: pandas DataFrame to persist.
    :param table: destination table name.
    """
    con = getMySQLEngine()
    try:
        df.to_sql(table, con=con, index=False, if_exists='append')
    finally:
        con.dispose()  # dispose even when the insert fails (original leaked on error)
    log.info('Data Successfully Inserted to db...')
def readCsvFile(file):
    """Read *file* as a UTF-8 CSV and return a DataFrame of ASCII-only strings.

    Every cell is stringified, then non-ASCII characters are silently dropped.

    :param file: path of the CSV file to read.
    :return: pandas DataFrame with all values as ASCII strings.
    """
    frame = pd.read_csv(file, encoding='utf-8', sep=',', escapechar='\\')
    log.info(f'Reading file -> {file}')

    def strip_non_ascii(column):
        # encode with errors='ignore' discards anything outside ASCII
        return column.str.encode('ascii', 'ignore').str.decode('ascii')

    return frame.astype(str).apply(strip_non_ascii)
def writeToCsvFile(data, path):
    """Write *data* (anything DataFrame accepts: dict of lists, records, ...)
    to *path* as a UTF-8 CSV without the index column.

    :param data: tabular data to persist.
    :param path: destination file path.
    """
    df = pd.DataFrame(data)
    df.to_csv(path, index=False, encoding='utf-8', sep=',', escapechar='\\')
    # log-message typo fixed: "hase" -> "has"
    log.info(f'File has been saved to => {path}')
from sqlalchemy import text from src.main.constants.ApplicationConstants import DATA_DIR, RAW_DATA_DIR, PROCESSED_DATA_DIR, ROOT, TELEGRAM, SQL from src.main.properties.ConfigurationFileReader import log from src.main.utils.Utils import configReader, writeToCsvFile, readCsvFile, executeSelectQuery, getDeltaRecords, \ executeUpdateQuery, writeDataToDatabase, sendEmailNotification, moveFiles, getChatMessages # loading config values from src.main.validation.Checks import checkStockLose api_id = int(dict(configReader(TELEGRAM))['api_id']) api_hash = dict(configReader(TELEGRAM))['api_hash'] phone_number = dict(configReader(TELEGRAM))['phone_number'] log.info(f'phone_number - {phone_number}') channel_name = dict(configReader(TELEGRAM))['channel_name'] log.info(f'Channel Name - {channel_name}') number_of_messages = int(dict(configReader(TELEGRAM))['number_of_messages']) log.info(f'Number of messages to be extracted - {number_of_messages}') source = ROOT + DATA_DIR + RAW_DATA_DIR log.info(f'source path - {source}') destination = ROOT + DATA_DIR + PROCESSED_DATA_DIR log.info(f'destination path - {destination}') filename = source + channel_name + '-' + str(datetime.datetime.now()) + '.csv' data = getChatMessages(phone_number, api_id, api_hash, channel_name,