def __init__(self):
    self.db = database.Database()

    smsgwglobals.wislogger.debug("SCHEDULER: starting")
    self.scheduler = BackgroundScheduler()
    self.scheduler.start()

    smsgwglobals.wislogger.debug(
        "SCHEDULER: REPROCESS_SMS job starting. Interval: "
        + str(wisglobals.resendinterval) + " minutes")
    self.scheduler.add_job(self.reprocess_sms, 'interval',
                           minutes=wisglobals.resendinterval)

    smsgwglobals.wislogger.debug(
        "SCHEDULER: REPROCESS_ORPHANED_SMS job starting. Interval: 30 seconds")
    self.scheduler.add_job(self.reprocess_orphaned_sms, 'interval', seconds=30)

    smsgwglobals.wislogger.debug(
        "SCHEDULER: DELETE_OLD_SMS job starting. Interval: 7 days")
    self.scheduler.add_job(self.delete_old_sms, 'interval', days=7)

    smsgwglobals.wislogger.debug(
        "SCHEDULER: TRIGGER_WATCHDOGS job starting. Interval: 15 seconds")
    self.scheduler.add_job(self.trigger_watchdogs, 'interval', seconds=15)
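# For reference: a minimal, standalone sketch of the APScheduler pattern used
# above (interval jobs registered on a BackgroundScheduler). The job body and
# the interval below are placeholders, not this project's values.
import time

from apscheduler.schedulers.background import BackgroundScheduler


def heartbeat():
    print("tick")


scheduler = BackgroundScheduler()
scheduler.start()
scheduler.add_job(heartbeat, 'interval', seconds=15)

try:
    # keep the main thread alive; jobs run on the scheduler's worker threads
    while True:
        time.sleep(1)
except (KeyboardInterrupt, SystemExit):
    scheduler.shutdown()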
async def send(self, posts, bot):
    db = database.Database()
    # go through new submissions
    for post in posts:
        # 1 = updated, 2 = created, -1 = error while updating/inserting
        status = db.upsertGameDeal(post)
        channels = guild.getChannels('gamedeals')
        for channel in channels:
            # the deal already exists
            if status == common.STATUS.SUCCESS.UPDATED:
                # price check for steam games
                if 'steampowered.com' in post['url']:
                    try:
                        existingDeal = db.getGameDeal(post)
                        new_price = self.ssf.getPrice(url=post['url'])
                        new_price = new_price['final'] if new_price else 9223372036854775806
                        if 'price' in existingDeal:
                            old_price = existingDeal['price']
                            # if the new price is lower than the old one, post the deal
                            if int(new_price) < int(old_price):
                                await self.steam.post(bot, channel, post)
                    # can't compare the price, so leave the deal as is
                    except InvalidArgument as e:
                        if common.getEnvironment() == 'prod' or common.getEnvironment() == 'dev':
                            await bot.get_channel(common.getMasterLog()).send(
                                f"error getting price for {post['url']} of reddit id {post['id']}. "
                                f"Arguments passed {e.error}, error type {e.type}.")
                # else:
                #     await self.steam.post(bot, channel, post)
            # the deal is a new one
            elif status == common.STATUS.SUCCESS.INSERTED:
                # special handler for steam
                if 'steampowered.com' in post['url']:
                    await self.steam.post(bot, channel, post)
                else:
                    await bot.get_channel(channel['channel_id']).send(post['url'])
                # if logging is enabled, post a log entry
                if 'logging' in channel:
                    await bot.get_channel(channel['logging']).send(
                        f"sent {post['title']} in {channel['channel_name']}")
            # there has been an error updating or inserting the deal
            else:
                # log it in the master log
                await bot.get_channel(self.masterLogger).send(
                    f"**DB Error**: Failed Updating/Inserting {post['id']}.")
async def cleaner(bot):
    db = database.Database()
    masterlog = common.getMasterLog()
    masterlog = bot.get_channel(masterlog)
    await masterlog.send("**Routine**: Purge gamedeals started.")
    if db.cleanGameDeal() == common.STATUS.SUCCESS:
        await masterlog.send("**DB Purge**: Purged gamedeals successfully.")
def process(self, sms_id):
    try:
        db = database.Database()
        smsen = db.read_sms(smsid=sms_id)
        if not smsen:
            smsgwglobals.wislogger.debug("WATCHDOG: no SMS with ID: "
                                         + sms_id + " in DB")
            # Add sms_id back to the queue and stop here
            self.queue.put(sms_id)
            return
    except error.DatabaseError as e:
        smsgwglobals.wislogger.debug(e.message)
        # Add sms_id back to the queue and stop here
        self.queue.put(sms_id)
        return

    # we have an SMS, just process it
    sms = smsen[0]
    smsgwglobals.wislogger.debug("WATCHDOG: Process SMS: " + str(sms))

    # create an Smstransfer object for easy handling
    smstrans = Smstransfer(**sms)
    route = wisglobals.rdb.read_routing(smstrans.smsdict["modemid"])
    if route is None or len(route) == 0:
        smsgwglobals.wislogger.debug("WATCHDOG: ALERT ROUTE LOST")
        # try to reprocess the route
        try:
            smstrans.updatedb()
            Helper.processsms(smstrans)
        except apperror.NoRoutesFoundError:
            pass
        else:
            self.queue.put(smstrans.smsdict["smsid"])
    elif route[0]["wisid"] != wisglobals.wisid:
        self.deligate(smstrans, route)
    else:
        # we have a route and this wis is the correct one,
        # therefore hand the sms over to the PIS
        # this is a bad hack to ignore obsolete routes
        # this may lead to an error, fixme
        route[:] = [d for d in route if d['obsolete'] < 1]
        smsgwglobals.wislogger.debug("WATCHDOG: process with route %s ",
                                     str(route))
        smsgwglobals.wislogger.debug("WATCHDOG: Sending to PIS %s", str(sms))

        # only continue if the route contains data
        if len(route) > 0:
            self.dispatch_sms(smstrans, route)
        else:
            # Reprocess
            try:
                smstrans.updatedb()
                Helper.processsms(smstrans)
            except apperror.NoRoutesFoundError:
                pass
            else:
                self.queue.put(smstrans.smsdict["smsid"])
async def cleaner(self, bot):
    db = database.Database()
    masterlog = common.getMasterLog()
    masterlog = bot.get_channel(masterlog)
    await masterlog.send("**Routine**: Purge crackwatch started.")
    if db.cleanCrackwatch() == common.STATUS.SUCCESS:
        await masterlog.send("**DB Purge**: Purged crackwatch successfully.")
def report(self):
    smsgwglobals.wislogger.debug("STATS: Logstash reporter running")
    smsgwglobals.wislogger.debug("STATS: Logstash reporter token: "
                                 + self.logstashstatstoken)
    smsgwglobals.wislogger.debug("STATS: Logstash reporter server "
                                 + self.logstashstatsserver)

    db = database.Database()
    timestamp = db.read_statstimestamp()
    smsgwglobals.wislogger.debug("STATS: Logstash last timestamp "
                                 + str(timestamp))

    smsen = None
    if len(timestamp) == 0:
        smsen = db.read_sucsmsstats()
    else:
        smsen = db.read_sucsmsstats(timestamp[0]['lasttimestamp'])

    retval = {}
    retval['all'] = len(smsen)
    retval['pro'] = 0
    smsgwglobals.wislogger.debug("STATS: Logstash data count to process "
                                 + str(len(smsen)))

    datafieldsrow = {}
    for sms in smsen:
        datafieldsrow['smsintime'] = re.sub(' ', 'T', sms['smsintime'])
        datafieldsrow['statustime'] = re.sub(' ', 'T', sms['statustime'])
        datafieldsrow['targetnr'] = re.sub('.{3}$', 'XXX', sms['targetnr'])
        datafieldsrow['appid'] = sms['appid']
        datafieldsrow['modemid'] = sms['modemid']
        datafieldsrow['imsi'] = sms['imsi']
        datafieldsrow['sourceip'] = sms['sourceip']
        datafieldsrow['xforwardedfor'] = sms['xforwardedfor']

        try:
            self.send(datafieldsrow=datafieldsrow,
                      timestamp=datafieldsrow['statustime'])
            db.write_statstimestamp(sms['statustime'])
            retval['pro'] += 1
        except RuntimeError as e:
            smsgwglobals.wislogger.debug(
                "STATS: Logstash reporter send exception " + str(e))
            raise

    return retval
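# Standalone illustration of the masking applied to 'targetnr' above: re.sub
# with the anchored pattern '.{3}$' replaces the last three characters of the
# number before the stats row is shipped to Logstash. The sample number below
# is made up.
import re

number = "+431234567890"
masked = re.sub('.{3}$', 'XXX', number)
print(masked)  # +4312345678XXX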
async def on_ready():
    # if the environment is not dev, load the regular cogs
    if common.getEnvironment() != 'dev':
        db = database.Database()

        # update bot start time
        db.updateBotStartTime()

        # update guild info for every guild the bot is in
        for guilds in bot.guilds:
            guild.updateGuidInfo(guilds)

        for module in modules_prod:
            bot.load_extension(module)
    # load cogs which are under development
    else:
        for module in modules_dev:
            bot.load_extension(module)
        await bot.change_presence(
            status=discord.Status.online,
            activity=discord.CustomActivity(name='Testing mode >.<'))
def raise_heartbeat(self, routingid):
    smsgwglobals.wislogger.debug("ROUTERDB: Raising Heartbeat"
                                 + " routing entries...")
    try:
        now = datetime.utcnow()
        smsgwglobals.wislogger.debug("ROUTERDB: NEW HEARTBEAT " + str(now))
        query = ("UPDATE routing SET "
                 + "changed = ? ,"
                 + "obsolete = 0 "
                 + "WHERE routingid = ? "
                 + "AND obsolete < 14")

        routes = self.read_routing()
        current_route = [route for route in routes
                         if route['routingid'] == routingid]
        if current_route:
            db = database.Database()
            sms_count = db.read_sms_count_by_imsi(current_route[0]["imsi"])
            # Looks like we entered a new day - reset the sms counter
            if sms_count == 0:
                self.reset_sms_count(routingid)

        rdblock.acquire()
        result = self.cur.execute(query, [now, routingid])
        count = result.rowcount
        self.con.commit()
        smsgwglobals.wislogger.debug("ROUTERDB: " + str(count)
                                     + " routing HEARTBEAT updated!")
        return count
    except Exception as e:
        smsgwglobals.wislogger.critical(
            "ROUTERDB: " + query + " failed! [EXCEPTION]:%s", e)
        raise error.DatabaseError("Unable to change obsolete! ", e)
    finally:
        rdblock.release()
async def run(self, bot):
    db = database.Database()
    masterLogger = common.getMasterLog()

    # request the page
    req = requests.get(self.url)

    # get the service from the database
    service = db.getService("csgoupdates")

    if common.getEnvironment() == 'dev':
        # post log in logging channel
        await bot.get_channel(masterLogger).send("**Scraped**: CSGO Updates.")

    # return variable
    updates = []

    # soupy
    soup = BeautifulSoup(req.content, 'html5lib')

    # post container
    container = soup.find('div', attrs={'id': 'post_container'})

    # iterate through each post
    for post in container.findAll('div', attrs={'class': 'inner_post'}):
        posts = {}
        posts['title'] = post.h2.text

        # beautify the date
        posts['date'] = post.find('p', attrs={'class': 'post_date'}).text[:-5]
        posts['date'] = str(datetime.strptime(posts['date'], "%Y.%m.%d"))[:-9]

        posts['url'] = post.select("a")[0].get('href')

        # patchnotes
        posts['patchnotes'] = []
        for p in post.findAll('p'):
            try:
                if p.attrs['class']:
                    pass
            except KeyError:
                # remove html tags
                posts['patchnotes'].append(
                    str(p).replace("<br/>", "").replace("<p>", "").replace("</p>", ""))
        posts['patchnotes'] = "\n\n".join(posts['patchnotes'])
        posts['patchnotes'] = w3lib.html.remove_tags(posts['patchnotes'])

        posts['id'] = posts['date']
        posts['service_name'] = 'csgoupdates'
        posts['service_id'] = str(service['_id'])

        status = db.upsertPatchnotes(posts)
        if status == common.STATUS.INSERTED:
            updates.append(posts)
        elif status == common.STATUS.REDUNDANT:
            break
        else:
            await bot.get_channel(masterLogger).send(
                f"**Scrape Error - CSGO Updates**: id = {posts['id']}.")

    # process the list in ascending order
    for update in updates[::-1]:
        # discord embed description limit
        if len(update['patchnotes']) >= 2048:
            update['patchnotes'] = update['patchnotes'][:2040] + "\n..."

        # send an embed message
        embed = discord.Embed(title=update["title"],
                              url=update["url"],
                              description=update["patchnotes"])
        embed.add_field(name="Date", value=update["date"], inline=True)

        # send the message to every channel with the service enabled
        channels = guild.getChannels("csgoupdates")
        for channel in channels:
            await bot.get_channel(channel["channel_id"]).send(embed=embed)

            # if logging is enabled, post a log entry
            if "logging" in channel:
                await bot.get_channel(channel["logging"]).send(
                    f"sent {update['title']} in {channel['channel_name']}")

        # sleep for 1 second
        await asyncio.sleep(1)

    # update the database
    data = {}
    data["name"] = "csgoupdates"
    if len(updates) > 0:
        data["lastposted"] = common.getDatetimeIST()
        data["latest"] = updates[0]["date"]

    status = db.upsertService(data)
    if status == common.STATUS.SUCCESS.INSERTED:
        await bot.get_channel(masterLogger).send(
            f"**Created Service**: {data['name']}.")
    elif status == common.STATUS.FAIL.INSERT:
        await bot.get_channel(masterLogger).send(
            f"**DB Insert Error - Service**: {data['name']}.")
    elif status == common.STATUS.FAIL.UPDATE:
        await bot.get_channel(masterLogger).send(
            f"**DB Update Error - Service**: {data['name']}.")
    else:
        pass
def process(self):
    smsgwglobals.wislogger.debug("WATCHDOG: processing sms")

    # check if we have SMS to work on
    smscount = 0
    try:
        db = database.Database()
        # clean up old sms
        db.delete_old_sms(wisglobals.cleanupseconds)

        smsen = db.read_sms(status=0)
        smsen = smsen + db.read_sms(status=1)
        smscount = len(smsen)
        if smscount == 0:
            smsgwglobals.wislogger.debug("WATCHDOG: "
                                         + "no SMS to process")
            return
    except error.DatabaseError as e:
        smsgwglobals.wislogger.debug(e.message)
        return

    # we have sms, just process them
    smsgwglobals.wislogger.debug("WATCHDOG: Count to process: %s",
                                 str(smscount))
    while smscount > 0:
        for sms in smsen:
            smsgwglobals.wislogger.debug("WATCHDOG: Process SMS: " + str(sms))

            # create an Smstransfer object for easy handling
            smstrans = smstransfer.Smstransfer(**sms)

            # check if we have routes; if we have none,
            # set the error code and continue with the next sms
            routes = wisglobals.rdb.read_routing()
            if routes is None or len(routes) == 0:
                smstrans.smsdict["statustime"] = datetime.utcnow()
                smstrans.smsdict["status"] = 100
                smsgwglobals.wislogger.debug(
                    "WATCHDOG: NO routes to process SMS: "
                    + str(smstrans.smsdict))
                smstrans.updatedb()
                continue

            # check if the modemid exists in the routing table
            route = wisglobals.rdb.read_routing(
                smstrans.smsdict["modemid"])
            if route is None or len(route) == 0:
                smsgwglobals.wislogger.debug("WATCHDOG: "
                                             + " ALERT ROUTE LOST")
                # try to reprocess the route
                smstrans.smsdict["status"] = 106
                smstrans.updatedb()
                Helper.processsms(smstrans)
            elif route[0]["wisid"] != wisglobals.wisid:
                self.deligate(smstrans, route)
            else:
                # we have a route and this wis is the correct one,
                # therefore hand the sms over to the PIS
                # this is a bad hack to ignore obsolete routes
                # this may lead to an error, fixme
                route[:] = [d for d in route if d['obsolete'] < 13]
                smsgwglobals.wislogger.debug(
                    "WATCHDOG: process with route %s ", str(route))
                smsgwglobals.wislogger.debug("WATCHDOG: Sending to PIS %s",
                                             str(sms))

                # only continue if the route contains data
                if len(route) > 0:
                    self.send(smstrans, route)

        smsen = db.read_sms(status=0)
        smsen = smsen + db.read_sms(status=1)
        smscount = len(smsen)
def write_routing(self, route, changed=None):
    """Insert or replace a routing entry

    Attributes:
    wisid           ... text-1st part of primary key
    modemid         ... text-serving modem number-2nd part of primary key
    regex           ... text-regex to match numbers for this modem
    sms_count       ... int-number of sms delivered
    sms_limit       ... int-limit of sms to be sent via this route
    account_balance ... float-amount of money left on the SIM card
    imsi            ... text-serving SIM card IMSI
    imei            ... text-serving modem IMEI
    carrier         ... text-serving network carrier name/code
    lbfactor        ... int-load-balancing factor for different contingents
    wisurl          ... text-url of the wis
    pisurl          ... text-url of the pis
    obsolete        ... int-route is flagged for deletion
    modemname       ... text-long name of the modem
    sim_blocked     ... flag marking the SIM card as blocked
    routingid       ... text-id of the routing entry
    changed         ... datetime.utcnow-when the entry was changed
    """
    query = (
        "INSERT OR REPLACE INTO routing "
        + "(wisid, modemid, regex, sms_count, sms_limit, account_balance, "
        + "imsi, imei, carrier, lbfactor, wisurl, "
        + "pisurl, obsolete, modemname, sim_blocked, routingid, changed) "
        + "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) ")

    # read sms_count if it exists
    db = database.Database()
    sms_count = db.read_sms_count_by_imsi(route["imsi"])

    if changed is None:
        changed = datetime.utcnow()

    try:
        smsgwglobals.wislogger.debug(
            "ROUTERDB: Write into routing"
            + " :wisid: " + route["wisid"]
            + " :modemid: " + route["modemid"]
            + " :regex: " + route["regex"]
            + " :sms_count: " + str(sms_count)
            + " :sms_limit: " + str(route["sms_limit"])
            + " :account_balance: " + str(route["account_balance"])
            + " :imsi: " + route["imsi"]
            + " :imei: " + route["imei"]
            + " :carrier: " + route["carrier"]
            + " :lbfactor: " + str(route["lbfactor"])
            + " :wisurl: " + route["wisurl"]
            + " :pisurl: " + route["pisurl"]
            + " :obsolete: " + str(route["obsolete"])
            + " :modemname: " + route["modemname"]
            + " :sim_blocked: " + route["sim_blocked"]
            + " :routingid: " + route["routingid"]
            + " :changed: " + str(changed))

        rdblock.acquire()
        self.cur.execute(
            query,
            (route["wisid"], route["modemid"], route["regex"], sms_count,
             route["sms_limit"], route["account_balance"], route["imsi"],
             route["imei"], route["carrier"], route["lbfactor"],
             route["wisurl"], route["pisurl"], route["obsolete"],
             route["modemname"], route["sim_blocked"], route["routingid"],
             changed))
        self.con.commit()
        smsgwglobals.wislogger.debug("ROUTERDB: INSERT!")
    except Exception as e:
        smsgwglobals.wislogger.critical(
            "ROUTERDB: " + query + " failed! [EXCEPTION]:%s", e)
        raise error.DatabaseError("Unable to INSERT routing entry! ", e)
    finally:
        rdblock.release()
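# Standalone sketch of the "INSERT OR REPLACE" upsert semantics write_routing()
# relies on, using an in-memory SQLite database and a simplified two-column
# schema (not the project's routing schema).
import sqlite3

con = sqlite3.connect(":memory:")
cur = con.cursor()
cur.execute("CREATE TABLE routing (routingid TEXT PRIMARY KEY, obsolete INTEGER)")

# The first statement inserts the row; the second, with the same primary key,
# replaces it instead of failing with a constraint error.
cur.execute("INSERT OR REPLACE INTO routing VALUES (?, ?)", ("r1", 0))
cur.execute("INSERT OR REPLACE INTO routing VALUES (?, ?)", ("r1", 3))
con.commit()

print(cur.execute("SELECT * FROM routing").fetchall())  # [('r1', 3)]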
from flask import *

from common import database

APP = Flask(__name__)
SESSIONS = {}


@APP.route('/')
def home():
    template = "/coming-soon/coming-soon.html"
    return render_template(template)


if __name__ == '__main__':
    database.Database(cfg_from_file=False)
    APP.run(debug=True)
async def run(self, bot):
    masterLogger = common.getMasterLog()
    db = database.Database()

    # r/CrackWatch
    subreddit = []
    try:
        subreddit = self.getSubreddit(30)
    except Exception:
        await bot.get_channel(masterLogger).send(
            "**Error - Reddit**: unable to fetch r/crackwatch")

    # get the latest entry from the database
    service = db.getService("crackwatch")
    if 'latest' not in service:
        service['latest'] = None

    if common.getEnvironment() == 'dev':
        # post log in logging channel
        await bot.get_channel(masterLogger).send("**Scraped**: CrackWatch.")

    # create deque
    posts = deque()
    id = None

    # append data to the deque
    for submission in subreddit:
        if not id:
            id = submission.id
        if submission.id == service['latest']:
            break

        post = {}
        post['id'] = submission.id
        post['title'] = submission.title
        post['url'] = submission.url
        post['selftext'] = submission.selftext
        post['created'] = common.getTimeFromTimestamp(submission.created)
        # keep the flair only if it is a relevant one, otherwise mark it missing
        post['flair'] = None
        if self.crackHead(submission.link_flair_text):
            post['flair'] = submission.link_flair_text
        posts.appendleft(post)

    # go through new submissions
    for i in range(len(posts)):
        if not posts[i]['flair'] and common.getEnvironment() == 'dev':
            await bot.get_channel(masterLogger).send(
                f"**Error in CW [no flair]** : check {posts[i]['url']}.")
        else:
            if 'release' in posts[i]['flair'].lower():
                posts[i]['type'] = 'crack'
            elif "repack" in posts[i]['flair'].lower():
                posts[i]['type'] = 'repack'
            else:
                posts[i]['type'] = 'unknown'

            status = db.upsertCrackwatch(posts[i])
            if status == common.STATUS.SUCCESS.INSERTED:
                # check for release flair
                if posts[i]['type'] == 'crack':
                    description = posts[i]['selftext']
                    # discord embed description limit
                    if len(posts[i]['selftext']) >= 2048:
                        description = description[:2040] + "\n..."

                    embed = discord.Embed(
                        title=posts[i]['title'],
                        url=posts[i]['url'],
                        description=description
                    )
                    links = re.findall(
                        r"(?:http\:|https\:)?\/\/\S*\.(?:png|jpg)[A-Za-z0-9.\/\-\_\?\=\:]*",
                        description)
                    if len(links) > 0:
                        embed.set_image(url=links[0])
                    elif posts[i]['url'].endswith(".jpg") or posts[i]['url'].endswith(".png"):
                        embed.set_image(url=posts[i]['url'])
                    embed.add_field(
                        name="Time",
                        value=posts[i]['created'],
                        inline=True
                    )

                    # send message
                    channels = guild.getChannels("crackwatch")
                    for channel in channels:
                        await bot.get_channel(channel["channel_id"]).send(embed=embed)

                        # if logging is enabled, post a log entry
                        if "logging" in channel:
                            await bot.get_channel(channel["logging"]).send(
                                f"sent {posts[i]['title']} in {channel['channel_name']}")

                    # sleep for 1 second
                    await asyncio.sleep(1)

                # check for repack flair
                elif posts[i]['type'] == 'repack':
                    embed = discord.Embed(
                        title=posts[i]['title'],
                        url=posts[i]['url'],
                        description=posts[i]['selftext']
                    )
                    if posts[i]['url'].endswith(".jpg") or posts[i]['url'].endswith(".png"):
                        embed.set_image(url=posts[i]['url'])
                    embed.add_field(
                        name="Time",
                        value=posts[i]['created'],
                        inline=True
                    )

                    # send message
                    channels = guild.getChannels("repacknews")
                    for channel in channels:
                        await bot.get_channel(channel["channel_id"]).send(embed=embed)

                        # if logging is enabled, post a log entry
                        if "logging" in channel:
                            await bot.get_channel(channel["logging"]).send(
                                f"sent {posts[i]['title']} in {channel['channel_name']}")

                    # sleep for 1 second
                    await asyncio.sleep(1)

            if status == common.STATUS.FAIL.UPDATE or status == common.STATUS.FAIL.INSERT:
                await bot.get_channel(masterLogger).send(
                    f"**DB Error - crackwatch**: Failed Updated/Insert for id = {posts[i]['id']}.")

    # update the database
    data = {}
    data["name"] = "crackwatch"
    if len(posts) > 0:
        data["lastposted"] = common.getDatetimeIST()
        data["latest"] = id

    status = db.upsertService(data)
    if status == common.STATUS.SUCCESS.INSERTED:
        await bot.get_channel(masterLogger).send(
            f"**Created Service**: {data['name']}.")
    elif status == common.STATUS.FAIL.INSERT:
        await bot.get_channel(masterLogger).send(
            f"**DB Insert Error - Service**: {data['name']}.")
    elif status == common.STATUS.FAIL.UPDATE:
        await bot.get_channel(masterLogger).send(
            f"**DB Update Error - Service**: {data['name']}.")
    else:
        pass

    data["name"] = "repacknews"
    status = db.upsertService(data)
    if status == common.STATUS.SUCCESS.INSERTED:
        await bot.get_channel(masterLogger).send(
            f"**Created Service**: {data['name']}.")
    elif status == common.STATUS.FAIL.INSERT:
        await bot.get_channel(masterLogger).send(
            f"**DB Insert Error - Service**: {data['name']}.")
    elif status == common.STATUS.FAIL.UPDATE:
        await bot.get_channel(masterLogger).send(
            f"**DB Update Error - Service**: {data['name']}.")
    else:
        pass
def getrouting(self):
    str_list = []
    th = []
    tr = []

    rows = wisglobals.rdb.read_routing(web=True)
    if len(rows) == 0:
        return "No routes available!"

    if len(rows) > 0:
        db = database.Database()
        sim_sms_sent = db.read_sms_count_by_imsi(real_sent=True,
                                                 all_imsi=True)
        for row in rows:
            for cnt in sim_sms_sent:
                if cnt["imsi"] == row["imsi"]:
                    row["sms_sent"] = cnt["sms_count"]
            if not row.get("sms_sent"):
                row["sms_sent"] = 0
            # Too lazy to rename the field in the db, so change it just for the frontend output
            row["sms_scheduled"] = row["sms_count"]
            del row["sms_count"]
            del row["modemname"]

    if len(rows) > 0:
        # Add real sent sms field
        od = collections.OrderedDict(sorted(rows[0].items()))
        for k, v in od.items():
            th.append(k)
        for row in rows:
            od = collections.OrderedDict(sorted(row.items()))
            td = []
            for k, v in od.items():
                td.append(v)
            tr.append(td)

    str_list.append('<table id="routingTable" class="tablesorter">\n')
    str_list.append('<thead>\n')
    str_list.append('<tr>\n')
    for h in th:
        str_list.append('<th>' + h + '</th>\n')
    str_list.append('</tr>\n')
    str_list.append('</thead>\n')
    str_list.append('<tbody>\n')
    for r in tr:
        str_list.append('<tr>\n')
        for d in r:
            txt = None
            if "http" in str(d):
                txt = '<a href="' + str(d) + '" target="_blank">' + str(d) + '</a>'
            else:
                txt = str(d)
            str_list.append('<td>' + txt + '</td>\n')
        str_list.append('</tr>')
    str_list.append('</tbody>\n')
    str_list.append('</table>\n')
    return ''.join(str_list)
from flask import Flask, render_template, redirect, url_for, request, session
from flask_cors import CORS
from common import database
from werkzeug.utils import secure_filename
from modules.animal import Animal
import os
import uuid
import random

UPLOAD_FOLDER = './static/uploads'

app = Flask(__name__)
app.secret_key = 'ufvowevgouwveougvweoivg30808213tifg20v8g0'
CORS(app)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER

db = database.Database()
db.initialize()


@app.route("/")
def main():
    results = db.search([None, None])
    results = random.sample(results, 2)
    if 'username' not in session:
        return render_template('home-multipage.html', results=results)
    else:
        print(session['isAdmin'])
        return render_template('home-multipage.html',
                               user=session['username'],
                               isAdmin=session['isAdmin'],
                               results=results)
async def run(self, bot):
    masterLogger = common.getMasterLog()
    db = database.Database()

    # subreddits to fetch
    subreddits = ['gamedeals', 'steamdeals', 'freegamefindings']

    # final container of new, distinct deals
    enriched_post = deque()

    # for each subreddit
    for subreddit in subreddits:
        # get the service record
        service = db.getService(subreddit)
        if 'latest' not in service:
            service['latest'] = None

        # get the latest submissions
        posts = []
        try:
            posts = self.getSubreddit(subreddit, 30)
        except Exception:
            await bot.get_channel(masterLogger).send(
                f"**Error** : unable to fetch r/{subreddit}")

        # id container
        id = None

        if common.getEnvironment() == 'dev':
            # post log in masterlogger
            await bot.get_channel(masterLogger).send(f"scraped {subreddit}.")

        # iterate through posts
        for post in posts:
            # this is done to capture the first (newest) id
            if not id:
                id = post.id

            # if there are no new posts, stop
            if post.id == service['latest']:
                break

            if isFromAcceptableStore(post):
                deal = {}
                deal['title'] = post.title
                deal['id'] = post.id
                if "reddit.com" in post.url:
                    deal['url'] = gamedeals.getStoreLink(post)
                else:
                    deal['url'] = gamedeals.removeURI(post.url)
                deal['created'] = common.getTimeFromTimestamp(post.created)

                if 'url' in deal and deal['url']:
                    # check if it is a steam store link
                    if 'steampowered.com' in deal['url']:
                        price = None
                        try:
                            price = self.ssf.getPrice(url=deal['url'])
                        except InvalidArgument as e:
                            if common.getEnvironment() == 'prod' or common.getEnvironment() == 'dev':
                                await bot.get_channel(masterLogger).send(
                                    f"error getting price for {deal['url']} of reddit id {deal['id']}. "
                                    f"Arguments passed {e.error}, error type {e.type}.")
                        if price:
                            deal['price'] = price['final']

                    if self.keyDoesNotExists(enriched_post, deal):
                        enriched_post.appendleft(deal)

        # update the database
        data = {}
        data["name"] = subreddit
        if len(enriched_post) > 0:
            data["lastposted"] = common.getDatetimeIST()
        if id:
            data["latest"] = id

        status = db.upsertService(data)
        if status == common.STATUS.SUCCESS.INSERTED:
            await bot.get_channel(masterLogger).send(
                f"**Created Service**: {data['name']}.")
        elif status == common.STATUS.FAIL.INSERT:
            await bot.get_channel(masterLogger).send(
                f"**DB Insert Error - Service**: {data['name']}.")
        elif status == common.STATUS.FAIL.UPDATE:
            await bot.get_channel(masterLogger).send(
                f"**DB Update Error - Service**: {data['name']}.")
        else:
            pass

    # send the final deque for posting
    await self.send(enriched_post, bot)
async def run(self, bot):
    db = database.Database()
    masterLogger = common.getMasterLog()

    # request the page
    req = requests.get(self.url)

    # get the service from the database
    service = db.getService("destinyupdates")

    if common.getEnvironment() == 'dev':
        # post log in logging channel
        await bot.get_channel(masterLogger).send("**Scraped**: Destiny 2 Updates.")

    # return variable
    updates = []

    # soupy
    soup = BeautifulSoup(req.content, 'html5lib')

    # post container
    container = soup.find('div', attrs={'id': 'explore-contents'})

    # iterate through each post
    i = 0
    for post in container.findAll('a'):
        # only look at the newest few updates
        i = i + 1
        if i >= 4:
            break

        posts = {}
        posts['link'] = post.get('href')
        posts['title'] = post.find('div', {"class": "title"}).text

        # additional request for the date and patchnotes
        detail_req = requests.get('https://www.bungie.net/' + posts['link'])
        detail_soup = BeautifulSoup(detail_req.content, 'html5lib')

        # beautify the date
        posts['date'] = detail_soup.select("div.metadata")[0].text
        posts['date'] = posts['date'].split("-")[0].strip()
        try:
            # the date is older than 1 day
            posts['date'] = str(
                datetime.strptime(posts['date'], "%b %d, %Y"))[:-9]
        except Exception:
            # convert relative hours to a date
            posts['date'] = posts['date'].replace("h", "")
            delta = timedelta(hours=int(posts['date']))
            posts['date'] = str(
                datetime.strftime((datetime.today() - delta), "%b %d, %Y"))
            posts['date'] = str(
                datetime.strptime(posts['date'], "%b %d, %Y"))[:-9]

        posts['patchnotes'] = str(
            detail_soup.find("div", attrs={"class": "content text-content"}))

        # strip the html tags from the patchnotes
        posts['patchnotes'] = posts['patchnotes'].replace("\t", "").replace("\n", "")
        posts['patchnotes'] = posts['patchnotes'].replace(
            "<div class=\"content text-content\">", "")
        posts['patchnotes'] = posts['patchnotes'].replace("<h2>", "\n").replace("</h2>", "")
        posts['patchnotes'] = posts['patchnotes'].replace("<h3>", "\n").replace("</h3>", "")
        posts['patchnotes'] = posts['patchnotes'].replace("<div>", "\n").replace("</div>", "")
        posts['patchnotes'] = posts['patchnotes'].replace("<ul>", "").replace("</ul>", "")
        posts['patchnotes'] = posts['patchnotes'].replace("<li>", "- ").replace("</li>", "\n")
        posts['patchnotes'] = posts['patchnotes'].replace("<b>", "").replace("</b>", "")
        posts['patchnotes'] = posts['patchnotes'].replace("<u>", "").replace("</u>", "")
        posts['patchnotes'] = posts['patchnotes'].replace("<br/><br/>", "\n").replace("<br/>", "")
        posts['patchnotes'] = posts['patchnotes'].replace("<hr/>", "")
        posts['patchnotes'] = posts['patchnotes'].replace("<p", " <p")
        posts['patchnotes'] = w3lib.html.remove_tags(posts['patchnotes'])

        posts['id'] = posts['date']
        posts['service_name'] = 'destinyupdates'
        posts['service_id'] = str(service['_id'])

        status = db.upsertPatchnotes(posts)
        if status == common.STATUS.INSERTED:
            updates.append(posts)
        elif status == common.STATUS.REDUNDANT:
            break
        else:
            await bot.get_channel(masterLogger).send(
                f"**Scrape Error - Destiny 2 Updates**: id = {posts['id']}.")

    # process the list in ascending order
    for update in updates[::-1]:
        # discord embed description limit
        if len(update['patchnotes']) >= 2048:
            update['patchnotes'] = update['patchnotes'][:2040] + "\n..."

        # send an embed message
        embed = discord.Embed(title=update["title"],
                              url="https://www.bungie.net/" + update["link"],
                              description=update['patchnotes'])
        embed.add_field(name="Date", value=update["date"], inline=True)

        # get all the channels with the service enabled
        channels = guild.getChannels("destinyupdates")
        for channel in channels:
            await bot.get_channel(channel["channel_id"]).send(embed=embed)

            # if logging is enabled, post a log entry
            if "logging" in channel:
                await bot.get_channel(channel["logging"]).send(
                    f"sent {update['title']} in {channel['channel_name']}")

        # sleep for 1 second
        await asyncio.sleep(1)

    # update the database
    data = {}
    data["name"] = "destinyupdates"
    if len(updates) != 0:
        data["lastposted"] = common.getDatetimeIST()
        data["latest"] = updates[len(updates) - 1]["date"]

    status = db.upsertService(data)
    if status == common.STATUS.SUCCESS.INSERTED:
        await bot.get_channel(masterLogger).send(
            f"**Created Service**: {data['name']}.")
    elif status == common.STATUS.FAIL.INSERT:
        await bot.get_channel(masterLogger).send(
            f"**DB Insert Error - Service**: {data['name']}.")
    elif status == common.STATUS.FAIL.UPDATE:
        await bot.get_channel(masterLogger).send(
            f"**DB Update Error - Service**: {data['name']}.")
    else:
        pass
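# A small standalone helper illustrating the description-truncation pattern the
# update scrapers above repeat inline; 2048 characters is Discord's embed
# description cap, and the helper name is illustrative, not project code.
EMBED_DESCRIPTION_LIMIT = 2048


def clamp_description(text, limit=EMBED_DESCRIPTION_LIMIT):
    """Trim text so it fits into a Discord embed description, marking the cut."""
    if len(text) < limit:
        return text
    return text[:limit - 8] + "\n..."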