def do_GET(self):
    if 0:  # debug short-circuit: reply immediately to measure baseline latency
        self.send_response(200)
        self.end_headers()
        self.wfile.write(b'fast')
        return
    watch = Stopwatch()
    global ourData
    try:
        args = self.path.split('?')
        if len(args) < 2:
            print("no size arg", args)
            return
        sizearg = int(args[1])
        front = '[%s]\n' % sizearg
    except Exception as e:
        self.send_response(500)
        self.end_headers()
        err_out = str(e).encode()
        self.wfile.write(err_out)
        return
    self.send_response(200)
    self.end_headers()
    self.wfile.write(front.encode())
    #print('len:', len(ourData.slices.get(sizearg, 'missing')))
    # wfile expects bytes, so the fallback must be a bytes literal
    self.wfile.write(ourData.slices.get(sizearg, b'missing'))
    watch.measure()
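# The Stopwatch helper used throughout these functions is not defined in this
# section. Below is a minimal sketch of what it is assumed to look like, based on
# how it is called here (optional label, measure() with silent/show keywords,
# reset()); it is not the original implementation.
import time

class Stopwatch:
    """Elapsed-time helper; measure() returns seconds since the last reset/measure."""

    def __init__(self, label=''):
        self.label = label
        self.start = time.perf_counter()

    def reset(self):
        self.start = time.perf_counter()

    def measure(self, silent=False, show=True):
        elapsed = time.perf_counter() - self.start
        if show and not silent:
            print(self.label, 'took %.3fs' % elapsed)
        self.start = time.perf_counter()  # restart so consecutive measures time each step
        return elapsed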
def main():
    killhandler = KillHandler()
    reddit = praw.Reddit(client_id=config.client_id,
                         client_secret=config.client_secret,
                         username=config.username,
                         password=config.password,
                         user_agent=config.user_agent)

    logging.info("Starting checking submissions...")
    stopwatch = Stopwatch()

    while not killhandler.killed:
        try:
            for submission in reddit.subreddit('+'.join(config.subreddits)).\
                    stream.submissions(skip_existing=True):
                duration = stopwatch.measure()
                logging.info(f"New submission: {submission}")
                logging.info(f" -- retrieved in {duration:5.2f}s")

                # We don't need to post a sticky on stickied posts
                if submission.stickied:
                    logging.info(" -- skipping (stickied)")
                    continue

                # Post a comment to let people know where to invest
                bot_reply = submission.reply_wrap(message.invest_place_here)

                # Sticky the comment
                if config.is_moderator:
                    bot_reply.mod.distinguish(how='yes', sticky=True)

                # Measure how long processing took
                duration = stopwatch.measure()
                logging.info(f" -- processed in {duration:5.2f}s")

                if killhandler.killed:
                    logging.info("Termination signal received - exiting")
                    break
        except prawcore.exceptions.OAuthException as e_creds:
            traceback.print_exc()
            logging.error(e_creds)
            logging.critical("Invalid login credentials. Check your .env!")
            logging.critical("Fatal error. Cannot continue or fix the problem. Bailing out...")
            exit()
        except Exception as e:
            logging.error(e)
            traceback.print_exc()
            time.sleep(10)
def doDB_Work(limit=9999):
    dbServer = r'jg-pc\jg1'
    dbName = 'Ajour_System_A/S_10aee6e8b7dd4f4a8fd0cbf9cedf91cb'
    theDBCfg = {
        'server': dbServer,
        'db': dbName,
        'user': '******',
        'pwd': 'morOg234'
    }
    with getConn(theDBCfg) as conn:
        csr = conn.cursor()
        sw = Stopwatch()
        a = selectFromDB(csr)
        sw.measure()
        print(len(a))
def getBCbyVsId(vsId='B14462', releaseId='624d9489-0f8c-48db-99f9-67701a040742'):
    global headers, url
    bcUrl = url + '/api/v1/BuildingComponent/VSID/{vsId}/Release/{releaseId}'.format(
        vsId=vsId, releaseId=releaseId)
    watch = Stopwatch('req')
    r = requests.get(bcUrl, headers=headers)
    watch.measure()
    logIfBad(r)
    exes = json.loads(r.text)
    print(len(r.text))
    if 0:
        pprint.pprint(exes)
def getBC_chunk(chunkSet):
    global headers, url
    bcUrl = url + '/api/v1/BuildingComponent/VSIDs/Release/624d9489-0f8c-48db-99f9-67701a040742?' + chunkSet  # bcArgs
    print(bcUrl)
    #return
    watch = Stopwatch('req')
    r = requests.post(bcUrl, headers=headers)
    watch.measure()
    logIfBad(r)
    exes = json.loads(r.text)
    print('len:', len(r.text))
    with open('BCs.json', "w") as outfile:
        json.dump(exes, outfile, indent=2)
    if 0:
        pprint.pprint(exes)
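# getBC_chunk() expects chunkSet as a pre-built query string of the form
# 'VSID1=...&VSID2=...' (compare the hard-coded URL in getBCs below). Here is a
# hedged sketch of a chunking helper; the function name and the chunk size are
# assumptions, not part of the original code.
def makeVsIdChunks(vsIds, chunkSize=9):
    """Yield query-string chunks like 'VSID1=B33321&VSID2=B96932&...'."""
    for i in range(0, len(vsIds), chunkSize):
        chunk = vsIds[i:i + chunkSize]
        yield '&'.join('VSID%d=%s' % (n + 1, v) for n, v in enumerate(chunk))

# Possible usage:
#   for chunkSet in makeVsIdChunks(['B33321', 'B96932', 'B75326']):
#       getBC_chunk(chunkSet)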
def GetDataForCatsFromServer(cat, desc, orgId, orgName, action):
    #action = 'GetData'
    #action = 'GetDataTest'
    reqUrl = base_url + '/api/Search/' + action  # GetData
    SearchDataRequest = {
        'ContextTypes': 'Organization',
        'BuiltInCategories': [cat],
        'Version': '2019',
        'OrgId': orgId
    }
    sw = Stopwatch()
    while True:
        verbose = False
        if verbose:
            print("before GetData", SearchDataRequest['BuiltInCategories'], desc, reqUrl)
        r = requests.post(reqUrl, json=SearchDataRequest, headers=headers)
        if r.status_code == 200:
            break
        print("after GetData", r.status_code, orgId, orgName)
        if r.status_code == 401:
            ensureLogin()  # re-authenticate and retry
        else:
            print(r.text)
            # Bare raise outside an except block raises RuntimeError and aborts here
            raise
            break  # or even throw?
    sw = Stopwatch()  # start new stopwatch.
    m = sw.measure(silent=True)
    return (r, m)
def getBCs(releaseId='624d9489-0f8c-48db-99f9-67701a040742'):
    global headers, url
    bcUrl = url + ('/api/v1/BuildingComponent/VSIDs/Release/{releaseId}'
                   '?VSID1=B33321&VSID2=B96932&VSID3=B75326&VSID4=B36547'
                   '&VSID5=B20553&VSID6=B59061&VSID7=B58491&VSID8=B80296'
                   '&VSID9=B81223').format(releaseId=releaseId)
    print(bcUrl)
    watch = Stopwatch('req')
    r = requests.post(bcUrl, headers=headers)
    watch.measure()
    logIfBad(r)
    exes = json.loads(r.text)
    print(len(r.text))
    with open('BCs.json', "w") as outfile:
        json.dump(exes, outfile, indent=2)
    if 0:
        pprint.pprint(exes)
def getPrices():
    global headers, url
    releaseID = '624d9489-0f8c-48db-99f9-67701a040742'
    priceUrl = url + '/api/v1/BuildingComponent/Calculate/Release/%s?' % releaseID
    priceUrl += 'id1=d03b8b89-fb70-47da-9a18-011132b22921&id2=32e920bb-e927-4a10-ad85-14c32bd197ff&id3=3dbf3ce5-f7e5-4745-bf18-1bd766c1b54d'
    priceUrl += '&amount1=10&amount2=10&amount3=10'
    priceUrl += "&yourCalculationId=1246"  # {myRandomCalculationNumber}
    print(priceUrl)
    watch = Stopwatch('req')
    r = requests.post(priceUrl, headers=headers)
    watch.measure()
    logIfBad(r)
    prices = json.loads(r.text)
    if 0:
        pprint.pprint(prices)
    with open('prices.json', "w") as outfile:
        json.dump(prices, outfile, indent=2)
def getBCbyGroup(  # get the BCs within a BC-group.
        groupId='B14462',
        releaseId='624d9489-0f8c-48db-99f9-67701a040742'):
    global headers, url
    bcUrl = url + '/api/v1/BuildingComponent/BuildingComponentGroup/{groupId}/Release/{releaseId}'.format(
        groupId=groupId, releaseId=releaseId)
    watch = Stopwatch('req')
    r = requests.get(bcUrl, headers=headers)
    watch.measure(show=False)
    logIfBad(r)
    exes = json.loads(r.text)
    if 0:
        print(len(r.text))
    if 0:
        pprint.pprint(exes)
    exes = exes['buildingComponents']
    return exes
def getPrice_chunk(chunkSet, chunkIx):
    global headers, url
    releaseID = '624d9489-0f8c-48db-99f9-67701a040742'
    priceUrl = url + '/api/v1/BuildingComponent/Calculate/Release/%s?' % releaseID
    priceUrl += chunkSet
    print(chunkIx, priceUrl)
    #return
    watch = Stopwatch('req')
    r = requests.post(priceUrl, headers=headers)
    watch.measure()
    logIfBad(r)
    prices = json.loads(r.text)
    if 0:
        pprint.pprint(prices)
    with open('prices.json', "w") as outfile:
        json.dump(prices, outfile, indent=2)
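# getPrice_chunk() expects chunkSet in the same idN/amountN form that getPrices()
# hard-codes above. A hedged sketch of a builder for such chunks follows; the
# function name, chunk size and fixed amount are assumptions, not original code.
def makePriceChunks(componentIds, amount=10, chunkSize=50):
    """Yield query-string chunks like 'id1=<guid>&amount1=10&id2=...'."""
    for i in range(0, len(componentIds), chunkSize):
        chunk = componentIds[i:i + chunkSize]
        parts = []
        for n, cid in enumerate(chunk, start=1):
            parts.append('id%d=%s&amount%d=%d' % (n, cid, n, amount))
        yield '&'.join(parts)

# Possible usage:
#   for ix, chunkSet in enumerate(makePriceChunks(allComponentIds)):
#       getPrice_chunk(chunkSet, ix)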
def GetToken_Password():  # oauth, exchange a password for a token.
    global env, url
    print(env['username'], env['password'])
    tokenReq = {
        'grant_type': 'password',
        'username': env['username'],  #+'2',
        'password': env['password'],
        'client_id': env['client_id'],
        'client_secret': env['client_secret']  #+'2'
    }
    token_url = url + '/token'
    print('before', token_url)
    watch = Stopwatch('req')
    r = requests.post(token_url, tokenReq)
    watch.measure()
    logIfBad(r)
    return r
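# GetToken_Password() returns the raw requests.Response from the /token endpoint.
# The request helpers above send a global `headers` dict, so the token presumably
# gets copied into an Authorization header at some point. A minimal sketch,
# assuming a standard OAuth2 password-grant JSON body; the helper name applyToken
# and the exact response field names are assumptions, not part of the original code.
def applyToken(tokenResponse):
    global headers
    body = tokenResponse.json()
    headers = {'Authorization': 'Bearer ' + body['access_token']}

# Possible usage:
#   applyToken(GetToken_Password())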
def __init__(self):
    engine = create_engine(config.DB, pool_recycle=60, pool_pre_ping=True)
    session_maker = sessionmaker(bind=engine)
    self.sess = session_maker()

    logging.info("Calculating parameters...")
    stopwatch = Stopwatch()
    self.calc_tiers()
    duration = stopwatch.measure()
    logging.info("Rates : %s", RATES)
    logging.info("Tiers : %s", self.tiers)
    logging.info("Basics : %s", self.basics)
    logging.info(" -- calculated in %ss", duration)
def collect(self):
    """Collect taxes"""
    logging.info("Fetching investors...")
    stopwatch = Stopwatch()
    investors = self.sess.query(Investor).\
        filter(Investor.completed > 0).\
        all()
    duration = stopwatch.measure()
    logging.info("Investors : %d", len(investors))
    logging.info(" -- fetched in %ss", duration)

    logging.info("Adjusting investors...")
    stopwatch.reset()
    for investor in investors:
        investor.balance = self.adjust_amount(investor)
    duration = stopwatch.measure()
    logging.info(" -- calculated in %ss", duration)

    logging.info("Committing ...")
    stopwatch.reset()
    self.sess.commit()
    self.sess.close()
    duration = stopwatch.measure()
    logging.info(" -- committed in %ss", duration)
def main():
    """
    This is the main function that listens to new submissions and
    then posts the ATTENTION sticky comment.
    """
    logging.info("Starting submitter...")
    logging.info("Sleeping for 8 seconds. Waiting for the database to turn on...")
    time.sleep(8)

    killhandler = KillHandler()
    engine = create_engine(config.DB, pool_recycle=60, pool_pre_ping=True)
    sess_maker = scoped_session(sessionmaker(bind=engine))
    reddit = praw.Reddit(client_id=config.CLIENT_ID,
                         client_secret=config.CLIENT_SECRET,
                         username=config.USERNAME,
                         password=config.PASSWORD,
                         user_agent=config.USER_AGENT)

    # We will test our reddit connection here
    if not utils.test_reddit_connection(reddit):
        exit()

    logging.info("Starting checking submissions...")
    stopwatch = Stopwatch()
    sess = sess_maker()
    submission_time = int(time.time())
    minimum_fee = config.SUBMISSION_MIN_FEE

    for submission in reddit.subreddit('+'.join(config.SUBREDDITS)).\
            stream.submissions(skip_existing=True):
        duration = stopwatch.measure()
        logging.info("New submission: %s", submission)
        logging.info(" -- retrieved in %ss", duration)

        # We don't need to post a sticky on stickied posts
        if submission.stickied:
            logging.info(" -- skipping (stickied)")
            continue

        # We are looking if the post was created in the past
        # so we won't double charge it
        if submission.created_utc < submission_time:
            logging.info(" -- skipping (timeout)")
            continue

        submission_time = int(submission.created_utc)
        logging.info(" -- Submission timestamp: %s",
                     time.asctime(time.gmtime(submission_time)))

        bot_reply = 0
        delete_post = False

        # This is a bit of a controversial update, so make it easy
        # to switch between the two modes
        if not config.SUBMISSION_FEE:
            # Post a comment to let people know where to invest
            bot_reply = submission.reply_wrap(
                message.invest_no_fee(f"u/{submission.author.name}"))
        else:
            # If the poster doesn't have an account, delete the post;
            # if they do, charge the submission fee
            investor = sess.query(Investor).\
                filter(Investor.name == submission.author.name).\
                first()
            if not investor:
                bot_reply = submission.reply_wrap(message.NO_ACCOUNT_POST_ORG)
                delete_post = True
                logging.info(" -- Not a registered investor!")
            elif investor.balance < minimum_fee:
                bot_reply = submission.reply_wrap(
                    message.modify_pay_to_post(investor.balance))
                delete_post = True
                logging.info(" -- Not enough funds!")
            else:
                # Charge a percentage fee, with a minimum
                required_fee = int(investor.balance * (config.SUBMISSION_FEE_PERCENT / 100))
                if required_fee < minimum_fee:
                    required_fee = minimum_fee
                new_balance = investor.balance - required_fee
                investor.balance = new_balance
                bot_reply = submission.\
                    reply_wrap(message.modify_invest_place_here(required_fee,
                                                                f"u/{submission.author.name}"))
                sess.commit()

        # Sticky the comment
        if config.IS_MODERATOR:
            bot_reply.mod.distinguish(how='yes', sticky=True)
            bot_reply.mod.approve()

        if delete_post:
            logging.info(" -- Deleting the post...")
            # Should we hide or just delete the post?
            submission.mod.remove()

        # Measure how long processing took
        duration = stopwatch.measure()
        logging.info(" -- processed in %.2fs", duration)

        if killhandler.killed:
            logging.info("Termination signal received - exiting")
            break
def reset_all():
    logging.info("Preparing for new season...")

    engine = create_engine()
    session_maker = sessionmaker(bind=engine)
    stopwatch = Stopwatch()

    logging.info("Reset investors...")
    stopwatch.measure()
    sess = session_maker()
    investors = sess.query(Investor).delete(synchronize_session=False)
    sess.commit()
    duration = stopwatch.measure()
    logging.info("Removed %d investors -- processed in %.2fs", investors, duration)
    sess.close()

    logging.info("Reset investments...")
    stopwatch.measure()
    sess = session_maker()
    investments = sess.query(Investment).delete(synchronize_session=False)
    sess.commit()
    duration = stopwatch.measure()
    logging.info("Removed %d investments -- processed in %.2fs", investments, duration)
    sess.close()

    logging.info("Reset posts...")
    stopwatch.measure()
    sess = session_maker()
    buyables = sess.query(Buyable).delete(synchronize_session=False)
    sess.commit()
    duration = stopwatch.measure()
    logging.info("Removed %d posts -- processed in %.2fs", buyables, duration)
    sess.close()

    logging.info("Clean up...")
    stopwatch.measure()
    sess = session_maker()
    sess.execute("VACUUM")
    sess.commit()
    duration = stopwatch.measure()
    logging.info("-- processed in %.2fs", duration)
    sess.close()
def main():
    logging.info("Starting main")

    if config.post_to_reddit:
        logging.info("Warning: Bot will actually post to Reddit!")

    logging.info("Setting up database")

    killhandler = KillHandler()
    engine = create_engine(config.db, pool_recycle=60)
    sm = scoped_session(sessionmaker(bind=engine))
    worker = CommentWorker(sm)

    while True:
        try:
            Base.metadata.create_all(engine)
            break
        except sqlalchemy.exc.OperationalError:
            logging.info("Database not available yet; retrying in 5s")
            time.sleep(5)

    logging.info("Setting up Reddit connection")

    reddit = praw.Reddit(client_id=config.client_id,
                         client_secret=config.client_secret,
                         username=config.username,
                         password=config.password,
                         user_agent=config.user_agent)

    stopwatch = Stopwatch()

    logging.info("Listening for inbox replies...")

    while not killhandler.killed:
        try:
            # Iterate over the latest comment replies in inbox
            reply_function = reddit.inbox.comment_replies

            if config.maintenance:
                logging.info("ENTERING MAINTENANCE MODE. NO OPERATIONS WILL BE PROCESSED.")
                for comment in praw.models.util.stream_generator(reply_function):
                    logging.info(f"New comment {comment}:")
                    if comment.new:
                        comment.reply_wrap(message.maintenance_org)
                        comment.mark_read()

            for comment in praw.models.util.stream_generator(reply_function):
                # Measure how long since we finished the last loop iteration
                duration = stopwatch.measure()
                logging.info(f"New comment {comment}:")
                logging.info(f" -- retrieved in {duration:5.2f}s")

                if comment.new:
                    # Process the comment
                    worker(comment)
                    # Mark the comment as processed
                    comment.mark_read()
                else:
                    logging.info(" -- skipping (already processed)")

                # Measure how long processing took
                duration = stopwatch.measure()
                logging.info(f" -- processed in {duration:5.2f}s")

                # Report the Reddit API call stats
                rem = int(reddit.auth.limits['remaining'])
                res = int(reddit.auth.limits['reset_timestamp'] - time.time())
                logging.info(f" -- API calls remaining: {rem:3d}, resetting in {res:3d}s")

                # Check for termination requests
                if killhandler.killed:
                    logging.info("Termination signal received - exiting")
                    break

                stopwatch.reset()
        except prawcore.exceptions.OAuthException as e_creds:
            traceback.print_exc()
            logging.error(e_creds)
            logging.critical("Invalid login credentials. Check your .env!")
            logging.critical("Fatal error. Cannot continue or fix the problem. Bailing out...")
            exit()
        except Exception as e:
            logging.error(e)
            traceback.print_exc()
            time.sleep(10)
def main():
    logging.info("Starting calculator...")

    killhandler = KillHandler()
    engine = create_engine()
    session_maker = sessionmaker(bind=engine)
    reddit = praw.Reddit(
        client_id=config.CLIENT_ID,
        client_secret=config.CLIENT_SECRET,
        username=config.USERNAME,
        password=config.PASSWORD,
        user_agent=config.USER_AGENT,
    )

    # We will test our reddit connection here
    if not utils.test_reddit_connection(reddit):
        return

    praw.models.Comment.edit_wrap = edit_wrap
    stopwatch = Stopwatch()

    logging.info("Retrieving top ...")
    # Query the current highest net worth
    sess = session_maker()
    try:
        top_networth = (sess.query(
            Investor.name,
            func.coalesce(Investor.balance + func.sum(Investment.amount),
                          Investor.balance).label("networth"),
        ).outerjoin(
            Investment,
            and_(Investor.name == Investment.name, Investment.done == 0)
        ).group_by(Investor.name).order_by(desc("networth")).limit(1).one())[1]
    except NoResultFound:
        top_networth = 0
    top_networth = max(top_networth, config.STARTING_BALANCE * 10)  # at least starting balance * 10
    sess.close()
    logging.info("Top networth: %d", top_networth)

    logging.info("Monitoring active investments...")
    while not killhandler.killed:
        sess = session_maker()
        then = int(time.time()) - config.INVESTMENT_DURATION
        investment = (sess.query(Investment).
                      filter(Investment.done == 0).
                      filter(Investment.time < then).
                      order_by(Investment.time.asc()).
                      first())

        if not investment:
            # Nothing matured yet; wait a bit before trying again
            time.sleep(50)
            continue

        duration = stopwatch.measure()
        investor = sess.query(Investor).filter(Investor.name == investment.name).one()
        net_worth = investor.networth(sess)

        logging.info("New mature investment: %s", investment.comment)
        logging.info(" -- by %s", investor.name)

        # Retrieve the post the user invested in (lazily, no API call)
        post = reddit.submission(investment.post)

        # Retrieve the post's current upvote count (triggers an API call)
        upvotes_now = post.ups
        investment.final_upvotes = upvotes_now
        investment.op = (post.author and investor.name == post.author.name)
        investment.net_worth = net_worth
        investment.top_networth = top_networth

        # Updating the investor's balance
        factor = formula.calculate(upvotes_now, investment.upvotes, net_worth, top_networth)
        if factor > 1 and post.author and investor.name == post.author.name:
            # Bonus for the OP
            factor *= formula.OP_BONUS
        amount = investment.amount
        balance = investor.balance

        new_balance = int(balance + (amount * factor))
        change = new_balance - balance
        profit = change - amount

        # Updating the investor's variables
        investor.completed += 1

        # Retrieve the bot's original response (lazily, no API call)
        if investment.response != "0":
            response = reddit.comment(id=investment.response)
        else:
            response = EmptyResponse()

        if new_balance < BALANCE_CAP:
            investor.balance = new_balance

            # Edit the bot's response (triggers an API call)
            if profit > 0:
                logging.info(" -- profited %s", profit)
            elif profit == 0:
                logging.info(" -- broke even")
            else:
                logging.info(" -- lost %s", profit)
            edited_response = message.modify_invest_return(
                investment.amount,
                investment.upvotes,
                upvotes_now,
                change,
                profit,
                investor.balance,
            )
            response.edit_wrap(edited_response)
        else:
            # This investment pushed the investor's balance over the cap
            investor.balance = BALANCE_CAP

            # Edit the bot's response (triggers an API call)
            logging.info(" -- profited %s but got capped", profit)
            response.edit_wrap(
                message.modify_invest_capped(
                    investment.amount,
                    investment.upvotes,
                    upvotes_now,
                    change,
                    profit,
                    investor.balance,
                ))

        investment.success = (profit > 0)
        investment.profit = profit
        investment.done = True

        sess.commit()

        if top_networth < investor.balance:
            top_networth = investor.balance
            logging.info("New Top networth: %d", top_networth)

        # Measure how long processing took
        duration = stopwatch.measure()
        logging.info(" -- processed in %.2fs", duration)

        # Report the Reddit API call stats
        rem = int(reddit.auth.limits["remaining"])
        res = int(reddit.auth.limits["reset_timestamp"] - time.time())
        logging.info(" -- API calls remaining: %s, resetting in %.2fs", rem, res)

        sess.close()
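# EmptyResponse, used above whenever an investment has no recorded bot reply
# (response == "0"), is not defined in this section. A minimal sketch of what it
# is assumed to provide, based on how it is used (a .body attribute and a no-op
# edit_wrap()); not the original implementation.
class EmptyResponse:
    body = ''

    def edit_wrap(self, body):
        # Nothing to edit; the original bot reply was never recorded.
        pass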
def main():
    """
    This is where the magic happens. This function listens to all new
    messages in the inbox and passes them to the worker object, which
    decides what to do with them.
    """
    logging.info("Starting main")

    if config.POST_TO_REDDIT:
        logging.info("Warning: Bot will actually post to Reddit!")

    logging.info("Setting up database")

    killhandler = KillHandler()
    engine = create_engine(config.DB, pool_recycle=60, pool_pre_ping=True)
    session_maker = scoped_session(sessionmaker(bind=engine))
    worker = CommentWorker(session_maker)

    while True:
        try:
            Base.metadata.create_all(engine)
            break
        except sqlalchemy.exc.OperationalError:
            logging.info("Database not available yet; retrying in 5s")
            time.sleep(5)

    logging.info("Setting up Reddit connection")

    reddit = praw.Reddit(client_id=config.CLIENT_ID,
                         client_secret=config.CLIENT_SECRET,
                         username=config.USERNAME,
                         password=config.PASSWORD,
                         user_agent=config.USER_AGENT)

    # We will test our reddit connection here
    if not utils.test_reddit_connection(reddit):
        exit()

    stopwatch = Stopwatch()

    logging.info("Listening for inbox replies...")

    while not killhandler.killed:
        # Iterate over the latest comment replies in inbox
        reply_function = reddit.inbox.comment_replies

        if config.MAINTENANCE:
            logging.info("ENTERING MAINTENANCE MODE. NO OPERATIONS WILL BE PROCESSED.")
            for comment in praw.models.util.stream_generator(reply_function):
                logging.info("New comment %s:", comment)
                if comment.new:
                    comment.reply_wrap(message.MAINTENANCE_ORG)
                    comment.mark_read()

        for comment in praw.models.util.stream_generator(reply_function):
            # Measure how long since we finished the last loop iteration
            duration = stopwatch.measure()
            logging.info("New comment %s (%s):", comment, type(comment))
            logging.info(" -- retrieved in %.2fs", duration)

            if comment.new:
                if comment.subreddit.display_name.lower() in config.SUBREDDITS:
                    # Process the comment only in allowed subreddits
                    worker(comment)
                else:
                    logging.info(" -- skipping (wrong subreddit)")

                # Mark the comment as processed
                comment.mark_read()
            else:
                logging.info(" -- skipping (already processed)")

            # Measure how long processing took
            duration = stopwatch.measure()
            logging.info(" -- processed in %.2fs", duration)

            # Report the Reddit API call stats
            rem = int(reddit.auth.limits['remaining'])
            res = int(reddit.auth.limits['reset_timestamp'] - time.time())
            logging.info(" -- API calls remaining: %.2f, resetting in %.2fs", rem, res)

            # Check for termination requests
            if killhandler.killed:
                logging.info("Termination signal received - exiting")
                break

            stopwatch.reset()
def main():
    logging.info("Starting calculator")

    killhandler = KillHandler()
    engine = create_engine(config.db, pool_recycle=60)
    sm = sessionmaker(bind=engine)
    reddit = praw.Reddit(client_id=config.client_id,
                         client_secret=config.client_secret,
                         username=config.username,
                         password=config.password,
                         user_agent=config.user_agent)

    praw.models.Comment.edit_wrap = edit_wrap
    stopwatch = Stopwatch()

    logging.info("Monitoring active investments...")
    while not killhandler.killed:
        try:
            sess = sm()
            then = int(time.time()) - config.investment_duration
            investment = sess.query(Investment).\
                filter(Investment.done == 0).\
                filter(Investment.time < then).\
                order_by(Investment.time.asc()).\
                first()

            if not investment:
                # Nothing matured yet; wait a bit before trying again
                time.sleep(5)
                continue

            duration = stopwatch.measure()
            investor = sess.query(Investor).filter(Investor.name == investment.name).one()

            logging.info(f"New mature investment: {investment.comment}")
            logging.info(f" -- by {investor.name}")

            # Retrieve the post the user invested in (lazily, no API call)
            post = reddit.submission(investment.post)

            # Retrieve the post's current upvote count (triggers an API call)
            upvotes_now = post.ups
            investment.final_upvotes = upvotes_now

            # Updating the investor's balance
            factor = formula.calculate(upvotes_now, investment.upvotes)
            amount = investment.amount
            balance = investor.balance

            new_balance = int(balance + (amount * factor))
            change = new_balance - balance
            profit = change - amount
            percent_str = f"{int((profit/amount)*100)}%"

            # Updating the investor's variables
            investor.completed += 1

            # Retrieve the bot's original response (lazily, no API call)
            if investment.response != "0":
                response = reddit.comment(id=investment.response)
            else:
                response = EmptyResponse()

            if new_balance < BalanceCap:
                investor.balance = new_balance

                # Edit the bot's response (triggers an API call)
                if profit > 0:
                    logging.info(f" -- profited {profit}")
                elif profit == 0:
                    logging.info(" -- broke even")
                else:
                    logging.info(f" -- lost {profit}")
                response.edit_wrap(message.modify_invest_return(investment.amount,
                                                                investment.upvotes,
                                                                upvotes_now,
                                                                change,
                                                                profit,
                                                                percent_str,
                                                                investor.balance))
            else:
                # This investment pushed the investor's balance over the cap
                investor.balance = BalanceCap

                # Edit the bot's response (triggers an API call)
                logging.info(f" -- profited {profit} but got capped")
                response.edit_wrap(message.modify_invest_capped(investment.amount,
                                                                investment.upvotes,
                                                                upvotes_now,
                                                                change,
                                                                profit,
                                                                percent_str,
                                                                investor.balance))

            investment.success = (profit > 0)
            investment.profit = profit
            investment.done = True

            sess.commit()

            # Measure how long processing took
            duration = stopwatch.measure()
            logging.info(f" -- processed in {duration:5.2f}s")

            # Report the Reddit API call stats
            rem = int(reddit.auth.limits['remaining'])
            res = int(reddit.auth.limits['reset_timestamp'] - time.time())
            logging.info(f" -- API calls remaining: {rem:3d}, resetting in {res:3d}s")
        except prawcore.exceptions.OAuthException as e_creds:
            traceback.print_exc()
            logging.error(e_creds)
            logging.critical("Invalid login credentials. Check your .env!")
            logging.critical("Fatal error. Cannot continue or fix the problem. Bailing out...")
            exit()
        except Exception as e:
            logging.error(e)
            traceback.print_exc()
            time.sleep(10)
        finally:
            sess.close()
def main():
    killhandler = KillHandler()
    engine = create_engine(config.db, pool_recycle=60)
    sm = scoped_session(sessionmaker(bind=engine))
    reddit = praw.Reddit(client_id=config.client_id,
                         client_secret=config.client_secret,
                         username=config.username,
                         password=config.password,
                         user_agent=config.user_agent)

    logging.info("Starting checking submissions...")
    stopwatch = Stopwatch()

    while not killhandler.killed:
        try:
            sess = sm()
            submission_time = int(time.time())
            for submission in reddit.subreddit('+'.join(config.subreddits)).\
                    stream.submissions(skip_existing=True):
                duration = stopwatch.measure()
                logging.info(f"New submission: {submission}")
                logging.info(f" -- retrieved in {duration:5.2f}s")

                # We don't need to post a sticky on stickied posts
                if submission.stickied:
                    logging.info(" -- skipping (stickied)")
                    continue

                # We are looking if the post was created in the past
                # so we won't double charge it
                if submission.created_utc < submission_time:
                    logging.info(" -- skipping (timeout)")
                    continue

                submission_time = int(submission.created_utc)
                logging.info(f" -- Submission timestamp: {time.asctime(time.gmtime(submission_time))}")

                bot_reply = 0
                delete_post = False

                # This is a bit of a controversial update, so make it easy
                # to switch between the two modes
                if config.submission_fee:
                    # If the poster doesn't have an account, delete the post;
                    # if they do, charge the submission fee
                    investor = sess.query(Investor).\
                        filter(Investor.name == submission.author.name).\
                        first()
                    if not investor:
                        bot_reply = submission.reply_wrap(message.no_account_post_org)
                        delete_post = True
                        logging.info(" -- Not a registered investor!")
                    elif investor.balance < 1000:
                        bot_reply = submission.reply_wrap(message.modify_pay_to_post(investor.balance))
                        delete_post = True
                        logging.info(" -- Not enough funds!")
                    else:
                        # We will make it 6%, with a minimum of 250
                        required_fee = int(investor.balance * 0.06)
                        if required_fee < 250:
                            required_fee = 250
                        new_balance = investor.balance - required_fee
                        investor.balance = new_balance
                        bot_reply = submission.reply_wrap(message.modify_invest_place_here(required_fee))
                        sess.commit()
                else:
                    # Post a comment to let people know where to invest
                    bot_reply = submission.reply_wrap(message.invest_place_here_no_fee)

                # Sticky the comment
                if config.is_moderator:
                    bot_reply.mod.distinguish(how='yes', sticky=True)

                if delete_post:
                    logging.info(" -- Deleting the post...")
                    # Should we hide or just delete the post?
                    submission.mod.remove()

                # Measure how long processing took
                duration = stopwatch.measure()
                logging.info(f" -- processed in {duration:5.2f}s")

                if killhandler.killed:
                    logging.info("Termination signal received - exiting")
                    break
        except prawcore.exceptions.OAuthException as e_creds:
            traceback.print_exc()
            logging.error(e_creds)
            logging.critical("Invalid login credentials. Check your .env!")
            logging.critical("Fatal error. Cannot continue or fix the problem. Bailing out...")
            exit()
        except Exception as e:
            logging.error(e)
            traceback.print_exc()
            time.sleep(10)
def main():
    logging.info("Starting buyable...")

    engine = create_engine()
    session_maker = sessionmaker(bind=engine, autoflush=False)
    reddit = praw.Reddit(
        client_id=config.CLIENT_ID,
        client_secret=config.CLIENT_SECRET,
        username=config.USERNAME,
        password=config.PASSWORD,
        user_agent=config.USER_AGENT,
    )

    # We will test our reddit connection here
    if not test_reddit_connection(reddit):
        exit()

    praw.models.Comment.edit_wrap = edit_wrap
    stopwatch = Stopwatch()

    logging.info("Fetching active buyable...")
    sess = session_maker()
    then = int(time.time()) - config.INVESTMENT_DURATION
    buyables = (
        sess.query(Buyable)
        .filter(Buyable.done == 0)
        .filter(Buyable.time < then)
        .order_by(Buyable.time.asc())
    )

    for buyable in buyables:
        duration = stopwatch.measure()

        logging.info("New mature investment: %s", buyable.post)
        logging.info(" -- by %s", buyable.name)

        # Retrieve the post
        submission = reddit.submission(id=buyable.post)
        buyable.final_upvotes = submission.ups

        if submission.removed or not submission.author:
            logging.info(" -- deleted or removed")
            # buyable.done = True
            sess.delete(buyable)
            sess.commit()
            duration = stopwatch.measure()
            logging.info(" -- processed in %.2fs", duration)
            continue

        # Valid OC only if not deleted/removed
        if submission.stickied or submission.distinguished:
            logging.info(" -- stickied or distinguished")
            # buyable.done = True
            sess.delete(buyable)
            sess.commit()
            duration = stopwatch.measure()
            logging.info(" -- processed in %.2fs", duration)
            continue

        buyable.oc = submission.link_flair_text == "OC"
        if not buyable.oc:
            logging.info(" -- not OC")
            buyable.done = True
            sess.commit()
            duration = stopwatch.measure()
            logging.info(" -- processed in %.2fs", duration)
            continue

        # Retrieve OP
        investor = sess.query(Investor).filter(Investor.name == buyable.name).first()
        if not investor:
            logging.info(" -- OP not investor")
            buyable.done = True
            sess.commit()
            duration = stopwatch.measure()
            logging.info(" -- processed in %.2fs", duration)
            continue
        balance = investor.balance

        # Retrieve the post investments
        investments = (
            sess.query(Investment)
            .filter(Investment.post == buyable.post)
            .filter(Investment.name != buyable.name)
        )

        profit = 0
        for investment in investments:
            profit += investment.amount / OC_BONUS
        net_worth = investor.networth(sess)
        if net_worth > 0:
            profit = int(min(profit, net_worth))

        # Updating the investor's balance
        new_balance = int(balance + profit)

        # Retrieve the bot's original response (lazily, no API call)
        if buyable.response != "0":
            response = reddit.comment(id=buyable.response)
        else:
            response = EmptyResponse()

        if new_balance < BALANCE_CAP:
            investor.balance = new_balance

            # Edit the bot's response (triggers an API call)
            logging.info(" -- profited %d", profit)
            response.edit_wrap(response.body + message.modify_oc_return(profit))
        else:
            # This investment pushed the investor's balance over the cap
            investor.balance = BALANCE_CAP

            # Edit the bot's response (triggers an API call)
            logging.info(" -- profited %d but got capped", profit)
            response.edit_wrap(response.body + message.modify_oc_capped())

        buyable.profit = profit
        buyable.done = True

        sess.commit()

        # Measure how long processing took
        duration = stopwatch.measure()
        logging.info(" -- processed in %.2fs", duration)

        # Report the Reddit API call stats
        rem = int(reddit.auth.limits["remaining"])
        res = int(reddit.auth.limits["reset_timestamp"] - time.time())
        logging.info(" -- API calls remaining: %s, resetting in %.2fs", rem, res)

    sess.close()
def main():
    logging.info("Starting calculator...")
    logging.info("Sleeping for 8 seconds. Waiting for the database to turn on...")
    time.sleep(8)

    killhandler = KillHandler()
    engine = create_engine(config.DB, pool_recycle=60, pool_pre_ping=True)
    session_maker = sessionmaker(bind=engine)
    reddit = praw.Reddit(client_id=config.CLIENT_ID,
                         client_secret=config.CLIENT_SECRET,
                         username=config.USERNAME,
                         password=config.PASSWORD,
                         user_agent=config.USER_AGENT)

    # We will test our reddit connection here
    if not utils.test_reddit_connection(reddit):
        exit()

    praw.models.Comment.edit_wrap = edit_wrap
    stopwatch = Stopwatch()

    logging.info("Monitoring active investments...")
    while not killhandler.killed:
        sess = session_maker()
        then = int(time.time()) - config.INVESTMENT_DURATION
        investment = sess.query(Investment).\
            filter(Investment.done == 0).\
            filter(Investment.time < then).\
            order_by(Investment.time.asc()).\
            first()

        if not investment:
            # Nothing matured yet; wait a bit before trying again
            time.sleep(5)
            continue

        duration = stopwatch.measure()
        investor = sess.query(Investor).filter(Investor.name == investment.name).one()
        net_worth = sess.\
            query(func.sum(Investment.amount)).\
            filter(and_(Investment.name == investor.name, Investment.done == 0)).\
            scalar()\
            + investor.balance

        logging.info("New mature investment: %s", investment.comment)
        logging.info(" -- by %s", investor.name)

        # Retrieve the post the user invested in (lazily, no API call)
        post = reddit.submission(investment.post)

        # Retrieve the post's current upvote count (triggers an API call)
        upvotes_now = post.ups
        investment.final_upvotes = upvotes_now

        # Updating the investor's balance
        factor = formula.calculate(upvotes_now, investment.upvotes, net_worth)
        amount = investment.amount
        balance = investor.balance

        new_balance = int(balance + (amount * factor))
        change = new_balance - balance
        profit = change - amount
        percent_str = f"{int((profit/amount)*100)}%"

        # Updating the investor's variables
        investor.completed += 1

        # Retrieve the bot's original response (lazily, no API call)
        if investment.response != "0":
            response = reddit.comment(id=investment.response)
        else:
            response = EmptyResponse()

        firm_profit = 0
        if new_balance < BALANCE_CAP:
            # If the investor is in a firm and profits,
            # a share of the profit (firm.tax percent) goes to the firm
            firm_name = ''
            if investor.firm != 0 and profit >= 0:
                firm = sess.query(Firm).\
                    filter(Firm.id == investor.firm).\
                    first()
                firm_name = firm.name
                user_profit = int(profit * ((100 - firm.tax) / 100))
                investor.balance += user_profit + amount
                firm_profit = int(profit * (firm.tax / 100))
                firm.balance += firm_profit
            else:
                investor.balance = new_balance

            # Edit the bot's response (triggers an API call)
            if profit > 0:
                logging.info(" -- profited %s", profit)
            elif profit == 0:
                logging.info(" -- broke even")
            else:
                logging.info(" -- lost %s", profit)
            edited_response = message.modify_invest_return(
                investment.amount, investment.upvotes, upvotes_now,
                change, profit, percent_str, investor.balance)
            if investor.firm != 0:
                edited_response += message.modify_firm_tax(firm_profit, firm_name)
            response.edit_wrap(edited_response)
        else:
            # This investment pushed the investor's balance over the cap
            investor.balance = BALANCE_CAP

            # Edit the bot's response (triggers an API call)
            logging.info(" -- profited %s but got capped", profit)
            response.edit_wrap(
                message.modify_invest_capped(investment.amount, investment.upvotes,
                                             upvotes_now, change, profit,
                                             percent_str, investor.balance))

        investment.success = (profit > 0)
        investment.profit = profit
        investment.done = True

        sess.commit()

        # Measure how long processing took
        duration = stopwatch.measure()
        logging.info(" -- processed in %.2fs", duration)

        # Report the Reddit API call stats
        rem = int(reddit.auth.limits['remaining'])
        res = int(reddit.auth.limits['reset_timestamp'] - time.time())
        logging.info(" -- API calls remaining: %s, resetting in %.2fs", rem, res)

        sess.close()
def main():
    logging.info("Starting calculator")

    killhandler = KillHandler()
    engine = create_engine(config.db, pool_recycle=60)
    sm = sessionmaker(bind=engine)
    reddit = praw.Reddit(client_id=config.client_id,
                         client_secret=config.client_secret,
                         username=config.username,
                         password=config.password,
                         user_agent=config.user_agent)

    praw.models.Comment.edit_wrap = edit_wrap
    stopwatch = Stopwatch()

    logging.info("Monitoring active investments...")
    while not killhandler.killed:
        try:
            sess = sm()
            then = int(time.time()) - config.investment_duration
            investment = sess.query(Investment).\
                filter(Investment.done == 0).\
                filter(Investment.time < then).\
                order_by(Investment.time.asc()).\
                first()

            if not investment:
                # Nothing matured yet; wait a bit before trying again
                time.sleep(5)
                continue

            duration = stopwatch.measure()
            investor = sess.query(Investor).filter(Investor.name == investment.name).one()

            logging.info(f"New mature investment: {investment.comment}")
            logging.info(f" -- by {investor.name}")

            if investment.response != "0":
                response = reddit.comment(id=investment.response)
            else:
                response = EmptyResponse()

            post = reddit.submission(investment.post)
            upvotes_now = post.ups  # <--- triggers a Reddit API call

            # Updating the investor's balance
            factor = formula.calculate(upvotes_now, investment.upvotes)
            amount = investment.amount
            balance = investor.balance

            new_balance = int(balance + (amount * factor))
            change = new_balance - balance
            profit = change - amount
            profit_str = f"{int((profit/amount)*100)}%"

            # Updating the investor's variables
            investor.completed += 1
            investor.balance = new_balance

            # Editing the comment as a confirmation
            text = response.body  # <--- triggers a Reddit API call
            if profit > 0:
                logging.info(f" -- profited {profit}")
                response.edit_wrap(message.modify_invest_return(text, upvotes_now, change, profit_str, new_balance))
            elif profit == 0:
                logging.info(" -- broke even")
                response.edit_wrap(message.modify_invest_break_even(text, upvotes_now, change, profit_str, new_balance))
            else:
                lost_memes = int(amount - change)
                logging.info(f" -- lost {profit}")
                response.edit_wrap(message.modify_invest_lose(text, upvotes_now, lost_memes, profit_str, new_balance))

            investment.success = (profit > 0)
            investment.profit = profit
            investment.done = True

            sess.commit()

            # Measure how long processing took
            duration = stopwatch.measure()
            logging.info(f" -- processed in {duration:5.2f}s")

            # Report the Reddit API call stats
            rem = int(reddit.auth.limits['remaining'])
            res = int(reddit.auth.limits['reset_timestamp'] - time.time())
            logging.info(f" -- API calls remaining: {rem:3d}, resetting in {res:3d}s")
        except Exception as e:
            logging.error(e)
            traceback.print_exc()
            time.sleep(10)
        finally:
            sess.close()
def main() -> None:
    """
    This is the main function that listens to new submissions and
    then posts the ATTENTION sticky comment.
    """
    logging.info("Starting submitter...")

    killhandler = KillHandler()
    engine = create_engine()
    sess_maker = scoped_session(sessionmaker(bind=engine))
    reddit = praw.Reddit(
        client_id=config.CLIENT_ID,
        client_secret=config.CLIENT_SECRET,
        username=config.USERNAME,
        password=config.PASSWORD,
        user_agent=config.USER_AGENT,
    )

    logging.info("Setting up database")
    conn = sqlite3.connect(config.POST_DBFILE)
    conn.execute("CREATE TABLE IF NOT EXISTS posts (id)")
    conn.commit()

    logging.info("Setting up Telegram connection")
    tbot = telegram.Bot(token=config.TG_TOKEN)
    try:
        tbot.get_me()
    except telegram.error.TelegramError as e_teleg:
        logging.error(e_teleg)
        logging.critical("Telegram error!")
        return

    # We will test our reddit connection here
    if not test_reddit_connection(reddit):
        return

    logging.info("Starting checking submissions...")
    stopwatch = Stopwatch()
    sess = sess_maker()

    subreddits = reddit.subreddit("+".join(config.SUBREDDITS))
    for submission in subreddits.stream.submissions(pause_after=6):
        if killhandler.killed:
            logging.info("Termination signal received - exiting")
            break
        if not submission:
            # The stream yields None because of pause_after;
            # this lets the kill check above run regularly
            continue

        duration = stopwatch.measure()
        logging.info("New submission: %s", submission)
        logging.info(" -- retrieved in %ss", duration)

        c = conn.cursor()
        c.execute("SELECT * FROM posts WHERE id=?", (submission.id,))
        if c.fetchone():
            logging.info("Already processed")
            continue

        post_telegram(conn, submission, tbot)
        bot_reply = post_reply(submission)

        # Measure how long processing took
        duration = stopwatch.measure()
        logging.info(" -- processed in %.2fs", duration)

        # Create Buyable
        if bot_reply:
            sess.add(Buyable(post=submission.id,
                             name=submission.author.name,
                             response=bot_reply.id))
            sess.commit()