def fill_database(d_df, verbose=False):
    """Insert the prepared Irigo dataframes into the SQLite database.

    Parameters
    ----------
    d_df : dict[str, pandas.DataFrame]
        Dataframes keyed by table name ('trajet', 'etape', 'arret',
        'vehicule', 'ligne', ...). Mutated in place: index columns are
        added and already-known rows are dropped.
    verbose : bool
        When True, print progress information to stdout.
    """
    if verbose:
        print("--> Filling in database")
    engine = utils.create_engine(flavor='sqlite')

    # Add database IDs to the dataframes; each 'etape' row reuses the
    # id of the 'trajet' row at the same positional index.
    d_df['trajet'] = add_index(d_df['trajet'], 'trajet', 'id_trajet', engine)
    d_df['etape'] = add_index(d_df['etape'], 'etape', 'id_etape', engine)
    d_df['etape']['id_trajet'] = d_df['trajet']['id_trajet']

    def filter_id(df, col, dcol):
        """Drop from `df` the rows whose `col` value is already in the DB.

        `dcol` is the mapped ORM column to compare against (e.g.
        Arret.id_arret).
        """
        session = Session()
        try:
            # Fix: the session used to stay open if the query raised.
            rows = session.query(dcol).filter(dcol.in_(df[col])).all()
        finally:
            session.close()
        known = [row[0] for row in rows]
        return df.loc[~df[col].isin(known)]

    # Drop stops, vehicles and lines already present in the database
    # (previously bare-string statements posing as comments).
    d_df['arret'] = filter_id(d_df['arret'], 'id_arret', Arret.id_arret)
    d_df['vehicule'] = filter_id(d_df['vehicule'], 'id_vehicule',
                                 Vehicule.id_vehicule)
    d_df['ligne'] = filter_id(d_df['ligne'], 'id_ligne', Ligne.id_ligne)

    # Export the dataframes. Fix: the connection used to leak if
    # to_sql raised; the context manager always closes it.
    with engine.connect() as connection:
        for tablename, df in d_df.items():
            if verbose:
                print(tablename)
            df.to_sql(tablename, connection, if_exists='append', index=False)
def reset_all():
    """Wipe all investors, investments and posts, then VACUUM the DB.

    Used to prepare the database for a new season. Each table is
    bulk-deleted in its own session, with the row count and duration
    logged.
    """
    logging.info("Preparing for new season...")
    engine = create_engine()
    session_maker = sessionmaker(bind=engine)
    stopwatch = Stopwatch()

    def _purge(model, label):
        # Bulk-delete every row of `model`; log the count and duration.
        # Factored out of four copy-pasted stanzas.
        logging.info("Reset %s...", label)
        stopwatch.measure()
        sess = session_maker()
        try:
            # synchronize_session=False: no in-session objects to sync,
            # and it makes the bulk delete cheaper.
            count = sess.query(model).delete(synchronize_session=False)
            sess.commit()
            duration = stopwatch.measure()
            logging.info("Removed %d %s -- processed in %.2fs",
                         count, label, duration)
        finally:
            # Fix: the session used to leak if the delete/commit raised.
            sess.close()

    _purge(Investor, "investors")
    _purge(Investment, "investments")
    _purge(Buyable, "posts")

    logging.info("Clean up...")
    stopwatch.measure()
    sess = session_maker()
    try:
        # Reclaim the disk space freed by the bulk deletes.
        sess.execute("VACUUM")
        sess.commit()
        logging.info("-- processed in %.2fs", stopwatch.measure())
    finally:
        sess.close()
def main():
    """Build and publish the subreddit leaderboards.

    Queries the top investors and the top OC posters from the database,
    then pushes the formatted rankings to the subreddit sidebar, the
    redesign widgets and the wiki pages (skipped when config.TEST is set).
    """
    logging.info("Starting leaderboard...")
    engine = create_engine()
    session_maker = sessionmaker(bind=engine)
    reddit = praw.Reddit(
        client_id=config.CLIENT_ID,
        client_secret=config.CLIENT_SECRET,
        username=config.USERNAME,
        password=config.PASSWORD,
        user_agent=config.USER_AGENT,
    )
    # We will test our reddit connection here
    if not config.TEST and not test_reddit_connection(reddit):
        exit()
    sess = session_maker()

    # query: top 500 investors by net worth = balance + sum of amounts
    # still locked in active (done == 0) investments.
    top_users = (sess.query(
        Investor.name,
        func.coalesce(Investor.balance + func.sum(Investment.amount),
                      Investor.balance).label("networth"),
    ).outerjoin(Investment,
                and_(Investor.name == Investment.name,
                     Investment.done == 0)).group_by(Investor.name).order_by(
                         desc("networth")).limit(500).all())

    # Top 100 posters, ranked by OC-post count, then OC upvotes.
    top_poster = sess.execute(
        """
        SELECT name, SUM(oc) AS coc,
        SUM(CASE OC WHEN 1 THEN final_upvotes ELSE 0 END) AS soc,
        count(*) as ct, sum(final_upvotes) as st
        FROM "Buyables"
        WHERE done = 1
        GROUP BY name
        ORDER BY coc DESC, soc DESC
        LIMIT :limit""",
        {
            "limit": 100
        },
    ).fetchall()

    # Sidebar (old-style markdown description, in Italian).
    # NOTE(review): `localtime` is not defined in this function --
    # presumably a module-level timestamp string; verify.
    sidebar_text = f"""
/r/BancaDelMeme è un posto dove si puoi comprare, vendere, condividere, fare e investire sui meme liberamente.

*****

**Migliori utenti:**

{format_investor(top_users, 10)}

[Classifica completa](/r/BancaDelMeme/wiki/leaderboardbig)

**Migliori autori di OC:**

{format_posters_small(top_poster, 3)}

[Classifica completa](/r/BancaDelMeme/wiki/leaderboardocbig)

Ultimo aggiornamento: {localtime}

###***[Inviaci dei suggerimenti!](https://www.reddit.com/message/compose?to=%2Fr%2FBancaDelMeme)***

***

**Subreddit ai quali potresti essere interessato:**

/r/italy

***

***
"""

    # redesign
    if not config.TEST:
        for subreddit in config.SUBREDDITS:
            # poster: "top10" text-area widget gets the top-10 investors
            for widget in reddit.subreddit(subreddit).widgets.sidebar:
                if isinstance(widget, praw.models.TextArea):
                    if widget.shortName.lower().replace(" ", "") == "top10":
                        widget.mod.update(text=format_investor(top_users, 10))
                        logging.info(" -- Updated redesign top10: %s",
                                     subreddit)
                        break
            # investor: "migliori autori" widget gets the top OC authors
            for widget in reddit.subreddit(subreddit).widgets.sidebar:
                if isinstance(widget, praw.models.TextArea):
                    if widget.shortName.lower() == "migliori autori":
                        widget.mod.update(
                            text=format_posters_small(top_poster, 4))
                        logging.info(
                            " -- Updated redesign migliori autori: %s",
                            subreddit)
                        break

    # Old and wiki
    logging.info(" -- Updating sidebar text to:")
    logging.info(sidebar_text.replace("\n", "\\n"))
    if not config.TEST:
        for subreddit in config.SUBREDDITS:
            sub = reddit.subreddit(subreddit)
            # Sidebar update
            sub.mod.update(description=sidebar_text)
            logging.info("Updated sidebar: %s", subreddit)
            # wiki full poster
            wikipage = sub.wiki["leaderboardocbig"]
            wikipage.edit(format_posters_full(top_poster, 100))
            logging.info("Updated wiki poster: %s", subreddit)
            # wiki full investor
            wikipage = sub.wiki["leaderboardbig"]
            wikipage.edit(format_investor(top_users, 500))
            logging.info("Updated wiki investor: %s", subreddit)

    # Report the Reddit API call stats
    rem = int(reddit.auth.limits["remaining"])
    res = int(reddit.auth.limits["reset_timestamp"] - time.time())
    logging.info(" -- API calls remaining: %s, resetting in %.2fs", rem, res)
    sess.close()
Clean data model for the Irigo data, together with the functions used to
import the raw data into the clean data model.
"""
from sqlalchemy import (Column, Integer, BigInteger, Float, MetaData, Table,
                        ForeignKey, select, String, DateTime, func)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship

import pandas as pd
import datetime as dt
import json

# Local imports
import utils

# Module-level database plumbing shared by the whole module:
# a SQLite engine and a session factory bound to it.
engine = utils.create_engine(flavor='sqlite')
Session = sessionmaker(bind=engine)
Base = declarative_base()


class Arret(Base):
    """ORM model for a stop ("arrêt") of the Irigo transport network."""
    __tablename__ = 'arret'

    # Surrogate primary key.
    id_arret = Column(BigInteger, primary_key=True, autoincrement=True)
    # Human-readable stop name.
    nom_arret = Column(String(32))
    # Short mnemonic code for the stop -- presumably; TODO confirm
    # against the raw Irigo data.
    mne_arret = Column(String(32))

    def __repr__(self):
        return "nom_arret='{}'".format(self.nom_arret)
def main():
    """Calculator daemon: settle matured investments.

    Polls the database for investments older than INVESTMENT_DURATION,
    re-reads the post's upvote count from reddit, computes the payout via
    `formula.calculate`, updates the investor's balance (capped at
    BALANCE_CAP) and edits the bot's original reply with the result.
    Runs until the kill signal is received.
    """
    logging.info("Starting calculator...")
    killhandler = KillHandler()
    engine = create_engine()
    session_maker = sessionmaker(bind=engine)
    reddit = praw.Reddit(
        client_id=config.CLIENT_ID,
        client_secret=config.CLIENT_SECRET,
        username=config.USERNAME,
        password=config.PASSWORD,
        user_agent=config.USER_AGENT,
    )
    # We will test our reddit connection here
    if not utils.test_reddit_connection(reddit):
        return ()
    praw.models.Comment.edit_wrap = edit_wrap
    stopwatch = Stopwatch()

    logging.info("Retrieving top ...")
    # query: highest net worth (balance + active investments) of any
    # single investor; used as a scaling input to the payout formula.
    sess = session_maker()
    try:
        top_networth = (sess.query(
            Investor.name,
            func.coalesce(Investor.balance + func.sum(Investment.amount),
                          Investor.balance).label("networth"),
        ).outerjoin(
            Investment,
            and_(Investor.name == Investment.name,
                 Investment.done == 0)).group_by(Investor.name).order_by(
                     desc("networth")).limit(1).one())[1]
    except NoResultFound:
        # Empty database: no investors yet.
        top_networth = 0
    # at least starting balance * 10
    top_networth = max(top_networth, config.STARTING_BALANCE * 10)
    sess.close()
    logging.info("Top networth: %d", top_networth)

    logging.info("Monitoring active investments...")
    while not killhandler.killed:
        sess = session_maker()
        # Only investments older than INVESTMENT_DURATION have matured.
        then = int(time.time()) - config.INVESTMENT_DURATION
        investment = (sess.query(Investment).filter(
            Investment.done == 0).filter(Investment.time < then).order_by(
                Investment.time.asc()).first())
        if not investment:
            # Nothing matured yet; wait a bit before trying again
            time.sleep(50)
            continue
        duration = stopwatch.measure()
        investor = sess.query(Investor).filter(
            Investor.name == investment.name).one()
        net_worth = investor.networth(sess)
        logging.info("New mature investment: %s", investment.comment)
        logging.info(" -- by %s", investor.name)

        # Retrieve the post the user invested in (lazily, no API call)
        post = reddit.submission(investment.post)
        # Retrieve the post's current upvote count (triggers an API call)
        upvotes_now = post.ups
        investment.final_upvotes = upvotes_now
        investment.op = (post.author and investor.name == post.author.name)
        investment.net_worth = net_worth
        investment.top_networth = top_networth

        # Updating the investor's balance
        factor = formula.calculate(upvotes_now, investment.upvotes,
                                   net_worth, top_networth)
        if factor > 1 and post.author and investor.name == post.author.name:
            # OP bonus: boost winning investments made by the post author.
            factor *= formula.OP_BONUS
        amount = investment.amount
        balance = investor.balance
        new_balance = int(balance + (amount * factor))
        change = new_balance - balance
        profit = change - amount

        # Updating the investor's variables
        investor.completed += 1

        # Retrieve the bot's original response (lazily, no API call)
        # "0" marks an investment that has no bot reply to edit.
        if investment.response != "0":
            response = reddit.comment(id=investment.response)
        else:
            response = EmptyResponse()

        if new_balance < BALANCE_CAP:
            # If investor is in a firm and he profits,
            # 15% goes to the firm
            investor.balance = new_balance
            # Edit the bot's response (triggers an API call)
            if profit > 0:
                logging.info(" -- profited %s", profit)
            elif profit == 0:
                logging.info(" -- broke even")
            else:
                logging.info(" -- lost %s", profit)
            edited_response = message.modify_invest_return(
                investment.amount,
                investment.upvotes,
                upvotes_now,
                change,
                profit,
                investor.balance,
            )
            response.edit_wrap(edited_response)
        else:
            # This investment pushed the investor's balance over the cap
            investor.balance = BALANCE_CAP
            # Edit the bot's response (triggers an API call)
            logging.info(" -- profited %s but got capped", profit)
            response.edit_wrap(
                message.modify_invest_capped(
                    investment.amount,
                    investment.upvotes,
                    upvotes_now,
                    change,
                    profit,
                    investor.balance,
                ))

        investment.success = profit > 0
        investment.profit = profit
        investment.done = True
        sess.commit()

        # Track the running maximum net worth for the payout formula.
        if top_networth < investor.balance:
            top_networth = investor.balance
            logging.info("New Top networth: %d", top_networth)

        # Measure how long processing took
        duration = stopwatch.measure()
        logging.info(" -- processed in %.2fs", duration)

        # Report the Reddit API call stats
        rem = int(reddit.auth.limits["remaining"])
        res = int(reddit.auth.limits["reset_timestamp"] - time.time())
        logging.info(" -- API calls remaining: %s, resetting in %.2fs",
                     rem, res)
        sess.close()
def _commit_and_log(sess, stopwatch):
    """Commit pending session changes and log the per-item duration."""
    sess.commit()
    logging.info(" -- processed in %.2fs", stopwatch.measure())


def main():
    """Buyable daemon: pay OC authors for matured posts.

    Walks all matured (older than INVESTMENT_DURATION, not yet done)
    Buyable rows, validates the submission (live, not stickied or
    distinguished, flaired "OC", author is an investor), then credits the
    author a share of the investments made on the post (capped at
    BALANCE_CAP) and edits the bot's reply.
    """
    logging.info("Starting buyable...")
    engine = create_engine()
    session_maker = sessionmaker(bind=engine, autoflush=False)
    reddit = praw.Reddit(
        client_id=config.CLIENT_ID,
        client_secret=config.CLIENT_SECRET,
        username=config.USERNAME,
        password=config.PASSWORD,
        user_agent=config.USER_AGENT,
    )
    # We will test our reddit connection here
    if not test_reddit_connection(reddit):
        exit()
    praw.models.Comment.edit_wrap = edit_wrap
    stopwatch = Stopwatch()

    logging.info("Fetching active buyable...")
    sess = session_maker()
    then = int(time.time()) - config.INVESTMENT_DURATION
    buyables = (
        sess.query(Buyable)
        .filter(Buyable.done == 0)
        .filter(Buyable.time < then)
        .order_by(Buyable.time.asc())
    )
    for buyable in buyables:
        # Reset the per-item timer (the return value is not needed here).
        stopwatch.measure()
        logging.info("New mature investment: %s", buyable.post)
        logging.info(" -- by %s", buyable.name)

        # Retrieve the post (triggers an API call for .ups below).
        submission = reddit.submission(id=buyable.post)
        buyable.final_upvotes = submission.ups

        # Deleted/removed posts are dropped outright (not marked done).
        if submission.removed or not submission.author:
            logging.info(" -- deleted or removed")
            sess.delete(buyable)
            _commit_and_log(sess, stopwatch)
            continue
        # valid OC only if not deleted/removed
        if submission.stickied or submission.distinguished:
            logging.info(" -- stickied or distinguished")
            sess.delete(buyable)
            _commit_and_log(sess, stopwatch)
            continue

        buyable.oc = submission.link_flair_text == "OC"
        if not buyable.oc:
            logging.info(" -- not OC")
            buyable.done = True
            _commit_and_log(sess, stopwatch)
            continue

        # Retrieve OP; only registered investors get paid.
        investor = sess.query(Investor).filter(
            Investor.name == buyable.name).first()
        if not investor:
            logging.info(" -- OP not investor")
            buyable.done = True
            _commit_and_log(sess, stopwatch)
            continue
        balance = investor.balance

        # Retrieve the post investments made by other users; OP earns a
        # 1/OC_BONUS share of each, capped at OP's own net worth.
        investments = (
            sess.query(Investment)
            .filter(Investment.post == buyable.post)
            .filter(Investment.name != buyable.name)
        )
        profit = sum(investment.amount / OC_BONUS
                     for investment in investments)
        net_worth = investor.networth(sess)
        if net_worth > 0:
            profit = int(min(profit, net_worth))

        # Updating the investor's balance
        new_balance = int(balance + profit)

        # Retrieve the bot's original response (lazily, no API call);
        # "0" marks a buyable with no bot reply to edit.
        if buyable.response != "0":
            response = reddit.comment(id=buyable.response)
        else:
            response = EmptyResponse()

        if new_balance < BALANCE_CAP:
            investor.balance = new_balance
            # Edit the bot's response (triggers an API call)
            logging.info(" -- profited %d", profit)
            response.edit_wrap(response.body + message.modify_oc_return(profit))
        else:
            # This investment pushed the investor's balance over the cap
            investor.balance = BALANCE_CAP
            # Edit the bot's response (triggers an API call)
            logging.info(" -- profited %d but got capped", profit)
            response.edit_wrap(response.body + message.modify_oc_capped())

        buyable.profit = profit
        buyable.done = True
        _commit_and_log(sess, stopwatch)

    # Report the Reddit API call stats
    rem = int(reddit.auth.limits["remaining"])
    res = int(reddit.auth.limits["reset_timestamp"] - time.time())
    logging.info(" -- API calls remaining: %s, resetting in %.2fs", rem, res)
    sess.close()
def main() -> None:
    """
    This is the main function that listens to new submissions and then
    posts the ATTENTION sticky comment.

    Also mirrors each new submission to Telegram and records a Buyable
    row so the buyable daemon can later pay the author for OC.
    """
    logging.info("Starting submitter...")
    killhandler = KillHandler()
    engine = create_engine()
    sess_maker = scoped_session(sessionmaker(bind=engine))
    reddit = praw.Reddit(
        client_id=config.CLIENT_ID,
        client_secret=config.CLIENT_SECRET,
        username=config.USERNAME,
        password=config.PASSWORD,
        user_agent=config.USER_AGENT,
    )

    logging.info("Setting up database")
    # Local sqlite store used only to de-duplicate already-seen posts.
    conn = sqlite3.connect(config.POST_DBFILE)
    try:
        conn.execute("CREATE TABLE IF NOT EXISTS posts (id)")
        conn.commit()

        logging.info("Setting up Telegram connection")
        tbot = telegram.Bot(token=config.TG_TOKEN)
        try:
            tbot.get_me()
        except telegram.error.TelegramError as e_teleg:
            logging.error(e_teleg)
            logging.critical("Telegram error!")
            return

        # We will test our reddit connection here
        if not test_reddit_connection(reddit):
            return

        logging.info("Starting checking submissions...")
        stopwatch = Stopwatch()
        sess = sess_maker()
        try:
            subreddits = reddit.subreddit("+".join(config.SUBREDDITS))
            for submission in subreddits.stream.submissions(pause_after=6):
                if killhandler.killed:
                    logging.info("Termination signal received - exiting")
                    break
                if not submission:
                    # pause_after makes the stream yield None regularly so
                    # the kill flag above can be checked (e.g. on Ctrl+C).
                    continue
                duration = stopwatch.measure()
                logging.info("New submission: %s", submission)
                logging.info(" -- retrieved in %ss", duration)

                # Skip submissions we have already handled.
                cursor = conn.cursor()
                cursor.execute("SELECT * FROM posts WHERE id=?",
                               (submission.id,))
                if cursor.fetchone():
                    logging.info("Already processed")
                    continue

                post_telegram(conn, submission, tbot)
                bot_reply = post_reply(submission)

                # Measure how long processing took
                duration = stopwatch.measure()
                logging.info(" -- processed in %.2fs", duration)

                # Create Buyable so the buyable daemon can pay out later
                if bot_reply:
                    sess.add(
                        Buyable(
                            post=submission.id,
                            name=submission.author.name,
                            response=bot_reply.id,
                        )
                    )
                    sess.commit()
        finally:
            # Fix: the ORM session used to leak on break/exception.
            sess.close()
    finally:
        # Fix: the sqlite connection used to leak on every exit path
        # (early returns, loop break, exceptions).
        conn.close()