Example #1
def main():
    # coin_name_array = util.read_subs_from_file(general["subreddit_file"])
    # coin_name_array = util.read_subs_from_file(general["binance_file"])
    coin_name_array = util.read_subs_from_file(general["poloniex_file"])
    auth = util.get_postgres_auth()
    db = DatabaseConnection(**auth)
    # all_subreddits = db.get_all_subreddits()
    # The subreddit is the last entry in each row of the coin array.
    all_subreddits = [coin[-1] for coin in coin_name_array]
    # Compare growth over the last 12 hours.
    start_time = datetime.datetime.utcnow() - datetime.timedelta(hours=12)
    # end_time = datetime.datetime.utcnow() - datetime.timedelta(hours=24)
    end_time = datetime.datetime.utcnow()
    # growths = percentage_price_growths(db, all_subreddits, start_time, end_time)
    growths = average_growth(db, all_subreddits, start_time, end_time)
    print(growths)
    # covariance(db, all_subreddits)
    db.close()
Example #2
def main():
    parser = argparse.ArgumentParser(description="Simple Query")
    # Positional argument: look-back window in hours (may be fractional).
    parser.add_argument('hours', metavar='h', type=float)
    args = parser.parse_args()

    auth = util.get_postgres_auth()
    db = DatabaseConnection(**auth)

    cur_utc = datetime.datetime.utcnow()
    subreddits = db.get_subreddits_with_data(cur_utc - datetime.timedelta(
        hours=args.hours))

    growths = get_growths(db, subreddits, datetime.datetime.utcnow(),
                          args.hours)
    for g in growths:
        print(g)
    db.close()
Example #3
def collect(coin_name_array, hours=12):
    """
    Collects the reddit data for the coins in coin_name_array.
    coin_name_array should be a 2D array where each row contains keywords for a crypto coin
    and the last one is the subreddit
    """
    stat = RedditStats()
    auth = util.get_postgres_auth()
    db = DatabaseConnection(**auth)
    mentions = stat.get_mentions(coin_name_array,
                                 hours=hours,
                                 include_submissions=True,
                                 score_scaling=True)
    log.info("Got mentions for all subs.")
    for i, coin_tuple in enumerate(coin_name_array):
        subreddit = coin_tuple[-1]
        stats_dict = stat.compile_dict(subreddit, hours=hours)
        # mentions[0][i] and mentions[1][i] are the overall and 1-hour
        # mention rates for this coin.
        stats_dict["mention_rate"] = mentions[0][i]
        stats_dict["mention_rate_1h"] = mentions[1][i]
        db.insert_data(stats_dict)
        log.info("Got stats for: %s" % (subreddit))
    db.close()
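To make the docstring's 2D layout concrete, here is a minimal, made-up call; the keywords and subreddit names are illustrative only:

# Hypothetical input: each row holds the keywords for one coin, with the
# coin's subreddit as the last element (all names below are made up).
coin_name_array = [
    ["Bitcoin", "BTC", "Bitcoin"],
    ["Ethereum", "ETH", "ethereum"],
]
collect(coin_name_array, hours=12)  # inserts one stats row per subreddit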
Example #4
def collect_price(coin_name_array):
    """
    Collects the price data for the coins in coin_name_array.
    """
    auth = util.get_postgres_auth()
    db = DatabaseConnection(**auth)
    cap = CoinCap()
    time = datetime.datetime.utcnow()
    price_data = cap.get_coin_price_data(coin_name_array)
    if len(price_data) != len(coin_name_array):
        log.warning("No price data for {} coins.".format(
            len(coin_name_array) - len(price_data)))
    for k, d in price_data.items():
        d["time"] = time
        for coin in coin_name_array:
            if k in coin:
                d["subreddit"] = coin[-1]
                break
        if "subreddit" not in d:
            log.warning("No subreddit for %s." % (d["coin_name"]))
        else:
            log.info("Got price for: %s" % (d["subreddit"]))
            db.insert_price(d)
    db.close()
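The loop above only assumes that get_coin_price_data returns a mapping from a coin keyword to a per-coin dict that includes at least "coin_name"; a stand-in of that assumed shape (the fields are hypothetical, not CoinCap's actual schema):

# Hypothetical return shape; only "coin_name" is relied on above.
price_data = {
    "Bitcoin": {"coin_name": "Bitcoin", "price": 0.0},
}
# collect_price() then stamps each dict with "time" and "subreddit"
# before inserting it via db.insert_price().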
Example #5
import datetime

import query
import util
from database import DatabaseConnection
from settings import autotrade

log = util.setup_logger(__name__)
auth = util.get_postgres_auth()
db = DatabaseConnection(**auth)

K = autotrade["k"]
GROWTH_HOURS = autotrade["growth_hours"]
MIN_HOLD_HOURS = autotrade["min_hold_hours"]
USE_STAGNATION_DETECTION = autotrade["use_stagnation_detection"]
# A coin with less than STAGNATION_THRESHOLD price growth over the last
# STAGNATION_HOURS hours is considered stagnating.
STAGNATION_HOURS = autotrade["stagnation_hours"]
STAGNATION_THRESHOLD = autotrade["stagnation_threshold"]
NEVER_SELL = autotrade["never_sell"]

USE_DYNAMIC_STAGNATION_DETECTION = autotrade["use_dynamic_stagnation_detection"]
DYNAMIC_TOP_NR = autotrade["dynamic_top_nr"]
DRY_RUN = autotrade["dry_run"]


def __sell_and_spendings__(adapter, growths):
    """
    Calculates which coins to sell and how much to spend on other coins based on a dict of growths and subreddits.
    """
    assert len(growths) >= K
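For orientation, a settings stub with the autotrade keys read above might look roughly like this; the values are placeholders, not recommendations:

# Sketch of settings.py -- key names are taken from the code above,
# all values are placeholders.
autotrade = {
    "k": 3,
    "growth_hours": 24,
    "min_hold_hours": 12,
    "use_stagnation_detection": True,
    "stagnation_hours": 6,
    "stagnation_threshold": 0.02,
    "never_sell": [],
    "use_dynamic_stagnation_detection": False,
    "dynamic_top_nr": 5,
    "dry_run": True,
}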
Example #6
def main():
    file_path = general["subreddit_file"]
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--find_subs",
        default=0,
        type=int,
        action='store',
        help="Find crypto coin subreddits (overwrites {}).".format(file_path))
    parser.add_argument("--recreate_table",
                        default=False,
                        action='store_true',
                        help="Delete and recreate the data table.")
    parser.add_argument(
        "--collect",
        default=False,
        action='store_true',
        help="Collect subreddit information into the database.")
    parser.add_argument(
        "--collect_price",
        default=False,
        action='store_true',
        help="Collect coin price information into the database.")
    parser.add_argument("--run_sim",
                        default=False,
                        action='store_true',
                        help="Run simulation.")
    parser.add_argument("--find_by_symbols",
                        default=False,
                        action='store_true',
                        help="Find coins and subreddits using 'symbols.csv'.")
    parser.add_argument("--auto_trade",
                        type=str,
                        default="",
                        help="Run auto trader for specified exchange.")
    args = parser.parse_args()
    # -----------------------------------

    if args.find_subs > 0:
        subs = create_coin_name_array(args.find_subs)
        util.write_subs_to_file(file_path, subs)

    if args.recreate_table:
        auth = util.get_postgres_auth()
        db = DatabaseConnection(**auth)
        db.delete_data_table()
        db.create_data_table()
        db.close()

    if args.collect:
        if os.path.exists(file_path):
            subs = util.read_subs_from_file(file_path)
            collect(subs)
        else:
            log.info("Collect called but %s does not exist." % (file_path))
            log.info("Run --find_subs first.")

    if args.collect_price:
        if os.path.exists(file_path):
            subs = util.read_subs_from_file(file_path)
            collect_price(subs)
        else:
            log.warn("Collect price called but %s does not exist." %
                     (file_path))
            log.warn("Run --find_subs first.")

    if args.run_sim:
        minute_offsets = range(60, 500, 43)
        for minute_offset in minute_offsets:
            end_time = datetime.datetime.utcnow() - datetime.timedelta(
                minutes=minute_offset)
            start_time = end_time - datetime.timedelta(days=15)
            policy_list = [
                policies.subreddit_growth_policy,
                # policies.largest_24h_increase_policy,
                policies.largest_xhr_policy,
                # policies.hybrid_policy,
                policies.subreddit_growth_policy_with_stagnation_detection,
                policies.subreddit_growth_policy_with_dynamic_stagnation_detection,
            ]
            simulator.simulate(policy_list, start_time)
        title_str = "K={}, STEP_HOURS={}, GROWTH_HOURS={}, STAGNATION_HOURS={}, STAGNATION_THRESHOLD={}"
        title_str = title_str.format(policies.K, policies.STEP_HOURS,
                                     policies.GROWTH_HOURS,
                                     policies.STAGNATION_HOURS,
                                     policies.STAGNATION_THRESHOLD)

        plt.title(title_str)
        plt.show()

    if args.find_by_symbols:
        stat = RedditStats()
        guesses, found = stat.find_by_symbols("symbols.csv")
        util.write_subs_to_file("guesses.csv", guesses)
        util.write_subs_to_file("found.csv", found)

    if args.auto_trade != "":
        auto = AutoTrader.AutoTrader(args.auto_trade)
        auto.run()
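Assuming this file is the project's entry-point script (the file name in the commands below is a guess), a typical first run would be python main.py --find_subs 100 followed by python main.py --recreate_table, after which periodic python main.py --collect and python main.py --collect_price invocations fill the database.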