def main(args: Namespace):
    with Database(args.database) as db:
        team_rows = db.fetch_all_rows(Team.sql_table())
        for row in team_rows:
            create_team_from_row(row)

    for league_code in args.league:
        league = league_register[league_code]
        update_leagues(args.database, league, args.past)

    with Database(args.database) as db:
        db.create_table(Event)
        db.create_rows(Event)

def update_leagues(database: str, league: League, past: bool):
    with Database(database) as db:
        messages.vanilla_message('Updating {} {}...'.format(league.country, league.name))

        name_constraint = "{}='{}' {} {}".format(ColumnNames.Code.name,
                                                 league.name,
                                                 Keywords.COLLATE.name,
                                                 Keywords.NOCASE.name)
        country_constraint = "{}='{}' {} {}".format(ColumnNames.Country.name,
                                                    league.country,
                                                    Keywords.COLLATE.name,
                                                    Keywords.NOCASE.name)
        current_constraint = "{}={}".format(ColumnNames.Current.name,
                                            Characters.FALSE.value if past else Characters.TRUE.value)
        constraints = [name_constraint, country_constraint, current_constraint]
        season_rows = db.fetch_all_rows(Season.sql_table(), constraints)

        for row in season_rows:
            season = create_season_from_row(row)
            messages.vanilla_message("Extracting events for season {}".format(season.year))

            season_constraint = "{}={}".format(ColumnNames.Season_ID.name, season.id)
            finished_constraint = "{}={}".format(ColumnNames.Finished.name, Characters.TRUE.value)
            constraints = [season_constraint, finished_constraint]
            fixtures = get_fixtures(db, constraints)

            with ThreadPoolExecutor(max_workers=32) as executor:
                for fixture in fixtures:
                    executor.submit(create_events_for_fixture, fixture)

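# Illustrative sketch (not part of the original code): how constraint strings like the ones above
# are presumably combined into a SQLite WHERE clause by fetch_all_rows. The enum below is a
# stand-in defined only for this example; the real ColumnNames/Keywords enums live elsewhere.
from enum import Enum, auto


class ColumnNamesExample(Enum):
    Code = auto()
    Country = auto()
    Current = auto()


def build_select(table: str, constraints: list) -> str:
    # Constraints are assumed to be joined with AND.
    return 'SELECT * FROM {} WHERE {}'.format(table, ' AND '.join(constraints))


if __name__ == '__main__':
    name_constraint = "{}='{}' COLLATE NOCASE".format(ColumnNamesExample.Code.name, 'Premier League')
    country_constraint = "{}='{}' COLLATE NOCASE".format(ColumnNamesExample.Country.name, 'England')
    current_constraint = "{}=1".format(ColumnNamesExample.Current.name)
    print(build_select('Season', [name_constraint, country_constraint, current_constraint]))
    # SELECT * FROM Season WHERE Code='Premier League' COLLATE NOCASE AND Country='England' COLLATE NOCASE AND Current=1
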
def compute_summaries(database: str,
                      season: Season,
                      intervals: List[Interval],
                      team: Team,
                      venue: Venue) -> Tuple[Summary, Summary, Summary]:
    overall = Summary(intervals)
    for_team = Summary(intervals)
    against_team = Summary(intervals)

    with Database(database) as db:
        for fixture in season.fixtures():
            fixture_constraint = "{}={}".format(ColumnNames.Fixture_ID.name, fixture.id)
            constraints = [fixture_constraint]
            events_rows = db.fetch_all_rows(Event.sql_table(), constraints)

            for row in events_rows:
                event: Event = create_event_from_row(row, fixture)
                if is_goal(event.detail):
                    overall.add_goal(event.time, event.extra_time)

                    count_for_team = False
                    if venue == Venue.any:
                        count_for_team = team in [fixture.home_team, fixture.away_team]
                    elif venue == Venue.home and team == fixture.home_team:
                        count_for_team = True
                    elif venue == Venue.away and team == fixture.away_team:
                        count_for_team = True

                    if count_for_team:
                        if event.team == team:
                            for_team.add_goal(event.time, event.extra_time)
                        else:
                            against_team.add_goal(event.time, event.extra_time)

    return overall, for_team, against_team

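# Illustrative sketch (assumption, not the project's actual Summary class): a minimal Summary
# that buckets goal times into the supplied intervals, which is the behaviour compute_summaries
# relies on through add_goal(). An interval is assumed here to be a (start_minute, end_minute) pair.
from typing import List, Tuple


class SummarySketch:
    def __init__(self, intervals: List[Tuple[int, int]]):
        self.intervals = intervals
        self.counts = [0] * len(intervals)

    def add_goal(self, time: int, extra_time: int = 0):
        # Added time is folded onto the base minute before bucketing.
        minute = time + (extra_time or 0)
        for i, (start, end) in enumerate(self.intervals):
            if start <= minute <= end:
                self.counts[i] += 1
                break


if __name__ == '__main__':
    summary = SummarySketch([(0, 15), (16, 30), (31, 45), (46, 60), (61, 75), (76, 90)])
    summary.add_goal(44)
    summary.add_goal(45, extra_time=2)
    print(summary.counts)  # [0, 0, 1, 1, 0, 0]
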
def process_teams(arguments: Namespace):
    print('{:<25} {:<7} {:<7} {:<7} {:<7} {:<7}'.format('TEAM', 'MEDIAN', 'MEAN', 'STD', 'MIN', 'MAX'))

    with Database(arguments.database) as db:
        for team_name in arguments.team:
            lexemes = team_name.split('_')
            canonical_team_name = ' '.join(lexemes)
            (team_row,) = db.fetch_rows_with_likeness(Team.sql_table(), canonical_team_name)
            team = Team.inventory[team_row[0]]

            match_rows = db.fetch_match_rows(team)
            matches = []
            for match_row in match_rows:
                match = create_match_from_row(match_row)
                matches.append(match)
            matches.sort(key=lambda m: m.match_date)

            under_analysis = []
            for match in matches:
                event_rows = db.fetch_event_rows(match)
                match_events = []
                for event_row in event_rows:
                    event = create_event_from_row(event_row)
                    match_events.append(event)
                assert match_events
                match_events.sort(key=lambda e: (e.period, e.timestamp))
                under_analysis.append(match_events)

            analyse(team, under_analysis)

def update_leagues(database: str, leagues: List[str], past: bool, force: bool):
    with Database(database) as db:
        team_rows = db.fetch_all_rows(Team.sql_table())
        for row in team_rows:
            create_team_from_row(row)

        for league_code in leagues:
            messages.vanilla_message('Updating {}...'.format(league_code))
            league = league_register[league_code]

            name_constraint = "{}='{}' {} {}".format(ColumnNames.Code.name,
                                                     league.name,
                                                     Keywords.COLLATE.name,
                                                     Keywords.NOCASE.name)
            country_constraint = "{}='{}' {} {}".format(ColumnNames.Country.name,
                                                        league.country,
                                                        Keywords.COLLATE.name,
                                                        Keywords.NOCASE.name)
            current_constraint = "{}={}".format(ColumnNames.Current.name,
                                                Characters.FALSE.value if past else Characters.TRUE.value)
            constraints = [name_constraint, country_constraint, current_constraint]
            season_rows = db.fetch_all_rows(Season.sql_table(), constraints)

            for row in season_rows:
                season = create_season_from_row(row)
                create_fixtures_json(season.id, force)
                load_fixture_data(league, season)

        db.create_table(Fixture)
        db.create_rows(Fixture)
        db.create_table(Team)
        db.create_rows(Team)

def main(args: Namespace):
    for country in args.country:
        create_teams_json(country, args.force)
        load_team_data(country)

    with Database(args.database) as db:
        db.create_table(Team)
        db.create_rows(Team)

def main(arguments: Namespace):
    create_season_json()
    seasons_json = get_seasons_json()
    with seasons_json.open() as in_file:
        json_text = load(in_file)
        for data in json_text['api']['leagues']:
            create_season_from_json(data)

    with Database(arguments.database) as db:
        db.create_table(Season)
        db.create_rows(Season)

def main(args: Namespace):
    leagues = []
    if args.country:
        for country in args.country:
            leagues.extend([code for code, league in league_register.items()
                            if league.country == country.capitalize()])

    if args.league:
        leagues.extend(list(args.league))

    if not args.country and not args.league:
        leagues.extend(list(league_register.keys()))

    left_datetime = datetime(args.year, args.month, args.day, args.lower)
    right_datetime = datetime(args.year, args.month, args.day, args.upper)
    print(left_datetime, right_datetime)

    with Database(args.database) as db:
        team_rows = db.fetch_all_rows(Team.sql_table())
        for row in team_rows:
            create_team_from_row(row)

        for league_code in leagues:
            league = league_register[league_code]
            season = get_current_season(db, league)
            if season is not None:
                season_id = season[0]
                season_constraint = "{}={}".format(ColumnNames.Season_ID.name, season_id)
                finished_constraint = "{}={}".format(ColumnNames.Finished.name, Characters.FALSE.value)
                constraints = [season_constraint, finished_constraint]

                fixtures = get_fixtures(db, constraints)
                fixtures = filter_fixtures(fixtures, left_datetime, right_datetime)

                if fixtures:
                    if args.event:
                        teams = []
                        for fixture in fixtures:
                            teams.append(Team.inventory[fixture.home_team.id])
                            teams.append(Team.inventory[fixture.away_team.id])
                        analyse_sequences(db, league_code, teams, args.event, args.negate,
                                          args.venue, args.half, args.minimum)
                    else:
                        output_fixtures(league, fixtures)

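# Illustrative sketch (assumption): what filter_fixtures above is presumed to do - keep only
# fixtures whose kick-off falls inside the [left_datetime, right_datetime] window built from the
# command-line year/month/day and lower/upper hours. FixtureStub is a stand-in for the real type.
from datetime import datetime
from typing import List, NamedTuple


class FixtureStub(NamedTuple):
    date: datetime


def filter_fixtures_sketch(fixtures: List[FixtureStub],
                           left_datetime: datetime,
                           right_datetime: datetime) -> List[FixtureStub]:
    # Compare naive datetimes; the real code strips tzinfo elsewhere (see fixtures_played below).
    return [fixture for fixture in fixtures
            if left_datetime <= fixture.date.replace(tzinfo=None) <= right_datetime]


if __name__ == '__main__':
    window = (datetime(2021, 8, 14, 12), datetime(2021, 8, 14, 18))
    fixtures = [FixtureStub(datetime(2021, 8, 14, 15)), FixtureStub(datetime(2021, 8, 15, 15))]
    print(filter_fixtures_sketch(fixtures, *window))  # only the fixture on the 14th at 15:00 survives
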
def fixtures_played(database: str, season: Season) -> bool:
    played = False
    with Database(database) as db:
        constraints = ["{}='{}'".format(ColumnNames.Season_ID.name, season.id)]
        fixture_rows = db.fetch_all_rows(Fixture.sql_table(), constraints)

        for row in fixture_rows:
            fixture = create_fixture_from_row(row)
            if fixture.home_team is not None and fixture.away_team is not None and not fixture.finished:
                lower_bound = datetime.today() + timedelta(days=-3)
                upper_bound = datetime.today()
                match_date = fixture.date.replace(tzinfo=None)
                if lower_bound <= match_date <= upper_bound:
                    played = True

    return played

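# Illustrative sketch: the three-day window test used above, isolated. A fixture that kicked off
# within the last three days but is still flagged unfinished suggests its result is pending an update.
from datetime import datetime, timedelta

match_date = datetime.today() - timedelta(days=1)   # e.g. a fixture that kicked off yesterday
lower_bound = datetime.today() + timedelta(days=-3)
upper_bound = datetime.today()

print(lower_bound <= match_date <= upper_bound)  # True
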
def get_match_information(database: str, season: Season, team: Team):
    with Database(database) as db:
        season_constraint = "{}={}".format(ColumnNames.Season_ID.name, season.id)
        team_constraint = "({}={} {} {}={})".format(ColumnNames.Home_ID.name, team.id,
                                                    Keywords.OR.name,
                                                    ColumnNames.Away_ID.name, team.id)
        finished_constraint = "{}={}".format(ColumnNames.Finished.name, Characters.FALSE.value)
        constraints = [season_constraint, team_constraint, finished_constraint]

        fixtures = get_fixtures(db, constraints)
        fixtures.sort(key=lambda fixture: fixture.date)
        return fixtures

def get_fixtures(database: str, left_team: Team, right_team: Team):
    fixtures = []
    with Database(database) as db:
        teams_constraint = "{}={} {} {}={}".format(ColumnNames.Home_ID.name, left_team.id,
                                                   Keywords.AND.name,
                                                   ColumnNames.Away_ID.name, right_team.id)
        finished_constraint = "{}={}".format(ColumnNames.Finished.name, Characters.TRUE.value)
        constraints = [teams_constraint, finished_constraint]

        fixtures_rows = db.fetch_all_rows(Fixture.sql_table(), constraints)
        for row in fixtures_rows:
            fixture = create_fixture_from_row(row)
            fixtures.append(fixture)

    return fixtures

def load_players_and_coaches_and_teams_and_competitions(arguments: Namespace):
    with Database(arguments.database) as db:
        player_rows = db.fetch_all_rows(Player.sql_table())
        for row in player_rows:
            create_player_from_row(row)

        team_rows = db.fetch_all_rows(Team.sql_table())
        for row in team_rows:
            create_team_from_row(row)

        competition_rows = db.fetch_all_rows(Competition.sql_table())
        for row in competition_rows:
            create_competition_from_row(row)

        coach_rows = db.fetch_all_rows(Coach.sql_table())
        for row in coach_rows:
            create_coach_from_row(row)

def update_all(database: str, past: bool, force: bool):
    codes = []
    with Database(database) as db:
        team_rows = db.fetch_all_rows(Team.sql_table())
        for row in team_rows:
            create_team_from_row(row)

        for code, league in league_register.items():
            constraints = ["{}='{}'".format(ColumnNames.Country.name, league.country),
                           "{}='{}'".format(ColumnNames.Code.name, league.name)]
            season_rows = db.fetch_all_rows(Season.sql_table(), constraints)

            for season_row in season_rows:
                season = create_season_from_row(season_row)
                if season.current:
                    if force or fixtures_played(database, season):
                        codes.append(code)

    update_leagues(database, codes, past, force or bool(codes))

def get_fixtures(database: str, team: Team, this_season: Season, venue: Venue):
    fixtures = []
    with Database(database) as db:
        if venue == Venue.home:
            team_constraint = "{}={}".format(ColumnNames.Home_ID.name, team.id)
        elif venue == Venue.away:
            team_constraint = "{}={}".format(ColumnNames.Away_ID.name, team.id)
        else:
            team_constraint = "({}={} {} {}={})".format(ColumnNames.Home_ID.name, team.id,
                                                        Keywords.OR.name,
                                                        ColumnNames.Away_ID.name, team.id)

        finished_constraint = "{}={}".format(ColumnNames.Finished.name, Characters.TRUE.value)
        season_constraint = "{}={}".format(ColumnNames.Season_ID.name, this_season.id)
        constraints = [team_constraint, finished_constraint, season_constraint]

        fixtures_rows = db.fetch_all_rows(Fixture.sql_table(), constraints)
        for row in fixtures_rows:
            fixture = create_fixture_from_row(row)
            fixtures.append(fixture)

    return fixtures

def main(arguments: Namespace):
    miscellaneous.messages.verbose = arguments.verbose
    miscellaneous.messages.debug = arguments.debug
    arguments.data = os.path.abspath(arguments.data)

    events_directory, others_directory, matches_directory = extract_directories(arguments.data)
    fill_players_inventory(others_directory)
    fill_teams_inventory(others_directory)
    fill_competitions_inventory(others_directory)
    fill_coaches_inventory(others_directory)

    matches_prefix = 'matches_'
    events_prefix = 'events_'
    suffix = '.json'
    matches_and_events = {}

    for root, _, files in os.walk(matches_directory):
        for file in files:
            competition = file[len(matches_prefix):-len(suffix)]
            matches_file = os.path.join(root, file)
            check_file_or_directory_exists(matches_file)
            matches_and_events[competition] = [matches_file]

    for root, _, files in os.walk(events_directory):
        for file in files:
            competition = file[len(events_prefix):-len(suffix)]
            events_file = os.path.join(root, file)
            check_file_or_directory_exists(events_file)
            matches_and_events[competition].append(events_file)

    for competition, (matches_file, events_file) in matches_and_events.items():
        verbose_message('Analysing {}'.format(competition))
        fill_matches_inventory(matches_file)
        fill_events_inventory(events_file)

        db_filename = '{}.db'.format(competition)
        with Database(db_filename) as db:
            db.create_tables()
            db.create_rows()

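# Illustrative sketch: how the competition key is carved out of the JSON file names above.
# The slice drops the 'matches_'/'events_' prefix and the '.json' suffix, so both directories
# index into matches_and_events under the same key.
matches_prefix = 'matches_'
events_prefix = 'events_'
suffix = '.json'

file = 'matches_World_Cup.json'
print(file[len(matches_prefix):-len(suffix)])  # World_Cup

file = 'events_World_Cup.json'
print(file[len(events_prefix):-len(suffix)])   # World_Cup
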
def run_tradebot(control_queue,
                 data_queue,
                 pending_order_queue,
                 completed_order_queue,
                 markets,
                 amount_per_call,
                 table_name,
                 logger,
                 skip_list):
    """
    Run trading algorithm on real-time market data
    :param control_queue: Queue to control tradebot
    :param data_queue: Queue to receive data from scraper
    :param pending_order_queue: Queue to pass orders to manager
    :param completed_order_queue: Queue to receive completed orders from manager
    :param markets: List of markets
    :param amount_per_call: Amount to purchase per buy order
    :param table_name: Name of SQL table to record data
    :param logger: Main logger
    :param skip_list: List of markets to skip
    :return:
    """
    market_data = {}
    market_status = {}
    for market in markets:
        if market not in skip_list:
            market_status[market] = BittrexStatus(market=market)
            market_data[market] = BittrexData(market=market)

    # Initialize SQL database connection
    db = Database(hostname=HOSTNAME,
                  username=USERNAME,
                  password=PASSCODE,
                  database_name=TRADEBOT_DATABASE,
                  logger=logger)

    # Initialize Bittrex object
    bittrex = Bittrex(api_key=BITTREX_CREDENTIALS.get('key'),
                      api_secret=BITTREX_CREDENTIALS.get('secret'),
                      api_version='v1.1')

    try:
        while True:
            # Receive data from scraper
            scraper_data = data_queue.get()

            # Add received scraper data to the running data
            for market, entry in scraper_data.items():
                if market not in skip_list:
                    if entry.get('wprice') > 0:  # Temporary fix for entries with 0 price
                        market_data[market].datetime.append(entry.get('datetime'))
                        market_data[market].wprice.append(entry.get('wprice'))
                        market_data[market].buy_volume.append(entry.get('buy_volume'))
                        market_data[market].sell_volume.append(entry.get('sell_volume'))

            # Check whether any orders completed
            if not completed_order_queue.empty():
                while not completed_order_queue.empty():
                    completed_order = completed_order_queue.get()
                    order_market = completed_order.market

                    # Update market statuses with completed orders
                    if completed_order.type == OrderType.BUY.name:
                        # Check if the buy order was skipped
                        if completed_order.status == OrderStatus.SKIPPED.name:
                            market_status[order_market].bought = False
                            market_status[order_market].buy_signal = None
                            logger.info('Tradebot: Received skipped buy order. Skipping buy order for {}.'.format(order_market))
                        else:
                            market_status[order_market].buy_order = completed_order
                            logger.info('Tradebot: Received completed buy order for {}.'.format(order_market))
                    else:
                        market_status[order_market].sell_order = completed_order
                        logger.info('Tradebot: Received completed sell order for {}.'.format(order_market))

                    status = market_status[order_market]

                    # Completed buy and sell order for a single market
                    if status.buy_order.status == OrderStatus.COMPLETED.name and \
                            status.sell_order.status == OrderStatus.COMPLETED.name:
                        profit = (status.sell_order.final_total +
                                  status.buy_order.final_total).quantize(BittrexConstants.DIGITS)
                        percent = (profit * Decimal(-100) /
                                   status.buy_order.final_total).quantize(Decimal(10) ** -4)

                        formatted_buy_time = format_time(status.buy_order.closed_time, "%Y-%m-%d %H:%M:%S")
                        formatted_sell_time = format_time(status.sell_order.closed_time, "%Y-%m-%d %H:%M:%S")

                        logger.info('Tradebot: completed buy/sell order for {}.'.format(order_market))

                        db.insert_query(table_name,
                                        format_tradebot_entry(order_market,
                                                              formatted_buy_time,
                                                              status.buy_signal,
                                                              status.buy_order.actual_price,
                                                              status.buy_order.final_total,
                                                              formatted_sell_time,
                                                              status.sell_signal,
                                                              status.sell_order.actual_price,
                                                              status.sell_order.final_total,
                                                              profit,
                                                              percent))

                        # Reset buy/sell orders and buy/sell signals
                        status.clear_orders()
                        status.buy_signal = None
                        status.sell_signal = None
                    else:
                        logger.error('Tradebot: Attempted to insert INCOMPLETE BUY and SELL order into database.')

            for market in scraper_data.keys():
                data = market_data.get(market)
                if market not in skip_list:
                    if len(data.datetime) > 65:
                        # Clear the first entries
                        data.clear_first()

                    status = market_status.get(market)
                    run_algorithm(data, status, amount_per_call, pending_order_queue, logger)

            if not control_queue.empty():
                signal = control_queue.get()
                if signal == 'STOP':
                    logger.info('Tradebot: Stopping tradebot ...')
                    break
    except Exception:  # TODO: change back to ConnectionError and ValueError
        logger.error(format_exc())
    finally:
        db.close()
        logger.info('Tradebot: Database connection closed.')

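# Illustrative sketch (assumption about sign conventions): the profit/percent arithmetic above
# only makes sense if buy_order.final_total is stored as a negative amount (cash out) and
# sell_order.final_total as a positive amount (cash in), so their sum is the realised profit and
# the -100 factor turns it into a positive percentage of the outlay. DIGITS is a stand-in for
# BittrexConstants.DIGITS.
from decimal import Decimal

DIGITS = Decimal(10) ** -8

buy_final_total = Decimal('-0.05000000')   # spent 0.05 BTC
sell_final_total = Decimal('0.05250000')   # received 0.0525 BTC

profit = (sell_final_total + buy_final_total).quantize(DIGITS)
percent = (profit * Decimal(-100) / buy_final_total).quantize(Decimal(10) ** -4)

print(profit, percent)  # 0.00250000 5.0000
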
def run_scraper(control_queue, database_name, logger, markets=MARKETS, interval=60, sleep_time=5):
    """
    Run scraper to pull data from Bittrex
    :param control_queue: Queue to control scraper
    :param database_name: Name of database
    :param logger: Main logger
    :param markets: List of active markets
    :param interval: Duration between entries into database
    :param sleep_time: Duration between API calls
    :return:
    """
    # Initialize database object
    initialize_databases(database_name, markets, logger=logger)
    db = Database(hostname=HOSTNAME,
                  username=USERNAME,
                  password=PASSCODE,
                  database_name=database_name,
                  logger=logger)

    # Initialize Bittrex object
    bittrex_request = Bittrex(api_key=BITTREX_CREDENTIALS.get('key'),
                              api_secret=BITTREX_CREDENTIALS.get('secret'),
                              dispatch=return_request_input,
                              api_version='v1.1')

    # Initialize variables
    run_tradebot = False
    proxy_indexes = list(range(len(PROXIES)))
    working_data = {}

    current_datetime = datetime.now().astimezone(tz=None)
    current_datetime = {k: current_datetime for k in MARKETS}
    last_price = {k: Decimal(0).quantize(BittrexConstants.DIGITS) for k in MARKETS}
    weighted_price = {k: Decimal(0).quantize(BittrexConstants.DIGITS) for k in MARKETS}

    try:
        with FuturesSession(max_workers=20) as session:
            while True:
                shuffle(proxy_indexes)
                start = time()

                response_dict = get_data(MARKETS, bittrex_request, session, PROXIES, proxy_indexes, logger=logger)

                working_data, current_datetime, last_price, weighted_price, entries = \
                    process_data(response_dict, working_data, current_datetime,
                                 last_price, weighted_price, logger, interval)

                if run_tradebot:
                    tradebot_entries = {k: entries.get(k)[-1] for k in entries}
                    SCRAPER_TRADEBOT_QUEUE.put(tradebot_entries)

                if entries:
                    formatted_entries = list(chain.from_iterable(
                        [[(x, *format_bittrex_entry(y)) for y in entries[x]] for x in entries]))
                    db.insert_transaction_query(formatted_entries)

                stop = time()
                run_time = stop - start
                if run_time > 5:
                    logger.info('Scraper: Total time: {0:.2f}s'.format(run_time))

                if run_time < sleep_time:
                    sleep(sleep_time - run_time)

                if not control_queue.empty():
                    signal = control_queue.get()
                    if signal == "START TRADEBOT":
                        run_tradebot = True
                        logger.info("Scraper: Starting tradebot ...")
                    elif signal == "STOP TRADEBOT":
                        run_tradebot = False
                        logger.info("Scraper: Stopping tradebot ...")
                    elif signal == "STOP":
                        logger.info("Scraper: Stopping scraper ...")
                        break
    except ConnectionError as e:
        logger.debug('ConnectionError: {}. Exiting ...'.format(e))
    finally:
        db.close()
        logger.info("Scraper: Stopped scraper.")
        logger.info("Scraper: Database connection closed.")

def initialize_databases(database_name, markets, logger=None):
    """
    Create a new database for data collection and a new table for trades
    :param database_name: Name of the data-collection database
    :param markets: List of markets to create price tables for
    :param logger: Main logger
    :return:
    """
    db = Database(hostname=HOSTNAME,
                  username=USERNAME,
                  password=PASSCODE,
                  database_name='develop',
                  logger=logger)

    # Create the databases if they do not exist
    db.create_database(database_name)
    db.create_database(TRADEBOT_DATABASE)
    db.close()

    base_db = Database(hostname=HOSTNAME,
                       username=USERNAME,
                       password=PASSCODE,
                       database_name=database_name,
                       logger=logger)

    # Create price tables if they do not exist
    for market in markets:
        base_db.create_price_table(market)
    base_db.close()

    tradebot_db = Database(hostname=HOSTNAME,
                           username=USERNAME,
                           password=PASSCODE,
                           database_name=TRADEBOT_DATABASE,
                           logger=logger)
    tradebot_db.create_trade_table(database_name)
    tradebot_db.close()