from hltv_scraper import helper, scraper, get_match_ids, get_finished_events

threads = 32
statsids = helper.get_existing_data("statsIDs", 1)
existing_statsids = helper.get_existing_data('rounds', 0)
unscraped_stats = helper.remove_existing_data(existing_statsids, statsids,
                                              'matches')
chunks = (unscraped_stats[i:i + 250]
          for i in range(0, len(unscraped_stats), 250))

for stats_to_check in chunks:
    # Step 1: Scrape the round-by-round stats for each unscraped match
    stats = helper.scrape(stats_to_check, scraper.get_match_stats, threads)
    new_stats = helper.fix_rounds(stats)

    # Step 2: Tabulate
    helper.tabulate("rounds", new_stats)
Example 2
from hltv_scraper import helper, scraper, get_match_ids, get_finished_events

processes = 32
players = [str(i) for i in range(16050)]

existing_players = helper.get_existing_data('players', 0)
unscraped_players = helper.remove_existing_data(existing_players, players,
                                                'players')
chunks = (unscraped_players[i:i + 250]
          for i in range(0, len(unscraped_players), 250))

for players_to_check in chunks:
    # Step 1: Scrape the profile for each unscraped player ID
    new_players = helper.scrape(players_to_check, scraper.get_players,
                                processes)

    # Step 2: Tabulate
    helper.tabulate("players", new_players)
Example 3
from hltv_scraper import helper, scraper, get_match_ids, get_finished_events


threads = 32
matches = helper.get_existing_data("matchIDs", 0)
match_lookup = helper.csv_lookup('matchIDs', 0, 1)
existing_matches = helper.get_existing_data('matchResults', 15)
unscraped_matches = helper.remove_existing_data(existing_matches, matches, 'matches')

unscraped_matches = [match_lookup[match] for match in unscraped_matches]

chunks = (unscraped_matches[i:i+250] for i in range(0, len(unscraped_matches), 250))

for matchesToCheck in chunks:

    # Step 1: add new matches to the event join table
    newEvents = helper.scrape(matchesToCheck, scraper.get_match_events, threads)

    # Step 2: Update matchResults.csv
    newMatchInfo = helper.scrape(matchesToCheck, scraper.get_match_info, threads)
    # Sometimes this returns a multi-dimensional array, so we flatten it
    newMatchInfo = helper.fix_match_results(newMatchInfo, 16)

    # Step 3: Update playerStats.csv
    newPlayerStats = helper.scrape(matchesToCheck, scraper.get_player_stats, threads)
    # This returns a single nested array per match holding all of the player stats, so we unpack it into rows
    newPlayerStats = helper.fix_player_stats(newPlayerStats)

    # Step 4: Update picksAndBans.csv
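
The fix_* helpers (fix_rounds, fix_match_results, fix_player_stats) are also not included in this listing. Judging by the comments above, they deal with the scraper returning one nested list per match, so the core of each is presumably a flattening step along these lines (a sketch under that assumption, not the repo's actual code):

def flatten_rows(nested):
    # Turn [[row, row, ...], [row, ...], ...] (one inner list per match)
    # into a single flat list of rows ready for tabulation.
    return [row for per_match in nested for row in per_match]
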
Example 4
    # Handle player stats
    stats = helper.fix_player_stats(
        helper.scrape(match_id, scraper.get_player_stats, processes))
    for i in range(0, len(stats)):
        stats[i][1] = team_id_to_name[stats[i][1]]
        stats[i][2] = player_id_to_name[stats[i][2]]

    # Handle printing
    print(f"\nTest scrape results for {match_id[0]}:\n")
    print(f"Event: {eventID[0][1]}\n")
    helper.print_array("Map results", matchInfo, 1)
    helper.print_array("Player stats", stats, 1)
    sys.exit("Test complete.")

# Make an array of existing Match and Event IDs
existing_match_ids = helper.get_existing_data("matchIDs", 1)
existing_event_ids = helper.get_existing_data("eventIDs", 3)
existing_completed_events = helper.get_existing_data("completedEvents", 0)

# Get the last ID so we know when to stop looking
new_match_ids = get_match_ids.get_match_ids(existing_match_ids[-1])
new_completed_events = get_finished_events.get_finished_events(
    existing_completed_events[-1])

# Exit if there are no new matches
if len(new_match_ids) < 1 and len(new_completed_events) < 1:
    print("Nothing to scrape!")

# Just check for new matches and break out of the loop
elif helper.check_args('check', sys.argv):
    print(
Example 5
from hltv_scraper import helper, scraper, get_match_ids, get_finished_events

threads = 32
matches = helper.get_existing_data("matchIDs", 0)
match_lookup = helper.csv_lookup('matchIDs', 0, 1)
existing_matches = helper.get_existing_data('joinMatchEvent', 0)
unscraped_matches = helper.remove_existing_data(existing_matches, matches,
                                                'matches')

unscraped_matches = [match_lookup[match] for match in unscraped_matches]

chunks = (unscraped_matches[i:i + 250]
          for i in range(0, len(unscraped_matches), 250))

for matchesToCheck in chunks:

    # Step 1: add new matches to the event join table
    newEvents = helper.scrape(matchesToCheck, scraper.get_match_events,
                              threads)

    # Step 2: Tabulate
    helper.tabulate("joinMatchEvent", newEvents)
Example 6
from hltv_scraper import helper, scraper, get_match_ids, get_finished_events

processes = 32
events = get_finished_events.get_finished_events()
print(len(events))
existing_events = helper.get_existing_data('eventIDs', 3)
print(len(existing_events))
unscraped_events = helper.remove_existing_data(existing_events, events,
                                               'events')
chunks = (unscraped_events[i:i + 250]
          for i in range(0, len(unscraped_events), 250))

for events_to_check in chunks:

    # Step 1: Add new events to eventIDs.csv
    new_event_ids = helper.scrape(events_to_check, scraper.get_event_names,
                                  processes)

    # Step 2: Tabulate
    helper.tabulate("eventIDs", new_event_ids)
Example 7
from hltv_scraper import helper, scraper

threads = 32
matches = helper.get_existing_data("matchIDs", 1)
existing_matches = helper.get_existing_data('playerStats', 9)
unscraped_matches = helper.remove_existing_data(existing_matches, matches,
                                                'matches')
chunks = (unscraped_matches[i:i + 250]
          for i in range(0, len(unscraped_matches), 250))

for matches_to_check in chunks:

    # Step 1: Update playerStats.csv
    new_player_stats = helper.scrape(matches_to_check,
                                     scraper.get_player_stats, threads)
    # This returns a single nested array per match holding all of the player stats, so we unpack it into rows
    new_player_stats = helper.fix_player_stats(new_player_stats)

    # Step 2: Tabulate
    helper.tabulate("playerStats", new_player_stats)