from hltv_scraper import helper, scraper, get_match_ids, get_finished_events

# Scrape HLTV player pages for IDs not yet in players.csv, in batches.
processes = 32

# Candidate player IDs 0..16049 as strings (helper/scraper work with string IDs).
players = [str(player_id) for player_id in range(16050)]

# Drop players already recorded in players.csv (ID is column 0).
existing_players = helper.get_existing_data('players', 0)
unscraped_players = helper.remove_existing_data(existing_players, players,
                                                'players')

# Lazily yield batches of 250 so a crash loses at most one chunk of work.
chunks = (unscraped_players[i:i + 250]
          for i in range(0, len(unscraped_players), 250))

for players_to_check in chunks:
    # Step 1: scrape this batch of player pages in parallel.
    new_players = helper.scrape(players_to_check, scraper.get_players,
                                processes)

    # Step 2: append the new rows to players.csv.
    helper.tabulate("players", new_players)
# Example #2
# Scrape event links, results, player stats and vetos for matches whose
# results are not yet recorded, working in batches of 250.
threads = 32

# All known match IDs (column 0 of matchIDs.csv) and an ID -> URL lookup.
matches = helper.get_existing_data("matchIDs", 0)
match_lookup = helper.csv_lookup('matchIDs', 0, 1)

# Skip matches already present in matchResults.csv (column 15).
existing_matches = helper.get_existing_data('matchResults', 15)
unscraped_matches = helper.remove_existing_data(existing_matches, matches, 'matches')

# Translate the remaining match IDs through the lookup table.
unscraped_matches = [match_lookup[match] for match in unscraped_matches]

# Lazily yield batches of 250.
chunks = (unscraped_matches[i:i + 250] for i in range(0, len(unscraped_matches), 250))

for matchesToCheck in chunks:
    # Step 1: add new matches to the event join table
    newEvents = helper.scrape(matchesToCheck, scraper.get_match_events, threads)

    # Step 2: Update matchResults.csv
    newMatchInfo = helper.scrape(matchesToCheck, scraper.get_match_info, threads)
    # Sometimes this returns a multi-dimensional array, so we remove it
    newMatchInfo = helper.fix_match_results(newMatchInfo, 16)

    # Step 3: Update playerStats.csv
    newPlayerStats = helper.scrape(matchesToCheck, scraper.get_player_stats, threads)
    # This returns a single array for each match with all of the player stats, so we un-array it
    newPlayerStats = helper.fix_player_stats(newPlayerStats)

    # Step 4: Update picksAndBans.csv
    raw_picks_and_bans = helper.scrape(matchesToCheck, scraper.get_match_vetos, threads)
    picks_and_bans = helper.fix_player_stats(raw_picks_and_bans)
    # NOTE(review): nothing scraped above is ever passed to helper.tabulate,
    # unlike the sibling scripts which persist each batch — this example looks
    # truncated. Confirm against the original source before relying on it.
from hltv_scraper import helper, scraper, get_match_ids, get_finished_events

# Scrape round-by-round statistics for stats pages not yet in rounds.csv.
threads = 32

# Stats-page IDs (column 1 of statsIDs.csv), minus those whose rounds are
# already recorded (column 0 of rounds.csv).
statsids = helper.get_existing_data("statsIDs", 1)
existing_statsids = helper.get_existing_data('rounds', 0)
unscraped_stats = helper.remove_existing_data(existing_statsids, statsids,
                                              'matches')

# Lazily yield batches of 250 of the remaining stats IDs.
chunks = (unscraped_stats[i:i + 250]
          for i in range(0, len(unscraped_stats), 250))

for statsToCheck in chunks:
    # Step 1: scrape round stats for this batch in parallel, then flatten.
    raw_rounds = helper.scrape(statsToCheck, scraper.get_match_stats, threads)
    new_stats = helper.fix_rounds(raw_rounds)

    # Step 2: append the flattened rows to rounds.csv.
    helper.tabulate("rounds", new_stats)
    # NOTE(review): everything from here down reads like a separate
    # single-match "test scrape" script that ended up indented inside the
    # loop above during extraction — it ignores statsToCheck entirely and
    # re-reads sys.argv on every chunk. Also `sys` is never imported in
    # this file. Confirm against the original source.
    processes = 1

    # Add the single match ID to an array, since helper.scrape expects a
    # sequence of IDs. The ID is the CLI argument following the word 'test'.
    match_id = []
    match_id.append(sys.argv[sys.argv.index('test') + 1])

    # Tell the user what we are parsing
    print(f"\nBeginning test scrape for {match_id[0]}:\n")

    # Make lookup dicts mapping scraped IDs to human-readable names.
    event_id_to_name = helper.csv_lookup('eventIDs', 3, 1)
    team_id_to_name = helper.csv_lookup('teams', 2, 0)
    player_id_to_name = helper.csv_lookup('players', 0, 1)

    # Handle the Event ID: resolve the match's event ID to its event name.
    eventID = helper.scrape(match_id, scraper.get_match_events, processes)
    eventID[0][1] = event_id_to_name[eventID[0][1]]

    # Handle new match info: replace team IDs (columns 3 and 9) with names.
    matchInfo = helper.fix_match_results(
        helper.scrape(match_id, scraper.get_match_info, processes), 16)
    for i in range(0, len(matchInfo)):
        matchInfo[i][3] = team_id_to_name[matchInfo[i][3]]
        matchInfo[i][9] = team_id_to_name[matchInfo[i][9]]

    # Handle player stats: replace team (col 1) and player (col 2) IDs
    # with their names.
    stats = helper.fix_player_stats(
        helper.scrape(match_id, scraper.get_player_stats, processes))
    for i in range(0, len(stats)):
        stats[i][1] = team_id_to_name[stats[i][1]]
        stats[i][2] = player_id_to_name[stats[i][2]]
# Example #5
from hltv_scraper import helper, scraper, get_match_ids, get_finished_events

# Populate the match -> event join table for matches not yet recorded there.
threads = 32

# All match IDs (column 0 of matchIDs.csv) and an ID -> URL lookup.
matches = helper.get_existing_data("matchIDs", 0)
match_lookup = helper.csv_lookup('matchIDs', 0, 1)

# Skip matches already present in joinMatchEvent.csv (column 0).
existing_matches = helper.get_existing_data('joinMatchEvent', 0)
unscraped_matches = helper.remove_existing_data(existing_matches, matches,
                                                'matches')

# Translate the remaining match IDs through the lookup table.
unscraped_matches = [match_lookup[match] for match in unscraped_matches]

# Lazily yield batches of 250 so each chunk is persisted before the next.
chunks = (unscraped_matches[i:i + 250]
          for i in range(0, len(unscraped_matches), 250))

for matchesToCheck in chunks:
    # Step 1: scrape each match's event association in parallel.
    newEvents = helper.scrape(matchesToCheck, scraper.get_match_events,
                              threads)

    # Step 2: append the new rows to joinMatchEvent.csv.
    helper.tabulate("joinMatchEvent", newEvents)
from hltv_scraper import helper, scraper, get_match_ids, get_finished_events

# Record freshly-completed events, then scrape their prizes and winners.
processes = 32

# Fetch finished events (argument '0' — presumably a page/offset; confirm)
# and record them in completedEvents.csv.
completed_events = get_finished_events.get_finished_events('0')
helper.tabulate("completedEvents", completed_events)

# Flatten to a plain list of event IDs and batch into groups of 250.
events = helper.un_dimension(completed_events, 0)
chunks = (events[i:i + 250] for i in range(0, len(events), 250))

for events_to_check in chunks:
    # Step 1: scrape prize pools and winners for this batch in parallel.
    event_rewards = helper.scrape(events_to_check, scraper.get_event_rewards, processes)
    event_winners = helper.scrape(events_to_check, scraper.get_event_winners, processes)

    # Step 2: append the rows to their respective CSVs.
    helper.tabulate("eventPrizes", event_rewards)
    helper.tabulate("eventWinners", event_winners)
import scraper
from hltv_scraper import helper, scraper, get_match_ids, get_finished_events

# Discover finished events and add any new ones to eventIDs.csv.
processes = 32

# Everything reported as finished, minus events already recorded
# (event ID lives in column 3 of eventIDs.csv). The prints show counts
# as progress information.
events = get_finished_events.get_finished_events()
print(len(events))
existing_events = helper.get_existing_data('eventIDs', 3)
print(len(existing_events))
unscraped_events = helper.remove_existing_data(existing_events, events,
                                               'events')

# Lazily yield batches of 250 of the remaining events.
chunks = (unscraped_events[i:i + 250]
          for i in range(0, len(unscraped_events), 250))

for events_to_check in chunks:
    # Step 1: scrape each event's name in parallel.
    new_event_ids = helper.scrape(events_to_check, scraper.get_event_names,
                                  processes)

    # Step 2: append the new rows to eventIDs.csv.
    helper.tabulate("eventIDs", new_event_ids)
# Example #8
from hltv_scraper import helper, scraper

# Scrape per-player statistics for matches missing from playerStats.csv.
threads = 32

# Match URLs (column 1 of matchIDs.csv), minus matches already present in
# playerStats.csv (column 9).
matches = helper.get_existing_data("matchIDs", 1)
existing_matches = helper.get_existing_data('playerStats', 9)
unscraped_matches = helper.remove_existing_data(existing_matches, matches,
                                                'matches')

# Lazily yield batches of 250 so each chunk is persisted before the next.
chunks = (unscraped_matches[i:i + 250]
          for i in range(0, len(unscraped_matches), 250))

for matches_to_check in chunks:
    # Scrape the stats for this batch in parallel.
    new_player_stats = helper.scrape(matches_to_check,
                                     scraper.get_player_stats, threads)
    # Each match comes back as one nested array of player rows; flatten it.
    new_player_stats = helper.fix_player_stats(new_player_stats)

    # Append the flattened rows to playerStats.csv.
    helper.tabulate("playerStats", new_player_stats)