Example #1
def main():
    if int(now_localized().strftime('%H')) != 23:
        return
    credentials = AuthCredentials(user_file="me")
    site = EsportsClient('lol', credentials=credentials)  # Set wiki
    pages = get_pages_to_make()
    for title, text in pages.items():
        try:
            site.save_title(title, text=text, summary="Automatic GCD Backup")
            site.touch_title(title)
        except Exception as e:
            ERRORS.append(str(e))

    if len(ERRORS) > 0:
        # for sure wait out any rate limiting
        time.sleep(30)
        site.save_title(ERROR_REPORT_PAGE, text='<br>'.join(ERRORS))
Example #2
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
import time
limit = -1
credentials = AuthCredentials(user_file="me")
site = EsportsClient('fortnite', credentials=credentials)  # Set wiki

pages = site.pages_using('Infobox Player')

# c = site.client.categories['Pages with script errors']

startat_page = None
passed_startat = True

lmt = 0
#for p in c:
for p in pages:
    if lmt == limit:
        break
    if p.name == startat_page:
        passed_startat = True
    if not passed_startat:
        continue
    lmt += 1
    print(p.name)
    site.touch(p)
Example #3
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
import mwparserfromhell, re

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('lol', credentials=credentials)  #  set wiki
summary = 'Changing links/display to be just 1 field, with link only'  # Set summary

limit = -1
startat_page = 'Challengers Korea/2017 Season/Spring Season/Scoreboards/Week 3'
print(startat_page)
this_template = site.client.pages['Module:Scoreboard']  # Set template
pages = this_template.embeddedin()

#pages = [site.pages['Data:Challengers Korea/2019 Season/Spring Season']]


def links_to_display(template):
    if not template.has('name'):
        return
    name = template.get('name').value.strip()
    if '{{!}}' in name:
        template.add('name', name.split('{{!}}')[0])
    name = template.get('name').value.strip()
    if not template.has('link'):
        template.add('link', name, before='name')
        template.remove('name')
        return
    display_str = template.get('name').value.strip()
    link_str = template.get('link').value.strip()
Example #4
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('fortnite-esports', credentials=credentials)

for page in site.pages_using('Infobox Tournament'):
    text = page.text()
    if 'Tabs}}' not in text:
        text = '{{EmptyTournamentTabs}}\n' + text
        site.save(page, text, summary="Adding EmptyTournamentTabs")
Example #5
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials

credentials = AuthCredentials(user_file="me")
loadout = EsportsClient('default-loadout',
                        credentials=credentials)  #  set wiki
target = EsportsClient('legendsofruneterra-esports',
                       credentials=credentials)  #  set wiki
summary = 'Default loadout of pages'  # Set summary

startat_namespace = None
print(startat_namespace)
# startat_namespace = 274

startat_page = None
print(startat_page)
# startat_page = 'Module:Navbox/Aether II/en'

overwrite_existing = True

startat_comparison = -1 if startat_namespace is None else startat_namespace - 1

passed_startat = False

for ns in loadout.client.namespaces:
    print(ns)
    if ns > startat_comparison:  # ns 4 is Project ns
        for page in loadout.client.allpages(namespace=ns):
            new_title = page.name
            if ns == 4:
                new_title = 'Project:{}'.format(page.page_title)
Example #6
import mwparserfromhell
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials

limit = -1
quiet = True

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('fortnite-esports', credentials=credentials)
summary = 'Discovering Fortnite IDs from tournament data'

result = site.cargo_client.query(
    tables=
    'TournamentResults=TR,TournamentResults__RosterLinks=RL,_pageData=PD,Players=P',
    join_on='TR._ID=RL._rowID,RL._value=PD._pageName,RL._value=P.ID',
    where=
    'PD._pageName IS NOT NULL AND TR.RosterIds__full NOT LIKE CONCAT("%", P.FortniteID, "%") AND TR.RosterLinks__full != TR.RosterIds__full',
    fields=
    'RL._value=name, TR.RosterLinks__full=RosterLinks, TR.RosterIds__full=RosterIds',
    group_by='RL._value',
    limit='max')
print(result)
lmt = 0
for item in result:
    if lmt == limit:
        break
    lmt += 1
    name = item['name']
    if not quiet:
        print(name)
        print(item)
    idx = None
Example #7
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
from river_mwclient.page_modifier import PageModifierBase
from river_mwclient.wiki_time_parser import time_from_template


class PageModifier(PageModifierBase):
    def update_wikitext(self, wikitext):
        for template in wikitext.filter_templates():
            if not template.name.matches([
                    'MatchRecapS8', 'Scoreboard/Season 3',
                    'Scoreboard/Season 4', 'Scoreboard/Season 5',
                    'Scoreboard/Season 6', 'Scoreboard/Season 7',
                    'Scoreboard/Season 8'
            ]):
                continue
            date_time = time_from_template(template)
            if date_time is not None:
                template.add('dst', date_time.dst)


if __name__ == '__main__':
    credentials = AuthCredentials(user_file='bot')
    site = EsportsClient('lol', credentials=credentials)  # Set wiki
    PageModifier(site,
                 page_list=site.pages_using('Scoreboard/Button'),
                 summary="Fix dst").run()
Example #8
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
from mwclient.page import Page

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('lol', credentials=credentials, max_retries=10)
summary = "Updating with category & licensing"

# Ran this after item_pages_move.py

for page in site.client.categories['Items']:
	page: Page
	name: str = page.name.replace(' (Item)', '')
	print('Starting {}'.format(name))
	file = site.client.pages['File:ItemSquare{}.png'.format(name)]
	if not file.exists:
		continue
	text = file.text()
	if 'redirect' in text:
		continue
	if '[[category:' not in text.lower():
		text = text + '\n[[Category:Item Icons]]'
	if '{{fairuse}}' not in text.lower():
		text = text + '\n{{Fairuse}}'
		
	# Fix a stupid mistake I had where I accidentally was duplicating the {{Fairuse}} template before
	text = text.replace('{{Fairuse}}\n{{Fairuse}}', '{{Fairuse}}')
	site.save(file, text, summary=summary)
Example #9
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
import mwparserfromhell, datetime

limit = -1

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('fortnite-esports', credentials=credentials)  # Set wiki
summary = 'Automatically setting active/inactive status'


def change_active_status(result, status):
    for p in result:
        text = p.text()
        wikitext = mwparserfromhell.parse(text)
        for tl in wikitext.filter_templates():
            if tl.name.matches('Infobox Player'):
                tl.add('isinactive', status)
        newtext = str(wikitext)
        if newtext != text:
            p.save(newtext, summary=summary)


now = datetime.datetime.now()
then = now - datetime.timedelta(days=6 * 28)

result = site.cargo_client.page_list(
    tables=
    'Tournaments=T,TournamentResults=Res, TournamentResults__RosterLinks=RL,PlayerRedirects=PR,Players=P',
    join_on=
    'T._pageName=Res.OverviewPage,Res._ID=RL._rowID,RL._value=PR.AllName,PR._pageName=P._pageName',
    where='P.IsInactive="1"',
Example #10
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
import mwparserfromhell, re

credentials = AuthCredentials(user_file="me")
site = EsportsClient('cod-esports', credentials=credentials)  #  set wiki
summary = 'Changing links/display to be just 1 field, with link only'  # Set summary

limit = -1
startat_page = None
print(startat_page)
#startat_page = 'Data:LCK/2018 Season/Summer Playoffs'
this_template = site.client.pages[
    'Template:Infobox Tournament']  # Set template
pages = this_template.embeddedin()

# with open('pages.txt', encoding="utf-8") as f:
# 	pages = f.readlines()

#pages = [site.client.pages['Data:Challengers Korea/2019 Season/Spring Season']]

params = ['player', 'player1']


def links_to_display(template, param):
    if not template.has(param):
        return
    if (not template.has(param + 'link')) and (
            not template.has(param + 'links')):
        return
    suffix = 'link'
Example #11
# get_rune_dict, get_champ_dict, get_id_token, and print_if_not_silent, plus the
# re/requests imports this function uses, are defined earlier in the full script.
def scrape(esc: EsportsClient, events, force):
    player_data_keys = [
        "perkPrimaryStyle", "perkSubStyle", "perk0", "perk1", "perk2", "perk3",
        "perk4", "perk5", "statPerk0", "statPerk1", "statPerk2"
    ]
    player_positions = ['Top', 'Jungle', 'Mid', 'ADC', 'Support']
    rune_dict = get_rune_dict()
    champ_dict = get_champ_dict()
    print_if_not_silent(events)
    with open('mh_riot_endpoint.txt') as f:
        mh_riot_endpoint = f.read().strip()
    mh_riot_token = get_id_token()
    for page_to_query in events:
        print_if_not_silent(page_to_query)
        result = esc.client.api(
            'cargoquery',
            format="json",
            limit='max',
            tables="MatchScheduleGame=MSG,MatchSchedule=MS",
            fields="MSG.OverviewPage,MSG.MatchHistory",
            where=(r'MSG._pageName="%s" AND MSG.MatchHistory IS NOT NULL'
                   r' AND NOT MSG.MatchHistory RLIKE ".*(lpl|lol)\.qq\.com.*"')
            % page_to_query,
            join_on="MSG.UniqueMatch=MS.UniqueMatch",
            order_by="MS.N_Page,MS.N_MatchInPage, MSG.N_GameInMatch")
        if result['cargoquery']:
            event = result['cargoquery'][0]['title']['OverviewPage']
            suffix = ''
            page_start = page_to_query.replace('Data:', '')
            if page_start != event:
                suffix = page_start.replace(event, '')
            errors_http = []
            errors_key = []
            page_name = event + '/Runes' + suffix
            page = esc.client.pages[page_name]
            text = page.text()
            text_tbl = []
            if text != "":
                text_tbl = text.split('\n')
                intro = text_tbl.pop(0) + '\n' + text_tbl.pop(0)
            else:
                overview_page = esc.client.pages[event]
                overview_text = overview_page.text()
                overview_text_tbl = overview_text.split('\n')
                tabs = overview_text_tbl[0]
                intro = tabs + '\n{{RunesStart2019}}'
            lines = [intro]
            for i, cargo_game in enumerate(result['cargoquery']):
                mh = (cargo_game['title']['MatchHistory']).replace(
                    '&amp;', '&')
                print_if_not_silent(mh)
                location = re.match(r'.*details/([^&]*)', mh)[1]
                if len(text_tbl) > 10 * i and (
                        location in text_tbl[10 * i]) and not force:
                    for j in range(0, 10):
                        lines.append(text_tbl[j + 10 * i])
                else:
                    print_if_not_silent('Querying match %s' % mh)
                    json_loc = mh_riot_endpoint + location
                    try:
                        game = requests.Session().get(json_loc,
                                                      cookies={
                                                          'id_token':
                                                          mh_riot_token
                                                      }).json()
                        full_patch = game['gameVersion']
                        patch_tbl = full_patch.split('.')
                        patch = str(patch_tbl[0] + '.' + patch_tbl[1])
                        for j in range(0, 10):
                            player_name = game['participantIdentities'][j][
                                'player']['summonerName']
                            try:
                                player_team = re.match('^(.+?) (.*)',
                                                       player_name)[1]
                                player_name = re.match('^(.+?) (.*)',
                                                       player_name)[2]
                            except Exception:
                                player_team = ''
                            player_champion_n = game['participants'][j][
                                'championId']
                            player_champion = champ_dict.get(
                                player_champion_n, str(player_champion_n))
                            player_position = player_positions[j % 5]
                            this_player = [
                                player_name, player_team, player_champion,
                                player_position
                            ]
                            for key in player_data_keys:
                                rune_key = game['participants'][j]['stats'][
                                    key]
                                rune_output = rune_dict.get(rune_key, rune_key)
                                this_player.append(rune_output)
                            this_player_output = '{{RunesLine2019|' + (
                                '|'.join(this_player)
                            ) + '|patch=' + patch + '|mh=' + location + '}}'
                            lines.append(this_player_output)
                            text_tbl.insert(10 * i + j, '')
                    except (requests.RequestException, ValueError):
                        # connection failures or unparseable JSON; requests raises
                        # RequestException here, not urllib's HTTPError
                        errors_http.append(mh)
                    except KeyError:
                        errors_key.append(mh)
            lines.append('{{RunesEnd}}')
            new_text = '\n'.join(lines)
            if new_text != text and len(lines) > 3:
                print_if_not_silent('Saving page %s...' % page_name)
                esc.save(page,
                         new_text,
                         summary='Automatically updating Runes (python)')
            else:
                print_if_not_silent('Skipping page %s, no changes' % page_name)
            error_text = ''
            for e in errors_http:
                error_text = error_text + ' <br>\n' + page_to_query + ': ' + e + ' (HTTP)'
            for e in errors_key:
                error_text = error_text + '\n' + e + ' (Key)'
            if error_text != '':
                error_page = esc.client.pages[
                    'User:RheingoldRiver/Rune Errors']
                esc.save(error_page,
                         error_text,
                         summary='Reporting a Rune Error')
Example #12
# Shares helpers with Example #11; also relies on urllib.request, json, copy, re,
# and the get_team_names/get_player_data/get_this_teamname helpers from the full script.
def scrapeLPL(esc: EsportsClient, events, force):
    player_positions = ['Top', 'Jungle', 'Mid', 'ADC', 'Support']
    rune_dict = get_rune_dict()
    champ_dict = get_champ_dict()
    please_escape = False
    with open('mh_qq_endpoint.txt') as f:
        mh_qq_endpoint = f.readlines()
    mh_qq_endpoint = [_.strip() for _ in mh_qq_endpoint]
    for page_to_query in events:
        if please_escape:
            break
        print_if_not_silent(page_to_query)
        result = esc.client.api(
            'cargoquery',
            format="json",
            limit='max',
            tables="MatchScheduleGame=MSG,MatchSchedule=MS",
            fields="MSG.OverviewPage,MSG.MatchHistory",
            where=(r'MSG._pageName="%s" AND MSG.MatchHistory IS NOT NULL'
                   r' AND MSG.MatchHistory RLIKE ".*(lpl|lol)\.qq\.com.*"') %
            page_to_query,
            join_on="MSG.UniqueMatch=MS.UniqueMatch",
            order_by="MS.N_Page,MS.N_MatchInPage, MSG.N_GameInMatch",
            group_by='MSG.MatchHistory')
        if not result['cargoquery']:
            continue
        event = result['cargoquery'][0]['title']['OverviewPage']
        suffix = ''
        page_start = page_to_query.replace('Data:', '')
        if page_start != event:
            suffix = page_start.replace(event, '')
        errors_http = []
        errors_key = []
        page_name = event + '/Runes' + suffix
        page = esc.client.pages[page_name]
        text = page.text()
        text_tbl = []
        team_keys = ['left', 'right']
        if text != "":
            text_tbl = text.split('\n')
            intro = text_tbl.pop(0) + '\n' + text_tbl.pop(0)
        else:
            overview_page = esc.client.pages[event]
            overview_text = overview_page.text()
            overview_text_tbl = overview_text.split('\n')
            tabs = overview_text_tbl[0]
            intro = tabs + '\n{{RunesStart2019}}'
        lines = [intro]
        counter = 0
        for i, cargo_game in enumerate(result['cargoquery']):
            # lmt += 1
            # if lmt == 2:
            # 	please_escape = True
            # 	break
            mh = (cargo_game['title']['MatchHistory']).replace('&amp;', '&')
            print_if_not_silent(mh)
            location = re.match(r'.*bmid=([0-9]*)', mh)[1]
            if len(text_tbl) > 10 * counter and (
                    location in text_tbl[10 * counter]) and not force:
                # skip every consecutive already-saved block for this match
                while len(text_tbl) > 10 * counter and (
                        location in text_tbl[10 * counter]):
                    print_if_not_silent('Skipping %s' % location)
                    for j in range(0, 10):
                        lines.append(text_tbl[j + 10 * counter])
                    counter += 1
            else:
                print_if_not_silent('Querying match %s' % mh)
                json_loc = mh_qq_endpoint[0] + location
                print_if_not_silent(json_loc)
                try:
                    with urllib.request.urlopen(json_loc) as url:
                        series = json.loads(url.read().decode())
                    for game in series['msg']:
                        counter = counter + 1
                        gameId = game['sMatchId']
                        json_loc_2 = mh_qq_endpoint[1] + gameId
                        print_if_not_silent(json_loc_2)
                        with urllib.request.urlopen(json_loc_2) as url:
                            game = json.loads(url.read().decode())
                        areaId = game['msg']['sMatchInfo']['AreaId']
                        battleId = game['msg']['battleInfo']['BattleId']
                        json_loc_3 = mh_qq_endpoint[
                            2] + battleId + mh_qq_endpoint[3] + areaId
                        print_if_not_silent(json_loc_3)
                        with urllib.request.urlopen(json_loc_3) as url:
                            worldLookup = json.loads(url.read().decode())
                        worldLookupJSON = json.loads(worldLookup['msg'])
                        has_runes = True
                        rune_data = {}
                        if worldLookupJSON['battle_count_'] == 0:
                            has_runes = False
                        else:
                            worldId = worldLookupJSON['battle_list_'][0][
                                'world_']
                            json_loc_4 = mh_qq_endpoint[4] + str(
                                worldId) + mh_qq_endpoint[5] + str(battleId)
                            print_if_not_silent(json_loc_4)
                            with urllib.request.urlopen(json_loc_4) as url:
                                rune_data_unsorted_init = json.loads(
                                    url.read().decode())
                            rune_data_unsorted = json.loads(
                                rune_data_unsorted_init['msg'])
                            for p in range(0, 10):
                                rune_key = rune_data_unsorted['hero_list_'][p][
                                    'hero_id_']
                                rune_data[int(rune_key)] = rune_data_unsorted[
                                    'hero_list_'][p].copy()
                        patch = ''  # no endpoint seems to expose the patch, so leave it blank
                        teamnames = get_team_names(game)
                        game_data = json.loads(
                            game['msg']['battleInfo']['BattleData'])
                        j = 0
                        for j in range(0, 10):
                            rune_data_this = {}
                            if has_runes:
                                rune_data_this = copy.deepcopy(rune_data)
                            player_data = get_player_data(
                                game_data, team_keys, j)
                            player_champion_n = int(player_data['hero'])
                            player_name = player_data['name']
                            player_team = get_this_teamname(
                                teamnames, team_keys, j)
                            player_name = player_name.replace(player_team, '')
                            player_champion = champ_dict.get(
                                player_champion_n, str(player_champion_n))
                            player_position = player_positions[j % 5]
                            this_player = [
                                player_name, player_team, player_champion,
                                player_position
                            ]
                            if has_runes:
                                player_rune_data = rune_data_this[
                                    player_champion_n]['runes_info_'][
                                        'runes_list_'].copy()
                                this_rune_id = ''
                                this_player.append(rune_dict['trees'][
                                    player_rune_data[0]['runes_id_']])
                                for _ in range(0, 5):
                                    this_rune = player_rune_data.pop(0)
                                    this_rune_id = this_rune['runes_id_']
                                    rune_output = rune_dict.get(
                                        this_rune_id, this_rune_id)
                                    this_player.append(rune_output)
                                this_player.insert(
                                    5, rune_dict['trees'][int(this_rune_id)])
                                stat_runes = player_rune_data.copy()
                                while stat_runes:
                                    if stat_runes[0]['runes_num_'] == 1:
                                        this_rune = stat_runes.pop(0)
                                    else:
                                        this_rune = stat_runes[0]
                                        stat_runes[0]['runes_num_'] -= 1
                                    this_rune_id = this_rune['runes_id_']
                                    rune_output = rune_dict.get(
                                        this_rune_id, this_rune_id)
                                    this_player.append(rune_output)
                            this_player_output = '{{RunesLineLPL2019|' + (
                                '|'.join(this_player)
                            ) + '|patch=' + patch + '|mh=' + location + '}}'
                            lines.append(this_player_output)
                        text_tbl.insert(10 * i + j, '')
                except urllib.error.HTTPError:
                    errors_http.append(mh)
                except KeyError:
                    errors_key.append(mh)
                except Exception as e:
                    print_if_not_silent(e)
        lines.append('{{RunesEnd}}')
        new_text = '\n'.join(lines)
        if new_text != text and len(lines) > 3:
            print_if_not_silent('Saving page %s...' % page_name)
            esc.save(page,
                     new_text,
                     summary='Automatically updating Runes (python)')
        else:
            print_if_not_silent('Skipping page %s, no changes' % page_name)
        error_text = ''
        for e in errors_http:
            error_text = error_text + ' <br>\n' + page_to_query + ': ' + e + ' (HTTP)'
        for e in errors_key:
            error_text = error_text + '\n' + e + ' (Key)'
        if error_text != '':
            error_page = esc.client.pages['User:RheingoldRiver/Rune Errors']
            esc.save(error_page, error_text, summary='Reporting a Rune Error')
Example #13


if __name__ == '__main__':
    SILENT = False
    credentials = AuthCredentials(user_file="me")
    esc_main = EsportsClient('lol', credentials=credentials)  # Set wiki
    pages = ['Data:Prime League Pro Division/2021 Season/Spring Promotion']
    scrape(esc_main, pages, False)
    # scrapeLPL(esc_main, pages, False)
Example #14
import time

from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
from river_mwclient.page_modifier import PageModifierBase
from mwparserfromhell.nodes.template import Template
from mwparserfromhell import parse

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('lol', credentials=credentials)  # Set wiki
summary = 'Use ChromaSet'  # Set summary

param_lookup = {
	'': '1'
}

def add_param(new_chroma, tl, s, param):
	old_param = 'chroma' + s + param
	if not tl.has(old_param):
		return
	new_param = param_lookup.get(param, param)
	new_chroma.add(new_param, tl.get(old_param).value.strip())


def add_special_param(new_chroma, tl, s):
	old_param = 'special' + s
	if not tl.has(old_param):
		return
	special = tl.get(old_param).value
	for template in special.filter_templates():
		if template.name.matches('abbr'):
Example #15
    # update_and_save is a method of the PointUpdater class; the class header and
    # its imports (mwparserfromhell, EditError) are cut off above
    def update_and_save(self, page, lookup):
        text = page.text()
        wikitext = mwparserfromhell.parse(text)
        for template in wikitext.filter_templates():
            if template.name.matches(['Listplayer/Current']):
                player = template.get('1').value.strip()
                if player not in lookup:
                    template.add('squad', '')
                    continue
                template.add('squad', lookup[player])

        newtext = str(wikitext)
        if text != newtext:
            # print('Saving page %s...' % page.name)
            try:
                self.site.save(page, newtext, summary=self.SUMMARY)
            except EditError:
                self.site.log_error_content(
                    page.name, 'Spam filter prohibited squad point update')
        # else: nothing changed, skip saving


if __name__ == '__main__':
    credentials = AuthCredentials(user_file="me")
    fn_site = EsportsClient('fortnite', credentials=credentials)  # set wiki
    point_updater = PointUpdater(fn_site)
    point_updater.run()
Example #16
import urllib.request, time, sprite_creator, io, os
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
import re

SUFFIX = ''
SPRITE_NAME = 'Champion'
IMAGE_DIR = SPRITE_NAME + ' Images'
TEAM_DATA_FILE_LOCATION = SPRITE_NAME + 'Sprite' + SUFFIX + '.txt'
limit = -1
startat = None

credentials = AuthCredentials(user_file="me")
site = EsportsClient('commons', credentials=credentials) #  set wiki
site_lol = EsportsClient('lol', credentials=credentials) #  set wiki

def get_champion_name(file_name):
	return file_name.replace('Square', '').replace('.png', '').replace('File:', '')

pattern = r'.*src\=\"(.+?)\".*'
cat = site_lol.client.categories['Champions']
for page in cat:
	to_parse_text = '[[File:%sSquare.png|link=]]' % page.name
	result = site_lol.client.api('parse', title = 'Main Page', text = to_parse_text, disablelimitreport = 1)
	parse_result_text = result['parse']['text']['*']
	url = re.match(pattern, parse_result_text)[1]
	image = urllib.request.urlopen(url).read()
	# image = Image.open(io.BytesIO(urllib.request.urlopen(url).read()))
	champion = get_champion_name(page.name)
	image_path = IMAGE_DIR + '/' + champion + '.png'
	print(image_path)
Example #17
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials

credentials = AuthCredentials(user_file="me")
archive = EsportsClient('lol-archive', credentials=credentials)  #  set wiki
live = EsportsClient('lol', credentials=credentials)  #  set wiki

pages = []

for page in archive.client.allpages(namespace=0):
    pages.append((page.name, live.client.pages[page.name].exists))

text = []

for p in pages:
    text.append('{}\t{}'.format(p[0], str(p[1])))

with open('archive_pages.txt', 'w+', encoding="utf-8") as f:
    f.write('\n'.join(text))
Example #18
import time
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
from mwclient.page import Page

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('lol', credentials=credentials, max_retries=10)
summary = "Moving item pages"

for page in site.client.categories['Items']:
    page: Page
    name: str = page.name
    print(name)
    if not name.endswith('(Item)'):
        continue
    if page.namespace != 0:
        continue
    clean_name = page.name.replace(' (Item)', '')
    clean_page = site.client.pages[clean_name]
    clean_page_text = clean_page.text()
    if clean_page.exists and 'redirect' not in clean_page_text.lower():
        continue
    print('Moving to {}'.format(clean_name))
    # Delete and then move because the redirect has a history so we get an error, even with ignore_warnings
    if clean_page.exists:
        site.delete(clean_page)
    mh_page: Page = site.client.pages[name + '/Match History']
    if mh_page.exists:
        clean_mh_page = site.client.pages[clean_name + '/Match History']
        if clean_mh_page.exists:
            site.delete(clean_mh_page)
Example #19
import re, mwparserfromhell
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials

interval = 180

pattern_add = r'Hook.add\([\'"](\w+).*\)'
pattern_run = r'Hook.run\([\'"](\w+).*\)'

credentials = AuthCredentials(user_file="me")
site = EsportsClient('lol', credentials=credentials)  # Set wiki

revisions = site.recentchanges_by_interval(interval, toponly=1)


def add_missing_params(template, params_to_add):
    n = 0
    for param in template.params:
        n += 1
        param_str = str(param.value)
        if param_str in params_to_add:
            params_to_add.remove(param_str)
    for param in params_to_add:
        n += 1
        template.add(n, param)
    params_to_add.clear()


def add_new_template(text, template_name, params):
    if not len(params):
        return text
    template = mwparserfromhell.nodes.Template(template_name)
Example #20
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
from river_mwclient.extended_page import ExtendedPage

credentials = AuthCredentials(user_file="me")
site = EsportsClient('fortnite', credentials=credentials) #  set wiki

rc = site.recentchanges_by_interval(12 * 60, toponly=1)

data_pages = []

for p in rc:
	if p['title'].startswith('Data:'):
		data_pages.append(p['title'])

where = ' OR '.join(['TR._pageName="%s"' % _ for _ in data_pages])

players = site.cargo_client.page_list(
	tables="TournamentResults=TR,TournamentResults__RosterLinks=RL,_pageData=pd",
	join_on="TR._ID=RL._rowID, RL._value=pd._pageName",
	where='(%s) AND RL._rowID IS NOT NULL AND pd._pageName IS NOT NULL' % where,
	fields="RL._value=player"
)

for player in ExtendedPage.extend_pages(players):
	player.touch(check_existence=True)

# purge PR pages
for page in site.client.pages['Template:PRWiki'].embeddedin():
	page.purge()
Example #21
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
from river_mwclient.wiki_client import WikiClient
from river_mwclient.gamepedia_client import GamepediaClient
from river_mwclient.template_modifier import TemplateModifierBase
from mwparserfromhell.nodes import Template
import mwparserfromhell

credentials = AuthCredentials(user_file="me")
cod_wiki = EsportsClient('cod-esports', credentials=credentials)
target_wiki = WikiClient(url='https://river-sandbox.fandom.com',
                         path='/',
                         credentials=credentials)
summary = 'Bot edit'

fl_page = cod_wiki.client.pages[
    'Call of Duty Esports Wiki:Featured Leagues/Call of Duty League/2021 Season']
fl_page_text = fl_page.text()
template_text = None
for template in mwparserfromhell.parse(fl_page_text).filter_templates():
    template: Template
    if template.name.matches('Standings'):
        template_text = str(template)
        break
parsed_text_result = cod_wiki.client.api('expandtemplates',
                                         prop='wikitext',
                                         text=template_text)
parsed_text = parsed_text_result['expandtemplates']['wikitext']

parsed_text = parsed_text.replace('wikitable2', 'wikitable')
Example #22
import urllib.request, time, sprite_creator, io, os
import re
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials

SUFFIX = ''
SPRITE_NAME = 'SmiteRole'
IMAGE_DIR = 'Sprites/' + SPRITE_NAME + ' Images'
TEAM_DATA_FILE_LOCATION = SPRITE_NAME + 'Sprite' + SUFFIX + '.txt'
FILE_TYPE = 'png'
limit = -1
startat = None

credentials = AuthCredentials(user_file="me")
site = EsportsClient('smite', credentials=credentials) #  set wiki
site_lol = EsportsClient('lol', credentials=credentials) #  set wiki

if not os.path.exists(IMAGE_DIR):
	os.makedirs(IMAGE_DIR)

def get_role_name(file_name):
	return file_name.replace('.' + FILE_TYPE, '').replace('File:', '').replace('Square', '')

pattern = r'.*src\=\"(.+?)\".*'
cat = site.client.categories['Role Icons']
for page in cat:
	to_parse_text = '[[%s|link=]]' % page.name
	result = site.client.api('parse', title = 'Main Page', text = to_parse_text, disablelimitreport = 1)
	parse_result_text = result['parse']['text']['*']
	url = re.match(pattern, parse_result_text)[1]
	image = urllib.request.urlopen(url).read()
Example #23
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
from river_mwclient.template_modifier import TemplateModifierBase
from mwparserfromhell.nodes import Template

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('fortnite-esports', credentials=credentials)
summary = 'Remove wrong fortnite ids'


class TemplateModifier(TemplateModifierBase):
    def update_template(self, template: Template):
        if template.has('fortnite_id'):
            template.remove('fortnite_id')


page_list = site.cargo_client.page_list(tables="Players", where='FortniteID LIKE "%;%"', fields="OverviewPage", limit='max')

TemplateModifier(site, 'Infobox Player', page_list=page_list,
                 summary=summary).run()
Example #24
# (tail of a truncated helper that parses and saves a template page)
		template_page.save(str(wikitext))

def make_templates(this):
	template_prefix = lookup[this]["template_prefix"]
	navbox_template = lookup[this]["navbox_template"]
	data_prefix = lookup[this]["data_prefix"]
	summary = 'Initializing %s Pages' % template_prefix  # Set summary
	site.client.pages['Template:%sOverview' % template_prefix].save(overview_text % navbox_template, summary=summary)
	site.client.pages['Template:%s/End' % template_prefix].save(end_text % template_prefix, summary=summary)
	site.client.pages['Template:%s/Date' % template_prefix].save(date_text % template_prefix, summary=summary)
	site.client.pages['Template:%s Navbox' % navbox_template].save(navbox_text % data_prefix, summary=summary)
	site.client.pages['Template:%s/Start' % template_prefix].save(start_text % template_prefix, summary=summary)

def check_and_make_redirects(d, page_prefix, redirect_text):
	weekday_index = d
	for i in range(0, 6):
		weekday_index += timedelta(days=1)
		y = weekday_index.year
		m = '{:02d}'.format(weekday_index.month)
		day = '{:02d}'.format(weekday_index.day)
		site.client.pages[page_prefix + '{}-{}-{}'.format(y, m, day)].save(redirect_text)


if __name__ == "__main__":
	page_type = 'news'
	years = range(2010,2020)
	credentials = AuthCredentials(user_file="bot")
	site = EsportsClient('valorant', credentials=credentials)  # Set wiki
	make_templates(page_type)
	make_data_pages(years, page_type)
Example #25
import time
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
from mwclient.page import Page

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('lol', credentials=credentials, max_retries=10)
summary = "Moving item pages"

# really, more like, skipped...
failed = []

# Goal: Move all item files from File:itemname.png to File:ItemSquareitemname.png
# Discovery is by looking at pages in Category:Items and finding files named
# after these pages, because our item files aren't all properly categorized
# After doing this I'm going to run a separate script to properly categorize
# all of the item files (probably I should have done that at the same time but meh lol)

try:
    for page in site.client.categories['Items']:
        page: Page
        name: str = page.name.replace(' (Item)', '')
        print('Starting {}'.format(name))
        file = site.client.pages['File:{}.png'.format(name)]
        if not file.exists:
            failed.append(name)
            continue
        if 'redirect' in file.text().lower():
            continue
        site.move(file, 'File:ItemSquare{}.png'.format(name))
except Exception as e:
    # handler truncated in the original; a minimal stand-in
    print(e)
Example #26
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
import mwparserfromhell, re

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('lol', credentials=credentials)  # Set wiki
summary = 'Attempting to parse old content as templates'  # Set summary

page_type = 'players' # tournament, players, teams

limit = -1
startat_page = None
print(startat_page)
#startat_page = 'YellOwStaR'
template_by_type = {
	'players' : 'Player',
	'teams' : 'Team',
	'tournament' : 'Tournament'
}
this_template = site.client.pages['Template:Infobox ' + template_by_type[page_type]]  # Set template
pages = this_template.embeddedin()

months = r'(Jan(?:uary)?|Feb(?:ruary)?|Mar(?:ch)?|Apr(?:il)?|May|Jun(?:e)?|Jul(?:y)?|Aug(?:ust)?|Sep(?:tember)?|Oct(?:ober)?|Nov(?:ember)?|Dec(?:ember)?)\.?'
date = r" *(\d+)(?:st|th|rd|nd)?[.,]? ?(?:\d\d\d\d,? ?)?(?: *\- *)?"
attrib_sep = r" ?(?:\([\dms]+\) )? ?(?: *\- *)?''"
attrib = r'(?: *\- *)?(with|from|by|From|By|With)'
regex = r"^\* ?" + months + date + r"\[(.+?) ([^\]]*)\]" + attrib_sep + attrib + r" (.+?) on (.*)'' *$"
no_author = r"^\* ?" + months + date + r"\[(.+?) ([^\]]*)\]" + attrib_sep + attrib + r" (.+?)'' *$"
translator = r"^\* ?" + months + date + r"\[(.+?) ([^\]]*)\]" + attrib_sep + '(translated by)' + r" (.+?) on (.*)'' *$"

passed_startat = False if startat_page else True
lmt = 0
Example #27
import time
from mwclient.errors import ProtectedPageError

from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
import csv
import mwparserfromhell

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('fortnite-esports', credentials=credentials)
summary = 'Bot edit'

with open('SA_IDs.csv', 'r') as f:
    id_reader = csv.reader(f)
    for row in id_reader:
        print(row)
        page = site.client.pages[row[1]]
        if not page.exists:
            continue
        text = page.text()
        wikitext = mwparserfromhell.parse(text)
        for template in wikitext.filter_templates():
            if template.name.matches('Infobox Player'):
                template.add('fortnite_id', row[0])
        new_text = str(wikitext)
        if text != new_text:
            time.sleep(1)
            print('Saving page %s....' % page.name)
            try:
                site.save(page, new_text, summary="Adding fortnite ID")
            except ProtectedPageError:
                pass  # handler truncated in the original; skip protected pages
Example #28
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
from mwclient.page import Page
import mwparserfromhell

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('lol', credentials=credentials, max_retries=10)
summary = "Create speculated item redirect"

# Recently I did a bunch of file rearranging, but it was kinda incomplete in making *all* of the
# needed file redirects I needed, so I'm now trying to fix that

for item_page in site.pages_using('Infobox Item'):
	item_page: Page
	file_name = 'File:ItemSquare{}.png'.format(item_page.name)
	item = item_page.name
	
	item_page_text = item_page.text()
	
	# First, try to find out from the Infobox Item template on the page what the redirect should be.
	# This part of the script ended up creating some self-redirects I didn't want, because I
	# didn't realize people had unnecessarily filled in |image= with the name of the item,
	# so I should have added a check for that. But whatever, it didn't really create THAT
	# much extra work for myself.
	used_infobox = False
	for template in mwparserfromhell.parse(item_page_text).filter_templates():
		template: mwparserfromhell.nodes.Template
		if not template.name.matches('Infobox Item'):
			continue
		if template.has('image'):
			target = template.get('image').value.strip()
Example #29
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
from river_mwclient.gamepedia_client import GamepediaClient
from river_mwclient.wiki_client import WikiClient
from river_mwclient.errors import EsportsCacheKeyError
from river_mwclient.wiki_time_parser import time_from_str


credentials = AuthCredentials(user_file='me')

site = EsportsClient('lol', credentials=credentials)

# check schemes
site2 = WikiClient('https://wikisandbox.gamepedia.com')
site3 = WikiClient('http://wikisandbox.gamepedia.com', scheme='https', credentials=credentials)

# for ns in site.namespaces:
#     print(ns.name)

assert site.cache.get_disambiguated_player_from_event(
    'Claro Stars League/2021 Season/Opening Season', 'Luxor Gaming', 'Zeldris') == 'Zeldris (Christian Ticlavilca)'

assert site.cache.get_disambiguated_player_from_event(
    'El_Nexo/2020_Season/Split_1_Playoffs', 'Movistar Riders Academy', 'Marky'
) == 'Marky (Pedro José Serrano)'

# check fallback to Teams.Short
assert site.cache.get_team_from_event_tricode('GUL 2020 Closing Playoffs', 'MK') == 'Mad Kings'

assert site.cache.get_team_from_event_tricode('Worlds 2019 Main Event', 'SKT') == 'SK Telecom T1'
Example #30
import time
from river_mwclient.esports_client import EsportsClient
from river_mwclient.auth_credentials import AuthCredentials
from mwclient.page import Page
from mwclient.image import Image

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('lol', credentials=credentials, max_retries=10)
summary = "Updating with category & licensing"

# Delete unused file redirects to item squares
# These tend to collect over a while which isn't *really* a problem per se
# but does result in a ton of double redirects - after processing the most recent
# batch job, there were over 300 double redirects to fix, and I'd rather not
# have that happening, so I'm just going to delete all unused redirects now.

# For example, we had a bunch of Chinese names of items existing due to a
# failed experiment in the past to create scoreboards with Chinese item names
# entered in them - these redirects are totally unneeded at this point in time,
# and there's literally no reason to keep them around anymore.

for page in site.client.categories['Item Icons']:
	page: Image
	print('Starting page {}...'.format(page.name))
	for link in page.backlinks(redirect=True):
		print(link.name)
		link: Image
		if not link.redirect:
			continue
		i = 0