def main():
    """Scan recent NewsItems sources for Twitter links and report dead tweets.

    Queries NewsItems rows newer than ``check_from_date``, extracts every
    twitter.com source link, looks the tweet up via the Twitter API, and
    logs "tweet not found" (or other API errors) to the wiki error report.
    """
    twitter_client = tweepy.Client(
        bearer_token=os.environ.get('TWITTER_BEARER_TOKEN'))
    credentials = AuthCredentials(user_file="me")
    site = EsportsClient("lol", credentials=credentials)

    response = site.cargo_client.query(
        tables="NewsItems=NI",
        fields="NI.Source, NI._pageName=pageName, NI.N_LineInDate",
        where='NI.Source IS NOT NULL AND NI.Date_Sort >= "{}"'.format(
            check_from_date),
        order_by="NI.Date_Sort DESC")

    for item in response:
        # Sources are ":::"-delimited; each source is ";;;"-delimited as
        # link;;;...;;;domain (see the index-2 check below).
        source_list = item["Source"].split(":::")
        data_page = item["pageName"]
        line_in_date = item["N LineInDate"]
        for source_string in source_list:
            if not source_string:
                continue
            source = source_string.split(";;;")
            link = source[0]
            # Guard malformed entries (fewer than 3 fields) instead of
            # raising IndexError, then keep only twitter.com sources.
            if len(source) < 3 or source[2] != "twitter.com":
                continue

            # BUG FIX: re.search returns None when there is no match, so the
            # original `re.search(...)[1]` raised TypeError and the
            # "Can't get tweet id" branch was unreachable. Check the match
            # object before subscripting it.
            tweet_id_match = re.search(r"status/([0-9]+)", link)
            if not tweet_id_match:
                site.log_error_content("Can't get tweet id",
                                       text="Link: {0}".format(link))
                continue
            tweet_id = tweet_id_match[1]
            try:
                r = twitter_client.get_tweet(tweet_id)
            except tweepy.TooManyRequests:
                # Rate limited: back off once and retry.
                time.sleep(30)
                r = twitter_client.get_tweet(tweet_id)

            # No errors means the tweet still exists; nothing to report.
            if not r.errors:
                continue
            if r.errors[0]["title"] == TWEET_NOT_FOUND_ERROR:
                site.log_error_content(
                    f"{data_page}",
                    text=f"Tweet not found! Link: {link} - Line {line_in_date}"
                )
            else:
                site.log_error_content(
                    "Failure trying to get tweet",
                    text=
                    "Other error! Link: {0}, Status Id: {1}, Error title: {2}".
                    format(str(link), str(tweet_id),
                           str(r.errors[0]["title"])))

    site.report_all_errors("Deleted Tweets")
Ejemplo n.º 2
0
def main():
    """Backfill missing ACS match data for scheduled games.

    Queries MatchScheduleGame rows that have a matchhistory link but no
    AcsMetadata row, fetches the game and timeline JSON from ACS, and saves
    them as "V4 data:" / "V4 metadata:" wiki pages. HTTP failures are
    appended to acs_errors.txt and the game is skipped.
    """

    credentials = AuthCredentials(user_file='bot')
    site = EsportsClient('lol', credentials=credentials)
    acs = ACS()

    # ACS.GameId IS NULL on the left join = games we have not scraped yet.
    result = site.cargo_client.query(
        tables='MatchScheduleGame=MSG,AcsMetadata=ACS',
        join_on='MSG.GameId=ACS.GameId',
        fields=
        'MSG.MatchHistory=MatchHistory, MSG.GameId=GameId, MSG.OverviewPage=OverviewPage, MSG.MatchId=MatchId, MSG.N_GameInMatch=N_GameInMatch, MSG._pageName=Page',
        where='MatchHistory LIKE "%matchhistory%" AND ACS.GameId IS NULL')

    # NOTE(review): passed_startat starts True, so the startat checkpoint
    # below never skips anything — presumably flipped to False by hand when
    # resuming a crashed run; confirm before changing.
    passed_startat = True
    startat = 'FRA1TMNT1 210419'

    for game in result:
        # Two link formats: newer links carry a gameHash query parameter,
        # older ones are just realm/gameId.
        if 'gameHash' in game['MatchHistory']:
            re_match = re.match(
                r'^.*match-details/(.+?)/(.+?)\?gameHash=(.+?)(?:&a?m?p?;?tab=.*)?$',
                game['MatchHistory'])
            realm = re_match[1]
            game_id = re_match[2]
            game_hash = re_match[3]
        else:
            re_match = re.match(r'^.*match-details/(.+?)/([^/]*).*$',
                                game['MatchHistory'])
            realm = re_match[1]
            game_id = re_match[2]
            game_hash = None
        # "realm gameId" uniquely identifies a game; used for resume logic.
        fingerprint = '{} {}'.format(realm, game_id)
        if fingerprint == startat:
            passed_startat = True
        if not passed_startat:
            continue
        print('Processing {} now, hash is {}...'.format(
            fingerprint, game_hash))
        try:
            site.save_title(
                'V4 data:{}_{}'.format(realm, game_id),
                json.dumps(acs.get_game(realm, game_id, game_hash)))

        except HTTPError:
            # Record the failure and skip this game entirely (no timeline,
            # no metadata) so it can be retried later.
            with open('acs_errors.txt', 'a') as f:
                f.write('\n{} {} {}'.format(realm, game_id, game_hash))
            print(game['MatchHistory'])
            print(game['Page'])
            continue
        try:
            site.save_title(
                'V4 data:{}_{}/Timeline'.format(realm, game_id),
                json.dumps(acs.get_game_timeline(realm, game_id, game_hash)))
        except HTTPError:
            # Timeline is best-effort: log and still save the metadata below.
            print(game['MatchHistory'])
            print(game['Page'])
            pass
        site.save_title('V4 metadata:{}_{}'.format(realm, game_id),
                        get_metadata(game, realm, game_id, game_hash))
Ejemplo n.º 3
0
async def updatestats(site: EsportsClient,
                      section: str,
                      version: Optional[str] = None):
    """Fetch ddragon data for `section` and run the matching infobox updater.

    When no version is supplied, the newest ddragon release is used; a bare
    "major.minor" version string is padded with a trailing ".1".
    """
    async with aiohttp.ClientSession() as session:
        if version is None:
            # The first entry of the versions manifest is the newest release.
            async with session.get(DDRAGON_V) as resp:
                version = json.loads(await resp.text())[0]
        else:
            if not re.match(r'\d+\.\d+\.\d+', version):
                version += ".1"
        async with session.get(DDRAGON.format(version, section)) as resp:
            data = json.loads(await resp.text())['data']

    summary = "{} Update for {}".format(section.title(), version)
    tm = TEMPLATE_MODIFIERS[section](site,
                                     "Infobox " + section.title(),
                                     data=data,
                                     summary=summary)

    # The template modifier is synchronous; run it off the event-loop thread.
    loop = asyncio.get_event_loop()
    await loop.run_in_executor(None, tm.run)
    site.report_all_errors('patchupdate')
 async def _do_the_thing(self, ctx, the_thing, *args):
     """Run a wiki task class `the_thing(site, *args).run()` from a Discord command.

     Logs in with the invoking user's credentials, announces start/finish in
     the channel, and reports a friendly message if the site times out.
     """
     await ctx.send('Okay, starting now!')
     credentials = await utils.get_credentials(ctx, self.bot)
     # Low retry counts so a broken site fails fast instead of hanging the bot.
     site = EsportsClient('lol', credentials=credentials,
                          max_retries_mwc=0,
                          max_retries=2, retry_interval=10)
     try:
         async with StatusManager(self.bot):
             # NOTE(review): the task's run() is synchronous and not awaited —
             # it blocks the event loop while it executes; confirm intended.
             the_thing(site, *args).run()
     except ReadTimeout:
         return await ctx.send('Whoops, the site is taking too long to respond, try again later')
     await ctx.send('Okay, done!')
Ejemplo n.º 5
0
def check_results(site: EsportsClient, title):
    """Return roster rows whose TournamentResults team differs from Players.Team.

    Joins tournament roster links through PlayerRedirects to the Players
    table and keeps rows on the given tournament's Data: page where the
    recorded result team disagrees with the player's current team.
    """
    title = site.target(title)
    return site.cargo_client.query(
        tables='TournamentResults=TR,TournamentResults__RosterLinks=RL,PlayerRedirects=PR,Players=P',
        join_on='TR._ID=RL._rowID,RL._value=PR.AllName,PR._pageName=P._pageName',
        fields='TR.Team=Team,RL._value=Player',
        where='TR.Team != P.Team AND TR._pageName="Data:{}"'.format(title))
 async def autorosters(self, ctx, *, overview_page):
     """Generate team rosters for the specified tournament"""
     await ctx.send('Okay, starting now!')
     credentials = await utils.get_credentials(ctx, self.bot)
     # Fail fast on site problems rather than blocking the bot on retries.
     site = EsportsClient('lol',
                          credentials=credentials,
                          max_retries_mwc=0,
                          max_retries=2,
                          retry_interval=10)
     # Resolve redirects so we operate on the canonical tournament page name.
     overview_page = site.cache.get_target(overview_page)
     if not site.client.pages[overview_page].exists:
         return await ctx.send('The tournament page does not exist!')
     AutoRostersRunner(site, overview_page).run()
     # Bot accounts log in as "User@botname"; keep only the user part for URLs.
     username = site.credentials.username
     username = username.split('@')[0] if "@" in username else username
     sandbox_page = f"\nhttps://lol.fandom.com/wiki/User:{username}/Team_Rosters_Sandbox".replace(
         " ", "_")
     rosters_page = f"\nhttps://lol.fandom.com/wiki/{overview_page}/Team_Rosters".replace(
         " ", "_")
     await ctx.send(
         'Okay, done! **Remember the generated content has no coaches!**')
     await ctx.send(
         f'Here is the sandbox page with the new content: {sandbox_page}')
     await ctx.send(f'Here is where you should copy it: {rosters_page}')
import urllib.request, time, sprite_creator, io, os
from mwrogue.esports_client import EsportsClient
from mwcleric.auth_credentials import AuthCredentials
import re

# Configuration for building the Champion sprite sheet.
SUFFIX = ''
SPRITE_NAME = 'Champion'
IMAGE_DIR = SPRITE_NAME + ' Images'  # local folder for downloaded images
TEAM_DATA_FILE_LOCATION = SPRITE_NAME + 'Sprite' + SUFFIX + '.txt'
limit = -1  # -1 = process everything
startat = None

credentials = AuthCredentials(user_file="me")
site = EsportsClient('commons', credentials=credentials)  # sprite sheet lives on commons
site_lol = EsportsClient('lol', credentials=credentials)  # champion data comes from the LoL wiki

def get_country_name(file_name):
    """Strip the 'Square' marker, '.png' extension, and 'File:' prefix (in that order)."""
    for token in ('Square', '.png', 'File:'):
        file_name = file_name.replace(token, '')
    return file_name


pattern = r'.*src\=\"(.+?)\".*'
cat = site_lol.client.categories['Champions']
for page in cat:
    to_parse_text = '[[File:%sSquare.png|link=]]' % page.name
    result = site_lol.client.api('parse',
                                 title='Main Page',
                                 text=to_parse_text,
                                 disablelimitreport=1)
    parse_result_text = result['parse']['text']['*']
from mwrogue.esports_client import EsportsClient
from mwcleric.auth_credentials import AuthCredentials
limit = -1  # -1 = no page limit

# NOTE(review): unlike the other scripts here, credentials are passed as
# user_file directly instead of via AuthCredentials — confirm this overload.
site = EsportsClient('lol', user_file="cod")  # Set wiki

revisions = 2  # number of revisions to roll back
comment = None  # require the comment on the edit you're rolling back to be this
print(comment)
comment = "fixing toggle"

# One page title per line.
with open('pages.txt', encoding="utf-8") as f:
    pages = f.readlines()
pages = [page.strip() for page in pages]

lmt = 0
for page in pages:
    if lmt == limit:
        break
    lmt += 1
    # Fetch the last (revisions + 1) revisions so we can restore an older one.
    data = site.client.api('query',
                           format='json',
                           prop='revisions',
                           titles=page,
                           rvprop='content|comment',
                           rvlimit=revisions + 1)
    datapages = data['query']['pages']
    text = ''
    print(page)
    # Edit only when a required comment is configured (truthy).
    make_edit = comment
    for pg in datapages:
Ejemplo n.º 9
0

def check_results(site: EsportsClient, title):
    """Find players whose tournament-result team differs from their current team."""
    title = site.target(title)
    query_kwargs = dict(
        tables=','.join((
            'TournamentResults=TR',
            'TournamentResults__RosterLinks=RL',
            'PlayerRedirects=PR',
            'Players=P',
        )),
        join_on=','.join((
            'TR._ID=RL._rowID',
            'RL._value=PR.AllName',
            'PR._pageName=P._pageName',
        )),
        fields='TR.Team=Team,RL._value=Player',
        where='TR.Team != P.Team AND TR._pageName="Data:{}"'.format(title),
    )
    return site.cargo_client.query(**query_kwargs)


if __name__ == '__main__':
    # Manual smoke run against the Fortnite wiki.
    credentials = AuthCredentials(user_file="me")
    fn_site = EsportsClient('fortnite-esports', credentials=credentials)
    final_result = check_results(
        fn_site, "Fortnite Champion Series: Chapter 2 Season 4/Heat 3/Europe")
    print(final_result)
    print(len(final_result))
Ejemplo n.º 10
0
from mwrogue.auth_credentials import AuthCredentials
from mwrogue.esports_client import EsportsClient
from mwrogue.wiki_time_parser import time_from_str

# Manual smoke tests for EsportsClient cache helpers (run against the live wiki).
credentials = AuthCredentials(user_file='me')

site = EsportsClient('lol', credentials=credentials)

# for ns in site.namespaces:
#     print(ns.name)

# Unambiguous player name should come back unchanged.
assert site.cache.get_disambiguated_player_from_event(
    'European Masters/2021 Season/Spring Play-In', 'G2 Arctic',
    'Koldo') == 'Koldo'

# Ambiguous names should resolve to the disambiguated page title.
assert site.cache.get_disambiguated_player_from_event(
    'Claro Stars League/2021 Season/Opening Season', 'Luxor Gaming',
    'Zeldris') == 'Zeldris (Christian Ticlavilca)'

assert site.cache.get_disambiguated_player_from_event(
    'El_Nexo/2020_Season/Split_1_Playoffs', 'Movistar Riders Academy',
    'Marky') == 'Marky (Pedro José Serrano)'

# check fallback to Teams.Short
assert site.cache.get_team_from_event_tricode('GUL 2020 Closing Playoffs',
                                              'MK') == 'Mad Kings'

assert site.cache.get_team_from_event_tricode('Worlds 2019 Main Event',
                                              'SKT') == 'SK Telecom T1'
assert site.cache.get_disambiguated_player_from_event(
Ejemplo n.º 11
0
        No personal data is stored in this cog.
        """
        return

    @commands.group()
    async def patchupdate(self, ctx):
        # Command-group container; the actual work is in its subcommands.
        pass

    @patchupdate.command()
    async def championstats(self, ctx, version=None):
        """Update champion infoboxes from ddragon for the given (or latest) version."""
        await ctx.send("Okay, starting!")
        site = await utils.login_if_possible(ctx, self.bot, 'lol')
        async with ctx.typing():
            await updatestats(site, "champion", version)
        await ctx.send("Okay, done!")

    @patchupdate.command()
    async def itemstats(self, ctx, version=None):
        """Update item infoboxes from ddragon for the given (or latest) version."""
        await ctx.send("Okay, starting!")
        site = await utils.login_if_possible(ctx, self.bot, 'lol')
        async with ctx.typing():
            await updatestats(site, "item", version)
        await ctx.send("Okay, done!")


if __name__ == "__main__":
    # Manual run outside the Discord bot: update champion infoboxes directly.
    lolsite = EsportsClient('lol', credentials=AuthCredentials(user_file='me'))
    asyncio.run(updatestats(lolsite, "champion"))
    # asyncio.run(updatestats(lolsite, "item"))
Ejemplo n.º 12
0
                except KeyError:
                    errors_key.append(mh)
                except Exception as e:
                    print_if_not_silent(e)
        lines.append('{{RunesEnd}}')
        new_text = '\n'.join(lines)
        if new_text != text and len(lines) > 3:
            print_if_not_silent('Saving page %s...' % page_name)
            esc.save(page,
                     new_text,
                     summary='Automatically updating Runes (python)')
        else:
            print_if_not_silent('Skipping page %s, no changes' % page_name)
        error_text = ''
        for e in errors_http:
            error_text = error_text + ' <br>\n' + page_to_query + ': ' + e + ' (HTTP)'
        for e in errors_key:
            error_text = error_text + '\n' + e + ' (Key)'
        if error_text != '':
            error_page = esc.client.pages['User:RheingoldRiver/Rune Errors']
            esc.save(error_page, error_text, summary='Reporting a Rune Error')


if __name__ == '__main__':
    # Manual run: scrape runes for one data page, printing progress.
    SILENT = False
    credentials = AuthCredentials(user_file="me")
    esc_main = EsportsClient('lol', credentials=credentials)  # Set wiki
    pages = ['Data:LCS/2021 Season/Spring Season/2']
    scrape(esc_main, pages, False)
    # scrapeLPL(esc_main, pages, False)
Ejemplo n.º 13
0
import time

from mwrogue.esports_client import EsportsClient
from mwcleric.auth_credentials import AuthCredentials

# Purge every page embedding Template:Infobox Player, resuming from a page name.
credentials = AuthCredentials(user_file="me")
site = EsportsClient('lol', credentials=credentials)  # Set wiki
summary = 'Forcing blank edit'  # Set summary

limit = -1  # -1 = no limit
startat_page = None
print(startat_page)
startat_page = 'EShen'
this_template = site.client.pages['Template:Infobox Player']  # Set template
pages = this_template.embeddedin()

# pages = site.client.categories['Pages with script errors']

passed_startat = False if startat_page else True
lmt = 0
for page in pages:
    # NOTE(review): lmt is never incremented, so this break can only fire at
    # limit == 0; harmless while limit == -1 but confirm intent.
    if lmt == limit:
        break
    if startat_page and page.name == startat_page:
        passed_startat = True
    if not passed_startat:
        print("Skipping page %s" % page.name)
        continue
    print('Purging page %s...' % page.name)
    site.purge(page)
Ejemplo n.º 14
0
from mwcleric.auth_credentials import AuthCredentials
from mwrogue.esports_client import EsportsClient

limit = -1  # -1 = no limit
# startat_page = 'Gamelord'
template = 'Infobox Player'
form = 'Infobox Player'

credentials = AuthCredentials(user_file="bot")
site = EsportsClient('lol', credentials=credentials)  # Set wiki

########################################

result = site.pages_using(template, generator=False)

# Materialize the generator so we can index into it for the resume point.
pages = []

for p in result:
    pages.append(p)

try:
    startat = pages.index(startat_page)
except NameError as e:
    # startat_page is only defined when the commented line above is enabled;
    # undefined means start from the beginning.
    startat = -1
except ValueError as e:
    # startat_page was set but isn't in the page list; start from the beginning.
    startat = -1
failures = []

lmt = 0
for page in pages:
    if lmt == limit:
Ejemplo n.º 15
0
import datetime as dt
from mwrogue.esports_client import EsportsClient

# Anonymous (credential-less) client: read-only queries against the LoL wiki.
Lolsite = EsportsClient('lol')


def getMatches(matches, date):
    """Return formatted strings for scheduled matches in the day after `date`.

    Args:
        matches: iterable of substrings matched against each row's ShownName
            (acts as a tournament/league filter).
        date: datetime/date marking the start of the query window; the window
            ends at date + 1 day.

    Returns:
        List of "Team1 VS. Team2 at <time> <ShownName>" strings.
    """
    # Original computed an unused date + 2 days value; dropped.
    window_end = date + dt.timedelta(1)

    response = Lolsite.cargo_client.query(
        tables="MatchSchedule",
        fields="Team1, Team2, ShownName, DateTime_UTC",
        limit="max",
        where="DateTime_UTC >= '{}' AND DateTime_UTC <= '{}'".format(
            date, window_end))

    # Keep rows whose ShownName mentions any of the requested filters.
    # (Originally appended via insert(len(...), row), which is just append.)
    todays_matches = [
        row for row in response
        if any(ext in row['ShownName'] for ext in matches)
    ]

    formatted = []
    for row in todays_matches:
        # "YYYY-MM-DD HH:MM:SS" -> keep only the time component.
        match_time = row['DateTime UTC'].split(' ')[1]
        formatted.append('{} VS. {} at {} {}'.format(
            row['Team1'], row['Team2'], match_time, row['ShownName']))

    return formatted
Ejemplo n.º 16
0
import urllib.request, time, sprite_creator, io, os
import re
from mwrogue.esports_client import EsportsClient
from mwcleric.auth_credentials import AuthCredentials

# Configuration for building the SmiteRole sprite sheet.
SUFFIX = ''
SPRITE_NAME = 'SmiteRole'
IMAGE_DIR = 'Sprites/' + SPRITE_NAME + ' Images'  # local download folder
TEAM_DATA_FILE_LOCATION = SPRITE_NAME + 'Sprite' + SUFFIX + '.txt'
FILE_TYPE = 'png'
limit = -1  # -1 = process everything
startat = None

credentials = AuthCredentials(user_file="me")
site = EsportsClient('smite', credentials=credentials)  # role icons live on the Smite wiki
site_lol = EsportsClient('lol', credentials=credentials)  # NOTE(review): unused in this visible span — confirm

# Ensure the download folder exists before fetching images.
if not os.path.exists(IMAGE_DIR):
    os.makedirs(IMAGE_DIR)


def get_country_name(file_name):
    """Strip the image extension, 'File:' prefix, and 'Square' marker (in that order)."""
    for token in ('.' + FILE_TYPE, 'File:', 'Square'):
        file_name = file_name.replace(token, '')
    return file_name


# Regex to pull the rendered image URL out of parser HTML output.
pattern = r'.*src\=\"(.+?)\".*'
cat = site.client.categories['Role Icons']
for page in cat:
    to_parse_text = '[[%s|link=]]' % page.name
    result = site.client.api('parse',
Ejemplo n.º 17
0
from mwrogue.esports_client import EsportsClient
from mwcleric.auth_credentials import AuthCredentials
import mwparserfromhell, re

# Migrate scoreboard templates: collapse link/display into a single link field.
credentials = AuthCredentials(user_file="bot")
site = EsportsClient('lol', credentials=credentials)  # set wiki
summary = 'Changing links/display to be just 1 field, with link only'  # Set summary

limit = -1  # -1 = no limit
startat_page = None
print(startat_page)
# startat_page = 'Challengers Korea/2017 Season/Spring Season/Scoreboards/Week 3'
this_template = site.client.pages['Module:Scoreboard']  # Set template
pages = this_template.embeddedin()

#pages = [site.pages['Data:Challengers Korea/2019 Season/Spring Season']]


def links_to_display(template):
    if not template.has('name'):
        return
    name = template.get('name').value.strip()
    if '{{!}}' in name:
        template.add('name', name.split('{{!}}')[0])
    name = template.get('name').value.strip()
    if not template.has('link'):
        template.add('link', name, before='name')
        template.remove('name')
        return
    display_str = template.get('name').value.strip()
    link_str = template.get('link').value.strip()
Ejemplo n.º 18
0
from mwrogue.esports_client import EsportsClient
from mwcleric.auth_credentials import AuthCredentials
import time
# Blank-edit (touch) every page embedding Infobox Player, resuming at a page.
limit = -1  # -1 = no limit
credentials = AuthCredentials(user_file="me")
site = EsportsClient('lol', credentials=credentials)  # Set wiki

pages = site.pages_using('Infobox Player')

# c = site.client.categories['Pages with script errors']

startat_page = 'HauHau'
passed_startat = False

lmt = 0
#for p in c:
for p in pages:
    if lmt == limit:
        break
    if p.name == startat_page:
        passed_startat = True
    if not passed_startat:
        continue
    lmt += 1
    print(p.name)
    # touch = null edit, forcing the page to re-render.
    site.touch(p)
Ejemplo n.º 19
0
def scrape(esc: EsportsClient, events, force):
    """Build /Runes subpages for each event from Riot match-history JSON.

    For every Data: page in `events`, queries its games, fetches per-player
    rune data from the Riot match-history endpoint, and saves a page of
    {{RunesLine2019}} rows. Games already present in the existing page text
    are reused unless `force` is set. HTTP and key errors are accumulated and
    reported to a wiki error page at the end of each event.
    """
    # Rune/stat-perk keys read from each participant's stats blob, in the
    # column order expected by {{RunesLine2019}}.
    player_data_keys = [
        "perkPrimaryStyle", "perkSubStyle", "perk0", "perk1", "perk2", "perk3",
        "perk4", "perk5", "statPerk0", "statPerk1", "statPerk2"
    ]
    player_positions = ['Top', 'Jungle', 'Mid', 'ADC', 'Support']
    rune_dict = get_rune_dict()
    champ_dict = get_champ_dict()
    print_if_not_silent(events)
    with open('mh_riot_endpoint.txt') as f:
        mh_riot_endpoint = f.read().strip()
    mh_riot_token = get_id_token()
    for page_to_query in events:
        print_if_not_silent(page_to_query)
        # All games on this data page, excluding Chinese (qq.com) links which
        # the sibling scrapeLPL handles.
        result = esc.client.api(
            'cargoquery',
            format="json",
            limit='max',
            tables="MatchScheduleGame=MSG,MatchSchedule=MS",
            fields="MSG.OverviewPage,MSG.MatchHistory",
            where=(r'MSG._pageName="%s" AND MSG.MatchHistory IS NOT NULL'
                   r' AND NOT MSG.MatchHistory RLIKE ".*(lpl|lol)\.qq\.com.*"')
            % page_to_query,
            join_on="MSG.MatchId=MS.MatchId",
            order_by="MS.N_Page,MS.N_MatchInPage, MSG.N_GameInMatch")
        if result['cargoquery']:
            event = result['cargoquery'][0]['title']['OverviewPage']
            # Data pages may be split (e.g. ".../2"); keep the split suffix
            # so the Runes page mirrors the data-page structure.
            suffix = ''
            page_start = page_to_query.replace('Data:', '')
            if page_start != event:
                suffix = page_start.replace(event, '')
            errors_http = []
            errors_key = []
            page_name = event + '/Runes' + suffix
            page = esc.client.pages[page_name]
            text = page.text()
            # Pages converted to the query-based template are skipped entirely.
            if 'RunesQueryTournament' in text:
                continue
            text_tbl = []
            if text != "" and len(text.split('\n')) > 1:
                # Existing page: first two lines are the intro (tabs + start
                # template); the rest is reusable per-game content.
                text_tbl = text.split('\n')
                intro = text_tbl.pop(0) + '\n' + text_tbl.pop(0)
            else:
                # New page: borrow the tabs line from the event overview page.
                overview_page = esc.client.pages[event]
                overview_text = overview_page.text()
                overview_text_tbl = overview_text.split('\n')
                tabs = overview_text_tbl[0]
                intro = tabs + '\n{{RunesStart2019}}'
            lines = [intro]
            # Each game contributes exactly 10 lines (one per player), so game
            # i occupies text_tbl[10*i : 10*i + 10].
            for i, cargo_game in enumerate(result['cargoquery']):
                mh = (cargo_game['title']['MatchHistory']).replace(
                    '&amp;', '&')
                print_if_not_silent(mh)
                location = re.match(r'.*details/([^&]*)', mh)[1]
                if len(text_tbl) > 10 * i and (
                        location in text_tbl[10 * i]) and not force:
                    # Already scraped: copy the existing 10 lines verbatim.
                    for j in range(0, 10):
                        lines.append(text_tbl[j + 10 * i])
                else:
                    print_if_not_silent('Querying match %s' % mh)
                    json_loc = mh_riot_endpoint + location
                    try:
                        game = requests.Session().get(json_loc,
                                                      cookies={
                                                          'id_token':
                                                          mh_riot_token
                                                      }).json()
                        full_patch = game['gameVersion']
                        patch_tbl = full_patch.split('.')
                        patch = str(patch_tbl[0] + '.' + patch_tbl[1])
                        for j in range(0, 10):
                            player_name = game['participantIdentities'][j][
                                'player']['summonerName']
                            # Summoner names are "TEAM Player"; split off the
                            # team tag, tolerating names with no space.
                            try:
                                player_team = re.match('^(.+?) (.*)',
                                                       player_name)[1]
                                player_name = re.match('^(.+?) (.*)',
                                                       player_name)[2]
                            except Exception:
                                player_team = ''
                            player_champion_n = game['participants'][j][
                                'championId']
                            player_champion = champ_dict.get(
                                player_champion_n, str(player_champion_n))
                            # Players come in role order, 5 per team.
                            player_position = player_positions[j % 5]
                            this_player = [
                                player_name, player_team, player_champion,
                                player_position
                            ]
                            for key in player_data_keys:
                                rune_key = game['participants'][j]['stats'][
                                    key]
                                rune_output = rune_dict.get(rune_key, rune_key)
                                this_player.append(rune_output)
                            this_player_output = '{{RunesLine2019|' + (
                                '|'.join(this_player)
                            ) + '|patch=' + patch + '|mh=' + location + '}}'
                            lines.append(this_player_output)
                            # Pad text_tbl so later games keep their 10*i slots.
                            text_tbl.insert(10 * i + j, '')
                    except urllib.error.HTTPError:
                        errors_http.append(mh)
                    except KeyError:
                        errors_key.append(mh)
            lines.append('{{RunesEnd}}')
            new_text = '\n'.join(lines)
            # len(lines) > 3 guards against saving a page with no game rows.
            if new_text != text and len(lines) > 3:
                print_if_not_silent('Saving page %s...' % page_name)
                esc.save(page,
                         new_text,
                         summary='Automatically updating Runes (python)')
            else:
                print_if_not_silent('Skipping page %s, no changes' % page_name)
            error_text = ''
            for e in errors_http:
                error_text = error_text + ' <br>\n' + page_to_query + ': ' + e + ' (HTTP)'
            for e in errors_key:
                error_text = error_text + '\n' + e + ' (Key)'
            if error_text != '':
                error_page = esc.client.pages[
                    'User:RheingoldRiver/Rune Errors']
                esc.save(error_page,
                         error_text,
                         summary='Reporting a Rune Error')
Ejemplo n.º 20
0
def scrapeLPL(esc: EsportsClient, events, force):
    player_positions = ['Top', 'Jungle', 'Mid', 'ADC', 'Support']
    rune_dict = get_rune_dict()
    champ_dict = get_champ_dict()
    please_escape = False
    with open('mh_qq_endpoint.txt') as f:
        mh_qq_endpoint = f.readlines()
    mh_qq_endpoint = [_.strip() for _ in mh_qq_endpoint]
    for page_to_query in events:
        if please_escape:
            break
        print_if_not_silent(page_to_query)
        result = esc.client.api(
            'cargoquery',
            format="json",
            limit='max',
            tables="MatchScheduleGame=MSG,MatchSchedule=MS",
            fields="MSG.OverviewPage,MSG.MatchHistory",
            where=(r'MSG._pageName="%s" AND MSG.MatchHistory IS NOT NULL'
                   r' AND MSG.MatchHistory RLIKE ".*(lpl|lol)\.qq\.com.*"') %
            page_to_query,
            join_on="MSG.MatchId=MS.MatchId",
            order_by="MS.N_Page,MS.N_MatchInPage, MSG.N_GameInMatch",
            group_by='MSG.MatchHistory')
        if not result['cargoquery']:
            continue
        event = result['cargoquery'][0]['title']['OverviewPage']
        suffix = ''
        page_start = page_to_query.replace('Data:', '')
        if page_start != event:
            suffix = page_start.replace(event, '')
        errors_http = []
        errors_key = []
        page_name = event + '/Runes' + suffix
        page = esc.client.pages[page_name]
        text = page.text()
        if 'RunesQueryTournament' in text:
            continue
        text_tbl = []
        team_keys = ['left', 'right']
        if text != "" and len(text.split('\n')) > 1:
            text_tbl = text.split('\n')
            intro = text_tbl.pop(0) + '\n' + text_tbl.pop(0)
        else:
            overview_page = esc.client.pages[event]
            overview_text = overview_page.text()
            overview_text_tbl = overview_text.split('\n')
            tabs = overview_text_tbl[0]
            intro = tabs + '\n{{RunesStart2019}}'
        lines = [intro]
        counter = 0
        for i, cargo_game in enumerate(result['cargoquery']):
            # lmt += 1
            # if lmt == 2:
            # 	please_escape = True
            # 	break
            mh = (cargo_game['title']['MatchHistory']).replace('&amp;', '&')
            print_if_not_silent(mh)
            location = re.match(r'.*bmid=([0-9]*)', mh)[1]
            if len(text_tbl) > 10 * counter and (
                    location in text_tbl[10 * counter]) and not force:
                print_if_not_silent('Skipping %s' % location)
                for j in range(0, 10):
                    lines.append(text_tbl[j + 10 * counter])
                counter = counter + 1
                if len(text_tbl) > 10 * counter and (
                        location in text_tbl[10 * counter]) and not force:
                    print_if_not_silent('Skipping %s (2)' % location)
                    for j in range(0, 10):
                        lines.append(text_tbl[j + 10 * counter])
                    counter = counter + 1
                    if len(text_tbl) > 10 * counter and (
                            location in text_tbl[10 * counter]) and not force:
                        print_if_not_silent('Skipping %s (3)' % location)
                        for j in range(0, 10):
                            lines.append(text_tbl[j + 10 * counter])
                        counter = counter + 1
            else:
                print_if_not_silent('Querying match %s' % mh)
                json_loc = mh_qq_endpoint[0] + location
                print_if_not_silent(json_loc)
                try:
                    with urllib.request.urlopen(json_loc) as url:
                        series = json.loads(url.read().decode())
                    for game in series['msg']:
                        counter = counter + 1
                        gameId = game['sMatchId']
                        json_loc_2 = mh_qq_endpoint[1] + gameId
                        print_if_not_silent(json_loc_2)
                        with urllib.request.urlopen(json_loc_2) as url:
                            game = json.loads(url.read().decode())
                        areaId = game['msg']['sMatchInfo']['AreaId']
                        battleId = game['msg']['battleInfo']['BattleId']
                        json_loc_3 = mh_qq_endpoint[
                            2] + battleId + mh_qq_endpoint[3] + areaId
                        print_if_not_silent(json_loc_3)
                        with urllib.request.urlopen(json_loc_3) as url:
                            worldLookup = json.loads(url.read().decode())
                        worldLookupJSON = json.loads(worldLookup['msg'])
                        has_runes = True
                        rune_data = {}
                        if worldLookupJSON['battle_count_'] == 0:
                            has_runes = False
                        else:
                            worldId = worldLookupJSON['battle_list_'][0][
                                'world_']
                            json_loc_4 = mh_qq_endpoint[4] + str(
                                worldId) + mh_qq_endpoint[5] + str(battleId)
                            print_if_not_silent(json_loc_4)
                            with urllib.request.urlopen(json_loc_4) as url:
                                rune_data_unsorted_init = json.loads(
                                    url.read().decode())
                            rune_data_unsorted = json.loads(
                                rune_data_unsorted_init['msg'])
                            for p in range(0, 10):
                                rune_key = rune_data_unsorted['hero_list_'][p][
                                    'hero_id_']
                                rune_data[int(rune_key)] = rune_data_unsorted[
                                    'hero_list_'][p].copy()
                        patch = ''  # unless we can automated get patch which idt we can from any endpoint
                        teamnames = get_team_names(game)
                        game_data = json.loads(
                            game['msg']['battleInfo']['BattleData'])
                        j = 0
                        for j in range(0, 10):
                            rune_data_this = {}
                            if has_runes:
                                rune_data_this = copy.deepcopy(rune_data)
                            player_data = get_player_data(
                                game_data, team_keys, j)
                            player_champion_n = int(player_data['hero'])
                            player_name = player_data['name']
                            player_team = get_this_teamname(
                                teamnames, team_keys, j)
                            player_name = player_name.replace(player_team, '')
                            player_champion = champ_dict[
                                player_champion_n] if player_champion_n in champ_dict else str(
                                    player_champion_n)
                            player_position = player_positions[j % 5]
                            this_player = [
                                player_name, player_team, player_champion,
                                player_position
                            ]
                            if has_runes:
                                player_rune_data = rune_data_this[
                                    player_champion_n]['runes_info_'][
                                        'runes_list_'].copy()
                                this_rune_id = ''
                                this_player.append(rune_dict['trees'][
                                    player_rune_data[0]['runes_id_']])
                                for _ in range(0, 5):
                                    this_rune = player_rune_data.pop(0)
                                    this_rune_id = this_rune['runes_id_']
                                    rune_output = rune_dict[
                                        this_rune_id] if this_rune_id in rune_dict else this_rune_id
                                    this_player.append(rune_output)
                                this_player.insert(
                                    5, rune_dict['trees'][int(this_rune_id)])
                                stat_runes = player_rune_data.copy()
                                while stat_runes:
                                    if stat_runes[0]['runes_num_'] == 1:
                                        this_rune = stat_runes.pop(0)
                                    else:
                                        this_rune = stat_runes[0]
                                        stat_runes[0][
                                            'runes_num_'] = stat_runes[0][
                                                'runes_num_'] - 1
                                    this_rune_id = this_rune['runes_id_']
                                    rune_output = rune_dict[
                                        this_rune_id] if this_rune_id in rune_dict else this_rune_id
                                    this_player.append(rune_output)
                            this_player_output = '{{RunesLineLPL2019|' + (
                                '|'.join(this_player)
                            ) + '|patch=' + patch + '|mh=' + location + '}}'
                            lines.append(this_player_output)
                        text_tbl.insert(10 * i + j, '')
                except urllib.error.HTTPError:
                    errors_http.append(mh)
                except KeyError:
                    errors_key.append(mh)
                except Exception as e:
                    print_if_not_silent(e)
        lines.append('{{RunesEnd}}')
        new_text = '\n'.join(lines)
        if new_text != text and len(lines) > 3:
            print_if_not_silent('Saving page %s...' % page_name)
            esc.save(page,
                     new_text,
                     summary='Automatically updating Runes (python)')
        else:
            print_if_not_silent('Skipping page %s, no changes' % page_name)
        error_text = ''
        for e in errors_http:
            error_text = error_text + ' <br>\n' + page_to_query + ': ' + e + ' (HTTP)'
        for e in errors_key:
            error_text = error_text + '\n' + e + ' (Key)'
        if error_text != '':
            error_page = esc.client.pages['User:RheingoldRiver/Rune Errors']
            esc.save(error_page, error_text, summary='Reporting a Rune Error')
import logging
from time import sleep

from lol_esports_parser import get_riot_game
from mwcleric.auth_credentials import AuthCredentials
from mwparserfromhell.nodes import Template
from mwparserfromhell.nodes.extras import Parameter
from mwrogue.esports_client import EsportsClient
from mwrogue.template_modifier import TemplateModifierBase
from requests import HTTPError

# Log in with the "bot" credential profile and target the LoL esports wiki.
credentials = AuthCredentials(user_file="bot")
site = EsportsClient('lol', credentials=credentials)
summary = 'Add primary rune tree'

# there's tons of warnings about old patch versions that we don't care about,
# it's just gonna slow execution so disable warnings
logging.disable(logging.WARNING)


class TemplateModifier(TemplateModifierBase):
    # NOTE(review): the visible body contains only early-exit guard clauses;
    # the code that actually edits the template appears to have been lost when
    # this copy was made -- confirm against the original script.
    def update_template(self, template: Template):
        """Bail out on templates that must not be modified: pages from the
        LPL/LDL/Demacia family, templates that already carry a |primary=
        value, and templates without a statslink parameter."""
        # i just added more and more checks to avoid crashing on LPL games over time
        if any(
            [_ in self.current_page.name for _ in ['LPL', 'LDL', 'Demacia']]):
            return
        for param in template.params:
            if '|primary=' in str(param.value):
                return
        if not template.has('statslink'):
            return
    site.client.pages['Template:%s/End' % template_prefix].save(
        end_text % template_prefix, summary=summary)
    site.client.pages['Template:%s/Date' % template_prefix].save(
        date_text % template_prefix, summary=summary)
    site.client.pages['Template:%s Navbox' % navbox_template].save(
        navbox_text % data_prefix, summary=summary)
    site.client.pages['Template:%s/Start' % template_prefix].save(
        start_text % template_prefix, summary=summary)


def check_and_make_redirects(d, page_prefix, redirect_text):
    """Create dated redirect pages for the six days following *d*.

    For each of the six days after *d* (the day itself is skipped), saves
    *redirect_text* to the page named ``page_prefix + 'YYYY-MM-DD'`` on the
    module-level ``site``.

    :param d: starting date (a datetime.date / datetime.datetime)
    :param page_prefix: prefix of the target page names
    :param redirect_text: wikitext to save on each redirect page
    """
    for offset in range(1, 7):
        # strftime handles the zero-padding the original did by hand,
        # and computing from `d` avoids mutating a running accumulator.
        day_stamp = (d + timedelta(days=offset)).strftime('%Y-%m-%d')
        site.client.pages[page_prefix + day_stamp].save(redirect_text)


if __name__ == "__main__":
    page_type = 'news'
    years = range(2020, 2021)
    credentials = AuthCredentials(user_file="me")
    site = EsportsClient('legendsofruneterra-esports', credentials=credentials)
    make_templates(page_type)
    make_data_pages(years, page_type)
    make_data_pages(years, 'ec')
    make_data_pages(years, 'rumors')
Ejemplo n.º 23
0
import time

from mwrogue.esports_client import EsportsClient
from mwcleric.auth_credentials import AuthCredentials
from mwclient.errors import AssertUserFailedError

# Source wiki holding the default-loadout pages (read without credentials)
# and the target wiki the pages are meant to be copied to.
credentials = AuthCredentials(user_file="me")
loadout = EsportsClient('default-loadout-esports')  #  set wiki
target = EsportsClient('rl-esports', credentials=credentials)  #  set wiki
summary = 'Default loadout of pages'  # Set summary

# Resume markers: skip namespaces / pages below these after an interrupted run.
startat_namespace = None
print(startat_namespace)
startat_namespace = 828  # presumably the Scribunto Module namespace -- TODO confirm

startat_page = None
print(startat_page)
# startat_page = 'Module:Navbox/Aether II/en'

overwrite_existing = True

# -1 means "start from the very first namespace".
startat_comparison = -1 if startat_namespace is None else startat_namespace - 1

passed_startat = False

# Walk every namespace past the marker and enumerate its pages.
# NOTE(review): the visible loop only prints page names; the copy step is
# presumably further down in the original script.
for ns in loadout.client.namespaces:
    print(ns)
    if ns > startat_comparison:  # ns 4 is Project ns
        for page in loadout.client.allpages(namespace=ns):
            # time.sleep(1)
            print(page.name)
from mwrogue.esports_client import EsportsClient
from mwcleric.auth_credentials import AuthCredentials

# Compare the archive wiki's main-namespace pages against the live wiki:
# for every archive page, record whether a page of the same name exists live,
# then dump the results as a tab-separated report.
credentials = AuthCredentials(user_file="me")
archive = EsportsClient('lol-archive', credentials=credentials) #  set wiki
live = EsportsClient('lol', credentials=credentials) #  set wiki

pages = []

# (page name, exists-on-live-wiki) pairs for namespace 0 (main).
for page in archive.client.allpages(namespace=0):
	pages.append((page.name, live.client.pages[page.name].exists))

text = []

for p in pages:
	text.append('{}\t{}'.format(p[0], str(p[1])))

with open('archive_pages.txt', 'w+', encoding="utf-8") as f:
	f.write('\n'.join(text))
Ejemplo n.º 25
0
            for player in sorted_data["players"][team]:
                player = player[0]
                game_rd_player = self.rosters_data[team]["players"][player]
                if players_data.get(player):
                    player_data = self.concat_args(players_data[player])
                else:
                    player_data = self.concat_args([{"player": player}])
                player_roles_data = self.concat_args(
                    game_rd_player["roles_data"])
                player_games_by_role = self.concat_args(
                    game_rd_player["games_by_role"])
                players_text += self.PLAYER_TEXT.format(
                    player_data, player_roles_data, player_games_by_role)
            teamsvs = self.concat_args(self.rosters_data[team]["teamsvs"])
            output += self.TEAM_TEXT.format(team, teamsvs, players_text)
        return output

    def save_page(self, output):
        """Write the generated rosters text to this user's sandbox page."""
        # Bot passwords log in as "user@botname"; keep only the base username.
        raw_user = self.site.credentials.username
        base_user = raw_user.partition('@')[0]
        sandbox = self.site.client.pages[f"User:{base_user}/Team Rosters Sandbox"]
        self.site.save(page=sandbox,
                       text=output,
                       summary="Generating Rosters from Scoreboard Data")


if __name__ == '__main__':
    # Run the roster generator against one tournament overview page.
    # NOTE(review): AutoRostersRunner is defined earlier in the original
    # script; it is not visible in this fragment.
    credentials = AuthCredentials(user_file='bot')
    lol_site = EsportsClient('lol', credentials=credentials)
    AutoRostersRunner(lol_site, "LMF 2022 Opening").run()
    "RosterRumorLineNot", "RosterRumorLine"
]
# Subpages that are deleted rather than moved when disambiguating a page.
SUBPAGES_TO_DELETE = ['{}/Tournament Results', 'Tooltip:{}']
# NOTE(review): original_name / new_name come from the top of the original
# script, which is not visible in this fragment.
summary = "Disambiguating {} to {}".format(original_name, new_name)

# First-letter-cased variants of both names for case-sensitive replacements.
orig_name_lc = original_name[0].lower() + original_name[1:]
new_name_lc = new_name[0].lower() + new_name[1:]
orig_name_uc = original_name[0].upper() + original_name[1:]
new_name_uc = new_name[0].upper() + new_name[1:]

# Pages queued for a blank edit after moving (refreshes wiki caches).
blank_edit_these = []

#############################################################################################

credentials = AuthCredentials(user_file="me")
site = EsportsClient('lol', credentials=credentials)  # Set wiki


def savepage(targetpage, savetext):
    """Save *savetext* to *targetpage*, tagging the edit as a bot disambiguation."""
    site.save(targetpage, savetext, tags="bot_disambig", summary=summary)


def move_page(from_page):
    """Move *from_page* to its disambiguated name, unless the target exists.

    Successfully moved pages are queued in ``blank_edit_these`` so they can
    be blank-edited later to refresh caches.
    """
    # NOTE(review): this replaces the upper-cased original with new_name
    # rather than new_name_uc -- confirm the asymmetry is intentional.
    new_page_name = str(from_page.name).replace(orig_name_uc, new_name)
    new_page = site.client.pages[new_page_name]
    if new_page.exists:
        print("{} already exists, cannot move!".format(from_page.name))
    else:
        print("Moving page {} to {}".format(from_page.name, new_page_name))
        from_page.move(new_page_name, reason=summary, no_redirect=True)
        blank_edit_these.append(new_page)