def callee(self, *args, **kwargs):
    """Dispatch to the wrapped function after a MW version check.

    Raises NotImplementedError when the site runs a MediaWiki release
    older than the minimum ``version``; otherwise forwards all
    arguments unchanged to the wrapped function.
    """
    # `fn` and `version` come from the enclosing decorator's closure.
    if MediaWikiVersion(self.version()) < MediaWikiVersion(version):
        raise NotImplementedError(
            'Method or function "%s"\n'
            "isn't implemented in MediaWiki version < %s"
            % (fn.__name__, version))
    return fn(self, *args, **kwargs)
def _parse_post_117(self):
    """Parse 1.17+ siteinfo data.

    Queries action=query&meta=siteinfo and fills in ``version``,
    ``server``, ``scriptpath``, ``articlepath`` and ``lang``.  If the
    wiki is private (API answers 'readapidenied'), a temporary
    family/site is set up so the user can log in and siteinfo can
    still be read.
    """
    response = fetch(self.api + '?action=query&meta=siteinfo&format=json')
    check_response(response)
    # remove preleading newlines and Byte Order Mark (BOM), see T128992
    content = response.text.strip().lstrip('\uFEFF')
    info = json.loads(content)
    self.private_wiki = ('error' in info
                         and info['error']['code'] == 'readapidenied')
    if self.private_wiki:
        # user-config.py is not loaded because PYWIKIBOT_NO_USER_CONFIG
        # is set to '2' by generate_family_file.py.
        # Prepare a temporary config for login.
        username = pywikibot.input(
            'Private wiki detected. Login is required.\n'
            'Please enter your username?')
        config.usernames['temporary_family'] = {'temporary_code': username}
        # Setup a dummy family so that we can create a site object
        fam = pywikibot.Family()
        fam.name = 'temporary_family'
        fam.scriptpath = lambda code: self.api[:-8]  # without /api.php
        fam.langs = {'temporary_code': self.server}
        site = pywikibot.Site('temporary_code', fam)
        site.version = lambda: str(self.version)
        # Now the site object is able to login
        info = site.siteinfo
    else:
        info = info['query']['general']
    self.version = MediaWikiVersion.from_generator(info['generator'])
    if self.version < MediaWikiVersion('1.17'):
        # Pre-1.17 wikis are handled by a different parser.
        return
    self.server = urljoin(self.fromurl, info['server'])
    for item in ['scriptpath', 'articlepath', 'lang']:
        setattr(self, item, info[item])
def test_content_model(self):
    """Test content model."""
    # Content models every MediaWiki install is expected to offer.
    base = [
        'wikitext',
        'javascript',
        'css',
        'text',
    ]
    # Extra content models deployed on Wikimedia wikis.
    wmf = [
        'MassMessageListContent',
        'SecurePoll',
        'flow-board',
        'Scribunto',
        'JsonSchema',
    ]
    # The 'json' content model was introduced with MW 1.24.
    if MediaWikiVersion(self.site.version()) >= MediaWikiVersion('1.24'):
        base.append('json')
    self._check_param_subset(self.site, 'edit', 'contentmodel', base)
    self._check_param_subset(self.site, 'parse', 'contentmodel', base)
    if isinstance(self.site.family, WikimediaFamily):
        # T151151 - en.wiki uninstalled Flow extension:
        # NOTE(review): compares a Family object to a str; presumably
        # Family.__eq__ accepts the family name — confirm.
        if self.site.family == 'wikipedia' and self.site.code == 'en':
            wmf.remove('flow-board')
        self._check_param_subset(self.site, 'parse', 'contentmodel', wmf)
def test_init_pageset(self):
    """Test initializing with only the pageset."""
    site = self.get_site()
    # 'query' must never leak into the class-level init module set.
    self.assertNotIn('query', api.ParamInfo.init_modules)
    pi = api.ParamInfo(site, set(['pageset']))
    self.assertNotIn('query', api.ParamInfo.init_modules)
    self.assertEqual(len(pi), 0)
    pi._init()
    self.assertIn('main', pi._paraminfo)
    self.assertIn('paraminfo', pi._paraminfo)
    self.assertIn('pageset', pi._paraminfo)
    if MediaWikiVersion(self.site.version()) < MediaWikiVersion("1.12"):
        # Module counting below is not meaningful on pre-1.12 wikis.
        return
    if 'query' in pi.preloaded_modules:
        # 'query' was pulled in implicitly alongside 'pageset'.
        self.assertIn('query', pi._paraminfo)
        self.assertEqual(len(pi), 4)
    else:
        self.assertNotIn('query', pi._paraminfo)
        self.assertEqual(len(pi), 3)
    self.assertEqual(len(pi), len(pi.preloaded_modules))
    if MediaWikiVersion(site.version()) >= MediaWikiVersion("1.21"):
        # 'generator' was added to 'pageset' in 1.21
        generators_param = pi.parameter('pageset', 'generator')
        self.assertGreater(len(generators_param['type']), 1)
def test_with_module_revisions(self):
    """Test requesting the module revisions."""
    site = self.get_site()
    pi = api.ParamInfo(site)
    self.assertEqual(len(pi), 0)
    pi.fetch(['revisions'])
    # 'revisions' is a submodule of 'query'; 'main' and 'paraminfo'
    # are always preloaded alongside any fetch.
    self.assertIn('query+revisions', pi._paraminfo)
    self.assertIn('main', pi._paraminfo)
    self.assertIn('paraminfo', pi._paraminfo)
    if MediaWikiVersion(self.site.version()) >= MediaWikiVersion("1.12"):
        self.assertEqual(len(pi), 1 + len(pi.preloaded_modules))
    self.assertEqual(pi['revisions']['prefix'], 'rv')
    param = pi.parameter('revisions', 'prop')
    self.assertIsInstance(param, dict)
    self.assertEqual(param['name'], 'prop')
    self.assertNotIn('deprecated', param)
    self.assertIsInstance(param['type'], list)
    if MediaWikiVersion(self.site.version()) < MediaWikiVersion("1.12"):
        # Pre-1.12 wikis don't advertise 'user' among the prop values.
        return
    self.assertIn('user', param['type'])
def test_init_pageset(self):
    """Test initializing with only the pageset."""
    site = self.get_site()
    self.assertNotIn('query', api.ParamInfo.init_modules)
    pi = api.ParamInfo(site, set(['pageset']))
    self.assertNotIn('query', api.ParamInfo.init_modules)
    self.assertNotIn('query', pi.preloaded_modules)
    self.assertEqual(len(pi), 0)
    pi._init()
    self.assertIn('main', pi)
    self.assertIn('paraminfo', pi)
    self.assertIn('pageset', pi)
    if pi.modules_only_mode:
        # Modules-only mode pulls 'query' in as a dependency of
        # 'pageset'.
        self.assertIn('query', pi.preloaded_modules)
        self.assertIn('query', pi)
        self.assertEqual(len(pi), 4)
    else:
        self.assertNotIn('query', pi.preloaded_modules)
        self.assertNotIn('query', pi)
        self.assertEqual(len(pi), 3)
    self.assertEqual(len(pi), len(pi.preloaded_modules))
    if MediaWikiVersion(site.version()) >= MediaWikiVersion("1.21"):
        # 'generator' was added to 'pageset' in 1.21
        generators_param = pi.parameter('pageset', 'generator')
        self.assertGreater(len(generators_param['type']), 1)
def test_Coordinate_edit(self):
    """Attempt adding a Coordinate with globe set via item."""
    testsite = self.get_repo()
    # Reset the test property so earlier runs don't interfere.
    item = self._clean_item(testsite, 'P20480')
    # Make sure the wiki supports wikibase-conceptbaseuri
    version = testsite.version()
    if MediaWikiVersion(version) < MediaWikiVersion('1.29.0-wmf.2'):
        raise unittest.SkipTest('Wiki version must be 1.29.0-wmf.2 or '
                                'newer to support unbound uncertainties.')
    # set new claim
    claim = pywikibot.page.Claim(testsite, 'P20480',
                                 datatype='globe-coordinate')
    target = pywikibot.Coordinate(site=testsite, lat=12.0, lon=13.0,
                                  globe_item=item)
    claim.setTarget(target)
    item.addClaim(claim)
    # confirm new claim
    item.get(force=True)
    claim = item.claims['P20480'][0]
    self.assertEqual(claim.getTarget(), target)
def setup_test_pages(self):
    """Helper function to set up pages that we will use in these tests.

    Creates two revisions on a source page and one on a destination
    page and returns the edit timestamps of the two source revisions.
    """
    site = self.get_site()
    source = pywikibot.Page(site, 'User:Sn1per/MergeTest1')
    dest = pywikibot.Page(site, 'User:Sn1per/MergeTest2')
    # Make sure the wiki supports action=mergehistory
    if MediaWikiVersion(site.version()) < MediaWikiVersion('1.27.0-wmf.13'):
        raise unittest.SkipTest('Wiki version must be 1.27.0-wmf.13 or '
                                'newer to support the history merge API.')
    # Start from a clean slate: remove leftovers from earlier runs.
    if source.exists():
        source.delete('Pywikibot merge history unit test')
    if dest.exists():
        dest.delete('Pywikibot merge history unit test')
    source.text = 'Lorem ipsum dolor sit amet'
    source.save()
    first_rev = source.editTime()
    source.text = 'Lorem ipsum dolor sit amet is a common test phrase'
    source.save()
    second_rev = source.editTime()
    dest.text = 'Merge history page unit test destination'
    dest.save()
    return first_rev, second_rev
def test_generator(self):
    """Test the from_generator alternate constructor."""
    parsed = MediaWikiVersion.from_generator('MediaWiki 1.2.3')
    self.assertEqual(parsed, self._make('1.2.3'))
    # A generator string not produced by MediaWiki must be rejected.
    self.assertRaisesRegex(ValueError, self.GENERATOR_STRING_RE,
                           MediaWikiVersion.from_generator,
                           'Invalid 1.2.3')
def test_watchlist_show_flags(self):
    """Test watchlist show flags."""
    flags = ['minor', 'bot', 'anon', 'patrolled']
    # The 'unread' filter is only available from MW 1.24 on.
    if MediaWikiVersion(self.site.version()) >= MediaWikiVersion('1.24'):
        flags.append('unread')
    # Every flag is also accepted in its negated ('!flag') form.
    expected = flags + ['!' + flag for flag in flags]
    self._check_param_subset(self.site, 'query+watchlist', 'show',
                             expected)
def _get_logentry(self, logtype):
    """Retrieve a single log entry.

    @param logtype: the log type to query (e.g. 'move', 'delete')
    @return: the first matching log entry of the site
    """
    if self.site_key == 'old':
        # This is an assertion as the tests don't make sense with newer
        # MW versions and otherwise it might not be visible that the test
        # isn't run on an older wiki.
        self.assertLess(MediaWikiVersion(self.site.version()),
                        MediaWikiVersion('1.20'))
    return next(iter(self.site.logevents(logtype=logtype, total=1)))
def translateAndCapitalizeNamespaces(self, text):
    """Use localized namespace names.

    Rewrites wiki links so that namespace prefixes use the site's
    canonical localized form, honouring several per-wiki exceptions.
    """
    # arz uses english stylish codes
    if self.site.sitename == 'wikipedia:arz':
        return text
    # wiki links aren't parsed here.
    exceptions = ['nowiki', 'comment', 'math', 'pre']
    for namespace in self.site.namespaces.values():
        if namespace == 0:
            # skip main (article) namespace
            continue
        # a clone is needed. Won't change the namespace dict
        namespaces = list(namespace)
        if namespace == 6 and self.site.family.name == 'wikipedia':
            if self.site.code in ('en', 'fr') and MediaWikiVersion(
                    self.site.version()) >= MediaWikiVersion('1.14'):
                # do not change "Image" on en-wiki and fr-wiki
                assert u'Image' in namespaces
                namespaces.remove(u'Image')
            if self.site.code == 'hu':
                # do not change "Kép" on hu-wiki
                assert u'Kép' in namespaces
                namespaces.remove(u'Kép')
            elif self.site.code == 'pt':
                # use "Imagem" by default on pt-wiki (per T57242)
                assert 'Imagem' in namespaces
                namespaces.insert(
                    0, namespaces.pop(namespaces.index('Imagem')))
        # final namespace variant
        final_ns = namespaces.pop(0)
        if namespace in (2, 3):
            # skip localized user namespace, maybe gender is used
            namespaces = ['User' if namespace == 2 else 'User talk']
        # lowerspaced and underscored namespaces
        for i, item in enumerate(namespaces):
            item = item.replace(' ', '[ _]')
            item = u'[%s%s]' % (item[0], item[0].lower()) + item[1:]
            namespaces[i] = item
        namespaces.append(first_lower(final_ns))
        if final_ns and namespaces:
            if self.site.sitename == 'wikipedia:pt' and namespace == 6:
                # only change on these file extensions (per T57242)
                extensions = ('png', 'gif', 'jpg', 'jpeg', 'svg', 'tiff',
                              'tif')
                text = textlib.replaceExcept(
                    text,
                    r'\[\[\s*({}) *:(?P<name>[^\|\]]*?\.({}))'
                    r'(?P<label>.*?)\]\]'.format('|'.join(namespaces),
                                                 '|'.join(extensions)),
                    r'[[{}:\g<name>\g<label>]]'.format(final_ns),
                    exceptions)
            else:
                text = textlib.replaceExcept(
                    text,
                    r'\[\[\s*(%s) *:(?P<nameAndLabel>.*?)\]\]'
                    % '|'.join(namespaces),
                    r'[[%s:\g<nameAndLabel>]]' % final_ns,
                    exceptions)
    return text
def _parse_post_117(self):
    """Parse 1.17+ siteinfo data.

    Reads action=query&meta=siteinfo and fills in ``version``,
    ``server``, ``scriptpath``, ``articlepath`` and ``lang``.
    """
    response = fetch(self.api + '?action=query&meta=siteinfo&format=json')
    # NOTE(review): response.content is fed to json.loads unchanged;
    # a BOM or leading whitespace would break parsing — confirm the
    # fetch layer strips those.
    info = json.loads(response.content)['query']['general']
    self.version = MediaWikiVersion.from_generator(info['generator'])
    if self.version < MediaWikiVersion('1.17'):
        # Pre-1.17 siteinfo lacks the fields read below.
        return
    self.server = urljoin(self.fromurl, info['server'])
    for item in ['scriptpath', 'articlepath', 'lang']:
        setattr(self, item, info[item])
def test_invalid_versions(self):
    """Verify that insufficient version fail creating."""
    # None of these strings is a parseable MediaWiki version number.
    for bad in ('invalid', '1number', '1.missing'):
        with self.assertRaisesRegex(ValueError, self.INVALID_VERSION_RE):
            MediaWikiVersion(bad)
    # A 'wmf' token inside a dashed suffix trips an internal assertion.
    with self.assertRaisesRegex(AssertionError,
                                'Found \"wmf\" in \"wmf-1\"'):
        MediaWikiVersion('1.33wmf-1')
def test_old_mode(self):
    """Test paraminfo fetching with modules_only_mode disabled."""
    site = self.get_site()
    param_info = api.ParamInfo(site, modules_only_mode=False)
    param_info.fetch(['info'])
    # 'main' and 'paraminfo' are always preloaded with any fetch.
    for module in ('query+info', 'main', 'paraminfo'):
        self.assertIn(module, param_info._paraminfo)
    if MediaWikiVersion(self.site.version()) >= MediaWikiVersion("1.12"):
        self.assertEqual(len(param_info),
                         1 + len(param_info.preloaded_modules))
    self.assertIn('revisions', param_info.prefixes)
def test_init(self):
    """Test lazy initialization of ParamInfo."""
    site = self.get_site()
    param_info = api.ParamInfo(site)
    # Nothing is fetched before _init() is invoked.
    self.assertEqual(len(param_info), 0)
    param_info._init()
    for module in ('main', 'paraminfo'):
        self.assertIn(module, param_info._paraminfo)
    if MediaWikiVersion(self.site.version()) >= MediaWikiVersion("1.12"):
        self.assertEqual(len(param_info),
                         len(param_info.preloaded_modules))
    self.assertIn('info', param_info.query_modules)
    self.assertIn('login', param_info._action_modules)
def test_content_format(self):
    """Test content format."""
    expected = [
        'text/x-wiki',
        'text/javascript',
        'text/css',
        'text/plain',
    ]
    # The JSON content format was introduced with MW 1.24.
    if MediaWikiVersion(self.site.version()) >= MediaWikiVersion('1.24'):
        expected.append('application/json')
    for action in ('edit', 'parse'):
        self._check_param_values(self.site, action, 'contentformat',
                                 expected)
def test_watchlist_type(self):
    """Test watchlist type."""
    known = ['edit', 'new', 'log']
    _version = MediaWikiVersion(self.site.version())
    # 'external' watchlist entries exist from MW 1.20 on.
    if _version >= MediaWikiVersion('1.20'):
        known.append('external')
    # 'categorize' appeared during the 1.27 cycle (1.27.0-wmf.4);
    # 1.27 alpha builds are assumed to carry it as well.
    if _version.version >= (1, 27):
        if _version >= MediaWikiVersion(
                '1.27.0-wmf.4') or _version.suffix == 'alpha':
            known.append('categorize')
    self._check_param_values(self.site, 'query+watchlist', 'type', known)
def test_multiple_modules(self):
    """Test requesting multiple modules in a single fetch."""
    site = self.get_site()
    param_info = api.ParamInfo(site)
    self.assertEqual(len(param_info), 0)
    param_info.fetch(['info', 'revisions'])
    for module in ('query+info', 'query+revisions', 'main', 'paraminfo'):
        self.assertIn(module, param_info._paraminfo)
    if MediaWikiVersion(self.site.version()) < MediaWikiVersion("1.12"):
        # Module counting is unreliable on very old wikis.
        return
    self.assertEqual(len(param_info),
                     2 + len(param_info.preloaded_modules))
def test_content_format(self):
    """Test content format."""
    base = [
        'text/x-wiki',
        'text/javascript',
        'text/css',
        'text/plain',
    ]
    # The JSON content format was introduced with MW 1.24.
    if MediaWikiVersion(self.site.version()) >= MediaWikiVersion('1.24'):
        base.append('application/json')
    if isinstance(self.site, DataSite):
        # It is not clear when this format has been added, see T129281.
        base.append('application/vnd.php.serialized')
    self._check_param_values(self.site, 'edit', 'contentformat', base)
    self._check_param_values(self.site, 'parse', 'contentformat', base)
def test_new_mode(self):
    """Test paraminfo fetching with modules_only_mode forced on."""
    site = self.get_site()
    # The modules-only query format requires MW 1.25wmf4 or later.
    if MediaWikiVersion(site.version()) < MediaWikiVersion('1.25wmf4'):
        raise unittest.SkipTest(
            "version %s doesn't support the new paraminfo api"
            % site.version())
    param_info = api.ParamInfo(site, modules_only_mode=True)
    param_info.fetch(['info'])
    for module in ('query+info', 'main', 'paraminfo'):
        self.assertIn(module, param_info._paraminfo)
    self.assertEqual(len(param_info),
                     1 + len(param_info.preloaded_modules))
    self.assertIn('revisions', param_info.prefixes)
def test_many_continuations_limited(self):
    """Test PropertyGenerator with many limited props."""
    mainpage = self.get_mainpage()
    links = list(self.site.pagelinks(mainpage, total=30))
    titles = [link.title(with_section=False) for link in links]
    params = {
        'rvprop': 'ids|flags|timestamp|user|comment|content',
        'titles': '|'.join(titles),
    }
    # Fix: site.version() returns a plain str; comparing it with a
    # MediaWikiVersion relied on the reflected operator.  Wrap it so
    # both operands are version objects, consistent with every other
    # version check in this suite.
    if MediaWikiVersion(self.site.version()) >= MediaWikiVersion('1.32'):
        # MW 1.32+ requires rvslots to request revision content.
        params['rvslots'] = 'main'
    gen = api.PropertyGenerator(
        site=self.site,
        prop='revisions|info|categoryinfo|langlinks|templates',
        parameters=params)
    # An APIError is raised if set_maximum_items is not called.
    gen.set_maximum_items(-1)  # suppress use of "rvlimit" parameter
    # Force the generator into continuation mode
    gen.set_query_increment(5)
    count = 0
    for pagedata in gen:
        self.assertIsInstance(pagedata, dict)
        self.assertIn('pageid', pagedata)
        count += 1
    self.assertEqual(len(links), count)
def test_tokens_in_mw_123_124wmf18(self):
    """Test ability to get page tokens."""
    # Fix: the original compared a MediaWikiVersion against the bare
    # string '1.37wmf24'.  Wrap the literal in MediaWikiVersion so the
    # ordering is well-defined and consistent with the other version
    # checks in this suite.
    if (MediaWikiVersion(self.orig_version())
            >= MediaWikiVersion('1.37wmf24')):
        self.skipTest(
            'Site {} version {} is too new for this tests.'.format(
                self.mysite, self._version))
    self._test_tokens('1.23', '1.24wmf18', 'deleteglobalaccount')
def _require_errors(site):
    """
    Check if the Wikibase site is so old it requires error bounds to be given.

    If no site item is supplied it raises a warning and returns False.

    @param site: The Wikibase site
    @type site: pywikibot.site.DataSite
    @rtype: bool
    """
    if not site:
        warning(
            "WbQuantity now expects a 'site' parameter. This is needed to "
            "ensure correct handling of error bounds.")
        return False
    # Error bounds became optional with MediaWiki 1.29.0-wmf.2.
    return MediaWikiVersion(site.version()) < MediaWikiVersion('1.29.0-wmf.2')
def test_with_invalid_module(self):
    """Test fetching an unknown module name."""
    site = self.get_site()
    param_info = api.ParamInfo(site)
    self.assertEqual(len(param_info), 0)
    param_info.fetch('foobar')
    # The bogus module is silently dropped instead of being stored.
    self.assertNotIn('foobar', param_info._paraminfo)
    self.assertRaises(KeyError, param_info.__getitem__, 'foobar')
    for module in ('main', 'paraminfo'):
        self.assertIn(module, param_info._paraminfo)
    if MediaWikiVersion(self.site.version()) < MediaWikiVersion("1.12"):
        return
    self.assertEqual(len(param_info),
                     len(param_info.preloaded_modules))
def test_generators(self):
    """Test that pageset and query expose the same generator list."""
    site = self.get_site()
    param_info = api.ParamInfo(site, set(['pageset', 'query']))
    self.assertEqual(len(param_info), 0)
    param_info._init()
    for module in ('main', 'paraminfo', 'pageset', 'query'):
        self.assertIn(module, param_info._paraminfo)
    if MediaWikiVersion(site.version()) >= MediaWikiVersion("1.21"):
        # 'generator' was added to 'pageset' in 1.21
        pageset_generators = param_info.parameter('pageset', 'generator')
        query_generators = param_info.parameter('query', 'generator')
        self.assertEqual(pageset_generators, query_generators)
def test_creator(self):
    """Test getCreator."""
    mainpage = self.get_mainpage()
    creator = mainpage.getCreator()
    # getCreator() is deprecated but must still return the oldest
    # revision's user and ISO-formatted timestamp.
    self.assertEqual(creator,
                     (mainpage.oldest_revision.user,
                      mainpage.oldest_revision.timestamp.isoformat()))
    self.assertIsInstance(creator[0], unicode)
    self.assertIsInstance(creator[1], unicode)
    self.assertDeprecation()
    self._reset_messages()
    # parent_id (and hence previous_revision_id) needs MW 1.16+.
    if MediaWikiVersion(self.site.version()) >= MediaWikiVersion('1.16'):
        self.assertIsInstance(mainpage.previous_revision_id, int)
        self.assertEqual(mainpage.previous_revision_id,
                         mainpage.latest_revision.parent_id)
        self.assertDeprecation()
def translateAndCapitalizeNamespaces(self, text): """Use localized namespace names.""" # arz uses english stylish codes if self.site.sitename() == 'wikipedia:arz': return text family = self.site.family # wiki links aren't parsed here. exceptions = ['nowiki', 'comment', 'math', 'pre'] for nsNumber in self.site.namespaces(): if nsNumber in (0, 2, 3): # skip main (article) namespace # skip user namespace, maybe gender is used continue # a clone is needed. Won't change the namespace dict namespaces = list(self.site.namespace(nsNumber, all=True)) thisNs = namespaces.pop(0) if nsNumber == 6 and family.name == 'wikipedia': if self.site.code in ('en', 'fr') and \ MediaWikiVersion(self.site.version()) >= MediaWikiVersion('1.14'): # do not change "Image" on en-wiki and fr-wiki assert u'Image' in namespaces namespaces.remove(u'Image') if self.site.code == 'hu': # do not change "Kép" on hu-wiki assert u'Kép' in namespaces namespaces.remove(u'Kép') elif self.site.code == 'pt': # bug 55242 should be implemented continue # lowerspaced and underscored namespaces for i in range(len(namespaces)): item = namespaces[i].replace(' ', '[ _]') item = u'[%s%s]' % (item[0], item[0].lower()) + item[1:] namespaces[i] = item namespaces.append(thisNs[0].lower() + thisNs[1:]) if thisNs and namespaces: text = textlib.replaceExcept( text, r'\[\[\s*(%s) *:(?P<nameAndLabel>.*?)\]\]' % '|'.join(namespaces), r'[[%s:\g<nameAndLabel>]]' % thisNs, exceptions) return text
def test_with_invalid_module(self):
    """Test requesting different kind of invalid modules."""
    site = self.get_site()
    pi = api.ParamInfo(site)
    self.assertEqual(len(pi), 0)
    pi.fetch('foobar')
    # Unknown modules are dropped silently instead of being stored.
    self.assertNotIn('foobar', pi._paraminfo)
    # `foo_bar` is presumably a module-level regex matching the
    # KeyError message — confirm against the file header.
    self.assertRaisesRegex(KeyError, foo_bar, pi.__getitem__, 'foobar')
    self.assertRaisesRegex(KeyError, foo_bar, pi.__getitem__,
                           'foobar+foobar')
    self.assertIn('main', pi._paraminfo)
    self.assertIn('paraminfo', pi._paraminfo)
    if MediaWikiVersion(self.site.version()) < MediaWikiVersion("1.12"):
        return
    self.assertEqual(len(pi), len(pi.preloaded_modules))
def _parse_post_117(self):
    """Parse 1.17+ siteinfo data.

    Fills in ``version``, ``server``, ``scriptpath``, ``articlepath``
    and ``lang``; sets ``private_wiki`` and bails out early when the
    API refuses unauthenticated reads.
    """
    response = fetch(self.api + '?action=query&meta=siteinfo&format=json')
    # remove preleading newlines and Byte Order Mark (BOM), see T128992
    content = response.content.strip().lstrip('\uFEFF')
    info = json.loads(content)
    # A private wiki answers meta=siteinfo with a 'readapidenied' error.
    self.private_wiki = ('error' in info
                         and info['error']['code'] == 'readapidenied')
    if self.private_wiki:
        # Cannot read siteinfo without logging in; leave attrs unset.
        return
    info = info['query']['general']
    self.version = MediaWikiVersion.from_generator(info['generator'])
    if self.version < MediaWikiVersion('1.17'):
        # Pre-1.17 wikis are handled by a different parser.
        return
    self.server = urljoin(self.fromurl, info['server'])
    for item in ['scriptpath', 'articlepath', 'lang']:
        setattr(self, item, info[item])
def handle_starttag(self, tag, attrs):
    """Inspect an opening tag for generator and API-URL hints."""
    attributes = dict(attrs)
    if tag == "meta":
        # <meta name="generator" content="MediaWiki x.y"> reveals the
        # server version.
        if attributes.get("name") == "generator":
            self.generator = attributes["content"]
            try:
                self.version = MediaWikiVersion.from_generator(
                    self.generator)
            except ValueError:
                # Not a MediaWiki generator string; leave version alone.
                pass
        return
    if tag == "link":
        if ("rel" in attributes and "href" in attributes
                and attributes["rel"] in ("EditURI", "stylesheet",
                                          "search")):
            self.set_api_url(attributes["href"])
        return
    if tag == "script" and "src" in attributes:
        self.set_api_url(attributes["src"])
def handle_starttag(self, tag, attrs):
    """Handle an opening tag."""
    attrs = dict(attrs)
    if tag == "meta":
        if attrs.get('name') == 'generator':
            # e.g. <meta name="generator" content="MediaWiki 1.31.0">
            self.generator = attrs["content"]
            try:
                self.version = MediaWikiVersion.from_generator(
                    self.generator)
            except ValueError:
                # Not a MediaWiki generator string; leave version unset.
                pass
    elif tag == 'link' and 'rel' in attrs and 'href' in attrs:
        # EditURI/stylesheet/search links usually expose the script path.
        if attrs['rel'] in ('EditURI', 'stylesheet', 'search'):
            self.set_api_url(attrs['href'])
    elif tag == 'script' and 'src' in attrs:
        self.set_api_url(attrs['src'])
def _parse_post_117(self):
    """Parse 1.17+ siteinfo data.

    Fills in ``version``, ``server``, ``scriptpath``, ``articlepath``
    and ``lang``; bails out early when the wiki is private.
    """
    response = fetch(self.api + "?action=query&meta=siteinfo&format=json")
    info = json.loads(response.content)
    # A private wiki rejects unauthenticated siteinfo queries.
    self.private_wiki = "error" in info and info["error"]["code"] == "readapidenied"
    if self.private_wiki:
        return
    info = info["query"]["general"]
    self.version = MediaWikiVersion.from_generator(info["generator"])
    if self.version < MediaWikiVersion("1.17"):
        # Pre-1.17 siteinfo lacks the fields read below.
        return
    self.server = urljoin(self.fromurl, info["server"])
    for item in ["scriptpath", "articlepath", "lang"]:
        setattr(self, item, info[item])
def test_generator(self):
    """Test from_generator classmethod."""
    parsed = V.from_generator('MediaWiki 1.2.3')
    self.assertEqual(parsed, self._make('1.2.3'))
    # Anything not introduced by 'MediaWiki' must be rejected.
    with self.assertRaises(ValueError):
        V.from_generator('Invalid 1.2.3')
def test_generator(self):
    """Test from_generator classmethod."""
    parsed = MediaWikiVersion.from_generator('MediaWiki 1.2.3')
    self.assertEqual(parsed, self._make('1.2.3'))
    # Generator strings not produced by MediaWiki must be rejected.
    with self.assertRaisesRegex(ValueError, self.GENERATOR_STRING_RE):
        MediaWikiVersion.from_generator('Invalid 1.2.3')