def extractFromCategories(self, page):
    """Append claims derived from the page's categories.

    Each category title (namespace stripped) is looked up in the
    language-specific ``catrules`` table; every match yields a BotClaim
    for the configured (property, item) pairs, sourced to *page*.

    page -- pywikibot.Page whose categories are scanned.
    """
    rules = catrules[page.site.lang]
    for category in page.categories():
        # Hoisted: the original computed title(withNamespace=False) twice.
        title = category.title(withNamespace=False)
        if title in rules:
            # `property_` avoids shadowing the builtin `property` and
            # matches the naming used by the sibling methods.
            for property_, item in rules[title]:
                self.lang_claims.append(
                    BotClaim(property_,
                             pywikibot.ItemPage(wikidata, item),
                             [page2source(page)]))
def getPlace(self, arg, page, property_, **kwargs):
    """Resolve wikilinks in *arg* to Wikidata items; claim the first found.

    arg -- wikitext that may contain [[link]] or [[link|label]] targets
    page -- source page, used for the claim's source reference
    property_ -- property id for the resulting claim

    Bug fixed: the loop previously rebound the ``page`` parameter, so the
    source reference pointed at the *last linked page* instead of the
    original article.
    """
    links = re.findall("(?<=\[\[).*?(?=\||\])", arg)
    locations = []
    for link in links:
        # Use a distinct name — do not clobber the `page` parameter.
        linked_page = pywikibot.Page(page.site, link)
        item = pywikibot.ItemPage.fromPage(linked_page)
        if item.exists():
            locations.append(item)
    if locations:
        self.lang_claims.append(
            BotClaim(property_, locations[0], [page2source(page)]))
def getBVKItem(self, arg, page, **kwargs): if arg in BVK_VARIANTS: self.lang_claims.append( BotClaim('p166', BVK_VARIANTS[arg], [page2source(page)])) else: print "*** Unknown BVK arg '%s' at '%s'" % (arg, page.title())
def getCommonscat(self, arg, page, **kwargs):
    """Add a p373 (Commons category) claim; empty arg falls back to the
    page title. Underscores are normalised to spaces."""
    category = arg or page.title()
    value = category.replace('_', ' ')
    self.lang_claims.append(BotClaim('p373', value, [page2source(page)]))
def getTYPE_fr(self, arg, page, **kwargs):
    """Add a p107 (entity type) claim from the French TYPE mapping.

    Unknown *arg* values are silently ignored.
    Consistency fix: qualify ItemPage as pywikibot.ItemPage, matching
    the usage in extractFromCategories.
    """
    if arg in TYPE_fr:
        self.lang_claims.append(
            BotClaim('p107',
                     pywikibot.ItemPage(wikidata, TYPE_fr[arg]),
                     [page2source(page)]))
def getULAN(self, arg, page, **kwargs):
    """Add a p245 (ULAN identifier) claim using *arg* verbatim as value."""
    source = page2source(page)
    claim = BotClaim('p245', arg, [source])
    self.lang_claims.append(claim)
def getLCCN(self, arg, page, **kwargs):
    """Normalise an ``prefix/year/serial`` LCCN and add it as p244.

    The serial part is left-padded with zeros to six characters; a
    non-matching *arg* is silently ignored.
    """
    if not self.validateString('[a-z]*/(\d\d|\d\d\d\d)/\d+', arg):
        return
    parts = arg.split('/')
    # rjust(6, '0') is exactly equivalent to '0' * (6 - len(s)) + s.
    lccn = parts[0] + parts[1] + parts[2].rjust(6, '0')
    self.lang_claims.append(BotClaim('p244', lccn, [page2source(page)]))
def getString(self, arg, page, pattern, property_, **kwargs):
    """Add a string-valued claim for *property_* when *arg* matches *pattern*.

    Non-matching values are silently skipped (the dead ``else: pass``
    branch was removed).
    """
    if self.validateString(pattern, arg):
        self.lang_claims.append(
            BotClaim(property_, arg, [page2source(page)]))