def main(*args):
    """Main function. Grab a generator and pass it to the bot to work on.

    Command line arguments:
    -all     work on all monument items (query not implemented yet)
    -fix     work on items to fix (effectively the default, see note)
    -create  work on items to create (query not implemented yet)
    """
    # Renamed from "all" so the builtin all() is not shadowed.
    work_all = False
    # NOTE(review): fix defaults to True, so "-fix" is currently a no-op and
    # "-create" can never be reached unless this default is changed.
    fix = True
    create = False
    for arg in pywikibot.handle_args(args):
        if arg == '-all':
            work_all = True
        elif arg == '-fix':
            fix = True
        elif arg == '-create':
            create = True
    repo = pywikibot.Site().data_repository()
    generator = None
    if work_all:
        # TODO: placeholder query, "-all" does not do anything useful yet
        query = u"""SELECT """
        generator = pagegenerators.PreloadingItemGenerator(
            pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    elif fix:
        query = u"""SELECT ?item WHERE { ?item wdt:P1435 wd:Q13423591 . ?item wdt:P359 ?id } LIMIT 5000"""
        generator = pagegenerators.PreloadingItemGenerator(
            pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    elif create:
        # TODO: placeholder query, "-create" does not do anything useful yet
        query = u"""SELECT """
        generator = pagegenerators.PreloadingItemGenerator(
            pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    if generator:
        rijksmonumentenComplexBot = RijksmonumentenComplexBot(generator)
        rijksmonumentenComplexBot.run()
def main(*args):
    """Main function. Grab a generator and pass it to the bot to work on.

    Command line arguments:
    -series:Qxx  only work on the episodes of this series
    -report:xxx  name of a report page (parsed but not used in this function)
    """
    series = None
    # NOTE(review): report is parsed but never used below.
    report = None
    for arg in pywikibot.handle_args(args):
        if arg.startswith('-series:'):
            if len(arg) == 8:
                series = pywikibot.input(
                    u'Please enter the Q id of the series to work on:')
            else:
                series = arg[8:]
        elif arg.startswith('-report:'):
            if len(arg) == 8:
                report = pywikibot.input(
                    u'Please enter the name of the page to report on:')
            else:
                report = arg[8:]
    # Episodes of a series without IMDB id, where a neighbouring episode has one.
    basequery = u"""SELECT DISTINCT ?item WHERE {
  ?item wdt:P31 wd:Q21191270 .
  ?item wdt:P179 wd:%s .
  MINUS { ?item wdt:P345 [] . ?item wdt:P1191 []}
  #{ ?item wdt:P155 ?otheritem } UNION { ?item wdt:P156 ?otheritem }
  #?otheritem wdt:P345 [] .
  }"""
    repo = pywikibot.Site().data_repository()
    if series:
        # Work on just the one requested series.
        query = basequery % (series,)
        gen = pagegenerators.PreloadingItemGenerator(
            pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
        imdbFinderBot = IMDBFinderBot(gen, series)
        imdbFinderBot.run()
    else:
        # Find all candidate series, then run the bot once per series.
        seriesquery = u"""SELECT DISTINCT ?item WHERE {
  ?episode wdt:P31 wd:Q21191270 .
  ?episode wdt:P179 ?item .
  MINUS { ?episode wdt:P345 [] . ?item wdt:P1191 []}
  { ?episode wdt:P155 ?otheritem } UNION { ?episode wdt:P156 ?otheritem }
  ?otheritem wdt:P345 [] .
  ?otheritem wdt:P179 ?item .
  }"""
        seriesgen = pagegenerators.PreloadingItemGenerator(
            pagegenerators.WikidataSPARQLPageGenerator(seriesquery, site=repo))
        for seriespage in seriesgen:
            series = seriespage.title()
            query = basequery % (series,)
            gen = pagegenerators.PreloadingItemGenerator(
                pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
            imdbFinderBot = IMDBFinderBot(gen, series)
            imdbFinderBot.run()
def main(*args):
    """Do a query and have the bot process the items.

    :param args: command line arguments
    :return: None
    """
    # Queries for paintings without a creator: everywhere, or one collection.
    query = u'SELECT ?item WHERE { ?item wdt:P31 wd:Q3305213 . MINUS { ?item wdt:P170 [] } }'
    querycollection = u"""SELECT ?item WHERE { ?item wdt:P31 wd:Q3305213 . ?item wdt:P195 wd:%s . MINUS { ?item wdt:P170 [] } }"""
    for arg in pywikibot.handle_args(args):
        if arg.startswith('-collectionid'):
            if len(arg) == 13:
                # Bare flag, ask interactively.
                collectionid = pywikibot.input(
                    u'Please enter the collectionid you want to work on:')
            else:
                # "-collectionid:Qxx" -> strip the prefix and separator.
                collectionid = arg[14:]
            query = querycollection % (collectionid,)
    repo = pywikibot.Site().data_repository()
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    paintingBot = PaintingBot(gen, change=False)
    paintingBot.run()
def main():
    """Run the VIAF import bot on items found via NTA backlinks.

    Two SPARQL queries are built but currently unused: the active generator
    below is ntaBacklinksGenerator(); the SPARQL-based alternatives are
    commented out.
    """
    repo = pywikibot.Site().data_repository()
    # NOTE(review): this query is immediately overwritten below and is dead.
    query = u"""SELECT ?item WHERE { ?item wdt:P214 ?viafid . { ?item wdt:P27 wd:Q31 } UNION { ?item wdt:P27 wd:Q29999 } . ?item wdt:P31 wd:Q5 . MINUS { ?item wdt:P1006 [] } . } LIMIT 400000"""
    # This query will get all the Qid's for which NTA has a link, but the Qid doesn't have a link
    # The commented out lines will also make mismatched links visible. Too much for this bot now.
    # NOTE(review): also unused while the SPARQL generator line stays commented out.
    query = u"""SELECT ?item ?person {
  SERVICE <http://data.bibliotheken.nl/sparql> {
    SELECT ?item ?person WHERE {
      ?person rdf:type <http://schema.org/Person> .
      ?person owl:sameAs ?item .
      FILTER REGEX(STR(?item), "http://www.wikidata.org/entity/") .
    }
  }
  # The URI (wdtn) links don't seem to be fully populated
  #MINUS { ?item wdtn:P1006 ?person } .
  MINUS { ?item wdt:P1006 [] } .
  #MINUS { ?item owl:sameAs ?item2 . ?item2 wdtn:P1006 ?person }
  MINUS { ?item owl:sameAs ?item2 . ?item2 wdt:P1006 [] }
}"""
    #generator = pagegenerators.PreloadingItemGenerator(viafDumpGenerator())
    generator = pagegenerators.PreloadingItemGenerator(ntaBacklinksGenerator())
    #generator = pagegenerators.PreloadingItemGenerator(pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    viafImportBot = ViafImportBot(generator)
    viafImportBot.run()
def main(*args):
    """Run the bot.

    By default it only runs on the items changed in the last 14 days.

    Command line arguments:
    -full      work on every item instead of only recently changed ones
    -days:nn   number of days of recent changes to cover (default 14)
    """
    fullrun = False
    days = u'14'
    for arg in pywikibot.handle_args(args):
        if arg == '-full':
            fullrun = True
        elif arg.startswith('-days:'):
            if len(arg) == 6:
                days = pywikibot.input(
                    u'Please enter the number of days you want to work on:')
            else:
                days = arg[6:]
    if fullrun:
        pywikibot.output(u'Doing a full run')
        query = u'SELECT DISTINCT ?item WHERE { ?item wdt:P245 [] . ?item wdt:P31 wd:Q5 }'
    else:
        pywikibot.output(
            u'Doing a run on the items modified in the last %s days' % (days,))
        query = u"""SELECT DISTINCT ?item { ?item wdt:P245 [] . ?item wdt:P31 wd:Q5 . ?item schema:dateModified ?date_modified . BIND (now() - ?date_modified as ?date_range) FILTER (?date_range < %s) }""" % (days,)
    repo = pywikibot.Site().data_repository()
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    ulanImportBot = UlanImportBot(gen)
    ulanImportBot.run()
def generator(self):
    """Return preloaded items selected by the stored 'qualifiers' query."""
    query = self.store.build_query(
        'qualifiers',
        item=self.good_item,
        good=', wd:'.join(self.whitelist),
        bad=', wd:'.join(self.blacklist))
    sparql_gen = pagegenerators.WikidataSPARQLPageGenerator(
        query, site=self.repo)
    return pagegenerators.PreloadingItemGenerator(sparql_gen)
def test_non_item_gen(self):
    """Test TestPreloadingItemGenerator with ReferringPageGenerator."""
    site = self.get_site()
    prop_page = pywikibot.Page(site, 'Property:P31')
    referring = pagegenerators.ReferringPageGenerator(prop_page, total=5)
    preloading = pagegenerators.PreloadingItemGenerator(referring)
    # Every yielded page must have been converted to an ItemPage.
    self.assertTrue(
        all(isinstance(item, pywikibot.ItemPage) for item in preloading))
def main(*args):
    """Main function. Grab a generator and pass it to the bot to work on.

    Command line arguments:
    -correctlocation  switch to the query that checks existing locations
    """
    correctlocation = False
    # Default: paintings in a geolocated collection, but without a location.
    query = u"""SELECT ?item ?collection WHERE { ?item wdt:P31 wd:Q3305213 . ?item wdt:P195 ?collection . MINUS { ?item wdt:P276 [] } . ?collection wdt:P625 [] . } ORDER BY ?collection"""
    for arg in pywikibot.handle_args(args):
        if arg.startswith('-correctlocation'):
            correctlocation = True
            # Paintings whose location is an administrative parent of the
            # collection's location.
            query = """SELECT ?item ?collection ?location WHERE { ?item wdt:P31 wd:Q3305213 . ?item wdt:P276 ?location . ?item wdt:P195 ?collection . ?collection wdt:P131+ ?location ; wdt:P625 [] . } ORDER BY ?collection LIMIT 5000"""
    repo = pywikibot.Site().data_repository()
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    locationFromCollectionBot = LocationFromCollectionBot(
        gen, correctlocation=correctlocation)
    locationFromCollectionBot.run()
def main():
    """Run the ProvenanceBot on items matched by the inventory query."""
    query = u"""SELECT DISTINCT ?item WHERE { ?item wdt:P31 wd:Q3305213 . ?item wdt:P195 wd:Q28045665 . ?item wdt:P195 wd:Q18600731 . ?item p:P217 ?inv1statement . ?inv1statement ps:P217 ?inv . ?inv1statement pq:P195 wd:Q28045665 . ?item p:P217 ?inv2statement . ?inv2statement ps:P217 ?inv . ?inv2statement pq:P195 wd:Q18600731 . MINUS { ?item wdt:P195 wd:Q28045660 . ?item wdt:P195 wd:Q28045674 . ?item wdt:P195 wd:Q2066737 . ?item p:P217 ?inv3statement . ?inv3statement ps:P217 ?inv . ?inv3statement pq:P195 wd:Q28045660 . ?item p:P217 ?inv4statement . ?inv4statement ps:P217 ?inv . ?inv4statement pq:P195 wd:Q28045674 . ?item p:P217 ?inv5statement . ?inv5statement ps:P217 ?inv . ?inv5statement pq:P195 wd:Q2066737 . } }"""
    repo = pywikibot.Site().data_repository()
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    provenanceBot = ProvenanceBot(gen)
    provenanceBot.run()
def __init__(self, generator):
    """Initialize the bot.

    Arguments:
    * generator - A generator that yields wikidata item objects.
    """
    self.repo = pywikibot.Site().data_repository()
    # Wrap so items are fetched in batches instead of one by one.
    self.generator = pagegenerators.PreloadingItemGenerator(generator)
def main():
    """Run the VIAF import bot on humans with a VIAF id but no P1006."""
    repo = pywikibot.Site().data_repository()
    query = u"""SELECT ?item WHERE { ?item wdt:P214 ?viafid . ?item wdt:P31 wd:Q5 . MINUS { ?item wdt:P1006 [] } . } LIMIT 400000"""
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    viafImportBot = ViafImportBot(gen)
    viafImportBot.run()
def getGenerator(self, genre):
    """Build a SPARQL query to get interesting items to work on.

    :param genre: Q id of the genre; selects paintings whose only genre it is
    :return: A generator that yields items
    """
    # Removed the unused local 'firstfilter'.
    query = """SELECT ?item WHERE { ?item wdt:P136 wd:%s . ?item wdt:P31 wd:Q3305213 . MINUS { ?item wdt:P136 ?genre . FILTER (?genre!=wd:%s) } } LIMIT 1000""" % (genre, genre)
    return pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=self.repo))
def main():
    """Run the VIAF import bot on DE/AT/CH people lacking a GND id."""
    repo = pywikibot.Site().data_repository()
    query = u"""SELECT ?item ?viafid WHERE { { ?item wdt:P27 wd:Q183 } UNION { ?item wdt:P27 wd:Q40 } UNION { ?item wdt:P27 wd:Q39 } . ?item wdtn:P214 ?viafid . MINUS { ?item wdt:P227 ?gndid } } LIMIT 30000"""
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    viafImportBot = ViafImportBot(gen)
    viafImportBot.run()
def getCreator(self, creator):
    """Find the painter with the name in creator.

    First check if the name is already in the self.creators cache.
    Second, do a search.
    If a hit is found, update the cache in self.creators.

    :param creator: name of the painter to look up
    :return: ItemPage of the painter, the cached 'anonymous' entry for
             anonymous-style names, or None if nothing matched
    """
    # First use the cache
    if creator in self.creators:
        return self.creators[creator]

    # Search Wikidata for a suitable candidate, tell the search to only
    # return humans.
    searchstring = u'%s haswbstatement:P31=Q5' % (creator,)
    creategen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikibaseItemGenerator(
            pagegenerators.SearchPageGenerator(
                searchstring, step=None, total=50, namespaces=[0],
                site=self.repo)))
    for creatoritem in creategen:
        if creatoritem.isRedirectPage():
            creatoritem = creatoritem.getRedirectTarget()
        # Fetch the item data once instead of chaining .get() repeatedly.
        data = creatoritem.get()
        label = data.get('labels').get('en')
        aliases = data.get('aliases').get('en')
        # Label match is case insensitive; alias match is exact.
        if (label and label.lower() == creator.lower()) or (
                aliases and creator in aliases):
            # Accept only if the candidate has occupation painter (Q1028181).
            if u'P106' in data.get('claims'):
                for existing_claim in data.get('claims').get('P106'):
                    if existing_claim.target_equals(u'Q1028181'):
                        self.creators[creator] = creatoritem
                        return creatoritem

    # Regex that should match all the anonymous work stuff that isn't covered
    # by the list. Fixed: raw string (the original non-raw '\s' relies on a
    # deprecated escape) and duplicate 'School of' alternative removed.
    anonymousRegex = r'^(Workshop of|Follower of|Circle of|Manner of|Forgery after|School of|After|Unidentified Artist)\s.*$'
    if re.match(anonymousRegex, creator, flags=re.I):
        self.creators[creator] = self.creators.get('anonymous')
        return self.creators.get('anonymous')

    # We don't want to do the same search over and over again
    self.creators[creator] = None
    return None
def main():
    """Do a query for items that do have FAST-ID (P2163), but not and VIAF ID
    (P214) or LCAuth ID (P244).
    """
    # Note: the MINUS only excludes items that have BOTH P214 and P244.
    query = u"""SELECT DISTINCT ?item WHERE { ?item wdt:P2163 [] . MINUS { ?item wdt:P214 [] . ?item wdt:P244 [] } . }"""
    repo = pywikibot.Site().data_repository()
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    fastImportBot = FastImportBot(gen)
    fastImportBot.run()
def main():
    """Add missing P31 (instance of) claims based on namespace of sitelinks.

    Walks items without claims; for each Wikipedia sitelink in a known
    non-article namespace, adds the matching 'instance of' claim.
    """
    #lang = u'en'
    #templates = getTemplateClaims(lang=lang)
    #print templates
    repo = pywikibot.Site(u'wikidata', u'wikidata').data_repository()
    # Namespace number -> Q id of the class describing pages in it.
    namespaceclaims = {
        4: u'Q14204246',  # Wikipedia
        10: u'Q11266439',  # Template
        14: u'Q4167836',  # Category
        100: u'Q4663903',  # Portal
        828: u'Q15184295',  # Module
    }
    noclaimgen = pagegenerators.PreloadingItemGenerator(getNoclaimGenerator())
    #repo = pywikibot.Site().data_repository()
    #print templates.keys()
    for itempage in noclaimgen:
        pywikibot.output(itempage.title())
        if not itempage.exists():
            pywikibot.output(u'Deleted, skipping')
            continue
        if itempage.isRedirectPage():
            pywikibot.output(u'Redirect, skipping')
            continue
        data = itempage.get()
        # Only touch items that still have no 'instance of' claim.
        if u'P31' not in data.get('claims'):
            for page in itempage.iterlinks(family=u'wikipedia'):
                pywikibot.output(page.title())
                # Skip main-namespace links; only act on mapped namespaces.
                if not page.namespace() == 0 and page.namespace() in namespaceclaims:
                    pywikibot.output(u'Working on %s' % (page.title(),))
                    newclaim = pywikibot.Claim(repo, u'P31')
                    claimtarget = pywikibot.ItemPage(
                        repo, namespaceclaims.get(page.namespace()))
                    newclaim.setTarget(claimtarget)
                    summary = u'Adding [[Property:%s]] -> [[%s]] based on %s' % (
                        u'P31', namespaceclaims.get(page.namespace()),
                        page.title(asLink=True))
                    pywikibot.output(summary)
                    try:
                        itempage.addClaim(newclaim, summary=summary)
                    except pywikibot.data.api.APIError:
                        # Back off on API trouble, then move to the next
                        # sitelink loop iteration is abandoned via break.
                        pywikibot.output(u'Ai, API problems. Let\'s sleep')
                        time.sleep(60)
                        break
def getGenerator(self):
    """Get a generator of paintings that have one of the replacable genres.

    :return: A generator that yields ItemPages
    """
    # Build the VALUES clause with a join instead of repeated string
    # concatenation in a loop (same resulting query text).
    values = u''.join(u' wd:%s ' % (genre,) for genre in self.genres)
    query = (u'SELECT ?item WHERE { ?item wdt:P31 wd:Q3305213 . '
             u'?item wdt:P136 ?genre .VALUES ?genre {' + values + u' } }')
    generator = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=self.repo))
    return generator
def __init__(self):
    """Grab generator based on SPARQL to work on."""
    self.site = pywikibot.Site(u'commons', u'commons')
    self.repo = self.site.data_repository()
    # All paintings that already have an image (P18).
    query = u"""SELECT DISTINCT ?item ?image WHERE { ?item wdt:P31 wd:Q3305213 . ?item wdt:P18 ?image. } LIMIT 200000"""
    self.generator = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=self.repo))
def getGenerator(self):
    """Build a SPARQL query to get interesting items to work on.

    Selects paintings without a main subject whose English label looks like
    "... (dddd-dddd) ..." and that have no genre or the portrait genre.

    :return: A generator that yields items
    """
    query = """SELECT DISTINCT ?item ?itemlabel WHERE { ?item wdt:P31 wd:Q3305213 . MINUS { ?item wdt:P921 [] } . { MINUS { ?item wdt:P136 [] } } UNION { ?item wdt:P136 wd:Q134307 } ?item rdfs:label ?itemlabel . FILTER(LANG(?itemlabel)="en" && REGEX(STR(?itemlabel), "^.+\\\\(\\\\d\\\\d\\\\d\\\\d-\\\\d\\\\d\\\\d\\\\d\\\\).*$")) } LIMIT 5000"""
    sparql_gen = pagegenerators.WikidataSPARQLPageGenerator(
        query, site=self.repo)
    return pagegenerators.PreloadingItemGenerator(sparql_gen)
def main():
    """Main function. Grab a generator and pass it to the bot to work on."""
    # Does have biografisch portaal, but no Parlement & Politiek ID
    query = u"""SELECT DISTINCT ?item WHERE { ?item wdt:P651 [] . MINUS { ?item wdt:P1749 [] } }"""
    repo = pywikibot.Site().data_repository()
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    ppImporterBot = PPImporterBot(gen)
    ppImporterBot.run()
def main():
    """Main function. Grab a generator and pass it to the bot to work on."""
    # Does have biografisch portaal, is a human, but no RKDartists
    query = u"""SELECT DISTINCT ?item WHERE { ?item wdt:P651 [] . ?item wdt:P31 wd:Q5 . MINUS {?item wdt:P650 [] . } }"""
    repo = pywikibot.Site().data_repository()
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    rkdArtistsImporterBot = RKDArtistsImporterBot(gen)
    rkdArtistsImporterBot.run()
def generator(self):
    """Build a PetScan-based generator of site pages for matching items."""
    extra = {
        'common_wiki': 'wikidata',
        'templates_no': 'Autoritní data',  # l10n!
        'wikidata_source_sites': self.site.dbName(),
        'wikidata_item': 'with',
        'wikidata_prop_item_use': ','.join(self.props),
    }
    petscan = pagegenerators.PetScanPageGenerator(
        ['Muži', 'Ženy', 'Žijící_lidé'],  # l10n!
        subset_combination=False,
        site=self.site,
        namespaces=[0],
        extra_options=extra)
    # hack: preload the items, then map them back to pages on self.site
    items = pagegenerators.PreloadingItemGenerator(petscan)
    return pagegenerators.WikidataPageFromItemGenerator(items, self.site)
def main():
    """Run the NormalizationBot on inventory numbers matching "OS[- ]..."."""
    query = u"""SELECT DISTINCT ?item WHERE { ?item wdt:P195 wd:Q574961 . ?item wdt:P31 wd:Q3305213 . ?item p:P217 ?invstatement . ?invstatement ps:P217 ?inv . ?invstatement pq:P195 wd:Q574961 . FILTER regex (?inv, "^OS[- ](.*)$"). } LIMIT 2500"""
    repo = pywikibot.Site().data_repository()
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    normalizationBot = NormalizationBot(gen)
    normalizationBot.run()
def main():
    """Run the GenderBot on humans with authority ids but no gender (P21)."""
    query = u"""SELECT DISTINCT ?item WHERE { { ?item wdt:P245 [] } UNION { ?item wdt:P650 [] } UNION { ?item wdt:P651 [] } UNION { ?item wdt:P3372 [] } . ?item wdt:P31 wd:Q5 . MINUS { ?item wdt:P21 [] } . }"""
    repo = pywikibot.Site().data_repository()
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    genderBot = GenderBot(gen)
    genderBot.run()
def main():
    """Do a query for items that do have RKDartists (P650) and VIAF (P214),
    but no ULAN (P245).

    :return: None
    """
    query = u"""SELECT ?item WHERE { ?item wdt:P650 [] . ?item wdt:P214 [] . MINUS { ?item wdt:P245 [] } }"""
    repo = pywikibot.Site().data_repository()
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    viafImportBot = ViafImportBot(gen)
    viafImportBot.run()
def main(*args):
    """Run the RKD images creator bot on paintings without a creator claim."""
    # NOTE(review): gen is assigned but never used below; kept because the
    # call might matter elsewhere — confirm before removing.
    gen = getPaintingGenerator()
    repo = pywikibot.Site().data_repository()
    query = u"""SELECT ?item ?rkdid ?itemdesc WHERE { ?item wdt:P31 wd:Q3305213 . ?item wdt:P350 ?rkdid . MINUS { ?item p:P170 [] } . ?item schema:description ?itemdesc. FILTER(LANG(?itemdesc) = "en") } LIMIT 1000"""
    generator = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    imagesCreatorRobot = RKDimagesCreatorRobot(generator)
    imagesCreatorRobot.run()
def main():
    """Run the bot that reports items with a Commons category but no
    Commons sitelink."""
    query = u"""SELECT ?item ?commonscat WHERE { ?item wdt:P1435 wd:Q916333 . ?item wdt:P373 ?commonscat . MINUS { ?item wdt:P910 [] } . FILTER NOT EXISTS { ?article schema:about ?item . ?article schema:isPartOf <https://commons.wikimedia.org/> } }"""
    repo = pywikibot.Site().data_repository()
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    missingCommonsSitelinkBot = MissingCommonsSitelinkBot(gen)
    missingCommonsSitelinkBot.run()
def main():
    """Run the GenderBot on humans with artist ids but no gender statement."""
    query = u"""SELECT DISTINCT ?item WHERE {
  { ?item wdt:P245 [] } UNION # ULAN ID (P245)
  { ?item wdt:P650 [] } UNION # RKDartists ID (P650)
  { ?item wdt:P651 [] } UNION # Biografisch Portaal number (P651)
  { ?item wdt:P1707 [] } UNION # DAAO ID (P1707)
  { ?item wdt:P3372 [] } UNION # Auckland Art Gallery artist ID (P3372)
  { ?item wdt:P2843 [] } . # Benezit ID (P2843)
  ?item wdt:P31 wd:Q5 .
  MINUS { ?item p:P21 [] } .
}"""
    repo = pywikibot.Site().data_repository()
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    genderBot = GenderBot(gen)
    genderBot.run()
def main(*args):
    """Run the public-domain image bot on paintings that have an RKD image
    but no image (P18) or CC image candidate (P4765)."""
    repo = pywikibot.Site().data_repository()
    query = u"""SELECT ?item WHERE { ?item wdt:P350 ?rkdimage . MINUS { ?item wdt:P18 ?image } . MINUS { ?item wdt:P4765 ?ccimage } . ?item wdt:P31 wd:Q3305213 . MINUS { ?item wdt:P170 ?creator . ?creator wdt:P570 ?dod . FILTER(YEAR(?dod) > 1923) } MINUS { ?item wdt:P170 ?creator . ?creator wdt:P569 ?dob . FILTER(YEAR(?dob) > 1900) } MINUS { ?item wdt:P170 wd:Q4233718 . ?item wdt:P571 ?inception . FILTER(YEAR(?inception) > 1850) } ?item schema:dateModified ?modified } ORDER BY DESC(?modified)"""
    gen = pagegenerators.PreloadingItemGenerator(
        pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    imagesPublicDomainRobot = RKDimagesPublicDomain(gen)
    imagesPublicDomainRobot.run()
def main(*args):
    """Main function. Grab a generator and pass it to the bot to work on.

    Command line arguments:
    -create  create new artist items instead of expanding existing ones
    """
    create = False
    for arg in pywikibot.handle_args(args):
        if arg == '-create':
            create = True
    if create:
        pywikibot.output(u'Going to create new artists!')
        rkdArtistsCreatorBot = RKDArtistsCreatorBot()
        generator = rkdArtistsCreatorBot.run()
    else:
        pywikibot.output(u'Going to try to expand existing artists')
        query = u"""SELECT DISTINCT ?item {
  { ?item wdt:P650 ?value .
    ?item wdt:P31 wd:Q5 . # Needs to be human
    MINUS { ?item wdt:P21 [] . # No gender
            ?item wdt:P106 [] . # No occupation
            ?item wdt:P569 [] . # No date of birth
          } .
  } UNION
  { ?item wdt:P650 [] .
    ?item p:P569 ?birthclaim .
    MINUS { ?item p:P27 [] } # No country of citizenship
    ?birthclaim ps:P569 ?birth .
    FILTER(?birth > "+1900-00-00T00:00:00Z"^^xsd:dateTime) .
  } UNION
  { ?item wdt:P650 [] .
    ?item p:P569 ?birthclaim .
    MINUS { ?item p:P570 [] } # No date of death
    ?birthclaim ps:P569 ?birth .
    FILTER(?birth < "+1900-00-15T00:00:00Z"^^xsd:dateTime)
  }
}"""
        repo = pywikibot.Site().data_repository()
        generator = pagegenerators.PreloadingItemGenerator(
            pagegenerators.WikidataSPARQLPageGenerator(query, site=repo))
    rkdArtistsImporterBot = RKDArtistsImporterBot(generator)
    rkdArtistsImporterBot.run()