# Test harness for the universalscrapers Kodi module: runs every scraper
# against a set of known movies and episodes, then reports which scrapers
# came back empty.

import os
import random
import sys
from sqlite3 import dbapi2 as database

import xbmc
import xbmcaddon
import xbmcgui
import xbmcvfs
from BeautifulSoup import BeautifulStoneSoup

import universalscrapers
# Assumed helper location; adjust if clean_title lives elsewhere in the add-on.
from universalscrapers.common import clean_title

# Shared module state. `movies`, `shows`, `num_shows` and `No_of_scrapers` are
# populated elsewhere in the add-on (test() below fills movies/shows from
# testings.xml).
dialog = xbmcgui.Dialog()
pDialog = xbmcgui.DialogProgress()


def full_test():
    index = 0
    pDialog.create('Universalscrapers Testing mode active', 'please wait')
    for item in movies:
        Scrapers_Run = 0
        if pDialog.iscanceled():
            break
        movie_links_scraper = universalscrapers.scrape_movie(
            item['title'], item['year'], item['imdb'])
        movie_links_scraper = movie_links_scraper()
        # Multiply before dividing: the original '(index / num_shows) * 100'
        # truncates to 0 under Python 2 integer division.
        pDialog.update(int(index * 100 / num_shows),
                       "Scraping Movie {} of {}".format(index, num_shows),
                       item['title'])
        index += 1
        for links in movie_links_scraper:
            Scrapers_Run += 1
            pDialog.update(int(index * 100 / num_shows),
                           "Scraping Movie {} of {}".format(index, num_shows),
                           item['title'] + ' | ' + str(Scrapers_Run) + '/' + str(len(No_of_scrapers)))
    for item in shows:
        Scrapers_Run = 0
        if pDialog.iscanceled():
            break
        episode_links_scraper = universalscrapers.scrape_episode(
            item['title'], item['show_year'], item['year'],
            item['season'], item['episode'], item['imdb'], '')
        episode_links_scraper = episode_links_scraper()
        pDialog.update(int(index * 100 / num_shows),
                       "Scraping TV Show {} of {}".format(index, num_shows),
                       item['title'])
        index += 1
        for links in episode_links_scraper:
            Scrapers_Run += 1
            pDialog.update(int(index * 100 / num_shows),
                           "Scraping TV Show {} of {}".format(index, num_shows),
                           item['title'] + ' | ' + str(Scrapers_Run) + '/' + str(len(No_of_scrapers)))
    get_scraper_results()

def single_test(count, index):
    # Walk the test sets one title at a time, prompting before each scrape;
    # stops after the fifth movie/show pair.
    if count == 5:
        pass
    else:
        Scrapers_Run = 0
        Movies = movies[count]
        tv_shows = shows[count]
        pDialog.create('Universalscrapers Testing mode active', 'please wait')
        if dialog.yesno("Universalscrapers Testing Mode", 'Run next Movie?',
                        Movies['title'] + ' (' + Movies['year'] + ')'):
            movie_links_scraper = universalscrapers.scrape_movie(
                Movies['title'], Movies['year'], Movies['imdb'])
            movie_links_scraper = movie_links_scraper()
            pDialog.update(int(index * 100 / num_shows),
                           "Scraping Movie {} of {}".format(index, num_shows),
                           Movies['title'])
            index += 1
            for links in movie_links_scraper:
                Scrapers_Run += 1
                pDialog.update(int(index * 100 / num_shows),
                               "Scraping Movie {} of {}".format(index, num_shows),
                               Movies['title'] + ' | ' + str(Scrapers_Run) + '/' + str(len(No_of_scrapers)))
        Scrapers_Run = 0
        if dialog.yesno("Universalscrapers Testing Mode", 'Would you like to run a tv show?',
                        tv_shows['title'] + ' (' + tv_shows['year'] + ') S' +
                        tv_shows['season'] + 'E' + tv_shows['episode']):
            episode_links_scraper = universalscrapers.scrape_episode(
                tv_shows['title'], tv_shows['show_year'], tv_shows['year'],
                tv_shows['season'], tv_shows['episode'], tv_shows['imdb'], '')
            episode_links_scraper = episode_links_scraper()
            pDialog.update(int(index * 100 / num_shows),
                           "Scraping TV Show {} of {}".format(index, num_shows),
                           tv_shows['title'])
            index += 1
            for links in episode_links_scraper:
                Scrapers_Run += 1
                pDialog.update(int(index * 100 / num_shows),
                               "Scraping TV Show {} of {}".format(index, num_shows),
                               tv_shows['title'] + ' | ' + str(Scrapers_Run) + '/' + str(len(No_of_scrapers)))
        else:
            get_scraper_results()
            return
        count += 1
        single_test(count, index)
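
# `get_scraper_results()` is called by full_test() and single_test() but is not
# defined in this excerpt. A minimal sketch, assuming it only needs to summarise
# the per-scraper link counts already cached in url_cache.db; the query and
# dialog below are illustrative, not the add-on's actual implementation.
def get_scraper_results():
    dbcon = database.connect(os.path.join(
        xbmc.translatePath(xbmcaddon.Addon(
            "script.module.universalscrapers").getAddonInfo('profile')).decode('utf-8'),
        'url_cache.db'))
    dbcur = dbcon.cursor()
    # One row per scraper with the number of distinct url payloads it cached.
    dbcur.execute("SELECT scraper, COUNT(DISTINCT(urls)) FROM rel_src GROUP BY scraper")
    lines = ["{}: {} result(s)".format(row[0], row[1]) for row in dbcur.fetchall()]
    dbcon.close()
    xbmcgui.Dialog().textviewer("Universalscrapers Testing Mode", "\n".join(lines))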

def test():
    global movies, shows
    try:
        test_movies = []
        test_episodes = []
        profile_path = xbmc.translatePath(
            xbmcaddon.Addon().getAddonInfo('profile')).decode('utf-8')
        test_file = xbmcvfs.File(os.path.join(profile_path, "testings.xml"))
        xml = BeautifulStoneSoup(test_file.read())
        test_file.close()
        items = xml.findAll("item")
        for item in items:
            try:
                content = item.find("content")
                if content:
                    if "movie" in content.text:
                        meta = item.find("meta")
                        test_movies.append({
                            'title': meta.find("title").text,
                            'imdb': meta.find("imdb").text,
                            'year': meta.find("year").text,
                        })
                    elif "episode" in content.text:
                        meta = item.find("meta")
                        test_episodes.append({
                            'title': meta.find("tvshowtitle").text,
                            'show_year': int(meta.find("premiered").text[0:4]),
                            'year': meta.find("year").text,
                            'season': meta.find("season").text,
                            # Bug fix: the original read the <season> tag here,
                            # so every episode number duplicated the season.
                            'episode': meta.find("episode").text,
                            'imdb': meta.find("imdb").text,
                        })
            except:
                pass
        movies = test_movies
        shows = test_episodes
    except:
        pass

    dialog = xbmcgui.Dialog()
    pDialog = xbmcgui.DialogProgress()
    if dialog.yesno("universalscrapers Testing Mode", 'Clear cache?'):
        universalscrapers.clear_cache()
    try:
        dbcon = database.connect(os.path.join(
            xbmc.translatePath(xbmcaddon.Addon(
                "script.module.universalscrapers").getAddonInfo('profile')).decode('utf-8'),
            'url_cache.db'))
        dbcur = dbcon.cursor()
    except:
        dialog.ok("universalscrapers Testing Mode", 'Error connecting to db')
        sys.exit()

    num_movies = len(movies)
    if num_movies > 0:
        pDialog.create('universalscrapers Testing mode active', 'please wait')
        index = 0
        for movie in movies:
            index += 1
            title = movie['title']
            year = movie['year']
            imdb = movie['imdb']
            if pDialog.iscanceled():
                pDialog.close()
                break
            pDialog.update(int(index * 100 / num_movies),
                           "Scraping movie {} of {}".format(index, num_movies), title)
            links_scraper = universalscrapers.scrape_movie(title, year, imdb)
            links_scraper = links_scraper()
            for scraper_links in links_scraper:
                if pDialog.iscanceled():
                    break
                if scraper_links:
                    random.shuffle(scraper_links)
        pDialog.close()

        dbcur.execute("SELECT COUNT(DISTINCT(scraper)) FROM rel_src WHERE episode = ''")
        match = dbcur.fetchone()
        num_movie_scrapers = match[0]

        dbcur.execute("SELECT scraper, COUNT(DISTINCT(urls)) FROM rel_src WHERE episode = '' GROUP BY scraper")
        matches = dbcur.fetchall()
        failed = []
        for match in matches:
            if int(match[1]) <= 1:
                failed.append(match[0])
        if len(failed) > 0:
            failedstring = "Failed: {}".format(len(failed))
            for fail in failed:
                failedstring += "\n - {}".format(str(fail))
        else:
            failedstring = ""

        dbcur.execute("SELECT title, COUNT(DISTINCT(urls)) FROM rel_src WHERE episode = '' GROUP BY title")
        matches = dbcur.fetchall()
        failed_movies = []
        for match in matches:
            if int(match[1]) <= 1:
                if int(match[1]) == 1:
                    # A single distinct payload may still be the empty list;
                    # parameterised so titles containing quotes cannot break
                    # the query.
                    dbcur.execute(
                        "SELECT scraper, urls FROM rel_src WHERE episode = '' AND title = ? GROUP BY scraper",
                        (match[0],))
                    new_matches = dbcur.fetchall()
                    found = False
                    for new_match in new_matches:
                        if new_match[1] != "[]":
                            found = True
                    if not found:
                        failed_movies.append(match[0])
                else:
                    failed_movies.append(match[0])
        if len(failed_movies) > 0:
            failed_movie_string = "Failed movies: {}".format(len(failed_movies))
            for fail in failed_movies:
                for movie in movies:
                    if clean_title(movie['title']).upper() == str(fail):
                        failed_movie_string += "\n - {}".format(movie["title"])
        else:
            failed_movie_string = ""

    num_shows = len(shows)
    if num_shows > 0:
        pDialog.create('universalscrapers Testing mode active', 'please wait')
        index = 0
        for show in shows:
            index += 1
            title = show['title']
            show_year = show['show_year']
            year = show['year']
            season = show['season']
            episode = show['episode']
            imdb = show['imdb']
            tvdb = show.get('tvdb', '')
            if pDialog.iscanceled():
                pDialog.close()
                break
            pDialog.update(int(index * 100 / num_shows),
                           "Scraping show {} of {}".format(index, num_shows), title)
            links_scraper = universalscrapers.scrape_episode(
                title, show_year, year, season, episode, imdb, tvdb)
            links_scraper = links_scraper()
            for scraper_links in links_scraper:
                if pDialog.iscanceled():
                    break
                if scraper_links:
                    random.shuffle(scraper_links)
        pDialog.close()

        dbcur.execute("SELECT COUNT(DISTINCT(scraper)) FROM rel_src WHERE episode != ''")
        match = dbcur.fetchone()
        num_show_scrapers = match[0]

        dbcur.execute("SELECT scraper, COUNT(DISTINCT(urls)) FROM rel_src WHERE episode != '' GROUP BY scraper")
        matches = dbcur.fetchall()
        failed = []
        for match in matches:
            if int(match[1]) <= 1:
                if int(match[1]) == 1:
                    # Renamed from the original's inner `match`, which clobbered
                    # the loop variable; parameterised query as above.
                    dbcur.execute(
                        "SELECT scraper, urls FROM rel_src WHERE episode != '' AND scraper = ? GROUP BY scraper",
                        (match[0],))
                    sub_match = dbcur.fetchone()
                    if sub_match[1] == "[]":
                        failed.append(sub_match[0])
                else:
                    failed.append(match[0])
        if len(failed) > 0:
            show_scraper_failedstring = "Failed: {}".format(len(failed))
            for fail in failed:
                show_scraper_failedstring += "\n - {}".format(str(fail))
        else:
            show_scraper_failedstring = ""

        dbcur.execute("SELECT title, COUNT(DISTINCT(urls)) FROM rel_src WHERE episode != '' GROUP BY title")
        matches = dbcur.fetchall()
        failed_shows = []
        for match in matches:
            if int(match[1]) <= 1:
                if int(match[1]) == 1:
                    dbcur.execute(
                        "SELECT scraper, urls FROM rel_src WHERE episode != '' AND title = ? GROUP BY scraper",
                        (match[0],))
                    new_matches = dbcur.fetchall()
                    found = False
                    for new_match in new_matches:
                        if new_match[1] != "[]":
                            found = True
                    if not found:
                        failed_shows.append(match[0])
                else:
                    failed_shows.append(match[0])
        if len(failed_shows) > 0:
            failed_show_string = "Failed shows: {}".format(len(failed_shows))
            for fail in failed_shows:
                for show in shows:
                    if clean_title(show['title']).upper() == str(fail):
                        failed_show_string += "\n - {} S{}-E{}".format(
                            show["title"], show["season"], show["episode"])
        else:
            failed_show_string = ""

    resultstring = 'Results:\n'
    if num_movies > 0:
        resultstring = resultstring + \
            '    Movie Scrapers: {}\n' \
            '    {}\n' \
            '    {}\n'.format(num_movie_scrapers, failedstring, failed_movie_string)
    if num_shows > 0:
        resultstring = resultstring + \
            '    Episode Scrapers: {}\n' \
            '    {}\n' \
            '    {}\n'.format(num_show_scrapers, show_scraper_failedstring, failed_show_string)
    dialog.textviewer("universalscrapers Testing Mode", resultstring)
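
# For reference, test() expects <profile>/testings.xml to contain <item>
# entries shaped like the sketch below. The tag names come straight from the
# BeautifulStoneSoup lookups above; the titles and ids are made-up placeholders.
#
#   <item>
#     <content>movie</content>
#     <meta>
#       <title>Example Movie</title>
#       <imdb>tt0000001</imdb>
#       <year>2000</year>
#     </meta>
#   </item>
#   <item>
#     <content>episode</content>
#     <meta>
#       <tvshowtitle>Example Show</tvshowtitle>
#       <premiered>2010-01-01</premiered>
#       <year>2011</year>
#       <season>1</season>
#       <episode>2</episode>
#       <imdb>tt0000002</imdb>
#     </meta>
#   </item>

# Hypothetical wiring: Kodi runs script modules as __main__, so the full test
# can be kicked off directly; the add-on's real entry point may differ.
if __name__ == '__main__':
    test()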