def send_package(self, title, series_url, language_id):
    englisch = ""
    if language_id == 2:
        englisch = "/Englisch"
    # Pick the log prefix matching the list this job was started from.
    if self.filename == 'SJ_Serien_Regex':
        link_placeholder = '[Episode/RegEx' + englisch + '] - '
    elif self.filename == 'SJ_Serien':
        link_placeholder = '[Episode' + englisch + '] - '
    elif self.filename == 'SJ_Staffeln_Regex':
        link_placeholder = '[Staffel/RegEx' + englisch + '] - '
    else:
        link_placeholder = '[Staffel' + englisch + '] - '
    try:
        storage = self.db.retrieve_all(title)
    except Exception as e:
        self.log_debug(
            "Fehler bei Datenbankzugriff: %s, Grund: %s" % (e, title))
        return
    if 'added' in storage or 'notdl' in storage:
        self.log_debug(title + " - Release ignoriert (bereits gefunden)")
    else:
        download = add_decrypt(title, series_url, self.sj, self.dbfile)
        if download:
            self.db.store(title, 'added')
            log_entry = link_placeholder + title + ' - [SJ]'
            self.log_info(log_entry)
            notify(["[Click'n'Load notwendig] - " + log_entry],
                   self.configfile)
            return log_entry
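
# A minimal sketch (hypothetical helper, not part of the module) isolating the
# prefix logic above as a pure function, so all four branches are visible at a
# glance. The real method reads self.filename instead of taking an argument.
def _link_placeholder(filename, language_id):
    englisch = "/Englisch" if language_id == 2 else ""
    if filename == 'SJ_Serien_Regex':
        return '[Episode/RegEx' + englisch + '] - '
    if filename == 'SJ_Serien':
        return '[Episode' + englisch + '] - '
    if filename == 'SJ_Staffeln_Regex':
        return '[Staffel/RegEx' + englisch + '] - '
    return '[Staffel' + englisch + '] - '

# e.g. _link_placeholder('SJ_Serien', 2) == '[Episode/Englisch] - '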
def crawldog(configfile, dbfile):
    # Watchdog loop: polls JDownloader via My JDownloader and reconciles its
    # packages with the titles RSScrawler is waiting for.
    disable_request_warnings(InsecureRequestWarning)
    crawljobs = RssConfig('Crawljobs', configfile)
    autostart = crawljobs.get("autostart")
    db = RssDb(dbfile, 'crawldog')

    grabber_was_collecting = False
    device = False
    while True:
        try:
            if not device or not is_device(device):
                device = get_device(configfile)

            myjd_packages = get_info(configfile, device)
            grabber_collecting = myjd_packages[2]

            if grabber_was_collecting or grabber_collecting:
                # The linkgrabber is still busy - check again shortly.
                grabber_was_collecting = grabber_collecting
                time.sleep(5)
            else:
                packages_in_downloader_decrypted = myjd_packages[4][0]
                packages_in_linkgrabber_decrypted = myjd_packages[4][1]
                offline_packages = myjd_packages[4][2]
                encrypted_packages = myjd_packages[4][3]

                try:
                    watched_titles = db.retrieve_all_titles()
                except:
                    watched_titles = False

                notify_list = []

                if packages_in_downloader_decrypted or packages_in_linkgrabber_decrypted \
                        or offline_packages or encrypted_packages:
                    if watched_titles:
                        for title in watched_titles:
                            if packages_in_downloader_decrypted:
                                for package in packages_in_downloader_decrypted:
                                    if title[0] in package['name'] or \
                                            title[0].replace(".", " ") in package['name']:
                                        check = hoster_check(
                                            configfile, device, [package], title[0], [0])
                                        device = check[0]
                                        if device:
                                            db.delete(title[0])

                            if packages_in_linkgrabber_decrypted:
                                for package in packages_in_linkgrabber_decrypted:
                                    if title[0] in package['name'] or \
                                            title[0].replace(".", " ") in package['name']:
                                        check = hoster_check(
                                            configfile, device, [package], title[0], [0])
                                        device = check[0]
                                        episode = RssDb(
                                            dbfile, 'episode_remover').retrieve(title[0])
                                        if episode:
                                            # Only one episode of this package is
                                            # wanted - find and drop the other links.
                                            filenames = package['filenames']
                                            if len(filenames) > 1:
                                                fname_episodes = []
                                                for fname in filenames:
                                                    try:
                                                        if re.match(
                                                                r'.*S\d{1,3}E\d{1,3}.*',
                                                                fname,
                                                                flags=re.IGNORECASE):
                                                            fname = re.findall(
                                                                r'S\d{1,3}E(\d{1,3})',
                                                                fname,
                                                                flags=re.IGNORECASE).pop()
                                                        else:
                                                            fname = fname.replace(
                                                                "hddl8", "").replace(
                                                                "dd51", "").replace(
                                                                "264", "").replace(
                                                                "265", "")
                                                    except:
                                                        fname = fname.replace(
                                                            "hddl8", "").replace(
                                                            "dd51", "").replace(
                                                            "264", "").replace(
                                                            "265", "")
                                                    fname_episode = "".join(
                                                        re.findall(
                                                            r'\d+',
                                                            fname.split(".part")[0]))
                                                    try:
                                                        fname_episodes.append(
                                                            str(int(fname_episode)))
                                                    except:
                                                        pass

                                                # Strip the longest substring shared
                                                # by all candidates (season number,
                                                # resolution digits) in up to three
                                                # passes, until only the per-file
                                                # episode numbers remain.
                                                replacer = longest_substr(fname_episodes)
                                                new_fname_episodes = []
                                                for new_ep_fname in fname_episodes:
                                                    try:
                                                        new_fname_episodes.append(
                                                            str(int(new_ep_fname.replace(
                                                                replacer, ""))))
                                                    except:
                                                        pass
                                                replacer = longest_substr(
                                                    new_fname_episodes)
                                                newer_fname_episodes = []
                                                for new_ep_fname in new_fname_episodes:
                                                    try:
                                                        newer_fname_episodes.append(
                                                            str(int(re.sub(
                                                                replacer, "",
                                                                new_ep_fname, 1))))
                                                    except:
                                                        pass
                                                replacer = longest_substr(
                                                    newer_fname_episodes)
                                                even_newer_fname_episodes = []
                                                for newer_ep_fname in newer_fname_episodes:
                                                    try:
                                                        even_newer_fname_episodes.append(
                                                            str(int(re.sub(
                                                                replacer, "",
                                                                newer_ep_fname, 1))))
                                                    except:
                                                        pass
                                                if even_newer_fname_episodes:
                                                    fname_episodes = even_newer_fname_episodes
                                                elif newer_fname_episodes:
                                                    fname_episodes = newer_fname_episodes
                                                elif new_fname_episodes:
                                                    fname_episodes = new_fname_episodes

                                                delete_linkids = []
                                                pos = 0
                                                for delete_id in package['linkids']:
                                                    if str(episode) != str(
                                                            fname_episodes[pos]):
                                                        delete_linkids.append(delete_id)
                                                    pos += 1
                                                if delete_linkids:
                                                    delete_uuids = [package['uuid']]
                                                    RssDb(
                                                        dbfile,
                                                        'episode_remover').delete(title[0])
                                                    device = remove_from_linkgrabber(
                                                        configfile, device,
                                                        delete_linkids, delete_uuids)
                                        if autostart:
                                            device = move_to_downloads(
                                                configfile, device,
                                                package['linkids'], [package['uuid']])
                                        if device:
                                            db.delete(title[0])

                            if offline_packages:
                                for package in offline_packages:
                                    if title[0] in package['name'] or \
                                            title[0].replace(".", " ") in package['name']:
                                        notify_list.append("[Offline] - " + title[0])
                                        print(u"[Offline] - " + title[0])
                                        db.delete(title[0])

                            if encrypted_packages:
                                for package in encrypted_packages:
                                    if title[0] in package['name'] or \
                                            title[0].replace(".", " ") in package['name']:
                                        if title[1] == 'added':
                                            # First failure: retry the decryption once.
                                            if retry_decrypt(
                                                    configfile, dbfile, device,
                                                    package['linkids'],
                                                    [package['uuid']],
                                                    package['urls']):
                                                db.delete(title[0])
                                                db.store(title[0], 'retried')
                                        else:
                                            # Retry already happened - hand the link
                                            # back for manual Click'n'Load.
                                            add_decrypt(
                                                package['name'], package['url'],
                                                "", dbfile)
                                            device = remove_from_linkgrabber(
                                                configfile, device,
                                                package['linkids'], [package['uuid']])
                                            notify_list.append(
                                                "[Click'n'Load notwendig] - " + title[0])
                                            print(u"[Click'n'Load notwendig] - "
                                                  + title[0])
                                            db.delete(title[0])
                else:
                    if not grabber_collecting:
                        db.reset()

                if notify_list:
                    notify(notify_list, configfile)

            time.sleep(30)
        except Exception:
            traceback.print_exc()
            time.sleep(30)
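
# A standalone sketch (hypothetical helper and sample data, not part of the
# module) of the episode-extraction heuristic used in crawldog() above: digits
# are pulled from each filename, then the longest substring shared by all
# candidates is stripped in up to three passes so only the per-file episode
# number remains. Assumes longest_substr() returns the longest common
# substring of a list of strings, as it is used above.
def _extract_episode_numbers(filenames):
    candidates = []
    for fname in filenames:
        found = re.findall(r'S\d{1,3}E(\d{1,3})', fname, flags=re.IGNORECASE)
        if found:
            candidates.append(str(int(found.pop())))
        else:
            digits = "".join(re.findall(r'\d+', fname.split(".part")[0]))
            if digits:
                candidates.append(str(int(digits)))
    # Strip shared digit runs (season number, resolution) until only the
    # episode digits differ between files; three passes mirror the original.
    for _ in range(3):
        common = longest_substr(candidates)
        if not common:
            break
        stripped = []
        for candidate in candidates:
            try:
                stripped.append(str(int(candidate.replace(common, "", 1))))
            except ValueError:
                pass
        if stripped:
            candidates = stripped
    return candidates

# e.g. _extract_episode_numbers(
#     ["Show.S01E01.German.mkv", "Show.S01E02.German.mkv"]) -> ['1', '2']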
def download_sj(payload, configfile, dbfile):
    # Manual search download: resolve the series page, fetch its release list
    # from the SJ API and queue the best-rated season or episode releases.
    hostnames = RssConfig('Hostnames', configfile)
    sj = hostnames.get('sj')

    payload = decode_base64(payload).split("|")
    href = payload[0]
    title = payload[1]
    special = payload[2].strip().replace("None", "")

    series_url = 'https://' + sj + href
    series_info = get_url(series_url, configfile, dbfile)
    series_id = re.findall(r'data-mediaid="(.*?)"', series_info)[0]

    api_url = 'https://' + sj + '/api/media/' + series_id + '/releases'
    releases = get_url(api_url, configfile, dbfile)
    seasons = json.loads(releases)

    # Make sure the title is on the watchlists so future releases are caught.
    listen = ["SJ_Serien", "MB_Staffeln"]
    for liste in listen:
        cont = ListDb(dbfile, liste).retrieve()
        list_title = sanitize(title)
        if not cont:
            cont = ""
        if list_title not in cont:
            ListDb(dbfile, liste).store(list_title)

    config = RssConfig('SJ', configfile)
    english_ok = RssConfig('RSScrawler', configfile).get("english")
    quality = config.get('quality')
    ignore = config.get('rejectlist')

    result_seasons = {}
    result_episodes = {}

    for season in seasons:
        releases = seasons[season]
        for release in releases['items']:
            name = release['name'].encode(
                'ascii', errors='ignore').decode('utf-8')
            hosters = release['hoster']
            try:
                valid = bool(release['resolution'] == quality)
            except:
                valid = re.match(re.compile(r'.*' + quality + r'.*'), name)
            if valid and special:
                valid = bool("." + special.lower() + "." in name.lower())
            if valid and not english_ok:
                valid = bool(".german." in name.lower())
            if valid:
                valid = False
                for hoster in hosters:
                    if (hoster and check_hoster(hoster, configfile)) \
                            or config.get("hoster_fallback"):
                        valid = True
            if valid:
                try:
                    ep = release['episode']
                    if ep:
                        # Keep only the highest-rated release per episode.
                        existing = result_episodes.get(season)
                        if existing:
                            for e in existing:
                                if e == ep:
                                    if rate(name, ignore) > rate(
                                            existing[e], ignore):
                                        existing.update({ep: name})
                        else:
                            existing = {ep: name}
                        result_episodes.update({season: existing})
                        continue
                except:
                    pass

                # Season pack: keep only the highest-rated release per season.
                existing = result_seasons.get(season)
                dont = False
                if existing:
                    if rate(name, ignore) < rate(existing, ignore):
                        dont = True
                if not dont:
                    result_seasons.update({season: name})

        try:
            # A complete season pack supersedes single episodes.
            if result_seasons[season] and result_episodes[season]:
                del result_episodes[season]
        except:
            pass

        success = False
        try:
            if result_seasons[season]:
                success = True
        except:
            try:
                if result_episodes[season]:
                    success = True
            except:
                pass
        if success:
            logger.debug(u"Websuche erfolgreich für " + title + " - " + season)
        else:
            # Nothing matched the configured quality - retry this season with
            # the resolution filter disabled.
            for release in releases['items']:
                name = release['name'].encode(
                    'ascii', errors='ignore').decode('utf-8')
                hosters = release['hoster']
                valid = True
                if valid and special:
                    valid = bool("." + special.lower() + "." in name.lower())
                if valid and not english_ok:
                    valid = bool(".german." in name.lower())
                if valid:
                    valid = False
                    for hoster in hosters:
                        if (hoster and check_hoster(hoster, configfile)) \
                                or config.get("hoster_fallback"):
                            valid = True
                if valid:
                    try:
                        ep = release['episode']
                        if ep:
                            existing = result_episodes.get(season)
                            if existing:
                                for e in existing:
                                    if e == ep:
                                        if rate(name, ignore) > rate(
                                                existing[e], ignore):
                                            existing.update({ep: name})
                            else:
                                existing = {ep: name}
                            result_episodes.update({season: existing})
                            continue
                    except:
                        pass
                    existing = result_seasons.get(season)
                    dont = False
                    if existing:
                        if rate(name, ignore) < rate(existing, ignore):
                            dont = True
                    if not dont:
                        result_seasons.update({season: name})
            try:
                if result_seasons[season] and result_episodes[season]:
                    del result_episodes[season]
            except:
                pass
            logger.debug(u"Websuche erfolgreich für " + title + " - " + season)

    matches = []
    for season in result_seasons:
        matches.append(result_seasons[season])
    for season in result_episodes:
        for episode in result_episodes[season]:
            matches.append(result_episodes[season][episode])

    notify_array = []
    for title in matches:
        db = RssDb(dbfile, 'rsscrawler')
        if add_decrypt(title, series_url, sj, dbfile):
            db.store(title, 'added')
            log_entry = u'[Suche/Serie] - ' + title + ' - [SJ]'
            logger.info(log_entry)
            notify_array.append(log_entry)

    notify(notify_array, configfile)

    if not matches:
        return False
    return matches
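
# A minimal sketch (hypothetical helper and sample data, not part of the
# module) of the per-episode dedup used in download_sj() above: for every
# (season, episode) pair only the highest-rated release name is kept.
# rate(name, ignore) is the module's own scoring function; the release dicts
# below are made-up examples.
def _best_per_episode(releases, ignore):
    best = {}
    for release in releases:
        key = (release['season'], release['episode'])
        name = release['name']
        if key not in best or rate(name, ignore) > rate(best[key], ignore):
            best[key] = name
    return best

# e.g. _best_per_episode(
#     [{'season': '1', 'episode': 1,
#       'name': 'Show.S01E01.German.1080p.WEB.x264-GRP'},
#      {'season': '1', 'episode': 1,
#       'name': 'Show.S01E01.German.480p.WEB.x264-GRP'}],
#     ignore='') keeps whichever of the two names rate() scores higher.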