def check_failed_packages(configfile, device):
    """Inspect the linkgrabber and report failed/offline/decrypted packages.

    Returns ``[device, grabber_collecting, decrypted, offline, failed]``
    on success, otherwise ``False``.
    """
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if not device:
            return False

        def _query(dev):
            # Fetch the collecting flag plus the package triple in one pass.
            collecting = dev.linkgrabber.is_collecting()
            packages = get_packages_in_linkgrabber(dev)
            return collecting, packages[0], packages[1], packages[2]

        try:
            grabber_collecting, failed, offline, decrypted = _query(device)
        except rsscrawler.myjdapi.TokenExpiredException:
            # Session token expired - reconnect once and retry the queries.
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            grabber_collecting, failed, offline, decrypted = _query(device)
        return [device, grabber_collecting, decrypted, offline, failed]
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def move_to_new_package(configfile, device, linkids, package_id, new_title, new_path):
    """Move links into a new package in both the linkgrabber and download list.

    Returns the (possibly refreshed) device on success, otherwise ``False``.
    """
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if not device:
            return False

        def _move(dev):
            # The move has to be issued against both views of the queue.
            dev.linkgrabber.move_to_new_package(linkids, package_id, new_title, new_path)
            dev.downloads.move_to_new_package(linkids, package_id, new_title, new_path)

        try:
            _move(device)
        except rsscrawler.myjdapi.TokenExpiredException:
            # Session token expired - reconnect once and retry.
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            _move(device)
        return device
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def get_device(configfile):
    """Connect to MyJDownloader and return the configured device.

    Reads credentials from the 'RSScrawler' section of *configfile*.
    If no device name is configured but credentials exist, falls back to
    the account's single device via ``get_if_one_device``.

    Returns the connected device object, or ``False`` when credentials are
    missing, the connection fails, or the device cannot be resolved.
    """
    conf = RssConfig('RSScrawler', configfile)
    myjd_user = str(conf.get('myjd_user'))
    myjd_pass = str(conf.get('myjd_pass'))
    myjd_device = str(conf.get('myjd_device'))

    # Without credentials there is nothing to connect to.
    if not myjd_user or not myjd_pass:
        return False
    if not myjd_device:
        # No device configured - auto-detect when the account has exactly one.
        myjd_device = get_if_one_device(myjd_user, myjd_pass)

    # The original duplicated the whole connect/update/get flow in two
    # branches; both branches were identical apart from the device lookup
    # above, so they are merged here.
    jd = rsscrawler.myjdapi.Myjdapi()
    jd.set_app_key('RSScrawler')
    try:
        jd.connect(myjd_user, myjd_pass)
        jd.update_devices()
        device = jd.get_device(myjd_device)
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
    if not device or not is_device(device):
        return False
    return device
def retry_decrypt(configfile, device, linkids, uuid, links):
    # Retry decryption for a failed package: look the package up in the
    # linkgrabber by UUID, remove it, then re-add its links via download()
    # into the original destination folder under the original name.
    # Returns the device on success, False otherwise.
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if device:
            try:
                package = device.linkgrabber.query_packages(params=[
                    {
                        "availableOfflineCount": True,
                        "availableOnlineCount": True,
                        "availableTempUnknownCount": True,
                        "availableUnknownCount": True,
                        "bytesTotal": True,
                        "childCount": True,
                        "comment": True,
                        "enabled": True,
                        "hosts": True,
                        "maxResults": -1,
                        "packageUUIDs": uuid,
                        "priority": True,
                        "saveTo": True,
                        "startAt": 0,
                        "status": True
                    }])
            except rsscrawler.myjdapi.TokenExpiredException:
                # Session token expired - reconnect once and retry the query.
                device = get_device(configfile)
                if not device or not is_device(device):
                    return False
                package = device.linkgrabber.query_packages(params=[
                    {
                        "availableOfflineCount": True,
                        "availableOnlineCount": True,
                        "availableTempUnknownCount": True,
                        "availableUnknownCount": True,
                        "bytesTotal": True,
                        "childCount": True,
                        "comment": True,
                        "enabled": True,
                        "hosts": True,
                        "maxResults": -1,
                        "packageUUIDs": uuid,
                        "priority": True,
                        "saveTo": True,
                        "startAt": 0,
                        "status": True
                    }])
            if package:
                # Drop the stale package, then re-add the links with the
                # original title and save path.
                remove_from_linkgrabber(configfile, device, linkids, uuid)
                title = package[0].get('name')
                full_path = package[0].get('saveTo')
                download(configfile, device, title, None, links, None, full_path)
                return device
            else:
                return False
        else:
            return False
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def download(configfile, device, title, subdir, links, password, full_path=None):
    """Add *links* to the JDownloader linkgrabber as a new package.

    Destination is *full_path* when given; otherwise it is derived from
    *subdir* and the 'subdir' crawljob option. Remux packages get a
    lower download priority. Returns the device on success, ``False``
    otherwise.
    """
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if not device:
            # Fix: get_device() returns False on failure. The original code
            # continued regardless and crashed with an uncaught
            # AttributeError on False.linkgrabber; every sibling function in
            # this module guards against this.
            return False
        links = str(links)
        crawljobs = RssConfig('Crawljobs', configfile)
        autostart = crawljobs.get("autostart")
        usesubdir = crawljobs.get("subdir")
        priority = "DEFAULT"
        if full_path:
            path = full_path
        else:
            if usesubdir:
                path = subdir + "/<jd:packagename>"
            else:
                path = "<jd:packagename>"
            if subdir == "RSScrawler/Remux":
                # Remuxes are large and non-urgent - queue them last.
                priority = "LOWER"
        try:
            device.linkgrabber.add_links(params=[
                {
                    "autostart": autostart,
                    "links": links,
                    "packageName": title,
                    "extractPassword": password,
                    "priority": priority,
                    "downloadPassword": password,
                    "destinationFolder": path,
                    "overwritePackagizerRules": False
                }])
        except rsscrawler.myjdapi.TokenExpiredException:
            # Session token expired - reconnect once and retry.
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            device.linkgrabber.add_links(params=[
                {
                    "autostart": autostart,
                    "links": links,
                    "packageName": title,
                    "extractPassword": password,
                    "priority": priority,
                    "downloadPassword": password,
                    "destinationFolder": path,
                    "overwritePackagizerRules": False
                }])
        return device
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def move_to_downloads(configfile, device, linkids, uuid):
    """Push linkgrabber entries onto the download list.

    Returns the (possibly refreshed) device on success, otherwise ``False``.
    """
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if not device:
            return False
        try:
            device.linkgrabber.move_to_downloadlist(linkids, uuid)
        except rsscrawler.myjdapi.TokenExpiredException:
            # Stale session - reconnect once and retry.
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            device.linkgrabber.move_to_downloadlist(linkids, uuid)
        return device
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def update_jdownloader(configfile, device):
    """Trigger a JDownloader self-update and restart.

    Returns the (possibly refreshed) device on success, otherwise ``False``.
    """
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if not device:
            return False
        try:
            device.update.restart_and_update()
        except rsscrawler.myjdapi.TokenExpiredException:
            # Stale session - reconnect once and retry.
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            device.update.restart_and_update()
        return device
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def package_merge(configfile, device, title, uuids, linkids):
    """Merge linkgrabber packages into a single package named *title*.

    Returns the (possibly refreshed) device on success, otherwise ``False``.
    """
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if not device:
            return False
        try:
            device.linkgrabber.move_to_new_package(linkids, uuids, title, "<jd:packagename>")
        except rsscrawler.myjdapi.TokenExpiredException:
            # Stale session - reconnect once and retry.
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            device.linkgrabber.move_to_new_package(linkids, uuids, title, "<jd:packagename>")
        return device
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def jdownloader_stop(configfile, device):
    """Stop all running downloads on the device.

    Returns the (possibly refreshed) device on success, otherwise ``False``.
    """
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if not device:
            return False
        try:
            device.downloadcontroller.stop_downloads()
        except rsscrawler.myjdapi.TokenExpiredException:
            # Stale session - reconnect once and retry.
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            device.downloadcontroller.stop_downloads()
        return device
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def remove_from_linkgrabber(configfile, device, linkids, uuid):
    """Delete the given links/packages from the linkgrabber.

    Returns the (possibly refreshed) device on success, otherwise ``False``.
    """
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if not device:
            return False
        try:
            device.linkgrabber.remove_links(linkids, uuid)
        except rsscrawler.myjdapi.TokenExpiredException:
            # Stale session - reconnect once and retry.
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            device.linkgrabber.remove_links(linkids, uuid)
        return device
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def get_state(configfile, device):
    """Report the downloader state and whether the linkgrabber is collecting.

    Returns ``[device, downloader_state, grabber_collecting]`` on success,
    otherwise ``False``.
    """
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if not device:
            return False
        try:
            state = device.downloadcontroller.get_current_state()
            collecting = device.linkgrabber.is_collecting()
        except rsscrawler.myjdapi.TokenExpiredException:
            # Stale session - reconnect once and retry both queries.
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            state = device.downloadcontroller.get_current_state()
            collecting = device.linkgrabber.is_collecting()
        return [device, state, collecting]
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def do_package_merge(configfile, device, title, uuids, linkids):
    """Merge packages via the module-level move_to_new_package() wrapper.

    Returns the (possibly refreshed) device on success, otherwise ``False``.
    """
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if not device:
            return False
        try:
            move_to_new_package(configfile, device, linkids, uuids, title, "<jd:packagename>")
        except rsscrawler.myjdapi.TokenExpiredException:
            # Stale session - reconnect once and retry.
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            move_to_new_package(configfile, device, linkids, uuids, title, "<jd:packagename>")
        return device
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def get_state(configfile, device):
    """Report downloader state and linkgrabber-collecting flag.

    NOTE(review): this re-defines get_state from earlier in the file with
    identical behavior; this later definition is the one that wins at
    import time.

    Returns ``[device, downloader_state, grabber_collecting]`` or ``False``.
    """
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if not device:
            return False
        try:
            current_state = device.downloadcontroller.get_current_state()
            is_collecting = device.linkgrabber.is_collecting()
        except rsscrawler.myjdapi.TokenExpiredException:
            # Stale session - reconnect once and retry both queries.
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            current_state = device.downloadcontroller.get_current_state()
            is_collecting = device.linkgrabber.is_collecting()
        return [device, current_state, is_collecting]
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def get_info(configfile, device):
    # Collect a full status snapshot: downloader state, collecting flag,
    # update availability and the current downloader/linkgrabber package
    # lists. Returns [device, state, collecting, update_ready,
    # [downloader_pkgs, decrypted, offline, failed]] or False.
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if device:
            try:
                downloader_state = device.downloadcontroller.get_current_state()
                grabber_collecting = device.linkgrabber.is_collecting()
                device.update.run_update_check()
                update_ready = device.update.is_update_available()
                packages_in_downloader = get_packages_in_downloader(device)
                packages_in_linkgrabber = get_packages_in_linkgrabber(device)
                # get_packages_in_linkgrabber returns (failed, offline, decrypted).
                packages_in_linkgrabber_failed = packages_in_linkgrabber[0]
                packages_in_linkgrabber_offline = packages_in_linkgrabber[1]
                packages_in_linkgrabber_decrypted = packages_in_linkgrabber[2]
            except rsscrawler.myjdapi.TokenExpiredException:
                # Session token expired - reconnect once and redo all queries.
                device = get_device(configfile)
                if not device or not is_device(device):
                    return False
                downloader_state = device.downloadcontroller.get_current_state()
                grabber_collecting = device.linkgrabber.is_collecting()
                device.update.run_update_check()
                update_ready = device.update.is_update_available()
                packages_in_downloader = get_packages_in_downloader(device)
                packages_in_linkgrabber = get_packages_in_linkgrabber(device)
                packages_in_linkgrabber_failed = packages_in_linkgrabber[0]
                packages_in_linkgrabber_offline = packages_in_linkgrabber[1]
                packages_in_linkgrabber_decrypted = packages_in_linkgrabber[2]
            return [device, downloader_state, grabber_collecting, update_ready,
                    [packages_in_downloader,
                     packages_in_linkgrabber_decrypted,
                     packages_in_linkgrabber_offline,
                     packages_in_linkgrabber_failed]]
        else:
            return False
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def crawler(configfile, dbfile, device, rsscrawler, log_level, log_file, log_format):
    # Main crawl loop: set up logging, then either loop forever running all
    # search tasks at a randomized interval (recording timings into the
    # 'crawltimes' db), or run a single pass when started with --testlauf.
    sys.stdout = Unbuffered(sys.stdout)
    logger = logging.getLogger('rsscrawler')
    logger.setLevel(log_level)
    console = logging.StreamHandler(stream=sys.stdout)
    formatter = logging.Formatter(log_format)
    console.setLevel(log_level)
    logfile = logging.handlers.RotatingFileHandler(log_file)
    logfile.setFormatter(formatter)
    logfile.setLevel(logging.INFO)
    logger.addHandler(logfile)
    logger.addHandler(console)
    if log_level == 10:
        # Debug level: mirror output into a separate debug log file.
        logfile_debug = logging.handlers.RotatingFileHandler(
            log_file.replace("RSScrawler.log", "RSScrawler_DEBUG.log"))
        logfile_debug.setFormatter(formatter)
        logfile_debug.setLevel(10)
        logger.addHandler(logfile_debug)
    disable_request_warnings(InsecureRequestWarning)
    log_debug = logger.debug
    crawltimes = RssDb(dbfile, "crawltimes")
    arguments = docopt(__doc__, version='RSScrawler')
    if not arguments['--testlauf']:
        while True:
            try:
                if not device or not is_device(device):
                    device = get_device(configfile)
                scraper = check_url(configfile, dbfile)
                start_time = time.time()
                # Timestamps are stored in milliseconds.
                crawltimes.update_store("active", "True")
                crawltimes.update_store("start_time", start_time * 1000)
                log_debug("--------Alle Suchfunktion gestartet.--------")
                if device:
                    device = ombi(configfile, dbfile, device, log_debug)
                for task in search_pool(configfile, dbfile, device, logger, scraper):
                    name = task._INTERNAL_NAME
                    try:
                        file = " - Liste: " + task.filename
                    except AttributeError:
                        # Not every task is list-based.
                        file = ""
                    log_debug("-----------Suchfunktion (" + name + file + ") gestartet!-----------")
                    device = task.periodical_task()
                    log_debug("-----------Suchfunktion (" + name + file + ") ausgeführt!-----------")
                end_time = time.time()
                total_time = end_time - start_time
                # Wait the configured interval plus a random jitter of up to
                # a quarter interval.
                interval = int(rsscrawler.get('interval')) * 60
                random_range = random.randrange(0, interval // 4)
                wait = interval + random_range
                next_start = end_time + wait
                log_debug("-----Alle Suchfunktion ausgeführt (Dauer: " + readable_time(
                    total_time) + ")! Wartezeit bis zum nächsten Suchlauf: " + readable_time(wait))
                print(
                    time.strftime("%Y-%m-%d %H:%M:%S") + u" - Alle Suchfunktion ausgeführt (Dauer: " + readable_time(
                        total_time) + u")! Wartezeit bis zum nächsten Suchlauf: " + readable_time(wait))
                crawltimes.update_store("end_time", end_time * 1000)
                crawltimes.update_store("total_time", readable_time(total_time))
                crawltimes.update_store("next_start", next_start * 1000)
                crawltimes.update_store("active", "False")
                time.sleep(wait)
                log_debug("-------------Wartezeit verstrichen-------------")
            except Exception:
                # Keep the daemon loop alive on any error; back off briefly.
                traceback.print_exc()
                time.sleep(10)
    else:
        # --testlauf: single pass without timing db updates or sleeping.
        try:
            if not device or not is_device(device):
                device = get_device(configfile)
            scraper = check_url(configfile, dbfile)
            start_time = time.time()
            log_debug("--------Testlauf gestartet.--------")
            if device:
                device = ombi(configfile, dbfile, device, log_debug)
            for task in search_pool(configfile, dbfile, device, logger, scraper):
                name = task._INTERNAL_NAME
                try:
                    file = " - Liste: " + task.filename
                except AttributeError:
                    file = ""
                log_debug("-----------Suchfunktion (" + name + file + ") gestartet!-----------")
                task.periodical_task()
                log_debug("-----------Suchfunktion (" + name + file + ") ausgeführt!-----------")
            end_time = time.time()
            total_time = end_time - start_time
            log_debug("---Testlauf ausgeführt (Dauer: " + readable_time(total_time) + ")!---")
            print(
                time.strftime("%Y-%m-%d %H:%M:%S") + u" - Testlauf ausgeführt (Dauer: " + readable_time(
                    total_time) + ")!")
        except Exception:
            traceback.print_exc()
            time.sleep(10)
def crawldog(configfile, dbfile):
    # Watchdog loop: polls JDownloader, matches decrypted/offline/encrypted
    # packages against the titles recorded in the 'crawldog' db, runs hoster
    # checks, strips unwanted episodes from multi-file packages, retries
    # failed decrypts and sends notifications.
    disable_request_warnings(InsecureRequestWarning)
    crawljobs = RssConfig('Crawljobs', configfile)
    autostart = crawljobs.get("autostart")
    db = RssDb(dbfile, 'crawldog')
    grabber_was_collecting = False
    device = False
    while True:
        try:
            if not device or not is_device(device):
                device = get_device(configfile)
            myjd_packages = get_info(configfile, device)
            grabber_collecting = myjd_packages[2]
            if grabber_was_collecting or grabber_collecting:
                # Linkgrabber is (or just was) still collecting - package
                # lists are in flux, so wait and poll again quickly.
                grabber_was_collecting = grabber_collecting
                time.sleep(5)
            else:
                # get_info packs the lists as
                # [downloader_decrypted, linkgrabber_decrypted, offline, failed/encrypted]
                packages_in_downloader_decrypted = myjd_packages[4][0]
                packages_in_linkgrabber_decrypted = myjd_packages[4][1]
                offline_packages = myjd_packages[4][2]
                encrypted_packages = myjd_packages[4][3]
                try:
                    watched_titles = db.retrieve_all_titles()
                except:
                    watched_titles = False
                notify_list = []
                if packages_in_downloader_decrypted or packages_in_linkgrabber_decrypted or offline_packages or encrypted_packages:
                    if watched_titles:
                        # title is a (name, status) pair from the crawldog db.
                        for title in watched_titles:
                            if packages_in_downloader_decrypted:
                                for package in packages_in_downloader_decrypted:
                                    # Match on the raw title or its dot->space variant.
                                    if title[0] in package[
                                            'name'] or title[0].replace(
                                                ".", " ") in package['name']:
                                        # Already downloading - verify hosters,
                                        # then stop watching this title.
                                        check = hoster_check(
                                            configfile, device, [package], title[0], [0])
                                        device = check[0]
                                        if device:
                                            db.delete(title[0])
                            if packages_in_linkgrabber_decrypted:
                                for package in packages_in_linkgrabber_decrypted:
                                    if title[0] in package[
                                            'name'] or title[0].replace(
                                                ".", " ") in package['name']:
                                        check = hoster_check(
                                            configfile, device, [package], title[0], [0])
                                        device = check[0]
                                        episode = RssDb(
                                            dbfile, 'episode_remover').retrieve(title[0])
                                        if episode:
                                            # A specific episode was requested:
                                            # drop all other episodes from the
                                            # package.
                                            filenames = package['filenames']
                                            if len(filenames) > 1:
                                                # Derive an episode number per
                                                # file name: prefer an explicit
                                                # SxxEyy tag, else strip known
                                                # codec/audio tokens and keep
                                                # the remaining digits.
                                                fname_episodes = []
                                                for fname in filenames:
                                                    try:
                                                        if re.match(
                                                                r'.*S\d{1,3}E\d{1,3}.*',
                                                                fname,
                                                                flags=re.IGNORECASE):
                                                            fname = re.findall(
                                                                r'S\d{1,3}E(\d{1,3})',
                                                                fname,
                                                                flags=re.IGNORECASE).pop()
                                                        else:
                                                            fname = fname.replace(
                                                                "hddl8", "").replace(
                                                                    "dd51", "").replace(
                                                                        "264", "").replace(
                                                                            "265", "")
                                                    except:
                                                        fname = fname.replace(
                                                            "hddl8", "").replace(
                                                                "dd51", "").replace(
                                                                    "264", "").replace(
                                                                        "265", "")
                                                    fname_episode = "".join(
                                                        re.findall(
                                                            r'\d+',
                                                            fname.split(".part")[0]))
                                                    try:
                                                        fname_episodes.append(
                                                            str(int(fname_episode)))
                                                    except:
                                                        pass
                                                # Iteratively remove the longest
                                                # digit substring shared by all
                                                # names (e.g. season/resolution
                                                # digits) until only the episode
                                                # numbers differ.
                                                replacer = longest_substr(fname_episodes)
                                                new_fname_episodes = []
                                                for new_ep_fname in fname_episodes:
                                                    try:
                                                        new_fname_episodes.append(
                                                            str(int(new_ep_fname.replace(
                                                                replacer, ""))))
                                                    except:
                                                        pass
                                                replacer = longest_substr(new_fname_episodes)
                                                newer_fname_episodes = []
                                                for new_ep_fname in new_fname_episodes:
                                                    try:
                                                        newer_fname_episodes.append(
                                                            str(int(re.sub(
                                                                replacer, "", new_ep_fname, 1))))
                                                    except:
                                                        pass
                                                replacer = longest_substr(newer_fname_episodes)
                                                even_newer_fname_episodes = []
                                                for newer_ep_fname in newer_fname_episodes:
                                                    try:
                                                        even_newer_fname_episodes.append(
                                                            str(int(re.sub(
                                                                replacer, "", newer_ep_fname, 1))))
                                                    except:
                                                        pass
                                                # Use the most-reduced list that
                                                # still has entries.
                                                if even_newer_fname_episodes:
                                                    fname_episodes = even_newer_fname_episodes
                                                elif newer_fname_episodes:
                                                    fname_episodes = newer_fname_episodes
                                                elif new_fname_episodes:
                                                    fname_episodes = new_fname_episodes
                                                # Collect link ids whose episode
                                                # number differs from the wanted one.
                                                delete_linkids = []
                                                pos = 0
                                                for delete_id in package['linkids']:
                                                    if str(episode) != str(
                                                            fname_episodes[pos]):
                                                        delete_linkids.append(delete_id)
                                                    pos += 1
                                                if delete_linkids:
                                                    delete_uuids = [package['uuid']]
                                                    RssDb(
                                                        dbfile,
                                                        'episode_remover').delete(title[0])
                                                    device = remove_from_linkgrabber(
                                                        configfile, device,
                                                        delete_linkids, delete_uuids)
                                        if autostart:
                                            device = move_to_downloads(
                                                configfile, device,
                                                package['linkids'], [package['uuid']])
                                        if device:
                                            db.delete(title[0])
                            if offline_packages:
                                for package in offline_packages:
                                    if title[0] in package[
                                            'name'] or title[0].replace(
                                                ".", " ") in package['name']:
                                        notify_list.append("[Offline] - " + title[0])
                                        print((u"[Offline] - " + title[0]))
                                        db.delete(title[0])
                            if encrypted_packages:
                                for package in encrypted_packages:
                                    if title[0] in package[
                                            'name'] or title[0].replace(
                                                ".", " ") in package['name']:
                                        if title[1] == 'added':
                                            # First failure: retry decryption once.
                                            if retry_decrypt(
                                                    configfile, dbfile, device,
                                                    package['linkids'],
                                                    [package['uuid']],
                                                    package['urls']):
                                                db.delete(title[0])
                                                db.store(title[0], 'retried')
                                        else:
                                            # Retry already happened: hand over to
                                            # manual Click'n'Load and notify.
                                            add_decrypt(
                                                package['name'], package['url'], "", dbfile)
                                            device = remove_from_linkgrabber(
                                                configfile, device,
                                                package['linkids'], [package['uuid']])
                                            notify_list.append(
                                                "[Click'n'Load notwendig] - " + title[0])
                                            print(
                                                u"[Click'n'Load notwendig] - " + title[0])
                                            db.delete(title[0])
                else:
                    # Nothing pending anywhere and the grabber is idle:
                    # clear the watch db.
                    if not grabber_collecting:
                        db.reset()
                if notify_list:
                    notify(notify_list, configfile)
                time.sleep(30)
        except Exception:
            # Never let the watchdog die; log and retry after a pause.
            traceback.print_exc()
            time.sleep(30)
def crawler(configfile, dbfile, device, rsscrawler, log_level, log_file, log_format):
    # Main crawl loop (variant using the root logger): configure logging,
    # notify about newly failed/offline packages, then run all search tasks
    # either forever at a randomized interval or once with --testlauf.
    sys.stdout = Unbuffered(sys.stdout)
    logger = logging.getLogger('')
    logger.setLevel(log_level)
    console = logging.StreamHandler(stream=sys.stdout)
    formatter = logging.Formatter(log_format)
    console.setLevel(log_level)
    logfile = logging.handlers.RotatingFileHandler(log_file)
    logfile.setFormatter(formatter)
    logfile.setLevel(logging.INFO)
    logger.addHandler(logfile)
    logger.addHandler(console)
    if log_level == 10:
        # Debug level: mirror output into a separate debug log file.
        logfile_debug = logging.handlers.RotatingFileHandler(
            log_file.replace("RSScrawler.log", "RSScrawler_DEBUG.log"))
        logfile_debug.setFormatter(formatter)
        logfile_debug.setLevel(10)
        logger.addHandler(logfile_debug)
    # Quiet noisy third-party loggers and warnings.
    logging.getLogger("requests").setLevel(logging.WARNING)
    logging.getLogger("urllib3").setLevel(logging.WARNING)
    warnings.simplefilter("ignore", UnicodeWarning)
    log_debug = logging.debug
    arguments = docopt(__doc__, version='RSScrawler')
    if not arguments['--testlauf']:
        while True:
            try:
                if not device or not is_device(device):
                    device = get_device(configfile)
                check_url(configfile, dbfile)
                start_time = time.time()
                log_debug("--------Alle Suchfunktion gestartet.--------")
                # Report packages that newly failed or went offline.
                failed_packages = check_failed_packages(configfile, device)
                if failed_packages:
                    device = failed_packages[0]
                    notify_new_failed_packages(failed_packages[3], True, configfile, dbfile)
                    notify_new_failed_packages(failed_packages[4], False, configfile, dbfile)
                device = ombi(configfile, dbfile, device, log_debug)
                for task in search_pool(configfile, dbfile, device, logging):
                    name = task._INTERNAL_NAME
                    try:
                        file = " - Liste: " + task.filename
                    except AttributeError:
                        # Not every task is list-based.
                        file = ""
                    log_debug("-----------Suchfunktion (" + name + file + ") gestartet!-----------")
                    device = task.periodical_task()
                    log_debug("-----------Suchfunktion (" + name + file + ") ausgeführt!-----------")
                end_time = time.time()
                total_time = end_time - start_time
                # Configured interval plus up to a quarter-interval of jitter.
                interval = int(rsscrawler.get('interval')) * 60
                random_range = random.randrange(0, interval // 4)
                wait = interval + random_range
                log_debug(
                    "-----Alle Suchfunktion ausgeführt (Dauer: " + readable_time(
                        total_time) + ")! Wartezeit bis zum nächsten Suchlauf: " + readable_time(wait))
                print(time.strftime("%Y-%m-%d %H:%M:%S") + u" - Alle Suchfunktion ausgeführt (Dauer: " + readable_time(
                    total_time) + u")! Wartezeit bis zum nächsten Suchlauf: " + readable_time(wait))
                time.sleep(wait)
                log_debug("-------------Wartezeit verstrichen-------------")
            except Exception:
                # Keep the loop alive on any error.
                traceback.print_exc()
    else:
        # --testlauf: single pass, no waiting.
        try:
            if not device or not is_device(device):
                device = get_device(configfile)
            check_url(configfile, dbfile)
            start_time = time.time()
            log_debug("--------Testlauf gestartet.--------")
            failed_packages = check_failed_packages(configfile, device)
            if failed_packages:
                device = failed_packages[0]
                notify_new_failed_packages(failed_packages[3], True, configfile, dbfile)
                notify_new_failed_packages(failed_packages[4], False, configfile, dbfile)
            device = ombi(configfile, dbfile, device, log_debug)
            for task in search_pool(configfile, dbfile, device, logging):
                name = task._INTERNAL_NAME
                try:
                    file = " - Liste: " + task.filename
                except AttributeError:
                    file = ""
                log_debug("-----------Suchfunktion (" + name + file + ") gestartet!-----------")
                task.periodical_task()
                log_debug("-----------Suchfunktion (" + name + file + ") ausgeführt!-----------")
            end_time = time.time()
            total_time = end_time - start_time
            log_debug(
                "---Testlauf ausgeführt (Dauer: " + readable_time(total_time) + ")!---")
            print(time.strftime("%Y-%m-%d %H:%M:%S") +
                  u" - Testlauf ausgeführt (Dauer: " + readable_time(total_time) + ")!")
        except Exception:
            traceback.print_exc()
def retry_decrypt(configfile, dbfile, device, linkids, uuid, links):
    # Retry decryption of a failed package: find it by UUID first in the
    # linkgrabber, then (if absent there) in the download list; remove it
    # and re-add its links via download() to the original save path.
    # Returns the device on success, False otherwise.
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if device:
            try:
                package = device.linkgrabber.query_packages(
                    params=[{
                        "availableOfflineCount": True,
                        "availableOnlineCount": True,
                        "availableTempUnknownCount": True,
                        "availableUnknownCount": True,
                        "bytesTotal": True,
                        "childCount": True,
                        "comment": True,
                        "enabled": True,
                        "hosts": True,
                        "maxResults": -1,
                        "packageUUIDs": uuid,
                        "priority": True,
                        "saveTo": True,
                        "startAt": 0,
                        "status": True
                    }])
            except rsscrawler.myjdapi.TokenExpiredException:
                # Session token expired - reconnect once and retry the query.
                device = get_device(configfile)
                if not device or not is_device(device):
                    return False
                package = device.linkgrabber.query_packages(
                    params=[{
                        "availableOfflineCount": True,
                        "availableOnlineCount": True,
                        "availableTempUnknownCount": True,
                        "availableUnknownCount": True,
                        "bytesTotal": True,
                        "childCount": True,
                        "comment": True,
                        "enabled": True,
                        "hosts": True,
                        "maxResults": -1,
                        "packageUUIDs": uuid,
                        "priority": True,
                        "saveTo": True,
                        "startAt": 0,
                        "status": True
                    }])
            if not package:
                # Not in the linkgrabber - the package may already be in the
                # download list.
                try:
                    package = device.downloads.query_packages(
                        params=[{
                            "bytesLoaded": True,
                            "bytesTotal": True,
                            "comment": True,
                            "enabled": True,
                            "eta": True,
                            "priority": True,
                            "finished": True,
                            "running": True,
                            "speed": True,
                            "status": True,
                            "childCount": True,
                            "hosts": True,
                            "saveTo": True,
                            "maxResults": -1,
                            "packageUUIDs": uuid,
                            "startAt": 0,
                        }])
                except rsscrawler.myjdapi.TokenExpiredException:
                    device = get_device(configfile)
                    if not device or not is_device(device):
                        return False
                    package = device.downloads.query_packages(
                        params=[{
                            "bytesLoaded": True,
                            "bytesTotal": True,
                            "comment": True,
                            "enabled": True,
                            "eta": True,
                            "priority": True,
                            "finished": True,
                            "running": True,
                            "speed": True,
                            "status": True,
                            "childCount": True,
                            "hosts": True,
                            "saveTo": True,
                            "maxResults": -1,
                            "packageUUIDs": uuid,
                            "startAt": 0,
                        }])
            if package:
                # Drop the stale package, then re-add the links with the
                # original title and save path.
                remove_from_linkgrabber(configfile, device, linkids, uuid)
                title = package[0].get('name')
                full_path = package[0].get('saveTo')
                download(configfile, dbfile, device, title, None, links, None, full_path)
                return device
            else:
                return False
        else:
            return False
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def download(configfile, dbfile, device, title, subdir, old_links, password, full_path=None, autostart=False):
    """Add links to the linkgrabber as one package and record it in 'crawldog'.

    *old_links* may be a single link or a list (duplicates are dropped while
    preserving order). Destination is *full_path* when given, otherwise it
    is derived from *subdir* and the 'subdir' crawljob option; Remux paths
    get a lower priority. The title is stored in the 'crawldog' db as
    'added' (or bumped to 'retried' if already present).

    Returns the device on success, ``False`` otherwise.
    """
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if not device:
            # Fix: get_device() returns False on failure. The original code
            # continued regardless and crashed with an uncaught
            # AttributeError on False.linkgrabber.
            return False
        if isinstance(old_links, list):
            # De-duplicate while preserving order.
            links = []
            for link in old_links:
                if link not in links:
                    links.append(link)
        else:
            links = [old_links]
        links = str(links).replace(" ", "")
        crawljobs = RssConfig('Crawljobs', configfile)
        usesubdir = crawljobs.get("subdir")
        priority = "DEFAULT"
        if full_path:
            path = full_path
        else:
            if usesubdir:
                path = subdir + "/<jd:packagename>"
            else:
                path = "<jd:packagename>"
        if "Remux" in path:
            # Remuxes are large and non-urgent - queue them last.
            priority = "LOWER"
        try:
            device.linkgrabber.add_links(
                params=[{
                    "autostart": autostart,
                    "links": links,
                    "packageName": title,
                    "extractPassword": password,
                    "priority": priority,
                    "downloadPassword": password,
                    "destinationFolder": path,
                    "comment": "RSScrawler by rix1337",
                    "overwritePackagizerRules": False
                }])
        except rsscrawler.myjdapi.TokenExpiredException:
            # Session token expired - reconnect once and retry.
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            device.linkgrabber.add_links(
                params=[{
                    "autostart": autostart,
                    "links": links,
                    "packageName": title,
                    "extractPassword": password,
                    "priority": priority,
                    "downloadPassword": password,
                    "destinationFolder": path,
                    "comment": "RSScrawler by rix1337",
                    "overwritePackagizerRules": False
                }])
        # Track the title so crawldog() can watch its progress.
        db = RssDb(dbfile, 'crawldog')
        if db.retrieve(title):
            db.delete(title)
            db.store(title, 'retried')
        else:
            db.store(title, 'added')
        return device
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
def get_info(configfile, device):
    # Collect a full status snapshot across both the download list and the
    # linkgrabber, merging their failed and offline package lists.
    # Returns [device, state, collecting, update_ready,
    # [downloader_decrypted, linkgrabber_decrypted, offline, failed]]
    # or False.
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if device:
            try:
                downloader_state = device.downloadcontroller.get_current_state()
                grabber_collecting = device.linkgrabber.is_collecting()
                device.update.run_update_check()
                update_ready = device.update.is_update_available()
                # Both helpers return (failed, offline, decrypted, device).
                packages_in_downloader = get_packages_in_downloader(
                    configfile, device)
                packages_in_downloader_failed = packages_in_downloader[0]
                packages_in_downloader_offline = packages_in_downloader[1]
                packages_in_downloader_decrypted = packages_in_downloader[2]
                device = packages_in_downloader[3]
                packages_in_linkgrabber = get_packages_in_linkgrabber(
                    configfile, device)
                packages_in_linkgrabber_failed = packages_in_linkgrabber[0]
                packages_in_linkgrabber_offline = packages_in_linkgrabber[1]
                packages_in_linkgrabber_decrypted = packages_in_linkgrabber[2]
                device = packages_in_linkgrabber[3]
            except rsscrawler.myjdapi.TokenExpiredException:
                # Session token expired - reconnect once and redo all queries.
                device = get_device(configfile)
                if not device or not is_device(device):
                    return False
                downloader_state = device.downloadcontroller.get_current_state()
                grabber_collecting = device.linkgrabber.is_collecting()
                device.update.run_update_check()
                update_ready = device.update.is_update_available()
                packages_in_downloader = get_packages_in_downloader(
                    configfile, device)
                packages_in_downloader_failed = packages_in_downloader[0]
                packages_in_downloader_offline = packages_in_downloader[1]
                packages_in_downloader_decrypted = packages_in_downloader[2]
                device = packages_in_downloader[3]
                packages_in_linkgrabber = get_packages_in_linkgrabber(
                    configfile, device)
                packages_in_linkgrabber_failed = packages_in_linkgrabber[0]
                packages_in_linkgrabber_offline = packages_in_linkgrabber[1]
                packages_in_linkgrabber_decrypted = packages_in_linkgrabber[2]
                device = packages_in_linkgrabber[3]
            # Normalize failed packages so the decrypter URL comes first.
            if packages_in_linkgrabber_failed:
                packages_in_linkgrabber_failed = cryptor_url_first(
                    configfile, packages_in_linkgrabber_failed)
            if packages_in_downloader_failed:
                packages_in_downloader_failed = cryptor_url_first(
                    configfile, packages_in_downloader_failed)
            # Merge the failed lists from both queues.
            if packages_in_downloader_failed and packages_in_linkgrabber_failed:
                packages_failed = packages_in_downloader_failed + packages_in_linkgrabber_failed
            elif packages_in_downloader_failed:
                packages_failed = packages_in_downloader_failed
            else:
                packages_failed = packages_in_linkgrabber_failed
            # Merge the offline lists from both queues.
            if packages_in_downloader_offline and packages_in_linkgrabber_offline:
                packages_offline = packages_in_downloader_offline + packages_in_linkgrabber_offline
            elif packages_in_downloader_offline:
                packages_offline = packages_in_downloader_offline
            else:
                packages_offline = packages_in_linkgrabber_offline
            return [
                device, downloader_state, grabber_collecting, update_ready,
                [
                    packages_in_downloader_decrypted,
                    packages_in_linkgrabber_decrypted, packages_offline,
                    packages_failed
                ]
            ]
        else:
            return False
    except rsscrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False