Example #1
def get_device(configfile):
    conf = CrawlerConfig('FeedCrawler', configfile)
    myjd_user = str(conf.get('myjd_user'))
    myjd_pass = str(conf.get('myjd_pass'))
    myjd_device = str(conf.get('myjd_device'))

    jd = feedcrawler.myjdapi.Myjdapi()
    jd.set_app_key('FeedCrawler')

    if myjd_user and myjd_pass and myjd_device:
        try:
            jd.connect(myjd_user, myjd_pass)
            jd.update_devices()
            device = jd.get_device(myjd_device)
        except feedcrawler.myjdapi.MYJDException as e:
            print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
            return False
        if not device or not is_device(device):
            return False
        return device
    elif myjd_user and myjd_pass:
        myjd_device = get_if_one_device(myjd_user, myjd_pass)
        try:
            jd.connect(myjd_user, myjd_pass)
            jd.update_devices()
            device = jd.get_device(myjd_device)
        except feedcrawler.myjdapi.MYJDException as e:
            print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
            return False
        if not device or not is_device(device):
            return False
        return device
    else:
        return False
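
A minimal usage sketch for the helper above (not part of the original listing): the config path and the follow-up call are illustrative assumptions; the helpers in the later examples accept the cached device and re-resolve it themselves when the session token expires.

# Hypothetical usage, assuming "FeedCrawler.ini" holds the MyJDownloader credentials.
device = get_device("FeedCrawler.ini")
if device:
    # Helpers such as jdownloader_stop return the (possibly refreshed) device or False.
    device = jdownloader_stop("FeedCrawler.ini", device)
else:
    print("No MyJDownloader device configured or connection failed.")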
Example #2
def jdownloader_stop(configfile, device):
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if device:
            try:
                device.downloadcontroller.stop_downloads()
            except feedcrawler.myjdapi.TokenExpiredException:
                device = get_device(configfile)
                if not device or not is_device(device):
                    return False
                device.downloadcontroller.stop_downloads()
            return device
        else:
            return False
    except feedcrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
Example #3
def update_jdownloader(configfile, device):
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if device:
            try:
                device.update.restart_and_update()
            except feedcrawler.myjdapi.TokenExpiredException:
                device = get_device(configfile)
                if not device or not is_device(device):
                    return False
                device.update.restart_and_update()
            return device
        else:
            return False
    except feedcrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
Example #4
def move_to_downloads(configfile, device, linkids, uuid):
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if device:
            try:
                device.linkgrabber.move_to_downloadlist(linkids, uuid)
            except feedcrawler.myjdapi.TokenExpiredException:
                device = get_device(configfile)
                if not device or not is_device(device):
                    return False
                device.linkgrabber.move_to_downloadlist(linkids, uuid)
            return device
        else:
            return False
    except feedcrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
Example #5
def do_package_merge(configfile, device, title, uuids, linkids):
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if device:
            try:
                move_to_new_package(configfile, device, linkids, uuids, title, "<jd:packagename>")
            except feedcrawler.myjdapi.TokenExpiredException:
                device = get_device(configfile)
                if not device or not is_device(device):
                    return False
                move_to_new_package(configfile, device, linkids, uuids, title, "<jd:packagename>")
            return device
        else:
            return False
    except feedcrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
Example #6
def move_to_new_package(configfile, device, linkids, package_id, new_title, new_path):
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if device:
            try:
                device.linkgrabber.move_to_new_package(linkids, package_id, new_title, new_path)
                device.downloads.move_to_new_package(linkids, package_id, new_title, new_path)
            except feedcrawler.myjdapi.TokenExpiredException:
                device = get_device(configfile)
                if not device or not is_device(device):
                    return False
                device.linkgrabber.move_to_new_package(linkids, package_id, new_title, new_path)
                device.downloads.move_to_new_package(linkids, package_id, new_title, new_path)
            return device
        else:
            return False
    except feedcrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
Example #7
def get_state(configfile, device):
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if device:
            try:
                downloader_state = device.downloadcontroller.get_current_state()
                grabber_collecting = device.linkgrabber.is_collecting()
            except feedcrawler.myjdapi.TokenExpiredException:
                device = get_device(configfile)
                if not device or not is_device(device):
                    return False
                downloader_state = device.downloadcontroller.get_current_state()
                grabber_collecting = device.linkgrabber.is_collecting()
            return [device, downloader_state, grabber_collecting]
        else:
            return False
    except feedcrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
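
As shown above, get_state returns False on failure and otherwise a three-element list; a short, hedged sketch of how a caller might unpack it (variable names are illustrative):

state = get_state(configfile, device)
if state:
    device, downloader_state, grabber_collecting = state  # refreshed device plus the two status values
else:
    print("JDownloader state could not be read.")  # connection error or no device configured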
Example #8
def retry_decrypt(configfile, dbfile, device, linkids, uuid, links):
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if device:
            try:
                package = device.linkgrabber.query_packages(params=[
                    {
                        "availableOfflineCount": True,
                        "availableOnlineCount": True,
                        "availableTempUnknownCount": True,
                        "availableUnknownCount": True,
                        "bytesTotal": True,
                        "childCount": True,
                        "comment": True,
                        "enabled": True,
                        "hosts": True,
                        "maxResults": -1,
                        "packageUUIDs": uuid,
                        "priority": True,
                        "saveTo": True,
                        "startAt": 0,
                        "status": True
                    }])
            except feedcrawler.myjdapi.TokenExpiredException:
                device = get_device(configfile)
                if not device or not is_device(device):
                    return False
                package = device.linkgrabber.query_packages(params=[
                    {
                        "availableOfflineCount": True,
                        "availableOnlineCount": True,
                        "availableTempUnknownCount": True,
                        "availableUnknownCount": True,
                        "bytesTotal": True,
                        "childCount": True,
                        "comment": True,
                        "enabled": True,
                        "hosts": True,
                        "maxResults": -1,
                        "packageUUIDs": uuid,
                        "priority": True,
                        "saveTo": True,
                        "startAt": 0,
                        "status": True
                    }])
            if not package:
                try:
                    package = device.downloads.query_packages(params=[
                        {
                            "bytesLoaded": True,
                            "bytesTotal": True,
                            "comment": True,
                            "enabled": True,
                            "eta": True,
                            "priority": True,
                            "finished": True,
                            "running": True,
                            "speed": True,
                            "status": True,
                            "childCount": True,
                            "hosts": True,
                            "saveTo": True,
                            "maxResults": -1,
                            "packageUUIDs": uuid,
                            "startAt": 0,
                        }])
                except feedcrawler.myjdapi.TokenExpiredException:
                    device = get_device(configfile)
                    if not device or not is_device(device):
                        return False
                    package = device.downloads.query_packages(params=[
                        {
                            "bytesLoaded": True,
                            "bytesTotal": True,
                            "comment": True,
                            "enabled": True,
                            "eta": True,
                            "priority": True,
                            "finished": True,
                            "running": True,
                            "speed": True,
                            "status": True,
                            "childCount": True,
                            "hosts": True,
                            "saveTo": True,
                            "maxResults": -1,
                            "packageUUIDs": uuid,
                            "startAt": 0,
                        }])
            if package:
                remove_from_linkgrabber(configfile, device, linkids, uuid)
                title = package[0].get('name')
                full_path = package[0].get('saveTo')
                download(configfile, dbfile, device, title, None, links, None, full_path)
                return device
            else:
                return False
        else:
            return False
    except feedcrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
Example #9
def download(configfile, dbfile, device, title, subdir, old_links, password, full_path=None, autostart=False):
    try:
        if not device or not is_device(device):
            device = get_device(configfile)

        if isinstance(old_links, list):
            links = []
            for link in old_links:
                if link not in links:
                    links.append(link)
        else:
            links = [old_links]

        links = str(links).replace(" ", "")
        crawljobs = CrawlerConfig('Crawljobs', configfile)
        usesubdir = crawljobs.get("subdir")
        priority = "DEFAULT"

        if full_path:
            path = full_path
        else:
            if usesubdir:
                path = subdir + "/<jd:packagename>"
            else:
                path = "<jd:packagename>"
        if "Remux" in path:
            priority = "LOWER"

        try:
            device.linkgrabber.add_links(params=[
                {
                    "autostart": autostart,
                    "links": links,
                    "packageName": title,
                    "extractPassword": password,
                    "priority": priority,
                    "downloadPassword": password,
                    "destinationFolder": path,
                    "comment": "FeedCrawler by rix1337",
                    "overwritePackagizerRules": False
                }])
        except feedcrawler.myjdapi.TokenExpiredException:
            device = get_device(configfile)
            if not device or not is_device(device):
                return False
            device.linkgrabber.add_links(params=[
                {
                    "autostart": autostart,
                    "links": links,
                    "packageName": title,
                    "extractPassword": password,
                    "priority": priority,
                    "downloadPassword": password,
                    "destinationFolder": path,
                    "comment": "FeedCrawler by rix1337",
                    "overwritePackagizerRules": False
                }])
        db = FeedDb(dbfile, 'crawldog')
        if db.retrieve(title):
            db.delete(title)
            db.store(title, 'retried')
        else:
            db.store(title, 'added')
        return device
    except feedcrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
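
A hedged sketch of calling the download helper above; the title, subdirectory and links are placeholder values, and the positional arguments follow the signature defined above:

links = ["https://example.com/file.part1.rar", "https://example.com/file.part2.rar"]
device = download(configfile, dbfile, device, "Example.Title.S01E01", "Serien", links, None, autostart=True)
if not device:
    print("Package could not be added to the linkgrabber.")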
Example #10
def get_info(configfile, device):
    try:
        if not device or not is_device(device):
            device = get_device(configfile)
        if device:
            try:
                downloader_state = device.downloadcontroller.get_current_state()
                grabber_collecting = device.linkgrabber.is_collecting()
                device.update.run_update_check()
                update_ready = device.update.is_update_available()

                packages_in_downloader = get_packages_in_downloader(configfile, device)
                packages_in_downloader_failed = packages_in_downloader[0]
                packages_in_downloader_offline = packages_in_downloader[1]
                packages_in_downloader_decrypted = packages_in_downloader[2]
                device = packages_in_downloader[3]

                packages_in_linkgrabber = get_packages_in_linkgrabber(configfile, device)
                packages_in_linkgrabber_failed = packages_in_linkgrabber[0]
                packages_in_linkgrabber_offline = packages_in_linkgrabber[1]
                packages_in_linkgrabber_decrypted = packages_in_linkgrabber[2]
                device = packages_in_linkgrabber[3]
            except feedcrawler.myjdapi.TokenExpiredException:
                device = get_device(configfile)
                if not device or not is_device(device):
                    return False
                downloader_state = device.downloadcontroller.get_current_state()
                grabber_collecting = device.linkgrabber.is_collecting()
                device.update.run_update_check()
                update_ready = device.update.is_update_available()

                packages_in_downloader = get_packages_in_downloader(configfile, device)
                packages_in_downloader_failed = packages_in_downloader[0]
                packages_in_downloader_offline = packages_in_downloader[1]
                packages_in_downloader_decrypted = packages_in_downloader[2]
                device = packages_in_downloader[3]

                packages_in_linkgrabber = get_packages_in_linkgrabber(configfile, device)
                packages_in_linkgrabber_failed = packages_in_linkgrabber[0]
                packages_in_linkgrabber_offline = packages_in_linkgrabber[1]
                packages_in_linkgrabber_decrypted = packages_in_linkgrabber[2]
                device = packages_in_linkgrabber[3]

            if packages_in_linkgrabber_failed:
                packages_in_linkgrabber_failed = cryptor_url_first(packages_in_linkgrabber_failed)
            if packages_in_downloader_failed:
                packages_in_downloader_failed = cryptor_url_first(packages_in_downloader_failed)

            if packages_in_downloader_failed and packages_in_linkgrabber_failed:
                packages_failed = packages_in_downloader_failed + packages_in_linkgrabber_failed
            elif packages_in_downloader_failed:
                packages_failed = packages_in_downloader_failed
            else:
                packages_failed = packages_in_linkgrabber_failed

            if packages_in_downloader_offline and packages_in_linkgrabber_offline:
                packages_offline = packages_in_downloader_offline + packages_in_linkgrabber_offline
            elif packages_in_downloader_offline:
                packages_offline = packages_in_downloader_offline
            else:
                packages_offline = packages_in_linkgrabber_offline

            return [device, downloader_state, grabber_collecting, update_ready,
                    [packages_in_downloader_decrypted, packages_in_linkgrabber_decrypted,
                     packages_offline,
                     packages_failed]]
        else:
            return False
    except feedcrawler.myjdapi.MYJDException as e:
        print(u"Fehler bei der Verbindung mit MyJDownloader: " + str(e))
        return False
Example #11
def crawler(configfile, dbfile, device, feedcrawler, log_level, log_file,
            log_format):
    sys.stdout = Unbuffered(sys.stdout)

    logger = logging.getLogger('feedcrawler')
    logger.setLevel(log_level)

    console = logging.StreamHandler(stream=sys.stdout)
    formatter = logging.Formatter(log_format)
    console.setLevel(log_level)

    logfile = logging.handlers.RotatingFileHandler(log_file)
    logfile.setFormatter(formatter)
    logfile.setLevel(logging.INFO)

    logger.addHandler(logfile)
    logger.addHandler(console)

    if log_level == 10:
        logfile_debug = logging.handlers.RotatingFileHandler(
            log_file.replace("FeedCrawler.log", "FeedCrawler_DEBUG.log"))
        logfile_debug.setFormatter(formatter)
        logfile_debug.setLevel(10)
        logger.addHandler(logfile_debug)

    disable_request_warnings(InsecureRequestWarning)

    log_debug = logger.debug

    ombi_first_launch = True

    crawltimes = FeedDb(dbfile, "crawltimes")

    arguments = docopt(__doc__, version='FeedCrawler')
    while True:
        try:
            if not device or not is_device(device):
                device = get_device(configfile)
            FeedDb(dbfile, 'cached_requests').reset()
            FeedDb(dbfile, 'cached_requests').cleanup()
            scraper = check_url(configfile, dbfile)
            start_time = time.time()
            crawltimes.update_store("active", "True")
            crawltimes.update_store("start_time", start_time * 1000)
            log_debug("--------Alle Suchfunktion gestartet.--------")
            requested_movies = 0
            requested_shows = 0
            ombi_string = ""
            if device:
                ombi_results = ombi(configfile, dbfile, device, log_debug,
                                    ombi_first_launch)
                device = ombi_results[0]
                ombi_results = ombi_results[1]
                requested_movies = ombi_results[0]
                requested_shows = ombi_results[1]
                ombi_first_launch = False
            if requested_movies or requested_shows:
                ombi_string = u"Die Ombi-Suche lief für: "
                if requested_movies:
                    ombi_string = ombi_string + str(
                        requested_movies) + " Filme"
                    if requested_shows:
                        ombi_string = ombi_string + " und "
                if requested_shows:
                    ombi_string = ombi_string + str(
                        requested_shows) + " Serien"
            for task in search_pool(configfile, dbfile, device, logger,
                                    scraper):
                name = task._SITE
                try:
                    file = " - Liste: " + task.filename
                except AttributeError:
                    file = ""
                log_debug("-----------Suchfunktion (" + name + file +
                          ") gestartet!-----------")
                device = task.periodical_task()
                log_debug("-----------Suchfunktion (" + name + file +
                          ") ausgeführt!-----------")
            cached_requests = FeedDb(dbfile, 'cached_requests').count()
            request_cache_string = u"Der FeedCrawler-Cache hat " + str(
                cached_requests) + " HTTP-Requests gespart!"
            end_time = time.time()
            total_time = end_time - start_time
            interval = int(feedcrawler.get('interval')) * 60
            random_range = random.randrange(0, interval // 4)
            wait = interval + random_range
            next_start = end_time + wait
            log_debug(
                time.strftime("%Y-%m-%d %H:%M:%S") +
                " - Alle Suchfunktion ausgeführt (Dauer: " +
                readable_time(total_time) + u")!")
            if ombi_string:
                log_debug(
                    time.strftime("%Y-%m-%d %H:%M:%S") + u" - " + ombi_string)
            log_debug(
                time.strftime("%Y-%m-%d %H:%M:%S") + u" - " +
                request_cache_string)
            log_debug("-----------Wartezeit bis zum nächsten Suchlauf: " +
                      readable_time(wait) + '-----------')
            ombi_string = ""
            print(
                time.strftime("%Y-%m-%d %H:%M:%S") +
                u" - Alle Suchfunktion ausgeführt (Dauer: " +
                readable_time(total_time) + u")!", ombi_string + " - " +
                request_cache_string if ombi_string else request_cache_string)
            print(u"-----------Wartezeit bis zum nächsten Suchlauf: " +
                  readable_time(wait) + '-----------')
            crawltimes.update_store("end_time", end_time * 1000)
            crawltimes.update_store("total_time", readable_time(total_time))
            crawltimes.update_store("next_start", next_start * 1000)
            crawltimes.update_store("active", "False")
            FeedDb(dbfile, 'cached_requests').reset()
            FeedDb(dbfile, 'cached_requests').cleanup()

            if arguments['--testlauf']:
                log_debug(u"-----------Testlauf beendet!-----------")
                print(u"-----------Testlauf beendet!-----------")
                return

            wait_chunks = wait // 10
            start_now_triggered = False
            while wait_chunks:
                time.sleep(10)
                if FeedDb(dbfile, 'crawltimes').retrieve("startnow"):
                    FeedDb(dbfile, 'crawltimes').delete("startnow")
                    start_now_triggered = True
                    break

                wait_chunks -= 1

            if start_now_triggered:
                log_debug("----------Wartezeit vorzeitig beendet----------")
            else:
                log_debug("-------------Wartezeit verstrichen-------------")
        except Exception:
            traceback.print_exc()
            time.sleep(10)
Example #12
def crawldog(configfile, dbfile):
    disable_request_warnings(InsecureRequestWarning)
    crawljobs = CrawlerConfig('Crawljobs', configfile)
    autostart = crawljobs.get("autostart")
    db = FeedDb(dbfile, 'crawldog')

    grabber_was_collecting = False
    grabber_collecting = False
    device = False

    while True:
        try:
            if not device or not is_device(device):
                device = get_device(configfile)

            myjd_packages = get_info(configfile, device)
            if myjd_packages:
                grabber_collecting = myjd_packages[2]

                if grabber_was_collecting or grabber_collecting:
                    grabber_was_collecting = grabber_collecting
                    time.sleep(5)
                else:
                    packages_in_downloader_decrypted = myjd_packages[4][0]
                    packages_in_linkgrabber_decrypted = myjd_packages[4][1]
                    offline_packages = myjd_packages[4][2]
                    encrypted_packages = myjd_packages[4][3]

                    try:
                        watched_titles = db.retrieve_all_titles()
                    except:
                        watched_titles = False

                    notify_list = []

                    if packages_in_downloader_decrypted or packages_in_linkgrabber_decrypted or offline_packages or encrypted_packages:

                        if watched_titles:
                            for title in watched_titles:
                                if packages_in_downloader_decrypted:
                                    for package in packages_in_downloader_decrypted:
                                        if title[0] in package['name'] or title[0].replace(".", " ") in package['name']:
                                            check = hoster_check(configfile, device, [package], title[0], [0])
                                            device = check[0]
                                            if device:
                                                db.delete(title[0])

                                if packages_in_linkgrabber_decrypted:
                                    for package in packages_in_linkgrabber_decrypted:
                                        if title[0] in package['name'] or title[0].replace(".", " ") in package['name']:
                                            check = hoster_check(configfile, device, [package], title[0], [0])
                                            device = check[0]
                                            episode = FeedDb(dbfile, 'episode_remover').retrieve(title[0])
                                            if episode:
                                                filenames = package['filenames']
                                                if len(filenames) > 1:
                                                    fname_episodes = []
                                                    for fname in filenames:
                                                        try:
                                                            if re.match(r'.*S\d{1,3}E\d{1,3}.*', fname, flags=re.IGNORECASE):
                                                                fname = re.findall(r'S\d{1,3}E(\d{1,3})', fname, flags=re.IGNORECASE).pop()
                                                            else:
                                                                fname = fname.replace("hddl8", "").replace("dd51", "").replace("264", "").replace("265", "")
                                                        except:
                                                            fname = fname.replace("hddl8", "").replace("dd51", "").replace("264", "").replace("265", "")
                                                        fname_episode = "".join(re.findall(r'\d+', fname.split(".part")[0]))
                                                        try:
                                                            fname_episodes.append(str(int(fname_episode)))
                                                        except:
                                                            pass
                                                    replacer = longest_substr(fname_episodes)

                                                    new_fname_episodes = []
                                                    for new_ep_fname in fname_episodes:
                                                        try:
                                                            new_fname_episodes.append(str(int(new_ep_fname.replace(replacer, ""))))
                                                        except:
                                                            pass
                                                    replacer = longest_substr(new_fname_episodes)

                                                    newer_fname_episodes = []
                                                    for new_ep_fname in new_fname_episodes:
                                                        try:
                                                            newer_fname_episodes.append(str(int(re.sub(replacer, "", new_ep_fname, 1))))
                                                        except:
                                                            pass

                                                    replacer = longest_substr(newer_fname_episodes)

                                                    even_newer_fname_episodes = []
                                                    for newer_ep_fname in newer_fname_episodes:
                                                        try:
                                                            even_newer_fname_episodes.append(str(int(re.sub(replacer, "", newer_ep_fname, 1))))
                                                        except:
                                                            pass

                                                    if even_newer_fname_episodes:
                                                        fname_episodes = even_newer_fname_episodes
                                                    elif newer_fname_episodes:
                                                        fname_episodes = newer_fname_episodes
                                                    elif new_fname_episodes:
                                                        fname_episodes = new_fname_episodes

                                                    delete_linkids = []
                                                    pos = 0
                                                    for delete_id in package['linkids']:
                                                        if str(episode) != str(fname_episodes[pos]):
                                                            delete_linkids.append(delete_id)
                                                        pos += 1
                                                    if delete_linkids:
                                                        delete_uuids = [package['uuid']]
                                                        FeedDb(dbfile, 'episode_remover').delete(title[0])
                                                        device = remove_from_linkgrabber(configfile, device, delete_linkids, delete_uuids)
                                            if autostart:
                                                device = move_to_downloads(configfile, device, package['linkids'], [package['uuid']])
                                            if device:
                                                db.delete(title[0])

                                if offline_packages:
                                    for package in offline_packages:
                                        if title[0] in package['name'] or title[0].replace(".", " ") in package['name']:
                                            notify_list.append("[Offline] - " + title[0])
                                            print(u"[Offline] - " + title[0])
                                            db.delete(title[0])

                                if encrypted_packages:
                                    for package in encrypted_packages:
                                        if title[0] in package['name'] or title[0].replace(".", " ") in package['name']:
                                            if title[1] == 'added':
                                                if retry_decrypt(configfile, dbfile, device, package['linkids'], [package['uuid']], package['urls']):
                                                    db.delete(title[0])
                                                    db.store(title[0], 'retried')
                                            else:
                                                add_decrypt(package['name'], package['url'], "", dbfile)
                                                device = remove_from_linkgrabber(configfile, device, package['linkids'], [package['uuid']])
                                                notify_list.append("[Click'n'Load notwendig] - " + title[0])
                                                print(u"[Click'n'Load notwendig] - " + title[0])
                                                db.delete(title[0])
                    else:
                        if not grabber_collecting:
                            db.reset()

                    if notify_list:
                        notify(notify_list, configfile)

                time.sleep(30)
            else:
                print(
                    u"Scheinbar ist der JDownloader nicht erreichbar - bitte prüfen und neustarten!"
                )
        except Exception:
            traceback.print_exc()
            time.sleep(30)