Code example #1
0
    def setUp(self):
        """Reset the database and config, then register two newznab test indexers."""
        set_and_drop()
        config.settings = Bunch.fromDict(config.initialConfig)
        self.app = web.app.test_client()
        config.settings.main.apikey = None

        # The two indexers share all default settings; only name/host/apikey
        # and the supported search ids/types differ.
        self.newznab1 = self._buildNewznabSetting(
            "newznab1", "https://indexer1.com", "apikeyindexer1.com",
            search_ids=["imdbid", "rid", "tvdbid"],
            searchTypes=["book", "tvsearch", "movie"])
        self.newznab2 = self._buildNewznabSetting(
            "newznab2", "https://indexer2.com", "apikeyindexer2.com",
            search_ids=["rid", "tvdbid"],
            searchTypes=["tvsearch", "movie"])

        config.settings.indexers = [self.newznab1, self.newznab2]
        read_indexers_from_config()

    @staticmethod
    def _buildNewznabSetting(name, host, apikey, search_ids, searchTypes):
        """Return a Bunch with the default newznab indexer settings used by these tests."""
        setting = Bunch()
        setting.enabled = True
        setting.name = name
        setting.host = host
        setting.apikey = apikey
        setting.timeout = None
        setting.hitLimit = None
        setting.score = 0
        setting.type = "newznab"
        setting.accessType = "both"
        setting.search_ids = search_ids
        setting.searchTypes = searchTypes
        return setting
Code example #2
0
File: web.py — Project: junkman690/nzbhydra
def internalapi_setsettings():
    """Persist settings posted as JSON and refresh dependent state.

    Returns "OK" on success or an "Error: ..." string on failure; never raises.
    """
    logger.debug("Set settings request")  # consistent with the sibling handler version
    try:
        config.import_config_data(request.get_json(force=True))
        # Cached config responses are now stale and must be invalidated
        internal_cache.delete_memoized(internalapi_getconfig)
        internal_cache.delete_memoized(internalapi_getsafeconfig)
        read_indexers_from_config()
        clean_up_database()
        return "OK"
    except Exception as e:
        logger.exception("Error saving settings")
        return "Error: %s" % e
Code example #3
0
def internalapi_setsettings():
    """Import posted JSON settings, invalidate cached config and reload indexers."""
    logger.debug("Set settings request")
    try:
        posted_settings = request.get_json(force=True)
        config.import_config_data(posted_settings)
        # Drop memoized config responses so the next fetch sees the new values
        for memoized_view in (internalapi_getconfig, internalapi_getsafeconfig):
            internal_cache.delete_memoized(memoized_view)
        read_indexers_from_config()
        clean_up_database()
    except Exception as e:
        logger.exception("Error saving settings")
        return "Error: %s" % e
    return "OK"
Code example #4
0
    def prepareSearchMocks(self,
                           requestsMock,
                           indexerCount=2,
                           resultsPerIndexers=1,
                           newznabItems=None,
                           title="newznab%dresult%d.title",
                           categories=None,
                           skip=None):
        """Register mocked newznab search responses for *indexerCount* indexers.

        :param requestsMock: requests_mock instance used to register the URIs
        :param indexerCount: number of indexers to prepare
        :param resultsPerIndexers: results generated per indexer (ignored when newznabItems is given)
        :param newznabItems: optional pre-built items, one list per indexer
        :param title: title template filled with (indexer index, result index)
        :param categories: categories assigned to every generated item
        :param skip: List of tuples with indexer and result index which will not be returned
        :return: all generated items, sorted by title
        """
        if skip is None:
            skip = []
        allNewznabItems = []
        self.response_callbacks = []
        self.prepareIndexers(indexerCount)

        for i in range(1, indexerCount + 1):
            # Prepare search results, unless the caller supplied them
            if newznabItems is not None:
                indexerNewznabItems = newznabItems[i - 1]
            else:
                indexerNewznabItems = [
                    mockbuilder.buildNewznabItem(
                        title % (i, j), "newznab%dresult%d.guid" % (i, j),
                        " http://newznab%dresult%d.link" % (i, j),
                        arrow.get(0).format("ddd, DD MMM YYYY HH:mm:ss Z"),
                        "newznab%dresult%d.description" % (i, j), 1000,
                        "newznab%d" % i, categories)
                    for j in range(1, resultsPerIndexers + 1)
                    if (i, j) not in skip  # idiomatic membership test
                ]
            allNewznabItems.extend(indexerNewznabItems)
            xml = mockbuilder.buildNewznabResponse("newznab%dResponse" % i,
                                                   indexerNewznabItems, 0,
                                                   len(indexerNewznabItems))

            # Any URL containing "newznab<i>" returns this indexer's XML
            requestsMock.register_uri('GET',
                                      re.compile(r'.*newznab%d.*' % i),
                                      text=xml)
        read_indexers_from_config()

        allNewznabItems = sorted(allNewznabItems, key=lambda x: x.title)
        return allNewznabItems
Code example #5
0
File: nzbhydra.py — Project: mehdroid/nzbhydra
def run():
    """Parse CLI arguments, load config and database, then start the web app."""
    global logger
    arg_parser = argparse.ArgumentParser(description="NZBHydra")
    arg_parser.add_argument("--config", action="store", help="Settings file to load", default="settings.cfg")
    arg_parser.add_argument("--database", action="store", help="Database file to load", default="nzbhydra.db")
    arg_parser.add_argument("--host", action="store", help="Host to run on")
    arg_parser.add_argument("--port", action="store", help="Port to run on", type=int)
    arg_parser.add_argument("--nobrowser", action="store_true", help="Don't open URL on startup", default=False)

    cli_args = arg_parser.parse_args()
    arg_parser.print_help()

    cfg_path = cli_args.config
    db_path = cli_args.database

    print("Loading settings from %s" % cfg_path)
    config.load(cfg_path)
    config.save(cfg_path)  # Write any new settings back to the file
    logger = log.setup_custom_logger("root")
    logger.info("Started")
    logger.info("Loading database file %s" % db_path)
    if os.path.exists(db_path):
        database.update_db(db_path)
    else:
        database.init_db(db_path)
    database.db.init(db_path)
    indexers.read_indexers_from_config()

    if config.mainSettings.debug.get():
        logger.info("Debug mode enabled")

    # CLI arguments take precedence over configured values
    host = config.mainSettings.host.get() if cli_args.host is None else cli_args.host
    port = config.mainSettings.port.get() if cli_args.port is None else cli_args.port

    logger.info("Starting web app on %s:%d" % (host, port))
    frontend_url = furl()
    frontend_url.host = "127.0.0.1"
    frontend_url.port = port
    frontend_url.scheme = "https" if config.mainSettings.ssl.get() else "http"
    if not cli_args.nobrowser and config.mainSettings.startup_browser.get():
        logger.info("Opening browser to %s" % frontend_url.url)
        webbrowser.open_new(frontend_url.url)
    else:
        logger.info("Go to %s for the frontend (or whatever your public IP is)" % frontend_url.url)

    check_for_new_version()
    web.run(host, port)
Code example #6
0
    def setUp(self):
        """Reset the database and config, then register two newznab test indexers."""
        # NOTE: a block of commented-out getIndexerSettingByName(...) calls was
        # removed here — dead code adds noise and no information.
        set_and_drop()
        config.settings = Bunch.fromDict(config.initialConfig)
        self.app = web.app.test_client()
        config.settings.main.apikey = None

        # The two indexers share all default settings; only name/host/apikey
        # and the supported search ids/types differ.
        self.newznab1 = self._buildNewznabSetting(
            "newznab1", "https://indexer1.com", "apikeyindexer1.com",
            search_ids=["imdbid", "rid", "tvdbid"],
            searchTypes=["book", "tvsearch", "movie"])
        self.newznab2 = self._buildNewznabSetting(
            "newznab2", "https://indexer2.com", "apikeyindexer2.com",
            search_ids=["rid", "tvdbid"],
            searchTypes=["tvsearch", "movie"])

        config.settings.indexers = [self.newznab1, self.newznab2]
        read_indexers_from_config()

    @staticmethod
    def _buildNewznabSetting(name, host, apikey, search_ids, searchTypes):
        """Return a Bunch with the default newznab indexer settings used by these tests."""
        setting = Bunch()
        setting.enabled = True
        setting.name = name
        setting.host = host
        setting.apikey = apikey
        setting.timeout = None
        setting.hitLimit = None
        setting.score = 0
        setting.type = "newznab"
        setting.accessType = "both"
        setting.search_ids = search_ids
        setting.searchTypes = searchTypes
        return setting
Code example #7
0
    def prepareSearchMocks(self,
                           rsps,
                           indexerCount=2,
                           resultsPerIndexers=1,
                           newznabItems=None,
                           title="newznab%dresult%d.title",
                           sleep=0):
        """Register delayed mock newznab responses for *indexerCount* indexers.

        Each indexer answers any URL containing its name with generated newznab
        XML, delivered via self.rsps_callback after a random delay of up to
        *sleep*.
        """
        testData = []
        self.response_callbacks = []
        self.prepareIndexers(indexerCount)

        for indexerNumber in range(1, indexerCount + 1):
            # Use caller-supplied items when given, otherwise generate them
            if newznabItems is None:
                indexerNewznabItems = []
                for resultNumber in range(1, resultsPerIndexers + 1):
                    generated = mockbuilder.buildNewznabItem(
                        title % (indexerNumber, resultNumber),
                        "newznab%dresult%d.guid" % (indexerNumber, resultNumber),
                        "newznab%dresult%d.link" % (indexerNumber, resultNumber),
                        arrow.get(0).format("ddd, DD MMM YYYY HH:mm:ss Z"),
                        "newznab%dresult%d.description" % (indexerNumber, resultNumber),
                        1000,
                        "newznab%d" % indexerNumber,
                        None)
                    indexerNewznabItems.append(generated)
            else:
                indexerNewznabItems = newznabItems[indexerNumber - 1]
            xml = mockbuilder.buildNewznabResponse(
                "newznab%dResponse" % indexerNumber,
                indexerNewznabItems, 0, len(indexerNewznabItems))
            self.response_callbacks.append(
                ('newznab%d' % indexerNumber, randint(0, sleep), xml))

            # Route every matching URL through the shared response callback
            rsps.add_callback(responses.GET,
                              re.compile(r'.*newznab%d.*' % indexerNumber),
                              callback=self.rsps_callback,
                              content_type='application/x-html')
        read_indexers_from_config()

        return testData
Code example #8
0
File: test_Search.py — Project: petrichor8/nzbhydra
    def testIndexersApiLimits(self):
        """Verify an indexer is no longer picked once its API hit limit is reached."""

        config.settings.searching.generate_queries = []
        # Limit the indexer to 3 API hits; no explicit reset time
        self.newznab1.hitLimit = 3
        self.newznab1.hitLimitResetTime = None
        config.settings.indexers = [self.newznab1]
        read_indexers_from_config()
        search_request = SearchRequest()
        indexers = search.pick_indexers(search_request)
        self.assertEqual(1, len(indexers))
        dbsearch = Search(internal=True, time=arrow.utcnow().datetime)
        dbsearch.save()
        indexer = Indexer().get(name="newznab1")
        
        # Two accesses, one and 12 hours ago: still under the limit of 3
        IndexerApiAccess(indexer=indexer, search=dbsearch, time=arrow.utcnow().replace(hours=-1).datetime, type="search", url="", response_successful=True).save()
        IndexerApiAccess(indexer=indexer, search=dbsearch, time=arrow.utcnow().replace(hours=-12).datetime, type="search", url="", response_successful=True).save()
        self.assertEqual(1, len(search.pick_indexers(search_request)))

        # Another one 20 hours ago, so the limit should be reached and the indexer skipped
        IndexerApiAccess(indexer=indexer, search=dbsearch, time=arrow.utcnow().replace(hours=-20).datetime, type="search", url="", response_successful=True).save()
        self.assertEqual(0, len(search.pick_indexers(search_request)))
Code example #9
0
File: test_Search.py — Project: gspu/nzbhydra
    def testIndexersApiLimits(self):
        """Verify an indexer is no longer picked once its API hit limit is reached."""

        config.settings.searching.generate_queries = []
        # Limit the indexer to 3 API hits; no explicit reset time
        self.newznab1.hitLimit = 3
        self.newznab1.hitLimitResetTime = None
        config.settings.indexers = [self.newznab1]
        read_indexers_from_config()
        search_request = SearchRequest()
        indexers = search.pick_indexers(search_request)
        self.assertEqual(1, len(indexers))
        dbsearch = Search(internal=True, time=arrow.utcnow().datetime)
        dbsearch.save()
        indexer = Indexer().get(name="newznab1")
        
        # Two accesses, one and 12 hours ago: still under the limit of 3
        IndexerApiAccess(indexer=indexer, search=dbsearch, time=arrow.utcnow().replace(hours=-1).datetime, type="search", url="", response_successful=True).save()
        IndexerApiAccess(indexer=indexer, search=dbsearch, time=arrow.utcnow().replace(hours=-12).datetime, type="search", url="", response_successful=True).save()
        self.assertEqual(1, len(search.pick_indexers(search_request)))

        # Another one 20 hours ago, so the limit should be reached and the indexer skipped
        IndexerApiAccess(indexer=indexer, search=dbsearch, time=arrow.utcnow().replace(hours=-20).datetime, type="search", url="", response_successful=True).save()
        self.assertEqual(0, len(search.pick_indexers(search_request)))
Code example #10
0
    def setUp(self):
        """Reset the database and config, then register two newznab test indexers."""
        set_and_drop()
        config.settings = Bunch.fromDict(config.initialConfig)
        self.app = web.app.test_client()
        config.settings.main.apikey = None

        # The two indexers share all default settings; only name/host/apikey
        # and the supported search ids/types differ.
        self.newznab1 = self._buildNewznabSetting(
            "newznab1", "https://indexer1.com", "apikeyindexer1.com",
            search_ids=["imdbid", "rid", "tvdbid"],
            searchTypes=["book", "tvsearch", "movie"])
        self.newznab2 = self._buildNewznabSetting(
            "newznab2", "https://indexer2.com", "apikeyindexer2.com",
            search_ids=["rid", "tvdbid"],
            searchTypes=["tvsearch", "movie"])

        config.settings.indexers = [self.newznab1, self.newznab2]

        read_indexers_from_config()

    @staticmethod
    def _buildNewznabSetting(name, host, apikey, search_ids, searchTypes):
        """Return a Bunch with the default newznab indexer settings used by these tests."""
        setting = Bunch()
        setting.enabled = True
        setting.name = name
        setting.host = host
        setting.apikey = apikey
        setting.timeout = None
        setting.hitLimit = None
        setting.backend = ""
        setting.categories = []
        setting.score = 0
        setting.type = "newznab"
        setting.accessType = "both"
        setting.search_ids = search_ids
        setting.searchTypes = searchTypes
        return setting
Code example #11
0
    def prepareSearchMocks(self, requestsMock, indexerCount=2, resultsPerIndexers=1, newznabItems=None, title="newznab%dresult%d.title", categories=None, skip=None):
        """Register mocked newznab search responses for *indexerCount* indexers.

        :param requestsMock: requests_mock instance used to register the URIs
        :param indexerCount: number of indexers to prepare
        :param resultsPerIndexers: results generated per indexer (ignored when newznabItems is given)
        :param newznabItems: optional pre-built items, one list per indexer
        :param title: title template filled with (indexer index, result index)
        :param categories: categories assigned to every generated item
        :param skip: List of tuples with indexer and result index which will not be returned
        :return: all generated items, sorted by title
        """
        if skip is None:
            skip = []
        allNewznabItems = []
        self.response_callbacks = []
        self.prepareIndexers(indexerCount)

        for i in range(1, indexerCount + 1):
            # Prepare search results, unless the caller supplied them
            if newznabItems is not None:
                indexerNewznabItems = newznabItems[i - 1]
            else:
                indexerNewznabItems = [mockbuilder.buildNewznabItem(title % (i, j), "newznab%dresult%d.guid" % (i, j), " http://newznab%dresult%d.link" % (i, j), arrow.get(0).format("ddd, DD MMM YYYY HH:mm:ss Z"), "newznab%dresult%d.description" % (i, j), 1000, "newznab%d" % i, categories)
                                       for j in range(1, resultsPerIndexers + 1)
                                       if (i, j) not in skip]  # idiomatic membership test
            allNewznabItems.extend(indexerNewznabItems)
            xml = mockbuilder.buildNewznabResponse("newznab%dResponse" % i, indexerNewznabItems, 0, len(indexerNewznabItems))

            # Any URL containing "newznab<i>" returns this indexer's XML
            requestsMock.register_uri('GET', re.compile(r'.*newznab%d.*' % i), text=xml)
        read_indexers_from_config()

        allNewznabItems = sorted(allNewznabItems, key=lambda x: x.title)
        return allNewznabItems
Code example #12
0
    def prepareSearchMocks(self, rsps, indexerCount=2, resultsPerIndexers=1, newznabItems=None, title="newznab%dresult%d.title", sleep=0):
        """Register delayed mock newznab responses answered via self.rsps_callback."""
        testData = []
        self.response_callbacks = []
        self.prepareIndexers(indexerCount)

        for idx in range(1, indexerCount + 1):
            # Use caller-supplied items when given, otherwise generate them
            if newznabItems is None:
                generatedItems = []
                for res in range(1, resultsPerIndexers + 1):
                    generatedItems.append(mockbuilder.buildNewznabItem(
                        title % (idx, res),
                        "newznab%dresult%d.guid" % (idx, res),
                        "newznab%dresult%d.link" % (idx, res),
                        arrow.get(0).format("ddd, DD MMM YYYY HH:mm:ss Z"),
                        "newznab%dresult%d.description" % (idx, res),
                        1000,
                        "newznab%d" % idx,
                        None))
                indexerNewznabItems = generatedItems
            else:
                indexerNewznabItems = newznabItems[idx - 1]
            xml = mockbuilder.buildNewznabResponse("newznab%dResponse" % idx, indexerNewznabItems, 0, len(indexerNewznabItems))
            self.response_callbacks.append(('newznab%d' % idx, randint(0, sleep), xml))

            # Route every URL containing the indexer name through the callback
            rsps.add_callback(responses.GET, re.compile(r'.*newznab%d.*' % idx),
                              callback=self.rsps_callback,
                              content_type='application/x-html')
        read_indexers_from_config()

        return testData
Code example #13
0
File: nzbhydra.py — Project: jedvalley123/nzbhydra
def run(arguments):
    """Load config and database, apply proxy settings and start the web app.

    :param arguments: parsed CLI arguments (config, database, daemon, pidfile,
        logfile, quiet, host, port, socksproxy, nobrowser, restarted)
    """
    nzbhydra.configFile = settings_file = arguments.config
    nzbhydra.databaseFile = database_file = arguments.database

    logger.notice("Loading settings from {}".format(settings_file))
    try:
        config.load(settings_file)
        config.save(settings_file)  # Write any new settings back to the file
        log.setup_custom_logger(arguments.logfile, arguments.quiet)
    except Exception:
        print("An error occured during migrating the old config. Sorry about that...: ")
        traceback.print_exc(file=sys.stdout)
        print("Trying to log messages from migration...")
        config.logLogMessages()
        os._exit(-5)

    try:
        logger.info("Started")

        if arguments.daemon:
            logger.info("Daemonizing...")
            daemonize(arguments.pidfile)

        config.logLogMessages()
        logger.info("Loading database file %s" % database_file)
        if not os.path.exists(database_file):
            database.init_db(database_file)
        else:
            database.update_db(database_file)
        database.db.init(database_file)
        indexers.read_indexers_from_config()

        if config.settings.main.debug:
            logger.info("Debug mode enabled")

        # Clean up any "old" files from last update
        oldfiles = glob.glob("*.updated")
        if len(oldfiles) > 0:
            logger.info("Deleting %d old files remaining from update" % len(oldfiles))
            for filename in oldfiles:
                try:
                    if "hydratray" not in filename:
                        logger.debug("Deleting %s" % filename)
                        os.remove(filename)
                    else:
                        logger.debug("Not deleting %s because it's still running. TrayHelper will restart itself" % filename)
                except Exception:
                    # warning() instead of the deprecated warn() alias
                    logger.warning("Unable to delete old file %s. Please delete manually" % filename)

        # CLI arguments take precedence over configured values
        host = config.settings.main.host if arguments.host is None else arguments.host
        port = config.settings.main.port if arguments.port is None else arguments.port
        socksproxy = config.settings.main.socksProxy if arguments.socksproxy is None else arguments.socksproxy

        # SOCKS proxy settings
        if socksproxy:
            try:
                sockshost, socksport = socksproxy.split(':')  # FWIW: this won't work for literal IPv6 addresses
            except ValueError:  # value did not contain exactly one ':'; was a bare except
                logger.error('Incorrect SOCKS proxy settings "%s"' % socksproxy)
                sockshost, socksport = [None, None]
            if sockshost:
                logger.info("Using SOCKS proxy at host %s and port %s" % (sockshost, socksport))
                publicip = socks_proxy.setSOCKSproxy(sockshost, int(socksport))
                if publicip:
                    logger.info("Public IP address via SOCKS proxy: %s" % publicip)
                else:
                    logger.error("Could not get public IP address. Is the proxy working?")

        logger.notice("Starting web app on %s:%d" % (host, port))
        if config.settings.main.externalUrl is not None and config.settings.main.externalUrl != "":
            f = furl(config.settings.main.externalUrl)
        else:
            f = furl()
            f.host = "127.0.0.1"
            f.port = port
            f.scheme = "https" if config.settings.main.ssl else "http"
        if not arguments.nobrowser and config.settings.main.startupBrowser:
            if arguments.restarted:
                logger.info("Not opening the browser after restart")
            else:
                logger.info("Opening browser to %s" % f.url)
                webbrowser.open_new(f.url)
        else:
            logger.notice("Go to %s for the frontend" % f.url)

        web.run(host, port, basepath)
    except Exception:
        logger.exception("Fatal error occurred")
Code example #14
0
File: test_Search.py — Project: petrichor8/nzbhydra
    def test_pick_indexers(self):
        """Verify indexer selection by query, id support, category and access type."""
        config.settings.searching.generate_queries = []
        config.settings.indexers.extend([self.newznab1, self.newznab2])
        getIndexerSettingByName("womble").enabled = True
        getIndexerSettingByName("womble").accessType = "both"
        getIndexerSettingByName("nzbclub").enabled = True
        getIndexerSettingByName("nzbclub").accessType = "both"
        read_indexers_from_config()
        search_request = SearchRequest()

        indexers = search.pick_indexers(search_request)
        self.assertEqual(3, len(indexers))

        # Indexers with tv search and which support queries (actually searching for particular releases)
        search_request.query = "bla"
        indexers = search.pick_indexers(search_request)
        self.assertEqual(3, len(indexers))

        # Indexers with tv search, including those that only provide a list of latest releases (womble) but excluding the one that needs a query (nzbclub)
        search_request.query = None
        indexers = search.pick_indexers(search_request)
        self.assertEqual(3, len(indexers))

        # Only the two newznab indexers support tvdbid lookups
        search_request.identifier_key = "tvdbid"
        indexers = search.pick_indexers(search_request)
        self.assertEqual(2, len(indexers))
        self.assertEqual("newznab1", indexers[0].name)
        self.assertEqual("newznab2", indexers[1].name)

        # Only newznab1 supports imdbid + the movies category
        search_request.identifier_key = "imdbid"
        search_request.category = getCategoryByName("movies")
        indexers = search.pick_indexers(search_request)
        self.assertEqual(1, len(indexers))
        self.assertEqual("newznab1", indexers[0].name)

        # With query generation NZBClub should also be returned
        infos.title_from_id = mock
        config.settings.searching.generate_queries = ["internal"]
        search_request.identifier_key = "tvdbid"
        search_request.query = None
        search_request.category = None
        indexers = search.pick_indexers(search_request)
        self.assertEqual(3, len(indexers))
        self.assertEqual("nzbclub", indexers[0].name)
        self.assertEqual("newznab1", indexers[1].name)
        self.assertEqual("newznab2", indexers[2].name)

        # Test picking depending on internal, external, both
        getIndexerSettingByName("womble").enabled = False
        getIndexerSettingByName("nzbclub").enabled = False

        # accessType "both": picked for internal and external searches alike
        getIndexerSettingByName("newznab1").accessType = "both"
        search_request.internal = True
        indexers = search.pick_indexers(search_request)
        self.assertEqual(2, len(indexers))
        search_request.internal = False
        indexers = search.pick_indexers(search_request)
        self.assertEqual(2, len(indexers))

        # accessType "external": excluded from internal searches only
        config.settings.indexers = [self.newznab1, self.newznab2]
        getIndexerSettingByName("newznab1").accessType = "external"
        read_indexers_from_config()
        search_request.internal = True
        indexers = search.pick_indexers(search_request)
        self.assertEqual(1, len(indexers))
        search_request.internal = False
        indexers = search.pick_indexers(search_request)
        self.assertEqual(2, len(indexers))

        # accessType "internal": excluded from external searches only
        getIndexerSettingByName("newznab1").accessType = "internal"
        read_indexers_from_config()
        search_request.internal = True
        indexers = search.pick_indexers(search_request)
        self.assertEqual(2, len(indexers))
        search_request.internal = False
        indexers = search.pick_indexers(search_request)
        self.assertEqual(1, len(indexers))
Code example #15
0
File: nzbhydra.py — Project: theotherp/nzbhydra
def run(arguments):
    """Resolve paths, load config and database, optionally daemonize and start the web app.

    :param arguments: parsed CLI arguments (config, database, daemon, pidfile,
        logfile, quiet, clearloganddb, host, port, socksproxy, nobrowser, restarted)
    """
    # Relative paths are resolved against the application base path
    arguments.config = arguments.config if os.path.isabs(arguments.config) else os.path.join(nzbhydra.getBasePath(), arguments.config)
    arguments.database = arguments.database if os.path.isabs(arguments.database) else os.path.join(nzbhydra.getBasePath(), arguments.database)
    nzbhydra.configFile = settings_file = arguments.config
    nzbhydra.databaseFile = database_file = arguments.database

    logger.notice("Loading settings from {}".format(settings_file))
    try:
        config.load(settings_file)
        config.save(settings_file)  # Write any new settings back to the file
        log.setup_custom_logger(arguments.logfile, arguments.quiet)
    except Exception:
        print("An error occured during migrating the old config. Sorry about that...: ")
        traceback.print_exc(file=sys.stdout)
        print("Trying to log messages from migration...")
        config.logLogMessages()
        os._exit(-5)

    try:
        logger.info("Started")

        if arguments.daemon:
            logger.info("Daemonizing...")
            daemonize(arguments.pidfile)

        config.logLogMessages()

        if arguments.clearloganddb:
            logger.warning("Deleting log file and database now as requested")
            try:
                logger.warning("Deleting database file %s" % database_file)
                os.unlink(database_file)
            except Exception as e:
                # Message fixed: this branch failed on the database, not the log file
                logger.error("Unable to delete database file: %s" % e)

            try:
                # The file handler is the second handler when a console handler is also registered
                handler = logger.handlers[1] if len(logger.handlers) == 2 else logger.handlers[0]
                filename = handler.stream.name

                if filename and os.path.exists(filename):
                    logger.warning("Deleting file %s" % filename)
                handler.flush()
                handler.close()
                logger.removeHandler(handler)
                os.unlink(filename)
                logger.addHandler(handler)
            except Exception as e:
                print("Unable to close or delete log file: %s" % e)

        try:
            import _sqlite3
            logger.debug("SQLite3 version: %s" % _sqlite3.sqlite_version)
        except Exception:  # best-effort diagnostics only; was a bare except
            logger.error("Unable to log SQLite version")

        logger.info("Loading database file %s" % database_file)
        if not os.path.exists(database_file):
            database.init_db(database_file)
        else:
            database.update_db(database_file)
        logger.info("Starting db")

        indexers.read_indexers_from_config()

        if config.settings.main.debug:
            logger.info("Debug mode enabled")

        # Clean up any "old" files from last update
        oldfiles = glob.glob("*.updated")
        if len(oldfiles) > 0:
            logger.info("Deleting %d old files remaining from update" % len(oldfiles))
            for filename in oldfiles:
                try:
                    if "hydratray" not in filename:
                        logger.debug("Deleting %s" % filename)
                        os.remove(filename)
                    else:
                        logger.debug("Not deleting %s because it's still running. TrayHelper will restart itself" % filename)
                except Exception:
                    # warning() instead of the deprecated warn() alias
                    logger.warning("Unable to delete old file %s. Please delete manually" % filename)

        # CLI arguments take precedence over configured values
        host = config.settings.main.host if arguments.host is None else arguments.host
        port = config.settings.main.port if arguments.port is None else arguments.port

        socksproxy = config.settings.main.socksProxy if arguments.socksproxy is None else arguments.socksproxy
        if socksproxy:
            webaccess.set_proxies(socksproxy)
        elif config.settings.main.httpProxy:
            webaccess.set_proxies(config.settings.main.httpProxy, config.settings.main.httpsProxy)

        logger.notice("Starting web app on %s:%d" % (host, port))
        if config.settings.main.externalUrl is not None and config.settings.main.externalUrl != "":
            f = furl(config.settings.main.externalUrl)
        else:
            f = furl()
            f.host = "127.0.0.1" if config.settings.main.host == "0.0.0.0" else config.settings.main.host
            f.port = port
            f.scheme = "https" if config.settings.main.ssl else "http"
        if not arguments.nobrowser and config.settings.main.startupBrowser:
            if arguments.restarted:
                logger.info("Not opening the browser after restart")
            else:
                logger.info("Opening browser to %s" % f.url)
                webbrowser.open_new(f.url)
        else:
            logger.notice("Go to %s for the frontend" % f.url)

        web.run(host, port, basepath)
    except Exception:
        logger.exception("Fatal error occurred")
Code example #16
0
File: nzbhydra.py — Project: theotherp/nzbhydra
def run(arguments):
    """Start NZBHydra: load config and logging, init the database, then run the web app.

    ``arguments`` is the parsed CLI namespace; CLI values override the
    corresponding settings from the config file. Exits the process via
    ``os._exit(-5)`` if the config cannot be loaded/migrated.
    """
    # Resolve config/database paths relative to the installation base dir
    # unless absolute paths were given on the command line.
    arguments.config = arguments.config if os.path.isabs(
        arguments.config) else os.path.join(nzbhydra.getBasePath(),
                                            arguments.config)
    arguments.database = arguments.database if os.path.isabs(
        arguments.database) else os.path.join(nzbhydra.getBasePath(),
                                              arguments.database)
    nzbhydra.configFile = settings_file = arguments.config
    nzbhydra.databaseFile = database_file = arguments.database

    logger.notice("Loading settings from {}".format(settings_file))
    try:
        config.load(settings_file)
        config.save(settings_file)  # Write any new settings back to the file
        log.setup_custom_logger(arguments.logfile, arguments.quiet)
    except Exception:
        print(
            "An error occured during migrating the old config. Sorry about that...: "
        )
        traceback.print_exc(file=sys.stdout)
        print("Trying to log messages from migration...")
        config.logLogMessages()
        # Config migration failed; nothing sensible left to do but bail out hard.
        os._exit(-5)

    try:
        logger.info("Started")

        if arguments.daemon:
            logger.info("Daemonizing...")
            daemonize(arguments.pidfile)

        config.logLogMessages()

        if arguments.clearloganddb:
            logger.warning("Deleting log file and database now as requested")
            try:
                logger.warning("Deleting database file %s" % database_file)
                os.unlink(database_file)
            except Exception as e:
                logger.error("Unable to close or delete log file: %s" % e)

            try:
                # The file handler is either the only handler or the second
                # one (after the console handler).
                handler = logger.handlers[1] if len(
                    logger.handlers) == 2 else logger.handlers[0]
                filename = handler.stream.name

                if filename and os.path.exists(filename):
                    logger.warning("Deleting file %s" % filename)
                handler.flush()
                handler.close()
                # Detach the handler while its file is removed, then reattach
                # it so logging continues into a fresh file.
                logger.removeHandler(handler)
                os.unlink(filename)
                logger.addHandler(handler)
            except Exception as e:
                print("Unable to close or delete log file: %s" % e)

        try:
            import _sqlite3
            logger.debug("SQLite3 version: %s" % _sqlite3.sqlite_version)
        except Exception:
            logger.error("Unable to log SQLite version")

        logger.info("Loading database file %s" % database_file)
        # Create the schema on first run, otherwise apply pending migrations.
        if not os.path.exists(database_file):
            database.init_db(database_file)
        else:
            database.update_db(database_file)
        logger.info("Starting db")

        indexers.read_indexers_from_config()

        if config.settings.main.debug:
            logger.info("Debug mode enabled")

        # Clean up any "old" files from last update
        oldfiles = glob.glob("*.updated")
        if len(oldfiles) > 0:
            logger.info("Deleting %d old files remaining from update" %
                        len(oldfiles))
            for filename in oldfiles:
                try:
                    if "hydratray" not in filename:
                        logger.debug("Deleting %s" % filename)
                        os.remove(filename)
                    else:
                        logger.debug(
                            "Not deleting %s because it's still running. TrayHelper will restart itself"
                            % filename)
                except Exception:
                    logger.warning(
                        "Unable to delete old file %s. Please delete manually"
                        % filename)

        # CLI arguments take precedence over the stored settings.
        host = config.settings.main.host if arguments.host is None else arguments.host
        port = config.settings.main.port if arguments.port is None else arguments.port
        nzbhydra.urlBase = config.settings.main.urlBase if arguments.urlbase is None else arguments.urlbase

        socksproxy = config.settings.main.socksProxy if arguments.socksproxy is None else arguments.socksproxy
        if socksproxy:
            webaccess.set_proxies(socksproxy)
        elif config.settings.main.httpProxy:
            webaccess.set_proxies(config.settings.main.httpProxy,
                                  config.settings.main.httpsProxy)

        # Download a very small file from github to get a good estimate how many instances of hydra are running. Only executed once per installation (well, per settings.cfg instance)
        if not config.settings.main.downloadCounterExecuted and not config.settings.main.isFirstStart:
            try:
                webaccess.get(
                    "https://github.com/theotherp/apitests/releases/download/v5.0.0/downloadcounter2.zip"
                )
            except Exception:
                # Best effort only; never let the counter break startup.
                pass
            config.settings.main.downloadCounterExecuted = True
            config.save()

        # Build the URL that is shown to (and possibly opened for) the user.
        if config.settings.main.externalUrl is not None and config.settings.main.externalUrl != "":
            f = furl(config.settings.main.externalUrl)
            logger.notice("Starting web app on %s:%d" % (host, port))
        else:
            f = furl()

            # A wildcard bind address is not browsable; point the user at the
            # loopback address instead. Literal IPv6 addresses must be
            # bracketed in URLs.
            if config.settings.main.host == "0.0.0.0":
                f.host = "127.0.0.1"
            elif config.settings.main.host == "::":
                f.host = "[::1]"
            elif ":" in config.settings.main.host:
                f.host = "[%s]" % config.settings.main.host
            else:
                f.host = config.settings.main.host
            f.port = port
            f.scheme = "https" if config.settings.main.ssl else "http"
            if nzbhydra.urlBase is not None:
                f.path = nzbhydra.urlBase + "/"
            logger.notice("Starting web app on %s:%d" % (f.host, port))
        if not arguments.nobrowser and config.settings.main.startupBrowser:
            if arguments.restarted:
                logger.info("Not opening the browser after restart")
            else:
                logger.info("Opening browser to %s" % f.url)
                webbrowser.open_new(f.url)
        else:
            logger.notice("Go to %s for the frontend" % f.url)
        if config.settings.main.isFirstStart:
            config.settings.main.isFirstStart = False
            config.save()
        web.run(host, port, basepath)
    except Exception:
        logger.exception("Fatal error occurred")
コード例 #17
0
def run(arguments):
    """Load settings and the database, then start the web frontend.

    Uses the config and database paths from ``arguments`` as-is and binds
    the module-global ``logger`` once logging is configured.
    """
    global logger

    settings_file = arguments.config
    database_file = arguments.database

    print("Loading settings from %s" % settings_file)
    config.load(settings_file)
    config.save(settings_file)  # Write any new settings back to the file
    logger = log.setup_custom_logger('root', arguments.logfile)
    try:
        logger.info("Started")

        if arguments.daemon:
            logger.info("Daemonizing...")
            daemonize(arguments.pidfile)

        config.logLogMessages()
        logger.info("Loading database file %s" % database_file)
        # Create the schema on first run, otherwise apply pending migrations.
        if not os.path.exists(database_file):
            database.init_db(database_file)
        else:
            database.update_db(database_file)
        database.db.init(database_file)
        indexers.read_indexers_from_config()

        if config.settings.main.debug:
            logger.info("Debug mode enabled")

        # Clean up any "old" files from last update
        oldfiles = glob.glob("*.updated")
        if len(oldfiles) > 0:
            logger.info("Deleting %d old files remaining from update" % len(oldfiles))
            for filename in oldfiles:
                try:
                    if "hydratray" not in filename:
                        logger.debug("Deleting %s" % filename)
                        os.remove(filename)
                    else:
                        logger.debug("Not deleting %s because it's still running. TrayHelper will restart itself" % filename)
                except Exception:
                    logger.warning("Unable to delete old file %s. Please delete manually" % filename)

        # CLI arguments take precedence over the stored settings.
        host = config.settings.main.host if arguments.host is None else arguments.host
        port = config.settings.main.port if arguments.port is None else arguments.port

        logger.info("Starting web app on %s:%d" % (host, port))
        # Build the URL that is shown to (and possibly opened for) the user.
        if config.settings.main.externalUrl is not None and config.settings.main.externalUrl != "":
            f = furl(config.settings.main.externalUrl)
        else:
            f = furl()
            f.host = "127.0.0.1"
            f.port = port
            f.scheme = "https" if config.settings.main.ssl else "http"
        if not arguments.nobrowser and config.settings.main.startupBrowser:
            if arguments.restarted:
                logger.info("Not opening the browser after restart")
            else:
                logger.info("Opening browser to %s" % f.url)
                webbrowser.open_new(f.url)
        else:
            logger.info("Go to %s for the frontend" % f.url)

        web.run(host, port, basepath)
    except Exception:
        logger.exception("Fatal error occurred")
コード例 #18
0
ファイル: nzbhydra.py プロジェクト: haan2787/nzbhydra
def run(arguments):
    """Load settings and the database, optionally set up a SOCKS proxy, then start the web frontend.

    ``arguments`` is the parsed CLI namespace; CLI values override the
    corresponding settings from the config file. Exits the process via
    ``os._exit(-5)`` if the config cannot be loaded/migrated.
    """
    nzbhydra.configFile = settings_file = arguments.config
    nzbhydra.databaseFile = database_file = arguments.database

    logger.notice("Loading settings from {}".format(settings_file))
    try:
        config.load(settings_file)
        config.save(settings_file)  # Write any new settings back to the file
        log.setup_custom_logger(arguments.logfile, arguments.quiet)
    except Exception:
        print(
            "An error occured during migrating the old config. Sorry about that...: "
        )
        traceback.print_exc(file=sys.stdout)
        print("Trying to log messages from migration...")
        config.logLogMessages()
        # Config migration failed; nothing sensible left to do but bail out hard.
        os._exit(-5)

    try:
        logger.info("Started")

        if arguments.daemon:
            logger.info("Daemonizing...")
            daemonize(arguments.pidfile)

        config.logLogMessages()
        logger.info("Loading database file %s" % database_file)
        # Create the schema on first run, otherwise apply pending migrations.
        if not os.path.exists(database_file):
            database.init_db(database_file)
        else:
            database.update_db(database_file)
        database.db.init(database_file)
        indexers.read_indexers_from_config()

        if config.settings.main.debug:
            logger.info("Debug mode enabled")

        # Clean up any "old" files from last update
        oldfiles = glob.glob("*.updated")
        if len(oldfiles) > 0:
            logger.info("Deleting %d old files remaining from update" %
                        len(oldfiles))
            for filename in oldfiles:
                try:
                    if "hydratray" not in filename:
                        logger.debug("Deleting %s" % filename)
                        os.remove(filename)
                    else:
                        logger.debug(
                            "Not deleting %s because it's still running. TrayHelper will restart itself"
                            % filename)
                except Exception:
                    logger.warning(
                        "Unable to delete old file %s. Please delete manually"
                        % filename)

        # CLI arguments take precedence over the stored settings.
        host = config.settings.main.host if arguments.host is None else arguments.host
        port = config.settings.main.port if arguments.port is None else arguments.port
        socksproxy = config.settings.main.socksProxy if arguments.socksproxy is None else arguments.socksproxy

        # SOCKS proxy settings
        if socksproxy:
            try:
                sockshost, socksport = socksproxy.split(
                    ':')  # FWIW: this won't work for literal IPv6 addresses
            except ValueError:
                # split() did not yield exactly two parts -> malformed "host:port"
                logger.error('Incorrect SOCKS proxy settings "%s"' %
                             socksproxy)
                sockshost, socksport = [None, None]
            if sockshost:
                logger.info("Using SOCKS proxy at host %s and port %s" %
                            (sockshost, socksport))
                publicip = socks_proxy.setSOCKSproxy(sockshost, int(socksport))
                if publicip:
                    logger.info("Public IP address via SOCKS proxy: %s" %
                                publicip)
                else:
                    logger.error(
                        "Could not get public IP address. Is the proxy working?"
                    )

        logger.notice("Starting web app on %s:%d" % (host, port))
        # Build the URL that is shown to (and possibly opened for) the user.
        if config.settings.main.externalUrl is not None and config.settings.main.externalUrl != "":
            f = furl(config.settings.main.externalUrl)
        else:
            f = furl()
            f.host = "127.0.0.1"
            f.port = port
            f.scheme = "https" if config.settings.main.ssl else "http"
        if not arguments.nobrowser and config.settings.main.startupBrowser:
            if arguments.restarted:
                logger.info("Not opening the browser after restart")
            else:
                logger.info("Opening browser to %s" % f.url)
                webbrowser.open_new(f.url)
        else:
            logger.notice("Go to %s for the frontend" % f.url)

        web.run(host, port, basepath)
    except Exception:
        logger.exception("Fatal error occurred")
コード例 #19
0
ファイル: test_Search.py プロジェクト: gspu/nzbhydra
    def test_pick_indexers(self):
        """Verify search.pick_indexers() selects indexers by query support, ID support, category and access type."""
        # Baseline: no query generation; two newznab indexers plus womble and
        # nzbclub enabled, all usable internally and externally.
        config.settings.searching.generate_queries = []
        config.settings.indexers.extend([self.newznab1, self.newznab2])
        getIndexerSettingByName("womble").enabled = True
        getIndexerSettingByName("womble").accessType = "both"
        getIndexerSettingByName("nzbclub").enabled = True
        getIndexerSettingByName("nzbclub").accessType = "both"
        read_indexers_from_config()
        search_request = SearchRequest()

        indexers = search.pick_indexers(search_request)
        self.assertEqual(3, len(indexers))

        # Indexers with tv search and which support queries (actually searching for particular releases)
        search_request.query = "bla"
        indexers = search.pick_indexers(search_request)
        self.assertEqual(3, len(indexers))

        # Indexers with tv search, including those that only provide a list of latest releases (womble) but excluding the one that needs a query (nzbclub)
        search_request.query = None
        indexers = search.pick_indexers(search_request)
        self.assertEqual(3, len(indexers))

        # Only the two newznab indexers advertise tvdbid support (see setUp).
        search_request.identifier_key = "tvdbid"
        indexers = search.pick_indexers(search_request)
        self.assertEqual(2, len(indexers))
        self.assertEqual("newznab1", indexers[0].name)
        self.assertEqual("newznab2", indexers[1].name)

        # Only newznab1 supports imdbid + the movie search type.
        search_request.identifier_key = "imdbid"
        search_request.category = getCategoryByName("movies")
        indexers = search.pick_indexers(search_request)
        self.assertEqual(1, len(indexers))
        self.assertEqual("newznab1", indexers[0].name)

        # WIth query generation NZBClub should also be returned
        infos.title_from_id = mock
        config.settings.searching.generate_queries = ["internal"]
        search_request.identifier_key = "tvdbid"
        search_request.query = None
        search_request.category = None
        indexers = search.pick_indexers(search_request)
        self.assertEqual(3, len(indexers))
        self.assertEqual("nzbclub", indexers[0].name)
        self.assertEqual("newznab1", indexers[1].name)
        self.assertEqual("newznab2", indexers[2].name)

        # Test picking depending on internal, external, both
        getIndexerSettingByName("womble").enabled = False
        getIndexerSettingByName("nzbclub").enabled = False

        # accessType "both": picked for internal and external searches alike.
        getIndexerSettingByName("newznab1").accessType = "both"
        search_request.internal = True
        indexers = search.pick_indexers(search_request)
        self.assertEqual(2, len(indexers))
        search_request.internal = False
        indexers = search.pick_indexers(search_request)
        self.assertEqual(2, len(indexers))

        # accessType "external": excluded from internal searches only.
        config.settings.indexers = [self.newznab1, self.newznab2]
        getIndexerSettingByName("newznab1").accessType = "external"
        read_indexers_from_config()
        search_request.internal = True
        indexers = search.pick_indexers(search_request)
        self.assertEqual(1, len(indexers))
        search_request.internal = False
        indexers = search.pick_indexers(search_request)
        self.assertEqual(2, len(indexers))

        # accessType "internal": excluded from external (API) searches only.
        getIndexerSettingByName("newznab1").accessType = "internal"
        read_indexers_from_config()
        search_request.internal = True
        indexers = search.pick_indexers(search_request)
        self.assertEqual(2, len(indexers))
        search_request.internal = False
        indexers = search.pick_indexers(search_request)
        self.assertEqual(1, len(indexers))