Code example #1
0
class TestChannelRss(BaseTestChannel):
    """Tests for the ChannelRssParser component."""

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def setUp(self, annotate=True, autoload_discovery=True):
        """
        Create and initialize the ChannelRssParser instance under test.
        """
        yield super(TestChannelRss, self).setUp(annotate=annotate)
        parser = ChannelRssParser(self.fake_session, self.fake_channel_community, 'a')
        parser.initialize()
        self.channel_rss = parser

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def tearDown(self, annotate=True):
        """Shut the parser down (if still running) before the base teardown."""
        if self.channel_rss.running:
            self.channel_rss.shutdown()
        yield super(TestChannelRss, self).tearDown(annotate=annotate)

    def test_task_scrape_no_stop(self):
        """A scrape reschedules the rss_scrape task while not stopping."""
        parser = self.channel_rss
        parser.cancel_all_pending_tasks()
        parser._task_scrape()
        self.assertTrue(parser.is_pending_task_active("rss_scrape"))

    def test_task_scrape_stop(self):
        """A scrape does not reschedule itself once told to stop."""
        parser = self.channel_rss
        parser.cancel_all_pending_tasks()
        parser._to_stop = True
        parser._task_scrape()
        self.assertFalse(parser.is_pending_task_active("rss_scrape"))

    def test_initialize(self):
        """Initialization registers the periodic rss_scrape task."""
        self.assertTrue(self.channel_rss.is_pending_task_active("rss_scrape"))

    def test_shutdown(self):
        """After shutdown the cache file exists and the task is cancelled."""
        cache_path = self.channel_rss._url_cache._file_path
        self.channel_rss._url_cache.add('a')
        self.channel_rss.shutdown()
        self.assertTrue(os.path.exists(cache_path))
        self.assertFalse(self.channel_rss.is_pending_task_active("rss_scrape"))

    @deferred(timeout=10)
    def test_parse_rss_feed(self):
        """Parsing a local rss file returns a DeferredList."""
        prepare_xml_rss(self.session_base_dir, 'test_rss.xml')
        base = self.session_base_dir
        self.channel_rss.rss_url = os.path.join(base, 'test_rss.xml')
        self.channel_rss._url_cache = SimpleCache(os.path.join(base, 'cache.txt'))
        dl = self.channel_rss.parse_feed()
        self.assertIsInstance(dl, DeferredList)
        return dl

    def test_parse_feed_stopped(self):
        """parse_feed returns None when the parser has been told to stop."""
        prepare_xml_rss(self.session_base_dir, 'test_rss.xml')
        base = self.session_base_dir
        self.channel_rss.rss_url = os.path.join(base, 'test_rss.xml')
        self.channel_rss._url_cache = SimpleCache(os.path.join(base, 'cache.txt'))
        self.channel_rss._to_stop = True
        self.assertIsNone(self.channel_rss.parse_feed())
Code example #2
0
File: channel.py  Project: wesavetheworld/tribler
        def _create_rss_feed(channel_date):
            # The channel now exists on the network: remember that and attach
            # a live parser to every rss feed URL registered so far.
            self._is_created = True

            self._logger.debug(u"channel %s %s created", self.name, hexlify(self._channel_community.cid))
            for url in self._rss_feed_dict:
                # Parsers are created exactly once, so every slot is still empty.
                assert self._rss_feed_dict[url] is None
                parser = ChannelRssParser(self._session, self._channel_community, url)
                parser.initialize()
                self._rss_feed_dict[url] = parser
Code example #3
0
File: channel.py  Project: synctext/tribler
        def _create_rss_feed(channel_date):
            # Observer callback fired once the channel has been created.
            # `channel_date` is supplied by the observer machinery but unused
            # here — NOTE(review): confirm it can be ignored safely.
            self._is_created = True

            # create rss feed parsers
            self._logger.debug(u"channel %s %s created", self.name, hexlify(self._channel_community.cid))
            for rss_feed_url in self._rss_feed_dict:
                # Slots must still be empty: parsers are attached only once.
                assert self._rss_feed_dict[rss_feed_url] is None
                rss_parser = ChannelRssParser(self._session, self._channel_community, rss_feed_url)
                rss_parser.initialize()
                self._rss_feed_dict[rss_feed_url] = rss_parser
Code example #4
0
File: channel.py  Project: wesavetheworld/tribler
    def create_rss_feed(self, rss_feed_url):
        """
        Register a new rss feed URL on this channel.

        Duplicate URLs are ignored with a warning.  If the channel has not
        been created yet the URL is only recorded (a parser is attached later
        when the channel-created event fires); otherwise a ChannelRssParser is
        created and initialized immediately.  The full URL list is flushed to
        the channel's json file in either case.

        :param rss_feed_url: the rss feed URL to register.
        """
        if rss_feed_url in self._rss_feed_dict:
            # warning() instead of the deprecated warn() alias.
            self._logger.warning(u"skip existing rss feed: %s", repr(rss_feed_url))
            return

        if not self._is_created:
            # Channel does not exist yet: remember the URL, parser comes later.
            self._rss_feed_dict[rss_feed_url] = None
        else:
            # Channel exists: attach a live parser right away.
            rss_parser = ChannelRssParser(self._session, self._channel_community, rss_feed_url)
            rss_parser.initialize()
            self._rss_feed_dict[rss_feed_url] = rss_parser

        # Flush the URL list to the json file so it survives restarts.
        # list() over the dict replaces the Python-2-only iterkeys() idiom.
        with codecs.open(self._rss_file_path, 'wb', encoding='utf8') as f:
            json.dump(list(self._rss_feed_dict), f)
Code example #5
0
File: channel.py  Project: synctext/tribler
    def create_rss_feed(self, rss_feed_url):
        """
        Register a new rss feed URL on this channel.

        Duplicate URLs are ignored with a warning.  If the channel has not
        been created yet only the URL is recorded (a parser is attached later
        when the channel-created event fires); otherwise a ChannelRssParser is
        created and initialized immediately.  The complete URL list is then
        flushed to the channel's json file.
        """
        if rss_feed_url in self._rss_feed_dict:
            # NOTE(review): Logger.warn is a deprecated alias of warning().
            self._logger.warn(u"skip existing rss feed: %s", repr(rss_feed_url))
            return

        if not self._is_created:
            # append the rss url if the channel has not been created yet
            self._rss_feed_dict[rss_feed_url] = None
        else:
            # create an rss feed parser for this
            rss_parser = ChannelRssParser(self._session, self._channel_community, rss_feed_url)
            rss_parser.initialize()
            self._rss_feed_dict[rss_feed_url] = rss_parser

        # flush the rss_feed_url to json file
        # NOTE(review): iterkeys() is Python-2 only.
        with codecs.open(self._rss_file_path, 'wb', encoding='utf8') as f:
            rss_list = [rss_url for rss_url in self._rss_feed_dict.iterkeys()]
            json.dump(rss_list, f)
Code example #6
0
File: channel.py  Project: wesavetheworld/tribler
    def initialize(self):
        """
        Load persisted rss feed URLs and start parsing them.

        If the channel already exists a ChannelRssParser is created per URL
        right away; otherwise we subscribe to the channel-created event and
        defer parser creation until the channel exists.
        """
        # load existing rss_feeds
        if os.path.exists(self._rss_file_path):
            self._logger.debug(u"loading existing channel rss list from %s...", self._rss_file_path)

            # codecs.open already decodes the file to unicode, so json.load
            # needs no encoding argument (which Python 3 also rejects); this
            # matches the other copy of this method in the codebase.
            with codecs.open(self._rss_file_path, 'rb', encoding='utf8') as f:
                rss_list = json.load(f)
                for rss_url in rss_list:
                    self._rss_feed_dict[rss_url] = None

        if self._is_created:
            # create rss-parsers
            for rss_feed_url in self._rss_feed_dict:
                rss_parser = ChannelRssParser(self._session, self._channel_community, rss_feed_url)
                rss_parser.initialize()
                self._rss_feed_dict[rss_feed_url] = rss_parser
        else:
            # subscribe to the channel creation event
            self._session.add_observer(self._on_channel_created, SIGNAL_CHANNEL, [SIGNAL_ON_CREATED])
Code example #7
0
File: channel.py  Project: synctext/tribler
    def initialize(self):
        """
        Restore persisted rss feed URLs and begin parsing them.

        URLs are read back from the channel's json file if it exists.  When
        the channel is already created, a live parser is attached per URL;
        otherwise parser creation waits for the channel-created event.
        """
        # Restore any URL list that was flushed to disk earlier.
        if os.path.exists(self._rss_file_path):
            self._logger.debug(u"loading existing channel rss list from %s...", self._rss_file_path)

            with codecs.open(self._rss_file_path, 'rb', encoding='utf8') as rss_file:
                for url in json.load(rss_file):
                    self._rss_feed_dict[url] = None

        if not self._is_created:
            # Channel not created yet: defer parser creation to the event.
            self._session.add_observer(self._on_channel_created, SIGNAL_CHANNEL, [SIGNAL_ON_CREATED])
        else:
            # Channel already exists: attach a live parser to every URL.
            for url in self._rss_feed_dict:
                parser = ChannelRssParser(self._session, self._channel_community, url)
                parser.initialize()
                self._rss_feed_dict[url] = parser
Code example #8
0
class TestChannelRss(BaseTestChannel):
    """Tests for ChannelRssParser, backed by a local rss file server."""

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def setUp(self, annotate=True, autoload_discovery=True):
        """
        Setup the tests by creating the ChannelRssParser instance and initializing it.
        """
        yield super(TestChannelRss, self).setUp(annotate=annotate)
        self.channel_rss = ChannelRssParser(self.fake_session,
                                            self.fake_channel_community, 'a')
        self.channel_rss.initialize()

        # Setup a test rss file server
        # Copy tests' test_rss.xml into a fresh directory and serve it on a
        # random port so the scrape/parse tests can fetch it over HTTP.
        test_rss_file = os.path.join(TESTS_DATA_DIR, 'test_rss.xml')
        files_path = os.path.join(self.session_base_dir, 'files')
        os.mkdir(files_path)
        shutil.copyfile(test_rss_file, os.path.join(files_path,
                                                    'test_rss.xml'))
        self.file_server_port = get_random_port()
        self.setUpFileServer(self.file_server_port, files_path)

    @blocking_call_on_reactor_thread
    @inlineCallbacks
    def tearDown(self, annotate=True):
        # Shut the parser down (if still running) before the base teardown.
        # NOTE(review): shutdown() is not yielded here — confirm it has no
        # deferred result that should complete before teardown proceeds.
        if self.channel_rss.running:
            self.channel_rss.shutdown()

        yield super(TestChannelRss, self).tearDown(annotate=annotate)

    @deferred(timeout=10)
    def test_task_scrape_no_stop(self):
        """A scrape keeps the rss_scrape task pending while not stopping."""
        self.channel_rss.rss_url = 'http://localhost:%d/test_rss.xml' % self.file_server_port
        self.channel_rss.cancel_all_pending_tasks()
        test_deferred = self.channel_rss._task_scrape()
        self.assertTrue(self.channel_rss.is_pending_task_active("rss_scrape"))
        return test_deferred

    @deferred(timeout=10)
    def test_task_scrape_stop(self):
        """A scrape does not reschedule the rss_scrape task once stopping."""
        self.channel_rss.rss_url = 'http://localhost:%d/test_rss.xml' % self.file_server_port
        self.channel_rss.cancel_all_pending_tasks()
        self.channel_rss._to_stop = True
        test_deferred = self.channel_rss._task_scrape()
        self.assertFalse(self.channel_rss.is_pending_task_active("rss_scrape"))
        return test_deferred

    def test_initialize(self):
        """Initialization (done in setUp) registers the rss_scrape task."""
        self.assertTrue(self.channel_rss.is_pending_task_active("rss_scrape"))

    def test_shutdown(self):
        """After shutdown the cache file exists and the task is cancelled."""
        cache_path = self.channel_rss._url_cache._file_path
        self.channel_rss._url_cache.add('a')
        self.channel_rss.shutdown()
        self.assertTrue(os.path.exists(cache_path))
        self.assertFalse(self.channel_rss.is_pending_task_active("rss_scrape"))

    @deferred(timeout=10)
    def test_parse_rss_feed(self):
        """
        Test parsing a rss feed
        """
        self.channel_rss.rss_url = 'http://localhost:%d/test_rss.xml' % self.file_server_port
        self.channel_rss._url_cache = SimpleCache(
            os.path.join(self.session_base_dir, 'cache.txt'))

        def verify_rss(items):
            # The served test_rss.xml is expected to yield exactly two items.
            self.assertEqual(len(items), 2)

        return self.channel_rss.parse_feed().addCallback(verify_rss)

    @deferred(timeout=10)
    def test_parse_feed_stopped(self):
        """
        Test whether items are not parsed anymore when the parse feeder is stopped
        """
        self.channel_rss.rss_url = 'http://localhost:%d/test_rss.xml' % self.file_server_port
        self.channel_rss._url_cache = SimpleCache(
            os.path.join(self.session_base_dir, 'cache.txt'))
        self.channel_rss._to_stop = True

        def verify_rss(items):
            # Once stopped, the same feed must produce no items at all.
            self.assertEqual(len(items), 0)

        return self.channel_rss.parse_feed().addCallback(verify_rss)
Code example #9
0
File: test_channel_rss.py  Project: synctext/tribler
class TestChannelRss(BaseTestChannel):
    """Tests for ChannelRssParser, backed by a local rss file server."""

    @inlineCallbacks
    def setUp(self):
        """
        Create and initialize the ChannelRssParser under test, and serve the
        test rss file over a local HTTP file server.
        """
        yield super(TestChannelRss, self).setUp()
        self.channel_rss = ChannelRssParser(self.fake_session, self.fake_channel_community, 'a')
        self.channel_rss.initialize()

        # Serve tests' test_rss.xml from a fresh directory on a random port.
        files_path = os.path.join(self.session_base_dir, 'files')
        os.mkdir(files_path)
        shutil.copyfile(os.path.join(TESTS_DATA_DIR, 'test_rss.xml'),
                        os.path.join(files_path, 'test_rss.xml'))
        self.file_server_port = get_random_port()
        self.setUpFileServer(self.file_server_port, files_path)

    @inlineCallbacks
    def tearDown(self):
        """Shut the parser down (if still running) before the base teardown."""
        if self.channel_rss.running:
            self.channel_rss.shutdown()

        yield super(TestChannelRss, self).tearDown()

    def _file_server_url(self, file_name):
        # URL under which the local file server exposes file_name.
        return 'http://localhost:%d/%s' % (self.file_server_port, file_name)

    @trial_timeout(10)
    def test_task_scrape_no_stop(self):
        """A scrape keeps the rss_scrape task pending while not stopping."""
        self.channel_rss.rss_url = self._file_server_url('test_rss.xml')
        self.channel_rss.cancel_all_pending_tasks()
        scrape_deferred = self.channel_rss._task_scrape()
        self.assertTrue(self.channel_rss.is_pending_task_active("rss_scrape"))
        return scrape_deferred

    @trial_timeout(10)
    def test_task_scrape_stop(self):
        """A scrape does not reschedule the rss_scrape task once stopping."""
        self.channel_rss.rss_url = self._file_server_url('test_rss.xml')
        self.channel_rss.cancel_all_pending_tasks()
        self.channel_rss._to_stop = True
        scrape_deferred = self.channel_rss._task_scrape()
        self.assertFalse(self.channel_rss.is_pending_task_active("rss_scrape"))
        return scrape_deferred

    def test_initialize(self):
        """Initialization (done in setUp) registers the rss_scrape task."""
        self.assertTrue(self.channel_rss.is_pending_task_active("rss_scrape"))

    def test_shutdown(self):
        """After shutdown the cache file exists and the task is cancelled."""
        cache_path = self.channel_rss._url_cache._file_path
        self.channel_rss._url_cache.add('a')
        self.channel_rss.shutdown()
        self.assertTrue(os.path.exists(cache_path))
        self.assertFalse(self.channel_rss.is_pending_task_active("rss_scrape"))

    @trial_timeout(10)
    def test_parse_rss_feed(self):
        """
        Test parsing a rss feed
        """
        self.channel_rss.rss_url = self._file_server_url('test_rss.xml')

        def check_items(items):
            self.assertEqual(len(items), 2)

        return self.channel_rss.parse_feed().addCallback(check_items)

    @trial_timeout(10)
    def test_parse_no_rss(self):
        """
        Test parsing a non-rss feed
        """
        self.channel_rss.rss_url = self._file_server_url('test_rsszz.xml')

        def check_items(items):
            self.assertIsNone(items)

        return self.channel_rss.parse_feed().addCallback(check_items)

    @trial_timeout(10)
    def test_parse_feed_stopped(self):
        """
        Test whether items are not parsed anymore when the parse feeder is stopped
        """
        self.channel_rss.rss_url = self._file_server_url('test_rss.xml')
        self.channel_rss._url_cache = SimpleCache(os.path.join(self.session_base_dir, 'cache.txt'))
        self.channel_rss._to_stop = True

        def check_items(items):
            self.assertEqual(len(items), 0)

        return self.channel_rss.parse_feed().addCallback(check_items)