def test_has_caching(self):
    """Test if it returns True when has_caching is called"""

    # The backend advertises cache support as a class-level capability
    caching_supported = Discourse.has_caching()
    self.assertEqual(caching_supported, True)
def test_fetch_pinned(self):
    """Test whether the right list of topics is returned when some topics are pinned"""

    bodies_topics = [read_file('data/discourse_topics_pinned.json'),
                     read_file('data/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse_topic_1149.json')
    body_topic_1150 = read_file('data/discourse_topic_1150.json')
    body_post = read_file('data/discourse_post.json')

    def request_callback(method, uri, headers):
        # Dispatch the canned response by URL prefix; topics-list
        # requests consume the paginated bodies in order
        if uri.startswith(DISCOURSE_TOPICS_URL):
            body = bodies_topics.pop(0)
        elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
            body = body_topic_1148
        elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
            body = body_topic_1149
        elif uri.startswith(DISCOURSE_TOPIC_URL_1150):
            body = body_topic_1150
        elif uri.startswith(DISCOURSE_POST_URL_1) or \
                uri.startswith(DISCOURSE_POST_URL_2):
            body = body_post
        else:
            # Fail explicitly on unexpected URLs. The original bare
            # `raise` would itself error out with "No active exception
            # to re-raise" instead of reporting the problem.
            raise Exception
        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1148,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1149,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1150,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_1,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_2,
                           responses=[httpretty.Response(body=request_callback)])

    # On this test two topics will be retrieved.
    # One of them was pinned but the date is in range.
    from_date = datetime.datetime(2016, 5, 25, 2, 0, 0)

    discourse = Discourse(DISCOURSE_SERVER_URL)
    topics = [topic for topic in discourse.fetch(from_date=from_date)]

    self.assertEqual(len(topics), 2)

    self.assertEqual(topics[0]['data']['id'], 1148)
    self.assertEqual(len(topics[0]['data']['post_stream']['posts']), 22)
    self.assertEqual(topics[0]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[0]['uuid'], '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
    self.assertEqual(topics[0]['updated_on'], 1464144769.526)
    self.assertEqual(topics[0]['category'], 'topic')
    self.assertEqual(topics[0]['tag'], DISCOURSE_SERVER_URL)

    self.assertEqual(topics[1]['data']['id'], 1150)
    self.assertEqual(len(topics[1]['data']['post_stream']['posts']), 2)
    self.assertEqual(topics[1]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[1]['uuid'], '373b597a2a389112875c3e544f197610373a7283')
    self.assertEqual(topics[1]['updated_on'], 1464274870.809)
    self.assertEqual(topics[1]['category'], 'topic')
    # Fixed copy-paste: the original re-checked topics[0]['tag'] here
    self.assertEqual(topics[1]['tag'], DISCOURSE_SERVER_URL)
def test_fetch_from_cache(self):
    """Test whether the cache works"""

    requests_http = []

    bodies_topics = [read_file('data/discourse_topics.json'),
                     read_file('data/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse_topic_1149.json')
    body_post = read_file('data/discourse_post.json')

    def request_callback(method, uri, headers):
        # Dispatch the canned response by URL prefix; topics-list
        # requests consume the paginated bodies in order
        if uri.startswith(DISCOURSE_TOPICS_URL):
            body = bodies_topics.pop(0)
        elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
            body = body_topic_1148
        elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
            body = body_topic_1149
        elif uri.startswith(DISCOURSE_POST_URL_1) or \
                uri.startswith(DISCOURSE_POST_URL_2):
            body = body_post
        else:
            # Fail explicitly on unexpected URLs. The original bare
            # `raise` would itself error out with "No active exception
            # to re-raise" instead of reporting the problem.
            raise Exception

        # Record every request served so we can assert the cache
        # does not hit the server again
        requests_http.append(httpretty.last_request())

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1148,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1149,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_1,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_2,
                           responses=[httpretty.Response(body=request_callback)])

    # First, we fetch the topics from the server, storing them
    # in a cache
    cache = Cache(self.tmp_path)
    discourse = Discourse(DISCOURSE_SERVER_URL, cache=cache)

    topics = [topic for topic in discourse.fetch()]
    self.assertEqual(len(requests_http), 6)

    # Now, we get the topics from the cache.
    # The contents should be the same and there won't be
    # any new request to the server
    cached_topics = [topic for topic in discourse.fetch_from_cache()]
    self.assertEqual(len(cached_topics), len(topics))

    self.assertEqual(len(cached_topics), 2)

    # Topics are returned in reverse order
    # from oldest to newest
    self.assertEqual(cached_topics[0]['data']['id'], 1149)
    self.assertEqual(len(cached_topics[0]['data']['post_stream']['posts']), 2)
    self.assertEqual(cached_topics[0]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(cached_topics[0]['uuid'], '18068b95de1323a84c8e11dee8f46fd137f10c86')
    self.assertEqual(cached_topics[0]['updated_on'], 1464134770.909)
    self.assertEqual(cached_topics[0]['category'], 'topic')
    # Fixed copy-paste: the original asserted 'origin' twice; the
    # sibling tests check 'tag' in this position
    self.assertEqual(cached_topics[0]['tag'], DISCOURSE_SERVER_URL)

    self.assertEqual(cached_topics[1]['data']['id'], 1148)
    self.assertEqual(cached_topics[1]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(cached_topics[1]['uuid'], '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
    self.assertEqual(cached_topics[1]['updated_on'], 1464144769.526)
    self.assertEqual(cached_topics[1]['category'], 'topic')
    # Fixed copy-paste: same duplicate-'origin' issue as above
    self.assertEqual(cached_topics[1]['tag'], DISCOURSE_SERVER_URL)

    # The next assertions check the cases whether the chunk_size is
    # less than the number of posts of a topic
    self.assertEqual(len(cached_topics[1]['data']['post_stream']['posts']), 22)
    self.assertEqual(cached_topics[1]['data']['post_stream']['posts'][0]['id'],
                     18952)
    self.assertEqual(cached_topics[1]['data']['post_stream']['posts'][20]['id'],
                     2500)

    # No more requests were sent
    self.assertEqual(len(requests_http), 6)
def test_fetch_from_date(self):
    """Test whether a list of topics is returned from a given date"""

    requests_http = []

    bodies_topics = [read_file('data/discourse_topics.json'),
                     read_file('data/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse_topic_1149.json')
    body_post = read_file('data/discourse_post.json')

    def request_callback(method, uri, headers):
        # Dispatch the canned response by URL prefix; topics-list
        # requests consume the paginated bodies in order
        if uri.startswith(DISCOURSE_TOPICS_URL):
            body = bodies_topics.pop(0)
        elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
            body = body_topic_1148
        elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
            body = body_topic_1149
        elif uri.startswith(DISCOURSE_POST_URL_1) or \
                uri.startswith(DISCOURSE_POST_URL_2):
            body = body_post
        else:
            # Fail explicitly on unexpected URLs. The original bare
            # `raise` would itself error out with "No active exception
            # to re-raise" instead of reporting the problem.
            raise Exception

        # Record every request served so the querystrings can be
        # verified below
        requests_http.append(httpretty.last_request())

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1148,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1149,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_1,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_2,
                           responses=[httpretty.Response(body=request_callback)])

    # On this test only one topic will be retrieved
    from_date = datetime.datetime(2016, 5, 25, 2, 0, 0)

    discourse = Discourse(DISCOURSE_SERVER_URL)
    topics = [topic for topic in discourse.fetch(from_date=from_date)]

    self.assertEqual(len(topics), 1)

    self.assertEqual(topics[0]['data']['id'], 1148)
    self.assertEqual(len(topics[0]['data']['post_stream']['posts']), 22)
    self.assertEqual(topics[0]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[0]['uuid'], '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
    self.assertEqual(topics[0]['updated_on'], 1464144769.526)
    self.assertEqual(topics[0]['category'], 'topic')
    self.assertEqual(topics[0]['tag'], DISCOURSE_SERVER_URL)

    # Check requests: one topics-list page plus the topic and
    # extra-post requests
    expected = [{'page': ['0']},
                {},
                {},
                {}]

    self.assertEqual(len(requests_http), len(expected))

    for i, expected_qs in enumerate(expected):
        self.assertDictEqual(requests_http[i].querystring, expected_qs)
def test_search_fields(self):
    """Test whether the search_fields is properly set"""

    # Paginated topics-list bodies, consumed in order
    bodies_topics = [read_file('data/discourse/discourse_topics.json'),
                     read_file('data/discourse/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse/discourse_topic_1149.json')
    body_post = read_file('data/discourse/discourse_post.json')

    # Fixed responses, matched by URL prefix in this order
    static_responses = [
        (DISCOURSE_TOPIC_URL_1148, body_topic_1148),
        (DISCOURSE_TOPIC_URL_1149, body_topic_1149),
        (DISCOURSE_POST_URL_1, body_post),
        (DISCOURSE_POST_URL_2, body_post),
    ]

    def request_callback(method, uri, headers):
        if uri.startswith(DISCOURSE_TOPICS_URL):
            return 200, headers, bodies_topics.pop(0)
        for prefix, canned_body in static_responses:
            if uri.startswith(prefix):
                return 200, headers, canned_body
        raise Exception

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    for url in (DISCOURSE_TOPIC_URL_1148,
                DISCOURSE_TOPIC_URL_1149,
                DISCOURSE_POST_URL_1,
                DISCOURSE_POST_URL_2):
        httpretty.register_uri(
            httpretty.GET, url,
            responses=[httpretty.Response(body=request_callback)])

    # Test fetch topics
    discourse = Discourse(DISCOURSE_SERVER_URL, sleep_time=0)

    topics = [topic for topic in discourse.fetch()]

    # Both topics must expose their item id and category id
    # through search_fields
    for topic in (topics[0], topics[1]):
        self.assertEqual(discourse.metadata_id(topic['data']),
                         topic['search_fields']['item_id'])
        self.assertEqual(topic['data']['category_id'], 111)
        self.assertEqual(topic['data']['category_id'],
                         topic['search_fields']['category_id'])
def test_fetch_with_credentials(self):
    """Test whether a list of topics is returned when the backend is initialized with credentials"""

    requests_http = []

    # Paginated topics-list bodies, consumed in order
    bodies_topics = [read_file('data/discourse/discourse_topics.json'),
                     read_file('data/discourse/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse/discourse_topic_1149.json')
    body_post = read_file('data/discourse/discourse_post.json')

    def request_callback(method, uri, headers):
        # Select the canned body by URL prefix; every served request
        # is recorded for the querystring checks below
        if uri.startswith(DISCOURSE_TOPICS_URL):
            served_body = bodies_topics.pop(0)
        elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
            served_body = body_topic_1148
        elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
            served_body = body_topic_1149
        elif uri.startswith(DISCOURSE_POST_URL_1) or \
                uri.startswith(DISCOURSE_POST_URL_2):
            served_body = body_post
        else:
            raise Exception
        requests_http.append(httpretty.last_request())
        return 200, headers, served_body

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    for url in (DISCOURSE_TOPIC_URL_1148,
                DISCOURSE_TOPIC_URL_1149,
                DISCOURSE_POST_URL_1,
                DISCOURSE_POST_URL_2):
        httpretty.register_uri(
            httpretty.GET, url,
            responses=[httpretty.Response(body=request_callback)])

    # Test fetch topics with API credentials set
    discourse = Discourse(DISCOURSE_SERVER_URL, sleep_time=0,
                          api_username='******', api_token='12345')

    topics = [topic for topic in discourse.fetch()]

    self.assertEqual(len(topics), 2)

    # Topics are returned in reverse order
    # from oldest to newest
    oldest, newest = topics[0], topics[1]

    self.assertEqual(oldest['data']['id'], 1149)
    self.assertEqual(len(oldest['data']['post_stream']['posts']), 2)
    self.assertEqual(oldest['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(oldest['uuid'], '18068b95de1323a84c8e11dee8f46fd137f10c86')
    self.assertEqual(oldest['updated_on'], 1464134770.909)
    self.assertEqual(oldest['category'], "topic")
    self.assertEqual(oldest['tag'], DISCOURSE_SERVER_URL)

    self.assertEqual(newest['data']['id'], 1148)
    self.assertEqual(newest['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(newest['uuid'], '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
    self.assertEqual(newest['updated_on'], 1464144769.526)
    self.assertEqual(newest['category'], "topic")
    self.assertEqual(newest['tag'], DISCOURSE_SERVER_URL)

    # The next assertions check the cases whether the chunk_size is
    # less than the number of posts of a topic
    newest_posts = newest['data']['post_stream']['posts']
    self.assertEqual(len(newest_posts), 22)
    self.assertEqual(newest_posts[0]['id'], 18952)
    self.assertEqual(newest_posts[20]['id'], 2500)

    # Check requests: two topics-list pages, then topic and post fetches
    expected = [{'page': ['0']},
                {'page': ['1']},
                {},
                {},
                {},
                {}]

    self.assertEqual(len(requests_http), len(expected))
    for served, expected_qs in zip(requests_http, expected):
        self.assertDictEqual(served.querystring, expected_qs)
def setUp(self):
    """Create writer and reader backends sharing the same archive."""
    super().setUp()

    # Both backends are configured identically over the shared archive
    for attr in ('backend_write_archive', 'backend_read_archive'):
        setattr(self, attr,
                Discourse(DISCOURSE_SERVER_URL, archive=self.archive))
def setUp(self):
    """Create archive backends: the writer carries an API token, the reader does not."""
    super().setUp()

    # Options shared by both backends
    common = {'sleep_time': 0, 'archive': self.archive}
    self.backend_write_archive = Discourse(DISCOURSE_SERVER_URL,
                                           api_token="aaaaa",
                                           **common)
    self.backend_read_archive = Discourse(DISCOURSE_SERVER_URL,
                                          **common)