def test_fetch_from_empty_cache(self):
    """Test if there are not any topics returned when the cache is empty"""

    cache = Cache(self.tmp_path)
    discourse = Discourse(DISCOURSE_SERVER_URL, cache=cache)

    # An empty cache must produce an empty sequence of topics;
    # list() materializes the generator directly
    cached_topics = list(discourse.fetch_from_cache())

    self.assertEqual(len(cached_topics), 0)
def test_fetch_from_non_set_cache(self):
    """Test if a error is raised when the cache was not set"""

    discourse = Discourse(DISCOURSE_SERVER_URL)

    # fetch_from_cache() is a generator, so it must be consumed
    # for the CacheError to be raised
    with self.assertRaises(CacheError):
        _ = list(discourse.fetch_from_cache())
def test_fetch_empty(self):
    """Test whether it works when no topics are fetched"""

    body = read_file('data/discourse_topics_empty.json')
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           body=body, status=200)

    # Fetching against an empty topics listing must yield nothing
    discourse = Discourse(DISCOURSE_SERVER_URL)
    topics = list(discourse.fetch())

    self.assertEqual(len(topics), 0)
def test_initialization(self):
    """Test whether attributes are initializated"""

    # Explicit origin: both url and origin keep their given values
    backend = Discourse(DISCOURSE_SERVER_URL, origin='test')

    self.assertEqual(backend.url, DISCOURSE_SERVER_URL)
    self.assertEqual(backend.origin, 'test')
    self.assertIsInstance(backend.client, DiscourseClient)

    # When origin is empty or None it will be set to
    # the value in url
    backend = Discourse(DISCOURSE_SERVER_URL)
    self.assertEqual(backend.url, DISCOURSE_SERVER_URL)
    self.assertEqual(backend.origin, DISCOURSE_SERVER_URL)

    backend = Discourse(DISCOURSE_SERVER_URL, origin='')
    self.assertEqual(backend.url, DISCOURSE_SERVER_URL)
    self.assertEqual(backend.origin, DISCOURSE_SERVER_URL)
def test_fetch_from_cache(self):
    """Test whether the cache works"""

    requests_http = []

    bodies_topics = [read_file('data/discourse_topics.json'),
                     read_file('data/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse_topic_1149.json')
    body_post = read_file('data/discourse_post.json')

    def request_callback(method, uri, headers):
        # Serve the canned body that matches the requested URI
        if uri.startswith(DISCOURSE_TOPICS_URL):
            body = bodies_topics.pop(0)
        elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
            body = body_topic_1148
        elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
            body = body_topic_1149
        elif uri.startswith((DISCOURSE_POST_URL_1, DISCOURSE_POST_URL_2)):
            body = body_post
        else:
            # A bare `raise` here has no active exception and would fail
            # with a confusing RuntimeError; raise an explicit error instead
            raise AssertionError("unexpected URI: %s" % uri)

        requests_http.append(httpretty.last_request())

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1148,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1149,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_1,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_2,
                           responses=[httpretty.Response(body=request_callback)])

    # First, we fetch the topics from the server, storing them
    # in a cache
    cache = Cache(self.tmp_path)
    discourse = Discourse(DISCOURSE_SERVER_URL, cache=cache)

    topics = list(discourse.fetch())
    self.assertEqual(len(requests_http), 6)

    # Now, we get the topics from the cache.
    # The contents should be the same and there won't be
    # any new request to the server
    cached_topics = list(discourse.fetch_from_cache())
    self.assertEqual(len(cached_topics), len(topics))
    self.assertEqual(len(cached_topics), 2)

    # Topics are returned in reverse order
    # from oldest to newest
    self.assertEqual(cached_topics[0]['data']['id'], 1149)
    self.assertEqual(len(cached_topics[0]['data']['post_stream']['posts']), 2)
    self.assertEqual(cached_topics[0]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(cached_topics[0]['uuid'],
                     '18068b95de1323a84c8e11dee8f46fd137f10c86')
    self.assertEqual(cached_topics[0]['updated_on'], 1464134770.909)

    self.assertEqual(cached_topics[1]['data']['id'], 1148)
    self.assertEqual(cached_topics[1]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(cached_topics[1]['uuid'],
                     '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
    self.assertEqual(cached_topics[1]['updated_on'], 1464144769.526)

    # The next assertions check the cases whether the chunk_size is
    # less than the number of posts of a topic
    self.assertEqual(len(cached_topics[1]['data']['post_stream']['posts']), 22)
    self.assertEqual(cached_topics[1]['data']['post_stream']['posts'][0]['id'],
                     18952)
    self.assertEqual(cached_topics[1]['data']['post_stream']['posts'][20]['id'],
                     2500)

    # No more requests were sent
    self.assertEqual(len(requests_http), 6)
def test_fetch_pinned(self):
    """Test whether the right list of topics is returned when some topics are pinned"""

    bodies_topics = [read_file('data/discourse_topics_pinned.json'),
                     read_file('data/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse_topic_1149.json')
    body_topic_1150 = read_file('data/discourse_topic_1150.json')
    body_post = read_file('data/discourse_post.json')

    def request_callback(method, uri, headers):
        # Serve the canned body that matches the requested URI
        if uri.startswith(DISCOURSE_TOPICS_URL):
            body = bodies_topics.pop(0)
        elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
            body = body_topic_1148
        elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
            body = body_topic_1149
        elif uri.startswith(DISCOURSE_TOPIC_URL_1150):
            body = body_topic_1150
        elif uri.startswith((DISCOURSE_POST_URL_1, DISCOURSE_POST_URL_2)):
            body = body_post
        else:
            # A bare `raise` here has no active exception and would fail
            # with a confusing RuntimeError; raise an explicit error instead
            raise AssertionError("unexpected URI: %s" % uri)

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1148,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1149,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1150,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_1,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_2,
                           responses=[httpretty.Response(body=request_callback)])

    # On this tests two topics will be retrieved.
    # One of them was pinned but the date is in range.
    from_date = datetime.datetime(2016, 5, 25, 2, 0, 0)

    discourse = Discourse(DISCOURSE_SERVER_URL)
    topics = list(discourse.fetch(from_date=from_date))

    self.assertEqual(len(topics), 2)

    self.assertEqual(topics[0]['data']['id'], 1148)
    self.assertEqual(len(topics[0]['data']['post_stream']['posts']), 22)
    self.assertEqual(topics[0]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[0]['uuid'],
                     '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
    self.assertEqual(topics[0]['updated_on'], 1464144769.526)

    self.assertEqual(topics[1]['data']['id'], 1150)
    self.assertEqual(len(topics[1]['data']['post_stream']['posts']), 2)
    self.assertEqual(topics[1]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[1]['uuid'],
                     '373b597a2a389112875c3e544f197610373a7283')
    self.assertEqual(topics[1]['updated_on'], 1464274870.809)
def test_fetch_from_date(self):
    """Test whether a list of topics is returned from a given date"""

    requests_http = []

    bodies_topics = [read_file('data/discourse_topics.json'),
                     read_file('data/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse_topic_1149.json')
    body_post = read_file('data/discourse_post.json')

    def request_callback(method, uri, headers):
        # Serve the canned body that matches the requested URI
        if uri.startswith(DISCOURSE_TOPICS_URL):
            body = bodies_topics.pop(0)
        elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
            body = body_topic_1148
        elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
            body = body_topic_1149
        elif uri.startswith((DISCOURSE_POST_URL_1, DISCOURSE_POST_URL_2)):
            body = body_post
        else:
            # A bare `raise` here has no active exception and would fail
            # with a confusing RuntimeError; raise an explicit error instead
            raise AssertionError("unexpected URI: %s" % uri)

        requests_http.append(httpretty.last_request())

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1148,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1149,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_1,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_2,
                           responses=[httpretty.Response(body=request_callback)])

    # On this tests only one topic will be retrieved
    from_date = datetime.datetime(2016, 5, 25, 2, 0, 0)

    discourse = Discourse(DISCOURSE_SERVER_URL)
    topics = list(discourse.fetch(from_date=from_date))

    self.assertEqual(len(topics), 1)

    self.assertEqual(topics[0]['data']['id'], 1148)
    self.assertEqual(len(topics[0]['data']['post_stream']['posts']), 22)
    self.assertEqual(topics[0]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[0]['uuid'],
                     '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
    self.assertEqual(topics[0]['updated_on'], 1464144769.526)

    # Check requests
    expected = [{'page': ['0']}, {}, {}, {}]

    self.assertEqual(len(requests_http), len(expected))

    for request, expected_params in zip(requests_http, expected):
        self.assertDictEqual(request.querystring, expected_params)
def test_has_resuming(self):
    """Test if it returns True when has_resuming is called"""

    # has_resuming is called on the class, not an instance
    self.assertEqual(Discourse.has_resuming(), True)
def test_fetch_from_cache(self):
    """Test whether the cache works"""

    requests_http = []

    bodies_topics = [read_file('data/discourse_topics.json'),
                     read_file('data/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse_topic_1149.json')
    body_post = read_file('data/discourse_post.json')

    def request_callback(method, uri, headers):
        # Serve the canned body that matches the requested URI
        if uri.startswith(DISCOURSE_TOPICS_URL):
            body = bodies_topics.pop(0)
        elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
            body = body_topic_1148
        elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
            body = body_topic_1149
        elif uri.startswith((DISCOURSE_POST_URL_1, DISCOURSE_POST_URL_2)):
            body = body_post
        else:
            # A bare `raise` here has no active exception and would fail
            # with a confusing RuntimeError; raise an explicit error instead
            raise AssertionError("unexpected URI: %s" % uri)

        requests_http.append(httpretty.last_request())

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1148,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1149,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_1,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_2,
                           responses=[httpretty.Response(body=request_callback)])

    # First, we fetch the topics from the server, storing them
    # in a cache
    cache = Cache(self.tmp_path)
    discourse = Discourse(DISCOURSE_SERVER_URL, cache=cache)

    topics = list(discourse.fetch())
    self.assertEqual(len(requests_http), 6)

    # Now, we get the topics from the cache.
    # The contents should be the same and there won't be
    # any new request to the server
    cached_topics = list(discourse.fetch_from_cache())
    self.assertEqual(len(cached_topics), len(topics))
    self.assertEqual(len(cached_topics), 2)

    # Topics are returned in reverse order
    # from oldest to newest
    self.assertEqual(cached_topics[0]['data']['id'], 1149)
    self.assertEqual(len(cached_topics[0]['data']['post_stream']['posts']), 2)
    self.assertEqual(cached_topics[0]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(cached_topics[0]['uuid'],
                     '18068b95de1323a84c8e11dee8f46fd137f10c86')
    self.assertEqual(cached_topics[0]['updated_on'], 1464134770.909)

    self.assertEqual(cached_topics[1]['data']['id'], 1148)
    self.assertEqual(cached_topics[1]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(cached_topics[1]['uuid'],
                     '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
    self.assertEqual(cached_topics[1]['updated_on'], 1464144769.526)

    # The next assertions check the cases whether the chunk_size is
    # less than the number of posts of a topic
    self.assertEqual(len(cached_topics[1]['data']['post_stream']['posts']), 22)
    self.assertEqual(cached_topics[1]['data']['post_stream']['posts'][0]['id'],
                     18952)
    self.assertEqual(cached_topics[1]['data']['post_stream']['posts'][20]['id'],
                     2500)

    # No more requests were sent
    self.assertEqual(len(requests_http), 6)
def test_fetch_pinned(self):
    """Test whether the right list of topics is returned when some topics are pinned"""

    bodies_topics = [read_file('data/discourse_topics_pinned.json'),
                     read_file('data/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse_topic_1149.json')
    body_topic_1150 = read_file('data/discourse_topic_1150.json')
    body_post = read_file('data/discourse_post.json')

    def request_callback(method, uri, headers):
        # Serve the canned body that matches the requested URI
        if uri.startswith(DISCOURSE_TOPICS_URL):
            body = bodies_topics.pop(0)
        elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
            body = body_topic_1148
        elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
            body = body_topic_1149
        elif uri.startswith(DISCOURSE_TOPIC_URL_1150):
            body = body_topic_1150
        elif uri.startswith((DISCOURSE_POST_URL_1, DISCOURSE_POST_URL_2)):
            body = body_post
        else:
            # A bare `raise` here has no active exception and would fail
            # with a confusing RuntimeError; raise an explicit error instead
            raise AssertionError("unexpected URI: %s" % uri)

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1148,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1149,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1150,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_1,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_2,
                           responses=[httpretty.Response(body=request_callback)])

    # On this tests two topics will be retrieved.
    # One of them was pinned but the date is in range.
    from_date = datetime.datetime(2016, 5, 25, 2, 0, 0)

    discourse = Discourse(DISCOURSE_SERVER_URL)
    topics = list(discourse.fetch(from_date=from_date))

    self.assertEqual(len(topics), 2)

    self.assertEqual(topics[0]['data']['id'], 1148)
    self.assertEqual(len(topics[0]['data']['post_stream']['posts']), 22)
    self.assertEqual(topics[0]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[0]['uuid'],
                     '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
    self.assertEqual(topics[0]['updated_on'], 1464144769.526)

    self.assertEqual(topics[1]['data']['id'], 1150)
    self.assertEqual(len(topics[1]['data']['post_stream']['posts']), 2)
    self.assertEqual(topics[1]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[1]['uuid'],
                     '373b597a2a389112875c3e544f197610373a7283')
    self.assertEqual(topics[1]['updated_on'], 1464274870.809)
def test_fetch_from_date(self):
    """Test whether a list of topics is returned from a given date"""

    requests_http = []

    bodies_topics = [read_file('data/discourse_topics.json'),
                     read_file('data/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse_topic_1149.json')
    body_post = read_file('data/discourse_post.json')

    def request_callback(method, uri, headers):
        # Serve the canned body that matches the requested URI
        if uri.startswith(DISCOURSE_TOPICS_URL):
            body = bodies_topics.pop(0)
        elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
            body = body_topic_1148
        elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
            body = body_topic_1149
        elif uri.startswith((DISCOURSE_POST_URL_1, DISCOURSE_POST_URL_2)):
            body = body_post
        else:
            # A bare `raise` here has no active exception and would fail
            # with a confusing RuntimeError; raise an explicit error instead
            raise AssertionError("unexpected URI: %s" % uri)

        requests_http.append(httpretty.last_request())

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1148,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1149,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_1,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_2,
                           responses=[httpretty.Response(body=request_callback)])

    # On this tests only one topic will be retrieved
    from_date = datetime.datetime(2016, 5, 25, 2, 0, 0)

    discourse = Discourse(DISCOURSE_SERVER_URL)
    topics = list(discourse.fetch(from_date=from_date))

    self.assertEqual(len(topics), 1)

    self.assertEqual(topics[0]['data']['id'], 1148)
    self.assertEqual(len(topics[0]['data']['post_stream']['posts']), 22)
    self.assertEqual(topics[0]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[0]['uuid'],
                     '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
    self.assertEqual(topics[0]['updated_on'], 1464144769.526)

    # Check requests
    expected = [{'page': ['0']}, {}, {}, {}]

    self.assertEqual(len(requests_http), len(expected))

    for request, expected_params in zip(requests_http, expected):
        self.assertDictEqual(request.querystring, expected_params)
def test_fetch(self):
    """Test whether a list of topics is returned"""

    requests_http = []

    bodies_topics = [read_file('data/discourse_topics.json'),
                     read_file('data/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse_topic_1149.json')
    body_post = read_file('data/discourse_post.json')

    def request_callback(method, uri, headers):
        # Serve the canned body that matches the requested URI
        if uri.startswith(DISCOURSE_TOPICS_URL):
            body = bodies_topics.pop(0)
        elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
            body = body_topic_1148
        elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
            body = body_topic_1149
        elif uri.startswith((DISCOURSE_POST_URL_1, DISCOURSE_POST_URL_2)):
            body = body_post
        else:
            # A bare `raise` here has no active exception and would fail
            # with a confusing RuntimeError; raise an explicit error instead
            raise AssertionError("unexpected URI: %s" % uri)

        requests_http.append(httpretty.last_request())

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1148,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1149,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_1,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_2,
                           responses=[httpretty.Response(body=request_callback)])

    # Test fetch topics
    discourse = Discourse(DISCOURSE_SERVER_URL)
    topics = list(discourse.fetch())

    self.assertEqual(len(topics), 2)

    # Topics are returned in reverse order
    # from oldest to newest
    self.assertEqual(topics[0]['data']['id'], 1149)
    self.assertEqual(len(topics[0]['data']['post_stream']['posts']), 2)
    self.assertEqual(topics[0]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[0]['uuid'],
                     '18068b95de1323a84c8e11dee8f46fd137f10c86')
    self.assertEqual(topics[0]['updated_on'], 1464134770.909)

    self.assertEqual(topics[1]['data']['id'], 1148)
    self.assertEqual(topics[1]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[1]['uuid'],
                     '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
    self.assertEqual(topics[1]['updated_on'], 1464144769.526)

    # The next assertions check the cases whether the chunk_size is
    # less than the number of posts of a topic
    self.assertEqual(len(topics[1]['data']['post_stream']['posts']), 22)
    self.assertEqual(topics[1]['data']['post_stream']['posts'][0]['id'], 18952)
    self.assertEqual(topics[1]['data']['post_stream']['posts'][20]['id'], 2500)

    # Check requests
    expected = [{'page': ['0']}, {'page': ['1']}, {}, {}, {}, {}]

    self.assertEqual(len(requests_http), len(expected))

    for request, expected_params in zip(requests_http, expected):
        self.assertDictEqual(request.querystring, expected_params)
def test_fetch(self):
    """Test whether a list of topics is returned"""

    requests_http = []

    bodies_topics = [read_file('data/discourse_topics.json'),
                     read_file('data/discourse_topics_empty.json')]
    body_topic_1148 = read_file('data/discourse_topic_1148.json')
    body_topic_1149 = read_file('data/discourse_topic_1149.json')
    body_post = read_file('data/discourse_post.json')

    def request_callback(method, uri, headers):
        # Serve the canned body that matches the requested URI
        if uri.startswith(DISCOURSE_TOPICS_URL):
            body = bodies_topics.pop(0)
        elif uri.startswith(DISCOURSE_TOPIC_URL_1148):
            body = body_topic_1148
        elif uri.startswith(DISCOURSE_TOPIC_URL_1149):
            body = body_topic_1149
        elif uri.startswith((DISCOURSE_POST_URL_1, DISCOURSE_POST_URL_2)):
            body = body_post
        else:
            # A bare `raise` here has no active exception and would fail
            # with a confusing RuntimeError; raise an explicit error instead
            raise AssertionError("unexpected URI: %s" % uri)

        requests_http.append(httpretty.last_request())

        return (200, headers, body)

    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPICS_URL,
                           responses=[
                               httpretty.Response(body=request_callback)
                               for _ in range(2)
                           ])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1148,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_TOPIC_URL_1149,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_1,
                           responses=[httpretty.Response(body=request_callback)])
    httpretty.register_uri(httpretty.GET,
                           DISCOURSE_POST_URL_2,
                           responses=[httpretty.Response(body=request_callback)])

    # Test fetch topics
    discourse = Discourse(DISCOURSE_SERVER_URL)
    topics = list(discourse.fetch())

    self.assertEqual(len(topics), 2)

    # Topics are returned in reverse order
    # from oldest to newest
    self.assertEqual(topics[0]['data']['id'], 1149)
    self.assertEqual(len(topics[0]['data']['post_stream']['posts']), 2)
    self.assertEqual(topics[0]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[0]['uuid'],
                     '18068b95de1323a84c8e11dee8f46fd137f10c86')
    self.assertEqual(topics[0]['updated_on'], 1464134770.909)

    self.assertEqual(topics[1]['data']['id'], 1148)
    self.assertEqual(topics[1]['origin'], DISCOURSE_SERVER_URL)
    self.assertEqual(topics[1]['uuid'],
                     '5298e4e8383c3f73c9fa7c9599779cbe987a48e4')
    self.assertEqual(topics[1]['updated_on'], 1464144769.526)

    # The next assertions check the cases whether the chunk_size is
    # less than the number of posts of a topic
    self.assertEqual(len(topics[1]['data']['post_stream']['posts']), 22)
    self.assertEqual(topics[1]['data']['post_stream']['posts'][0]['id'], 18952)
    self.assertEqual(topics[1]['data']['post_stream']['posts'][20]['id'], 2500)

    # Check requests
    expected = [{'page': ['0']}, {'page': ['1']}, {}, {}, {}, {}]

    self.assertEqual(len(requests_http), len(expected))

    for request, expected_params in zip(requests_http, expected):
        self.assertDictEqual(request.querystring, expected_params)