def test_wait_returns_automatically_when_empty(self):
    """wait() must return on its own when nothing was enqueued."""
    queue = Octopus(concurrency=1)
    queue.start()
    # Nothing is pending, so the 5-second timeout should not be consumed.
    queue.wait(5)
    expect(queue.is_empty).to_be_true()
def test_can_wait(self):
    """A blocking wait should only return after the queued URL was handled."""
    otto = Octopus(concurrency=1)

    def store_response(url, response):
        # Stash the response on the test instance for the assertions below.
        self.response = response

    otto.enqueue('http://www.twitter.com', store_response)
    otto.start()
    otto.wait(0)  # presumably 0 means "no timeout, wait until drained" — verify against Octopus docs

    expect(self.response).not_to_be_null()
    expect(self.response.status_code).to_equal(200)
def test_can_handle_timeouts(self):
    """A request exceeding request_timeout_in_seconds must surface a timeout response."""
    target = 'http://baidu.com'
    otto = Octopus(concurrency=1, request_timeout_in_seconds=0.1)

    def on_done(url, response):
        self.response = response

    otto.enqueue(target, on_done)
    otto.start()
    otto.wait(5)

    # NOTE(review): depends on baidu.com being slower than 100ms from the
    # test machine — inherently network-sensitive.
    expect(self.response.text).to_include('Connection to baidu.com timed out')
    expect(self.response.error).to_include('Connection to baidu.com timed out. (connect timeout=0.1)')
def test_can_handle_invalid_urls(self):
    """An unresolvable host must yield a synthetic 599 response carrying the error text."""
    bad_url = 'http://kagdjdkjgka.fk'
    otto = Octopus(concurrency=1)

    def on_done(url, response):
        self.response = response

    otto.enqueue(bad_url, on_done)
    otto.start()
    otto.wait(5)

    expect(self.response).not_to_be_null()
    expect(self.response.status_code).to_equal(599)
    expect(self.response.text).to_include("HTTPConnectionPool(host='kagdjdkjgka.fk', port=80)")
    expect(self.response.text).to_include('Max retries exceeded with url: /')
    # For connection failures the error field mirrors the body verbatim.
    expect(self.response.error).to_equal(self.response.text)
def test_can_handle_cached_responses_when_not_cached(self):
    """Enqueueing the same URL repeatedly with cache=True must still succeed."""
    target = 'http://www.twitter.com'
    otto = Octopus(concurrency=1, cache=True)

    def on_done(url, response):
        self.response = response

    # Queue the identical URL four times to exercise the cache path.
    for _ in range(4):
        otto.enqueue(target, on_done)

    otto.start()
    otto.wait(5)

    expect(self.response).not_to_be_null()
    expect(self.response.status_code).to_equal(200)
def otto_requests(repetitions, concurrency, urls_to_retrieve):
    """Benchmark retrieving *urls_to_retrieve* concurrently with Octopus.

    Prints a banner, enqueues every URL with the module-level
    `handle_url_response` callback, and returns the elapsed wall-clock
    seconds for the whole run.

    NOTE(review): `repetitions` is unused here — kept only so the signature
    matches the sibling benchmark runners; confirm before removing.
    """
    message = "Retrieving URLs concurrently with Octopus..."
    banner = "=" * len(message)
    # Bug fix: the original used bare `print` expression statements, which
    # under Python 3 evaluate the function object without calling it and
    # print nothing. Use explicit print() calls for the blank lines.
    print()
    print(banner)
    print(message)
    print(banner)
    print()

    otto = Octopus(concurrency=concurrency)
    for url in urls_to_retrieve:
        otto.enqueue(url, handle_url_response)

    start_time = time()
    otto.start()
    otto.wait(0)  # block until every enqueued URL has been processed
    return time() - start_time
def test_can_handle_more_urls_concurrently(self):
    """Four URLs fetched with concurrency=4 must all complete with HTTP 200."""
    targets = [
        'http://www.twitter.com',
        'http://www.cnn.com',
        'http://www.bbc.com',
        'http://www.facebook.com'
    ]
    otto = Octopus(concurrency=4)

    def collect(url, response):
        # Keyed by URL so each result can be checked individually.
        self.responses[url] = response

    for target in targets:
        otto.enqueue(target, collect)

    otto.start()
    otto.wait(10)

    expect(self.responses).to_length(4)
    for target in targets:
        expect(self.responses).to_include(target)
        expect(self.responses[target].status_code).to_equal(200)