def setUp(self):
    """Start a crawl, wait for it to be running, then request it to stop.

    Sets ``self.get``, ``self.query_string`` and ``self.response`` (the
    POST /stop response) for the test methods to inspect.
    """
    self.get = Get()
    # initiate_crawl() also returns the list of crawled URLs, which this
    # fixture does not need.
    _urls, response = initiate_crawl()
    json_response = json.loads(response['content'].decode())
    job_id = json_response['job_id']
    self.query_string = 'job_id=' + str(job_id)
    # Wait until the initiated crawl has begun.
    self.get.wait_for_passing_content(
        '/status', self.query_string,
        self._mk_response_test(['Running', 'Complete']))
    # Stop the crawl.
    self.response = request('POST', '/stop', self.query_string)
def setUp(self):
    """Start a crawl, wait for it to be running, then request it to stop.

    Sets ``self.get``, ``self.query_string`` and ``self.response`` (the
    POST /stop response) for the test methods to inspect.
    """
    self.get = Get()
    # initiate_crawl() also returns the list of crawled URLs, which this
    # fixture does not need.
    _urls, response = initiate_crawl()
    json_response = json.loads(response['content'].decode())
    job_id = json_response['job_id']
    self.query_string = 'job_id=' + str(job_id)
    # Wait until the initiated crawl has begun.
    self.get.wait_for_passing_content(
        '/status', self.query_string,
        self._mk_response_test(['Running', 'Complete']))
    # Stop the crawl.
    self.response = request('POST', '/stop', self.query_string)
def setUp_for_GetByUrl(self, resource, response_test):
    """
    This is a setUp helper for tests classes that get by URL.
    It sets the ``response`` and ``json_response`` properties.

    Arguments:
        resource: string HTTP resource.
        response_test: function to test for acceptable response.

    Returns:
        None
    """
    urls, self.response = initiate_crawl()
    # Parse the crawl-initiation response purely as a sanity check that
    # its content is valid JSON; the parsed value itself is not used here.
    json.loads(self.response['content'].decode())
    query_string = 'url=' + urls[0]
    self.response, self.json_response = self.get_response(
        resource, query_string)
    self.wait_for_passing_content(resource, query_string, response_test)
def setUp_for_GetByJobId(self, resource, response_test):
    """
    This is a setUp helper for tests classes that get by job_id.
    It sets the ``response`` and ``json_response`` properties.

    Arguments:
        resource: string HTTP resource.
        response_test: function to test for acceptable response.

    Returns:
        None
    """
    # Initiate a crawl and read the job_id out of its JSON response body.
    self.response = initiate_crawl()[1]
    json_response = json.loads(self.response['content'].decode())
    query_string = 'job_id=' + str(json_response['job_id'])
    self.response, self.json_response = self.get_response(resource,
                                                          query_string)
    self.wait_for_passing_content(resource, query_string, response_test)
def setUp(self):
    """Start a crawl, stop it, confirm it aborts, then delete its results.

    Leaves ``self.get``, ``self.query_string`` and ``self.response`` (the
    last DELETE /result response) set for the test methods.
    """
    self.get = Get()
    crawled_urls, initiate_response = initiate_crawl()
    body = json.loads(initiate_response['content'].decode())
    self.query_string = 'job_id=' + str(body['job_id'])
    # Block until the job reports that it has actually started.
    running_or_done = self._mk_response_test(['Running', 'Complete'])
    self.get.wait_for_passing_content('/status', self.query_string,
                                      running_or_done)
    # Ask the service to stop the crawl; the request must be accepted.
    stop_response = request('POST', '/stop', self.query_string)
    self.assertEqual(stop_response['http_status'], '202 Accepted')
    # Wait for the job to confirm it has aborted.
    aborted = self._mk_response_test(['Aborted'])
    self.get.wait_for_passing_content('/status', self.query_string, aborted)
    # Remove every result the crawl produced.
    for crawled in crawled_urls:
        self.response = request('DELETE', '/result',
                                'url=' + parse.quote(crawled))
def setUp(self):
    """Start a crawl, stop it, confirm it aborts, then delete its results.

    Leaves ``self.get``, ``self.query_string`` and ``self.response`` (the
    last DELETE /result response) set for the test methods.
    """
    self.get = Get()
    crawled_urls, initiate_response = initiate_crawl()
    body = json.loads(initiate_response['content'].decode())
    self.query_string = 'job_id=' + str(body['job_id'])
    # Block until the job reports that it has actually started.
    running_or_done = self._mk_response_test(['Running', 'Complete'])
    self.get.wait_for_passing_content('/status', self.query_string,
                                      running_or_done)
    # Ask the service to stop the crawl; the request must be accepted.
    stop_response = request('POST', '/stop', self.query_string)
    self.assertEqual(stop_response['http_status'], '202 Accepted')
    # Wait for the job to confirm it has aborted.
    aborted = self._mk_response_test(['Aborted'])
    self.get.wait_for_passing_content('/status', self.query_string, aborted)
    # Remove every result the crawl produced.
    for crawled in crawled_urls:
        self.response = request('DELETE', '/result',
                                'url=' + parse.quote(crawled))
def setUp(self):
    """Kick off a crawl, keeping only its HTTP response in ``self.response``."""
    _urls, self.response = initiate_crawl()