Code example #1
File: Stop.py  Project: samalba/image-spider
    def setUp(self):
        self.get = Get()
        urls, response = initiate_crawl()
        json_response = json.loads(response['content'].decode())
        job_id = json_response['job_id']
        self.query_string = 'job_id=' + str(job_id)

        # Wait until the initiated crawl has begun.
        self.get.wait_for_passing_content(
            '/status', self.query_string,
            self._mk_response_test(['Running', 'Complete']))

        # Stop the crawl.
        self.response = request('POST', '/stop', self.query_string)
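For reference, a hypothetical companion test for this setUp (not part of the excerpt): the Result.py example below checks the same POST /stop call against '202 Accepted', so a Stop test would presumably assert that status on self.response.

    def test_stop_is_accepted(self):
        # Hypothetical test, not from the project: setUp stored the /stop
        # reply in self.response, and the Result.py setUp further down expects
        # the same call to return '202 Accepted'.
        self.assertEqual(self.response['http_status'], '202 Accepted')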
Code example #2
    def setUp_for_GetByUrl(self, resource, response_test):
        """
        This is a setUp helper for test classes that get by URL. It sets the
        response and json_response properties.

        Arguments:
            resource: string HTTP resource.
            response_test: function to test for acceptable response.

        Returns: None
        """

        urls, self.response = initiate_crawl()
        json_response = json.loads(self.response['content'].decode())
        query_string = 'url=' + urls[0]
        self.response, self.json_response = self.get_response(
            resource, query_string)
        self.wait_for_passing_content(resource, query_string, response_test)
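The helper leaves self.response and self.json_response populated before the test methods run. A minimal sketch of how a test class might wire it up, assuming the helper lives on a unittest.TestCase-derived Get class as in the other excerpts; the class name, the '/result' resource, and the acceptance test are illustrative assumptions, not code from the project.

# Hypothetical subclass (not from the project) showing one way to use
# setUp_for_GetByUrl; '/result' and the acceptance test are illustrative.
class ResultByUrl(Get):

    def setUp(self):
        self.setUp_for_GetByUrl('/result', self.accepts_json)

    def accepts_json(self, response):
        # Placeholder acceptance test; passing the full response dict is an
        # assumption based on the dict shape used elsewhere in these excerpts.
        try:
            json.loads(response['content'].decode())
            return True
        except ValueError:
            return False

    def test_sets_json_response(self):
        # setUp_for_GetByUrl assigns this property before each test runs.
        self.assertIsNotNone(self.json_response)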
Code example #3
File: Get.py  Project: samalba/image-spider
    def setUp_for_GetByJobId(self, resource, response_test):

        """
        This is a setUp helper for test classes that get by job_id. It sets the
        response and json_response properties.

        Arguments:
            resource: string HTTP resource.
            response_test: function to test for acceptable response.

        Returns: None
        """

        self.response = initiate_crawl()[1]
        json_response = json.loads(self.response['content'].decode())
        query_string = 'job_id=' + str(json_response['job_id'])
        self.response, self.json_response = self.get_response(resource,
                                                              query_string)
        self.wait_for_passing_content(resource, query_string, response_test)
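Both helpers take a response_test callable, which wait_for_passing_content presumably polls until it passes. A hypothetical sketch of such a callable, assuming it receives the same response dict seen in these excerpts and that the /status body reports a crawl state under a 'state' key; none of this is taken from the project's own _mk_response_test.

# Hypothetical response-test factory: accept the response once its decoded
# JSON reports one of the expected crawl states. The 'state' key and the
# callable's signature are assumptions; only the response['content'] handling
# mirrors the excerpts above.
def make_state_test(expected_states):
    def response_test(response):
        payload = json.loads(response['content'].decode())
        return payload.get('state') in expected_states
    return response_test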
Code example #4
File: Result.py  Project: samalba/image-spider
    def setUp(self):
        self.get = Get()
        urls, response = initiate_crawl()
        json_response = json.loads(response['content'].decode())
        job_id = json_response['job_id']
        self.query_string = 'job_id=' + str(job_id)

        # Wait until the initiated crawl has begun.
        self.get.wait_for_passing_content(
            '/status', self.query_string,
            self._mk_response_test(['Running', 'Complete']))

        # Stop the crawl.
        response = request('POST', '/stop', self.query_string)
        self.assertEqual(response['http_status'], '202 Accepted')
        self.get.wait_for_passing_content('/status', self.query_string,
                                          self._mk_response_test(['Aborted']))

        # Delete the results.
        for url in urls:
            self.response = request('DELETE', '/result',
                                    'url=' + parse.quote(url))
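A hypothetical follow-up test for this setUp (not in the excerpt): self.response ends up holding the last DELETE /result reply, so a test could assert on its status. The '200 OK' value is an assumption; the excerpts only show the http_status field, not what /result returns for DELETE.

    def test_result_deleted(self):
        # Hypothetical assertion: setUp left the last DELETE /result reply in
        # self.response; '200 OK' is assumed, not taken from the project.
        self.assertEqual(self.response['http_status'], '200 OK')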
Code example #5
File: Crawl.py  Project: samalba/image-spider
    def setUp(self):
        self.response = initiate_crawl()[1]
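This setUp only stores the second value returned by initiate_crawl. A hypothetical companion test, assuming the response body is JSON carrying a job_id, as the other excerpts decode it; the method itself is not from the project.

    def test_crawl_returns_job_id(self):
        # Hypothetical test: the other excerpts decode response['content'] as
        # JSON and read 'job_id', so a crawl test would presumably check it.
        json_response = json.loads(self.response['content'].decode())
        self.assertIn('job_id', json_response)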