Example #1
import logging

from scrapy.http import Response

def main():
    logging.basicConfig(level=logging.DEBUG)

    # Feed a local archive into the spider as a fake Response
    # (CSVSpider is project-specific and assumed importable).
    with open('list_person_all_extended_utf8.zip', 'rb') as rfp:
        csv_spider = CSVSpider()
        csv_spider.parse(Response('', body=rfp.read()))
Example #2
def get_response(**kwargs):
    # `request` is expected to come from the enclosing scope of this helper.
    return Response(request.url, request=request, **kwargs)
Example #3
 def select_body_attachment(self, attachment):
     """Syntax sugar to hide from implementation the transformation detail."""
     return Response(url=BLANK_START_URL,
                     body=attachment.body,
                     request=Request(url=BLANK_START_URL))
Example #4
 def process_exception(self, request, exception, spider):
     return Response(url=request.url, status=110, request=request)
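A downloader middleware like this one only takes effect once registered in the project settings. A minimal sketch, assuming a hypothetical module path and class name:

# Minimal sketch; the module path, class name, and priority (543) are
# assumptions for illustration.
DOWNLOADER_MIDDLEWARES = {
    'myproject.middlewares.TimeoutFallbackMiddleware': 543,
}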
Example #5
    def test_response_cacheability(self):
        responses = [
            # 304 is not cacheable no matter what the server sends
            (False, 304, {}),
            (False, 304, {'Last-Modified': self.yesterday}),
            (False, 304, {'Expires': self.tomorrow}),
            (False, 304, {'Etag': 'bar'}),
            (False, 304, {'Cache-Control': 'max-age=3600'}),
            # Always obey no-store cache control
            (False, 200, {'Cache-Control': 'no-store'}),
            (False, 200, {'Cache-Control': 'no-store, max-age=300'}),  # invalid
            (False, 200, {'Cache-Control': 'no-store', 'Expires': self.tomorrow}),  # invalid
            # Ignore responses missing expiration and/or validation headers
            (False, 200, {}),
            (False, 302, {}),
            (False, 307, {}),
            (False, 404, {}),
            # Cache responses with expiration and/or validation headers
            (True, 200, {'Last-Modified': self.yesterday}),
            (True, 203, {'Last-Modified': self.yesterday}),
            (True, 300, {'Last-Modified': self.yesterday}),
            (True, 301, {'Last-Modified': self.yesterday}),
            (True, 308, {'Last-Modified': self.yesterday}),
            (True, 401, {'Last-Modified': self.yesterday}),
            (True, 404, {'Cache-Control': 'public, max-age=600'}),
            (True, 302, {'Expires': self.tomorrow}),
            (True, 200, {'Etag': 'foo'}),
        ]
        with self._middleware() as mw:
            for idx, (shouldcache, status, headers) in enumerate(responses):
                req0 = Request('http://example-%d.com' % idx)
                res0 = Response(req0.url, status=status, headers=headers)
                res1 = self._process_requestresponse(mw, req0, res0)
                res304 = res0.replace(status=304)
                res2 = self._process_requestresponse(mw, req0, res304 if shouldcache else res0)
                self.assertEqualResponse(res1, res0)
                self.assertEqualResponse(res2, res0)
                resc = mw.storage.retrieve_response(self.spider, req0)
                if shouldcache:
                    self.assertEqualResponse(resc, res1)
                    assert 'cached' in res2.flags and res2.status != 304
                else:
                    self.assertFalse(resc)
                    assert 'cached' not in res2.flags

        # cache unconditionally unless response contains no-store or is a 304
        with self._middleware(HTTPCACHE_ALWAYS_STORE=True) as mw:
            for idx, (_, status, headers) in enumerate(responses):
                shouldcache = 'no-store' not in headers.get('Cache-Control', '') and status != 304
                req0 = Request('http://example2-%d.com' % idx)
                res0 = Response(req0.url, status=status, headers=headers)
                res1 = self._process_requestresponse(mw, req0, res0)
                res304 = res0.replace(status=304)
                res2 = self._process_requestresponse(mw, req0, res304 if shouldcache else res0)
                self.assertEqualResponse(res1, res0)
                self.assertEqualResponse(res2, res0)
                resc = mw.storage.retrieve_response(self.spider, req0)
                if shouldcache:
                    self.assertEqualResponse(resc, res1)
                    assert 'cached' in res2.flags and res2.status != 304
                else:
                    self.assertFalse(resc)
                    assert 'cached' not in res2.flags
Example #6
 def test_priority_adjust(self):
     req = Request('http://www.scrapytest.org/503')
     rsp = Response('http://www.scrapytest.org/503', body=b'', status=503)
     req2 = self.mw.process_response(req, rsp, self.spider)
     assert req2.priority < req.priority
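For context, Scrapy's retry middleware reschedules a retried request with its priority shifted by RETRY_PRIORITY_ADJUST, which defaults to -1; that default is what the assertion above relies on. A minimal settings sketch:

# Retried requests get priority + RETRY_PRIORITY_ADJUST; the default of -1
# is why req2.priority < req.priority holds above.
RETRY_PRIORITY_ADJUST = -1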
Example #7
                else:
                    print "No MongoDb authentication"
                client.close()

        # Return the item
        return item

    def configSectionMap(self, section):
        dict1 = {}
        options = self.config.options(section)
        for option in options:
            try:
                dict1[option] = self.config.get(section, option)
                if dict1[option] == -1:
                    print("skip: %s" % option)
            except Exception:
                print("exception on %s!" % option)
                dict1[option] = None
        return dict1

if __name__ == "__main__":
    netcdfSpider = NetCDFSpider()
    #response=Response("http://users.rcc.uchicago.edu/~davidkelly999/prism.2deg.tile/0021/clim_0021_0028.tile.nc4")
    response = Response(
        "http://users.rcc.uchicago.edu/~davidkelly999/gsde.2deg.tile/0066/soil_0066_0055.tile.nc4"
    )
    response = Response(
        "http://iridl.ldeo.columbia.edu/SOURCES/.ECOSYSTEMS/.Matthews/dods")
    item = netcdfSpider.parse_items(response)
    print(str(item))
Example #8
 def test_process_spider_output(self):
     res = Response('http://scrapytest.org')
     reqs = [Request('http://scrapytest.org/1')]
     out = list(self.mw.process_spider_output(res, reqs, self.spider))
     self.assertEqual(out, reqs)
Example #9
    def test_delay_adjustment(self):
        delay = 0.5
        slot_key = 'www.scrapytest.org'
        url = 'http://www.scrapytest.org'
        ban_url = 'http://ban.me'

        self.spider.crawlera_enabled = True

        crawler = self._mock_crawler(self.settings)
        # ignore spider delay by default
        self.spider.download_delay = delay
        mw = self.mwcls.from_crawler(crawler)
        mw.open_spider(self.spider)
        self.assertEqual(self.spider.download_delay, 0)

        # preserve original delay
        self.spider.download_delay = delay
        self.spider.crawlera_preserve_delay = True
        mw = self.mwcls.from_crawler(crawler)
        mw.open_spider(self.spider)
        self.assertEqual(self.spider.download_delay, delay)

        slot = MockedSlot(self.spider.download_delay)
        crawler.engine.downloader.slots[slot_key] = slot

        # ban
        req = Request(url, meta={'download_slot': slot_key})
        res = Response(ban_url, status=self.bancode, request=req)
        mw.process_response(req, res, self.spider)
        self.assertEqual(slot.delay, delay)
        self.assertEqual(self.spider.download_delay, delay)

        retry_after = 1.5
        headers = {'retry-after': str(retry_after)}
        res = Response(ban_url,
                       status=self.bancode,
                       headers=headers,
                       request=req)
        mw.process_response(req, res, self.spider)
        self.assertEqual(slot.delay, retry_after)
        self.assertEqual(self.spider.download_delay, delay)

        res = Response(url, request=req)
        mw.process_response(req, res, self.spider)
        self.assertEqual(slot.delay, delay)
        self.assertEqual(self.spider.download_delay, delay)

        # server failures
        mw.process_exception(req, ConnectionRefusedError(), self.spider)
        self.assertEqual(slot.delay, mw.connection_refused_delay)
        self.assertEqual(self.spider.download_delay, delay)

        res = Response(ban_url, request=req)
        mw.process_response(req, res, self.spider)
        self.assertEqual(slot.delay, delay)
        self.assertEqual(self.spider.download_delay, delay)

        mw.process_exception(req, ConnectionRefusedError(), self.spider)
        self.assertEqual(slot.delay, mw.connection_refused_delay)
        self.assertEqual(self.spider.download_delay, delay)

        res = Response(ban_url, status=self.bancode, request=req)
        mw.process_response(req, res, self.spider)
        self.assertEqual(slot.delay, delay)
        self.assertEqual(self.spider.download_delay, delay)
Example #10
 def test_plain_response(self):
     rsp = Response(url='http://test.com', body=self.uncompressed_body)
     new = self.mw.process_response(None, rsp, self.spider)
     assert new is rsp
     assert_samelines(self, new.body, rsp.body)
Example #11
 def test_empty_response(self):
     rsp = Response(url='http://test.com', body=b'')
     new = self.mw.process_response(None, rsp, self.spider)
     assert new is rsp
     assert not rsp.body
     assert not new.body
Example #12
 def setUp(self):
     self.request = Request('http://example.com/index.html')
     self.response = Response(self.request.url, request=self.request)
     self.crawler = get_crawler(Spider, {'SPIDER_MIDDLEWARES_BASE': {}})
     self.spider = self.crawler._create_spider('foo')
     self.mwman = SpiderMiddlewareManager.from_crawler(self.crawler)
Example #13
    def process_request(self, request, spider):

        self.logger.debug(request.meta)

        if request.meta is None or "my_page_type" not in request.meta:
            self.logger.debug("Skipped request %s" % request.url)
            return None

        self.logger.debug("Process request %s for spider %s", request.url,
                          spider.name)

        if request.meta['my_page_type'] == 'update':
            return Response(str(request.url),
                            headers={"handle": request.meta['my_window']},
                            request=request)

        if spider.browser is None:
            spider.browser = webdriver.Firefox()
            spider.default_window = spider.browser.current_window_handle

        previous_windows = spider.browser.window_handles

        spider.browser.switch_to.window(spider.default_window)

        count = 0
        while count < 3:
            try:
                self.logger.debug("Opening " + request.url)
                spider.browser.execute_script('''window.open("%s");''' %
                                              request.url)
            except NoSuchWindowException as e:
                self.logger.debug("%s is caught, try again ..." %
                                  e.__class__.__name__)
                self.logger.debug(e.stacktrace)
                time.sleep(1)
                count += 1
            else:
                self.logger.debug(
                    "Waiting for new window to be opened %s ..." %
                    time.ctime(time.time()))
                WebDriverWait(spider.browser, 10).until(
                    EC.new_window_is_opened(previous_windows))
                self.logger.debug("New window may be opened at %s." %
                                  time.ctime(time.time()))

                if len(spider.browser.window_handles) > len(previous_windows):
                    break
                else:
                    self.logger.debug(
                        "New window is not found, keep on waiting ...")
                    time.sleep(1)
                    count += 1

        if count == 3:
            self.logger.warning(
                "Browser failed to open new window for %s, reschedule the request."
                % request.url)
            spider.link_queue.append(request)
            raise IgnoreRequest()

        current_windows = spider.browser.window_handles

        # print "old windows = " + str(previous_windows)
        # print "new windows = " + str(current_windows)

        for w in current_windows:
            if w not in previous_windows:
                spider.browser.switch_to.window(w)
                break

        new_window = spider.browser.current_window_handle

        self.logger.debug("%s opened in window %s" % (request.url, new_window))

        return Response(str(spider.browser.current_url),
                        headers={"handle": new_window},
                        request=request)
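The Response returned above carries the Selenium window handle in its headers. A hedged sketch of the spider-side counterpart (the method body and attribute names are assumptions):

def parse(self, response):
    # Scrapy header values are bytes; decode the handle the middleware set.
    handle = response.headers['handle'].decode()
    self.browser.switch_to.window(handle)
    # ... scrape the opened window via self.browser.page_source ...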
Example #14
    def fetch(self, request, spider):
        """download_func"""
        info = self._extract_key_info(request)

        ret = self.store.fetch_file(request.url, info['key'], info['bucket'])
        # Scrapy response bodies must be bytes, so encode the JSON payload.
        return Response(request.url, body=json.dumps(ret).encode('utf-8'))
Example #15
 def test_cached_and_stale(self):
     sampledata = [
         (200, {
             'Date': self.today,
             'Expires': self.yesterday
         }),
         (200, {
             'Date': self.today,
             'Expires': self.yesterday,
             'Last-Modified': self.yesterday
         }),
         (200, {
             'Expires': self.yesterday
         }),
         (200, {
             'Expires': self.yesterday,
             'ETag': 'foo'
         }),
         (200, {
             'Expires': self.yesterday,
             'Last-Modified': self.yesterday
         }),
         (200, {
             'Expires': self.tomorrow,
             'Age': '86405'
         }),
         (200, {
             'Cache-Control': 'max-age=86400',
             'Age': '86405'
         }),
         # no-cache forces expiration, and also revalidation if validators exist
         (200, {
             'Cache-Control': 'no-cache'
         }),
         (200, {
             'Cache-Control': 'no-cache',
             'ETag': 'foo'
         }),
         (200, {
             'Cache-Control': 'no-cache',
             'Last-Modified': self.yesterday
         }),
         (200, {
             'Cache-Control': 'no-cache,must-revalidate',
             'Last-Modified': self.yesterday
         }),
         (200, {
             'Cache-Control': 'must-revalidate',
             'Expires': self.yesterday,
             'Last-Modified': self.yesterday
         }),
         (200, {
             'Cache-Control': 'max-age=86400,must-revalidate',
             'Age': '86405'
         }),
     ]
     with self._middleware() as mw:
         for idx, (status, headers) in enumerate(sampledata):
             req0 = Request(f'http://example-{idx}.com')
             res0a = Response(req0.url, status=status, headers=headers)
             # cache expired response
             res1 = self._process_requestresponse(mw, req0, res0a)
             self.assertEqualResponse(res1, res0a)
             assert 'cached' not in res1.flags
             # Same request, but since the cached response is stale, a new
             # response must be returned
             res0b = res0a.replace(body=b'bar')
             res2 = self._process_requestresponse(mw, req0, res0b)
             self.assertEqualResponse(res2, res0b)
             assert 'cached' not in res2.flags
             cc = headers.get('Cache-Control', '')
             # Previous response expired too, subsequent request to same
             # resource must revalidate and succeed on 304 if validators
             # are present
             if 'ETag' in headers or 'Last-Modified' in headers:
                 res0c = res0b.replace(status=304)
                 res3 = self._process_requestresponse(mw, req0, res0c)
                 self.assertEqualResponse(res3, res0b)
                 assert 'cached' in res3.flags
                 # get cached response on server errors unless must-revalidate
                 # in cached response
                 res0d = res0b.replace(status=500)
                 res4 = self._process_requestresponse(mw, req0, res0d)
                 if 'must-revalidate' in cc:
                     assert 'cached' not in res4.flags
                     self.assertEqualResponse(res4, res0d)
                 else:
                     assert 'cached' in res4.flags
                     self.assertEqualResponse(res4, res0b)
             # Requests with max-stale can fetch expired cached responses
             # unless cached response has must-revalidate
             req1 = req0.replace(headers={'Cache-Control': 'max-stale'})
             res5 = self._process_requestresponse(mw, req1, res0b)
             self.assertEqualResponse(res5, res0b)
             if 'no-cache' in cc or 'must-revalidate' in cc:
                 assert 'cached' not in res5.flags
             else:
                 assert 'cached' in res5.flags
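These assertions exercise Scrapy's RFC 2616 cache policy. A minimal settings sketch for enabling the same policy in a project:

# Enable the HTTP cache with the RFC 2616 policy these tests exercise.
HTTPCACHE_ENABLED = True
HTTPCACHE_POLICY = 'scrapy.extensions.httpcache.RFC2616Policy'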
Example #16
    def test_should_remove_req_res_references_before_caching_the_results(self):
        """Regression test case to prevent a memory leak in the Media Pipeline.

        The memory leak is triggered when an exception is raised when a Response
        scheduled by the Media Pipeline is being returned. For example, when a
        FileException('download-error') is raised because the Response status
        code is not 200 OK.

        It happens because we are keeping a reference to the Response object
        inside the FileException context. This is caused by the way Twisted
        returns values from inline callbacks. It raises a custom exception
        encapsulating the original return value.

        The solution is to remove the exception context when this context is a
        _DefGen_Return instance, the BaseException used by Twisted to pass the
        returned value from those inline callbacks.

        Maybe there's a better and more reliable way to test the case described
        here, but it would be more complicated and involve running - or at least
        mocking - some async steps from the Media Pipeline. The current test
        case is simple and detects the problem very fast. On the other hand, it
        would not detect another kind of leak happening due to old object
        references being kept inside the Media Pipeline cache.

        This problem does not occur in Python 2.7 since we don't have Exception
        Chaining (https://www.python.org/dev/peps/pep-3134/).
        """
        # Create sample pair of Request and Response objects
        request = Request('http://url')
        response = Response('http://url', body=b'', request=request)

        # Simulate the Media Pipeline behavior to produce a Twisted Failure
        try:
            # Simulate a Twisted inline callback returning a Response
            raise StopIteration(response)
        except StopIteration as exc:
            def_gen_return_exc = exc
            try:
                # Simulate the media_downloaded callback raising a FileException
                # This usually happens when the status code is not 200 OK
                raise FileException('download-error')
            except Exception as exc:
                file_exc = exc
                # Simulate Twisted capturing the FileException
                # It encapsulates the exception inside a Twisted Failure
                failure = Failure(file_exc)

        # The Failure should encapsulate a FileException ...
        self.assertEqual(failure.value, file_exc)
        # ... and it should have the StopIteration exception set as its context
        self.assertEqual(failure.value.__context__, def_gen_return_exc)

        # Let's calculate the request fingerprint and fake some runtime data...
        fp = self.fingerprint(request)
        info = self.pipe.spiderinfo
        info.downloading.add(fp)
        info.waiting[fp] = []

        # When calling the method that caches the Request's result ...
        self.pipe._cache_result_and_execute_waiters(failure, fp, info)
        # ... it should store the Twisted Failure ...
        self.assertEqual(info.downloaded[fp], failure)
        # ... encapsulating the original FileException ...
        self.assertEqual(info.downloaded[fp].value, file_exc)
        # ... but it should not store the StopIteration exception on its context
        context = getattr(info.downloaded[fp].value, '__context__', None)
        self.assertIsNone(context)
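For reference, a hedged sketch of the fix the docstring describes: dropping the exception context when it is Twisted's internal _DefGen_Return (a private class, so the import is an assumption and may not exist in newer Twisted releases):

from twisted.internet.defer import _DefGen_Return  # private API

def _clear_defgen_context(exc):
    # Break the reference chain to the Response captured by inlineCallbacks.
    if isinstance(exc.__context__, _DefGen_Return):
        exc.__context__ = None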
Example #17
 def _mock_crawlera_response(self, url, headers=None, **kwargs):
     crawlera_headers = {"X-Crawlera-Version": "1.36.3-cd5e44"}
     if headers:
         crawlera_headers.update(headers)
     return Response(url, headers=crawlera_headers, **kwargs)
Example #18
 def test_default_media_downloaded(self):
     request = Request('http://url')
     response = Response('http://url', body=b'')
     assert self.pipe.media_downloaded(response, request,
                                       self.info) is response
Example #19
    def test_404(self):
        req = Request('http://www.scrapytest.org/404')
        rsp = Response('http://www.scrapytest.org/404', body=b'', status=404)

        # don't retry 404s
        assert self.mw.process_response(req, rsp, self.spider) is rsp
Example #20
 def test_cached_and_stale(self):
     sampledata = [
         (200, {
             'Date': self.today,
             'Expires': self.yesterday
         }),
         (200, {
             'Date': self.today,
             'Expires': self.yesterday,
             'Last-Modified': self.yesterday
         }),
         (200, {
             'Expires': self.yesterday
         }),
         (200, {
             'Expires': self.yesterday,
             'ETag': 'foo'
         }),
         (200, {
             'Expires': self.yesterday,
             'Last-Modified': self.yesterday
         }),
         (200, {
             'Expires': self.tomorrow,
             'Age': '86405'
         }),
         (200, {
             'Cache-Control': 'max-age=86400',
             'Age': '86405'
         }),
         # no-cache forces expiration, and also revalidation if validators exist
         (200, {
             'Cache-Control': 'no-cache'
         }),
         (200, {
             'Cache-Control': 'no-cache',
             'ETag': 'foo'
         }),
         (200, {
             'Cache-Control': 'no-cache',
             'Last-Modified': self.yesterday
         }),
     ]
     with self._middleware() as mw:
         for idx, (status, headers) in enumerate(sampledata):
             req0 = Request('http://example-%d.com' % idx)
             res0a = Response(req0.url, status=status, headers=headers)
             # cache expired response
             res1 = self._process_requestresponse(mw, req0, res0a)
             self.assertEqualResponse(res1, res0a)
             assert 'cached' not in res1.flags
             # Same request, but since the cached response is stale, a new
             # response must be returned
             res0b = res0a.replace(body=b'bar')
             res2 = self._process_requestresponse(mw, req0, res0b)
             self.assertEqualResponse(res2, res0b)
             assert 'cached' not in res2.flags
             # Previous response expired too, subsequent request to same
             # resource must revalidate and succeed on 304 if validators
             # are present
             if 'ETag' in headers or 'Last-Modified' in headers:
                 res0c = res0b.replace(status=304)
                 res3 = self._process_requestresponse(mw, req0, res0c)
                 self.assertEqualResponse(res3, res0b)
                 assert 'cached' in res3.flags
Example #21
 def test_get_sitemap_body_gzip_headers(self):
     r = Response(url="http://www.example.com/sitemap",
                  body=self.GZBODY,
                  headers={"content-type": "application/gzip"})
     self.assertSitemapBody(r, self.BODY)
Example #22
 def setUp(self):
     self.response = Response(URL, request=Request(URL, meta={'cookiejar': 1}))
     self.spider = self.TestSpider()
Example #23
 def _build_response(self, url, meta=None):
     return Response(url,
                     request=Request(
                         url="http://www.example.com/parent.html",
                         meta=meta))
Example #24
 def get_response(self, origin):
     return Response(origin, headers=self.resp_headers)
Example #25
def _prepare_request_object(item_url):
    return Request(
        item_url,
        meta={'response': Response(item_url, status=200, body=b'data')})
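Stashing a canned Response in request.meta like this is typically paired with a downloader middleware that short-circuits the download. A minimal sketch (the class name is an assumption):

class CannedResponseMiddleware:
    def process_request(self, request, spider):
        # Returning a Response from process_request skips the real download.
        return request.meta.get('response')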
Example #26
 def _test_passthrough(req):
     rsp = Response(url, headers={'Location': url2}, status=301, request=req)
     r = self.mw.process_response(req, rsp, self.spider)
     self.assertIs(r, rsp)
Example #27
 def test_request_response(self):
     req = Request('http://example.com/index.html')
     resp = Response(req.url, status=200)
     ret = self._download(req, resp)
     self.assertTrue(isinstance(ret, Response), "Non-response returned")
Example #28
 def test_cached_and_fresh(self):
     sampledata = [
         (200, {
             'Date': self.yesterday,
             'Expires': self.tomorrow
         }),
         (200, {
             'Date': self.yesterday,
             'Cache-Control': 'max-age=86405'
         }),
         (200, {
             'Age': '299',
             'Cache-Control': 'max-age=300'
         }),
         # Obey max-age, when present, over any other header
         (200, {
             'Date': self.today,
             'Age': '86405',
             'Cache-Control': 'max-age=' + str(86400 * 3),
             'Expires': self.yesterday,
             'Last-Modified': self.yesterday,
         }),
         # obey Expires if max-age is not present
         (200, {
             'Date': self.yesterday,
             'Age': '86400',
             'Cache-Control': 'public',
             'Expires': self.tomorrow,
             'Last-Modified': self.yesterday,
         }),
         # Default missing Date header to right now
         (200, {
             'Expires': self.tomorrow
         }),
         # Firefox - Expires if age is greater than 10% of (Date - Last-Modified)
         (200, {
             'Date': self.today,
             'Last-Modified': self.yesterday,
             'Age': str(86400 / 10 - 1)
         }),
         # Firefox - apply a one-year max-age to permanent redirects missing expiration info
         (300, {}),
         (301, {}),
         (308, {}),
     ]
     with self._middleware() as mw:
         for idx, (status, headers) in enumerate(sampledata):
             req0 = Request(f'http://example-{idx}.com')
             res0 = Response(req0.url, status=status, headers=headers)
             # cache fresh response
             res1 = self._process_requestresponse(mw, req0, res0)
             self.assertEqualResponse(res1, res0)
             assert 'cached' not in res1.flags
             # return fresh cached response without network interaction
             res2 = self._process_requestresponse(mw, req0, None)
             self.assertEqualResponse(res1, res2)
             assert 'cached' in res2.flags
             # validate cached response if request max-age set as 0
             req1 = req0.replace(headers={'Cache-Control': 'max-age=0'})
             res304 = res0.replace(status=304)
             assert mw.process_request(req1, self.spider) is None
             res3 = self._process_requestresponse(mw, req1, res304)
             self.assertEqualResponse(res1, res3)
             assert 'cached' in res3.flags
Example #29
 def test_priority_adjust(self):
     req = Request('http://a.com')
     rsp = Response('http://a.com', headers={'Location': 'http://a.com/redirected'}, status=301)
     req2 = self.mw.process_response(req, rsp, self.spider)
     assert req2.priority > req.priority
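This is the mirror image of the retry test above: Scrapy's redirect middleware shifts priority by REDIRECT_PRIORITY_ADJUST, which defaults to +2, hence the inverted comparison. A minimal settings sketch:

# Redirected requests get priority + REDIRECT_PRIORITY_ADJUST; the default
# of +2 is why req2.priority > req.priority holds above.
REDIRECT_PRIORITY_ADJUST = 2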
Example #30
 def get_media_requests(self, item, info):
     item_url = item['image_urls'][0]
     return Request(
         item_url,
         meta={'response': Response(item_url, status=200, body=b'data')})