def __init__(self, content=None, expires=None):
    super(ReppyWrapper, self).__init__(content, expires)
    if content:
        # Parse the fetched robots.txt body right away
        self.parser = Rules('robots.txt', 200, content, self.expires)
    else:
        # No content to parse; keep a reference to the parent via super()
        self.parser = None
        self.my_super = super(ReppyWrapper, self)
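
For reference, a standalone sketch of the Rules object the wrapper builds, using the same constructor shape as above (url, HTTP status, body, expiration). The robots.txt body and agent name here are invented for illustration:

from reppy.parser import Rules

# Hypothetical robots.txt body, purely for illustration
content = '''
User-agent: *
Disallow: /private
'''

rules = Rules('http://example.com/robots.txt', 200, content, 0)
print(rules.allowed('/private', 'my-agent'))  # expected: False
print(rules.allowed('/public', 'my-agent'))   # expected: True
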
Example 2
def test_status_forbidden_allow(self):
    '''Test that if the flag is given, we allow all sites when robots.txt
    is forbidden'''
    rules = Rules('http://example.com/robots.txt',
                  401,
                  '',
                  0,
                  disallow_forbidden=False)
    self.assertTrue(rules.allowed('/foo', 't'))
    self.assertTrue(rules.allowed('http://example.com/foo', 't'))
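
For contrast, a small sketch of the flag's effect alongside the default behavior (a forbidden status with no flag disallows everything, as Example 5 below checks). The paths and agent token mirror the tests:

from reppy.parser import Rules

# Default: a 401 robots.txt response disallows everything
strict = Rules('http://example.com/robots.txt', 401, '', 0)
# With disallow_forbidden=False, the same response allows everything
lenient = Rules('http://example.com/robots.txt', 401, '', 0,
                disallow_forbidden=False)
print(strict.allowed('/foo', 't'))   # expected: False
print(lenient.allowed('/foo', 't'))  # expected: True
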
Example 3
from __future__ import print_function

from contextlib import contextmanager
import time

from reppy.cache import RobotsCache
from reppy.parser import Rules

content = '''
User-agent: '*'
Allow: /
'''

cache = RobotsCache()
# Pre-seed the cache with rules that never expire
cache.add(Rules('http://example.com/', 200, content, float('inf')))


@contextmanager
def timer(count):
    '''Time this block.'''
    start = time.time()
    try:
        yield count
    finally:
        duration = time.time() - start
        print('Total: %s' % duration)
        print('  Avg: %s' % (duration / count))
        print(' Rate: %s' % (count / duration))
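
The snippet defines timer and seeds the cache but stops short of using them; a hedged sketch of the benchmark loop they seem intended for, assuming RobotsCache exposes an allowed(url, agent) lookup analogous to Rules.allowed (the iteration count and URL are invented):

count = 100000
with timer(count) as total:
    for _ in range(total):
        # Each lookup should be served from the pre-seeded cache entry
        cache.allowed('http://example.com/page', 'my-agent')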

Example 4
def test_status_allowed(self):
    '''If no robots.txt exists, we're given free rein'''
    rules = Rules('http://example.com/robots.txt', 404, '', 0)
    self.assertTrue(rules.allowed('/foo', 't'))
    self.assertTrue(rules.allowed('http://example.com/foo', 't'))
Example 5
def test_status_forbidden(self):
    '''Make sure that when we get a forbidden status, we believe
    we're not allowed to crawl the site'''
    rules = Rules('http://example.com/robots.txt', 401, '', 0)
    self.assertFalse(rules.allowed('/foo', 't'))
    self.assertFalse(rules.allowed('http://example.com/foo', 't'))
Example 6
def parse(strng):
    '''Helper to parse a string as a Rules object'''
    return Rules('http://example.com/robots.txt', 200, strng, 0)
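
A possible use of the helper, in the spirit of the tests above; the robots.txt body is invented for illustration, and parse is assumed to be reachable at module level:

rules = parse('''
User-agent: *
Disallow: /private
''')
print(rules.allowed('/private', 't'))  # expected: False
print(rules.allowed('/', 't'))         # expected: True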