def test_apache_count(self):
    """Smoke-test main() end-to-end with pre-built distant stats.

    Builds a bz2-compressed monthly stats file, patches
    urllib2.urlopen to serve it, then checks main() runs without
    raising. No return value is inspected.
    """
    # creating stats so they can be used by main() as distant stats
    stats = ApacheLocalStats()
    stats.build_monthly_stats(2008, 11, log_sample, bz2_file,
                              compression='bz2')
    # now patching urlopen so it returns the built stats
    import urllib2
    old_open = urllib2.urlopen

    def _open(url):
        class FakeUrl(object):
            def read(self):
                # the payload is bz2 (binary): read it in 'rb' mode so
                # platforms that translate line endings in text mode
                # cannot corrupt the data
                return open(bz2_file, 'rb').read()
        return FakeUrl()

    urllib2.urlopen = _open
    # just to make sure it doesn't break
    try:
        main(config_file, log_sample)
    finally:
        # always restore the real urlopen
        urllib2.urlopen = old_open
def test_build_monthly_stats(self):
    """build_monthly_stats emits complete, well-formed CSV rows."""
    output = StringIO()
    stats = ApacheLocalStats()
    stats.build_monthly_stats(2008, 11, log_sample, output)
    output.seek(0)
    rows = list(csv.reader(output))
    # every row must carry exactly four non-empty fields
    for row in rows:
        self.assertEquals(len(row), 4)
        self.assert_('' not in row)
    # spot-check a few known entries against the sample log
    self.assertEquals(rows[0],
                      ['appwsgi', '344.tar.bz2', 'Mozilla/5.0', '1'])
    self.assertEquals(rows[456],
                      ['Mtrax', 'Mtrax-2.2.07-py2.5-win32.egg',
                       'Firefox/3', '1'])
    self.assertEquals(rows[486],
                      ['OpenPGP', 'OpenPGP-0.2.3.tar.gz',
                       'Firefox/3', '1'])
    self.assertEquals(len(rows), 10043)
def test_compression(self):
    """Stats written with bz2 compression can be read back intact."""
    stats = ApacheLocalStats()
    stats.build_monthly_stats(2008, 11, log_sample, bz2_file,
                              compression='bz2')
    entries = stats.read_stats(bz2_file)
    first = entries.next()
    self.assertEquals(first['count'], '1')
    self.assertEquals(first['packagename'], 'appwsgi')
def test_compression(self):
    """A bz2-compressed stats file round-trips through read_stats."""
    local = ApacheLocalStats()
    local.build_monthly_stats(2008, 11, log_sample, bz2_file,
                              compression='bz2')
    entry = local.read_stats(bz2_file).next()
    self.assertEquals(entry['count'], '1')
    self.assertEquals(entry['packagename'], 'appwsgi')
def test_read_stats(self):
    """read_stats yields dict entries from an uncompressed stream."""
    buf = StringIO()
    stats = ApacheLocalStats()
    stats.build_monthly_stats(2008, 11, log_sample, buf)
    buf.seek(0)
    entry = stats.read_stats(buf).next()
    self.assertEquals(entry['count'], '1')
    self.assertEquals(entry['packagename'], 'appwsgi')
def test_read_stats(self):
    """The first entry read back matches the sample log's first hit."""
    stream = StringIO()
    stats = ApacheLocalStats()
    stats.build_monthly_stats(2008, 11, log_sample, stream)
    stream.seek(0)
    reader = stats.read_stats(stream)
    first = reader.next()
    self.assertEquals(first['count'], '1')
    self.assertEquals(first['packagename'], 'appwsgi')
def test_distant_stats(self):
    """ApacheDistantLocalStats downloads remote stats and caches them.

    First read with no cache and a patched urlopen serving nothing
    yields no entries; after patching urlopen to serve a locally
    built bz2 file, entries come through and land in the mirror
    cache, which then serves them without the patch.
    """
    os.mkdir(mirror)
    url = 'http://example.com/mirror/daily/2008-11-18.bz2'
    stats = ApacheDistantLocalStats(mirror)
    self.assertEquals(list(stats.read_stats(url)), [])
    # let's build the stats
    local_stats = ApacheLocalStats()
    local_stats.build_monthly_stats(2008, 11, log_sample, bz2_file,
                                    compression='bz2')
    # now patching urlopen so it returns the built stats
    import urllib2
    old_open = urllib2.urlopen

    def _open(url):
        class FakeUrl(object):
            def read(self):
                # binary read: the payload is bz2-compressed
                return open(bz2_file, 'rb').read()
        return FakeUrl()

    urllib2.urlopen = _open
    try:
        read = stats.read_stats(url)
        first_entry = read.next()
        self.assertEquals(first_entry['count'], '1')
        self.assertEquals(first_entry['packagename'], 'appwsgi')
        # checking that the cache is filled
        self.assert_('2008-11-18.bz2' in os.listdir(mirror))
    finally:
        # restore urlopen even if an assertion above fails, so a
        # failure here cannot leak the patch into other tests
        urllib2.urlopen = old_open
    # the cache should be activated now
    read = stats.read_stats(url)
    first_entry = read.next()
    self.assertEquals(first_entry['count'], '1')
    self.assertEquals(first_entry['packagename'], 'appwsgi')
def test_distant_stats(self):
    """Remote stats are fetched via urlopen and cached in the mirror.

    Verifies three phases: empty result with no cache, a successful
    read through a patched urlopen, and a second read served purely
    from the on-disk cache after the patch is removed.
    """
    os.mkdir(mirror)
    url = 'http://example.com/mirror/daily/2008-11-18.bz2'
    stats = ApacheDistantLocalStats(mirror)
    self.assertEquals(list(stats.read_stats(url)), [])
    # let's build the stats
    local_stats = ApacheLocalStats()
    local_stats.build_monthly_stats(2008, 11, log_sample, bz2_file,
                                    compression='bz2')
    # now patching urlopen so it returns the built stats
    import urllib2
    old_open = urllib2.urlopen

    def _open(url):
        class FakeUrl(object):
            def read(self):
                # binary read: the payload is bz2-compressed
                return open(bz2_file, 'rb').read()
        return FakeUrl()

    urllib2.urlopen = _open
    try:
        read = stats.read_stats(url)
        first_entry = read.next()
        self.assertEquals(first_entry['count'], '1')
        self.assertEquals(first_entry['packagename'], 'appwsgi')
        # checking that the cache is filled
        self.assert_('2008-11-18.bz2' in os.listdir(mirror))
    finally:
        # restore urlopen even if an assertion above fails, so a
        # failure here cannot leak the patch into other tests
        urllib2.urlopen = old_open
    # the cache should be activated now
    read = stats.read_stats(url)
    first_entry = read.next()
    self.assertEquals(first_entry['count'], '1')
    self.assertEquals(first_entry['packagename'], 'appwsgi')
def test_apache_count(self):
    """Smoke-test main() end-to-end with pre-built distant stats.

    Builds a bz2-compressed monthly stats file, patches
    urllib2.urlopen to serve it, then checks main() runs without
    raising. No return value is inspected.
    """
    # creating stats so they can be used by main() as distant stats
    stats = ApacheLocalStats()
    stats.build_monthly_stats(2008, 11, log_sample, bz2_file,
                              compression='bz2')
    # now patching urlopen so it returns the built stats
    import urllib2
    old_open = urllib2.urlopen

    def _open(url):
        class FakeUrl(object):
            def read(self):
                # the payload is bz2 (binary): read it in 'rb' mode so
                # platforms that translate line endings in text mode
                # cannot corrupt the data
                return open(bz2_file, 'rb').read()
        return FakeUrl()

    urllib2.urlopen = _open
    # just to make sure it doesn't break
    try:
        main(config_file, log_sample)
    finally:
        # always restore the real urlopen
        urllib2.urlopen = old_open
def test_build_monthly_stats(self):
    """Monthly stats CSV has no empty fields and the expected rows."""
    sink = StringIO()
    stats = ApacheLocalStats()
    stats.build_monthly_stats(2008, 11, log_sample, sink)
    sink.seek(0)
    parsed = [line for line in csv.reader(sink)]
    # all entries have values: four fields, none blank
    for entry in parsed:
        self.assertEquals(len(entry), 4)
        self.assert_('' not in entry)
    expected = {
        0: ['appwsgi', '344.tar.bz2', 'Mozilla/5.0', '1'],
        456: ['Mtrax', 'Mtrax-2.2.07-py2.5-win32.egg', 'Firefox/3', '1'],
        486: ['OpenPGP', 'OpenPGP-0.2.3.tar.gz', 'Firefox/3', '1'],
    }
    for index, row in expected.items():
        self.assertEquals(parsed[index], row)
    self.assertEquals(len(parsed), 10043)