def test_sort_buffered_independent():

    table = (('foo', 'bar'),
             ('C', 2),
             ('A', 9),
             ('A', 6),
             ('F', 1),
             ('D', 10))
    expectation = (('foo', 'bar'),
                   ('F', 1),
                   ('C', 2),
                   ('A', 6),
                   ('A', 9),
                   ('D', 10))

    result = sort(table, 'bar', buffersize=4)
    nrows(result)  # cause data to be cached
    # check that two row iterators are independent, i.e., consuming rows
    # from one does not affect the other
    it1 = iter(result)
    it2 = iter(result)
    eq_(expectation[0], next(it1))
    eq_(expectation[1], next(it1))
    eq_(expectation[0], next(it2))
    eq_(expectation[1], next(it2))
    eq_(expectation[2], next(it2))
    eq_(expectation[2], next(it1))

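# Context sketch, not part of the original suite (_example_fresh_iterators is
# a hypothetical helper): petl tables are re-iterable containers, so each
# call to iter() starts a fresh pass over the (possibly disk-cached) sorted
# data. That is the contract the independence check above exercises.
def _example_fresh_iterators():
    tbl = sort((('foo',), ('b',), ('a',)), 'foo')
    it1, it2 = iter(tbl), iter(tbl)
    assert next(it1) == next(it2)  # both passes begin at the header row
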
def test_progress():
    # make sure progress doesn't raise exception
    table = (('foo', 'bar', 'baz'),
             ('a', 1, True),
             ('b', 2, True),
             ('b', 3))
    nrows(progress(table))

def test_fromxml_url():
    # check internet connection
    try:
        url = 'http://raw.githubusercontent.com/petl-developers/petl/master/petl/test/resources/test.xml'
        urlopen(url)
        import pkg_resources
        filename = pkg_resources.resource_filename(
            'petl', 'test/resources/test.xml')
    except Exception as e:
        pytest.skip('SKIP test_fromxml_url: %s' % e)
    else:
        actual = fromxml(url, 'pydev_property',
                         {'name': ('.', 'name'), 'prop': '.'})
        assert nrows(actual) > 0
        expect = fromxml(filename, 'pydev_property',
                         {'name': ('.', 'name'), 'prop': '.'})
        ieq(expect, actual)

def test_sort_buffered_cleanup():

    table = (('foo', 'bar'),
             ('C', 2),
             ('A', 9),
             ('A', 6),
             ('F', 1),
             ('D', 10))

    result = sort(table, 'bar', buffersize=2)
    debug('initially filecache should be empty')
    eq_(None, result._filecache)
    debug('pull rows through, should populate file cache')
    eq_(5, nrows(result))
    eq_(3, len(result._filecache))
    debug('check all files exist')
    filenames = _get_names(result._filecache)
    for fn in filenames:
        assert os.path.exists(fn), fn
    debug('delete object and garbage collect')
    del result
    gc.collect()
    debug('check all files have been deleted')
    for fn in filenames:
        assert not os.path.exists(fn), fn

def test_sort_buffered_cleanup_open_iterator():

    table = (('foo', 'bar'),
             ('C', 2),
             ('A', 9),
             ('A', 6),
             ('F', 1),
             ('D', 10))

    # check if cleanup is robust against open iterators
    result = sort(table, 'bar', buffersize=2)
    debug('pull rows through, should populate file cache')
    eq_(5, nrows(result))
    eq_(3, len(result._filecache))
    debug('check all files exist')
    filenames = _get_names(result._filecache)
    for fn in filenames:
        assert os.path.exists(fn), fn
    debug(filenames)
    debug('open an iterator')
    it = iter(result)
    next(it)
    next(it)
    debug('delete objects and garbage collect')
    del result
    del it
    gc.collect()
    for fn in filenames:
        assert not os.path.exists(fn), fn

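# Illustrative sketch only, an assumption about the mechanism rather than
# petl's actual implementation: cleanup that survives abandoned iterators is
# typically achieved by tying temp-file removal to the lifetime of the cache
# object itself, e.g. via weakref.finalize. _ExampleCachedChunk is a
# hypothetical class, not part of petl.
import tempfile
import weakref


class _ExampleCachedChunk(object):
    """Holds one spilled chunk; the temp file dies with this object."""

    def __init__(self, rows):
        fd, self.path = tempfile.mkstemp(suffix='.example')
        with os.fdopen(fd, 'w') as f:
            for row in rows:
                f.write(repr(row) + '\n')
        # runs os.remove(self.path) when this object is garbage-collected,
        # even if an iterator reading the file was abandoned mid-stream
        self._finalizer = weakref.finalize(self, os.remove, self.path)
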
def test_fromxml_url():
    tbl = fromxml('http://feeds.bbci.co.uk/news/rss.xml', './/item', 'title')
    assert nrows(tbl) > 0

def test_fromgff3_region():
    tbl_features = fromgff3('fixture/sample.sorted.gff.gz',
                            region='apidb|MAL5')
    eq_(7, nrows(tbl_features))
    tbl_features = fromgff3('fixture/sample.sorted.gff.gz',
                            region='apidb|MAL5:1289593-1289595')
    eq_(4, nrows(tbl_features))

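# Note, an assumption about the fixture setup: region queries like those
# above are backed by tabix, so the GFF fixture must be sorted,
# bgzip-compressed and indexed. A hypothetical preparation step using pysam
# (the uncompressed fixture path is assumed, not taken from the suite):
def _example_index_gff():
    import pysam
    # compresses fixture/sample.sorted.gff to .gz and writes a .tbi index
    pysam.tabix_index('fixture/sample.sorted.gff', preset='gff', force=True)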