from io import BytesIO

from nose.tools import assert_equal, assert_less_equal

# Helpers (kick_maxrss, getmaxrss_mb, make_table, find_trs, the *Output
# classes) come from the package under test; illustrative sketches of some
# of them appear further down.


def test_mem_generate_excel():

    alive = kick_maxrss()
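    # kick_maxrss's return value is deliberately kept alive; see the note
    # in test_mem_parse_giant_table below.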

    # Note: this has been tested with 1M rows; it works but it's slow.
    # 80k rows makes the point.
    N_ROWS = 80000

    table = make_table(N_ROWS, 4)

    mem_before = getmaxrss_mb()
    # outputter = ExcelOutput
    outputter = ExceleratorOutput
    # outputter = XlsxWriterOutput
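    # Each backend exposes the same minimal interface exercised below: a
    # context manager whose add_sheet(name) returns a callable that writes
    # one row per call.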

    with outputter("test/test_mem_generate_excel.xlsx") as xls:
        write_row = xls.add_sheet("hi")

        for row in find_trs(BytesIO(table)):
            write_row([e.text for e in row])

    used = getmaxrss_mb() - mem_before

    print "Used MB for 65krow:", used
    # measured at <42 MB
    assert used < 50, "Excessive memory usage"

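
# Minimal sketches of the memory helpers these tests rely on, assuming they
# wrap resource.getrusage(); the real implementations may differ. Note that
# ru_maxrss is reported in kilobytes on Linux but bytes on macOS.
import resource


def getmaxrss_mb():
    # Peak resident set size of this process so far, in MB (Linux units).
    return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / 1024.0


def kick_maxrss(size_mb=50):
    # Raise the RSS high-water mark past interpreter start-up noise by
    # allocating a large, fully-touched buffer and returning it so the
    # caller can keep it alive; later getmaxrss_mb() deltas then reflect
    # only genuinely new memory.
    return bytearray(size_mb * 1024 * 1024)

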
def test_mem_parse_giant_table():

    # Note: this test really wants to be run by itself in a process, since it
    #       measures the *max* RSS of the whole program. If Python has already
    #       allocated (and freed) a large object elsewhere, the test will lie
    #       to us. Hence kick_maxrss().
    alive = kick_maxrss()

    # Note: this has been tested with 1M rows; it works but it's slow.
    # 100k rows makes the point.
    N_ROWS = 100000

    table = make_table(N_ROWS, 4)

    mem_before = getmaxrss_mb()

    n = 0
    for row in find_trs(BytesIO(table)):
        n += 1

    used = getmaxrss_mb() - mem_before

    assert_equal(N_ROWS, n)

    # Check that we didn't use more than 1MB to parse the table.
    assert_less_equal(used, 1)
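

# A plausible constant-memory find_trs, assuming it is built on lxml's
# iterparse; the real parser may well differ. The <= 1 MB bound asserted
# above only holds if each <tr> is discarded as soon as it is consumed.
from lxml import etree


def find_trs_sketch(fileobj):
    for _event, tr in etree.iterparse(fileobj, tag="tr", html=True):
        yield tr
        # By the time we resume, the caller has finished with this row:
        # free its subtree and any fully-parsed earlier siblings.
        tr.clear()
        while tr.getprevious() is not None:
            del tr.getparent()[0]


# Equally hypothetical make_table, inferred from its use above: render an
# n_rows x n_cols HTML table as bytes.
def make_table_sketch(n_rows, n_cols):
    row = b"<tr>" + b"<td>x</td>" * n_cols + b"</tr>"
    return b"<table>" + row * n_rows + b"</table>"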