Example #1
import datetime
import math
import random

import pyarrow

# generate_data, MockConnection, MockCursor, MockDownloader, ArrowResult and
# EPSILON are provided by the surrounding test module.


def test_fdb_result_data_empty_pandas_fetch_all():
    raw_response = {
        'rowsetBase64': '',
        'rowtype': [{"name": "c1"}, {"name": "c2"}],
        'chunks': ['f1', 'f2', 'f3']
    }

    random.seed(datetime.datetime.now().timestamp())  # seed from the current time so each run uses fresh data
    column_meta = [
        {"logicalType": "FIXED", "precision": "38", "scale": "0"},
        {"logicalType": "FIXED", "precision": "38", "scale": "0"}
    ]

    def int64_generator():
        # draw values from the full signed 64-bit range
        return random.randint(-9223372036854775808, 9223372036854775807)

    # three chunks, each an Arrow stream of 9 record batches with 10 rows apiece;
    # expected_chunk_result is indexed as [chunk][batch][column][row]
    chunk_count = 3
    expected_chunk_result = chunk_count * [None]
    arrow_stream = chunk_count * [None]
    batch_row_count = 10
    batch_count = 9

    for i in range(chunk_count):
        arrow_stream[i], expected_chunk_result[i] = generate_data([pyarrow.int64(), pyarrow.int64()],
                                                                  column_meta,
                                                                  int64_generator,
                                                                  batch_count,
                                                                  batch_row_count)

    con = MockConnection()
    cur = MockCursor(con)
    res = ArrowResult(raw_response,
                      cur,
                      use_dict_result=False,
                      _chunk_downloader=MockDownloader(arrow_stream))
    cur._query_result_format = 'arrow'
    cur._result = res

    df = res._fetch_pandas_all()

    # assert row count
    assert df.shape[0] == batch_row_count * batch_count * chunk_count
    # assert column count
    assert df.shape[1] == 2

    for i in range(2):
        col = df.iloc[:, i]

        for idx, val in col.items():
            # map the flat row index back to (chunk, batch, row-within-batch)
            expected_chunk, expected_row_index_in_chunk = divmod(idx, batch_row_count * batch_count)
            expected_batch_index, expected_row_index_in_batch = divmod(expected_row_index_in_chunk, batch_row_count)

            expected_val = \
                expected_chunk_result[expected_chunk][expected_batch_index][i][expected_row_index_in_batch]

            if math.isnan(val):
                assert expected_val is None
            else:
                assert abs(expected_val - val) < EPSILON
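
Both examples call a generate_data helper that is not shown here. A minimal sketch of what such a helper could look like follows, assuming it builds the requested record batches from the value generator, serializes them as an Arrow IPC stream, and returns the raw bytes together with the generated values laid out as expected[batch][column][row] (the column names "c1"/"c2" and the bytes return type are assumptions; the real helper in the test module may differ):

import io

import pyarrow


def generate_data(column_types, column_meta, value_generator, batch_count, batch_row_count):
    """Hypothetical sketch: build `batch_count` record batches of
    `batch_row_count` rows each, serialize them as an Arrow IPC stream,
    and return (stream_bytes, expected) where expected[batch][column][row]
    holds the generated Python values, matching how the tests index it."""
    fields = [
        pyarrow.field("c{}".format(i + 1), typ, metadata=column_meta[i])
        for i, typ in enumerate(column_types)
    ]
    schema = pyarrow.schema(fields)

    expected = []
    sink = io.BytesIO()
    writer = pyarrow.RecordBatchStreamWriter(sink, schema)
    for _ in range(batch_count):
        # one Python list of values per column
        columns = [
            [value_generator() for _ in range(batch_row_count)]
            for _ in column_types
        ]
        arrays = [
            pyarrow.array(col, type=typ)
            for col, typ in zip(columns, column_types)
        ]
        writer.write_batch(pyarrow.RecordBatch.from_arrays(arrays, schema=schema))
        expected.append(columns)
    writer.close()

    return sink.getvalue(), expected

A stream produced this way can be read back with pyarrow.ipc.open_stream, which is presumably what the MockDownloader/ArrowResult pair does with each chunk.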
Example #2
def test_fdb_result_data_empty_row_fetch():
    raw_response = {
        'rowsetBase64': '',
        'rowtype': [{"name": "c1"}, {"name": "c2"}],
        'chunks': ['f1', 'f2', 'f3']
    }

    random.seed(datetime.datetime.now().timestamp())
    column_meta = [
        {"logicalType": "FIXED", "precision": "38", "scale": "0"},
        {"logicalType": "FIXED", "precision": "38", "scale": "0"}
    ]

    def int64_generator():
        return random.randint(-9223372036854775808, 9223372036854775807)

    chunk_count = 3
    expected_chunk_result = chunk_count * [None]
    arrow_stream = chunk_count * [None]
    batch_row_count = 10
    batch_count = 9

    for i in range(chunk_count):
        arrow_stream[i], expected_chunk_result[i] = generate_data([pyarrow.int64(), pyarrow.int64()],
                                                                  column_meta,
                                                                  int64_generator,
                                                                  batch_count,
                                                                  batch_row_count)

    con = MockConnection()
    cur = MockCursor(con)
    res = ArrowResult(raw_response,
                      cur,
                      use_dict_result=False,
                      _chunk_downloader=MockDownloader(arrow_stream))
    cur._query_result_format = 'arrow'
    cur._result = res

    count = 0
    while True:
        try:
            data = next(res)

            expected_chunk, expected_row_index_in_chunk = divmod(count, batch_row_count * batch_count)
            expected_batch_index, expected_row_index_in_batch = divmod(expected_row_index_in_chunk, batch_row_count)

            assert data[0] == \
                expected_chunk_result[expected_chunk][expected_batch_index][0][expected_row_index_in_batch]
            assert data[1] == \
                expected_chunk_result[expected_chunk][expected_batch_index][1][expected_row_index_in_batch]

            count += 1

        except StopIteration:
            break

    assert count == chunk_count * batch_row_count * batch_count