# Example #1
# 0
def test_storage():
    """End-to-end test of Storage: create, write, reflect, read, delete.

    Exercises the full bucket lifecycle against the database named by the
    DATABASE_URL environment variable, then verifies reflection-only access
    through a second Storage instance.
    """

    # Get resources — use context managers so the JSON file handles are
    # closed promptly instead of leaking until garbage collection.
    with io.open('data/articles.json', encoding='utf-8') as file:
        articles_descriptor = json.load(file)
    with io.open('data/comments.json', encoding='utf-8') as file:
        comments_descriptor = json.load(file)
    # NOTE(review): Stream instances are left open; if tabulator's Stream
    # supports the context-manager protocol, wrapping these in `with` would
    # release the CSV handles too — confirm against the tabulator API.
    articles_rows = Stream('data/articles.csv', headers=1).open().read()
    comments_rows = Stream('data/comments.csv', headers=1).open().read()

    # Engine
    engine = create_engine(os.environ['DATABASE_URL'])

    # Storage
    storage = Storage(engine=engine, prefix='test_storage_')

    # Delete buckets (clean slate in case a previous run left tables behind)
    storage.delete()

    # Create buckets
    storage.create(['articles', 'comments'],
                   [articles_descriptor, comments_descriptor])

    # Recreate bucket (force=True must replace the existing one)
    storage.create('comments', comments_descriptor, force=True)

    # Write data to buckets
    storage.write('articles', articles_rows)
    storage.write('comments', comments_rows)

    # Create new storage to use reflection only
    storage = Storage(engine=engine, prefix='test_storage_')

    # Create existent bucket — must refuse without force=True
    with pytest.raises(RuntimeError):
        storage.create('articles', articles_descriptor)

    # Assert representation
    assert repr(storage).startswith('Storage')

    # Assert buckets
    assert storage.buckets == ['articles', 'comments']

    # Assert descriptors
    assert storage.describe('articles') == sync_descriptor(articles_descriptor)
    assert storage.describe('comments') == sync_descriptor(comments_descriptor)

    # Assert rows
    assert list(storage.read('articles')) == sync_rows(articles_descriptor,
                                                       articles_rows)
    assert list(storage.read('comments')) == sync_rows(comments_descriptor,
                                                       comments_rows)

    # Delete non existent bucket — must raise rather than silently succeed
    with pytest.raises(RuntimeError):
        storage.delete('non_existent')

    # Delete buckets
    storage.delete()
def test_storage():
    """End-to-end test of Storage with index creation.

    Same lifecycle as the basic storage test, but passes ``indexes_fields``
    when creating the buckets so index support is exercised as well.
    """

    # Get resources — use context managers so the JSON file handles are
    # closed promptly instead of leaking until garbage collection.
    with io.open('data/articles.json', encoding='utf-8') as file:
        articles_descriptor = json.load(file)
    with io.open('data/comments.json', encoding='utf-8') as file:
        comments_descriptor = json.load(file)
    articles_rows = Stream('data/articles.csv', headers=1).open().read()
    comments_rows = Stream('data/comments.csv', headers=1).open().read()

    # Engine
    engine = create_engine(os.environ['DATABASE_URL'])

    # Storage
    storage = Storage(engine=engine, prefix='test_storage_')

    # Delete buckets (clean slate in case a previous run left tables behind)
    storage.delete()

    # Create buckets — three single-column indexes on articles, none on comments
    storage.create(
            ['articles', 'comments'],
            [articles_descriptor, comments_descriptor],
            indexes_fields=[[['rating'], ['name'], ['created_datetime']], []])

    # Recreate bucket (force=True must replace the existing one)
    storage.create('comments', comments_descriptor, force=True)

    # Write data to buckets
    storage.write('articles', articles_rows)
    storage.write('comments', comments_rows)

    # Create new storage to use reflection only
    storage = Storage(engine=engine, prefix='test_storage_')

    # Create existent bucket — must refuse without force=True
    with pytest.raises(RuntimeError):
        storage.create('articles', articles_descriptor)

    # Assert representation
    assert repr(storage).startswith('Storage')

    # Assert buckets
    assert storage.buckets == ['articles', 'comments']

    # Assert descriptors
    assert storage.describe('articles') == sync_descriptor(articles_descriptor)
    assert storage.describe('comments') == sync_descriptor(comments_descriptor)

    # Assert rows
    assert list(storage.read('articles')) == sync_rows(articles_descriptor, articles_rows)
    assert list(storage.read('comments')) == sync_rows(comments_descriptor, comments_rows)

    # Delete non existent bucket — must raise rather than silently succeed
    with pytest.raises(RuntimeError):
        storage.delete('non_existent')

    # Delete buckets
    storage.delete()
def test_storage():
    """End-to-end Storage test using the legacy tables/topen API.

    Exercises create/write/reflect/read/delete on the database named by the
    DATABASE_URL environment variable, using ``storage.tables`` and the
    ``topen``/``convert_*`` helpers.
    """

    # Get resources — use context managers so the JSON file handles are
    # closed promptly instead of leaking until garbage collection.
    with io.open('data/articles.json', encoding='utf-8') as file:
        articles_schema = json.load(file)
    with io.open('data/comments.json', encoding='utf-8') as file:
        comments_schema = json.load(file)
    articles_data = topen('data/articles.csv', with_headers=True).read()
    comments_data = topen('data/comments.csv', with_headers=True).read()

    # Engine
    engine = create_engine(os.environ['DATABASE_URL'])

    # Storage
    storage = Storage(engine=engine, prefix='prefix_')

    # Delete tables (reverse order — presumably to respect foreign keys)
    for table in reversed(storage.tables):
        storage.delete(table)

    # Create tables
    storage.create(['articles', 'comments'], [articles_schema, comments_schema])

    # Write data to tables
    storage.write('articles', articles_data)
    storage.write('comments', comments_data)

    # Create new storage to use reflection only
    storage = Storage(engine=engine, prefix='prefix_')

    # Create existent table — must raise rather than silently recreate
    with pytest.raises(RuntimeError):
        storage.create('articles', articles_schema)

    # Get table representation
    assert repr(storage).startswith('Storage')

    # Get tables list
    assert storage.tables == ['articles', 'comments']

    # Get table schemas
    assert storage.describe('articles') == convert_schema(articles_schema)
    assert storage.describe('comments') == convert_schema(comments_schema)

    # Get table data
    assert list(storage.read('articles')) == convert_data(articles_schema, articles_data)
    assert list(storage.read('comments')) == convert_data(comments_schema, comments_data)

    # Delete tables
    for table in reversed(storage.tables):
        storage.delete(table)

    # Delete non existent table — must raise after everything was dropped
    with pytest.raises(RuntimeError):
        storage.delete('articles')
def test_storage_bigdata():
    """Write a large batch of keyed rows and read them back as plain rows."""

    # Minimal single-field integer schema plus 2500 keyed (dict-shaped) rows
    descriptor = {'fields': [{'name': 'id', 'type': 'integer'}]}
    rows = [{'id': number} for number in range(2500)]

    # Push rows
    engine = create_engine(os.environ['DATABASE_URL'])
    storage = Storage(engine=engine, prefix='test_storage_bigdata_')
    storage.create('bucket', descriptor, force=True)
    storage.write('bucket', rows, keyed=True)

    # Pull rows — storage yields list-shaped rows, so compare field-by-field
    expected = [[row['id']] for row in rows]
    assert list(storage.read('bucket')) == expected
def test_storage_bigdata():
    """Write a large batch of list-shaped rows and read them back verbatim."""

    # Minimal single-field integer schema plus 2500 list-shaped rows
    descriptor = {'fields': [{'name': 'id', 'type': 'integer'}]}
    rows = [[number] for number in range(2500)]

    # Push rows
    engine = create_engine(os.environ['DATABASE_URL'])
    storage = Storage(engine=engine, prefix='test_storage_bigdata_')
    storage.create('bucket', descriptor, force=True)
    storage.write('bucket', rows)

    # Pull rows — round trip must preserve the rows exactly
    assert list(storage.read('bucket')) == rows
def test_storage_bigdata_rollback():
    """Verify a failed bulk write leaves the bucket empty (full rollback)."""

    # 2500 valid integer rows followed by one row that cannot be stored
    descriptor = {'fields': [{'name': 'id', 'type': 'integer'}]}
    rows = [(number,) for number in range(2500)]
    rows.append(('bad-value',))

    # Attempt the write; the trailing bad row is expected to abort the batch
    engine = create_engine(os.environ['DATABASE_URL'])
    storage = Storage(engine=engine, prefix='test_storage_bigdata_rollback_')
    storage.create('bucket', descriptor, force=True)
    try:
        storage.write('bucket', rows)
    except Exception:
        pass  # the failure itself is expected; the rollback is asserted below

    # Nothing from the failed batch should have been committed
    assert list(storage.read('bucket')) == []
def test_storage_bigdata_rollback():
    """A bulk write containing one bad row must commit nothing at all."""

    # Single integer field; valid rows plus one trailing uncastable value
    descriptor = {'fields': [{'name': 'id', 'type': 'integer'}]}
    good_rows = [(number,) for number in range(2500)]
    rows = good_rows + [('bad-value',)]

    # Push rows — the write is expected to blow up on the last row
    engine = create_engine(os.environ['DATABASE_URL'])
    storage = Storage(engine=engine, prefix='test_storage_bigdata_rollback_')
    storage.create('bucket', descriptor, force=True)
    try:
        storage.write('bucket', rows)
    except Exception:
        pass  # expected; the assertion below checks the rollback happened

    # Pull rows — the bucket must be empty after the rollback
    assert list(storage.read('bucket')) == []
# Engine bound to the database named by the environment
engine = create_engine(os.environ['DATABASE_URL'])

# Storage scoped to tables carrying the 'prefix_' name prefix
storage = Storage(engine=engine, prefix='prefix_')

# Drop any leftover buckets, last-created first
for bucket in reversed(storage.buckets):
    storage.delete(bucket)

# Create a bucket per resource
# NOTE(review): articles_schema/comments_schema and the *_data variables are
# defined outside this snippet — this fragment assumes they are in scope.
storage.create(['articles', 'comments'], [articles_schema, comments_schema])

print(articles_data)

# Load the rows into the freshly created buckets
storage.write('articles', articles_data)
storage.write('comments', comments_data)

# Show the bucket list
print(storage.buckets)

# Show the reflected descriptors
print(storage.describe('articles'))
print(storage.describe('comments'))

# Dump the stored rows
print(list(storage.read('articles')))
print(list(storage.read('comments')))
# Example #9
# 0
# Read both CSV resources up front
# NOTE(review): articles_schema/comments_schema are defined outside this
# snippet — this fragment assumes they are already in scope.
articles_data = topen('data/articles.csv', with_headers=True).read()
comments_data = topen('data/comments.csv', with_headers=True).read()

# Engine bound to the database named by the environment
engine = create_engine(os.environ['DATABASE_URL'])

# Storage scoped to tables carrying the 'prefix_' name prefix
storage = Storage(engine=engine, prefix='prefix_')

# Drop any leftover tables, last-created first
for existing in reversed(storage.tables):
    storage.delete(existing)

# Create a table per resource
storage.create(['articles', 'comments'], [articles_schema, comments_schema])

# Load the rows into the freshly created tables
storage.write('articles', articles_data)
storage.write('comments', comments_data)

# Show the table list
print(storage.tables)

# Show the reflected schemas
print(storage.describe('articles'))
print(storage.describe('comments'))

# Dump the stored rows
print(list(storage.read('articles')))
print(list(storage.read('comments')))