def test_storage_autoincrement_mapping(dialect, database_url):
    """Autoincrement given as a bucket->column mapping applies only to the
    buckets named in the mapping; other buckets keep their schema as-is."""
    schema = {'fields': [{'name': 'name', 'type': 'string'}]}
    rows = [['london'], ['paris'], ['rome']]

    # Write the same rows to two buckets; only bucket1 gets an 'id' column
    engine = create_engine(database_url)
    storage = Storage(engine,
                      autoincrement={'bucket1': 'id'},
                      prefix='test_storage_autoincrement_mapping_')
    storage.create(['bucket1', 'bucket2'], [schema, schema], force=True)
    for bucket in ('bucket1', 'bucket2'):
        storage.write(bucket, rows)

    # bucket1 rows come back prefixed with generated ids; bucket2 unchanged
    expected_with_ids = [[index + 1, name]
                         for index, (name,) in enumerate(rows)]
    assert list(storage.read('bucket1')) == expected_with_ids
    assert list(storage.read('bucket2')) == rows
def test_storage_write_generator():
    """Writing with as_generator=True yields one item per written row."""
    # Fresh 'comments' bucket (foreign keys stripped for the sqlite backend)
    engine = create_engine(os.environ['SQLITE_URL'])
    storage = Storage(engine=engine, prefix='test_storage_')
    storage.create('comments', remove_fk(COMMENTS['schema']), force=True)

    # Consuming the generator is what actually performs the write
    written = list(
        storage.write('comments', COMMENTS['data'], as_generator=True))

    # One yielded item per source row, and the data round-trips
    assert len(written) == 2
    assert storage.read('comments') == cast(COMMENTS)['data']
def test_storage_bigdata_rollback():
    """A failing bulk write must leave the bucket empty (full rollback)."""
    schema = {'fields': [{'name': 'id', 'type': 'integer'}]}
    # 2500 valid rows followed by a single value that cannot cast to integer
    rows = [(value,) for value in range(2500)]
    rows.append(('bad-value',))

    # The write is expected to blow up; only the aftermath matters here
    engine = create_engine(os.environ['POSTGRES_URL'])
    storage = Storage(engine=engine, prefix='test_storage_bigdata_rollback_')
    storage.create('bucket', schema, force=True)
    try:
        storage.write('bucket', rows)
    except Exception:
        # Deliberately swallowed: the assertion below checks the rollback
        pass

    # Nothing from the failed batch should have been committed
    assert list(storage.read('bucket')) == []
def test_storage_bigdata():
    """Keyed writes larger than a single insert batch round-trip intact."""
    schema = {'fields': [{'name': 'id', 'type': 'integer'}]}
    rows = [{'id': value} for value in range(2500)]

    # Write all 2500 keyed rows in one call
    engine = create_engine(os.environ['POSTGRES_URL'])
    storage = Storage(engine=engine, prefix='test_storage_bigdata_')
    storage.create('bucket', schema, force=True)
    storage.write('bucket', rows, keyed=True)

    # Rows come back as listed values in the original order
    assert list(storage.read('bucket')) == [[row['id']] for row in rows]
def test_storage_limited_databases(dialect, database_url):
    """Full create/write/reflect/read/delete cycle against databases with
    limited type support.

    Foreign keys are stripped (``remove_fk``) before creation because these
    backends don't support them, and the reflection assertions below expect
    type downgrades/fallbacks (e.g. objects, geodata, durations become
    strings).  The ``dialect`` fixture only matters for the boolean/integer
    difference on sqlite.
    """

    # Create storage
    engine = create_engine(database_url)
    storage = Storage(engine=engine, prefix='test_storage_')

    # Delete buckets — start from a clean slate
    storage.delete()

    # Create buckets (FKs removed; 'comments' recreated with force=True)
    storage.create(
        ['articles', 'comments'],
        [remove_fk(ARTICLES['schema']),
         remove_fk(COMMENTS['schema'])],
        indexes_fields=[[['rating'], ['name']], []])
    storage.create('comments', remove_fk(COMMENTS['schema']), force=True)
    storage.create('temporal', TEMPORAL['schema'])
    storage.create('location', LOCATION['schema'])
    storage.create('compound', COMPOUND['schema'])

    # Write data
    storage.write('articles', ARTICLES['data'])
    storage.write('comments', COMMENTS['data'])
    storage.write('temporal', TEMPORAL['data'])
    storage.write('location', LOCATION['data'])
    storage.write('compound', COMPOUND['data'])

    # Create new storage to use reflection only
    storage = Storage(engine=engine, prefix='test_storage_')

    # Create existent bucket — must refuse without force=True
    with pytest.raises(tableschema.exceptions.StorageError):
        storage.create('articles', ARTICLES['schema'])

    # Assert buckets (reflected listing is alphabetical here)
    assert storage.buckets == [
        'articles', 'comments', 'compound', 'location', 'temporal'
    ]

    # Assert schemas — reflected, so types are normalized by the backend
    assert storage.describe('articles') == {
        'fields': [
            {
                'name': 'id',
                'type': 'integer',
                'constraints': {
                    'required': True
                }
            },
            {
                'name': 'parent',
                'type': 'integer'
            },
            {
                'name': 'name',
                'type': 'string'
            },
            {
                'name': 'current',
                # sqlite keeps a boolean affinity; other limited backends
                # store it as integer
                'type': 'boolean' if dialect == 'sqlite' else 'integer'
            },
            {
                'name': 'rating',
                'type': 'number'
            },
        ],
        'primaryKey':
        'id',
        # foreignKeys not supported
    }
    assert storage.describe('comments') == {
        'fields': [
            {
                'name': 'entry_id',
                'type': 'integer',
                'constraints': {
                    'required': True
                }
            },
            {
                'name': 'comment',
                'type': 'string'
            },
            {
                'name': 'note',
                'type': 'string'
            },  # type downgrade
        ],
        'primaryKey':
        'entry_id',
        # foreignKeys not supported
    }
    assert storage.describe('temporal') == {
        'fields': [
            {
                'name': 'date',
                'type': 'date'
            },
            {
                'name': 'date_year',
                'type': 'date'
            },  # format removal
            {
                'name': 'datetime',
                'type': 'datetime'
            },
            {
                'name': 'duration',
                'type': 'string'
            },  # type fallback
            {
                'name': 'time',
                'type': 'time'
            },
            {
                'name': 'year',
                'type': 'integer'
            },  # type downgrade
            {
                'name': 'yearmonth',
                'type': 'string'
            },  # type fallback
        ],
    }
    assert storage.describe('location') == {
        'fields': [
            {
                'name': 'location',
                'type': 'string'
            },  # type fallback
            {
                'name': 'geopoint',
                'type': 'string'
            },  # type fallback
        ],
    }
    assert storage.describe('compound') == {
        'fields': [
            {
                'name': 'stats',
                'type': 'string'
            },  # type fallback
            {
                'name': 'persons',
                'type': 'string'
            },  # type fallback
        ],
    }

    # Assert data — skip fields whose values were stringified on write and
    # therefore don't cast back losslessly
    assert storage.read('articles') == cast(ARTICLES)['data']
    assert storage.read('comments') == cast(COMMENTS)['data']
    assert storage.read('temporal') == cast(TEMPORAL,
                                            skip=['duration',
                                                  'yearmonth'])['data']
    assert storage.read('location') == cast(LOCATION,
                                            skip=['geojson',
                                                  'geopoint'])['data']
    assert storage.read('compound') == cast(COMPOUND, skip=['array',
                                                            'object'])['data']

    # Assert data with forced schema — re-describing with the original
    # schema restores full-fidelity reads
    storage.describe('compound', COMPOUND['schema'])
    assert storage.read('compound') == cast(COMPOUND)['data']

    # Delete non existent bucket
    with pytest.raises(tableschema.exceptions.StorageError):
        storage.delete('non_existent')

    # Delete buckets
    storage.delete()
# NOTE(review): scraped script fragment — `topen`, `articles_schema` and
# `comments_schema` are defined in text not visible here; verify they are
# in scope before reusing this example.
articles_data = topen('data/articles.csv', with_headers=True).read()
comments_data = topen('data/comments.csv', with_headers=True).read()

# Engine
engine = create_engine(os.environ['POSTGRES_URL'])

# Storage
storage = Storage(engine=engine, prefix='prefix_')

# Delete tables — reversed so dependents go before their referenced tables
for table in reversed(storage.tables):
    storage.delete(table)

# Create tables
storage.create(['articles', 'comments'], [articles_schema, comments_schema])

# Write data to tables
storage.write('articles', articles_data)
storage.write('comments', comments_data)

# List tables
print(storage.tables)

# Describe tables
print(storage.describe('articles'))
print(storage.describe('comments'))

# Read data from tables
print(list(storage.read('articles')))
print(list(storage.read('comments')))
# Example #7
            "name": "id",
            "type": "integer",
            "constraints": {
                "required": true
            }
        },
        {
            "name": "name",
            "type": "string"
        },
        {
            "name": "ssn",
            "type": "string",
            "protected": true
        }
    ]
}
"""

# NOTE(review): continuation of a scraped example — `records_schema` (the
# truncated JSON string above) and `encryptedDefintion` are defined in text
# not fully visible here; the name `encryptedDefintion` looks like a typo
# for `encryptedDefinition` — confirm against its definition.
storage.create(['records'], [json.loads(records_schema)], encrypted_definitions=encryptedDefintion)

records_data = [
    [ 1, "John", "123456789"]
]

storage.write('records', records_data)

print(storage.describe('records'))

print(list(storage.read('records')))
# Example #8
def test_storage():
    """Full create/write/reflect/read/delete cycle against Postgres.

    Unlike the limited-databases variant, foreign keys are kept and objects
    reflect back as 'object' (only geopoint/duration/yearmonth fall back to
    strings).
    """

    # Create storage
    engine = create_engine(os.environ['POSTGRES_URL'])
    storage = Storage(engine=engine, prefix='test_storage_')

    # Delete buckets — start from a clean slate
    storage.delete()

    # Create buckets ('comments' recreated with force=True)
    storage.create(['articles', 'comments'],
                   [ARTICLES['schema'], COMMENTS['schema']],
                   indexes_fields=[[['rating'], ['name']], []])
    storage.create('comments', COMMENTS['schema'], force=True)
    storage.create('temporal', TEMPORAL['schema'])
    storage.create('location', LOCATION['schema'])
    storage.create('compound', COMPOUND['schema'])

    # Write data
    storage.write('articles', ARTICLES['data'])
    storage.write('comments', COMMENTS['data'])
    storage.write('temporal', TEMPORAL['data'])
    storage.write('location', LOCATION['data'])
    storage.write('compound', COMPOUND['data'])

    # Create new storage to use reflection only
    storage = Storage(engine=engine, prefix='test_storage_')

    # Create existent bucket — must refuse without force=True
    with pytest.raises(tableschema.exceptions.StorageError):
        storage.create('articles', ARTICLES['schema'])

    # Assert buckets — NOTE(review): 'comments' listed last, presumably
    # because force=True recreated it after the other tables; confirm the
    # listing order semantics if this assertion flakes
    assert storage.buckets == [
        'articles', 'compound', 'location', 'temporal', 'comments'
    ]

    # Assert schemas — 'articles' reflects back exactly (FKs supported here)
    assert storage.describe('articles') == ARTICLES['schema']
    assert storage.describe('comments') == {
        'fields': [
            {
                'name': 'entry_id',
                'type': 'integer',
                'constraints': {
                    'required': True
                }
            },
            {
                'name': 'comment',
                'type': 'string'
            },
            {
                'name': 'note',
                'type': 'string'
            },  # type downgrade
        ],
        'primaryKey':
        'entry_id',
        'foreignKeys': [
            {
                'fields': 'entry_id',
                'reference': {
                    'resource': 'articles',
                    'fields': 'id'
                }
            },
        ],
    }
    assert storage.describe('temporal') == {
        'fields': [
            {
                'name': 'date',
                'type': 'date'
            },
            {
                'name': 'date_year',
                'type': 'date'
            },  # format removal
            {
                'name': 'datetime',
                'type': 'datetime'
            },
            {
                'name': 'duration',
                'type': 'string'
            },  # type fallback
            {
                'name': 'time',
                'type': 'time'
            },
            {
                'name': 'year',
                'type': 'integer'
            },  # type downgrade
            {
                'name': 'yearmonth',
                'type': 'string'
            },  # type fallback
        ],
    }
    assert storage.describe('location') == {
        'fields': [
            {
                'name': 'location',
                'type': 'object'
            },  # type downgrade
            {
                'name': 'geopoint',
                'type': 'string'
            },  # type fallback
        ],
    }
    assert storage.describe('compound') == {
        'fields': [
            {
                'name': 'stats',
                'type': 'object'
            },
            {
                'name': 'persons',
                'type': 'object'
            },  # type downgrade
        ],
    }

    # Assert data — skip only the fields that were stringified on write
    assert storage.read('articles') == cast(ARTICLES)['data']
    assert storage.read('comments') == cast(COMMENTS)['data']
    assert storage.read('temporal') == cast(TEMPORAL,
                                            skip=['duration',
                                                  'yearmonth'])['data']
    assert storage.read('location') == cast(LOCATION,
                                            skip=['geopoint'])['data']
    assert storage.read('compound') == cast(COMPOUND)['data']

    # Assert data with forced schema — describing with the original schema
    # must not change already-lossless reads
    storage.describe('compound', COMPOUND['schema'])
    assert storage.read('compound') == cast(COMPOUND)['data']

    # Delete non existent bucket
    with pytest.raises(tableschema.exceptions.StorageError):
        storage.delete('non_existent')

    # Delete buckets
    storage.delete()