Example #1
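Round-trip a larger dataset: create a bucket from a one-field descriptor, write 15,000 rows to BigQuery, then read them back and check equality.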
import io
import json
import os
import uuid

from googleapiclient.discovery import build
from oauth2client.client import GoogleCredentials
# Assumed import path for the BigQuery storage backend under test:
from tableschema_bigquery import Storage


def test_storage_bigdata():

    # Generate schema/data
    descriptor = {'fields': [{'name': 'id', 'type': 'integer'}]}
    rows = [[value] for value in range(15000)]

    # Push rows
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = '.credentials.json'
    credentials = GoogleCredentials.get_application_default()
    service = build('bigquery', 'v2', credentials=credentials)
    project = json.load(io.open('.credentials.json',
                                encoding='utf-8'))['project_id']
    dataset = 'resource'
    prefix = '%s_' % uuid.uuid4().hex
    storage = Storage(service, project, dataset, prefix=prefix)
    storage.create('bucket', descriptor, force=True)
    storage.write('bucket', rows)

    # Pull rows
    assert list(storage.read('bucket')) == rows
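
All of the examples here assume a Google service-account key saved as '.credentials.json'; its 'project_id' field supplies the BigQuery project, and GOOGLE_APPLICATION_CREDENTIALS points the client at the same file. A minimal sketch of the expected file shape (standard service-account key fields; every value below is a placeholder, not real data):

{
    "type": "service_account",
    "project_id": "my-project",
    "private_key_id": "...",
    "private_key": "-----BEGIN PRIVATE KEY-----\n...",
    "client_email": "my-service-account@my-project.iam.gserviceaccount.com"
}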
Example #3
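A script-style walkthrough: load a schema and CSV rows, authenticate with application-default credentials, then recreate the BigQuery tables and write the data.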
import io
import json
import os
import uuid

from googleapiclient.discovery import build
from oauth2client.client import GoogleCredentials
# Assumed import paths: topen is the early tabulator reader, Storage the
# jsontableschema-bigquery backend
from tabulator import topen
from jsontableschema_bigquery import Storage

# Get resources
articles_schema = json.load(io.open('data/articles.json', encoding='utf-8'))
articles_data = topen('data/articles.csv', with_headers=True).read()

# Prepare BigQuery
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = '.credentials.json'
credentials = GoogleCredentials.get_application_default()
service = build('bigquery', 'v2', credentials=credentials)
project = json.load(io.open('.credentials.json',
                            encoding='utf-8'))['project_id']
dataset = 'resource'
prefix = '%s_' % uuid.uuid4().hex

# Storage
storage = Storage(service, project, dataset, prefix=prefix)

# Delete tables
for table in reversed(storage.tables):
    storage.delete(table)

# Create tables
storage.create('articles', articles_schema)

# Write data to tables
storage.write('articles', articles_data)

# List tables
print(storage.tables)

# Describe tables
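The example is truncated after the last comment. A plausible completion, assuming the same describe API shown in Example #4:

print(storage.describe('articles'))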
Example #4
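A fuller storage test: besides the write/read round trip, it checks the storage repr, bucket listing, descriptor reflection, and that creating an existing bucket or deleting a non-existent one raises RuntimeError.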
import io
import json
import os
import uuid

import pytest
from googleapiclient.discovery import build
from oauth2client.client import GoogleCredentials
from tabulator import Stream
# Assumed import path; sync_descriptor and sync_rows are helpers defined
# elsewhere in the test module
from tableschema_bigquery import Storage


def test_storage():

    # Get resources
    articles_descriptor = json.load(
        io.open('data/articles.json', encoding='utf-8'))
    articles_rows = Stream('data/articles.csv', headers=1).open().read()

    # Prepare BigQuery
    os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = '.credentials.json'
    credentials = GoogleCredentials.get_application_default()
    service = build('bigquery', 'v2', credentials=credentials)
    project = json.load(io.open('.credentials.json',
                                encoding='utf-8'))['project_id']
    dataset = 'resource'
    prefix = '%s_' % uuid.uuid4().hex

    # Storage
    storage = Storage(service, project, dataset, prefix=prefix)

    # Delete buckets
    storage.delete()

    # Create buckets
    storage.create('articles', articles_descriptor)

    # Write data to buckets
    storage.write('articles', articles_rows)

    # Create new storage to use reflection only
    storage = Storage(service, project, dataset, prefix=prefix)

    # Create already existing bucket
    with pytest.raises(RuntimeError):
        storage.create('articles', articles_descriptor)

    # Assert representation
    assert repr(storage).startswith('Storage')

    # Assert buckets
    assert storage.buckets == ['articles']

    # Assert descriptors
    assert storage.describe('articles') == sync_descriptor(articles_descriptor)

    # Assert rows
    assert list(storage.read('articles')) == sync_rows(articles_descriptor,
                                                       articles_rows)

    # Delete non-existent bucket
    with pytest.raises(RuntimeError):
        storage.delete('non_existent')

    # Delete buckets
    storage.delete()
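
Both tests run against a live BigQuery dataset, so '.credentials.json' must be in place before they are invoked. Assuming the tests live in a file named test_storage.py (a hypothetical name), they can be run with pytest:

pytest test_storage.py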