Ejemplo n.º 1
0
def dataset_id(bigquery_client: bigquery.Client, project_id: str):
    """Create a temporary BigQuery dataset and yield its ID.

    Teardown drops the dataset (and its contents) and tolerates the
    dataset having been removed already.
    """
    generated_id = prefixer.create_prefix()
    dataset = bigquery.Dataset(f"{project_id}.{generated_id}")
    bigquery_client.create_dataset(dataset)
    yield generated_id
    bigquery_client.delete_dataset(
        dataset, delete_contents=True, not_found_ok=True
    )
Ejemplo n.º 2
0
def dataset_id(client: bigquery.Client):
    """Create a temporary BigQuery dataset and yield its ID.

    Yields:
        The generated dataset ID (without the project prefix).
    """
    project_id = client.project
    dataset_id = prefixer.create_prefix()
    dataset = bigquery.Dataset(f"{project_id}.{dataset_id}")
    dataset = client.create_dataset(dataset)
    yield dataset_id
    # not_found_ok makes teardown idempotent: it no longer raises NotFound
    # if the test (or another cleanup path) already deleted the dataset.
    # This matches the teardown style of the sibling dataset fixtures.
    client.delete_dataset(dataset_id, delete_contents=True, not_found_ok=True)
Ejemplo n.º 3
0
def dataset_id(bigquery_client):
    """Yield a freshly created dataset ID; drop the dataset afterwards."""
    generated = prefixer.create_prefix()
    bigquery_client.create_dataset(generated)
    yield generated
    # Best-effort cleanup: removes contents and ignores a missing dataset.
    bigquery_client.delete_dataset(
        generated, delete_contents=True, not_found_ok=True
    )
Ejemplo n.º 4
0
def random_table_id(bigquery_client: bigquery.Client, project_id: str,
                    dataset_id: str):
    """Yield a brand-new fully-qualified table ID.

    The table itself is NOT created here, so the ID can be used as the
    target of a load job. Any table that ends up existing under this ID
    is removed on teardown.
    """
    table_name = prefixer.create_prefix()
    qualified_id = f"{project_id}.{dataset_id}.{table_name}"
    yield qualified_id
    bigquery_client.delete_table(qualified_id, not_found_ok=True)
Ejemplo n.º 5
0
def table_id_us_east1(bigquery_client: bigquery.Client, project_id: str,
                      dataset_id_us_east1: str):
    """Create a one-column STRING table in the us-east1 dataset.

    Yields the fully-qualified table ID; the table is dropped on teardown.
    """
    table_name = prefixer.create_prefix()
    qualified_id = f"{project_id}.{dataset_id_us_east1}.{table_name}"
    schema = [bigquery.SchemaField("string_col", "STRING")]
    table = bigquery.Table(qualified_id, schema=schema)
    bigquery_client.create_table(table)
    yield qualified_id
    bigquery_client.delete_table(table, not_found_ok=True)
Ejemplo n.º 6
0
def dataset(project_id, bq_client):
    """Create a temporary US-located dataset and yield the created Dataset.

    Yields:
        The ``bigquery.Dataset`` object returned by ``create_dataset``.
    """
    from google.cloud import bigquery

    dataset_name = prefixer.create_prefix()

    dataset_id = "{}.{}".format(project_id, dataset_name)
    dataset = bigquery.Dataset(dataset_id)
    dataset.location = "US"
    created_dataset = bq_client.create_dataset(dataset)

    yield created_dataset

    # not_found_ok keeps teardown from raising NotFound if the dataset was
    # already removed — consistent with the other dataset fixtures.
    bq_client.delete_dataset(dataset, delete_contents=True, not_found_ok=True)
Ejemplo n.º 7
0
def test_create_prefix(monkeypatch):
    """A prefix joins package, directory, frozen timestamp and random hex."""
    frozen = FakeDateTime(datetime.datetime(2021, 6, 21, 3, 32, 0))
    monkeypatch.setattr(datetime, "datetime", frozen)

    prefixer = test_utils.prefixer.Prefixer(
        "python-test-utils", "tests/unit", separator="?"
    )
    parts = prefixer.create_prefix().split("?")

    assert len(parts) == 7
    # First five parts come from the package name and directory.
    assert "?".join(parts[:5]) == "python?test?utils?tests?unit"
    # Sixth part is the (frozen) timestamp.
    assert parts[5] == "20210621033200"
    # Last part is a random lowercase-hex suffix.
    assert re.fullmatch("[0-9a-f]+", parts[6])
Ejemplo n.º 8
0
def bigquery_regional_dataset(bigquery_client, bigquery_schema):
    """Create an asia-northeast1 dataset seeded with a one-row sample table.

    Yields:
        The generated dataset ID (without the project prefix).
    """
    project_id = bigquery_client.project
    dataset_id = prefixer.create_prefix()
    dataset = bigquery.Dataset(f"{project_id}.{dataset_id}")
    dataset.location = "asia-northeast1"
    dataset = bigquery_client.create_dataset(dataset)
    sample_table_id = f"{project_id}.{dataset_id}.sample_one_row"
    job = load_sample_data(
        sample_table_id,
        bigquery_client,
        bigquery_schema,
        filename="sample_one_row.json",
    )
    # Block until the load job finishes so tests see the seeded row.
    job.result()
    yield dataset_id
    # not_found_ok makes teardown idempotent if the dataset is already gone,
    # matching the other dataset fixtures in this file.
    bigquery_client.delete_dataset(
        dataset_id, delete_contents=True, not_found_ok=True
    )
Ejemplo n.º 9
0
def bigquery_dataset(bigquery_client: bigquery.Client,
                     bigquery_schema: List[bigquery.SchemaField]):
    """Create a dataset seeded with sample tables and a view.

    Loads a ``sample`` table, a ``sample_one_row`` table, and creates a
    ``sample_view`` over ``sample``.

    Yields:
        The generated dataset ID (without the project prefix).
    """
    project_id = bigquery_client.project
    dataset_id = prefixer.create_prefix()
    dataset = bigquery.Dataset(f"{project_id}.{dataset_id}")
    dataset = bigquery_client.create_dataset(dataset)
    sample_table_id = f"{project_id}.{dataset_id}.sample"
    job1 = load_sample_data(sample_table_id, bigquery_client, bigquery_schema)
    job1.result()
    one_row_table_id = f"{project_id}.{dataset_id}.sample_one_row"
    job2 = load_sample_data(
        one_row_table_id,
        bigquery_client,
        bigquery_schema,
        filename="sample_one_row.json",
    )
    # Wait for both loads so tests see fully-populated tables.
    job2.result()
    view = bigquery.Table(f"{project_id}.{dataset_id}.sample_view")
    view.view_query = f"SELECT string FROM `{dataset_id}.sample`"
    bigquery_client.create_table(view)
    yield dataset_id
    # not_found_ok makes teardown idempotent if the dataset is already gone,
    # matching the other dataset fixtures in this file.
    bigquery_client.delete_dataset(
        dataset_id, delete_contents=True, not_found_ok=True
    )
Ejemplo n.º 10
0
def pytest_sessionstart(session):
    """Create a per-session dataset and point the dburi option at it.

    Delegates to the wrapped ``_pytest_sessionstart`` afterwards.
    """
    dataset_id = prefixer.create_prefix()
    # Rewrite the dialect test-suite database URI to target the new dataset.
    session.config.option.dburi = [f"bigquery:///{dataset_id}"]
    with contextlib.closing(google.cloud.bigquery.Client()) as client:
        client.create_dataset(dataset_id)
    _pytest_sessionstart(session)