Example #1
def test_default_no_data_leakage(setup):
    import google.auth.credentials
    from google.cloud.bigquery import client
    from google.cloud.bigquery import job

    # Use mocked credentials so the client carries no real auth data.
    mock_credentials = mock.Mock(spec=google.auth.credentials.Credentials)
    test_client = client.Client(project="test_project",
                                credentials=mock_credentials,
                                location="test_location")

    expected_attributes = {
        "foo": "baz",
        "db.system": "BigQuery",
        "db.name": "test_project",
        "location": "test_location",
    }
    with opentelemetry_tracing.create_span(TEST_SPAN_NAME,
                                           attributes=TEST_SPAN_ATTRIBUTES,
                                           client=test_client) as span:
        assert span.name == TEST_SPAN_NAME
        assert span.attributes == expected_attributes

    test_job_reference = job._JobReference(job_id="test_job_id",
                                           project="test_project_id",
                                           location="test_location")
    test_client = client.Client(project="test_project",
                                credentials=mock_credentials,
                                location="test_location")
    test_job = job._AsyncJob(job_id=test_job_reference, client=test_client)

    expected_attributes = {
        "db.system": "BigQuery",
        "db.name": "test_project_id",
        "location": "test_location",
        "num_child_jobs": 0,
        "job_id": "test_job_id",
        "foo": "baz",
        "hasErrors": False,
    }

    with opentelemetry_tracing.create_span(TEST_SPAN_NAME,
                                           attributes=TEST_SPAN_ATTRIBUTES,
                                           job_ref=test_job) as span:
        assert span.name == TEST_SPAN_NAME
        assert span.attributes == expected_attributes
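
Both this test and the one in Example #3 rely on module-level imports, constants, and a setup fixture that are not shown in the snippet. Below is a minimal sketch of that context, assuming a plain OpenTelemetry TracerProvider; the value of TEST_SPAN_NAME is a placeholder, while TEST_SPAN_ATTRIBUTES = {"foo": "baz"} is implied by the expected_attributes dictionaries above. The library's own test suite may configure tracing differently.

# Sketch of the assumed module-level context for Examples #1 and #3.
from unittest import mock

import pytest
from opentelemetry import trace
from opentelemetry.sdk.trace import TracerProvider

from google.cloud.bigquery import opentelemetry_tracing

TEST_SPAN_NAME = "bar"                 # placeholder name
TEST_SPAN_ATTRIBUTES = {"foo": "baz"}  # implied by expected_attributes


@pytest.fixture
def setup():
    # Install a real TracerProvider so create_span yields recording spans.
    trace.set_tracer_provider(TracerProvider())
    yield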
Example #2
def create_table(schema_file_name):
    # Creates a table named bq_table_inventory to store table metadata
    credentials2 = service_account.Credentials.from_service_account_file(
        SERVICE_ACCOUNT_KEY_FILE_PATH2)
    ct_bigquery_client = client.Client(project=PROJECT_ID2,
                                       credentials=credentials2)
    table1 = Table.from_string(PROJECT_ID2 + "." + DATASET_ID + "." + TABLE_ID)
    table1.schema = prepare_schema(schema_file_name)
    table1.partitioning_type = 'DAY'
    ct_bigquery_client.create_table(table1, exists_ok=True)
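
create_table depends on configuration constants and a prepare_schema helper defined elsewhere in the script (part of prepare_schema is visible in Example #4). A hedged sketch of that surrounding context, with placeholder values and a schema file of comma-separated name,type,mode,description lines, could look like this:

# Hypothetical context for create_table(); the constant values are placeholders,
# and prepare_schema is reconstructed loosely from the fragment in Example #4.
from google.cloud.bigquery import client
from google.cloud.bigquery.schema import SchemaField
from google.cloud.bigquery.table import Table
from google.oauth2 import service_account

SERVICE_ACCOUNT_KEY_FILE_PATH2 = "/path/to/service_account_key.json"  # placeholder
PROJECT_ID2 = "my-project"        # placeholder
DATASET_ID = "my_dataset"         # placeholder
TABLE_ID = "bq_table_inventory"   # from the comment in create_table()


def prepare_schema(schema_file_name):
    # Parse "name,type,mode,description" lines into SchemaField objects.
    table_schema = []
    try:
        with open(schema_file_name) as f:
            for line in f:
                field_parts = line.split(',')
                table_schema.append(SchemaField(field_parts[0].strip(),
                                                field_parts[1].strip(),
                                                field_parts[2].strip(),
                                                field_parts[3].strip()))
    except IOError:
        print('Unable to open/find the {} schema file'.format(schema_file_name))
    return table_schema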
Example #3
def test_span_creation_error(setup):
    import google.auth.credentials
    from google.cloud.bigquery import client
    from google.api_core.exceptions import GoogleAPICallError, InvalidArgument

    mock_credentials = mock.Mock(spec=google.auth.credentials.Credentials)
    test_client = client.Client(project="test_project",
                                credentials=mock_credentials,
                                location="test_location")

    expected_attributes = {
        "foo": "baz",
        "db.system": "BigQuery",
        "db.name": "test_project",
        "location": "test_location",
    }
    with pytest.raises(GoogleAPICallError):
        with opentelemetry_tracing.create_span(TEST_SPAN_NAME,
                                               attributes=TEST_SPAN_ATTRIBUTES,
                                               client=test_client) as span:
            assert span.name == TEST_SPAN_NAME
            assert span.attributes == expected_attributes
            raise InvalidArgument("test_error")
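
The pytest.raises(GoogleAPICallError) block catches the InvalidArgument raised inside the span because, in google.api_core.exceptions, InvalidArgument is a subclass of GoogleAPICallError, which is the type the test expects to propagate out of the with block. A quick check of that relationship:

from google.api_core.exceptions import GoogleAPICallError, InvalidArgument

# InvalidArgument derives from GoogleAPICallError, so pytest.raises on the
# broader type catches the error after it propagates out of the span.
assert issubclass(InvalidArgument, GoogleAPICallError)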
Example #4
                                                     field_parts[1].strip(),
                                                     field_parts[2].strip(),
                                                     field_parts[3].strip())
                table_schema.append(table_field)
        f.close()
    except IOError:
        print(
            'Unable to open/find the {} schema file'.format(schema_file_name))
    return table_schema


# Starts here
create_table("table.schema")
credentials = service_account.Credentials.from_service_account_file(
    SERVICE_ACCOUNT_KEY_FILE_PATH)
bq_client = client.Client(project=PROJECT_ID, credentials=credentials)
# Get the list of all datasets in the target project
dataset_list = bq_client.list_datasets(project=PROJECT_ID)

# Iterate over each dataset in the project
for i_dataset in dataset_list:
    v_dataset = bq_dataset.DatasetReference(project=PROJECT_ID,
                                            dataset_id=i_dataset.dataset_id)
    print("Processing Dataset : {}".format(i_dataset.dataset_id))
    table_list = bq_client.list_tables(dataset=v_dataset)
    # if table_list.page_number > 0:
    print("Processing Tables metadata")
    # table_metadata holds one dict of table attributes per table
    table_metadata = []
    for i_table in table_list:
        table = bq_client.get_table(i_table.reference)