Example #1
def test_bigquery_magic_w_maximum_bytes_billed_w_context_setter():
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension("google.cloud.bigquery")
    magics.context._project = None

    magics.context.default_query_job_config = job.QueryJobConfig(
        maximum_bytes_billed=10203
    )

    project = "test-project"
    job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE)
    job_reference["projectId"] = project
    query = "SELECT 17 AS num"
    resource = copy.deepcopy(QUERY_RESOURCE)
    resource["jobReference"] = job_reference
    resource["configuration"]["query"]["query"] = query
    data = {"jobReference": job_reference, "totalRows": 0, "rows": []}
    credentials_mock = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )
    default_patch = mock.patch(
        "google.auth.default", return_value=(credentials_mock, "general-project")
    )
    conn = magics.context._connection = make_connection(resource, data)
    list_rows_patch = mock.patch(
        "google.cloud.bigquery.client.Client.list_rows",
        return_value=google.cloud.bigquery.table._EmptyRowIterator(),
    )
    with list_rows_patch, default_patch:
        ip.run_cell_magic("bigquery", "", query)

    # The maximum_bytes_billed set via the context's default job config should
    # be forwarded in the query request payload.
    _, req = conn.api_request.call_args_list[0]
    sent_config = req["data"]["configuration"]["query"]
    assert sent_config["maximumBytesBilled"] == "10203"
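These snippets reference names defined elsewhere in the test module (make_connection, JOB_REFERENCE_RESOURCE, QUERY_RESOURCE) plus the usual imports. Below is a minimal sketch of what those definitions could look like for the examples to run; the concrete values are placeholders, not the ones from the real test file.

import copy

import IPython
import mock
import six

import google.auth.credentials
import google.cloud.bigquery
import google.cloud.bigquery.table
from google.cloud.bigquery import job, magics

# Placeholder fixtures; the real test module defines its own values.
JOB_REFERENCE_RESOURCE = {"projectId": "some-project", "jobId": "some-random-id"}

TABLE_REFERENCE_RESOURCE = {
    "projectId": "some-project",
    "datasetId": "ds",
    "tableId": "persons",
}

QUERY_RESOURCE = {
    "jobReference": JOB_REFERENCE_RESOURCE,
    "configuration": {
        "query": {
            "destinationTable": TABLE_REFERENCE_RESOURCE,
            "query": "SELECT 42",
            "queryParameters": [],
            "useLegacySql": False,
        }
    },
    "status": {"state": "DONE"},
}


def make_connection(*responses):
    # Mock Connection whose api_request returns the given payloads in order.
    conn = mock.create_autospec(
        google.cloud.bigquery.client.Connection, instance=True
    )
    conn.api_request.side_effect = responses
    return conn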
Example #2
def test_context_no_connection():
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension("google.cloud.bigquery")
    magics.context._project = None
    magics.context._credentials = None
    magics.context._connection = None

    credentials_mock = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )
    project = "project-123"
    default_patch = mock.patch(
        "google.auth.default", return_value=(credentials_mock, project)
    )

    query = "select * from persons"
    job_reference = {"projectId": project, "jobId": "some-random-id"}
    table = {"projectId": project, "datasetId": "ds", "tableId": "persons"}
    resource = {
        "jobReference": job_reference,
        "configuration": {
            "query": {
                "destinationTable": table,
                "query": query,
                "queryParameters": [],
                "useLegacySql": False,
            }
        },
        "status": {"state": "DONE"},
    }
    data = {"jobReference": job_reference, "totalRows": 0, "rows": []}

    conn_mock = make_connection(resource, data, data, data)
    conn_patch = mock.patch("google.cloud.bigquery.client.Connection", autospec=True)
    list_rows_patch = mock.patch(
        "google.cloud.bigquery.client.Client.list_rows",
        return_value=google.cloud.bigquery.table._EmptyRowIterator(),
    )
    with conn_patch as conn, list_rows_patch as list_rows, default_patch:
        conn.return_value = conn_mock
        ip.run_cell_magic("bigquery", "", query)

    # Check that query actually starts the job.
    list_rows.assert_called()
    assert len(conn_mock.api_request.call_args_list) == 2
    _, req = conn_mock.api_request.call_args_list[0]
    assert req["method"] == "POST"
    assert req["path"] == "/projects/{}/jobs".format(project)
    sent = req["data"]
    assert isinstance(sent["jobReference"]["jobId"], six.string_types)
    sent_config = sent["configuration"]["query"]
    assert sent_config["query"] == query
Example #4
def test_context_no_connection():
    ip = IPython.get_ipython()
    ip.extension_manager.load_extension("google.cloud.bigquery")
    magics.context._project = None
    magics.context._credentials = None
    magics.context._connection = None

    credentials_mock = mock.create_autospec(
        google.auth.credentials.Credentials, instance=True
    )
    project = "project-123"
    default_patch = mock.patch(
        "google.auth.default", return_value=(credentials_mock, project)
    )
    job_reference = copy.deepcopy(JOB_REFERENCE_RESOURCE)
    job_reference["projectId"] = project

    query = "select * from persons"
    resource = copy.deepcopy(QUERY_RESOURCE)
    resource["jobReference"] = job_reference
    resource["configuration"]["query"]["query"] = query
    data = {"jobReference": job_reference, "totalRows": 0, "rows": []}

    conn_mock = make_connection(resource, data, data, data)
    conn_patch = mock.patch("google.cloud.bigquery.client.Connection", autospec=True)
    list_rows_patch = mock.patch(
        "google.cloud.bigquery.client.Client.list_rows",
        return_value=google.cloud.bigquery.table._EmptyRowIterator(),
    )
    with conn_patch as conn, list_rows_patch as list_rows, default_patch:
        conn.return_value = conn_mock
        ip.run_cell_magic("bigquery", "", query)

    # Check that query actually starts the job.
    list_rows.assert_called()
    assert len(conn_mock.api_request.call_args_list) == 2
    _, req = conn_mock.api_request.call_args_list[0]
    assert req["method"] == "POST"
    assert req["path"] == "/projects/{}/jobs".format(project)
    sent = req["data"]
    assert isinstance(sent["jobReference"]["jobId"], six.string_types)
    sent_config = sent["configuration"]["query"]
    assert sent_config["query"] == query
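For orientation, the behaviour these tests exercise corresponds roughly to the following interactive usage of the cell magic. This is a sketch, not part of the test file; "df" is an arbitrary destination variable name.

import IPython
from google.cloud.bigquery import job, magics

ip = IPython.get_ipython()
ip.extension_manager.load_extension("google.cloud.bigquery")

# Example #1: cap bytes billed for every query run through the magic by
# setting a default job config on the shared context.
magics.context.default_query_job_config = job.QueryJobConfig(
    maximum_bytes_billed=10203
)

# Examples #2 and #4: no connection or project is configured up front, so the
# magic falls back to google.auth.default() for credentials and the project.
# In a notebook this would be written as a %%bigquery cell; the first argument
# here ("df") is the variable that receives the query result.
ip.run_cell_magic("bigquery", "df", "SELECT 17 AS num")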