Beispiel #1
0
def test_to_user_agent_minimal():
    """With grpc_version=None the grpc token is omitted from the agent string."""
    minimal_info = client_info.ClientInfo(
        python_version="1", api_core_version="2", grpc_version=None
    )

    assert minimal_info.to_user_agent() == "gl-python/1 gax/2"
Beispiel #2
0
def test_constructor_defaults():
    """Defaults: runtime versions are detected, library versions stay unset."""
    default_info = client_info.ClientInfo()

    # Versions detected from the running environment are always populated.
    detected = (
        default_info.python_version,
        default_info.grpc_version,
        default_info.api_core_version,
    )
    for version in detected:
        assert version is not None

    # Library-specific versions are only set when passed explicitly.
    assert default_info.gapic_version is None
    assert default_info.client_library_version is None
Beispiel #3
0
def _cell_magic(line, query):
    """Implementation behind the ``bigquery`` cell magic.

    Note:
        IPython invokes this through the registered magic; it is not part
        of the public surface and should not be called directly.

    Args:
        line (str): "%%bigquery" followed by arguments as required
        query (str): SQL query to run

    Returns:
        pandas.DataFrame: the query results.
    """
    parsed = magic_arguments.parse_argstring(_cell_magic, line)

    # --params is a Python/JSON literal; any parse failure surfaces as a
    # SyntaxError so the user sees a familiar message.
    query_params = []
    if parsed.params is not None:
        try:
            literal = ast.literal_eval("".join(parsed.params))
            query_params = _helpers.to_query_parameters(literal)
        except Exception:
            raise SyntaxError(
                "--params is not a correctly formatted JSON string or a JSON "
                "serializable dictionary")

    client = bigquery.Client(
        project=parsed.project or context.project,
        credentials=context.credentials,
        default_query_job_config=context.default_query_job_config,
        client_info=client_info.ClientInfo(
            user_agent="ipython-{}".format(IPython.__version__)),
    )
    # Reuse a connection injected on the shared context (e.g. for testing).
    if context._connection:
        client._connection = context._connection

    bqstorage_client = _make_bqstorage_client(
        parsed.use_bqstorage_api or context.use_bqstorage_api,
        context.credentials)

    job_config = bigquery.job.QueryJobConfig()
    job_config.query_parameters = query_params
    job_config.use_legacy_sql = parsed.use_legacy_sql

    max_billed = parsed.maximum_bytes_billed
    if max_billed == "None":
        # An explicit "None" on the command line clears the cap
        # (0 presumably means "no limit" to the backend — confirm).
        job_config.maximum_bytes_billed = 0
    elif max_billed is not None:
        job_config.maximum_bytes_billed = int(max_billed)

    query_job = _run_query(client, query, job_config)

    if not parsed.verbose:
        display.clear_output()

    result = query_job.to_dataframe(bqstorage_client=bqstorage_client)
    if parsed.destination_var:
        # Publish the DataFrame into the user's namespace instead of
        # returning it for display.
        IPython.get_ipython().push({parsed.destination_var: result})
    else:
        return result
Beispiel #4
0
    def get_bigquery_client(cls, profile_credentials):
        """Build a BigQuery client from dbt profile credentials.

        The client advertises the running dbt version via its user agent.
        """
        creds = cls.get_bigquery_credentials(profile_credentials)
        return google.cloud.bigquery.Client(
            profile_credentials.database,
            creds,
            location=getattr(profile_credentials, 'location', None),
            client_info=client_info.ClientInfo(user_agent=f'dbt-{dbt_version}'),
        )
Beispiel #5
0
def test_to_user_agent_full():
    """The custom agent prefix and all version tokens appear in fixed order."""
    full_info = client_info.ClientInfo(
        python_version="1",
        grpc_version="2",
        api_core_version="3",
        gapic_version="4",
        client_library_version="5",
        user_agent="app-name/1.0",
    )

    expected = "app-name/1.0 gl-python/1 grpc/2 gax/3 gapic/4 gccl/5"
    assert full_info.to_user_agent() == expected
 def test_it():
     client = mock.Mock(
         _credentials="creds",
         secure=True,
         host="thehost",
         stub=object(),
         spec=("_credentials", "secure", "host", "stub"),
         client_info=client_info.ClientInfo(
             user_agent="google-cloud-ndb/{}".format(__version__)),
     )
     context = context_module.Context(client)
     with context.use():
         assert _api.stub() is client.stub
Beispiel #7
0
    def get_bigquery_client(cls, profile_credentials):
        """Build a BigQuery client, impersonating a service account if asked.

        The client advertises the running dbt version via its user agent.
        """
        if profile_credentials.impersonate_service_account:
            creds = cls.get_impersonated_bigquery_credentials(
                profile_credentials)
        else:
            creds = cls.get_bigquery_credentials(profile_credentials)

        return google.cloud.bigquery.Client(
            profile_credentials.database,
            creds,
            location=getattr(profile_credentials, 'location', None),
            client_info=client_info.ClientInfo(user_agent=f'dbt-{dbt_version}'),
        )
Beispiel #8
0
def test_constructor_options():
    """Every constructor argument lands on the attribute of the same name."""
    options = {
        "python_version": "1",
        "grpc_version": "2",
        "api_core_version": "3",
        "gapic_version": "4",
        "client_library_version": "5",
        "user_agent": "6",
    }

    info = client_info.ClientInfo(**options)

    for attribute, expected in options.items():
        assert getattr(info, attribute) == expected
 def test_secure_channel(datastore_pb2_grpc, _helpers):
     channel = _helpers.make_secure_channel.return_value
     client = mock.Mock(
         _credentials="creds",
         secure=True,
         host="thehost",
         spec=("_credentials", "secure", "host"),
         client_info=client_info.ClientInfo(
             user_agent="google-cloud-ndb/{}".format(__version__)
         ),
     )
     context = context_module.Context(client)
     with context.use():
         stub = _api.stub()
         assert _api.stub() is stub  # one stub per context
     assert stub is datastore_pb2_grpc.DatastoreStub.return_value
     datastore_pb2_grpc.DatastoreStub.assert_called_once_with(channel)
     _helpers.make_secure_channel.assert_called_once_with(
         "creds", client.client_info.to_user_agent(), "thehost"
     )
Beispiel #10
0
def _cell_magic(line, query):
    """Underlying function for bigquery cell magic

    Note:
        This function contains the underlying logic for the 'bigquery' cell
        magic. This function is not meant to be called directly.

    Args:
        line (str): "%%bigquery" followed by arguments as required
        query (str): SQL query to run

    Returns:
        pandas.DataFrame: the query results.
    """
    args = magic_arguments.parse_argstring(_cell_magic, line)

    # The BQ Storage API is the default download transport now; the old
    # opt-in flag is still accepted but only triggers a deprecation warning.
    if args.use_bqstorage_api is not None:
        warnings.warn(
            "Deprecated option --use_bqstorage_api, the BigQuery "
            "Storage API is already used by default.",
            category=DeprecationWarning,
        )
    use_bqstorage_api = not args.use_rest_api

    # Interpret --params as a Python literal and convert it to query
    # parameter objects; any failure is re-reported as a SyntaxError.
    params = []
    if args.params is not None:
        try:
            params = _helpers.to_query_parameters(
                ast.literal_eval("".join(args.params)))
        except Exception:
            raise SyntaxError(
                "--params is not a correctly formatted JSON string or a JSON "
                "serializable dictionary")

    project = args.project or context.project
    client = bigquery.Client(
        project=project,
        credentials=context.credentials,
        default_query_job_config=context.default_query_job_config,
        client_info=client_info.ClientInfo(user_agent=IPYTHON_USER_AGENT),
    )
    # Reuse a connection injected on the shared context (e.g. for testing).
    if context._connection:
        client._connection = context._connection
    bqstorage_client = _make_bqstorage_client(use_bqstorage_api,
                                              context.credentials)

    # Close both clients' transports no matter how the body below exits.
    close_transports = functools.partial(_close_transports, client,
                                         bqstorage_client)

    try:
        if args.max_results:
            max_results = int(args.max_results)
        else:
            max_results = None

        query = query.strip()

        if not query:
            error = ValueError("Query is missing.")
            _handle_error(error, args.destination_var)
            return

        # Any query that does not contain whitespace (aside from leading and trailing whitespace)
        # is assumed to be a table id
        if not re.search(r"\s", query):
            try:
                rows = client.list_rows(query, max_results=max_results)
            except Exception as ex:
                _handle_error(ex, args.destination_var)
                return

            result = rows.to_dataframe(bqstorage_client=bqstorage_client)
            if args.destination_var:
                IPython.get_ipython().push({args.destination_var: result})
                return
            else:
                return result

        job_config = bigquery.job.QueryJobConfig()
        job_config.query_parameters = params
        job_config.use_legacy_sql = args.use_legacy_sql
        job_config.dry_run = args.dry_run

        # --destination_table routes results into a table, creating the
        # dataset if needed and truncating any existing table contents.
        if args.destination_table:
            split = args.destination_table.split(".")
            if len(split) != 2:
                raise ValueError(
                    "--destination_table should be in a <dataset_id>.<table_id> format."
                )
            dataset_id, table_id = split
            job_config.allow_large_results = True
            dataset_ref = bigquery.dataset.DatasetReference(
                client.project, dataset_id)
            destination_table_ref = dataset_ref.table(table_id)
            job_config.destination = destination_table_ref
            job_config.create_disposition = "CREATE_IF_NEEDED"
            job_config.write_disposition = "WRITE_TRUNCATE"
            _create_dataset_if_necessary(client, dataset_id)

        if args.maximum_bytes_billed == "None":
            # An explicit "None" clears the cap (0 presumably means
            # "no limit" to the backend — confirm against the API docs).
            job_config.maximum_bytes_billed = 0
        elif args.maximum_bytes_billed is not None:
            value = int(args.maximum_bytes_billed)
            job_config.maximum_bytes_billed = value

        try:
            query_job = _run_query(client, query, job_config=job_config)
        except Exception as ex:
            _handle_error(ex, args.destination_var)
            return

        if not args.verbose:
            display.clear_output()

        # Dry runs yield the job itself (pushed or returned), not a frame.
        if args.dry_run and args.destination_var:
            IPython.get_ipython().push({args.destination_var: query_job})
            return
        elif args.dry_run:
            print("Query validated. This query will process {} bytes.".format(
                query_job.total_bytes_processed))
            return query_job

        if max_results:
            result = query_job.result(max_results=max_results).to_dataframe(
                bqstorage_client=bqstorage_client)
        else:
            result = query_job.to_dataframe(bqstorage_client=bqstorage_client)

        # Either publish the DataFrame into the user's namespace or return
        # it so IPython displays it.
        if args.destination_var:
            IPython.get_ipython().push({args.destination_var: result})
        else:
            return result
    finally:
        close_transports()
Beispiel #11
0
def _cell_magic(line, query):
    """Underlying function for bigquery cell magic

    Note:
        This function contains the underlying logic for the 'bigquery' cell
        magic. This function is not meant to be called directly.

    Args:
        line (str): "%%bigquery" followed by arguments as required
        query (str): SQL query to run

    Returns:
        pandas.DataFrame: the query results.
    """
    # The built-in parser does not recognize Python structures such as dicts, thus
    # we extract the "--params" option and interpret it separately.
    # Re-raise the custom parser's errors under familiar builtin types.
    try:
        params_option_value, rest_of_args = _split_args_line(line)
    except lap.exceptions.QueryParamsParseError as exc:
        rebranded_error = SyntaxError(
            "--params is not a correctly formatted JSON string or a JSON "
            "serializable dictionary")
        six.raise_from(rebranded_error, exc)
    except lap.exceptions.DuplicateQueryParamsError as exc:
        rebranded_error = ValueError("Duplicate --params option.")
        six.raise_from(rebranded_error, exc)
    except lap.exceptions.ParseError as exc:
        rebranded_error = ValueError(
            "Unrecognized input, are option values correct? "
            "Error details: {}".format(exc.args[0]))
        six.raise_from(rebranded_error, exc)

    args = magic_arguments.parse_argstring(_cell_magic, rest_of_args)

    # The BQ Storage API is the default download transport now; the old
    # opt-in flag is still accepted but only triggers a deprecation warning.
    if args.use_bqstorage_api is not None:
        warnings.warn(
            "Deprecated option --use_bqstorage_api, the BigQuery "
            "Storage API is already used by default.",
            category=DeprecationWarning,
        )
    use_bqstorage_api = not args.use_rest_api

    params = []
    if params_option_value:
        # A non-existing params variable is not expanded and ends up in the input
        # in its raw form, e.g. "$query_params".
        if params_option_value.startswith("$"):
            msg = 'Parameter expansion failed, undefined variable "{}".'.format(
                params_option_value[1:])
            raise NameError(msg)

        params = _helpers.to_query_parameters(
            ast.literal_eval(params_option_value))

    project = args.project or context.project

    # Copy the context's client options so a per-cell endpoint override
    # does not leak into later cells; options may be a dict or an object.
    bigquery_client_options = copy.deepcopy(context.bigquery_client_options)
    if args.bigquery_api_endpoint:
        if isinstance(bigquery_client_options, dict):
            bigquery_client_options[
                "api_endpoint"] = args.bigquery_api_endpoint
        else:
            bigquery_client_options.api_endpoint = args.bigquery_api_endpoint

    client = bigquery.Client(
        project=project,
        credentials=context.credentials,
        default_query_job_config=context.default_query_job_config,
        client_info=client_info.ClientInfo(user_agent=IPYTHON_USER_AGENT),
        client_options=bigquery_client_options,
    )
    # Reuse a connection injected on the shared context (e.g. for testing).
    if context._connection:
        client._connection = context._connection

    # Same copy-then-override dance for the BQ Storage client options.
    bqstorage_client_options = copy.deepcopy(context.bqstorage_client_options)
    if args.bqstorage_api_endpoint:
        if isinstance(bqstorage_client_options, dict):
            bqstorage_client_options[
                "api_endpoint"] = args.bqstorage_api_endpoint
        else:
            bqstorage_client_options.api_endpoint = args.bqstorage_api_endpoint

    bqstorage_client = _make_bqstorage_client(
        use_bqstorage_api,
        context.credentials,
        bqstorage_client_options,
    )

    # Close both clients' transports no matter how the body below exits.
    close_transports = functools.partial(_close_transports, client,
                                         bqstorage_client)

    try:
        if args.max_results:
            max_results = int(args.max_results)
        else:
            max_results = None

        query = query.strip()

        if not query:
            error = ValueError("Query is missing.")
            _handle_error(error, args.destination_var)
            return

        # Any query that does not contain whitespace (aside from leading and trailing whitespace)
        # is assumed to be a table id
        if not re.search(r"\s", query):
            try:
                rows = client.list_rows(query, max_results=max_results)
            except Exception as ex:
                _handle_error(ex, args.destination_var)
                return

            result = rows.to_dataframe(bqstorage_client=bqstorage_client)
            if args.destination_var:
                IPython.get_ipython().push({args.destination_var: result})
                return
            else:
                return result

        job_config = bigquery.job.QueryJobConfig()
        job_config.query_parameters = params
        job_config.use_legacy_sql = args.use_legacy_sql
        job_config.dry_run = args.dry_run

        # --destination_table routes results into a table, creating the
        # dataset if needed and truncating any existing table contents.
        if args.destination_table:
            split = args.destination_table.split(".")
            if len(split) != 2:
                raise ValueError(
                    "--destination_table should be in a <dataset_id>.<table_id> format."
                )
            dataset_id, table_id = split
            job_config.allow_large_results = True
            dataset_ref = bigquery.dataset.DatasetReference(
                client.project, dataset_id)
            destination_table_ref = dataset_ref.table(table_id)
            job_config.destination = destination_table_ref
            job_config.create_disposition = "CREATE_IF_NEEDED"
            job_config.write_disposition = "WRITE_TRUNCATE"
            _create_dataset_if_necessary(client, dataset_id)

        if args.maximum_bytes_billed == "None":
            # An explicit "None" clears the cap (0 presumably means
            # "no limit" to the backend — confirm against the API docs).
            job_config.maximum_bytes_billed = 0
        elif args.maximum_bytes_billed is not None:
            value = int(args.maximum_bytes_billed)
            job_config.maximum_bytes_billed = value

        try:
            query_job = _run_query(client, query, job_config=job_config)
        except Exception as ex:
            _handle_error(ex, args.destination_var)
            return

        if not args.verbose:
            display.clear_output()

        # Dry runs yield the job itself (pushed or returned), not a frame.
        if args.dry_run and args.destination_var:
            IPython.get_ipython().push({args.destination_var: query_job})
            return
        elif args.dry_run:
            print("Query validated. This query will process {} bytes.".format(
                query_job.total_bytes_processed))
            return query_job

        if max_results:
            result = query_job.result(max_results=max_results).to_dataframe(
                bqstorage_client=bqstorage_client)
        else:
            result = query_job.to_dataframe(bqstorage_client=bqstorage_client)

        # Either publish the DataFrame into the user's namespace or return
        # it so IPython displays it.
        if args.destination_var:
            IPython.get_ipython().push({args.destination_var: result})
        else:
            return result
    finally:
        close_transports()
Beispiel #12
0
    from google.api_core import client_info  # noqa

    _HAVE_GRPC = False
else:
    from google.api_core.gapic_v1 import client_info

    _HAVE_GRPC = True

from google.cloud.client import ClientWithProject
from google.cloud.error_reporting import __version__
from google.cloud.error_reporting._logging import _ErrorReportingLoggingAPI
from google.cloud.environment_vars import DISABLE_GRPC

# The DISABLE_GRPC env var can force the HTTP fallback even when gRPC
# imported successfully above.
_DISABLE_GRPC = os.getenv(DISABLE_GRPC, False)
_USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC
# Shared client info attached to API requests; advertises this library's
# version via client_library_version.
_CLIENT_INFO = client_info.ClientInfo(client_library_version=__version__)


class HTTPContext(object):
    """HTTPContext defines an object that captures the parameter for the
    httpRequest part of Error Reporting API

    :type method: str
    :param method: The type of HTTP request, such as GET, POST, etc.

    :type url: str
    :param url: The URL of the request

    :type user_agent: str
    :param user_agent: The user agent information that is provided with the
                       request.
Beispiel #13
0
def _cell_magic(line, query):
    """Underlying function for bigquery cell magic

    Note:
        This function contains the underlying logic for the 'bigquery' cell
        magic. This function is not meant to be called directly.

    Args:
        line (str): "%%bigquery" followed by arguments as required
        query (str): SQL query to run

    Returns:
        pandas.DataFrame: the query results.
    """
    args = magic_arguments.parse_argstring(_cell_magic, line)

    # Interpret --params as a Python literal and convert it to query
    # parameter objects; any failure is re-reported as a SyntaxError.
    params = []
    if args.params is not None:
        try:
            params = _helpers.to_query_parameters(
                ast.literal_eval("".join(args.params))
            )
        except Exception:
            raise SyntaxError(
                "--params is not a correctly formatted JSON string or a JSON "
                "serializable dictionary"
            )

    project = args.project or context.project
    client = bigquery.Client(
        project=project,
        credentials=context.credentials,
        default_query_job_config=context.default_query_job_config,
        client_info=client_info.ClientInfo(user_agent=IPYTHON_USER_AGENT),
    )
    # Reuse a connection injected on the shared context (e.g. for testing).
    if context._connection:
        client._connection = context._connection
    bqstorage_client = _make_bqstorage_client(
        args.use_bqstorage_api or context.use_bqstorage_api, context.credentials
    )
    job_config = bigquery.job.QueryJobConfig()
    job_config.query_parameters = params
    job_config.use_legacy_sql = args.use_legacy_sql
    job_config.dry_run = args.dry_run

    if args.maximum_bytes_billed == "None":
        # An explicit "None" clears the cap (0 presumably means
        # "no limit" to the backend — confirm against the API docs).
        job_config.maximum_bytes_billed = 0
    elif args.maximum_bytes_billed is not None:
        value = int(args.maximum_bytes_billed)
        job_config.maximum_bytes_billed = value

    # Use None as the "no error" sentinel. The previous check `if error:`
    # treated an exception with an empty message (str(ex) == "") as success
    # and then dereferenced the unbound query_job, raising NameError.
    error = None
    try:
        query_job = _run_query(client, query, job_config)
    except Exception as ex:
        error = str(ex)

    if not args.verbose:
        display.clear_output()

    if error is not None:
        if args.destination_var:
            print(
                "Could not save output to variable '{}'.".format(args.destination_var),
                file=sys.stderr,
            )
        print("\nERROR:\n", error, file=sys.stderr)
        return

    # Dry runs yield the job itself (pushed or returned), not a frame.
    if args.dry_run and args.destination_var:
        IPython.get_ipython().push({args.destination_var: query_job})
        return
    elif args.dry_run:
        print(
            "Query validated. This query will process {} bytes.".format(
                query_job.total_bytes_processed
            )
        )
        return query_job

    result = query_job.to_dataframe(bqstorage_client=bqstorage_client)
    # Either publish the DataFrame into the user's namespace or return it
    # so IPython displays it.
    if args.destination_var:
        IPython.get_ipython().push({args.destination_var: result})
    else:
        return result
import grpc
import os
import requests

from google.api_core import client_info
from google.cloud import environment_vars
from google.cloud import _helpers
from google.cloud import client as google_client
from google.cloud.datastore_v1.gapic import datastore_client
from google.cloud.datastore_v1.proto import datastore_pb2_grpc

from google.cloud.ndb import __version__
from google.cloud.ndb import context as context_module
from google.cloud.ndb import key as key_module

# Shared client info attached to Datastore requests; advertises the NDB
# library version in the user agent.
_CLIENT_INFO = client_info.ClientInfo(
    user_agent="google-cloud-ndb/{}".format(__version__))

# Datastore service address with the trailing ":<port>" suffix removed.
DATASTORE_API_HOST = datastore_client.DatastoreClient.SERVICE_ADDRESS.rsplit(
    ":", 1)[0]


def _get_gcd_project():
    """Return the GCD application ID from the environment, or None."""
    dataset_env_var = environment_vars.GCD_DATASET
    return os.getenv(dataset_env_var)


def _determine_default_project(project=None):
    """Determine default project explicitly or implicitly as fall-back.

    In implicit case, supports four environments. In order of precedence, the
    implicit environments are:
Beispiel #15
0
def google_client_info():
    """Build a ClientInfo advertising the running SQLAlchemy version."""
    return client_info.ClientInfo(
        user_agent=USER_AGENT_TEMPLATE.format(sqlalchemy.__version__)
    )
Beispiel #16
0
def _cell_magic(line, query):
    """Underlying function for bigquery cell magic

    Note:
        This function contains the underlying logic for the 'bigquery' cell
        magic. This function is not meant to be called directly.

    Args:
        line (str): "%%bigquery" followed by arguments as required
        query (str): SQL query to run

    Returns:
        pandas.DataFrame: the query results.
    """
    args = magic_arguments.parse_argstring(_cell_magic, line)

    # Interpret --params as a Python literal and convert it to query
    # parameter objects; any failure is re-reported as a SyntaxError.
    params = []
    if args.params is not None:
        try:
            params = _helpers.to_query_parameters(
                ast.literal_eval("".join(args.params)))
        except Exception:
            raise SyntaxError(
                "--params is not a correctly formatted JSON string or a JSON "
                "serializable dictionary")

    project = args.project or context.project
    client = bigquery.Client(
        project=project,
        credentials=context.credentials,
        default_query_job_config=context.default_query_job_config,
        client_info=client_info.ClientInfo(user_agent=IPYTHON_USER_AGENT),
    )
    # Reuse a connection injected on the shared context (e.g. for testing).
    if context._connection:
        client._connection = context._connection
    bqstorage_client = _make_bqstorage_client(
        args.use_bqstorage_api or context.use_bqstorage_api,
        context.credentials)

    if args.max_results:
        max_results = int(args.max_results)
    else:
        max_results = None

    query = query.strip()

    # Any query that does not contain whitespace (aside from leading and trailing whitespace)
    # is assumed to be a table id
    if not re.search(r"\s", query):
        try:
            rows = client.list_rows(query, max_results=max_results)
        except Exception as ex:
            _print_error(str(ex), args.destination_var)
            return

        result = rows.to_dataframe(bqstorage_client=bqstorage_client)
        if args.destination_var:
            IPython.get_ipython().push({args.destination_var: result})
            return
        else:
            return result

    job_config = bigquery.job.QueryJobConfig()
    job_config.query_parameters = params
    job_config.use_legacy_sql = args.use_legacy_sql
    job_config.dry_run = args.dry_run

    if args.maximum_bytes_billed == "None":
        # An explicit "None" clears the cap (0 presumably means
        # "no limit" to the backend — confirm against the API docs).
        job_config.maximum_bytes_billed = 0
    elif args.maximum_bytes_billed is not None:
        value = int(args.maximum_bytes_billed)
        job_config.maximum_bytes_billed = value

    try:
        query_job = _run_query(client, query, job_config=job_config)
    except Exception as ex:
        _print_error(str(ex), args.destination_var)
        return

    if not args.verbose:
        display.clear_output()

    # Dry runs yield the job itself (pushed or returned), not a frame.
    if args.dry_run and args.destination_var:
        IPython.get_ipython().push({args.destination_var: query_job})
        return
    elif args.dry_run:
        print("Query validated. This query will process {} bytes.".format(
            query_job.total_bytes_processed))
        return query_job

    if max_results:
        result = query_job.result(max_results=max_results).to_dataframe(
            bqstorage_client=bqstorage_client)
    else:
        result = query_job.to_dataframe(bqstorage_client=bqstorage_client)

    # Either publish the DataFrame into the user's namespace or return it
    # so IPython displays it.
    if args.destination_var:
        IPython.get_ipython().push({args.destination_var: result})
    else:
        return result
Beispiel #17
0
def get_http_client_info():
    """Return a ClientInfo carrying this library's HTTP user agent."""
    info = http_client_info.ClientInfo(user_agent=USER_AGENT)
    return info