Example #1
def client():
    client = storage.Client(
        project=_CONF_TEST_PROJECT_ID,
        credentials=AnonymousCredentials(),
        client_options={"api_endpoint": _HOST},
    )
    return client
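The fixture above returns a storage client wired to an emulator endpoint. A minimal sketch of exercising such a client against a locally running fake GCS server follows; the default host, project ID, and bucket name are assumptions, not values from the example.
# Minimal sketch (assumes an emulator is reachable at _HOST; bucket name is hypothetical).
import os

from google.auth.credentials import AnonymousCredentials
from google.cloud import storage

_HOST = os.environ.get("STORAGE_EMULATOR_HOST", "http://localhost:4443")
_CONF_TEST_PROJECT_ID = "test-project"

emulator_client = storage.Client(
    project=_CONF_TEST_PROJECT_ID,
    credentials=AnonymousCredentials(),
    client_options={"api_endpoint": _HOST},
)
bucket = emulator_client.create_bucket("example-bucket")  # hypothetical bucket name
bucket.blob("hello.txt").upload_from_string("hello, emulator")
print([blob.name for blob in emulator_client.list_blobs("example-bucket")])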
Example #2
def setUpModule():
    if USE_EMULATOR:
        from google.auth.credentials import AnonymousCredentials

        emulator_project = os.getenv("GCLOUD_PROJECT", "emulator-test-project")
        Config.CLIENT = Client(project=emulator_project,
                               credentials=AnonymousCredentials())
    else:
        Config.CLIENT = Client()

    retry = RetryErrors(exceptions.ServiceUnavailable)

    configs = list(retry(Config.CLIENT.list_instance_configs)())

    instances = retry(_list_instances)()
    EXISTING_INSTANCES[:] = instances

    if CREATE_INSTANCE:
        if not USE_EMULATOR:
            # Defend against back-end returning configs for regions we aren't
            # actually allowed to use.
            configs = [config for config in configs if "-us-" in config.name]

        if not configs:
            raise ValueError("List instance configs failed in module set up.")

        Config.INSTANCE_CONFIG = configs[0]
        config_name = configs[0].name

        Config.INSTANCE = Config.CLIENT.instance(INSTANCE_ID, config_name)
        created_op = Config.INSTANCE.create()
        created_op.result(30)  # block until completion
    else:
        Config.INSTANCE = Config.CLIENT.instance(INSTANCE_ID)
        Config.INSTANCE.reload()
Example #3
    def __init__(
        self,
        project=None,
        credentials=None,
        database=DEFAULT_DATABASE,
        client_info=_CLIENT_INFO,
        client_options=None,
    ) -> None:
        # NOTE: This API has no use for the _http argument, but sending it
        #       will have no impact since the _http() @property only lazily
        #       creates a working HTTP object.
        self._emulator_host = os.getenv(_FIRESTORE_EMULATOR_HOST)

        if self._emulator_host is not None:
            if credentials is None:
                credentials = AnonymousCredentials()
            if project is None:
                project = _DEFAULT_EMULATOR_PROJECT

        super(BaseClient, self).__init__(
            project=project,
            credentials=credentials,
            client_options=client_options,
            _http=None,
        )
        self._client_info = client_info
        if client_options:
            if type(client_options) == dict:
                client_options = google.api_core.client_options.from_dict(
                    client_options)
        self._client_options = client_options

        self._database = database
Example #4
def _create_dummy_storage_client():
    fake_host = os.getenv('STORAGE_PORT_4443_TCP_ADDR')
    external_url = 'https://{}:4443'.format(fake_host)
    storage.blob._API_ACCESS_ENDPOINT = 'https://storage.gcs.{}.nip.io:4443'.format(fake_host)
    storage.blob._DOWNLOAD_URL_TEMPLATE = (
        "%s/download/storage/v1{path}?alt=media" % external_url
    )
    storage.blob._BASE_UPLOAD_TEMPLATE = (
        "%s/upload/storage/v1{bucket_path}/o?uploadType=" % external_url
    )
    storage.blob._MULTIPART_URL_TEMPLATE = storage.blob._BASE_UPLOAD_TEMPLATE + "multipart"
    storage.blob._RESUMABLE_URL_TEMPLATE = storage.blob._BASE_UPLOAD_TEMPLATE + "resumable"
    my_http = requests.Session()
    my_http.verify = False  # disable SSL validation
    urllib3.disable_warnings(
        urllib3.exceptions.InsecureRequestWarning
    )  # disable https warnings for https insecure certs

    storage_client = storage.Client(
        credentials=AnonymousCredentials(),
        project='test',
        _http=my_http,
        client_options=ClientOptions(api_endpoint=external_url))

    if len(list(storage_client.list_buckets())) == 0:
        bucket = storage_client.create_bucket(_get_bucket_name())

    return storage_client
Example #5
    def setup_engine_options(self, engine_options):
        self.credentials = None
        self.instance_id = None
        self.project_id = None

        bigtable_emu = os.environ.get('BIGTABLE_EMULATOR_HOST', None)
        if bigtable_emu or ("emulator" in engine_options
                            and engine_options["emulator"]):
            self.credentials = AnonymousCredentials()
        elif "credentials" not in engine_options:
            raise ValueError("missing credentials option for bigtable engine")
        else:
            self.credentials = engine_options["credentials"]
            # use this if credentials is a path to a key.json service account
            # self.credentials = service_account.Credentials.from_service_account_file(engine_options["credentials"])

        if "project_id" not in engine_options:
            raise ValueError("missing project_id option for bigtable engine")
        else:
            self.project_id = engine_options["project_id"]

        if "instance_id" not in engine_options:
            raise ValueError("missing instance_id option for bigtable engine")
        else:
            self.instance_id = engine_options["instance_id"]
Example #6
    def test_database_admin_api_emulator_code(self):
        from google.auth.credentials import AnonymousCredentials
        from google.api_core.client_options import ClientOptions

        credentials = AnonymousCredentials()
        client_info = mock.Mock()
        client_options = ClientOptions(api_endpoint="emulator.host")
        client = self._make_one(
            project=self.PROJECT,
            credentials=credentials,
            client_info=client_info,
            client_options=client_options,
        )

        db_module = "google.cloud.spanner_v1.client.DatabaseAdminClient"
        with mock.patch(db_module) as database_admin_client:
            api = client.database_admin_api

        self.assertIs(api, database_admin_client.return_value)

        # API instance is cached
        again = client.database_admin_api
        self.assertIs(again, api)

        self.assertEqual(len(database_admin_client.call_args_list), 1)
        called_args, called_kw = database_admin_client.call_args
        self.assertEqual(called_args, ())
        self.assertEqual(called_kw["client_info"], client_info)
        self.assertEqual(called_kw["client_options"], client_options)
        self.assertIn("transport", called_kw)
        self.assertNotIn("credentials", called_kw)
Example #7
def create_storage_client(test):
    if test:
        EXTERNAL_URL = "https://127.0.0.1:4443"
        PUBLIC_HOST = "storage.gcs.127.0.0.1.nip.io:4443"

        storage.blob._API_ACCESS_ENDPOINT = "https://" + PUBLIC_HOST
        storage.blob._DOWNLOAD_URL_TEMPLATE = (
            u"%s/download/storage/v1{path}?alt=media" % EXTERNAL_URL)
        storage.blob._BASE_UPLOAD_TEMPLATE = (
            u"%s/upload/storage/v1{bucket_path}/o?uploadType=" % EXTERNAL_URL)
        storage.blob._MULTIPART_URL_TEMPLATE = storage.blob._BASE_UPLOAD_TEMPLATE + u"multipart"
        storage.blob._RESUMABLE_URL_TEMPLATE = storage.blob._BASE_UPLOAD_TEMPLATE + u"resumable"

        my_http = requests.Session()
        my_http.verify = False  # disable SSL validation
        urllib3.disable_warnings(
            urllib3.exceptions.InsecureRequestWarning
        )  # disable https warnings for https insecure certs

        storage_client = storage.Client(
            credentials=AnonymousCredentials(),
            project="test",
            _http=my_http,
            client_options=ClientOptions(api_endpoint=EXTERNAL_URL),
        )
    else:
        storage_client = storage.Client()

    return storage_client
Example #8
def gc_credentials():
    parser = ConfigParser()
    parser.read("google_cloud_credentials.ini")
    credentials_path = parser.get("google-cloud-tests",
                                  "credentials_json_path",
                                  fallback=None)
    emulator_endpoint = parser.get("google-cloud-tests",
                                   "emulator_endpoint",
                                   fallback=None)

    assert (
        credentials_path or emulator_endpoint
    ), "Either set endpoint (for gc emulation) or credentials_json_path (for actual gc)"

    if emulator_endpoint:
        # google's client library looks for this env var
        # if we didn't set it, the client would use the standard endpoint
        # at https://storage.googleapis.com
        os.environ["STORAGE_EMULATOR_HOST"] = emulator_endpoint
        credentials = AnonymousCredentials()
    else:
        # if no endpoint was defined we're running against actual GC and need credentials
        credentials = credentials_path
    yield credentials
    # unset the env var
    if emulator_endpoint:
        del os.environ["STORAGE_EMULATOR_HOST"]
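A dependent fixture can consume the credentials yielded above and work against either the emulator or real GCS. A minimal sketch, assuming pytest; the fixture and project names are hypothetical, and the service-account loading follows the path-based fallback in the example.
# Minimal sketch of a consumer of the gc_credentials fixture (names are hypothetical).
import pytest
from google.cloud import storage
from google.oauth2 import service_account


@pytest.fixture
def gc_client(gc_credentials):
    # With the emulator configured, gc_credentials is an AnonymousCredentials
    # instance and STORAGE_EMULATOR_HOST is already set, so the client talks to
    # the emulator; otherwise gc_credentials is a path to a service-account JSON.
    if isinstance(gc_credentials, str):
        creds = service_account.Credentials.from_service_account_file(gc_credentials)
        return storage.Client(credentials=creds, project=creds.project_id)
    return storage.Client(credentials=gc_credentials, project="emulator-project")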
Example #9
def gcsbucket():
    client = storage.Client(
        credentials=AnonymousCredentials(),
        client_options={
            "api_endpoint":
            os.environ.get("GCS_API_ENDPOINT", "http://127.0.0.1:4443")
        },
        project="test",
    )

    class GCSBucket(Bucket):
        def __init__(self, name):
            self.name = name
            self._client = client
            self._client.create_bucket(name)

        def put(self, key, content=""):
            self._client.get_bucket(
                self.name).blob(key).upload_from_string(content)

        def get(self, key):
            return self._client.get_bucket(self.name).get_blob(key)

        @property
        def root(self):
            return f"gs://{self.name}/"

    # To connect with fake-gcs-server, recreate gcs client
    paaaaath.gcs.GCSPath.register_client(client)

    bucket = "".join([
        random.choice("0123456789abcdefghijklmnopqrstuvwxyz")
        for _ in range(32)
    ])
    yield GCSBucket(bucket)
Example #10
    def commit(self, byte_data, override_blob_name=None):

        # this means we're testing
        if os.environ.get("STORAGE_EMULATOR_HOST") is not None:
            client = storage.Client(
                credentials=AnonymousCredentials(),
                project=self.project,
            )
        else:  # pragma: no cover
            client = storage.Client(project=self.project, credentials=self.credentials)
        self.gcs_bucket = client.get_bucket(self.bucket)
        self.filename = self.filename_without_bucket

        # if we've been given the filename, use that, otherwise get the
        # name from the path builder
        if override_blob_name:
            blob_name = override_blob_name
        else:
            blob_name = self._build_path()

        try:
            blob = self.gcs_bucket.blob(blob_name)
            self.retry(blob.upload_from_string)(
                byte_data, content_type="application/octet-stream"
            )
            return blob_name
        except Exception as err:
            import traceback

            logger = get_logger()
            logger.error(
                f"Error Saving Blob to GCS {type(err).__name__} - {err}\n{traceback.format_exc()}"
            )
            raise err
Example #11
def run_gapic():
    from helloworld import GreeterClient
    from helloworld import HelloRequest
    from helloworld import HelloReply
    import grpc
    from helloworld.services.greeter.transports import GreeterGrpcTransport
    from google.auth.credentials import AnonymousCredentials

    creds = AnonymousCredentials()
    # normally we could pass creds straight to the client, but we have custom CA certs:
    # c = GreeterClient(credentials=creds)

    # load custom certs
    with open('CA_crt.pem', 'rb') as fh:
        root_ca = fh.read()
    channel_creds = grpc.ssl_channel_credentials(root_certificates=root_ca, )
    transport = GreeterGrpcTransport(
        credentials=creds,
        ssl_channel_credentials=channel_creds,
    )
    c = GreeterClient(transport=transport)

    req = HelloRequest()
    req.name = 'sal'
    resp = c.say_hello(request=req)
    logging.info(resp.message)
Example #12
def setUpModule():
    if USE_EMULATOR:
        from google.auth.credentials import AnonymousCredentials

        emulator_project = os.getenv("GCLOUD_PROJECT", "emulator-test-project")
        Config.CLIENT = Client(project=emulator_project,
                               credentials=AnonymousCredentials())
    else:
        Config.CLIENT = Client()
    retry = RetryErrors(exceptions.ServiceUnavailable)

    configs = list(retry(Config.CLIENT.list_instance_configs)())

    instances = retry(_list_instances)()
    EXISTING_INSTANCES[:] = instances

    # Delete test instances that are older than an hour.
    cutoff = int(time.time()) - 1 * 60 * 60
    for instance_pb in Config.CLIENT.list_instances(
            "labels.python-spanner-dbapi-systests:true"):
        instance = Instance.from_pb(instance_pb, Config.CLIENT)
        if "created" not in instance.labels:
            continue
        create_time = int(instance.labels["created"])
        if create_time > cutoff:
            continue
        # Instance cannot be deleted while backups exist.
        for backup_pb in instance.list_backups():
            backup = Backup.from_pb(backup_pb, instance)
            backup.delete()
        instance.delete()

    if CREATE_INSTANCE:
        if not USE_EMULATOR:
            # Defend against back-end returning configs for regions we aren't
            # actually allowed to use.
            configs = [config for config in configs if "-us-" in config.name]

        if not configs:
            raise ValueError("List instance configs failed in module set up.")

        Config.INSTANCE_CONFIG = configs[0]
        config_name = configs[0].name
        create_time = str(int(time.time()))
        labels = {
            "python-spanner-dbapi-systests": "true",
            "created": create_time
        }

        Config.INSTANCE = Config.CLIENT.instance(INSTANCE_ID,
                                                 config_name,
                                                 labels=labels)
        created_op = Config.INSTANCE.create()
        created_op.result(
            SPANNER_OPERATION_TIMEOUT_IN_SECONDS)  # block until completion

    else:
        Config.INSTANCE = Config.CLIENT.instance(INSTANCE_ID)
        Config.INSTANCE.reload()
Example #13
def client():
    host = os.environ.get(_STORAGE_EMULATOR_ENV_VAR)
    client = storage.Client(
        project=_CONF_TEST_PROJECT_ID,
        credentials=AnonymousCredentials(),
        client_options={"api_endpoint": host},
    )
    return client
Example #14
    async def test_progress(self, monkeypatch):
        fake_creds = AnonymousCredentials()
        mock_client = mock.create_autospec(
            ClusterControllerClient(credentials=fake_creds))
        mock_logging_client = mock.create_autospec(
            logging_v2.LoggingServiceV2Client(credentials=fake_creds))
        spawner = DataprocSpawner(hub=Hub(),
                                  dataproc=mock_client,
                                  user=MockUser(),
                                  _mock=True,
                                  logging=mock_logging_client,
                                  gcs_notebooks=self.gcs_notebooks)
        spawner.project = "test-progress"

        async def collect(ait):
            items = []
            async for value in ait:
                items.append(value)
            return items

        def create_logs():
            entries = []
            for i in range(5):
                e = LogEntry(insert_id=f'entry_{i}',
                             json_payload=ParseDict(
                                 {
                                     'method': 'method',
                                     'message': f'message_{i}'
                                 }, Struct()))
                entries.append(e)
            return entries

        def create_expected():
            progress = 5
            expected = []
            i = 0
            for e in create_logs():
                progress += math.ceil((90 - progress) / 4)
                expected.append({
                    'progress': progress,
                    'message': f'method: message_{i}'
                })
                i += 1
            expected.append({'message': 'operation.done()', 'progress': 71})
            return expected

        def test_list_log_entries(*args, **kwargs):
            return create_logs()

        op = MockOperation('op1', 'cluster1-op1')

        monkeypatch.setattr(mock_logging_client, 'list_log_entries',
                            test_list_log_entries)
        monkeypatch.setattr(spawner, 'operation', op)

        _, _ = await spawner.start()
        assert await collect(spawner.progress()) == create_expected()
Example #15
    def test_minimium_cluster_definition(self, monkeypatch):
        """ Some keys must always be present for JupyterHub to work. """
        import yaml

        def test_read_file(*args, **kwargs):
            config_string = open('./tests/test_data/minimum.yaml', 'r').read()
            return config_string

        def test_clustername(*args, **kwargs):
            return 'test-clustername'

        fake_creds = AnonymousCredentials()
        mock_dataproc_client = mock.create_autospec(
            ClusterControllerClient(credentials=fake_creds))
        mock_gcs_client = mock.create_autospec(
            storage.Client(credentials=fake_creds, project='project'))
        spawner = DataprocSpawner(hub=Hub(),
                                  dataproc=mock_dataproc_client,
                                  gcs=mock_gcs_client,
                                  user=MockUser(),
                                  _mock=True,
                                  gcs_notebooks=self.gcs_notebooks)

        # Prevents a call to GCS. We return the local file instead.
        monkeypatch.setattr(spawner, "read_gcs_file", test_read_file)
        monkeypatch.setattr(spawner, "clustername", test_clustername)

        spawner.project = "test-project"
        spawner.zone = "test-self1-b"
        spawner.env_str = "test-env-str"
        spawner.args_str = "test-args-str"

        config_built = spawner._build_cluster_config()

        assert 'project_id' in config_built
        assert 'cluster_name' in config_built

        assert config_built['project_id'] == 'test-project'
        assert config_built['cluster_name'] == 'test-clustername'

        assert config_built['config']['gce_cluster_config']['zone_uri'].split(
            '/')[-1] == 'test-self1-b'

        assert Component['JUPYTER'].value in config_built['config'][
            'software_config']['optional_components']
        assert Component['ANACONDA'].value in config_built['config'][
            'software_config']['optional_components']

        assert 'dataproc:jupyter.hub.args' in config_built['config'][
            'software_config']['properties']
        assert 'dataproc:jupyter.hub.enabled' in config_built['config'][
            'software_config']['properties']
        # assert 'dataproc:jupyter.notebook.gcs.dir' in config_built['config']['software_config']['properties']
        assert 'dataproc:jupyter.hub.env' in config_built['config'][
            'software_config']['properties']
Example #16
def spanner_client():
    if _helpers.USE_EMULATOR:
        from google.auth.credentials import AnonymousCredentials

        credentials = AnonymousCredentials()
        return spanner_v1.Client(
            project=_helpers.EMULATOR_PROJECT,
            credentials=credentials,
        )
    else:
        return spanner_v1.Client()  # use google.auth.default credentials
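The `_helpers` flags consumed above typically come from environment variables. A hedged sketch of how they might be derived; the exact names inside `_helpers` are assumptions, though the GCLOUD_PROJECT fallback mirrors Example #2 and SPANNER_EMULATOR_HOST is the variable the Spanner client library itself checks.
# Hedged sketch of the _helpers flags used above (exact attribute names are assumptions).
import os

USE_EMULATOR = os.getenv("SPANNER_EMULATOR_HOST") is not None
EMULATOR_PROJECT = os.getenv("GCLOUD_PROJECT", "emulator-test-project")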
Example #17
    def test_constructor_emulator_host_warning(self, mock_warn, mock_em):
        from google.cloud.spanner_v1 import client as MUT
        from google.auth.credentials import AnonymousCredentials

        expected_scopes = None
        creds = _make_credentials()
        mock_em.return_value = "http://emulator.host.com"
        with mock.patch("google.cloud.spanner_v1.client.AnonymousCredentials") as patch:
            expected_creds = patch.return_value = AnonymousCredentials()
            self._constructor_test_helper(expected_scopes, creds, expected_creds)
        mock_warn.assert_called_once_with(MUT._EMULATOR_HOST_HTTP_SCHEME)
Example #18
    def connect(self, method=None):
        if method == 'browser':
            cred = self._connect_browser()
        elif method == "cache":
            cred = self._connect_cache()
        elif method == 'anon':
            cred = AnonymousCredentials()
        else:
            raise ValueError(f"Invalid connection method `{method}`.")
        srv = build('drive', 'v3', credentials=cred)
        self._drives = srv.drives()
        self.service = srv.files()
Example #19
    def test_locations(self, monkeypatch):
        import yaml

        def test_read_file(*args, **kwargs):
            config_string = open('./tests/test_data/basic_uri.yaml',
                                 'r').read()
            return config_string

        def test_clustername(*args, **kwargs):
            return 'test-clustername'

        fake_creds = AnonymousCredentials()
        mock_dataproc_client = mock.create_autospec(
            ClusterControllerClient(credentials=fake_creds))
        mock_gcs_client = mock.create_autospec(
            storage.Client(credentials=fake_creds, project='project'))
        spawner = DataprocSpawner(hub=Hub(),
                                  dataproc=mock_dataproc_client,
                                  gcs=mock_gcs_client,
                                  user=MockUser(),
                                  _mock=True,
                                  gcs_notebooks=self.gcs_notebooks)

        # Prevents a call to GCS. We return the local file instead.
        monkeypatch.setattr(spawner, "read_gcs_file", test_read_file)
        monkeypatch.setattr(spawner, "clustername", test_clustername)

        spawner.project = "test-project"
        spawner.region = "us-east1"
        spawner.zone = "us-east1-d"
        spawner.env_str = "test-env-str"
        spawner.args_str = "test-args-str"
        spawner.user_options = {
            'cluster_type': 'basic_uri.yaml',
            'cluster_zone': 'us-east1-d'
        }

        user_zone = spawner.user_options['cluster_zone']
        user_region = user_zone[:-2]

        config_built = spawner._build_cluster_config()

        assert config_built['config']['gce_cluster_config'][
            'subnetwork_uri'].split('/')[-3] == user_region
        assert config_built['config']['master_config'][
            'machine_type_uri'] == 'n1-standard-4'
        assert config_built['config']['worker_config'][
            'machine_type_uri'] == 'n1-highmem-16'
        assert config_built['config']['secondary_worker_config'][
            'machine_type_uri'] == 'n1-standard-4'
        assert config_built['config']['master_config']['accelerators'][0][
            'accelerator_type_uri'] == 'nvidia-tesla-v100'
Example #20
def get_bucket():
    options = dict(project=project_id)

    if settings.ENVIRONMENT != 'production':
        options['credentials'] = AnonymousCredentials()

    client = storage.Client(**options)
    bucket = client.bucket(bucket_name)

    if not bucket.exists():
        bucket.create(client)

    return bucket
Example #21
def _get_mock_gcs_client():
    my_http = requests.Session()
    my_http.verify = False  # disable SSL validation
    urllib3.disable_warnings(
        urllib3.exceptions.InsecureRequestWarning
    )  # disable https warnings for https insecure certs

    return storage.Client(
        credentials=AnonymousCredentials(),
        project="test",
        _http=my_http,
        client_options=ClientOptions(api_endpoint="https://127.0.0.1:4443"),
    )
Example #22
    def __init__(self, **kwargs):
        # Sanity check: Is our goal to use the emulator?
        # If so, create a grpc insecure channel with the emulator host
        # as the target.
        if os.environ.get("PUBSUB_EMULATOR_HOST"):
            kwargs["client_options"] = {
                "api_endpoint": os.environ.get("PUBSUB_EMULATOR_HOST")
            }
            kwargs["credentials"] = AnonymousCredentials()

        # Instantiate the underlying GAPIC client.
        self._api = subscriber_client.SubscriberClient(**kwargs)
        self._target = self._api._transport._host
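Because the constructor above checks PUBSUB_EMULATOR_HOST, routing the subscriber at a local emulator only requires setting that variable before the client is created. A minimal sketch; the project and subscription names are placeholders, and recent google-cloud-pubsub releases honor the same variable in their stock clients.
# Minimal sketch: point a subscriber at a local Pub/Sub emulator (resource names are placeholders).
import os

from google.cloud import pubsub_v1

os.environ["PUBSUB_EMULATOR_HOST"] = "localhost:8085"  # must be set before the client is constructed

subscriber = pubsub_v1.SubscriberClient()
subscription_path = subscriber.subscription_path("test-project", "test-subscription")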
Example #23
def get_blob(project: str, bucket: str, blob_name: str):

    # this means we're testing
    if os.environ.get("STORAGE_EMULATOR_HOST") is not None:
        client = storage.Client(
            credentials=AnonymousCredentials(),
            project=project,
        )
    else:  # pragma: no cover
        client = storage.Client(project=project)

    gcs_bucket = client.get_bucket(bucket)
    blob = gcs_bucket.get_blob(blob_name)
    return blob
Example #24
    def create_anonymous_client(cls):
        """Factory: return client with anonymous credentials.

        .. note::

           Such a client has only limited access to "public" buckets:
           listing their contents and downloading their blobs.

        :rtype: :class:`google.cloud.storage.client.Client`
        :returns: Instance w/ anonymous credentials and no project.
        """
        client = cls(project="<none>", credentials=AnonymousCredentials())
        client.project = None
        return client
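As the docstring notes, such a client can only read public buckets. A minimal sketch of that use case; the bucket and object names below are placeholders.
# Minimal sketch: download from a public bucket anonymously (names are placeholders).
from google.cloud import storage

anon_client = storage.Client.create_anonymous_client()
public_bucket = anon_client.bucket("some-public-bucket")              # placeholder bucket name
payload = public_bucket.blob("some-object.txt").download_as_bytes()   # placeholder object name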
Example #25
    def test_cluster_definition_overrides(self, monkeypatch):
        """Check that config settings incompatible with JupyterHub are overwritten correctly."""
        import yaml

        def test_read_file(*args, **kwargs):
            config_string = open('./tests/test_data/export.yaml', 'r').read()
            return config_string

        def test_clustername(*args, **kwargs):
            return 'test-clustername'

        fake_creds = AnonymousCredentials()
        mock_dataproc_client = mock.create_autospec(
            ClusterControllerClient(credentials=fake_creds))
        mock_gcs_client = mock.create_autospec(
            storage.Client(credentials=fake_creds, project='project'))
        spawner = DataprocSpawner(hub=Hub(),
                                  dataproc=mock_dataproc_client,
                                  gcs=mock_gcs_client,
                                  user=MockUser(),
                                  _mock=True,
                                  gcs_notebooks=self.gcs_notebooks)

        # Prevents a call to GCS. We return the local file instead.
        monkeypatch.setattr(spawner, "read_gcs_file", test_read_file)
        monkeypatch.setattr(spawner, "clustername", test_clustername)

        spawner.project = "test-project"
        spawner.region = "us-east1"
        spawner.zone = "us-east1-d"
        spawner.env_str = "test-env-str"
        spawner.args_str = "test-args-str"
        spawner.user_options = {
            'cluster_type': 'export.yaml',
            'cluster_zone': 'test-form1-a'
        }

        config_built = spawner._build_cluster_config()

        # Verify that we disable Component Gateway (temporarily)
        assert config_built['config']['endpoint_config'][
            'enable_http_port_access'] == False
        # Verify that we disable preemptibility (temporarily)
        assert 'preemptibility' not in config_built['config']['master_config']
        assert 'preemptibility' not in config_built['config']['worker_config']
        # Verify that we removed cluster-specific namenode properties
        assert 'hdfs:dfs.namenode.lifeline.rpc-address' not in config_built[
            'config']['software_config']['properties']
        assert 'hdfs:dfs.namenode.servicerpc-address' not in config_built[
            'config']['software_config']['properties']
Example #26
def set_up():

    os.environ["STORAGE_EMULATOR_HOST"] = "http://localhost:9090"

    client = storage.Client(
        credentials=AnonymousCredentials(),
        project="testing",
    )
    bucket = client.bucket(BUCKET_NAME)
    try:
        bucket.delete(force=True)
    except:  # pragma: no cover
        pass
    bucket = client.create_bucket(BUCKET_NAME)
Example #27
    def test_image_version_supports_component_gateway(self):
        fake_creds = AnonymousCredentials()
        mock_dataproc_client = mock.create_autospec(
            ClusterControllerClient(credentials=fake_creds))
        mock_gcs_client = mock.create_autospec(
            storage.Client(credentials=fake_creds, project='project'))
        spawner = DataprocSpawner(hub=Hub(),
                                  dataproc=mock_dataproc_client,
                                  gcs=mock_gcs_client,
                                  user=MockUser(),
                                  _mock=True,
                                  gcs_notebooks=self.gcs_notebooks)

        assert spawner._validate_image_version_supports_component_gateway(
            '1.3') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '1.3-debian9') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '1.3.6-debian9') is False
        assert spawner._validate_image_version_supports_component_gateway(
            '1.3.59-debian9') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '1.3.999-debian9') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '1.4-debian10') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '1.4.6-debian10') is False
        assert spawner._validate_image_version_supports_component_gateway(
            '1.4.31-debian10') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '1.5-debian10') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '1.5.0-debian10') is False
        assert spawner._validate_image_version_supports_component_gateway(
            '1.5.5-debian10') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '2') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '2.0') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '2.0.0') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '2.3.0') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '2.0.0-RC1-preview') is True
        assert spawner._validate_image_version_supports_component_gateway(
            'weird-unexpected-version-124.3.v2.2020-02-15') is True
        assert spawner._validate_image_version_supports_component_gateway(
            '1.3.weird-version-again') is True
Example #28
    def test_cluster_definition_keep_core_values(self, monkeypatch):
        """ Some system's default values must remain no matter what. """
        import yaml

        def test_read_file(*args, **kwargs):
            config_string = open('./tests/test_data/basic.yaml', 'r').read()
            return config_string

        def test_clustername(*args, **kwargs):
            return 'test-clustername'

        fake_creds = AnonymousCredentials()
        mock_dataproc_client = mock.create_autospec(
            ClusterControllerClient(credentials=fake_creds))
        mock_gcs_client = mock.create_autospec(
            storage.Client(credentials=fake_creds, project='project'))
        spawner = DataprocSpawner(hub=Hub(),
                                  dataproc=mock_dataproc_client,
                                  gcs=mock_gcs_client,
                                  user=MockUser(),
                                  _mock=True,
                                  gcs_notebooks=self.gcs_notebooks)

        # Prevents a call to GCS. We return the local file instead.
        monkeypatch.setattr(spawner, "read_gcs_file", test_read_file)
        monkeypatch.setattr(spawner, "clustername", test_clustername)

        spawner.project = "test-project"
        spawner.region = "us-east1"
        spawner.zone = "us-east1-d"
        spawner.env_str = "test-env-str"
        spawner.args_str = "test-args-str"
        spawner.user_options = {
            'cluster_type': 'basic.yaml',
            'cluster_zone': 'test-form1-a'
        }

        config_built = spawner._build_cluster_config()

        assert config_built['project_id'] == 'test-project'
        assert config_built['cluster_name'] == 'test-clustername'

        assert config_built['config']['software_config']['properties'][
            'dataproc:jupyter.hub.args'] == 'test-args-str'
        assert config_built['config']['software_config']['properties'][
            'dataproc:jupyter.hub.enabled'] == 'true'
        # assert config_built['config']['software_config']['properties']['dataproc:jupyter.notebook.gcs.dir'] == f'gs://users-notebooks/fake'
        assert config_built['config']['software_config']['properties'][
            'dataproc:jupyter.hub.env'] == 'test-env-str'
Example #29
    def __init__(self, **kwargs):
        # Sanity check: Is our goal to use the emulator?
        # If so, create a grpc insecure channel with the emulator host
        # as the target.
        if os.environ.get("PUBSUB_EMULATOR_HOST"):
            kwargs["channel"] = grpc.insecure_channel(
                target=os.environ.get("PUBSUB_EMULATOR_HOST"))
            kwargs["credentials"] = AnonymousCredentials()

        # The GAPIC client has mTLS logic to determine the api endpoint and the
        # ssl credentials to use. Here we create a GAPIC client to help compute the
        # api endpoint and ssl credentials. The api endpoint will be used to set
        # `self._target`, and ssl credentials will be passed to
        # `grpc_helpers.create_channel` to establish a mTLS channel (if ssl
        # credentials is not None).
        client_options = kwargs.get("client_options", None)
        credentials = kwargs.get("credentials", None)
        client_for_mtls_info = subscriber_client.SubscriberClient(
            credentials=credentials, client_options=client_options)

        self._target = client_for_mtls_info._transport._host

        # Use a custom channel.
        # We need this in order to set appropriate default message size and
        # keepalive options.
        if "transport" not in kwargs:
            channel = kwargs.pop("channel", None)
            if channel is None:
                channel = grpc_helpers.create_channel(
                    credentials=kwargs.pop("credentials", None),
                    target=self.target,
                    ssl_credentials=client_for_mtls_info._transport.
                    _ssl_channel_credentials,
                    scopes=subscriber_client.SubscriberClient._DEFAULT_SCOPES,
                    options={
                        "grpc.max_send_message_length": -1,
                        "grpc.max_receive_message_length": -1,
                        "grpc.keepalive_time_ms": 30000,
                    }.items(),
                )
            # cannot pass both 'channel' and 'credentials'
            kwargs.pop("credentials", None)
            transport = subscriber_grpc_transport.SubscriberGrpcTransport(
                channel=channel)
            kwargs["transport"] = transport

        # Add the metrics headers, and instantiate the underlying GAPIC
        # client.
        self._api = subscriber_client.SubscriberClient(**kwargs)
Example #30
    def __init__(
        self,
        project=None,
        credentials=None,
        client_info=_CLIENT_INFO,
        user_agent=None,
        client_options=None,
        query_options=None,
    ):
        self._emulator_host = _get_spanner_emulator_host()

        if client_options and type(client_options) == dict:
            self._client_options = google.api_core.client_options.from_dict(
                client_options
            )
        else:
            self._client_options = client_options

        if self._emulator_host:
            credentials = AnonymousCredentials()
        elif isinstance(credentials, AnonymousCredentials):
            self._emulator_host = self._client_options.api_endpoint

        # NOTE: This API has no use for the _http argument, but sending it
        #       will have no impact since the _http() @property only lazily
        #       creates a working HTTP object.
        super(Client, self).__init__(
            project=project,
            credentials=credentials,
            client_options=client_options,
            _http=None,
        )
        self._client_info = client_info

        env_query_options = ExecuteSqlRequest.QueryOptions(
            optimizer_version=_get_spanner_optimizer_version()
        )

        # Environment flag config has higher precedence than application config.
        self._query_options = _merge_query_options(query_options, env_query_options)

        if user_agent is not None:
            warnings.warn(_USER_AGENT_DEPRECATED, DeprecationWarning, stacklevel=2)
            self.user_agent = user_agent

        if self._emulator_host is not None and (
            "http://" in self._emulator_host or "https://" in self._emulator_host
        ):
            warnings.warn(_EMULATOR_HOST_HTTP_SCHEME)