def test_no_auth_with_empty_user(self):
    """
    Cluster does not require any authentication--so no credentials are
    provided in the user info.
    """
    config = {
        "clusters": [{
            "name": "no-auth-cluster",
            "cluster": {"server": "http://localhost:8080"},
        }],
        "users": [{
            "name": "no-auth-cluster",
            "user": {},  # empty credentials on purpose
        }],
        "contexts": [{
            "name": "no-auth-cluster",
            "context": {
                "cluster": "no-auth-cluster",
                # Must reference the (empty) user entry defined above; the
                # previous placeholder pointed at a nonexistent user.
                "user": "no-auth-cluster",
            },
        }],
        "current-context": "no-auth-cluster",
    }
    client = pykube.HTTPClient(pykube.KubeConfig(doc=config))
    self.ensure_no_auth(client)
def test_exec_plugin_auth(self):
    """An exec-based credential plugin is invoked and its token ends up
    as a Bearer Authorization header on the request."""
    exec_spec = {
        'command': 'heptio-authenticator-aws',
        'apiVersion': 'client.authentication.k8s.io/v1alpha1',
        'args': ["token", "-i", "test-pykube-mock-eks-cluster"],
    }
    self.config.update({'users': [{'name': 'test-user', 'user': {'exec': exec_spec}}]})
    _log.info('Built config: %s', self.config)
    with mock.patch('pykube.http.subprocess') as mock_subprocess:
        # Simulate the plugin process returning a canned credential blob.
        mock_subprocess.check_output = mock.Mock(return_value=AUTHPLUGIN_FIXTURE)
        adapter = pykube.http.KubernetesHTTPAdapterSendMixin()
        request, _ = adapter._setup_auth(self.request, pykube.KubeConfig(doc=self.config))
    _log.debug('Checking headers %s', request.headers)
    self.assertTrue(mock_subprocess.check_output.called)
    self.assertIn('Authorization', request.headers)
    self.assertEqual(request.headers['Authorization'], 'Bearer test')
def login_mocks(mocker):
    """Patch pykube's config loaders (when pykube is importable) to return a
    minimal in-memory KubeConfig, and hand the patches back as LoginMocks."""
    kwargs = {}
    try:
        import pykube
    except ImportError:
        pass
    else:
        doc = {
            'current-context': 'self',
            'clusters': [{'name': 'self', 'cluster': {'server': 'localhost'}}],
            'contexts': [{'name': 'self', 'context': {'cluster': 'self', 'namespace': 'default'}}],
        }
        cfg = pykube.KubeConfig(doc)
        kwargs['pykube_in_cluster'] = mocker.patch.object(
            pykube.KubeConfig, 'from_service_account', return_value=cfg)
        kwargs['pykube_from_file'] = mocker.patch.object(
            pykube.KubeConfig, 'from_file', return_value=cfg)
    return LoginMocks(**kwargs)
def get_client(kube_apiserver=None, key_file=None, cert_file=None, ca_cert=None):
    """Build a pykube HTTPClient for the given API server.

    :param kube_apiserver: API server URL.
    :param key_file: path to the client key (used only with cert_file).
    :param cert_file: path to the client certificate (used only with key_file).
    :param ca_cert: path to the CA certificate, if server TLS is verified.
    :return: a configured ``pykube.HTTPClient``.
    """
    cluster = {"server": kube_apiserver}
    if ca_cert:
        cluster["certificate-authority"] = ca_cert
    user = {}
    # Client-certificate auth needs both halves of the key pair.
    if cert_file and key_file:
        user["client-certificate"] = cert_file
        user["client-key"] = key_file
    config = {
        "clusters": [{"name": "ccp", "cluster": cluster}],
        "users": [{"name": "ccp", "user": user}],
        "contexts": [{
            "name": "ccp",
            # The context must reference the "ccp" user defined above; the
            # previous placeholder pointed at a nonexistent user entry.
            "context": {"cluster": "ccp", "user": "ccp"},
        }],
        "current-context": "ccp",
    }
    return pykube.HTTPClient(pykube.KubeConfig(config))
def build_kube_config(self):
    """Return a pykube.KubeConfig pointing at this cluster's master IP,
    with an (as yet) empty user entry."""
    name = self.cluster.config["name"]
    doc = {
        "clusters": [{
            "name": name,
            "cluster": {"server": "https://{}".format(self.cluster.master_ip)},
        }],
        "users": [{
            "name": name,
            "user": {},  # @@@ kubeadm
        }],
        "contexts": [{
            "name": name,
            "context": {"cluster": name, "user": name},
        }],
        "current-context": name,
    }
    return pykube.KubeConfig(doc)
def test_monkeypatched_get_pykube_cfg_overrides_pykube(mocker, login_mocks):
    """When get_pykube_cfg is patched, login() uses it and never falls back
    to the in-cluster or file-based loaders."""
    doc = {
        'current-context': 'self',
        'contexts': [{'name': 'self', 'context': {'cluster': 'self'}}],
        'clusters': [{'name': 'self', 'cluster': {'server': 'https://localhost'}}],
    }
    get_pykube_cfg = mocker.patch('kopf.clients.auth.get_pykube_cfg')
    get_pykube_cfg.return_value = pykube.KubeConfig(doc)
    with pytest.deprecated_call(match=r"cease using kopf.login\(\)"):
        login()
    assert get_pykube_cfg.called
    assert not login_mocks.pykube_in_cluster.called
    assert not login_mocks.pykube_from_file.called
def login_mocks(mocker):
    """
    Make all client libraries potentially optional, but do not skip the tests:
    skipping the tests is the tests' decision, not this mocking fixture's one.
    """
    kwargs = {}
    try:
        import pykube
    except ImportError:
        pass
    else:
        doc = {
            'current-context': 'self',
            'clusters': [{'name': 'self', 'cluster': {'server': 'localhost'}}],
            'contexts': [{'name': 'self', 'context': {'cluster': 'self', 'namespace': 'default'}}],
        }
        cfg = pykube.KubeConfig(doc)
        kwargs['pykube_in_cluster'] = mocker.patch.object(
            pykube.KubeConfig, 'from_service_account', return_value=cfg)
        kwargs['pykube_from_file'] = mocker.patch.object(
            pykube.KubeConfig, 'from_file', return_value=cfg)
        kwargs['pykube_from_env'] = mocker.patch.object(
            pykube.KubeConfig, 'from_env', return_value=cfg)
    try:
        import kubernetes
    except ImportError:
        pass
    else:
        kwargs['client_in_cluster'] = mocker.patch.object(
            kubernetes.config, 'load_incluster_config')
        kwargs['client_from_file'] = mocker.patch.object(
            kubernetes.config, 'load_kube_config')
    return LoginMocks(**kwargs)
def ca_auth(self, auth_data, server, cert_data, client_key, auth_way='ca_auth'):
    """CA-certificate authentication against a Kubernetes API server.

    :param auth_data: raw CA certificate bytes (base64-encoded into the config)
    :param server: API server URL
    :param cert_data: raw client certificate bytes
    :param client_key: raw client key bytes
    :param auth_way: 'ca_auth' builds a pykube client with certificates;
        'http' returns a plain MyRequest client without certificates
    :return: Result wrapping either a pykube.HTTPClient or a MyRequest
    """
    config = {
        "clusters": [{
            "name": "self",
            "cluster": {"certificate-authority-data": "", "server": ""},
        }],
        "users": [{
            "name": "self",
            "user": {"client-certificate-data": "", "client-key-data": ""},
        }],
        "contexts": [{
            "name": "self",
            # Reference the "self" user defined above; the previous
            # placeholder pointed at a nonexistent user entry.
            "context": {"cluster": "self", "user": "self"},
        }],
        "current-context": "self",
    }
    if auth_way == 'http':
        api = MyRequest(server)
        Log(3, "http11:{}".format(api))
        return Result(api)
    # Fill in the certificate material, base64-encoded as kubeconfig expects.
    cluster = config['clusters'][0]['cluster']
    cluster['certificate-authority-data'] = base64.b64encode(auth_data)
    cluster['server'] = server
    user = config['users'][0]['user']
    user['client-certificate-data'] = base64.b64encode(cert_data)
    user['client-key-data'] = base64.b64encode(client_key)
    api = pykube.HTTPClient(pykube.KubeConfig(doc=config))
    return Result(api)
def test_build_session_bearer_token(self):
    """Test that HTTPClient correctly parses the token"""
    user_entry = {"name": "test-user", "user": {"token": "test"}}
    self.config.update({"users": [user_entry]})
    _log.info("Built config: %s", self.config)
    client = pykube.HTTPClient(pykube.KubeConfig(doc=self.config))
    _log.debug("Checking headers %s", client.session.headers)
def test_build_session_auth_provider(self):
    """Test that HTTPClient correctly parses the auth-provider config.

    Observed in GKE with kubelet v1.3.
    """
    self.config.update({
        'users': [
            {
                'name': 'test-user',
                'user': {
                    'auth-provider': {
                        'config': {
                            'access-token': 'abc',
                            'expiry': '2016-08-24T16:19:17.19878675-07:00',
                        },
                    },
                },
            },
        ]
    })
    # Fake "gcloud" application-default credentials file contents.
    gcloud_content = """
    {
        "client_id": "myclientid",
        "client_secret": "myclientsecret",
        "refresh_token": "myrefreshtoken",
        "type": "authorized_user"
    }
    """
    _log.info('Built config: %s', self.config)
    # mkstemp creates the file atomically; the deprecated mktemp() only
    # returned a name and was vulnerable to a symlink race.
    fd, tmp = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'w') as f:
            f.write(gcloud_content)
        session = pykube.session.GCPSession(
            pykube.KubeConfig(doc=self.config), tmp)
        # assertEqual: assertEquals is a deprecated alias.
        self.assertEqual(session.oauth.token['access_token'], 'abc')
        self.assertEqual(session.oauth.token['refresh_token'], 'myrefreshtoken')
        self.assertEqual(session.credentials.get('client_id'), 'myclientid')
        self.assertEqual(session.credentials.get('client_secret'), 'myclientsecret')
    finally:
        if os.path.exists(tmp):
            os.remove(tmp)
def test_build_session_bearer_token(self):
    """Test that HTTPClient correctly parses the token"""
    self.config['users'] = [
        {
            'name': 'test-user',
            'user': {'token': 'test'},
        },
    ]
    _log.info('Built config: %s', self.config)
    kube_config = pykube.KubeConfig(doc=self.config)
    client = pykube.HTTPClient(kube_config)
    _log.debug('Checking headers %s', client.session.headers)
def get_client(kube_apiserver=None, key_file=None, cert_file=None,
               ca_cert=None, insecure=None):
    """Build a pykube HTTPClient, defaulting every unset argument to the
    corresponding CONF.kubernetes setting.

    :param kube_apiserver: API server URL.
    :param key_file: path to the client key (used only with cert_file).
    :param cert_file: path to the client certificate (used only with key_file).
    :param ca_cert: path to the CA certificate; takes precedence over insecure.
    :param insecure: skip TLS verification when no CA cert is given.
    :return: a configured ``pykube.HTTPClient``.
    """
    kube_apiserver = kube_apiserver or CONF.kubernetes.server
    key_file = key_file or CONF.kubernetes.key_file
    cert_file = cert_file or CONF.kubernetes.cert_file
    ca_cert = ca_cert or CONF.kubernetes.ca_cert
    insecure = insecure or CONF.kubernetes.insecure
    cluster = {"server": kube_apiserver}
    if ca_cert:
        cluster["certificate-authority"] = ca_cert
    elif insecure:
        cluster['insecure-skip-tls-verify'] = insecure
    user = {}
    # Client-certificate auth needs both halves of the key pair.
    if cert_file and key_file:
        user["client-certificate"] = cert_file
        user["client-key"] = key_file
    config = {
        "clusters": [{"name": "ccp", "cluster": cluster}],
        "users": [{"name": "ccp", "user": user}],
        "contexts": [{
            "name": "ccp",
            # The context must reference the "ccp" user defined above; the
            # previous placeholder pointed at a nonexistent user entry.
            "context": {"cluster": "ccp", "user": "ccp"},
        }],
        "current-context": "ccp",
    }
    return pykube.HTTPClient(pykube.KubeConfig(config))
def test_no_auth_with_no_user(self):
    """A config whose context names no user at all yields an
    unauthenticated client."""
    doc = {
        "current-context": "no-auth-cluster",
        "clusters": [{
            "name": "no-auth-cluster",
            "cluster": {"server": "http://localhost:8080"},
        }],
        "contexts": [{
            "name": "no-auth-cluster",
            "context": {"cluster": "no-auth-cluster"},
        }],
    }
    client = pykube.HTTPClient(pykube.KubeConfig(doc=doc))
    self.ensure_no_auth(client)
def test_build_session_bearer_token(self):
    """Test that HTTPClient correctly parses the token"""
    self.config['users'] = [{'name': 'test-user', 'user': {'token': 'test'}}]
    _log.info('Built config: %s', self.config)
    kube_config = pykube.KubeConfig(doc=self.config)
    adapter = pykube.http.KubernetesHTTPAdapterSendMixin()
    request, _ = adapter._setup_auth(self.request, kube_config)
    _log.debug('Checking headers %s', request.headers)
    self.assertIn('Authorization', request.headers)
    self.assertEqual(request.headers['Authorization'], 'Bearer test')
def setUp(self):
    """Prepare a KubeConfig whose user authenticates via an exec plugin."""
    exec_spec = {
        'command': 'heptio-authenticator-aws',
        'args': ["token", "-i", "test-pykube-mock-eks-cluster"],
        'apiVersion': 'client.authentication.k8s.io/v1alpha1',
    }
    cfg = deepcopy(BASE_CONFIG)
    cfg['users'] = [{'name': 'test-user', 'user': {'exec': exec_spec}}]
    self.config = pykube.KubeConfig(doc=cfg)
def connect(self):
    """Probe the API server using the stored CA/client certificate data.

    On HTTP 200 the pykube client is cached on ``self.client`` and
    ``Result('ok')`` is returned; otherwise a FAIL Result describing the
    connection/SSL error is returned.
    """
    config = {
        "clusters": [{
            "name": "self",
            "cluster": {"certificate-authority-data": "", "server": ""},
        }],
        "users": [{
            "name": "self",
            "user": {"client-certificate-data": "", "client-key-data": ""},
        }],
        "contexts": [{
            "name": "self",
            # Reference the "self" user defined above; the previous
            # placeholder pointed at a nonexistent user entry.
            "context": {"cluster": "self", "user": "self"},
        }],
        "current-context": "self",
    }
    cluster = config['clusters'][0]['cluster']
    cluster['certificate-authority-data'] = base64.b64encode(self.auth_data)
    cluster['server'] = self.server
    user = config['users'][0]['user']
    user['client-certificate-data'] = base64.b64encode(self.cert_data)
    user['client-key-data'] = base64.b64encode(self.client_key)
    api = pykube.HTTPClient(pykube.KubeConfig(doc=config))
    try:
        response = api.request(method='GET', url='', timeout=5)
    except pykube.PyKubeError as e:
        # Log the exception itself: ``e.message`` is Python-2-only and is
        # not guaranteed to exist on every exception type.
        Log(3, 'server:{},ssl error:{}'.format(self.server, e))
        return Result(
            '', FAIL,
            'KubeClient connect to server:{} fail,ssl error:{}'.format(
                self.server, e))
    except Exception as e:
        Log(3, 'server:{},ssl error:{}'.format(self.server, e))
        return Result(
            '', FAIL,
            'KubeClient connect to server:{} except,ssl error:{}'.format(
                self.server, e))
    if response.status_code == 200:
        self.client = api
        return Result('ok')
    Log(3, msg='server:{} ssl error. text:{}'.format(
        self.server, response.text))
    return Result('', FAIL, response.text)
# Sentinel key injected into the fixture config so consuming tests can verify
# they received this fixture and not a real kubeconfig.
KUBE_SAFETY_CHECK_CONFIG_KEY = 'test-fixture-safety-check'

# Shared in-memory pykube KubeConfig fixture.
# NOTE(review): 'certificate-authority-data' and 'server' sit directly on the
# cluster list entry rather than nested under a 'cluster' sub-dict as in
# standard kubeconfig files, and the context references a '******' user while
# no 'users' section exists — presumably tolerated because tests never open a
# real connection; confirm against the consuming tests.
KUBE_CONFIG = pykube.KubeConfig({
    'apiVersion': 'v1',
    'kind': 'Config',
    'clusters': [
        {
            'name': 'test-fixture-cluster',
            'certificate-authority-data': 'From fixture fx_kube_config',
            'server': 'http://test-fixture-server',
        },
    ],
    'contexts': [
        {
            'name': 'test-fixture-context',
            'context': {
                'cluster': 'test-fixture-cluster',
                'user': '******',
            },
        },
    ],
    'current-context': 'test-fixture-context',
    KUBE_SAFETY_CHECK_CONFIG_KEY: 'I am present',
})

# Zone label value used by tests.
LABEL_ZONE_VALUE = 'test-zone'
def process(event):
    """Sync Confidant-managed credentials into Kubernetes Secrets.

    For each service named in ``event['services']``: pull its details from
    Confidant, split its credentials into k8s-cluster login entries and
    plain secret material, then on each target cluster delete/recreate the
    Secrets, ensure the namespace exists, and trigger a rolling deployment.

    Returns a dict ``{'code': <http-ish status>, 'errors': [<messages>]}``.

    NOTE(review): the statement nesting below was reconstructed from a
    whitespace-collapsed source — confirm the loop/try boundaries against
    version control before relying on them.
    """
    affected_services = event.get('services') or []
    errors = []
    result_code = 200
    for service in affected_services:
        # Set up the client with correct context
        confidant = _setup_confidant_client(service=service)
        # Pull the service details from Confidant
        service_details = confidant.get_service(service=service)
        app.logger.debug(service_details)
        if service_details.get('result') and not service_details.get('service'):
            err = "Unable to pull service details for '{}'".format(service)
            app.logger.error(err)
            errors.append(err)
            result_code = 400
            break
        if service_details.get('result') and service_details['service'].get('enabled'):
            service_details = service_details['service']
            service_name = service_details['id']
            namespace = _service_to_namespace(service_name)
            credentials = service_details.get('credentials') or []
            # Find which k8s clusters the service lives on, and get their login details
            k8s_clusters = [c for c in credentials
                            if c.get('enabled') and c.get('name').startswith('k8s-cluster-')]
            secret_collection = [s for s in credentials
                                 if s.get('enabled') and not s.get('name').startswith('k8s-cluster-')]
            app.logger.debug({"k8s_clusters": k8s_clusters,
                              "secret_collection": secret_collection})
            # Build up the secret data.
            try:
                secrets = _parse_secret_collection(secret_collection=secret_collection,
                                                   namespace=namespace)
            except BadSecretFormat as e:
                errors.append("Error in Secret data: " + str(e))
                result_code = 400
                break
            # Create a hashed value, to see if the data has changed.
            hashedval = hashlib.md5(str(repr(credentials)).encode('utf-8')).hexdigest()
            k8s_auth = _get_k8s_auth(credentials=credentials)
            for config in k8s_auth:
                app.logger.debug({"config": config})
                try:
                    kube_connection = pykube.HTTPClient(pykube.KubeConfig(config))
                    for k8s_secret in secrets.values():
                        if pykube.Secret(kube_connection, k8s_secret).exists():
                            pykube.Secret(kube_connection, k8s_secret).delete()
                            # We delete, rather than update(), as updates seems to keep deleted data
                        k8s_namespace = {"apiVersion": "v1", "kind": "Namespace",
                                         "metadata": {"name": namespace}}
                        if not pykube.Namespace(kube_connection, k8s_namespace).exists():
                            errors.append("Creating namespace %s" % (namespace))
                            pykube.Namespace(kube_connection, k8s_namespace).create()
                        pykube.Secret(kube_connection, k8s_secret).create()
                    _trigger_deployment(kube_connection, namespace, checksum=hashedval)
                    # After updating secrets, we trigger a rolling update in the namespace
                except KeyError as e:
                    errors.append(str(e))
                    result_code = 500
                    app.logger.debug({"error": e})
                except NamespaceNoDeploymentError as e:
                    errors.append(str(e))
                    result_code = 200
    return {'code': result_code, 'errors': errors}