def test_tls(self):
    """End-to-end check that DaskExecutor can run tasks over a TLS cluster."""
    with dask_testing_cluster(
            worker_kwargs={'security': tls_security(), "protocol": "tls"},
            scheduler_kwargs={'security': tls_security(), "protocol": "tls"},
    ) as (cluster, _):
        # These use test certs that ship with dask/distributed and should not be
        # used in production
        for option, cert_name in [('tls_ca', 'tls-ca-cert.pem'),
                                  ('tls_cert', 'tls-key-cert.pem'),
                                  ('tls_key', 'tls-key.pem')]:
            conf.set('dask', option, get_cert(cert_name))
        try:
            executor = DaskExecutor(cluster_address=cluster['address'])
            self.assert_tasks_on_executor(executor)
            executor.end()
            # close the executor, the cluster context manager expects all
            # listeners and tasks to have completed.
            executor.client.close()
        finally:
            # Scrub the TLS options so later tests start from a clean config.
            for option in ('tls_ca', 'tls_key', 'tls_cert'):
                conf.set('dask', option, '')
def test_https_support(c, s, a, b):
    """The dashboard must be reachable over HTTPS and serve relative links."""
    assert isinstance(s.services["dashboard"], BokehScheduler)
    port = s.services["dashboard"].port
    expected_link = "https://localhost:%d/status" % port
    assert format_dashboard_link("localhost", port) == expected_link

    # Trust only the test CA that signed the dashboard's certificate.
    ssl_ctx = ssl.create_default_context()
    ssl_ctx.load_verify_locations(get_cert("tls-ca-cert.pem"))

    client = AsyncHTTPClient()
    plots_url = "https://localhost:%d/individual-plots.json" % port
    resp = yield client.fetch(plots_url, ssl_options=ssl_ctx)
    plots = json.loads(resp.body.decode())

    fixed_pages = [
        "system", "counters", "workers", "status", "tasks", "stealing", "graph",
    ]
    individual_pages = [url.strip("/") for url in plots.values()]
    for suffix in fixed_pages + individual_pages:
        request = HTTPRequest(url="https://localhost:%d/%s" % (port, suffix),
                              ssl_options=ssl_ctx)
        page = yield client.fetch(request)
        assert page.code < 300
        body = page.body.decode()
        assert not re.search("href=./", body)  # no absolute links
def test_https_support(c, s, a, b):
    """Every dashboard page should render over HTTPS with relative links."""
    assert isinstance(s.services["dashboard"], BokehScheduler)
    port = s.services["dashboard"].port
    expected_link = "https://localhost:%d/status" % port
    assert format_dashboard_link("localhost", port) == expected_link

    # Verify the server against the test CA bundled with the suite.
    ssl_ctx = ssl.create_default_context()
    ssl_ctx.load_verify_locations(get_cert("tls-ca-cert.pem"))
    client = AsyncHTTPClient()

    suffixes = (
        "system",
        "counters",
        "workers",
        "status",
        "tasks",
        "stealing",
        "graph",
        "individual-task-stream",
        "individual-progress",
        "individual-graph",
        "individual-nbytes",
        "individual-nprocessing",
        "individual-profile",
    )
    for suffix in suffixes:
        request = HTTPRequest(url="https://localhost:%d/%s" % (port, suffix),
                              ssl_options=ssl_ctx)
        resp = yield client.fetch(request)
        body = resp.body.decode()
        assert "bokeh" in body.lower()
        assert not re.search("href=./", body)  # no absolute links
class TestDaskExecutorTLS(TestBaseDask):
    """DaskExecutor tests that exercise the TLS connection path."""

    def setUp(self):
        # Example DAGs provide the tasks scheduled onto the executor.
        self.dagbag = DagBag(include_examples=True)

    @conf_vars({
        ('dask', 'tls_ca'): get_cert('tls-ca-cert.pem'),
        ('dask', 'tls_cert'): get_cert('tls-key-cert.pem'),
        ('dask', 'tls_key'): get_cert('tls-key.pem'),
    })
    def test_tls(self):
        """Tasks should run against a TLS-secured dask cluster."""
        # These use test certs that ship with dask/distributed and should not be
        # used in production
        with dask_testing_cluster(
                worker_kwargs={'security': tls_security(), "protocol": "tls"},
                scheduler_kwargs={'security': tls_security(), "protocol": "tls"},
        ) as (cluster, _):
            tls_executor = DaskExecutor(cluster_address=cluster['address'])
            self.assert_tasks_on_executor(tls_executor)
            tls_executor.end()
            # close the executor, the cluster context manager expects all
            # listeners and tasks to have completed.
            tls_executor.client.close()

    @mock.patch('airflow.executors.dask_executor.DaskExecutor.sync')
    @mock.patch('airflow.executors.base_executor.BaseExecutor.trigger_tasks')
    @mock.patch('airflow.executors.base_executor.Stats.gauge')
    def test_gauge_executor_metrics(self, mock_stats_gauge, mock_trigger_tasks,
                                    mock_sync):
        """heartbeat() should publish the three executor gauge metrics."""
        executor = DaskExecutor()
        executor.heartbeat()
        expected_calls = [
            mock.call('executor.%s' % metric, mock.ANY)
            for metric in ('open_slots', 'queued_tasks', 'running_tasks')
        ]
        mock_stats_gauge.assert_has_calls(expected_calls)
def test_tls(self):
    """DaskExecutor should connect to a TLS-secured scheduler and run tasks."""
    with dask_testing_cluster(
            worker_kwargs={'security': tls_security()},
            scheduler_kwargs={'security': tls_security()}) as (s, workers):
        # These use test certs that ship with dask/distributed and should not be
        # used in production
        for option, cert_name in [('tls_ca', 'tls-ca-cert.pem'),
                                  ('tls_cert', 'tls-key-cert.pem'),
                                  ('tls_key', 'tls-key.pem')]:
            configuration.set('dask', option, get_cert(cert_name))
        try:
            executor = DaskExecutor(cluster_address=s['address'])
            self.assert_tasks_on_executor(executor)
            executor.end()
            # close the executor, the cluster context manager expects all
            # listeners and tasks to have completed.
            executor.client.close()
        finally:
            # Reset the TLS options so later tests see a clean configuration.
            for option in ('tls_ca', 'tls_key', 'tls_cert'):
                configuration.set('dask', option, '')
# NOTE(review): the assertions below are the tail of the preceding task-stream
# test; its enclosing (async) ``def`` lies outside this chunk.
assert len(ts.source.data["start"]) == 10
assert "identity" in str(ts.source.data)

# Submit fresh, unique tasks so the task stream gains new entries.
futures = c.map(lambda x: x, range(100), pure=False)
await wait(futures)

ts.update()
assert "lambda" in str(ts.source.data)


# Start a cluster whose dashboard is served over HTTPS using the bundled
# test certificates.
@gen_cluster(
    client=True,
    scheduler_kwargs={"services": {("dashboard", 0): BokehScheduler}},
    config={
        "distributed.scheduler.dashboard.tls.key": get_cert("tls-key.pem"),
        "distributed.scheduler.dashboard.tls.cert": get_cert("tls-cert.pem"),
        "distributed.scheduler.dashboard.tls.ca-file": get_cert("tls-ca-cert.pem"),
    },
)
def test_https_support(c, s, a, b):
    # The dashboard service must be the Bokeh-based scheduler dashboard.
    assert isinstance(s.services["dashboard"], BokehScheduler)
    port = s.services["dashboard"].port
    # With TLS configured, format_dashboard_link must emit an https:// URL.
    assert (format_dashboard_link(
        "localhost", port) == "https://localhost:%d/status" % port)
    ctx = ssl.create_default_context()
    # Trust only the test CA that signed the dashboard certificate.
    ctx.load_verify_locations(get_cert("tls-ca-cert.pem"))
    # NOTE(review): chunk ends here — the remainder of this test is not visible.
# NOTE(review): ``deserialize)`` closes an import statement begun outside
# this chunk.
deserialize)

from distributed.comm import (tcp, inproc, connect, listen, CommClosedError,
                              parse_address, parse_host_port,
                              unparse_host_port, resolve_address,
                              get_address_host, get_local_address_for)


# External addresses used to exercise non-loopback connections.
EXTERNAL_IP4 = get_ip()
if has_ipv6():
    # get_ipv6() may emit warnings on some platforms; record them so they do
    # not leak into test output.
    with warnings.catch_warnings(record=True):
        warnings.simplefilter('always')
        EXTERNAL_IP6 = get_ipv6()


# CA certificate bundled with the test suite.
ca_file = get_cert('tls-ca-cert.pem')

# The Subject field of our test certs
cert_subject = (
    (('countryName', 'XY'),),
    (('localityName', 'Dask-distributed'),),
    (('organizationName', 'Dask'),),
    (('commonName', 'localhost'),)
)


def check_tls_extra(info):
    """Check the TLS ``extra_info`` dict attached to a secure comm."""
    assert isinstance(info, dict)
    # The peer must present exactly the bundled test certificate.
    assert info['peercert']['subject'] == cert_subject
    assert 'cipher' in info
    cipher_name, proto_name, secret_bits = info['cipher']
    # NOTE(review): chunk ends mid-function; further cipher checks follow
    # outside this view.
# NOTE(review): the leading names close an import statement begun outside
# this chunk.
    CommClosedError,
    parse_address,
    parse_host_port,
    unparse_host_port,
    resolve_address,
    get_address_host,
    get_local_address_for,
)

# External addresses used to exercise non-loopback connections.
EXTERNAL_IP4 = get_ip()
if has_ipv6():
    # get_ipv6() may emit warnings on some platforms; record them so they do
    # not leak into test output.
    with warnings.catch_warnings(record=True):
        warnings.simplefilter("always")
        EXTERNAL_IP6 = get_ipv6()

# CA certificate bundled with the test suite.
ca_file = get_cert("tls-ca-cert.pem")

# The Subject field of our test certs
cert_subject = (
    (("countryName", "XY"), ),
    (("localityName", "Dask-distributed"), ),
    (("organizationName", "Dask"), ),
    (("commonName", "localhost"), ),
)


def check_tls_extra(info):
    """Check the TLS ``extra_info`` dict attached to a secure comm."""
    assert isinstance(info, dict)
    # The peer must present exactly the bundled test certificate.
    assert info["peercert"]["subject"] == cert_subject
    assert "cipher" in info
    cipher_name, proto_name, secret_bits = info["cipher"]
    # NOTE(review): chunk ends mid-function; further cipher checks follow
    # outside this view.
from contextlib import contextmanager

try:
    import ssl
except ImportError:
    # stdlib ssl may be unavailable on minimal Python builds.
    ssl = None  # type: ignore

import pytest

import dask
from distributed.comm import connect, listen
from distributed.security import Security
from distributed.utils_test import get_cert

# Paths to the certificate files bundled with the test suite.
ca_file = get_cert("tls-ca-cert.pem")
cert1 = get_cert("tls-cert.pem")
key1 = get_cert("tls-key.pem")
keycert1 = get_cert("tls-key-cert.pem")

# Note this cipher uses RSA auth as this matches our test certs
FORCED_CIPHER = "ECDHE-RSA-AES128-GCM-SHA256"

# Cipher suites specific to TLS 1.3.
TLS_13_CIPHERS = [
    "TLS_AES_128_GCM_SHA256",
    "TLS_AES_256_GCM_SHA384",
    "TLS_CHACHA20_POLY1305_SHA256",
    "TLS_AES_128_CCM_SHA256",
    "TLS_AES_128_CCM_8_SHA256",
]
# NOTE(review): ``get_client_ssl_context)`` closes an import statement begun
# outside this chunk.
get_client_ssl_context)
from distributed.utils_test import loop  # noqa: F401
from distributed.protocol import (to_serialize, Serialized, serialize,
                                  deserialize)
from distributed.comm import (tcp, inproc, connect, listen, CommClosedError,
                              parse_address, parse_host_port,
                              unparse_host_port, resolve_address,
                              get_address_host, get_local_address_for)

# External addresses used to exercise non-loopback connections.
EXTERNAL_IP4 = get_ip()
if has_ipv6():
    EXTERNAL_IP6 = get_ipv6()

# CA certificate bundled with the test suite.
ca_file = get_cert('tls-ca-cert.pem')

# The Subject field of our test certs
cert_subject = ((('countryName', 'XY'), ),
                (('localityName', 'Dask-distributed'), ),
                (('organizationName', 'Dask'), ),
                (('commonName', 'localhost'), ))


def check_tls_extra(info):
    """Check the TLS ``extra_info`` dict attached to a secure comm."""
    assert isinstance(info, dict)
    # The peer must present exactly the bundled test certificate.
    assert info['peercert']['subject'] == cert_subject
    assert 'cipher' in info
    cipher_name, proto_name, secret_bits = info['cipher']
    # Most likely
    assert 'AES' in cipher_name
def get_client_ssl_context(certfile='tls-cert.pem', keyfile='tls-key.pem'):
    """Build a client-side SSL context for talking to the test servers.

    The context requires the server to present a certificate signed by the
    test CA (``ca_file``) and loads a client certificate/key pair so the
    server can verify us in turn.
    """
    context = ssl.create_default_context(ssl.Purpose.SERVER_AUTH,
                                         cafile=ca_file)
    context.load_cert_chain(get_cert(certfile), get_cert(keyfile))
    # Certificates are still required and verified; only the hostname match
    # is skipped, since tests may connect via addresses other than the CN.
    context.check_hostname = False
    context.verify_mode = ssl.CERT_REQUIRED
    return context
from contextlib import contextmanager
import sys

try:
    import ssl
except ImportError:
    # stdlib ssl may be unavailable on minimal Python builds.
    ssl = None

import pytest

from tornado import gen

from distributed.comm import connect, listen
from distributed.security import Security
from distributed.utils_test import new_config, get_cert, gen_test

# Paths to the certificate files bundled with the test suite.
ca_file = get_cert('tls-ca-cert.pem')
cert1 = get_cert('tls-cert.pem')
key1 = get_cert('tls-key.pem')
keycert1 = get_cert('tls-key-cert.pem')

# Note this cipher uses RSA auth as this matches our test certs
FORCED_CIPHER = 'ECDHE-RSA-AES128-GCM-SHA256'

# Cipher suites specific to TLS 1.3.
TLS_13_CIPHERS = [
    'TLS_AES_128_GCM_SHA256',
    'TLS_AES_256_GCM_SHA384',
    'TLS_CHACHA20_POLY1305_SHA256',
    'TLS_AES_128_CCM_SHA256',
    'TLS_AES_128_CCM_8_SHA256',
]
from __future__ import print_function, division, absolute_import from time import sleep from distributed import Client from distributed.utils_test import (popen, get_cert, new_config_file, tls_security, tls_only_config) from distributed.utils_test import loop # noqa: F401 from distributed.metrics import time ca_file = get_cert('tls-ca-cert.pem') cert = get_cert('tls-cert.pem') key = get_cert('tls-key.pem') keycert = get_cert('tls-key-cert.pem') tls_args = ['--tls-ca-file', ca_file, '--tls-cert', keycert] tls_args_2 = ['--tls-ca-file', ca_file, '--tls-cert', cert, '--tls-key', key] def wait_for_cores(c, ncores=1): start = time() while len(c.ncores()) < 1: sleep(0.1) assert time() < start + 10 def test_basic(loop): with popen(['dask-scheduler', '--no-bokeh'] + tls_args) as s: