def test_pod_logs(dcos_api_session: DcosApiSession) -> None:
    """Verify that logs of a Marathon pod container are exposed via the log API.

    Deploys a single-container pod that writes one line to stdout and one to
    stderr, then checks that the STDOUT/STDERR stream filters return the right
    lines and that the stdout log can be downloaded as a gzip attachment.
    Skipped when journald logging is disabled on the cluster.
    """
    skip_test_if_dcos_journald_log_disabled(dcos_api_session)
    test_uuid = uuid.uuid4().hex
    pod_id = 'integration-test-pod-logs-{}'.format(test_uuid)

    pod_definition = {
        'id': '/{}'.format(pod_id),
        'scaling': {'kind': 'fixed', 'instances': 1},
        'environment': {'PING': 'PONG'},
        'containers': [
            {
                'name': 'sleep1',
                # Writes STDOUT_LOG/STDERR_LOG to the two streams, then sleeps
                # so the task stays alive while logs are queried.
                'exec': {'command': {'shell': 'echo $PING > foo;echo STDOUT_LOG;echo STDERR_LOG >&2;sleep 10000'}},
                'resources': {'cpus': 0.1, 'mem': 32},
                'healthcheck': {'command': {'shell': 'test $PING = `cat foo`'}}
            }
        ],
        'networks': [{'mode': 'host'}]
    }

    with dcos_api_session.marathon.deploy_pod_and_cleanup(pod_definition):
        url = get_task_url(dcos_api_session, pod_id)
        # The last path component of the task URL is the container id; it is
        # embedded in the downloaded log file name checked below.
        container_id = url.split('/')[-1]

        # Each stream filter must return only the line written to that stream.
        check_response('STDOUT_LOG', lambda: dcos_api_session.get(url + '?filter=STREAM:STDOUT'))
        check_response('STDERR_LOG', lambda: dcos_api_session.get(url + '?filter=STREAM:STDERR'))

        # The download endpoint must serve the stdout log as a gzip attachment.
        response = dcos_api_session.get(url + '/download', query='limit=10&postfix=stdout')
        log_file_name = 'task-{}-stdout.log.gz'.format(container_id)
        check_response_ok(response, {'Content-Disposition': 'attachment; filename={}'.format(log_file_name)})
def test_task_logs(dcos_api_session: DcosApiSession) -> None:
    """Verify that logs of a plain Marathon task are exposed via the log API.

    Deploys a task writing to both stdout and stderr, checks the stream
    filters, and then verifies the Server-Sent-Events streaming endpoint
    delivers an id line followed by a valid SSE data entry.
    Skipped when journald logging is disabled on the cluster.
    """
    skip_test_if_dcos_journald_log_disabled(dcos_api_session)
    test_uuid = uuid.uuid4().hex
    task_id = "integration-test-task-logs-{}".format(test_uuid)

    task_definition = {
        "id": "/{}".format(task_id),
        "cpus": 0.1,
        "instances": 1,
        "mem": 128,
        # Writes one line per stream, then sleeps to keep the task running.
        "cmd": "echo STDOUT_LOG; echo STDERR_LOG >&2;sleep 999"
    }

    with dcos_api_session.marathon.deploy_and_cleanup(task_definition, check_health=False):
        url = get_task_url(dcos_api_session, task_id)
        check_response('STDOUT_LOG', lambda: dcos_api_session.get(url + '?filter=STREAM:STDOUT'))
        check_response('STDERR_LOG', lambda: dcos_api_session.get(url + '?filter=STREAM:STDERR'))

        # The streaming variant must speak Server-Sent Events.
        stream_url = get_task_url(dcos_api_session, task_id, stream=True)
        response = dcos_api_session.get(stream_url, stream=True, headers={'Accept': 'text/event-stream'})
        check_response_ok(response, {'Content-Type': 'text/event-stream', 'Cache-Control': 'no-cache'})

        lines = response.iter_lines()
        # SSE entries start with an id line, followed by the data line.
        sse_id = next(lines)
        assert sse_id, 'First line must be id. Got {}'.format(sse_id)
        data = next(lines).decode('utf-8', 'ignore')
        validate_sse_entry(data)
def test_adminrouter_access_control_enforcement(
        dcos_api_session: DcosApiSession, noauth_api_session: DcosApiSession) -> None:
    """Verify Admin Router rejects unauthenticated requests and accepts
    authenticated ones, via both the Authorization header and the auth cookie.

    Only meaningful when the cluster has authentication enabled; skipped
    otherwise.
    """
    reason = 'Can only test adminrouter enforcement if auth is enabled'
    if not auth_enabled():
        pytest.skip(reason)

    # Unauthenticated access to the IAM API must yield 401 with a
    # WWW-Authenticate challenge for one of the supported token types.
    r = noauth_api_session.get('/acs/api/v1')
    assert r.status_code == 401
    assert r.headers['WWW-Authenticate'] in ('acsjwt', 'oauthjwt')
    # Make sure that this is UI's error page body,
    # including some JavaScript.
    assert '<html>' in r.text
    assert '</html>' in r.text
    assert 'window.location' in r.text

    # Verify that certain locations are forbidden to access
    # when not authed, but are reachable as superuser.
    for path in ('/mesos_dns/v1/config', '/service/marathon/', '/mesos/'):
        r = noauth_api_session.get(path)
        assert r.status_code == 401
        r = dcos_api_session.get(path)
        assert r.status_code == 200

    # Test authentication with auth cookie instead of Authorization header.
    authcookie = {
        'dcos-acs-auth-cookie': dcos_api_session.auth_user.auth_cookie
    }
    r = noauth_api_session.get('/service/marathon/', cookies=authcookie)
    assert r.status_code == 200
def test_mesos_agent_role_assignment(dcos_api_session: DcosApiSession) -> None:
    """Each Mesos agent must advertise the default role matching its type:
    public agents register as 'slave_public', private agents as '*'.
    """
    state_endpoint = '/state'

    def default_role_of(node: str) -> str:
        # Query the agent's own /state endpoint directly on port 5051.
        response = dcos_api_session.get(state_endpoint, host=node, port=5051)
        return response.json()['flags']['default_role']

    for node in dcos_api_session.public_slaves:
        assert default_role_of(node) == 'slave_public'
    for node in dcos_api_session.slaves:
        assert default_role_of(node) == '*'
def test_log_proxy(dcos_api_session: DcosApiSession) -> None:
    """The log API of every agent must be reachable through Admin Router's
    /system/v1/agent/<id>/logs proxy and honour the requested entry count.
    """
    r = dcos_api_session.get('/mesos/master/slaves')
    check_response_ok(r, {})
    data = r.json()

    # Only consider agents that are part of this test cluster.
    cluster_agents = set(dcos_api_session.all_slaves)
    slaves_ids = sorted(
        agent['id'] for agent in data['slaves'] if agent['hostname'] in cluster_agents)

    for slave_id in slaves_ids:
        response = dcos_api_session.get(
            '/system/v1/agent/{}/logs/v1/range/?skip_prev=10&limit=10'.format(slave_id))
        check_response_ok(response, {'Content-Type': 'text/plain'})
        # Drop empty lines (e.g. the trailing newline) before counting.
        lines = [entry for entry in response.text.split('\n') if entry != '']
        assert len(lines) == 10, 'Expect 10 log entries. Got {}. All lines {}'.format(len(lines), lines)
def test_dcos_add_user(dcos_api_session: DcosApiSession, new_dcos_cli: DcosCli) -> None:
    """
    dcos_add_user.py script adds a user to IAM using the script
    dcos_add_user.py, and the created user carries the expected remote-OIDC
    attributes.
    """
    email_address = uuid.uuid4().hex + '@example.com'
    command = ['python', '/opt/mesosphere/bin/dcos_add_user.py', email_address]
    new_dcos_cli.exec_command(command)
    try:
        r = dcos_api_session.get('/acs/api/v1/users')
        r.raise_for_status()
        # The script must create a remote OIDC user bound to the default
        # dcos.auth0.com provider.
        expected_user_data = {
            "uid": email_address,
            "description": "",
            "url": "/acs/api/v1/users/" + email_address,
            "is_remote": True,
            "is_service": False,
            "provider_type": "oidc",
            "provider_id": "https://dcos.auth0.com/"
        }
        assert expected_user_data in r.json()['array']
    finally:
        # Always clean up the user so repeated runs start from a clean slate.
        delete_user(dcos_api_session, email_address)
def test_legacy_user_creation_with_empty_json_doc(
        dcos_api_session: DcosApiSession) -> None:
    """Verify Bouncer's backwards-compatible handling of empty user-creation
    bodies: email-like uids become remote OIDC users, other uids get a 400.
    """
    # Legacy HTTP clients built for dcos-oauth such as the web UI (up to DC/OS
    # 1.12) might insert users in the following way: uid appears to be an email
    # address, and the JSON document in the request body does not provide a
    # `public_key` or a `password` property (indicating local user), or is
    # empty. The legacy web UI would insert users like that and expect those
    # users to be remote users, usable with the legacy OIDC ID Token login
    # method through the 'https://dcos.auth0.com/' provider. This behavior is
    # maintained in Bouncer for backwards compatibility.
    r = dcos_api_session.put('/acs/api/v1/users/[email protected]', json={})
    assert r.status_code == 201, r.text

    # Bouncer annotates the created user (this is new compared to dcos-oauth).
    r = dcos_api_session.get('/acs/api/v1/users/[email protected]')
    assert r.json()['provider_type'] == 'oidc'
    assert r.json()['provider_id'] == 'https://dcos.auth0.com/'
    assert r.json()['is_remote'] is True

    # When the uid however does not appear to be an email address the more sane
    # behavior of Bouncer takes effect: an empty (meaningless) JSON body
    # results in a useful error message.
    r = dcos_api_session.put('/acs/api/v1/users/user1', json={})
    assert r.status_code == 400
    assert 'One of `password` or `public_key` must be provided' in r.text
def test_if_marathon_is_up(dcos_api_session: DcosApiSession) -> None:
    """Marathon must answer on /marathon/v2/info and identify itself."""
    response = dcos_api_session.get('/marathon/v2/info')
    assert response.status_code == 200
    info = response.json()
    assert "name" in info
    assert info["name"] == "marathon"
def test_if_overlay_master_is_up(dcos_api_session: DcosApiSession) -> None:
    """The overlay master module must be up and expose the expected
    configuration for the `dcos` (IPv4) and `dcos6` (IPv6) overlays.
    """
    response = dcos_api_session.get('/mesos/overlay-master/state')
    assert response.ok, "status_code: {}, content: {}".format(response.status_code, response.content)

    # Make sure the `dcos` and `dcos6` overlays have been configured.
    state = response.json()
    expected_network = {
        'vtep_subnet': '44.128.0.0/20',
        'vtep_subnet6': 'fd01:a::/64',
        'vtep_mac_oui': '70:B3:D5:00:00:00',
        'overlays': [
            {
                'name': 'dcos',
                'subnet': '9.0.0.0/8',
                'prefix': 24
            },
            {
                'name': 'dcos6',
                'subnet6': 'fd01:b::/64',
                'prefix6': 80
            },
        ],
    }
    assert nested_match(expected_network, state['network'])
def test_containerizer_debug_endpoint(
        dcos_api_session: DcosApiSession) -> None:
    """An agent's /containerizer/debug endpoint must be pollable and report
    an empty pending-operations list on an idle containerizer.
    """
    first_agent = dcos_api_session.slaves[0]
    response = dcos_api_session.get('/containerizer/debug', host=first_agent, port=5051)
    assert response.status_code == 200
    assert response.json() == {'pending': []}
def test_memory_profiling(dcos_api_session: DcosApiSession) -> None:
    """The Mesos master's memory profiler must support a full
    start -> stop -> download-raw cycle.
    """
    master_ip = dcos_api_session.masters[0]
    # The three endpoints are hit in order; each must succeed before the next.
    for endpoint in ('/memory-profiler/start',
                     '/memory-profiler/stop',
                     '/memory-profiler/download/raw'):
        response = dcos_api_session.get(endpoint, host=master_ip, port=5050)
        assert response.status_code == 200, response.text
def test_if_all_mesos_slaves_have_registered(
        dcos_api_session: DcosApiSession) -> None:
    """Every agent known to the test harness must be registered with the
    Mesos master.
    """
    response = dcos_api_session.get('/mesos/master/slaves')
    assert response.status_code == 200
    registered_hostnames = sorted(agent['hostname'] for agent in response.json()['slaves'])
    assert registered_hostnames == dcos_api_session.all_slaves
def skip_test_if_dcos_journald_log_disabled(dcos_api_session: DcosApiSession) -> None:
    """Skip the calling test unless the cluster's Mesos logging strategy is
    journald-based, as reported by the UI config metadata.
    """
    ui_config = dcos_api_session.get('/dcos-metadata/ui-config.json').json()
    try:
        strategy = ui_config['uiConfiguration']['plugins']['mesos']['logging-strategy']
    except Exception:
        # Log the malformed/unexpected config before re-raising, so the
        # failure cause is visible in the test output.
        log.exception('Unable to find logging strategy')
        raise
    if strategy.startswith('journald'):
        return
    pytest.skip('Skipping a test since journald logging is disabled')
def test_logout(dcos_api_session: DcosApiSession) -> None:
    """Test logout endpoint. It's a soft logout, instructing the user agent
    to delete the authentication cookie, i.e. this test does not have side
    effects on other tests.
    """
    response = dcos_api_session.get('/acs/api/v1/auth/logout')
    cookie_header = response.headers['set-cookie']
    # The cookie must be cleared (either bare-empty or quoted-empty form)
    # and carry an expiry so the browser actually drops it.
    cookie_cleared = (
        'dcos-acs-auth-cookie=;' in cookie_header
        or 'dcos-acs-auth-cookie="";' in cookie_header
    )
    assert cookie_cleared
    assert 'expires' in cookie_header.lower()
def test_if_srouter_service_endpoint_works(
        dcos_api_session: DcosApiSession) -> None:
    """Admin Router's /service/<name>/ proxy must reach Marathon and return
    its full info document.
    """
    response = dcos_api_session.get('/service/marathon/v2/info')
    assert response.status_code == 200
    # A real info document is non-trivial in size.
    assert len(response.text) > 100
    info = response.json()
    assert "name" in info
    assert info["name"] == "marathon"
    assert "version" in info
def test_if_pkgpanda_metadata_is_available(
        dcos_api_session: DcosApiSession) -> None:
    """Pkgpanda must serve the active package build metadata and it must
    contain a plausible set of packages.
    """
    response = dcos_api_session.get('/pkgpanda/active.buildinfo.full.json')
    assert response.status_code == 200
    buildinfo = response.json()
    assert 'mesos' in buildinfo
    # (prozlach) We can try to put minimal number of packages required
    assert len(buildinfo) > 5
def test_if_we_have_capabilities(dcos_api_session: DcosApiSession) -> None:
    """Indirectly test that Cosmos is up since this call is handled by Cosmos.
    """
    accept_header = 'application/vnd.dcos.capabilities+json;charset=utf-8;version=v1'
    response = dcos_api_session.get('/capabilities', headers={'Accept': accept_header})
    assert response.status_code == 200
    assert {'name': 'PACKAGE_MANAGEMENT'} in response.json()['capabilities']
def test_iam_migration(dcos_api_session: DcosApiSession) -> None:
    """Verify the dcos-bouncer-migrate-users service imports legacy users.

    Stops the migration service, confirms no '@example.com' test users exist
    in the IAM, restarts the service, and expects exactly two such users to
    appear after it has run.
    """
    check_call(['sudo', 'systemctl', 'stop', 'dcos-bouncer-migrate-users.service'])

    def _filter_test_uids(r):
        # Only consider uids containing '@example.com' (the migration's
        # well-known test accounts), ignoring any other users in the IAM.
        return [
            u['uid'] for u in r.json()['array']
            if '@example.com' in u['uid']]

    r = dcos_api_session.get('/acs/api/v1/users')
    test_uids = _filter_test_uids(r)
    assert len(test_uids) == 0

    check_call(['sudo', 'systemctl', 'start', 'dcos-bouncer-migrate-users.service'])
    # Sleep for 5 seconds and let the migration script run
    time.sleep(5)
    r = dcos_api_session.get('/acs/api/v1/users')
    test_uids = _filter_test_uids(r)
    assert len(test_uids) == 2
    # NOTE(review): the two expected uids below appear redacted to the same
    # value in this copy of the file, making the second assertion a duplicate
    # of the first — confirm the original distinct uids against the fixture.
    assert '*****@*****.**' in test_uids
    assert '*****@*****.**' in test_uids
def test_if_zookeeper_cluster_is_up(dcos_api_session: DcosApiSession) -> None:
    """Exhibitor must report a healthy ZooKeeper ensemble: one node per
    master, all serving, with exactly one leader.
    """
    response = dcos_api_session.get('/exhibitor/exhibitor/v1/cluster/status')
    assert response.status_code == 200
    status = response.json()

    hostnames = sorted(node['hostname'] for node in status)
    # Exhibitor reports code 3 for a serving ZooKeeper instance (mirrors the
    # original `serving_zks` computation).
    serving_count = sum(1 for node in status if node['code'] == 3)
    leader_count = sum(1 for node in status if node['isLeader'])

    assert hostnames == dcos_api_session.masters
    assert serving_count == len(dcos_api_session.masters)
    assert leader_count == 1
def test_if_cosmos_is_only_available_locally(
        dcos_api_session: DcosApiSession) -> None:
    """Cosmos' HTTP (7070) and admin (9990) ports must be bound only to the
    loopback interface: unreachable over the master's external address, but
    answering on 127.0.0.1.
    """
    # One should not be able to connect to the cosmos HTTP and admin ports
    # over non-lo interfaces.
    #
    # Fix: the `message=` keyword of pytest.raises was deprecated in pytest
    # 4.x and removed in pytest 5.0 (it only customized the "DID NOT RAISE"
    # text and never matched the exception), so it is dropped here.
    for port in (7070, 9990):
        with pytest.raises(ConnectionError):
            # "Cosmos reachable from non-lo interface" if this does not raise.
            dcos_api_session.get('/', host=dcos_api_session.masters[0], port=port, scheme='http')

    # One should be able to connect to the cosmos HTTP and admin ports at
    # 127.0.0.1:7070 and 127.0.0.1:9990.
    # Getting HTTP error codes shows that we made it all the way to
    # cosmos which is exactly what we're testing.
    r = dcos_api_session.get('/', host="127.0.0.1", port=7070, scheme='http')
    assert r.status_code == 404

    # In this case localhost:9990/ redirects to localhost:9990/admin so we
    # we expect a 200
    r = dcos_api_session.get('/', host="127.0.0.1", port=9990, scheme='http')
    assert r.status_code == 200
def test_redirect_host(self, dcos_api_session: DcosApiSession, path: str, expected: int) -> None:
    """ Redirection does not propagate a bad Host header """
    # NOTE(review): `expected` is compared against the integer `status_code`,
    # so it is annotated `int` here (the previous `str` annotation was wrong);
    # confirm against the parametrize values supplying it.
    r = dcos_api_session.get(path, headers={'Host': 'bad.host'}, allow_redirects=False)
    r.raise_for_status()
    assert r.status_code == expected
    # The redirect target must not echo the client-supplied Host header.
    assert 'bad.host' not in r.headers['Location']
def test_pkgpanda_api(dcos_api_session: DcosApiSession) -> None:
    """Exercise the pkgpanda HTTP API on every node.

    For each master and agent: validates that /pkgpanda/repository/ and
    /pkgpanda/active/ list well-formed '<name>--<version>' package ids, that
    each id resolves to a matching detail document, that active packages are
    a subset of the repository, and that the active set agrees with the
    cluster's active.buildinfo.full.json.
    """
    expanded_config = get_expanded_config()
    if 'advanced' in expanded_config['template_filenames']:
        reason = ('Will not work on advanced CF templates, see: '
                  'https://jira.mesosphere.com/browse/DCOS_OSS-1375')
        pytest.skip(reason)

    def get_and_validate_package_ids(path: str, node: str) -> list:
        # List the package ids at `path` on `node` and verify each id's
        # detail document is consistent with the '<name>--<version>' format.
        r = dcos_api_session.get(path, node=node)
        assert r.status_code == 200
        package_ids = r.json()
        assert isinstance(package_ids, list)
        for package_id in package_ids:
            r = dcos_api_session.get(path + package_id, node=node)
            assert r.status_code == 200
            name, version = package_id.split('--')
            assert r.json() == {
                'id': package_id,
                'name': name,
                'version': version
            }
        return package_ids

    active_buildinfo = dcos_api_session.get('/pkgpanda/active.buildinfo.full.json').json()
    active_buildinfo_packages = sorted(
        # Setup packages don't have a buildinfo.
        (package_name, info['package_version'] if info else None)
        for package_name, info in active_buildinfo.items())

    def assert_packages_match_active_buildinfo(package_ids: list) -> None:
        # Compare a node's active package ids with the buildinfo manifest.
        packages = sorted(map(lambda id_: tuple(id_.split('--')), package_ids))
        assert len(packages) == len(active_buildinfo_packages)
        for package, buildinfo_package in zip(packages, active_buildinfo_packages):
            if buildinfo_package[1] is None:
                # No buildinfo for this package, so we can only compare names.
                assert package[0] == buildinfo_package[0]
            else:
                assert package == buildinfo_package

    for node in dcos_api_session.masters + dcos_api_session.all_slaves:
        package_ids = get_and_validate_package_ids('/pkgpanda/repository/', node)
        active_package_ids = get_and_validate_package_ids('/pkgpanda/active/', node)
        # Every active package must exist in the repository.
        assert set(active_package_ids) <= set(package_ids)
        assert_packages_match_active_buildinfo(active_package_ids)
def test_if_overlay_master_agent_is_up(
        dcos_api_session: DcosApiSession) -> None:
    """Verify an agent's overlay configuration matches the overlay master's
    view of that agent.

    Fetches overlay state from both the master and the agent modules, locates
    the agent in the master's state by IP, and validates both of the agent's
    overlays ('dcos' and 'dcos6') against the master's records for it.
    """
    master_response = dcos_api_session.get('/mesos/overlay-master/state')
    assert master_response.ok,\
        "status_code: {}, content: {}".format(master_response.status_code, master_response.content)
    master_overlay_json = master_response.json()

    agent_response = dcos_api_session.get('/mesos/overlay-agent/overlay')
    assert agent_response.ok,\
        "status_code: {}, content: {}".format(agent_response.status_code, agent_response.content)

    # Make sure the `dcos` and `dcos6` overlays have been configured.
    agent_overlay_json = agent_response.json()
    assert 'ip' in agent_overlay_json
    agent_ip = agent_overlay_json['ip']

    # Find this agent's overlays in the master's state by matching on IP.
    master_agent_overlays = None
    for agent in master_overlay_json['agents']:
        assert 'ip' in agent
        if agent['ip'] == agent_ip:
            assert len(agent['overlays']) == 2
            master_agent_overlays = agent['overlays']

    # Fix: fail with a clear message when the master does not know this agent
    # at all. Previously `master_agent_overlays` stayed None and the subscripts
    # below crashed with an opaque TypeError. This assert also narrows the
    # Optional, so the `# type: ignore` comments are no longer needed.
    assert master_agent_overlays is not None, \
        'Agent {} not found in overlay master state'.format(agent_ip)

    assert 'overlays' in agent_overlay_json
    assert len(agent_overlay_json['overlays']) == 2
    for agent_overlay in agent_overlay_json['overlays']:
        overlay_name = agent_overlay['info']['name']
        # Validate against the master's record with the same overlay name.
        if master_agent_overlays[0]['info']['name'] == overlay_name:
            _validate_dcos_overlay(overlay_name, agent_overlay, master_agent_overlays[0])
        else:
            _validate_dcos_overlay(overlay_name, agent_overlay, master_agent_overlays[1])
def test_checks_api(dcos_api_session: DcosApiSession) -> None:
    """
    Test the checks API at /system/checks/

    This will test that all checks run on all agents return a normal status.
    A failure in this test may be an indicator that some unrelated component
    failed and dcos-checks functioned properly.
    """
    checks_uri = '/system/checks/v1/'

    # Test that we can list and run node and cluster checks on a master,
    # agent, and public agent.
    check_nodes = []
    for nodes in [
            dcos_api_session.masters,
            dcos_api_session.slaves,
            dcos_api_session.public_slaves]:
        # A node class may be empty (e.g. no public agents in this cluster).
        if nodes:
            check_nodes.append(random.choice(nodes))
    logging.info('Testing %s on these nodes: %s', checks_uri, ', '.join(check_nodes))

    for node in check_nodes:
        for check_type in ['node', 'cluster']:
            uri = '{}{}/'.format(checks_uri, check_type)
            logging.info('Testing %s on %s', uri, node)

            # List checks
            r = dcos_api_session.get(uri, node=node)
            assert r.status_code == 200
            checks = r.json()
            assert isinstance(checks, dict)

            # Run checks
            r = dcos_api_session.post(uri, node=node)
            assert r.status_code == 200
            results = r.json()
            assert isinstance(results, dict)

            # check that the returned statuses of each check is 0
            expected_status = {c: 0 for c in checks.keys()}
            response_status = {
                c: v['status'] for c, v in results['checks'].items()
            }

            # print out the response for debugging
            logging.info('Response: {}'.format(results))
            assert expected_status == response_status

            # check that overall status is also 0
            assert results['status'] == 0
def test_accept_gzip(self, dcos_api_session: DcosApiSession) -> None:
    """ Clients that send "Accept-Encoding: gzip" get gzipped responses
    for some assets.
    """
    index = dcos_api_session.get('/')
    index.raise_for_status()

    # Collect the asset paths referenced by the index page.
    asset_paths = set(self.pat.findall(index.text))
    assert len(asset_paths) > 0

    for asset in asset_paths:
        log.info('Load %r', asset)
        response = dcos_api_session.head(asset, headers={'Accept-Encoding': 'gzip'})
        response.raise_for_status()
        log.info('Response headers: %s', repr(response.headers))
        assert response.headers.get('content-encoding') == 'gzip'
def test_if_dcos_ui_is_up(dcos_api_session: DcosApiSession) -> None:
    """The DC/OS UI must be served at '/', and every relative link or
    stylesheet reference on the index page must resolve.
    """
    r = dcos_api_session.get('/')
    assert r.status_code == 200
    assert len(r.text) > 100
    assert 'DC/OS' in r.text

    # Not sure if it's really needed, seems a bit of an overkill:
    soup = bs4.BeautifulSoup(r.text, "html.parser")
    for link in soup.find_all(['link', 'a'], href=True):
        if urllib.parse.urlparse(link.attrs['href']).netloc:
            # Relative URLs only, others are to complex to handle here
            continue
        # Some links might start with a dot (e.g. ./img/...). Remove.
        href = link.attrs['href'].lstrip('.')
        link_response = dcos_api_session.head(href)
        assert link_response.status_code == 200
def test_not_accept_gzip(self, dcos_api_session: DcosApiSession) -> None:
    """ Clients that do not send "Accept-Encoding: gzip" do not get gzipped
    responses.
    """
    index = dcos_api_session.get('/')
    index.raise_for_status()

    # Collect the asset paths referenced by the index page.
    asset_paths = set(self.pat.findall(index.text))
    assert len(asset_paths) > 0

    for asset in asset_paths:
        log.info('Load %r', asset)
        # Set a benign `Accept-Encoding` header to prevent underlying
        # libraries setting their own header based on their capabilities.
        response = dcos_api_session.head(asset, headers={'Accept-Encoding': 'identity'})
        response.raise_for_status()
        log.info('Response headers: %s', repr(response.headers))
        assert 'content-encoding' not in response.headers
def test_if_all_exhibitors_are_in_sync(
        dcos_api_session: DcosApiSession) -> None:
    """Every master's local Exhibitor must report the same cluster status as
    the one served through Admin Router.
    """
    reference = dcos_api_session.get('/exhibitor/exhibitor/v1/cluster/status')
    assert reference.status_code == 200
    correct_data = sorted(reference.json(), key=lambda entry: entry['hostname'])

    for master_node_ip in dcos_api_session.masters:
        # This relies on the fact that Admin Router always proxies the local
        # Exhibitor.
        resp = requests.get(
            'http://{}/exhibitor/exhibitor/v1/cluster/status'.format(master_node_ip),
            verify=False)
        assert resp.status_code == 200
        tested_data = sorted(resp.json(), key=lambda entry: entry['hostname'])
        assert tested_data == correct_data
def test_fault_domain(dcos_api_session: DcosApiSession) -> None:
    """When fault domains are enabled, the leading master and every agent must
    report the region the master was configured with (agent zones may vary).
    Skipped when fault domains are disabled.
    """
    expanded_config = test_helpers.get_expanded_config()
    if expanded_config['fault_domain_enabled'] == 'false':
        pytest.skip('fault domain is not set')

    master_ip = dcos_api_session.masters[0]
    r = dcos_api_session.get('/state', host=master_ip, port=5050)
    assert r.status_code == 200
    state = r.json()

    # check flags and get the domain parameters mesos master was started with.
    assert 'flags' in state, 'missing flags in state json'
    assert 'domain' in state['flags'], 'missing domain in state json flags'
    # The --domain flag value is itself JSON-encoded inside the state document.
    cli_flag = json.loads(state['flags']['domain'])
    expected_region, expected_zone = get_region_zone(cli_flag)

    # check master top level keys
    assert 'leader_info' in state, 'leader_info is missing in state json'
    assert 'domain' in state['leader_info'], 'domain is missing in state json'
    leader_region, leader_zone = get_region_zone(state['leader_info']['domain'])

    assert leader_region == expected_region, 'expect region {}. Got {}'.format(expected_region, leader_region)
    assert leader_zone == expected_zone, 'expect zone {}. Got {}'.format(expected_zone, leader_zone)

    for agent in state['slaves']:
        assert 'domain' in agent, 'missing domain field for agent. {}'.format(agent)
        agent_region, agent_zone = get_region_zone(agent['domain'])
        assert agent_region == expected_region, 'expect region {}. Got {}'.format(expected_region, agent_region)
        # agent_zone might be different on agents, so we just make sure it's a sane value
        assert agent_zone, 'agent_zone cannot be empty'
def _get_cluster_resources(dcos_api_session: DcosApiSession) -> Any:
    """Fetch and return the Mesos state summary as parsed JSON."""
    response = dcos_api_session.get('/mesos/state-summary')
    return response.json()