def test_if_unknown_user_is_forbidden_access(
        self, mocker, master_ar_process, path, valid_user_header):
    """A token for a uid unknown to IAM is rejected with HTTP 401.

    The IAM mock is set to deny every request; we verify that AR actually
    queried IAM for the `bozydar` uid and logged `User not found` exactly
    once on stderr.
    """
    # Expected stderr entry: exactly one occurrence (SearchCriteria(1, ...)).
    log_messages = {
        'User not found: `bozydar`':
            SearchCriteria(1, True)}
    with iam_denies_all_requests(mocker):
        with assert_iam_queried_for_uid(mocker, 'bozydar'):
            assert_endpoint_response(
                master_ar_process,
                path,
                401,
                headers=valid_user_header,
                assert_stderr=log_messages)
def test_app_without_task_host(self, nginx_class, mocker, superuser_user_header):
    """An app whose task has no `host` field is reported as invalid."""
    broken_app = self._nginx_alwaysthere_app()
    broken_app["tasks"][0].pop("host", None)
    expected_msg = "Cannot find host for app '{}'".format(broken_app["id"])
    expected_log = {expected_msg: SearchCriteria(1, True)}
    self.assert_filter_regexp_for_invalid_app(
        expected_log, broken_app, nginx_class, mocker, superuser_user_header)
def test_if_public_ip_detect_script_failue_is_handled(
        self, master_ar_process_perclass, valid_user_header):
    """If the public-IP data file is missing, `/metadata` still answers 200.

    AR falls back to 127.0.0.1 for PUBLIC_IPV4 and the resulting Python
    traceback (FileNotFoundError) is logged exactly once.
    """
    url = master_ar_process_perclass.make_url_from_path('/metadata')
    # Raw strings: these are regex patterns, and `\(`/`\[` are invalid
    # escape sequences in plain str literals (a warning since Python 3.6,
    # a future error). Runtime values are unchanged.
    filter_regexp = {
        r'Traceback \(most recent call last\):': SearchCriteria(1, True),
        (r"FileNotFoundError: \[Errno 2\] No such file or directory:"
         r" '/usr/local/detect_ip_public_data.txt'"): SearchCriteria(1, True),
    }
    lbf = LineBufferFilter(
        filter_regexp,
        line_buffer=master_ar_process_perclass.stderr_line_buffer)
    # `overridden_file_content` restores the file after the test; inside
    # the context we delete it to trigger the failure path.
    with lbf, overridden_file_content('/usr/local/detect_ip_public_data.txt'):
        os.unlink('/usr/local/detect_ip_public_data.txt')
        resp = requests.get(
            url,
            allow_redirects=False,
            headers=valid_user_header)
    assert resp.status_code == 200
    assert lbf.extra_matches == {}
    resp_data = resp.json()
    assert resp_data['PUBLIC_IPV4'] == "127.0.0.1"
def test_valid_auth_token(self, master_ar_process_perclass, valid_user_header):
    """A request carrying a valid auth token gets HTTP 200 and the decoded
    uid shows up once in the error log."""
    expected_log = {
        "UID from the valid DC/OS authentication token: `bozydar`":
            SearchCriteria(1, True),
    }
    assert_endpoint_response(
        master_ar_process_perclass,
        EXHIBITOR_PATH,
        200,
        headers=valid_user_header,
        assert_error_log=expected_log,
    )
def test_app_with_port_index_nan_label(self, nginx_class, mocker, superuser_user_header):
    """A non-numeric DCOS_SERVICE_PORT_INDEX label marks the app invalid."""
    broken_app = self._nginx_alwaysthere_app()
    broken_app["labels"]["DCOS_SERVICE_PORT_INDEX"] = "not a number"
    expected_msg = "Cannot convert port to number for app '{}'".format(
        broken_app["id"])
    expected_log = {expected_msg: SearchCriteria(1, True)}
    self.assert_filter_regexp_for_invalid_app(
        expected_log, broken_app, nginx_class, mocker, superuser_user_header)
def test_app_without_tasks_in_running_state(self, nginx_class, mocker, superuser_user_header):
    """An app with no TASK_RUNNING task is reported as invalid."""
    broken_app = self._nginx_alwaysthere_app()
    broken_app["tasks"] = [{"state": "TASK_FAILED"}]
    expected_msg = "No task in state TASK_RUNNING for app '{}'".format(
        broken_app["id"])
    expected_log = {expected_msg: SearchCriteria(1, True)}
    self.assert_filter_regexp_for_invalid_app(
        expected_log, broken_app, nginx_class, mocker, superuser_user_header)
def test_if_var_is_honoured(self, valid_ip, nginx_class, mocker):
    """AR logs the configured `host_ip` as the local Mesos master IP."""
    expected_log = {
        'Local Mesos Master IP: {}'.format(valid_ip): SearchCriteria(1, True),
    }
    ar = nginx_class(host_ip=valid_ip)
    with GuardedSubprocess(ar):
        log_filter = LineBufferFilter(expected_log,
                                      line_buffer=ar.stderr_line_buffer)
        log_filter.scan_log_buffer()
        assert log_filter.extra_matches == {}
def test_app_without_service_scheme_label(self, nginx_class, mocker, superuser_user_header):
    """An app missing the DCOS_SERVICE_SCHEME label is reported invalid."""
    broken_app = self._nginx_alwaysthere_app()
    broken_app["labels"].pop("DCOS_SERVICE_SCHEME", None)
    expected_msg = "Cannot find DCOS_SERVICE_SCHEME for app '{}'".format(
        broken_app["id"])
    expected_log = {expected_msg: SearchCriteria(1, True)}
    self.assert_filter_regexp_for_invalid_app(
        expected_log, broken_app, nginx_class, mocker, superuser_user_header)
def test_app_without_task_specified_port_idx(self, nginx_class, mocker, valid_user_header):
    """A DCOS_SERVICE_PORT_INDEX pointing past the task's port list marks
    the app invalid."""
    broken_app = self._scheduler_alwaysthere_app()
    broken_app["labels"]["DCOS_SERVICE_PORT_INDEX"] = "5"
    expected_msg = (
        "Cannot find port at Marathon port index '5' for app '{}'".format(
            broken_app["id"]))
    expected_log = {expected_msg: SearchCriteria(1, True)}
    self._assert_filter_regexp_for_invalid_app(
        expected_log, broken_app, nginx_class, mocker, valid_user_header)
def test_if_absent_var_is_handled(self, nginx_class, mocker):
    """Without a configured `host_ip`, AR logs the master IP as `unknown`."""
    expected_log = {
        'Local Mesos Master IP: unknown': SearchCriteria(1, True),
    }
    ar = nginx_class(host_ip=None)
    with GuardedSubprocess(ar):
        log_filter = LineBufferFilter(expected_log,
                                      line_buffer=ar.stderr_line_buffer)
        log_filter.scan_log_buffer()
        assert log_filter.extra_matches == {}
def test_app_without_task_ports(self, nginx_class, mocker, valid_user_header):
    """An app whose task has no `ports` field is reported as invalid."""
    broken_app = self._scheduler_alwaysthere_app()
    broken_app["tasks"][0].pop("ports", None)
    expected_msg = "Cannot find ports for app '{}'".format(broken_app["id"])
    expected_log = {expected_msg: SearchCriteria(1, True)}
    self._assert_filter_regexp_for_invalid_app(
        expected_log, broken_app, nginx_class, mocker, valid_user_header)
def test_app_without_mesos_tasks(self, nginx_class, mocker, valid_user_header):
    """An app with an empty task list is reported as invalid."""
    broken_app = self._scheduler_alwaysthere_app()
    broken_app["tasks"] = []
    expected_msg = "No task in state TASK_RUNNING for app '{}'".format(
        broken_app["id"])
    expected_log = {expected_msg: SearchCriteria(1, True)}
    self._assert_filter_regexp_for_invalid_app(
        expected_log, broken_app, nginx_class, mocker, valid_user_header)
def test_valid_auth_token_priority(
        self,
        master_ar_process_perclass,
        valid_user_header,
        jwt_generator,
        ):
    """The Authorization header wins over the auth cookie.

    Both a valid header token (uid `bozydar`, presumably carried by
    `valid_user_header` — the log expectations below imply this) and a
    valid cookie token (uid `test`) are sent; only the header's uid may
    appear in the log (criteria: 1 occurrence vs 0 occurrences).
    """
    log_messages = {
        "UID from the valid DC/OS authentication token: `bozydar`":
            SearchCriteria(1, True),
        "UID from the valid DC/OS authentication token: `test`":
            SearchCriteria(0, True),
    }
    token = jwt_generator(uid='test')
    assert_endpoint_response(
        master_ar_process_perclass,
        EXHIBITOR_PATH,
        200,
        assert_error_log=log_messages,
        headers=valid_user_header,
        cookies={"dcos-acs-auth-cookie": token},
    )
def test_if_iam_broken_resp_code_is_handled(
        self,
        master_ar_process_perclass,
        valid_user_header,
        mocker,
        ):
    """A broken IAM response results in HTTP 500, with both the decoded
    uid and the unexpected-response message logged."""
    # Tell the IAM mock to reply with garbage from now on:
    mocker.send_command(
        endpoint_id='http://127.0.0.1:8101',
        func_name='always_bork',
        aux_data=True,
    )
    expected_log = {
        'UID from valid JWT: `bozydar`': SearchCriteria(1, True),
        "Unexpected response from IAM: ": SearchCriteria(1, True),
    }
    assert_endpoint_response(
        master_ar_process_perclass,
        EXHIBITOR_PATH,
        500,
        headers=valid_user_header,
        assert_stderr=expected_log,
    )
def test_if_not_defining_the_var_is_handled(self, nginx_class, role):
    """Starting AR without SECRET_KEY_FILE_PATH logs exactly one notice."""
    # Scanning for the exact log entry is bad, but in this case - can't be
    # avoided.
    expected_log = {
        'SECRET_KEY_FILE_PATH not set.': SearchCriteria(1, False),
    }
    ar = nginx_class(role=role, secret_key_file_path=None)
    with GuardedSubprocess(ar):
        log_filter = LineBufferFilter(expected_log,
                                      line_buffer=ar.stderr_line_buffer)
        log_filter.scan_log_buffer()
        assert log_filter.extra_matches == {}
def test_if_var_pointing_to_empty_file_is_handled(
        self, nginx_class, role, empty_file):
    """An empty verification-key file yields a `key not set` log entry."""
    # Scanning for the exact log entry is bad, but in this case - can't be
    # avoided.
    expected_log = {
        'Auth token verification key not set': SearchCriteria(1, False),
    }
    ar = nginx_class(role=role,
                     auth_token_verification_key_file_path=empty_file)
    with GuardedSubprocess(ar):
        log_filter = LineBufferFilter(expected_log,
                                      line_buffer=ar.stderr_line_buffer)
        log_filter.scan_log_buffer()
        assert log_filter.extra_matches == {}
def test_if_auth_module_is_enabled_by_unless_false_str_is_provided(
        self, nginx_class, mocker, enable_keyword):
    """Any auth_enabled value other than the literal `false` keeps the
    authentication module active, so anonymous requests get 401."""
    expected_log = {
        'Activate authentication module.': SearchCriteria(1, True),
    }
    ar = nginx_class(auth_enabled=enable_keyword)
    url = ar.make_url_from_path('/exhibitor/foo/bar')
    with GuardedSubprocess(ar):
        log_filter = LineBufferFilter(expected_log,
                                      line_buffer=ar.stderr_line_buffer)
        # No credentials on purpose — must be rejected:
        resp = requests.get(url, allow_redirects=False)
        assert resp.status_code == 401
        log_filter.scan_log_buffer()
        assert log_filter.extra_matches == {}
def test_missmatched_auth_token_algo_in_cookie(
        self,
        master_ar_process_perclass,
        mismatch_alg_jwt_generator,
        repo_is_ee,
        ):
    """A cookie token signed with the wrong JWT algorithm is rejected (401).

    The expected log message names the *other* repo flavour's algorithm:
    `jwt_type_str(not repo_is_ee)` — i.e. the alg that this flavour's
    validator does not whitelist.
    """
    log_messages = {
        ("Invalid token. Reason: whitelist unsupported alg: " +
            jwt_type_str(not repo_is_ee)):
            SearchCriteria(1, True),
        }
    token = mismatch_alg_jwt_generator(uid='user')
    assert_endpoint_response(
        master_ar_process_perclass,
        EXHIBITOR_PATH,
        401,
        assert_stderr=log_messages,
        cookies={"dcos-acs-auth-cookie": token},
    )
def test_expired_auth_token(
        self,
        master_ar_process_perclass,
        jwt_generator,
        ):
    """A token whose `exp` claim lies in the past is rejected with 401."""
    expected_log = {
        "Invalid token. Reason: 'exp' claim expired at ":
            SearchCriteria(1, True),
    }
    expired_token = jwt_generator(uid='test', exp=time.time() - 15)
    headers = {'Authorization': 'token={}'.format(expired_token)}
    assert_endpoint_response(
        master_ar_process_perclass,
        EXHIBITOR_PATH,
        401,
        headers=headers,
        assert_stderr=expected_log,
    )
def test_if_auth_module_can_be_disabled(self, nginx_class, mocker):
    """With auth_enabled='false' the auth module is deactivated and an
    anonymous request succeeds (HTTP 200)."""
    expected_log = {
        ("ADMINROUTER_ACTIVATE_AUTH_MODULE set to `false`. "
         "Deactivate authentication module."): SearchCriteria(1, True),
    }
    ar = nginx_class(auth_enabled='false')
    url = ar.make_url_from_path('/exhibitor/foo/bar')
    with GuardedSubprocess(ar):
        log_filter = LineBufferFilter(expected_log,
                                      line_buffer=ar.stderr_line_buffer)
        resp = requests.get(url, allow_redirects=False)
        assert resp.status_code == 200
        log_filter.scan_log_buffer()
        assert log_filter.extra_matches == {}
def test_forged_auth_token(
        self,
        master_ar_process_perclass,
        forged_user_header,
        ):
    """A token with a forged signature is rejected with HTTP 401."""
    # Different validators emit different log messages, so we create two
    # tests - one for open, one for EE, each one having different log
    # message.
    log_messages = {
        "Invalid token": SearchCriteria(1, True),
    }
    assert_endpoint_response(
        master_ar_process_perclass,
        EXHIBITOR_PATH,
        401,
        assert_error_log=log_messages,
        headers=forged_user_header,
    )
def test_valid_auth_token_without_uid(
        self,
        master_ar_process_perclass,
        jwt_generator,
        ):
    """An otherwise-valid token missing the `uid` claim is rejected (401)."""
    # Raw string: `\[`/`\]` are regex escapes and invalid escape
    # sequences in a plain str literal (a warning since Python 3.6, a
    # future error). The runtime value is unchanged.
    log_messages = {
        r"Invalid token. Reason: Missing one of claims - \[ uid \]":
            SearchCriteria(1, True),
    }
    token = jwt_generator(uid='test', skip_uid_claim=True)
    auth_header = {'Authorization': 'token={}'.format(token)}
    assert_endpoint_response(
        master_ar_process_perclass,
        EXHIBITOR_PATH,
        401,
        assert_stderr=log_messages,
        headers=auth_header,
    )
def test_if_default_scheme_is_honourded_by_mleader_endpoint(
        self, nginx_class, mocker, superuser_user_header):
    """The `default_scheme` setting is applied to the Marathon-leader
    upstream once the cache picks up the new leader.

    Flow: the initial request 502s (the default leader is not reachable
    over https, presumably — the mock only serves the new one), then the
    mock switches the leader to 127.0.0.4:443 and, after the cache
    refreshes, the endpoint proxies to `https://127.0.0.4:443`.
    """
    filter_regexp = {'Default scheme: https://': SearchCriteria(1, False)}
    cache_poll_period = 3
    ar = nginx_class(cache_poll_period=cache_poll_period,
                     cache_expiration=cache_poll_period - 1,
                     default_scheme="https://")
    url = ar.make_url_from_path('/system/v1/leader/marathon/foo/bar/baz')
    with GuardedSubprocess(ar):
        lbf = LineBufferFilter(filter_regexp,
                               line_buffer=ar.stderr_line_buffer)
        resp = requests.get(url,
                            allow_redirects=False,
                            headers=superuser_user_header)
        assert resp.status_code == 502
        mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                            func_name='change_leader',
                            aux_data="127.0.0.4:443")
        # First poll (2s) + normal poll interval(4s) < 2 * normal poll
        # interval(4s)
        # NOTE(review): the 2s/4s figures in the comment above don't match
        # cache_poll_period=3 used here — verify against AR defaults.
        time.sleep(cache_poll_period * 2)
        resp = requests.get(url,
                            allow_redirects=False,
                            headers=superuser_user_header)
        assert resp.status_code == 200
        req_data = resp.json()
        assert req_data['endpoint_id'] == 'https://127.0.0.4:443'
        lbf.scan_log_buffer()
        assert lbf.extra_matches == {}
def test_if_broken_response_from_marathon_is_handled(
        self, nginx_class, mocker, valid_user_header):
    """An undecodable Marathon `/v2/leader` reply yields HTTP 503 and a
    `Cannot decode Marathon leader JSON` log entry."""
    filter_regexp = {
        'Cannot decode Marathon leader JSON': SearchCriteria(1, True),
    }
    # Make the Marathon mock corrupt its leader reply:
    mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                        func_name='break_leader_reply')
    ar = nginx_class()
    url = ar.make_url_from_path('/system/v1/leader/marathon/foo/bar/baz')
    with GuardedSubprocess(ar):
        # Extra timeout to cover the cache's first poll delay:
        lbf = LineBufferFilter(filter_regexp,
                               timeout=(CACHE_FIRST_POLL_DELAY + 1),
                               line_buffer=ar.stderr_line_buffer)
        resp = requests.get(url,
                            allow_redirects=False,
                            headers=valid_user_header)
        lbf.scan_log_buffer()
        assert resp.status_code == 503
        assert lbf.extra_matches == {}
def test_if_invalid_auth_attempt_is_logged_correctly(
        self, master_ar_process, valid_jwt_generator):
    """A valid token for an unknown uid is rejected (401) and the
    `User not found` message for that uid is logged."""
    # Create some random, unique user that we can grep for:
    uid = 'some_random_string_abc1251231143'
    # Raw string: `\(`/`\)` are regex escapes and invalid escape
    # sequences in a plain str literal (a warning since Python 3.6, a
    # future error). The runtime value is unchanged.
    filter_regexp = {
        r'validate_jwt_or_exit\(\): User not found: `{}`'.format(uid):
            SearchCriteria(1, False)}
    lbf = LineBufferFilter(
        filter_regexp,
        line_buffer=master_ar_process.stderr_line_buffer)
    # Create token for this user:
    token = valid_jwt_generator(uid)
    header = {'Authorization': 'token={}'.format(token)}
    url = master_ar_process.make_url_from_path()
    with lbf:
        resp = requests.get(url,
                            allow_redirects=False,
                            headers=header)
    assert resp.status_code == 401
    assert lbf.extra_matches == {}
def test_if_mesos_upstream_env_is_honoured(self, nginx_class, mocker, superuser_user_header):
    """The `upstream_mesos` setting controls where the cache fetches Mesos
    state from: only the configured upstream may receive requests, and the
    upstream/request URLs must appear in the log."""
    # Stage 0 - setup the environment:
    mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                        func_name='record_requests')
    mocker.send_command(endpoint_id='http://127.0.0.3:5050',
                        func_name='record_requests')

    # Stage 1 - we set Mesos upstream to http://127.0.0.2:5050 and
    # verify that all the requests from cache go there:
    filter_regexp = {
        'Mesos upstream: http://127.0.0.2:5050': SearchCriteria(1, True),
        'Request url: http://127.0.0.2:5050/master/state-summary':
            SearchCriteria(1, True),
    }
    ar = nginx_class(upstream_mesos="http://127.0.0.2:5050")
    agent_id = 'de1baf83-c36c-4d23-9cb0-f89f596cd6ab-S1'
    url = ar.make_url_from_path('/agent/{}/blah/blah'.format(agent_id))
    with GuardedSubprocess(ar):
        lbf = LineBufferFilter(filter_regexp,
                               line_buffer=ar.stderr_line_buffer)
        requests.get(url,
                     allow_redirects=False,
                     headers=superuser_user_header)
        lbf.scan_log_buffer()
        m1_requests = mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                                          func_name='get_recorded_requests')
        assert len(m1_requests) == 1
        m2_requests = mocker.send_command(endpoint_id='http://127.0.0.3:5050',
                                          func_name='get_recorded_requests')
        assert len(m2_requests) == 0
        assert lbf.extra_matches == {}
    # Stage 1 ends
    mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                        func_name='erase_recorded_requests')

    # Stage 2 - we set Mesos upstream to http://127.0.0.3:5050 and
    # verify that all the requests go to the new upstream
    filter_regexp = {
        'Mesos upstream: http://127.0.0.3:5050': SearchCriteria(1, True),
        'Request url: http://127.0.0.3:5050/master/state-summary':
            SearchCriteria(1, True),
    }
    ar = nginx_class(upstream_mesos="http://127.0.0.3:5050")
    with GuardedSubprocess(ar):
        lbf = LineBufferFilter(filter_regexp,
                               line_buffer=ar.stderr_line_buffer)
        requests.get(url,
                     allow_redirects=False,
                     headers=superuser_user_header)
        lbf.scan_log_buffer()
        m1_requests = mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                                          func_name='get_recorded_requests')
        assert len(m1_requests) == 0
        m2_requests = mocker.send_command(endpoint_id='http://127.0.0.3:5050',
                                          func_name='get_recorded_requests')
        assert len(m2_requests) == 1
        assert lbf.extra_matches == {}
def test_if_temp_dns_borkage_does_not_disrupt_mesosleader_caching(
        self, nginx_class, dns_server_mock, valid_user_header):
    """A temporary `leader.mesos` DNS failure must not break serving:
    stale `mesos_leader` cache entries keep being used (soft limit), and
    requests still succeed."""
    # Phase 1 expectations: all caches update cleanly at least once.
    filter_regexp_pre = {
        'Marathon leader cache has been successfully updated':
            SearchCriteria(1, True),
        'Marathon apps cache has been successfully updated':
            SearchCriteria(1, True),
        'Mesos state cache has been successfully updated':
            SearchCriteria(1, True),
        '`Mesos Leader` state cache has been successfully updated':
            SearchCriteria(1, True),
    }
    # Phase 2 expectations: with DNS broken, the other caches keep
    # updating, the DNS error is logged, and the mesos_leader entry goes
    # stale (but is still served).
    filter_regexp_post = {
        'Marathon leader cache has been successfully updated':
            SearchCriteria(1, True),
        'Marathon apps cache has been successfully updated':
            SearchCriteria(1, True),
        'Mesos state cache has been successfully updated':
            SearchCriteria(1, True),
        # The problem here is that there may occur two updates, one after
        # another, and the failed one will be retried. This stems directly
        # from how cache.lua works. Let's permit multiple occurrences for
        # now.
        'DNS server returned error code':
            SearchCriteria(1, False),
        'Cache entry `mesos_leader` is stale':
            SearchCriteria(1, True),
    }
    cache_max_age_soft_limit = 3
    ar = nginx_class(
        cache_max_age_soft_limit=cache_max_age_soft_limit,
        cache_max_age_hard_limit=1200,
        cache_expiration=2,
        cache_poll_period=3,
        cache_first_poll_delay=1,
    )
    url = ar.make_url_from_path('/dcos-history-service/foo/bar')
    with GuardedSubprocess(ar):
        lbf = LineBufferFilter(
            filter_regexp_pre,
            timeout=5,  # Just to give LBF enough time
            line_buffer=ar.stderr_line_buffer)
        with lbf:
            # Trigger cache update by issuing request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200
        assert lbf.extra_matches == {}
        lbf = LineBufferFilter(
            filter_regexp_post,
            timeout=5,  # Just to give LBF enough time
            line_buffer=ar.stderr_line_buffer)
        with lbf:
            # Break `leader.mesos` DNS entry
            dns_server_mock.remove_dns_entry('leader.mesos.')
            # Wait for the cache to be old enough to be considered stale by
            # AR: cache_max_age_soft_limit + extra delay in order to avoid
            # race conditions
            delay = 2
            time.sleep(cache_max_age_soft_limit + delay)
            # Perform the main/test request:
            resp = requests.get(url,
                                allow_redirects=False,
                                headers=valid_user_header)
            assert resp.status_code == 200
        assert lbf.extra_matches == {}
def test_if_marathon_upstream_env_is_honoured(self, nginx_class, mocker, superuser_user_header):
    """The `upstream_marathon` setting controls where the cache fetches
    Marathon data from: only the configured upstream may receive requests
    (leader + apps, hence 2 recorded requests), and the upstream/request
    URLs must appear in the log."""
    # Stage 0 - setup the environment:
    mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                        func_name='record_requests')
    mocker.send_command(endpoint_id='http://127.0.0.2:8080',
                        func_name='record_requests')

    # Stage 1 - we set Marathon upstream to http://127.0.0.1:8080 and
    # verify that all the requests from cache go there:
    # Raw strings for the query-string pattern: `\?`, `\.` and `\&` are
    # regex escapes and invalid escape sequences in a plain str literal
    # (a warning since Python 3.6, a future error). Runtime values are
    # unchanged.
    filter_regexp = {
        'Marathon upstream: http://127.0.0.1:8080': SearchCriteria(1, True),
        'Request url: http://127.0.0.1:8080/v2/leader': SearchCriteria(1, True),
        ('Request url: http://127.0.0.1:8080/v2/apps'
         r'\?embed=apps\.tasks\&label=DCOS_SERVICE_NAME'):
            SearchCriteria(1, True),
    }
    ar = nginx_class(upstream_marathon="http://127.0.0.1:8080")
    url = ar.make_url_from_path('/system/v1/leader/marathon/foo/bar/baz')
    with GuardedSubprocess(ar):
        lbf = LineBufferFilter(filter_regexp,
                               line_buffer=ar.stderr_line_buffer)
        requests.get(url,
                     allow_redirects=False,
                     headers=superuser_user_header)
        lbf.scan_log_buffer()
        m1_requests = mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                                          func_name='get_recorded_requests')
        assert len(m1_requests) == 2
        m2_requests = mocker.send_command(endpoint_id='http://127.0.0.2:8080',
                                          func_name='get_recorded_requests')
        assert len(m2_requests) == 0
        assert lbf.extra_matches == {}
    # Stage 1 ends
    mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                        func_name='erase_recorded_requests')

    # Stage 2 - we set Marathon upstream to http://127.0.0.2:8080 and
    # verify that all the requests go to the new upstream
    filter_regexp = {
        'Marathon upstream: http://127.0.0.2:8080': SearchCriteria(1, True),
        'Request url: http://127.0.0.2:8080/v2/leader': SearchCriteria(1, True),
        ('Request url: http://127.0.0.2:8080/v2/apps'
         r'\?embed=apps\.tasks\&label=DCOS_SERVICE_NAME'):
            SearchCriteria(1, True),
    }
    ar = nginx_class(upstream_marathon="http://127.0.0.2:8080")
    with GuardedSubprocess(ar):
        lbf = LineBufferFilter(filter_regexp,
                               line_buffer=ar.stderr_line_buffer)
        requests.get(url,
                     allow_redirects=False,
                     headers=superuser_user_header)
        lbf.scan_log_buffer()
        m1_requests = mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                                          func_name='get_recorded_requests')
        assert len(m1_requests) == 0
        m2_requests = mocker.send_command(endpoint_id='http://127.0.0.2:8080',
                                          func_name='get_recorded_requests')
        assert len(m2_requests) == 2
        assert lbf.extra_matches == {}
def test_if_request_buffering_can_be_configured(self, mocker,
                                                nginx_class,
                                                valid_user_header,
                                                label_val,
                                                should_buffer):
    """Request buffering follows the DCOS_SERVICE_REQUEST_BUFFERING label.

    Parametrized over `label_val` (the label value to set on the mocked
    app, or None to leave it unset); `should_buffer` is presumably the
    parametrization's companion expectation — the actual check below is
    derived from `label_val` directly.
    """
    # If `DCOS_SERVICE_REQUEST_BUFFERING` is set to `false` (string) or
    # `false` (boolean), Admin Router will not buffer the client request
    # before sending it to the upstream. In any other case the request is
    # going to be buffered.
    # Remove the data from MesosDNS and Mesos mocks w.r.t. resolved service
    mocker.send_command(endpoint_id='http://127.0.0.2:5050',
                        func_name='set_frameworks_response',
                        aux_data=[])
    mocker.send_command(endpoint_id='http://127.0.0.1:8123',
                        func_name='set_srv_response',
                        aux_data=EMPTY_SRV)

    # Set the DCOS_SERVICE_REQUEST_BUFFERING for the test mock:
    srv = SCHEDULER_APP_ALWAYSTHERE_DIFFERENTPORT
    if label_val is not None:
        srv['labels']['DCOS_SERVICE_REQUEST_BUFFERING'] = label_val

    new_apps = {"apps": [srv, ]}
    mocker.send_command(endpoint_id='http://127.0.0.1:8080',
                        func_name='set_apps_response',
                        aux_data=new_apps)

    # In theory it is possible to write a test that really checks if the
    # request was buffered or not. It would require talking to the mocked
    # endpoint during the test and checking if it is receiving the data as
    # it is being sent (there is no buffering) or only after the whole
    # request has been uploaded (Nginx buffers the data). Such a feature
    # would introduce some extra complexity into the test harness. Simply
    # checking if AR is printing the warning to the error log seems to be
    # good enough.
    filter_regexp = {}
    tmp = 'a client request body is buffered to a temporary file'
    if label_val in ["false", False]:
        filter_regexp[tmp] = SearchCriteria(0, True)
    else:
        filter_regexp[tmp] = SearchCriteria(1, True)

    ar = nginx_class(role="master")
    url = ar.make_url_from_path('/service/scheduler-alwaysthere/foo/bar/')

    # In order to make Nginx print a warning to the errorlog, the request
    # payload needs to be greater than client_body_buffer_size, which by
    # default is set to 16k. We use here 2MB for safe measure.
    # http://nginx.org/en/docs/http/ngx_http_core_module.html#client_body_buffer_size
    payload = {"data": "x" * 1024 * 1024 * 2}
    with GuardedSubprocess(ar):
        lbf = LineBufferFilter(filter_regexp,
                               line_buffer=ar.stderr_line_buffer)
        resp = requests.post(url,
                             allow_redirects=False,
                             headers=valid_user_header,
                             data=payload)
        lbf.scan_log_buffer()
        assert lbf.extra_matches == {}
        assert resp.status_code == 200