def test_pipeline_not_overwritten(self):
    """Verify an already-registered apm pipeline is detected and left untouched."""
    loaded_msg = "Pipeline already registered: apm"
    wait_until(lambda: self.log_contains(loaded_msg), name=loaded_msg)
    # the pre-existing (empty) test pipeline must keep its original description
    desc = "empty apm test pipeline"
    wait_until(
        lambda: self.es.ingest.get_pipeline(id=self.pipeline_apm)[self.pipeline_apm]['description'] == desc,
        name="fetching pipeline {}".format(self.pipeline_apm))
def setUp(self):
    """Define apm privilege/resource fixtures and reset api keys and privileges."""
    # application
    self.application = "apm"
    # apm privileges
    self.privilege_agent_config = "config_agent:read"
    self.privilege_event = "event:write"
    self.privilege_sourcemap = "sourcemap:write"
    self.privileges = {
        "agentConfig": self.privilege_agent_config,
        "event": self.privilege_event,
        "sourcemap": self.privilege_sourcemap,
    }
    self.privileges_all = list(self.privileges.values())
    self.privilege_any = "*"
    # resources
    self.resource_any = ["*"]
    self.resource_backend = ["-"]
    es_user = os.getenv("ES_USER", "apm_server_user")
    es_pass = os.getenv("ES_PASS", "changeme")
    self.apikey_name = "apm-systemtest"
    self.apikey = APIKeyHelper(self.get_elasticsearch_url(es_user, es_pass))
    # delete all existing api_keys with defined name of current user
    self.apikey.invalidate(self.apikey_name)
    # delete all existing application privileges to ensure they can be created for current user
    for privilege in self.privileges:
        url = "{}/{}/{}".format(self.apikey.privileges_url, self.application, privilege)
        requests.delete(url)
        wait_until(lambda: requests.get(url).status_code == 404)
    super(APIKeyBaseTest, self).setUp()
def test_sourcemap_cache_expiration(self):
    """After the cached sourcemap expires, mapping a new error must fail again."""
    path = 'http://localhost:8000/test/e2e/general-usecase/bundle.js.map'
    r = self.upload_sourcemap(file_name='bundle.js.map', bundle_filepath=path)
    assert r.status_code == 202, r.status_code
    self.wait_for_sourcemaps()
    # indexing an error document also caches the sourcemap server-side
    self.load_docs_with_template(self.get_error_payload_path(), self.intake_url, 'error', 1)
    self.assert_no_logged_warnings()
    # delete sourcemap and error event from ES so a fetch from ES fails afterwards
    self.es.indices.delete(index=self.ilm_index(index_error))
    self.es.indices.delete(index=index_smap, ignore=[400, 404])
    wait_until(lambda: not self.es.indices.exists(index_smap))
    # give the cache time to expire the entry
    time.sleep(1)
    # after cache expiration no sourcemap should be found any more
    self.load_docs_with_template(self.get_error_payload_path(), self.intake_url, 'error', 1)
    self.check_rum_error_sourcemap(False, expected_err="No Sourcemap available")
def test_fetch_latest_of_multiple_sourcemaps(self):
    """When two sourcemaps share a key, the most recent upload must be applied."""
    # upload sourcemap file that finds no matchings
    self.upload_sourcemap(file_name='bundle_no_mapping.js.map')
    self.load_docs_with_template(self.get_error_payload_path(), self.intake_url, 'error', 1)
    self.check_rum_error_sourcemap(False, expected_err="No Sourcemap found for")
    # remove existing error document
    self.es.delete_by_query(
        index=index_error,
        body={"query": {"term": {"processor.name": 'error'}}})
    wait_until(lambda: self.es.count(index=index_error)['count'] == 0)
    # upload a second sourcemap with the same key that produces proper matchings;
    # this also proves the cache is invalidated, otherwise the former sourcemap
    # would still be served from the cache
    self.upload_sourcemap(expected_ct=2)
    self.load_docs_with_template(self.get_error_payload_path(), self.intake_url, 'error', 1)
    self.check_rum_error_sourcemap(True, count=1)
def wait_for_events(self, processor_name, expected_count, index=None, max_timeout=10):
    """Wait for `expected_count` docs with the given 'processor.name' value.

    Returns the matching hits once the count is reached.
    """
    if index is None:
        index = apm_prefix
    term_query = {"term": {"processor.name": processor_name}}
    # mutable cell to smuggle the hits out of the polling closure
    # TODO(axw) use "nonlocal" when we migrate to Python 3
    found = {}

    def query_docs():
        hits = self.es.search(index=index, body={"query": term_query})['hits']
        found['docs'] = hits['hits']
        return hits['total']['value'] == expected_count

    wait_until(
        query_docs,
        max_timeout=max_timeout,
        name="{} documents to reach {}".format(processor_name, expected_count),
    )
    return found['docs']
def delete_selected(self, subject, author=None):
    """Select the mail matching subject/author, delete it, and wait until it is gone."""
    self.select_email(subject, author)
    self.driver.find_element_by_xpath(self.delete_xpath).click()
    # poll the refreshed mail list until no matching mail remains (count == 0)
    wait_until(
        lambda: len(mail_indexes_of(mails=self.reload_and_fetch_mail(),
                                    subject=subject,
                                    author=author)),
        10, 0)
def test_onboarding_doc(self):
    """Start the beat and check that one onboarding doc has been published to ES."""
    wait_until(lambda: self.es.indices.exists(index_onboarding),
               name="onboarding index created")
    wait_until(lambda: self.es.count(index=index_onboarding)['count'] == 1)
    # make sure no error or warnings were logged
    self.assert_no_logged_warnings()
def wait_until_templates(es, templates=[], exist=True):
    """Block until the given index templates exist, or are all gone when exist=False.

    With an empty `templates` list the check runs against `apm_prefix` and
    succeeds only once no matching template remains.
    """
    expected = len(templates) if exist else 0

    def expected_templates():
        try:
            return len(es.indices.get_template(templates if templates else apm_prefix)) == expected
        # narrowed from a bare `except:`: only a missing template means "none
        # there" — consistent with the NotFoundError handling in the sibling
        # wait_until_policies/wait_until_aliases helpers; other errors surface
        except NotFoundError:
            return expected == 0
    wait_until(expected_templates, name="expected templates: {}".format(templates))
def wait_until_pipelines(es, pipelines=default_pipelines, exist=True):
    """Block until the given ingest pipelines exist, or are all gone when exist=False."""
    expected = len(pipelines) if exist else 0

    def expected_pipelines():
        try:
            return len(es.ingest.get_pipeline(pipelines if pipelines else default_pipelines)) == expected
        # narrowed from a bare `except:`: only a missing pipeline means "none
        # there" — consistent with the sibling wait_until_* helpers; any other
        # transport error now propagates instead of being silently swallowed
        except NotFoundError:
            return expected == 0
    wait_until(expected_pipelines, name="expected pipelines {}".format(pipelines))
def test_move_folder(self):
    """Move a freshly sent mail into the spam folder and verify it appears there."""
    # timestamp-based subject so the mail can be located unambiguously
    subject = datetime.datetime.now().strftime("%I:%M%p on %B %d, %Y")
    send_email(self.driver, self.mailer_page, self.current_page, subject)
    self.mailer_page.move_email('Спам', subject)
    self.mailer_page.go_to_folder('#spam', subject)
    # exactly one mail with this subject must show up in the spam folder
    wait_until(
        lambda: len(mail_indexes_of(mails=self.mailer_page.reload_and_fetch_mail(),
                                    subject=subject)),
        10, 1)
def test_template(self):
    """Start the beat and check that the template has been loaded to ES."""
    wait_until(lambda: self.es.indices.exists(index_onboarding))
    templates = self.es.indices.get_template(index_name)
    assert len(templates) == 1
    template = templates[index_name]
    # the loaded template must carry the expected total-fields limit
    limit = template['settings']['index']['mapping']['total_fields']['limit']
    assert limit == "2000", limit
def wait_until_policies(es, policies=[], exist=True):
    """Block until the given ILM policies exist, or are all gone when exist=False."""
    expected = len(policies) if exist else 0
    url = "{}{}".format(policy_url, ",".join(policies if policies else [default_policy]))

    def expected_policies():
        try:
            return len(es.transport.perform_request('GET', url)) == expected
        except NotFoundError:
            # no policy found at all: fine only when we expect none
            return expected == 0
    wait_until(expected_policies, name="expected policies: {}".format(policies))
def wait_until_aliases(es, aliases=[], exist=True):
    """Block until the given aliases exist, or are all gone when exist=False."""
    url = "/_alias/{}".format(",".join(aliases) if aliases else apm_prefix)
    expected = len(aliases) if exist else 0

    def aliases_exist():
        try:
            return len(es.transport.perform_request('GET', url)) == expected
        except NotFoundError:
            # no alias found at all: fine only when we expect none
            return expected == 0
    wait_until(aliases_exist, name="expected aliases: {}".format(aliases))
def test_api_key_auth(self):
    """Trigger the server's external tracing via an unauthorized intake request."""
    # The request body is irrelevant: the request fails on authorization, which
    # is all that is needed to trigger the server's tracing.
    r = requests.post(self.intake_url, data="invalid")
    self.assertEqual(401, r.status_code)
    wait_until(
        lambda: get_instrumentation_event(self.es, index_transaction),
        name='have external server instrumentation documents with api_key')
def move_email(self, title, subject, author=None):
    """Move the mail matching subject/author into the folder named `title`."""
    self.select_email(subject, author)
    self.driver.find_element_by_xpath(self.move).click()
    folder_xpath = '//a[@class="b-folders__folder__link js-action" and @title="%s"]' % title
    self.driver.find_element_by_xpath(folder_xpath).click()
    # wait until the mail has left the current folder (count == 0)
    wait_until(
        lambda: len(mail_indexes_of(mails=self.reload_and_fetch_mail(),
                                    subject=subject,
                                    author=author)),
        10, 0)
def setUp(self):
    """Wait for the Jaeger listeners to come up and record their addresses."""
    super(JaegerBaseTest, self).setUp()
    wait_until(lambda: self.log_contains("Listening for Jaeger HTTP"),
               name="Jaeger HTTP listener started")
    wait_until(lambda: self.log_contains("Listening for Jaeger gRPC"),
               name="Jaeger gRPC listener started")
    # extract the Jaeger server addresses from the log
    log = self.get_log()
    http_match = re.search("Listening for Jaeger HTTP requests on: (.*)$", log, re.MULTILINE)
    self.jaeger_http_url = "http://{}/{}".format(http_match.group(1), 'api/traces')
    grpc_match = re.search("Listening for Jaeger gRPC requests on: (.*)$", log, re.MULTILINE)
    self.jaeger_grpc_addr = grpc_match.group(1)
def wait_until_invalidated(self, name=None, id=None):
    """Block until every api key matching `name` or `id` is invalidated.

    Raises ValueError when neither identifier is given (ValueError subclasses
    Exception, so callers catching the old generic Exception still work).
    """
    if not name and not id:
        raise ValueError("Either name or id must be given")

    def invalidated():
        keys = self.fetch_by_name(name) if name else self.fetch_by_id(id)
        # all() replaces the manual flag loop; pass the predicate directly
        # to wait_until instead of wrapping it in a redundant lambda
        return all(entry["invalidated"] for entry in keys)

    wait_until(invalidated, name="api keys invalidated")
def wait_for_profile(self):
    """Block until at least one profile document has been indexed."""
    def has_profile_docs():
        response = self.es.count(
            index=index_profile,
            body={"query": {"term": {"processor.name": "profile"}}})
        return response['count'] != 0
    wait_until(has_profile_docs, max_timeout=10, name="waiting for profile")
def send_email(driver, mailer_page, current_page, subject,
               recipient='*****@*****.**', text=None):
    """Send a mail and wait until exactly one mail with `subject` is in the inbox.

    Fix: the old default `text=get_random_text_and_number()` was evaluated once
    at definition time, so every call without an explicit `text` reused the very
    same "random" body. A None sentinel generates a fresh body per call.
    """
    if text is None:
        text = get_random_text_and_number()
    current_page.send_email(recipient, subject, text, mailer_page)
    driver.get('https://mail.yandex.ru')
    wait_until(
        lambda: len(mail_indexes_of(mails=mailer_page.reload_and_fetch_mail(),
                                    subject=subject)),
        6, 1)
def test_api_key_auth(self):
    """Self-instrumentation using in-memory listener without configuring an APIKey."""
    # The request body is irrelevant: the request fails on authorization, which
    # is enough to trigger the server's in-memory tracing and prove the
    # in-memory tracer works without an api_key configured.
    r = requests.post(self.intake_url, data="invalid")
    self.assertEqual(401, r.status_code)
    wait_until(
        lambda: get_instrumentation_event(self.es, index_transaction),
        name='have in-memory instrumentation documents without api_key')
def wait_until_indices_truncated(es, truncate_indices=[agentcfg_index]):
    """Empty the given indices via delete_by_query and wait until each count is 0.

    Truncates instead of deleting, since the agent configuration index is only
    created when kibana starts up.
    """
    if not truncate_indices:
        return
    for index in truncate_indices:
        # skip the delete round-trip when the index is already empty or missing
        if es.count(index=index, ignore_unavailable=True)["count"] > 0:
            es.delete_by_query(index,
                               {"query": {"match_all": {}}},
                               ignore_unavailable=True,
                               wait_for_completion=True)
            wait_until(lambda: es.count(index=index, ignore_unavailable=True)["count"] == 0,
                       max_timeout=30,
                       name="acm index {} to be empty".format(index))
def setUp(self):
    """Define apm privilege fixtures and clean up api keys and privileges."""
    # application
    self.application = "apm"
    # apm privileges
    self.privilege_agent_config = "config_agent:read"
    self.privilege_event = "event:write"
    self.privilege_sourcemap = "sourcemap:write"
    self.privileges = {
        "agentConfig": self.privilege_agent_config,
        "event": self.privilege_event,
        "sourcemap": self.privilege_sourcemap,
    }
    # wrap in list(...) for consistency with the sibling APIKey setUp: on
    # Python 3 dict.values() is a live view, not an indexable snapshot
    self.privileges_all = list(self.privileges.values())
    self.privilege_any = "*"
    # resources
    self.resource_any = ["*"]
    self.resource_backend = ["-"]
    self.api_key_name = "apm-systemtest"
    # api_key related urls for configured user (default: apm_server_user)
    user = os.getenv("ES_USER", "apm_server_user")
    password = os.getenv("ES_PASS", "changeme")
    self.es_url_apm_server_user = self.get_elasticsearch_url(user, password)
    self.api_key_url = "{}/_security/api_key".format(self.es_url_apm_server_user)
    self.privileges_url = "{}/_security/privilege".format(self.es_url_apm_server_user)
    # clean setup:
    # delete all existing api_keys with defined name of current user
    # (the unused local `content_type` was dropped; the literal is passed here)
    requests.delete(self.api_key_url,
                    data=json.dumps({'name': self.api_key_name}),
                    headers=headers(content_type='application/json'))
    wait_until(lambda: self.api_keys_invalidated(), name="delete former api keys")
    # delete all existing application privileges to ensure they can be created for current user
    for name in self.privileges.keys():
        url = "{}/{}/{}".format(self.privileges_url, self.application, name)
        requests.delete(url)
        wait_until(lambda: requests.get(url).status_code == 404)
    super(BaseAPIKey, self).setUp()
def wait_until_indices_deleted(es, delete_indices=[apm_prefix]):
    """Delete the given indices and wait until none of them remain."""
    # avoid unnecessary delete requests to ES if possible
    if not delete_indices or (all_apm(delete_indices) and len(es.indices.get(apm_prefix)) == 0):
        return
    for idx in delete_indices:
        es.indices.delete(idx, ignore=[404, 400])

    def is_deleted(idx):
        try:
            return len(es.indices.get(idx)) == 0
        # narrowed from a bare `except:`: a missing index means deleted —
        # consistent with the sibling wait_until_* helpers; other transport
        # errors now propagate instead of being mistaken for success
        except NotFoundError:
            return True

    for idx in delete_indices:
        wait_until(lambda: is_deleted(idx), name="index {} to be deleted".format(idx))
def check_experimental_key_indexed(self, experimental):
    """Index experimental payloads and verify the top-level `experimental` key presence."""
    self.load_docs_with_template(self.get_payload_path("experimental.ndjson"),
                                 self.intake_url, 'transaction', 2)
    wait_until(lambda: self.log_contains("events have been published"), max_timeout=10)
    # give ES a moment to make the documents searchable
    time.sleep(2)
    self.assert_no_logged_warnings()
    for idx in [index_transaction, index_span, index_error]:
        # ensure documents exist
        rs = self.es.search(index=idx)
        assert rs['hits']['total']['value'] == 1
        # check whether or not top level key `experimental` has been indexed
        rs = self.es.search(index=idx, body={"query": {"exists": {"field": 'experimental'}}})
        expected = 1 if experimental else 0
        assert rs['hits']['total']['value'] == expected, idx
def create_api_key(self, privileges, resources, application="apm"):
    """Create an api key for the given privileges/resources.

    Returns the base64-encoded "id:api_key" credential string.
    """
    role_name = self.api_key_name + "role_desc"
    payload = json.dumps({
        "name": self.api_key_name,
        "role_descriptors": {
            role_name: {
                "applications": [{
                    "application": application,
                    "privileges": privileges,
                    "resources": resources,
                }]
            }
        }
    })
    resp = requests.post(self.api_key_url, data=payload,
                         headers=headers(content_type='application/json'))
    assert resp.status_code == 200, resp.status_code
    # key_id instead of `id` to avoid shadowing the builtin
    key_id = resp.json()["id"]
    wait_until(lambda: self.api_key_exists(key_id), name="create api key")
    return base64.b64encode("{}:{}".format(key_id, resp.json()["api_key"]))
def test_sourcemap_cache_expiration(self):
    """After the cached sourcemap expires, mapping a new error must fail again."""
    self.upload_sourcemap()
    # indexing an error document also caches the sourcemap server-side
    self.load_docs_with_template(self.get_error_payload_path(), self.intake_url, 'error', 1)
    self.assert_no_logged_warnings()
    # delete sourcemap and error event from ES so a fetch from ES fails afterwards
    self.es.indices.delete(index=self.ilm_index(index_error))
    self.es.indices.delete(index=index_smap, ignore=[400, 404])
    wait_until(lambda: not self.es.indices.exists(index_smap))
    # give the cache time to expire the entry
    time.sleep(1)
    # after cache expiration no sourcemap should be found any more
    self.load_docs_with_template(self.get_error_payload_path(), self.intake_url, 'error', 1)
    self.check_rum_error_sourcemap(False, expected_err="No Sourcemap available")
def wait_until_created(self, id):
    """Block until exactly one api key with the given id can be fetched."""
    wait_until(lambda: len(self.fetch_by_id(id)) == 1, name="create api key")
def wait_until_ilm_logged(self):
    """Wait for the ILM log line matching the configured ilm_setup_enabled flag."""
    if self.config().get("ilm_setup_enabled") != "false":
        msg = "Finished index management setup."
    else:
        msg = "Manage ILM setup is disabled."
    wait_until(lambda: self.log_contains(msg), name="ILM setup")
def wait_until_pipeline_logged(self):
    """Wait for the pipeline log line matching the register_pipeline_enabled flag."""
    if self.config().get("register_pipeline_enabled") != "false":
        msg = "Registered Ingest Pipelines successfully"
    else:
        msg = "No pipeline callback registered"
    wait_until(lambda: self.log_contains(msg), name="pipelines registration")
def test_template_setup_error(self):
    """The server must exit when template name/pattern are not configured together."""
    loaded_msg = "Exiting: `setup.template.name` and `setup.template.pattern` have to be set"
    wait_until(lambda: self.log_contains(loaded_msg), max_timeout=5)