def process_timeouts(self):
    """Time out overdue in-flight submissions.

    Pops up to 10 scan keys from the timeout queue whose deadline has
    passed (``upper_limit=isotime.now()``). For each key, the entry is
    removed from the ``scanning`` hash of in-progress submissions and any
    queued duplicates are drained; the ``ingest_timeout`` counter is only
    incremented when something was actually removed. The whole pass is
    wrapped in an APM transaction when an APM client is configured.

    Returns:
        int: the number of timeout entries dequeued this call.
    """
    # Start of ingest message
    if self.apm_client:
        self.apm_client.begin_transaction('ingest_msg')

    ingester = self.ingester
    # Only keys whose timeout is already due; cap the batch at 10 per call.
    timeouts = ingester.timeout_queue.dequeue_range(upper_limit=isotime.now(), num=10)

    for scan_key in timeouts:
        try:
            actual_timeout = False

            # Remove the entry from the hash of submissions in progress.
            entry = ingester.scanning.pop(scan_key)
            if entry:
                actual_timeout = True
                self.log.error("Submission timed out for %s: %s", scan_key, str(entry))

            # Drain every duplicate queued under this scan key as well.
            dup = ingester.duplicate_queue.pop(_dup_prefix + scan_key, blocking=False)
            if dup:
                actual_timeout = True

            while dup:
                self.log.error("Submission timed out for %s: %s", scan_key, str(dup))
                dup = ingester.duplicate_queue.pop(_dup_prefix + scan_key, blocking=False)

            # Count the timeout only if an entry or duplicate was found.
            if actual_timeout:
                ingester.counter.increment('ingest_timeout')
        except Exception:
            # A failure on one key must not prevent timing out the others.
            self.log.exception("Problem timing out %s:", scan_key)

    # End of ingest message (success)
    if self.apm_client:
        elasticapm.tag(timeouts=len(timeouts))
        self.apm_client.end_transaction('ingest_timeouts', 'success')

    return len(timeouts)
def run_archive_once(self):
    """Archive every due document out of each archiveable collection.

    Does nothing unless ILM archiving is enabled in the datastore config.
    For each collection, counts documents whose ``archive_ts`` is in the
    past, moves them to the time-sliced (archive) storage, and bumps the
    archive counter. Each collection is processed inside its own APM
    transaction when an APM client is configured.
    """
    if not self.config.datastore.ilm.enabled:
        return

    now = now_as_iso()

    # Archive data
    for collection in self.archiveable_collections:
        # Start of expiry transaction
        if self.apm_client:
            self.apm_client.begin_transaction("Archive older documents")

        # Everything whose archive timestamp is already in the past.
        archive_query = f"archive_ts:[* TO {now}]"
        # rows=0: we only need the total count, not the documents.
        number_to_archive = collection.search(archive_query, rows=0, as_obj=False,
                                              use_archive=False)['total']

        if self.apm_client:
            elasticapm.tag(query=archive_query)
            elasticapm.tag(number_to_archive=number_to_archive)

        self.log.info(f"Processing collection: {collection.name}")
        if number_to_archive != 0:
            # Proceed with archiving
            collection.archive(archive_query)
            self.counter_archive.increment(f'{collection.name}', increment_by=number_to_archive)
            self.log.info(f" Archived {number_to_archive} items to the time sliced storage...")
        else:
            self.log.debug(" Nothing to archive in this collection.")

        # End of expiry transaction
        if self.apm_client:
            self.apm_client.end_transaction(collection.name, 'archived')
def test_tags_dedot(elasticapm_client):
    """Tag keys containing '.', '*' or '"' are sanitized to underscores."""
    elasticapm_client.begin_transaction("test")
    for key, value in (("d.o.t", "dot"), ("s*t*a*r", "star"), ('q"u"o"t"e', "quote")):
        elasticapm.tag(**{key: value})
    elasticapm_client.end_transaction("test_name", 200)
    recorded = elasticapm_client.events[TRANSACTION]
    expected = {"d_o_t": "dot", "s_t_a_r": "star", "q_u_o_t_e": "quote"}
    assert recorded[0]["context"]["tags"] == expected
def query():
    """All-in-one search entry: looks up students, teachers and classrooms,
    then redirects to the matching resource page.

    Normally reached via POST, but GET is also accepted as a fallback to
    improve user experience.

    Analytics tags:
    - ``query_resource_type``: the resource type found: classroom,
      single_student, single_teacher, multiple_people, or not_exist.
    - ``query_type``: how the lookup was made (name vs. id): by_name,
      by_id, other.
    """
    # if under maintenance, return to maintenance.html
    if app.config["MAINTENANCE"]:
        return render_template("maintenance.html")

    keyword = request.values.get('id')
    if not keyword or len(keyword) < 2:
        flash('请输入需要查询的姓名、学号、教工号或教室名称,长度不要小于2个字符')
        return redirect(url_for('main.main'))

    # Search through the api-server RPC
    with elasticapm.capture_span('rpc_search'):
        try:
            rpc_result = APIServer.search(keyword)
        except Exception as e:
            return handle_exception_with_error_page(e)

    # Render a different template per resource type
    if len(rpc_result.classrooms) >= 1:  # classrooms are shown first
        # We filter APM documents by service name in Kibana, so tags need no
        # service-name prefix.
        elasticapm.tag(query_resource_type='classroom')
        elasticapm.tag(query_type='by_name')
        if len(rpc_result.classrooms) > 1:
            # several classrooms matched: let the user choose
            return render_template('query/multipleClassroomChoice.html',
                                   name=keyword,
                                   classrooms=rpc_result.classrooms)
        return redirect('/classroom/{}/{}'.format(rpc_result.classrooms[0].room_id_encoded,
                                                  rpc_result.classrooms[0].semesters[-1]))
    elif len(rpc_result.students) == 1 and len(rpc_result.teachers) == 0:
        # exactly one student
        elasticapm.tag(query_resource_type='single_student')
        if contains_chinese(keyword):
            elasticapm.tag(query_type='by_name')
        else:
            elasticapm.tag(query_type='by_id')
        if len(rpc_result.students[0].semesters) < 1:
            flash('没有可用学期')
            return redirect(url_for('main.main'))
        return redirect('/student/{}/{}'.format(rpc_result.students[0].student_id_encoded,
                                                rpc_result.students[0].semesters[-1]))
    elif len(rpc_result.teachers) == 1 and len(rpc_result.students) == 0:
        # exactly one teacher
        elasticapm.tag(query_resource_type='single_teacher')
        if contains_chinese(keyword):
            elasticapm.tag(query_type='by_name')
        else:
            elasticapm.tag(query_type='by_id')
        if len(rpc_result.teachers[0].semesters) < 1:
            flash('没有可用学期')
            return redirect(url_for('main.main'))
        return redirect('/teacher/{}/{}'.format(rpc_result.teachers[0].teacher_id_encoded,
                                                rpc_result.teachers[0].semesters[-1]))
    elif len(rpc_result.teachers) >= 1 or len(rpc_result.students) >= 1:
        # multiple students, multiple teachers, or mix of both
        elasticapm.tag(query_resource_type='multiple_people')
        if contains_chinese(keyword):
            elasticapm.tag(query_type='by_name')
        else:
            elasticapm.tag(query_type='by_id')
        return render_template('query/peopleWithSameName.html',
                               name=keyword,
                               students=rpc_result.students,
                               teachers=rpc_result.teachers)
    else:
        # nothing matched at all
        logger.info("No result for user search", {"keyword": request.values.get('id')})
        elasticapm.tag(query_resource_type='not_exist')
        elasticapm.tag(query_type='other')
        flash('没有找到任何有关 {} 的信息,如果你认为这不应该发生,请联系我们。'.format(
            escape(request.values.get('id'))))
        return redirect(url_for('main.main'))
def try_run(self):
    """Main alerter workflow loop.

    Every pass: if new alerts arrived since ``self.start_ts``, load all
    workflows (a built-in DEFAULT triage workflow plus every stored
    workflow with a MALICIOUS / NON-MALICIOUS / ASSESS / empty status)
    and apply each one to the alerts reported in the
    ``[start_ts TO end_ts]`` window via ``update_by_query``. Each load
    and each workflow execution runs in its own APM transaction when an
    APM client is configured. Sleeps 30s between passes and advances
    ``start_ts`` to the last processed window end.
    """
    self.datastore.alert.commit()

    while self.running:
        self.heartbeat()
        end_ts = self.get_last_reporting_ts(self.start_ts)
        if self.start_ts != end_ts:
            # Start of transaction
            if self.apm_client:
                self.apm_client.begin_transaction("Load workflows")

            # Built-in fallback workflow: triage anything with no status.
            workflow_queries = [Workflow({
                'status': "TRIAGE",
                'name': "Triage all with no status",
                'creator': "SYSTEM",
                'edited_by': "SYSTEM",
                'query': "NOT status:*",
                'workflow_id': "DEFAULT"
            })]

            try:
                for item in self.datastore.workflow.stream_search("status:MALICIOUS"):
                    workflow_queries.append(item)
                for item in self.datastore.workflow.stream_search("status:NON-MALICIOUS"):
                    workflow_queries.append(item)
                for item in self.datastore.workflow.stream_search("status:ASSESS"):
                    workflow_queries.append(item)
                # Workflows whose status field is absent entirely.
                for item in self.datastore.workflow.stream_search('-status:["" TO *]'):
                    workflow_queries.append(item)
            except SearchException as e:
                self.log.warning(f"Failed to load workflows from the datastore, retrying... :: {e}")

                # End of transaction
                if self.apm_client:
                    elasticapm.tag(number_of_workflows=len(workflow_queries))
                    self.apm_client.end_transaction('loading_workflows', 'search_exception')
                # Retry the whole pass without advancing start_ts.
                continue

            # End of transaction
            if self.apm_client:
                elasticapm.tag(number_of_workflows=len(workflow_queries))
                self.apm_client.end_transaction('loading_workflows', 'success')

            for workflow in workflow_queries:
                # Start of transaction
                if self.apm_client:
                    self.apm_client.begin_transaction("Execute workflows")
                    elasticapm.tag(query=workflow.query,
                                   labels=workflow.labels,
                                   status=workflow.status,
                                   priority=workflow.priority,
                                   user=workflow.creator)

                self.log.info(f'Executing workflow filter: {workflow.name}')
                labels = workflow.labels or []
                status = workflow.status or None
                priority = workflow.priority or None

                # A workflow with nothing to set has no effect: skip it.
                if not status and not labels and not priority:
                    # End of transaction
                    if self.apm_client:
                        self.apm_client.end_transaction(workflow.name, 'no_action')
                    continue

                # Only touch alerts reported inside the current window.
                fq = ["reporting_ts:[{start_ts} TO {end_ts}]".format(start_ts=self.start_ts,
                                                                    end_ts=end_ts)]
                operations = []
                fq_items = []
                if labels:
                    operations.extend([(self.datastore.alert.UPDATE_APPEND, 'label', lbl)
                                       for lbl in labels])
                    for label in labels:
                        fq_items.append("label:\"{label}\"".format(label=label))
                if priority:
                    operations.append((self.datastore.alert.UPDATE_SET, 'priority', priority))
                    fq_items.append("priority:*")
                if status:
                    operations.append((self.datastore.alert.UPDATE_SET, 'status', status))
                    fq_items.append("status:*")

                # Exclude alerts that already carry every value this workflow sets.
                fq.append("NOT ({exclusion})".format(exclusion=" AND ".join(fq_items)))

                try:
                    count = self.datastore.alert.update_by_query(workflow.query, operations, filters=fq)
                    if self.apm_client:
                        elasticapm.tag(affected_alerts=count)

                    if count:
                        self.log.info("{count} Alert(s) were affected by this filter.".format(count=count))
                        # Track hit statistics on stored (non-DEFAULT) workflows only.
                        if workflow.workflow_id != "DEFAULT":
                            operations = [
                                (self.datastore.alert.UPDATE_INC, 'hit_count', count),
                                (self.datastore.alert.UPDATE_SET, 'last_seen', now_as_iso()),
                            ]
                            self.datastore.workflow.update(workflow.id, operations)
                except SearchException:
                    self.log.warning(f"Invalid query '{safe_str(workflow.query or '')}' in workflow "
                                     f"'{workflow.name or 'unknown'}' by '{workflow.created_by or 'unknown'}'")

                    # End of transaction
                    if self.apm_client:
                        self.apm_client.end_transaction(workflow.name, 'search_exception')
                    continue

                # End of transaction
                if self.apm_client:
                    self.apm_client.end_transaction(workflow.name, 'success')
        else:
            self.log.info("Skipping all workflows since there where no new alerts in the specified time period.")

        time.sleep(30)
        self.start_ts = end_ts
def query():
    """View function that queries the current user's own timetable.

    Normally reached via POST, but GET is also accepted as a fallback to
    improve user experience.
    """
    from flask import request, render_template, redirect, url_for, session
    from flask import current_app as app

    import elasticapm

    from everyclass.server.tools import is_chinese_char
    from everyclass.server.exceptions import NoStudentException, IllegalSemesterException
    from everyclass.server.db.mysql import get_local_conn
    from everyclass.server.db.dao import faculty_lookup
    from everyclass.server.db.dao import class_lookup
    from everyclass.server.db.dao import get_classes_for_student
    from everyclass.server.db.model import Semester
    from everyclass.server.db.dao import get_privacy_settings
    from everyclass.server.db.dao import get_my_semesters
    from everyclass.server.db.dao import check_if_stu_exist

    # if under maintenance, return to maintenance.html
    if app.config["MAINTENANCE"]:
        return render_template("maintenance.html")

    db = get_local_conn()
    cursor = db.cursor()

    # If the URL has an `id` parameter, decide whether it is a name or a
    # student id, then assign the student id to student_id.
    if request.values.get('id'):
        id_or_name = request.values.get('id')

        # First and last characters are both Chinese: treat it as a name.
        if is_chinese_char(id_or_name[0:1]) and is_chinese_char(id_or_name[-1:]):
            # analytics: lookup by name
            elasticapm.tag(ec_query_method='by_name')

            mysql_query = "SELECT name,xh FROM ec_students WHERE name=%s"
            cursor.execute(mysql_query, (id_or_name,))
            result = cursor.fetchall()
            if cursor.rowcount > 1:
                # several students share this name: show a chooser page
                students_list = list()
                for each_student in result:
                    students_list.append([each_student[0],
                                          each_student[1],
                                          faculty_lookup(each_student[1]),
                                          class_lookup(each_student[1])])
                return render_template("query_same_name.html",
                                       count=cursor.rowcount,
                                       student_info=students_list)
            elif cursor.rowcount == 1:
                # exactly one match: take its student id
                student_id = result[0][1]
            else:
                # no such person
                elasticapm.tag(ec_query_not_found=True)
                return _no_student_handle(id_or_name)
        # id is not Chinese: treat it as a student id
        else:
            # analytics: lookup by id
            elasticapm.tag(ec_query_method='by_id')
            student_id = request.values.get('id')

            # make sure the student id actually exists
            if not check_if_stu_exist(student_id):
                elasticapm.tag(ec_query_not_found=True)
                return _no_student_handle(student_id)

        # the student id written to the session is guaranteed valid
        session['stu_id'] = student_id

    # no `id` parameter in the URL, but the session has one
    elif session.get('stu_id', None):
        elasticapm.tag(ec_query_method='by_session')
        student_id = session['stu_id']

    # neither an `id` parameter nor a session: we cannot tell whose
    # timetable to show, go back to the home page
    else:
        elasticapm.tag(ec_query_method='exception')
        return redirect(url_for('main.main'))

    # semesters available for this student
    my_available_semesters, student_name = get_my_semesters(student_id)

    # no semester at all: bail out
    if not my_available_semesters:
        logger.warning('Not any semester in ec_student', stack=True)
        return _no_student_handle()

    # if the URL specifies a semester, validate it then update the session
    if request.values.get('semester'):
        try:
            sem = Semester(request.values.get('semester'))
            if sem in my_available_semesters:
                session['semester'] = sem.to_tuple()
                if app.config['DEBUG']:
                    print('[query.query] updated session semester to',
                          Semester(session['semester']).to_str())
        # the user-supplied semester string is malformed
        except IllegalSemesterException:
            if app.config['DEBUG']:
                print('[query.query] IllegalSemesterException handled.'
                      + Semester(session['semester']).to_str())
            session['semester'] = my_available_semesters[-1].to_tuple()

    cursor.close()  # close the database connection

    # If the session has no semester or an invalid one, fall back to the
    # latest available semester. The session stores semesters as tuples
    # because Semester objects cannot be serialized.
    semester = session.get('semester', None)
    if not semester or Semester(semester) not in my_available_semesters:
        session['semester'] = my_available_semesters[-1].to_tuple()

    try:
        student_classes = get_classes_for_student(student_id=student_id,
                                                  sem=Semester(session['semester']))
    except NoStudentException:
        return _no_student_handle(student_id)
    else:
        # free-weekend check: most people have no classes on weekends
        empty_weekend = True
        for cls_time in range(1, 7):
            for cls_day in range(6, 8):
                if (cls_day, cls_time) in student_classes:
                    empty_weekend = False

        # free-period check: most people have no classes in periods 11-12
        empty_6 = True
        for cls_day in range(1, 8):
            if (cls_day, 6) in student_classes:
                empty_6 = False
        empty_5 = True
        for cls_day in range(1, 8):
            if (cls_day, 5) in student_classes:
                empty_5 = False

        # available_semesters: the semesters this student can choose from.
        # Each item is a two-element list: the semester and a True/False
        # flag marking whether it is the current one.
        available_semesters = []
        for each_semester in my_available_semesters:
            if session['semester'] == each_semester:
                available_semesters.append([each_semester, True])
            else:
                available_semesters.append([each_semester, False])

        # Privacy settings
        # Available privacy settings: "show_table_on_page", "import_to_calender", "major"
        privacy_settings = get_privacy_settings(student_id)

        # privacy on
        if "show_table_on_page" in privacy_settings:
            return render_template('blocked.html',
                                   name=student_name,
                                   falculty=faculty_lookup(student_id),
                                   class_name=class_lookup(student_id),
                                   stu_id=student_id,
                                   available_semesters=available_semesters,
                                   no_import_to_calender=True if "import_to_calender" in privacy_settings else False)

        # privacy off
        return render_template('query.html',
                               name=student_name,
                               falculty=faculty_lookup(student_id),
                               class_name=class_lookup(student_id),
                               stu_id=student_id,
                               classes=student_classes,
                               empty_wkend=empty_weekend,
                               empty_6=empty_6,
                               empty_5=empty_5,
                               available_semesters=available_semesters)
def middleware(request):
    # Hide this frame from APM stack traces.
    __traceback_hide__ = True
    user = request.user
    if not user.is_authenticated:
        # Weighted random pick of a demo user for anonymous requests.
        (user,) = random.choices(users, weights=weights)
        request.user = user
    # Attach the customer tier to the current APM transaction.
    elasticapm.tag(customer_tier=user.customer_tier)
    return get_response(request)
def middleware(request):
    # Hide this frame from APM stack traces.
    __traceback_hide__ = True
    # Tag the current APM transaction with a fresh request id.
    request_id = uuid.uuid4()
    elasticapm.tag(request_id=str(request_id))
    return get_response(request)
def try_run(self, volatile=False):
    """Main ingest loop.

    Each pass first drains the complete queue (finalizing finished
    submissions), then pops one message from the ingest queue, validates
    it (parseable SubmissionInput, 64-character sha256 per file), publishes
    it to the traffic queue and hands an IngestTask to the ingester. Every
    message is wrapped in its own APM transaction when an APM client is
    configured.

    Args:
        volatile: when True, re-raise parse errors instead of only
            logging and dropping the bad message.
    """
    ingester = self.ingester
    cpu_mark = time.process_time()
    time_mark = time.time()

    # Move from ingest to unique and waiting queues.
    # While there are entries in the ingest queue we consume chunk_size
    # entries at a time and move unique entries to uniqueq / queued and
    # duplicates to their own queues / waiting.
    while self.running:
        self.heartbeat()

        # Drain all completed submissions before taking new input.
        while True:
            result = ingester.complete_queue.pop(blocking=False)
            if not result:
                break
            # Start of ingest message
            if self.apm_client:
                self.apm_client.begin_transaction('ingest_msg')
            sub = Submission(result)
            ingester.completed(sub)

            # End of ingest message (success)
            if self.apm_client:
                elasticapm.tag(sid=sub.sid)
                self.apm_client.end_transaction('ingest_complete', 'success')

        ingester.counter.increment_execution_time('cpu_seconds', time.process_time() - cpu_mark)
        ingester.counter.increment_execution_time('busy_seconds', time.time() - time_mark)

        # Blocks for at most one second so the running flag stays responsive.
        message = ingester.ingest_queue.pop(timeout=1)

        cpu_mark = time.process_time()
        time_mark = time.time()

        if not message:
            continue

        # Start of ingest message
        if self.apm_client:
            self.apm_client.begin_transaction('ingest_msg')

        try:
            sub = SubmissionInput(message)
            # Write all input to the traffic queue
            ingester.traffic_queue.publish(SubmissionMessage({
                'msg': sub,
                'msg_type': 'SubmissionIngested',
                'sender': 'ingester',
            }).as_primitives())

            task = IngestTask(dict(
                submission=sub,
                ingest_id=sub.sid,
            ))
            task.submission.sid = None  # Reset to new random uuid
        except (ValueError, TypeError) as error:
            self.log.exception(f"Dropped ingest submission {message} because {str(error)}")

            # End of ingest message (value_error)
            if self.apm_client:
                self.apm_client.end_transaction('ingest_input', 'value_error')

            if volatile:
                raise
            continue

        # Reject messages with malformed file hashes (sha256 is 64 hex chars).
        if any(len(file.sha256) != 64 for file in task.submission.files):
            self.log.error(f"Invalid sha256: {[file.sha256 for file in task.submission.files]}")

            # End of ingest message (invalid_hash)
            if self.apm_client:
                self.apm_client.end_transaction('ingest_input', 'invalid_hash')

            continue

        # Normalize hashes to lower case before ingesting.
        for file in task.submission.files:
            file.sha256 = file.sha256.lower()

        ingester.ingest(task)

        # End of ingest message (success)
        if self.apm_client:
            self.apm_client.end_transaction('ingest_input', 'success')
def test_tag_while_no_transaction(caplog):
    """Tagging outside any transaction logs a WARNING naming the tag key."""
    with caplog.at_level(logging.WARNING, "elasticapm.errors"):
        elasticapm.tag(foo="bar")
    first = caplog.records[0]
    assert first.levelno == logging.WARNING
    assert "foo" in first.args
def try_run(self):
    """Main dispatcher message loop.

    Pops messages from the submission queue and dispatches them. A message
    containing 'submission' is a complete task; a message containing only
    'sid' is a nudge for a submission that should already be active (looked
    up in ``active_submissions``). Anything else is logged as corrupted and
    dropped. Each message runs in its own APM transaction when an APM
    client is configured, and any unexpected exception is logged without
    stopping the loop.
    """
    queue = self.dispatcher.submission_queue
    cpu_mark = time.process_time()
    time_mark = time.time()

    while self.running:
        try:
            self.heartbeat()
            self.dispatcher.counter.increment_execution_time('cpu_seconds',
                                                             time.process_time() - cpu_mark)
            self.dispatcher.counter.increment_execution_time('busy_seconds',
                                                             time.time() - time_mark)

            # Short timeout keeps the running flag responsive.
            message = queue.pop(timeout=1)

            cpu_mark = time.process_time()
            time_mark = time.time()

            if not message:
                continue

            # Start of process dispatcher transaction
            if self.apm_client:
                self.apm_client.begin_transaction('Process dispatcher message')

            # This is probably a complete task
            if 'submission' in message:
                task = SubmissionTask(message)
                if self.apm_client:
                    elasticapm.tag(sid=task.submission.sid)

            # This is just as sid nudge, this submission should already be running
            elif 'sid' in message:
                active_task = self.dispatcher.active_submissions.get(message['sid'])
                if self.apm_client:
                    elasticapm.tag(sid=message['sid'])
                if active_task is None:
                    self.log.warning(f"[{message['sid']}] Dispatcher was nudged for inactive submission.")
                    # End of process dispatcher transaction (success)
                    if self.apm_client:
                        self.apm_client.end_transaction('submission_message', 'inactive')
                    continue

                task = SubmissionTask(active_task)
            else:
                self.log.error(f'Corrupted submission message in dispatcher {message}')
                # End of process dispatcher transaction (success)
                if self.apm_client:
                    self.apm_client.end_transaction('submission_message', 'corrupted')
                continue

            self.dispatcher.dispatch_submission(task)

            # End of process dispatcher transaction (success)
            if self.apm_client:
                self.apm_client.end_transaction('submission_message', 'success')
        except Exception as error:
            # Never let a single bad message kill the loop.
            self.log.exception(error)
            # End of process dispatcher transaction (success)
            if self.apm_client:
                self.apm_client.end_transaction('submission_message', 'exception')
def query():
    """All-in-one search entry: looks up students, teachers and classrooms,
    then redirects to the matching resource page.

    Normally reached via POST, but GET is also accepted as a fallback to
    improve user experience.

    Analytics tags:
    - ``query_resource_type``: classroom, single_student, single_teacher,
      multiple_people, or not_exist.
    - ``query_type``: how the lookup was made: by_name, by_id, other.
    """
    import re

    from everyclass.server.utils.rpc import HttpRpc

    # if under maintenance, return to maintenance.html
    if app.config["MAINTENANCE"]:
        return render_template("maintenance.html")

    # transform upper case xh to lower case(currently api-server does not support upper case xh)
    to_search = request.values.get('id')

    if not to_search:
        flash('请输入需要查询的姓名、学号、教工号或教室名称')
        return redirect(url_for('main.main'))

    if re.match('^[A-Za-z0-9]*$', request.values.get('id')):
        to_search = to_search.lower()

    # add ‘座‘ since many users may search classroom in new campus without '座' and api-server doesn't not support
    if to_search[0] in ('a', 'b', 'c', 'd') and len(to_search) <= 5:
        to_search = to_search[0] + '座' + to_search[1:]

    # call api-server to search
    with elasticapm.capture_span('rpc_search'):
        # Strip '/' so the keyword cannot break the URL path.
        rpc_result = HttpRpc.call_with_error_page('{}/v1/search/{}'.format(app.config['API_SERVER_BASE_URL'],
                                                                           to_search.replace("/", "")),
                                                  retry=True)
        # A str result is a pre-rendered error page from the RPC helper.
        if isinstance(rpc_result, str):
            return rpc_result
        api_response = rpc_result

    # render different template for different resource types
    if len(api_response['room']) >= 1:  # classroom
        # we will use service name to filter apm document first, so it's not required to add service name prefix here
        elasticapm.tag(query_resource_type='classroom')
        elasticapm.tag(query_type='by_name')
        api_response['room'][0]['semester'].sort()
        return redirect('/classroom/{}/{}'.format(api_response['room'][0]['rid'],
                                                  api_response['room'][0]['semester'][-1]))
    elif len(api_response['student']) == 1 and len(api_response['teacher']) == 0:  # only one student
        elasticapm.tag(query_resource_type='single_student')
        if contains_chinese(to_search):
            elasticapm.tag(query_type='by_name')
        else:
            elasticapm.tag(query_type='by_id')
        if len(api_response['student'][0]['semester']) < 1:
            flash('没有可用学期')
            return redirect(url_for('main.main'))
        api_response['student'][0]['semester'].sort()
        return redirect('/student/{}/{}'.format(api_response['student'][0]['sid'],
                                                api_response['student'][0]['semester'][-1]))
    elif len(api_response['teacher']) == 1 and len(api_response['student']) == 0:  # only one teacher
        elasticapm.tag(query_resource_type='single_teacher')
        if contains_chinese(to_search):
            elasticapm.tag(query_type='by_name')
        else:
            elasticapm.tag(query_type='by_id')
        if len(api_response['teacher'][0]['semester']) < 1:
            flash('没有可用学期')
            return redirect(url_for('main.main'))
        api_response['teacher'][0]['semester'].sort()
        return redirect('/teacher/{}/{}'.format(api_response['teacher'][0]['tid'],
                                                api_response['teacher'][0]['semester'][-1]))
    elif len(api_response['teacher']) >= 1 or len(api_response['student']) >= 1:
        # multiple students, multiple teachers, or mix of both
        elasticapm.tag(query_resource_type='multiple_people')
        if contains_chinese(to_search):
            elasticapm.tag(query_type='by_name')
        else:
            elasticapm.tag(query_type='by_id')
        return render_template('query/peopleWithSameName.html',
                               name=to_search,
                               students_count=len(api_response['student']),
                               students=api_response['student'],
                               teachers_count=len(api_response['teacher']),
                               teachers=api_response['teacher'])
    else:
        elasticapm.tag(query_resource_type='not_exist')
        elasticapm.tag(query_type='other')
        flash('没有找到任何有关 {} 的信息,如果你认为这不应该发生,请联系我们。'.format(
            escape(request.values.get('id'))))
        return redirect(url_for('main.main'))
def run_expiry_once(self):
    """Delete every expired document (and associated files) from each
    expirable collection.

    Builds a date-math delete query from ``expiry_ts`` and the configured
    delay (rounded to the day when batch deletion is enabled), optionally
    deletes the associated filestore/cachestore entries with a thread
    pool, then removes the matching documents. Each collection runs in its
    own APM transaction when an APM client is configured.
    """
    now = now_as_iso()
    delay = self.config.core.expiry.delay
    hour = self.datastore.ds.hour
    day = self.datastore.ds.day

    # Expire data
    for collection in self.expirable_collections:
        # Call heartbeat pre-dated by 5 minutes. If a collection takes more than
        # 5 minutes to expire, this container could be seen as unhealthy. The down
        # side is if it is stuck on something it will be more than 5 minutes before
        # the container is restarted.
        self.heartbeat(int(time.time() + 5 * 60))

        # Start of expiry transaction
        if self.apm_client:
            self.apm_client.begin_transaction("Delete expired documents")

        # Date-math query: everything expired more than `delay` hours ago,
        # rounded down to the day when batch deletion is enabled.
        if self.config.core.expiry.batch_delete:
            delete_query = f"expiry_ts:[* TO {now}||-{delay}{hour}/{day}]"
        else:
            delete_query = f"expiry_ts:[* TO {now}||-{delay}{hour}]"

        # rows=0: we only need the total count, not the documents.
        number_to_delete = collection.search(delete_query, rows=0, as_obj=False)['total']

        if self.apm_client:
            elasticapm.tag(query=delete_query)
            elasticapm.tag(number_to_delete=number_to_delete)

        self.log.info(f"Processing collection: {collection.name}")
        if number_to_delete != 0:
            if self.config.core.expiry.delete_storage and collection.name in self.fs_hashmap:
                with elasticapm.capture_span(name=f'FILESTORE [ThreadPoolExecutor] :: delete()',
                                             labels={"num_files": number_to_delete,
                                                     "query": delete_query}):
                    # Delete associated files
                    with concurrent.futures.ThreadPoolExecutor(
                            self.config.core.expiry.workers) as executor:
                        res = {
                            item['id']: executor.submit(self.fs_hashmap[collection.name],
                                                        item['id'])
                            for item in collection.stream_search(delete_query, fl='id',
                                                                 as_obj=False)
                        }
                    # Re-raise any deletion failure from the worker threads.
                    for v in res.values():
                        v.result()

                self.log.info(f' Deleted associated files from the '
                              f'{"cachestore" if "cache" in collection.name else "filestore"}...')

            # Proceed with deletion
            collection.delete_matching(delete_query, workers=self.config.core.expiry.workers)
            self.counter.increment(f'{collection.name}', increment_by=number_to_delete)
            self.log.info(f" Deleted {number_to_delete} items from the datastore...")
        else:
            self.log.debug(" Nothing to delete in this collection.")

        # End of expiry transaction
        if self.apm_client:
            self.apm_client.end_transaction(collection.name, 'deleted')
def test_tag_while_no_transaction(caplog):
    """elasticapm.tag() outside any transaction logs a WARNING naming the tag.

    Scope capture to the ``elasticapm.errors`` logger with ``caplog.at_level``
    so the warning is captured regardless of that logger's configured level,
    and so ``records[0]`` is guaranteed to be the record this test emitted
    (matches the pattern used by the sibling test above).
    """
    with caplog.at_level(logging.WARNING, "elasticapm.errors"):
        elasticapm.tag(foo='bar')
    record = caplog.records[0]
    assert record.levelno == logging.WARNING
    assert 'foo' in record.args
def test_tag_with_non_string_value():
    """Non-string tag values must be coerced to strings on the transaction."""
    tracer = Tracer(lambda: [], lambda: [], lambda *args: None)
    transaction = tracer.begin_transaction("test")
    elasticapm.tag(foo=1)
    tracer.end_transaction(200, "test")
    expected = {"foo": "1"}
    assert transaction.tags == expected
def test_tag_with_non_string_value():
    """Non-string tag values must be coerced to strings on the transaction."""
    store = TransactionsStore(lambda: [], 99999)
    transaction = store.begin_transaction("test")
    elasticapm.tag(foo=1)
    store.end_transaction(200, 'test')
    expected = {'foo': '1'}
    assert transaction.tags == expected
async def tracked_sleep(sleep_time, counter):
    """Sleep inside an APM span, then tag the transaction with the duration."""
    span_name = 'sleep_%d_%.3f' % (counter, sleep_time)
    tag_key = 'sleep_%d' % counter
    async with elasticapm.async_capture_span(span_name):
        await asyncio.sleep(sleep_time)
        elasticapm.tag(**{tag_key: sleep_time})