def generate(name: str, cards: Dict[Tuple[int, int], List[Dict]], semester: Semester, filename: str) -> None:
    """
    Generate an ics file and save it to the calendar directory.

    :param name: name of the student
    :param cards: the cards (lessons) the student takes part in
    :param semester: the semester being exported
    :param filename: output file name, with extension
    :return: None
    """
    from everyclass.server import statsd

    with tracer.trace("calendar_init"):
        semester_string = semester.to_str(simplify=True)
        semester = semester.to_tuple()

        # build the Calendar object
        cal = Calendar()
        cal.add('prodid', '-//Admirable//EveryClass//EN')
        cal.add('version', '2.0')
        cal.add('calscale', 'GREGORIAN')
        cal.add('method', 'PUBLISH')
        cal.add('X-WR-CALNAME', name + '的' + semester_string + '课表')
        cal.add('X-WR-TIMEZONE', 'Asia/Shanghai')

        # timezone component. tzc/tzs were undefined in this excerpt; reconstructed here for
        # Asia/Shanghai (UTC+8, no DST) using icalendar's Timezone/TimezoneStandard.
        tzc = Timezone()
        tzc.add('tzid', 'Asia/Shanghai')
        tzs = TimezoneStandard()
        tzs.add('tzname', 'CST')
        tzs.add('dtstart', datetime(1970, 1, 1, 0, 0, 0))
        tzs.add('TZOFFSETFROM', timedelta(hours=8))
        tzs.add('TZOFFSETTO', timedelta(hours=8))
        tzc.add_component(tzs)
        cal.add_component(tzc)

    with tracer.trace("add_events"):
        # add an event for each occurrence of each card
        for time in range(1, 7):
            for day in range(1, 8):
                if (day, time) in cards:
                    for card in cards[(day, time)]:
                        for week in card['week']:
                            dtstart = _get_datetime(week, day, get_time(time)[0], semester)
                            dtend = _get_datetime(week, day, get_time(time)[1], semester)

                            if dtstart.year == 1984:
                                continue

                            cal.add_component(_build_event(card_name=card['name'],
                                                           times=(dtstart, dtend),
                                                           classroom=card['classroom'],
                                                           teacher=card['teacher'],
                                                           week_string=card['week_string'],
                                                           current_week=week,
                                                           cid=card['cid']))

    with tracer.trace("write_file"):
        with open(os.path.join(calendar_dir(), filename), 'wb') as f:
            data = cal.to_ical()
            statsd.histogram('calendar.ics.generate.size', len(data))
            f.write(data)
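# A minimal usage sketch for generate() above, not part of the original module. The card dict
# shape is inferred from the loop body; the Semester string constructor is inferred from its
# use elsewhere in this codebase (Semester(semester_str)); all names and values are illustrative.
def _demo_generate():
    sample_cards = {
        (1, 1): [{               # Monday, first period
            'name': 'Calculus',
            'week': [1, 2, 3],   # occurs in weeks 1-3
            'classroom': 'A-101',
            'teacher': 'Dr. Zhang',
            'week_string': '1-3周',
            'cid': 'demo-cid',
        }],
    }
    generate(name='张三', cards=sample_cards, semester=Semester('2019-2020-1'), filename='demo.ics')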
def run(event, context):
    with tracer.trace("enip.run_national"):
        with tracer.trace("enip.run_national.ingest"):
            ingest_id, ingest_dt, ingest_data = ingest_all()
        with tracer.trace("enip.run_national.export"):
            export_national(ingest_id, ingest_dt, ingest_dt.strftime("%Y%m%d%H%M%S"), ingest_data)
def get(self):
    num_traces = int(self.get_argument("traces", 1))
    num_spans_per_trace = int(self.get_argument("spans_per_trace", 0))
    num_str_tags_per_span = int(self.get_argument("str_tags_per_span", 5))
    num_int_tags_per_span = int(self.get_argument("int_tags_per_span", 5))
    tag_key_size = int(self.get_argument("tag_key_size", 10))
    str_tag_value_size = int(self.get_argument("tag_value_size", 15))

    def _set_tags(span):
        for i in range(num_str_tags_per_span):
            span.set_tag("s{}".format(str(i) * (tag_key_size - 1)), "*" * str_tag_value_size)
        for i in range(num_int_tags_per_span):
            span.set_tag("i{}".format(str(i) * (tag_key_size - 1)), 12312312)

    try:
        from ddtrace import tracer
    except ImportError:
        log.warning("handling the request - no tracer installed")
    else:
        log.warning("handling request with num_traces=%d, num_spans_per_trace=%d",
                    num_traces, num_spans_per_trace)
        for _ in range(num_traces):
            with tracer.trace("operation", service="stresser", resource="GET /stress") as s:
                _set_tags(s)
                for i in range(num_spans_per_trace - 1):
                    with tracer.trace("child_operation_{}".format(i)) as s:
                        _set_tags(s)
    self.write("OK")
def get_classroom(url_rid, url_semester):
    """Classroom query"""
    # decrypt identifier in URL
    try:
        _, room_id = decrypt(url_rid, resource_type='room')
    except ValueError:
        return render_template("common/error.html", message=MSG_INVALID_IDENTIFIER)

    # RPC to get classroom timetable
    with tracer.trace('rpc_get_classroom_timetable'):
        try:
            room = Entity.get_classroom_timetable(url_semester, room_id)
        except Exception as e:
            return handle_exception_with_error_page(e)

    with tracer.trace('process_rpc_result'):
        cards = defaultdict(list)
        for card in room.cards:
            day, time = lesson_string_to_tuple(card.lesson)
            cards[(day, time)].append(card)

    empty_5, empty_6, empty_sat, empty_sun = _empty_column_check(cards)
    available_semesters = semester_calculate(url_semester, room.semesters)

    return render_template('query/room.html',
                           room=room,
                           cards=cards,
                           empty_sat=empty_sat,
                           empty_sun=empty_sun,
                           empty_6=empty_6,
                           empty_5=empty_5,
                           available_semesters=available_semesters,
                           current_semester=url_semester)
def export_national(ingest_run_id, ingest_run_dt, export_name, ingest_data=None):
    logging.info("Running national export...")
    with tracer.trace("enip.export.export_ntl.run_export"):
        data = NationalDataExporter(ingest_run_id, ingest_run_dt).run_export(ingest_data)
    with tracer.trace("enip.export.export_ntl.export_to_s3"):
        was_different, cdn_url = export_to_s3(
            ingest_run_id,
            ingest_run_dt,
            data.json(by_alias=True),
            national_schema,
            "national",
            export_name,
        )
    if was_different:
        logging.info(f"National export completed WITH new results: {cdn_url}")
    else:
        logging.info(f"National export completed WITHOUT new results: {cdn_url}")
    return cdn_url
def main():
    print('querying vertica')
    with tracer.trace('vertica_example', service="vertica-example"):
        query()
    with tracer.trace('vertica_example_error', service="vertica-example"):
        invalid_query()
    with tracer.trace('vertica_example_fetching', service="vertica-example"):
        fetching()
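# The helpers main() calls are not shown in this excerpt. A hedged sketch of what query() might
# look like, assuming the vertica_python client; host and credentials are placeholders. With
# ddtrace's Vertica integration patched, the cursor calls would appear as child spans of the
# 'vertica_example' span opened in main().
import vertica_python

def query():
    conn_info = {'host': '127.0.0.1', 'port': 5433, 'user': 'dbadmin',
                 'password': '', 'database': 'docker'}
    with vertica_python.connect(**conn_info) as connection:
        cursor = connection.cursor()
        cursor.execute("SELECT 1")
        print(cursor.fetchall())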
def run_states(event, context):
    with tracer.trace("enip.run_states"):
        with tracer.trace("enip.run_states.ingest"):
            ingest_dt = datetime.now(tz=timezone.utc)
            ap_data = ingest_ap(cursor=None, ingest_id=-1, save_to_db=False, return_levels={"county"})
        with tracer.trace("enip.run_states.export"):
            export_all_states(ap_data, ingest_dt)
def execute_psql(temp_sql_file_path, source_path, download_job):
    """Executes a single PSQL command within its own Subprocess"""
    download_sql = Path(temp_sql_file_path).read_text()
    if download_sql.startswith("\\COPY"):
        # The trace library parses the SQL but cannot understand the psql-specific \COPY command.
        # Use standard COPY here.
        download_sql = download_sql[1:]

    # Stack 3 context managers: (1) psql code, (2) download replica query, (3) (same) Postgres query
    with SubprocessTrace(
        name=f"job.{JOB_TYPE}.download.psql",
        service="bulk-download",
        resource=download_sql,
        span_type=SpanTypes.SQL,
        source_path=source_path,
    ), tracer.trace(
        name="postgres.query", service="db_downloaddb", resource=download_sql, span_type=SpanTypes.SQL
    ), tracer.trace(
        name="postgres.query", service="postgres", resource=download_sql, span_type=SpanTypes.SQL
    ):
        try:
            log_time = time.perf_counter()
            temp_env = os.environ.copy()
            if download_job and not download_job.monthly_download:
                # Since terminating the process isn't guaranteed to end the DB statement,
                # add a timeout to the client connection.
                temp_env["PGOPTIONS"] = f"--statement-timeout={settings.DOWNLOAD_DB_TIMEOUT_IN_HOURS}h"
            cat_command = subprocess.Popen(["cat", temp_sql_file_path], stdout=subprocess.PIPE)
            subprocess.check_output(
                ["psql", "-q", "-o", source_path, retrieve_db_string(), "-v", "ON_ERROR_STOP=1"],
                stdin=cat_command.stdout,
                stderr=subprocess.STDOUT,
                env=temp_env,
            )
            duration = time.perf_counter() - log_time
            write_to_log(
                message=f"Wrote {os.path.basename(source_path)}, took {duration:.4f} seconds",
                download_job=download_job,
            )
        except Exception as e:
            if not settings.IS_LOCAL:
                # Not logging the command as it can contain the database connection string
                e.cmd = "[redacted psql command]"
            logger.error(e)
            sql = subprocess.check_output(["cat", temp_sql_file_path]).decode()
            logger.error(f"Faulty SQL: {sql}")
            raise e
def get_student(url_sid: str, url_semester: str):
    """Student query"""
    # decrypt identifier in URL
    try:
        _, student_id = decrypt(url_sid, resource_type='student')
    except ValueError:
        return render_template("common/error.html", message=MSG_INVALID_IDENTIFIER)

    # RPC to get the student's timetable
    with tracer.trace('rpc_get_student_timetable'):
        try:
            student = Entity.get_student_timetable(student_id, url_semester)
        except Exception as e:
            return handle_exception_with_error_page(e)

    # save sid_orig to session for verifying purpose
    # must be placed before the privacy level check; otherwise a registered user could be
    # redirected to the register page
    session[SESSION_LAST_VIEWED_STUDENT] = StudentSession(sid_orig=student.student_id,
                                                          sid=student.student_id_encoded,
                                                          name=student.name)

    # permission check; return early if the visitor has no permission
    has_permission, return_val = check_permission(student)
    if not has_permission:
        return return_val

    with tracer.trace('process_rpc_result'):
        cards: Dict[Tuple[int, int], List[Dict[str, str]]] = dict()
        for card in student.cards:
            day, time = lesson_string_to_tuple(card.lesson)
            if (day, time) not in cards:
                cards[(day, time)] = list()
            cards[(day, time)].append(card)

    empty_5, empty_6, empty_sat, empty_sun = _empty_column_check(cards)
    available_semesters = semester_calculate(url_semester, sorted(student.semesters))

    # add a visitor record
    Redis.add_visitor_count(student.student_id, session.get(SESSION_CURRENT_USER, None))

    return render_template('query/student.html',
                           student=student,
                           cards=cards,
                           empty_sat=empty_sat,
                           empty_sun=empty_sun,
                           empty_6=empty_6,
                           empty_5=empty_5,
                           available_semesters=available_semesters,
                           current_semester=url_semester)
def worker_function(parent):
    tracer.span_buffer.set(parent)
    seen_resources.append(tracer.span_buffer.get().resource)
    with tracer.trace("greenlet.call") as span:
        span.resource = "sibling"
        gevent.sleep()
        # Ensure we have the correct parent span even after a context switch
        eq_(tracer.span_buffer.get().span_id, span.span_id)
        with tracer.trace("greenlet.other_call") as child:
            child.resource = "sibling_child"
def fn(parent):
    tracer.span_buffer = GreenletLocalSpanBuffer()
    tracer.span_buffer.set(parent)
    with tracer.trace("greenlet.call") as span:
        span.service = "greenlet"
        gevent.sleep()
        # Ensure we have the correct parent span even after a context switch
        eq_(tracer.span_buffer.get().span_id, span.span_id)
        with tracer.trace("greenlet.child_call") as child:
            eq_(child.parent_id, span.span_id)
def export_state(ingest_run_dt, state_code, ingest_data):
    with tracer.trace("enip.export.export_state.run_export"):
        data = StateDataExporter(ingest_run_dt, state_code).run_export(ingest_data)
    with tracer.trace("enip.export.export_state.export_to_s3"):
        return export_to_s3(
            0,
            ingest_run_dt,
            data.json(by_alias=True),
            state_schema,
            f"states/{state_code}",
            ingest_run_dt.strftime("%Y%m%d%H%M%S"),
        )
def get_entity(id, kind='adlib'):
    with tracer.trace("utils.get_entity", service="adlib") as span:
        # Get adlib entry
        client = datastore.Client()
        key = client.key(kind, id)
        try:
            with tracer.trace("client.get", service="datastore") as child_span:
                entity = client.get(key)
                return entity
        except Exception as e:
            print(e)  # Oops! Tried to retrieve an entity that doesn't exist
            return None
def pair():
    """A complex endpoint that makes a request to another Python service"""
    name = request.args.get('name')

    with tracer.trace("beer.query", service="beer-database"):
        beer = Beer.query.filter_by(name=name).first()  # force a query
    with tracer.trace("donuts.query", service="beer-database"):
        Donut.query.all()

    with tracer.trace("donuts.query") as span:
        span.set_tag('beer.name', name)
        match = best_match(beer)

    return jsonify(match=match)
def create_function_execution_span(context, function_name, is_cold_start, trace_context):
    tags = {}
    if context:
        function_arn = (context.invoked_function_arn or "").lower()
        tags = {
            "cold_start": str(is_cold_start).lower(),
            "function_arn": function_arn,
            "request_id": context.aws_request_id,
            "resource_names": context.function_name,
        }
    source = trace_context["source"]
    if source != TraceContextSource.DDTRACE:
        tags["_dd.parent_source"] = source
    args = {
        "service": "aws.lambda",
        "resource": function_name,
        "span_type": "serverless",
    }
    tracer.set_tags({"_dd.origin": "lambda"})
    span = tracer.trace("aws.lambda", **args)
    if span:
        span.set_tags(tags)
    return span
def test_custom_logging_injection_global_config(self):
    """Ensure custom log injection via get_correlation_log_record returns proper tracer information."""
    capture_log = structlog.testing.LogCapture()
    structlog.configure(processors=[
        tracer_injection, capture_log, structlog.processors.JSONRenderer()
    ])
    logger = structlog.get_logger()

    with override_global_config(
            dict(version="global-version", env="global-env", service="global-service")):
        with tracer.trace("test span") as span:
            logger.msg("Hello!")

    assert len(capture_log.entries) == 1
    assert capture_log.entries[0]["event"] == "Hello!"
    dd_log_record = capture_log.entries[0]["dd"]
    assert dd_log_record == {
        "span_id": str(span.span_id),
        "trace_id": str(span.trace_id),
        "service": "global-service",
        "env": "global-env",
        "version": "global-version",
    }
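# The tracer_injection processor configured in the test above is not shown in this excerpt.
# A hedged sketch of such a structlog processor, built only on ddtrace's public
# tracer.current_span() and global config; the "dd" field layout mirrors the test's assertions.
from ddtrace import config, tracer

def tracer_injection(logger, log_method, event_dict):
    span = tracer.current_span()
    if span:
        # attach trace ids plus the globally configured service/env/version
        event_dict["dd"] = {
            "span_id": str(span.span_id),
            "trace_id": str(span.trace_id),
            "service": config.service or "",
            "env": config.env or "",
            "version": config.version or "",
        }
    return event_dict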
async def receive(self, text_data: str):
    with tracer.trace("ws.receive") as span:
        try:
            event = json.loads(text_data)
        except Exception:
            log.error("json decode failed for event %r", text_data, exc_info=True)
            return

        _type = event.get("type")
        if not _type:
            log.error("No type provided for event %r", text_data)
            return
        span.set_tag("type", _type)

        user = await channels.auth.get_user(self.scope)
        span.set_tag("user", user.username)

        # TODO: Custom consumer middleware?
        consumer = self.subcons(event)
        if not consumer:
            log.error("No consumer found for event %r", text_data)
            return
        span.resource = consumer.app_name

        await consumer.receive(user, event)
def legacy_get_ics(student_id, semester_str):
    """
    Legacy iCalendar subscription endpoint, kept for backward compatibility. It only serves
    students who have not set a privacy level; in all other cases the new calendar-subscription
    token must be used to obtain the ics file.
    """
    # fix parameters
    place = student_id.find('-')
    semester_str = student_id[place + 1:len(student_id)] + '-' + semester_str
    student_id = student_id[:place]

    semester = Semester(semester_str)

    search_result = Entity.search(student_id)
    if len(search_result.students) != 1:  # bad request
        return abort(400)
    if semester.to_str() not in search_result.students[0].semesters:
        return abort(400)

    with tracer.trace('get_privacy_settings'):
        privacy_settings = PrivacySettings.get_level(search_result.students[0].student_id)

    if privacy_settings != 0:
        # force the user to get a calendar token when they are privacy-protected
        # but accessed through the legacy interface
        return "Visit {} to get your calendar".format(url_for("main.main", _external=True)), 401
    else:
        token = CalendarToken.get_or_set_calendar_token(resource_type="student",
                                                        identifier=search_result.students[0].student_id,
                                                        semester=semester.to_str())
        return redirect(url_for('calendar.ics_download', calendar_token=token))
def wrapper(self, *args, **kwargs):
    with tracer.trace(
        "run",
        service="{}-integration".format(self._check.name),
        resource="{}.run_job".format(type(self).__name__),
    ):
        self.run_job()
def test_unpatched(self):
    """
    Demonstrate a situation where thread-local storage leads to a bad tree:

    1. Main thread spawns several coroutines
    2. A coroutine is handed context from a sibling coroutine
    3. A coroutine incorrectly sees a "sibling" span as its parent
    """
    from ddtrace import tracer
    tracer.enabled = False

    seen_resources = []

    def my_worker_function(i):
        ok_(tracer.span_buffer.get())
        seen_resources.append(tracer.span_buffer.get().resource)
        with tracer.trace("greenlet.call") as span:
            span.resource = "sibling"
            gevent.sleep()

    with tracer.trace("web.request") as span:
        span.service = "web"
        span.resource = "parent"
        worker_count = 5
        workers = [gevent.spawn(my_worker_function, w) for w in range(worker_count)]
        gevent.joinall(workers)

    # check that a bad parent span was seen
    ok_("sibling" in seen_resources)
def test_global_patch(self):
    from ddtrace import tracer
    tracer.enabled = False

    # Ensure the patch is active
    ok_(isinstance(tracer.span_buffer._locals, gevent.local.local))

    seen_resources = []

    def worker_function(parent):
        tracer.span_buffer.set(parent)
        seen_resources.append(tracer.span_buffer.get().resource)
        with tracer.trace("greenlet.call") as span:
            span.resource = "sibling"
            gevent.sleep()
            # Ensure we have the correct parent span even after a context switch
            eq_(tracer.span_buffer.get().span_id, span.span_id)
            with tracer.trace("greenlet.other_call") as child:
                child.resource = "sibling_child"

    with tracer.trace("web.request") as span:
        span.service = "web"
        span.resource = "parent"
        worker_count = 5
        workers = [gevent.spawn(worker_function, span) for w in range(worker_count)]
        gevent.joinall(workers)

    # Ensure all greenlets see the right parent span
    ok_("sibling" not in seen_resources)
    ok_(all(s == "parent" for s in seen_resources))
def pymongo_integration(length):
    with tracer.trace("server pymongo operation"):
        client = MongoClient("mongo", 27017, serverSelectionTimeoutMS=2000)
        db = client["opentelemetry-tests"]
        collection = db["tests"]
        collection.find_one()
    return _random_string(length)
def run_export(self, preloaded_results: Iterable[SQLRecord]) -> structs.StateData:
    self.data = structs.StateData()

    sql_filter = "level = 'county' AND statepostal = %s"
    filter_params: List[Any] = [self.state]
    with tracer.trace("enip.export.state.historicals"):
        self.historical_counts = load_historicals(self.ingest_run_dt, sql_filter, filter_params)

    def handle_record(record):
        if record.officeid == "P":
            self.record_county_presidential_result(record)
        elif record.officeid == "S":
            self.record_county_senate_result(record)
        elif record.officeid == "H":
            self.record_county_house_result(record)
        else:
            raise RuntimeError(
                f"Uncategorizable result: {record.elex_id} {record.level} {record.officeid}"
            )

    for record in preloaded_results:
        if record.statepostal == self.state:
            handle_record(record)

    return self.data
async def test_two_traced_pipelines(redis_client):
    with tracer.trace("web-request", service="test"):
        if aioredis_version >= (2, 0):
            p1 = await redis_client.pipeline(transaction=False)
            p2 = await redis_client.pipeline(transaction=False)
            await p1.set("blah", "boo")
            await p2.set("foo", "bar")
            await p1.get("blah")
            await p2.get("foo")
        else:
            p1 = redis_client.pipeline()
            p2 = redis_client.pipeline()
            p1.set("blah", "boo")
            p2.set("foo", "bar")
            p1.get("blah")
            p2.get("foo")

        response_list1 = await p1.execute()
        response_list2 = await p2.execute()

    # the response from redis.set is True when the value was stored successfully
    assert response_list1[0] is True
    assert response_list2[0] is True
    # the response from get is the stored value
    assert response_list1[1].decode() == "boo"
    assert response_list2[1].decode() == "bar"
def test_local_patch(self):
    """
    Test patching a parent span into a coroutine's tracer
    """
    from ddtrace import tracer
    tracer.enabled = False
    from ddtrace.contrib.gevent import GreenletLocalSpanBuffer

    def fn(parent):
        tracer.span_buffer = GreenletLocalSpanBuffer()
        tracer.span_buffer.set(parent)
        with tracer.trace("greenlet.call") as span:
            span.service = "greenlet"
            gevent.sleep()
            # Ensure we have the correct parent span even after a context switch
            eq_(tracer.span_buffer.get().span_id, span.span_id)
            with tracer.trace("greenlet.child_call") as child:
                eq_(child.parent_id, span.span_id)

    with tracer.trace("web.request") as span:
        span.service = "web"
        worker = gevent.spawn(fn, span)
        worker.join()
def my_worker_function(i):
    ok_(tracer.span_buffer.get())
    seen_resources.append(tracer.span_buffer.get().resource)
    with tracer.trace("greenlet.call") as span:
        span.resource = "sibling"
        gevent.sleep()
def create_function_execution_span(
    context, function_name, is_cold_start, trace_context, merge_xray_traces
):
    tags = {}
    if context:
        function_arn = (context.invoked_function_arn or "").lower()
        tk = function_arn.split(":")
        function_arn = ":".join(tk[0:7]) if len(tk) > 7 else function_arn
        function_version = tk[7] if len(tk) > 7 else "$LATEST"
        tags = {
            "cold_start": str(is_cold_start).lower(),
            "function_arn": function_arn,
            "function_version": function_version,
            "request_id": context.aws_request_id,
            "resource_names": context.function_name,
            "datadog_lambda": datadog_lambda_version,
            "dd_trace": ddtrace_version,
        }
    source = trace_context["source"]
    if source == TraceContextSource.XRAY and merge_xray_traces:
        tags["_dd.parent_source"] = source
    args = {
        "service": "aws.lambda",
        "resource": function_name,
        "span_type": "serverless",
    }
    tracer.set_tags({"_dd.origin": "lambda"})
    span = tracer.trace("aws.lambda", **args)
    if span:
        span.set_tags(tags)
    return span
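# A hedged sketch of how a Lambda wrapper might drive create_function_execution_span() above.
# The handler body, the trace-context extraction, and TraceContextSource.EVENT are illustrative
# assumptions, not the datadog_lambda library's actual wiring.
def traced_handler(event, context):
    trace_context = {"source": TraceContextSource.EVENT}  # placeholder extraction result
    span = create_function_execution_span(
        context,
        function_name=context.function_name,
        is_cold_start=False,
        trace_context=trace_context,
        merge_xray_traces=False,
    )
    try:
        return {"statusCode": 200}  # real handler work goes here
    finally:
        if span:
            span.finish()  # close the span so it flushes at the end of the invocation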
async def call(self, method_name, kwargs: dict = None, *, expiration: int = None,
               priority: int = 5, delivery_mode: DeliveryMode = RPC.DELIVERY_MODE):
    with tracer.trace(method_name, service='rabbitmq'):
        future = self.create_future()
        headers = {'From': self.result_queue.name}

        # propagate the current trace context to the RPC server via message headers
        context = current_trace_context()
        self.DDTRACE_PROPAGATOR.inject(context, headers)

        message = Message(body=self.serialize(kwargs or {}),
                          type=RPCMessageTypes.call.value,
                          timestamp=time.time(),
                          priority=priority,
                          correlation_id=id(future),
                          delivery_mode=delivery_mode,
                          reply_to=self.result_queue.name,
                          headers=headers)

        if expiration is not None:
            message.expiration = expiration

        await self.channel.default_exchange.publish(
            message, routing_key=method_name, mandatory=True
        )

        return await future
def get_random_key(kind='adlib'):
    with tracer.trace("utils.get_random_key", service="adlib") as span:
        client = datastore.Client()
        query = client.query(kind=kind)  # was hard-coded to 'adlib', ignoring the parameter
        query.keys_only()
        with tracer.trace("query.fetch", service="datastore") as child_span:
            query_keys_list = list(query.fetch())
        query_keys_len = len(query_keys_list)
        if query_keys_len == 0:
            return None
        else:
            random_index = randint(0, query_keys_len - 1)
            return query_keys_list[random_index].key
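# A small composition of the two datastore helpers above: pick a random key of the kind, then
# load the corresponding entity. Key.id_or_name is standard google-cloud-datastore API; the
# helper itself is an illustrative sketch.
def get_random_entity(kind='adlib'):
    key = get_random_key(kind)
    if key is None:  # the kind has no entities
        return None
    return get_entity(key.id_or_name, kind)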
def register_by_password_status_refresh(request_id: str) -> Tuple[bool, str, Optional[str]]:
    """Register via academic-affairs password: refresh the status. Returns whether verification
    succeeded, the auth message, and the student/staff ID (if successful)."""
    req = VerificationRequest.find_by_id(uuid.UUID(request_id))
    if not req:
        raise IdentityVerifyRequestNotFoundError
    if req.method != "password":
        logger.warning("Non-password verification request is trying to get status from the password interface")
        raise IdentityVerifyMethodNotExpectedError
    # note: the original compared req.method against this status constant, which could never match
    if req.status == VerificationRequest.STATUS_PWD_SUCCESS:
        raise RequestIDUsed("Request ID is used and password is set. It cannot be reused.")

    # fetch status from everyclass-auth
    with tracer.trace('get_result'):
        rpc_result = Auth.get_result(str(request_id))

    if rpc_result.success:
        # password verified: mark the request successful and add the user
        verification_req = VerificationRequest.find_by_id(uuid.UUID(request_id))
        verification_req.set_status_success()
        add_user(identifier=verification_req.identifier,
                 password=verification_req.extra["password"],
                 password_encrypted=True)
        return True, "SUCCESS", verification_req.identifier
    else:
        # otherwise return the message from the auth service
        return False, rpc_result.message, None
def acquire(self, *args, **kw):
    tags = self.tags
    buckets = self.timing_buckets
    with datadog_bucket_timer("commcare.lock.acquire_time", tags, buckets), \
            tracer.trace("commcare.lock.acquire", resource=self.key) as span:
        acquired = self.lock.acquire(*args, **kw)
        span.set_tags({
            "key": self.key,
            "name": self.name,
            "acquired": ("true" if acquired else "false"),
        })
    if acquired:
        timeout = getattr(self.lock, "timeout", None)
        if timeout:
            self.end_time = time.time() + timeout
        self.lock_timer.start()
        if self.track_unreleased:
            self.lock_trace = tracer.trace("commcare.lock.locked", resource=self.key)
            self.lock_trace.set_tags({"key": self.key, "name": self.name})
    return acquired
def get(self, request, format=None):
    with tracer.trace('SearchView.get: parameter setup'):
        user = request.user

        # Search query.
        q = request.query_params.get('q', '').strip()

        # Paging parameters.
        page = request.query_params.get('page', '0')
        page = int(page) + 1
        size = int(request.query_params.get('size', '20'))
        sort = request.query_params.get('sort', '-sent_date')

        # Mail labeling parameters.
        label_id = request.query_params.get('label', None)
        account_ids = request.query_params.get('account', None)  # None means search in all owned email accounts.

    with tracer.trace('SearchView.get: email_accounts setup'):
        # Get a list of all email accounts added by the user or publicly shared with the user as a group inbox.
        email_accounts = get_shared_email_accounts(user, True)

        if account_ids:
            # Only search within the email accounts indicated by the account_ids parameter.
            account_ids = account_ids.split(',')
            email_accounts = email_accounts.filter(pk__in=account_ids)

        email_accounts = email_accounts.exclude(
            Q(is_active=False) | Q(is_deleted=True) | Q(is_authorized=False)
        )

    message_list = EmailMessage.objects

    if q:
        # System Gmail labels visible in Lily, where the user can search in.
        gmail_labels = [
            settings.GMAIL_LABEL_INBOX, settings.GMAIL_LABEL_SPAM, settings.GMAIL_LABEL_TRASH,
            settings.GMAIL_LABEL_SENT, settings.GMAIL_LABEL_DRAFT
        ]

        if label_id and label_id not in gmail_labels:
            # Also retrieve related user set labels because later the label name will be queried.
            email_accounts = email_accounts.prefetch_related('labels')

        # Prevent too many calls on the search API, so restrict the number of search results per email account.
        max_results = 3 * size

        messages_ids = []
        with tracer.trace('SearchView.get: for all accounts'):
            for email_account in email_accounts:
                if label_id:
                    with tracer.trace('SearchView.get: building q with label lookup'):
                        # Retrieve the label corresponding to the label_id, otherwise Gmail defaults to all mail.
                        label_name = label_id
                        if label_id not in gmail_labels:
                            # Retrieve the label name if label_id will differ from the user set label name.
                            try:
                                label_name = email_account.labels.get(label_id=label_id).name
                            except EmailLabel.DoesNotExist:
                                logger.error(
                                    "Incorrect label id {0} with search request for account {1}.".format(
                                        label_id, email_account
                                    )
                                )
                                # Failing label lookup within one account should not halt the complete search.
                                continue

                        q = u"{0} {1}:{2}".format(q, 'label', label_name)

                with tracer.trace('SearchView.get: retrieving message_ids by Gmail API'):
                    try:
                        connector = GmailConnector(email_account)
                        messages = connector.search(query=q, size=max_results)
                        messages_ids.extend([message['id'] for message in messages])
                    except (InvalidCredentialsError, NotFoundError, HttpAccessTokenRefreshError,
                            FailedServiceCallException) as e:
                        logger.error(
                            "Failed to search within account {0} with error: {1}.".format(email_account, e)
                        )
                        # Failing search within one account should not halt the complete search.
                        continue

        # Retrieve messages from the database.
        message_list = message_list.filter(message_id__in=messages_ids, account__in=email_accounts)

    with tracer.trace('SearchView.get: retrieving messages from db'):
        message_list = message_list.order_by(sort)
        # Exclude fields that aren't serialized and are potentially large.
        message_list = message_list.defer("body_html", "body_text", "snippet")
        # The serializer will query for account, sender and star label, so instead of the extra
        # separate queries, retrieve them now.
        message_list = message_list.select_related('account', 'sender')
        message_list = message_list.prefetch_related('labels', 'received_by')
        # It's possible Google search returns message_id's that aren't in the database (due to 'sync from now').
        actual_number_of_results = len(message_list)

    with tracer.trace('SearchView.get: serializing messages'):
        # Construct paginator.
        limit = size * page
        offset = limit - size
        message_list = message_list[offset:limit]

        serializer = EmailMessageListSerializer(message_list, many=True)

        result = {
            'hits': serializer.data,
            'total': actual_number_of_results,
        }

    return Response(result)
def child():
    with tracer.trace('child') as span:
        span.set_tag('a', 'b')
        return 'child'
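# A minimal sketch of calling child() above from within an enclosing span, so the 'child' span
# nests under 'parent' in the resulting trace; the parent name and tag are illustrative.
def parent():
    with tracer.trace('parent') as span:
        span.set_tag('kind', 'parent')
        return child()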
def run(self):
    self.track_load()
    failure_response = self._handle_basic_failure_modes()
    if failure_response:
        return FormProcessingResult(failure_response, None, [], [], 'known_failures')

    result = process_xform_xml(self.domain, self.instance, self.attachments, self.auth_context.to_json())
    submitted_form = result.submitted_form

    self._post_process_form(submitted_form)
    self._invalidate_caches(submitted_form)

    if submitted_form.is_submission_error_log:
        self.formdb.save_new_form(submitted_form)
        response = self.get_exception_response_and_log(submitted_form, self.path)
        return FormProcessingResult(response, None, [], [], 'submission_error_log')

    if submitted_form.xmlns == DEVICE_LOG_XMLNS:
        return self.process_device_log(submitted_form)

    cases = []
    ledgers = []
    submission_type = 'unknown'
    openrosa_kwargs = {}
    with result.get_locked_forms() as xforms:
        if len(xforms) > 1:
            self.track_load(len(xforms) - 1)
        if self.case_db:
            case_db_cache = self.case_db
            case_db_cache.cached_xforms.extend(xforms)
        else:
            case_db_cache = self.interface.casedb_cache(
                domain=self.domain, lock=True, deleted_ok=True, xforms=xforms,
                load_src="form_submission",
            )

        with case_db_cache as case_db:
            instance = xforms[0]

            if instance.is_duplicate:
                with tracer.trace('submission.process_duplicate'):
                    submission_type = 'duplicate'
                    existing_form = xforms[1]
                    stub = UnfinishedSubmissionStub.objects.filter(
                        domain=instance.domain, xform_id=existing_form.form_id
                    ).first()

                    result = None
                    if stub:
                        from corehq.form_processor.reprocess import reprocess_unfinished_stub_with_form
                        result = reprocess_unfinished_stub_with_form(stub, existing_form, lock=False)
                    elif existing_form.is_error:
                        from corehq.form_processor.reprocess import reprocess_form
                        result = reprocess_form(existing_form, lock_form=False)
                    if result and result.error:
                        submission_type = 'error'
                        openrosa_kwargs['error_message'] = result.error
                        if existing_form.is_error:
                            openrosa_kwargs['error_nature'] = ResponseNature.PROCESSING_FAILURE
                        else:
                            openrosa_kwargs['error_nature'] = ResponseNature.POST_PROCESSING_FAILURE
                    else:
                        self.interface.save_processed_models([instance])
            elif not instance.is_error:
                submission_type = 'normal'
                try:
                    case_stock_result = self.process_xforms_for_cases(xforms, case_db)
                except (IllegalCaseId, UsesReferrals, MissingProductId,
                        PhoneDateValueError, InvalidCaseIndex, CaseValueError) as e:
                    self._handle_known_error(e, instance, xforms)
                    submission_type = 'error'
                    openrosa_kwargs['error_nature'] = ResponseNature.PROCESSING_FAILURE
                except Exception as e:
                    # handle / log the error and reraise so the phone knows to resubmit
                    # note that in the case of edit submissions this won't flag the previous
                    # submission as having been edited. this is intentional, since we should treat
                    # this use case as if the edit "failed"
                    handle_unexpected_error(self.interface, instance, e)
                    raise
                else:
                    instance.initial_processing_complete = True
                    openrosa_kwargs['error_message'] = self.save_processed_models(case_db, xforms,
                                                                                  case_stock_result)
                    if openrosa_kwargs['error_message']:
                        openrosa_kwargs['error_nature'] = ResponseNature.POST_PROCESSING_FAILURE
                    cases = case_stock_result.case_models
                    ledgers = case_stock_result.stock_result.models_to_save
                    openrosa_kwargs['success_message'] = self._get_success_message(instance, cases=cases)
            elif instance.is_error:
                submission_type = 'error'

    response = self._get_open_rosa_response(instance, **openrosa_kwargs)
    return FormProcessingResult(response, instance, cases, ledgers, submission_type)