def insert_data_to_db(self, metric_namespace: str, metric_name: str, data_type: int, kv, job_level=False):
    with DB.connection_context():
        try:
            # Build a template row carrying the metadata shared by every metric point.
            tracking_metric = TrackingMetric.model(table_index=self.job_id)
            tracking_metric.f_job_id = self.job_id
            # Job-level metrics are attributed to the virtual 'dag' component.
            tracking_metric.f_component_name = self.component_name if not job_level else 'dag'
            tracking_metric.f_task_id = self.task_id
            tracking_metric.f_role = self.role
            tracking_metric.f_party_id = self.party_id
            tracking_metric.f_metric_namespace = metric_namespace
            tracking_metric.f_metric_name = metric_name
            tracking_metric.f_type = data_type
            default_db_source = tracking_metric.to_json()
            # Expand each (key, value) pair into its own row, serializing both sides.
            tracking_metric_data_source = []
            for k, v in kv:
                db_source = default_db_source.copy()
                db_source['f_key'] = serialize_b64(k)
                db_source['f_value'] = serialize_b64(v)
                db_source['f_create_time'] = current_timestamp()
                tracking_metric_data_source.append(db_source)
            # Write all rows in one bulk insert against the job's metric table.
            self.bulk_insert_model_data(TrackingMetric.model(table_index=self.get_table_index()),
                                        tracking_metric_data_source)
        except Exception as e:
            stat_logger.exception(e)
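# Illustrative usage sketch (not part of the original module): assuming `tracker`
# is an instance of this tracking class and `losses` is a list of per-iteration
# loss values, metric points are passed as an iterable of (key, value) pairs and
# written in a single bulk insert, e.g.:
#
#     tracker.insert_data_to_db(metric_namespace='train',
#                               metric_name='loss',
#                               data_type=1,
#                               kv=[(iteration, loss) for iteration, loss in enumerate(losses)],
#                               job_level=False)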
def save_metric_meta_remote(self, metric_namespace: str, metric_name: str, metric_meta: MetricMeta,
                            job_level: bool = False):
    # TODO: in the next version this will be moved to the tracking api module in the arch/api package
    stat_logger.info('request save job {} component {} on {} {} {} {} metric meta'.format(
        self.job_id, self.component_name, self.role, self.party_id, metric_namespace, metric_name))
    # Serialize the metric meta and post it to the local tracking API endpoint.
    request_body = dict()
    request_body['metric_namespace'] = metric_namespace
    request_body['metric_name'] = metric_name
    request_body['metric_meta'] = serialize_b64(metric_meta, to_str=True)
    request_body['job_level'] = job_level
    response = api_utils.local_api(
        method='POST',
        endpoint='/{}/tracking/{}/{}/{}/{}/{}/metric_meta/save'.format(
            API_VERSION, self.job_id, self.component_name, self.task_id, self.role, self.party_id),
        json_body=request_body)
    # The endpoint reports success with retcode 0.
    return response['retcode'] == 0
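# Illustrative usage sketch (not part of the original module): assuming a
# MetricMeta entity has already been constructed elsewhere (its constructor is
# not shown in this section), the remote save posts the serialized meta to the
# local tracking endpoint and reports success via the boolean return value, e.g.:
#
#     saved = tracker.save_metric_meta_remote(metric_namespace='train',
#                                             metric_name='loss',
#                                             metric_meta=metric_meta,
#                                             job_level=False)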