Example 1
 def read_metrics_from_db(self,
                          metric_namespace: str,
                          metric_name: str,
                          data_type,
                          job_level=False):
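     # Generator: queries the job-scoped TrackingMetric table and yields
     # (key, value) pairs, base64-deserializing both fields on the fly.
     # The `metrics` list below is never populated; the trailing `return`
     # merely ends the generator.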
     metrics = []
     try:
         tracking_metric_model = self.get_dynamic_db_model(
             TrackingMetric, self.job_id)
         tracking_metrics = tracking_metric_model.select(
             tracking_metric_model.f_key,
             tracking_metric_model.f_value).where(
                 tracking_metric_model.f_job_id == self.job_id,
                 tracking_metric_model.f_component_name == (
                     self.component_name if not job_level else
                     job_utils.job_virtual_component_name()),
                 tracking_metric_model.f_role == self.role,
                 tracking_metric_model.f_party_id == self.party_id,
                 tracking_metric_model.f_metric_namespace ==
                 metric_namespace,
                 tracking_metric_model.f_metric_name == metric_name,
                 tracking_metric_model.f_type == data_type)
         for tracking_metric in tracking_metrics:
             yield deserialize_b64(tracking_metric.f_key), deserialize_b64(
                 tracking_metric.f_value)
     except Exception as e:
         schedule_logger(self.job_id).exception(e)
         raise e
     return metrics
Example 2
 def save_table_meta(self, meta):
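     # Build a StorageTableMeta from the incoming meta dict; `part_of_data`
     # and `schema` arrive base64-serialized and are decoded before create().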
     schedule_logger(self.job_id).info(f'start save table meta:{meta}')
     address = storage.StorageTableMeta.create_address(storage_engine=meta.get("engine"),
                                                       address_dict=meta.get("address"))
     table_meta = storage.StorageTableMeta(name=meta.get("name"), namespace=meta.get("namespace"), new=True)
     table_meta.set_metas(**meta)
     meta["address"] = address
     meta["part_of_data"] = deserialize_b64(meta["part_of_data"])
     meta["schema"] = deserialize_b64(meta["schema"])
     table_meta.create()
     schedule_logger(self.job_id).info(f'save table meta success')
Example 3
 def python_value(self, value):
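     # Field deserializer: PICKLE-typed columns are stored base64-encoded,
     # JSON-typed columns are parsed with the configured hooks.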
     if self._serialized_type == SerializedType.PICKLE:
         return deserialize_b64(value)
     elif self._serialized_type == SerializedType.JSON:
         if value is None:
             return {}
         return json_loads(value, object_hook=self._object_hook, object_pairs_hook=self._object_pairs_hook)
     else:
         raise ValueError(f"the serialized type {self._serialized_type} is not supported")
Example 4
 def get_table_meta(self, table_name, table_namespace):
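     # Fetch table metadata via the local tracker API, then rebuild a
     # StorageTableMeta, base64-deserializing `part_of_data` and `schema`.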
     request_body = {"table_name": table_name, "namespace": table_namespace}
     response = api_utils.local_api(job_id=self.job_id,
                                    method='POST',
                                    endpoint='/tracker/{}/{}/{}/{}/{}/{}/table_meta/get'.format(
                                        self.job_id,
                                        self.component_name,
                                        self.task_id,
                                        self.task_version,
                                        self.role,
                                        self.party_id),
                                    json_body=request_body)
     if response['retcode'] != RetCode.SUCCESS:
         raise Exception(f"create table meta failed:{response['retmsg']}")
     else:
         data_table_meta = storage.StorageTableMeta(name=table_name,
                                                    namespace=table_namespace, new=True)
         data_table_meta.set_metas(**response["data"])
         data_table_meta.address = storage.StorageTableMeta.create_address(storage_engine=response["data"].get("engine"),
                                                                           address_dict=response["data"].get("address"))
         data_table_meta.part_of_data = deserialize_b64(data_table_meta.part_of_data)
         data_table_meta.schema = deserialize_b64(data_table_meta.schema)
         return data_table_meta
Example 5
def save_metric_meta(job_id, component_name, task_version, task_id, role,
                     party_id):
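    # API handler: the metric meta arrives base64-serialized in the request
    # body and is decoded before being handed to the Tracker.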
    request_data = request.json
    tracker = Tracker(job_id=job_id,
                      component_name=component_name,
                      task_id=task_id,
                      task_version=task_version,
                      role=role,
                      party_id=party_id)
    metric_meta = deserialize_b64(request_data['metric_meta'])
    tracker.save_metric_meta(metric_namespace=request_data['metric_namespace'],
                             metric_name=request_data['metric_name'],
                             metric_meta=metric_meta,
                             job_level=request_data['job_level'])
    return get_json_result()
Example 6
 def read_summary_from_db(self):
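     # Load the ComponentSummary row for this job/component/role/party and
     # base64-deserialize its f_summary field; return "" if no row exists.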
     try:
         summary_model = self.get_dynamic_db_model(ComponentSummary,
                                                   self.job_id)
         summary = summary_model.get_or_none(
             summary_model.f_job_id == self.job_id,
             summary_model.f_component_name == self.component_name,
             summary_model.f_role == self.role,
             summary_model.f_party_id == self.party_id)
         if summary:
             cpn_summary = deserialize_b64(summary.f_summary)
         else:
             cpn_summary = ""
     except Exception as e:
         schedule_logger(self.job_id).exception(e)
         raise e
     return cpn_summary
Example 7
    def restore(self, model_id: str, model_version: str, store_address: dict):
        """
        Restore model from mysql to local cache
        :param model_id:
        :param model_version:
        :param store_address:
        :return:
        """
        model = PipelinedModel(model_id, model_version)
        self.get_connection(store_address)

        try:
            with DB.connection_context():
                models_in_tables = MachineLearningModel.select().where(
                    MachineLearningModel.f_model_id == model_id,
                    MachineLearningModel.f_model_version == model_version,
                ).order_by(MachineLearningModel.f_slice_index)
            if not models_in_tables:
                raise ValueError(f"Cannot found model in table.")

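            # Each stored slice is base64-deserialized to bytes individually,
            # then the slices are concatenated into the model archive.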
            model_archive_data = b''.join(
                deserialize_b64(models_in_table.f_content)
                for models_in_table in models_in_tables)
            if not model_archive_data:
                raise ValueError(f"Cannot get model archive data.")

            with open(model.archive_model_file_path, "wb") as fw:
                fw.write(model_archive_data)
            model.unpack_model(model.archive_model_file_path)
        except Exception as e:
            LOGGER.exception(e)
            raise Exception(
                f"Restore model {model_id} {model_version} from mysql failed.")
        else:
            LOGGER.info(
                f"Restore model to {model.archive_model_file_path} from mysql successfully."
            )
        finally:
            self.close_connection()
Example 8
 def restore(self, model_id: str, model_version: str, store_address: dict):
     """
     Restore model from mysql to local cache
     :param model_id:
     :param model_version:
     :param store_address:
     :return:
     """
     try:
         self.get_connection(config=store_address)
         model = PipelinedModel(model_id=model_id, model_version=model_version)
         with DB.connection_context():
             models_in_tables = MachineLearningModel.select().where(MachineLearningModel.f_model_id == model_id,
                                                                    MachineLearningModel.f_model_version == model_version).\
                 order_by(MachineLearningModel.f_slice_index)
             if not models_in_tables:
                 raise Exception("Restore model {} {} from mysql failed: {}".format(
                     model_id, model_version, "can not found model in table"))
             f_content = ''
             for models_in_table in models_in_tables:
                 if not f_content:
                     f_content = models_in_table.f_content
                 else:
                     f_content += models_in_table.f_content
             model_archive_data = deserialize_b64(f_content)
             if not model_archive_data:
                 raise Exception("Restore model {} {} from mysql failed: {}".format(
                     model_id, model_version, "can not get model archive data"))
             with open(model.archive_model_file_path, "wb") as fw:
                 fw.write(model_archive_data)
             model.unpack_model(model.archive_model_file_path)
             LOGGER.info("Restore model to {} from mysql successfully".format(model.archive_model_file_path))
         self.close_connection()
     except Exception as e:
         LOGGER.exception(e)
         raise Exception("Restore model {} {} from mysql failed".format(model_id, model_version))
Example 9
 def python_value(self, value):
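     # Simpler field variant: every stored value is decoded with deserialize_b64.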
     return deserialize_b64(value)
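
All of the examples above rely on the same helper pair. Below is a minimal sketch of what serialize_b64/deserialize_b64 typically do, assuming the conventional pickle-then-base64 scheme; it is an illustration only, not the library's actual implementation (which may add safeguards such as a restricted unpickler).

import base64
import pickle


def serialize_b64(obj, to_str=False):
    # pickle the object, then base64-encode it so it can live in a text/blob
    # column such as f_value, f_summary or f_content in the examples above
    encoded = base64.b64encode(pickle.dumps(obj))
    return encoded.decode("utf-8") if to_str else encoded


def deserialize_b64(src):
    # accept str or bytes, base64-decode, then unpickle
    if isinstance(src, str):
        src = src.encode("utf-8")
    return pickle.loads(base64.b64decode(src))


# round trip
assert deserialize_b64(serialize_b64({"metric": [1, 2, 3]})) == {"metric": [1, 2, 3]}

Because deserialization unpickles arbitrary data, helpers like these should only ever be applied to values the application itself wrote, never to untrusted input.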