def _load(properties):
    """Load a LGBMExplainableModel from the given properties.

    :param properties: A serialized dictionary representation of the LGBMExplainableModel.
    :type properties: dict
    :return: The deserialized LGBMExplainableModel.
    :rtype: azureml.explain.model.mimic.models.LGBMExplainableModel
    """
    # Allocate the LGBMExplainableModel without running __init__, similar to pickle
    deserialized_model = LGBMExplainableModel.__new__(LGBMExplainableModel)
    # Retrieve the stored feature count
    _n_features = properties.pop(_N_FEATURES)
    # The class count is only serialized in the classification case
    is_multiclass = json.loads(properties[LightGBMSerializationConstants.MULTICLASS])
    if is_multiclass:
        _n_classes = properties.pop(_N_CLASSES)
    # Restore every remaining property onto the new instance
    for key, value in properties.items():
        if key in LightGBMSerializationConstants.nonify_properties:
            # Properties that were serialized as None and must be rebuilt on the fly
            if key == LightGBMSerializationConstants.LOGGER:
                identity = json.loads(properties[LightGBMSerializationConstants.IDENTITY])
                deserialized_model.__dict__[key] = logging.getLogger(__name__).getChild(identity)
            elif key == LightGBMSerializationConstants.TREE_EXPLAINER:
                deserialized_model.__dict__[key] = None
            else:
                raise Exception("Unknown nonify key on deserialize in LightGBMExplainableModel: {}".format(key))
        elif key in LightGBMSerializationConstants.save_properties:
            # Load the booster from the serialized model string and re-create the
            # LGBMClassifier or LGBMRegressor.
            # This is not recommended but can be necessary to get around pickle being not secure
            # See here for more info:
            # https://github.com/Microsoft/LightGBM/issues/1942
            # https://github.com/Microsoft/LightGBM/issues/1217
            rebuilt_booster = Booster(params={LightGBMSerializationConstants.MODEL_STR: value})
            if is_multiclass:
                rebuilt_estimator = LGBMClassifier()
                rebuilt_estimator._Booster = rebuilt_booster
                rebuilt_estimator._n_classes = _n_classes
            else:
                rebuilt_estimator = LGBMRegressor()
                rebuilt_estimator._Booster = rebuilt_booster
            rebuilt_estimator._n_features = _n_features
            deserialized_model.__dict__[key] = rebuilt_estimator
        elif key in LightGBMSerializationConstants.enum_properties:
            # NOTE: If more enums added in future, will need to handle this differently
            deserialized_model.__dict__[key] = ShapValuesOutput(json.loads(value))
        else:
            deserialized_model.__dict__[key] = json.loads(value)
    return deserialized_model
def _load(properties):
    """Load a LGBMExplainableModel from the given properties.

    :param properties: A serialized dictionary representation of the LGBMExplainableModel.
    :type properties: dict
    :return: The deserialized LGBMExplainableModel.
    :rtype: interpret_community.mimic.models.LGBMExplainableModel
    """
    # create the LGBMExplainableModel without any properties using the __new__ function, similar to pickle
    lgbm_model = LGBMExplainableModel.__new__(LGBMExplainableModel)
    # Get _n_features
    _n_features = properties.pop(_N_FEATURES)
    # If classification case get _n_classes
    if json.loads(properties[LightGBMSerializationConstants.MULTICLASS]):
        _n_classes = properties.pop(_N_CLASSES)
    fitted_ = None
    if _FITTED in properties:
        fitted_ = json.loads(properties[_FITTED])
    elif version.parse('3.3.1') <= version.parse(lightgbm.__version__):
        # If deserializing older model in newer version set this to true to prevent errors on calls
        fitted_ = True
    # load all of the properties
    for key, value in properties.items():
        # Regenerate the properties on the fly
        if key in LightGBMSerializationConstants.nonify_properties:
            if key == LightGBMSerializationConstants.LOGGER:
                parent = logging.getLogger(__name__)
                lightgbm_identity = json.loads(
                    properties[LightGBMSerializationConstants.IDENTITY])
                lgbm_model.__dict__[key] = parent.getChild(lightgbm_identity)
            elif key == LightGBMSerializationConstants.TREE_EXPLAINER:
                lgbm_model.__dict__[key] = None
            else:
                raise Exception(
                    "Unknown nonify key on deserialize in LightGBMExplainableModel: {}".format(key))
        elif key in LightGBMSerializationConstants.save_properties:
            # Load the booster from file and re-create the LGBMClassifier or LGBMRegressor
            # This is not recommended but can be necessary to get around pickle being not secure
            # See here for more info:
            # https://github.com/Microsoft/LightGBM/issues/1942
            # https://github.com/Microsoft/LightGBM/issues/1217
            booster_args = {LightGBMSerializationConstants.MODEL_STR: value}
            is_multiclass = json.loads(
                properties[LightGBMSerializationConstants.MULTICLASS])
            if is_multiclass:
                objective = LightGBMSerializationConstants.MULTICLASS
            else:
                objective = LightGBMSerializationConstants.REGRESSION
            # Newer versions of lightgbm accept the serialized model string as a
            # direct Booster constructor argument; probe the signature to decide.
            # NOTE: inspect.signature replaces inspect.getargspec, which was
            # deprecated since Python 3.0 and removed in Python 3.11.
            if LightGBMSerializationConstants.MODEL_STR in inspect.signature(Booster).parameters:
                extras = {LightGBMSerializationConstants.OBJECTIVE: objective}
                lgbm_booster = Booster(**booster_args, params=extras)
            else:
                # For backwards compatibility with older versions of lightgbm
                booster_args[LightGBMSerializationConstants.OBJECTIVE] = objective
                lgbm_booster = Booster(params=booster_args)
            if is_multiclass:
                new_lgbm = LGBMClassifier()
                new_lgbm._Booster = lgbm_booster
                new_lgbm._n_classes = _n_classes
            else:
                new_lgbm = LGBMRegressor()
                new_lgbm._Booster = lgbm_booster
            # Specify fitted_ for newer versions of lightgbm on deserialize
            if fitted_ is not None:
                new_lgbm.fitted_ = fitted_
            new_lgbm._n_features = _n_features
            lgbm_model.__dict__[key] = new_lgbm
        elif key in LightGBMSerializationConstants.enum_properties:
            # NOTE: If more enums added in future, will need to handle this differently
            lgbm_model.__dict__[key] = ShapValuesOutput(json.loads(value))
        else:
            lgbm_model.__dict__[key] = json.loads(value)
    return lgbm_model