def nyObjOfModel(self, pmmlObj, singMod):
    """Wrap a single model extracted from a multi-model PMML object into a
    standalone PMML object, carrying over the shared dictionaries."""
    import nyokaBase.PMML43Ext as ny
    tagName = singMod['pmmlModelObject'].original_tagname_
    if tagName == 'MiningModel':
        nyokaObj = ny.PMML(MiningBuildTask=pmmlObj.MiningBuildTask,
                           DataDictionary=pmmlObj.DataDictionary,
                           MiningModel=[singMod['pmmlModelObject']])
    elif tagName == 'DeepNetwork':
        nyokaObj = ny.PMML(DataDictionary=pmmlObj.DataDictionary,
                           DeepNetwork=[singMod['pmmlModelObject']])
    else:
        nyokaObj = None
    return nyokaObj
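# Usage sketch (illustrative, not part of the module): split the first
# MiningModel out of a multi-model PMML document and write it standalone.
# The file names, the 'trainer' instance, and the shape of singMod are
# assumptions; parse() is the generateDS-style parser in PMML43Ext.
import nyokaBase.PMML43Ext as ny

pmmlObj = ny.parse('multiModel.pmml', silence=True)
singMod = {'pmmlModelObject': pmmlObj.MiningModel[0]}

standalone = trainer.nyObjOfModel(pmmlObj, singMod)
if standalone is not None:
    with open('singleModel.pmml', 'w') as f:
        standalone.export(f, 0)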
def xgboost_to_pmml(pipeline, col_names, target_name, pmml_f_name='from_xgboost.pmml'):
    """
    Exports an xgboost pipeline object into PMML.

    Parameters
    ----------
    pipeline :
        An instance of Pipeline containing the preprocessing steps and the
        final estimator.
    col_names : list
        List of feature/column names.
    target_name : str
        Name of the target column.
    pmml_f_name : str
        Name of the PMML file (default='from_xgboost.pmml').

    Returns
    -------
    Writes a PMML file to disk.
    """
    try:
        model = pipeline.steps[-1][1]
    except AttributeError:
        raise TypeError("Exporter expects a pipeline instance and not an estimator instance")
    else:
        if isinstance(col_names, np.ndarray):
            col_names = col_names.tolist()
        ppln_sans_predictor = pipeline.steps[:-1]
        trfm_dict_kwargs = dict()
        derived_col_names = col_names
        categoric_values = tuple()
        mining_imp_val = tuple()
        if ppln_sans_predictor:
            pml_pp = pp.get_preprocess_val(ppln_sans_predictor, col_names, model)
            trfm_dict_kwargs['TransformationDictionary'] = pml_pp['trfm_dict']
            derived_col_names = pml_pp['derived_col_names']
            col_names = pml_pp['preprocessed_col_names']
            categoric_values = pml_pp['categorical_feat_values']
            mining_imp_val = pml_pp['mining_imp_values']
        PMML_kwargs = get_PMML_kwargs(model, derived_col_names, col_names,
                                      target_name, mining_imp_val, categoric_values)
        pmml = pml.PMML(
            version=sklToPmml.get_version(),
            Header=sklToPmml.get_header(),
            MiningBuildTask=sklToPmml.get_mining_buildtask(pipeline),
            DataDictionary=sklToPmml.get_data_dictionary(model, col_names, target_name,
                                                         categoric_values),
            **trfm_dict_kwargs,
            **PMML_kwargs
        )
        with open(pmml_f_name, "w") as outfile:
            pmml.export(outfile=outfile, level=0)
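# Usage sketch (illustrative): export a scikit-learn pipeline whose final
# step is an XGBoost estimator. The Iris CSV and column names are assumed;
# the surrounding module is expected to provide np, pp, pml, sklToPmml and
# get_PMML_kwargs via its own imports.
import pandas as pd
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from xgboost import XGBClassifier

df = pd.read_csv('iris.csv')
features = df.columns.drop('species').tolist()
X, y = df[features], df['species']

pipeline = Pipeline([
    ('scaling', StandardScaler()),
    ('xgb', XGBClassifier(n_estimators=10)),
])
pipeline.fit(X, y)

xgboost_to_pmml(pipeline, features, 'species', 'from_xgboost.pmml')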
def writePMML(self, model, predictedClass, fileName, dataSet):
    """Convert a trained Keras model to PMML and write it to fileName,
    recording failure details in the status file if conversion fails."""
    try:
        from nyokaBase.keras.keras_model_to_pmml import KerasToPmml
        pmmlToBack = KerasToPmml(model,
                                 model_name="TrainedModel",
                                 description="Keras Models in PMML",
                                 dataSet=dataSet,
                                 predictedClasses=predictedClass)
    except Exception as e:
        data_details = self.upDateStatus()
        data_details['status'] = 'Training Failed'
        data_details['errorMessage'] = 'Error while converting Keras to PMML >> ' + str(e)
        data_details['errorTraceback'] = traceback.format_exc()
        with open(self.statusFile, 'w') as filetosave:
            json.dump(data_details, filetosave)
        return -1
    scriptCode = self.pmmlObj['script']
    if scriptCode == []:
        scriptCode = None
    else:
        for sc in scriptCode:
            # Escape '<' so the embedded script stays valid in the XML output.
            sc.__dict__['valueOf_'] = sc.get_valueOf_().replace('<', '&lt;')
    pmmlObjNew = pmmlToBack.__dict__
    dDict = pmmlObjNew['DataDictionary']
    netw = pmmlObjNew['DeepNetwork']
    netw = self.updateSectionInfo(netw)
    extensionInfoForData = [ny.Extension(value=self.hdExtDet, anytypeobjs_=[''])]
    hd = ny.Header(copyright="Copyright (c) 2018 Software AG",
                   Extension=extensionInfoForData,
                   description="Neural Network Model",
                   Timestamp=ny.Timestamp(datetime.now()))
    with open(fileName, 'w') as filetosave:
        jj = ny.PMML(version="4.3Ext",
                     DeepNetwork=netw,
                     DataDictionary=dDict,
                     Header=hd,
                     script=scriptCode)
        jj.export(filetosave, 0)
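# Usage sketch (illustrative): how writePMML might be driven. 'trainer'
# stands in for the instance that owns writePMML and already carries
# statusFile, pmmlObj and hdExtDet; the Keras model and class labels are
# assumptions for the sake of the example.
from keras.models import Sequential
from keras.layers import Dense

model = Sequential([
    Dense(10, activation='relu', input_shape=(4,)),
    Dense(3, activation='softmax'),
])
model.compile(optimizer='adam', loss='categorical_crossentropy')

# writePMML returns -1 (and records the error in the status file) if the
# Keras-to-PMML conversion fails; otherwise it writes fileName to disk.
result = trainer.writePMML(model, ['setosa', 'versicolor', 'virginica'],
                           'trainedModel.pmml', 'iris.csv')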
def getPmml(self, architecture):
    """Build a PMML object (DeepNetwork plus scripts and header) from a
    designer architecture list."""
    fName = 'classification'
    lenOfArch = len(architecture)
    mName = 'Keras Model'
    netWorkInfo = []
    scriptVal = []
    extensionInfoForData = [pml.Extension(value=[], anytypeobjs_=[''])]
    dataVal = {}
    for counta, j in enumerate(architecture):
        if counta == 0:
            someConectionId = 'na'
        else:
            someConectionId = tempConId
        if j['itemType'] in ['CODE']:
            with open(j['filePath'], 'r') as scriptFile:
                scriptCode = scriptFile.read()
            # Escape '<' so the embedded script stays valid in the XML output.
            scriptCode = scriptCode.replace('<', '&lt;')
            scrptVal = []
            useFor = j['useFor']
            extensionInfoForScript = [pml.Extension(value=scrptVal, anytypeobjs_=[''])]
            scrp = pml.script(content=scriptCode,
                              Extension=extensionInfoForScript,
                              for_=useFor)
            scriptVal.append(scrp)
            tempConId = None
        elif j['itemType'] in ['DATA']:
            try:
                dataVal['dataUrl'] = j['filePath']
                extensionInfoForData = [pml.Extension(value=dataVal, anytypeobjs_=[''])]
            except KeyError:
                pass
            tempConId = None
        elif j['itemType'] == 'FOLDING':
            # A folding item groups child layers; chain them one after another.
            for k in j['children']:
                tempdata7 = self.convertToStandardJson(k)
                tempdata7['connectionLayerId'] = someConectionId
                tempConId = tempdata7['layerId']
                netWorkInfo.append(self.addLayer(tempdata7))
                someConectionId = tempConId
        else:
            tempdata7 = self.convertToStandardJson(j)
            tempdata7['connectionLayerId'] = someConectionId
            tempConId = tempdata7['layerId']
            netWorkInfo.append(self.addLayer(tempdata7))
    kk = pml.DeepNetwork(modelName=mName,
                         functionName=fName,
                         NetworkLayer=netWorkInfo,
                         numberOfLayers=lenOfArch)
    hd = pml.Header(copyright="Copyright (c) 2018 Software AG",
                    Extension=extensionInfoForData,
                    description="Neural Network Model",
                    Timestamp=pml.Timestamp(datetime.now()))
    jj = pml.PMML(version="4.3Ext", script=scriptVal, Header=hd, DeepNetwork=[kk])
    return jj
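# Usage sketch (illustrative): the shape of the 'architecture' list that
# getPmml walks, inferred from the keys read in the loop. The item types,
# file names and the 'builder' instance are assumptions; layer items also
# carry whatever fields convertToStandardJson expects.
architecture = [
    {'itemType': 'CODE', 'filePath': 'preprocess.py', 'useFor': 'PREPROCESSING'},
    {'itemType': 'DATA', 'filePath': 'train.csv'},
    # Anything that is not CODE/DATA/FOLDING is treated as a layer; layerId
    # values are chained into connectionLayerId links, one layer to the next.
    {'itemType': 'LAYER', 'layerId': 'dense_1'},
    {'itemType': 'LAYER', 'layerId': 'dense_2'},
]

pmmlObj = builder.getPmml(architecture)
with open('designedModel.pmml', 'w') as f:
    pmmlObj.export(f, 0)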