def selectArchitecture(self, checkTemplateID):
    if checkTemplateID == 'mobilenetArch':
        pmmlObj = ny.parse(open(settingFilePath + 'MobilenetArch.pmml', 'r'), silence=True)
        templateArch = self.pmmlToJson(settingFilePath + 'MobilenetArch.pmml')
    elif checkTemplateID == 'vgg16Arch':
        pmmlObj = ny.parse(open(settingFilePath + 'vGG16Arch.pmml', 'r'), silence=True)
        templateArch = self.pmmlToJson(settingFilePath + 'vGG16Arch.pmml')
    elif checkTemplateID == 'vgg19Arch':
        pmmlObj = ny.parse(open(settingFilePath + 'vGG19Arch.pmml', 'r'), silence=True)
        templateArch = self.pmmlToJson(settingFilePath + 'vGG19Arch.pmml')
    return templateArch, pmmlObj
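# Usage sketch (hedged): `archOps` stands in for an instance of the class that owns
# selectArchitecture, and `settingFilePath` is assumed to point at the folder holding the
# bundled template PMML files; both names are illustrative and not defined in this file.
#
#   templateArch, pmmlObj = archOps.selectArchitecture('vgg16Arch')
#   print(len(templateArch))   # list of layer/section dicts for the front end
#   print(type(pmmlObj))       # parsed nyoka PMML object for the same template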
def __init__(self, pmml):
    self.nyoka_pmml = ny.parse(pmml, True)
    self.image_input = None
    self.layer_input = None
    self.model = None
    self.layers_outputs = {}
    self.model = self._build_model()
def test_keras_02(self):
    boston = load_boston()
    data = pd.DataFrame(boston.data)
    features = list(boston.feature_names)
    target = 'PRICE'
    data.columns = features
    data['PRICE'] = boston.target
    x_train, x_test, y_train, y_test = train_test_split(data[features], data[target],
                                                        test_size=0.20, random_state=42)
    model = Sequential()
    model.add(Dense(13, input_dim=13, kernel_initializer='normal', activation='relu'))
    model.add(Dense(23))
    model.add(Dense(1, kernel_initializer='normal'))
    model.compile(loss='mean_squared_error', optimizer='adam')
    model.fit(x_train, y_train, epochs=1000, verbose=0)
    pmmlObj = KerasToPmml(model)
    pmmlObj.export(open('sequentialModel.pmml', 'w'), 0)
    reconPmmlObj = ny.parse('sequentialModel.pmml', True)
    self.assertEqual(os.path.isfile("sequentialModel.pmml"), True)
    self.assertEqual(len(model.layers), len(reconPmmlObj.DeepNetwork[0].NetworkLayer) - 1)
def test_construction_vgg(self):
    model = applications.VGG16(weights="imagenet", include_top=False, input_shape=(224, 224, 3))
    x = model.output
    x = layers.Flatten()(x)
    x = layers.Dense(1024, activation="relu")(x)
    x = layers.Dropout(0.5)(x)
    x = layers.Dense(1024, activation="relu")(x)
    predictions = layers.Dense(2, activation="softmax")(x)
    # `inputs=`/`outputs=` are the supported keyword names; the legacy `input=`/`output=`
    # spelling only works through Keras' deprecation shim.
    model_final = models.Model(inputs=model.input, outputs=predictions)
    model_final.compile(loss="binary_crossentropy",
                        optimizer=optimizers.SGD(lr=0.0001, momentum=0.9),
                        metrics=["accuracy"])
    pmmlObj = KerasToPmml(model_final, dataSet='image')
    pmmlObj.export(open('vgg.pmml', 'w'), 0)
    reconPmmlObj = ny.parse('vgg.pmml', True)
    self.assertEqual(os.path.isfile("vgg.pmml"), True)
    self.assertEqual(len(model_final.layers), len(reconPmmlObj.DeepNetwork[0].NetworkLayer))
def test_keras_01(self):
    model = applications.MobileNet(weights='imagenet', include_top=False, input_shape=(224, 224, 3))
    activType = 'sigmoid'
    x = model.output
    x = Flatten()(x)
    x = Dense(1024, activation="relu")(x)
    predictions = Dense(2, activation=activType)(x)
    model_final = Model(inputs=model.input, outputs=predictions, name='predictions')
    cnn_pmml = KerasToPmml(model_final, dataSet='image', predictedClasses=['cats', 'dogs'])
    cnn_pmml.export(open('2classMBNet.pmml', "w"), 0)
    reconPmmlObj = ny.parse('2classMBNet.pmml', True)
    self.assertEqual(os.path.isfile("2classMBNet.pmml"), True)
    self.assertEqual(len(model_final.layers), len(reconPmmlObj.DeepNetwork[0].NetworkLayer))
def pmmlToJson(self, filePath):
    # These names are needed in both branches below, so bind them up front
    # (ast and the random-id helpers were otherwise used before their imports ran).
    import ast
    import copy
    import pathlib
    from random import choice
    from string import ascii_uppercase

    pmmlObj = ny.parse(filePath, silence=True)
    pmmlDictObj = pmmlObj.__dict__
    # print ('0'*100, pmmlObj.get_type())

    if pmmlObj.get_type() == 'multi':
        # Workflow PMML: rebuild the section/data/code/model tree expected by the UI.
        print('came to Workflow')
        # print('*'*100)
        # print(PMMLMODELSTORAGE)
        # print('*'*100)
        from trainModel.mergeTrainingV2 import TrainingViewModels
        from trainModel.mergeTrainingV2 import NewModelOperations
        pmmlFileObj = pathlib.Path(filePath)
        pmmlFileForKey = pmmlFileObj.name.replace(pmmlFileObj.suffix, '')
        NewModelOperations().loadExecutionModel(filePath)
        modelInformation = PMMLMODELSTORAGE[pmmlFileForKey]
        # print ('PMMLMODELSTORAGE after >>>>>>>>>>> ', PMMLMODELSTORAGE)
        # print (modelInformation)
        toexp = TrainingViewModels().restructureModelInforForExportDict(modelInformation)
        # print ('toexp'*20)
        # print ('toexportDictN >>>>>>>> ', toexp)

        tempSec = {
            "name": "Section", "layerId": "Section", "children": [], "itemType": "FOLDING",
            "icon": "mdi mdi-group", "class": "wide", "modelType": "Workflow", "id": "id",
            "sectionId": "modName", "layerIndex": None, "connectionLayerId": None
        }
        tempData = {
            "name": "Data", "icon": "mdi mdi-database-plus", "itemType": "DATA", "layerId": None,
            "trainable": False, "modelType": "Workflow", "id": None, "layerIndex": None,
            "connectionLayerId": None, "url": None, "filePath": None
        }
        tempCode = {
            "name": "Code", "icon": "mdi mdi-code-braces", "itemType": "CODE", "layerId": None,
            "trainable": False, "modelType": "Workflow", "id": "K2PVI4HZ3NBGF", "layerIndex": None,
            "connectionLayerId": None, "url": None, "filePath": None, "taskType": None,
            "scriptOutput": None, "scriptPurpose": None
        }
        tempModel = {
            "name": "Model", "icon": "mdi mdi-xml", "itemType": "MODEL", "layerId": None,
            "trainable": False, "modelType": "Workflow", "id": None, "layerIndex": None,
            "connectionLayerId": None, "url": None, "filePath": None, "taskType": None
        }
        # Example of the `toexp` structure this loop consumes:
        # toexp = {'K2PSSUKYFRSMF': {'hyperparameters': None,
        #     'data': 'C:/Users/swsh/Desktop/ZMODGit/ZMOD/ZMOD/Data/newData2',
        #     'preProcessingScript': {'scripts': ['def addVal(x):\n return x\n'], 'scriptpurpose': ['trainAndscore'],
        #                             'scriptOutput': ['DATA'], 'scriptPath': ['C:/Users/swsh/Desktop/ZMODGit/ZMOD/ZMOD/Code/scriptToTest.py']},
        #     'modelObj': None, 'pipelineObj': None,
        #     'featuresUsed': ['cylinders', 'displacement', 'horsepower', 'weight', 'acceleration'],
        #     'targetName': 'mpg',
        #     'postProcessingScript': {'scripts': [], 'scriptpurpose': [], 'scriptOutput': [], 'scriptPath': []},
        #     'taskType': 'trainAndscore',
        #     'modelPath': 'C:\\Users\\swsh\\Desktop\\ZMODGit\\ZMOD\\ZMOD\\Models\\autoML2.pmml'}}

        workflowArch = []
        for modTemp in list(toexp.keys()):
            temSecCop = copy.deepcopy(tempSec)
            temSecCop['sectionId'] = modTemp
            temSecCop["layerId"] = modTemp
            if toexp[modTemp]['data'] != None:
                dataInfo = copy.deepcopy(tempData)
                fileName = pathlib.Path(toexp[modTemp]['data']).name
                dataInfo['layerId'] = fileName
                dataInfo['url'] = '/Data/' + fileName
                dataInfo['filePath'] = toexp[modTemp]['data']
                temSecCop['children'].append(dataInfo)
            for numSc, sC in enumerate(toexp[modTemp]['preProcessingScript']['scriptPath']):
                codeInfo = copy.deepcopy(tempCode)
                fileName = pathlib.Path(toexp[modTemp]['preProcessingScript']['scriptPath'][numSc]).name
                codeInfo['layerId'] = fileName
                codeInfo['url'] = '/Code/' + fileName
                codeInfo['filePath'] = toexp[modTemp]['preProcessingScript']['scriptPath'][numSc]
                codeInfo['taskType'] = 'PREPROCESSING'
                codeInfo['scriptOutput'] = toexp[modTemp]['preProcessingScript']['scriptOutput'][numSc]
                codeInfo['scriptPurpose'] = toexp[modTemp]['preProcessingScript']['scriptpurpose'][numSc]
                temSecCop['children'].append(codeInfo)
            modtempC = copy.deepcopy(tempModel)
            fileName = pathlib.Path(toexp[modTemp]['modelPath']).name
            modtempC['layerId'] = fileName
            modtempC['url'] = '/Model/' + fileName
            modtempC['filePath'] = toexp[modTemp]['modelPath']
            modtempC['taskType'] = toexp[modTemp]['taskType']
            temSecCop['children'].append(modtempC)
            for numSc, sC in enumerate(toexp[modTemp]['postProcessingScript']['scriptPath']):
                codeInfo = copy.deepcopy(tempCode)
                fileName = pathlib.Path(toexp[modTemp]['postProcessingScript']['scriptPath'][numSc]).name
                codeInfo['layerId'] = fileName
                codeInfo['url'] = '/Code/' + fileName
                codeInfo['filePath'] = toexp[modTemp]['postProcessingScript']['scriptPath'][numSc]
                codeInfo['taskType'] = 'POSTPROCESSING'
                codeInfo['scriptOutput'] = toexp[modTemp]['postProcessingScript']['scriptOutput'][numSc]
                codeInfo['scriptPurpose'] = toexp[modTemp]['postProcessingScript']['scriptpurpose'][numSc]
                temSecCop['children'].append(codeInfo)
            workflowArch.append(temSecCop)

        # Assign layer indices and random 12-character ids.
        for num, i in enumerate(workflowArch):
            if i['itemType'] == 'FOLDING':
                i['layerIndex'] = num
                i['id'] = ''.join(choice(ascii_uppercase) for _ in range(12))
                for num2, j in enumerate(i['children']):
                    j['layerIndex'] = num2
                    j['id'] = ''.join(choice(ascii_uppercase) for _ in range(12))
            else:
                i['layerIndex'] = num
                i['id'] = ''.join(choice(ascii_uppercase) for _ in range(12))
        # print ('l'*200)
        # print ('workflowArch', workflowArch)
        return workflowArch

    else:
        # Single DeepNetwork PMML: translate every NetworkLayer into the UI layer description.
        overAll = []
        deepObject = pmmlDictObj['DeepNetwork'][0]
        listOfNetworkLayer = deepObject.NetworkLayer
        for lay in listOfNetworkLayer:
            networkDict = lay.__dict__
            tempDict = {}
            tempDict['layerParam'] = {}
            tempDict['netParam'] = {}
            for j in networkDict:
                if networkDict[j] is not None:
                    if j not in ['original_tagname_', 'LayerWeights', 'LayerParameters', 'Extension', 'LayerBias']:
                        tempDict['netParam'][j] = networkDict[j]
            layerDict = networkDict['LayerParameters'].__dict__
            for kk in layerDict:
                if layerDict[kk] is not None:
                    if kk not in ['original_tagname_', 'Extension']:
                        try:
                            evalVal = list(ast.literal_eval(layerDict[kk]))
                        except:
                            evalVal = layerDict[kk]
                        tempDict['layerParam'][kk] = evalVal
            tempDict['layerParam']['trainable'] = False if layerDict['trainable'] == False else True
            if len(networkDict['Extension']) > 0:
                ttt = networkDict['Extension'][0]
                sectionVal = ttt.get_value()
                tempDict['sectionId'] = ast.literal_eval(sectionVal)['sectionId']
            else:
                tempDict['sectionId'] = None
            overAll.append(tempDict)

        allLayers = MEMORY_OF_LAYERS['layerinfo'][0]['layers']
        listOFLayersName = [j['name'] for j in MEMORY_OF_LAYERS['layerinfo'][0]['layers']]
        architecture = []
        for tempLay in overAll:
            tempSpace = copy.deepcopy(allLayers[listOFLayersName.index(tempLay['netParam']['layerType'])])
            layerPARA = tempLay['layerParam']
            netWorkPARA = tempLay['netParam']
            for j in netWorkPARA:
                try:
                    tempSpace[j] = netWorkPARA[j]
                except:
                    pass
            for k in layerPARA:
                for k2 in tempSpace['properties']:
                    if k2['id'] == k:
                        k2['value'] = layerPARA[k]
            try:
                tempSpace['sectionId'] = tempLay['sectionId']
            except:
                pass
            tempSpace['trainable'] = layerPARA['trainable']
            architecture.append(tempSpace)

        forLoopSection = [j['sectionId'] for j in architecture]
        # print ('forLoopSection $$$$$$$$$$$$$$$', forLoopSection)
        tempSection = {
            'children': [], 'class': 'wide', 'icon': 'mdi mdi-group', 'id': '',
            'itemType': 'FOLDING', 'layerId': 'Section', 'layerIndex': '', 'name': 'Section',
            'sectionId': '', "sectionCollapse": True
        }
        # Group consecutive layers that share a sectionId into folding sections.
        newarchitecture = []
        tempSectionA = copy.deepcopy(tempSection)
        for num, secInfo in enumerate(forLoopSection):
            if secInfo is None:
                newarchitecture.append(architecture[num])
            else:
                if (num + 1 < len(forLoopSection)) and (forLoopSection[num] == forLoopSection[num + 1]):
                    tempSectionA['children'].append(architecture[num])
                else:
                    tempSectionA['children'].append(architecture[num])
                    tempSectionA['sectionId'] = secInfo
                    tempSectionA['layerId'] = 'Section_' + str(num)
                    tempSectionA['name'] = 'Section_' + str(num)
                    newarchitecture.append(tempSectionA)
                    tempSectionA = copy.deepcopy(tempSection)

        hd = pmmlDictObj['Header']
        scrptVal = pmmlDictObj['script']
        DataVal = pmmlDictObj['Data']
        try:
            try:
                dataUrl = DataVal[0].filePath
            except:
                dataUrl = 'Some issue'
            print('$$$$$$$$$$$$$$$$$$$$$$', dataUrl)
            if dataUrl != 'Some issue':
                fObj = pathlib.Path(dataUrl)
                dataCon = {
                    'icon': 'mdi mdi-database-plus', 'id': 'NNN', 'itemType': 'DATA',
                    'layerId': fObj.name, 'layerIndex': 0, 'name': 'Data', 'url': dataUrl
                }
                newarchitecture.insert(0, dataCon)
                for counT, sc in enumerate(scrptVal):
                    scriptPurpose = sc.scriptPurpose
                    modelVal = sc.for_
                    classVal = sc.class_
                    filePathUrl = sc.filePath
                    fObjScrpt = pathlib.Path(filePathUrl)
                    scriptCon = {
                        "name": "Code", "icon": "mdi mdi-code-braces", "itemType": "CODE",
                        "modelFor": modelVal, "layerId": fObjScrpt.name,
                        "scriptPurpose": scriptPurpose, 'url': filePathUrl,
                        "layerIndex": "NA", 'useFor': classVal
                    }
                    newarchitecture.insert(counT + 1, scriptCon)
            else:
                pass
        except Exception as e:
            for counT, sc in enumerate(scrptVal):
                scriptUrl = sc.class_
                fObjScrpt = pathlib.Path(scriptUrl)
                scriptCon = {
                    "name": "Code", "icon": "mdi mdi-code-braces", "itemType": "CODE",
                    "layerId": fObjScrpt.name, 'url': scriptUrl, "layerIndex": "NA"
                }
                newarchitecture.insert(counT, scriptCon)
            print(e, 'some error occurred')

        for num, i in enumerate(newarchitecture):
            if i['itemType'] == 'FOLDING':
                i['layerIndex'] = num
                i['id'] = ''.join(choice(ascii_uppercase) for _ in range(12))
                for num2, j in enumerate(i['children']):
                    j['layerIndex'] = num2
                    j['id'] = ''.join(choice(ascii_uppercase) for _ in range(12))
            else:
                i['layerIndex'] = num
                i['id'] = ''.join(choice(ascii_uppercase) for _ in range(12))
        return newarchitecture
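# Usage sketch (hedged): `archOps` again stands in for an instance of the owning class;
# MEMORY_OF_LAYERS and PMMLMODELSTORAGE must already be populated by the surrounding app,
# so this is illustrative rather than a standalone script.
#
#   uiArchitecture = archOps.pmmlToJson(settingFilePath + 'MobilenetArch.pmml')
#   for item in uiArchitecture:
#       print(item.get('name'), item.get('itemType'), item.get('layerIndex'))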
def loadExecutionModel(self, pmmlFile):
    pmmlFileObj = pathlib.Path(pmmlFile)
    pmmlFileForKey = pmmlFileObj.name.replace(pmmlFileObj.suffix, '')
    from nyoka import PMML43Ext as ny
    pmmlObj = ny.parse(pmmlFile, silence=True)

    # Collect every model element present in the PMML, tagged as NN or scikit-learn.
    modelObj = []
    for inMod in modelObjectToCheck:
        if len(pmmlObj.__dict__[inMod]) > 0:
            modPMMLObj = pmmlObj.__dict__[inMod]
            if inMod == 'DeepNetwork':
                for ininMod in modPMMLObj:
                    colInfo = self.getTargetAndColumnsName(ininMod)
                    modelObj.append({'modelArchType': 'NNModel', 'pmmlModelObject': ininMod,
                                     'recoModelObj': None, 'listOFColumns': None,
                                     'targetCol': colInfo[1]})
            else:
                for ininMod in modPMMLObj:
                    colInfo = self.getTargetAndColumnsName(ininMod)
                    # recoModelObj = generateModelfromPMML(ininMod)
                    modelObj.append({'modelArchType': 'SKLModel', 'pmmlModelObject': ininMod,
                                     'recoModelObj': None, 'listOFColumns': colInfo[0],
                                     'targetCol': colInfo[1]})

    # Index the models by task type ('train' / 'score'); 'trainAndscore' models go into both.
    tempDict = {}
    tempDict['train'] = {}
    tempDict['score'] = {}
    for singMod in modelObj:
        modName = singMod['pmmlModelObject'].modelName
        if singMod['pmmlModelObject'].taskType == 'trainAndscore':
            tempDict['train'][modName] = {}
            tempDict['train'][modName]['modelObj'] = singMod
            tempDict['train'][modName]['modelObj']['pmmlDdicObj'] = pmmlObj.DataDictionary
            tempDict['train'][modName]['modelObj']['pmmlNyokaObj'] = self.nyObjOfModel(pmmlObj, singMod)
            tempDict['score'][modName] = {}
            tempDict['score'][modName]['modelObj'] = singMod
            tempDict['score'][modName]['modelObj']['pmmlDdicObj'] = pmmlObj.DataDictionary
            tempDict['score'][modName]['modelObj']['pmmlNyokaObj'] = self.nyObjOfModel(pmmlObj, singMod)
        else:
            taskType = singMod['pmmlModelObject'].taskType
            tempDict[taskType][modName] = {}
            tempDict[taskType][modName]['modelObj'] = singMod
            tempDict[taskType][modName]['modelObj']['pmmlDdicObj'] = pmmlObj.DataDictionary
            tempDict[taskType][modName]['modelObj']['pmmlNyokaObj'] = self.nyObjOfModel(pmmlObj, singMod)

    # Rebuild the dict with model names sorted within each task type.
    tempDict2 = {}
    for taType in tempDict:
        tempTa = list(tempDict[taType].keys())
        tempTa.sort()
        for taTTemp in tempTa:
            if taType not in tempDict2:
                tempDict2[taType] = {}
            tempDict2[taType][taTTemp] = tempDict[taType][taTTemp]
    tempDict = tempDict2.copy()

    # Attach pre/post-processing scripts to the models they belong to.
    for sc1 in pmmlObj.script:
        if sc1.scriptPurpose == 'trainAndscore':
            tempDict['train'][sc1.for_][sc1.class_] = {}
            tempDict['train'][sc1.for_][sc1.class_ + '_code'] = self.getCode(sc1.valueOf_)
            tempDict['train'][sc1.for_][sc1.class_] = self.getCodeObjectToProcess(self.getCode(sc1.valueOf_))
            tempDict['score'][sc1.for_][sc1.class_] = {}
            tempDict['score'][sc1.for_][sc1.class_ + '_code'] = self.getCode(sc1.valueOf_)
            tempDict['score'][sc1.for_][sc1.class_] = self.getCodeObjectToProcess(self.getCode(sc1.valueOf_))
        else:
            tempDict[sc1.scriptPurpose][sc1.for_][sc1.class_] = {}
            tempDict[sc1.scriptPurpose][sc1.for_][sc1.class_ + '_code'] = self.getCode(sc1.valueOf_)
            tempDict[sc1.scriptPurpose][sc1.for_][sc1.class_] = self.getCodeObjectToProcess(self.getCode(sc1.valueOf_))

    taskTypesName = list(tempDict.keys())
    listOfModelNames = set([k for j in tempDict for k in tempDict[j]])
    hyperParDict = {}
    for extObj in pmmlObj.MiningBuildTask.Extension:
        if extObj.name == 'hyperparameters':
            hyperParDict[extObj.for_] = ast.literal_eval(extObj.value)
    try:
        miningBuildTaskList = pmmlObj.MiningBuildTask.__dict__['Extension']
        for bTask in miningBuildTaskList:
            if bTask.__dict__['for_'] in listOfModelNames:
                for tT in taskTypesName:
                    for modInd in listOfModelNames:
                        tempDict[tT][modInd]['modelObj']['miningExtension'] = bTask
    except:
        pass

    # Reconstruct each model object (Keras graph/session for NNModel, sklearn pipeline for SKLModel).
    modelLoadStatus = []
    for taskT in tempDict:
        print(taskT)
        for mO in tempDict[taskT]:
            if tempDict[taskT][mO]['modelObj']['modelArchType'] == "NNModel":
                modelProp = tempDict[taskT][mO]['modelObj']['pmmlNyokaObj']
                model_graph = Graph()
                with model_graph.as_default():
                    tf_session = Session()
                    with tf_session.as_default():
                        print('step 5')
                        from nyoka.reconstruct.pmml_to_pipeline_model import generate_skl_model
                        print('step 5.1')
                        model_net = generate_skl_model(modelProp)
                        print('step 5.2')
                        model = model_net.model
                        model_graph = tf.get_default_graph()
                print('step 6')
                inputShapevals = [inpuShape.value for inpuShape in list(model.input.shape)]
                if str(model_net) != 'None':
                    tempDict[taskT][mO]['modelObj']['recoModelObj'] = model_net
                    tempDict[taskT][mO]['modelObj']['model_graph'] = model_graph
                    tempDict[taskT][mO]['modelObj']['tf_session'] = tf_session
                    tempDict[taskT][mO]['modelObj']['inputShape'] = inputShapevals
                    modelLoadStatus.append(1)
                else:
                    modelLoadStatus.append(0)
                try:
                    tempDict[taskT][mO]['modelObj']['hyperparameters'] = hyperParDict[mO]
                except:
                    tempDict[taskT][mO]['modelObj']['hyperparameters'] = None
            elif tempDict[taskT][mO]['modelObj']['modelArchType'] == "SKLModel":
                modelProp = tempDict[taskT][mO]['modelObj']['pmmlNyokaObj']
                from nyoka.reconstruct.pmml_to_pipeline_model import generate_skl_model
                recoModelObj = generate_skl_model(modelProp)
                if recoModelObj != None:
                    tempDict[taskT][mO]['modelObj']['recoModelObj'] = recoModelObj
                    modelLoadStatus.append(1)
                else:
                    modelLoadStatus.append(0)
                try:
                    tempDict[taskT][mO]['modelObj']['hyperparameters'] = hyperParDict[mO]
                except:
                    tempDict[taskT][mO]['modelObj']['hyperparameters'] = None

    # print('*'*100)
    # print(tempDict['score']['model2'])
    # print('*'*100)
    PMMLMODELSTORAGE[pmmlFileForKey] = tempDict
    if 0 in modelLoadStatus:
        messageToWorld = "Model load failed, please connect with admin"
    else:
        messageToWorld = "Model Loaded Successfully"
    resultResp = {'message': messageToWorld, 'keytoModel': pmmlFileForKey}
    return JsonResponse(resultResp, status=200)
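# Usage sketch (hedged): loadExecutionModel lives on NewModelOperations (see the import in
# pmmlToJson above); running it outside the Django app still needs PMMLMODELSTORAGE and the
# nyoka/TensorFlow stack available, so treat this as illustrative only.
#
#   from trainModel.mergeTrainingV2 import NewModelOperations
#   resp = NewModelOperations().loadExecutionModel('/path/to/workflow.pmml')
#   # resp is a JsonResponse whose payload carries 'message' and 'keytoModel'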
def loadPMMLmodel(self, filepath, idforData=None):

    def readScriptFromPMML(scrptCode, useForVal):
        # Return the de-indented source of the script whose `for` attribute matches useForVal.
        code = None
        for num, sc in enumerate(scrptCode):
            useFor = sc.for_
            # print ('>>>>>>>UUUUUU>>>>>>>', useFor, useForVal)
            if useFor == useForVal:
                scripCode = sc.get_valueOf_()
                # print (scripCode)
                code = scripCode.lstrip('\n')
                lines = []
                leading_spaces = len(code) - len(code.lstrip(' '))
                for line in code.split('\n'):
                    lines.append(line[leading_spaces:])
                code = '\n'.join(lines)
        return code

    global PMMLMODELSTORAGE
    try:
        print('step 1', filepath)
        pmmlName = os.path.basename(filepath).split('.')[0]
        nyoka_pmml_obj = ny.parse(filepath, True)
        pmmlObj = nyoka_pmml_obj.__dict__
        try:
            checkMRCNN = nyoka_pmml_obj.DeepNetwork[0].Extension[0].name == 'config'
        except:
            checkMRCNN = False

        # MaskRCNN model
        if (nyoka_pmml_obj.DeepNetwork) and (checkMRCNN == True):
            from nyoka.mrcnn import pmml_to_maskrcnn
            from nyoka.mrcnn import model as modellib
            predClasses = self.getPredClasses(nyoka_pmml_obj)
            modelFolder = './logs/MaskRCNNWei_' + ''.join(choice(ascii_uppercase) for _ in range(12)) + '/'
            self.checkCreatePath(modelFolder)
            model_graph = Graph()
            with model_graph.as_default():
                tf_session = Session()
                with tf_session.as_default():
                    modelRecon = pmml_to_maskrcnn.GenerateMaskRcnnModel(nyoka_pmml_obj)
                    weight_file = modelFolder + '/dumpedWeights.h5'
                    modelRecon.model.keras_model.save_weights(weight_file)
                    MODEL_DIR = modelFolder
                    model = modellib.MaskRCNN(mode="inference", model_dir=MODEL_DIR, config=modelRecon.config)
                    model.load_weights(weight_file, by_name=True)
                    model_graph = tf.get_default_graph()
            PMMLMODELSTORAGE[pmmlName] = {}
            PMMLMODELSTORAGE[pmmlName]['model'] = model
            PMMLMODELSTORAGE[pmmlName]['modelType'] = 'MRCNN'
            PMMLMODELSTORAGE[pmmlName]['model_graph'] = model_graph
            PMMLMODELSTORAGE[pmmlName]['predClasses'] = list(predClasses)
            PMMLMODELSTORAGE[pmmlName]['tf_session'] = tf_session
            modelType = 'MRCNN'

        # DeepNetwork model
        elif nyoka_pmml_obj.DeepNetwork:
            hdInfo = pmmlObj['Header']
            try:
                hdExtDet = ast.literal_eval(hdInfo.Extension[0].get_value())
            except:
                pass
            print('step 2')
            predClasses = self.getPredClasses(nyoka_pmml_obj)
            print('step 3')
            newNet = nyoka_pmml_obj.DeepNetwork[0]
            print('step 4')
            scrptCode = nyoka_pmml_obj.script
            # print ('Step 4.1 ', scrptCode)
            try:
                preCode = readScriptFromPMML(scrptCode, 'TEST')
            except:
                preCode = None
            try:
                postCode = readScriptFromPMML(scrptCode, 'POSTPROCESSING')
            except:
                postCode = None
            model_graph = Graph()
            with model_graph.as_default():
                tf_session = Session()
                with tf_session.as_default():
                    print('step 5')
                    from nyoka.keras.pmml_to_keras_model import GenerateKerasModel
                    print('step 5.1')
                    model_net = GenerateKerasModel(nyoka_pmml_obj)
                    print('step 5.2')
                    model = model_net.model
                    model_graph = tf.get_default_graph()
            print('step 6')
            inputShapevals = [inpuShape.value for inpuShape in list(model.input.shape)]
            PMMLMODELSTORAGE[pmmlName] = {}
            PMMLMODELSTORAGE[pmmlName]['model'] = model
            PMMLMODELSTORAGE[pmmlName]['predClasses'] = predClasses
            PMMLMODELSTORAGE[pmmlName]['preProcessScript'] = preCode
            PMMLMODELSTORAGE[pmmlName]['postProcessScript'] = postCode
            try:
                PMMLMODELSTORAGE[pmmlName]['scriptOutput'] = hdExtDet['scriptOutput']
            except:
                PMMLMODELSTORAGE[pmmlName]['scriptOutput'] = ''
            print('step 7')
            try:
                PMMLMODELSTORAGE[pmmlName]['inputShape'] = inputShapevals
            except:
                PMMLMODELSTORAGE[pmmlName]['inputShape'] = 'CheckSomeissue'
            PMMLMODELSTORAGE[pmmlName]['status'] = 'loaded'
            # print ('step 8')
            PMMLMODELSTORAGE[pmmlName]['model_graph'] = model_graph
            PMMLMODELSTORAGE[pmmlName]['tf_session'] = tf_session
            PMMLMODELSTORAGE[pmmlName]['modelType'] = 'kerasM'
            modelType = 'kerasM'
            # print ('###################', PMMLMODELSTORAGE)

        # Sklearn model
        else:
            print('Next Step 2 >>>>>>>>>>>>')
            from nyoka.reconstruct.pmml_to_pipeline_model import generate_skl_model
            print('Next Step 3 >>>>>>>>>>>>')
            sklModelPipeline = generate_skl_model(filepath)
            print('Next Step 4 >>>>>>>>>>>>')
            # if hasattr(sklModelPipeline.steps[-1][-1], 'classes_'):
            #     print ('sklModelPipeline.steps[-1][-1] >>> ', sklModelPipeline.steps[-1][-1])
            #     predClasses = sklModelPipeline.steps[-1][-1].classes_
            # else:
            try:
                predClasses = self.getPredClasses(nyoka_pmml_obj)
            except:
                predClasses = []
            print('Next Step 5 >>>>>>>>>>>>')
            targetVar = self.getTargetVariable(nyoka_pmml_obj)
            PMMLMODELSTORAGE[pmmlName] = {}
            PMMLMODELSTORAGE[pmmlName]['model'] = sklModelPipeline
            PMMLMODELSTORAGE[pmmlName]['predClasses'] = list(predClasses)
            PMMLMODELSTORAGE[pmmlName]['targetVar'] = targetVar
            PMMLMODELSTORAGE[pmmlName]['modelType'] = 'sklearnM'
            modelType = 'sklearnM'
        return (pmmlName, 'Success', modelType)
    except Exception as e:
        print(str(e))
        import traceback
        print(str(traceback.format_exc()))
        return (pmmlName, 'Failure', None)
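# Usage sketch (hedged): `modelOps` stands in for an instance of the class that owns
# loadPMMLmodel; the returned tuple carries the storage key, a Success/Failure flag and the
# detected model type ('MRCNN', 'kerasM' or 'sklearnM').
#
#   pmmlName, status, modelType = modelOps.loadPMMLmodel('/path/to/model.pmml')
#   if status == 'Success':
#       loaded = PMMLMODELSTORAGE[pmmlName]   # model object plus graph/session bookkeeping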