def __init__(self, pmml):
    """Parse the given PMML source and build the Keras model from it.

    Args:
        pmml: PMML source accepted by ny.parse (path or file object --
            TODO confirm which the callers pass).
    """
    # Second positional argument is presumably the silence flag -- confirm
    # against ny.parse's signature.
    self.nyoka_pmml = ny.parse(pmml,True)
    self.image_input = None
    self.layer_input = None
    # Initialised to None first so the attribute exists while _build_model
    # runs; overwritten with the built model below.
    self.model = None
    self.layers_outputs = {}
    self.model = self._build_model()
def selectArchitecture(self, checkTemplateID):
    """Load a bundled template architecture by its template id.

    Args:
        checkTemplateID: one of 'mobilenetArch', 'vgg16Arch', 'vgg19Arch'.

    Returns:
        Tuple (templateArch, pmmlObj): the JSON form of the template and
        the parsed PMML object.

    Raises:
        ValueError: if checkTemplateID is not a known template id
            (previously this crashed with UnboundLocalError).
    """
    # Map template ids to file names instead of three duplicated branches.
    templateFiles = {
        'mobilenetArch': 'MobilenetArch.pmml',
        'vgg16Arch': 'vGG16Arch.pmml',
        'vgg19Arch': 'vGG19Arch.pmml',
    }
    try:
        pmmlPath = settingFilePath + templateFiles[checkTemplateID]
    except KeyError:
        raise ValueError('Unknown template id: {}'.format(checkTemplateID))
    # Context manager closes the file handle (was leaked before).
    with open(pmmlPath, 'r') as pmmlFile:
        pmmlObj = ny.parse(pmmlFile, silence=True)
    templateArch = self.pmmlToJson(pmmlPath)
    return templateArch, pmmlObj
def post(self,requests):
    """Django view: compile-test the NN model described by the posted PMML.

    Reads 'filePath' from the POST data, parses the PMML and asks the
    trainer to compile (but not train) the model.

    Returns:
        JsonResponse: the trainer's error dict on failure, otherwise a
        success status with HTTP 200.
    """
    filePath = requests.POST.get('filePath')
    from nyokaBase import PMML43Ext as ny
    # Close the PMML file deterministically (was left to the GC).
    with open(filePath,'r') as pmmlFile:
        pmmlObj = ny.parse(pmmlFile,silence=True)
    nn = NeuralNetworkModelTrainer()
    nn.pmmlfileObj = pmmlObj
    # compileTestOnly=True: only verify the model compiles, no training.
    returnVal = nn.generateAndCompileModel('mean_squared_error','adam',0.1,['accuracy','f1'],compileTestOnly=True)
    # A dict return value carries an error payload; anything else is success.
    if isinstance(returnVal, dict):
        return JsonResponse(returnVal)
    else:
        return JsonResponse({'status':'Model Compiled Successfully'},status=200)
def test_8_compileModel(self):
    """Compiling a PMML exported from sklearn must fail with a structured error."""
    logging.info("Test Case : Compile a model.(2)")
    filePath = 'testUseCase/supportdata/from_sklearn.pmml'
    from nyokaBase import PMML43Ext as ny
    # with-statement closes the test fixture file (was leaked).
    with open(filePath, 'r') as pmmlFile:
        pmmlObj = ny.parse(pmmlFile, silence=True)
    from trainModel.mergeTrainingNN import NeuralNetworkModelTrainer
    nn = NeuralNetworkModelTrainer()
    nn.pmmlfileObj = pmmlObj
    returnVal = nn.generateAndCompileModel('mean_squared_error', 'adam', 0.1, ['accuracy', 'f1'], compileTestOnly=True)
    # Failure is reported as a dict with status and error details.
    # assertIn gives a clearer failure message than assertEqual('x' in d, True).
    self.assertIn('status', returnVal)
    self.assertIn('errorMessage', returnVal)
    self.assertIn('errorTraceback', returnVal)
    self.assertEqual(returnVal['status'], 'Model Compilation Failed')
def test_7_compileModel(self):
    """Compiling a valid NN PMML must return a GenerateKerasModel instance."""
    logging.info("Test Case : Compile a model.(1)")
    filePath = 'testUseCase/supportdata/irisNN.pmml'
    from nyokaBase import PMML43Ext as ny
    # with-statement closes the test fixture file (was leaked).
    with open(filePath, 'r') as pmmlFile:
        pmmlObj = ny.parse(pmmlFile, silence=True)
    from trainModel.mergeTrainingNN import NeuralNetworkModelTrainer
    nn = NeuralNetworkModelTrainer()
    nn.pmmlfileObj = pmmlObj
    returnVal = nn.generateAndCompileModel('mean_squared_error', 'adam', 0.1, ['accuracy', 'f1'], compileTestOnly=True)
    # Success path returns the builder object itself, carrying the parsed
    # PMML and the compiled Keras model.
    self.assertIn('nyoka_pmml', returnVal.__dict__)
    self.assertIn('model', returnVal.__dict__)
    self.assertEqual(returnVal.nyoka_pmml.__class__.__name__, 'PMML')
    self.assertEqual(returnVal.__class__.__name__, 'GenerateKerasModel')
def getDetailsOfPMML(filepath):
    """Parse a PMML file and collect display info for every populated section.

    Args:
        filepath: path or file object accepted by pml.parse.

    Returns:
        JsonResponse with the restructured model information.
    """
    pmmlObj = pml.parse(filepath, silence=True)
    tempObj = pmmlObj.__dict__
    # Keep only attributes that actually carry data (skip None and empty
    # lists).  isinstance against plain `list` replaces the deprecated
    # typing.List runtime check.
    listOfObjectstogetData = [
        attr for attr, val in tempObj.items()
        if val is not None and not (isinstance(val, list) and len(val) == 0)
    ]
    # Dispatch table: PMML section name -> info extractor over the parsed
    # object (replaces a long elif chain).
    extractors = {
        'Header': nyokaUtilities.getHeaderInfo,
        'DataDictionary': nyokaUtilities.getDataFields,
        'NearestNeighborModel': nyokaUtilities.getInfoNearestNeighborModel,
        'DeepNetwork': nyokaUtilities.getInfoOfDeepNetwork,
        'MiningModel': nyokaUtilities.getInfoMiningModel,
        'SupportVectorMachineModel': nyokaUtilities.getInfoSupportVectorMachineModel,
        'TreeModel': nyokaUtilities.getInfoTreeModel,
        'RegressionModel': nyokaUtilities.getInfoLinearModel,
        'NaiveBayesModel': nyokaUtilities.getInfoOfNaiveBayesModel,
    }
    allInfo = {}
    for towork in listOfObjectstogetData:
        if towork == 'version':
            allInfo['Version'] = tempObj['version']
        elif towork in extractors:
            allInfo.update(extractors[towork](tempObj))
    allInfo = nyokaUtilities.changeStructure(allInfo)
    return JsonResponse(allInfo)
def loadExecutionModel(self, pmmlFile):
    """Parse *pmmlFile*, rebuild every model it contains (NN via Keras/TF,
    scikit-learn otherwise) and register them in PMMLMODELSTORAGE under
    the file's base name.

    Returns:
        JsonResponse with a status message and the storage key.
    """
    pmmlFileObj = pathlib.Path(pmmlFile)
    # Storage key = file name without its extension.
    pmmlFileForKey = pmmlFileObj.name.replace(pmmlFileObj.suffix, '')
    from nyokaBase import PMML43Ext as ny
    pmmlObj = ny.parse(pmmlFile, silence=True)
    modelObj = []
    # Collect every model found in the PMML, tagged by architecture type.
    for inMod in modelObjectToCheck:
        if len(pmmlObj.__dict__[inMod]) > 0:
            modPMMLObj = pmmlObj.__dict__[inMod]
            if inMod == 'DeepNetwork':
                for ininMod in modPMMLObj:
                    colInfo = self.getTargetAndColumnsName(ininMod)
                    modelObj.append({
                        'modelArchType': 'NNModel',
                        'pmmlModelObject': ininMod,
                        'recoModelObj': None,
                        'listOFColumns': None,
                        'targetCol': colInfo[1]
                    })
            else:
                for ininMod in modPMMLObj:
                    colInfo = self.getTargetAndColumnsName(ininMod)
                    # recoModelObj=generateModelfromPMML(ininMod)
                    modelObj.append({
                        'modelArchType': 'SKLModel',
                        'pmmlModelObject': ininMod,
                        'recoModelObj': None,
                        'listOFColumns': colInfo[0],
                        'targetCol': colInfo[1]
                    })
    # Bucket the models by task type ('train' / 'score'); 'trainAndscore'
    # models are registered under both buckets.  Both buckets hold the SAME
    # singMod dict, so later key writes are visible in both.
    tempDict = {}
    tempDict['train'] = {}
    tempDict['score'] = {}
    for singMod in modelObj:
        if singMod['pmmlModelObject'].taskType == 'trainAndscore':
            tempDict['train'][singMod['pmmlModelObject'].modelName] = {}
            tempDict['train'][singMod['pmmlModelObject'].modelName]['modelObj'] = singMod
            tempDict['train'][singMod['pmmlModelObject'].modelName]['modelObj']['pmmlDdicObj'] = pmmlObj.DataDictionary
            tempDict['train'][singMod['pmmlModelObject'].modelName]['modelObj']['pmmlNyokaObj'] = self.nyObjOfModel(pmmlObj, singMod)
            tempDict['score'][singMod['pmmlModelObject'].modelName] = {}
            tempDict['score'][singMod['pmmlModelObject'].modelName]['modelObj'] = singMod
            tempDict['score'][singMod['pmmlModelObject'].modelName]['modelObj']['pmmlDdicObj'] = pmmlObj.DataDictionary
            # NOTE(review): writes 'train' again -- probably meant 'score'.
            # Harmless in practice because both buckets share the dict.
            tempDict['train'][singMod['pmmlModelObject'].modelName]['modelObj']['pmmlNyokaObj'] = self.nyObjOfModel(pmmlObj, singMod)
        else:
            tempDict[singMod['pmmlModelObject'].taskType][singMod['pmmlModelObject'].modelName] = {}
            tempDict[singMod['pmmlModelObject'].taskType][singMod['pmmlModelObject'].modelName]['modelObj'] = singMod
            tempDict[singMod['pmmlModelObject'].taskType][singMod['pmmlModelObject'].modelName]['modelObj']['pmmlDdicObj'] = pmmlObj.DataDictionary
            tempDict[singMod['pmmlModelObject'].taskType][singMod['pmmlModelObject'].modelName]['modelObj']['pmmlNyokaObj'] = self.nyObjOfModel(pmmlObj, singMod)
    # Rebuild the dict with model names in sorted order.
    tempDict2 = {}
    for taType in tempDict:
        tempTa = list(tempDict[taType].keys())
        tempTa.sort()
        for taTTemp in tempTa:
            if taType in tempDict2:
                pass
            else:
                tempDict2[taType] = {}
            tempDict2[taType][taTTemp] = tempDict[taType][taTTemp]
    tempDict = tempDict2.copy()
    # Attach pre/post-processing scripts: both the raw code text
    # ('<class>_code') and the executable object ('<class>').
    for sc1 in pmmlObj.script:
        if sc1.scriptPurpose == 'trainAndscore':
            tempDict['train'][sc1.for_][sc1.class_] = {}
            tempDict['train'][sc1.for_][sc1.class_ + '_code'] = self.getCode(sc1.valueOf_)
            tempDict['train'][sc1.for_][sc1.class_] = self.getCodeObjectToProcess(self.getCode(sc1.valueOf_))
            tempDict['score'][sc1.for_][sc1.class_] = {}
            tempDict['score'][sc1.for_][sc1.class_ + '_code'] = self.getCode(sc1.valueOf_)
            tempDict['score'][sc1.for_][sc1.class_] = self.getCodeObjectToProcess(self.getCode(sc1.valueOf_))
        else:
            tempDict[sc1.scriptPurpose][sc1.for_][sc1.class_] = {}
            tempDict[sc1.scriptPurpose][sc1.for_][sc1.class_ + '_code'] = self.getCode(sc1.valueOf_)
            tempDict[sc1.scriptPurpose][sc1.for_][sc1.class_] = self.getCodeObjectToProcess(self.getCode(sc1.valueOf_))
    taskTypesName = list(tempDict.keys())
    listOfModelNames = set([k for j in tempDict for k in tempDict[j]])
    # Hyperparameters arrive as literal-eval'able Extension values.
    hyperParDict = {}
    for extObj in pmmlObj.MiningBuildTask.Extension:
        if extObj.name == 'hyperparameters':
            hyperParDict[extObj.for_] = ast.literal_eval(extObj.value)
    try:
        miningBuildTaskList = pmmlObj.MiningBuildTask.__dict__['Extension']
        for bTask in miningBuildTaskList:
            if bTask.__dict__['for_'] in listOfModelNames:
                # NOTE(review): attaches bTask to every task/model pair,
                # not only the model named by bTask.for_ -- confirm intended.
                for tT in taskTypesName:
                    for modInd in listOfModelNames:
                        tempDict[tT][modInd]['modelObj']['miningExtension'] = bTask
    except:
        # Best-effort: a PMML without MiningBuildTask is simply skipped.
        pass
    modelLoadStatus = []  # one entry per model: 1 = loaded, 0 = failed
    for taskT in tempDict:
        print(taskT)
        for mO in tempDict[taskT]:
            if tempDict[taskT][mO]['modelObj']['modelArchType'] == "NNModel":
                # Rebuild the Keras model inside a dedicated TF graph and
                # session so several models can coexist in one process.
                modelProp = tempDict[taskT][mO]['modelObj']['pmmlNyokaObj']
                model_graph = Graph()
                with model_graph.as_default():
                    tf_session = Session()
                    with tf_session.as_default():
                        print('step 5')
                        from nyokaBase.reconstruct.pmml_to_pipeline_model import generate_skl_model
                        print('step 5.1')
                        model_net = generate_skl_model(modelProp)
                        print('step 5.2')
                        model = model_net.model
                        model_graph = tf.get_default_graph()
                        print('step 6')
                        inputShapevals = [
                            inpuShape.value
                            for inpuShape in list(model.input.shape)
                        ]
                # NOTE(review): nesting of this block relative to the two
                # `with` blocks above reconstructed from mangled source --
                # confirm against the original file.
                if str(model_net) != 'None':
                    tempDict[taskT][mO]['modelObj']['recoModelObj'] = model_net
                    tempDict[taskT][mO]['modelObj']['model_graph'] = model_graph
                    tempDict[taskT][mO]['modelObj']['tf_session'] = tf_session
                    tempDict[taskT][mO]['modelObj']['inputShape'] = inputShapevals
                    modelLoadStatus.append(1)
                else:
                    modelLoadStatus.append(0)
                try:
                    tempDict[taskT][mO]['modelObj']['hyperparameters'] = hyperParDict[mO]
                except:
                    tempDict[taskT][mO]['modelObj']['hyperparameters'] = None
            elif tempDict[taskT][mO]['modelObj']['modelArchType'] == "SKLModel":
                modelProp = tempDict[taskT][mO]['modelObj']['pmmlNyokaObj']
                from nyokaBase.reconstruct.pmml_to_pipeline_model import generate_skl_model
                recoModelObj = generate_skl_model(modelProp)
                if recoModelObj != None:
                    tempDict[taskT][mO]['modelObj']['recoModelObj'] = recoModelObj
                    modelLoadStatus.append(1)
                else:
                    modelLoadStatus.append(0)
                try:
                    tempDict[taskT][mO]['modelObj']['hyperparameters'] = hyperParDict[mO]
                except:
                    tempDict[taskT][mO]['modelObj']['hyperparameters'] = None
    PMMLMODELSTORAGE[pmmlFileForKey] = tempDict
    # Any single failure marks the whole load as failed.
    if 0 in modelLoadStatus:
        messageToWorld = "Model load failed, please connect with admin"
    else:
        messageToWorld = "Model Loaded Successfully"
    resultResp = {'message': messageToWorld, 'keytoModel': pmmlFileForKey}
    return JsonResponse(resultResp, status=200)
def deletelayer(payload, projectID):
    """Delete a layer or a whole section (FOLDING) from a project.

    Removes the item both from the in-memory architecture
    (MEMORY_DICT_ARCHITECTURE) and from the NetworkLayer list of the PMML
    file on disk, then rewrites the file.

    Args:
        payload: request payload; payload['layerDelete'] describes the item.
        projectID: key into MEMORY_DICT_ARCHITECTURE.

    Returns:
        JsonResponse {'message': 'Success'}.
    """
    import ast
    global MEMORY_DICT_ARCHITECTURE
    global lockForPMML
    existingArch = MEMORY_DICT_ARCHITECTURE[projectID]['architecture']
    filetoSave = MEMORY_DICT_ARCHITECTURE[projectID]['filePath']
    # Parse the PMML on disk under the lock; on failure fall back to None.
    try:
        lockForPMML.acquire()
        existingPmmlObj = pml.parse(filetoSave, silence=True)
    except Exception:
        existingPmmlObj = None
    finally:
        lockForPMML.release()
    processTheInput = payload['layerDelete']
    # --- remove the item from the in-memory architecture ----------------
    try:
        deleteFromSection = processTheInput['sectionId']
        if processTheInput['itemType'] != 'FOLDING':
            # A layer inside a section: remove it from the section's children.
            idToDelete = processTheInput['id']
            positionOfSection = existingArch.index([
                j for j in existingArch
                if j['itemType'] == 'FOLDING' and j['sectionId'] == deleteFromSection
            ][0])
            positionInChildren = [
                j['id'] for j in existingArch[positionOfSection]['children']
            ].index(idToDelete)
            del existingArch[positionOfSection]['children'][positionInChildren]
        else:
            # The whole section is being deleted.
            positionOfSection = existingArch.index([
                j for j in existingArch
                if j['itemType'] == 'FOLDING' and j['sectionId'] == deleteFromSection
            ][0])
            del existingArch[positionOfSection]
    except Exception:
        # No sectionId (or section lookup failed): treat it as a top-level
        # layer and delete it directly.
        idToDelete = processTheInput['id']
        positionInArch = [j['id'] for j in existingArch].index(idToDelete)
        del existingArch[positionInArch]
    # Re-number layerIndex after the deletion.
    for num, lay in enumerate(existingArch):
        if lay['itemType'] == 'FOLDING':
            for num2, levLay in enumerate(lay['children']):
                levLay['layerIndex'] = num2
        else:
            lay['layerIndex'] = num
    # --- mirror the deletion into the parsed PMML -----------------------
    if processTheInput['itemType'] == 'FOLDING':
        sectionToDelete = processTheInput['sectionId']
        indexToDelete = -1
        existingNetworkLayers = existingPmmlObj.DeepNetwork[0].NetworkLayer
        # Find the first NetworkLayer belonging to the section.
        for index, layer in enumerate(existingNetworkLayers):
            if layer.Extension:
                sectionId = ast.literal_eval(layer.Extension[0].value)['sectionId']
                if sectionId == sectionToDelete:
                    indexToDelete = index
                    break
        deleteTill = -1
        if indexToDelete != -1:
            # Walk forward to find the last layer of the same section.
            for idx in range(indexToDelete, len(existingNetworkLayers)):
                # BUGFIX: the original tested the stale loop variable
                # `layer` left over from the search loop above, so a layer
                # without an Extension was never detected here.
                if not existingNetworkLayers[idx].Extension:
                    deleteTill = idx - 1
                    break
                else:
                    sectionId = ast.literal_eval(
                        existingNetworkLayers[idx].Extension[0].value)['sectionId']
                    if sectionId == sectionToDelete:
                        deleteTill = idx
            if deleteTill == -1:
                del existingNetworkLayers[indexToDelete]
            else:
                existingNetworkLayers = existingNetworkLayers[:indexToDelete] + \
                    existingNetworkLayers[deleteTill + 1:]
            existingNetworkLayers = resetNetworkLayer(
                existingNetworkLayers, indexToDelete)
            existingPmmlObj.DeepNetwork[0].NetworkLayer = existingNetworkLayers
    elif processTheInput['itemType'] == 'LAYER':
        layerIdToDelete = processTheInput['layerId']
        indexToDelete = -1
        for index, layer in enumerate(
                existingPmmlObj.DeepNetwork[0].NetworkLayer):
            if layer.layerId == layerIdToDelete:
                indexToDelete = index
                break
        if indexToDelete != -1:
            del existingPmmlObj.DeepNetwork[0].NetworkLayer[indexToDelete]
            existingNetworkLayers = resetNetworkLayer(
                existingPmmlObj.DeepNetwork[0].NetworkLayer, indexToDelete)
            existingPmmlObj.DeepNetwork[0].NetworkLayer = existingNetworkLayers
    # Blank out Extension payloads before serialising.
    if existingPmmlObj.Header.Extension:
        existingPmmlObj.Header.Extension[0].anytypeobjs_ = ['']
    for lay in existingPmmlObj.DeepNetwork[0].NetworkLayer:
        if lay.Extension:
            lay.Extension[0].anytypeobjs_ = ['']
    existingPmmlObj.DeepNetwork[0].numberOfLayers = len(
        existingPmmlObj.DeepNetwork[0].NetworkLayer)
    writePmml(existingPmmlObj, filetoSave, lockForPMML)
    MEMORY_DICT_ARCHITECTURE[projectID]['architecture'] = existingArch
    message = {'message': 'Success'}
    return JsonResponse(message)
def updatetoWorkflow(payload, projectID):
    """Update a project's export workflow state (section / data / code /
    model item) and re-serialise the whole workflow to its PMML file.

    Returns:
        JsonResponse echoing the projectID and the processed item.
    """
    def getCodeObjectToProcess(codeVal):
        # Execute the script text and return the first object it defined.
        # NOTE(review): exec on script file content -- trusted input only.
        d = {}
        exec(codeVal, None, d)
        objeCode = d[list(d.keys())[0]]
        return objeCode

    def getCOlumDet(pmmlObj):
        # Return (featureColumnNames, targetColumnName) read from the
        # MiningSchema of the first recognised model in the PMML object.
        import typing
        listOfObjectstogetData = []
        tempObj = pmmlObj.__dict__
        # Keep attributes that actually carry data (skip None / empty lists).
        for j in tempObj.keys():
            if (tempObj[j] is None):
                pass
            elif (isinstance(tempObj[j], typing.List)):
                if (len(tempObj[j]) == 0):
                    pass
                else:
                    listOfObjectstogetData.append(j)
            else:
                listOfObjectstogetData.append(j)
        # NOTE(review): if none of the known model types is present,
        # minigFieldList stays unbound and the loop below raises
        # UnboundLocalError.
        for ob in listOfObjectstogetData:
            if ob == 'TreeModel':
                minigFieldList = tempObj['TreeModel'][0].__dict__['MiningSchema'].__dict__['MiningField']
                break
            elif ob == 'RegressionModel':
                minigFieldList = tempObj['RegressionModel'][0].__dict__['MiningSchema'].__dict__['MiningField']
                break
            elif ob == 'MiningModel':
                minigFieldList = tempObj['MiningModel'][0].__dict__['MiningSchema'].__dict__['MiningField']
                break
            elif ob == 'AnomalyDetectionModel':
                minigFieldList = tempObj['AnomalyDetectionModel'][0].__dict__['MiningSchema'].__dict__['MiningField']
                break
            elif ob == 'DeepNetwork':
                minigFieldList = tempObj['DeepNetwork'][0].__dict__['MiningSchema'].__dict__['MiningField']
                break
            else:
                None
        targetCol = None
        colNames = []
        # usageType 'target' marks the label column; all others are features.
        for indCol in minigFieldList:
            if indCol.__dict__['usageType'] == 'target':
                targetCol = indCol.__dict__['name']
            else:
                colNames.append(indCol.__dict__['name'])
        return (colNames, targetCol)

    from nyokaBase.skl.skl_to_pmml import model_to_pmml
    processTheInput = payload
    global MEMORY_DICT_ARCHITECTURE
    # Lazily create the per-project export containers.
    try:
        MEMORY_DICT_ARCHITECTURE[projectID]['toExportDict']
    except:
        MEMORY_DICT_ARCHITECTURE[projectID]['toExportDict'] = {}
    try:
        MEMORY_DICT_ARCHITECTURE[projectID]['tempSecMem']
    except:
        MEMORY_DICT_ARCHITECTURE[projectID]['tempSecMem'] = {}
    tempMem = MEMORY_DICT_ARCHITECTURE[projectID]['toExportDict']
    tempSecMem = MEMORY_DICT_ARCHITECTURE[projectID]['tempSecMem']
    if processTheInput['itemType'] == 'FOLDING':
        # New section: remember its layerId and create an empty export slot.
        tempSecMem[processTheInput['sectionId']] = processTheInput['layerId']
        tempMem[tempSecMem[processTheInput['sectionId']]] = {
            'data': None,
            'hyperparameters': None,
            'preProcessingScript': None,
            'pipelineObj': None,
            'modelObj': None,
            'featuresUsed': None,
            'targetName': None,
            'postProcessingScript': None,
            'taskType': None,
            'predictedClasses': None,
            'dataSet': None
        }
    elif processTheInput['itemType'] == 'DATA':
        tempMem[tempSecMem[processTheInput['sectionId']]][
            'data'] = processTheInput['filePath']
    elif processTheInput['itemType'] == 'CODE':
        # NOTE(review): file handle is never closed here.
        scriptObj = open(processTheInput['filePath'], 'r').read()
        if processTheInput['taskType'] == 'preprocessing':
            tempMem[tempSecMem[processTheInput['sectionId']]]['preProcessingScript']={'scripts':[scriptObj],\
                'scriptpurpose':[processTheInput['scriptPurpose']],\
                'scriptOutput':[processTheInput['scriptOutput']],\
                'scriptPath':[processTheInput['filePath']]}
        elif processTheInput['taskType'] == 'postprocessing':
            tempMem[tempSecMem[processTheInput['sectionId']]]['postProcessingScript']={'scripts':[scriptObj],\
                'scriptpurpose':[processTheInput['scriptPurpose']],\
                'scriptOutput':[processTheInput['scriptOutput']],\
                'scriptPath':[processTheInput['filePath']]}
    elif processTheInput['itemType'] == 'MODEL':
        modelPath = processTheInput['filePath']
        from nyokaBase.reconstruct.pmml_to_pipeline_model import generate_skl_model
        from sklearn.pipeline import Pipeline
        from nyokaBase import PMML43Ext as pmmNY
        pmObj = pmmNY.parse(modelPath, silence=True)
        colInfo = getCOlumDet(pmObj)
        print('came to reconstruct')
        if len(pmObj.__dict__['DeepNetwork']) > 0:
            # NN model: rebuild inside a dedicated TF graph and session.
            from tensorflow import Graph, Session
            import tensorflow as tf
            model_graph = Graph()
            with model_graph.as_default():
                tf_session = Session()
                with tf_session.as_default():
                    print('step 5')
                    from nyokaBase.reconstruct.pmml_to_pipeline_model import generate_skl_model
                    print('step 5.1')
                    modelOb = generate_skl_model(pmObj).model
                    model_graph = tf.get_default_graph()
        else:
            modelOb = generate_skl_model(pmObj)
            model_graph = None
        import sklearn
        if type(modelOb) == sklearn.pipeline.Pipeline:
            # Pipelines are split: last step is the model, the preceding
            # steps become the pre-processing pipeline.
            tempMem[tempSecMem[processTheInput['sectionId']]][
                'modelObj'] = modelOb.steps[-1][1]
            tempMem[tempSecMem[processTheInput['sectionId']]][
                'pipelineObj'] = Pipeline(modelOb.steps[:-1])
            tempMem[tempSecMem[processTheInput['sectionId']]][
                'featuresUsed'] = colInfo[0]
            tempMem[tempSecMem[processTheInput['sectionId']]][
                'targetName'] = colInfo[1]
        else:
            tempMem[tempSecMem[processTheInput['sectionId']]][
                'modelObj'] = modelOb
        if model_graph != None:
            tempMem[tempSecMem[processTheInput['sectionId']]][
                'model_graph'] = model_graph
            tempMem[tempSecMem[processTheInput['sectionId']]][
                'tf_session'] = tf_session
        tempMem[tempSecMem[processTheInput['sectionId']]][
            'modelPath'] = modelPath
        # NOTE(review): placement of this taskType write (inside MODEL vs
        # after the branch chain) reconstructed from mangled source --
        # confirm against the original file.
        tempMem[tempSecMem[processTheInput['sectionId']]][
            'taskType'] = processTheInput['taskType']
    MEMORY_DICT_ARCHITECTURE[projectID]['toExportDict'] = tempMem.copy()
    # Persist the updated workflow to the project's PMML file.
    model_to_pmml(
        MEMORY_DICT_ARCHITECTURE[projectID]['toExportDict'],
        PMMLFileName=MEMORY_DICT_ARCHITECTURE[projectID]['filePath'],
        tyP='multi')
    returntoClient = {
        'projectID': projectID,
        'layerUpdated': processTheInput
    }
    return JsonResponse(returntoClient)
def updatetoArchitecture(payload, projectID): def selectArchitecture(checkTemplateID): if checkTemplateID == 'mobilenetArch': pmmlObj = pml.parse(open( settingFilePath + 'MobilenetArch.pmml', 'r'), silence=True) templateArch = nyokaUtilities.pmmlToJson(settingFilePath + 'MobilenetArch.pmml') elif checkTemplateID == 'vgg16Arch': pmmlObj = pml.parse(open(settingFilePath + 'vGG16Arch.pmml', 'r'), silence=True) templateArch = nyokaUtilities.pmmlToJson(settingFilePath + 'vGG16Arch.pmml') elif checkTemplateID == 'vgg19Arch': pmmlObj = pml.parse(open(settingFilePath + 'vGG19Arch.pmml', 'r'), silence=True) templateArch = nyokaUtilities.pmmlToJson(settingFilePath + 'vGG19Arch.pmml') return templateArch, pmmlObj def addTemplatetoArchitecture(checkTemplateID, indexInObj, listOFIndices, existingArch): if checkTemplateID != None: templateArch, templatePmml = selectArchitecture( checkTemplateID) if indexInObj not in listOFIndices: existingArch = existingArch + templateArch else: existingArch = existingArch[: indexInObj] + templateArch + existingArch[ indexInObj:] return existingArch, templatePmml def addLayerInArch(indexInObj, existingArch, processTheInput): existingArch.insert(indexInObj, processTheInput) return existingArch def getSectionArchitecture(existingArch, sectionIdOfInput): # print ('>>>>>>>>>>>>>>>>>>',type(existingArch)) tempArchSection = [] tempNum = None # print (len(existingArch)) for num, j in enumerate(existingArch): # print ('$$$$',j) if 'sectionId' in j: if j['sectionId'] == sectionIdOfInput: tempNum, tempArchSection = num, j['children'] else: pass return (tempNum, tempArchSection) def getSectionArchitecturefromLayerID(existingArch, idInObj): # print ('>>>>>>>>>>>>>>>>>>',idInObj) tempArchSection = [] tempNum = None # print ('LLLLLLLLLLLLLLLLLLLLLLLLLLLLLLL',len(existingArch)) for num, j in enumerate(existingArch): # print ('$$$$',j) if 'sectionId' in j: if j['sectionId'] != None: # print ('Prntong Section to check on',j) if idInObj in [i['id'] for i in 
j['children']]: # print ('came here') tempNum, tempArchSection = num, j['children'] else: pass return (tempNum, tempArchSection) def deleteAlayerFromArch(tempArchToDelete, idOfLayer): spaceToRem = [j['id'] for j in tempArchToDelete] indexToDelete = spaceToRem.index(idOfLayer) del tempArchToDelete[indexToDelete] return tempArchToDelete def reoderArch(tempArchtoReorder): for num, j in enumerate(tempArchtoReorder): tempArchtoReorder[num]['layerIndex'] = num return tempArchtoReorder def getIndexForNewLayer(existingArch, idInObj, runUpto): if runUpto == None: listOFIDS = [] for j in existingArch: if j['itemType'] == 'FOLDING': for k in j['children']: listOFIDS.append(k['id']) else: listOFIDS.append(j['id']) return listOFIDS.index(idInObj) if runUpto != None: listOFIDS = [] for j in existingArch[:runUpto]: if j['itemType'] == 'FOLDING': for k in j['children']: listOFIDS.append(k['id']) else: listOFIDS.append(j['id']) return len(listOFIDS) def getIndexForExistingLayer(existingPmmlObj, layerId, runUpto): _NetworkLayersObject = existingPmmlObj.DeepNetwork[0].NetworkLayer listOFIDS = [i.layerId for i in _NetworkLayersObject] if runUpto == None: return listOFIDS.index(layerId) if runUpto != None: return len(listOFIDS) def addLayerToPMML(_positionOfLayer, toUpdateLayer, processedOutput, pmmlObject): def checkIfFlushRequired(_oldPosition, _newPositon, _pmmlLayer, _networkLayers): if _oldPosition == _newPositon: if (_pmmlLayer.LayerParameters.inputDimension == _networkLayers[_oldPosition].LayerParameters. inputDimension) & ( _pmmlLayer.LayerParameters.outputDimension == _networkLayers[_oldPosition]. 
LayerParameters.outputDimension): return False else: return True else: return True if processedOutput['itemType'] == 'LAYER': _inputForPMML = nyokaPMMLUtilities.convertToStandardJson( processedOutput) # print ('',_inputForPMML) _pmmlOfLayer = nyokaPMMLUtilities.addLayer(_inputForPMML) # print('>>>>>>>',_pmmlOfLayer.__dict__) _deepNetworkObj = pmmlObject.DeepNetwork[0] # print ('step 1') _NetworkLayersObject = _deepNetworkObj.NetworkLayer # print ('step 2') _idsOfNetworklayer = [i.layerId for i in _NetworkLayersObject] flushMemory = False if toUpdateLayer == True: _oldPositionOFLayer = _idsOfNetworklayer.index( processedOutput['layerId']) # print('>>>> layer id is ',processedOutput['layerId']) # print('>>>> old position to delete',_oldPositionOFLayer) # print('>>>> new position ',_positionOfLayer) # print ('_idsOfNetworklayer',_idsOfNetworklayer) flushMemory = checkIfFlushRequired(_oldPositionOFLayer, _positionOfLayer, _pmmlOfLayer, _NetworkLayersObject) # print('flushmemory is ',flushMemory) if flushMemory == False: _NetworkLayersObject[ _oldPositionOFLayer].Extension = _pmmlOfLayer.Extension _pmmlOfLayer = _NetworkLayersObject[ _oldPositionOFLayer] del _NetworkLayersObject[_oldPositionOFLayer] _NetworkLayersObject.insert(_positionOfLayer, _pmmlOfLayer) # print ('step 3') if flushMemory == True: _NetworkLayersObject = resetNetworkLayer( _NetworkLayersObject, _positionOfLayer) # print ('step 6') _NetworkLayersObject = reorderIdsOfPmml(_NetworkLayersObject) _deepNetworkObj.NetworkLayer = _NetworkLayersObject _deepNetworkObj.numberOfLayers = len(_NetworkLayersObject) _deepNetworkObj.modelName = 'model1' _deepNetworkObj.taskType = "trainAndscore" pmmlObject.DeepNetwork[0] = _deepNetworkObj elif processedOutput['itemType'] == 'DATA': # print ("DATA layer came",processedOutput['filePath']) try: dataUrl = processedOutput['filePath'] # if processedOutput['for']: # dataTagValues=pml.Data(filePath=dataVal,for_=processedOutput['for']) # else: dataTagValues = 
pml.Data(filePath=dataUrl, for_='model1') pmmlObject.Data = [dataTagValues] # print ('Data Step 3') # pmmlObject.export(sys.stdout,0) except: pass elif processedOutput['itemType'] == 'CODE': print("CODE layer came") # print ('processedOutput',processedOutput) try: scrptVal = pmmlObject.script urlOfScript = processedOutput['url'] filePathUrl = processedOutput['filePath'] scriptFile = open(processedOutput['filePath'], 'r') scriptCode = scriptFile.read() scriptCode = scriptCode.replace('<', '<') # print (scriptCode) modelVal = 'model1' taskTypeVal = processedOutput['taskType'] scriptPurpose = processedOutput['scriptPurpose'] scriptOutput = processedOutput['scriptOutput'] scrp = pml.script(content=scriptCode, for_=modelVal, class_=taskTypeVal, scriptPurpose=scriptPurpose, scriptOutput=scriptOutput, filePath=filePathUrl) scrp.export(sys.stdout, 0) scrptVal.append(scrp) pmmlObject.script = scrptVal # print ('Code Step 10') # pmmlObject.export(sys.stdout,0) except: pass return (pmmlObject) def reorderIdsOfPmml(_NetworkLayersObject): _idsOfNetworklayer = [i.layerId for i in _NetworkLayersObject] # print ('step 5') for num, unitNetwork in enumerate(_idsOfNetworklayer): if num == 0: _NetworkLayersObject[num].connectionLayerId = 'NA' else: _NetworkLayersObject[ num].connectionLayerId = _NetworkLayersObject[ num - 1].layerId return _NetworkLayersObject def addTemplateToPMML(_positionOfLayer, existingPmmlObj, templatePmmlObj): noOfLayersInTemplate = len( templatePmmlObj.DeepNetwork[0].NetworkLayer) newNetworkLayers=existingPmmlObj.DeepNetwork[0].NetworkLayer[:_positionOfLayer]+templatePmml.DeepNetwork[0].NetworkLayer\ +existingPmmlObj.DeepNetwork[0].NetworkLayer[_positionOfLayer:] newNetworkLayers = resetNetworkLayer( newNetworkLayers, noOfLayersInTemplate + _positionOfLayer) existingPmmlObj.DeepNetwork[0].NetworkLayer = newNetworkLayers existingPmmlObj.DeepNetwork[0].numberOfLayers = len( existingPmmlObj.DeepNetwork[0].NetworkLayer) return existingPmmlObj # print 
('#######################################################################') global MEMORY_DICT_ARCHITECTURE, lockForPMML tempGlobal = MEMORY_DICT_ARCHITECTURE[projectID] filetoSave = tempGlobal['filePath'] existingArch = tempGlobal['architecture'] oldLenOfArchitecture = len(existingArch) if 'sectionCollapse' in payload: returntoClient = { 'projectID': projectID, 'sectionCollapse': payload['sectionCollapse'] } return JsonResponse(returntoClient) #################################### try: lockForPMML.acquire() existingPmmlObj = pml.parse(filetoSave, silence=True) except Exception as e: # print('>>>>>>>>>>>>>>>>> ', str(e)) existingPmmlObj = None finally: lockForPMML.release() isItemType_FOLDING_DATA_CODE_TEMPLATE = payload['itemType'] in [ 'FOLDING', 'DATA', 'CODE', 'TEMPLATE' ] listOFIDS, listOFIndices, listOfIdOFSections, listOfIdOFLayers = nyokaUtilities.detailsofExistingArch( existingArch) indexInObj, idInObj = nyokaUtilities.getIndexOfInput( payload), nyokaUtilities.getIdOfInput(payload) lenOfExistingArch = len(existingArch) typeOfLayer = nyokaUtilities.getLayerType(payload) itemOfLayer = nyokaUtilities.checkItemType(payload) sectionIdOfInput = nyokaUtilities.getIdOfSection(payload) toUpdateLayer = None runUpto = None try: checkTemplateID = payload['templateId'] except: checkTemplateID = None tupleOFindexSect = [(i['layerIndex'], i['sectionId']) for i in existingArch if i['itemType'] == 'FOLDING'] hasChildren = nyokaUtilities.checkChildren(payload) # print (indexInObj,idInObj,lenOfExistingArch,typeOfLayer,sectionIdOfInput,hasChildren) # print (listOFIDS,listOFIndices,listOFIdIndex,listOfSectionID,listOFSectionIdIndex,listOFSectionIdAndId) if isItemType_FOLDING_DATA_CODE_TEMPLATE: # print ('Pointer 0: GOT FOLDING_DATA_CODE_TEMPLATE') processTheInput = payload else: # print ('Pointer 0: GOT LAYER') processTheInput = nyokaUtilities.addLayertoJson(payload) # processTheInput=payload # print ('##########',processTheInput) # print 
('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>') # print ('sectionIdOfInput',sectionIdOfInput) # print ('itemOfLayer',itemOfLayer) # print ('typeOfLayer',typeOfLayer) # print ('lenOfExistingArch',lenOfExistingArch) # print ('indexInObj',indexInObj) # print ('idInObj',idInObj) # print ('listOFIndices',listOFIndices) # # print ('getSectionArchitecture',getSectionArchitecture(existingArch,sectionIdOfInput)) # print('listOFIDS',listOFIDS) # print ('>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>>') newPmmlObj, templatePmml = None, None newArch = [] if lenOfExistingArch == 0: if typeOfLayer == 'TEMPLATE': runUpto = indexInObj # print ('Pointer 1.0: Adding template at 0 position') existingArch, templatePmml = addTemplatetoArchitecture( checkTemplateID, indexInObj, listOFIndices, existingArch) else: # print ('Pointer 2: existingArch is 0') newArch.append(processTheInput.copy()) existingArch = newArch.copy() # print (existingArch) elif lenOfExistingArch > 0: if typeOfLayer == 'TEMPLATE': runUpto = indexInObj # print ('Pointer 1.1: Adding template at {} position'.format(indexInObj)) existingArch, templatePmml = addTemplatetoArchitecture( checkTemplateID, indexInObj, listOFIndices, existingArch) elif typeOfLayer == 'LAYER': # print ('Pointer 2.0: We came in layer operation at {} position'.format(indexInObj)) # print (listOFIndices,listOFIDS) if (indexInObj not in listOFIndices) & (idInObj not in listOFIDS): # print ('Pointer 2.1 Got new LAYER object at last index {}'.format(indexInObj) ) existingArch.append(processTheInput.copy()) elif (indexInObj in listOFIndices) & (idInObj not in listOFIDS): # print ('Pointer 2.2 Got a new LAYER object at existing index {}'.format(indexInObj) ) newArch = nyokaUtilities.makeModification( existingArch, processTheInput) existingArch = newArch.copy() elif (indexInObj in listOFIndices) & (idInObj in listOFIDS): toUpdateLayer = True # print ('Pointer 2.3 Got an old LAYER object at 
existing index {}'.format(indexInObj) ) tempExistingArchO = existingArch tempExistingArchO = deleteAlayerFromArch( existingArch, idInObj) tempExistingArchO = addLayerInArch(indexInObj, tempExistingArchO, processTheInput) tempExistingArchO = reoderArch(tempExistingArchO) # print ('#########',len(tempExistingArchO)) # newArch=nyokaUtilities.makeModification(tempExistingArchO,processTheInput) newArch = tempExistingArchO existingArch = newArch.copy() elif (indexInObj not in listOFIndices) & ( idInObj in listOFIDS) & (idInObj in listOfIdOFSections): toUpdateLayer = True # print ('Pointer 2.4 Got an old LAYER from Section object at new index {}'.format(indexInObj) ) tempExistingArchO = existingArch _positionOfSection, tempExistingArchFromSection = getSectionArchitecturefromLayerID( existingArch, idInObj) # print (tempExistingArchFromSection) tempExistingArchFromSection = deleteAlayerFromArch( tempExistingArchFromSection, idInObj) if len(tempExistingArchFromSection) > 0: # print ('came here too') tempExistingArchO[_positionOfSection][ 'children'] = tempExistingArchFromSection else: # print ('came here too 3') del tempExistingArchO[_positionOfSection] tempExistingArchO = addLayerInArch(indexInObj, tempExistingArchO, processTheInput) tempExistingArchO = reoderArch(tempExistingArchO) # print ('#########',len(tempExistingArchO)) newArch = tempExistingArchO existingArch = newArch.copy() # for j in existingArch: # # print ('>>',j['layerId'],j['layerIndex']) # try: # for k in j['children']: # # print ('>> >>',k['layerId'],k['layerIndex']) # except: # pass elif typeOfLayer == 'SECTION': # print ('Pointer 3.0 Got new LAYER object at index {}'.format(indexInObj)) # print ('>>>>>>>',sectionIdOfInput) _positionOfSection, tempSectionArchInMemory = getSectionArchitecture( existingArch, sectionIdOfInput) # print (tempSectionArchInMemory) _lenSectionArch = len(tempSectionArchInMemory) if _positionOfSection is not None: _listOFIDSection, _listOFIndicesSection, _listOFIdIndexSection = 
nyokaUtilities.detailsofSectionArch( existingArch[_positionOfSection]) # print ('>>>>',_listOFIDSection) if (idInObj not in listOFIDS) & (itemOfLayer == 'FOLDING'): _tempArch = addLayerInArch(indexInObj, existingArch, processTheInput) newArch = reoderArch(_tempArch) # print (newArch) existingArch = newArch.copy() elif (idInObj not in listOFIDS) & (itemOfLayer == 'LAYER'): if _lenSectionArch == 0: tempSectionArchInMemory.append(processTheInput) else: if indexInObj in _listOFIndicesSection: # print ('Pointer 3.1 Got new LAYER object at index {}'.format(indexInObj) ) newArchSec = nyokaUtilities.makeModification( tempSectionArchInMemory, processTheInput) tempSectionArchInMemory = newArchSec.copy() else: # print ('Pointer 3.2 Got new LAYER object at last index {}'.format(indexInObj) ) tempSectionArchInMemory.append( processTheInput.copy()) existingArch[_positionOfSection][ 'children'] = tempSectionArchInMemory elif (idInObj in listOFIDS) & (itemOfLayer == 'LAYER') & ( idInObj in _listOFIDSection): toUpdateLayer = True # print ('Pointer 3.3 Got existing LAYER with {} object at index {}'.format(idInObj,indexInObj) ) # print ([(j['layerId'],j['layerIndex']) for j in tempSectionArchInMemory]) tempSectionArchInMemory = deleteAlayerFromArch( tempSectionArchInMemory, idInObj) # print ([(j['layerId'],j['layerIndex']) for j in tempSectionArchInMemory]) tempSectionArchInMemory = addLayerInArch( indexInObj, tempSectionArchInMemory, processTheInput) # print (len(tempSectionArchInMemory)) # print ([(j['layerId'],j['layerIndex']) for j in tempSectionArchInMemory]) tempSectionArchInMemory = reoderArch( tempSectionArchInMemory) existingArch[_positionOfSection][ 'children'] = tempSectionArchInMemory elif (idInObj in listOFIDS) & (itemOfLayer == 'LAYER') & ( idInObj not in _listOFIDSection) & ( idInObj not in listOfIdOFLayers): toUpdateLayer = True # print ('Pointer 3.4 Got existing LAYER with diff Section {} object at index {}'.format(idInObj,indexInObj) ) _fromNum, _fromArchi = 
getSectionArchitecturefromLayerID( existingArch, idInObj) # print ('_fromArchi',_fromArchi) _fromArchi = deleteAlayerFromArch(_fromArchi, idInObj) _fromArchi = reoderArch(_fromArchi) existingArch[_fromNum]['children'] = _fromArchi.copy() tempSectionArchInMemory = addLayerInArch( indexInObj, tempSectionArchInMemory, processTheInput) tempSectionArchInMemory = reoderArch( tempSectionArchInMemory) existingArch[_positionOfSection][ 'children'] = tempSectionArchInMemory elif (idInObj in listOFIDS) & (itemOfLayer == 'LAYER') & ( idInObj not in _listOFIDSection) & ( idInObj in listOfIdOFLayers): toUpdateLayer = True _positionOfSection, tempSectionArchInMemory = getSectionArchitecture( existingArch, sectionIdOfInput) # print ('Pointer 3.5 Got existing LAYER not in Section {} object at index {}'.format(idInObj,indexInObj) ) _fromNum, _fromArchi = getSectionArchitecturefromLayerID( existingArch, idInObj) # print ('_fromArchi',_fromArchi) existingArch = deleteAlayerFromArch(existingArch, idInObj) existingArch = reoderArch(existingArch) tempSectionArchInMemory = addLayerInArch( indexInObj, tempSectionArchInMemory, processTheInput) tempSectionArchInMemory = reoderArch( tempSectionArchInMemory) _positionOfSection, notToUSe = getSectionArchitecture( existingArch, sectionIdOfInput) existingArch[_positionOfSection][ 'children'] = tempSectionArchInMemory layerId = payload['layerId'] existingLayerIds = [ lay.layerId for lay in existingPmmlObj.DeepNetwork[0].NetworkLayer ] if itemOfLayer == 'TEMPLATE': _positionOfLayer = getIndexForNewLayer(existingArch, idInObj, runUpto) # if layerId not in existingLayerIds: # _positionOfLayer=getIndexForNewLayer(existingArch,idInObj,runUpto) # else: # _positionOfLayer=getIndexForExistingLayer(existingPmmlObj,layerId,runUpto) _updatedPMMLObj = addTemplateToPMML(_positionOfLayer, existingPmmlObj, templatePmml) writePmml(_updatedPMMLObj, filetoSave, lockForPMML) elif itemOfLayer == 'FOLDING': if len(processTheInput['children']) == 0: pass else: 
_positionOfLayer = getIndexForNewLayer(existingArch, idInObj, runUpto) # if layerId not in existingLayerIds: # _positionOfLayer=getIndexForNewLayer(existingArch,idInObj,runUpto) # else: # _positionOfLayer=getIndexForExistingLayer(existingPmmlObj,layerId,runUpto) _updatedPMMLObj = addLayerToPMML(_positionOfLayer, toUpdateLayer, processTheInput, existingPmmlObj) writePmml(_updatedPMMLObj, filetoSave, lockForPMML) tempGlobal['architecture'] = existingArch MEMORY_DICT_ARCHITECTURE[projectID]['architecture'] = tempGlobal[ 'architecture'] if typeOfLayer == 'TEMPLATE': returntoClient = { 'projectID': projectID, 'architecture': tempGlobal['architecture'] } else: returntoClient = { 'projectID': projectID, 'layerUpdated': processTheInput } return JsonResponse(returntoClient)
def getDetailsOfPMML(filepath):
    """Parse a PMML file and return a JsonResponse summarising its contents.

    The summary is built by dispatching each populated top-level PMML
    attribute (Header, DataDictionary, model elements, ...) to the matching
    ``nyokaUtilities`` extractor, then flagging whether the model is
    deployable ('deployableToZAD').

    Parameters
    ----------
    filepath : str or file-like
        Path of the PMML file accepted by ``pml.parse``.

    Returns
    -------
    JsonResponse
        The collected model information plus the ``deployableToZAD`` flag.
    """
    pmmlObj = pml.parse(filepath, silence=True)
    tempObj = pmmlObj.__dict__

    # A model is deployable unless it carries scripts, is a workflow, or
    # (deep networks only) contains an LSTM layer.
    hasScript = len(tempObj['script']) >= 1
    isWorkflow = tempObj['Header'].__dict__['description'] == 'Work Flow'
    if len(tempObj['DeepNetwork']) > 0:
        layerList = [
            kk.get_layerType()
            for kk in tempObj['DeepNetwork'][0].NetworkLayer
        ]
        deployInfo = not (hasScript or ('LSTM' in layerList) or isWorkflow)
    else:
        deployInfo = not (hasScript or isWorkflow)

    # Keep only attributes that actually hold data: skip None and empty lists.
    listOfObjectstogetData = []
    for j in tempObj.keys():
        if tempObj[j] is None:
            continue
        # FIX: original used isinstance(..., typing.List), which is a
        # deprecated runtime check; the builtin `list` is the correct type.
        if isinstance(tempObj[j], list) and len(tempObj[j]) == 0:
            continue
        listOfObjectstogetData.append(j)

    allInfo = {}
    for towork in listOfObjectstogetData:
        if towork == 'type_':
            allInfo['type'] = tempObj['type_']
        elif towork == 'version':
            allInfo['Version'] = tempObj['version']
        elif towork == 'Header':
            allInfo.update(nyokaUtilities.getHeaderInfo(tempObj))
        elif towork == 'DataDictionary':
            allInfo.update(nyokaUtilities.getDataFields(tempObj))
        elif towork == 'NearestNeighborModel':
            allInfo.update(
                nyokaUtilities.getInfoNearestNeighborModel(tempObj))
        elif towork == 'DeepNetwork':
            allInfo.update(nyokaUtilities.getInfoOfDeepNetwork(tempObj))
        elif towork == 'MiningModel':
            allInfo.update(nyokaUtilities.getInfoMiningModel(tempObj))
        elif towork == 'SupportVectorMachineModel':
            allInfo.update(
                nyokaUtilities.getInfoSupportVectorMachineModel(tempObj))
        elif towork == 'TreeModel':
            allInfo.update(nyokaUtilities.getInfoTreeModel(tempObj))
        elif towork == 'RegressionModel':
            allInfo.update(nyokaUtilities.getInfoLinearModel(tempObj))
        elif towork == 'NaiveBayesModel':
            allInfo.update(
                nyokaUtilities.getInfoOfNaiveBayesModel(tempObj))
        elif towork == 'AnomalyDetectionModel':
            allInfo.update(
                nyokaUtilities.getInfoOfAnomalyDetectionModel(tempObj))

    allInfo = nyokaUtilities.changeStructure(allInfo)
    allInfo['deployableToZAD'] = deployInfo
    return JsonResponse(allInfo)
# Script fragment: reload a freshly generated PMML model into ZMK, rebuild
# it as a Keras model, and walk layer weights pairwise for comparison.
# NOTE(review): `path2`, `model` and `loadModelToZmk` are defined earlier in
# the script, outside this excerpt — confirm against the full file.
print('\n>>>> PMML generated\n')
resp = loadModelToZmk(path2)
print(resp)
# `keytoModel` is presumably the storage key of the loaded model; a falsy
# value means the load failed, so abort the whole script.
if not resp['keytoModel']:
    print('\n>>>> Model load failed\n')
    import sys
    sys.exit(1)
print('\n>>>> Model load successful\n')

# Reconstruct the Keras model directly from the PMML for a round-trip check.
from nyokaBase.keras import pmml_to_keras_model as PMMLK
import nyokaBase.PMML43Ext as nyc
newpmmlObj = nyc.parse(path2, silence=True)
newpmmlModel = PMMLK.GenerateKerasModel(newpmmlObj)
print('\n>>>> Model reconstruction success\n')
newpmmlModel = newpmmlModel.model

# Compare original vs reconstructed model layer by layer.
mLayers = model.layers
mLayers2 = newpmmlModel.layers
weightsInfo = []
from tqdm import tqdm
for mL, mL2 in tqdm(zip(mLayers, mLayers2)):
    # tp/tp2 hold the per-layer weight arrays of original vs reconstruction.
    # NOTE(review): the comparison body appears truncated in this excerpt —
    # `weightsInfo` is built up further on; verify against the full script.
    tp = mL.get_weights()
    tp2 = mL2.get_weights()
def pmmlToJson(self, filePath):
    """Convert a deep-network PMML file into the UI's JSON architecture list.

    Reads the first DeepNetwork of the PMML, maps every NetworkLayer onto the
    corresponding layer template from ``MEMORY_OF_LAYERS``, folds consecutive
    layers that share a ``sectionId`` into Section ("FOLDING") nodes, and
    prepends Data/Code nodes taken from the Header extension and scripts.

    Returns a list of dicts (the front-end architecture representation).
    """
    pmmlObj = ny.parse(filePath, silence=True)
    pmmlDictObj = pmmlObj.__dict__
    overAll = []
    deepObject = pmmlDictObj['DeepNetwork'][0]
    listOfNetworkLayer = deepObject.NetworkLayer
    # Pass 1: flatten each NetworkLayer into netParam (layer-level attrs),
    # layerParam (LayerParameters attrs) and its sectionId.
    for lay in listOfNetworkLayer:
        networkDict = lay.__dict__
        tempDict = {}
        tempDict['layerParam'] = {}
        tempDict['netParam'] = {}
        for j in networkDict:
            if networkDict[j] is not None:
                # Skip generated-parser bookkeeping and heavyweight children.
                if j not in [
                        'original_tagname_', 'LayerWeights',
                        'LayerParameters', 'Extension', 'LayerBias'
                ]:
                    tempDict['netParam'][j] = networkDict[j]
        layerDict = networkDict['LayerParameters'].__dict__
        for kk in layerDict:
            if layerDict[kk] is not None:
                if kk not in ['original_tagname_', 'Extension']:
                    # Values are stored as strings; try to revive tuples/lists,
                    # fall back to the raw value on any parse failure.
                    try:
                        evalVal = list(ast.literal_eval(layerDict[kk]))
                    except:
                        evalVal = layerDict[kk]
                    tempDict['layerParam'][kk] = evalVal
        # Normalise 'trainable' to a real bool.
        tempDict['layerParam']['trainable'] = False if layerDict[
            'trainable'] == False else True
        # The first Extension carries a dict-literal with the sectionId.
        if len(networkDict['Extension']) > 0:
            ttt = networkDict['Extension'][0]
            sectionVal = ttt.get_value()
            import ast
            tempDict['sectionId'] = ast.literal_eval(
                sectionVal)['sectionId']
        else:
            tempDict['sectionId'] = None
        overAll.append(tempDict)
    # Pass 2: merge each flattened layer into a deep copy of its UI template.
    # NOTE(review): MEMORY_OF_LAYERS appears to be a module-level registry of
    # layer templates — confirm its shape against its definition.
    allLayers = MEMORY_OF_LAYERS['layerinfo'][0]['layers']
    listOFLayersName = [
        j['name'] for j in MEMORY_OF_LAYERS['layerinfo'][0]['layers']
    ]
    architecture = []
    for tempLay in overAll:
        tempSpace = copy.deepcopy(allLayers[listOFLayersName.index(
            tempLay['netParam']['layerType'])])
        layerPARA = tempLay['layerParam']
        netWorkPARA = tempLay['netParam']
        for j in netWorkPARA:
            try:
                tempSpace[j] = netWorkPARA[j]
            except:
                pass
        # Push layer parameter values into the template's 'properties' list.
        for k in layerPARA:
            for k2 in tempSpace['properties']:
                if k2['id'] == k:
                    k2['value'] = layerPARA[k]
        try:
            tempSpace['sectionId'] = tempLay['sectionId']
        except:
            pass
        tempSpace['trainable'] = layerPARA['trainable']
        architecture.append(tempSpace)
    # Pass 3: fold consecutive layers sharing a sectionId into Section nodes.
    forLoopSection = [j['sectionId'] for j in architecture]
    tempSection = {
        'children': [],
        'class': 'wide',
        'icon': 'mdi mdi-group',
        'id': '',
        'itemType': 'FOLDING',
        'layerId': 'Section',
        'layerIndex': '',
        'name': 'Section',
        'sectionId': '',
        "sectionCollapse": True
    }
    newarchitecture = []
    tempSectionA = copy.deepcopy(tempSection)
    for num, secInfo in enumerate(forLoopSection):
        if secInfo is None:
            newarchitecture.append(architecture[num])
        else:
            if (num + 1 < len(forLoopSection)) and (
                    forLoopSection[num] == forLoopSection[num + 1]):
                # Same section continues: keep accumulating children.
                tempSectionA['children'].append(architecture[num])
            else:
                # Section ends here: finalise and emit it.
                tempSectionA['children'].append(architecture[num])
                tempSectionA['sectionId'] = secInfo
                tempSectionA['layerId'] = 'Section_' + str(num)
                tempSectionA['name'] = 'Section_' + str(num)
                newarchitecture.append(tempSectionA)
                tempSectionA = copy.deepcopy(tempSection)
    hd = pmmlDictObj['Header']
    scrptVal = pmmlDictObj['script']
    import ast, pathlib
    # Pass 4: prepend a Data node (from the Header extension's dataUrl) and
    # Code nodes (one per script). On any failure, fall back to inserting
    # Code nodes only.
    try:
        hf = hd.Extension[0]
        headerExt = hf.get_value()
        try:
            dataUrl = ast.literal_eval(headerExt)
        except:
            dataUrl = headerExt
        fObj = pathlib.Path(dataUrl['dataUrl'])
        try:
            print('DataURL found')
            dataCon = {
                'icon': 'mdi mdi-database-plus',
                'id': 'NNN',
                'itemType': 'DATA',
                'layerId': fObj.name,
                'layerIndex': 0,
                'name': 'Data',
                'url': dataUrl
            }
            newarchitecture.insert(0, dataCon)
            for counT, sc in enumerate(scrptVal):
                import pathlib
                scriptUrl = sc.class_
                useForSc = sc.for_
                fObjScrpt = pathlib.Path(scriptUrl)
                scriptCon = {
                    "name": "Code",
                    "icon": "mdi mdi-code-braces",
                    "itemType": "CODE",
                    "layerId": fObjScrpt.name,
                    'url': scriptUrl,
                    "layerIndex": "NA",
                    'useFor': useForSc
                }
                # +1 keeps Code nodes after the Data node at index 0.
                newarchitecture.insert(counT + 1, scriptCon)
        except:
            pass
    except Exception as e:
        # No usable Header extension: insert the Code nodes from index 0.
        for counT, sc in enumerate(scrptVal):
            scriptUrl = sc.class_
            import pathlib
            fObjScrpt = pathlib.Path(scriptUrl)
            print('>>>>>>>>>>>>>>>', scriptUrl)
            scriptCon = {
                "name": "Code",
                "icon": "mdi mdi-code-braces",
                "itemType": "CODE",
                "layerId": fObjScrpt.name,
                'url': scriptUrl,
                "layerIndex": "NA"
            }
            newarchitecture.insert(counT, scriptCon)
        print(e, 'some error occured')
    # Pass 5: assign final layerIndex values and random 12-char ids.
    for num, i in enumerate(newarchitecture):
        if i['itemType'] == 'FOLDING':
            i['layerIndex'] = num
            i['id'] = ''.join(choice(ascii_uppercase) for i in range(12))
            for num2, j in enumerate(i['children']):
                j['layerIndex'] = num2
                j['id'] = ''.join(
                    choice(ascii_uppercase) for i in range(12))
        else:
            i['layerIndex'] = num
            i['id'] = ''.join(choice(ascii_uppercase) for i in range(12))
    return newarchitecture
def pmmlToJson(self, filePath):
    """Convert a PMML file into the UI's JSON architecture list.

    Two paths:
    * ``get_type() == 'multi'`` (workflow PMML): load the execution model,
      restructure its info via ``TrainingViewModels``, and build one Section
      node per sub-model containing Data / pre-processing Code / Model /
      post-processing Code children.
    * otherwise (single deep-network PMML): flatten NetworkLayers onto the
      templates in ``MEMORY_OF_LAYERS``, fold consecutive layers sharing a
      sectionId into Section nodes, and prepend Data/Code nodes.

    Returns a list of dicts (the front-end architecture representation).
    """
    pmmlObj = ny.parse(filePath, silence=True)
    pmmlDictObj = pmmlObj.__dict__
    if pmmlObj.get_type() == 'multi':
        print('came to Workflow')
        import pathlib
        from trainModel.mergeTrainingV2 import TrainingViewModels
        pmmlFileObj = pathlib.Path(filePath)
        # Storage key = file name without its extension.
        pmmlFileForKey = pmmlFileObj.name.replace(pmmlFileObj.suffix, '')
        from trainModel.mergeTrainingV2 import NewModelOperations
        # Side effect: populates PMMLMODELSTORAGE[pmmlFileForKey].
        NewModelOperations().loadExecutionModel(filePath)
        modelInformation = PMMLMODELSTORAGE[pmmlFileForKey]
        toexp = TrainingViewModels().restructureModelInforForExportDict(
            modelInformation)
        import copy, json
        # UI node templates; deep-copied per use.
        tempSec = {
            "name": "Section",
            "layerId": "Section",
            "children": [],
            "itemType": "FOLDING",
            "icon": "mdi mdi-group",
            "class": "wide",
            "modelType": "Workflow",
            "id": "id",
            "sectionId": "modName",
            "layerIndex": None,
            'connectionLayerId': None
        }
        tempData = {
            "name": "Data",
            "icon": "mdi mdi-database-plus",
            "itemType": "DATA",
            "layerId": None,
            "trainable": False,
            "modelType": "Workflow",
            "id": None,
            "layerIndex": None,
            "connectionLayerId": None,
            "url": None,
            "filePath": None
        }
        tempCode = {
            "name": "Code",
            "icon": "mdi mdi-code-braces",
            "itemType": "CODE",
            "layerId": None,
            "trainable": False,
            "modelType": "Workflow",
            "id": "K2PVI4HZ3NBGF",
            "layerIndex": None,
            "connectionLayerId": None,
            "url": None,
            "filePath": None,
            "taskType": None,
            "scriptOutput": None,
            "scriptPurpose": None
        }
        tempModel = {
            "name": "Model",
            "icon": "mdi mdi-xml",
            "itemType": "MODEL",
            "layerId": None,
            "trainable": False,
            "modelType": "Workflow",
            "id": None,
            "layerIndex": None,
            "connectionLayerId": None,
            "url": None,
            "filePath": None,
            "taskType": None
        }
        # NOTE(review): `toexp` maps model-key -> dict with 'data',
        # 'preProcessingScript', 'modelPath', 'taskType',
        # 'postProcessingScript', ... — verify against
        # restructureModelInforForExportDict.
        workflowArch = []
        for modTemp in list(toexp.keys()):
            temSecCop = copy.deepcopy(tempSec)
            temSecCop['sectionId'] = modTemp
            temSecCop["layerId"] = modTemp
            # Optional Data child.
            if toexp[modTemp]['data'] != None:
                dataInfo = copy.deepcopy(tempData)
                import pathlib
                fileName = pathlib.Path(toexp[modTemp]['data']).name
                dataInfo['layerId'] = fileName
                dataInfo['url'] = '/Data/' + fileName
                dataInfo['filePath'] = toexp[modTemp]['data']
                temSecCop['children'].append(dataInfo)
            # Pre-processing Code children.
            for numSc, sC in enumerate(
                    toexp[modTemp]['preProcessingScript']['scriptPath']):
                codeInfo = copy.deepcopy(tempCode)
                fileName = pathlib.Path(
                    toexp[modTemp]['preProcessingScript']['scriptPath']
                    [numSc]).name
                codeInfo['layerId'] = fileName
                codeInfo['url'] = '/Code/' + fileName
                codeInfo['filePath'] = toexp[modTemp][
                    'preProcessingScript']['scriptPath'][numSc]
                codeInfo['taskType'] = 'PREPROCESSING'
                codeInfo['scriptOutput'] = toexp[modTemp][
                    'preProcessingScript']['scriptOutput'][numSc]
                codeInfo['scriptPurpose'] = toexp[modTemp][
                    'preProcessingScript']['scriptpurpose'][numSc]
                temSecCop['children'].append(codeInfo)
            # The Model child itself.
            modtempC = copy.deepcopy(tempModel)
            fileName = pathlib.Path(toexp[modTemp]['modelPath']).name
            modtempC['layerId'] = fileName
            modtempC['url'] = '/Model/' + fileName
            modtempC['filePath'] = toexp[modTemp]['modelPath']
            modtempC['taskType'] = toexp[modTemp]['taskType']
            temSecCop['children'].append(modtempC)
            # Post-processing Code children.
            for numSc, sC in enumerate(
                    toexp[modTemp]['postProcessingScript']['scriptPath']):
                codeInfo = copy.deepcopy(tempCode)
                fileName = pathlib.Path(
                    toexp[modTemp]['postProcessingScript']['scriptPath']
                    [numSc]).name
                codeInfo['layerId'] = fileName
                codeInfo['url'] = '/Code/' + fileName
                codeInfo['filePath'] = toexp[modTemp][
                    'postProcessingScript']['scriptPath'][numSc]
                codeInfo['taskType'] = 'POSTPROCESSING'
                codeInfo['scriptOutput'] = toexp[modTemp][
                    'postProcessingScript']['scriptOutput'][numSc]
                codeInfo['scriptPurpose'] = toexp[modTemp][
                    'postProcessingScript']['scriptpurpose'][numSc]
                temSecCop['children'].append(codeInfo)
            workflowArch.append(temSecCop)
        # Final pass: assign layerIndex values and random 12-char ids.
        from random import choice
        from string import ascii_uppercase
        for num, i in enumerate(workflowArch):
            if i['itemType'] == 'FOLDING':
                i['layerIndex'] = num
                i['id'] = ''.join(
                    choice(ascii_uppercase) for i in range(12))
                for num2, j in enumerate(i['children']):
                    j['layerIndex'] = num2
                    j['id'] = ''.join(
                        choice(ascii_uppercase) for i in range(12))
            else:
                i['layerIndex'] = num
                i['id'] = ''.join(
                    choice(ascii_uppercase) for i in range(12))
        return workflowArch
    else:
        # Single deep-network PMML: flatten each NetworkLayer.
        overAll = []
        deepObject = pmmlDictObj['DeepNetwork'][0]
        listOfNetworkLayer = deepObject.NetworkLayer
        for lay in listOfNetworkLayer:
            networkDict = lay.__dict__
            tempDict = {}
            tempDict['layerParam'] = {}
            tempDict['netParam'] = {}
            for j in networkDict:
                if networkDict[j] is not None:
                    # Skip parser bookkeeping and heavyweight children.
                    if j not in [
                            'original_tagname_', 'LayerWeights',
                            'LayerParameters', 'Extension', 'LayerBias'
                    ]:
                        tempDict['netParam'][j] = networkDict[j]
            layerDict = networkDict['LayerParameters'].__dict__
            for kk in layerDict:
                if layerDict[kk] is not None:
                    if kk not in [
                            'original_tagname_', 'Extension'
                    ]:
                        # Revive stringified tuples/lists when possible.
                        try:
                            evalVal = list(
                                ast.literal_eval(layerDict[kk]))
                        except:
                            evalVal = layerDict[kk]
                        tempDict['layerParam'][kk] = evalVal
            # Normalise 'trainable' to a real bool.
            tempDict['layerParam'][
                'trainable'] = False if layerDict[
                    'trainable'] == False else True
            # First Extension carries a dict-literal with the sectionId.
            if len(networkDict['Extension']) > 0:
                ttt = networkDict['Extension'][0]
                sectionVal = ttt.get_value()
                import ast
                tempDict['sectionId'] = ast.literal_eval(
                    sectionVal)['sectionId']
            else:
                tempDict['sectionId'] = None
            overAll.append(tempDict)
        # Merge flattened layers into deep copies of their UI templates.
        allLayers = MEMORY_OF_LAYERS['layerinfo'][0]['layers']
        listOFLayersName = [
            j['name'] for j in MEMORY_OF_LAYERS['layerinfo'][0]['layers']
        ]
        architecture = []
        for tempLay in overAll:
            import copy
            tempSpace = copy.deepcopy(allLayers[listOFLayersName.index(
                tempLay['netParam']['layerType'])])
            layerPARA = tempLay['layerParam']
            netWorkPARA = tempLay['netParam']
            for j in netWorkPARA:
                try:
                    tempSpace[j] = netWorkPARA[j]
                except:
                    pass
            for k in layerPARA:
                for k2 in tempSpace['properties']:
                    if k2['id'] == k:
                        k2['value'] = layerPARA[k]
            try:
                tempSpace['sectionId'] = tempLay['sectionId']
            except:
                pass
            tempSpace['trainable'] = layerPARA['trainable']
            architecture.append(tempSpace)
        # Fold consecutive layers sharing a sectionId into Section nodes.
        forLoopSection = [j['sectionId'] for j in architecture]
        tempSection = {
            'children': [],
            'class': 'wide',
            'icon': 'mdi mdi-group',
            'id': '',
            'itemType': 'FOLDING',
            'layerId': 'Section',
            'layerIndex': '',
            'name': 'Section',
            'sectionId': '',
            "sectionCollapse": True
        }
        import copy
        newarchitecture = []
        tempSectionA = copy.deepcopy(tempSection)
        for num, secInfo in enumerate(forLoopSection):
            if secInfo is None:
                newarchitecture.append(architecture[num])
            else:
                if (num + 1 < len(forLoopSection)) and (
                        forLoopSection[num] == forLoopSection[num + 1]):
                    # Same section continues: accumulate.
                    tempSectionA['children'].append(architecture[num])
                else:
                    # Section ends: finalise and emit.
                    tempSectionA['children'].append(architecture[num])
                    tempSectionA['sectionId'] = secInfo
                    tempSectionA['layerId'] = 'Section_' + str(num)
                    tempSectionA['name'] = 'Section_' + str(num)
                    newarchitecture.append(tempSectionA)
                    tempSectionA = copy.deepcopy(tempSection)
        hd = pmmlDictObj['Header']
        scrptVal = pmmlDictObj['script']
        DataVal = pmmlDictObj['Data']
        import ast, pathlib
        # Prepend a Data node (from Data[0].filePath) and Code nodes (one
        # per script); on failure insert Code nodes only.
        try:
            try:
                dataUrl = DataVal[0].filePath
            except:
                # Sentinel meaning "no usable data path".
                dataUrl = 'Some issue'
            print('$$$$$$$$$$$$$$$$$$$$$$', dataUrl)
            if dataUrl != 'Some issue':
                fObj = pathlib.Path(dataUrl)
                dataCon = {
                    'icon': 'mdi mdi-database-plus',
                    'id': 'NNN',
                    'itemType': 'DATA',
                    'layerId': fObj.name,
                    'layerIndex': 0,
                    'name': 'Data',
                    'url': dataUrl
                }
                newarchitecture.insert(0, dataCon)
                for counT, sc in enumerate(scrptVal):
                    import pathlib
                    scriptPurpose = sc.scriptPurpose
                    modelVal = sc.for_
                    classVal = sc.class_
                    filePathUrl = sc.filePath
                    fObjScrpt = pathlib.Path(filePathUrl)
                    scriptCon = {
                        "name": "Code",
                        "icon": "mdi mdi-code-braces",
                        "itemType": "CODE",
                        "modelFor": modelVal,
                        "layerId": fObjScrpt.name,
                        "scriptPurpose": scriptPurpose,
                        'url': filePathUrl,
                        "layerIndex": "NA",
                        'useFor': classVal
                    }
                    # +1 keeps Code nodes after the Data node at index 0.
                    newarchitecture.insert(counT + 1, scriptCon)
            else:
                pass
        except Exception as e:
            for counT, sc in enumerate(scrptVal):
                scriptUrl = sc.class_
                import pathlib
                fObjScrpt = pathlib.Path(scriptUrl)
                scriptCon = {
                    "name": "Code",
                    "icon": "mdi mdi-code-braces",
                    "itemType": "CODE",
                    "layerId": fObjScrpt.name,
                    'url': scriptUrl,
                    "layerIndex": "NA"
                }
                newarchitecture.insert(counT, scriptCon)
            print(e, 'some error occured')
        # Final pass: assign layerIndex values and random 12-char ids.
        for num, i in enumerate(newarchitecture):
            if i['itemType'] == 'FOLDING':
                i['layerIndex'] = num
                i['id'] = ''.join(
                    choice(ascii_uppercase) for i in range(12))
                for num2, j in enumerate(i['children']):
                    j['layerIndex'] = num2
                    j['id'] = ''.join(
                        choice(ascii_uppercase) for i in range(12))
            else:
                i['layerIndex'] = num
                from random import choice
                from string import ascii_uppercase
                i['id'] = ''.join(
                    choice(ascii_uppercase) for i in range(12))
        return newarchitecture
def loadPMMLmodel(self, filepath, idforData=None):
    """Load a PMML model into the global PMMLMODELSTORAGE registry.

    Dispatches on the PMML content: Mask-RCNN (DeepNetwork with a 'config'
    extension), plain Keras deep network, or an sklearn pipeline.

    Parameters
    ----------
    filepath : str
        Path to the PMML file.
    idforData : optional
        Unused in this body — presumably kept for caller compatibility.

    Returns
    -------
    tuple
        (pmmlName, 'Success', modelType) or (pmmlName, 'Failure', None).
    """
    def readScriptFromPMML(scrptCode, useForVal):
        # Return the de-indented body of the script whose for_ matches
        # useForVal, or None when absent.
        code = None
        for num, sc in enumerate(scrptCode):
            useFor = sc.for_
            if useFor == useForVal:
                scripCode = sc.get_valueOf_()
                code = scripCode.lstrip('\n')
                lines = []
                code = scripCode.lstrip('\n')
                # Strip the common leading indentation of the first line
                # from every line.
                leading_spaces = len(code) - len(code.lstrip(' '))
                for line in code.split('\n'):
                    lines.append(line[leading_spaces:])
                code = '\n'.join(lines)
        return code

    global PMMLMODELSTORAGE
    try:
        print('step 1', filepath)
        # Model key = file name without extension.
        pmmlName = os.path.basename(filepath).split('.')[0]
        nyoka_pmml_obj = ny.parse(filepath, True)
        pmmlObj = nyoka_pmml_obj.__dict__
        # A DeepNetwork whose first extension is named 'config' marks a
        # Mask-RCNN model.
        try:
            checkMRCNN = nyoka_pmml_obj.DeepNetwork[0].Extension[
                0].name == 'config'
        except:
            checkMRCNN = False
        # --- Mask-RCNN model ---
        if (nyoka_pmml_obj.DeepNetwork) and (checkMRCNN == True):
            from nyokaBase.mrcnn import pmml_to_maskrcnn
            from nyokaBase.mrcnn import model as modellib
            predClasses = self.getPredClasses(nyoka_pmml_obj)
            # Weights are round-tripped through a temp folder because
            # modellib.MaskRCNN loads from an .h5 file.
            modelFolder = './logs/MaskRCNNWei_' + ''.join(
                choice(ascii_uppercase) for i in range(12)) + '/'
            self.checkCreatePath(modelFolder)
            model_graph = Graph()
            with model_graph.as_default():
                tf_session = Session()
                with tf_session.as_default():
                    modelRecon = pmml_to_maskrcnn.GenerateMaskRcnnModel(
                        nyoka_pmml_obj)
                    weight_file = modelFolder + '/dumpedWeights.h5'
                    modelRecon.model.keras_model.save_weights(weight_file)
                    MODEL_DIR = modelFolder
                    model = modellib.MaskRCNN(mode="inference",
                                              model_dir=MODEL_DIR,
                                              config=modelRecon.config)
                    model.load_weights(weight_file, by_name=True)
                    model_graph = tf.get_default_graph()
            PMMLMODELSTORAGE[pmmlName] = {}
            PMMLMODELSTORAGE[pmmlName]['model'] = model
            PMMLMODELSTORAGE[pmmlName]['modelType'] = 'MRCNN'
            PMMLMODELSTORAGE[pmmlName]['model_graph'] = model_graph
            PMMLMODELSTORAGE[pmmlName]['predClasses'] = list(predClasses)
            PMMLMODELSTORAGE[pmmlName]['tf_session'] = tf_session
            modelType = 'MRCNN'
        # --- Plain deep-network (Keras) model ---
        elif nyoka_pmml_obj.DeepNetwork:
            hdInfo = pmmlObj['Header']
            # Header extension may carry a dict with e.g. 'scriptOutput';
            # failures leave hdExtDet unbound and are tolerated below.
            try:
                hdExtDet = ast.literal_eval(
                    hdInfo.Extension[0].get_value())
            except:
                pass
            print('step 2')
            predClasses = self.getPredClasses(nyoka_pmml_obj)
            print('step 3')
            newNet = nyoka_pmml_obj.DeepNetwork[0]
            print('step 4')
            scrptCode = nyoka_pmml_obj.script
            # Pre/post-processing scripts are optional.
            try:
                preCode = readScriptFromPMML(scrptCode, 'TEST')
            except:
                preCode = None
            try:
                postCode = readScriptFromPMML(scrptCode,
                                              'POSTPROCESSING')
            except:
                postCode = None
            model_graph = Graph()
            with model_graph.as_default():
                tf_session = Session()
                with tf_session.as_default():
                    print('step 5')
                    from nyokaBase.keras.pmml_to_keras_model import GenerateKerasModel
                    print('step 5.1')
                    model_net = GenerateKerasModel(nyoka_pmml_obj)
                    print('step 5.2')
                    model = model_net.model
                    model_graph = tf.get_default_graph()
            print('step 6')
            inputShapevals = [
                inpuShape.value
                for inpuShape in list(model.input.shape)
            ]
            PMMLMODELSTORAGE[pmmlName] = {}
            PMMLMODELSTORAGE[pmmlName]['model'] = model
            PMMLMODELSTORAGE[pmmlName]['predClasses'] = predClasses
            PMMLMODELSTORAGE[pmmlName]['preProcessScript'] = preCode
            PMMLMODELSTORAGE[pmmlName]['postProcessScript'] = postCode
            try:
                PMMLMODELSTORAGE[pmmlName]['scriptOutput'] = hdExtDet[
                    'scriptOutput']
            except:
                PMMLMODELSTORAGE[pmmlName]['scriptOutput'] = ''
            print('step 7')
            try:
                PMMLMODELSTORAGE[pmmlName]['inputShape'] = inputShapevals
            except:
                PMMLMODELSTORAGE[pmmlName]['inputShape'] = 'CheckSomeissue'
            PMMLMODELSTORAGE[pmmlName]['status'] = 'loaded'
            PMMLMODELSTORAGE[pmmlName]['model_graph'] = model_graph
            PMMLMODELSTORAGE[pmmlName]['tf_session'] = tf_session
            PMMLMODELSTORAGE[pmmlName]['modelType'] = 'kerasM'
            modelType = 'kerasM'
        # --- sklearn pipeline model ---
        else:
            print('Next Step 2 >>>>>>>>>>>>')
            from nyokaBase.skl.pmml_to_skl import pmml_to_skl
            print('Next Step 3 >>>>>>>>>>>>')
            sklModelPipeline = pmml_to_skl(filepath)
            print('Next Step 4 >>>>>>>>>>>>')
            # Classifiers expose classes_; regressors get an empty list.
            if hasattr(sklModelPipeline.steps[-1][-1], 'classes_'):
                predClasses = sklModelPipeline.steps[-1][-1].classes_
            else:
                predClasses = []
            print('Next Step >>>>>>>>>>>>')
            targetVar = self.getTargetVariable(nyoka_pmml_obj)
            PMMLMODELSTORAGE[pmmlName] = {}
            PMMLMODELSTORAGE[pmmlName]['model'] = sklModelPipeline
            PMMLMODELSTORAGE[pmmlName]['predClasses'] = list(predClasses)
            PMMLMODELSTORAGE[pmmlName]['targetVar'] = targetVar
            PMMLMODELSTORAGE[pmmlName]['modelType'] = 'sklearnM'
            modelType = 'sklearnM'
        return (pmmlName, 'Success', modelType)
    except Exception as e:
        print(str(e))
        import traceback
        print(str(traceback.format_exc()))
        # NOTE(review): if the failure happened before pmmlName was
        # assigned (e.g. filepath is None), this raises NameError — confirm
        # callers always pass a valid path.
        return (pmmlName, 'Failure', None)
def train(self,pmmlFile,dataFolder,fileName,tensorboardLogFolder,lossType,listOfMetrics,batchSize,epoch,\
    stepsPerEpoch,idforData,testSize,problemType,scriptOutput,optimizerName,learningRate):
    """Validate the PMML, pick a training routine, and launch it in a
    separate process.

    On any validation failure, write a status dict ('Training Failed' plus
    errorMessage/errorTraceback) to the run's status.txt and return -1.
    On success, return the spawned process's ident (its pid).
    """
    saveStatus = self.logFolder + idforData + '/'
    self.statusFile = saveStatus + 'status.txt'
    # Step 1: parse the PMML.
    try:
        self.pmmlfileObj = ny.parse(pmmlFile, silence=True)
    except Exception as e:
        data_details = self.upDateStatus()
        data_details['status'] = 'Training Failed'
        data_details[
            'errorMessage'] = 'Error while parsing the PMML file >> ' + str(
                e)
        data_details['errorTraceback'] = traceback.format_exc()
        with open(self.statusFile, 'w') as filetosave:
            json.dump(data_details, filetosave)
        return -1
    self.pmmlObj = self.pmmlfileObj.__dict__
    # Step 2: extract the Header extension (a dict-literal with metadata
    # such as dataUrl / scriptOutput).
    try:
        hdInfo = self.pmmlObj['Header']
        self.hdExtDet = ast.literal_eval(hdInfo.Extension[0].get_value())
    except Exception as e:
        data_details = self.upDateStatus()
        data_details['status'] = 'Training Failed'
        data_details[
            'errorMessage'] = 'Error while extracting Header information from the PMML file >> ' + str(
                e)
        data_details['errorTraceback'] = traceback.format_exc()
        with open(self.statusFile, 'w') as filetosave:
            json.dump(data_details, filetosave)
        return -1
    # Step 3: optionally override scriptOutput in the header metadata.
    if scriptOutput is None:
        pass
    else:
        try:
            self.hdExtDet['scriptOutput'] = scriptOutput
        except Exception as e:
            data_details = self.upDateStatus()
            data_details['status'] = 'Training Failed'
            data_details[
                'errorMessage'] = 'scriptOutput is not found in the PMML Header >> ' + str(
                    e)
            data_details['errorTraceback'] = traceback.format_exc()
            with open(self.statusFile, 'w') as filetosave:
                json.dump(data_details, filetosave)
            return -1
    # Step 4: the data location must be present in the header metadata.
    try:
        self.pathOfData = self.hdExtDet['dataUrl']
    except Exception as e:
        data_details = self.upDateStatus()
        data_details['status'] = 'Training Failed'
        data_details[
            'errorMessage'] = 'dataUrl is not found in the PMML Header >> ' + str(
                e)
        data_details['errorTraceback'] = traceback.format_exc()
        with open(self.statusFile, 'w') as filetosave:
            json.dump(data_details, filetosave)
        return -1
    # Step 5: choose the training routine.
    # directory -> image classifier; scripts with IMAGE output -> custom NN;
    # everything else -> simple DNN.
    if os.path.isdir(self.pathOfData):
        target = self.trainImageClassifierNN
    else:
        if self.pmmlObj['script'] == []:
            target = self.trainSimpleDNN
        else:
            if scriptOutput == 'IMAGE':
                target = self.trainCustomNN
            else:
                target = self.trainSimpleDNN
    # Step 6: run training in a child process so this call returns
    # immediately; progress is reported via status.txt.
    try:
        train_prc = Process(target=target,args=(pmmlFile,self.pathOfData,fileName,tensorboardLogFolder,lossType,\
            listOfMetrics,batchSize,epoch,stepsPerEpoch,idforData,testSize,problemType,scriptOutput,\
            optimizerName,learningRate))
        train_prc.start()
    except Exception as e:
        data_details = self.upDateStatus()
        data_details['status'] = 'Training Failed'
        data_details[
            'errorMessage'] = "Couldn't start the training process >> " + str(
                e)
        data_details['errorTraceback'] = traceback.format_exc()
        with open(self.statusFile, 'w') as filetosave:
            json.dump(data_details, filetosave)
        return -1
    # Process.ident is an alias of the child pid.
    return train_prc.ident