Example #1
0
    def trainAllModel(self, modelName):
        """Start training *modelName* on a background thread and register the task.

        Creates a per-run log folder named by a random 12-character id, starts
        ``TrainingViewModels().trainModel`` on a Thread, then records a single
        task entry in ``RUNNING_TASK_MEMORY`` and in the run's ``status.txt``.

        Bug fixed: the original appended a placeholder record (pid 0) to
        RUNNING_TASK_MEMORY and wrote it to status.txt, slept 5 seconds, then
        appended a second record after the thread started — leaving a duplicate
        task entry in memory and blocking the request. The placeholder write,
        duplicate append, and sleep are removed.
        """
        # Random 12-character run id; also names the per-run log folder.
        idforData = ''.join(choice(ascii_uppercase) for i in range(12))
        saveStatus = logFolder + idforData + '/'
        kerasUtilities.checkCreatePath(saveStatus)
        statusfileLocation = saveStatus + 'status.txt'

        trainVieClassObj = TrainingViewModels()
        train_prc = Thread(target=trainVieClassObj.trainModel,
                           args=(
                               modelName,
                               statusfileLocation,
                           ))
        train_prc.start()
        # NOTE(review): Thread.ident is never -1; the 'Execution Failed' branch
        # below only matters if an upstream convention supplies -1 — confirm.
        pID = train_prc.ident
        tempRunMemory = {
            'idforData': idforData,
            'status': 'Execution Failed' if pID == -1 else 'In Progress',
            'createdOn': str(datetime.datetime.now()),
            'type': 'NNModel',
            'pid': pID
        }
        tempRunMemory['taskName'] = modelName
        RUNNING_TASK_MEMORY.append(tempRunMemory)
        with open(statusfileLocation, 'w') as filetosave:
            json.dump(tempRunMemory, filetosave)

        return JsonResponse(tempRunMemory)
Example #2
0
    def predictFolderDataInBatch(self, pmmlstroragepointer, filpath, numFile):
        """Score a folder of files on a background thread; return the task record."""
        from threading import Thread

        # Random 12-character run id names a dedicated log folder that holds
        # the status file; start it out as an empty JSON object.
        idforData = ''.join(choice(ascii_uppercase) for _ in range(12))
        saveStatus = './logs/' + idforData + '/'
        self.checkCreatePath(saveStatus)
        statusfileLocation = saveStatus + 'status.txt'
        with open(statusfileLocation, 'w') as filetosave:
            json.dump({}, filetosave)

        # Run the scoring in the background so the request returns immediately.
        scorer = Thread(target=self.predictFolderdata,
                        args=(pmmlstroragepointer, filpath, statusfileLocation))
        scorer.start()
        pID = scorer.ident

        tempRunMemory = {
            'idforData': idforData,
            'status': 'Scoring Failed' if pID == -1 else 'In Progress',
            'createdOn': str(datetime.datetime.now()),
            'type': 'BatchScore',
            'pid': pID,
            'newPMMLFileName': pmmlstroragepointer,
            'information': [{
                'property': 'No of files to be scored',
                'value': numFile
            }],
        }
        RUNNING_TASK_MEMORY.append(tempRunMemory)
        # Persist the same record so pollers of status.txt see it too.
        with open(statusfileLocation, 'w') as filetosave:
            json.dump(tempRunMemory, filetosave)
        return tempRunMemory
Example #3
0
    def trainNeuralNetworkModels(requests):
        """Parse a training request, launch NN training, and register the task.

        Reads hyper-parameters from the JSON request body (missing keys
        default to ''), creates a timestamped run folder under ``logFolder``,
        starts training via ``mergeTrainingNN.NeuralNetworkModelTrainer`` and
        returns the run details as a 202 JsonResponse.

        Fixed: the helper used a bare ``except:`` that swallowed every error,
        not just missing keys — replaced with ``dict.get``; dead commented-out
        code, debug prints, and a duplicated folder-creation pair removed.
        """
        bodyVal = json.loads(requests.body)

        def getValueFromReq(keyVal, bodyVal):
            # Only a missing key should fall back to '' — dict.get does
            # exactly that without masking unrelated errors.
            return bodyVal.get(keyVal, '')

        pmmlFile = getValueFromReq('filePath', bodyVal)
        tensorboardUrl = getValueFromReq('tensorboardUrl', bodyVal)
        tensorboardLogFolder = getValueFromReq('tensorboardLogFolder', bodyVal)
        hyperParaUser = {
            key: getValueFromReq(key, bodyVal)
            for key in ('batchSize', 'optimizer', 'loss', 'metrics', 'epoch',
                        'problemType', 'testSize', 'learningRate')
        }

        # Run id: unix timestamp + suffix; also names the log folder.
        idforData = str(int(time.time())) + '_NN'
        saveStatus = logFolder + idforData + '/'
        kerasUtilities.checkCreatePath(saveStatus)
        statusfileLocation = saveStatus + 'status.txt'

        fObjScrpt = pathlib.Path(pmmlFile)
        data_details = {
            'tensorboardUrl': tensorboardUrl,
            'idforData': idforData,
            'status': 'In Progress',
            # Task name = pmml file name without its extension.
            'taskName': fObjScrpt.name.replace(fObjScrpt.suffix, ''),
            'createdOn': str(datetime.datetime.now()),
            'type': 'NNProject',
            'problem_type': hyperParaUser['problemType'],
            'newPMMLFileName': pmmlFile,
        }

        nntrainer = mergeTrainingNN.NeuralNetworkModelTrainer()
        # NOTE(review): pmmlFile is passed twice, mirroring the original call —
        # confirm the fifth positional argument really is the pmml path.
        pID = nntrainer.train(idforData, pmmlFile, tensorboardLogFolder,
                              hyperParaUser, pmmlFile)

        data_details['pID'] = str(pID)
        with open(statusfileLocation, 'w') as filetosave:
            json.dump(data_details, filetosave)

        # A pid of -1 is this code base's convention for a failed spawn.
        if pID == -1:
            kerasUtilities.updateStatusOfTraining(statusfileLocation,
                                                  'Training Failed')

        # Register the run once; skip if this id is already tracked.
        runTemp = [task['idforData'] for task in RUNNING_TASK_MEMORY]
        if data_details['idforData'] not in runTemp:
            RUNNING_TASK_MEMORY.append(data_details)

        return JsonResponse(data_details, status=202)
Example #4
0
    def trainMRCNN(userInput):
        """Start Mask-RCNN training in a separate process and register the task.

        Expects 'filePath', 'dataFolder', 'epoch', 'stepPerEpoch' in
        *userInput*; 'tensorboardLogFolder' and 'tensorboardUrl' are optional.
        Returns run details as a 202 JsonResponse.

        Bug fixed: when 'dataFolder' was absent the original only printed a
        message and left the name unbound, crashing later with NameError.
        Bare excepts narrowed to KeyError; unused ``target_path`` removed.
        """
        pmmlFile = userInput['filePath']
        try:
            dataFolder = userInput['dataFolder']
        except KeyError:
            print('Get Data folder')
            # Bind a value so the function fails (if at all) inside the
            # trainer instead of with a NameError here.
            dataFolder = None

        try:
            tensorboardLogFolder = userInput['tensorboardLogFolder']
        except KeyError:
            # Fall back to a random per-run log folder.
            tensorboardLogFolder = './logs/' + ''.join(
                choice(ascii_uppercase) for i in range(12)) + '/'
            kerasUtilities.checkCreatePath(tensorboardLogFolder)

        epoch = userInput['epoch']
        stepsPerEpoch = userInput['stepPerEpoch']
        try:
            tensorboardUrl = userInput['tensorboardUrl']
        except KeyError:
            tensorboardUrl = ''

        # Run id derives from the pmml file name.
        idforData = os.path.basename(pmmlFile).replace('.pmml', '') + '_MRCNN'

        saveStatus = logFolder + idforData + '/'
        kerasUtilities.checkCreatePath(saveStatus)
        statusfileLocation = saveStatus + 'status.txt'

        data_details = {
            'pmmlFile': idforData,
            'dataFolder': dataFolder,
            'fileName': pmmlFile,
            'tensorboardLogFolder': tensorboardLogFolder,
            'tensorboardUrl': tensorboardUrl,
            'epoch': epoch,
            'stepsPerEpoch': stepsPerEpoch,
            'idforData': idforData,
            'status': 'Building Architecture',
        }

        with open(statusfileLocation, 'w') as filetosave:
            json.dump(data_details, filetosave)

        objtrainer = trainMaskRCNN.ObjectDetetctionModels()
        prc = Process(target=objtrainer.train,
                      args=(pmmlFile, dataFolder, statusfileLocation,
                            idforData, epoch, tensorboardLogFolder,
                            stepsPerEpoch))
        prc.start()
        pID = prc.ident

        data_details['pID'] = str(pID)

        if pID == -1:
            # -1 is the project convention for a failed spawn.
            kerasUtilities.updateStatusOfTraining(statusfileLocation,
                                                  'Training Failed')
        else:
            with open(statusfileLocation, 'w') as filetosave:
                json.dump(data_details, filetosave)

        # Register once per run id.
        runTemp = [task['idforData'] for task in RUNNING_TASK_MEMORY]
        if data_details['idforData'] not in runTemp:
            tempRunMemory = {
                'idforData': idforData,
                'status': 'Training Failed' if pID == -1 else 'In Progress',
                'createdOn': str(datetime.datetime.now()),
                'type': 'ObjectDetectionProject',
                'pid': pID,
                'newPMMLFileName': idforData + '.pmml'
            }
            tempRunMemory['taskName'] = tempRunMemory['newPMMLFileName']
            RUNNING_TASK_MEMORY.append(tempRunMemory)

        return JsonResponse(data_details, status=202)
Example #5
0
    def autoAnomalyModel(userInput):
        """Train an anomaly-detection AutoML model in a separate process.

        Pulls the cached dataset for ``userInput['idforData']`` from
        DATA_MEMORY_OBJS_SKLEARN, spawns ``trainAutoMLV2.AnomalyTrainer`` in a
        Process, writes the run details to the project's status file (guarded
        by a multiprocessing Lock shared with the trainer), registers the task
        and returns the details as a 202 JsonResponse.

        Fixed: bare excepts narrowed to KeyError; lock released in a
        ``finally``; directory creation made idempotent via ``exist_ok``.
        """
        global DATA_MEMORY_OBJS_SKLEARN
        # Unused downstream, but kept so a missing 'data' key still raises.
        paramToTrainModel = userInput['data']
        idforData = userInput['idforData']
        data = DATA_MEMORY_OBJS_SKLEARN[idforData]
        dataPath = userInput['filePath']
        # Only a missing key means "use the default" — narrowed from bare except.
        try:
            targetVar = userInput['target_variable']
        except KeyError:
            targetVar = None
        try:
            problem_type = userInput['problem_type']
        except KeyError:
            problem_type = None
        algorithms = userInput['parameters']['algorithm']
        try:
            newPMMLFileName = userInput['newPMMLFileName']
            if not newPMMLFileName.endswith('.pmml'):
                newPMMLFileName = newPMMLFileName + '.pmml'
        except KeyError:
            newPMMLFileName = idforData + '.pmml'

        projectName = idforData
        projectPath = logFolder + projectName
        dataFolder = projectPath + '/dataFolder/'

        # exist_ok makes reruns idempotent; creates projectPath too.
        os.makedirs(dataFolder, exist_ok=True)

        autoMLLock = Lock()
        trainer = trainAutoMLV2.AnomalyTrainer(algorithms=algorithms,
                                               problemType=problem_type)
        train_prc = Process(target=trainer.trainAnomalyModel,
                            args=(data, logFolder, newPMMLFileName, autoMLLock,
                                  userInput))
        train_prc.start()
        pID = train_prc.ident

        statusFile = dataFolder + 'status' + '.txt'
        data_details = {
            'pID': str(pID),
            'status': 'In Progress',
            'newPMMLFileName': newPMMLFileName,
            'targetVar': targetVar,
            'problem_type': problem_type,
            'idforData': idforData,
            'shape': data.shape,
        }
        import pathlib
        fVar = pathlib.Path(dataPath)
        # Task name = data file name without its extension.
        data_details['taskName'] = fVar.name.replace(fVar.suffix, '')

        # The trainer process writes the same file; serialize via the shared
        # lock, and release it even if the dump raises.
        autoMLLock.acquire()
        try:
            with open(statusFile, 'w') as filetosave:
                json.dump(data_details, filetosave)
        finally:
            autoMLLock.release()

        tempRunMemory = {
            'idforData': projectName,
            'status': 'In Progress',
            'type': 'AutoMLProject',
            'pid': pID,
            'createdOn': str(datetime.datetime.now()),
            'newPMMLFileName': newPMMLFileName.split('/')[-1]
        }
        tempRunMemory['taskName'] = data_details['taskName']
        RUNNING_TASK_MEMORY.append(tempRunMemory)

        return JsonResponse(data_details, status=202)
Example #6
0
    def autoMLtrainModel(userInput):
        """Train an AutoML model (classification or regression) in a Process.

        Pulls the cached dataset for ``userInput['idforData']`` from
        DATA_MEMORY_OBJS_SKLEARN, resolves the algorithm list (falling back to
        the full per-problem-type roster when absent, empty, or 'All'), spawns
        ``trainAutoMLV2.AutoMLTrainer`` and registers the run. Returns the run
        details as a 202 JsonResponse.

        Fixed: the original used ``raise Exception("")`` inside a try block as
        control flow to reach its bare-except default; rewritten as a plain
        condition. Remaining bare excepts narrowed to KeyError.
        """
        global DATA_MEMORY_OBJS_SKLEARN
        # Unused downstream, but kept so a missing 'data' key still raises.
        paramToTrainModel = userInput['data']
        idforData = userInput['idforData']
        data = DATA_MEMORY_OBJS_SKLEARN[idforData]
        dataPath = userInput['filePath']
        targetVar = userInput['target_variable']
        problem_type = userInput['problem_type']

        try:
            algorithms = userInput['parameters']['algorithm']
        except KeyError:
            algorithms = None
        # Absent, empty, or 'All' -> train the full roster for the problem type.
        if not algorithms or algorithms[0] == 'All':
            if problem_type == 'Regression':
                algorithms = ['ExtraTreeRegressor', 'GradientBoostingRegressor',
                              'DecisionTreeRegressor', 'LinearSVR',
                              'RandomForestRegressor', 'XGBRegressor',
                              'KNeighborsRegressor', 'LinearRegression',
                              'LGBMRegressor']
            else:
                algorithms = ['DecisionTreeClassifier', 'ExtraTreesClassifier',
                              'RandomForestClassifier',
                              'GradientBoostingClassifier',
                              'KNeighborsClassifier', 'LinearSVC',
                              'LogisticRegression', 'XGBClassifier',
                              'LGBMClassifier']

        try:
            newPMMLFileName = userInput['newPMMLFileName']
            if not newPMMLFileName.endswith('.pmml'):
                newPMMLFileName = newPMMLFileName + '.pmml'
        except KeyError:
            newPMMLFileName = idforData + '.pmml'

        projectName = idforData
        projectPath = logFolder + projectName
        dataFolder = projectPath + '/dataFolder/'
        tpotFolder = projectPath + '/tpotFolder/'

        # exist_ok makes reruns idempotent; creates projectPath too.
        os.makedirs(dataFolder, exist_ok=True)
        os.makedirs(tpotFolder, exist_ok=True)

        autoMLLock = Lock()
        trainer = trainAutoMLV2.AutoMLTrainer(algorithms=algorithms,
                                              problemType=problem_type)
        train_prc = Process(target=trainer.trainModel,
                            args=(data, logFolder, newPMMLFileName, autoMLLock,
                                  userInput))
        train_prc.start()
        pID = train_prc.ident

        statusFile = dataFolder + 'status' + '.txt'
        data_details = {
            'pID': str(pID),
            'status': 'In Progress',
            'newPMMLFileName': newPMMLFileName,
            'targetVar': targetVar,
            'problem_type': problem_type,
            'idforData': idforData,
            'shape': data.shape,
        }
        import pathlib
        fVar = pathlib.Path(dataPath)
        # Task name = data file name without its extension.
        data_details['taskName'] = fVar.name.replace(fVar.suffix, '')

        # The trainer process writes the same file; serialize via the shared
        # lock, and release it even if the dump raises.
        autoMLLock.acquire()
        try:
            with open(statusFile, 'w') as filetosave:
                json.dump(data_details, filetosave)
        finally:
            autoMLLock.release()

        tempRunMemory = {
            'idforData': projectName,
            'status': 'In Progress',
            'type': 'AutoMLProject',
            'pid': pID,
            'createdOn': str(datetime.datetime.now()),
            'newPMMLFileName': newPMMLFileName.split('/')[-1]
        }
        tempRunMemory['taskName'] = data_details['taskName']
        RUNNING_TASK_MEMORY.append(tempRunMemory)

        return JsonResponse(data_details, status=202)
Example #7
0
    def executeCode(filePath, params):
        """Run the Python script at *filePath* asynchronously, tracking status on disk."""

        def updateStatusOfExecution(filePath, updatedStatus, info_dict):
            # Merge the new status plus key/value pairs into the status file.
            with open(filePath, 'r') as sFile:
                data_details = json.loads(sFile.read())
            data_details['status'] = updatedStatus
            data_details['information'] = [
                {'property': key, 'value': val}
                for key, val in info_dict.items()
            ]
            if updatedStatus == 'Complete':
                data_details['completedOn'] = str(datetime.datetime.now())
            with open(filePath, 'w') as filetosave:
                json.dump(data_details, filetosave)
            return 'Success'

        def monitorThread(filePath, args, statusfileLocation):
            # Run the script with the current interpreter and wait for it,
            # then record stdout as success and stderr as failure.
            print(args)
            cmd = [sys.executable, filePath] + [str(a) for a in args]
            popen = subprocess.Popen(cmd,
                                     stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE)
            output, error = popen.communicate()
            if output:
                updateStatusOfExecution(statusfileLocation,
                                        'Complete',
                                        info_dict={'Output': output.decode('utf-8')})
            if error:
                updateStatusOfExecution(statusfileLocation,
                                        'Execution Failed',
                                        info_dict={'Error': error.decode('utf-8')})

        # Random 12-character run id; its folder holds the status file.
        idforData = ''.join(choice(ascii_uppercase) for _ in range(12))
        saveStatus = logFolder + idforData + '/'
        kerasUtilities.checkCreatePath(saveStatus)
        statusfileLocation = saveStatus + 'status.txt'
        with open(statusfileLocation, 'w') as filetosave:
            json.dump({}, filetosave)

        import threading
        runner = threading.Thread(target=monitorThread,
                                  args=(filePath, params, statusfileLocation))
        runner.start()
        pID = runner.ident

        import pathlib
        fileName = pathlib.Path(filePath).name.replace('.py', '')
        tempRunMemory = {
            'idforData': idforData,
            'status': 'Execution Failed' if pID == -1 else 'In Progress',
            'createdOn': str(datetime.datetime.now()),
            'type': 'Code',
            'pid': pID,
            'newPMMLFileName': fileName.split('/')[-1],
            'information': [{
                'property': 'Parameters',
                'value': params
            }],
        }
        tempRunMemory['taskName'] = tempRunMemory['newPMMLFileName']
        RUNNING_TASK_MEMORY.append(tempRunMemory)
        with open(statusfileLocation, 'w') as filetosave:
            json.dump(tempRunMemory, filetosave)
        return JsonResponse(tempRunMemory, status=200)
Example #8
0
    def trainNeuralNetworkModels(userInput):
        """Start NN training from a parsed request dict; return 202 details.

        Expects hyper-parameter keys ('loss', 'metrics', 'batchSize', 'epoch',
        'stepPerEpoch', 'problemType', 'testSize', 'scriptOutput', 'optimizer',
        'learningRate') plus 'filePath' in *userInput*; 'dataFolder',
        'tensorboardLogFolder' and 'tensorboardUrl' are optional.

        Fixed: a try/except whose fallback was byte-identical to its try body
        (fileName) collapsed to a plain assignment; bare excepts narrowed to
        KeyError; unused ``target_path`` local removed.
        """
        pmmlFile = userInput['filePath']
        try:
            dataFolder = userInput['dataFolder']
        except KeyError:
            # Fall back to a random per-run data folder.
            dataFolder = './logs/' + ''.join(
                choice(ascii_uppercase) for i in range(12)) + '/'
            kerasUtilities.checkCreatePath(dataFolder)

        # The original wrapped this in try/except with an identical fallback.
        fileName = userInput['filePath']

        try:
            tensorboardLogFolder = userInput['tensorboardLogFolder']
            print('Log folder came correct')
        except KeyError:
            print('Log folder has some issue')
            tensorboardLogFolder = './logs/' + ''.join(
                choice(ascii_uppercase) for i in range(12)) + '/'
            kerasUtilities.checkCreatePath(tensorboardLogFolder)

        lossType = userInput['loss']
        listOfMetrics = userInput['metrics']
        batchSize = userInput['batchSize']
        epoch = userInput['epoch']
        stepsPerEpoch = userInput['stepPerEpoch']
        problemType = userInput['problemType']
        testSize = userInput['testSize']
        scriptOutput = userInput['scriptOutput']
        optimizerName = userInput['optimizer']
        learningRate = userInput['learningRate']
        try:
            tensorboardUrl = userInput['tensorboardUrl']
        except KeyError:
            tensorboardUrl = ''

        # Run id derives from the pmml file name.
        idforData = os.path.basename(pmmlFile).replace('.pmml', '')

        saveStatus = logFolder + idforData + '/'
        kerasUtilities.checkCreatePath(saveStatus)
        statusfileLocation = saveStatus + 'status.txt'

        data_details = {
            'pmmlFile': idforData,
            'dataFolder': dataFolder,
            'fileName': fileName,
            'tensorboardLogFolder': tensorboardLogFolder,
            'tensorboardUrl': tensorboardUrl,
            'lossType': lossType,
            'listOfMetrics': listOfMetrics,
            'batchSize': batchSize,
            'epoch': epoch,
            'stepsPerEpoch': stepsPerEpoch,
            'problemType': problemType,
            'testSize': testSize,
            'scriptOutput': scriptOutput,
            'optimizerName': optimizerName,
            'learningRate': learningRate,
            'idforData': idforData,
            'status': 'In Progress',
            'taskName': pathlib.Path(fileName).name,
        }

        with open(statusfileLocation, 'w') as filetosave:
            json.dump(data_details, filetosave)

        nntrainer = mergeTrainingNN.NeuralNetworkModelTrainer()
        pID = nntrainer.train(pmmlFile, dataFolder, fileName,
                              tensorboardLogFolder, lossType, listOfMetrics,
                              batchSize, epoch, stepsPerEpoch, idforData,
                              testSize, problemType, scriptOutput,
                              optimizerName, learningRate)

        data_details['pID'] = str(pID)

        if pID == -1:
            # -1 is the project convention for a failed spawn.
            kerasUtilities.updateStatusOfTraining(statusfileLocation,
                                                  'Training Failed')
        else:
            with open(statusfileLocation, 'w') as filetosave:
                json.dump(data_details, filetosave)

        # Register once per run id.
        runTemp = [task['idforData'] for task in RUNNING_TASK_MEMORY]
        if data_details['idforData'] not in runTemp:
            tempRunMemory = {
                'idforData': idforData,
                'status': 'Training Failed' if pID == -1 else 'In Progress',
                'createdOn': str(datetime.datetime.now()),
                'type': 'NNProject',
                'pid': pID,
                'newPMMLFileName': fileName.split('/')[-1]
            }
            tempRunMemory['taskName'] = data_details['taskName']
            RUNNING_TASK_MEMORY.append(tempRunMemory)

        return JsonResponse(data_details, status=202)