def main():
    """Load candidate model definitions and saved RNN weights, then evaluate them.

    Finds the first ``fileModel`` CSV in the weights directory ('|'-separated,
    one JSON model spec per row under the ``model`` column), collects the
    matching weight files, and runs ``keras_ann.testModel`` on the full data set.
    """
    print("Initializing")
    myAnn = keras_ann()
    # NOTE(review): the original computed an expanduser-based path and then
    # immediately overwrote it with this hard-coded NFS path; only the
    # effective value is kept here.
    myloc = '/nfshome/gst2d/localstorage/kerasTimeSeries/myweights/rnn'
    myData = cnn_data(dataPath=os.path.expanduser('~') + "/eegData/")
    myloc += '/'

    # Pick the last 'fileModel' CSV, skipping editor backup files ('~').
    useCandidate = None
    for fname in listdir(myloc):
        if 'fileModel' in fname and '~' not in fname:
            useCandidate = myloc + str(fname)
    if useCandidate is None:
        # Original code would raise NameError here; fail with a clear message.
        raise FileNotFoundError(f"no 'fileModel' csv found in {myloc}")

    print(useCandidate)
    # Parse the CSV once (the original read the same file twice).
    mod = pd.read_csv(useCandidate, sep='|', header=0)
    print(mod.columns)
    modelArgs = [json.loads(row["model"]) for _, row in mod.iterrows()]
    print(f"Number of Models: {len(modelArgs)}")

    weights = []
    myAnn.getWeights(weights, myloc)
    print(weights)

    myAnn.updatePaths(outputPath=os.path.dirname(os.path.realpath(__file__)) + "/")
    myData.readData(fnames=inputData())
    myAnn.testModel(modelArgs, myData.data, myData.labels,
                    weights=weights, loadLoc=myloc)
def main():
    """Load one chosen model per frequency band and save its outputs per input file.

    For each band, picks one trained model (by ``modelChoice`` index), collects
    its weight file and JSON definition, then runs ``saveModelOutput`` over a
    fixed list of EEG input CSVs.
    """
    print("Initializing")
    myAnn = keras_ann()
    mySaveLoc = path.expanduser('~') + "/eegData/"
    myLoadLoc = path.expanduser('~') + "/localstorage/kerasTimeSeries/myweights/"
    myData = ann_data(dataPath=path.expanduser('~') + "/eegData/")

    allWeights = []
    models = []
    freqs = ['delta', 'theta', 'alpha', 'beta1', 'beta2']

    # There are several trained models per band; manually choose one by index.
    modelChoice = {freq: 0 for freq in freqs}
    modelChoice['delta'] = 1

    for freq in freqs:
        weights = []
        myAnn.getWeights(weights, myLoadLoc + freq)
        allWeights.append(myLoadLoc + freq + '/' + weights[modelChoice[freq]][0])
        for filename in listdir(myLoadLoc + freq):
            if 'Model' in filename:
                with open(myLoadLoc + freq + '/' + filename) as tfile:
                    # Skip ahead to the chosen model's line ('|'-separated,
                    # JSON spec in the last field).
                    for _ in range(modelChoice[freq]):
                        tfile.readline()
                    models.append(
                        json.loads(tfile.readline().split('|')[-1].strip()))

    t = "input081.csv,input091.csv,input011.csv,input162.csv,input002.csv,input171.csv,input151.csv,input031.csv,input041.csv,input152.csv,input142.csv,input101.csv,input042.csv,input012.csv,input032.csv,input112.csv,input161.csv,input001.csv,input082.csv,input172.csv".split(
        ",")
    for inputfile in t:
        print()
        print('=' * 16)
        print(inputfile)
        myData.readData(fnames=[inputfile])
        # NOTE(review): `freq` here is the leftover value from the band loop
        # above (always the last band, 'beta2'), so every file is normalized
        # with beta2's stats — confirm this is intentional. Behavior preserved.
        [normSTD, normMean] = myAnn.getNorm(myLoadLoc + freq + '/')
        myData.expandDims()
        myData.normalize(normSTD=normSTD, normMean=normMean)
        myAnn.saveModelOutput(models, myData.data, myData.labels,
                              weights=allWeights, saveLoc=mySaveLoc,
                              saveName='out' + inputfile, loadLoc='')
def main():
    """Collect candidate CNN models, record run parameters, then either
    train-and-save the models or run a cross-validated parameter search."""
    print("Initializing")
    ann = keras_ann()
    base_dir = os.path.expanduser('~') + "/kerasTimeSeries/"
    data = cnn_data(dataPath=os.path.expanduser('~') + "/eegData/")

    # Candidate file options (index selects):
    #                  0   1             2            3
    candidate_file = ['', 'topTwo.csv', 'topTen.csv', 'candidate.csv'][0]
    testing = True
    optimizeOptimizer = False
    save_model = False

    model_args = []
    print("Collecting Models")
    if not candidate_file:
        addToModels(model_args)

    ann.updatePaths(outputPath=os.path.dirname(os.path.realpath(__file__)) + "/")

    if testing:
        data.readData()
    else:
        data.readData(fnames=inputData())

    low_freq = high_freq = None
    data_files = ",".join(inputData())
    cv_folds = 0 if save_model else 10
    val_perc = 0.10
    n_epochs = 1 if save_model else 100
    if save_model:
        batch_size = 32
    else:
        batch_size = int(((data.record_count * (1 - val_perc)) / cv_folds) + 1)

    # Persist the run configuration alongside the results.
    with open("fileTrainTestParams.txt", 'w') as params:
        params.write(f"dataFiles: {data_files}\ncvFolds: {cv_folds}\n")
        params.write(f"validation_split: {val_perc}\nepoch: {n_epochs}\n")
        params.write(f"batchSize: {batch_size}\n")
        params.write(f"frequency: {low_freq} - {high_freq}\n")
        params.write(f"normSTD : {data.normSTD}\n")
        params.write(f"normMean : {data.normMean}")

    if save_model:
        ann.trainModel(model_args, data.data, data.labels, valSplit=val_perc,
                       epochs=n_epochs, batchSize=batch_size, visualize=False,
                       saveLoc=base_dir)
        return

    if testing:
        # Quick check: search only the first model.
        ann.parameterSearch(model_args[:1], data.data, data.labels, valSplit=0.10)
    else:
        ann.parameterSearch(model_args, data.data, data.labels,
                            numSplits=cv_folds, valSplit=val_perc,
                            epochs=n_epochs, batchSize=batch_size,
                            saveModel=save_model, visualize=False,
                            saveLoc=base_dir)
def main():
    """Run the saved-weights test for each frequency band named on the
    command line (defaults to 'theta' when none are given)."""
    bands = sys.argv[1:] if len(sys.argv) >= 2 else ['theta']
    print("Initializing")
    ann = keras_ann()
    base_dir = os.path.expanduser('~') + "/kerasTimeSeries/"
    data = ann_data(dataPath=os.path.expanduser('~') + "/eegData/")
    testing = False
    if testing:
        data.readData()
    else:
        data.readData(fnames=inputData())
    for band in bands:
        print(f"FREQUENCY: {band}")
        weight_dir = (os.path.expanduser('~')
                      + "/localstorage/kerasTimeSeries/myweights/" + band + "/")
        runTest(ann, base_dir, weight_dir, data, band)
def main():
    """Print the top candidate models (with weights) for one frequency band."""
    band = sys.argv[1] if len(sys.argv) >= 2 else 'theta'
    print("Initializing")
    ann = keras_ann()
    builder = ModelBuilder()
    base_dir = os.path.expanduser('~') + "/localstorage/kerasTimeSeries/"
    weight_dir = (os.path.expanduser('~')
                  + "/localstorage/kerasTimeSeries/myweights/" + band + "/")

    print("Collecting Models")
    weights = []
    model_args = []
    print("GET PARAMS")
    builder.getCandidates(model_args, fname=weight_dir + "topTwo.csv",
                          optimize=False)
    ann.getWeights(weights, weight_dir)

    # ---- collect and prepare the data ----
    print("GET PARAMS")
    data = ann_data(dataPath=os.path.expanduser('~') + "/eegData/")
    [normSTD, normMean] = ann.getNorm(weight_dir)
    [lowFreq, highFreq, _] = ann_data.getFreqBand(band)

    testing = True
    if testing:
        data.readData()
    # non-testing path intentionally does nothing

    data.expandDims()
    data.normalize(normSTD=normSTD, normMean=normMean)

    print("PRINT MODEL")
    ann.printModel(model_args, weights=weights, printLoc=base_dir,
                   loadLoc=weight_dir, X=data.data, Y=data.labels)
    print("DONE")
def main():
    """Evaluate the top candidate models for one frequency band on the full
    input data set, after band-filtering and normalizing with saved stats."""
    band = sys.argv[1] if len(sys.argv) >= 2 else 'theta'
    print("Initializing")
    ann = keras_ann()
    base_dir = os.path.expanduser('~') + "/kerasTimeSeries/"
    weight_dir = (os.path.expanduser('~')
                  + "/localstorage/kerasTimeSeries/myweights/" + band + "/")
    data = ann_data(dataPath=os.path.expanduser('~') + "/eegData/")

    model_args = []
    print("Collecting Models")
    getCandidates(model_args, fname=weight_dir + "topTwo.csv", optimize=False)

    weights = []
    ann.getWeights(weights, weight_dir)
    [normSTD, normMean] = ann.getNorm(weight_dir)
    [lowFreq, highFreq, _] = ann_data.getFreqBand(band)

    testing = False
    if testing:
        data.readData()
    else:
        data.readData(fnames=inputData())

    data.filterFrequencyRange(low=lowFreq, high=highFreq)
    data.expandDims()
    data.normalize(normSTD=normSTD, normMean=normMean)

    ann.testModel(model_args, data.data, data.labels,
                  weights=weights, loadLoc=weight_dir)
def main():
    """Collect candidate models for a chosen frequency band, record the run
    parameters, then train-and-save or parameter-search them."""
    print("Initializing")
    ann = keras_ann()
    base_dir = os.path.expanduser('~') + "/kerasTimeSeries/"
    data = ann_data(dataPath=os.path.expanduser('~') + "/eegData/")

    # Frequency bands (index selects):
    #       0        1        2        3        4
    band = ['delta', 'theta', 'alpha', 'beta1', 'beta2'][4]
    [lowFreq, highFreq, kernelsize] = ann_data.getFreqBand(band)
    lowFreq = highFreq = None  # band filtering disabled for this run

    # Candidate sources (index selects):
    #                  0   1             2            3
    candidate_file = ['', 'topTwo.csv', 'topTen.csv', 'candidate.csv'][3]
    testing = True
    optimize_optimizer = False
    save_model = True

    model_args = []
    print("Collecting Models")
    if not candidate_file:
        addToModelsTest_FrequencyFilters(model_args, addConvFilters=False,
                                         manyFilters=False, numKeepIndexes=100,
                                         kernalPreset=kernelsize)
    else:
        getCandidates(model_args, fname=candidate_file,
                      optimize=optimize_optimizer)

    ann.updatePaths(outputPath=os.path.dirname(os.path.realpath(__file__)) + "/")

    if testing:
        data.readData()
    else:
        data.readData(fnames=inputData())

    data.filterFrequencyRange(low=lowFreq, high=highFreq)
    data.expandDims()
    data.normalize()

    data_files = ",".join(inputData())
    cv_folds = 0 if save_model else 10
    val_perc = 0.10
    n_epochs = 1 if save_model else 100
    batch_size = (32 if save_model
                  else int(((data.record_count * (1 - val_perc)) / cv_folds) + 1))

    # Persist the run configuration alongside the results.
    with open("fileTrainTestParams.txt", 'w') as params:
        params.write(f"dataFiles: {data_files}\ncvFolds: {cv_folds}\n")
        params.write(f"validation_split: {val_perc}\nepoch: {n_epochs}\n")
        params.write(f"batchSize: {batch_size}\n")
        params.write(f"frequency: {lowFreq} - {highFreq}\n")
        params.write(f"normSTD : {data.normSTD}\n")
        params.write(f"normMean : {data.normMean}")

    if save_model:
        ann.trainModel(model_args, data.data, data.labels, valSplit=val_perc,
                       epochs=n_epochs, batchSize=batch_size, visualize=False,
                       saveLoc=base_dir)
        return

    if testing:
        # Quick check: search only the first ten models.
        ann.parameterSearch(model_args[:10], data.data, data.labels,
                            valSplit=0.10)
    else:
        ann.parameterSearch(model_args, data.data, data.labels,
                            numSplits=cv_folds, valSplit=val_perc,
                            epochs=n_epochs, batchSize=batch_size,
                            saveModel=save_model, visualize=False,
                            saveLoc=base_dir)