# plt.scatter(cms, av, alpha=0.1)
# plt.ylabel('Angle Velocity')
# plt.xlabel('CMS Velocity')
# plt.subplot(212)
# plt.plot(cms, label='CMS velocity')
# plt.plot(av, label='Angle Velocity', alpha=0.7)
# plt.legend()
# plt.show()

### play with LASSO
# Fit both regularized linear models on a single behavior trace.
beh = 'Eigenworm3'
data = dh.loadData(folder, dataPars)
results = dr.runLasso(data, pars, testIndices, trainingsIndices, plot=1, behaviors=[beh])
results = dr.runElasticNet(data, pars, testIndices, trainingsIndices, plot=1, behaviors=[beh])

###############################################
#
# Test how stable LASSO is in response to median filtering
#
##############################################
# Sweep odd median-filter window sizes (1, 3, ..., 13) while holding the
# Savitzky-Golay window fixed, reloading the data and refitting LASSO each time.
medians = np.arange(1, 15, 2)
medianFiltData = np.zeros((len(medians), 3))
beh = 'Eigenworm3'
dataPars['savGolayWindow'] = 13
for ind, medfilt in enumerate(medians):
    dataPars['medianWindow'] = medfilt
    data = dh.loadData(folder, dataPars)
    results = dr.runLasso(data, pars, testIndices, trainingsIndices, plot=1, behaviors=[beh])
#mp.plotPCAresults3D(dataSets, resultDict, keyList, pars, col = 'etho', flag = 'LASSO') plt.show() #%% ############################################### # # linear regression using elastic Net # ############################################## if elasticnet: for kindex, key in enumerate(keyList): print 'Running Elastic Net', key splits = resultDict[key]['Training'] resultDict[key]['ElasticNet'] = dr.runElasticNet(dataSets[key], pars, splits, plot=1, behaviors=behaviors) # calculate how much more neurons contribute tmpDict = dr.scoreModelProgression( dataSets[key], resultDict[key], splits, pars, fitmethod='ElasticNet', behaviors=behaviors, ) for tmpKey in tmpDict.keys(): resultDict[key]['ElasticNet'][tmpKey].update(tmpDict[tmpKey]) mp.plotLinearModelResults(dataSets, resultDict,