def rollBack(self, T0, T1, x1, U1, H1):
    """Roll exercise/hold values back from T1 to T0 along the simulated paths.

    Parameters
    ----------
    T0, T1 : observation times, T0 < T1.
    x1     : simulated model states at T1, shape (nPaths, stateDim).
    U1, H1 : per-path exercise and hold values at T1.

    Returns [x0, V0]: states and numeraire-discounted values at T0.
    At T0 == 0 the value is collapsed to the sample mean over the
    out-of-sample paths (beyond minSampleIdx) to avoid regression bias.
    """
    x0 = self.xSet(T0)
    # numeraire per path at both observation times
    N0 = np.array([self.hwMcSimulation.model.numeraire(x0[i]) for i in range(x1.shape[0])])
    N1 = np.array([self.hwMcSimulation.model.numeraire(x1[i]) for i in range(x1.shape[0])])
    if self.minSampleIdx > 0 and T0 > 0:  # do not use regression for the last roll-back
        # we try state variable approach: regress the exercise indicator
        # U1 - H1 on the first state component, using in-sample paths only
        C = np.array([[x0[i][0]] for i in range(self.minSampleIdx)])
        O = np.array([U1[i] - H1[i] for i in range(self.minSampleIdx)])
        R = Regression(C, O, self.maxPolynomialDegree)
    else:
        R = None
    V0 = np.zeros(x1.shape[0])
    for i in range(x1.shape[0]):
        I = U1[i] - H1[i]
        # use `is not None` (identity) instead of the original `!= None`
        if R is not None:
            I = R.value(np.array([x0[i][0]]))
        # exercise when the (estimated) exercise indicator is positive
        V = U1[i] if I > 0 else H1[i]
        V0[i] = N0[i] / N1[i] * V  # discount with the numeraire ratio
    if T0 == 0:
        # average over out-of-sample paths only, if any were reserved
        sampleIdx = self.minSampleIdx if self.minSampleIdx < self.hwMcSimulation.nPaths else 0
        return [
            np.array([0.0]),
            np.array([np.sum(V0[sampleIdx:]) / V0[sampleIdx:].shape[0]]),
        ]
    return [x0, V0]
def Kfold(x, y, z, folds, degree, lambd=None):
    """Run k-fold cross validation at a fixed polynomial degree.

    Returns (mean train MSE, mean test MSE) over all folds.
    """
    Xtrain, ztrain, Xtest, ztest = Evaluation.CV_split(x, y, z, folds)
    mse_train = np.zeros(folds)
    mse_test = np.zeros(folds)
    for fold in range(folds):
        # fit on this fold's training partition
        xtr, ytr = Xtrain[fold][:, 0], Xtrain[fold][:, 1]
        design_tr = Regression.CreateDesignMatrix_X(xtr, ytr, degree)
        beta = Regression.Beta(design_tr, ztrain[fold], lambd)
        mse_train[fold] = Evaluation.MSE(
            ztrain[fold], Regression.Prediction(design_tr, beta))
        # score the same coefficients on the held-out partition
        xte, yte = Xtest[fold][:, 0], Xtest[fold][:, 1]
        design_te = Regression.CreateDesignMatrix_X(xte, yte, degree)
        mse_test[fold] = Evaluation.MSE(
            ztest[fold], Regression.Prediction(design_te, beta))
    return np.mean(mse_train), np.mean(mse_test)
def rollBack(self, T0, T1, x1, U1, H1):
    """Roll values back from T1 to T0 using swap/libor-rate basis functions.

    Regresses the discounted continuation value max(U1, H1) on the swap
    rate S and a short libor rate L at T0; commented-out lines show an
    earlier basis using the payoff [S-K]^+ instead of L.
    Returns [x0, V0]; at T0 == 0 returns the out-of-sample mean value.
    """
    x0 = self.xSet(T0)
    # numeraire per path at T0 and T1
    N0 = np.array([ self.hwMcSimulation.model.numeraire(x0[i]) for i in range(x1.shape[0]) ])
    N1 = np.array([ self.hwMcSimulation.model.numeraire(x1[i]) for i in range(x1.shape[0]) ])
    # we need to calculate regression variables for all paths
    swapRate = SwapRate(self.hwMcSimulation.model,T0,T0,self.maturityTime)
    # NOTE(review): modelled as a short swap over [T0, T0+0.5] -- presumably a 6M libor proxy
    liborRate = SwapRate(self.hwMcSimulation.model,T0,T0,T0+0.5)
    S = np.array([ swapRate.at(x) for x in x0 ])
    L = np.array([ liborRate.at(x) for x in x0 ])
    #Sp = np.array([ max(s-self.strikeRate,0.0) for s in S ])
    if self.minSampleIdx>0 and T0>0:  # do not use regression for the last roll-back
        # we use S and [S-K]^+ as basis functions
        #C = np.array([ [ S[i], Sp[i] ] for i in range(self.minSampleIdx) ])
        C = np.array([ [ S[i], L[i] ] for i in range(self.minSampleIdx) ])
        # regression target is the discounted continuation value
        O = np.array([ N0[i]/N1[i]*max(U1[i],H1[i]) for i in range(self.minSampleIdx) ])
        R = Regression(C,O,self.maxPolynomialDegree)
    else:
        R = None
    V0 = np.zeros(x1.shape[0])
    for i in range(x1.shape[0]):
        # pathwise discounted value; replaced by the regression estimate when available
        V0[i] = N0[i]/N1[i]*max(U1[i],H1[i])
        #if R!=None: V0[i] = R.value(np.array([ S[i], Sp[i] ]))
        if R!=None: V0[i] = R.value(np.array([ S[i], L[i] ]))
    if T0==0:
        # average over the out-of-sample paths only (regression used in-sample paths)
        sampleIdx = self.minSampleIdx if self.minSampleIdx<self.hwMcSimulation.nPaths else 0
        return [ np.array([0.0]), np.array([np.sum(V0[sampleIdx:])/V0[sampleIdx:].shape[0] ]) ]
    return [x0, V0]
def buildPhiFace(self):
    """Fill each sample's phi feature vector with the regression slope
    of its pixels plus horizontal/vertical gap counts."""
    for sample in self.samples:
        xs, ys = Regression.makeLists(sample.image)
        slope, _intercept = Regression.findRegression(xs, ys)
        sample.phiVector['m'] = round(slope, 1)
        sample.phiVector['hGap'] = round(Gap.horizontal(sample.image))
        sample.phiVector['vGap'] = round(Gap.vertical(sample.image))
def featureFace(x):
    """Return the feature triple [slope, vertical gap, horizontal gap] for x.image."""
    xs, ys = Regression.makeLists(x.image)
    slope, _ = Regression.findRegression(xs, ys)  # intercept is not a feature here
    vgap = Gap.vertical(x.image)
    hgap = Gap.horizontal(x.image)
    #im, s = ScaleDown.scale2(x.image, 7)
    return [slope, vgap, hgap]
def buildPhi(self):
    """Attach a phi feature vector (regression slope only) to every sample."""
    for sample in self.samples:
        sample.phiVector = {'m': None}
        xs, ys = Regression.makeLists(sample.image)
        slope, _ = Regression.findRegression(xs, ys)
        sample.phiVector['m'] = round(slope, 1)
def __init__(self):
    # Convolution(3, 8): presumably kernel size 3 with 8 filters -- TODO confirm
    self.convolution = Convolution(3, 8)
    self.pool = Maxpool()
    # fully-connected stack on the flattened 11*23*8 pooled output
    self.fcl = FCL(11 * 23 * 8, 128)
    self.fcl1 = FCL(128, 128)
    #self.fcl2 = FCL(64, 64)
    self.relu = Relu()
    self.relu1 = Relu()
    #self.relu2 = Relu()
    # NOTE(review): the regression head also takes the raw 11*23*8 pooled size,
    # not the 128-wide FCL output -- confirm the intended wiring
    self.regression = Regression(11 * 23 * 8, 5)
def main():
    """Fit a degree-9 polynomial to noisy sin(2*pi*x) samples and plot the result."""
    reg = Regression()
    # NOTE(review): these look like intended setter *calls* (e.g. reg.set_max_iter(20000));
    # as written they merely attach attributes named after the setters -- confirm the
    # Regression API before changing this behavior.
    reg.set_max_iter = 20000
    reg.set_lr = 0.01
    reg.set_l2_penalty = 0.002
    reg.set_tolerance = 1e-5
    deg = 9
    num_sample = 10
    # noisy samples of sin(2*pi*x) on [0, 1)
    x = np.arange(0, 1, 1.0 / num_sample).reshape(num_sample, 1)
    y_list = [math.sin(2 * math.pi * e) for e in x] + np.random.normal(0, 0.3, num_sample)
    y = np.array(y_list).reshape(num_sample, 1)
    # (removed dead pre-initialization of theta: it was immediately overwritten)
    theta, loss, repeat = reg.polynomial_fit(x, y, deg)
    z = np.linspace(0, 1, 100)
    prediction = reg.predict(z)
    # plot input data, the fitted curve and the true sine
    fig = plt.figure()
    plt.plot(x, y, 'o', label='Input data')
    plt.plot(z, prediction, 'r-', label='Prediction')
    plt.plot(z, [math.sin(2 * math.pi * e) for e in z], label='Sine Function')
    pylab.xlim([0, 1])
    pylab.ylim([-1.5, 1.5])
    plt.legend(loc=3)
    fig.suptitle('Polynomial Regression, N=10,Dgree=3,Lamda=0.002')
    plt.xlabel('Input')
    plt.ylabel('Output(prediction)')
def regression(self, regressor):
    """Run the given regressor on the feature space without showing the
    parameter dialog; clears any active classifier/density estimator."""
    self.classifier = None
    self.densityEstimator = None
    # busy cursor while the regressor trains
    self.setCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
    self.statusbar.showMessage('')
    self.regressor = Regression(regressor, self.regressionParameters, self.featurespace)
    self.regressor.initialize()
    # force the feature-space view to re-render with the new result
    self.featurespace.setClassificationImage(None)
    # push a dummy operation so the action is undoable
    op = Operation(self, "dummy", None)
    self.operationStack.add(op)
    self.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
    self.statusbar.showMessage('regression done')
    self.repaint()
def get_opt_fields(self, prev_opt_flds, prev_opt_c):
    """Greedy field search: try every 2-element field combination together
    with the previously selected fields and keep the best-scoring one.

    Returns (max_score, best_combination, best_field_list); the combination
    entries stay None when no combination beats a score of 0.
    """
    combinations = comb().get_combinations(len(self.fp.fields), 2, 2)
    model = Regression()
    opt_fields = None
    opt_c = None
    max_score = 0
    # loop variable renamed: it previously shadowed the comb() helper,
    # and the enumerate index was unused
    for candidate in combinations:
        fields = self.fp.get_fields(candidate) + prev_opt_flds
        score = model.run(self.df, fields)
        if score > max_score:
            max_score = score
            opt_fields = fields
            opt_c = candidate
    return max_score, opt_c, opt_fields
def create_training_set(self, lf_model, hf_model, id):
    """Create a training set through a fresh Regression model and retain
    that model for subsequent levels."""
    model = Regression(
        regression_type=self.regression_type,
        training_set_strategy=self.training_set_strategy)
    x_train = model.create_training_set(
        lf_model=lf_model,
        hf_model=hf_model,
        id=id,
        regression_models=self.regression_models)
    # remember the model so later training sets can build on it
    self.regression_models.append(model)
    return x_train
def BiasVarTradeOff(x, y, z, folds, degree, lambd=None):
    """Bias-variance decomposition over polynomial degrees 0..degree-1.

    Shuffles the data, holds out 20% as a fixed test set, then for each
    degree runs (folds-1)-fold CV on the remainder and evaluates every
    fold's fit on the common held-out set.
    Returns (bias_alltest, var_alltest, error_alltest), one entry per degree.
    """
    # flatten meshgrid-style inputs to 1-D before stacking
    if len(x.shape) > 1 or len(y.shape) > 1 or len(z.shape) > 1:
        x, y, z = x.flatten(), y.flatten(), z.flatten()
    X = np.transpose(np.array([x, y, z]))
    np.random.shuffle(X)  # shuffle rows so the split is random
    z = X[:, 2]
    X = X[:, 0:2]
    x_train, x_test, z_train, z_test = train_test_split(
        X, z, test_size=0.2)  # Make test data that is not used in Kfold CV
    x1 = x_train[:, 0]
    y1 = x_train[:, 1]
    z1 = z_train
    folds = folds - 1  ### obs needs to be done to do splitting. Means Kfold is done with k-1 folds!!!!
    error_alltest = np.zeros(degree)
    bias_alltest = np.zeros(degree)
    var_alltest = np.zeros(degree)
    # NOTE(review): d runs 0..degree-1, so the nominal `degree` itself is never fitted
    for d in range(degree):
        Xtrain, ztrain, Xtest, ztest = Evaluation.CV_split(
            x1, y1, z1, folds)
        # one prediction column on the common test set per fold
        ztilde_test = np.zeros([len(z_test), folds])
        for i in range(folds):
            xtrain = Xtrain[i]
            x = xtrain[:, 0]
            y = xtrain[:, 1]
            X_train = Regression.CreateDesignMatrix_X(x, y, d)
            beta_train = Regression.Beta(X_train, ztrain[i], lambd)
            # evaluate this fold's coefficients on the fixed held-out set
            x = x_test[:, 0]
            y = x_test[:, 1]
            X_test = Regression.CreateDesignMatrix_X(x, y, d)
            ztilde_test[:, i] = Regression.Prediction(X_test, beta_train)
        # column vector for broadcasting against the (n_test, folds) predictions;
        # re-reshaping on later iterations is a no-op
        z_test = z_test.reshape(z_test.shape[0], 1)
        error_alltest[d] = np.mean(
            np.mean((ztilde_test - z_test)**2, axis=1, keepdims=True))
        bias_alltest[d] = np.mean(
            (z_test - np.mean(ztilde_test, axis=1, keepdims=True))**2)
        var_alltest[d] = np.mean(np.var(ztilde_test, axis=1, keepdims=True))
    return bias_alltest, var_alltest, error_alltest
def test_Regression_dtype():
    """Regression must reject inputs that are not pandas DataFrames
    by raising a TypeError."""
    bad_input = "A wrong data type of type string"
    with pytest.raises(TypeError):
        Regression(bad_input)
class NeuralNetworkMR:
    """Small CNN for multi-output regression: conv -> maxpool -> FCL ->
    ReLU -> regression head with 30 outputs."""

    def __init__(self):
        # Convolution(3, 6): presumably kernel size 3 with 6 filters -- TODO confirm
        self.convolution = Convolution(3, 6)
        self.pool = Maxpool()
        # fully-connected layer on the flattened 127*159*6 pooled output
        self.fcl = FCL(127 * 159 * 6, 64)
        self.relu = Relu()
        # regression head: 64 features -> 30 outputs
        self.regression = Regression(64, 30)

    def normalize(self, image):
        # scale pixel values into roughly [-0.5, 0.5]
        return (image /255) - 0.5

    def forward(self, image, label):
        """Run one forward pass; returns (output, loss, acc).

        `acc` is the summed absolute error, not an accuracy percentage.
        """
        im = self.normalize(image)
        #print(im.shape)
        out = self.convolution.apply(im)
        #print(out.shape)
        out = self.pool.apply(out)
        #print(out.shape)
        out = self.fcl.apply(out)
        #print(out.shape)
        out = self.relu.apply(out)
        #print(out.shape)
        out = self.regression.apply(out)
        #print(out.shape)
        #print("labels shape: " + str(label.shape))
        # NOTE(review): the squared error is negated here, making the reported
        # loss non-positive -- confirm against squared_error's sign convention
        loss = -np.sum(self.regression.squared_error(label))
        acc = np.sum(np.abs(self.regression.error(label)))
        return out, loss, acc

    def train(self, im, label, lr=0.005):
        """One forward + backward pass with learning rate lr; returns forward results."""
        out, loss, acc = self.forward(im, label)
        # backpropagate in reverse layer order
        gradient = self.regression.backprop(label, lr)
        gradient = self.relu.backprop(gradient)
        gradient = self.fcl.backprop(gradient, lr)
        gradient = self.pool.backprop(gradient)
        self.convolution.backprop(gradient, lr)
        return out, loss, acc
def regressionWithParameters(self, regressor):
    """Show the parameter dialog for the given regressor and, if accepted,
    run it on the feature space; clears any active classifier/density estimator."""
    self.classifier = None
    self.densityEstimator = None
    self.regressionParameters.setTab(regressor)
    result = self.regressionParameters.exec_()
    if result == QtWidgets.QDialog.Accepted:
        # busy cursor while the regressor trains
        self.setCursor(QtGui.QCursor(QtCore.Qt.WaitCursor))
        self.statusbar.showMessage('')
        self.regressor = Regression(regressor, self.regressionParameters,
                                    self.featurespace)
        try:
            self.regressor.initialize()
            # force the feature-space view to re-render with the new result
            self.featurespace.setClassificationImage(None)
            # push a dummy operation so the action is undoable
            op = Operation(self, "dummy", None)
            self.operationStack.add(op)
        except AssertionError as e:
            QtWidgets.QMessageBox.warning(self, 'Error', str(e),
                                          QtWidgets.QMessageBox.Ok,
                                          QtWidgets.QMessageBox.Ok)
        self.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor))
        self.statusbar.showMessage('regression done')
        self.repaint()
def _Holdout_validation(training_set, validation_set, X_indeces, Y_indeces, alfa, iterations, _lambda):
    """Train on the training split and cost both splits.

    Returns [(training cost, validation cost), trained thetas].
    """
    train_X = Input.giveme_cols(training_set, X_indeces)
    train_Y = Input.giveme_cols(training_set, Y_indeces)
    valid_X = Input.giveme_cols(validation_set, X_indeces)
    valid_Y = Input.giveme_cols(validation_set, Y_indeces)
    train_model = Regression(train_X, train_Y)
    valid_model = Regression(valid_X, valid_Y)
    # copy the fitted parameters onto the validation model before costing it
    valid_model.thetas = train_model.batch_gradient_descent(
        alfa, _lambda, iterations)
    costs = (train_model.cost_function(_lambda),
             valid_model.cost_function(_lambda))
    return [costs, train_model.thetas]
def DataAnalysis(self):
    """Load the life-expectancy dataset, clean it, fit linear and logistic
    regressions, and print predictions for the rows of test.csv."""
    # Reading the data file into a pandas data frame
    lifedata = pd.read_csv("life_expectancy_dataset.csv")
    print("Data", lifedata)
    desc_data = lifedata.describe()  # Obtaining a descriptive statistics of data
    print("Descriptive statistics of data:", desc_data)
    # Obtaining the total number of null values within the dataset
    nullvalues = lifedata.isnull().sum()
    print("Total null and missing values :", nullvalues)
    # Dropping rows with any null values
    final_lifedata = lifedata.dropna(axis=0, how='any')
    print("--------Cleaned data ----------")
    print(final_lifedata)
    # Creating instance of the class Regression
    reg = Regression()
    print("---------Visualisation of data---------------")
    reg.visualisation(final_lifedata)
    print("---------Multiple Regression of data---------------")
    reg_coef = reg.linearReg(final_lifedata)
    testdata = pd.read_csv("test.csv")
    print(testdata)
    replacements = {
        'Developing': 1,
        'Developed': 0,
    }
    testdata['Status'].replace(replacements, inplace=True)
    # BUG FIX: the prediction was previously split into three separate
    # statements, so the terms for reg_coef[5]..reg_coef[17] were computed
    # and silently discarded. All 18 terms now contribute to one sum;
    # reg_coef[18] is the intercept.
    feature_names = [
        'Year', 'Status', 'Adult Mortality', 'infant deaths', 'Alcohol',
        'percentage expenditure', 'Hepatitis B', 'Measles ', ' BMI ',
        'Polio', 'Total expenditure', 'Diphtheria ', ' HIV/AIDS', 'GDP',
        'Population', 'thinness 5-9 years',
        'Income composition of resources', 'Schooling',
    ]
    ypred = []
    for index, row in testdata.iterrows():
        y_pred = reg_coef[18] + sum(
            reg_coef[k] * row[name] for k, name in enumerate(feature_names))
        ypred.append(y_pred)
    print("Predictions:")
    for i in ypred:
        print(i)
    # Carrying out logistic regression
    reg_coef = reg.logisticreg(final_lifedata)
def __init__(self):
    # Convolution(3, 6): presumably kernel size 3 with 6 filters -- TODO confirm
    self.convolution = Convolution(3, 6)
    self.pool = Maxpool()
    # fully-connected layer on the flattened 127*159*6 pooled output
    self.fcl = FCL(127 * 159 * 6, 64)
    self.relu = Relu()
    # regression head: 64 features -> 30 outputs
    self.regression = Regression(64, 30)
def run_for_all_fields(self):
    """Run Lasso regression (10 splits/iterations per get_data's convention)
    over every field of the dataframe and return the resulting score.

    The score was previously computed and discarded; returning it is
    backward-compatible (existing callers that ignore the result see no change).
    """
    model = Regression()
    indices = list(range(len(self.fp.fields)))  # select every field
    fields = self.fp.get_fields(indices)
    score = model.get_data(self.df, fields, 'Lasso', 10)
    return score
root = 'D:/Bioinformatics' elif hostname == 'mingyu-Inspiron-7559': root = '/media/mingyu/8AB4D7C8B4D7B4C3/Bioinformatics' else: root = '/lustre/fs0/home/mcha/Bioinformatics' cor = Correlation(root) if hostname == '-DLOOJR6' or hostname == '-1NLOLK4': cor.to_server(root, "") else: from Regression import Regression from mir_gene import mir_gene from set_go import set_go from validation import validation rg = Regression(root) mg = mir_gene() sg = set_go(root) val = validation(root) # cor.high_correlation_by_thres(100) # cor.get_sample_corr(hbw) # cor.plot_sample() for hbw in [100]: for opt in ['nz']: # cor.corr_stats(hbw) # cor.correlation_fan_rna(hbw) # cor.high_clusters(hbw) # cor.high_correlation(hbw, 0.75) # exit(1)
# Feature columns to evaluate one at a time below.
COLUMNS = [
    f.CLUMP_THICKNESS, f.UNIFORMITY_OF_CELL_SIZE, f.UNIFORMITY_OF_CELL_SHAPE,
    f.MARGINAL_ADHESION, f.SINGLE_EPITHELIAL_CELL_SIZE, f.BARE_NUCLEI,
    f.BLAND_CHROMATIN, f.NORMAL_NUCLEOLI, f.MITOSES
]
# Table header for the per-feature accuracy report.
print("|---------------------------------------------------------|")
print("| {:<5} {:<25}|{:<3} {:<19} |".format(" ", "FEATURE", " ", "ACCURACY"))
print("|---------------------------------------------------------|")
# Fit a single-feature Regression model per column and report its accuracy
# (presumably logistic regression, given the 0.5 threshold -- TODO confirm).
for column in COLUMNS:
    _X = prepared_data_set.loc[:, [column]]
    X = np.asarray(_X).astype(float)
    _pre_y = prepared_data_set.loc[:, [f.CLASS]]
    _y = np.asarray(_pre_y).astype(float)
    y = _y.reshape((len(_y), ))  # flatten target to 1-D for fitting
    # fresh random 65/35 split for each feature
    X_train, X_test, y_train, y_test = train_test_split(X, y, train_size=0.65, test_size=0.35)
    rm = Regression(learning_rate=0.001, iterations=10000, threshold=.5)
    rm.fit(X_train, y_train)
    predictions = rm.predict(X_test)
    print("| {:<30} | {:<20} % |".format(column, accuracy(y_test, predictions) * 100))
print("|---------------------------------------------------------|")
X = df.iloc[132:, [1, 2, 5, 6, 9, 11, 12]].to_numpy() y_prev = df.iloc[132:, [3]].to_numpy() # Adding noise m = np.mean(y_prev) s = np.std(y_prev) y = y_prev + np.random.normal(m, s) # Hyperparameters for tuning hidden_neuron_list = [5, 5, 5] epochs = 500 runs = 30 lr_rate = 0.001 lmbd = 0.001 # Calling the class function containing activaion and cost function reg = Regression(hidden_activation='ReLU', output_activation="linear") # Initialize storing values r2_test_runs = np.zeros((runs, epochs)) r2_train_runs = np.zeros((runs, epochs)) r2_end_test = np.zeros(runs) r2_end_train = np.zeros(runs) MAPE_test_runs = np.zeros((runs, epochs)) MAPE_train_runs = np.zeros((runs, epochs)) MAPE_test_end = np.zeros(runs) MAPE_train_end = np.zeros(runs) for run in tqdm(range(runs)): X_train, X_test, Y_train, Y_test = train_test_split(X, y, test_size=0.2) Scaler = preprocessing.StandardScaler()
def calculate_everything(data):
    """
    Runs everything, this is the default method run
    :return: None
    """
    # Python 2 module: print statements and dict.iteritems() below.
    regression = Regression(data_file=data,
                            actual_output=Constants.OUTPUT_FEATURE,
                            iterations=Constants.ITERATIONS,
                            step_size=Constants.STEP_SIZE)
    regression.set_features(Constants.SINGLE_FEATURE)
    # --- 1. plain least-squares fits ---------------------------------
    print "==========================="
    print "1. Least Square Regression"
    print "==========================="
    print "\n\ti. wo + w1 * (sqft_living) + w2 * (sqft_living)[2] :"
    lsr_square_coefficient = least_square_regression(regression, 2)
    print "\n\tTrained Coefficients :"
    print "\t========================"
    print_weights(lsr_square_coefficient)
    print "\n\tii. wo + w1 * (sqft_living) + w2 * (sqft_living)[2] + w3 * (sqft_living)[3] + w4 * (sqft_living)[4] :"
    lsr_forth_coefficient = least_square_regression(regression, 4)
    print "\n\tTrained Coefficients :"
    print "\t========================"
    print_weights(lsr_forth_coefficient)
    print "\n\tiii. wo + w1 * (sqft_living) + w2 * (sqft_lot) + w3 * (bedrooms) + w4 * (bathrooms) :"
    regression.set_features(Constants.MULTIPLE_FEATURES)
    lsr_multi_feature_coefficient = least_square_regression(regression)
    print "\n\tTrained Coefficients :"
    print "\t========================"
    print_weights(lsr_multi_feature_coefficient)
    # --- 2. 10-fold cross validation of the same models --------------
    print "\n====================================================="
    print "2. 10-Fold Cross Validation (Least Square Regression)"
    print "====================================================="
    print "\n\ti. wo + w1 * (sqft_living) + w2 * (sqft_living)[2] :"
    regression.set_features(Constants.SINGLE_FEATURE)
    lsr_square_rmse = cross_validate(regression, degree=2)
    print "\n\tRoot Mean Square Error (RMSE) :"
    print "\t========================"
    print_rmse(lsr_square_rmse)
    print "\n\tii. wo + w1 * (sqft_living) + w2 * (sqft_living)[2] + w3 * (sqft_living)[3] + w4 * (sqft_living)[4] :"
    lsr_forth_rmse = cross_validate(regression, degree=4)
    print "\n\tRoot Mean Square Error (RMSE) :"
    print "\t========================"
    print_rmse(lsr_forth_rmse)
    print "\n\tiii. wo + w1 * (sqft_living) + w2 * (sqft_lot) + w3 * (bedrooms) + w4 * (bathrooms) :"
    regression.set_features(Constants.MULTIPLE_FEATURES)
    lsr_multi_feature_rmse = cross_validate(regression)
    print "\n\tRoot Mean Square Error (RMSE) :"
    print "\t========================"
    print_rmse(lsr_multi_feature_rmse)
    # --- 3. ridge-regularized fits -----------------------------------
    print "\n==========================="
    print "3. Ridge Regression"
    print "==========================="
    print "\n\ti. wo + w1 * (sqft_living) + w2 * (sqft_living)[2] :"
    regression.set_features(Constants.SINGLE_FEATURE)
    ridge_square_coefficient = ridge_regression(regression, degree=2)
    print "\n\tTrained Coefficients :"
    print "\t========================"
    print_weights(ridge_square_coefficient)
    print "\n\tii. wo + w1 * (sqft_living) + w2 * (sqft_living)[2] + w3 * (sqft_living)[3] + w4 * (sqft_living)[4] :"
    ridge_forth_coefficient = ridge_regression(regression, degree=4)
    print "\n\tTrained Coefficients :"
    print "\t========================"
    print_weights(ridge_forth_coefficient)
    print "\n\tiii. wo + w1 * (sqft_living) + w2 * (sqft_lot) + w3 * (bedrooms) + w4 * (bathrooms) :"
    regression.set_features(Constants.MULTIPLE_FEATURES)
    ridge_multi_feature_coefficient = ridge_regression(regression)
    print "\n\tTrained Coefficients :"
    print "\t========================"
    print_weights(ridge_multi_feature_coefficient)
    # --- 4. lambda model selection via ridge cross validation --------
    print "\n================================"
    print "4. Model Selection"
    print "================================"
    # fewer iterations for the lambda sweep
    regression.iterations = Constants.MODEL_SELECTION_ITERATION
    print "\n\ti. wo + w1 * (sqft_living) + w2 * (sqft_living)[2] :"
    regression.set_features(Constants.SINGLE_FEATURE)
    lsr_sqr_model_selection = cross_validate(regression, is_ridge=True, degree=2)
    print "\n\tSelected Models :"
    print "\t========================\n"
    for error_index in range(0, len(lsr_sqr_model_selection)):
        for lamda, model_rmse in lsr_sqr_model_selection[
                error_index].iteritems():
            print "\tlamda: " + str(lamda)
            print "\tAverage RMSE : " + str(model_rmse)
    print "\n\tii. wo + w1 * (sqft_living) + w2 * (sqft_living)[2] + w3 * (sqft_living)[3] + w4 * (sqft_living)[4] :"
    lsr_forth_model_selection = cross_validate(regression, is_ridge=True, degree=4)
    print "\n\tSelected Models :"
    print "\t========================\n"
    for error_index in range(0, len(lsr_forth_model_selection)):
        for lamda, model_rmse in lsr_forth_model_selection[
                error_index].iteritems():
            print "\tlamda: " + str(lamda)
            print "\tAverage RMSE : " + str(model_rmse)
    print "\n\tiii. wo + w1 * (sqft_living) + w2 * (sqft_lot) + w3 * (bedrooms) + w4 * (bathrooms) :"
    regression.set_features(Constants.MULTIPLE_FEATURES)
    lsr_multi_feature_model_selection = cross_validate(regression, is_ridge=True)
    print "\n\tSelected Models :"
    print "\t========================\n"
    for error_index in range(0, len(lsr_multi_feature_model_selection)):
        for lamda, model_rmse in lsr_multi_feature_model_selection[
                error_index].iteritems():
            print "\tlamda: " + str(lamda)
            print "\tAverage RMSE : " + str(model_rmse)
def addRegression(self, name):
    """Register a new Regression called *name*, seeded from this object's
    common settings; raises if the name is already registered."""
    if name in self.regressions.keys():
        # NOTE(review): StandardError exists only in Python 2
        raise StandardError("ERROR: regression "+name+" has already been registered")
    reg = Regression()
    reg.id = 1
    reg.name = self.baseName+"_"+name
    reg.inputFiles = self.inputFiles
    reg.tree = self.tree
    reg.method = self.method
    # TMVA-specific training options apply only to the TMVA trainer
    if self.trainerType=="TMVA":
        reg.tmvaTrainingOptions = copy.copy(self.tmvaTrainingOptions)
    # shallow-copy mutable option/variable/cut containers so later edits to
    # one registered regression do not leak into the shared defaults
    reg.options = copy.copy(self.commonOptions)
    reg.doErrors = self.doErrors
    reg.doCombine = self.doCombine
    reg.variablesEB = copy.copy(self.commonVariablesEB)
    reg.variablesEE = copy.copy(self.commonVariablesEE)
    reg.variablesComb = copy.copy(self.commonVariablesComb)
    reg.target = self.target
    reg.targetError = self.targetError
    reg.targetComb = self.targetComb
    reg.cuts = copy.copy(self.commonCuts)
    reg.cutsEB = copy.copy(self.commonCutsEB)
    reg.cutsEE = copy.copy(self.commonCutsEE)
    reg.cutsError = copy.copy(self.commonCutsError)
    reg.cutsComb = copy.copy(self.commonCutsComb)
    self.regressions[name] = reg
def regression_results(self, models, testSize):
    """Evaluate the requested models on every field of the dataframe and
    return whatever Regression.get_data reports."""
    all_indices = [i for i in range(len(self.fp.fields))]
    selected_fields = self.fp.get_fields(all_indices)
    return Regression().get_data(self.df, selected_fields, models, testSize)
class PyClassificationToolbox(QtWidgets.QMainWindow): def __init__(self): super(PyClassificationToolbox, self).__init__() self.initUI() try: self.featurespace.loadDefaultFeatureSpace('.featurespace.pyct') except: print("could not open default feature space") def initUI(self): self.clusteringParameters = ClusteringParameters(self) self.dimRedParameters = DimensionalityReductionParameters(self) self.classifierParameters = ClassifierParameters(self) self.regressionParameters = RegressionParameters(self) self.densityEstimationParameters = DensityEstimationParameters(self) self.probabilityDensityViewer = ProbabilityDensityViewer(self) self.licenseDialog = LicenseDialog(self) self.aboutDialog = AboutDialog(self, self.licenseDialog) self.infoDialog = InfoDialog(self) self.operationStack = OperationStack(self) self.createSamplesDockWidget = CreateSamplesProperties(self) self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, self.createSamplesDockWidget) self.statusbar = QtWidgets.QStatusBar() self.setStatusBar(self.statusbar) self.featurespace = FeatureSpace(self, self.statusbar, self.createSamplesDockWidget) self.setCentralWidget(self.featurespace) self.classifier = None self.regressor = None self.densityEstimator = None featureSpaceNewAction = QtWidgets.QAction('&New', self) featureSpaceNewAction.setShortcut('Ctrl+N') featureSpaceNewAction.setStatusTip('Create empty feature space') featureSpaceNewAction.triggered.connect(self.newFeatureSpace) featureSpaceOpenAction = QtWidgets.QAction('&Open...', self) featureSpaceOpenAction.setShortcut('Ctrl+O') featureSpaceOpenAction.setStatusTip('Load a feature space') featureSpaceOpenAction.triggered.connect(self.openFeatureSpace) featureSpaceSaveAction = QtWidgets.QAction('&Save', self) featureSpaceSaveAction.setShortcut('Ctrl+S') featureSpaceSaveAction.setStatusTip('Save the feature space') featureSpaceSaveAction.triggered.connect(self.saveFeatureSpace) featureSpaceSaveAsAction = QtWidgets.QAction('Save &as...', self) 
featureSpaceSaveAsAction.setStatusTip( 'Save the feature space to a new file') featureSpaceSaveAsAction.triggered.connect(self.saveAsFeatureSpace) featureSpaceImportAction = QtWidgets.QAction('&Import samples...', self) featureSpaceImportAction.setStatusTip( 'Read feature vectors from an ASCII file') featureSpaceImportAction.triggered.connect(self.importFeatureSpace) featureSpaceExportAction = QtWidgets.QAction('&Export samples...', self) featureSpaceExportAction.setStatusTip( 'Write the feature vectors to an ASCII file') featureSpaceExportAction.triggered.connect(self.exportFeatureSpace) featureSpaceSaveImageAction = QtWidgets.QAction( 'Export as image...', self) featureSpaceSaveImageAction.setStatusTip( 'Export the feature space as image') featureSpaceSaveImageAction.triggered.connect( self.exportFeatureSpaceAsImage) self.__featureSpaceHideSamplesAction = QtWidgets.QAction( 'Hide samples', self) self.__featureSpaceHideSamplesAction.setStatusTip('Hide all samples') self.__featureSpaceHideSamplesAction.setShortcut('F8') self.__featureSpaceHideSamplesAction.triggered.connect( self.hideSamples) self.__featureSpaceHideSamplesAction.setCheckable(True) self.__featureSpaceHideSamplesAction.setChecked(False) exitAction = QtWidgets.QAction('&Quit', self) exitAction.setShortcut(QtCore.Qt.CTRL + QtCore.Qt.Key_Q) exitAction.setStatusTip('Exit the Python Classification Toolbox') exitAction.triggered.connect(self.close) menubar = self.menuBar() menubar.setNativeMenuBar(False) featureSpaceMenu = menubar.addMenu('&Feature Space') featureSpaceMenu.addAction(featureSpaceNewAction) featureSpaceMenu.addAction(featureSpaceOpenAction) featureSpaceMenu.addAction(featureSpaceSaveAction) featureSpaceMenu.addAction(featureSpaceSaveAsAction) featureSpaceMenu.addSeparator() featureSpaceMenu.addAction(featureSpaceImportAction) featureSpaceMenu.addAction(featureSpaceExportAction) featureSpaceMenu.addAction(featureSpaceSaveImageAction) featureSpaceMenu.addSeparator() 
featureSpaceMenu.addAction(self.__featureSpaceHideSamplesAction) featureSpaceMenu.addSeparator() featureSpaceMenu.addAction(exitAction) clusteringKMeansAction = QtWidgets.QAction('k-Means Clustering...', self) clusteringKMeansAction.setStatusTip('k-Means Clustering') clusteringKMeansAction.triggered.connect( lambda: self.clusterWithParameters(Clustering.kMeans)) clusteringGMMAction = QtWidgets.QAction('Gaussian Mixture Model...', self) clusteringGMMAction.setStatusTip('Gaussian Mixture Model') clusteringGMMAction.triggered.connect( lambda: self.clusterWithParameters(Clustering.GMM)) clusteringParametersAction = QtWidgets.QAction('Parameters...', self) clusteringParametersAction.setStatusTip( 'Edit the parameters of the clustering algorithms') clusteringParametersAction.triggered.connect( self.editClusteringParameters) clusteringMenu = menubar.addMenu('C&lustering') clusteringMenu.addAction(clusteringKMeansAction) clusteringMenu.addAction(clusteringGMMAction) clusteringMenu.addSeparator() clusteringMenu.addAction(clusteringParametersAction) dimRedPCAAction = QtWidgets.QAction('Principal Component Analysis...', self) dimRedPCAAction.setStatusTip('Principal Component Analysis (PCA)') dimRedPCAAction.triggered.connect( lambda: self.reduceDimensionalityWithParameters( DimensionalityReduction.PCA)) dimRedMenu = menubar.addMenu('&Dimensionality Reduction') dimRedMenu.addAction(dimRedPCAAction) classificationLogRegAction = QtWidgets.QAction( 'Linear Logistic Regression...', self) classificationLogRegAction.setStatusTip( 'Linear Logistic Regression classifier') classificationLogRegAction.triggered.connect( lambda: self.classifyWithParameters(Classifier.LogReg)) classificationNormAction = QtWidgets.QAction('Norm classifier...', self) classificationNormAction.setStatusTip( 'Classification based on the distance to the class centers') classificationNormAction.triggered.connect( lambda: self.classifyWithParameters(Classifier.Norm)) classificationNaiveBayesAction = 
QtWidgets.QAction('Naive Bayes', self) classificationNaiveBayesAction.setStatusTip('Naive Bayes classifier') classificationNaiveBayesAction.triggered.connect( lambda: self.classify(Classifier.NaiveBayes)) classificationGaussianAction = QtWidgets.QAction( 'Gaussian classifier', self) classificationGaussianAction.setStatusTip('Gaussian classifier') classificationGaussianAction.triggered.connect( lambda: self.classify(Classifier.Gauss)) classificationGMMAction = QtWidgets.QAction('GMM classifier...', self) classificationGMMAction.setStatusTip( 'Gaussian Mixture Model classifier') classificationGMMAction.triggered.connect( lambda: self.classifyWithParameters(Classifier.GMM)) classificationKNNAction = QtWidgets.QAction('kNN...', self) classificationKNNAction.setStatusTip('k Nearest Neighbor classifier') classificationKNNAction.triggered.connect( lambda: self.classifyWithParameters(Classifier.kNN)) classificationLinRegAction = QtWidgets.QAction('Linear Regression...', self) classificationLinRegAction.setStatusTip('Linear Regression classifier') classificationLinRegAction.triggered.connect( lambda: self.classifyWithParameters(Classifier.LinReg)) classificationPerceptronAction = QtWidgets.QAction( 'Rosenblatt\'s Perceptron...', self) classificationPerceptronAction.setStatusTip('Rosenblatt\'s Perceptron') classificationPerceptronAction.triggered.connect( lambda: self.classifyWithParameters(Classifier.Perceptron)) classificationMLPAction = QtWidgets.QAction('Multilayer Perceptron...', self) classificationMLPAction.setStatusTip('Multilayer Perceptron') classificationMLPAction.triggered.connect( lambda: self.classifyWithParameters(Classifier.MLP)) try: # requires sklearn >= 0.18.dev0 neural_network.MLPClassifier() except: classificationMLPAction.setEnabled(False) classificationSVMAction = QtWidgets.QAction('SVM...', self) classificationSVMAction.setStatusTip('Support Vector Machine') classificationSVMAction.triggered.connect( lambda: 
self.classifyWithParameters(Classifier.SVM)) classificationDecisionTreeAction = QtWidgets.QAction( 'Decision Tree...', self) classificationDecisionTreeAction.setStatusTip( 'Decision Tree classifier') classificationDecisionTreeAction.triggered.connect( lambda: self.classifyWithParameters(Classifier.DecisionTree)) classificationRandomForestAction = QtWidgets.QAction( 'Random Forest...', self) classificationRandomForestAction.setStatusTip( 'Random Forest classifier') classificationRandomForestAction.triggered.connect( lambda: self.classifyWithParameters(Classifier.RandomForest)) classificationParametersAction = QtWidgets.QAction( 'Parameters...', self) classificationParametersAction.setStatusTip( 'Edit the parameters of the classification algorithms') classificationParametersAction.triggered.connect( self.editClassificationParameters) classificationNoneAction = QtWidgets.QAction('None', self) classificationNoneAction.setStatusTip('Delete classification results') classificationNoneAction.triggered.connect(self.unsetClassifier) classificationMenu = menubar.addMenu('&Classification') classificationMenu.addAction(classificationLogRegAction) classificationMenu.addAction(classificationNormAction) classificationMenu.addAction(classificationNaiveBayesAction) classificationMenu.addAction(classificationGaussianAction) classificationMenu.addAction(classificationGMMAction) classificationMenu.addAction(classificationKNNAction) classificationMenu.addAction(classificationLinRegAction) classificationMenu.addAction(classificationPerceptronAction) classificationMenu.addAction(classificationMLPAction) classificationMenu.addAction(classificationSVMAction) classificationMenu.addAction(classificationDecisionTreeAction) classificationMenu.addAction(classificationRandomForestAction) classificationMenu.addSeparator() classificationMenu.addAction(classificationParametersAction) classificationMenu.addAction(classificationNoneAction) regressionLinRegAction = QtWidgets.QAction('Linear 
Regression...', self) regressionLinRegAction.setStatusTip('Linear Regression') regressionLinRegAction.triggered.connect( lambda: self.regressionWithParameters(Regression.LinearRegression)) regressionSVRAction = QtWidgets.QAction('Support Vector Regression...', self) regressionSVRAction.setStatusTip('Support Vector Regression (SVR)') regressionSVRAction.triggered.connect( lambda: self.regressionWithParameters(Regression.SVR)) regressionRegressionTreeAction = QtWidgets.QAction( 'Regression Tree...', self) regressionRegressionTreeAction.setStatusTip('Regression Tree') regressionRegressionTreeAction.triggered.connect( lambda: self.regressionWithParameters(Regression.RegressionTree)) regressionRegressionForestAction = QtWidgets.QAction( 'Regression Forest...', self) regressionRegressionForestAction.setStatusTip('Regression Forest') regressionRegressionForestAction.triggered.connect( lambda: self.regressionWithParameters(Regression.RegressionForest)) regressionParametersAction = QtWidgets.QAction('Parameters...', self) regressionParametersAction.setStatusTip( 'Edit the parameters of the regression algorithms') regressionParametersAction.triggered.connect( self.editRegressionParameters) regressionNoneAction = QtWidgets.QAction('None', self) regressionNoneAction.setStatusTip('Delete regression result') regressionNoneAction.triggered.connect(self.unsetRegressor) regressionMenu = menubar.addMenu('&Regression') regressionMenu.addAction(regressionLinRegAction) regressionMenu.addAction(regressionSVRAction) regressionMenu.addAction(regressionRegressionTreeAction) regressionMenu.addAction(regressionRegressionForestAction) regressionMenu.addSeparator() regressionMenu.addAction(regressionParametersAction) regressionMenu.addAction(regressionNoneAction) densityEstimationHistogramAction = QtWidgets.QAction( 'Histogram...', self) densityEstimationHistogramAction.setStatusTip('Histogram estimation') densityEstimationHistogramAction.triggered.connect( lambda: 
self.densityEstimationWithParameters(DensityEstimation. Histogram)) densityEstimationSphereAction = QtWidgets.QAction( 'Sphere Density Estimation...', self) densityEstimationSphereAction.setStatusTip('Sphere Density Estimation') densityEstimationSphereAction.triggered.connect( lambda: self.densityEstimationWithParameters( DensityEstimation.SphereDensityEstimation)) densityEstimationKernelAction = QtWidgets.QAction( 'Kernel Density Estimation...', self) densityEstimationKernelAction.setStatusTip('Kernel Density Estimation') densityEstimationKernelAction.triggered.connect( lambda: self.densityEstimationWithParameters( DensityEstimation.KernelDensityEstimation)) densityEstimationParametersAction = QtWidgets.QAction( 'Parameters...', self) densityEstimationParametersAction.setStatusTip( 'Edit the parameters of the density estimation algorithms') densityEstimationParametersAction.triggered.connect( self.editDensityEstimationParameters) densityEstimationNoneAction = QtWidgets.QAction('None', self) densityEstimationNoneAction.setStatusTip( 'Delete density estimation result') densityEstimationNoneAction.triggered.connect( self.unsetDensityEstimation) densityEstimationMenu = menubar.addMenu('Density &Estimation') densityEstimationMenu.addAction(densityEstimationHistogramAction) densityEstimationMenu.addAction(densityEstimationSphereAction) densityEstimationMenu.addAction(densityEstimationKernelAction) densityEstimationMenu.addSeparator() densityEstimationMenu.addAction(densityEstimationParametersAction) densityEstimationMenu.addAction(densityEstimationNoneAction) aboutAction = QtWidgets.QAction('About...', self) aboutAction.setStatusTip('About this software') aboutAction.triggered.connect(self.aboutDialog.exec_) licenseAction = QtWidgets.QAction('License...', self) licenseAction.setStatusTip('GNU General Public License') licenseAction.triggered.connect(self.licenseDialog.showLicense) infoAction = QtWidgets.QAction('Info...', self) infoAction.setStatusTip('Information about 
the Python distribution') infoAction.triggered.connect(self.infoDialog.exec_) helpMenu = menubar.addMenu('&Help') helpMenu.addAction(aboutAction) helpMenu.addAction(licenseAction) helpMenu.addAction(infoAction) # exitAction = QtGui.QAction(QtGui.QIcon('./img/exit.png'), 'Exit', self) # exitAction.setShortcut('Ctrl+Q') # exitAction.triggered.connect(QtGui.qApp.quit) # coordinateSystemAction = QtGui.QAction(QtGui.QIcon('./img/coord.png'), 'Move coordinate system', self) # coordinateSystemAction.triggered.connect(self.onMoveCoordinateSystem) # gaussCreateAction = QtGui.QAction(QtGui.QIcon('./img/create_gauss.png'), 'Create Gaussians', self) # gaussCreateAction.triggered.connect(self.onCreateGaussians) # gaussModifyAction = QtGui.QAction(QtGui.QIcon('./img/modify_gauss.png'), 'Modify Gaussians', self) # gaussModifyAction.triggered.connect(self.onModifyGaussians) self.moveCoordinateSystemButton = QtWidgets.QToolButton() self.moveCoordinateSystemButton.setIcon( QtGui.QIcon(resource_path('./img/coord.png'))) self.moveCoordinateSystemButton.setStatusTip( 'Move the coordinate system by dragging the mouse or zoom in or out using the mouse scroll wheel' ) self.moveCoordinateSystemButton.setCheckable(True) self.moveCoordinateSystemButton.setChecked(True) self.moveCoordinateSystemButton.clicked.connect( self.onMoveCoordinateSystem) self.createGaussButton = QtWidgets.QToolButton() self.createGaussButton.setIcon( QtGui.QIcon(resource_path('./img/create_gauss.png'))) self.createGaussButton.setStatusTip( 'Create samples drawn from a new Gaussian pdf by spanning the bounding box of the covariance matrix' ) self.createGaussButton.setCheckable(True) self.createGaussButton.clicked.connect(self.onCreateGaussians) self.modifyGaussButton = QtWidgets.QToolButton() self.modifyGaussButton.setIcon( QtGui.QIcon(resource_path('./img/modify_gauss.png'))) self.modifyGaussButton.setStatusTip( 'Modify existing Gaussian pdfs by left of right clicking on the center' ) 
self.modifyGaussButton.setCheckable(True) self.modifyGaussButton.clicked.connect(self.onModifyGaussians) self.createSamplesButton = QtWidgets.QToolButton() self.createSamplesButton.setIcon( QtGui.QIcon(resource_path('./img/samples.png'))) self.createSamplesButton.setStatusTip( 'Create and modify individual samples by spanning a rectangle that contains one or more samples' ) self.createSamplesButton.setCheckable(True) self.createSamplesButton.clicked.connect(self.onCreateSamples) self.toolbar = QtWidgets.QToolBar(self) self.toolbar.setIconSize(QtCore.QSize(48, 48)) self.addToolBar(QtCore.Qt.RightToolBarArea, self.toolbar) # self.toolbar.setToolButtonStyle(QtCore.Qt.ToolButtonTextBesideIcon) # self.toolbar.addAction(coordinateSystemAction) # self.toolbar.addAction(gaussCreateAction) # self.toolbar.addAction(gaussModifyAction) self.toolbar.addWidget(self.moveCoordinateSystemButton) self.toolbar.addWidget(self.createGaussButton) self.toolbar.addWidget(self.modifyGaussButton) self.toolbar.addWidget(self.createSamplesButton) QtGui.QShortcut(QtGui.QKeySequence("Ctrl+Z"), self, self.undo) QtGui.QShortcut(QtGui.QKeySequence("Ctrl+R"), self, self.redo) self.printLicenseMessage() def printLicenseMessage(self): print("The Python Classification Toolbox is free software:") print( "you can redistribute it and/or modify it under the terms of the") print( "GNU General Public License as published by the Free Software Foundation," ) print( "either version 3 of the License, or (at your option) any later version.\n" ) print( "The Python Classification Toolbox is distributed in the hope that" ) print( "it will be useful, but WITHOUT ANY WARRANTY; without even the implied" ) print( "warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.") print("See the GNU General Public License for more details.\n") print( "You should have received a copy of the GNU General Public License" ) print("along with the Python Classification Toolbox.") print("If not, see 
<http://www.gnu.org/licenses/>.") def closeEvent(self, event): try: self.featurespace.saveDefaultFeatureSpace('.featurespace.pyct') except: print("could not save default feature space") event.accept() def undo(self): self.operationStack.undo() def redo(self): self.operationStack.redo() def newFeatureSpace(self): self.classifier = None self.regressor = None self.densityEstimator = None self.__featureSpaceHideSamplesAction.setChecked(False) self.featurespace.new() self.operationStack.clear() def openFeatureSpace(self): self.classifier = None self.regressor = None self.densityEstimator = None self.__featureSpaceHideSamplesAction.setChecked(False) self.featurespace.open() self.operationStack.clear() def saveFeatureSpace(self): self.featurespace.save() def saveAsFeatureSpace(self): self.featurespace.saveAs() def exportFeatureSpace(self): self.featurespace.exportFile() def exportFeatureSpaceAsImage(self): self.featurespace.exportImage() def importFeatureSpace(self): self.featurespace.importFile() def hideSamples(self): hide = self.__featureSpaceHideSamplesAction.isChecked() self.featurespace.hideSamples(hide) def onMoveCoordinateSystem(self): self.moveCoordinateSystemButton.setChecked(True) self.createGaussButton.setChecked(False) self.modifyGaussButton.setChecked(False) self.createSamplesButton.setChecked(False) self.createSamplesDockWidget.setHidden(True) self.featurespace.changeAction( self.featurespace.ACTION_COORDINATE_SYSTEM) self.statusbar.showMessage('') def onCreateGaussians(self): self.moveCoordinateSystemButton.setChecked(False) self.createGaussButton.setChecked(True) self.modifyGaussButton.setChecked(False) self.createSamplesButton.setChecked(False) self.createSamplesDockWidget.setHidden(True) self.featurespace.changeAction( self.featurespace.ACTION_CREATE_GAUSSIAN) def onModifyGaussians(self): self.moveCoordinateSystemButton.setChecked(False) self.createGaussButton.setChecked(False) self.modifyGaussButton.setChecked(True) 
self.createSamplesButton.setChecked(False) self.createSamplesDockWidget.setHidden(True) self.featurespace.changeAction( self.featurespace.ACTION_MODIFY_GAUSSIAN) self.statusbar.showMessage('') def onCreateSamples(self): self.moveCoordinateSystemButton.setChecked(False) self.createGaussButton.setChecked(False) self.modifyGaussButton.setChecked(False) self.createSamplesButton.setChecked(True) self.createSamplesDockWidget.setHidden(False) self.featurespace.changeAction(self.featurespace.ACTION_CREATE_SAMPLES) self.statusbar.showMessage('') def clusterWithParameters(self, method): self.clusteringParameters.setTab(method) result = self.clusteringParameters.exec_() if result == QtWidgets.QDialog.Accepted: self.classifier = None self.featurespace.setClassificationImage(None) self.regressor = None self.densityEstimator = None self.clusterer = Clustering(method, self.clusteringParameters, self.featurespace) try: self.clusterer.initialize() except AssertionError as e: QtWidgets.QMessageBox.warning(self, 'Error', str(e), QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.Ok) self.repaint() def editClusteringParameters(self): self.clusteringParameters.setTab(-1) self.clusteringParameters.exec_() def reduceDimensionalityWithParameters(self, method): self.dimRedParameters.setTab(method) result = self.dimRedParameters.exec_() if result == QtWidgets.QDialog.Accepted: self.classifier = None self.featurespace.setClassificationImage(None) self.regressor = None self.densityEstimator = None self.dimreduction = DimensionalityReduction( method, self.dimRedParameters, self.featurespace) self.dimreduction.initialize() self.repaint() def classify(self, classifier): self.regressor = None self.densityEstimator = None self.classifier = Classifier(classifier, self.classifierParameters, self.featurespace) try: self.classifier.initialize() self.runFeatureSpaceComputations() op = Operation(self, "dummy", None) self.operationStack.add(op) except Exception as e: QtWidgets.QMessageBox.warning(self, 
'Error', str(e), QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.Ok) def classifyWithParameters(self, classifier): self.regressor = None self.densityEstimator = None self.classifierParameters.setTab(classifier) result = self.classifierParameters.exec_() if result == QtWidgets.QDialog.Accepted: self.classifier = Classifier(classifier, self.classifierParameters, self.featurespace) try: self.classifier.initialize() self.runFeatureSpaceComputations() op = Operation(self, "dummy", None) self.operationStack.add(op) except AssertionError as e: QtWidgets.QMessageBox.warning(self, 'Error', str(e), QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.Ok) def unsetClassifier(self): self.classifier = None self.featurespace.setClassificationImage(None) op = Operation(self, "dummy", None) self.operationStack.add(op) self.repaint() def runFeatureSpaceComputations(self, initialize=False): changes = False if self.classifier: self.setCursor(QtGui.QCursor(QtCore.Qt.WaitCursor)) self.statusbar.showMessage('') img = None if initialize: try: self.classifier = self.classifier.copy() self.classifier.initialize() img = self.classifier.runFeatureSpaceComputations() except AssertionError as e: QtWidgets.QMessageBox.warning(self, 'Error', str(e), QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.Ok) else: img = self.classifier.runFeatureSpaceComputations() self.featurespace.setClassificationImage(img) self.featurespace.repaint() self.statusbar.showMessage('classification done') self.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) changes = True elif self.regressor: self.statusbar.showMessage('') if initialize: self.regressor.initialize() self.featurespace.repaint() elif self.densityEstimator: self.setCursor(QtGui.QCursor(QtCore.Qt.WaitCursor)) self.statusbar.showMessage('') img = None if initialize: try: self.densityEstimator.initialize() img = self.densityEstimator.runFeatureSpaceComputations() except AssertionError as e: QtWidgets.QMessageBox.warning(self, 'Error', str(e), 
QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.Ok) else: img = self.densityEstimator.runFeatureSpaceComputations() self.featurespace.setClassificationImage(img) self.featurespace.repaint() self.statusbar.showMessage('density estimation done') self.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) changes = True return changes def editClassificationParameters(self): self.classifierParameters.setTab(-1) self.classifierParameters.exec_() def regression(self, regressor): self.classifier = None self.densityEstimator = None self.setCursor(QtGui.QCursor(QtCore.Qt.WaitCursor)) self.statusbar.showMessage('') self.regressor = Regression(regressor, self.regressionParameters, self.featurespace) self.regressor.initialize() self.featurespace.setClassificationImage(None) op = Operation(self, "dummy", None) self.operationStack.add(op) self.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.statusbar.showMessage('regression done') self.repaint() def regressionWithParameters(self, regressor): self.classifier = None self.densityEstimator = None self.regressionParameters.setTab(regressor) result = self.regressionParameters.exec_() if result == QtWidgets.QDialog.Accepted: self.setCursor(QtGui.QCursor(QtCore.Qt.WaitCursor)) self.statusbar.showMessage('') self.regressor = Regression(regressor, self.regressionParameters, self.featurespace) try: self.regressor.initialize() self.featurespace.setClassificationImage(None) op = Operation(self, "dummy", None) self.operationStack.add(op) except AssertionError as e: QtWidgets.QMessageBox.warning(self, 'Error', str(e), QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.Ok) self.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.statusbar.showMessage('regression done') self.repaint() def paintRegressor(self, qp): if self.regressor: self.regressor.paint(qp) def unsetRegressor(self): self.regressor = None op = Operation(self, "dummy", None) self.operationStack.add(op) self.repaint() def editRegressionParameters(self): 
self.regressionParameters.setTab(-1) self.regressionParameters.exec_() def densityEstimationWithParameters(self, estimator): self.classifier = None self.regressor = None self.densityEstimationParameters.setTab(estimator) result = self.densityEstimationParameters.exec_() if result == QtWidgets.QDialog.Accepted: self.setCursor(QtGui.QCursor(QtCore.Qt.WaitCursor)) self.statusbar.showMessage('') self.densityEstimator = DensityEstimation( estimator, self.densityEstimationParameters, self.featurespace, self.probabilityDensityViewer) try: self.densityEstimator.initialize() self.runFeatureSpaceComputations() op = Operation(self, "dummy", None) self.operationStack.add(op) except AssertionError as e: QtWidgets.QMessageBox.warning(self, 'Error', str(e), QtWidgets.QMessageBox.Ok, QtWidgets.QMessageBox.Ok) self.setCursor(QtGui.QCursor(QtCore.Qt.ArrowCursor)) self.statusbar.showMessage('density estimation done') self.repaint() def editDensityEstimationParameters(self): self.densityEstimationParameters.setTab(-1) self.densityEstimationParameters.exec_() def unsetDensityEstimation(self): self.densityEstimator = None self.featurespace.setClassificationImage(None) op = Operation(self, "dummy", None) self.operationStack.add(op) self.repaint() def getToolboxImage(self): img = self.featurespace.getClassificationImage() # print("getToolboxImage: ", img) return (self.classifier, self.regressor, self.densityEstimator, img) def setToolboxImage(self, image): (self.classifier, self.regressor, self.densityEstimator, classificationImage) = image self.featurespace.setClassificationImage(classificationImage) self.featurespace.repaint()
class NeuralNetworkMR:
    """Small CNN regression network: convolution -> max-pool -> FC stack -> regression head.

    Built from project-local layer classes (Convolution, Maxpool, FCL, Relu,
    Regression); their exact tensor contracts are not visible in this file.
    Single-sample (apply/backprop) and batched (apply_batch/backprop_batch)
    paths are maintained separately below.
    """
    def __init__(self):
        # Convolution(3, 8): presumably 3x3 kernels, 8 filters -- TODO confirm.
        self.convolution = Convolution(3, 8)
        self.pool = Maxpool()
        # Fully-connected layers; 11 * 23 * 8 is the flattened pooled feature size.
        self.fcl = FCL(11 * 23 * 8, 128)
        self.fcl1 = FCL(128, 128)
        #self.fcl2 = FCL(64, 64)
        self.relu = Relu()
        self.relu1 = Relu()
        #self.relu2 = Relu()
        # NOTE(review): the regression head is sized for the raw pooled output
        # (11*23*8 inputs, 5 outputs), yet forward_train feeds it the 128-wide
        # output of fcl1 -- looks inconsistent; confirm which path Regression
        # actually expects.
        self.regression = Regression(11 * 23 * 8, 5)

    def normalize(self, image):
        # Scale 8-bit pixel values into [-0.5, 0.5).
        return (image /255) - 0.5

    def normalize_batch(self, batch):
        # batch appears to be (labels, images); index 1 holds the image
        # array -- TODO confirm against the data loader.
        batch[1] = (batch[1]/255) - 0.5
        return batch

    def forward_train(self, im, label):
        """Single-sample training forward pass; returns (output, loss, abs error)."""
        im = self.normalize(im)
        out = self.convolution.apply(im)
        out = self.pool.apply(out)
        out = self.fcl.apply(out)
        out = self.relu.apply(out)
        out = self.fcl1.apply(out)
        # NOTE(review): reuses self.relu here instead of self.relu1; if Relu
        # caches its input for backprop, this second call clobbers the state
        # saved by the first -- verify intent.
        out = self.relu.apply(out)
        out = self.regression.apply(out)
        #print(out.shape)
        #print("labels shape: " + str(label.shape))
        loss = np.sum(self.regression.squared_error(label))
        acc = np.abs(self.regression.error(label))
        return out, loss, acc

    def forward(self, im):
        """Inference-only forward pass.

        NOTE(review): unlike forward_train this skips fcl1 entirely
        (conv -> pool -> fcl -> relu -> regression) -- confirm the two paths
        are meant to differ.
        """
        im = self.normalize(im)
        out = self.convolution.apply(im)
        out = self.pool.apply(out)
        out = self.fcl.apply(out)
        out = self.relu.apply(out)
        out = self.regression.apply(out)
        return out

    def forward_batch(self, batch):
        """Batched forward pass: conv -> pool -> regression (FC stack disabled)."""
        batch = self.normalize_batch(batch)
        out = self.convolution.apply_batch(batch[1])
        out = self.pool.apply_batch(out)
        #out = self.fcl.apply_batch(out)
        #out = self.relu.apply_batch(out)
        #out = self.fcl1.apply_batch(out)
        #out = self.relu1.apply_batch(out)
        #out = self.fcl2.apply_batch(out)
        #out = self.relu2.apply_batch(out)
        out = self.regression.apply_batch(out)
        labels = batch[0]
        #print("labels:")
        #print(labels.shape)
        loss = np.sum(self.regression.squared_error_backprop(labels))
        acc = np.abs(self.regression.error_backprop(labels))
        return out, loss, acc

    def train_batch(self, batch, lr=0.005):
        """One batched training step.

        Only the regression head is updated -- backprop through the earlier
        layers is commented out, matching the disabled layers in
        forward_batch.
        """
        out, loss, acc = self.forward_batch(batch)
        labels = batch[0]
        gradient = self.regression.backprop_batch(labels, lr)
        #gradient = self.relu2.backprop_batch(gradient)
        #gradient = self.fcl2.backprop_batch(gradient, lr)
        #gradient = self.relu1.backprop_batch(gradient)
        #gradient = self.fcl1.backprop_batch(gradient, lr)
        #gradient = self.relu.backprop(gradient)
        #gradient = self.fcl.backprop_batch(gradient, lr)
        #gradient = self.pool.backprop_batch(gradient)
        #self.convolution.backprop_batch(gradient, lr)
        return out, loss, acc

    # Disabled finite-difference gradient check for the regression head's
    # weights; kept for reference.
    # def grad_check(self, batch):
    #     batch = self.normalize_batch(batch)
    #     out = self.convolution.apply_batch(batch[1])
    #     out = self.pool.apply_batch(out)
    #     out0 = self.regression.apply_batch(out)
    #     error = self.regression.error(batch[0])
    #     gradient = error.T @ self.regression.fcl.last_input / self.regression.fcl.last_input_shape[0]
    #     N, M = self.regression.fcl.weights.shape
    #     for i in range(N):
    #         for j in range(M):
    #             self.regression.fcl.weights[i][j] += 0.0001
    #             batch = self.normalize_batch(batch)
    #             out = self.convolution.apply_batch(batch[1])
    #             out = self.pool.apply_batch(out)
    #             out1 = self.regression.apply_batch(out)
    #             self.regression.fcl.weights[i][j] -= 0.0002
    #             out = self.convolution.apply_batch(batch[1])
    #             out = self.pool.apply_batch(out)
    #             out2 = self.regression.apply_batch(out)
    #             self.regression.fcl.weights[i][j] += 0.0001
    #             res = (out1 - out2) / (0.0002)
    #             print()
    #             print(res.shape)
    #             print(out1.shape)
    #             print(gradient.shape)
    #             for k in range(out1.shape[0]):
    #                 print(str(res[k][0]) + "|||" + str(gradient[k][0]))

    def train(self, label, im, lr=0.00005):
        """One single-sample training step.

        NOTE(review): forward_train also applies fcl1, but the backward chain
        here is regression -> relu -> fcl -> pool -> conv, so fcl1 never
        receives gradients -- confirm this is intentional.
        """
        out, loss, acc = self.forward_train(im, label)
        gradient = self.regression.backprop(label, lr)
        gradient = self.relu.backprop(gradient)
        gradient = self.fcl.backprop(gradient, lr)
        gradient = self.pool.backprop(gradient)
        self.convolution.backprop(gradient, lr)
        return out, loss, acc
# Time-based train/test split: rows strictly before 2019-03-23 form the
# training set, the rest the test set. No shuffling -- presumably this is
# time-series data where chronological order must be preserved (TODO confirm).
split_date = datetime.datetime.strptime("2019-03-23", "%Y-%m-%d")
split_index = analysis_data[analysis_data.DATE == split_date].index.tolist()[0]
print("split_index:", split_index, "\n")
# x_train = x[0:split_index]
# x_test = x[split_index:]
x_train = x.iloc[0:split_index, :]
x_test = x.iloc[split_index:, :]
y_train = y.iloc[0:split_index]
y_test = y.iloc[split_index:]
# x_num = len(list(x))
x_num = x.shape[1]  # number of feature columns
#------------------------------------------------
# Project-local Regression wrapper: run lasso, plain OLS, and RFE-based
# feature selection in turn, pausing for the user before continuing.
reg = Regression(x_train, y_train, x_test, y_test)
reg.lasso_reg(1, 10e3)  # presumably a (low, high) regularisation range -- TODO confirm signature
print()
reg.raw_reg()
print()
reg.reg_with_rfe()
input("continue")
#---------------------------------------------
# First-degree (linear) fit with scikit-learn for comparison.
reg = LinearRegression().fit(x_train, y_train)
def addRegression(self, name):
    """Register a new regression configuration under *name*.

    The new Regression inherits this trainer's shared settings (input files,
    tree, method, variables, cuts, targets, ...). Mutable option/variable/cut
    collections are shallow-copied so per-regression tweaks made later do not
    leak back into the shared defaults; scalar settings are shared by
    reference.

    :param name: unique key for the regression; also suffixed onto baseName
                 to form the regression's full name.
    :raises StandardError: if *name* has already been registered.
                           (Python 2 builtin -- this module is Python 2.)
    """
    # Membership test on the dict itself -- no need to materialise .keys().
    if name in self.regressions:
        raise StandardError("ERROR: regression " + name + " has already been registered")
    reg = Regression()
    reg.id = 1
    reg.name = self.baseName + "_" + name
    reg.inputFiles = self.inputFiles
    reg.tree = self.tree
    reg.method = self.method
    # TMVA-specific training options only apply to the TMVA trainer backend.
    if self.trainerType == "TMVA":
        reg.tmvaTrainingOptions = copy.copy(self.tmvaTrainingOptions)
    reg.options = copy.copy(self.commonOptions)
    reg.doErrors = self.doErrors
    reg.doCombine = self.doCombine
    # Separate variable lists for barrel (EB), endcap (EE) and combined fits.
    reg.variablesEB = copy.copy(self.commonVariablesEB)
    reg.variablesEE = copy.copy(self.commonVariablesEE)
    reg.variablesComb = copy.copy(self.commonVariablesComb)
    reg.target = self.target
    reg.targetError = self.targetError
    reg.targetComb = self.targetComb
    reg.cuts = copy.copy(self.commonCuts)
    reg.cutsEB = copy.copy(self.commonCutsEB)
    reg.cutsEE = copy.copy(self.commonCutsEE)
    reg.cutsError = copy.copy(self.commonCutsError)
    reg.cutsComb = copy.copy(self.commonCutsComb)
    self.regressions[name] = reg
def menu(data): """ User Interface for the application :return: None """ regression = Regression(data_file=data, actual_output=Constants.OUTPUT_FEATURE, iterations=Constants.ITERATIONS, step_size=Constants.STEP_SIZE) print "***************************************************************************************************" print "* Regression *" print "***************************************************************************************************" while True: print "\n\n\t1. Least Square Regression " print "\t2. Cross Validate Least Square Regression" print "\t3. Ridge Regression" print "\t4. Select Model from Ridge Regression" print "\t5. Exit\n" try: user_choice = int( raw_input("\tPlease select your option (1 - 5) : ")) if user_choice == 1: degree, multiple_feature = _regression_option() features = Constants.MULTIPLE_FEATURES if multiple_feature else Constants.SINGLE_FEATURE regression.set_features(features) coefficients = least_square_regression(regression, degree) print coefficients elif user_choice == 2: degree, multiple_feature = _regression_option() features = Constants.MULTIPLE_FEATURES if multiple_feature else Constants.SINGLE_FEATURE regression.set_features(features) rmse = cross_validate(regression, is_ridge=False, degree=degree) print rmse elif user_choice == 3: degree, multiple_feature = _regression_option() features = Constants.MULTIPLE_FEATURES if multiple_feature else Constants.SINGLE_FEATURE regression.set_features(features) coefficients = ridge_regression(regression, degree=degree) print coefficients elif user_choice == 4: regression.iterations = Constants.MODEL_SELECTION_ITERATION degree, multiple_feature = _regression_option() features = Constants.MULTIPLE_FEATURES if multiple_feature else Constants.SINGLE_FEATURE regression.set_features(features) rmse = cross_validate(regression, is_ridge=True, degree=degree) print rmse elif user_choice == 5: break else: raise ValueError("Invalid option") except ValueError as err: print err print 
"\n\n\tERROR: Please select a correct option."
# --- Spambase dataset: ID3 with binary splits ---
spambaseFileLocation = 'spambase.csv'
spambaseDataSet = importData(spambaseFileLocation)
# 10-fold validation; the list presumably holds candidate pruning/termination
# thresholds and the final flag selects binary splitting -- TODO confirm the
# ID3 constructor's parameter semantics.
spambaseID3 = ID3(spambaseDataSet, 10, [0.05, 0.10, 0.15, 0.20, 0.25], True)
spambaseID3.validate()

print("Mushroom Dataset - Multiway Split")
mushroomFileLocation = 'mushroom.csv'
mushroomDataSet = importData(mushroomFileLocation)
columnsLength = len(mushroomDataSet.columns)
# Recode the last column (the class label) into the form ID3 expects.
mushroomDataSet = transformMushroomTargetAttribute(mushroomDataSet, columnsLength - 1)
mushroomMultiwayID3 = ID3(mushroomDataSet, 10, [0.05, 0.10, 0.15], False)
mushroomMultiwayID3.validate()

print("Mushroom Dataset - Binary Split")
# One-hot encode every feature column (all columns except the target) so the
# categorical attributes become binary indicator columns...
mushroomModifiedDataSet = pd.get_dummies(data=mushroomDataSet, columns=range(columnsLength - 1))
# ...then move the target column back to the last position, where ID3
# apparently expects it.
targetAttributeColumn = mushroomModifiedDataSet[columnsLength - 1]
mushroomModifiedDataSet.drop(labels=[columnsLength - 1], axis=1, inplace=True)
mushroomModifiedDataSet.insert(len(mushroomModifiedDataSet.columns), columnsLength - 1, targetAttributeColumn)
mushroomBinaryID3 = ID3(mushroomModifiedDataSet, 10, [0.05, 0.10, 0.15], True)
mushroomBinaryID3.validate()

print("Housing Dataset")
housingFileLocation = 'housing.csv'
housingDataSet = importData(housingFileLocation)
# Regression tree over the housing data with the same 10-fold scheme.
housingRegression = Regression(housingDataSet, 10, [0.05, 0.10, 0.15, 0.20])
housingRegression.validate()