def saveParams(self, best_estimator):
    # outdir = OutputMgr.checkCreateDSDir(self.estimator.dataset.name, self.estimator.nick)
    outdirI = OutputMgr.getOutIncludeDir()

    myFile = open(f"{outdirI}SVM_params.h", "w+")
    myFile.write(f"#ifndef SVM_PARAMS_H\n")
    myFile.write(f"#define SVM_PARAMS_H\n\n")
    if self.estimator.is_regr == False:
        myFile.write(f"#define N_CLASS {self.estimator.n_classes}\n\n")
        myFile.write(f"#define WEIGTH_DIM {best_estimator.coef_.shape[0]}\n\n")
        myFile.write(f"#ifndef N_FEATURE\n")
        myFile.write(f"#define N_FEATURE {best_estimator.coef_.shape[1]}\n")
        myFile.write(f"#endif\n\n")
    else:
        myFile.write(f"#define N_CLASS 0\n\n")
        myFile.write(f"#define WEIGTH_DIM 1\n\n")
        myFile.write(f"#ifndef N_FEATURE\n")
        myFile.write(f"#define N_FEATURE {best_estimator.coef_.shape[1]}\n")
        myFile.write(f"#endif\n\n")
    # Same declaration for classification and regression
    myFile.write(f"extern float support_vectors[WEIGTH_DIM][N_FEATURE];\n")
    myFile.write(f"extern float bias[WEIGTH_DIM];\n")
    myFile.write(f"\n#endif")
    myFile.close()

    outdirS = OutputMgr.getOutSourceDir()
    myFile = open(f"{outdirS}SVM_params.c", "w+")
    myFile.write(f"#include \"SVM_params.h\"\n")

    import sys
    sys.path.insert(1, 'utils')
    import create_matrices

    if self.estimator.is_regr == False:
        stri = create_matrices.createMatrix('float', 'support_vectors', best_estimator.coef_,
                                            'WEIGTH_DIM', 'N_FEATURE')
    else:
        stri = create_matrices.createMatrix(
            'float', 'support_vectors',
            np.reshape(best_estimator.coef_, (1, best_estimator.coef_.shape[1])),
            'WEIGTH_DIM', 'N_FEATURE')
    myFile.write(stri)
    stri = create_matrices.createArray('float', 'bias', best_estimator.intercept_, 'WEIGTH_DIM')
    myFile.write(stri)
    myFile.close()
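# The utils/create_matrices module used above is not shown in this section. The
# following is only a minimal sketch, under the assumption that createArray and
# createMatrix return C definitions whose dimensions are macro names; the real
# helpers may format values and line breaks differently.
import numpy as np

def createArray(type_s, name, data, dim_macro):
    """Sketch: emit e.g. 'float bias[WEIGTH_DIM] = {0.1, -0.2};\\n'."""
    body = ", ".join(str(v) for v in np.ravel(data))
    return f"{type_s} {name}[{dim_macro}] = {{{body}}};\n"

def createMatrix(type_s, name, data, rows_macro, cols_macro):
    """Sketch: emit e.g. 'float support_vectors[WEIGTH_DIM][N_FEATURE] = {{...}, ...};\\n'."""
    rows = ",\n    ".join(
        "{" + ", ".join(str(v) for v in row) + "}" for row in np.asarray(data)
    )
    return f"{type_s} {name}[{rows_macro}][{cols_macro}] = {{\n    {rows}\n}};\n"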
def saveTestingSet(X_test, y_test, estimator):
    import pandas as pd
    if isinstance(X_test, pd.core.series.Series):
        X_test = X_test.to_frame()

    #outdir = OutputMgr.checkCreateDSDir(estimator.dataset.name, estimator.nick)
    outdirI = OutputMgr.getOutIncludeDir()

    myFile = open(f"{outdirI}testing_set.h", "w+")
    myFile.write(f"#ifndef TESTINGSET_H\n")
    myFile.write(f"#define TESTINGSET_H\n\n")
    myFile.write(f"#define N_TEST {y_test.shape[0]}\n\n")
    '''myFile.write(f"#ifndef N_FEATURE\n")
    myFile.write(f"#define N_FEATURE {X_test.shape[1]}\n")
    myFile.write(f"#endif\n\n")'''
    myFile.write(f"#ifndef N_ORIG_FEATURE\n")
    myFile.write(f"#define N_ORIG_FEATURE {X_test.shape[1]}\n")  # Is this value correct??? xxx
    myFile.write(f"#endif\n\n")

    if estimator.is_regr:
        type_s = 'float'
    else:
        type_s = 'int'
    myFile.write(f"extern {type_s} y_test[N_TEST];\n")
    myFile.write(f"extern float X_test[N_TEST][N_ORIG_FEATURE];\n")

    # #if cfg.normalization!=None and cfg.regr and cfg.algo.lower() != 'dt':
    #     saveTestNormalization(myFile)
    #
    myFile.write(f"#endif")
    myFile.close()

    #outdirI = OutputMgr.checkCreateGeneralIncludeDir()
    #from shutil import copyfile
    #copyfile(f"{outdir}testing_set.h", f"{outdirI}testing_set.h")

    outdirS = OutputMgr.getOutSourceDir()
    myFile = open(f"{outdirS}testing_set.c", "w+")
    myFile.write(f"#include \"testing_set.h\"\n")

    if estimator.is_regr:
        type_s = 'float'
    else:
        type_s = 'int'

    import sys
    sys.path.insert(1, 'utils')
    import create_matrices
    import numpy as np

    stri = create_matrices.createArray(type_s, "y_test",
                                       np.reshape(y_test, (y_test.shape[0], )), 'N_TEST')
    myFile.write(stri)
    stri = create_matrices.createMatrix('float', 'X_test', X_test.values,
                                        'N_TEST', 'N_ORIG_FEATURE')  # changed by FB
    myFile.write(stri)
    myFile.close()
def saveTrainingSet(X_train, y_train, estimator):
    #outdir = OutputMgr.checkCreateDSDir(estimator.dataset.name, estimator.nick)
    outdirI = OutputMgr.getOutIncludeDir()

    myFile = open(f"{outdirI}training_set.h", "w+")
    myFile.write(f"#define N_TRAIN {y_train.shape[0]}\n\n")
    myFile.write(f"#ifndef N_ORIG_FEATURE\n")
    myFile.write(f"#define N_ORIG_FEATURE {X_train.shape[1]}\n")
    myFile.write(f"#endif\n\n")

    if estimator.is_regr:
        type_s = 'float'
    else:
        type_s = 'int'
    myFile.write(f"extern {type_s} y_train[N_TRAIN];\n")
    myFile.write(f"extern float X_train[N_TRAIN][N_ORIG_FEATURE];\n")
    myFile.close()

    #outdirI = OutputMgr.checkCreateGeneralIncludeDir()
    #from shutil import copyfile
    #copyfile(f"{outdir}training_set.h", f"{outdirI}training_set.h")

    outdirS = OutputMgr.getOutSourceDir()
    myFile = open(f"{outdirS}training_set_params.c", "w+")
    #myFile.write(f"#include \"AI_main.h\"\n")
    myFile.write(f"#include \"training_set.h\"\n")

    if estimator.is_regr:
        type_s = 'float'
    else:
        type_s = 'int'

    import sys
    sys.path.insert(1, 'utils')
    import create_matrices
    import numpy as np

    stri = create_matrices.createArray(type_s, "y_train",
                                       np.reshape(y_train, (y_train.shape[0], )), 'N_TRAIN')
    myFile.write(stri)
    stri = create_matrices.createMatrix('float', 'X_train', X_train.values,
                                        'N_TRAIN', 'N_ORIG_FEATURE')  # changed by FB
    myFile.write(stri)
    myFile.close()
def saveTestingSet(X_test, y_test, estimator, full=True):
    outdir = OutputMgr.checkCreateDSDir(estimator.dataset.name, estimator.nick)

    if full:
        myFile = open(f"{outdir}testing_set.h", "w+")
        myFile.write(f"#ifndef TESTINGSET_H\n")
        myFile.write(f"#define TESTINGSET_H\n\n")
    else:
        myFile = open(f"{outdir}minimal_testing_set.h", "w+")
        myFile.write(f"#ifndef MINIMAL_TESTINGSET_H\n")
        myFile.write(f"#define MINIMAL_TESTINGSET_H\n\n")

    myFile.write(f"#define N_TEST {y_test.shape[0]}\n\n")
    myFile.write(f"#ifndef N_FEATURE\n")
    myFile.write(f"#define N_FEATURE {X_test.shape[1]}\n")
    myFile.write(f"#endif\n\n")
    myFile.write(f"#ifndef N_ORIG_FEATURE\n")
    myFile.write(f"#define N_ORIG_FEATURE {X_test.shape[1]}\n")
    myFile.write(f"#endif\n\n")

    if estimator.is_regr:
        type_s = 'float'
    else:
        type_s = 'int'
    myFile.write(f"extern {type_s} y_test[N_TEST];\n")
    myFile.write(f"extern float X_test[N_TEST][N_FEATURE];\n")

    # #if cfg.normalization!=None and cfg.regr and cfg.algo.lower() != 'dt':
    #     saveTestNormalization(myFile)
    #
    myFile.write(f"#endif")
    myFile.close()

    outdirI = OutputMgr.checkCreateGeneralIncludeDir()
    from shutil import copyfile
    if full:
        copyfile(f"{outdir}testing_set.h", f"{outdirI}testing_set.h")
    else:
        copyfile(f"{outdir}minimal_testing_set.h", f"{outdirI}minimal_testing_set.h")

    outdirS = OutputMgr.checkCreateGeneralSourceDir()
    if full:
        myFile = open(f"{outdirS}testing_set.c", "w+")
    else:
        myFile = open(f"{outdirS}minimal_testing_set.c", "w+")
    #myFile.write(f"#include \"AI_main.h\"\n")
    if full:
        myFile.write(f"#include \"testing_set.h\"\n")
    else:
        myFile.write(f"#include \"minimal_testing_set.h\"\n")

    if estimator.is_regr:
        type_s = 'float'
    else:
        type_s = 'int'

    import sys
    sys.path.insert(1, 'utils')
    import create_matrices
    import numpy as np

    stri = create_matrices.createArray(
        type_s, "y_test", np.reshape(y_test, (y_test.shape[0], )), 'N_TEST')
    myFile.write(stri)
    stri = create_matrices.createMatrix('float', 'X_test', X_test.values,
                                        'N_TEST', 'N_FEATURE')  # changed by FB
    myFile.write(stri)
    myFile.close()
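# Hypothetical driver for saveTestingSet, for illustration only. The estimator
# argument just needs the attributes the function reads (is_regr, nick,
# dataset.name); DummyDataset and DummyEstimator below are stubs invented for
# this sketch, not part of the project.
import pandas as pd
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split

class DummyDataset:
    name = "iris"

class DummyEstimator:
    is_regr = False
    nick = "svm"
    dataset = DummyDataset()

iris = load_iris(as_frame=True)
X_train, X_test, y_train, y_test = train_test_split(
    iris.data, iris.target, test_size=0.3, random_state=0)

# Writes testing_set.h / testing_set.c (or the minimal_ variants when full=False).
saveTestingSet(X_test, y_test, DummyEstimator(), full=True)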
def savePPParams(scaler, reduce_dims, estimator):
    outdir = OutputMgr.checkCreateDSDir(estimator.dataset.name, estimator.nick)

    if scaler == None:
        sx = np.ones(estimator.dataset.X.shape[1])
    else:
        sx = scaler.scale_
        if isinstance(scaler, preprocessing.StandardScaler):
            ux = scaler.mean_

    if reduce_dims == None:
        pca_components = np.identity(estimator.dataset.X.shape[1])
    else:
        pca_components = reduce_dims.components_

    myFile = open(f"{outdir}PPParams.h", "w+")
    myFile.write(f"#ifndef PPPARAMS_H\n")
    myFile.write(f"#define PPPARAMS_H\n\n")
    myFile.write(f"#ifndef N_FEATURE\n")
    myFile.write(f"#define N_FEATURE {pca_components.shape[0]}\n")
    myFile.write(f"#endif\n\n")
    myFile.write(f"#ifndef N_ORIG_FEATURE\n")
    myFile.write(f"#define N_ORIG_FEATURE {pca_components.shape[1]}\n")
    myFile.write(f"#endif\n\n")
    myFile.write(f"extern float pca_components[N_FEATURE][N_ORIG_FEATURE];\n")
    myFile.write(f"\n")

    if scaler != None:
        if isinstance(scaler, preprocessing.StandardScaler):
            myFile.write(f"#define STANDARD_NORMALIZATION\n\n")
            myFile.write(f"extern float s_x[N_ORIG_FEATURE];\n")
            myFile.write(f"extern float u_x[N_ORIG_FEATURE];\n")
        elif isinstance(scaler, preprocessing.MinMaxScaler):
            myFile.write(f"#define MINMAX_NORMALIZATION\n\n")
            myFile.write(f"extern float s_x[N_ORIG_FEATURE];\n")
    '''
    if cfg.normalization!=None and cfg.regr and cfg.algo.lower() != 'dt':
        saveTestNormalization(myFile)
    '''
    myFile.write(f"#endif")
    myFile.close()

    outdirI = OutputMgr.checkCreateGeneralIncludeDir()
    from shutil import copyfile
    copyfile(f"{outdir}PPParams.h", f"{outdirI}PPParams.h")

    outdirS = OutputMgr.checkCreateGeneralSourceDir()
    myFile = open(f"{outdirS}preprocess_params.c", "w+")
    #myFile.write(f"#include \"AI_main.h\"\n")
    myFile.write(f"#include \"PPParams.h\"\n")

    import sys
    sys.path.insert(1, 'utils')
    import create_matrices

    stri = create_matrices.createMatrix('float', 'pca_components', pca_components,
                                        'N_FEATURE', 'N_ORIG_FEATURE')
    myFile.write(stri)
    myFile.write(f"\n")

    if scaler != None:
        if isinstance(scaler, preprocessing.StandardScaler):
            myFile.write(f"#define STANDARD_NORMALIZATION\n\n")
            stri = create_matrices.createArray('float', "s_x",
                                               np.reshape(sx, (sx.size, )), 'N_ORIG_FEATURE')
            myFile.write(stri)
            stri = create_matrices.createArray('float', "u_x",
                                               np.reshape(ux, (ux.size, )), 'N_ORIG_FEATURE')
            myFile.write(stri)
        elif isinstance(scaler, preprocessing.MinMaxScaler):
            myFile.write(f"#define MINMAX_NORMALIZATION\n\n")
            stri = create_matrices.createArray('float', "s_x",
                                               np.reshape(sx, (sx.size, )), 'N_ORIG_FEATURE')
            myFile.write(stri)
    myFile.close()
def saveParams(self, best_estimator):
    # outdir = OutputMgr.checkCreateDSDir(self.estimator.dataset.name, self.estimator.nick)
    outdirI = OutputMgr.getOutIncludeDir()

    n_nodes = getattr(best_estimator.tree_, 'node_count')
    values = getattr(best_estimator.tree_, 'value')
    children_left = getattr(best_estimator.tree_, 'children_left')
    children_right = getattr(best_estimator.tree_, 'children_right')
    feature = getattr(best_estimator.tree_, 'feature')
    threshold = getattr(best_estimator.tree_, 'threshold')
    target_classes = np.unique(self.estimator.dataset.y)

    myFile = open(f"{outdirI}DT_params.h", "w+")
    myFile.write(f"#ifndef DT_PARAMS_H\n")
    myFile.write(f"#define DT_PARAMS_H\n\n")
    if self.estimator.is_regr == False:
        myFile.write(f"#define N_CLASS {self.estimator.n_classes}\n")
        leaf_nodes = children_left == children_right
        n_leaves = np.count_nonzero(leaf_nodes)
        myFile.write(f"#define N_LEAVES {n_leaves}\n\n")
    else:
        myFile.write(f"#define N_CLASS 0\n\n")
    myFile.write(f"#define VALUES_DIM {values.shape[2]}\n\n")
    myFile.write(f"#define N_NODES {n_nodes}\n\n")
    myFile.write(f"extern int children_left[N_NODES];\n")
    myFile.write(f"extern int children_right[N_NODES];\n")
    myFile.write(f"extern int feature[N_NODES];\n")
    myFile.write(f"extern float threshold[N_NODES];\n")
    if self.estimator.is_regr == True:
        myFile.write(f"extern int values[N_NODES][VALUES_DIM];\n")
    else:
        myFile.write(f"extern int target_classes[N_CLASS];\n")
        myFile.write(f"extern int leaf_nodes[N_LEAVES][2];\n")
    myFile.write(f"\n#endif")
    myFile.close()

    #outdirI = OutputMgr.checkCreateGeneralIncludeDir()
    #from shutil import copyfile
    #copyfile(f"{outdir}DT_params.h", f"{outdirI}DT_params.h")

    outdirS = OutputMgr.getOutSourceDir()
    myFile = open(f"{outdirS}DT_params.c", "w+")
    myFile.write(f"#include \"DT_params.h\"\n")

    import sys
    sys.path.insert(1, 'utils')
    import create_matrices

    stri = create_matrices.createArray("int", "children_left", children_left, 'N_NODES')
    myFile.write(stri)
    stri = create_matrices.createArray("int", "children_right", children_right, 'N_NODES')
    myFile.write(stri)
    stri = create_matrices.createArray("int", "feature", feature, 'N_NODES')
    myFile.write(stri)
    stri = create_matrices.createArray("float", "threshold", threshold, 'N_NODES')
    myFile.write(stri)
    if self.estimator.is_regr == True:
        stri = create_matrices.createMatrix2('int', 'values', values, 'N_NODES', 'VALUES_DIM')
        myFile.write(stri)
    else:
        stri = create_matrices.createArray("int", "target_classes", target_classes, 'N_CLASS')
        myFile.write(stri)
        # Each leaf_nodes row pairs a leaf index with the argmax of its class counts
        argmaxs = np.argmax(values[leaf_nodes][:, 0], axis=1).reshape(-1, 1)
        leaf_nodes_idx = np.asarray(np.nonzero(leaf_nodes)).T
        leaf_nodes = np.concatenate((leaf_nodes_idx, argmaxs), axis=1)
        stri = create_matrices.createMatrix('int', 'leaf_nodes', leaf_nodes, 'N_LEAVES', '2')
        myFile.write(stri)
    myFile.close()
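# For reference, the arrays exported above follow the usual sklearn tree_ layout:
# node i is a leaf when children_left[i] == children_right[i]. The traversal the
# generated C code is assumed to perform (classification case) can be sketched
# in Python like this; it is an illustration, not the project's C implementation.
def predict_one(x, children_left, children_right, feature, threshold,
                leaf_nodes, target_classes):
    node = 0
    while children_left[node] != children_right[node]:   # internal node
        if x[feature[node]] <= threshold[node]:
            node = children_left[node]
        else:
            node = children_right[node]
    # leaf_nodes rows are (leaf index, argmax position of the class counts)
    for idx, class_pos in leaf_nodes:
        if idx == node:
            return target_classes[class_pos]
    raise ValueError("leaf not found in leaf_nodes table")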
def savePPParams(scaler, reduce_dims, estimator):
    #outdir = OutputMgr.checkCreateDSDir(estimator.dataset.name, estimator.nick)
    outdirI = OutputMgr.getOutIncludeDir()

    if scaler == None:
        sx = np.ones(estimator.dataset.X.shape[1])
    else:
        sx = scaler.scale_
        if isinstance(scaler, preprocessing.StandardScaler):
            ux = scaler.mean_
        elif isinstance(scaler, preprocessing.MinMaxScaler):
            mx = scaler.min_

    n_orig_feature = estimator.dataset.X.shape[1]
    if reduce_dims != None:
        pca_components = reduce_dims.components_
        pca_means = reduce_dims.mean_
        n_feature = pca_components.shape[0]
    else:
        n_feature = n_orig_feature

    myFile = open(f"{outdirI}PPParams.h", "w+")
    myFile.write(f"#ifndef PPPARAMS_H\n")
    myFile.write(f"#define PPPARAMS_H\n\n")
    myFile.write(f"#ifndef N_FEATURE\n")
    myFile.write(f"#define N_FEATURE {n_feature}\n")
    myFile.write(f"#endif\n\n")
    myFile.write(f"#ifndef N_ORIG_FEATURE\n")
    myFile.write(f"#define N_ORIG_FEATURE {n_orig_feature}\n")
    myFile.write(f"#endif\n\n")

    if reduce_dims != None:
        myFile.write(f"#define DO_PCA 1\n")
        myFile.write(f"extern float pca_components[N_FEATURE][N_ORIG_FEATURE];\n")
        myFile.write(f"extern float pca_means[N_ORIG_FEATURE];\n")
        myFile.write(f"\n")

    if scaler != None:
        if isinstance(scaler, preprocessing.StandardScaler):
            myFile.write(f"#define STANDARD_SCALING\n\n")
            myFile.write(f"extern float s_x[N_ORIG_FEATURE];\n")
            myFile.write(f"extern float u_x[N_ORIG_FEATURE];\n")
        elif isinstance(scaler, preprocessing.MinMaxScaler):
            myFile.write(f"#define MINMAX_SCALING\n\n")
            myFile.write(f"extern float s_x[N_ORIG_FEATURE];\n")
            myFile.write(f"extern float m_x[N_ORIG_FEATURE];\n")
    '''
    if cfg.normalization!=None and cfg.regr and cfg.algo.lower() != 'dt':
        saveTestNormalization(myFile)
    '''
    myFile.write(f"#endif")
    myFile.close()

    #outdirI = OutputMgr.checkCreateGeneralIncludeDir()
    #from shutil import copyfile
    #copyfile(f"{outdir}PPParams.h", f"{outdirI}PPParams.h")

    outdirS = OutputMgr.getOutSourceDir()
    myFile = open(f"{outdirS}preprocess_params.c", "w+")
    myFile.write(f"#include \"PPParams.h\"\n")

    import sys
    sys.path.insert(1, 'utils')
    import create_matrices

    if reduce_dims != None:
        stri = create_matrices.createMatrix('float', 'pca_components', pca_components,
                                            'N_FEATURE', 'N_ORIG_FEATURE')
        myFile.write(stri)
        stri = create_matrices.createArray('float', "pca_means", pca_means, 'N_ORIG_FEATURE')
        myFile.write(stri)
        myFile.write(f"\n")

    if scaler != None:
        if isinstance(scaler, preprocessing.StandardScaler):
            myFile.write(f"#define STANDARD_SCALING\n\n")
            stri = create_matrices.createArray('float', "s_x",
                                               np.reshape(sx, (sx.shape[0], )), 'N_ORIG_FEATURE')
            myFile.write(stri)
            stri = create_matrices.createArray('float', "u_x",
                                               np.reshape(ux, (ux.shape[0], )), 'N_ORIG_FEATURE')
            myFile.write(stri)
        elif isinstance(scaler, preprocessing.MinMaxScaler):
            myFile.write(f"#define MINMAX_SCALING\n\n")
            stri = create_matrices.createArray('float', "s_x",
                                               np.reshape(sx, (sx.shape[0], )), 'N_ORIG_FEATURE')
            myFile.write(stri)
            stri = create_matrices.createArray('float', "m_x",
                                               np.reshape(mx, (mx.shape[0], )), 'N_ORIG_FEATURE')
            myFile.write(stri)
    myFile.close()
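# Hypothetical call to savePPParams with a fitted StandardScaler and PCA. The
# _DS/_Est stubs are invented for this sketch and only carry the dataset matrix
# the function reads; they are not project classes.
import numpy as np
from sklearn import preprocessing
from sklearn.decomposition import PCA

class _DS:
    X = np.random.rand(100, 8).astype(np.float32)

class _Est:
    dataset = _DS()

scaler = preprocessing.StandardScaler().fit(_Est.dataset.X)
reduce_dims = PCA(n_components=4).fit(scaler.transform(_Est.dataset.X))

# Emits PPParams.h with DO_PCA / STANDARD_SCALING and preprocess_params.c
# containing pca_components, pca_means, s_x and u_x.
savePPParams(scaler, reduce_dims, _Est())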