def linesLesen():
    """Read LINES_FILE and feed the open handle to SCL.scale.

    Exits the program with a warning when the file does not exist.
    """
    if OSPATH.exists(LINES_FILE):
        # context manager closes the handle even if SCL.scale raises
        # (the original leaked the handle on an exception)
        with open(LINES_FILE, "rt") as lines_file:
            SCL.scale(lines_file)
    else:
        SYS.exit("[WARN]: File: " + LINES_FILE + " konnte nicht gefunden werden!")
def __test__():
    """Smoke test: draw two bezier strokes, overlay a generated forest,
    and print the resulting character map to stdout."""
    from bezier_generator import bezier_generate
    from scaler import scale
    from forest_generator import generate_forest

    stack = []
    points = []

    class point:
        def __init__(self, x, y):
            self.x = x
            self.y = y

    width = 90
    height = 90
    # random start/end points make each run slightly different
    points.append(point(80 - randint(0, 60), 0))
    points.append(point(40, 33))
    points.append(point(60, 66))
    points.append(point(width, 80 - randint(0, 60)))
    obj = "="
    gmap = bezier_generate(points, width, height, obj)
    gmap = scale(gmap, "=", 2)
    stack.append(gmap)
    # reuse the first three control points with a fresh random end point
    del points[3]
    points.append(point(80 - randint(0, 60), height))
    gmap = bezier_generate(points, width, height, obj)
    stack.append(gmap)
    # NOTE(review): this scaled result is never used — the stack already
    # holds the unscaled map and gmap is reassigned below; confirm intent
    gmap = scale(gmap, "=", 4)
    stack = [merge(stack)]
    trees = ["%", "$"]
    trees_prop = [0.5, 0.5]
    scale_value = 10
    start_prop = 1.0
    gmap = generate_forest(",", trees, trees_prop, width, height, scale_value, start_prop)
    stack.append(gmap)
    # NOTE(review): merged result is discarded; the loop below prints only
    # the forest layer (gmap), not the merge — confirm intent
    merge(stack)
    for i in range(0, width):
        s = ""
        for j in range(0, height):
            if gmap[i][j] is None:
                s += "."
            else:
                # presumably each cell is a stack of glyphs; print the topmost
                s += gmap[i][j][-1]
        print(s)
def machePolylineListe(points):
    """Parse an SVG polyline point string ("x,y x,y ...") and hand the
    coordinate lists plus path commands to SCALER.scale.

    points -- string of comma-separated x,y pairs separated by whitespace
    """
    # trailing space guarantees the final y value is terminated by ' '
    points = points + " "
    polylinePointsX = []
    polylinePointsY = []
    polylineCMDs = ["m"]  # first point is a move-to, the rest are line-tos
    while len(points) != 0:
        # split off the next "x,y" pair
        points = points.lstrip()
        comma = points.index(',')
        polylinePointsX.append(float(points[:comma]))
        points = points[comma + 1:].lstrip()
        space = points.index(' ')
        polylinePointsY.append(float(points[:space]))
        points = points[space + 1:].lstrip()
        polylineCMDs.append("l")
    # the loop appends one "l" too many (for the final point) — drop it
    polylineCMDs.pop()
    SCALER.scale(polylinePointsX, polylinePointsY, polylineCMDs)
def machePolylineListe(points):
    """Parse an SVG polyline point string ("x,y x,y ...") and hand the
    coordinate lists plus path commands to SCALER.scale.

    NOTE(review): this is a byte-for-byte duplicate of an earlier
    machePolylineListe definition in this file and shadows it — one of
    the two should probably be removed; confirm with the file's owner.

    points -- string of comma-separated x,y pairs separated by whitespace
    """
    # trailing space guarantees the final y value is terminated by ' '
    points = points + " "
    polylinePointsX = []
    polylinePointsY = []
    polylineCMDs = ["m"]  # first point is a move-to, the rest are line-tos
    while len(points) != 0:
        # split off the next "x,y" pair
        points = points.lstrip()
        comma = points.index(',')
        polylinePointsX.append(float(points[:comma]))
        points = points[comma + 1:].lstrip()
        space = points.index(' ')
        polylinePointsY.append(float(points[:space]))
        points = points[space + 1:].lstrip()
        polylineCMDs.append("l")
    # the loop appends one "l" too many (for the final point) — drop it
    polylineCMDs.pop()
    SCALER.scale(polylinePointsX, polylinePointsY, polylineCMDs)
def _preprocess(self, x):
    """Apply the transformations fixed during training to new data.

    Replays, in order: categorical replacement, optional one-hot
    encoding, and scaling with the statistics stored on self.
    """
    assert self._training_finished, "Training is not finished"
    # 1. Replace category strings with numeric codes
    processed = model_util.replace_cat_variables(x=x,
                                                 catdict=self._catdict,
                                                 param_names=self._param_names)
    # 2. One-hot encode when the model was trained with encoding
    if self._encode:
        processed, encoded_names = \
            model_util.encode(sp=self._sp,
                              x=processed,
                              param_names=self._param_names,
                              num_folds=self._num_folds,
                              catdict=self._catdict)
        # encoding must reproduce the column layout seen at training time
        assert self._used_param_names == encoded_names
    # 3. Scale with the training-time statistics
    return scaler.scale(scale_info=self._scale_info, x=processed)
def _set_and_preprocess(self, x, param_names):
    """Fit the preprocessing pipeline on training data and return the
    transformed copy.

    Records fold count, parameter names, optional one-hot encoding and
    scaling statistics on self so they can be replayed on new data later.
    """
    assert not self._training_finished, "Training is already finished"
    scaled_x = copy.deepcopy(x)  # never mutate the caller's data
    self.set_number_of_folds(scaled_x)
    self._param_names = param_names
    # 1. Replace category strings with numeric codes
    scaled_x = model_util.replace_cat_variables(scaled_x, self._catdict,
                                                self._param_names)
    # 2. Maybe one-hot encode the data
    if self._encode:
        scaled_x, self._used_param_names = \
            model_util.encode(sp=self._sp,
                              x=scaled_x,
                              param_names=self._param_names,
                              num_folds=self._num_folds,
                              catdict=self._catdict)
    else:
        self._used_param_names = self._param_names
    # 3. Set scaling statistics (the original's encoded/unencoded branches
    # were byte-identical, so they collapse to a single call)
    self._scale_info = \
        scaler.get_x_info_scaling_all(sp=self._sp,
                                      x=scaled_x,
                                      param_names=self._used_param_names,
                                      num_folds=self._num_folds,
                                      encoded=self._encode)
    # 4. Scale data
    scaled_x = scaler.scale(scale_info=self._scale_info, x=scaled_x)
    return scaled_x
def machePathListe():
    """Hand the module-level path point lists to SCALER.scale.

    NOTE(review): relies on module-level punkteX/punkteY/punkteCmds being
    populated beforehand — presumably by an SVG path parser; confirm.
    """
    SCALER.scale(punkteX, punkteY, punkteCmds)
#np.random.seed(1337) # for reproducibility net_type = 'regression' # softmax|regression mode = 'validate' # full_train|validate activations = False features_NN_prefix = net_type + '.' # Scale if not (os.path.exists(constants.train_features_scaled_file) and os.path.exists(constants.test_features_scaled_file)): var, mean = variance(constants.train_features_file) sd = np.sqrt(var) if not os.path.exists(constants.train_features_scaled_file): print('Scaling ' + constants.train_features_scaled_file) scale(constants.train_features_file, constants.train_features_scaled_file, sd, mean) if not os.path.exists(constants.test_features_scaled_file): print('Scaling ' + constants.test_features_scaled_file) scale(constants.test_features_file, constants.test_features_scaled_file, sd, mean) n_epochs = 200 n_models = 10 if mode == 'validate': validate(n_epochs, n_models) elif mode == 'full_train': full_train(n_epochs, n_models) else: print('Unknown mode. Exiting.')