def _min_max_scaler(ranges, feature_range=(0, 100)):
    """Build a pre-fitted ``MinMaxScaler`` from explicit per-feature ranges.

    Parameters
    ----------
    ranges : array of shape (n_features, 2)
        Column 0 is each feature's minimum, column 1 its maximum.
        Each row must satisfy max > min (a zero range would divide by zero,
        as in the original code).
    feature_range : tuple (low, high), default (0, 100)
        Target output range the scaler maps data into.

    Returns
    -------
    MinMaxScaler
        A scaler whose fitted attributes are set as if it had been fit on
        data spanning ``ranges``, so ``transform`` / ``inverse_transform``
        work without calling ``fit``.
    """
    res = MinMaxScaler()
    res.data_min_ = ranges[:, 0]
    res.data_max_ = ranges[:, 1]
    res.data_range_ = res.data_max_ - res.data_min_
    res.scale_ = (feature_range[1] - feature_range[0]) / res.data_range_
    # BUG FIX: sklearn defines min_ = feature_range[0] - data_min_ * scale_
    # (so that X * scale_ + min_ lands in feature_range).  The original
    # wrote -scale_ * data_min_, silently dropping feature_range[0] — correct
    # only for the default (0, ...) range, wrong for e.g. an int8 range.
    res.min_ = feature_range[0] - res.data_min_ * res.scale_
    res.n_samples_seen_ = 1
    res.feature_range = feature_range
    return res
# Track the global min/max seen so far across files; `data`, `min_val`,
# `max_val` and `file` are defined earlier in the (not visible) script.
try:
    temp_min = data.min()
    if temp_min < min_val:
        #print(f"New min: {min_val}")
        min_val = temp_min
    temp_max = data.max()
    if temp_max > max_val:
        #print(f"New max: {max_val}")
        max_val = temp_max
except:
    # NOTE(review): bare except swallows every error (including typos /
    # KeyboardInterrupt) and only logs the offending file name.
    print(f"Did not work here: {file}")

# Build a scaler targeting the full int8 range [-128, 127].
# NOTE(review): only data_min_/data_max_ are assigned here — scale_ and
# min_ are NOT set, so this object is presumably consumed by compressData
# rather than via scaler.transform(); confirm against compressData.
scaler = MinMaxScaler(feature_range=(np.iinfo(np.int8).min, np.iinfo(np.int8).max))
scaler.data_min_ = min_val
scaler.data_max_ = max_val
#######################################################
# Extract each archive member to a temp file, load it as a raw float64
# array (np.fromfile default dtype), then delete the temp file.
for file in tqdm(files[:]):
    temp = os.path.join('temp', os.path.basename(file))
    with open(temp, 'wb') as f:
        f.write(zip.read(file))
    data = np.fromfile(temp)
    os.remove(temp)
    # Quantize to int8 with the scaler, then restore the expected array
    # shape.  (The try block continues beyond this visible chunk.)
    try:
        data = compressData(data, scaler, dtype=np.int8)
        data = np.reshape(data, shape)