def standardize_all_data():
    """Standardize the whole trajectory data set in place (mean 0, std 1).

    If the data set has not been loaded yet, it is loaded first via
    get_trajectory(). Once standardization has run, config.STANDARDIZED
    is True and get_trajectory() returns the standardized data.

    Returns:
        The (standardized) trajectory data from get_trajectory().
    """
    # Already standardized: nothing to do, just hand back the data.
    if config.STANDARDIZED:
        return get_trajectory()
    if config.ALL_TRAJECTORY is None:
        # Load the data set as a side effect of get_trajectory().
        get_trajectory()
    data = config.ALL_TRAJECTORY
    mean = data_mean()
    std = data_std()
    for i, entry in enumerate(data):
        entry = list(entry)
        # NOTE(review): the original comment said "first two elements are
        # metadata, 3rd is data", but the code updates index 3 (the 4th
        # element). Indexing kept exactly as-is — confirm which is right.
        entry[3] = (entry[3] - mean) / std
        data[i] = tuple(entry)
    config.ALL_TRAJECTORY = data
    config.STANDARDIZED = True
    return get_trajectory()
def sample_net(net, n):
    """Draw ``n`` examples from ``net`` and return them standardized.

    Args:
        net: network/generator object passed through to get_real_example.
        n: number of samples to draw.

    Returns:
        List of ``n`` standardized samples.
    """
    # Hoist the loop-invariant statistics: the original recomputed
    # data_mean() and data_std() on every iteration.
    mean = data_mean()
    std = data_std()
    samples = []
    for _ in range(n):
        sample = get_real_example(net)
        sample, _, _ = standardize_data(sample, mean=mean, std=std)
        samples.append(sample)
    return samples
def draw_spectra_of(path):
    """Plot and return the magnitude spectrum of the recording at ``path``.

    The file is read, standardized with the data set's global mean/std,
    transformed via fft_data, and its absolute spectrum is shown with
    matplotlib before being returned.
    """
    raw = _read_one_file(path)
    standardized, _, _ = standardize_data(raw, data_mean(), data_std())
    spectrum = np.absolute(fft_data([standardized])[0])
    plt.plot(spectrum)
    plt.show()
    return spectrum
def istandardize_data(data):
    """Invert standardization: map standardized arrays back to raw scale.

    Args:
        data: list of numpy arrays, each of shape (length, dimensionality).

    Returns:
        List of recovered arrays, computed as ``d * std + mean``.

    Raises:
        Exception: if the data set has not been standardized yet.
    """
    # Idiomatic truthiness check instead of `is False`.
    if not config.STANDARDIZED:
        raise Exception('Data set has not been standardized!')
    mean = data_mean()
    std = data_std()
    # Comprehension replaces the original manual append loop.
    return [d * std + mean for d in data]
# Scratch notebook cells (`# %%` separators) for interactive exploration
# of numpy concatenation and the project's data-loading helpers.
a = np.array([[1, 2], [1, 2]])
b = a  # alias, not a copy — a and b share the same array
c = np.concatenate([a, b, a, b], axis=0)  # stack along rows
d = np.concatenate((a, b), axis=1)  # stack along columns
print(c)
print(d)
# %%
c
# %%
c.mean(axis=1)  # per-row mean
# %%
print(config.DATA_MEAN)
# %%
data = get_trajectory()
# NOTE(review): elsewhere data_mean() is called with no argument —
# presumably the argument is optional here; verify against its definition.
m = data_mean(data)
# %%
print(m)
print(config.DATA_MEAN)
# %%
# Imports live mid-file because notebook cells may be (re)run out of order.
import config
from data_processor import standardize_data
from data_processor import trim_data
from data_reader import get_trajectory
from data_analyzer import data_length
# %%
data = get_trajectory()
data[0]  # inspect the first trajectory entry
# %%
data = config.ALL_TRAJECTORY