def generate_data(N, Ngrid):
    """Load the raw simulation data for the current `direction`, generating
    and caching it on disk first if no cached file exists.

    Returns an array whose index 0 is the prediction source field and
    index 1 the target field (entries are swapped for the reverse
    directions "vu" / "hv").
    """
    barkley_file = "../../cache/barkley/raw/{0}_{1}.uv.dat.npy".format(N, Ngrid)
    mitchell_file = "../../cache/mitchell/raw/{0}_{1}.vh.dat.npy".format(N, Ngrid)

    if direction in ["uv", "vu"]:
        # Barkley model: u/v fields.
        if os.path.exists(barkley_file):
            data = np.load(barkley_file)
        else:
            data = bh.generate_uv_data(N, 50000, 5, Ngrid=Ngrid)
            np.save(barkley_file, data)
    else:
        # Mitchell-Schaeffer model: v/h fields.
        if os.path.exists(mitchell_file):
            data = np.load(mitchell_file)
        else:
            data = mh.generate_vh_data(N, 20000, 50, Ngrid=Ngrid)
            np.save(mitchell_file, data)

    # At the moment we are doing a u -> v / v -> h cross prediction;
    # for the reverse prediction (v -> u / h -> v) swap the two entries.
    if direction in ["vu", "hv"]:
        data[0], data[1] = data[1].copy(), data[0].copy()

    return data
def generate_data(N, Ngrid):
    """Load (or generate and cache) the raw field data, then publish it to
    the shared arrays: the clean field becomes the prediction target and a
    Gaussian-blurred copy of each frame becomes the input.
    """
    if direction == "u":
        # Barkley model: u/v fields.
        cache_file = "../../cache/barkley/raw/{0}_{1}.uv.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            data = bh.generate_uv_data(N, 20000, 5, Ngrid=Ngrid)
            np.save(cache_file, data)
    else:
        # Mitchell-Schaeffer model: v/h fields.
        cache_file = "../../cache/mitchell/raw/{0}_{1}.vh.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            data = mh.generate_vh_data(N, 20000, 50, Ngrid=Ngrid)
            np.save(cache_file, data)

    # The clean field is the target...
    shared_output_data[:] = data[0, :]

    # ...and a blurred copy of every frame is the input.
    for frame in range(ndata):
        shared_input_data[frame, :, :] = gaussian_filter(shared_output_data[frame], sigma=9.0)
def generate_data(N, Ngrid):
    """Load (or generate and cache) the raw field data and fill the shared
    input/output arrays with a time-shifted pair for the prediction task.
    """
    if direction == "u":
        # Barkley model: u/v fields.
        cache_file = "../../cache/barkley/raw/{0}_{1}.uv.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            data = bh.generate_uv_data(N, 50000, 5, Ngrid=Ngrid)
            np.save(cache_file, data)
    else:
        # Mitchell-Schaeffer model: v/h fields.
        cache_file = "../../cache/mitchell/raw/{0}_{1}.vh.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            data = mh.generate_vh_data(N, 20000, 50, Ngrid=Ngrid)
            np.save(cache_file, data)

    # Fill the first ndata-predictionLength items with the real data and
    # leave the last predictionLength items free. Those trailing items are
    # never used; they exist only to keep the indexing code simple.
    shared_input_data[:-predictionLength] = data[0, :-predictionLength]
    shared_output_data[:-predictionLength] = data[0, predictionLength:]
def generate_data(N, trans, sample_rate, Ngrid):
    """Load (or generate and cache) the raw field data for the current direction.

    Channel 0 holds the simulated field; channel 1 is overwritten in place
    with a Gaussian-blurred copy of channel 0, frame by frame, serving as
    the degraded input for the de-blurring task.

    NOTE(review): `trans` and `sample_rate` are accepted but unused here —
    the transient/sampling values below are hard coded; confirm intended.
    """
    data = None
    if direction == "u":
        # Barkley model: u/v fields, cached per (N, Ngrid).
        cache_file = "../../cache/barkley/raw/{0}_{1}.uv.dat.npy".format(N, Ngrid)
        # Idiomatic truth test instead of `os.path.exists(...) == False`.
        if not os.path.exists(cache_file):
            data = bh.generate_uv_data(N, 50000, 5, Ngrid=Ngrid)
            np.save(cache_file, data)
        else:
            data = np.load(cache_file)
    else:
        # Mitchell-Schaeffer model: v/h fields.
        cache_file = "../../cache/mitchell/raw/{0}_{1}.vh.dat.npy".format(N, Ngrid)
        if not os.path.exists(cache_file):
            data = mh.generate_vh_data(N, 20000, 50, Ngrid=Ngrid)
            np.save(cache_file, data)
        else:
            data = np.load(cache_file)

    # Blur channel 0 into channel 1, one time step at a time.
    # NOTE(review): loop bound is the global `ndata`, not N — confirm they agree.
    for t in range(ndata):
        data[1, t, :, :] = gaussian_filter(data[0, t], sigma=9.0)

    return data
def generate_data(N, trans, sample_rate, Ngrid):
    """Load (or generate and cache) the raw simulation data for `direction`.

    Returns an array whose index 0 is the prediction source field and
    index 1 the target field. For BOCF directions the relevant channel
    pair is extracted from the 4-channel (u, v, w, s) data set.

    Bug fix: the BOCF channel-extraction guard previously tested for
    "bocf_ws", although only "bocf_uv"/"bocf_uw"/"bocf_us" are ever loaded
    above and the guarded body explicitly handles "bocf_us" — so the
    u -> s case silently skipped the extraction. The guard now matches
    the loaded directions.
    """
    data = None
    if direction in ["uv", "vu"]:
        # Barkley model: u/v fields.
        cache_file = "../../cache/barkley/raw/{0}_{1}.uv.dat.npy".format(N, Ngrid)
        if not os.path.exists(cache_file):
            data = bh.generate_uv_data(N, 50000, 5, Ngrid=Ngrid)
            np.save(cache_file, data)
        else:
            data = np.load(cache_file)
    elif direction in ["bocf_uv", "bocf_uw", "bocf_us"]:
        # BOCF model: 4-channel (u, v, w, s) data; never generated here.
        cache_file = "../../cache/bocf/raw/{0}_{1}.uvws.dat.npy".format(N, Ngrid)
        if not os.path.exists(cache_file):
            print(
                "NO BOCF data set found. Please generate a chaotic data set manually."
            )
        else:
            data = np.load(cache_file)
    else:
        # Mitchell-Schaeffer model: v/h fields.
        cache_file = "../../cache/mitchell/raw/{0}_{1}.vh.dat.npy".format(N, Ngrid)
        if not os.path.exists(cache_file):
            data = mh.generate_vh_data(N, 20000, 50, Ngrid=Ngrid)
            np.save(cache_file, data)
        else:
            data = np.load(cache_file)

    # At the moment we are doing a u -> v / v -> h cross prediction
    # (index 0 -> index 1); swap entries for the reverse prediction.
    if direction in ["vu", "hv"]:
        data[0], data[1] = data[1].copy(), data[0].copy()

    # Was `["bocf_uv", "bocf_uw", "bocf_ws"]` — "bocf_ws" is a typo for
    # "bocf_us" (see branch below and the load branch above).
    if direction in ["bocf_uv", "bocf_uw", "bocf_us"]:
        real_data = np.empty((2, N, Ngrid, Ngrid))
        real_data[0] = data[0].copy()
        if direction == "bocf_uv":
            real_data[1] = data[1].copy()
        elif direction == "bocf_uw":
            real_data[1] = data[2].copy()
        elif direction == "bocf_us":
            real_data[1] = data[3].copy()
        data = real_data

    return data
def generate_data(N, trans, sample_rate, Ngrid):
    """Load (or generate and cache) the raw field data and return an
    (inputData, outputData) pair: the values inside an inner patch plus a
    surrounding border as input, and the small central target patch as
    output, for every time step.
    """
    if direction == "u":
        # Barkley model: u/v fields.
        cache_file = "../../cache/barkley/raw/{0}_{1}.uv.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            data = bh.generate_uv_data(N, 50000, 5, Ngrid=Ngrid)
            np.save(cache_file, data)
    else:
        # Mitchell-Schaeffer model: v/h fields.
        cache_file = "../../cache/mitchell/raw/{0}_{1}.vh.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            data = mh.generate_vh_data(N, 20000, 50, Ngrid=Ngrid)
            np.save(cache_file, data)

    # Only the first field is used.
    data = data[0]

    # Bounds (low, high) of the three concentric regions around `center`.
    outer_bounds = (center - (halfInnerSize + borderSize),
                    center + (halfInnerSize + borderSize) + rightBorderAdd)
    inner_bounds = (center - (halfInnerSize),
                    center + (halfInnerSize) + rightBorderAdd)
    target_bounds = (center - (1), center + (1) + 0)

    # Input pixels: border ring between the outer and inner regions.
    input_y, input_x, _, _ = create_patch_indices(
        outer_bounds, outer_bounds, inner_bounds, inner_bounds)

    # Output pixels: the small central target patch.
    _, _, output_y, output_x = create_patch_indices(
        outer_bounds, outer_bounds, target_bounds, target_bounds)

    inputData = data[:, input_y, input_x]
    outputData = data[:, output_y, output_x]

    return inputData, outputData
def generate_data(N, trans, sample_rate, Ngrid):
    """Load (or generate and cache) the raw field data for the current
    `direction` and return only its first field.
    """
    if direction == "u":
        # Barkley model: u/v fields.
        cache_file = "../../cache/barkley/raw/{0}_{1}.uv.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            data = bh.generate_uv_data(N, 50000, 5, Ngrid=Ngrid)
            np.save(cache_file, data)
    else:
        # Mitchell-Schaeffer model: v/h fields.
        cache_file = "../../cache/mitchell/raw/{0}_{1}.vh.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            data = mh.generate_vh_data(N, 20000, 50, Ngrid=Ngrid)
            np.save(cache_file, data)

    return data[0]
else: data = np.load("../../cache/barkley/raw/{0}_{1}.uv.dat.npy".format( ndata, N)) elif direction in ["bocf_uv", "bocf_uw", "bocf_us"]: if not os.path.exists("../../cache/bocf/raw/{0}_{1}.uvws.dat.npy".format( ndata, N)): print( "NO BOCF data set found. Please generate a chaotic data set manually." ) else: data = np.load("../../cache/bocf/raw/{0}_{1}.uvws.dat.npy".format( ndata, N)) else: if not os.path.exists("../../cache/mitchell/raw/{0}_{1}.vh.dat.npy".format( N, N)): data = mh.generate_vh_data(N, 20000, 50, Ngrid=N) np.save("../../cache/mitchell/raw/{0}_{1}.vh.dat.npy".format(ndata, N), data) else: data = np.load("../../cache/mitchell/raw/{0}_{1}.vh.dat.npy".format( ndata, N)) print("Data loaded") #at the moment we are doing a u -> v / v -> h cross prediction (index 0 -> index 1) if (direction in ["vu", "hv"]): #switch the entries for the v -> u / h -> v prediction tmp = data[0].copy() data[0] = data[1].copy() data[1] = tmp.copy()
def generate_data(N, trans, sample_rate=1, Ngrid=100):
    """Generate v/h data and return only the first field (the u variable)."""
    vh = generate_vh_data(N, trans, sample_rate=sample_rate, Ngrid=Ngrid)
    return vh[0]
def generate_data(N, Ngrid):
    """Load (or generate and cache) the raw data for `direction`, select the
    source/target channel pair, subtract the training means, optionally add
    noise, and publish the result into the shared input/output arrays.
    """
    data = None
    if direction in ["uv", "vu"]:
        # Barkley model: u/v fields.
        cache_file = "../../cache/barkley/raw/{0}_{1}.uv.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            data = bh.generate_uv_data(N, 50000, 5, Ngrid=Ngrid)
            np.save(cache_file, data)
    elif direction.startswith("bocf_"):
        # BOCF model: 4-channel (u, v, w, s) data; never generated here.
        cache_file = "../../cache/bocf/raw/{0}_{1}.uvws.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            print(
                "NO BOCF data set found. Please generate a chaotic data set manually."
            )
    else:
        # Mitchell-Schaeffer model: v/h fields.
        cache_file = "../../cache/mitchell/raw/{0}_{1}.vh.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            data = mh.generate_vh_data(N, 20000, 50, Ngrid=Ngrid)
            np.save(cache_file, data)

    # At the moment we are doing a u -> v / v -> h cross prediction
    # (index 0 -> index 1); swap entries for the reverse prediction.
    if direction in ["vu", "hv"]:
        data[0], data[1] = data[1].copy(), data[0].copy()

    if direction.startswith("bocf_"):
        # Pick the (source, target) channel pair named by e.g. "bocf_uv":
        # character 5 names the source field, character 6 the target field.
        channel_index = {"u": 0, "v": 1, "w": 2, "s": 3}
        real_data = np.empty((2, N, Ngrid, Ngrid))
        src = channel_index.get(direction[5])
        if src is not None:
            real_data[0] = data[src].copy()
        tgt = channel_index.get(direction[6])
        if tgt is not None:
            real_data[1] = data[tgt].copy()
        data = real_data

    global means_train
    means_train = [0, 0]  # np.mean(data[:trainLength], axis=(1, 2))
    data[0] -= means_train[0]
    data[1] -= means_train[1]

    if cpmtps.use_noise:
        data[0] += np.random.normal(loc=0.0, scale=noise, size=data[0].shape)
        # NOTE(review): the clamp is assumed to apply only when noise was
        # added — the original's indentation is ambiguous; confirm.
        if not direction.startswith("bocf"):
            data[0][data[0] < 0.0] = 0.0
            data[0][data[0] > 1.0] = 1.0

    shared_input_data[:ndata] = data[0]
    shared_output_data[:ndata] = data[1]