Example no. 1
0
def generate_data(N, trans, sample_rate, Ngrid, def_param=(shared_input_data, shared_data)):
    """Load (or simulate and cache) model data and fill the shared arrays.

    Selects the Barkley model when the global ``direction`` is ``"u"``,
    otherwise the Mitchell model. Results are cached as ``.npy`` files so
    repeated runs skip the simulation. Writes into the module-level shared
    arrays ``shared_input_data``, ``shared_data`` and ``prediction``;
    ``def_param`` only exists to keep references to the shared buffers in
    the function's defaults (multiprocessing fork pattern).

    NOTE(review): ``trans`` and ``sample_rate`` are accepted but never used —
    the simulation calls below hard-code 20000 and 5/50. Confirm whether
    that is intentional.
    """
    # Resolve the cache location for the chosen model, then load-or-simulate.
    if direction == "u":
        cache_file = "../../cache/barkley/raw/{0}_{1}.uv.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            data = bh.generate_data(N, 20000, 5, Ngrid=Ngrid)
            np.save(cache_file, data)
    else:
        cache_file = "../../cache/mitchell/raw/{0}_{1}.vh.dat.npy".format(N, Ngrid)
        if os.path.exists(cache_file):
            data = np.load(cache_file)
        else:
            data = mh.generate_data(N, 20000, 50, Ngrid=Ngrid)
            np.save(cache_file, data)

    # Keep only the first field of the simulation output.
    data = data[0, :]

    # Outer (input) and inner (output) patch bounds around the grid center.
    outer_lo = center - (half_inner_size + border_size)
    outer_hi = center + (half_inner_size + border_size) + right_border_add
    inner_lo = center - half_inner_size
    inner_hi = center + half_inner_size + right_border_add
    input_y, input_x, output_y, output_x = hp.create_patch_indices(
        (outer_lo, outer_hi), (outer_lo, outer_hi),
        (inner_lo, inner_hi), (inner_lo, inner_hi))

    flat_input = data[:, input_y, input_x].reshape(ndata, -1)
    if prediction_mode in ["NN", "RBF"]:
        # These modes consume time-delay embedded coordinates.
        shared_input_data[:] = hp.create_1d_delay_coordinates(flat_input, delay_dimension=ddim, tau=tau[direction]).reshape((ndata, -1))
    else:
        shared_input_data[:] = flat_input[:]

    shared_data[:] = data[:]
    # Seed the prediction buffer with ground truth, then blank the target patch.
    prediction[:] = data[trainLength:trainLength+predictionLength]
    prediction[:, output_y, output_x] = 0.0
Example no. 2
0
def gather_data(filename):
    """Load pickled view data and return per-ring MSE values of its "Diff" field.

    Parameters
    ----------
    filename : str
        Path to a pickle file holding a list of ``(name, value)`` pairs.

    Returns
    -------
    list
        One mean-squared error per concentric one-cell-wide square ring
        (31 rings, outermost first), plus a final entry for the single
        center point (75, 75).

    Raises
    ------
    ValueError
        If no entry named "diff" (case-insensitive) is present.
    """
    # Context manager closes the file even if unpickling fails.
    # NOTE: pickle.load is unsafe on untrusted files — only use on own cache.
    with open(filename, "rb") as f:
        viewData = pickle.load(f)

    print("data loaded")

    diff = next((value for name, value in viewData if name.lower() == "diff"), None)
    if diff is None:
        raise ValueError("no 'diff' entry found in {0}".format(filename))

    N = 150  # grid side length

    # Concentric square rings from a 64x64 border inward; only the outer
    # (ring) indices are needed, the inner ones are discarded.
    indices = []
    for i in range(31):
        outer = (N // 2 - 32 + i, N // 2 + 32 - i)
        inner = (N // 2 - 31 + i, N // 2 + 31 - i)
        outer_y, outer_x, _, _ = hp.create_patch_indices(outer, outer, inner, inner)
        indices.append((outer_y, outer_x))

    # Finally the single center point.
    indices.append(([75], [75]))

    # MSE of the error field restricted to each ring.
    return [np.mean(diff[:, y, x]**2) for (y, x) in indices]
Example no. 3
0
def mainFunction():
    """Run the inner-cross-prediction pipeline end to end.

    Generates/loads the data, fits one predictor per target pixel in a
    process pool, clamps the result, reports validation/test MSE and
    pickles the view data to the model-specific cache directory. Relies
    entirely on module-level shared state (shared_data, prediction, and
    the configuration globals).
    """
    generate_data(ndata, 20000, 50, Ngrid=N)

    queue = Queue() # use manager.queue() ?
    print("preparing threads...")
    pool = Pool(processes=16, initializer=get_prediction_init, initargs=[queue,])

    # Only the inner-patch (output) indices are needed here.
    _, _, output_y, output_x = hp.create_patch_indices(
        (center - (half_inner_size+border_size), center + (half_inner_size+border_size) + right_border_add),
        (center - (half_inner_size+border_size), center + (half_inner_size+border_size) + right_border_add),
        (center - (half_inner_size), center + (half_inner_size) + right_border_add),
        (center - (half_inner_size), center + (half_inner_size) + right_border_add))

    # One job per target pixel — zip instead of an index loop.
    jobs = list(zip(output_y, output_x))

    print("fitting...")
    process_results_process = Process(target=process_thread_results, args=(queue, len(jobs)))
    process_results_process.start()
    pool.map(get_prediction, jobs)
    pool.close()

    process_results_process.join()

    print("finished fitting")

    # Clamp predictions into the valid range [0, 1] in place.
    np.clip(prediction, 0.0, 1.0, out=prediction)

    diff = (shared_data[trainLength:trainLength+predictionLength]-prediction)
    mse_validation = np.mean((diff[:predictionLength-testLength, output_y, output_x])**2)
    mse_test = np.mean((diff[predictionLength-testLength:predictionLength, output_y, output_x])**2)
    print("validation error: {0}".format(mse_validation))
    print("test error: {0}".format(mse_test))

    model = "barkley" if direction == "u" else "mitchell"
    view_data = [("Orig", shared_data[trainLength:trainLength+testLength]), ("Pred", prediction), ("Diff", diff)]

    # Build the mode-specific filename suffix once instead of duplicating the
    # whole format string per branch, then write inside a context manager so
    # the file is closed even if pickling fails.
    if prediction_mode == "NN":
        suffix = "{0}_{1}".format(ddim, k)
    elif prediction_mode == "RBF":
        suffix = "{0}_{1}_{2}".format(ddim, width, basis_points)
    else:
        suffix = "{0}_{1}".format(regression_parameter, n_units)
    output_path = "../../cache/{0}/viewdata/inner_cross_pred/{1}/{2}_viewdata_{3}_{4}_{5}_{6}.dat".format(
        model, direction, prediction_mode.lower(), trainLength, inner_size, border_size, suffix)
    with open(output_path, "wb") as output_file:
        pickle.dump(view_data, output_file)