    def __init__(self, train_configs, for_train=True):
        self.end_points = {}
        if for_train:
            self.parameter_len = train_configs["parameter_len"]
            self.lumitexel_length = train_configs["lumitexel_length"]
            self.measurements_length = train_configs["measurements_length"]
            self.learning_rate = train_configs["learning_rate"]
            self.tamer_name = train_configs["tamer_name"]
            self.logPath = train_configs["log_details_dir"]
            make_dir(self.logPath)
            self.modelPath = train_configs["logPath"]+"models/"
            make_dir(self.modelPath)
            self.batch_size = train_configs["batch_size"]

            standard_rendering_parameters = {}

            standard_rendering_parameters["parameter_len"] = self.parameter_len
            standard_rendering_parameters["batch_size"] = self.batch_size
            standard_rendering_parameters["lumitexel_size"] = self.lumitexel_length
            standard_rendering_parameters["is_grey_scale"] = True
            standard_rendering_parameters["config_dir"] = "../tf_ggx_render/tf_ggx_render_configs_1x1/"
            self.end_points["standard_rendering_parameters"] = standard_rendering_parameters

            self.RENDER_SCALAR = 5*1e3/math.pi
        else:
            self.parameter_len = train_configs["parameter_len"]
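make_dir is used throughout these examples but comes from dir_folder_and_files, whose source is not shown. A minimal sketch of what such a helper presumably does (an assumption, not the project's actual implementation):

import os

def make_dir(path):
    # Create the directory, including missing parents, and do nothing if it
    # already exists. The real dir_folder_and_files.make_dir may differ.
    os.makedirs(path, exist_ok=True)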
Example 2
def check_quality(result, log_path, global_step):
    # Dump ground-truth parameters/positions and per-sample lumitexel images
    # (ground truth vs. guess) so fit quality at this step can be inspected.
    img_path = log_path + "imgs_{}/".format(global_step)
    make_dir(img_path)
    np.savetxt(img_path + "param_ground_truth.csv", result[2], delimiter=',')
    # np.savetxt(img_path+"param_guessed.csv",result[3],delimiter=',')
    np.savetxt(img_path + "position_ground_truth.csv",
               result[3],
               delimiter=',')
    np.savetxt(img_path + "n_dot_view.csv",
               result[4].reshape([-1, 1]),
               delimiter=',')
    np.savetxt(img_path + "view_dir.csv",
               result[5].reshape([-1, 3]),
               delimiter=',')
    np.savetxt(img_path + "n.csv", result[6].reshape([-1, 3]), delimiter=',')
    np.savetxt(img_path + "n_local.csv",
               result[7].reshape([-1, 3]),
               delimiter=',')
    # np.savetxt(img_path+"param_origin.csv",result[5],delimiter=',')
    # np.savetxt(img_path+"position_guessed.csv",result[5],delimiter=',')
    for idx, a_gt_lumi in enumerate(result[0]):
        gt_lumi_img = visualize_new(a_gt_lumi.reshape([-1]), scalerf=255.0)
        guessed_lumi_img = visualize_new(np.exp(result[1][idx].reshape([-1])) -
                                         1,
                                         scalerf=255.0)
        cv2.imwrite(img_path + "{}_gt.png".format(idx), gt_lumi_img)
        cv2.imwrite(img_path + "{}_guessed.png".format(idx), guessed_lumi_img)
        np.savetxt(img_path + "{}_mm.csv".format(idx),
                   result[2][idx].reshape([-1, 1]),
                   delimiter=',')
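check_quality decodes the guessed lumitexel with np.exp(...) - 1, which suggests the network output is stored in log(1 + x) space. A small sketch of that encode/decode pair, assuming this convention (the function names are illustrative, not from the original code):

import numpy as np

def encode_lumitexel(lumi):
    # Compress the dynamic range with log(1 + x) before storing/predicting.
    return np.log1p(lumi)

def decode_lumitexel(encoded):
    # Inverse of the encoding; equivalent to the np.exp(...) - 1 used above.
    return np.expm1(encoded)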
Example 3
    def save_model(self):
        print("saveModel!")
        # Evaluate the global step once and reuse it for all paths below.
        global_step = self.sess.run(self.end_points["global_step"])
        path = self.modelPath + self.tamer_name + "_" + str(global_step) + "/"
        make_dir(path)
        path_bak = path
        path = path + self.tamer_name
        self.end_points['saver'].save(self.sess, path)

        # Every DUMP_ITR steps, keep an extra backup copy of the checkpoint directory.
        if global_step % self.DUMP_ITR == 0:
            shutil.copytree(path_bak, self.modelPath + self.tamer_name + "_" + str(global_step) + "_bak/")
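Checkpoints written by save_model can be restored later with the same TF1-style Saver API used throughout these snippets; Example 8 below points its model_path at one such checkpoint prefix. A minimal restore sketch under that assumption (the helper name restore_tamer is illustrative, and the graph must be rebuilt exactly as during training before calling it):

import tensorflow as tf

def restore_tamer(sess, model_path):
    # model_path is a checkpoint prefix written by save_model above,
    # e.g. "<modelPath>/tamer_<step>/tamer" (cf. Example 8's "model_path").
    saver = tf.train.Saver()
    saver.restore(sess, model_path)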
Example 4
    def __init__(self, train_configs, for_train=True):
        self.end_points = {}
        if for_train:
            self.DUMP_ITR = train_configs["DUMP_ITR"]
            self.parameter_len = train_configs["parameter_len"]
            self.lumitexel_length = train_configs["lumitexel_length"]
            self.slice_sample_num_pd = 8
            self.slice_length_pd = self.slice_sample_num_pd*self.slice_sample_num_pd*6
            self.slice_sample_num_ps = 64
            self.slice_length_ps = self.slice_sample_num_ps*self.slice_sample_num_ps*6

            self.lambdas = train_configs["lambdas"]

            self.loss_configs = train_configs["loss_configs"]

            self.measurements_length = train_configs["measurements_length"]
            self.learning_rate = train_configs["learning_rate"]
            self.tamer_name = train_configs["tamer_name"]
            self.loss_with_form_fractor = train_configs["loss_with_form_fractor"]
            self.logPath = train_configs["log_details_dir"]
            make_dir(self.logPath)
            self.modelPath = train_configs["logPath"]+"models/"
            make_dir(self.modelPath)
            self.batch_size = train_configs["batch_size"]#*train_configs["rotate_num"]

            standard_rendering_parameters = {}

            standard_rendering_parameters["parameter_len"] = self.parameter_len
            standard_rendering_parameters["batch_size"] = self.batch_size
            standard_rendering_parameters["lumitexel_size"] = self.lumitexel_length
            standard_rendering_parameters["is_grey_scale"] = True
            standard_rendering_parameters["config_dir"] = "auxiliary/tf_ggx_render_newparam/tf_ggx_render_configs_1x1/"
            self.end_points["standard_rendering_parameters"] = standard_rendering_parameters
            self.with_length_predict = train_configs["with_length_predict"]

            self.RENDER_SCALAR = 5*1e3/math.pi
            self.SLICE_SCALAR = 5*1e3/math.pi
        else:
            self.parameter_len = train_configs["parameter_len"]
Example 5
epsilon = 1e-3
param_bounds = {}
param_bounds["n"] = (epsilon, 1.0 - epsilon)
param_bounds["theta"] = (0.0, math.pi)
param_bounds["a"] = (0.006, 0.503)
param_bounds["pd"] = (0.0, 1.0)
param_bounds["ps"] = (0.0, 10.0)
param_bounds["box"] = (-50.0, 50.0)
param_bounds["angle"] = (0.0, 2.0 * math.pi)

if __name__ == "__main__":
    data_root = sys.argv[1]
    sample_num = int(sys.argv[2])
    train_data_ratio = float(sys.argv[3])
    fixed_angle_num = int(sys.argv[4])
    make_dir(data_root)

    os.system("\"auxiliary\\exe_utils\\random_number_generator.exe\" {} {} {}".
              format(sample_num, param_dim, data_root + "raw_params.bin"))

    raw_random_numbers = np.fromfile(data_root + "raw_params.bin",
                                     np.float32).reshape(
                                         [sample_num, param_dim])

    positions = raw_random_numbers[:, :3] * (
        param_bounds["box"][1] -
        param_bounds["box"][0]) + param_bounds["box"][0]

    ns = raw_random_numbers[:, 3:5]

    ts = raw_random_numbers[:, [5]] * (
Example 6
    return components


def getW(lumi_len, K):
    random = gaussian_random_matrix(lumi_len, K)
    random_reshape = np.reshape(random, (lumi_len, K))
    return random_reshape
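Only the tail of gaussian_random_matrix survives in these snippets (the orphan return components above, and size=(n_components, n_features)) plus return components in Example 7). A plausible reconstruction, assuming an i.i.d. Gaussian random-projection matrix with the usual 1/sqrt(n_components) scaling:

import numpy as np

def gaussian_random_matrix(n_components, n_features):
    # i.i.d. Gaussian projection matrix; getW calls this with
    # (n_components, n_features) = (lumi_len, K). The scaling is an
    # assumption, since the original body is truncated here.
    components = np.random.normal(loc=0.0,
                                  scale=1.0 / np.sqrt(n_components),
                                  size=(n_components, n_features))
    return components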


MAX_ITR = 5000000
VALIDATE_ITR = 5
CHECK_QUALITY_ITR = 5000
SAVE_MODEL_ITR = 10000
LOG_ROOT = "logs/"
if __name__ == "__main__":
    make_dir(LOG_ROOT)
    visualize_init(UTILS_CONFIG_PATH)
    data_pan = sys.argv[1]
    pretrained_model = sys.argv[2]
    ########################################
    ######step1 parse config
    ########################################
    train_configs = {}
    train_configs["parameter_len"] = 7  #normal2 tangent1 axay2 pd1 ps1
    train_configs["lumitexel_length"] = 24576
    train_configs["measurements_length"] = 16
    train_configs["learning_rate"] = 1e-4
    train_configs["tamer_name"] = "tamer"
    train_configs["logPath"] = LOG_ROOT + "logs_lumitexel_guesser_iso/"
    make_dir(train_configs["logPath"])
    train_configs[
Example 7
                                  size=(n_components, n_features))
    return components

def getW(lumi_len, K):
    random = gaussian_random_matrix(lumi_len, K)
    random_reshape = np.reshape(random, (lumi_len, K))
    return random_reshape

MAX_ITR = 5000000
DUMP_ITR = 250000
VALIDATE_ITR = 5
CHECK_QUALITY_ITR = 5000
SAVE_MODEL_ITR = 10000
LOG_ROOT = "logs/"
if __name__ == "__main__":
    make_dir(LOG_ROOT)
    visualize_init(UTILS_CONFIG_PATH)
    visualize_cube_slice_init(UTILS_CONFIG_PATH,64)
    visualize_cube_slice_init(UTILS_CONFIG_PATH,8)
    data_root = sys.argv[1]
    ########################################
    ######step1 parse config
    ########################################
    train_configs = {}
    train_configs["DUMP_ITR"] = DUMP_ITR
    train_configs["parameter_len"] = 7#normal2 tangent1 axay2 pd1 ps1
    train_configs["lumitexel_length"] = 24576
    train_configs["loss_with_form_fractor"] = False
    train_configs["measurements_length"] = 6
    train_configs["learning_rate"] = 1e-4
    train_configs["rotate_num"] = 12
Example 8
import sys
import math
from tame_tamer import Tame_Tamer

sys.path.append("../utils/")
sys.path.append("../")
from tf_ggx_render.tf_ggx_render import tf_ggx_render
from lumitexel_related import visualize_init, visualize_new
from dir_folder_and_files import make_dir

UTILS_CONFIG_PATH = "G:/current_work/utils/"
if __name__ == "__main__":
    visualize_init(UTILS_CONFIG_PATH)
    data_root = "G:/2019_fresh_meat/3_29_beer_can/pattern/"
    log_root = data_root + "guess_log/"
    need_dump = True
    make_dir(log_root)
    GUESS_SCALAR = float("186.45603942871094")
    test_configs = {}
    test_configs["parameter_len"] = 7
    test_configs["lumitexel_length"] = 24576
    test_configs["measurements_length"] = 16
    test_configs["learning_rate"] = 1e-4
    test_configs["tamer_name"] = "tamer"
    test_configs["logPath"] = "logs/"
    test_configs[
        "pretrained_projection_matrix_path"] = "G:/2019_jointly_capture_training/3_7/Julia_feature_extractor_865000/"

    test_configs["batch_size"] = 50

    test_configs[
        "model_path"] = "G:/current_work/BRDF_param_guesser/logs/models/tamer_495000/tamer"
Example 9
import numpy as np
import cv2
import sys
sys.path.append("../utils/")
from dir_folder_and_files import make_dir
from lumitexel_related import visualize_init,visualize_new

UTILS_CONFIG_PATH = "G:/current_work/utils/"

if __name__ == "__main__":
    data_root = sys.argv[1]
    data_name = sys.argv[2]

    visualize_init(UTILS_CONFIG_PATH)

    data = np.fromfile(data_root + data_name, np.float32).reshape([-1, 3, 24576])

    img_root = data_root+"img/"
    make_dir(img_root)
    for idx, apixel in enumerate(data):
        # Visualize each colour channel of the lumitexel separately.
        r = np.expand_dims(visualize_new(apixel[0]), axis=2)
        g = np.expand_dims(visualize_new(apixel[1]), axis=2)
        b = np.expand_dims(visualize_new(apixel[2]), axis=2)

        # OpenCV expects BGR channel order; scale to 8-bit range before writing.
        img = np.concatenate([b, g, r], axis=-1)
        cv2.imwrite(img_root + "{}.png".format(idx), img * 255)
    def save_model(self):
        print("saveModel!")
        path = self.modelPath + self.tamer_name + "_" + str(self.sess.run(self.end_points["global_step"])) + "\\"
        make_dir(path)
        path = path + self.tamer_name
        self.end_points['saver'].save(self.sess, path)