def fill_params(expt_name, chkpt_num, batch_sz, gpus,
                sampler_fname, model_fname, augmentor_fname, **args):

    params = {}

    #Model params
    params["in_dim"] = 1
    params["output_spec"] = collections.OrderedDict(synapse_label=1)
    params["depth"] = 4
    params["batch_norm"] = True

    #Training procedure params
    params["max_iter"] = 1000000
    params["lr"] = 0.00001
    params["test_intv"] = 100
    params["test_iter"] = 10
    params["avgs_intv"] = 50
    params["chkpt_intv"] = 10000
    params["warm_up"] = 50
    params["chkpt_num"] = chkpt_num
    params["batch_size"] = batch_sz

    #Sampling params
    params["data_dir"] = os.path.expanduser(
        "~/seungmount/research/Nick/datasets/SNEMI3D/")
    assert os.path.isdir(params["data_dir"]), "nonexistent data directory"
    params["train_sets"] = ["K_val"]
    params["val_sets"] = ["K_val"]
    params["patchsz"] = (18, 160, 160)
    params["sampler_spec"] = dict(input=params["patchsz"],
                                  synapse_label=params["patchsz"])

    #GPUS
    params["gpus"] = gpus

    #IO/Record params
    params["expt_name"] = expt_name
    params["expt_dir"] = "experiments/{}".format(expt_name)
    params["model_dir"] = os.path.join(params["expt_dir"], "models")
    params["log_dir"] = os.path.join(params["expt_dir"], "logs")
    params["fwd_dir"] = os.path.join(params["expt_dir"], "forward")
    params["tb_train"] = os.path.join(params["expt_dir"], "tb/train")
    params["tb_val"] = os.path.join(params["expt_dir"], "tb/val")

    #Use-specific Module imports
    params["model_class"] = utils.load_source(model_fname).Model
    params["sampler_class"] = utils.load_source(sampler_fname).Sampler
    params["augmentor_constr"] = utils.load_source(augmentor_fname).get_augmentation

    #"Schema" for turning the parameters above into arguments
    # for the model class
    params["model_args"] = [params["in_dim"], params["output_spec"],
                            params["depth"]]
    params["model_kwargs"] = {"bn": params["batch_norm"]}

    #modules used for record-keeping
    params["modules_used"] = [__file__, model_fname, sampler_fname,
                              augmentor_fname, "loss.py"]

    return params
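# Usage sketch (not part of the original script): the "model_args"/"model_kwargs"
# schema above lets a generic training driver instantiate the user-supplied Model
# class without knowing its constructor signature. build_model is a hypothetical
# helper name introduced here only for illustration.
def build_model(params):
    #Instantiate the Model class recorded by fill_params from its schema
    return params["model_class"](*params["model_args"], **params["model_kwargs"])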
def create_features(features):
    import timeit

    source = load_source()

    start = timeit.default_timer()
    compute_features(source, features)
    end = timeit.default_timer()

    print("saving all features took {:.3f} s".format(end - start))
def fill_params(expt_name, chkpt_num, gpus, nobn, model_name, dset_names, tag):

    params = {}

    #Model params
    params["in_dim"] = 1
    params["output_spec"] = collections.OrderedDict(soma_label=1)
    params["depth"] = 4
    params["batch_norm"] = not nobn
    params["activation"] = F.sigmoid
    params["chkpt_num"] = chkpt_num

    #GPUS
    params["gpus"] = gpus

    #IO/Record params
    params["expt_name"] = expt_name
    params["expt_dir"] = "/jukebox/wang/zahra/conv_net/training/experiment_dirs/{}".format(expt_name)
    params["model_dir"] = os.path.join(params["expt_dir"], "models")
    params["log_dir"] = os.path.join(params["expt_dir"], "logs")
    params["fwd_dir"] = os.path.join(params["expt_dir"], "forward")
    params["log_tag"] = "fwd_" + tag if len(tag) > 0 else "fwd"
    params["output_tag"] = tag

    #Dataset params
    params["data_dir"] = "/jukebox/wang/pisano/conv_net/annotations/all_better_res/h129/otsu/inputRawImages"
    assert os.path.isdir(params["data_dir"]), "nonexistent data directory"
    params["dsets"] = dset_names
    params["input_spec"] = collections.OrderedDict(input=(20, 192, 192))  #dp dataset spec
    params["scan_spec"] = collections.OrderedDict(soma_label=(1, 20, 192, 192))
    params["scan_params"] = dict(stride=(0.75, 0.75, 0.75), blend="bump")

    #Use-specific Module imports
    params["model_class"] = utils.load_source('models/RSUNet.py').Model

    #"Schema" for turning the parameters above into arguments
    # for the model class
    params["model_args"] = [params["in_dim"], params["output_spec"],
                            params["depth"]]
    params["model_kwargs"] = {"bn": params["batch_norm"]}

    #Modules used for record-keeping
    params["modules_used"] = [__file__, 'models/RSUNet.py', "layers.py"]

    return params
def fill_params(expt_name, chkpt_num, gpus, nobn, model_fname, dset_names, tag):

    params = {}

    #Model params
    params["in_spec"] = dict(input=(1, 20, 192, 192))
    params["output_spec"] = collections.OrderedDict(soma=(1, 20, 192, 192))
    params["width"] = [32, 40, 80]
    params["activation"] = sigmoid
    params["chkpt_num"] = chkpt_num

    #GPUS
    params["gpus"] = gpus

    #IO/Record params
    params["expt_name"] = expt_name
    params["expt_dir"] = "/jukebox/wang/zahra/conv_net/training/prv/experiment_dirs/{}".format(expt_name)
    params["model_dir"] = os.path.join(params["expt_dir"], "models")
    params["log_dir"] = os.path.join(params["expt_dir"], "logs")
    params["fwd_dir"] = os.path.join(params["expt_dir"], "forward")
    params["log_tag"] = "fwd_" + tag if len(tag) > 0 else "fwd"
    params["output_tag"] = tag

    #Dataset params
    params["data_dir"] = "/home/wanglab/mounts/wang/zahra/conv_net/annotations/prv/hypothalamus/inputs"
    assert os.path.isdir(params["data_dir"]), "nonexistent data directory"
    params["dsets"] = dset_names
    params["input_spec"] = collections.OrderedDict(input=(20, 192, 192))  #dp dataset spec
    params["scan_spec"] = collections.OrderedDict(soma=(1, 20, 192, 192))
    params["scan_params"] = dict(stride=(0.5, 0.5, 0.5), blend="bump")

    #Use-specific Module imports
    params["model_class"] = utils.load_source(model_fname).Model

    #"Schema" for turning the parameters above into arguments
    # for the model class
    params["model_args"] = [params["in_spec"], params["output_spec"],
                            params["width"]]
    params["model_kwargs"] = {}

    #Modules used for record-keeping
    params["modules_used"] = [__file__, model_fname, "layers.py"]

    return params
def fill_params(expt_name, chkpt_num, gpus, nobn, model_fname, dset_name, tag, jobid):

    params = {}

    #Model params
    params["in_spec"] = dict(input=(1, 20, 32, 32))
    params["output_spec"] = collections.OrderedDict(soma=(1, 20, 32, 32))
    params["width"] = [32, 40, 80]
    params["activation"] = sigmoid
    params["chkpt_num"] = chkpt_num

    #GPUS
    params["gpus"] = gpus

    #IO/Record params
    params["expt_name"] = expt_name
    params["expt_dir"] = "/tigress/zmd/3dunet_data/cfos/experiments/{}".format(expt_name)
    params["model_dir"] = os.path.join(params["expt_dir"], "models")
    params["log_dir"] = os.path.join(params["expt_dir"], "logs")
    params["fwd_dir"] = os.path.join(params["expt_dir"], "forward")
    params["log_tag"] = "fwd_" + tag if len(tag) > 0 else "fwd"
    params["output_tag"] = tag
    params["jobid"] = jobid

    #Dataset params
    params["data_dir"] = "/scratch/gpfs/zmd/{}".format(dset_name)
    # assert os.path.isdir(params["data_dir"]), "nonexistent data directory"
    params["dsets"] = dset_name
    params["input_spec"] = collections.OrderedDict(input=(20, 32, 32))  #dp dataset spec
    params["scan_spec"] = collections.OrderedDict(soma=(1, 20, 32, 32))
    params["scan_params"] = dict(stride=(0.1, 0.1, 0.1), blend="bump")

    #Use-specific Module imports
    params["model_class"] = utils.load_source(model_fname).Model

    #"Schema" for turning the parameters above into arguments
    # for the model class
    params["model_args"] = [params["in_spec"], params["output_spec"],
                            params["width"]]
    params["model_kwargs"] = {}

    #Modules used for record-keeping
    params["modules_used"] = [__file__, model_fname, "layers.py"]

    return params
def fill_params(expt_name, chkpt_num, gpus, nobn, model_name, tag, jobid):

    params = {}

    #Model params
    params["in_dim"] = 1
    params["output_spec"] = collections.OrderedDict(soma_label=1)
    params["depth"] = 4
    params["batch_norm"] = not nobn
    params["activation"] = F.sigmoid
    params["chkpt_num"] = chkpt_num

    #GPUS
    params["gpus"] = gpus

    #IO/Record params
    params["expt_name"] = expt_name
    params["expt_dir"] = "/tigress/zmd/3dunet_data/experiments/{}".format(expt_name)
    params["model_dir"] = os.path.join(params["expt_dir"], "models")
    params["log_dir"] = os.path.join(params["expt_dir"], "logs")
    params["fwd_dir"] = os.path.join(params["expt_dir"], "forward")
    params["log_tag"] = "fwd_" + tag if len(tag) > 0 else "fwd"
    params["output_tag"] = tag

    #Dataset params
    params["data_dir"] = "/scratch/gpfs/zmd/20180327_jg40_bl6_sim_03"
    assert os.path.isdir(params["data_dir"]), "nonexistent data directory"
    params["jobid"] = jobid
    params["input_spec"] = collections.OrderedDict(input=(20, 192, 192))  #dp dataset spec
    params["scan_spec"] = collections.OrderedDict(soma_label=(1, 20, 192, 192))
    params["scan_params"] = dict(stride=(0.75, 0.75, 0.75), blend="bump")

    #Use-specific Module imports
    params["model_class"] = utils.load_source('models/RSUNet.py').Model

    #"Schema" for turning the parameters above into arguments
    # for the model class
    params["model_args"] = [params["in_dim"], params["output_spec"],
                            params["depth"]]
    params["model_kwargs"] = {"bn": params["batch_norm"]}

    #Modules used for record-keeping
    params["modules_used"] = [__file__, 'models/RSUNet.py', "layers.py"]

    return params
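# Usage sketch (not part of the original scripts): how a forward-pass driver
# might consume the inference params above -- build the model from the recorded
# schema and restore the requested checkpoint. The checkpoint filename pattern
# and the load_model_for_inference name are assumptions for illustration only.
import os
import torch

def load_model_for_inference(params):
    #Instantiate the user-supplied Model class from the recorded schema
    model = params["model_class"](*params["model_args"], **params["model_kwargs"])
    #Restore weights for the requested checkpoint number (assumed naming scheme)
    chkpt_fname = os.path.join(params["model_dir"],
                               "model{}.chkpt".format(params["chkpt_num"]))
    model.load_state_dict(torch.load(chkpt_fname, map_location="cpu"))
    return model.eval()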
def fill_params(expt_name, chkpt_num, batch_sz, gpus,
                sampler_fname, model_fname, augmentor_fname, **args):

    params = {}

    #Model params
    params["in_spec"] = dict(input=(1, 20, 192, 192))
    params["output_spec"] = collections.OrderedDict(cleft=(1, 20, 192, 192))
    params["width"] = [32, 40, 80]

    #Training procedure params
    params["max_iter"] = 1000000
    params["lr"] = 0.00001
    params["test_intv"] = 100
    params["test_iter"] = 10
    params["avgs_intv"] = 50
    params["chkpt_intv"] = 1000
    params["warm_up"] = 50
    params["chkpt_num"] = chkpt_num
    params["batch_size"] = batch_sz

    #Sampling params
    params["data_dir"] = "/tigress/zmd/3dunet_data/ctb/training_inputs"
    assert os.path.isdir(params["data_dir"]), "nonexistent data directory"
    params["train_sets"] = [
        "z269stackstart150",
        "z269stackstart475",
        "z266stackstart350",
        "z266stackstart250",
        "z268stackstart300",
        "z265_zpln165-191_x6325_y4458",
        "z265_zpln315-340_x4785_y3793"
    ]
    params["val_sets"] = ["z269stackstart100"]
    params["patchsz"] = (20, 192, 192)
    params["sampler_spec"] = dict(input=params["patchsz"],
                                  soma_label=params["patchsz"])

    #GPUS
    params["gpus"] = gpus

    #IO/Record params
    params["expt_name"] = expt_name
    params["expt_dir"] = "/tigress/zmd/3dunet_data/ctb/network/{}".format(expt_name)
    params["model_dir"] = os.path.join(params["expt_dir"], "models")
    params["log_dir"] = os.path.join(params["expt_dir"], "logs")
    params["fwd_dir"] = os.path.join(params["expt_dir"], "forward")
    params["tb_train"] = os.path.join(params["expt_dir"], "tb/train")
    params["tb_val"] = os.path.join(params["expt_dir"], "tb/val")

    #Use-specific Module imports
    params["model_class"] = utils.load_source(model_fname).Model
    params["sampler_class"] = utils.load_source(sampler_fname).Sampler
    params["augmentor_constr"] = utils.load_source(augmentor_fname).get_augmentation

    #"Schema" for turning the parameters above into arguments
    # for the model class
    params["model_args"] = [params["in_spec"], params["output_spec"],
                            params["width"]]
    params["model_kwargs"] = {}

    #modules used for record-keeping
    params["modules_used"] = [__file__, model_fname, sampler_fname,
                              augmentor_fname, "loss.py"]

    return params
def fill_params_train(expt_name, batch_sz, gpus,
                      sampler_fname, model_fname, augmentor_fname, **args):

    params = {}

    #Model params
    params["in_spec"] = dict(input=(1, 20, 192, 192))
    params["output_spec"] = collections.OrderedDict(soma=(1, 20, 192, 192))
    params["width"] = [32, 40, 80]

    #Training procedure params
    params["max_iter"] = 51
    params["lr"] = 0.00001
    params["test_intv"] = 25
    params["test_iter"] = 10
    params["avgs_intv"] = 10
    params["chkpt_intv"] = 10
    params["warm_up"] = 5
    params["chkpt_num"] = 0
    params["batch_size"] = batch_sz

    #Sampling params
    print("the working directory is: {}\n".format(os.getcwd()))
    params["data_dir"] = os.path.join(os.path.dirname(os.getcwd()), 'demo')
    assert os.path.isdir(params["data_dir"]), "nonexistent data directory"
    params["train_sets"] = ["train"]
    params["val_sets"] = ["val"]
    params["patchsz"] = (20, 192, 192)
    params["sampler_spec"] = dict(input=params["patchsz"],
                                  soma_label=params["patchsz"])

    #GPUS
    params["gpus"] = gpus

    #IO/Record params
    params["expt_name"] = expt_name
    params["expt_dir"] = os.path.join(params["data_dir"],
                                      "experiments/{}".format(expt_name))
    params["model_dir"] = os.path.join(params["expt_dir"], "models")
    params["log_dir"] = os.path.join(params["expt_dir"], "logs")
    params["fwd_dir"] = os.path.join(params["expt_dir"], "forward")
    params["tb_train"] = os.path.join(params["expt_dir"], "tb/train")
    params["tb_val"] = os.path.join(params["expt_dir"], "tb/val")

    #Use-specific Module imports
    params["model_class"] = utils.load_source(model_fname).Model
    params["sampler_class"] = utils.load_source(sampler_fname).Sampler
    params["augmentor_constr"] = utils.load_source(augmentor_fname).get_augmentation

    #"Schema" for turning the parameters above into arguments
    # for the model class
    params["model_args"] = [params["in_spec"], params["output_spec"],
                            params["width"]]
    params["model_kwargs"] = {}

    #modules used for record-keeping
    params["modules_used"] = [__file__, model_fname, sampler_fname,
                              augmentor_fname, "loss.py"]

    return params
def fill_params(expt_name, chkpt_num, batch_sz, gpus,
                sampler_fname, model_fname, augmentor_fname, **args):

    params = {}

    #Model params
    params["in_spec"] = dict(input=(1, 20, 192, 192))
    params["output_spec"] = collections.OrderedDict(cleft=(1, 20, 192, 192))
    params["width"] = [32, 40, 80]

    #Training procedure params
    params["max_iter"] = 1000000
    params["lr"] = 0.00001
    params["test_intv"] = 100
    params["test_iter"] = 10
    params["avgs_intv"] = 50
    params["chkpt_intv"] = 10
    params["warm_up"] = 50
    params["chkpt_num"] = chkpt_num
    params["batch_size"] = batch_sz

    #Sampling params
    params["data_dir"] = "/tigress/zmd/3dunet_data/prv/all_inputs/"
    assert os.path.isdir(params["data_dir"]), "nonexistent data directory"
    params["train_sets"] = [
        'zd_ann_prv_jg05_neocortex_z310-449_01',
        'zd_ann_prv_jg24_neocortex_z300-400_01',
        '20180306_jg_bl6f_prv_16_647_010na_7d5um_250msec_10povlp_ch00_C00_Z0650-0700_01',
        '20180306_jg_bl6f_prv_16_647_010na_7d5um_250msec_10povlp_ch00_C00_Z0450-0500_01',
        '20180306_jg_bl6f_prv_16_647_010na_7d5um_250msec_10povlp_ch00_C00_Z0450-0500_00',
        'cj_ann_prv_jg05_neocortex_z250-449_02',
        'zd_ann_prv_jg32_hypothal_z710-810_02',
        'cj_ann_prv_jg05_hypothal_z661-760_02',
        'JGANNOTATION_20180305_jg_bl6f_prv_12_647_010na_7d5um_250msec_10povlp_ch00_C00_400-440_01',
        'cj_ann_prv_jg29_neocortex_z700-800_02',
        'cj_ann_prv_jg24_neocortex_z300-400_01',
        'cj_ann_prv_jg32_neocortex_z650-810_01',
        '20180305_jg_bl6f_prv_11_647_010na_7d5um_250msec_10povlp_ch00_C00_300-345_00',
        'JGANNOTATION_20180306_jg_bl6f_prv_16_647_010na_7d5um_250msec_10povlp_ch00_C00_Z0450-0500_01',
        'cj_ann_prv_jg24_neocortex_z300-400_02',
        'cj_ann_prv_jg24_hypothal_z550-650_01',
        'cj_ann_prv_jg29_hypothal_z580-700_01',
        'zd_ann_prv_jg29_neocortex_z300-500_01',
        '20180305_jg_bl6f_prv_11_647_010na_7d5um_250msec_10povlp_ch00_C00_300-345_01',
        'zd_ann_prv_jg32_neocortex_z650-810_01',
        'zd_ann_prv_jg24_hypothal_z550-650_01',
        '20180215_jg_bl6f_prv_10_647_010na_z7d5um_250msec_10povlap_ch00_z200-400_y4500-4850_x2050-2400',
        '20180306_jg_bl6f_prv_16_647_010na_7d5um_250msec_10povlp_ch00_C00_Z0650-0700_06',
        'cj_ann_prv_jg32_hypothal_z650-810_01',
        '20180305_jg_bl6f_prv_12_647_010na_7d5um_250msec_10povlp_ch00_C00_400-440_01',
        'JGANNOTATION_20180306_jg_bl6f_prv_16_647_010na_7d5um_250msec_10povlp_ch00_C00_Z0450-0500_02',
        '20180305_jg_bl6f_prv_12_647_010na_7d5um_250msec_10povlp_ch00_C00_400-440_03',
        'cj_ann_prv_jg05_neocortex_z310-449_01'
    ]
    params["val_sets"] = [
        'cj_ann_prv_jg29_hypothal_z700-800_02',
        'JGANNOTATION_20180305_jg_bl6f_prv_12_647_010na_7d5um_250msec_10povlp_ch00_C00_400-440_02',
        '20180306_jg_bl6f_prv_16_647_010na_7d5um_250msec_10povlp_ch00_C00_Z0650-0700_00',
        '20180305_jg_bl6f_prv_12_647_010na_7d5um_250msec_10povlp_ch00_C00_400-440_02',
        'JGANNOTATION_20180305_jg_bl6f_prv_12_647_010na_7d5um_250msec_10povlp_ch00_C00_400-440_03',
        '20180306_jg_bl6f_prv_16_647_010na_7d5um_250msec_10povlp_ch00_C00_Z0450-0500_06',
        '20180306_jg_bl6f_prv_16_647_010na_7d5um_250msec_10povlp_ch00_C00_Z0650-0700_05',
        '20180305_jg_bl6f_prv_12_647_010na_7d5um_250msec_10povlp_ch00_C00_400-440_00'
    ]
    params["patchsz"] = (20, 192, 192)
    params["sampler_spec"] = dict(input=params["patchsz"],
                                  soma_label=params["patchsz"])

    #GPUS
    params["gpus"] = gpus

    #IO/Record params
    params["expt_name"] = expt_name
    params["expt_dir"] = "/tigress/zmd/3dunet_data/prv/experiments/{}".format(expt_name)
    params["model_dir"] = os.path.join(params["expt_dir"], "models")
    params["log_dir"] = os.path.join(params["expt_dir"], "logs")
    params["fwd_dir"] = os.path.join(params["expt_dir"], "forward")
    params["tb_train"] = os.path.join(params["expt_dir"], "tb/train")
    params["tb_val"] = os.path.join(params["expt_dir"], "tb/val")

    #Use-specific Module imports
    params["model_class"] = utils.load_source(model_fname).Model
    params["sampler_class"] = utils.load_source(sampler_fname).Sampler
    params["augmentor_constr"] = utils.load_source(augmentor_fname).get_augmentation

    #"Schema" for turning the parameters above into arguments
    # for the model class
    params["model_args"] = [params["in_spec"], params["output_spec"],
                            params["width"]]
    params["model_kwargs"] = {}

    #modules used for record-keeping
    params["modules_used"] = [__file__, model_fname, sampler_fname,
                              augmentor_fname, "loss.py"]

    return params
def fill_params(expt_name, chkpt_num, batch_sz, gpus,
                sampler_fname, model_fname, augmentor_fname, **args):

    params = {}

    #Model params
    params["in_spec"] = dict(input=(1, 20, 32, 32))
    params["output_spec"] = collections.OrderedDict(cleft=(1, 20, 32, 32))
    params["width"] = [32, 40, 80]

    #Training procedure params
    params["max_iter"] = 1000000
    params["lr"] = 0.00001
    params["test_intv"] = 100
    params["test_iter"] = 10
    params["avgs_intv"] = 50
    params["chkpt_intv"] = 1000
    params["warm_up"] = 50
    params["chkpt_num"] = chkpt_num
    params["batch_size"] = batch_sz

    #Sampling params
    params["data_dir"] = "/home/wanglab/Documents/cfos_inputs/otsu_and_guassian_screened"
    assert os.path.isdir(params["data_dir"]), "nonexistent data directory"
    params["train_sets"] = [
        'dp_ann_201812_pcdev_lob6_4_forebrain_cortex_z200-219',
        'tp_ann_201812_pcdev_lob6_9_forebrain_hypothal_z520-539',
        'tp_ann_201812_pcdev_crus1_23_forebrain_cortex_z290-309',
        'jd_ann_201904_an19_ymazefos_020719_thal_z350-369',
        'jd_ann_201904_an21_ymazefos_020719_hypothal_z450-469',
        'dp_ann_201904_an19_ymazefos_020719_pfc_z380-399',
        'dp_ann_201904_an21_ymazefos_020719_hypothal_z450-469',
        'tp_ann_201904_an10_ymzefos_020719_cortex_z280-279',
        'jd_ann_201904_an22_ymazefos_020719_pfc_z150-169',
        'jd_ann_201904_an22_ymazefos_020719_cb_z160-179',
        'dp_ann_201904_an22_ymazefos_020719_cb_z160-179',
        'tp_ann_201904_an19_ymazefos_020719_pfc_z380-399',
        'dp_ann_201904_an12_ymazefos_020719_hypothal_z420-449',
        'tp_ann_201812_pcdev_crus1_23_forebrain_midbrain_z260-279',
        'tp_ann_201904_an4_ymazefos_020119_cortex_z200-219',
        'tp_ann_201904_an4_ymazefos_020119_pfc_z200-219',
        'tp_ann_201812_pcdev_lob6_4_forebrain_cortex_z200-219',
        'dp_ann_201904_an19_ymazefos_020719_cortex_z380-399_02',
        'tp_ann_201904_an22_ymazefos_020719_pfc_z150-169',
        'jd_ann_201904_an30_ymazefos_020719_pfc_z410-429',
        'jd_ann_201904_an10_ymazefos_020719_hypothal_z460-479',
        'jd_ann_201904_an10_ymazefos_020719_pb_z260-279',
        'jd_ann_201904_an30_ymazefos_020719_cortex_z400-419',
        'dp_ann_201904_an19_ymazefos_020719_cortex_z350-369',
        'dp_ann_an16_ymazecfos_z260-299_retrosplenial_cropped'
    ]
    params["val_sets"] = [
        'dp_ann_201904_an12_ymazefos_020719_cortex_z371-390',
        'dp_ann_201904_an19_ymazefos_020719_cb_z380-399',
        'dp_ann_201812_pcdev_lob6_9_forebrain_hypothal_z520-539',
        'jd_ann_201904_an30_ymazefos_020719_striatum_z416-435',
        'tp_ann_201904_an30_ymazefos_020719_striatum_z416-435',
        'dp_ann_an22_ymazecfos_z230-249_sm_cortex_cropped',
        'dp_ann_201904_an19_ymazefos_020719_thal_z350-369'
    ]
    params["patchsz"] = (20, 32, 32)
    params["sampler_spec"] = dict(input=params["patchsz"],
                                  soma_label=params["patchsz"])

    #GPUS
    params["gpus"] = gpus

    #IO/Record params
    params["expt_name"] = expt_name
    params["expt_dir"] = "/home/wanglab/Documents/cfos_net/experiment_dirs/{}".format(expt_name)
    params["model_dir"] = os.path.join(params["expt_dir"], "models")
    params["log_dir"] = os.path.join(params["expt_dir"], "logs")
    params["fwd_dir"] = os.path.join(params["expt_dir"], "forward")
    params["tb_train"] = os.path.join(params["expt_dir"], "tb/train")
    params["tb_val"] = os.path.join(params["expt_dir"], "tb/val")

    #Use-specific Module imports
    params["model_class"] = utils.load_source(model_fname).Model
    params["sampler_class"] = utils.load_source(sampler_fname).Sampler
    params["augmentor_constr"] = utils.load_source(augmentor_fname).get_augmentation

    #"Schema" for turning the parameters above into arguments
    # for the model class
    params["model_args"] = [params["in_spec"], params["output_spec"],
                            params["width"]]
    params["model_kwargs"] = {}

    #modules used for record-keeping
    params["modules_used"] = [__file__, model_fname, sampler_fname,
                              augmentor_fname, "loss.py"]

    return params
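# All of the fill_params variants above resolve user-supplied modules through
# utils.load_source (e.g. utils.load_source(model_fname).Model). A minimal
# standard-library sketch of such a helper is below; the actual utils.py in
# this codebase may differ.
import importlib.util
import os

def load_source(fname, module_name=None):
    """Import a python file by path and return the loaded module."""
    module_name = module_name or os.path.splitext(os.path.basename(fname))[0]
    spec = importlib.util.spec_from_file_location(module_name, fname)
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module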