def Tune():
    """Run a random-search sweep over learning rate, regularization, and init distribution.

    For each of ``iteration`` trials, samples hyperparameters, builds model/train
    configs, loads the MNIST dataset, trains via ``Run``, and collects results.

    Returns:
        df: summary DataFrame built from all trial reports (via ``Reports2Df``).
        list_logs: list of per-trial log DataFrames (via ``Logs2Df``).
    """
    # Log-space search bounds: lr, reg ~ exp(Uniform(min, max)).
    # For a small net the same ranges were used: re in [-14, -4], lr in [-10, -6].
    re_max = -4
    re_min = -14
    lr_max = -6
    lr_min = -10
    distribution = np.array(['uniform', 'normal'])
    iteration = 1          # number of hyperparameter settings to try
    max_batch = 1000       # training length passed to the train config
    verbose = True

    # Sample learning rate and regularization strength log-uniformly.
    list_lr = np.exp(np.random.uniform(low=lr_min, high=lr_max, size=iteration))
    list_re = np.exp(np.random.uniform(low=re_min, high=re_max, size=iteration))
    # Pick a weight-init distribution uniformly at random for each trial.
    mask_distribution = np.random.randint(low=0, high=len(distribution), size=iteration)
    list_distribution = distribution[mask_distribution]

    _Notice()
    list_report = []
    list_logs = []
    mcg = ConfigUtils.ModelCfgGenerator()
    ccg = ConfigUtils.TrainCfgGenerator()

    for i in range(iteration):
        print('number of hyperparam setting tried: {}/{}'.format(i + 1, iteration))
        cfg_model = mcg.GetContent(distribution=list_distribution[i], reg=list_re[i])
        cfg_train = ccg.GetContent(lr=list_lr[i], max_batch=max_batch, verbose=verbose)
        # NOTE(review): the dataset is re-read every iteration; if data_dir is
        # constant across configs this load could be hoisted out of the loop.
        mnist = input_data.read_data_sets(cfg_train.data_dir, one_hot=False, seed=231)
        report, logs = Run(cfg_model, cfg_train, mnist)
        list_report.append(report)
        list_logs.append(Logs2Df(logs))

    df = Reports2Df(list_report)
    return df, list_logs