Example #1
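All of the snippets below rely on the same imports and on two project-local helpers; a minimal preamble, with the project-local pieces marked as assumptions, might look like this:

import os
import glob
import pickle
import itertools
from collections import OrderedDict

import numpy as np
import pandas as pd

import resnets   # project-local: provides make_resnet() and count_parameters()
import wrn       # project-local: provides WideResNet

def load_experiment(file_name):
    # Assumed project-local helper: unpickle one saved utils.result experiment.
    with open(file_name, "rb") as f:
        return pickle.load(f)
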
def get_final_accuracy(directory, depth, width_param, ens_size, valid=True, n_epochs=250, metric="s:e", suffix=["fixed_depth"], show_ensemble=False, maximum=True, architecture="resnet"):
    
    """
    Returns result of size [experiments, number of epochs] containing the metric specified

    directory -- the directory to read the files from
    param -- range of param number
    ens_size -- the size of ensembles
    valid -- whether to report the validation accuracy or the training accuracy
    n_epochs -- how many epochs to include
    metric -- the metric that the result tensor will contain
    """
    os.chdir(directory)
    files = glob.glob("*.pkl")

    result = {}
    result['s'] = OrderedDict()
    result['h'] = OrderedDict()
    result['v'] = OrderedDict()
    result['hn'] = OrderedDict()
    result['vn'] = OrderedDict()

    for conf in list(itertools.product(depth, width_param, ens_size)):
        f_v = "d_" + str(conf[0]) + "_w_" + str(conf[1]) + "_e_" + str(conf[2]) + "_vertical"  + ".pkl"
        f_h = "d_" + str(conf[0]) + "_w_" + str(conf[1]) + "_e_" + str(conf[2]) + "_horizontal"  + ".pkl"
        
        if architecture == "resnet":
            params = resnets.count_parameters(resnets.make_resnet(depth=conf[0], width_parameter = conf[1]))
        elif architecture == "wrn":
            params = resnets.count_parameters(wrn.WideResNet(depth=conf[0], widen_factor=conf[1], num_classes=10))
        
        params = round(params,2)

        if show_ensemble:
            key = " | ".join([str(conf[1]),str(conf[0]),str(conf[2]),str(params)])
        else:
            key = " | ".join([str(conf[1]),str(conf[0]),str(params)])
        
        exp_v = load_experiment(f_v)
        exp_h = load_experiment(f_h)
        
        
        s = np.array(exp_v.data['single'].valid_accy)
        h = np.array(exp_h.data['ensemble'].data['ensemble'].valid_accy)
        v = np.array(exp_v.data['ensemble'].data['ensemble'].valid_accy)

        if maximum:
            result['s'][key] = max(s)
            result['h'][key] = max(h)
            result['v'][key] = max(v)
        else:
            result['s'][key] = s[-1]
            result['h'][key] = h[-1]
            result['v'][key] = v[-1]


    return result
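A hypothetical call (directory, depth/width ranges and ensemble sizes are placeholders) that prints the best single, horizontal-ensemble and vertical-ensemble accuracies side by side:

accs = get_final_accuracy("/path/to/pickles", depth=[20, 32], width_param=[1, 2],
                          ens_size=[4], show_ensemble=True, maximum=True)
for key in accs['s']:
    print(key, accs['s'][key], accs['h'][key], accs['v'][key])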
Example #2
def convert_to_csv(in_directory, out_directory, depth, width_param, ens_size, valid, suffix=["vertical","horizontal"]):

    """
    Convert the pickled experiment results into per-experiment CSV files.

    in_directory -- the directory to read the pickle files from
    out_directory -- the directory to write the CSV files to
    depth -- range of network depths
    width_param -- range of width parameters
    ens_size -- range of ensemble sizes
    valid -- whether to export the validation accuracy or the training accuracy
    """
    columns = ["epoch", "train_time", "valid_error", "num_param"]

    for conf in list(itertools.product(depth, width_param, ens_size)):

        params = resnets.count_parameters(resnets.make_resnet(depth=conf[0], width_parameter = conf[1]))/100000.0

        os.chdir(in_directory)
        f_v = "d_" + str(conf[0]) + "_w_" + str(conf[1]) + "_e_" + str(conf[2]) + "_vertical" + ".pkl"
        f_h = "d_" + str(conf[0]) + "_w_" + str(conf[1]) + "_e_" + str(conf[2]) + "_horizontal" + ".pkl"

        exp_v = load_experiment(f_v)
        exp_h = load_experiment(f_h)

        if valid:
            s = np.array(exp_v.data['single'].valid_accy)
            h = np.array(exp_h.data['ensemble'].data['ensemble'].valid_accy)
            v = np.array(exp_v.data['ensemble'].data['ensemble'].valid_accy)
        else:
            s = np.array(exp_v.data['single'].train_accy)
            h = np.array(exp_h.data['ensemble'].data['ensemble'].train_accy)
            v = np.array(exp_v.data['ensemble'].data['ensemble'].train_accy)

        os.chdir(out_directory)

        # One output directory per configuration and training regime.
        out_dir_v = "d_" + str(conf[0]) + "_w_" + str(conf[1]) + "_v"
        out_dir_h = "d_" + str(conf[0]) + "_w_" + str(conf[1]) + "_h"
        os.makedirs(out_dir_v, exist_ok=True)
        os.makedirs(out_dir_h, exist_ok=True)

        # Assumed export layout: one row per epoch, error given as 100 - accuracy (in percent);
        # train_time is left empty because per-epoch timings are not read here.
        for accy, out_dir, name in [(s, out_dir_v, "single"), (v, out_dir_v, "ensemble"), (h, out_dir_h, "ensemble")]:
            frame = pd.DataFrame({
                "epoch": np.arange(1, len(accy) + 1),
                "train_time": np.nan,
                "valid_error": 100.0 - accy,
                "num_param": params,
            }, columns=columns)
            frame.to_csv(os.path.join(out_dir, name + ".csv"), index=False)
Example #3
def get_epoch_accuracy_resnet_combined(directory, depth, width_param, ens_size, valid=True, n_epochs=250, metric="s:e", suffix=["fixed_depth"], show_ensemble=False, architecture="resnet"):
    
    """
    Returns result of size [experiments, number of epochs] containing the metric specified

    directory -- the directory to read the files from
    param -- range of param number
    ens_size -- the size of ensembles
    valid -- whether to report the validation accuracy or the training accuracy
    n_epochs -- how many epochs to include
    metric -- the metric that the result tensor will contain
    """
    os.chdir(directory)
    files = glob.glob("*.pkl")

    result = np.zeros([len(width_param)*len(depth)*len(ens_size), n_epochs])
    keys = []
    i=0
    for conf in list(itertools.product(depth, width_param, ens_size)):
        f_v = "d_" + str(conf[0]) + "_w_" + str(conf[1]) + "_e_" + str(conf[2]) + "_vertical"  + ".pkl"
        f_h = "d_" + str(conf[0]) + "_w_" + str(conf[1]) + "_e_" + str(conf[2]) + "_horizontal"  + ".pkl"
        
        if architecture == "resnet":
            params = resnets.count_parameters(resnets.make_resnet(depth=conf[0], width_parameter = conf[1]))
        elif architecture == "wrn":
            params = resnets.count_parameters(wrn.WideResNet(depth=conf[0], widen_factor=conf[1], num_classes=10))
        
        params = round(params,2)
        
        if show_ensemble:
            keys.append(" | ".join([str(conf[1]),str(conf[0]),str(conf[2]),str(params)]))
        else:
            keys.append(" | ".join([str(conf[1]),str(conf[0]),str(params)]))
        
        exp_v = load_experiment(f_v)
        exp_h = load_experiment(f_h)
        
        
        s = np.array(exp_v.data['single'].valid_accy)
        h = np.array(exp_h.data['ensemble'].data['ensemble'].valid_accy)
        v = np.array(exp_v.data['ensemble'].data['ensemble'].valid_accy)

        result[i,:] =  np.logical_and(s>h, s>v) * 0 # single best
        result[i,:] += np.logical_and(h>s, v<s) * 0.5 # Only h
        result[i,:] += np.logical_and(v>s, h<s) * 1 # only v
        result[i,:] +=  np.logical_and(h>s, v>s) * 1.5 # both best


        i+=1
    return keys, result.T
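A hypothetical call (directory and ranges are placeholders) summarising how often both ensembles beat the single network:

keys, combined = get_epoch_accuracy_resnet_combined("/path/to/pickles", depth=[20, 32],
                                                    width_param=[1, 2], ens_size=[4])
# combined has shape [n_epochs, experiments]; 1.5 marks epochs where both ensembles win.
for key, column in zip(keys, combined.T):
    print(key, "both-win fraction:", np.mean(column == 1.5))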
Example #4
def get_data(file_name, networks=["net_1", "net_2", "net_3", "net_4"], single=True, ensemble=True, attr='valid_accy'):

    """
    Returns the data stored in a utils.result object

    file_name -- the pickle file to read
    networks -- the ensemble networks to include
    single -- whether to include single network accuracy 
    ensemble -- whether to include the ensemble accuracy
    attr -- the attribute to get
    """
    result = {}
    
    exp = load_experiment(file_name)
    
    if single:
        result['single'] = getattr(exp.data['single'], attr)
    
    if ensemble:
        result['ensemble'] = getattr(exp.data['ensemble'].data['ensemble'], attr)
    
    for network in networks:
        result[network] = getattr(exp.data['ensemble'].data[network], attr)
    
    return result
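A hypothetical call (the file name follows the d_<depth>_w_<width>_e_<size>_<regime>.pkl pattern used above and is a placeholder):

data = get_data("d_20_w_1_e_4_vertical.pkl")
print("best single:", max(data['single']))
print("best ensemble:", max(data['ensemble']))
print("best net_1:", max(data['net_1']))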
Example #5
def get_accuracy_training_time_resnet(directory, depth, width_param, ens_size, valid=True, n_epochs=250, metric="s:e", suffix="fixed_depth"):

    """
    Get the accuracy and training time from the pickle files in the directory. 

    directory -- the directory to read the pickle files from
    param -- range of parameters to read 
    ens_size -- ensemble size
    valid -- whether to include the validation accuracy or the training accuracy

    """
    os.chdir(directory)
    files = glob.glob("*.pkl")

    result = {}
    for conf in list(itertools.product(depth, width_param, ens_size)):
        f = "d_" + str(conf[0]) + "_w_" + str(conf[1]) + "_e_" + str(conf[2]) + "_" + suffix + ".pkl"
        print(f)

        exp = load_experiment(f)
        result[f] = {}
        result[f]['single'] = [ conf[0], conf[1], max(exp.data['single'].valid_accy), sum(exp.data['single'].time)/60/60]
        result[f]['ensemble'] = [ conf[0], conf[1], max(exp.data['ensemble'].data['ensemble'].valid_accy), sum(exp.data['ensemble'].data['ensemble'].time)/60/60 ]

        accuracies = []
        for k in ['net_1','net_2','net_3','net_4']:
            accuracies.append(exp.data['ensemble'].data[k].valid_accy[-1])
        min_accuracy_ensembles = np.min(accuracies)
        result[f]['per_ensemble_network'] = [ conf[0], conf[1], min_accuracy_ensembles, sum(exp.data['ensemble'].data['net_1'].time)/60/60]

    return result
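A hypothetical call (directory, ranges and suffix are placeholders) that trades off best accuracy against training hours:

res = get_accuracy_training_time_resnet("/path/to/pickles", depth=[20], width_param=[1, 2],
                                        ens_size=[4], suffix="vertical")
for f, entry in res.items():
    # entry[...] = [depth, width, best accuracy, training time in hours]
    print(f, "single:", entry['single'][2:], "ensemble:", entry['ensemble'][2:])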
Example #6
def get_epoch_accuracy(directory, param, ens_size, valid=True, n_epochs=400, metric="s:e"):
    
    """
    Returns result of size [experiments, number of epochs] containing the metric specified

    directory -- the directory to read the files from
    param -- range of param number
    ens_size -- the size of ensembles
    valid -- whether to report the validation accuracy or the training accuracy
    n_epochs -- how many epochs to include
    metric -- the metric that the result tensor will contain
    """
    os.chdir(directory)
    files = glob.glob("*.pkl")

    result = np.zeros([len(param)*len(ens_size), n_epochs])
    i=0
    for conf in list(itertools.product(param, ens_size)):
        f = str(conf[0])+"M_"+str(conf[1])+".pkl"
        exp = load_experiment(f)
        
        if valid:
            single = np.array(exp.data['single'].valid_accy)
            ensemble = np.array(exp.data['ensemble'].data['ensemble'].valid_accy)
        else:
            single = np.array(exp.data['single'].train_accy)
            ensemble = np.array(exp.data['ensemble'].data['ensemble'].train_accy)

        
        if metric == "s:e":
            result[i,:] = (single / ensemble)
        
        if metric == "e:s":
            result[i,:] = (ensemble / single)
        
        if metric == "s>e":
            result[i,:] = (single > ensemble)
        
        if metric == "e>s":
            result[i,:] = (single < ensemble)
        
        
        if metric == "s":
            result[i,:] = single
        
        if metric == "e":
            result[i,:] = ensemble

        i+=1
    return result.T
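A hypothetical call (directory and parameter range are placeholders; the files are expected to follow the <param>M_<ens_size>.pkl pattern):

curves = get_epoch_accuracy("/path/to/pickles", param=[1, 2, 5], ens_size=[4], metric="s:e")
print(curves.shape)  # (n_epochs, n_experiments): per-epoch single:ensemble accuracy ratio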
Example #7
def get_epoch_accuracy_resnet(directory, depth, width_param, ens_size, valid=True, n_epochs=250, metric="s:e", suffix=["fixed_depth"], architecture="resnet"):
    
    """
    Returns result of size [experiments, number of epochs] containing the metric specified

    directory -- the directory to read the files from
    param -- range of param number
    ens_size -- the size of ensembles
    valid -- whether to report the validation accuracy or the training accuracy
    n_epochs -- how many epochs to include
    metric -- the metric that the result tensor will contain
    """
    os.chdir(directory)
    files = glob.glob("*.pkl")

    result = np.zeros([len(width_param)*len(depth)*len(ens_size)*len(suffix), n_epochs])
    keys = []
    i=0
    for conf in list(itertools.product(depth, width_param, ens_size, suffix)):
        f = "d_" + str(conf[0]) + "_w_" + str(conf[1]) + "_e_" + str(conf[2]) + "_" + str(conf[3]) + ".pkl"
        
        if architecture == "resnet":
            params = resnets.count_parameters(resnets.make_resnet(depth=conf[0], width_parameter = conf[1]))/100000.0
        elif architecture == "wrn":
            params = resnets.count_parameters(wrn.WideResNet(depth=conf[0], widen_factor=conf[1], num_classes=10))


        
        keys.append("|".join([str(conf[1]),str(conf[0]),str(conf[2]),conf[3][0],str(params)]))
        
        print(f)
        exp = load_experiment(f)
        
        if valid:
            single = np.array(exp.data['single'].valid_accy)
            ensemble = np.array(exp.data['ensemble'].data['ensemble'].valid_accy)
        else:
            single = np.array(exp.data['single'].train_accy)
            ensemble = np.array(exp.data['ensemble'].data['ensemble'].train_accy)

        
        if metric == "s:e":
            result[i,:] = (single / ensemble)
        
        if metric == "e:s":
            result[i,:] = (ensemble / single)
        
        if metric == "s>e":
            result[i,:] = (single > ensemble)
        
        if metric == "e>s":
            result[i,:] = (single < ensemble)
        
        
        if metric == "s":
            result[i,:] = single
        
        if metric == "e":
            result[i,:] = ensemble

        i+=1
    return keys, result.T
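A hypothetical call (directory and ranges are placeholders) that reports the final ensemble-to-single accuracy ratio per experiment:

keys, curves = get_epoch_accuracy_resnet("/path/to/pickles", depth=[20, 32], width_param=[1, 2],
                                         ens_size=[4], metric="e:s", suffix=["vertical"])
for key, curve in zip(keys, curves.T):
    print(key, "final e:s ratio:", curve[-1])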