Example #1
# NOTE: assumed reconstruction of the truncated loop head; it computes the
# misclassification rate over the classifier's predictions as the overall loss
counter = 0
mistakes = 0
for i, pred in predictions:  # predictions holds (row_index, predicted_label) pairs
    if pred != complete_y[i][1]:  # complete_y holds (row_index, true_label) pairs
        mistakes = mistakes + 1
    counter = counter + 1
lossF = mistakes / counter

# enumerator <union>/<join> selects how slices for the next level are combined:
# with <join>, a new node of the current level is built by joining
# nodes of the previous level with each other;
# the <union> case is implemented on top of the DPSize algorithm
# (an illustrative sketch of the join idea follows this example)
enumerator = "join"
if enumerator == "join":
    slicer.process(all_features,
                   clf,
                   complete_x,
                   lossF,
                   x_size,
                   complete_y,
                   predictions,
                   debug=True,
                   alpha=6,
                   k=10,
                   w=0.5,
                   loss_type=1)
elif enumerator == "union":
    union_slicer.process(all_features,
                         clf,
                         complete_x,
                         lossF,
                         x_size,
                         complete_y,
                         predictions,
                         debug=True,
                         alpha=6,
                         k=10,          # remaining arguments assumed to
                         w=0.5,         # mirror the join branch above
                         loss_type=1)
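
For orientation, here is a minimal, illustrative-only sketch of the join-style enumeration described in the comments above; the predicate representation (frozensets of (feature, value) pairs) and the function name are assumptions, not the slicing library's implementation.

from itertools import combinations

def join_level(prev_level):
    # join two level-L slices into a level-(L+1) candidate only when they
    # overlap in all but one predicate
    next_level = set()
    for a, b in combinations(prev_level, 2):
        candidate = a | b
        if len(candidate) == len(a) + 1:
            next_level.add(candidate)
    return next_level

# level-1 slices: single (feature, value) predicates
level_1 = [frozenset([("sex", "F")]), frozenset([("age", "30-40")]), frozenset([("dept", "IT")])]
level_2 = join_level(level_1)  # all pairwise conjunctions of the level-1 slices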
Example #2
# pair every test row and label with its row index, as the slicer expects
counter = 0
complete_x = []
complete_y = []
for item in x_test:
    complete_x.append((counter, item))
    complete_y.append((counter, y_test[counter]))
    counter = counter + 1
x_size = counter
# train a linear regression model and compute the per-row squared error
model = LinearRegression()
model.fit(x_train, y_train)
preds = (model.predict(x_test) - y_test)**2
# overall L2 loss: mean squared error over the test set
f_l2 = sum(preds) / x_size
# pair each squared error with its row index
errors = []
counter = 0
for pred in preds:
    errors.append((counter, pred))
    counter = counter + 1
# alpha is the size significance coefficient
# debug=True returns and prints debug info while the slices are created
# k is the number of top slices to return
# w weights the error term in the optimization function; (1 - w) weights the
# size term (a hedged score sketch follows this example)
slicer.process(all_features,
               model,
               complete_x,
               f_l2,
               x_size,
               y_test,
               errors,
               debug=True,
               alpha=5,
               k=10,
               w=0.5,
               loss_type=0)
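
To make the role of w concrete, here is a hedged sketch of a weighted objective of the kind the comments above describe; the function name and the exact formula are assumptions and may differ from what slicer.process optimizes internally.

def slice_score(slice_error, slice_size, avg_error, total_size, w=0.5):
    # how much worse the slice's average error is than the overall average
    error_term = slice_error / avg_error if avg_error else 0.0
    # fraction of the dataset the slice covers
    size_term = slice_size / total_size if total_size else 0.0
    return w * error_term + (1 - w) * size_term

# e.g. a slice with twice the average error covering 10% of the rows:
# slice_score(2.0, 100, 1.0, 1000, w=0.5) == 1.05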
Example #3
# pair every row and label of the full dataset with its row index
counter = 0
complete_x = []
complete_y = []
for item in x:
    complete_row = (counter, item)
    complete_x.append(complete_row)
    complete_y.append((counter, y[counter]))
    counter = counter + 1
x_size = counter
# split the data and train a linear regression model on the training portion
x_train, x_test, y_train, y_test = train_test_split(x,
                                                    y,
                                                    test_size=0.3,
                                                    random_state=0)
model = LinearRegression()
model.fit(x_train, y_train)
# overall L2 loss: sum of squared test-set errors, normalized by the full dataset size
f_l2 = sum((model.predict(x_test) - y_test)**2) / x_size
# alpha is the size significance coefficient
# debug=True returns and prints debug info while the slices are created
# k is the number of top slices to return
# w weights the error term in the optimization function; (1 - w) weights the size term
slicer.process(enc,
               model,
               complete_x,
               complete_y,
               f_l2,
               x_size,
               x_test,
               y_test,
               debug=True,
               alpha=4,
               k=10,
               w=0.5)
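
The indexed-pair construction repeated in all three examples can be written as a small helper; the name below is hypothetical and only illustrates the (row_index, value) shape that complete_x and complete_y are built with.

def to_indexed_pairs(values):
    # wrap each value with its row index: [(0, v0), (1, v1), ...]
    return [(i, v) for i, v in enumerate(values)]

# e.g. complete_x = to_indexed_pairs(x)
#      complete_y = to_indexed_pairs(y)
#      x_size = len(complete_x)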