Example #1
    def plot_query(self, experiment, query, **kwargs):
        rows = self.query(experiment, query)

        # transpose the rows (materialize as a list so it is not exhausted when unpacked below)
        rows_transposed = list(zip(*rows))

        # plot the result
        myplot(*rows_transposed, **kwargs)
        show()
        return rows_transposed
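For reference, the zip(*rows) idiom above transposes a row-oriented result set into one sequence per column. A minimal standalone sketch (plain Python, no plotting) of the idiom and of why the list() call matters in Python 3, where zip returns a one-shot iterator:

# Hypothetical query result: one (x, y) tuple per row.
rows = [(1, 10), (2, 20), (3, 30)]

# zip(*rows) flips rows into columns.
cols = list(zip(*rows))   # materialize; a bare zip object is empty after one pass
xs, ys = cols             # xs == (1, 2, 3), ys == (10, 20, 30)
print(xs, ys)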
Example #2
        cv2.grabCut(imgUint8, mask, rect, bgdModel, fgdModel, 5,
                    cv2.GC_INIT_WITH_RECT)
    # Where mask == 2 (probable background) or mask == 0 (sure background), mask2 gets 0; otherwise it gets 1, as 'uint8'.
    mask2 = np.where((mask == 2) | (mask == 0), 0, 1).astype('uint8')
    return mask2

if __name__ == '__main__':
    from saliency import *
    segmentList = [200, 300, 400]
    compactness = 20
    buildMethods = ['MY4']
    img = da.astronaut()
    labelMapDic = {}
    for n_segments in segmentList:
        labelMapDic[n_segments] = getSlic(img, n_segments, compactness)
    refinedImgs = []
    for buildMethod in buildMethods:
        function = buildMethodDic[buildMethod]
        _refinedImgs = []
        for n_segments in segmentList:

            _refinedImg = function(img=img,
                                    coarseImgs=coarseImgs,
                                    labelMap=labelMapDic[n_segments])
            _refinedImgs += [_refinedImg]

        refinedImg = integratImgsBy3way(_refinedImgs)
        refinedImgs += [refinedImg]
#    return refinedImgs
    show(refinedImgs)
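The mask filled in by cv2.grabCut encodes four states: cv2.GC_BGD (0, sure background), cv2.GC_FGD (1, sure foreground), cv2.GC_PR_BGD (2, probable background) and cv2.GC_PR_FGD (3, probable foreground). A minimal self-contained sketch of the same rectangle-initialized segmentation, ending with the foreground extracted by the binarized mask (the file name 'input.jpg' and the rectangle are placeholders):

import cv2
import numpy as np

img = cv2.imread('input.jpg')                              # BGR uint8 image
mask = np.zeros(img.shape[:2], np.uint8)
bgdModel = np.zeros((1, 65), np.float64)                   # internal models required by grabCut
fgdModel = np.zeros((1, 65), np.float64)
rect = (10, 10, img.shape[1] - 20, img.shape[0] - 20)      # rough foreground rectangle

cv2.grabCut(img, mask, rect, bgdModel, fgdModel, 5, cv2.GC_INIT_WITH_RECT)

# Keep sure/probable foreground (1 and 3), drop sure/probable background (0 and 2).
mask2 = np.where((mask == cv2.GC_FGD) | (mask == cv2.GC_PR_FGD), 1, 0).astype('uint8')
segmented = img * mask2[:, :, np.newaxis]                  # zero out background pixels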
Example #3
                temp_max = ruler[i]
                while temp_max > e:
                    e += 1
                    if ruler[e] > temp_max:
                        temp_max = ruler[e]
                result.append(Interval(s, e))
                i = e + 1
        return result


s = Solution()

r = s.insert(
    [Interval(2, 4),
     Interval(5, 7),
     Interval(8, 10),
     Interval(11, 13)], Interval(3, 8))  # [2, 10], [11, 13]
show(r)
r = s.insert([], Interval(5, 7))  # [5, 7]
show(r)
r = s.insert([Interval(1, 1)], Interval(5, 7))  # [1, 1], [5, 7]
show(r)
r = s.insert([
    Interval(1, 2),
    Interval(3, 5),
    Interval(6, 7),
    Interval(8, 10),
    Interval(12, 16)
], Interval(4, 9))  # [1, 2], [3, 10], [12, 16]
show(r)
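The snippet above only shows the tail of the project's insert routine. As a point of comparison, here is a minimal self-contained sketch of the usual three-phase merge approach (this Interval class and insert function are stand-ins for illustration, not the project's own implementation):

class Interval:
    def __init__(self, s=0, e=0):
        self.start = s
        self.end = e
    def __repr__(self):
        return '[{}, {}]'.format(self.start, self.end)

def insert(intervals, new):
    """Insert `new` into sorted, non-overlapping `intervals`, merging overlaps."""
    result = []
    i, n = 0, len(intervals)
    # 1) keep intervals that end before the new one starts
    while i < n and intervals[i].end < new.start:
        result.append(intervals[i])
        i += 1
    # 2) merge everything that overlaps the new interval
    while i < n and intervals[i].start <= new.end:
        new = Interval(min(new.start, intervals[i].start), max(new.end, intervals[i].end))
        i += 1
    result.append(new)
    # 3) append the remaining intervals unchanged
    result.extend(intervals[i:])
    return result

print(insert([Interval(1, 2), Interval(3, 5), Interval(6, 7), Interval(8, 10), Interval(12, 16)],
             Interval(4, 9)))  # [[1, 2], [3, 10], [12, 16]]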
Example #4
# %%
get_ipython().run_line_magic('matplotlib', 'inline')
import matplotlib.pyplot as plt

print(images[0])

plt.figure(figsize=(5, 5))
plt.imshow(images[0, :, :])

# %%
#np_som = np.random.rand(som_dim, som_dim, neuron_dim, neuron_dim, number_of_channels).astype(np.float32)
np_som = np.zeros(
    (som_dim, som_dim, neuron_dim, neuron_dim)).astype(np.float32)
np_som[0, 0, 0, 0] = 1
som = pink.SOM(np_som, neuron_layout="cartesian-2d")
image = tools.show(som, border=1)

plt.figure(figsize=(10, 10))
plt.axis("off")
plt.imshow(image)
plt.colorbar()

# %%
mapper = pink.Mapper(
    som,
    number_of_rotations=4,
    use_flip=False,
    euclidean_distance_dim=euclid_dim,
    euclidean_distance_shape=pink.EuclideanDistanceShape.QUADRATIC,
    use_gpu=False,
    verbosity=1)
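The show helper imported from tools presumably tiles the (som_dim, som_dim) grid of (neuron_dim, neuron_dim) neurons into a single displayable image. A rough, library-free sketch of that kind of tiling; the function name, the border handling and the fill value are assumptions for illustration, not PINK's actual implementation:

import numpy as np

def tile_som(np_som, border=1, fill=np.nan):
    """Tile a (som_dim, som_dim, neuron_dim, neuron_dim) array into one 2-D image."""
    som_dim, _, neuron_dim, _ = np_som.shape
    cell = neuron_dim + border
    canvas = np.full((som_dim * cell + border, som_dim * cell + border), fill, dtype=np.float32)
    for i in range(som_dim):
        for j in range(som_dim):
            y, x = border + i * cell, border + j * cell
            canvas[y:y + neuron_dim, x:x + neuron_dim] = np_som[i, j]
    return canvas

# e.g. image = tile_som(np_som, border=1); plt.imshow(image)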
Example #5
def index(request):
    #tools.createUnitRelations()
    #tools.createBinaryRelations()
    #q =  UnitRelations.objects.filter(axe = "1" , part__label = 1 ).order_by('value')

    #q =  BinaryRelations.objects.filter(axe = "1" , part_one__label = 2 , part_two__label = 3 ).order_by('value')
    # for i in q :
    # 	print i.value

    model = json.load(open(settings.MEDIA_ROOT + 'models/chairs.json'))
    label1 = 1
    label2 = ""
    #default_shape = 9
    shape = Shapes.objects.order_by('size').first()
    data1, data2 = tools.ushow(model["updf"], label1, 1, 0)
    chart_title = "Extent Relation | Label " + str(label1)
    relation = "scale"
    value = -1

    if request.method == 'POST':

        label1 = request.POST.get("label1")
        label2 = request.POST.get("label2")
        relation = request.POST.get("relation")

        if "reload" in request.POST:
            if (label1 and label2):
                data1, data2 = tools.show(model["pdf"], int(label1),
                                          int(label2), 0, 0, relation)

            elif (label1):
                data1, data2 = tools.ushow(model["updf"], int(label1), 1, 0)
        else:
            value = float(request.POST.get("x"))
            if label1 and label2:
                # binary case: search over two-label relations
                if int(label1) > int(label2):
                    label1, label2 = tools.swap(label1, label2)
                if "next" in request.POST:
                    query = BinaryRelations.objects.filter(
                        axe="1",
                        name=relation,
                        part_one__label=label1,
                        part_two__label=label2,
                        value__gt=value).order_by('value')
                elif "back" in request.POST:
                    query = BinaryRelations.objects.filter(
                        axe="1",
                        name=relation,
                        value__lt=value,
                        part_one__label=label1,
                        part_two__label=label2).order_by('-value')

                if query.count():
                    value = query.first().value
                    shape = query.first().part_one.shape
                else:
                    value = -1

                data1, data2 = tools.show(model["pdf"],
                                          int(label1), int(label2), 1,
                                          float(value), relation)

            elif label1:
            # simple case: search over single-label relations
                if "next" in request.POST:

                    query = UnitRelations.objects.filter(
                        axe="1", value__gt=value,
                        part__label=label1).order_by('value')

                elif "back" in request.POST:

                    query = UnitRelations.objects.filter(
                        axe="1", value__lt=value,
                        part__label=label1).order_by('-value')

                if query.count():
                    value = query.first().value
                    shape = query.first().part.shape

                else:
                    value = -1
                data1, data2 = tools.ushow(model["updf"], int(label1), 1,
                                           float(value))

        chart_title = tools.setTitle(label1, label2, relation)

    parts = Parts.objects.filter(shape=shape.id).values(
        'path', 'name', 'label')
    return render(
        request, 'exploration/index.html', {
            'data1': json.dumps(data1),
            'data2': json.dumps(data2),
            'chart_title': chart_title,
            'parts': json.dumps(list(parts)),
            'label1': label1,
            'label2': label2,
            'value': value,
            'shape': shape,
            'model': json.dumps(model["pdf"]),
            'relation': relation
        })
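The view above pages through relation values with the same "next/previous record by value" queryset pattern in four places (value__gt / value__lt plus order_by plus first). A condensed sketch of that idiom; the helper name and the generic qs argument are illustrative, not part of the project:

def step_by_value(qs, current, direction):
    """Return the neighbouring record ordered by `value`, or None at either end.

    qs        -- queryset already filtered to the relevant relations
    current   -- the current value (float); -1 is used above as "no selection"
    direction -- 'next' or 'back', mirroring the POST buttons in the view
    """
    if direction == 'next':
        return qs.filter(value__gt=current).order_by('value').first()
    return qs.filter(value__lt=current).order_by('-value').first()

# e.g. row = step_by_value(UnitRelations.objects.filter(axe="1", part__label=label1), value, 'next')
# if row is not None:
#     value, shape = row.value, row.part.shape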
Example #6
X[p] = np.min(X[p]) + (X[p] - np.min(X[p])) * 1.5
Y[p] = np.sin((X[p] - np.min(X[p])) / 1.5) * 2 + 2
Y[p] += np.random.uniform(-1, 1, X[p].shape) * .8
#Y[p] += np.random.normal(0, .1, X[p].shape)
X[p] += np.random.normal(0, .5, X[p].shape)
p = np.s_[n_samples // 3:2 * n_samples // 3]
X[p] += 7.5
X[p] = np.min(X[p]) + (X[p] - np.min(X[p])) * 2
Y[p] = np.sin(-np.pi * 1.3 / 2 + (X[p] - np.min(X[p])) * 1.5) * .5 + 5
Y[p] += np.random.normal(0, .05, X[p].shape)
p = np.s_[2 * n_samples // 3:-n_samples // 6]
X[p] += 10
X[p] = np.min(X[p]) + (X[p] - np.min(X[p])) / 2
Y[p] = np.sin((X[p] - np.min(X[p])) * 20) / 1.5 + 5.5
p = np.s_[-n_samples // 6:]
X[p] += 10.5
X[p] = np.min(X[p]) + (X[p] - np.min(X[p])) * 2
Y[p] = np.sin(-np.pi * 1.3 / 2 + (X[p] - np.min(X[p])) * 1.5) * -.5 + 5
Y[p] += np.random.normal(0, .05, X[p].shape)

plt.xlabel('$x$')
plt.ylabel('$y$')
plt.xlim([0, 15])
plt.ylim([0, 10])
plt.scatter(X, Y, marker='.', c='darkred', s=10)

show(plt, 'Toy 1D-dataset')
# %%

# TODO COULD DO SOME BAGGING HERE ALSO
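The repeated p = np.s_[...] assignments above store slice objects so the same index range can be reused across X and Y. A tiny self-contained illustration of that idiom:

import numpy as np

a = np.arange(12)
p = np.s_[4:8]      # np.s_ just builds a slice object: slice(4, 8, None)
a[p] = -1           # equivalent to a[4:8] = -1
print(p, a)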
Example #7
# @File: multi-class-ann.py

from blocks import Dense
from model import Model
import numpy as np
from tools import get_classification_data, show

rate = 1e-2  # Learning rate
epoch = 100  # Learning epochs
patience = 10  # Early stop patience

model = Model("ANN")
model.add(Dense(2, 8, "relu", name = "Relu-1"))
model.add(Dense(8, 16, "relu", name = "Relu-2"))
model.add(Dense(16, 4, "relu", name = "Relu-3"))
model.add(Dense(4, 3, "softmax", name = "Softmax"))

# Get data
train_x, test_x, train_y, test_y = get_classification_data(samples = 1000, features = 2,
                                                           classes = 3, sep = 1, random_state = 0)

if __name__ == '__main__':
    model.fit(train_x, train_y, epochs = epoch, loss_func = "cross entropy loss", learning_rate = rate,
              patience = patience)

    pred = model.predict(test_x)

    print("Accuracy: %.2f" % (np.sum(pred == test_y) / len(test_y) * 100) + "%")

    show(test_x, pred, test_y, "ANN")
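get_classification_data and show come from the project's own tools module. As a rough stand-in, comparable 2-feature, 3-class data can be generated and split with scikit-learn; this is an assumption about what the helper provides, not its actual implementation:

from sklearn.datasets import make_blobs
from sklearn.model_selection import train_test_split

# Roughly equivalent synthetic data: 1000 samples, 2 features, 3 classes.
x, y = make_blobs(n_samples=1000, n_features=2, centers=3, random_state=0)
train_x, test_x, train_y, test_y = train_test_split(x, y, test_size=0.25, random_state=0)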
Example #8
    def Signal_Show(self, event):
        # TODO: Implement Signal_Show
        tools.show([self.dz, self.dh, self.dw, self.de])
        pass
def expe_nn_decay(offset, scale, pow=1, zoomout=2,
                  cmap='coolwarm', K=3, force_title=None,
                  show_first=False, show_second=True):

    title = f'$d^{{{pow}}}, K={K}, offset={offset}, scale={scale}$'
    if force_title is not None:
        title = force_title

    def predict_proba(train, trainY, X):
        n = np.shape(X)[0]
        t0 = train[trainY == 0]
        d0 = np.sum((t0[:, None, :] - X[None, :, :]) ** 2, axis=2)
        if K == 1:
            d0 = np.min(d0, axis=0) ** 0.5
        else:
            d0 = np.mean(np.partition(d0, K-1, axis=0)[:K, :]**0.5, axis=0)
        t1 = train[trainY == 1]
        d1 = np.sum((t1[:, None, :] - X[None, :, :]) ** 2, axis=2)
        if K == 1:
            d1 = np.min(d1, axis=0) ** 0.5
        else:
            d1 = np.mean(np.partition(d1, K-1, axis=0)[:K, :]**0.5, axis=0)

        dmin = np.minimum(d0, d1)
        d0 /= dmin
        d1 /= dmin

        res = np.c_[d0, d1]
        res = offset+np.exp(-scale*res**pow)
        res /= np.sum(res, axis=1, keepdims=True)
        return res

    # predict_proba(X, Y, _r(2))

    #levels = np.linspace(0, 1, 12)
    levels = [0, *np.linspace(0.05, .95, 10), 1]
    plot_dataset()
    xmin, xmax = plt.xlim()
    ymin, ymax = plt.ylim()
    plt.clf()
    if show_first:
        xx, yy = np.meshgrid(np.linspace(xmin, xmax, 301),
                             np.linspace(ymin, ymax, 301))
        Z = predict_proba(X, Y, np.c_[xx.ravel(), yy.ravel()])[:, 1]
        Z = Z.reshape(xx.shape)
        plt.contourf(xx, yy, Z, cmap=cmap, levels=levels, alpha=0.7)
        # plt.colorbar()
        plt.contour(xx, yy, Z, linestyles='dotted', colors='k', levels=[0.5], linewidths=4, alpha=.5)
        plot_dataset()
        show(plt, title)

    # second plot
    plt.xlim([xmin - (xmax-xmin)/2*zoomout, xmax + (xmax-xmin)/2*zoomout])
    plt.ylim([ymin - (ymax-ymin)/2*zoomout, ymax + (ymax-ymin)/2*zoomout])
    xmin, xmax = plt.xlim()
    ymin, ymax = plt.ylim()
    plt.clf()
    xx, yy = np.meshgrid(np.linspace(xmin, xmax, 301),
                         np.linspace(ymin, ymax, 301))
    Z = predict_proba(X, Y, np.c_[xx.ravel(), yy.ravel()])[:, 1]
    Z = Z.reshape(xx.shape)
    plt.contourf(xx, yy, Z, cmap=cmap, levels=levels, alpha=0.8)
    #plt.contour(xx, yy, Z, colors='g', levels=levels, alpha=1)
    # plt.colorbar()
    plt.contour(xx, yy, Z, colors='k', levels=[0.5], linewidths=1, alpha=.7)
    plt.contour(xx, yy, Z, linestyles='dotted', colors='k', levels=[0.5], linewidths=4, alpha=.5)
    plot_dataset()
    if show_second:
        show(plt, f'{title} (zoomed out)')
    else:
        plt.clf()
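Inside predict_proba above, np.partition(d, K-1, axis=0)[:K, :] picks the K smallest per-column distances without fully sorting; the per-class distances are then divided by their pointwise minimum and scored as offset + exp(-scale * d**pow), renormalized so the two class scores sum to 1. A small standalone check of the partition trick (toy numbers, not the experiment's data):

import numpy as np

d = np.array([[5., 1.], [2., 4.], [9., 3.], [1., 8.]])   # rows = train points, cols = queries
K = 2
k_smallest = np.partition(d, K - 1, axis=0)[:K, :]        # K smallest per column (unordered)
print(np.mean(k_smallest, axis=0))                        # [1.5, 2.0]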
def explore_ensembling(baseclf, poly=None, n_estimators=25, max_samples=0.99999,
                       boundary_alpha=0.3, nicer=False,
                       l_levels=11, l_alpha=0.7,
                       meshgrid_n=301, cmap='coolwarm',  # 'Greys'
                       zoomout=4,
                       seed=42, X=X, Y=Y, more=''):

    clf_name = baseclf.__class__.__name__

    levels = np.linspace(0, 1, l_levels)
    def features(x): return x
    if poly is not None:
        polyfeats = PolynomialFeatures(degree=poly)
        def features(x): return polyfeats.fit_transform(x)
        clf_name += f',p={poly}'

    clf_name += more
    np.random.seed(seed)
    bagging = BaggingClassifier(baseclf, n_estimators=n_estimators, max_samples=max_samples)

    bagging.fit(features(X), Y)

    plot_dataset()
    xmin, xmax = plt.xlim()
    ymin, ymax = plt.ylim()
    plt.clf()
    xx, yy = np.meshgrid(np.linspace(xmin, xmax, meshgrid_n),
                         np.linspace(ymin, ymax, meshgrid_n))

    plot_dataset()
    for clf in bagging.estimators_:
        if nicer and hasattr(clf, 'coef_') and hasattr(clf, 'intercept_'):
            coef = clf.coef_
            intercept = clf.intercept_
            c = 0
            def line(x0): return (-(x0 * coef[c, 0]) - intercept[c]) / coef[c, 1]
            plt.plot([xmin, xmax], [line(xmin), line(xmax)], c='k', ls="-", alpha=boundary_alpha, lw=2)
        else:
            Z = clf.predict(features(np.c_[xx.ravel(), yy.ravel()]))
            Z = Z.reshape(xx.shape)
            plt.contour(xx, yy, Z, colors='k', levels=[0.5], alpha=boundary_alpha)

    plt.ylim([ymin, ymax])
    # plt.gca().patch.set_facecolor('#BBB')
    show(plt, f"All classifiers ({clf_name})")

    plot_dataset()
    xmin, xmax = plt.xlim()
    ymin, ymax = plt.ylim()
    plt.clf()
    xx, yy = np.meshgrid(np.linspace(xmin, xmax, meshgrid_n),
                         np.linspace(ymin, ymax, meshgrid_n))
    for iclf, clf in enumerate(bagging.estimators_[:3]+[bagging]):

        # Plot the decision boundary. For that, we will assign a color to each
        # point in the mesh [x_min, x_max]x[y_min, y_max].
        try:
            # use soft class probabilities when the estimator provides predict_proba
            Z = clf.predict_proba(features(np.c_[xx.ravel(), yy.ravel()]))[:, 1]
        except AttributeError:
            # otherwise fall back to hard 0/1 predictions
            Z = clf.predict(features(np.c_[xx.ravel(), yy.ravel()]))
        # Put the result into a color plot
        Z = Z.reshape(xx.shape)
        plt.contourf(xx, yy, Z, cmap=cmap, levels=levels, alpha=l_alpha)
        plt.contour(xx, yy, Z, colors='k', levels=[0.5], linewidths=1, alpha=.5)
        plt.contour(xx, yy, Z, linestyles='dotted', colors='k', levels=[0.5], linewidths=4, alpha=.5)
        plot_dataset()
        show(plt, f"{'Ensemble' if clf.__class__.__name__ == 'BaggingClassifier' else 'Estimator'+str(iclf)} ({clf_name})")
    if zoomout is not None:
        plt.xlim([xmin - (xmax-xmin)/2*zoomout, xmax + (xmax-xmin)/2*zoomout])
        plt.ylim([ymin - (ymax-ymin)/2*zoomout, ymax + (ymax-ymin)/2*zoomout])
        xmin, xmax = plt.xlim()
        ymin, ymax = plt.ylim()
        xx, yy = np.meshgrid(np.linspace(xmin, xmax, meshgrid_n),
                             np.linspace(ymin, ymax, meshgrid_n))
        try:
            # use soft class probabilities when the estimator provides predict_proba
            Z = clf.predict_proba(features(np.c_[xx.ravel(), yy.ravel()]))[:, 1]
        except AttributeError:
            # otherwise fall back to hard 0/1 predictions
            Z = clf.predict(features(np.c_[xx.ravel(), yy.ravel()]))
        Z = Z.reshape(xx.shape)
        plt.contourf(xx, yy, Z, cmap=cmap, levels=levels, alpha=l_alpha)
        plt.contour(xx, yy, Z, colors='k', levels=[0.5], linewidths=1, alpha=.5)
        plt.contour(xx, yy, Z, linestyles='dotted', colors='k', levels=[0.5], linewidths=4, alpha=.5)
        plot_dataset()
        show(plt, f"{'Ensemble' if clf.__class__.__name__ == 'BaggingClassifier' else 'Estimator'+str(iclf)} (zoomed out) ({clf_name})")
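explore_ensembling relies on plot_dataset, X, Y and show defined elsewhere in the notebook. Stripped of the plotting, the bagging part reduces to the following sketch; the toy data here is an assumption, only the BaggingClassifier usage mirrors the code above:

import numpy as np
from sklearn.ensemble import BaggingClassifier
from sklearn.tree import DecisionTreeClassifier

rng = np.random.RandomState(42)
X_toy = rng.randn(200, 2)
Y_toy = (X_toy[:, 0] + X_toy[:, 1] > 0).astype(int)

# Same pattern as above: many resampled copies of a base classifier.
bagging = BaggingClassifier(DecisionTreeClassifier(max_depth=3),
                            n_estimators=25, max_samples=0.99999)
bagging.fit(X_toy, Y_toy)

proba = bagging.predict_proba(X_toy)[:, 1]   # averaged over bagging.estimators_
print(len(bagging.estimators_), proba[:5])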