Example #1
def one_vs_all(_X, y, num_labels, lambda_):
    """Train num_labels regularized one-vs-all logistic classifiers."""
    m, n = _X.shape
    X = np.hstack((np.ones((m, 1)), _X))  # prepend the intercept column

    all_theta = []
    for c in range(num_labels):
        if c == 0:
            c = 10  # class 0 is stored as label 10 in the underlying data set

        def costf(theta):
            return logistic_cost_function(theta, X, y == c, lambda_)

        def gradf(theta):
            return logistic_grad_function(theta, X, y == c, lambda_)

        maxiter = 50
        initial_theta = np.zeros(n + 1)
        # Newton-CG; retall=1 also returns the sequence of intermediate thetas.
        theta, allvec = opt.fmin_ncg(costf,
                                     initial_theta,
                                     gradf,
                                     maxiter=maxiter,
                                     retall=1,
                                     callback=step())
        #        Jhist = map(costf, allvec)
        #        pl.plot(Jhist)
        #        pl.show()
        all_theta.append(theta)
    return np.vstack(all_theta)
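
Presumably this is used on the ex3 digit data: the `c = 10` rebinding suggests the Coursera convention where the digit 0 is stored as label 10, so row 0 of the returned matrix is the classifier trained against label 10. The helper below is only a hypothetical sketch of how those parameters could be turned into predictions; `predict_one_vs_all` and the label convention are assumptions, not part of the example above.

import numpy as np

# Hypothetical companion to one_vs_all above; assumes num_labels == 10 and the
# ex3 convention where the digit 0 is stored as label 10.
def predict_one_vs_all(all_theta, _X):
    m = _X.shape[0]
    X = np.hstack((np.ones((m, 1)), _X))  # same intercept column as in training
    scores = X.dot(all_theta.T)           # one score per class and example
    pred = np.argmax(scores, axis=1)      # index of the winning classifier
    pred[pred == 0] = 10                  # row 0 was trained against label 10
    return pred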
Example #2
    def on_mouse_motion(self, start, end, buttons, start_hovered, end_hovered):
        if buttons[0] == buttons[2]:
            return  # neither or both of left/right pressed: nothing to do
        # Translate the pixel position into a grid cell.
        row = int((end[1] - self._bw) // (self._ts + self._lw))
        col = int((end[0] - self._bw) // (self._ts + self._lw))
        if not (0 <= row < self.nrows and 0 <= col < self.ncols) or (row, col) == self._previous:
            return  # outside the grid, or still on the previously handled cell
        current = row, col
        if buttons[0]:
            # Left button: clear the cell first if it holds a different color or mark.
            if self.color != self.at(current).color \
                    or self.mark != self.at(current).mark:
                self.erase(current)
                self._visited.discard(current)

            if self.connection_mode == Grid.TREE:
                # TREE: only connect when the current cell is new.
                if current not in self._visited:
                    adj = util.adjacency(current, self._previous)
                    self.connect(self._previous, adj)

            elif self.connection_mode == Grid.BLOB:
                # BLOB: connect the current cell to every visited neighbour.
                for direction in (NORTH, WEST, SOUTH, EAST):
                    if util.step(current, direction) in self._visited:
                        self.connect(current, direction)

            elif self.connection_mode == Grid.TRACE:
                # TRACE: always connect from the previously handled cell.
                self.connect(self._previous, util.adjacency(current, self._previous))

            self.put(current, self.color, self.mark)
            self._visited.add(current)
        else:
            # Right button: erase the cell.
            self.erase(current)
            self._visited.discard(current)
        self._previous = current
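
Examples #2 and #3 both rely on direction constants and a `util` module that are not shown. The stand-ins below are a minimal sketch of how they are assumed to behave (bit-flag directions, `util.step` returning the neighbouring (row, col) cell, `util.adjacency` returning the direction from one cell towards an adjacent one); they are guesses for illustration, not the project's actual code.

# Hypothetical stand-ins for the direction constants and util helpers used above.
# Bit flags, so Example #3 can test them with `connections & direction`.
NORTH, WEST, SOUTH, EAST = 1, 2, 4, 8

_OFFSETS = {NORTH: (-1, 0), WEST: (0, -1), SOUTH: (1, 0), EAST: (0, 1)}

def step(cell, direction):
    """Return the (row, col) neighbour of cell in the given direction."""
    dr, dc = _OFFSETS[direction]
    return cell[0] + dr, cell[1] + dc

def adjacency(cell, other):
    """Return the direction that points from other towards the adjacent cell."""
    for direction, (dr, dc) in _OFFSETS.items():
        if (other[0] + dr, other[1] + dc) == cell:
            return direction
    return None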
Example #3
def flood_set(grid, p):
    """Return every cell reachable from p by following recorded connections."""
    queue = [p]          # used as a stack: cells still to expand
    visited = set()
    while queue:
        q = queue.pop()
        visited.add(q)
        for direction in (NORTH, WEST, SOUTH, EAST):
            adj = util.step(q, direction)
            # Follow the connection only if it exists and leads somewhere new.
            if grid.at(q).connections & direction and adj not in visited:
                queue.append(adj)
    return visited
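
A minimal usage sketch for flood_set, assuming a `util` module behaving like the stand-ins sketched after Example #2; `FakeGrid` and `Cell` exist only for this illustration and are not part of the original project.

from collections import namedtuple

Cell = namedtuple('Cell', 'connections')

class FakeGrid:
    """Toy grid exposing only the at() method that flood_set needs."""
    def __init__(self, cells):
        self._cells = cells                 # {(row, col): Cell}
    def at(self, p):
        return self._cells.get(p, Cell(0))  # unconnected cell by default

# (0, 0) is connected eastwards to (0, 1); nothing else is connected.
cells = {(0, 0): Cell(EAST), (0, 1): Cell(0)}
print(flood_set(FakeGrid(cells), (0, 0)))   # {(0, 0), (0, 1)}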
Example #4
def ex2_reg():
    data = load_txt(os.path.join(ex2path, 'ex2data2.txt'))
    _X = data[:, :2]
    y = data[:, 2]

    #    plot_data(_X, y)
    #    pl.xlabel('Microchip Test 1')
    #    pl.ylabel('Microchip Test 2')
    #    pl.legend(['y=1', 'y=0'])

    # Expand the two input features into polynomial terms.
    X = map_feature(_X[:, 0], _X[:, 1])

    initial_theta = np.zeros(X.shape[1])

    lambda_ = 0
    print('cost at initial theta (zeros):', cost_function(
        initial_theta, X, y, lambda_)[0])

    # Optional feature normalization (left disabled)
    #X_norm, mu, sigma = feature_normalize(X[:,1:])
    #X = np.hstack((np.ones((m, 1)), X_norm))

    def costf(theta):
        return logistic_cost_function(theta, X, y, lambda_)

    def difff(theta):
        return logistic_grad_function(theta, X, y, lambda_)

    maxiter = 50
    theta, allvec = opt.fmin_ncg(costf,
                                 initial_theta,
                                 difff,
                                 retall=1,
                                 maxiter=maxiter,
                                 callback=step())
    #    theta, allvec = opt.fmin_bfgs(costf, initial_theta, difff, retall=1, maxiter=maxiter, callback=step())
    print('optimal cost:', costf(theta))

    # Plot the cost at each iterate, then the fitted decision boundary.
    Jhist = [costf(t) for t in allvec]
    pl.figure()
    pl.plot(Jhist)
    plot_decision_boundary(theta, X, y)

    # Training-set accuracy: predict 1 whenever the linear score is positive.
    h = np.dot(X, theta)
    print('Train Accuracy:', ((h > 0) == y).mean() * 100)
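
Both Example #1 and Example #4 pass `logistic_cost_function` and `logistic_grad_function` to `opt.fmin_ncg` without showing them. The sketch below is only an assumption of what they compute, following the standard regularized logistic regression cost with the bias term left out of the penalty; the actual implementations used by these examples are not shown here.

import numpy as np

def sigmoid(z):
    return 1.0 / (1.0 + np.exp(-z))

# Assumed regularized logistic cost:
#   J = (1/m) * sum(-y*log(h) - (1-y)*log(1-h)) + (lambda/(2m)) * sum(theta[1:]**2)
def logistic_cost_function(theta, X, y, lambda_):
    m = y.size
    h = sigmoid(X.dot(theta))
    reg = (lambda_ / (2.0 * m)) * np.sum(theta[1:] ** 2)
    return (-y.dot(np.log(h)) - (1 - y).dot(np.log(1 - h))) / m + reg

# Assumed gradient of the cost above; the bias component is not regularized.
def logistic_grad_function(theta, X, y, lambda_):
    m = y.size
    h = sigmoid(X.dot(theta))
    grad = X.T.dot(h - y) / m
    grad[1:] += (lambda_ / m) * theta[1:]
    return grad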