Example #1
def squared_clustering_errors(inputs, k):
    """finds the total squared error from k-means clustering the inputs"""
    clusterer = KMeans(k)
    clusterer.train(inputs)
    means = clusterer.means
    assignments = map(clusterer.classify, inputs)

    return sum(squared_distance(input_, means[cluster])
               for input_, cluster in zip(inputs, assignments))
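The snippet assumes a `squared_distance` helper and a `KMeans` class defined elsewhere. A minimal sketch of both, assuming plain-list vectors (the `KMeans` internals here are illustrative, not necessarily the implementation these examples were written against):

import random

def squared_distance(v, w):
    """sum of squared coordinate-wise differences between two vectors"""
    return sum((v_i - w_i) ** 2 for v_i, w_i in zip(v, w))

class KMeans:
    """simple k-means clusterer over lists of numbers"""
    def __init__(self, k):
        self.k = k           # number of clusters
        self.means = None    # cluster centers, set by train()

    def classify(self, input_):
        """return the index of the cluster closest to the input"""
        return min(range(self.k),
                   key=lambda i: squared_distance(input_, self.means[i]))

    def train(self, inputs):
        self.means = random.sample(inputs, self.k)   # k random points as initial means
        assignments = None
        while True:
            new_assignments = [self.classify(input_) for input_ in inputs]
            if new_assignments == assignments:       # assignments stopped changing: done
                return
            assignments = new_assignments
            for i in range(self.k):                  # move each mean to its cluster's centroid
                points = [p for p, a in zip(inputs, assignments) if a == i]
                if points:
                    self.means[i] = [sum(coord) / len(points)
                                     for coord in zip(*points)]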
Example #2
def squared_clustering_errors(inputs, k):
    clusterer = KMeans(k)
    clusterer.train(inputs)
    means = clusterer.means
    assignments = list(map(clusterer.classify, inputs))

    return sum(
        squared_distance(input_, means[cluster])
        for input_, cluster in zip(inputs, assignments))
Example #3
def squared_clustering_errors(inputs, k):
    """finds the total squared error from k-means clustering the inputs"""
    clusterer = KMeans(k)
    clusterer.train(inputs)
    means = clusterer.means
    assignments = map(clusterer.classify, inputs)

    return sum(squared_distance(input_, means[cluster])
               for input_, cluster in zip(inputs, assignments))

# now plot from 1 up to len(inputs) clusters
# (this was indented inside the function after the return, where it could
# never run; it is caller code and belongs at top level)
ks = range(1, len(inputs) + 1)
errors = [squared_clustering_errors(inputs, k) for k in ks]
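The fragment stops before the actual plot. A plausible continuation with matplotlib (labels are illustrative):

import matplotlib.pyplot as plt

plt.plot(ks, errors)    # one point per choice of k
plt.xticks(ks)
plt.xlabel("k")
plt.ylabel("total squared error")
plt.title("Total Error vs. # of Clusters")
plt.show()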
Example #4
import math

def squared_clustering_errors_pixels(inputs, inputs2):
    """total squared error between two images, comparing the length
    (Euclidean norm) of each pixel's RGB vector"""
    orig_norms = []
    new_norms = []
    for i in range(462):        # image height, hardcoded in the original
        for j in range(315):    # image width, hardcoded in the original
            sum1 = inputs[i, j, 0] ** 2 + inputs[i, j, 1] ** 2 + inputs[i, j, 2] ** 2
            sum2 = inputs2[i, j, 0] ** 2 + inputs2[i, j, 1] ** 2 + inputs2[i, j, 2] ** 2
            orig_norms.append(math.sqrt(sum1))
            new_norms.append(math.sqrt(sum2))

    # squared_distance expects vectors (see the test in the last example),
    # so compare the scalar norms directly
    return sum((a - b) ** 2 for a, b in zip(orig_norms, new_norms))
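The `inputs[i, j, 0]` indexing suggests NumPy arrays. If so, the same computation vectorizes; a sketch assuming both images share the hardcoded 462x315x3 shape:

import numpy as np

def squared_clustering_errors_pixels_np(inputs, inputs2):
    # Euclidean norm of each pixel's RGB vector, per image
    orig = np.sqrt((np.asarray(inputs, dtype=float) ** 2).sum(axis=2))
    new = np.sqrt((np.asarray(inputs2, dtype=float) ** 2).sum(axis=2))
    # total squared error between the two norm maps
    return float(((orig - new) ** 2).sum())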
Example #5
 def classify(self, input):
     """return the index of the cluster closest to the input"""
     return min(range(self.k),
                key=lambda i: squared_distance(input, self.means[i]))
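The `min(range(self.k), key=...)` call is a compact argmin over indices. The same idiom in isolation:

def argmin(xs):
    """return the index of the smallest element"""
    return min(range(len(xs)), key=lambda i: xs[i])

assert argmin([7.0, 2.0, 5.0]) == 1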
Example #6
    network = [
        # hidden_layer: 10 inputs -> NUM_HIDDEN outputs
        # (opening lines reconstructed; a 10-digit binary input encoding is assumed)
        [[random.random() for _ in range(10 + 1)] for _ in range(NUM_HIDDEN)],
        # output_layer: NUM_HIDDEN inputs -> 4 outputs
        [[random.random() for _ in range(NUM_HIDDEN + 1)] for _ in range(4)]
    ]

    pprint("")
    pprint(network)

    learning_rate = 1.0

    with tqdm.trange(500) as t:
        for epoch in t:
            epoch_loss = 0.0

            for x, y in zip(xs, ys):
                predicted = feed_forward(network, x)[-1]
                epoch_loss += squared_distance(predicted, y)
                gradients = sqerror_gradients(network, x, y)

                # Take a gradient step for each neuron in each layer
                network = [[
                    gradient_step(neuron, grad, -learning_rate)
                    for neuron, grad in zip(layer, layer_grad)
                ] for layer, layer_grad in zip(network, gradients)]

            t.set_description(f"fizz buzz (loss: {epoch_loss:.2f})")

    assert argmax([0, -1]) == 0  # items[0] is largest
    assert argmax([-1, 0]) == 1  # items[1] is largest
    assert argmax([-1, 10, 5, 20, -3]) == 3  # items[3] is largest

    num_correct = 0
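The asserts above exercise an `argmax` helper that the fragment never defines. A definition consistent with them:

def argmax(xs):
    """return the index of the largest value"""
    return max(range(len(xs)), key=lambda i: xs[i])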
Example #7
 def classify(self, input):
     return min(range(self.k), key=lambda i: squared_distance(input, self.means[i]))
Example #8
 def test_squared_distance(self):
     self.assertEqual(8, squared_distance([4, 3], [2, 1]))
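The method is a fragment of a `unittest.TestCase`. Wrapped so it runs on its own (the class name is made up; `squared_distance` is the helper sketched under Example #1):

import unittest

def squared_distance(v, w):
    return sum((v_i - w_i) ** 2 for v_i, w_i in zip(v, w))

class TestDistance(unittest.TestCase):
    def test_squared_distance(self):
        # (4 - 2)**2 + (3 - 1)**2 == 4 + 4 == 8
        self.assertEqual(8, squared_distance([4, 3], [2, 1]))

if __name__ == "__main__":
    unittest.main()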