def margin(self, data, weight):
    # Distances of correctly classified points (functional margin > 1)
    # to the separating hyperplane, tracked separately per class.
    posi_margin = float('inf')
    nege_margin = float('-inf')
    for y_i, x_i in data:
        if y_i * dot_prod(weight, x_i) > 1:
            # Signed distance from x_i to the hyperplane defined by weight.
            dis = dot_prod(weight, x_i) / pow(dot_prod(weight, weight), 0.5)
            if y_i == -1:
                nege_margin = max(nege_margin, dis)
            else:
                posi_margin = min(posi_margin, dis)
    return posi_margin, nege_margin
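# The functions in this section call a few small vector helpers (dot_prod,
# scale_vector, verctor_add, spelled as at their call sites) that are not
# shown here. A minimal sketch of what they are assumed to do, operating on
# plain Python sequences; the real project may implement them differently.
def dot_prod(u, v):
    # Inner product of two equal-length vectors.
    return sum(u_i * v_i for u_i, v_i in zip(u, v))

def scale_vector(v, s):
    # Multiply every component of v by the scalar s.
    return [s * v_i for v_i in v]

def verctor_add(u, v):
    # Component-wise sum of two equal-length vectors.
    return [u_i + v_i for u_i, v_i in zip(u, v)]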
def calulate_deltas_for_hidden_layer(self, next_layer):
    for index, neuron in enumerate(self.neurons):
        # Weights in the next layer that multiply this neuron's output.
        next_weights = [n.weights[index] for n in next_layer.neurons]
        next_deltas = [n.delta for n in next_layer.neurons]
        # Backpropagated error: f'(z) * sum_k(w_k * delta_k).
        sum_weights_and_deltas = dot_prod(next_weights, next_deltas)
        neuron.delta = neuron.derivative_activation_function(
            neuron.output_cache) * sum_weights_and_deltas
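# The hidden-layer deltas above assume the next layer's deltas are already
# filled in. A hedged sketch of the usual output-layer counterpart for a
# squared-error loss; the error term and sign convention are assumptions,
# not taken from this section.
def calculate_deltas_for_output_layer(self, expected):
    for index, neuron in enumerate(self.neurons):
        actual = neuron.activation_function(neuron.output_cache)
        neuron.delta = neuron.derivative_activation_function(
            neuron.output_cache) * (expected[index] - actual)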
def f(x, y):
    # Map the reference parameters x, y onto the two segments [a1, a2] and
    # [b1, b2], then project them onto the surface.
    xhat = space_proj(a1 + (a2 - a1) * x)
    yhat = space_proj(b1 + (b2 - b1) * y)
    # Kernel value times the normal factor and the temporal quadrature term.
    return (fk2(x, y)
            * dot_prod(space_proj.normal(yhat), xhat - yhat)
            * timeQuadN(xhat - yhat, endT, basis, **kwargs).reshape(-1, 1))
def test(self, data, weight):
    mistake = 0
    for y_i, x_i in data:
        if y_i * dot_prod(weight, x_i) <= 1:
            mistake += 1
    return 1 - float(mistake) / len(data)
def get_furthest(data):
    # Largest Euclidean norm over all feature vectors, skipping the first
    # component of x_i (the constant/bias coordinate).
    max_dis = float('-inf')
    for y_i, x_i in data:
        dis = pow(dot_prod(x_i[1:], x_i[1:]), 0.5)
        max_dis = max(max_dis, dis)
    return max_dis
def update_weight(self, weight, x_i, y_i, r_t, c):
    # Shrink the current weight vector (regularization step).
    base = scale_vector(weight, 1 - r_t)
    penalty = [0 for _ in base]
    # Apply the hinge-loss gradient only when the margin is violated.
    if y_i * dot_prod(weight, x_i) <= 1:
        penalty = scale_vector(x_i, r_t * c * y_i)
    return verctor_add(penalty, base)
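# Hedged sketch: one way a stochastic (Pegasos-style) training loop could
# drive update_weight and test. The learning-rate schedule r_t = 1 / t, the
# epoch count, and the value of c are illustrative assumptions, not taken
# from this section.
def train(self, data, epochs=10, c=1.0):
    weight = [0.0 for _ in data[0][1]]   # one weight per feature in x_i
    t = 0
    for _ in range(epochs):
        for y_i, x_i in data:
            t += 1
            r_t = 1.0 / t                # decaying step size (assumption)
            weight = self.update_weight(weight, x_i, y_i, r_t, c)
    return weight, self.test(data, weight)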
from typing import List

def output(self, inputs: List[float]) -> float:
    # Cache the pre-activation value so the backward pass can reuse it.
    self.output_cache = dot_prod(inputs, self.weights)
    return self.activation_function(self.output_cache)
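# Hedged usage sketch: a minimal Neuron holding weights and a sigmoid
# activation, using the same output()/output_cache convention as above.
# The constructor and the sigmoid helpers are illustrative assumptions.
import math

def sigmoid(x: float) -> float:
    return 1.0 / (1.0 + math.exp(-x))

def derivative_sigmoid(x: float) -> float:
    s = sigmoid(x)
    return s * (1.0 - s)

class Neuron:
    def __init__(self, weights: List[float]) -> None:
        self.weights = weights
        self.activation_function = sigmoid
        self.derivative_activation_function = derivative_sigmoid
        self.output_cache: float = 0.0
        self.delta: float = 0.0

    def output(self, inputs: List[float]) -> float:
        self.output_cache = dot_prod(inputs, self.weights)
        return self.activation_function(self.output_cache)

neuron = Neuron([0.5, -0.25])
print(neuron.output([1.0, 2.0]))  # sigmoid(0.5*1 - 0.25*2) = sigmoid(0.0) = 0.5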