def de_mean(A):
    """Return a matrix like A in which every entry has had the mean of
    its column subtracted, so each column of the result has mean zero."""
    n_rows, n_cols = Ch4.shape(A)
    # scale() yields (column means, column stds); only the means are needed
    means, _ = scale(A)

    def centered_entry(i, j):
        # original entry minus its column's mean
        return A[i][j] - means[j]

    return Ch4.make_matrix(n_rows, n_cols, centered_entry)
def de_mean(A):
    """Subtract from each value of A the mean of the column it sits in,
    returning the resulting (column-centered) matrix."""
    shape_rows, shape_cols = Ch4.shape(A)
    column_means = scale(A)[0]  # scale() returns (means, stds); keep the means

    return Ch4.make_matrix(
        shape_rows,
        shape_cols,
        lambda r, c: A[r][c] - column_means[c],
    )
Example #3
0
    plt.ylabel('actual')

    # Compute beta_hat by SGD #
    ###########################
    # pick a random initial beta (constant, beta1*experience, beta2*salary)
    beta_0 = [random.random() for _ in range(3)]
    beta_hat = maximize_stochastic(logistic_log_likelihood_i,
                                   logistic_log_gradient_i, x_train, y_train,
                                   beta_0)

    print "beta_hat", beta_hat

    # Transform beta_hat back to unscaled variables #
    #################################################
    # get the means and stds of const, years, experience cols in data
    means_x, stds_x = scale(x)

    # beta_i i!=0 has the following transform beta_i = beta_i_scaled/sigma_i
    # and beta_0 is
    beta_hat_unscaled = [
        beta_hat[0], beta_hat[1] / stds_x[1], beta_hat[2] / stds_x[2]
    ]
    print "beta_hat_unscaled", beta_hat_unscaled

    # Fit Quality #
    ###############
    # Examine the test data
    true_positives = false_positives = true_negatives = false_negatives = 0

    for x_i, y_i in zip(x_test, y_test):
        # For the test data get a prediction for y. This will be a
    plt.ylabel('actual')

    # Compute beta_hat by SGD #
    ###########################
    # pick a random initial beta (constant, beta1*experience, beta2*salary)
    beta_0 = [random.random() for _ in range(3)]
    beta_hat = maximize_stochastic(logistic_log_likelihood_i,
                                   logistic_log_gradient_i,
                                   x_train, y_train, beta_0)
   
    print "beta_hat", beta_hat

    # Transform beta_hat back to unscaled variables #
    #################################################
    # get the means and stds of const, years, experience cols in data
    means_x, stds_x = scale(x)

    # beta_i i!=0 has the following transform beta_i = beta_i_scaled/sigma_i
    # and beta_0 is 
    beta_hat_unscaled =[beta_hat[0],
                        beta_hat[1]/stds_x[1], 
                        beta_hat[2]/stds_x[2]]
    print "beta_hat_unscaled", beta_hat_unscaled
    
    # Fit Quality #
    ###############
    # Examine the test data
    true_positives = false_positives = true_negatives = false_negatives = 0

    for x_i, y_i in zip(x_test, y_test):
        # For the test data get a prediction for y. This will be a