# hw7.py -- Coding the Matrix, homework 7 (153 lines, 4.61 KB)
# (scrape residue removed: duplicated filename header and line-number gutter)
# version code 1049
# Please fill out this stencil and submit using the provided submission script.
from mat import Mat
from mat import mat2rowdict
from mat import transpose
from math import pow
from matutil import listlist2mat
from orthogonalization import orthogonalize
from QR import factor
from solver import solve
from triangular import triangular_solve
from vec import Vec
from vecutil import list2vec
## Problem 1
def basis(vlist):
    '''
    Input:
        - vlist: a list of Vecs
    Output:
        - a list of linearly independent Vecs with equal span to vlist
    '''
    # Orthogonalizing vlist produces a (near-)zero starred vector exactly for
    # each input vector that depends linearly on its predecessors; keeping
    # only starred vectors with non-negligible squared norm yields an
    # independent set with the same span.
    independent = []
    for starred in orthogonalize(vlist):
        if starred * starred > pow(10, -20):
            independent.append(starred)
    return independent
## Problem 2
def subset_basis(vlist):
    '''
    Input:
        - vlist: a list of Vecs
    Output:
        - linearly independent subset of vlist with the same span as vlist
    '''
    # The i-th starred vector from orthogonalize is (near) zero exactly when
    # vlist[i] depends on the earlier vectors.  Pair each original vector with
    # its starred counterpart and keep the originals whose starred vector has
    # non-negligible squared norm.
    starred_list = orthogonalize(vlist)
    return [original for original, starred in zip(vlist, starred_list)
            if starred * starred > pow(10, -20)]
## Problem 3
def orthogonal_vec2rep(Q, b):
    '''
    Input:
        - Q: an orthogonal Mat
        - b: Vec whose domain equals the column-label set of Q.
    Output:
        - The coordinate representation of b in terms of the rows of Q.
    Example:
        >>> Q = Mat(({0, 1}, {0, 1}), {(0, 1): 0, (1, 0): 0, (0, 0): 2, (1, 1): 2})
        >>> b = Vec({0, 1},{0: 4, 1: 2})
        >>> orthogonal_vec2rep(Q, b) == Vec({0, 1},{0: 8, 1: 4})
        True
    '''
    # Q is orthogonal, so its inverse equals its transpose; the coordinate
    # representation of b over Q's rows is therefore b * Q^T.
    Qt = transpose(Q)
    return b * Qt
## Problem 4
def orthogonal_change_of_basis(A, B, a):
    '''
    Input:
        - A: an orthogonal Mat
        - B: an orthogonal Mat whose column labels are the row labels of A
        - a: the coordinate representation in terms of rows of A of some vector v
    Output:
        - the Vec b such that b is the coordinate representation of v in terms of columns of B
    Example:
        >>> A = Mat(({0, 1, 2}, {0, 1, 2}), {(0, 1): 0, (1, 2): 0, (0, 0): 1, (2, 0): 0, (1, 0): 0, (2, 2): 1, (0, 2): 0, (2, 1): 0, (1, 1): 1})
        >>> B = Mat(({0, 1, 2}, {0, 1, 2}), {(0, 1): 0, (1, 2): 0, (0, 0): 2, (2, 0): 0, (1, 0): 0, (2, 2): 2, (0, 2): 0, (2, 1): 0, (1, 1): 2})
        >>> a = Vec({0, 1, 2},{0: 4, 1: 1, 2: 3})
        >>> orthogonal_change_of_basis(A, B, a) == Vec({0, 1, 2},{0: 8, 1: 2, 2: 6})
        True
    '''
    # First recover v itself from its representation over A's rows (v = a*A),
    # then re-express v over B's columns.  Because B is orthogonal, its
    # inverse equals its transpose.
    v = a * A
    return transpose(B) * v
## Problem 5
def orthonormal_projection_orthogonal(W, b):
    '''
    Input:
        - W: Mat whose rows are orthonormal
        - b: Vec whose labels are equal to W's column labels
    Output:
        - The projection of b orthogonal to W's row space.
    Example:
        >>> W = Mat(({0, 1}, {0, 1, 2}), {(0, 1): 0, (1, 2): 0, (0, 0): 1, (1, 0): 0, (0, 2): 0, (1, 1): 1})
        >>> b = Vec({0, 1, 2},{0: 3, 1: 1, 2: 4})
        >>> orthonormal_projection_orthogonal(W, b) == Vec({0, 1, 2},{0: 0, 1: 0, 2: 4})
        True
    '''
    # Each row of W has unit length, so the coefficient of b along a row is
    # just <b, row> -- no division by <row, row> is needed.  b * W^T collects
    # those coefficients and multiplying by W rebuilds the projection of b
    # onto W's row space; subtracting it leaves the orthogonal component.
    projection = (b * transpose(W)) * W
    return b - projection
## Problem 6
# Write your solution for this problem in orthonormalization.py.
## Problem 7
# Write your solution for this problem in orthonormalization.py.
## Problem 8
# Please give each solution as a Vec
# Given QR factorizations (A = QR) and right-hand sides for two small
# least-squares problems.  Each x_hat solves R*x = Q^T * b, which minimizes
# norm(b - A*x) because Q's columns are orthonormal.
least_squares_A1 = listlist2mat([[8, 1], [6, 2], [0, 6]])
least_squares_Q1 = listlist2mat([[0.8, -0.099], [0.6, 0.132], [0, 0.986]])
least_squares_R1 = listlist2mat([[10, 2], [0, 6.08]])
least_squares_b1 = list2vec([10, 8, 6])
x_hat_1 = solve(least_squares_R1, transpose(least_squares_Q1) * least_squares_b1)

least_squares_A2 = listlist2mat([[3, 1], [4, 1], [5, 1]])
least_squares_Q2 = listlist2mat([[0.424, 0.808], [0.566, 0.115], [0.707, -0.577]])
least_squares_R2 = listlist2mat([[7.07, 1.7], [0, 0.346]])
least_squares_b2 = list2vec([10, 13, 15])
x_hat_2 = solve(least_squares_R2, transpose(least_squares_Q2) * least_squares_b2)
## Problem 9
def QR_solve(A, b):
    '''
    Input:
        - A: a Mat
        - b: a Vec
    Output:
        - vector x that minimizes norm(b - A*x)
    Example:
        >>> domain = ({'a','b','c'},{'A','B'})
        >>> A = Mat(domain,{('a','A'):-1, ('a','B'):2,('b','A'):5, ('b','B'):3,('c','A'):1,('c','B'):-2})
        >>> Q, R = factor(A)
        >>> b = Vec(domain[0], {'a': 1, 'b': -1})
        >>> x = QR_solve(A, b)
        >>> result = A.transpose()*(b-A*x)
        >>> result * result < 1E-10
        True
    '''
    Q, R = factor(A)
    # Coordinates of b's projection onto Col A in terms of Q's orthonormal columns.
    c = Q.transpose() * b
    Rdict = mat2rowdict(R)
    # Sort the row labels explicitly: R.D[0] is a set, so iterating the
    # row dict directly gives an arbitrary order, while triangular_solve
    # requires the rows in triangular order matching the sorted label list.
    Rrows = [Rdict[key] for key in sorted(Rdict, key=repr)]
    labels = sorted(A.D[1], key=repr)
    return triangular_solve(Rrows, labels, c)