# coding: utf-8
# # Solving Least-Squares Problems
# In[24]:
#keep
import numpy as np
import numpy.linalg as la
import scipy.linalg as spla
# In[25]:
#keep
m = 6  # number of rows (equations)
n = 4  # number of columns (unknowns); m > n makes the system overdetermined
A = np.random.randn(m, n)  # random 6x4 matrix with standard-normal entries
b = np.random.randn(m)     # random right-hand side of length m
# Let's try solving that as a linear system using `la.solve`:
# In[26]:
# NOTE: A is 6x4 (not square), and la.solve requires a square matrix, so this
# raises LinAlgError -- that is the point of the demonstration.
la.solve(A, b)
# OK, let's do QR-based least-squares then.
# In[27]:
# numpy.linalg.qr defaults to mode="reduced": Q is (m, n), R is (n, n).
Q, R = la.qr(A)
# What did we get? Full QR or reduced QR?
# In[28]:
#keep
Q.shape  # (m, n) = (6, 4): reduced QR (a full/complete Q would be (6, 6))
# In[29]:
#keep
R.shape  # (n, n) = (4, 4): reduced QR (a full/complete R would be (6, 4))
# Is that a problem?
# * Do we really need the bottom part of $R$? (A bunch of zeros)
# * Do we really need the far right part of $Q$? (=the bottom part of $Q^T$)
#
# -----------------
# OK, so find the minimizing $x$:
# In[39]:
# Least-squares solve via back-substitution: R x = Q^T b with R upper triangular.
projected_rhs = Q.T.dot(b)
x = spla.solve_triangular(R, projected_rhs, lower=False)
# We predicted that $\|Ax-b\|_2$ would be the same as $\|Rx-Q^Tb\|_2$:
# In[45]:
# 2-norm of the least-squares residual (ord=2 is la.norm's default for vectors).
la.norm(A.dot(x) - b)
# In[47]:
# Residual of the triangular system -- compare against ||Ax - b|| above.
triangular_residual = R.dot(x) - Q.T.dot(b)
la.norm(triangular_residual)
# --------------
# Heh--*reduced* QR left out the right half of Q. Let's try again with complete QR:
# In[59]:
#keep
# mode="complete": Q2 is a full (m, m) orthogonal matrix; R2 is (m, n) with
# m - n rows of zeros at the bottom.
Q2, R2 = la.qr(A, mode="complete")
# In[60]:
#keep
# BUG FIX: this cell is meant to redo the solve with the *complete* QR
# factors (Q2, R2) computed above, but the original reused the reduced-mode
# R and Q. Use the top n rows of R2 (the rest are zeros) and the first n
# rows of Q2^T, i.e. the first n columns of Q2.
x2 = spla.solve_triangular(R2[:n], Q2.T[:n].dot(b), lower=False)
# In[63]:
#keep
# NOTE(review): possibly `x2` was intended here (the cell compares against the
# complete-QR residual below); x and x2 solve the same problem, so the value
# matches either way -- confirm intent.
la.norm(A.dot(x)-b, 2)
# In[64]:
#keep
# With complete QR, Q2 is square orthogonal, so ||R2 x2 - Q2^T b|| equals
# ||A x2 - b|| exactly -- unlike the reduced-QR comparison above.
la.norm(R2.dot(x2) - Q2.T.dot(b))
# Did we get the same `x` both times?
# In[69]:
x - x2  # should be ~0 (roundoff only): both solve the same least-squares problem
# Finally, let's compare against the normal equations:
# In[70]:
#keep
# Normal equations: solve the square n x n system (A^T A) x3 = A^T b.
gram = A.T.dot(A)
x3 = la.solve(gram, A.T.dot(b))
# In[71]:
x3 - x  # should be ~0: normal equations agree with QR up to roundoff
# In[ ]: