Least Squares Demo¶

Preliminaries¶

In [1]:
import numpy as np
import matplotlib.pyplot as plt
In [2]:
def EvalPoly(x, coefs):
    '''
     y = EvalPoly(x, coefs)
     
     Evaluates the polynomial
     
       y = coefs[0] + coefs[1]*x + coefs[2]*x**2 + ... + coefs[n]*x**n
     
     (a model that is linear in its coefficients)
    '''
    y = np.zeros(np.shape(x))
    for idx, c in enumerate(coefs):
        y += c * x**idx
    return y
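As a quick sanity check (an addition, not part of the original demo): numpy.polynomial.polynomial.polyval uses the same ascending coefficient order and should agree with EvalPoly.

In [ ]:
from numpy.polynomial import polynomial as P

# Both evaluate 1 - 2x + 3x^2 (ascending coefficient order)
xtest = np.array([0.0, 1.0, 2.0])
ctest = np.array([1.0, -2.0, 3.0])
print(EvalPoly(xtest, ctest))    # [1. 2. 9.]
print(P.polyval(xtest, ctest))   # should match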

A model for testing¶

Select coefficients¶

In [3]:
# Cubic: four coefficients, so a degree-3 polynomial
c = np.array([-0.5, 0.75, -0.6, 0.1])
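With these coefficients the true model is y = -0.5 + 0.75*x - 0.6*x**2 + 0.1*x**3.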

Samples (with noise)¶

In [4]:
N = 500
x = np.linspace(0, 2, N)
x = x + 0.05*np.random.normal(size=np.shape(x))   # jitter the sample locations
y = EvalPoly(x, c) + 0.1*np.random.normal(size=np.shape(x))   # additive observation noise
In [5]:
plt.plot(x, y, '.');

Least-Squares Fit¶

Create the matrix of regressors¶

In [6]:
# Column idx of A holds the regressor x**idx, so A is a
# Vandermonde matrix with one row per sample.
Alist = []
for idx in range(len(c)):
    Alist.append(x**idx)
A = np.vstack(Alist).T
print(np.shape(A))
(500, 4)
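An equivalent one-liner (an alternative, not the route taken above): np.vander with increasing=True builds the same matrix, with columns running from x**0 upward.

In [ ]:
# Alternative construction; should match A column for column
A_alt = np.vander(x, N=len(c), increasing=True)
print(np.allclose(A, A_alt))   # expected: True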

Solve the normal equations¶

In [7]:
# lstsq minimizes ||A @ beta - y||^2 directly (via the SVD)
beta = np.linalg.lstsq(A, y, rcond=None)[0]
print(beta)
[-0.4919071   0.74863404 -0.60211473  0.1005194 ]
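Strictly speaking, lstsq solves the least-squares problem via the SVD rather than by forming the normal equations. Solving them explicitly is a one-liner and should agree here; a sketch for comparison only, since forming A.T @ A squares the condition number.

In [ ]:
# Explicit normal equations A^T A beta = A^T y (illustration only)
beta_ne = np.linalg.solve(A.T @ A, A.T @ y)
print(np.allclose(beta, beta_ne))   # expected: True (well-conditioned here)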
In [8]:
# The true model parameters
print(c)
[-0.5   0.75 -0.6   0.1 ]
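The fit recovers the true coefficients to about two decimal places; an added element-wise check:

In [ ]:
# Absolute error between fitted and true coefficients (added check)
print(np.abs(beta - c))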

Plot the LS model¶

In [9]:
# x was jittered above, so take min/max rather than the end samples
xx = np.linspace(x.min(), x.max(), 100)
yy_ls = EvalPoly(xx, beta)
In [10]:
# True model (for comparison)
yy = EvalPoly(xx, c)
In [11]:
plt.figure(figsize=(10,7))
plt.plot(x, y, '.', label='Data')
plt.plot(xx, yy, ':', color='gray', label='True')
plt.plot(xx, yy_ls, 'r--', label='LS')
plt.legend();
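As a final check (an addition to the demo): the observation noise added to y had standard deviation 0.1, so the residuals of a good fit should show roughly that spread.

In [ ]:
# Residual spread; should be close to the 0.1 noise level used above
resid = y - A @ beta
print(np.std(resid))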