Demo of projected gradient to solve the Lasso problem

Author: Alexandre Gramfort, INRIA

In [1]:
%matplotlib inline
import numpy as np
import matplotlib.pyplot as plt
In [2]:
n, p = 40, 60
A = np.random.randn(n, p)  # random Gaussian design matrix (n < p: underdetermined)
plt.matshow(A);

Generate a sparse vector:

In [3]:
x = np.zeros(p)  # sparse ground truth with two nonzero entries
x[10] = 30
x[44] = -20
b = np.dot(A, x)
b += 0.1 * np.random.randn(n)  # add a small amount of Gaussian noise
In [4]:
b.shape
Out[4]:
(40,)
In [5]:
plt.stem(x);

Implement the projected gradient method

The Lasso objective $\tfrac{1}{2}\|b - Ax\|_2^2 + \lambda \|x\|_1$ is not differentiable, so we first rewrite it as a smooth problem over the nonnegative orthant by splitting $x$ into its positive and negative parts (see the sketch below); the projection step then simply clips negative entries to zero.
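
This is the standard positive/negative split (written here in LaTeX notation; $\mathbf{1}$ is the all-ones vector in $\mathbb{R}^{2p}$):

$$
\min_{z \ge 0} \; \frac{1}{2} \| b - H z \|_2^2 + \lambda \, \mathbf{1}^\top z,
\qquad H = [A, \, -A], \qquad x = z_{1:p} - z_{p+1:2p}
$$

At any minimizer the two halves of $z$ have disjoint supports, so $\mathbf{1}^\top z = \|x\|_1$ and the reformulation is equivalent to the original Lasso problem.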

In [6]:
H = np.c_[A, -A]  # stack A and -A: z = [z_plus, z_minus] encodes x = z_plus - z_minus
c = np.ones(2 * p)
lambd = 100.

def pobj(z):  # primal objective: 0.5 * ||b - Hz||^2 + lambd * c^T z
    return (0.5 * np.linalg.norm(b - np.dot(H, z)) ** 2
            + lambd * np.dot(c.T, z))

def grad(z):  # gradient of pobj
    return -np.dot(H.T, b - np.dot(H, z)) + lambd * c
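
# Optional sanity check on the gradient (a sketch, assuming SciPy is available):
# compare grad() with a finite-difference approximation at a random point.
from scipy.optimize import check_grad
z0 = np.abs(np.random.randn(2 * p))
print(check_grad(pobj, grad, z0))  # should be tiny relative to np.linalg.norm(grad(z0))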

z = np.zeros(2 * p)
rho = 0.001  # fixed step size

all_pobj = []

for k in range(1000):
    z = z - rho * grad(z)  # gradient step
    z[z < 0.] = 0.  # projection onto the nonnegative orthant
    all_pobj.append(pobj(z))  # store the objective value for monitoring

plt.plot(all_pobj);
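
The fixed step size rho = 0.001 works for this instance, but a safer generic choice ties rho to the Lipschitz constant of the gradient. A minimal sketch (assuming we take rho = 1 / L with L = ||H||_2^2, the largest eigenvalue of H^T H; the name lips is ours):

lips = np.linalg.norm(H, ord=2) ** 2  # squared spectral norm = Lipschitz constant of grad
rho_safe = 1. / lips  # standard safe step size for projected gradient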
In [7]:
fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5), sharey=True, sharex=True)
ax1.stem(x)  # true sparse vector

x_hat = z[:p] - z[p:]  # recover x from its positive/negative split
ax2.stem(x_hat);  # estimate found by projected gradient
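
As a sanity check, the result can be compared with scikit-learn's coordinate-descent Lasso (a sketch, assuming scikit-learn is installed; its objective scales the data-fit term by 1/n, so alpha = lambd / n matches our penalty):

from sklearn.linear_model import Lasso  # assumption: scikit-learn is available

# sklearn minimizes (1 / (2 * n)) * ||b - A w||^2 + alpha * ||w||_1,
# i.e. our objective divided by n when alpha = lambd / n.
lasso = Lasso(alpha=lambd / n, fit_intercept=False)
lasso.fit(A, b)
print(np.max(np.abs(lasso.coef_ - x_hat)))  # should be small if both solvers agree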