Lasso
=====


Lasso is a well-known technique for sparse linear regression.
It is obtained by adding an :math:`\ell_1` regularization term to the objective,

.. math::
  \begin{array}{ll}
    \mbox{minimize} & \frac{1}{2} \| Ax - b \|_2^2 + \gamma \| x \|_1
  \end{array}


where :math:`x \in \mathbf{R}^{n}` is the vector of parameters, :math:`A \in \mathbf{R}^{m \times n}` is the data matrix, and :math:`\gamma > 0` is the weighting parameter.
The problem has the following equivalent form,

.. math::
  \begin{array}{ll}
    \mbox{minimize} & \frac{1}{2} y^T y + \gamma \boldsymbol{1}^T t \\
    \mbox{subject to} & y = Ax - b \\
                      & -t \le x \le t
  \end{array}


To obtain a good trade-off between sparsity of the solution and quality of the linear fit, we solve the problem for varying values of the weighting parameter :math:`\gamma`.
Since :math:`\gamma` appears only in the linear part of the objective, we can reuse the matrix factorization and enable warm starting to reduce the computation time.
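
Concretely, stacking the variables as :math:`z = (x, y, t) \in \mathbf{R}^{2n + m}` and writing the data matrix as :math:`A_d` (``Ad`` in the Python and Matlab code below) to distinguish it from the OSQP constraint matrix, the code in the next sections passes the QP data

.. math::
  P = \begin{bmatrix} 0 & 0 & 0 \\ 0 & I_m & 0 \\ 0 & 0 & 0 \end{bmatrix}, \quad
  q = \begin{bmatrix} 0 \\ 0 \\ \gamma \boldsymbol{1} \end{bmatrix}, \quad
  A = \begin{bmatrix} A_d & -I_m & 0 \\ I_n & 0 & -I_n \\ I_n & 0 & I_n \end{bmatrix}, \quad
  l = \begin{bmatrix} b \\ -\infty \\ 0 \end{bmatrix}, \quad
  u = \begin{bmatrix} b \\ 0 \\ +\infty \end{bmatrix},

so that only :math:`q` depends on :math:`\gamma`, and each new value of :math:`\gamma` requires just an update of the linear cost.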


Python
------

.. code:: python

    import osqp
    import numpy as np
    from scipy import sparse

    # Generate problem data
    np.random.seed(1)
    n = 10
    m = 1000
    Ad = sparse.random(m, n, density=0.5)
    x_true = np.multiply((np.random.rand(n) > 0.8).astype(float),
                         np.random.randn(n)) / np.sqrt(n)
    b = Ad.dot(x_true) + 0.5*np.random.randn(m)
    gammas = np.linspace(1, 10, 11)

    # Auxiliary data
    In = sparse.eye(n)
    Im = sparse.eye(m)
    On = sparse.csc_matrix((n, n))
    Onm = sparse.csc_matrix((n, m))

    # OSQP data
    P = sparse.block_diag([On, sparse.eye(m), On], format='csc')
    q = np.zeros(2*n + m)
    A = sparse.vstack([sparse.hstack([Ad, -Im, Onm.T]),
                       sparse.hstack([In, Onm, -In]),
                       sparse.hstack([In, Onm, In])], format='csc')
    l = np.hstack([b, -np.inf * np.ones(n), np.zeros(n)])
    u = np.hstack([b, np.zeros(n), np.inf * np.ones(n)])

    # Create an OSQP object
    prob = osqp.OSQP()

    # Setup workspace
    prob.setup(P, q, A, l, u, warm_start=True)

    # Solve problem for different values of gamma parameter
    for gamma in gammas:
        # Update linear cost
        q_new = np.hstack([np.zeros(n+m), gamma*np.ones(n)])
        prob.update(q=q_new)

        # Solve
        res = prob.solve()
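
The loop above keeps only the result of the last solve. Below is a minimal sketch of the same loop that records the Lasso estimate for every :math:`\gamma`, assuming the standard ``res.x`` attribute of the OSQP results object (the first ``n`` entries of the stacked variable correspond to :math:`x`); the names ``x_estimates`` and ``x_gamma`` are just illustrative.

.. code:: python

    # Variant of the loop above that stores the estimate of x for each gamma
    x_estimates = []
    for gamma in gammas:
        # Update the linear cost and re-solve, reusing the factorization
        prob.update(q=np.hstack([np.zeros(n+m), gamma*np.ones(n)]))
        res = prob.solve()

        # res.x stacks (x, y, t); keep only the x block
        x_gamma = res.x[:n]
        x_estimates.append(x_gamma)
        print(f"gamma = {gamma:5.2f}, nonzeros in x: {int(np.sum(np.abs(x_gamma) > 1e-4))}")

Because OSQP is a first-order solver, the returned coefficients are only approximately zero, hence the small threshold when counting nonzeros.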


Matlab
------

.. code:: matlab

    % Generate problem data
    rng(1)
    n = 10;
    m = 1000;
    Ad = sprandn(m, n, 0.5);
    x_true = (randn(n, 1) > 0.8) .* randn(n, 1) / sqrt(n);
    b = Ad * x_true + 0.5 * randn(m, 1);
    gammas = linspace(1, 10, 11);

    % OSQP data
    P = blkdiag(sparse(n, n), speye(m), sparse(n, n));
    q = zeros(2*n+m, 1);
    A = [Ad, -speye(m), sparse(m, n);
         speye(n), sparse(n, m), -speye(n);
         speye(n), sparse(n, m), speye(n)];
    l = [b; -inf*ones(n, 1); zeros(n, 1)];
    u = [b; zeros(n, 1); inf*ones(n, 1)];

    % Create an OSQP object
    prob = osqp;

    % Setup workspace
    prob.setup(P, q, A, l, u, 'warm_start', true);

    % Solve problem for different values of gamma parameter
    for i = 1 : length(gammas)
        % Update linear cost
        gamma = gammas(i);
        q_new = [zeros(n+m, 1); gamma*ones(n, 1)];
        prob.update('q', q_new);

        % Solve
        res = prob.solve();
    end



CVXPY
-----

.. code:: python

    from cvxpy import *
    import numpy as np
    from scipy import sparse

    # Generate problem data
    np.random.seed(1)
    n = 10
    m = 1000
    A = sparse.random(m, n, density=0.5)
    x_true = np.multiply((np.random.rand(n) > 0.8).astype(float),
                         np.random.randn(n)) / np.sqrt(n)
    b = A.dot(x_true) + 0.5*np.random.randn(m)
    gammas = np.linspace(1, 10, 11)

    # Define problem
    x = Variable(n)
    gamma = Parameter(nonneg=True)
    objective = 0.5*sum_squares(A @ x - b) + gamma*norm1(x)
    prob = Problem(Minimize(objective))

    # Solve problem for different values of gamma parameter
    for gamma_val in gammas:
        gamma.value = gamma_val
        prob.solve(solver=OSQP, warm_start=True)
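
Once ``prob.solve`` has returned, the estimate is available in ``x.value``. Below is a minimal sketch of inspecting the fit for the last value of :math:`\gamma` in the sweep, using only standard CVXPY and NumPy attributes (``x.value``, ``prob.value``, ``np.linalg.norm``); the name ``x_lasso`` is just illustrative.

.. code:: python

    # Inspect the solution obtained for the last gamma in the sweep
    x_lasso = x.value
    print("objective value:", prob.value)
    print("nonzeros in x:  ", int(np.sum(np.abs(x_lasso) > 1e-4)))
    print("recovery error: ", np.linalg.norm(x_lasso - x_true))

Passing ``warm_start=True`` to ``prob.solve`` lets CVXPY reuse the previous OSQP solution when only the parameter :math:`\gamma` changes, mirroring the direct OSQP examples above.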


YALMIP
------

.. code:: matlab

    % Generate problem data
    rng(1)
    n = 10;
    m = 1000;
    A = sprandn(m, n, 0.5);
    x_true = (randn(n, 1) > 0.8) .* randn(n, 1) / sqrt(n);
    b = A * x_true + 0.5 * randn(m, 1);
    gammas = linspace(1, 10, 11);

    % Define problem
    x = sdpvar(n, 1);
    gamma = sdpvar;
    objective = 0.5*norm(A*x - b)^2 + gamma*norm(x, 1);

    % Solve with OSQP
    options = sdpsettings('solver', 'osqp');
    x_opt = optimizer([], objective, options, gamma, x);

    % Solve problem for different values of gamma parameter
    for i = 1 : length(gammas)
        x_opt(gammas(i));
    end
181 end