objective.py
import numpy as np
from benchopt import BaseObjective


def subdiff_distance(w, grad, lmbd, gamma):
    """Distance of negative gradient to Fréchet subdifferential of MCP at w."""
    subdiff_dist = np.zeros_like(grad)
    for j in range(len(w)):
        if w[j] == 0:
            # distance of -grad to [-lmbd, lmbd]
            subdiff_dist[j] = max(0, np.abs(grad[j]) - lmbd)
        elif np.abs(w[j]) < lmbd * gamma:
            # distance of -grad to (lmbd - abs(w[j])/gamma) * sign(w[j])
            subdiff_dist[j] = np.abs(
                grad[j] + lmbd * np.sign(w[j]) - w[j] / gamma)
        else:
            # distance of -grad to 0
            subdiff_dist[j] = np.abs(grad[j])
    return subdiff_dist
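

# The minimax concave penalty (MCP) with parameters lmbd and gamma is, for a
# single coordinate t,
#     pen(t) = lmbd * |t| - t ** 2 / (2 * gamma)   if |t| <= gamma * lmbd,
#     pen(t) = gamma * lmbd ** 2 / 2               otherwise.
# Objective.compute below evaluates this penalty, and subdiff_distance above
# uses its Fréchet subdifferential: [-lmbd, lmbd] at t = 0,
# {lmbd * sign(t) - t / gamma} for 0 < |t| < gamma * lmbd, and {0} beyond.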
class Objective(BaseObjective):
    min_benchopt_version = "1.3"
    name = "MCP Regression"
    parameters = {"reg": [1, 0.5, 0.1, 0.01], "gamma": [3]}

    def __init__(self, reg=0.1, gamma=1.2):
        self.reg = reg
        self.gamma = gamma

    def set_data(self, X, y):
        self.X, self.y = X, y
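        # The regularization strength is a fraction `reg` of lambda_max,
        # computed from the data below.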
        self.lmbd = self.reg * self._get_lambda_max()

    def compute(self, beta):
        diff = self.X @ beta - self.y
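        # MCP penalty value per coordinate: start from the constant regime
        # gamma * lmbd ** 2 / 2, then overwrite the coordinates lying in the
        # quadratic regime |beta_j| <= gamma * lmbd.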
        pen = (self.lmbd ** 2 * self.gamma / 2.) * np.ones(beta.shape)
        idx = np.abs(beta) <= self.gamma * self.lmbd
        pen[idx] = (
            self.lmbd * np.abs(beta[idx]) - beta[idx] ** 2 / (2 * self.gamma)
        )

        # compute distance of -grad f to subdifferential of MCP penalty
        grad = self.X.T @ diff / len(self.y)
        opt = subdiff_distance(beta, grad, self.lmbd, self.gamma)

        return dict(value=0.5 * diff @ diff / len(self.y) + pen.sum(),
                    sparsity=(beta != 0).sum(), opt_violation=opt.max())

    def _get_lambda_max(self):
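        # Smallest regularization strength for which 0 is a stationary point:
        # the optimality condition at 0 reads max_j |X_j^T y| / n_samples <= lmbd.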
        return abs(self.X.T @ self.y).max() / len(self.y)

    def get_one_solution(self):
        return np.zeros(self.X.shape[1])

    def get_objective(self):
        return dict(X=self.X, y=self.y, lmbd=self.lmbd, gamma=self.gamma)
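

# A minimal standalone sanity check, assuming this objective can be
# instantiated directly outside the benchopt runner (in a benchmark run,
# benchopt builds the instance and feeds it a dataset). The synthetic data
# and the reg/gamma values below are purely illustrative.
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    X = rng.standard_normal((50, 20))
    y = rng.standard_normal(50)

    objective = Objective(reg=0.5, gamma=3)
    objective.set_data(X, y)

    # At beta = 0 the MCP penalty vanishes, so the reported value reduces to
    # 0.5 * ||y|| ** 2 / n_samples and the sparsity is 0.
    beta = objective.get_one_solution()
    print(objective.compute(beta))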