#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" This script implements Newton's method with backtracking line-search.
"""
__version__ = "0.1"
__author__ = "Nasser Benabderrazik"

import numpy as np


def NewtonStep(x, f, g, h):
    """ Compute the Newton step at x for the function f.

    Parameters
    ----------
    x: array, shape = [d]
        Current iterate.
    f: function
        Function of x to minimize (not evaluated here; kept for a uniform
        interface).
    g: function
        Gradient of f (function of x).
    h: function
        Hessian of f (function of x).

    Returns
    -------
    newton_step: array, shape = [d]
        Newton step to apply to x.
    gap: float
        Estimated gap between f(x) and min(f), i.e. half the squared
        Newton decrement.
    """
    gx = g(x)
    hx = h(x)
    # Solve H * step = -g rather than forming the inverse explicitly
    newton_step = -np.linalg.solve(hx, gx)
    # Squared Newton decrement lambda(x)^2 = g^T H^{-1} g;
    # lambda(x)^2 / 2 estimates f(x) - min(f)
    lambda_sq = gx.dot(-newton_step)
    gap = 0.5*lambda_sq
    return newton_step, gap
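
# Illustrative sanity check (a sketch, not part of the original script): for
# a strictly convex quadratic f(x) = 1/2 x^T Q x + c^T x, one Newton step from
# any x lands exactly on the minimizer -Q^{-1} c, and `gap` equals the true
# suboptimality f(x) - min(f). For example:
#
#     Q = np.array([[2.0, 0.0], [0.0, 4.0]])
#     c = np.array([1.0, -1.0])
#     f = lambda x: 0.5*x.dot(Q.dot(x)) + c.dot(x)
#     g = lambda x: Q.dot(x) + c
#     h = lambda x: Q
#     x = np.array([1.0, 1.0])
#     step, gap = NewtonStep(x, f, g, h)
#     # np.allclose(x + step, -np.linalg.solve(Q, c))        -> True
#     # np.isclose(gap, f(x) - f(-np.linalg.solve(Q, c)))    -> True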


def backTrackingLineSearch(x, step, f, g, A, b, alpha=0.3, beta=0.5):
    """ Compute a step size t for the update x := x + t*step by
    backtracking line-search on f.

    Parameters
    ----------
    x: array, shape = [d]
        Current iterate.
    step: array, shape = [d]
        Descent direction at x (e.g. the Newton step).
    f: function
        Function of x to minimize.
    g: function
        Gradient of f (function of x).
    A: array, shape = [m, d]
    b: array, shape = [m]
        Constraints defining dom(f) = {x : Ax < b}.
    alpha: float, optional (default = 0.3)
        "Fraction of the decrease in f predicted by linear extrapolation that
        we will accept".
    beta: float, optional (default = 0.5)
        Factor to reduce t in the search.

    Returns
    -------
    step_size: float
        Step size to use for the next update (x := x + step_size*step).
    """
    # Initial step size
    step_size = 1
    xnew = x + step_size*step

    # Shrink the step until xnew lies strictly inside dom(f) = {x : Ax < b}
    while not np.all(A.dot(xnew) < b):
        step_size *= beta
        xnew = x + step_size*step

    # Backtrack until the sufficient-decrease (Armijo) condition holds;
    # f(x) and the directional derivative are constant in this loop, so
    # evaluate them once
    fx = f(x)
    slope = g(x).T.dot(step)
    while f(xnew) > fx + alpha*step_size*slope:
        step_size *= beta
        xnew = x + step_size*step

    return step_size
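
# Illustrative note (a sketch, not part of the original script): on the
# quadratic example above, with alpha < 1/2 and constraints loose enough that
# the full step stays feasible, the Armijo condition already holds at t = 1,
# so the search returns step_size = 1 immediately:
#
#     A = np.zeros((1, 2))   # 0 < 1 always holds: no binding constraint
#     b = np.array([1.0])
#     t = backTrackingLineSearch(x, step, f, g, A, b)
#     # t == 1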


def newtonLS(x0, f, g, h, tol, A, b, alpha=0.3, beta=0.5):
    """ Minimize the function f starting at x0 using Newton's method with
    backtracking line-search.

    Parameters
    ----------
    x0: array, shape = [d]
        Starting point (must be strictly feasible: A.dot(x0) < b).
    f: function
        Function of x to minimize.
    g: function
        Gradient of f (function of x).
    h: function
        Hessian of f (function of x).
    tol: float
        Stopping criterion (estimated gap between f(x) and min(f) < tol).
    A: array, shape = [m, d]
    b: array, shape = [m]
        Constraints defining dom(f) = {x : Ax < b}.
    alpha: float, optional (default = 0.3)
        "Fraction of the decrease in f predicted by linear extrapolation that
        we will accept".
    beta: float, optional (default = 0.5)
        Factor to reduce t in the search.

    Returns
    -------
    xstar: array, shape = [d]
        Estimated minimum of f.
    xhist: list
        History of all Newton iterates.
    """
    # First step
    newton_step, gap = NewtonStep(x0, f, g, h)
    step_size = backTrackingLineSearch(x0, newton_step, f, g, A, b, alpha,
                                       beta)
    x = x0 + step_size*newton_step
    xhist = [x]

    while gap > tol:
        newton_step, gap = NewtonStep(x, f, g, h)
        step_size = backTrackingLineSearch(x, newton_step, f, g, A, b, alpha,
                                           beta)
        # Rebind x (rather than x += ...) so the iterates already stored in
        # xhist are not mutated in place
        x = x + step_size*newton_step
        xhist.append(x)

    xstar = x
    return xstar, xhist
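

# ---------------------------------------------------------------------------
# Minimal usage sketch: the problem data below are made up for illustration
# only. We minimize the strictly convex log-barrier objective
# f(x) = ||x||^2 - sum_i log(b_i - a_i^T x) over the polyhedron {x : Ax < b},
# with the gradient and Hessian derived analytically.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    A = np.array([[1.0, 0.0],
                  [0.0, 1.0],
                  [-1.0, -1.0]])
    b = np.array([1.0, 2.0, 3.0])

    def f(x):
        return x.dot(x) - np.sum(np.log(b - A.dot(x)))

    def g(x):
        # grad f = 2x + A^T d, with d_i = 1 / (b_i - a_i^T x)
        d = 1.0/(b - A.dot(x))
        return 2*x + A.T.dot(d)

    def h(x):
        # hess f = 2I + A^T diag(d^2) A
        d = 1.0/(b - A.dot(x))
        return 2*np.eye(x.shape[0]) + (A.T*d**2).dot(A)

    x0 = np.zeros(2)  # strictly feasible: A.dot(x0) < b
    xstar, xhist = newtonLS(x0, f, g, h, 1e-8, A, b)
    print("xstar:", xstar)
    print("f(xstar):", f(xstar))
    print("iterations:", len(xhist))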