# loss.py — loss functions, their (sub)gradients, and gradient-norm bounds
# for online-learning experiments (MetaGrad appendix code).
# NOTE: GitHub page chrome and the copied line-number gutter were removed
# from this scrape; only the actual source follows.
from copy import deepcopy
import numpy as np
from cycler import cycler
import matplotlib.pyplot as plt
import cvxpy as cvx
def hinge(weights, datarow):
    """Hinge loss for one example.

    datarow holds the feature vector in datarow[:-1] and the label in
    datarow[-1]; returns max(0, 1 - y * <x, w>).
    """
    margin = datarow[-1] * np.dot(datarow[:-1], weights)
    return np.maximum(0.0, 1.0 - margin)
def gradhinge(weights, datarow, clip = 10e30):
    """Subgradient of the hinge loss at `weights` for one example.

    `clip` is accepted for a signature uniform with the other gradient
    functions but is not used here.
    """
    label = datarow[-1]
    feats = datarow[:-1]
    # On or beyond the margin the hinge is flat: zero subgradient.
    if label * np.dot(feats, weights) >= 1:
        return np.zeros(len(weights))
    return -label * feats
def Gboundhinge(ybound, xbound, wbound):
    """Bound on the hinge-gradient norm: |y| * |x|; independent of wbound."""
    return xbound * ybound
def logistic(weights, datarow):
    """Numerically stable logistic loss log(1 + exp(-y * <x, w>)).

    The branch keeps every exp() argument non-positive so large margins
    cannot overflow.
    """
    margin = datarow[-1] * np.dot(datarow[:-1], weights)
    if margin > 0:
        return np.log(1 + np.exp(-margin))
    return -margin + np.log(1 + np.exp(margin))
def gradlogistic(weights, datarow, clip = 10e30):
    """Gradient of the logistic loss: -y * x * sigma(-y * <x, w>).

    Each branch only exponentiates a non-positive quantity, keeping the
    computation overflow-free. `clip` is accepted for API symmetry but unused.
    """
    label = datarow[-1]
    feats = datarow[:-1]
    margin = label * np.dot(feats, weights)
    if margin < 0:
        return -label * feats / (1 + np.exp(margin))
    return -label * feats * np.exp(-margin) / (1 + np.exp(-margin))
def Gboundlogistic(ybound, xbound, wbound, clip = 10e30):
    """Bound on the logistic-gradient norm at the worst-case margin.

    `clip` is accepted for API symmetry but unused.
    """
    sigma = 1 / (1 + np.exp(-ybound * wbound * xbound))
    return ybound * xbound * sigma
def squaredloss(weights, datarow):
    """Squared loss (<x, w> - y)^2 for one example."""
    residual = np.dot(datarow[:-1], weights) - datarow[-1]
    return residual ** 2
def gradsquaredloss(weights, datarow, clip = 10e30):
    """Gradient of the squared loss: 2 * x * (<x, w> - y).

    `clip` is accepted for API symmetry but unused.
    """
    residual = np.dot(datarow[:-1], weights) - datarow[-1]
    return 2 * datarow[:-1] * residual
def Gboundsquaredloss(ybound, xbound, wbound):
    """Bound on the squared-loss gradient norm.

    Follows from 2|x||<x,w> - y| <= 2 * xbound * (xbound*wbound + ybound).
    """
    worst_residual = xbound * wbound + ybound
    return 2 * xbound * worst_residual
def absoluteloss(weights, datarow):
    """Absolute loss |<x, w> - y| for one example."""
    residual = np.dot(datarow[:-1], weights) - datarow[-1]
    return np.abs(residual)
def gradabsoluteloss(weights, datarow, clip = 10e30):
    """Subgradient of the absolute loss: x * sign(<x, w> - y).

    np.sign(0) == 0, so a perfect fit yields the zero vector (a valid
    subgradient). `clip` is accepted for API symmetry but unused.
    """
    residual = np.dot(datarow[:-1], weights) - datarow[-1]
    return np.sign(residual) * datarow[:-1]
def Gboundabsoluteloss(ybound, xbound, wbound):
    """Bound on the absolute-loss gradient norm: |sign| <= 1, so just xbound."""
    return xbound
#%%