forked from jihite/Perceptron-python-
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathperceptron_duality.py
109 lines (96 loc) · 2.39 KB
/
perceptron_duality.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
import os
import sys
# An example in that book, the training set and parameters' sizes are fixed
training_set = []  # list of [feature_vector, label] pairs loaded from trainFile
w = []  # primal weight vector, recovered at the end as sum_i a[i]*y_i*x_i
a = []  # dual parameters (alpha): accumulated learning-rate per example
b = 0  # bias term
lens = 0  # number of features per training example
n = 0  # learning rate (eta), read from argv in __main__
Gram = []  # Gram matrix: Gram[i][j] = <x_i, x_j> over the training set
def calInnerProduct(i, j):
    """Return the inner product of the feature vectors of examples i and j."""
    global lens
    return sum(training_set[i][0][k] * training_set[j][0][k]
               for k in range(lens))
def AddVector(vec1, vec2):
    """Add vec2 into vec1 element-wise IN PLACE and return the mutated vec1."""
    for idx, _ in enumerate(vec1):
        vec1[idx] += vec2[idx]
    return vec1
def NumProduct(num, vec):
    """Return a new list equal to *vec* scaled by *num*.

    Bug fix: the original scaled *vec* in place and returned it, which
    silently corrupted the caller's data — check() passes
    training_set[i][0] here, so the stored training examples were
    overwritten. Every caller only uses the return value, so building a
    fresh list keeps the interface identical while removing the
    destructive side effect.
    """
    return [num * x for x in vec]
def createGram():
    """Populate the global Gram matrix with all pairwise inner products."""
    global lens
    size = len(training_set)
    for row in range(size):
        Gram.append([calInnerProduct(row, col) for col in range(size)])
# update parameters using stochastic gradient descent
def update(k):
    """Dual-form perceptron update for misclassified example k.

    a[k] += eta (global n), b += eta * y_k.

    Fix: the original used a Python 2 print statement (`print a, b`),
    which is a syntax error under Python 3; converted to print().
    """
    global a, b, n
    a[k] += n
    b = b + n * training_set[k][1]
    # debug trace of the stochastic gradient descent progress
    print(a, b)
# calculate the functional distance between example k and the decision surface
def cal(k):
    """Return the functional margin of example k under the current a, b."""
    global a, b
    total = sum(a[j] * int(training_set[j][1]) * Gram[j][k]
                for j in range(len(training_set)))
    return (total + b) * training_set[k][1]
# check if the hyperplane can classify the examples correctly
def check():
    """One full pass over the training set.

    Applies the dual-form update to every misclassified example
    (functional margin <= 0). If no example was misclassified, the model
    has converged: recover w = sum_i a[i]*y_i*x_i, write w, b, lens and
    the learning rate to the global modelFile, then terminate.

    Fixes vs. original: Python 2 print statement converted to print();
    the model line is built with str.join instead of quadratic string
    concatenation plus strip(); the dead trailing `flag = False`
    (a local reset immediately before returning) is removed.
    """
    global w, a
    flag = False
    for i in range(len(training_set)):
        if cal(i) <= 0:
            flag = True
            update(i)
    if not flag:
        # converged: reconstruct the primal weight vector from the alphas
        for i in range(len(training_set)):
            w = AddVector(w, NumProduct(a[i] * int(training_set[i][1]), training_set[i][0]))
        print("RESULT: w: ", w, " b: ", b)
        modelFile.write(' '.join(str(coef) for coef in w) + '\n')
        modelFile.write(str(b) + '\n')
        modelFile.write(str(lens) + '\n')
        modelFile.write(str(n) + '\n')
        modelFile.close()
        os._exit(0)
if __name__ == "__main__":
    # usage: python perceptron_duality.py <eta> <trainFile> <modelFile>
    if len(sys.argv) != 4:
        print("Usage: python perceptron_duality.py n trainFile modelFile")
        sys.exit(0)
    n = float(sys.argv[1])
    # Fix: file() was removed in Python 3 — use open(); also the Python 2
    # print statements and bare exit() above are replaced with print()
    # and sys.exit(). modelFile stays a module global written by check().
    trainFile = open(sys.argv[2])
    modelFile = open(sys.argv[3], 'w')
    lens = 0
    # each input line: "<label> <f1> <f2> ... <f_lens>"
    for line in trainFile:
        chunk = line.strip().split(' ')
        lens = len(chunk) - 1
        features = [int(chunk[i]) for i in range(1, lens + 1)]
        training_set.append([features, int(chunk[0])])
    trainFile.close()
    createGram()
    # initialize the dual parameters and the weight vector to zeros
    a = [0] * len(training_set)
    w = [0] * lens
    # bounded number of epochs; check() exits the process on convergence
    for _ in range(1000):
        check()
    print("The training_set is not linear separable. ")