-
Notifications
You must be signed in to change notification settings - Fork 16
/
Copy pathperceptron.py
75 lines (68 loc) · 1.72 KB
/
perceptron.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
import os
import sys
# An example in that book, the training set and parameters' sizes are fixed
training_set = []  # list of [features, label] pairs loaded from trainFile in __main__
w = []             # weight vector, one entry per feature (initialized with zeros in __main__)
b = 0              # bias term of the separating hyperplane
lens = 0           # number of features per training example
n = 0              # learning rate, set from sys.argv[1] in __main__
# update parameters using stochastic gradient descent
def update(item):
    """Apply one SGD step for a misclassified example.

    item is a [features, label] pair. Updates the global weight vector w
    and bias b in place: w[i] += n * label * features[i], b += n * label,
    where n is the global learning rate.
    """
    global w, b, lens, n
    for i in range(lens):
        w[i] += n * item[1] * item[0][i]
    b += n * item[1]
    # print(w, b)  # uncomment to trace the stochastic gradient descent steps
# calculate the functional distance between 'item' and the decision surface
def cal(item):
    """Return the functional margin label * (w . features + b) of item.

    item is a [features, label] pair; a non-positive result means the
    current hyperplane (global w, b) misclassifies the example.
    """
    global w, b
    features, label = item[0], item[1]
    score = sum(features[i] * w[i] for i in range(len(features))) + b
    return score * label
# check if the hyperplane can classify the examples correctly
def check():
    """Run one pass over training_set, updating on misclassified examples.

    If every example has a positive functional margin (no update needed),
    write the learned model (w, b, lens, n) to the global modelFile and
    terminate the process via os._exit(0). Otherwise return normally so
    the caller can run another pass.
    """
    updated = False
    for item in training_set:
        if cal(item) <= 0:  # non-positive margin => misclassified
            updated = True
            update(item)
    if not updated:  # converged: all examples classified correctly
        print("RESULT: w: " + str(w) + " b: " + str(b))
        # model file format: weights on one line, then b, lens, n
        modelFile.write(' '.join(str(weight) for weight in w) + '\n')
        modelFile.write(str(b) + '\n')
        modelFile.write(str(lens) + '\n')
        modelFile.write(str(n) + '\n')
        modelFile.close()
        os._exit(0)
if __name__ == "__main__":
    if len(sys.argv) != 4:
        print("Usage: python perceptron.py n trainFile modelFile")
        exit(0)
    n = float(sys.argv[1])  # learning rate
    # open() replaces the Python-2-only file() builtin (removed in Python 3)
    trainFile = open(sys.argv[2])
    modelFile = open(sys.argv[3], 'w')
    lens = 0
    # each training line: "<label> <feature_1> ... <feature_lens>"
    for line in trainFile:
        chunk = line.strip().split(' ')
        lens = len(chunk) - 1
        features = [int(chunk[i]) for i in range(1, lens + 1)]
        training_set.append([features, int(chunk[0])])
    trainFile.close()
    w = [0] * lens  # zero-initialize one weight per feature
    # at most 1000 passes; check() exits the process on convergence
    for i in range(1000):
        check()
    print("The training_set is not linear separable. ")