Commit

first commit
Jeremy Kun committed Aug 18, 2013
0 parents commit e08c17e
Showing 22 changed files with 3,115 additions and 0 deletions.
1,593 changes: 1,593 additions & 0 deletions digits.dat

Large diffs are not rendered by default.

Binary file added img/nerual-net-nolabels.png
Binary file added img/nerual-net-nolabels.psd
Binary file not shown.
Binary file added img/neuron-with-sigma.png
Binary file added img/neuron.psd
Binary file not shown.
Binary file added img/simple-neuron.png
Binary file added img/sine-approximation.png
Binary file added img/square-letters.png
Binary file added img/three-node-ex.png
Binary file added img/threenodeex.png
Binary file added img/unknown-error.psd
Binary file not shown.
Binary file added img/weighted-sum-error.psd
Binary file not shown.
Binary file added img/weightedsumerror.png
Binary file added neural-networks.tar.gz
Binary file not shown.
113 changes: 113 additions & 0 deletions neuralnetwork-tests.py
@@ -0,0 +1,113 @@
from unittest import *
from neuralnetwork import *

def binaryNumbersTest():
    network = Network()
    inputNodes = [InputNode(i) for i in range(3)]
    hiddenNodes = [Node() for i in range(3)]
    outputNode = Node()

    # weights are all randomized
    for inputNode in inputNodes:
        for node in hiddenNodes:
            Edge(inputNode, node)

    for node in hiddenNodes:
        Edge(node, outputNode)

    network.outputNode = outputNode
    network.inputNodes.extend(inputNodes)

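    # train the network to recognize even three-bit binary numbers:
    # the label is 1 when the last bit is 0 (even) and 0 when it is 1 (odd)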
    labeledExamples = [((0,0,0), 1),
                       ((0,0,1), 0),
                       ((0,1,0), 1),
                       ((0,1,1), 0),
                       ((1,0,0), 1),
                       ((1,0,1), 0),
                       ((1,1,0), 1),
                       ((1,1,1), 0)]
    network.train(labeledExamples, maxIterations=5000)

    # test for consistency
    for number, isEven in labeledExamples:
        print "Error for %r is %0.4f. Output was:%0.4f" % (number, isEven - network.evaluate(number), network.evaluate(number))


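# Build a fully connected feed-forward network: numInputs input nodes, numHiddenLayers
# hidden layers of numInEachLayer nodes each, and a single output node, with randomly
# weighted edges between consecutive layers.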
def makeNetwork(numInputs, numHiddenLayers, numInEachLayer):
    network = Network()
    inputNodes = [InputNode(i) for i in range(numInputs)]
    outputNode = Node()
    network.outputNode = outputNode
    network.inputNodes.extend(inputNodes)

    layers = [[Node() for _ in range(numInEachLayer)] for _ in range(numHiddenLayers)]

    # weights are all randomized
    for inputNode in inputNodes:
        for node in layers[0]:
            Edge(inputNode, node)

    for layer1, layer2 in [(layers[i], layers[i+1]) for i in range(numHiddenLayers-1)]:
        for node1 in layer1:
            for node2 in layer2:
                Edge(node1, node2)

    for node in layers[-1]:
        Edge(node, outputNode)

    return network


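# Approximate f(x) = 0.5 * (1 + sin(x)) on [0, 4*pi] with a small network and append the
# (x, prediction) pairs to sine.txt as nested braces (presumably for plotting the fit
# with an external tool).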
def sineTest(numLayers, numNodes):
    import math
    import random

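    # rescale sin(x) from [-1, 1] into [0, 1] so the target lies in the sigmoid's
    # output range; domain() samples 100 points uniformly at random from [0, 4*pi]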
    f = lambda x: 0.5 * (1.0 + math.sin(x))
    domain = lambda: [random.random()*math.pi*4 for _ in range(100)]

    network = makeNetwork(1, numLayers, numNodes)
    labeledExamples = [((x,), f(x)) for x in domain()]
    network.train(labeledExamples, learningRate=0.25, maxIterations=100000)

    errors = [abs(f(x) - network.evaluate((x,))) for x in domain()]
    print "Avg error: %.4f" % (sum(errors) * 1.0 / len(errors))

    with open('sine.txt', 'a') as theFile:
        vals = tuple((x,network.evaluate((x,))) for x in domain())
        line = "{%s},\n" % (",".join(["{%s}" % ",".join([str(n) for n in x]) for x in vals]),)
        theFile.write(line)


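# Train a two-hidden-layer network on the 256-feature handwritten digit examples in
# digits.dat, holding out the last 500 shuffled examples for testing.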
def digitsTest():
    import random
    network = makeNetwork(256, 2, 15)

    digits = []

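    # each line of digits.dat holds a space-separated 256-value example, then a comma
    # and its digit class; the class is divided by 9 so the target lies in [0, 1],
    # matching the single sigmoid output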
    with open('digits.dat', 'r') as dataFile:
        for line in dataFile:
            (exampleStr, classStr) = line.split(',')
            digits.append(([int(x) for x in exampleStr.split()], float(classStr) / 9))

    random.shuffle(digits)
    trainingData, testData = digits[:-500], digits[-500:]

    network.train(trainingData, learningRate=0.5, maxIterations=100000)
    errors = [abs(testPt[-1] - round(network.evaluate(testPt[0]))) for testPt in testData]
    print "Average error: %.4f" % (sum(errors)*1.0 / len(errors))


if __name__ == "__main__":
    #binaryNumbersTest()

    print "Sine"
    with open('sine.txt','w') as theFile:
        theFile.write("{")

    sineTest(1, 20)

    with open('sine.txt','a') as theFile:
        theFile.write("}\n")

    print "Digits"
    digitsTest()
153 changes: 153 additions & 0 deletions neuralnetwork.py
@@ -0,0 +1,153 @@
import random
import math

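# The sigmoid squashes any real input into (0, 1). Its derivative is
# sigma(x) * (1 - sigma(x)), which is why updateWeights below multiplies by
# lastOutput * (1 - lastOutput).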
def activationFunction(x):
    return 1.0 / (1.0 + math.exp(-x))

class Node:
    def __init__(self):
        self.lastOutput = None
        self.lastInput = None
        self.error = None
        self.outgoingEdges = []
        self.incomingEdges = []
        self.addBias()

    def addBias(self):
        self.incomingEdges.append(Edge(BiasNode(), self))

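    # evaluate memoizes lastOutput, so a node shared by several paths is only computed
    # once per forward pass; Network.evaluate clears this cache before each pass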
    def evaluate(self, inputVector):
        if self.lastOutput is not None:
            return self.lastOutput

        self.lastInput = []
        weightedSum = 0

        for e in self.incomingEdges:
            theInput = e.source.evaluate(inputVector)
            self.lastInput.append(theInput)
            weightedSum += e.weight * theInput

        self.lastOutput = activationFunction(weightedSum)
        self.evaluateCache = self.lastOutput
        return self.lastOutput

    def getError(self, label):
        ''' Get the error for a given node in the network. If the node is an
        output node, label will be used to compute the error. For an input node, we
        simply ignore the error. '''

        if self.error is not None:
            return self.error

        assert self.lastOutput is not None

        if self.outgoingEdges == []:  # this is an output node
            self.error = label - self.lastOutput
        else:
            self.error = sum([edge.weight * edge.target.getError(label) for edge in self.outgoingEdges])

        return self.error

    def updateWeights(self, learningRate):
        ''' Update the weights of a node, and all of its successor nodes.
        Assume self is not an InputNode. If the error, lastOutput, and
        lastInput are None, then this node has already been updated. '''

        if (self.error is not None and self.lastOutput is not None
                and self.lastInput is not None):

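            # gradient step for each incoming edge:
            # delta_w = learningRate * sigma'(weightedSum) * error * input,
            # with sigma'(weightedSum) = lastOutput * (1 - lastOutput)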
            for i, edge in enumerate(self.incomingEdges):
                edge.weight += (learningRate * self.lastOutput * (1 - self.lastOutput) *
                                self.error * self.lastInput[i])

            for edge in self.outgoingEdges:
                edge.target.updateWeights(learningRate)

            self.error = None
            self.lastInput = None
            self.lastOutput = None

    def clearEvaluateCache(self):
        if self.lastOutput is not None:
            self.lastOutput = None
            for edge in self.incomingEdges:
                edge.source.clearEvaluateCache()


class InputNode(Node):
    ''' Input nodes simply evaluate to the value of the input for that index.
    As such, each input node must specify an index. We allow multiple copies
    of an input node with the same index (why not?). '''

    def __init__(self, index):
        Node.__init__(self)
        self.index = index

    def evaluate(self, inputVector):
        self.lastOutput = inputVector[self.index]
        return self.lastOutput

    def updateWeights(self, learningRate):
        for edge in self.outgoingEdges:
            edge.target.updateWeights(learningRate)

    def getError(self, label):
        for edge in self.outgoingEdges:
            edge.target.getError(label)

    def addBias(self):
        pass

    def clearEvaluateCache(self):
        self.lastOutput = None


class BiasNode(InputNode):
    def __init__(self):
        Node.__init__(self)

    def evaluate(self, inputVector):
        return 1.0


class Edge:
    def __init__(self, source, target):
        self.weight = random.uniform(0, 1)
        self.source = source
        self.target = target

        # attach the edge to its source and target nodes
        source.outgoingEdges.append(self)
        target.incomingEdges.append(self)


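# A Network wraps a list of input nodes and a single output node; evaluation, error
# propagation, and weight updates all recurse through the node graph from these entry
# points.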
class Network:
    def __init__(self):
        self.inputNodes = []
        self.outputNode = None

    def evaluate(self, inputVector):
        assert max([v.index for v in self.inputNodes]) < len(inputVector)
        self.outputNode.clearEvaluateCache()

        output = self.outputNode.evaluate(inputVector)
        return output

    def propagateError(self, label):
        for node in self.inputNodes:
            node.getError(label)

    def updateWeights(self, learningRate):
        for node in self.inputNodes:
            node.updateWeights(learningRate)

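    # online training: each "iteration" is a full pass over labeledExamples, with a
    # forward pass, error propagation, and one gradient step per example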
    def train(self, labeledExamples, learningRate=0.9, maxIterations=10000):
        while maxIterations > 0:
            for example, label in labeledExamples:
                output = self.evaluate(example)
                self.propagateError(label)
                self.updateWeights(learningRate)

            maxIterations -= 1

Binary file added neuralnetwork.pyc
Binary file not shown.

0 comments on commit e08c17e
