path: root/src/python/experiments/nn.py
#!/usr/bin/env python

# Import numpy for using matrix operations
import numpy as np

# define the sigmoid activation function; with deriv=True it returns the
# derivative, assuming x is already a sigmoid output
def sigmoid(x, deriv=False):
    if deriv:
        return x * (1 - x)
    return 1 / (1 + np.exp(-x))
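
# A minimal, optional sanity check (an assumption, not part of the original
# script): compare the derivative convention above against a central finite
# difference. Note that sigmoid(x, deriv=True) expects x to already be a
# sigmoid *output*, not a raw input.
def _check_sigmoid_derivative(z=0.5, eps=1e-6):
    s = sigmoid(z)
    numeric = (sigmoid(z + eps) - sigmoid(z - eps)) / (2 * eps)
    analytic = sigmoid(s, deriv=True)  # pass the activation, not the raw input
    return abs(numeric - analytic)  # should be close to zero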

# training data: the first three columns are inputs, the last is the target
# 0 0 1 | 0
# 0 1 1 | 0
# 1 0 1 | 1
# 1 1 1 | 1

# define the input matrix (one training example per row)
X = np.array([[0, 0, 1],
              [0, 1, 1],
              [1, 0, 1],
              [1, 1, 1]])

# define the target outputs as a column vector
y = np.array([[0, 0, 1, 1]]).T

# seed numpy's random number generator so the run is reproducible
np.random.seed(1)

# initialise the weights of the single output neuron with values in [-1, 1);
# one weight per input feature, one column to match the single target column
syn0 = 2 * np.random.random((3, 1)) - 1
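
# Optional shape sanity checks (an assumption, not in the original script):
# the weight matrix needs one row per input feature, and there must be one
# target row per training example.
assert X.shape[1] == syn0.shape[0]
assert y.shape[0] == X.shape[0]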

# set the number of training iterations
n = 100000

# training loop
for i in range(n):
    # define the first layer
    l0 = X

    # forward pass: the second layer is the sigmoid of the weighted inputs
    l1 = sigmoid(np.dot(l0, syn0))

    # calculate the error between the targets and the predictions
    l1_error = y - l1

    # weight the error by the slope of the sigmoid at l1, so confident
    # predictions are adjusted less than uncertain ones
    l1_delta = l1_error * sigmoid(l1, True)

    # adjust the weights
    syn0 += np.dot(l0.T,l1_delta)

    # periodically print the current predictions
    if i % (n // 10000) == 0:
        print("l1: " + str(l1))

print("")
print("Output After Training:")
print(l1)
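
# A small optional diagnostic (an assumption, not in the original script):
# report the mean absolute error of the final predictions against the targets.
print("Mean absolute error: " + str(np.mean(np.abs(y - l1))))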

# run the trained network on an input it has not seen during training
l0 = np.array([0, 1, 0])
l1 = sigmoid(np.dot(l0, syn0))
print(l1)
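
# Hypothetical helper (an assumption, not part of the original): wrap the
# forward pass so arbitrary inputs can be classified with the trained weights.
def predict(inputs, weights=None):
    """Run the forward pass for one or more input rows."""
    w = syn0 if weights is None else weights
    return sigmoid(np.dot(np.atleast_2d(inputs), w))

# Example: the network should have learned that the output tracks the first
# input column, so this should print a value close to 1.
print(predict([1, 0, 0]))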