
Commit efec6ff

add a plotter and modify network 1 to use it
1 parent ea229ac commit efec6ff

4 files changed: 76 additions & 15 deletions

MyPlotter.py

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+from numpy import asarray
+from matplotlib import pyplot
+import numpy as np
+from matplotlib.pyplot import plot, draw, show
+
+
+class MyPloter(object):
+    def __init__(self, framesize, max_rows, max_columns):
+        self.__data_array = np.zeros((framesize * max_rows, framesize * max_columns))
+        self.__framesize = framesize
+        self.__max_rows = max_rows
+        self.__max_columns = max_columns
+        pyplot.ion()
+
+
+    def add(self, pic, row, column):
+        if row >= self.__max_rows or column >= self.__max_columns:
+            return
+        self.__data_array[row*self.__framesize:(row+1)*self.__framesize, column*self.__framesize:(column+1)*self.__framesize] = pic
+
+    def show(self):
+        pyplot.imshow(self.__data_array, cmap='gray', vmin=0, vmax=255)
+        pyplot.show()
+        pyplot.pause(0.001)

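Read on its own, MyPloter tiles fixed-size grayscale frames into a single max_rows by max_columns canvas and redraws it without blocking (pyplot.ion() up front, pyplot.pause() after each show). A minimal standalone sketch of how it can be driven, independent of the rest of the commit — the random frames below are illustrative stand-ins, not part of the change:

import numpy as np
import MyPlotter

# 28x28 frames on a 2-row by 3-column canvas (the commit itself uses 28, 20, 20)
plotter = MyPlotter.MyPloter(28, 2, 3)
for row in range(2):
    for column in range(3):
        frame = np.random.rand(28, 28) * 255.0  # stand-in for an MNIST digit
        plotter.add(frame, row, column)
plotter.show()  # non-blocking: imshow plus pause(0.001) in interactive mode

Note that add() silently drops any frame whose row or column index falls outside the grid; callers are not warned.
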
demo1.py

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
+import mnist_loader
+
+training_data, validation_data, test_data = mnist_loader.load_data_wrapper()
+training_data = list(training_data)
+
+# ---------------------
+# - network.py example:
+import network
+
+
+net = network.Network([784, 28, 10])
+net.SGD(training_data, 20, 10, 3.0, test_data=test_data)
+input("any key to continue")

network.py

Lines changed: 23 additions & 4 deletions
@@ -16,9 +16,13 @@
 # Standard library
 import random
 
+
+
 # Third-party libraries
 import numpy as np
 
+import MyPlotter
+
 class Network(object):
 
     def __init__(self, sizes):
@@ -35,8 +39,8 @@ def __init__(self, sizes):
         self.num_layers = len(sizes)
         self.sizes = sizes
         self.biases = [np.random.randn(y, 1) for y in sizes[1:]]
-        self.weights = [np.random.randn(y, x)
-                        for x, y in zip(sizes[:-1], sizes[1:])]
+        self.weights = [np.random.randn(y, x) for x, y in zip(sizes[:-1], sizes[1:])]
+        self.plotter = MyPlotter.MyPloter(28, 20, 20)
 
     def feedforward(self, a):
         """Return the output of the network if ``a`` is input."""
@@ -70,7 +74,7 @@ def SGD(self, training_data, epochs, mini_batch_size, eta,
             for mini_batch in mini_batches:
                 self.update_mini_batch(mini_batch, eta)
             if test_data:
-                print("Epoch {} : {} / {}".format(j,self.evaluate(test_data),n_test));
+                print("Epoch {} : {} / {}".format(j,self.evaluate(test_data, j),n_test))
             else:
                 print("Epoch {} complete".format(j))
 
@@ -125,13 +129,28 @@ def backprop(self, x, y):
             nabla_w[-l] = np.dot(delta, activations[-l-1].transpose())
         return (nabla_b, nabla_w)
 
-    def evaluate(self, test_data):
+    def evaluate(self, test_data, epoch):
         """Return the number of test inputs for which the neural
         network outputs the correct result. Note that the neural
         network's output is assumed to be the index of whichever
         neuron in the final layer has the highest activation."""
         test_results = [(np.argmax(self.feedforward(x)), y)
                         for (x, y) in test_data]
+
+        count = 0
+        for result, data in zip(test_results, test_data):
+            if count == 30:
+                break
+            (x,y) = result
+            if x != y:
+                (a,_) = data
+                a = a.reshape(28,28)
+                self.plotter.add(a*255.0, epoch, count)
+                count += 1
+        self.plotter.show()
+
+
+
         return sum(int(x == y) for (x, y) in test_results)
 
     def cost_derivative(self, output_activations, y):
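
The new block in evaluate is the heart of the commit: after scoring the test set, it walks the results alongside the raw test_data, picks out misclassified images, and drops up to 30 of them into the plot grid, using the epoch number as the row index. A standalone sketch of the same flow (the helper name is ours, for illustration; shapes assume the MNIST loader's 784x1 column vectors with pixel values in [0, 1]):

def plot_misclassified(plotter, test_results, test_data, epoch, limit=30):
    # test_results holds (predicted, actual) pairs; test_data holds (image, label) pairs
    count = 0
    for (predicted, actual), (image, _) in zip(test_results, test_data):
        if count == limit:
            break
        if predicted != actual:
            # rescale [0, 1] pixels to [0, 255] to match the plotter's vmin/vmax
            plotter.add(image.reshape(28, 28) * 255.0, row=epoch, column=count)
            count += 1
    plotter.show()

Two quiet limits are worth noting: the grid has only 20 columns, so MyPloter.add silently ignores misclassifications 21 through 30 within an epoch, and any epoch index past row 19 is dropped the same way.
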

test.py

Lines changed: 16 additions & 11 deletions
@@ -17,19 +17,22 @@
 
 # ----------------------
 # - read the input data:
-'''
+
+
 import mnist_loader
+
 training_data, validation_data, test_data = mnist_loader.load_data_wrapper()
 training_data = list(training_data)
-'''
+
 # ---------------------
 # - network.py example:
-#import network
+import network
 
-'''
-net = network.Network([784, 30, 10])
+
+net = network.Network([784, 28, 10])
 net.SGD(training_data, 30, 10, 3.0, test_data=test_data)
-'''
+
+
 
 # ----------------------
 # - network2.py example:
@@ -148,18 +151,18 @@ def testTheano():
     else:
         print('Used the gpu')
 # Perform check:
-#testTheano()
+# testTheano()
 
 
 # ----------------------
 # - network3.py example:
-import network3
-from network3 import Network, ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer # softmax plus log-likelihood cost is more common in modern image classification networks.
+#import network3
+#from network3 import Network, ConvPoolLayer, FullyConnectedLayer, SoftmaxLayer # softmax plus log-likelihood cost is more common in modern image classification networks.
 
 # read data:
-training_data, validation_data, test_data = network3.load_data_shared()
+#training_data, validation_data, test_data = network3.load_data_shared()
 # mini-batch size:
-mini_batch_size = 10
+#mini_batch_size = 10
 
 # chapter 6 - shallow architecture using just a single hidden layer, containing 100 hidden neurons.
 '''
@@ -195,6 +198,7 @@ def testTheano():
 '''
 
 # chapter 6 - rectified linear units and some l2 regularization (lmbda=0.1) => even better accuracy
+'''
 from network3 import ReLU
 net = Network([
     ConvPoolLayer(image_shape=(mini_batch_size, 1, 28, 28),
@@ -208,3 +212,4 @@ def testTheano():
     FullyConnectedLayer(n_in=40*4*4, n_out=100, activation_fn=ReLU),
     SoftmaxLayer(n_in=100, n_out=10)], mini_batch_size)
 net.SGD(training_data, 60, mini_batch_size, 0.03, validation_data, test_data, lmbda=0.1)
+'''
