Skip to content

Commit 107d2eb

Browse files
qoocrabkkweon
authored and committed
Update lab-06-1-softmax_classifier.py (hunkim#232)
* Update lab-06-1-softmax_classifier.py: more simplified code and added missing output. * Update lab-06-1-softmax_classifier.py
1 parent 796da36 commit 107d2eb

1 file changed

Lines changed: 26 additions & 19 deletions

File tree

lab-06-1-softmax_classifier.py

Lines changed: 26 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -40,41 +40,48 @@
4040
sess.run(tf.global_variables_initializer())
4141

4242
for step in range(2001):
43-
sess.run(optimizer, feed_dict={X: x_data, Y: y_data})
44-
if step % 200 == 0:
45-
print(step, sess.run(cost, feed_dict={X: x_data, Y: y_data}))
43+
_, cost_val = sess.run([optimizer, cost], feed_dict={X: x_data, Y: y_data})
4644

47-
print('--------------')
45+
if step % 200 == 0:
46+
print(step, cost_val)
4847

48+
print('--------------')
4949
# Testing & One-hot encoding
5050
a = sess.run(hypothesis, feed_dict={X: [[1, 11, 7, 9]]})
5151
print(a, sess.run(tf.argmax(a, 1)))
5252

5353
print('--------------')
54-
5554
b = sess.run(hypothesis, feed_dict={X: [[1, 3, 4, 3]]})
5655
print(b, sess.run(tf.argmax(b, 1)))
5756

5857
print('--------------')
59-
6058
c = sess.run(hypothesis, feed_dict={X: [[1, 1, 0, 1]]})
6159
print(c, sess.run(tf.argmax(c, 1)))
6260

6361
print('--------------')
64-
65-
all = sess.run(hypothesis, feed_dict={
66-
X: [[1, 11, 7, 9], [1, 3, 4, 3], [1, 1, 0, 1]]})
62+
all = sess.run(hypothesis, feed_dict={X: [[1, 11, 7, 9], [1, 3, 4, 3], [1, 1, 0, 1]]})
6763
print(all, sess.run(tf.argmax(all, 1)))
6864

6965
'''
70-
--------------
71-
[[ 1.38904958e-03 9.98601854e-01 9.06129117e-06]] [1]
72-
--------------
73-
[[ 0.93119204 0.06290206 0.0059059 ]] [0]
74-
--------------
75-
[[ 1.27327668e-08 3.34112905e-04 9.99665856e-01]] [2]
76-
--------------
77-
[[ 1.38904958e-03 9.98601854e-01 9.06129117e-06]
78-
[ 9.31192040e-01 6.29020557e-02 5.90589503e-03]
79-
[ 1.27327668e-08 3.34112905e-04 9.99665856e-01]] [1 0 2]
66+
0 6.926112
67+
200 0.6005015
68+
400 0.47295815
69+
600 0.37342924
70+
800 0.28018373
71+
1000 0.23280522
72+
1200 0.21065344
73+
1400 0.19229904
74+
1600 0.17682323
75+
1800 0.16359556
76+
2000 0.15216158
77+
--------------
78+
[[1.3890490e-03 9.9860185e-01 9.0613084e-06]] [1]
79+
--------------
80+
[[0.9311919 0.06290216 0.00590591]] [0]
81+
--------------
82+
[[1.2732815e-08 3.3411323e-04 9.9966586e-01]] [2]
83+
--------------
84+
[[1.3890490e-03 9.9860185e-01 9.0613084e-06]
85+
[9.3119192e-01 6.2902197e-02 5.9059085e-03]
86+
[1.2732815e-08 3.3411323e-04 9.9966586e-01]] [1 0 2]
8087
'''

0 commit comments

Comments
 (0)