20201019-Keras-2
2020-10-20
野山羊骑士
Keras Notes
Handwritten digit recognition.
A small example that skips CNNs and uses just two fully connected layers.
Plain and simple, just a demo!
Reference: https://www.bilibili.com/video/BV1gE411R7jd?p=8
import keras
from keras import layers
import matplotlib.pyplot as plt
%matplotlib inline
import os
os.environ["CUDA_VISIBLE_DEVICES"]="-1"
import keras.datasets.mnist as mnist
(train_image,train_label),(test_image,test_label) = mnist.load_data()
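The raw pixel values (integers 0-255) are fed to the network as-is here, which is likely why the first-epoch loss below starts out so large. A common extra step, not used in this notebook, is to scale the pixels to the [0, 1] range; a minimal sketch (the *_scaled names are just for illustration):
train_image_scaled = train_image / 255.0  # scale 0-255 pixel values to [0, 1]
test_image_scaled = test_image / 255.0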
print(train_image.shape)
print(train_label.shape)
plt.imshow(train_image[0])  # display the first training image
print(train_label[0])       # and its label
(60000, 28, 28)
(60000,)
5
[Figure: the first training image, the handwritten digit 5]
model = keras.Sequential()
model.add(layers.Flatten())                        # flatten the input: (60000, 28, 28) --> (60000, 28*28)
model.add(layers.Dense(64, activation='relu'))     # hidden layer: 64 fully connected units
model.add(layers.Dense(10, activation='softmax'))  # output layer: 10 classes (digits 0-9)
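With the code above, the model is only built when fit() is called, so model.summary() would fail before training. If you want to inspect layer shapes and parameter counts up front, one variation (not part of the original notebook; the model2 name is just for illustration) is to give the first layer an input_shape:
model2 = keras.Sequential()
model2.add(layers.Flatten(input_shape=(28, 28)))    # built immediately: 28x28 input -> 784 values
model2.add(layers.Dense(64, activation='relu'))     # 784*64 + 64 = 50,240 parameters
model2.add(layers.Dense(10, activation='softmax'))  # 64*10 + 10 = 650 parameters
model2.summary()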
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['acc'])
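sparse_categorical_crossentropy is the right loss here because train_label holds plain integer class indices (0-9). If the labels were one-hot encoded instead, you would switch to categorical_crossentropy; the conversion would look roughly like this (train_label_onehot is a hypothetical name, not used in the rest of the notebook):
from keras.utils import to_categorical
train_label_onehot = to_categorical(train_label)  # (60000,) -> (60000, 10); pair with loss='categorical_crossentropy'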
model.fit(train_image,train_label,epochs=50,batch_size=200)
Epoch 1/50
300/300 [==============================] - 1s 3ms/step - loss: 4.6570 - acc: 0.7502
Epoch 2/50
300/300 [==============================] - 1s 3ms/step - loss: 0.6366 - acc: 0.8559
Epoch 3/50
300/300 [==============================] - 1s 3ms/step - loss: 0.4424 - acc: 0.8951
Epoch 4/50
300/300 [==============================] - 1s 3ms/step - loss: 0.3547 - acc: 0.9128
Epoch 5/50
300/300 [==============================] - 1s 3ms/step - loss: 0.2996 - acc: 0.9240
Epoch 6/50
300/300 [==============================] - 1s 3ms/step - loss: 0.2564 - acc: 0.9332
Epoch 7/50
300/300 [==============================] - 1s 3ms/step - loss: 0.2287 - acc: 0.9395
Epoch 8/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1997 - acc: 0.9462
Epoch 9/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1820 - acc: 0.9506
Epoch 10/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1677 - acc: 0.9546
Epoch 11/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1534 - acc: 0.9576
Epoch 12/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1453 - acc: 0.9588
Epoch 13/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1400 - acc: 0.9616
Epoch 14/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1284 - acc: 0.9641
Epoch 15/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1235 - acc: 0.9647
Epoch 16/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1219 - acc: 0.9655
Epoch 17/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1185 - acc: 0.9670
Epoch 18/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1105 - acc: 0.9688
Epoch 19/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1075 - acc: 0.9700
Epoch 20/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1066 - acc: 0.9699
Epoch 21/50
300/300 [==============================] - 1s 3ms/step - loss: 0.1033 - acc: 0.9707
Epoch 22/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0994 - acc: 0.9723
Epoch 23/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0942 - acc: 0.9729
Epoch 24/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0977 - acc: 0.9714
Epoch 25/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0903 - acc: 0.9746
Epoch 26/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0922 - acc: 0.9737
Epoch 27/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0932 - acc: 0.9735
Epoch 28/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0883 - acc: 0.9751
Epoch 29/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0820 - acc: 0.9753
Epoch 30/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0835 - acc: 0.9755
Epoch 31/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0795 - acc: 0.9769
Epoch 32/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0817 - acc: 0.9760
Epoch 33/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0754 - acc: 0.9781
Epoch 34/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0729 - acc: 0.9782
Epoch 35/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0763 - acc: 0.9777
Epoch 36/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0749 - acc: 0.9780
Epoch 37/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0748 - acc: 0.9783
Epoch 38/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0675 - acc: 0.9798
Epoch 39/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0659 - acc: 0.9806
Epoch 40/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0679 - acc: 0.9794
Epoch 41/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0660 - acc: 0.9806
Epoch 42/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0604 - acc: 0.9821
Epoch 43/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0635 - acc: 0.9810
Epoch 44/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0657 - acc: 0.9811
Epoch 45/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0642 - acc: 0.9811
Epoch 46/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0614 - acc: 0.9816
Epoch 47/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0623 - acc: 0.9815
Epoch 48/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0610 - acc: 0.9826
Epoch 49/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0546 - acc: 0.9840
Epoch 50/50
300/300 [==============================] - 1s 3ms/step - loss: 0.0613 - acc: 0.9829
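model.fit() returns a History object. It is not captured above, but if the call had been written as history = model.fit(...), the per-epoch curves could be plotted directly (a sketch under that assumption):
# assuming the training call above had been: history = model.fit(train_image, train_label, epochs=50, batch_size=200)
plt.plot(history.history['loss'], label='loss')  # training loss per epoch
plt.plot(history.history['acc'], label='acc')    # training accuracy per epoch
plt.xlabel('epoch')
plt.legend()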
model.evaluate(test_image,test_label)
313/313 [==============================] - 0s 1ms/step - loss: 0.3047 - acc: 0.9573
[0.3046773076057434, 0.9573000073432922]
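Training accuracy ends around 98.3% while test accuracy is about 95.7%, so this small fully connected network overfits slightly. To turn the 10 softmax outputs into digit predictions, take the argmax of each row; a minimal sketch:
import numpy as np
pred = model.predict(test_image[:5])  # shape (5, 10): class probabilities for 5 test images
print(np.argmax(pred, axis=1))        # predicted digits
print(test_label[:5])                 # ground-truth digits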