import tensorflow as tf
import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.layers import Conv2D, MaxPooling2D, Flatten
from keras.optimizers import SGD, Adam
from keras.utils import np_utils
from keras.datasets import mnist
# categorical_crossentropy

def load_data():
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    number = 10000  # use only the first 10000 training samples
    x_train = x_train[0:number]
    y_train = y_train[0:number]
    x_train = x_train.reshape(number, 28*28)
    x_test = x_test.reshape(x_test.shape[0], 28*28)
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    # convert class vectors to binary class matrices
    y_train = np_utils.to_categorical(y_train, 10)
    y_test = np_utils.to_categorical(y_test, 10)
    # x_test = np.random.normal(x_test)
    x_train = x_train / 255
    x_test = x_test / 255
    return (x_train, y_train), (x_test, y_test)

(x_train, y_train), (x_test, y_test) = load_data()

model = Sequential()
model.add(Dense(input_dim=28*28, units=689, activation='sigmoid'))  # first hidden layer
model.add(Dense(units=689, activation='sigmoid'))                   # second hidden layer

# for i in range(10):
#     model.add(Dense(units=689, activation='sigmoid'))

model.add(Dense(units=689, activation='sigmoid'))  # third hidden layer
model.add(Dense(units=10, activation='softmax'))   # output layer

model.compile(loss='mse', optimizer=SGD(lr=0.1), metrics=['accuracy'])

model.fit(x_train, y_train, batch_size=100, epochs=20)

result = model.evaluate(x_test, y_test)
print('Test Acc:', result[1])
The basic implementation works, but the parameters need tuning; the recognition accuracy is low.
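To see whether the low accuracy comes from the loss/optimizer choice or from overfitting, one quick check is to evaluate the training set as well (a small diagnostic sketch, reusing the model, x_train, y_train, x_test and y_test defined above):

train_result = model.evaluate(x_train, y_train, batch_size=100)
test_result = model.evaluate(x_test, y_test, batch_size=100)
print('Train Acc:', train_result[1])  # low train accuracy -> the model/loss/optimizer itself is the problem
print('Test Acc:', test_result[1])    # high train but low test accuracy -> overfitting

With mse loss, sigmoid activations and plain SGD, the training accuracy is itself low, which is why the next version changes those three pieces rather than only adding regularization.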
Program optimization
import tensorflow as tf
import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.layers import Conv2D, MaxPooling2D, Flatten
from keras.optimizers import SGD, Adam
from keras.utils import np_utils
from keras.datasets import mnist
# categorical_crossentropy

def load_data():
    (x_train, y_train), (x_test, y_test) = mnist.load_data()
    number = 10000  # use only the first 10000 training samples
    x_train = x_train[0:number]
    y_train = y_train[0:number]
    x_train = x_train.reshape(number, 28*28)
    x_test = x_test.reshape(x_test.shape[0], 28*28)
    x_train = x_train.astype('float32')
    x_test = x_test.astype('float32')
    # convert class vectors to binary class matrices
    y_train = np_utils.to_categorical(y_train, 10)
    y_test = np_utils.to_categorical(y_test, 10)
    x_train = x_train / 255
    x_test = x_test / 255
    # replace the test images with Gaussian-noise samples centred on them (robustness check)
    x_test = np.random.normal(x_test)
    return (x_train, y_train), (x_test, y_test)

(x_train, y_train), (x_test, y_test) = load_data()

model = Sequential()
model.add(Dense(input_dim=28*28, units=689, activation='relu'))  # first hidden layer: sigmoid replaced with relu
model.add(Dropout(0.7))  # dropout after every hidden layer; usually added when overfitting appears
model.add(Dense(units=689, activation='relu'))                   # second hidden layer
model.add(Dropout(0.7))

# for i in range(10):
#     model.add(Dense(units=689, activation='relu'))

model.add(Dense(units=689, activation='relu'))   # third hidden layer
model.add(Dropout(0.7))
model.add(Dense(units=10, activation='softmax')) # output layer

# loss changed to categorical_crossentropy, optimizer changed to adam
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

model.fit(x_train, y_train, batch_size=100, epochs=20)

result0 = model.evaluate(x_train, y_train, batch_size=100)
print('Train Acc:', result0[1])

result = model.evaluate(x_test, y_test, batch_size=100)
print('Test Acc:', result[1])
Compared with the first program, sigmoid is replaced with ReLU, the loss function is switched to categorical_crossentropy, and the optimizer is changed to Adam; the test accuracy reaches about 96%.
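For reference, the same optimized network can be written against the tf.keras API (a minimal sketch, assuming TensorFlow 2.x, where keras.utils.np_utils and the standalone keras optimizers with lr= are deprecated); the layer sizes, dropout rate, loss and optimizer are kept identical to the program above:

import tensorflow as tf

(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
x_train = x_train.reshape(-1, 28 * 28).astype('float32') / 255
x_test = x_test.reshape(-1, 28 * 28).astype('float32') / 255
y_train = tf.keras.utils.to_categorical(y_train, 10)
y_test = tf.keras.utils.to_categorical(y_test, 10)

model = tf.keras.Sequential([
    tf.keras.layers.Dense(689, activation='relu', input_dim=28 * 28),
    tf.keras.layers.Dropout(0.7),
    tf.keras.layers.Dense(689, activation='relu'),
    tf.keras.layers.Dropout(0.7),
    tf.keras.layers.Dense(689, activation='relu'),
    tf.keras.layers.Dropout(0.7),
    tf.keras.layers.Dense(10, activation='softmax'),
])
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(x_train, y_train, batch_size=100, epochs=20)
print('Test Acc:', model.evaluate(x_test, y_test, batch_size=100)[1])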