herrDeng網內搜尋

自訂搜尋

Ads

2023年10月22日 星期日

AI人工智慧Keras實作神經網路模型之mnist手寫數字辨識


IPython code請進
  1. print('神經網路模型')
  2. import keras
  3. keras.__version__
  4. !nvcc --version
  5. from keras.datasets import mnist
  6. ? mnist
  7. import matplotlib.pyplot as plt
  8. import numpy as np
  9. (X_train, y_train), (X_test, y_test)=mnist.load_data()
  10. X_train.shape
  11. y_train.shape
  12. np.set_printoptions(linewidth=np.inf)
  13. X_train[7]
  14. y_train[0:10]
  15. plt.figure(figsize=(5, 5))
  16. for i in range(9):
  17. plt.subplot(3, 3, i+1)
  18. plt.imshow(X_train[i])
  19. plt.savefig("nums.jpg")
  20. plt.show()
  21. X_train=X_train.reshape(60000, 784).astype('float32')
  22. X_test=X_test.reshape(10000, 784).astype('float32')
  23. #normalizing
  24. X_train/=255
  25. X_test/=255
  26. X_train[0]
  27. from keras.models import Sequential
  28. from tensorflow.keras.layers import Dense
  29. from tensorflow.keras import optimizers
  30. from tensorflow.keras.utils import *
  31. n_class=10
  32. y_train=to_categorical(y_train, n_class) #one-hot coding
  33. y_test=to_categorical(y_test, n_class)
  34. print('處理好了後面就是關鍵,剩下5個指令就好了')
  35. model=Sequential()
  36. model.add(Dense(64, activation='tanh', input_shape=(784, ))) #sigmoid, tanh, relu
  37. model.add(Dense(10, activation='softmax'))
  38. model.compile(
  39. loss='mean_squared_error',
  40. optimizer=optimizers.SGD(learning_rate=0.01), #隨機梯度下降法(Stochastic gradient descent, SGD)
  41. metrics=['accuracy']
  42. )
  43. model.fit(
  44. X_train, y_train, batch_size=128, epochs=200,
  45. verbose=1,
  46. validation_data=(X_test, y_test)
  47. )

沒有留言:

Related Posts Plugin for WordPress, Blogger...

熱門文章