###############################################################################
## The DL (Deep Learning) Hello World Program
## References:
## https://www.tensorflow.org/tutorials/
## https://medium.com/the-andela-way/deep-learning-hello-world-e1fc53ea888
###############################################################################
import tensorflow as tf

# Load MNIST via tf.keras rather than the standalone `keras` package:
# mixing the two APIs in one script can raise version-incompatibility
# errors, and tf.keras ships the same dataset loader.
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()

# Scale pixel intensities from integer [0, 255] to float [0.0, 1.0]
# so gradient descent behaves well.
x_train, x_test = x_train / 255.0, x_test / 255.0

# Minimal feed-forward classifier:
#   flatten 28x28 image -> 512-unit ReLU -> 20% dropout -> 10-way softmax.
# Declaring input_shape lets the model be built (and summarized) before fit().
model = tf.keras.models.Sequential([
    tf.keras.layers.Flatten(input_shape=(28, 28)),
    tf.keras.layers.Dense(512, activation=tf.nn.relu),
    tf.keras.layers.Dropout(0.2),
    tf.keras.layers.Dense(10, activation=tf.nn.softmax),
])

# sparse_categorical_crossentropy accepts the integer labels (0-9)
# directly, so no one-hot encoding of y_train/y_test is needed.
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])

model.fit(x_train, y_train, epochs=5)

# Capture and report the held-out metrics instead of discarding the
# [loss, accuracy] list that evaluate() returns (see output log below).
test_loss, test_acc = model.evaluate(x_test, y_test)
print(f"Test loss: {test_loss:.4f}  Test accuracy: {test_acc:.4f}")
###############################################################################
# Output
###############################################################################
# Using TensorFlow backend.
# Downloading data from https://s3.amazonaws.com/img-datasets/mnist.npz
# 11493376/11490434 [==============================] - 1s 0us/step
# Epoch 1/5
# 60000/60000 [==============================] - 14s 236us/step - loss: 0.2027 - acc: 0.9406
# Epoch 2/5
# 60000/60000 [==============================] - 14s 225us/step - loss: 0.0805 - acc: 0.9756
# Epoch 3/5
# 60000/60000 [==============================] - 13s 222us/step - loss: 0.0517 - acc: 0.9839
# Epoch 4/5
# 60000/60000 [==============================] - 14s 227us/step - loss: 0.0370 - acc: 0.9883
# Epoch 5/5
# 60000/60000 [==============================] - 13s 224us/step - loss: 0.0262 - acc: 0.9917
# 10000/10000 [==============================] - 1s 51us/step
# [0.07304962697861483, 0.9789]
# Source blog post metadata (commented out — the bare text below was
# page chrome scraped with the article and is a SyntaxError in Python):
# Thursday, 17 January 2019
# Deep Learning Hello World Program
# Subscribe to: Post Comments (Atom)
# No comments:
# Post a Comment