rem Only needed behind a corporate proxy — replace the URL with your actual proxy address.
set HTTP_PROXY=http://proxy.example.com:8080
set HTTPS_PROXY=http://proxy.example.com:8080
rem Install TensorFlow from PyPI.
pip install tensorflow
from __future__ import absolute_import, division, print_function, unicode_literals
import tensorflow as tf
# Load the MNIST handwritten-digit dataset (downloaded and cached by Keras on first use).
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Scale pixel values from the 0-255 integer range to floats in [0, 1].
x_train, x_test = x_train / 255.0, x_test / 255.0
# Build a feed-forward classifier by stacking layers in a tf.keras Sequential model.
model = tf.keras.models.Sequential([
tf.keras.layers.Flatten(input_shape=(28, 28)), # Flatten each 28x28 2D image into a 784-element 1D vector
tf.keras.layers.Dense(128, activation='relu'), # Fully connected layer with ReLU (ramp) activation
tf.keras.layers.Dropout(0.2), # Randomly drop 20% of units during training to reduce overfitting
tf.keras.layers.Dense(10, activation='softmax') # Output layer: softmax over the 10 digit classes
])
# Choose an optimizer and loss function for training.
model.compile(optimizer='adam', # Adam gradient-based optimizer
loss='sparse_categorical_crossentropy', # cross-entropy loss for integer class labels
metrics=['accuracy'])
# Train the model for 5 passes over the training set.
model.fit(x_train, y_train, epochs=5)
# Report loss and accuracy on the held-out test set.
model.evaluate(x_test, y_test, verbose=2)
The downloaded MNIST data is cached at the following location (replace <user name> with your Windows user name):
C:\Users\<user name>\.keras\datasets
Execution result
Train on 60000 samples
Epoch 1/5
2019-11-13 14:08:58.762382: I tensorflow/core/profiler/lib/profiler_session.cc:184] Profiler session started.
60000/60000 [==============================] - 5s 82us/sample - loss: 0.2946 - accuracy: 0.9146 - val_loss: 0.1343 - val_accuracy: 0.9589
Epoch 2/5
60000/60000 [==============================] - 4s 66us/sample - loss: 0.1435 - accuracy: 0.9578 - val_loss: 0.1014 - val_accuracy: 0.9698
Epoch 3/5
60000/60000 [==============================] - 4s 67us/sample - loss: 0.1082 - accuracy: 0.9675 - val_loss: 0.0840 - val_accuracy: 0.9737
Epoch 4/5
60000/60000 [==============================] - 4s 67us/sample - loss: 0.0852 - accuracy: 0.9743 - val_loss: 0.0780 - val_accuracy: 0.9758
Epoch 5/5
60000/60000 [==============================] - 4s 66us/sample - loss: 0.0742 - accuracy: 0.9767 - val_loss: 0.0686 - val_accuracy: 0.9780
10000/1 - 0s - loss: 0.0365 - accuracy: 0.9780
from __future__ import absolute_import, division, print_function, unicode_literals
import tensorflow as tf
# Load the MNIST handwritten-digit dataset (downloaded and cached by Keras on first use).
mnist = tf.keras.datasets.mnist
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Scale pixel values from the 0-255 integer range to floats in [0, 1].
x_train, x_test = x_train / 255.0, x_test / 255.0
# Build a feed-forward classifier by stacking layers in a tf.keras Sequential model.
model = tf.keras.models.Sequential([
tf.keras.layers.Flatten(input_shape=(28, 28)), # Flatten each 28x28 2D image into a 784-element 1D vector
tf.keras.layers.Dense(128, activation='relu'), # Fully connected layer with ReLU (ramp) activation
tf.keras.layers.Dropout(0.2), # Randomly drop 20% of units during training to reduce overfitting
tf.keras.layers.Dense(10, activation='softmax') # Output layer: softmax over the 10 digit classes
])
# Choose an optimizer and loss function for training.
model.compile(optimizer='adam', # Adam gradient-based optimizer
loss='sparse_categorical_crossentropy', # cross-entropy loss for integer class labels
metrics=['accuracy'])
# TensorBoard callback: writes training/validation metrics (and per-epoch weight
# histograms, histogram_freq=1) under ./log_dir for later visualization.
tensorboard_callback = tf.keras.callbacks.TensorBoard(log_dir="log_dir", histogram_freq=1)
# Train for 5 epochs, evaluating on the test set after each epoch.
# NOTE(review): the original script repeated the callback creation and the
# model.fit(...) call a second time verbatim, which silently trained the same
# model for 5 additional epochs — the duplicate has been removed.
model.fit(x_train, y_train, epochs=5, validation_data=(x_test, y_test), callbacks=[tensorboard_callback])
# Report loss and accuracy on the held-out test set.
model.evaluate(x_test, y_test, verbose=2)
rem Launch TensorBoard pointing at the log directory written by the training script.
rem On Windows cmd, single quotes are passed literally to the program, so use the
rem bare directory name — it must match log_dir="log_dir" in the Python code.
tensorboard --logdir=log_dir
Recommended Posts