-
Notifications
You must be signed in to change notification settings - Fork 65
/
ML
95 lines (79 loc) · 2.66 KB
/
ML
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
import random
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import tensorflow as tf
# Seed every RNG this script touches (Python, NumPy, TensorFlow) so the
# sampled preview digits, weight init, and training are reproducible.
random.seed(0)
np.random.seed(seed=0)
tf.random.set_seed(seed=0)

# Load MNIST: arrays of grayscale digit images with integer labels 0-9.
# (Removed a leftover notebook expression `x_train.shape, ...` that is a
# no-op in a plain script.)
(x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
print(
f'Training Size - Inputs:{x_train.shape}, Targets:{y_train.shape}'
f'\nTest Size - Inputs:{x_test.shape}, Targets:{y_test.shape}'
)
# Preview a grid of randomly chosen training digits with their class labels.
rows = 5
digits_per_row = 5
fig, axes = plt.subplots(nrows=rows, ncols=digits_per_row, figsize=(6, 6))
axes = axes.flatten()

# Draw distinct sample indices from the training set; the NumPy RNG was
# seeded above, so the same digits appear on every run.
total_digits = rows*digits_per_row
random_ids = np.random.choice(x_train.shape[0], total_digits, replace=False)

# One subplot per sampled digit, label shown as the subplot title.
for axis, sample_id in zip(axes, random_ids):
    axis.imshow(x_train[sample_id], cmap='gray')
    axis.set_title(f'Class: {y_train[sample_id]}')
    axis.axis('off')

plt.tight_layout()
plt.show()
# Flatten each image into a 784-long feature vector for the dense network.
# Using -1 lets NumPy infer the sample count instead of hard-coding
# 60000/10000, so the code keeps working if the split sizes ever change.
x_train = x_train.reshape((-1, 784))
x_test = x_test.reshape((-1, 784))

# One-hot encode the integer labels 0-9 to match the
# 'categorical_crossentropy' loss used below.
y_train = tf.one_hot(y_train, depth=10)
y_test = tf.one_hot(y_test, depth=10)

# NOTE(review): pixel values are left in [0, 255]; scaling to [0, 1]
# typically speeds up SGD convergence — confirm this is intentional.
print(
f'Training Size - Inputs:{x_train.shape}, Targets:{y_train.shape}'
f'\nTest Size - Inputs:{x_test.shape}, Targets:{y_test.shape}'
)
# --- Hyperparameters ---
activation = 'tanh'
loss = 'categorical_crossentropy' # Do not change this loss function.
metrics = ['accuracy']
learning_rate = 0.001
optimizer = tf.keras.optimizers.SGD(learning_rate=learning_rate) # Do not change this optimizer.
epochs = 10

# MLP classifier: 784 inputs -> two 256-unit tanh hidden layers -> 10-way
# softmax output. Built incrementally; the architecture, layer names, and
# resulting summary are identical to a list-literal Sequential.
model = tf.keras.Sequential()
model.add(tf.keras.layers.Input(shape=(784,), name='input'))
model.add(tf.keras.layers.Dense(units=256, activation=activation, name='hidden-1'))
model.add(tf.keras.layers.Dense(units=256, activation=activation, name='hidden-2'))
model.add(tf.keras.layers.Dense(units=10, activation='softmax', name='outputs')) # Do not change this activation function.

model.summary(expand_nested=True)
model.compile(optimizer=optimizer, loss=loss, metrics=metrics)

# Train, evaluating on the test split after every epoch; per-epoch metrics
# land in history.history for the plots below.
history = model.fit(
    x=x_train,
    y=y_train,
    epochs=epochs,
    validation_data=(x_test, y_test)
)
# Per-epoch metrics recorded by model.fit.
train_loss = history.history['loss']
val_loss = history.history['val_loss']
train_acc = history.history['accuracy']
val_acc = history.history['val_accuracy']

# Render the loss figure, then the accuracy figure, with identical axis
# framing; driven by a spec table instead of two copy-pasted stanzas.
curve_specs = [
    (train_loss, val_loss, 'Train Loss', 'Validation Loss',
     'Neural Network Loss per epoch', 'Categorical Cross-Entropy'),
    (train_acc, val_acc, 'Train Accuracy', 'Validation Accuracy',
     'Neural Network Accuracy per epoch', 'Accuracy'),
]
for train_curve, val_curve, train_label, val_label, fig_title, y_label in curve_specs:
    plt.plot(train_curve, label=train_label)
    plt.plot(val_curve, label=val_label)
    plt.title(fig_title)
    plt.ylabel(y_label)
    plt.xlabel('Epochs')
    plt.xlim(0, epochs)
    plt.ylim(0, 1)
    plt.legend()
    plt.show()