Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
greyhatguy007
GitHub Repository: greyhatguy007/Machine-Learning-Specialization-Coursera
Path: blob/main/C2 - Advanced Learning Algorithms/week2/C2W2A1/autils.py
3520 views
1
import numpy as np
2
import matplotlib.pyplot as plt
3
import tensorflow as tf
4
from tensorflow.keras.models import Sequential
5
from tensorflow.keras.layers import Dense
6
from tensorflow.keras.activations import linear, relu, sigmoid
7
8
# Shared deeplearning.ai color palette used by the plotting helpers below.
dlc = dict(
    dlblue='#0096ff',
    dlorange='#FF9300',
    dldarkred='#C00000',
    dlmagenta='#FF40FF',
    dlpurple='#7030A0',
    dldarkblue='#0D5BDC',
    dlmedblue='#4285F4',
)
# Module-level aliases for the same colors, kept for convenience.
dlblue = '#0096ff'
dlorange = '#FF9300'
dldarkred = '#C00000'
dlmagenta = '#FF40FF'
dlpurple = '#7030A0'
dldarkblue = '#0D5BDC'
dlmedblue = '#4285F4'
dlcolors = [dlblue, dlorange, dldarkred, dlmagenta, dlpurple]

# Course-wide matplotlib style sheet, expected next to this module.
plt.style.use('./deeplearning.mplstyle')
12
13
14
def load_data():
    """Load the handwritten-digit training set from the local data/ folder.

    Returns:
        tuple: (X, y) arrays loaded from data/X.npy and data/y.npy.
    """
    features = np.load("data/X.npy")
    labels = np.load("data/y.npy")
    return features, labels
18
19
def plt_act_trio():
    """Plot the linear, sigmoid, and ReLU activation functions side by side."""
    xs = np.linspace(-5, 5, 100)
    panels = [
        ("Linear", tf.keras.activations.linear),
        ("Sigmoid", tf.keras.activations.sigmoid),
        ("ReLu", tf.keras.activations.relu),
    ]
    fig, ax = plt.subplots(1, 3, figsize=(6, 2))
    widgvis(fig)
    for axis, (title, activation) in zip(ax, panels):
        axis.plot(xs, activation(xs))
        axis.axvline(0, lw=0.3, c="black")
        axis.axhline(0, lw=0.3, c="black")
        axis.set_title(title)
    fig.suptitle("Common Activation Functions", fontsize=14)
    fig.tight_layout(pad=0.2)
    plt.show()
38
39
def widgvis(fig):
    """Hide the ipympl widget chrome (toolbar, header, footer) of a figure."""
    canvas = fig.canvas
    for attr in ("toolbar_visible", "header_visible", "footer_visible"):
        setattr(canvas, attr, False)
43
44
def plt_ex1():
    """Plot the first example target: cos(x)+1 with the right half zeroed.

    Returns:
        tuple: (X, y) where X is 100 points on [0, 2*pi] and y is the target.
    """
    xs = np.linspace(0, 2 * np.pi, 100)
    target = np.cos(xs) + 1
    target[50:100] = 0  # flatten the second half of the curve
    fig, ax = plt.subplots(1, 1, figsize=(2, 2))
    widgvis(fig)
    ax.set_title("Target")
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.plot(xs, target)
    fig.tight_layout(pad=0.1)
    plt.show()
    return (xs, target)
57
58
def plt_ex2():
    """Plot the second example target: cos(x)+1 with the left part zeroed.

    NOTE(review): zeroes indices 0..48, leaving index 49 non-zero — this is
    asymmetric with plt_ex1's [50:100]; presumably intentional, worth confirming.

    Returns:
        tuple: (X, y) where X is 100 points on [0, 2*pi] and y is the target.
    """
    xs = np.linspace(0, 2 * np.pi, 100)
    target = np.cos(xs) + 1
    target[0:49] = 0  # flatten the first part of the curve
    fig, ax = plt.subplots(1, 1, figsize=(2, 2))
    widgvis(fig)
    ax.set_title("Target")
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.plot(xs, target)
    fig.tight_layout(pad=0.1)
    plt.show()
    return (xs, target)
71
72
def gen_data():
    """Generate a simple 1-D regression data set: y = cos(x) + 1 on [0, 2*pi].

    Returns:
        tuple: (X, y) with X of shape (100, 1) and y of shape (100,).
    """
    samples = np.linspace(0, 2 * np.pi, 100)
    targets = np.cos(samples) + 1
    return (samples.reshape(-1, 1), targets)
77
78
def plt_dual(X, y, yhat):
    """Plot the target y and the prediction yhat against X in two panels."""
    fig, ax = plt.subplots(1, 2, figsize=(4, 2))
    widgvis(fig)
    for axis, title in zip(ax, ("Target", "Prediction")):
        axis.set_title(title)
        axis.set_xlabel("x")
        axis.set_ylabel("y")
        axis.plot(X, y)
    # Overlay the prediction on the second panel only.
    ax[1].plot(X, yhat)
    fig.tight_layout(pad=0.1)
    plt.show()
92
93
def plt_act1(X, y, z, a):
    """Illustrate the role of a non-linear activation in three panels.

    Panels: the target y alone, the linear pre-activation z over the target,
    and the ReLU output a over the target, all plotted against X.

    Returns:
        The array of matplotlib axes (for further annotation).
    """
    fig, axes = plt.subplots(1, 3, figsize=(6, 2.5))
    widgvis(fig)
    ax_target, ax_linear, ax_relu = axes

    ax_target.plot(X, y, label="target")
    ax_target.axvline(0, lw=0.3, c="black")
    ax_target.axhline(0, lw=0.3, c="black")
    ax_target.set_title("y - target")

    ax_linear.plot(X, y, label="target")
    ax_linear.plot(X, z, c=dlc["dldarkred"], label="z")
    ax_linear.axvline(0, lw=0.3, c="black")
    ax_linear.axhline(0, lw=0.3, c="black")
    ax_linear.set_title(r"$z = w \cdot x+b$")
    ax_linear.legend(loc="upper center")

    ax_relu.plot(X, y, label="target")
    ax_relu.plot(X, a, c=dlc["dldarkred"], label="ReLu(z)")
    ax_relu.axhline(0, lw=0.3, c="black")
    ax_relu.axvline(0, lw=0.3, c="black")
    ax_relu.set_title("max(0,z)")
    ax_relu.legend()

    fig.suptitle("Role of Non-Linear Activation", fontsize=12)
    fig.tight_layout(pad=0.22)
    return axes
115
116
117
def plt_add_notation(ax):
    """Annotate the plt_act1 axes: where z matches the target and ReLU is off."""
    notes = [
        (ax[1], "matches\n here", (1.5, 1.0), (0.1, -1.5)),
        (ax[1], "but not\n here", (5, -2.5), (1, -3)),
        (ax[2], "ReLu\n 'off'", (2.6, 0), (0.1, 0.1)),
    ]
    for axis, label, tip, text_pos in notes:
        axis.annotate(
            text=label,
            xy=tip,
            xytext=text_pos,
            fontsize=9,
            arrowprops=dict(facecolor=dlc["dlpurple"], width=2, headwidth=8),
        )
127
128
def compile_fit(model, X, y):
    """Compile and train `model` on (X, y), then return its learned weights.

    The model is expected to contain layers named "l1" and "l2"
    (see the callers in the accompanying notebook).

    Returns:
        tuple: (w1, b1, w2, b2) weight/bias arrays of layers "l1" and "l2".
    """
    model.compile(
        loss=tf.keras.losses.MeanSquaredError(),
        optimizer=tf.keras.optimizers.Adam(0.01),
    )
    model.fit(X, y, epochs=100, verbose=0)

    w1, b1 = model.get_layer("l1").get_weights()
    w2, b2 = model.get_layer("l2").get_weights()
    return (w1, b1, w2, b2)
144
145
def plt_model(X, y, yhat_pre, yhat_post):
    """Compare the target curve to predictions before and after training."""
    fig, axes = plt.subplots(1, 3, figsize=(8, 2))
    widgvis(fig)
    titles = ("Target", "Prediction, pre-training", "Prediction, post-training")
    overlays = (None, yhat_pre, yhat_post)
    for axis, title, pred in zip(axes, titles, overlays):
        axis.set_title(title)
        axis.set_xlabel("x")
        axis.set_ylabel("y")
        axis.plot(X, y)
        if pred is not None:
            axis.plot(X, pred)
    fig.tight_layout(pad=0.1)
    plt.show()
164
165
def display_errors(model, X, y):
    """Display up to 8 digit images that the model misclassifies.

    Args:
        model: trained Keras classifier; predict() returns one row of scores
            per example, whose argmax is the predicted class.
        X: array of shape (m, 400), flattened 20x20 digit images.
        y: array of shape (m, 1), true integer labels.

    Returns:
        int: the total number of misclassified examples.
    """
    # Predict the whole set once; argmax over the raw scores equals argmax
    # over softmax(scores) (softmax is monotonic), so re-running the model
    # per image, as before, was redundant.
    f = model.predict(X)
    yhat = np.argmax(f, axis=1)
    idxs = np.where(yhat != y[:, 0])[0]
    if len(idxs) == 0:
        print("no errors found")
    else:
        cnt = min(8, len(idxs))
        fig, ax = plt.subplots(1, cnt, figsize=(5, 1.2))
        fig.tight_layout(pad=0.13, rect=[0, 0.03, 1, 0.80])  # [left, bottom, right, top]
        widgvis(fig)

        for i in range(cnt):
            j = idxs[i]
            # Reshape to 20x20 and transpose — presumably the data set stores
            # the pixels transposed; TODO confirm against the data loader.
            X_reshaped = X[j].reshape((20, 20)).T

            # Display the image.
            ax[i].imshow(X_reshaped, cmap='gray')

            # Label, prediction — reuse the batch prediction computed above.
            ax[i].set_title(f"{y[j,0]},{yhat[j]}", fontsize=10)
            ax[i].set_axis_off()
        fig.suptitle("Label, yhat", fontsize=12)
    return len(idxs)
195
196
def display_digit(X):
    """Display a single digit. The input is one flattened digit of shape (400,)."""
    fig, ax = plt.subplots(1, 1, figsize=(0.5, 0.5))
    widgvis(fig)
    image = X.reshape((20, 20)).T
    ax.imshow(image, cmap='gray')
    plt.show()
204
205
206
def plot_loss_tf(history):
    """Plot the per-epoch training loss from a Keras History object."""
    fig, ax = plt.subplots(1, 1, figsize=(4, 3))
    widgvis(fig)
    losses = history.history['loss']
    ax.plot(losses, label='loss')
    ax.set_ylim([0, 2])
    ax.set_xlabel('Epoch')
    ax.set_ylabel('loss (cost)')
    ax.legend()
    ax.grid(True)
    plt.show()
216
217