Path: blob/main/C2 - Advanced Learning Algorithms/week2/C2W2A1/autils.py
import numpy as np
import matplotlib.pyplot as plt
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.activations import linear, relu, sigmoid

# Shared color palette for the course plots
dlc = dict(dlblue='#0096ff', dlorange='#FF9300', dldarkred='#C00000', dlmagenta='#FF40FF', dlpurple='#7030A0', dldarkblue='#0D5BDC', dlmedblue='#4285F4')
dlblue = '#0096ff'; dlorange = '#FF9300'; dldarkred = '#C00000'; dlmagenta = '#FF40FF'; dlpurple = '#7030A0'; dldarkblue = '#0D5BDC'; dlmedblue = '#4285F4'
dlcolors = [dlblue, dlorange, dldarkred, dlmagenta, dlpurple]
plt.style.use('./deeplearning.mplstyle')


def load_data():
    """ load the digit images (X) and labels (y) from the data directory """
    X = np.load("data/X.npy")
    y = np.load("data/y.npy")
    return X, y


def plt_act_trio():
    """ plot the linear, sigmoid, and ReLU activations side by side """
    X = np.linspace(-5, 5, 100)
    fig, ax = plt.subplots(1, 3, figsize=(6, 2))
    widgvis(fig)
    ax[0].plot(X, tf.keras.activations.linear(X))
    ax[0].axvline(0, lw=0.3, c="black")
    ax[0].axhline(0, lw=0.3, c="black")
    ax[0].set_title("Linear")
    ax[1].plot(X, tf.keras.activations.sigmoid(X))
    ax[1].axvline(0, lw=0.3, c="black")
    ax[1].axhline(0, lw=0.3, c="black")
    ax[1].set_title("Sigmoid")
    ax[2].plot(X, tf.keras.activations.relu(X))
    ax[2].axhline(0, lw=0.3, c="black")
    ax[2].axvline(0, lw=0.3, c="black")
    ax[2].set_title("ReLU")
    fig.suptitle("Common Activation Functions", fontsize=14)
    fig.tight_layout(pad=0.2)
    plt.show()


def widgvis(fig):
    """ hide the toolbar, header, and footer of an interactive figure """
    fig.canvas.toolbar_visible = False
    fig.canvas.header_visible = False
    fig.canvas.footer_visible = False


def plt_ex1():
    """ plot a target that is cos(x)+1 on the first half of [0, 2*pi] and zero after """
    X = np.linspace(0, 2*np.pi, 100)
    y = np.cos(X) + 1
    y[50:100] = 0
    fig, ax = plt.subplots(1, 1, figsize=(2, 2))
    widgvis(fig)
    ax.set_title("Target")
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.plot(X, y)
    fig.tight_layout(pad=0.1)
    plt.show()
    return (X, y)


def plt_ex2():
    """ plot a target that is zero on the first half of [0, 2*pi] and cos(x)+1 after """
    X = np.linspace(0, 2*np.pi, 100)
    y = np.cos(X) + 1
    y[0:49] = 0
    fig, ax = plt.subplots(1, 1, figsize=(2, 2))
    widgvis(fig)
    ax.set_title("Target")
    ax.set_xlabel("x")
    ax.set_ylabel("y")
    ax.plot(X, y)
    fig.tight_layout(pad=0.1)
    plt.show()
    return (X, y)


def gen_data():
    """ generate the cos(x)+1 regression data; X is returned as a column vector """
    X = np.linspace(0, 2*np.pi, 100)
    y = np.cos(X) + 1
    X = X.reshape(-1, 1)
    return (X, y)


def plt_dual(X, y, yhat):
    """ plot the target and the model prediction side by side """
    fig, ax = plt.subplots(1, 2, figsize=(4, 2))
    widgvis(fig)
    ax[0].set_title("Target")
    ax[0].set_xlabel("x")
    ax[0].set_ylabel("y")
    ax[0].plot(X, y)
    ax[1].set_title("Prediction")
    ax[1].set_xlabel("x")
    ax[1].set_ylabel("y")
    ax[1].plot(X, y)
    ax[1].plot(X, yhat)
    fig.tight_layout(pad=0.1)
    plt.show()


def plt_act1(X, y, z, a):
    """ plot the target, the linear response z, and ReLU(z) to show the
        role of the non-linear activation """
    fig, ax = plt.subplots(1, 3, figsize=(6, 2.5))
    widgvis(fig)
    ax[0].plot(X, y, label="target")
    ax[0].axvline(0, lw=0.3, c="black")
    ax[0].axhline(0, lw=0.3, c="black")
    ax[0].set_title("y - target")
    ax[1].plot(X, y, label="target")
    ax[1].plot(X, z, c=dlc["dldarkred"], label="z")
    ax[1].axvline(0, lw=0.3, c="black")
    ax[1].axhline(0, lw=0.3, c="black")
    ax[1].set_title(r"$z = w \cdot x+b$")
    ax[1].legend(loc="upper center")
    ax[2].plot(X, y, label="target")
    ax[2].plot(X, a, c=dlc["dldarkred"], label="ReLU(z)")
    ax[2].axhline(0, lw=0.3, c="black")
    ax[2].axvline(0, lw=0.3, c="black")
    ax[2].set_title("max(0,z)")
    ax[2].legend()
    fig.suptitle("Role of Non-Linear Activation", fontsize=12)
    fig.tight_layout(pad=0.22)
    return (ax)


def plt_add_notation(ax):
    """ annotate the plt_act1 axes: where the fit matches and where ReLU is 'off' """
    ax[1].annotate(text="matches\n here", xy=(1.5, 1.0),
                   xytext=(0.1, -1.5), fontsize=9,
                   arrowprops=dict(facecolor=dlc["dlpurple"], width=2, headwidth=8))
    ax[1].annotate(text="but not\n here", xy=(5, -2.5),
                   xytext=(1, -3), fontsize=9,
                   arrowprops=dict(facecolor=dlc["dlpurple"], width=2, headwidth=8))
    ax[2].annotate(text="ReLU\n 'off'", xy=(2.6, 0),
                   xytext=(0.1, 0.1), fontsize=9,
                   arrowprops=dict(facecolor=dlc["dlpurple"], width=2, headwidth=8))


def compile_fit(model, X, y):
    """ compile with MSE loss and Adam, fit for 100 epochs, and return the
        weights of the layers named "l1" and "l2" """
    model.compile(
        loss=tf.keras.losses.MeanSquaredError(),
        optimizer=tf.keras.optimizers.Adam(0.01),
    )

    model.fit(
        X, y,
        epochs=100,
        verbose=0
    )
    l1 = model.get_layer("l1")
    l2 = model.get_layer("l2")
    w1, b1 = l1.get_weights()
    w2, b2 = l2.get_weights()
    return (w1, b1, w2, b2)


def plt_model(X, y, yhat_pre, yhat_post):
    """ plot the target and the predictions before and after training """
    fig, ax = plt.subplots(1, 3, figsize=(8, 2))
    widgvis(fig)
    ax[0].set_title("Target")
    ax[0].set_xlabel("x")
    ax[0].set_ylabel("y")
    ax[0].plot(X, y)
    ax[1].set_title("Prediction, pre-training")
    ax[1].set_xlabel("x")
    ax[1].set_ylabel("y")
    ax[1].plot(X, y)
    ax[1].plot(X, yhat_pre)
    ax[2].set_title("Prediction, post-training")
    ax[2].set_xlabel("x")
    ax[2].set_ylabel("y")
    ax[2].plot(X, y)
    ax[2].plot(X, yhat_post)
    fig.tight_layout(pad=0.1)
    plt.show()


def display_errors(model, X, y):
    """ show up to 8 misclassified digits with their label and prediction;
        returns the number of errors """
    f = model.predict(X)
    yhat = np.argmax(f, axis=1)
    idxs = np.where(yhat != y[:, 0])[0]
    if len(idxs) == 0:
        print("no errors found")
    else:
        cnt = min(8, len(idxs))
        fig, ax = plt.subplots(1, cnt, figsize=(5, 1.2))
        fig.tight_layout(pad=0.13, rect=[0, 0.03, 1, 0.80])  # [left, bottom, right, top]
        widgvis(fig)

        for i in range(cnt):
            j = idxs[i]
            X_reshaped = X[j].reshape((20, 20)).T

            # Display the image
            ax[i].imshow(X_reshaped, cmap='gray')

            # Predict using the Neural Network
            prediction = model.predict(X[j].reshape(1, 400))
            prediction_p = tf.nn.softmax(prediction)
            yhat = np.argmax(prediction_p)

            # Display the label above the image
            ax[i].set_title(f"{y[j,0]},{yhat}", fontsize=10)
            ax[i].set_axis_off()
        fig.suptitle("Label, yhat", fontsize=12)
    return (len(idxs))


def display_digit(X):
    """ display a single digit. The input is one digit (400,). """
    fig, ax = plt.subplots(1, 1, figsize=(0.5, 0.5))
    widgvis(fig)
    X_reshaped = X.reshape((20, 20)).T
    # Display the image
    ax.imshow(X_reshaped, cmap='gray')
    plt.show()


def plot_loss_tf(history):
    """ plot the training loss from a Keras History object """
    fig, ax = plt.subplots(1, 1, figsize=(4, 3))
    widgvis(fig)
    ax.plot(history.history['loss'], label='loss')
    ax.set_ylim([0, 2])
    ax.set_xlabel('Epoch')
    ax.set_ylabel('loss (cost)')
    ax.legend()
    ax.grid(True)
    plt.show()
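

# --- Usage sketch: a minimal, hypothetical demo of the regression helpers above.
# Assumes TensorFlow is installed and ./deeplearning.mplstyle is on the path.
# The layer names "l1" and "l2" are required because compile_fit() looks the
# layers up by name; the layer sizes here are illustrative choices only.
if __name__ == "__main__":
    X, y = gen_data()                       # X: (100,1) column vector, y: (100,)
    model = Sequential([
        Dense(6, activation="relu", name="l1"),
        Dense(1, activation="linear", name="l2"),
    ])
    yhat_pre = model.predict(X)             # prediction with random initial weights
    w1, b1, w2, b2 = compile_fit(model, X, y)
    yhat_post = model.predict(X)            # prediction after 100 epochs of training
    plt_model(X, y, yhat_pre, yhat_post)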