# Path: blob/main/C2 - Advanced Learning Algorithms/week1/C2W1A1/public_tests.py
# UNIT TESTS1from tensorflow.keras.activations import sigmoid2from tensorflow.keras.layers import Dense34import numpy as np56def test_c1(target):7assert len(target.layers) == 3, \8f"Wrong number of layers. Expected 3 but got {len(target.layers)}"9assert target.input.shape.as_list() == [None, 400], \10f"Wrong input shape. Expected [None, 400] but got {target.input.shape.as_list()}"11i = 012expected = [[Dense, [None, 25], sigmoid],13[Dense, [None, 15], sigmoid],14[Dense, [None, 1], sigmoid]]1516for layer in target.layers:17assert type(layer) == expected[i][0], \18f"Wrong type in layer {i}. Expected {expected[i][0]} but got {type(layer)}"19assert layer.output.shape.as_list() == expected[i][1], \20f"Wrong number of units in layer {i}. Expected {expected[i][1]} but got {layer.output.shape.as_list()}"21assert layer.activation == expected[i][2], \22f"Wrong activation in layer {i}. Expected {expected[i][2]} but got {layer.activation}"23i = i + 12425print("\033[92mAll tests passed!")2627def test_c2(target):2829def linear(a):30return a3132def linear_times3(a):33return a * 33435x_tst = np.array([1., 2., 3., 4.]) # (1 examples, 3 features)36W_tst = np.array([[1., 2.], [1., 2.], [1., 2.], [1., 2.]]) # (3 input features, 2 output features)37b_tst = np.array([0., 0.]) # (2 features)3839A_tst = target(x_tst, W_tst, b_tst, linear)40assert A_tst.shape[0] == len(b_tst)41assert np.allclose(A_tst, [10., 20.]), \42"Wrong output. Check the dot product"4344b_tst = np.array([3., 5.]) # (2 features)4546A_tst = target(x_tst, W_tst, b_tst, linear)47assert np.allclose(A_tst, [13., 25.]), \48"Wrong output. Check the bias term in the formula"4950A_tst = target(x_tst, W_tst, b_tst, linear_times3)51assert np.allclose(A_tst, [39., 75.]), \52"Wrong output. 
Did you apply the activation function at the end?"5354print("\033[92mAll tests passed!")5556def test_c3(target):5758def linear(a):59return a6061def linear_times3(a):62return a * 36364x_tst = np.array([1., 2., 3., 4.]) # (1 examples, 3 features)65W_tst = np.array([[1., 2.], [1., 2.], [1., 2.], [1., 2.]]) # (3 input features, 2 output features)66b_tst = np.array([0., 0.]) # (2 features)6768A_tst = target(x_tst, W_tst, b_tst, linear)69assert A_tst.shape[0] == len(b_tst)70assert np.allclose(A_tst, [10., 20.]), \71"Wrong output. Check the dot product"7273b_tst = np.array([3., 5.]) # (2 features)7475A_tst = target(x_tst, W_tst, b_tst, linear)76assert np.allclose(A_tst, [13., 25.]), \77"Wrong output. Check the bias term in the formula"7879A_tst = target(x_tst, W_tst, b_tst, linear_times3)80assert np.allclose(A_tst, [39., 75.]), \81"Wrong output. Did you apply the activation function at the end?"8283x_tst = np.array([[1., 2., 3., 4.], [5., 6., 7., 8.]]) # (2 examples, 4 features)84W_tst = np.array([[1., 2., 3.], [4., 5., 6.], [7., 8., 9.], [10., 11., 12]]) # (3 input features, 2 output features)85b_tst = np.array([0., 0., 0.]) # (2 features)8687A_tst = target(x_tst, W_tst, b_tst, linear)88assert A_tst.shape == (2, 3)89assert np.allclose(A_tst, [[ 70., 80., 90.], [158., 184., 210.]]), \90"Wrong output. Check the dot product"9192b_tst = np.array([3., 5., 6]) # (3 features)9394A_tst = target(x_tst, W_tst, b_tst, linear)95assert np.allclose(A_tst, [[ 73., 85., 96.], [161., 189., 216.]]), \96"Wrong output. Check the bias term in the formula"9798A_tst = target(x_tst, W_tst, b_tst, linear_times3)99assert np.allclose(A_tst, [[ 219., 255., 288.], [483., 567., 648.]]), \100"Wrong output. Did you apply the activation function at the end?"101102print("\033[92mAll tests passed!")103104105