Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
greyhatguy007
GitHub Repository: greyhatguy007/Machine-Learning-Specialization-Coursera
Path: blob/main/C2 - Advanced Learning Algorithms/week1/C2W1A1/public_tests.py
3585 views
1
# UNIT TESTS
2
from tensorflow.keras.activations import sigmoid
3
from tensorflow.keras.layers import Dense
4
5
import numpy as np
6
7
def test_c1(target):
    """Check the architecture of a Keras Sequential model.

    The model `target` must have exactly three Dense layers of 25, 15 and
    1 units respectively, all using the sigmoid activation, and accept a
    400-feature input.

    Args:
        target: a built Keras model exposing `.layers` and `.input`.

    Raises:
        AssertionError: with a descriptive message if the layer count,
            input shape, any layer type, output shape, or activation
            differs from the expectation.
    """
    assert len(target.layers) == 3, \
        f"Wrong number of layers. Expected 3 but got {len(target.layers)}"
    assert target.input.shape.as_list() == [None, 400], \
        f"Wrong input shape. Expected [None, 400] but got {target.input.shape.as_list()}"

    # Per-layer expectation: (layer class, output shape, activation fn).
    expected = [[Dense, [None, 25], sigmoid],
                [Dense, [None, 15], sigmoid],
                [Dense, [None, 1], sigmoid]]

    # enumerate replaces the original manual `i = 0` / `i = i + 1` counter.
    for i, layer in enumerate(target.layers):
        exp_type, exp_shape, exp_activation = expected[i]
        assert type(layer) == exp_type, \
            f"Wrong type in layer {i}. Expected {exp_type} but got {type(layer)}"
        assert layer.output.shape.as_list() == exp_shape, \
            f"Wrong number of units in layer {i}. Expected {exp_shape} but got {layer.output.shape.as_list()}"
        assert layer.activation == exp_activation, \
            f"Wrong activation in layer {i}. Expected {exp_activation} but got {layer.activation}"

    print("\033[92mAll tests passed!")
27
28
def test_c2(target):
29
30
def linear(a):
31
return a
32
33
def linear_times3(a):
34
return a * 3
35
36
x_tst = np.array([1., 2., 3., 4.]) # (1 examples, 3 features)
37
W_tst = np.array([[1., 2.], [1., 2.], [1., 2.], [1., 2.]]) # (3 input features, 2 output features)
38
b_tst = np.array([0., 0.]) # (2 features)
39
40
A_tst = target(x_tst, W_tst, b_tst, linear)
41
assert A_tst.shape[0] == len(b_tst)
42
assert np.allclose(A_tst, [10., 20.]), \
43
"Wrong output. Check the dot product"
44
45
b_tst = np.array([3., 5.]) # (2 features)
46
47
A_tst = target(x_tst, W_tst, b_tst, linear)
48
assert np.allclose(A_tst, [13., 25.]), \
49
"Wrong output. Check the bias term in the formula"
50
51
A_tst = target(x_tst, W_tst, b_tst, linear_times3)
52
assert np.allclose(A_tst, [39., 75.]), \
53
"Wrong output. Did you apply the activation function at the end?"
54
55
print("\033[92mAll tests passed!")
56
57
def test_c3(target):
58
59
def linear(a):
60
return a
61
62
def linear_times3(a):
63
return a * 3
64
65
x_tst = np.array([1., 2., 3., 4.]) # (1 examples, 3 features)
66
W_tst = np.array([[1., 2.], [1., 2.], [1., 2.], [1., 2.]]) # (3 input features, 2 output features)
67
b_tst = np.array([0., 0.]) # (2 features)
68
69
A_tst = target(x_tst, W_tst, b_tst, linear)
70
assert A_tst.shape[0] == len(b_tst)
71
assert np.allclose(A_tst, [10., 20.]), \
72
"Wrong output. Check the dot product"
73
74
b_tst = np.array([3., 5.]) # (2 features)
75
76
A_tst = target(x_tst, W_tst, b_tst, linear)
77
assert np.allclose(A_tst, [13., 25.]), \
78
"Wrong output. Check the bias term in the formula"
79
80
A_tst = target(x_tst, W_tst, b_tst, linear_times3)
81
assert np.allclose(A_tst, [39., 75.]), \
82
"Wrong output. Did you apply the activation function at the end?"
83
84
x_tst = np.array([[1., 2., 3., 4.], [5., 6., 7., 8.]]) # (2 examples, 4 features)
85
W_tst = np.array([[1., 2., 3.], [4., 5., 6.], [7., 8., 9.], [10., 11., 12]]) # (3 input features, 2 output features)
86
b_tst = np.array([0., 0., 0.]) # (2 features)
87
88
A_tst = target(x_tst, W_tst, b_tst, linear)
89
assert A_tst.shape == (2, 3)
90
assert np.allclose(A_tst, [[ 70., 80., 90.], [158., 184., 210.]]), \
91
"Wrong output. Check the dot product"
92
93
b_tst = np.array([3., 5., 6]) # (3 features)
94
95
A_tst = target(x_tst, W_tst, b_tst, linear)
96
assert np.allclose(A_tst, [[ 73., 85., 96.], [161., 189., 216.]]), \
97
"Wrong output. Check the bias term in the formula"
98
99
A_tst = target(x_tst, W_tst, b_tst, linear_times3)
100
assert np.allclose(A_tst, [[ 219., 255., 288.], [483., 567., 648.]]), \
101
"Wrong output. Did you apply the activation function at the end?"
102
103
print("\033[92mAll tests passed!")
104
105