Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
greyhatguy007
GitHub Repository: greyhatguy007/Machine-Learning-Specialization-Coursera
Path: blob/main/C2 - Advanced Learning Algorithms/week3/C2W3A1/public_tests_a1.py
3520 views
1
import tensorflow as tf
2
from tensorflow.keras.models import Sequential
3
from tensorflow.keras.layers import Dense
4
from tensorflow.keras.activations import relu,linear
5
from tensorflow.keras.losses import SparseCategoricalCrossentropy
6
from tensorflow.keras.optimizers import Adam
7
8
import numpy as np
9
10
def test_eval_mse(target):
11
y_hat = np.array([2.4, 4.2])
12
y_tmp = np.array([2.3, 4.1])
13
result = target(y_hat, y_tmp)
14
15
assert np.isclose(result, 0.005, atol=1e-6), f"Wrong value. Expected 0.005, got {result}"
16
17
y_hat = np.array([3.] * 10)
18
y_tmp = np.array([3.] * 10)
19
result = target(y_hat, y_tmp)
20
assert np.isclose(result, 0.), f"Wrong value. Expected 0.0 when y_hat == t_tmp, but got {result}"
21
22
y_hat = np.array([3.])
23
y_tmp = np.array([0.])
24
result = target(y_hat, y_tmp)
25
assert np.isclose(result, 4.5), f"Wrong value. Expected 4.5, but got {result}. Remember the square termn"
26
27
y_hat = np.array([3.] * 5)
28
y_tmp = np.array([2.] * 5)
29
result = target(y_hat, y_tmp)
30
assert np.isclose(result, 0.5), f"Wrong value. Expected 0.5, but got {result}. Remember to divide by (2*m)"
31
32
print("\033[92m All tests passed.")
33
34
def test_eval_cat_err(target):
35
y_hat = np.array([1, 0, 1, 1, 1, 0])
36
y_tmp = np.array([0, 1, 0, 0, 0, 1])
37
result = target(y_hat, y_tmp)
38
assert not np.isclose(result, 6.), f"Wrong value. Expected 1, but got {result}. Did you divided by m?"
39
40
y_hat = np.array([1, 2, 0])
41
y_tmp = np.array([1, 2, 3])
42
result = target(y_hat, y_tmp)
43
assert np.isclose(result, 1./3., atol=1e-6), f"Wrong value. Expected 0.333, but got {result}"
44
45
y_hat = np.array([1, 0, 1, 1, 1, 0])
46
y_tmp = np.array([1, 1, 1, 0, 0, 0])
47
result = target(y_hat, y_tmp)
48
assert np.isclose(result, 3./6., atol=1e-6), f"Wrong value. Expected 0.5, but got {result}"
49
50
y_hat = np.array([[1], [2], [0], [3]])
51
y_tmp = np.array([[1], [2], [1], [3]])
52
res_tmp = target(y_hat, y_tmp)
53
assert type(res_tmp) != np.ndarray, f"The output must be an scalar but got {type(res_tmp)}"
54
55
print("\033[92m All tests passed.")
56
57
def model_test(target, classes, input_size):
    """Checks the architecture of the unregularized 3-layer model.

    Expects `target` to be a compiled Keras Sequential model:
    Dense(120, relu) -> Dense(40, relu) -> Dense(classes, linear),
    no kernel regularizers, SparseCategoricalCrossentropy(from_logits=True)
    loss, and an Adam optimizer with learning rate 0.01.
    """
    target.build(input_shape=(None, input_size))
    expected_lr = 0.01

    assert len(target.layers) == 3, \
        f"Wrong number of layers. Expected 3 but got {len(target.layers)}"
    assert target.input.shape.as_list() == [None, input_size], \
        f"Wrong input shape. Expected [None, {input_size}] but got {target.input.shape.as_list()}"
    i = 0
    # Per-layer expectations: (layer type, output shape, activation).
    expected = [[Dense, [None, 120], relu],
                [Dense, [None, 40], relu],
                [Dense, [None, classes], linear]]

    for layer in target.layers:
        assert type(layer) == expected[i][0], \
            f"Wrong type in layer {i}. Expected {expected[i][0]} but got {type(layer)}"
        assert layer.output.shape.as_list() == expected[i][1], \
            f"Wrong number of units in layer {i}. Expected {expected[i][1]} but got {layer.output.shape.as_list()}"
        assert layer.activation == expected[i][2], \
            f"Wrong activation in layer {i}. Expected {expected[i][2]} but got {layer.activation}"
        # This is the unregularized model: no layer may carry a regularizer.
        assert layer.kernel_regularizer == None, "You must not specify any regularizer for any layer"
        i = i + 1

    assert type(target.loss) == SparseCategoricalCrossentropy, f"Wrong loss function. Expected {SparseCategoricalCrossentropy}, but got {target.loss}"
    # Fixed copy-paste defect: this checks the optimizer, not the loss.
    assert type(target.optimizer) == Adam, f"Wrong optimizer. Expected {Adam}, but got {target.optimizer}"
    lr = target.optimizer.learning_rate.numpy()
    assert np.isclose(lr, expected_lr, atol=1e-8), f"Wrong learning rate. Expected {expected_lr}, but got {lr}"
    # The model outputs logits, so the loss must apply softmax internally.
    assert target.loss.get_config()['from_logits'], f"Set from_logits=True in loss function"

    print("\033[92mAll tests passed!")
87
88
def model_s_test(target, classes, input_size):
    """Checks the architecture of the simple 2-layer model.

    Expects `target` to be a compiled Keras Sequential model:
    Dense(6, relu) -> Dense(classes, linear), no kernel regularizers,
    SparseCategoricalCrossentropy(from_logits=True) loss, and an Adam
    optimizer with learning rate 0.01.
    """
    target.build(input_shape=(None, input_size))
    expected_lr = 0.01

    # Fixed message: this model has 2 layers, not 3.
    assert len(target.layers) == 2, \
        f"Wrong number of layers. Expected 2 but got {len(target.layers)}"
    assert target.input.shape.as_list() == [None, input_size], \
        f"Wrong input shape. Expected [None, {input_size}] but got {target.input.shape.as_list()}"
    i = 0
    # Per-layer expectations: (layer type, output shape, activation).
    expected = [[Dense, [None, 6], relu],
                [Dense, [None, classes], linear]]

    for layer in target.layers:
        assert type(layer) == expected[i][0], \
            f"Wrong type in layer {i}. Expected {expected[i][0]} but got {type(layer)}"
        assert layer.output.shape.as_list() == expected[i][1], \
            f"Wrong number of units in layer {i}. Expected {expected[i][1]} but got {layer.output.shape.as_list()}"
        assert layer.activation == expected[i][2], \
            f"Wrong activation in layer {i}. Expected {expected[i][2]} but got {layer.activation}"
        # The simple model must not carry any regularizer.
        assert layer.kernel_regularizer == None, "You must not specify any regularizer for any layer"
        i = i + 1

    assert type(target.loss) == SparseCategoricalCrossentropy, f"Wrong loss function. Expected {SparseCategoricalCrossentropy}, but got {target.loss}"
    # Fixed copy-paste defect: this checks the optimizer, not the loss.
    assert type(target.optimizer) == Adam, f"Wrong optimizer. Expected {Adam}, but got {target.optimizer}"
    lr = target.optimizer.learning_rate.numpy()
    assert np.isclose(lr, expected_lr, atol=1e-8), f"Wrong learning rate. Expected {expected_lr}, but got {lr}"
    # The model outputs logits, so the loss must apply softmax internally.
    assert target.loss.get_config()['from_logits'], f"Set from_logits=True in loss function"

    print("\033[92mAll tests passed!")
117
118
def model_r_test(target, classes, input_size):
    """Checks the architecture of the L2-regularized 3-layer model.

    Expects `target` to be a compiled Keras Sequential model:
    Dense(120, relu, L2(0.1)) -> Dense(40, relu, L2(0.1)) ->
    Dense(classes, linear, no regularizer), with
    SparseCategoricalCrossentropy(from_logits=True) loss and an Adam
    optimizer with learning rate 0.01.
    """
    target.build(input_shape=(None, input_size))
    expected_lr = 0.01
    # Removed leftover debug output (print("ddd")).
    assert len(target.layers) == 3, \
        f"Wrong number of layers. Expected 3 but got {len(target.layers)}"
    assert target.input.shape.as_list() == [None, input_size], \
        f"Wrong input shape. Expected [None, {input_size}] but got {target.input.shape.as_list()}"
    i = 0
    # Per-layer expectations: (type, output shape, activation,
    # (regularizer class, factor) or None for the output layer).
    expected = [[Dense, [None, 120], relu, (tf.keras.regularizers.l2, 0.1)],
                [Dense, [None, 40], relu, (tf.keras.regularizers.l2, 0.1)],
                [Dense, [None, classes], linear, None]]

    for layer in target.layers:
        assert type(layer) == expected[i][0], \
            f"Wrong type in layer {i}. Expected {expected[i][0]} but got {type(layer)}"
        assert layer.output.shape.as_list() == expected[i][1], \
            f"Wrong number of units in layer {i}. Expected {expected[i][1]} but got {layer.output.shape.as_list()}"
        assert layer.activation == expected[i][2], \
            f"Wrong activation in layer {i}. Expected {expected[i][2]} but got {layer.activation}"
        if not (expected[i][3] == None):
            # Hidden layers must use L2 with the expected factor.
            assert type(layer.kernel_regularizer) == expected[i][3][0], f"Wrong regularizer. Expected L2 regularizer but got {type(layer.kernel_regularizer)}"
            assert np.isclose(layer.kernel_regularizer.l2, expected[i][3][1]), f"Wrong regularization factor. Expected {expected[i][3][1]}, but got {layer.kernel_regularizer.l2}"
        else:
            # The output layer stays unregularized.
            assert layer.kernel_regularizer == None, "You must not specify any regularizer for the 3rd layer"
        i = i + 1

    assert type(target.loss) == SparseCategoricalCrossentropy, f"Wrong loss function. Expected {SparseCategoricalCrossentropy}, but got {target.loss}"
    # Fixed copy-paste defect: this checks the optimizer, not the loss.
    assert type(target.optimizer) == Adam, f"Wrong optimizer. Expected {Adam}, but got {target.optimizer}"
    lr = target.optimizer.learning_rate.numpy()
    assert np.isclose(lr, expected_lr, atol=1e-8), f"Wrong learning rate. Expected {expected_lr}, but got {lr}"
    # The model outputs logits, so the loss must apply softmax internally.
    assert target.loss.get_config()['from_logits'], f"Set from_logits=True in loss function"

    print("\033[92mAll tests passed!")
152
153