Path: blob/main/C2 - Advanced Learning Algorithms/week3/C2W3A1/public_tests_a1.py
3520 views
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.activations import relu, linear
from tensorflow.keras.losses import SparseCategoricalCrossentropy
from tensorflow.keras.optimizers import Adam

import numpy as np


def test_eval_mse(target):
    """Grade an MSE implementation: target(y_hat, y_tmp) must return sum((y_hat - y_tmp)**2) / (2*m)."""
    y_hat = np.array([2.4, 4.2])
    y_tmp = np.array([2.3, 4.1])
    result = target(y_hat, y_tmp)
    assert np.isclose(result, 0.005, atol=1e-6), f"Wrong value. Expected 0.005, got {result}"

    # Identical predictions must give exactly zero error.
    y_hat = np.array([3.] * 10)
    y_tmp = np.array([3.] * 10)
    result = target(y_hat, y_tmp)
    assert np.isclose(result, 0.), f"Wrong value. Expected 0.0 when y_hat == y_tmp, but got {result}"

    # Single element: (3-0)**2 / (2*1) = 4.5 — catches a missing square.
    y_hat = np.array([3.])
    y_tmp = np.array([0.])
    result = target(y_hat, y_tmp)
    assert np.isclose(result, 4.5), f"Wrong value. Expected 4.5, but got {result}. Remember the square term"

    # 5 * 1**2 / (2*5) = 0.5 — catches a missing 1/(2*m) factor.
    y_hat = np.array([3.] * 5)
    y_tmp = np.array([2.] * 5)
    result = target(y_hat, y_tmp)
    assert np.isclose(result, 0.5), f"Wrong value. Expected 0.5, but got {result}. Remember to divide by (2*m)"

    print("\033[92m All tests passed.")


def test_eval_cat_err(target):
    """Grade a categorization-error implementation: target(y_hat, y_tmp) must return the fraction of mismatches."""
    # All 6 predictions wrong: error must be 1.0, not the raw mismatch count 6.
    y_hat = np.array([1, 0, 1, 1, 1, 0])
    y_tmp = np.array([0, 1, 0, 0, 0, 1])
    result = target(y_hat, y_tmp)
    assert np.isclose(result, 1., atol=1e-6), f"Wrong value. Expected 1, but got {result}. Did you divide by m?"

    y_hat = np.array([1, 2, 0])
    y_tmp = np.array([1, 2, 3])
    result = target(y_hat, y_tmp)
    assert np.isclose(result, 1./3., atol=1e-6), f"Wrong value. Expected 0.333, but got {result}"

    y_hat = np.array([1, 0, 1, 1, 1, 0])
    y_tmp = np.array([1, 1, 1, 0, 0, 0])
    result = target(y_hat, y_tmp)
    assert np.isclose(result, 3./6., atol=1e-6), f"Wrong value. Expected 0.5, but got {result}"

    # Column-vector labels must still reduce to a scalar result.
    y_hat = np.array([[1], [2], [0], [3]])
    y_tmp = np.array([[1], [2], [1], [3]])
    res_tmp = target(y_hat, y_tmp)
    assert not isinstance(res_tmp, np.ndarray), f"The output must be a scalar but got {type(res_tmp)}"

    print("\033[92m All tests passed.")


def _check_layers(target, expected):
    """Check each layer of *target* against *expected* rows of [type, output_shape, activation, regularizer].

    The regularizer entry is either None (no kernel_regularizer allowed) or a
    (regularizer_class, l2_factor) tuple.
    """
    for i, layer in enumerate(target.layers):
        exp_type, exp_shape, exp_act, exp_reg = expected[i]
        # Exact type (not isinstance): the assignment requires these specific classes.
        assert type(layer) == exp_type, \
            f"Wrong type in layer {i}. Expected {exp_type} but got {type(layer)}"
        assert layer.output.shape.as_list() == exp_shape, \
            f"Wrong number of units in layer {i}. Expected {exp_shape} but got {layer.output.shape.as_list()}"
        assert layer.activation == exp_act, \
            f"Wrong activation in layer {i}. Expected {exp_act} but got {layer.activation}"
        if exp_reg is None:
            assert layer.kernel_regularizer is None, \
                f"You must not specify any regularizer for layer {i}"
        else:
            reg_type, reg_factor = exp_reg
            assert type(layer.kernel_regularizer) == reg_type, \
                f"Wrong regularizer. Expected L2 regularizer but got {type(layer.kernel_regularizer)}"
            assert np.isclose(layer.kernel_regularizer.l2, reg_factor), \
                f"Wrong regularization factor. Expected {reg_factor}, but got {layer.kernel_regularizer.l2}"


def _check_compile(target, expected_lr):
    """Check that *target* was compiled with SparseCategoricalCrossentropy(from_logits=True) and Adam(expected_lr)."""
    assert type(target.loss) == SparseCategoricalCrossentropy, \
        f"Wrong loss function. Expected {SparseCategoricalCrossentropy}, but got {target.loss}"
    assert type(target.optimizer) == Adam, \
        f"Wrong optimizer. Expected {Adam}, but got {target.optimizer}"
    lr = target.optimizer.learning_rate.numpy()
    assert np.isclose(lr, expected_lr, atol=1e-8), f"Wrong learning rate. Expected {expected_lr}, but got {lr}"
    assert target.loss.get_config()['from_logits'], "Set from_logits=True in loss function"


def model_test(target, classes, input_size):
    """Grade the unregularized 3-layer model: Dense(120, relu) -> Dense(40, relu) -> Dense(classes, linear)."""
    target.build(input_shape=(None, input_size))
    expected_lr = 0.01

    assert len(target.layers) == 3, \
        f"Wrong number of layers. Expected 3 but got {len(target.layers)}"
    assert target.input.shape.as_list() == [None, input_size], \
        f"Wrong input shape. Expected [None, {input_size}] but got {target.input.shape.as_list()}"
    expected = [[Dense, [None, 120], relu, None],
                [Dense, [None, 40], relu, None],
                [Dense, [None, classes], linear, None]]

    _check_layers(target, expected)
    _check_compile(target, expected_lr)

    print("\033[92mAll tests passed!")


def model_s_test(target, classes, input_size):
    """Grade the simple 2-layer model: Dense(6, relu) -> Dense(classes, linear), no regularization."""
    target.build(input_shape=(None, input_size))
    expected_lr = 0.01

    assert len(target.layers) == 2, \
        f"Wrong number of layers. Expected 2 but got {len(target.layers)}"
    assert target.input.shape.as_list() == [None, input_size], \
        f"Wrong input shape. Expected [None, {input_size}] but got {target.input.shape.as_list()}"
    expected = [[Dense, [None, 6], relu, None],
                [Dense, [None, classes], linear, None]]

    _check_layers(target, expected)
    _check_compile(target, expected_lr)

    print("\033[92mAll tests passed!")


def model_r_test(target, classes, input_size):
    """Grade the L2-regularized 3-layer model: hidden layers carry l2(0.1), the output layer none."""
    target.build(input_shape=(None, input_size))
    expected_lr = 0.01

    assert len(target.layers) == 3, \
        f"Wrong number of layers. Expected 3 but got {len(target.layers)}"
    assert target.input.shape.as_list() == [None, input_size], \
        f"Wrong input shape. Expected [None, {input_size}] but got {target.input.shape.as_list()}"
    expected = [[Dense, [None, 120], relu, (tf.keras.regularizers.l2, 0.1)],
                [Dense, [None, 40], relu, (tf.keras.regularizers.l2, 0.1)],
                [Dense, [None, classes], linear, None]]

    _check_layers(target, expected)
    _check_compile(target, expected_lr)

    print("\033[92mAll tests passed!")