Book a Demo!
CoCalc Logo Icon
StoreFeaturesDocsShareSupportNewsAboutPoliciesSign UpSign In
greyhatguy007
GitHub Repository: greyhatguy007/machine-learning-specialization-coursera
Path: blob/main/C1 - Supervised Machine Learning - Regression and Classification/week1/Optional Labs/lab_utils_common.py
2201 views
1
"""
2
lab_utils_common.py
3
functions common to all optional labs, Course 1, Week 2
4
"""
5
6
import numpy as np
7
import matplotlib.pyplot as plt
8
9
plt.style.use('./deeplearning.mplstyle')
10
dlblue = '#0096ff'; dlorange = '#FF9300'; dldarkred='#C00000'; dlmagenta='#FF40FF'; dlpurple='#7030A0';
11
dlcolors = [dlblue, dlorange, dldarkred, dlmagenta, dlpurple]
12
dlc = dict(dlblue = '#0096ff', dlorange = '#FF9300', dldarkred='#C00000', dlmagenta='#FF40FF', dlpurple='#7030A0')
13
14
15
##########################################################
16
# Regression Routines
17
##########################################################
18
19
#Function to calculate the cost
20
def compute_cost_matrix(X, y, w, b, verbose=False):
    """
    Computes the cost for linear regression using matrix/vector operations.
    (Docstring fixed: it previously said "Computes the gradient".)

    Args:
      X (ndarray (m,n)): Data, m examples with n features
      y (ndarray (m,)) : target values
      w (ndarray (n,)) : model parameters
      b (scalar)       : model parameter
      verbose : (Boolean) If true, print out intermediate value f_wb
    Returns:
      cost: (scalar) mean squared error cost, halved per convention
    """
    m = X.shape[0]

    # Predictions for all examples at once: f_wb has shape (m,).
    f_wb = X @ w + b
    # Half the mean of squared residuals.
    total_cost = (1 / (2 * m)) * np.sum((f_wb - y) ** 2)

    if verbose:
        print("f_wb:")
        print(f_wb)

    return total_cost
43
44
def compute_gradient_matrix(X, y, w, b):
    """
    Computes the gradient for linear regression using matrix/vector operations.

    Args:
      X (ndarray (m,n)): Data, m examples with n features
      y (ndarray (m,)) : target values
      w (ndarray (n,)) : model parameters
      b (scalar)       : model parameter
    Returns:
      dj_db (scalar)       : The gradient of the cost w.r.t. the parameter b.
      dj_dw (ndarray (n,)) : The gradient of the cost w.r.t. the parameters w.
                             (Docstring fixed: X.T @ e with e of shape (m,)
                             produces shape (n,), not (n,1) as previously stated.)
    """
    m, n = X.shape
    # Predictions and residuals for all examples at once.
    f_wb = X @ w + b
    e = f_wb - y
    # Average contribution of each feature / of the bias to the cost gradient.
    dj_dw = (1 / m) * (X.T @ e)
    dj_db = (1 / m) * np.sum(e)

    return dj_db, dj_dw
65
66
67
# Loop version of multi-variable compute_cost
68
def compute_cost(X, y, w, b):
    """
    Compute the linear-regression cost, one example at a time (loop version).

    Args:
      X (ndarray (m,n)): Data, m examples with n features
      y (ndarray (m,)) : target values
      w (ndarray (n,)) : model parameters
      b (scalar)       : model parameter
    Returns:
      cost (scalar) : cost
    """
    m = X.shape[0]
    squared_error_sum = 0.0
    for row in range(m):
        # Scalar prediction for a single example: (n,)·(n,) + scalar.
        prediction = np.dot(X[row], w) + b
        squared_error_sum = squared_error_sum + (prediction - y[row]) ** 2
    # Halved mean of the accumulated squared residuals.
    return squared_error_sum / (2 * m)
86
87
def compute_gradient(X, y, w, b):
    """
    Compute the linear-regression gradient example-by-example (loop version).

    Args:
      X (ndarray (m,n)): Data, m examples with n features
      y (ndarray (m,)) : target values
      w (ndarray (n,)) : model parameters
      b (scalar)       : model parameter
    Returns:
      dj_db (scalar)           : The gradient of the cost w.r.t. the parameter b.
      dj_dw (ndarray Shape (n,)): The gradient of the cost w.r.t. the parameters w.
    """
    m, n = X.shape  # (number of examples, number of features)
    grad_w = np.zeros((n,))
    grad_b = 0.

    for idx in range(m):
        # Residual of this example's prediction.
        residual = (np.dot(X[idx], w) + b) - y[idx]
        # Accumulate per-feature and bias contributions.
        grad_w = grad_w + residual * X[idx]
        grad_b = grad_b + residual

    # Average over the m examples.
    grad_w = grad_w / m
    grad_b = grad_b / m

    return grad_b, grad_w
112
113
114