lab2
import numpy as np
import copy
import math
import matplotlib.pyplot as plt
%matplotlib inline
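The training data (city populations as x_train, profits as y_train) is loaded by the lab before this point; the loader itself is not shown in this excerpt. A minimal stand-in, assuming hypothetical values in the lab's units (population in 10,000s, profit in $10,000s):

# Hypothetical stand-in data; the actual lab loads x_train and y_train from its dataset file.
x_train = np.array([6.1101, 5.5277, 8.5186, 7.0032, 5.8598])  # population in 10,000s
y_train = np.array([17.592, 9.1302, 13.662, 11.854, 6.8233])  # profit in $10,000s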
print("Type of x_train:",type(x_train))
print("Type of y_train:",type(y_train))
plt.scatter(x_train, y_train, marker='x', c='r')
plt.ylabel('Profit in $10,000')
plt.xlabel('Population of City in 10,000s')
plt.show()
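The compute_cost function below implements the standard squared-error cost for linear regression, written here for reference:

$$J(w,b) = \frac{1}{2m} \sum_{i=0}^{m-1} \left( f_{w,b}(x^{(i)}) - y^{(i)} \right)^2, \qquad f_{w,b}(x^{(i)}) = w x^{(i)} + b$$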
"""
Args:
Returns
total_cost (float): The cost of using w,b as the parameters for linear regression
"""
m = x.shape[0]
total_cost = 0
for i in range(m):
f_wb = w*x[i] + b
total_cost += cost
total_cost *= 1/(2*m)
2
### END CODE HERE ###
return total_cost
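The loop above can be cross-checked with a vectorized NumPy version; a minimal sketch (compute_cost_vec is an illustrative helper name, not part of the lab):

def compute_cost_vec(x, y, w, b):
    # Vectorized equivalent: half the mean of squared errors
    err = w * x + b - y
    return np.sum(err ** 2) / (2 * x.shape[0])

# Both implementations should agree to floating-point precision
assert np.isclose(compute_cost(x_train, y_train, 2, 1),
                  compute_cost_vec(x_train, y_train, 2, 1))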
initial_w = 2
initial_b = 1
cost = compute_cost(x_train, y_train, initial_w, initial_b)
print(type(cost))
<class 'numpy.float64'>
"""
Args:
Returns
"""
m = x.shape[0]
dj_dw = 0
dj_db = 0
for i in range(m):
3
f_wb = w * x[i] + b
dj_db *= 1/m
dj_dw *= 1/m
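A quick way to validate the analytic gradient is a central finite-difference check against compute_cost; a minimal sketch (the check point and epsilon are arbitrary illustrative choices):

eps = 1e-6
w0, b0 = 0.5, 0.5  # arbitrary point at which to check the gradient
num_dj_dw = (compute_cost(x_train, y_train, w0 + eps, b0)
             - compute_cost(x_train, y_train, w0 - eps, b0)) / (2 * eps)
dj_dw, dj_db = compute_gradient(x_train, y_train, w0, b0)
assert np.isclose(num_dj_dw, dj_dw)  # numerical and analytic gradients should match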
initial_w = 0
initial_b = 0
tmp_dj_dw, tmp_dj_db = compute_gradient(x_train, y_train, initial_w, initial_b)
print('Gradient at initial w, b (zeros):', tmp_dj_dw, tmp_dj_db)
test_w = 0.2
test_b = 0.2
tmp_dj_dw, tmp_dj_db = compute_gradient(x_train, y_train, test_w, test_b)
print('Gradient at test w, b:', tmp_dj_dw, tmp_dj_db)
"""
Args:
4
gradient_function: function to compute the gradient
Returns
"""
m = len(x)
# An array to store cost J and w's at each iteration — primarily for graphing later
J_history = []
w_history = []
b = b_in
for i in range(num_iters):
w = w - alpha * dj_dw
b = b - alpha * dj_db
cost = cost_function(x, y, w, b)
J_history.append(cost)
if i% math.ceil(num_iters/10) == 0:
5
w_history.append(w)
initial_w = 0.
initial_b = 0.
iterations = 1500
alpha = 0.01
w, b, J_history, w_history = gradient_descent(x_train, y_train, initial_w, initial_b,
                                              compute_cost, compute_gradient, alpha, iterations)
print("w, b found by gradient descent:", w, b)
m = x_train.shape[0]
predicted = np.zeros(m)
for i in range(m):
    predicted[i] = w * x_train[i] + b
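To check convergence, the cost history returned by gradient_descent can be plotted against the iteration number; a minimal sketch:

# Cost should decrease steadily for a well-chosen learning rate
plt.plot(J_history)
plt.xlabel('Iteration')
plt.ylabel('Cost J')
plt.show()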
plt.plot(x_train, predicted, c='b')               # linear fit found by gradient descent
plt.scatter(x_train, y_train, marker='x', c='r')  # training data
plt.ylabel('Profit in $10,000')
plt.xlabel('Population of City in 10,000s')
plt.show()
# Predict profit (in units of $10,000) for populations of 35,000 and 70,000
predict1 = 3.5 * w + b
predict2 = 7.0 * w + b
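Since the model works in units of $10,000, scaling by 10,000 converts the predictions to dollars; for example:

print('For population = 35,000, we predict a profit of $%.2f' % (predict1 * 10000))
print('For population = 70,000, we predict a profit of $%.2f' % (predict2 * 10000))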