
Commit 322eeb4

Commit message: added 07 and 08
1 parent 7b19836 commit 322eeb4

File tree

2 files changed: +120, -0 lines


07_linear_regression.py

Lines changed: 51 additions & 0 deletions
import torch
import torch.nn as nn
import numpy as np
from sklearn import datasets
import matplotlib.pyplot as plt

# 0) Prepare data
X_numpy, y_numpy = datasets.make_regression(n_samples=100, n_features=1, noise=20, random_state=4)

# cast to float Tensor
X = torch.from_numpy(X_numpy.astype(np.float32))
y = torch.from_numpy(y_numpy.astype(np.float32))
y = y.view(y.shape[0], 1)

n_samples, n_features = X.shape

# 1) Model
# Linear model f = wx + b
input_size = n_features
output_size = 1
model = nn.Linear(input_size, output_size)

# 2) Loss and optimizer
learning_rate = 0.01

criterion = nn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)

# 3) Training loop
num_epochs = 100
for epoch in range(num_epochs):
    # Forward pass and loss
    y_predicted = model(X)
    loss = criterion(y_predicted, y)

    # Backward pass and update
    loss.backward()
    optimizer.step()

    # zero grad before new step
    optimizer.zero_grad()

    if (epoch+1) % 10 == 0:
        print(f'epoch: {epoch+1}, loss = {loss.item():.4f}')

# Plot
predicted = model(X).detach().numpy()

plt.plot(X_numpy, y_numpy, 'ro')
plt.plot(X_numpy, predicted, 'b')
plt.show()
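
Since the model here is a single nn.Linear(1, 1) layer, the fitted line f = wx + b can be read straight off its parameters. Below is a minimal sketch of that, assuming the trained `model` from the script above is in scope; the names `w`, `b`, and `x_new` are illustrative and not part of the commit.

# Sketch: inspect the learned parameters of the 1-in/1-out linear layer.
# Assumes `model` from 07_linear_regression.py has already been trained.
w = model.weight.item()  # scalar weight (weight tensor has shape (1, 1))
b = model.bias.item()    # scalar bias (bias tensor has shape (1,))
print(f'learned line: y = {w:.3f} * x + {b:.3f}')

# A manual prediction for one new input matches model(x) up to float precision.
x_new = torch.tensor([[0.5]])          # shape (1, 1), one sample, one feature
manual = w * 0.5 + b
print(manual, model(x_new).item())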

08_logistic_regression.py

Lines changed: 69 additions & 0 deletions
import torch
import torch.nn as nn
import numpy as np
from sklearn import datasets
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split

# 0) Prepare data
bc = datasets.load_breast_cancer()
X, y = bc.data, bc.target

n_samples, n_features = X.shape

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=1234)

# scale features to zero mean, unit variance
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)

X_train = torch.from_numpy(X_train.astype(np.float32))
X_test = torch.from_numpy(X_test.astype(np.float32))
y_train = torch.from_numpy(y_train.astype(np.float32))
y_test = torch.from_numpy(y_test.astype(np.float32))

y_train = y_train.view(y_train.shape[0], 1)
y_test = y_test.view(y_test.shape[0], 1)

# 1) Model
# Linear model f = wx + b, sigmoid at the end
class Model(nn.Module):
    def __init__(self, n_input_features):
        super(Model, self).__init__()
        self.linear = nn.Linear(n_input_features, 1)  # n_input_features in, one out

    def forward(self, x):
        y_pred = torch.sigmoid(self.linear(x))
        return y_pred

model = Model(n_features)

# 2) Loss and optimizer
num_epochs = 100
learning_rate = 0.01
criterion = nn.BCELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)

# 3) Training loop
for epoch in range(num_epochs):
    # Forward pass and loss
    y_pred = model(X_train)
    loss = criterion(y_pred, y_train)

    # Backward pass and update
    loss.backward()
    optimizer.step()

    # zero grad before new step
    optimizer.zero_grad()

    if (epoch+1) % 10 == 0:
        print(f'epoch: {epoch+1}, loss = {loss.item():.4f}')

# Evaluate on the test set (no gradients needed)
with torch.no_grad():
    y_predicted = model(X_test)
    y_predicted_cls = y_predicted.round()
    acc = y_predicted_cls.eq(y_test).sum() / float(y_test.shape[0])
    print(f'accuracy: {acc.item():.4f}')
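
A common refinement of the sigmoid + nn.BCELoss pattern above, sketched here under the assumption that the rest of the training loop stays the same: nn.BCEWithLogitsLoss folds the sigmoid into the loss, which PyTorch documents as more numerically stable. The model then returns raw logits and the sigmoid is applied only at prediction time. `LogitModel` is a hypothetical name, not part of this commit.

# Sketch (assumption, not from the original commit): same classifier with
# nn.BCEWithLogitsLoss, so forward() returns raw logits with no sigmoid.
class LogitModel(nn.Module):
    def __init__(self, n_input_features):
        super().__init__()
        self.linear = nn.Linear(n_input_features, 1)

    def forward(self, x):
        return self.linear(x)  # raw logits; the loss applies the sigmoid

logit_model = LogitModel(n_features)
criterion = nn.BCEWithLogitsLoss()
optimizer = torch.optim.SGD(logit_model.parameters(), lr=learning_rate)
# ... train exactly as in the loop above, then at evaluation time apply
# the sigmoid explicitly: y_predicted = torch.sigmoid(logit_model(X_test))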
