Understanding PyTorch Functionalities in Under 10 Minutes
PolycephalumAI

Published on Sep 15, 2024

Script 1:
____________________________________
NumPy

import numpy as np
from matplotlib import pyplot as plt

X = np.array([1., 2., 3., 4., 5., 6., 7., 8.])
Y = np.array([3., 3., 6., 7., 11., 11., 15., 15.])

plt.scatter(X, Y, color="blue")
plt.xlabel("month")
plt.ylabel("skill")
plt.show()

w = 0.

def Model(x):
    return w * x

def MSE(y_pred, y):
    return ((y_pred - y)**2).mean()

n_epochs = 100
learning_rate = 0.001

w_tracker = []
loss_tracker = []

for epoch in range(n_epochs):
    # forward pass
    Y_Pred = Model(X)
    loss = MSE(Y_Pred, Y)
    loss_tracker.append(loss)

    # backward pass
    grad = (2 / len(X)) * np.sum((w*X - Y) * X)  # gradient = dMSE/dw (derived in the comment after the loop)
    w -= learning_rate * grad                    # update the weight
    w_tracker.append(w)
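# derivation of the gradient used above (added note, not in the original script):
# MSE(w) = (1/n) * sum_i (w*x_i - y_i)^2
# dMSE/dw = (2/n) * sum_i (w*x_i - y_i) * x_i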


# visualization

fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))

for i in range(len(w_tracker)):
    ax1.cla()
    ax2.cla()

    ax1.scatter(X, Y, label="true values")
    ax1.plot(X, w_tracker[i] * X, label=f"Prediction - Epoch No {i+1}")
    ax1.set_title(f"Weight = {w_tracker[i]:.2f}")
    ax1.grid()
    ax1.legend()

    ax2.plot(w_tracker, loss_tracker, label="Loss curve")
    ax2.scatter(w_tracker[i], loss_tracker[i], color='red', label="Current Point")
    ax2.set_title("Loss vs Weight")
    ax2.grid()
    ax2.legend()

    plt.pause(0.5)
    plt.draw()

plt.show()
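
Quick sanity check (added note, not part of the original script): for the intercept-free model y = w*x, the MSE has a closed-form minimizer w* = sum(x_i*y_i) / sum(x_i^2), so the weight that gradient descent approaches can be computed directly:

import numpy as np

X = np.array([1., 2., 3., 4., 5., 6., 7., 8.])
Y = np.array([3., 3., 6., 7., 11., 11., 15., 15.])

# analytic minimizer of the MSE for y = w*x
w_closed_form = np.sum(X * Y) / np.sum(X * X)
print(f"closed-form weight: {w_closed_form:.4f}")  # ~1.97

With 100 epochs and a learning rate of 0.001, the script's final w should land close to this value but not exactly on it, since the loop has not fully converged yet.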

____________________________________
Script 2:
____________________________________
PyTorch

import torch
from matplotlib import pyplot as plt

X = torch.tensor([1., 2., 3., 4., 5., 6., 7., 8.])
Y = torch.tensor([3., 3., 6., 7., 11., 11., 15., 15.])

plt.scatter(X, Y, color="blue")
plt.xlabel("month")
plt.ylabel("skill")
plt.show()

w = torch.tensor(0., requires_grad=True)

def Model(x):
    return w * x

def MSE(y_pred, y):
    return ((y_pred - y)**2).mean()

n_epochs = 100
learning_rate = 0.001

w_tracker = []
loss_tracker = []

for epoch in range(n_epochs):
    # forward pass
    Y_pred = Model(X)
    loss = MSE(Y_pred, Y)
    loss_tracker.append(loss.item())

    # backward pass
    loss.backward()                  # autograd computes gradient = dMSE/dw and stores it in w.grad
    with torch.no_grad():            # update the weight without recording it in the graph
        w -= learning_rate * w.grad
        w_tracker.append(w.item())
        w.grad.zero_()               # reset the gradient for the next epoch

# visualization

fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))

for i in range(len(w_tracker)):
    ax1.cla()
    ax2.cla()

    ax1.scatter(X, Y, label="true values")
    ax1.plot(X, w_tracker[i] * X, label=f"Prediction - Epoch No {i+1}")
    ax1.set_title(f"Weight = {w_tracker[i]:.2f}")
    ax1.grid()
    ax1.legend()

    ax2.plot(w_tracker, loss_tracker, label="Loss curve")
    ax2.scatter(w_tracker[i], loss_tracker[i], color='red', label=f"w at Epoch No. {i+1}")
    ax2.set_title("Loss vs Weight")
    ax2.grid()
    ax2.legend()

    plt.pause(0.5)
    plt.draw()

plt.show()
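
The script above updates w by hand inside torch.no_grad(); the more idiomatic PyTorch route is to hand the parameter to an optimizer. A minimal sketch of the same training loop with torch.optim.SGD (added here as a sketch, not part of the original scripts):

import torch

X = torch.tensor([1., 2., 3., 4., 5., 6., 7., 8.])
Y = torch.tensor([3., 3., 6., 7., 11., 11., 15., 15.])

w = torch.tensor(0., requires_grad=True)
optimizer = torch.optim.SGD([w], lr=0.001)

for epoch in range(100):
    loss = ((w * X - Y)**2).mean()  # forward pass + MSE
    loss.backward()                 # autograd fills w.grad
    optimizer.step()                # w -= lr * w.grad
    optimizer.zero_grad()           # reset w.grad for the next epoch

print(w.item())  # should match the manual loop's final weight

optimizer.step() and optimizer.zero_grad() replace the manual no_grad update and w.grad.zero_() calls; this is the pattern that larger models built on nn.Module use as well.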
