Commit
Adding comments and updates to pytorch :)
Hammania689 committed Jun 18, 2019
1 parent fb9b279 commit 2d40855
Showing 3 changed files with 28 additions and 23 deletions.
9 changes: 8 additions & 1 deletion 01_basics.py
@@ -15,22 +15,29 @@ def loss(x, y):
     y_pred = forward(x)
     return (y_pred - y) * (y_pred - y)
 
 
+# Lists of weights and Mean Squared Error (MSE) values, one entry per weight
 w_list = []
 mse_list = []
 
 for w in np.arange(0.0, 4.1, 0.1):
+    # Print the current weight and initialize the loss accumulator
     print("w=", w)
     l_sum = 0
 
     for x_val, y_val in zip(x_data, y_data):
+        # For each input/output pair, compute y_hat,
+        # then compute the loss and add it to the running total
         y_pred_val = forward(x_val)
         l = loss(x_val, y_val)
         l_sum += l
         print("\t", x_val, y_val, y_pred_val, l)
+    # Compute the Mean Squared Error (MSE) over the three samples
+    # and record the weight/MSE pair for this run
     print("MSE=", l_sum / 3)
     w_list.append(w)
     mse_list.append(l_sum / 3)
 
+# Plot MSE against the weight
 plt.plot(w_list, mse_list)
 plt.ylabel('Loss')
 plt.xlabel('w')
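As a quick sanity check of the sweep above (a minimal sketch, assuming only NumPy): since y_data is exactly 2 * x_data, the MSE curve should bottom out at w = 2.0 with zero loss.

import numpy as np

x = np.array([1.0, 2.0, 3.0])
y = np.array([2.0, 4.0, 6.0])

# Sweep the same weight range as the script and locate the best w
ws = np.arange(0.0, 4.1, 0.1)
mse = [np.mean((w * x - y) ** 2) for w in ws]
print(round(ws[int(np.argmin(mse))], 1))  # 2.0, where the MSE is exactly 0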
13 changes: 8 additions & 5 deletions 02_manual_gradient.py
@@ -1,11 +1,11 @@
+# Training Data
 x_data = [1.0, 2.0, 3.0]
 y_data = [2.0, 4.0, 6.0]
 
 w = 1.0  # a random guess (any value)
 
-# our model forward pass
-
 
+# our model forward pass
 def forward(x):
     return x * w
 
@@ -20,18 +20,21 @@ def loss(x, y):
 def gradient(x, y):  # d_loss/d_w
     return 2 * x * (x * w - y)
 
+
 # Before training
-print("predict (before training)", 4, forward(4))
+print("Prediction (before training)", 4, forward(4))
 
 # Training loop
 for epoch in range(10):
     for x_val, y_val in zip(x_data, y_data):
+        # Compute the derivative w.r.t. the learned weight
+        # Update the weight
+        # Compute the loss and print progress
         grad = gradient(x_val, y_val)
         w = w - 0.01 * grad
         print("\tgrad: ", x_val, y_val, round(grad, 2))
         l = loss(x_val, y_val)
 
     print("progress:", epoch, "w=", round(w, 2), "loss=", round(l, 2))
 
 # After training
-print("predict (after training)", "4 hours", forward(4))
+print("Predicted score (after training)", "4 hours of studying: ", forward(4))
29 changes: 12 additions & 17 deletions 03_auto_gradient.py
@@ -1,39 +1,34 @@
 import torch
-from torch.autograd import Variable
+import pdb
 
 x_data = [1.0, 2.0, 3.0]
 y_data = [2.0, 4.0, 6.0]
 
-w = Variable(torch.Tensor([1.0]), requires_grad=True)  # Any random value
+w = torch.tensor([1.0], requires_grad=True)
 
 # our model forward pass
-
-
 def forward(x):
     return x * w
 
 # Loss function
-
-
-def loss(x, y):
-    y_pred = forward(x)
-    return (y_pred - y) * (y_pred - y)
+def loss(y_pred, y_val):
+    return (y_pred - y_val) ** 2
 
 # Before training
-print("predict (before training)", 4, forward(4).data[0])
+print("Prediction (before training)", 4, forward(4).item())
 
 # Training loop
 for epoch in range(10):
     for x_val, y_val in zip(x_data, y_data):
-        l = loss(x_val, y_val)
-        l.backward()
Inline discussion on this line:

@ansshahzadr (Dec 1, 2019):
Hi, using backward here gives me an error saying "'float' object has no attribute 'backward'". Can you please advise me on this? I can't find any relevant material elsewhere.

@Hammania689 (Author, Dec 2, 2019):
Hi @ansshahzadr, I just ran this code with PyTorch 1.x on both CPU and GPU, and everything works fine. Are you using the latest commit from master (here)? If not, you will get an error unless you use the same version of PyTorch that Dr. Kim used when he prepared this script.
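A minimal sketch of the likely cause (an assumption, not a confirmed diagnosis): .backward() exists only on tensors that carry an autograd graph, so a loss computed from plain Python floats (as in 02_manual_gradient.py, where w is a float) fails with exactly that AttributeError regardless of the PyTorch version.

import torch

w = torch.tensor([1.0], requires_grad=True)

l_float = (2.0 * 1.0 - 4.0) ** 2   # plain Python float, no autograd graph
# l_float.backward()               # AttributeError: 'float' object has no attribute 'backward'

l_tensor = (2.0 * w - 4.0) ** 2    # tensor connected to w's graph
l_tensor.backward()                # works and populates w.grad
print(w.grad)                      # tensor([-8.])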

print("\tgrad: ", x_val, y_val, w.grad.data[0])
w.data = w.data - 0.01 * w.grad.data
y_pred = forward(x_val) # 1) Forward pass
l = loss(y_pred, y_val) # 2) Compute loss
l.backward() # 3) Back propagation to update weights
print("\tgrad: ", x_val, y_val, w.grad.item())
w.data = w.data - 0.01 * w.grad.item()

# Manually zero the gradients after updating weights
w.grad.data.zero_()

print("progress:", epoch, l.data[0])
print(f"Epoch: {epoch} | Loss: {l.item()}")

# After training
print("predict (after training)", 4, forward(4).data[0])
print("Prediction (after training)", 4, forward(4).item())

0 comments on commit 2d40855
