Autodiff — Autodiff Helper

Author: Nipun Batra

Published: April 4, 2023

import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim

import numpy as np
import matplotlib.pyplot as plt

from torch.utils.data import DataLoader, TensorDataset
import seaborn as sns

%matplotlib inline
%config InlineBackend.figure_format = 'retina'

# Reproducibility: fix torch's global RNG so results match across runs.
torch.manual_seed(0)

# Show the installed torch version (displayed as the cell output).
torch.__version__
'2.0.0+cu118'
# Leaf parameters of the toy model; gradients are tracked for these.
theta_0 = torch.tensor(1.0, requires_grad=True)
theta_1 = torch.tensor(1.0, requires_grad=True)
theta_2 = torch.tensor(2.0, requires_grad=True)

# Fixed inputs — no gradients required.
x1 = torch.tensor(1.0)
x2 = torch.tensor(2.0)

# Forward pass, one primitive operation per named node.
f1 = theta_1 * x1      # weight * input
f2 = theta_2 * x2      # weight * input
f3 = f1 + f2           # weighted sum
f4 = f3 + theta_0      # add bias term
f5 = -f4               # negate
f6 = torch.exp(f5)     # exp(-f4)
f7 = 1 + f6            # 1 + exp(-f4)
f8 = 1 / f7            # sigmoid-style ratio
f9 = torch.log(f8)     # log of the ratio
L = -f9                # final scalar loss

# Name -> tensor lookup over every node in the graph.
all_nodes = {
    "theta_0": theta_0, "theta_1": theta_1, "theta_2": theta_2,
    "f1": f1, "f2": f2, "f3": f3, "f4": f4, "f5": f5,
    "f6": f6, "f7": f7, "f8": f8, "f9": f9, "L": L,
}

# Walk every node once: ask autograd to keep its .grad after backward()
# (only leaves retain gradients by default), then report its forward value.
for label, tensor in all_nodes.items():
    tensor.retain_grad()
    print(f"{label}: {tensor.item()}")
theta_0: 1.0
theta_1: 1.0
theta_2: 2.0
f1: 1.0
f2: 4.0
f3: 5.0
f4: 6.0
f5: -6.0
f6: 0.0024787522852420807
f7: 1.0024787187576294
f8: 0.9975274205207825
f9: -0.0024756414350122213
L: 0.0024756414350122213
# Backpropagate from the scalar loss; this populates .grad on every node
# (intermediates were marked with retain_grad earlier in the file).
L.backward()

# Report dL/d(node) for each node, labelled by name.
for label, tensor in all_nodes.items():
    print(f"{label}: {tensor.grad.item()}")
theta_0: -0.00247262348420918
theta_1: -0.00247262348420918
theta_2: -0.00494524696841836
f1: -0.00247262348420918
f2: -0.00247262348420918
f3: -0.00247262348420918
f4: -0.00247262348420918
f5: 0.00247262348420918
f6: 0.9975274801254272
f7: 0.9975274801254272
f8: -1.0024787187576294
f9: -1.0
L: 1.0
# Manual chain-rule check for dL/df7: since f8 = 1/f7, d(f8)/d(f7) = -1/f7**2;
# multiplying by the upstream gradient dL/df8 ≈ -1.00247 (printed above)
# reproduces f7.grad ≈ 0.9975.
(-1/(f7**2))*-1.00247
tensor(0.9975, grad_fn=<MulBackward0>)
# Manual chain-rule check for dL/df5: since f6 = exp(f5), d(f6)/d(f5) = exp(f5);
# multiplying by the upstream gradient dL/df6 ≈ 0.9975 (printed above)
# reproduces f5.grad ≈ 0.0025.
torch.exp(f5)*0.9975
tensor(0.0025, grad_fn=<MulBackward0>)