From 9bbdbb1c62bd77492f8807bb719cb781cc36b913 Mon Sep 17 00:00:00 2001 From: saurabhkthakur Date: Wed, 6 May 2026 18:28:20 +0530 Subject: [PATCH 1/3] Convert computational graph diagrams to Mermaid --- .../understanding_leaf_vs_nonleaf_tutorial.py | 49 +++++++++++++++---- 1 file changed, 39 insertions(+), 10 deletions(-) diff --git a/beginner_source/understanding_leaf_vs_nonleaf_tutorial.py b/beginner_source/understanding_leaf_vs_nonleaf_tutorial.py index 6c8fa91a011..349b749bd1a 100644 --- a/beginner_source/understanding_leaf_vs_nonleaf_tutorial.py +++ b/beginner_source/understanding_leaf_vs_nonleaf_tutorial.py @@ -87,11 +87,26 @@ # \cdots \cdot # \frac{\partial \mathbf{f}_1}{\partial \mathbf{x}} # -# .. figure:: /_static/img/understanding_leaf_vs_nonleaf/comp-graph-1.png -# :alt: Computational graph after forward pass -# -# Computational graph after forward pass -# +# .. mermaid:: +# +# graph TD + +# x["x
is_leaf=True
requires_grad=False
retains_grad=False
grad=None"] +# W["W
is_leaf=True
requires_grad=True
retains_grad=False
grad=None"] +# b["b
is_leaf=True
requires_grad=True
retains_grad=False
grad=None"] +# matmul["x @ W"] +# z["z = x @ W + b
is_leaf=False
requires_grad=True
retains_grad=False
grad=None"] +# relu["y_pred = relu(z)
is_leaf=False
requires_grad=True
retains_grad=False
grad=None"] +# y["y
is_leaf=True
requires_grad=False
retains_grad=False
grad=None"] +# loss["loss = mse(y_pred, y)
is_leaf=False
requires_grad=True
retains_grad=False
grad=None"] + +# x --> matmul +# W --> matmul +# matmul --> z +# b --> z +# z --> relu +# relu --> loss +# y --> loss # PyTorch considers a node to be a *leaf* if it is not the result of a # tensor operation with at least one input having ``requires_grad=True`` # (e.g. ``x``, ``W``, ``b``, and ``y``), and everything else to be @@ -260,11 +275,25 @@ # convention, this attribute will print ``False`` for any leaf node, even # if it requires its gradient. # -# .. figure:: /_static/img/understanding_leaf_vs_nonleaf/comp-graph-2.png -# :alt: Computational graph after backward pass -# -# Computational graph after backward pass -# +# .. mermaid:: +# +# graph TD + +# x["x
is_leaf=True
requires_grad=False
retains_grad=False
grad=None"] +# W["W
is_leaf=True
requires_grad=True
retains_grad=False
grad=torch.Tensor"] +# b["b
is_leaf=True
requires_grad=True
retains_grad=False
grad=torch.Tensor"] +# matmul["x @ W"] +# z["z = x @ W + b
is_leaf=False
requires_grad=True
retains_grad=True
grad=torch.Tensor"] +# relu["y_pred = relu(z)
is_leaf=False
requires_grad=True
retains_grad=True
grad=torch.Tensor"] +# y["y
is_leaf=True
requires_grad=False<br>
retains_grad=False
grad=None"] +# loss["loss = mse(y_pred, y)
is_leaf=False
requires_grad=True
retains_grad=True
grad=torch.Tensor"] +# x --> matmul +# W --> matmul +# matmul --> z +# b --> z +# z --> relu +# relu --> loss +# y --> loss # If you call ``retain_grad()`` on a leaf tensor, it results in a no-op # since leaf tensors already retain their gradients by default (when # ``requires_grad=True``). From 68a023604ea79c599d520f569659e9f9916c25a8 Mon Sep 17 00:00:00 2001 From: saurabhkthakur Date: Wed, 6 May 2026 23:04:17 +0530 Subject: [PATCH 2/3] fixed indentation --- beginner_source/understanding_leaf_vs_nonleaf_tutorial.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/beginner_source/understanding_leaf_vs_nonleaf_tutorial.py b/beginner_source/understanding_leaf_vs_nonleaf_tutorial.py index 349b749bd1a..5e6e8b35e5a 100644 --- a/beginner_source/understanding_leaf_vs_nonleaf_tutorial.py +++ b/beginner_source/understanding_leaf_vs_nonleaf_tutorial.py @@ -90,7 +90,7 @@ # .. mermaid:: # # graph TD - +# # x["x
is_leaf=True
requires_grad=False
retains_grad=False
grad=None"] # W["W
is_leaf=True
requires_grad=True
retains_grad=False
grad=None"] # b["b
is_leaf=True
requires_grad=True
retains_grad=False
grad=None"] @@ -99,7 +99,7 @@ # relu["y_pred = relu(z)
is_leaf=False
requires_grad=True
retains_grad=False
grad=None"] # y["y
is_leaf=True
requires_grad=False
retains_grad=False
grad=None"] # loss["loss = mse(y_pred, y)
is_leaf=False
requires_grad=True
retains_grad=False
grad=None"] - +# # x --> matmul # W --> matmul # matmul --> z @@ -278,7 +278,7 @@ # .. mermaid:: # # graph TD - +# # x["x
is_leaf=True
requires_grad=False
retains_grad=False
grad=None"] # W["W
is_leaf=True
requires_grad=True
retains_grad=False
grad=torch.Tensor"] # b["b
is_leaf=True
requires_grad=True
retains_grad=False
grad=torch.Tensor"] @@ -287,6 +287,7 @@ # relu["y_pred = relu(z)
is_leaf=False
requires_grad=True
retains_grad=True
grad=torch.Tensor"] # y["y
is_leaf=True
requires_grad=False<br>
retains_grad=False
grad=None"] # loss["loss = mse(y_pred, y)
is_leaf=False
requires_grad=True
retains_grad=True
grad=torch.Tensor"] +# # x --> matmul # W --> matmul # matmul --> z From de0ff0c04e32ebdf2c42307abd5a12f37aabdd8a Mon Sep 17 00:00:00 2001 From: saurabhkthakur Date: Wed, 6 May 2026 23:18:45 +0530 Subject: [PATCH 3/3] removed extra spaces --- beginner_source/understanding_leaf_vs_nonleaf_tutorial.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/beginner_source/understanding_leaf_vs_nonleaf_tutorial.py b/beginner_source/understanding_leaf_vs_nonleaf_tutorial.py index 5e6e8b35e5a..39bc0ed76f0 100644 --- a/beginner_source/understanding_leaf_vs_nonleaf_tutorial.py +++ b/beginner_source/understanding_leaf_vs_nonleaf_tutorial.py @@ -287,7 +287,7 @@ # relu["y_pred = relu(z)
is_leaf=False
requires_grad=True
retains_grad=True
grad=torch.Tensor"] # y["y
is_leaf=True
requires_grad=False<br>
retains_grad=False
grad=None"] # loss["loss = mse(y_pred, y)
is_leaf=False
requires_grad=True
retains_grad=True
grad=torch.Tensor"] -# +# # x --> matmul # W --> matmul # matmul --> z