
Commit: Rebuild
9bow committed Jun 30, 2022
1 parent 63a17a1 commit 74e6d6c
Showing 655 changed files with 16,060 additions and 13,561 deletions.
2 changes: 1 addition & 1 deletion docs/.buildinfo
@@ -1,4 +1,4 @@
# Sphinx build info version 1
# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done.
config: 7e600eae8c22497c2bb29617a2788154
config: 61898db0daf49ba194c21df05df11fbd
tags: 645f666f9bcd5a90fca523b33c5a78b7
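
The note above describes the mechanism: Sphinx stores a fingerprint of the build configuration in .buildinfo and forces a full rebuild when it changes. A minimal sketch of that idea in Python (illustrative only; the config keys and hashing details below are assumptions, not Sphinx's actual implementation):

import hashlib

def config_fingerprint(config: dict) -> str:
    # Hash a stable, sorted rendering of the configuration values.
    return hashlib.md5(repr(sorted(config.items())).encode("utf-8")).hexdigest()

# Hypothetical example values; the stored hash is the one recorded in docs/.buildinfo above.
stored = "7e600eae8c22497c2bb29617a2788154"
current = config_fingerprint({"language": "ko", "html_theme": "pytorch_sphinx_theme"})

if current != stored:
    print("Configuration changed; a full rebuild will be done.")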
@@ -157,7 +157,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -193,7 +193,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -26,7 +26,7 @@
},
"outputs": [],
"source": [
"import torch\nimport math\n\ndtype = torch.float\ndevice = torch.device(\"cpu\")\n# device = torch.device(\"cuda:0\") # GPU\uc5d0\uc11c \uc2e4\ud589\ud558\ub824\uba74 \uc774 \uc8fc\uc11d\uc744 \uc81c\uac70\ud558\uc138\uc694\n\n# \uc785\ub825\uac12\uacfc \ucd9c\ub825\uac12\uc744 \uac16\ub294 \ud150\uc11c\ub4e4\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4.\n# requires_grad=False\uac00 \uae30\ubcf8\uac12\uc73c\ub85c \uc124\uc815\ub418\uc5b4 \uc5ed\uc804\ud30c \ub2e8\uacc4 \uc911\uc5d0 \uc774 \ud150\uc11c\ub4e4\uc5d0 \ub300\ud55c \ubcc0\ud654\ub3c4\ub97c\n# \uacc4\uc0b0\ud560 \ud544\uc694\uac00 \uc5c6\uc74c\uc744 \ub098\ud0c0\ub0c5\ub2c8\ub2e4.\nx = torch.linspace(-math.pi, math.pi, 2000, device=device, dtype=dtype)\ny = torch.sin(x)\n\n# \uac00\uc911\uce58\ub97c \uac16\ub294 \uc784\uc758\uc758 \ud150\uc11c\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4. 3\ucc28 \ub2e4\ud56d\uc2dd\uc774\ubbc0\ub85c 4\uac1c\uc758 \uac00\uc911\uce58\uac00 \ud544\uc694\ud569\ub2c8\ub2e4:\n# y = a + b x + c x^2 + d x^3\n# requires_grad=True\ub85c \uc124\uc815\ud558\uc5ec \uc5ed\uc804\ud30c \ub2e8\uacc4 \uc911\uc5d0 \uc774 \ud150\uc11c\ub4e4\uc5d0 \ub300\ud55c \ubcc0\ud654\ub3c4\ub97c \uacc4\uc0b0\ud560 \ud544\uc694\uac00\n# \uc788\uc74c\uc744 \ub098\ud0c0\ub0c5\ub2c8\ub2e4.\na = torch.randn((), device=device, dtype=dtype, requires_grad=True)\nb = torch.randn((), device=device, dtype=dtype, requires_grad=True)\nc = torch.randn((), device=device, dtype=dtype, requires_grad=True)\nd = torch.randn((), device=device, dtype=dtype, requires_grad=True)\n\nlearning_rate = 1e-6\nfor t in range(2000):\n # \uc21c\uc804\ud30c \ub2e8\uacc4: \ud150\uc11c\ub4e4 \uac04\uc758 \uc5f0\uc0b0\uc744 \uc0ac\uc6a9\ud558\uc5ec \uc608\uce21\uac12 y\ub97c \uacc4\uc0b0\ud569\ub2c8\ub2e4.\n y_pred = a + b * x + c * x ** 2 + d * x ** 3\n\n # \ud150\uc11c\ub4e4\uac04\uc758 \uc5f0\uc0b0\uc744 \uc0ac\uc6a9\ud558\uc5ec \uc190\uc2e4(loss)\uc744 \uacc4\uc2fc\ud558\uace0 \ucd9c\ub825\ud569\ub2c8\ub2e4.\n # \uc774 \ub54c \uc190\uc2e4\uc740 (1,) shape\uc744 \uac16\ub294 \ud150\uc11c\uc785\ub2c8\ub2e4.\n # loss.item() \uc73c\ub85c \uc190\uc2e4\uc774 \uac16\uace0 \uc788\ub294 \uc2a4\uce7c\ub77c \uac12\uc744 \uac00\uc838\uc62c \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n loss = (y_pred - y).pow(2).sum()\n if t % 100 == 99:\n print(t, loss.item())\n\n # autograd \ub97c \uc0ac\uc6a9\ud558\uc5ec \uc5ed\uc804\ud30c \ub2e8\uacc4\ub97c \uacc4\uc0b0\ud569\ub2c8\ub2e4. 
\uc774\ub294 requires_grad=True\ub97c \uac16\ub294\n # \ubaa8\ub4e0 \ud150\uc11c\ub4e4\uc5d0 \ub300\ud55c \uc190\uc2e4\uc758 \ubcc0\ud654\ub3c4\ub97c \uacc4\uc0b0\ud569\ub2c8\ub2e4.\n # \uc774\ud6c4 a.grad\uc640 b.grad, c.grad, d.grad\ub294 \uac01\uac01 a, b, c, d\uc5d0 \ub300\ud55c \uc190\uc2e4\uc758 \ubcc0\ud654\ub3c4\ub97c\n # \uac16\ub294 \ud150\uc11c\uac00 \ub429\ub2c8\ub2e4.\n loss.backward()\n\n # \uacbd\uc0ac\ud558\uac15\ubc95(gradient descent)\ub97c \uc0ac\uc6a9\ud558\uc5ec \uac00\uc911\uce58\ub97c \uc9c1\uc811 \uac31\uc2e0\ud569\ub2c8\ub2e4.\n # torch.no_grad()\ub85c \uac10\uc2f8\ub294 \uc774\uc720\ub294, \uac00\uc911\uce58\ub4e4\uc774 requires_grad=True \uc9c0\ub9cc\n # autograd\uc5d0\uc11c\ub294 \uc774\ub97c \ucd94\uc801\ud558\uc9c0 \uc54a\uc744 \uac83\uc774\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4.\n with torch.no_grad():\n a -= learning_rate * a.grad\n b -= learning_rate * b.grad\n c -= learning_rate * c.grad\n d -= learning_rate * d.grad\n\n # \uac00\uc911\uce58 \uac31\uc2e0 \ud6c4\uc5d0\ub294 \ubcc0\ud654\ub3c4\ub97c \uc9c1\uc811 0\uc73c\ub85c \ub9cc\ub4ed\ub2c8\ub2e4.\n a.grad = None\n b.grad = None\n c.grad = None\n d.grad = None\n\nprint(f'Result: y = {a.item()} + {b.item()} x + {c.item()} x^2 + {d.item()} x^3')"
"import torch\nimport math\n\ndtype = torch.float\ndevice = torch.device(\"cpu\")\n# device = torch.device(\"cuda:0\") # GPU\uc5d0\uc11c \uc2e4\ud589\ud558\ub824\uba74 \uc774 \uc8fc\uc11d\uc744 \uc81c\uac70\ud558\uc138\uc694\n\n# \uc785\ub825\uac12\uacfc \ucd9c\ub825\uac12\uc744 \uac16\ub294 \ud150\uc11c\ub4e4\uc744 \uc0dd\uc131\ud569\ub2c8\ub2e4.\n# requires_grad=False\uac00 \uae30\ubcf8\uac12\uc73c\ub85c \uc124\uc815\ub418\uc5b4 \uc5ed\uc804\ud30c \ub2e8\uacc4 \uc911\uc5d0 \uc774 \ud150\uc11c\ub4e4\uc5d0 \ub300\ud55c \ubcc0\ud654\ub3c4\ub97c\n# \uacc4\uc0b0\ud560 \ud544\uc694\uac00 \uc5c6\uc74c\uc744 \ub098\ud0c0\ub0c5\ub2c8\ub2e4.\nx = torch.linspace(-math.pi, math.pi, 2000, device=device, dtype=dtype)\ny = torch.sin(x)\n\n# \uac00\uc911\uce58\ub97c \uac16\ub294 \uc784\uc758\uc758 \ud150\uc11c\ub97c \uc0dd\uc131\ud569\ub2c8\ub2e4. 3\ucc28 \ub2e4\ud56d\uc2dd\uc774\ubbc0\ub85c 4\uac1c\uc758 \uac00\uc911\uce58\uac00 \ud544\uc694\ud569\ub2c8\ub2e4:\n# y = a + b x + c x^2 + d x^3\n# requires_grad=True\ub85c \uc124\uc815\ud558\uc5ec \uc5ed\uc804\ud30c \ub2e8\uacc4 \uc911\uc5d0 \uc774 \ud150\uc11c\ub4e4\uc5d0 \ub300\ud55c \ubcc0\ud654\ub3c4\ub97c \uacc4\uc0b0\ud560 \ud544\uc694\uac00\n# \uc788\uc74c\uc744 \ub098\ud0c0\ub0c5\ub2c8\ub2e4.\na = torch.randn((), device=device, dtype=dtype, requires_grad=True)\nb = torch.randn((), device=device, dtype=dtype, requires_grad=True)\nc = torch.randn((), device=device, dtype=dtype, requires_grad=True)\nd = torch.randn((), device=device, dtype=dtype, requires_grad=True)\n\nlearning_rate = 1e-6\nfor t in range(2000):\n # \uc21c\uc804\ud30c \ub2e8\uacc4: \ud150\uc11c\ub4e4 \uac04\uc758 \uc5f0\uc0b0\uc744 \uc0ac\uc6a9\ud558\uc5ec \uc608\uce21\uac12 y\ub97c \uacc4\uc0b0\ud569\ub2c8\ub2e4.\n y_pred = a + b * x + c * x ** 2 + d * x ** 3\n\n # \ud150\uc11c\ub4e4\uac04\uc758 \uc5f0\uc0b0\uc744 \uc0ac\uc6a9\ud558\uc5ec \uc190\uc2e4(loss)\uc744 \uacc4\uc0b0\ud558\uace0 \ucd9c\ub825\ud569\ub2c8\ub2e4.\n # \uc774 \ub54c \uc190\uc2e4\uc740 (1,) shape\uc744 \uac16\ub294 \ud150\uc11c\uc785\ub2c8\ub2e4.\n # loss.item() \uc73c\ub85c \uc190\uc2e4\uc774 \uac16\uace0 \uc788\ub294 \uc2a4\uce7c\ub77c \uac12\uc744 \uac00\uc838\uc62c \uc218 \uc788\uc2b5\ub2c8\ub2e4.\n loss = (y_pred - y).pow(2).sum()\n if t % 100 == 99:\n print(t, loss.item())\n\n # autograd \ub97c \uc0ac\uc6a9\ud558\uc5ec \uc5ed\uc804\ud30c \ub2e8\uacc4\ub97c \uacc4\uc0b0\ud569\ub2c8\ub2e4. 
\uc774\ub294 requires_grad=True\ub97c \uac16\ub294\n # \ubaa8\ub4e0 \ud150\uc11c\ub4e4\uc5d0 \ub300\ud55c \uc190\uc2e4\uc758 \ubcc0\ud654\ub3c4\ub97c \uacc4\uc0b0\ud569\ub2c8\ub2e4.\n # \uc774\ud6c4 a.grad\uc640 b.grad, c.grad, d.grad\ub294 \uac01\uac01 a, b, c, d\uc5d0 \ub300\ud55c \uc190\uc2e4\uc758 \ubcc0\ud654\ub3c4\ub97c\n # \uac16\ub294 \ud150\uc11c\uac00 \ub429\ub2c8\ub2e4.\n loss.backward()\n\n # \uacbd\uc0ac\ud558\uac15\ubc95(gradient descent)\ub97c \uc0ac\uc6a9\ud558\uc5ec \uac00\uc911\uce58\ub97c \uc9c1\uc811 \uac31\uc2e0\ud569\ub2c8\ub2e4.\n # torch.no_grad()\ub85c \uac10\uc2f8\ub294 \uc774\uc720\ub294, \uac00\uc911\uce58\ub4e4\uc774 requires_grad=True \uc9c0\ub9cc\n # autograd\uc5d0\uc11c\ub294 \uc774\ub97c \ucd94\uc801\ud558\uc9c0 \uc54a\uc744 \uac83\uc774\uae30 \ub54c\ubb38\uc785\ub2c8\ub2e4.\n with torch.no_grad():\n a -= learning_rate * a.grad\n b -= learning_rate * b.grad\n c -= learning_rate * c.grad\n d -= learning_rate * d.grad\n\n # \uac00\uc911\uce58 \uac31\uc2e0 \ud6c4\uc5d0\ub294 \ubcc0\ud654\ub3c4\ub97c \uc9c1\uc811 0\uc73c\ub85c \ub9cc\ub4ed\ub2c8\ub2e4.\n a.grad = None\n b.grad = None\n c.grad = None\n d.grad = None\n\nprint(f'Result: y = {a.item()} + {b.item()} x + {c.item()} x^2 + {d.item()} x^3')"
]
}
],
@@ -46,7 +46,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -423,7 +423,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -272,7 +272,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -204,7 +204,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -35,7 +35,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -186,7 +186,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -47,7 +47,7 @@
#    that can be used by the module during invocation.
# 3. A ``forward`` function. This is the code that is run when the module is called.
#
# Let's start with a small example:
# (The change in this hunk fixes a Korean typo: "λ³΄κ²ŸμŠ΅λ‹ˆλ‹€" → "λ³΄κ² μŠ΅λ‹ˆλ‹€".)
#

class MyCell(torch.nn.Module):
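
The diff truncates the class body here. As a hedged illustration of the three parts listed above (a sketch, not necessarily the exact body elided from this file), a minimal torch.nn.Module could look like this:

import torch

class MyCell(torch.nn.Module):
    def __init__(self):
        # 1. The constructor prepares the module for invocation.
        super(MyCell, self).__init__()
        # 2. Sub-modules and Parameters registered here can be used during the call.
        self.linear = torch.nn.Linear(4, 4)

    def forward(self, x, h):
        # 3. The forward function is the code that runs when the module is called.
        new_h = torch.tanh(self.linear(x) + h)
        return new_h, new_h

my_cell = MyCell()
x, h = torch.rand(3, 4), torch.rand(3, 4)
print(my_cell(x, h))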
@@ -503,7 +503,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -206,7 +206,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -358,7 +358,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -139,7 +139,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -294,7 +294,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -172,7 +172,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -308,7 +308,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -236,7 +236,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -136,7 +136,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -274,7 +274,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -107,7 +107,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -67,7 +67,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -594,7 +594,7 @@ def forward(self, input):
# Let's look at the results. In this section we will check three things:
# first, how the losses of G and D changed; second, the images G produced from
# fixed_noise at every epoch; and last, a comparison between images produced by
# the fully trained G and real images.
# (The change in this hunk only restores a missing space in the Korean text:
# "μ§„μ§œμ΄λ―Έμ§€λ“€μ˜" → "μ§„μ§œ μ΄λ―Έμ§€λ“€μ˜".)
#
# **Losses during training**
#
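
A sketch of how the first of these checks (plotting the G and D losses) is typically done with matplotlib. G_losses and D_losses are assumed to be the per-iteration loss lists accumulated in the training loop, which this excerpt does not show:

import matplotlib.pyplot as plt

# Assumed to be filled during training; left empty here purely for illustration.
G_losses = []
D_losses = []

plt.figure(figsize=(10, 5))
plt.title("Generator and Discriminator Loss During Training")
plt.plot(G_losses, label="G")
plt.plot(D_losses, label="D")
plt.xlabel("iterations")
plt.ylabel("Loss")
plt.legend()
plt.show()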
@@ -221,7 +221,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -67,7 +67,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -46,7 +46,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -114,7 +114,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -107,7 +107,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -46,7 +46,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -290,7 +290,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -225,7 +225,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
@@ -40,7 +40,7 @@

###############################################################
# Some operations, such as ``narrow``, do not have an in-place version, so
# ``.narrow_`` does not exist. Likewise, ``fill_`` does not have an out-of-place
# version, so ``.fill`` does not exist either.
# (The change in this hunk fixes a Korean typo: "떄문에" → "λ•Œλ¬Έμ—".)
#
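
A small sketch of the naming convention described above (a trailing underscore marks the in-place variant). The specific tensors and values are illustrative only:

import torch

x = torch.ones(3)

y = x.add(1)            # out-of-place: returns a new tensor, x is unchanged
x.add_(1)               # in-place: mutates x directly

x.fill_(7)              # fill_ exists only in-place; there is no out-of-place x.fill(...)
z = x.narrow(0, 0, 2)   # narrow exists out-of-place; per the note above, there is no .narrow_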
# Zero Indexing
@@ -46,7 +46,7 @@
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.8.8"
"version": "3.8.13"
}
},
"nbformat": 4,
