diff --git a/docs/_downloads/07d05907b3ff859aeed5f76f1acc5df4/Intro_to_TorchScript_tutorial.py b/docs/_downloads/07d05907b3ff859aeed5f76f1acc5df4/Intro_to_TorchScript_tutorial.py
index b341cda8d..47da92ee6 100644
--- a/docs/_downloads/07d05907b3ff859aeed5f76f1acc5df4/Intro_to_TorchScript_tutorial.py
+++ b/docs/_downloads/07d05907b3ff859aeed5f76f1acc5df4/Intro_to_TorchScript_tutorial.py
@@ -154,7 +154,7 @@ def forward(self, x, h):
 # 계산할 때 거꾸로 재생합니다. 이런 방식으로, 프레임워크는 언어의 모든 구문에
 # 대한 미분값을 명시적으로 정의할 필요가 없습니다.
 #
-# .. figure:: https://github.com/pytorch/pytorch/raw/master/docs/source/_static/img/dynamic_graph.gif
+# .. figure:: https://github.com/pytorch/pytorch/raw/main/docs/source/_static/img/dynamic_graph.gif
 #    :alt: 오토그라드가 작동하는 방식
 #
 #    오토그라드가 작동하는 방식
diff --git a/docs/_downloads/61a76849444a0a65d843361c26d1de16/Intro_to_TorchScript_tutorial.ipynb b/docs/_downloads/61a76849444a0a65d843361c26d1de16/Intro_to_TorchScript_tutorial.ipynb
index 0ca63a4c1..8ac99f76d 100644
--- a/docs/_downloads/61a76849444a0a65d843361c26d1de16/Intro_to_TorchScript_tutorial.ipynb
+++ b/docs/_downloads/61a76849444a0a65d843361c26d1de16/Intro_to_TorchScript_tutorial.ipynb
@@ -87,7 +87,7 @@
 "cell_type": "markdown",
 "metadata": {},
 "source": [
- "MyCell \ud074\ub798\uc2a4\ub97c \ub2e4\uc2dc \uc815\uc758\ud588\uc9c0\ub9cc, \uc5ec\uae30\uc120 ``MyDecisionGate`` \ub97c \uc815\uc758\ud588\uc2b5\ub2c8\ub2e4.\n\uc774 \ubaa8\ub4c8\uc740 **\uc81c\uc5b4 \ud750\ub984** \uc744 \ud65c\uc6a9\ud569\ub2c8\ub2e4. \uc81c\uc5b4 \ud750\ub984\uc740 \ub8e8\ud504\uc640 ``if`` \uba85\ub839\ubb38\uacfc\n\uac19\uc740 \uac83\uc73c\ub85c \uad6c\uc131\ub429\ub2c8\ub2e4.\n\n\ub9ce\uc740 \ud504\ub808\uc784\uc6cc\ud06c\ub4e4\uc740 \uc8fc\uc5b4\uc9c4 \ud504\ub85c\uadf8\ub7a8 \ucf54\ub4dc\ub85c\ubd80\ud130 \uae30\ud638\uc2dd \ubbf8\ubd84(symbolic\nderivatives)\uc744 \uacc4\uc0b0\ud558\ub294 \uc811\uadfc\ubc95\uc744 \ucde8\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \ud558\uc9c0\ub9cc, PyTorch\uc5d0\uc11c\ub294 \ubcc0\ud654\ub3c4\n\ud14c\uc774\ud504(gradient tape)\ub97c \uc0ac\uc6a9\ud569\ub2c8\ub2e4. \uc5f0\uc0b0\uc774 \ubc1c\uc0dd\ud560 \ub54c \uc774\ub97c \uae30\ub85d\ud558\uace0, \ubbf8\ubd84\uac12\uc744\n\uacc4\uc0b0\ud560 \ub54c \uac70\uafb8\ub85c \uc7ac\uc0dd\ud569\ub2c8\ub2e4. \uc774\ub7f0 \ubc29\uc2dd\uc73c\ub85c, \ud504\ub808\uc784\uc6cc\ud06c\ub294 \uc5b8\uc5b4\uc758 \ubaa8\ub4e0 \uad6c\ubb38\uc5d0\n\ub300\ud55c \ubbf8\ubd84\uac12\uc744 \uba85\uc2dc\uc801\uc73c\ub85c \uc815\uc758\ud560 \ud544\uc694\uac00 \uc5c6\uc2b5\ub2c8\ub2e4.\n\n.. figure:: https://github.com/pytorch/pytorch/raw/master/docs/source/_static/img/dynamic_graph.gif\n :alt: \uc624\ud1a0\uadf8\ub77c\ub4dc\uac00 \uc791\ub3d9\ud558\ub294 \ubc29\uc2dd\n\n \uc624\ud1a0\uadf8\ub77c\ub4dc\uac00 \uc791\ub3d9\ud558\ub294 \ubc29\uc2dd\n\n\n"
+ "MyCell \ud074\ub798\uc2a4\ub97c \ub2e4\uc2dc \uc815\uc758\ud588\uc9c0\ub9cc, \uc5ec\uae30\uc120 ``MyDecisionGate`` \ub97c \uc815\uc758\ud588\uc2b5\ub2c8\ub2e4.\n\uc774 \ubaa8\ub4c8\uc740 **\uc81c\uc5b4 \ud750\ub984** \uc744 \ud65c\uc6a9\ud569\ub2c8\ub2e4. \uc81c\uc5b4 \ud750\ub984\uc740 \ub8e8\ud504\uc640 ``if`` \uba85\ub839\ubb38\uacfc\n\uac19\uc740 \uac83\uc73c\ub85c \uad6c\uc131\ub429\ub2c8\ub2e4.\n\n\ub9ce\uc740 \ud504\ub808\uc784\uc6cc\ud06c\ub4e4\uc740 \uc8fc\uc5b4\uc9c4 \ud504\ub85c\uadf8\ub7a8 \ucf54\ub4dc\ub85c\ubd80\ud130 \uae30\ud638\uc2dd \ubbf8\ubd84(symbolic\nderivatives)\uc744 \uacc4\uc0b0\ud558\ub294 \uc811\uadfc\ubc95\uc744 \ucde8\ud558\uace0 \uc788\uc2b5\ub2c8\ub2e4. \ud558\uc9c0\ub9cc, PyTorch\uc5d0\uc11c\ub294 \ubcc0\ud654\ub3c4\n\ud14c\uc774\ud504(gradient tape)\ub97c \uc0ac\uc6a9\ud569\ub2c8\ub2e4. \uc5f0\uc0b0\uc774 \ubc1c\uc0dd\ud560 \ub54c \uc774\ub97c \uae30\ub85d\ud558\uace0, \ubbf8\ubd84\uac12\uc744\n\uacc4\uc0b0\ud560 \ub54c \uac70\uafb8\ub85c \uc7ac\uc0dd\ud569\ub2c8\ub2e4. \uc774\ub7f0 \ubc29\uc2dd\uc73c\ub85c, \ud504\ub808\uc784\uc6cc\ud06c\ub294 \uc5b8\uc5b4\uc758 \ubaa8\ub4e0 \uad6c\ubb38\uc5d0\n\ub300\ud55c \ubbf8\ubd84\uac12\uc744 \uba85\uc2dc\uc801\uc73c\ub85c \uc815\uc758\ud560 \ud544\uc694\uac00 \uc5c6\uc2b5\ub2c8\ub2e4.\n\n.. figure:: https://github.com/pytorch/pytorch/raw/main/docs/source/_static/img/dynamic_graph.gif\n :alt: \uc624\ud1a0\uadf8\ub77c\ub4dc\uac00 \uc791\ub3d9\ud558\ub294 \ubc29\uc2dd\n\n \uc624\ud1a0\uadf8\ub77c\ub4dc\uac00 \uc791\ub3d9\ud558\ub294 \ubc29\uc2dd\n\n\n"
 ]
 },
 {
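The text updated in both hunks above describes PyTorch's gradient tape: operations on tensors are recorded as they run and replayed in reverse to compute derivatives. A minimal sketch of that behaviour, using only standard torch calls (this snippet is not part of the files in this diff):

    import torch

    # Tensors created with requires_grad=True have their operations recorded.
    x = torch.randn(3, 4, requires_grad=True)
    h = torch.randn(3, 4, requires_grad=True)

    y = torch.tanh(x + h)   # recorded on the tape (y.grad_fn is a TanhBackward node)
    loss = y.sum()          # also recorded

    loss.backward()         # replay the tape backwards to compute gradients
    print(y.grad_fn)        # the recorded backward node for tanh
    print(x.grad.shape)     # gradients populated for the recorded graph: torch.Size([3, 4])
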

diff --git a/docs/beginner/Intro_to_TorchScript_tutorial.html b/docs/beginner/Intro_to_TorchScript_tutorial.html
index 6f5633816..f3c073221 100644
--- a/docs/beginner/Intro_to_TorchScript_tutorial.html
+++ b/docs/beginner/Intro_to_TorchScript_tutorial.html
@@ -463,11 +463,11 @@
 [regenerated example output following print(my_cell(x, h)) in the "PyTorch 모델 작성의 기초" (Basics of PyTorch Model Authoring) section; surrounding HTML markup omitted]
@@ -551,11 +551,11 @@
 [regenerated example output for the MyCell variant that adds self.linear and defines MyDecisionGate]
@@ -567,7 +567,7 @@
 [figure "오토그라드가 작동하는 방식" (How autograd works): image URL updated from raw/master to raw/main; alt text and caption unchanged]
@@ -600,11 +600,11 @@
 [regenerated example output near the "Module" heading; surrounding markup omitted]
@@ -661,17 +661,17 @@
 [context: "살짝 앞으로 돌아가 MyCell 의 두 번째 버전을 가져왔습니다" (revisiting the second version of MyCell), followed by print(traced_cell(x, h)) and its old/new output]
-(tensor([[ 0.7324, -0.3243,  0.1321,  0.1625],
-        [ 0.0783,  0.4072,  0.1977, -0.2026],
-        [-0.0253,  0.4354,  0.1660,  0.2262]], grad_fn=<TanhBackward0>), tensor([[ 0.7324, -0.3243,  0.1321,  0.1625],
-        [ 0.0783,  0.4072,  0.1977, -0.2026],
-        [-0.0253,  0.4354,  0.1660,  0.2262]], grad_fn=<TanhBackward0>))
-(tensor([[ 0.7324, -0.3243,  0.1321,  0.1625],
-        [ 0.0783,  0.4072,  0.1977, -0.2026],
-        [-0.0253,  0.4354,  0.1660,  0.2262]],
-       grad_fn=<DifferentiableGraphBackward>), tensor([[ 0.7324, -0.3243,  0.1321,  0.1625],
-        [ 0.0783,  0.4072,  0.1977, -0.2026],
-        [-0.0253,  0.4354,  0.1660,  0.2262]],
+(tensor([[0.9564, 0.6855, 0.8985, 0.6681],
+        [0.9028, 0.4467, 0.9141, 0.7140],
+        [0.9648, 0.7171, 0.8723, 0.8622]], grad_fn=<TanhBackward0>), tensor([[0.9564, 0.6855, 0.8985, 0.6681],
+        [0.9028, 0.4467, 0.9141, 0.7140],
+        [0.9648, 0.7171, 0.8723, 0.8622]], grad_fn=<TanhBackward0>))
+(tensor([[0.9564, 0.6855, 0.8985, 0.6681],
+        [0.9028, 0.4467, 0.9141, 0.7140],
+        [0.9648, 0.7171, 0.8723, 0.8622]],
+       grad_fn=<DifferentiableGraphBackward>), tensor([[0.9564, 0.6855, 0.8985, 0.6681],
+        [0.9028, 0.4467, 0.9141, 0.7140],
+        [0.9648, 0.7171, 0.8723, 0.8622]],
        grad_fn=<DifferentiableGraphBackward>))
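The - and + blocks in this hunk differ only because the docs build re-runs the tutorial with freshly initialised weights and inputs. A rough sketch of how such output is produced (the MyCell definition is re-stated here as an assumption matching the tutorial's 3x4 outputs; torch.jit.trace is the standard API), with a fixed seed that would make the numbers reproducible:

    import torch

    class MyCell(torch.nn.Module):
        # Assumed shape of the tutorial's cell, inferred from the 3x4 outputs above.
        def __init__(self):
            super().__init__()
            self.linear = torch.nn.Linear(4, 4)

        def forward(self, x, h):
            new_h = torch.tanh(self.linear(x) + h)
            return new_h, new_h

    torch.manual_seed(0)                       # a fixed seed keeps the printed values stable
    x, h = torch.rand(3, 4), torch.rand(3, 4)
    traced_cell = torch.jit.trace(MyCell(), (x, h))
    print(traced_cell(x, h))                   # values depend on the random init, hence the +/- churn above
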
 
@@ -765,11 +765,11 @@

 [context: "스크립팅을 사용하여 모듈 변환" (Using Scripting to Convert Modules) section; the old/new output of traced_cell(x, h) follows]

-(tensor([[0.6585, 0.0558, 0.3720, 0.5105],
-        [0.8285, 0.5487, 0.5880, 0.2457],
-        [0.4929, 0.2075, 0.2141, 0.6373]], grad_fn=<TanhBackward0>), tensor([[0.6585, 0.0558, 0.3720, 0.5105],
-        [0.8285, 0.5487, 0.5880, 0.2457],
-        [0.4929, 0.2075, 0.2141, 0.6373]], grad_fn=<TanhBackward0>))
+(tensor([[ 0.0281,  0.9534,  0.0623, -0.0350],
+        [ 0.4146,  0.9282,  0.3834,  0.3907],
+        [ 0.7277,  0.7082,  0.2041, -0.0696]], grad_fn=<TanhBackward0>), tensor([[ 0.0281,  0.9534,  0.0623, -0.0350],
+        [ 0.4146,  0.9282,  0.3834,  0.3907],
+        [ 0.7277,  0.7082,  0.2041, -0.0696]], grad_fn=<TanhBackward0>))
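The regenerated output here comes from the scripting section, where torch.jit.script compiles modules whose forward uses data-dependent control flow that tracing would miss. A small sketch of that technique (the MyDecisionGate/MyCell definitions mirror the tutorial but are re-stated here, not taken from this diff):

    import torch

    class MyDecisionGate(torch.nn.Module):
        # Control flow like this is erased by tracing but preserved by scripting.
        def forward(self, x):
            if x.sum() > 0:
                return x
            else:
                return -x

    class MyCell(torch.nn.Module):
        def __init__(self):
            super().__init__()
            self.dg = MyDecisionGate()
            self.linear = torch.nn.Linear(4, 4)

        def forward(self, x, h):
            new_h = torch.tanh(self.dg(self.linear(x)) + h)
            return new_h, new_h

    scripted_cell = torch.jit.script(MyCell())   # compiles forward, including the if/else
    print(scripted_cell.code)                    # the if branch survives in the TorchScript code
    print(scripted_cell(torch.rand(3, 4), torch.rand(3, 4)))
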
 
@@ -875,7 +875,7 @@
 [context: "더 읽을거리" (Further Reading), including the Colab notebook link https://colab.research.google.com/drive/1HiICg6jRkBnr5hvK2-VnMi88Vi9pUzEJ]
-Total running time of the script: ( 0 minutes 0.671 seconds)
+Total running time of the script: ( 0 minutes 0.368 seconds)
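
The substantive change in this diff is the raw/master to raw/main switch in the dynamic_graph.gif URL. A hypothetical spot check (not part of the diff) that the renamed-branch URL still resolves, using only the standard library:

    import urllib.request

    # HEAD request against the URL now referenced by the figure directive.
    url = "https://github.com/pytorch/pytorch/raw/main/docs/source/_static/img/dynamic_graph.gif"
    req = urllib.request.Request(url, method="HEAD")
    with urllib.request.urlopen(req) as resp:
        # Expect an HTTP 200 after redirects if the main-branch path is valid.
        print(resp.status, resp.headers.get("Content-Type"))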