# Minimal makefile for Sphinx documentation
#
# Locale
export LC_ALL=C
# You can set these variables from the command line.
SPHINXOPTS ?=
SPHINXBUILD = sphinx-build
SPHINXPROJ = PyTorchTutorials
SOURCEDIR = .
BUILDDIR = _build
DATADIR = _data
GH_PAGES_SOURCES = $(SOURCEDIR) Makefile
ZIPOPTS ?= -qo
TAROPTS ?=
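# For example, `make html SPHINXOPTS="-W"` turns Sphinx warnings into errors,
# and `make html BUILDDIR=/tmp/tutorials-build` writes the output under a different directory.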
# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile docs
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -v
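# For example, `make html` is forwarded to Sphinx roughly as
# `sphinx-build -M html "." "_build" -v` (plus any SPHINXOPTS/$(O) flags).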
download:
	# IMPORTANT NOTE: Please make sure your dataset is downloaded to the *_source/data folder,
	# otherwise CI might silently break.
	# NOTE: When adding a new dataset, please consider using Step1 and one of the Step2 variants;
	# [something] should be replaced with the actual value.
	# Step1. DOWNLOAD: wget -nv -N [SOURCE_FILE] -P $(DATADIR)
	# Step2-1. UNZIP: unzip -o $(DATADIR)/[SOURCE_FILE] -d [*_source/data/]
	# Step2-2. UNTAR: tar -xzf $(DATADIR)/[SOURCE_FILE] -C [*_source/data/]
	# Step2-3. AS-IS: cp $(DATADIR)/[SOURCE_FILE] [*_source/data/]
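	# Illustration only -- a hypothetical dataset; the URL and target folder below are placeholders:
	#   Step1.   wget -nv -N https://example.com/my_dataset.zip -P $(DATADIR)
	#   Step2-1. unzip $(ZIPOPTS) $(DATADIR)/my_dataset.zip -d beginner_source/data/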
	# make data directories
	mkdir -p $(DATADIR)
	mkdir -p advanced_source/data
	mkdir -p beginner_source/data
	mkdir -p intermediate_source/data
	mkdir -p prototype_source/data
	mkdir -p recipes_source/recipes/data
	# transfer learning tutorial data
	wget -nv -N https://download.pytorch.org/tutorial/hymenoptera_data.zip -P $(DATADIR)
	unzip $(ZIPOPTS) $(DATADIR)/hymenoptera_data.zip -d beginner_source/data/
	# nlp tutorial data
	wget -nv -N https://download.pytorch.org/tutorial/data.zip -P $(DATADIR)
	unzip $(ZIPOPTS) $(DATADIR)/data.zip -d intermediate_source/ # This will unzip all files in data.zip into the intermediate_source/data/ folder
	# data loader tutorial
	wget -nv -N https://download.pytorch.org/tutorial/faces.zip -P $(DATADIR)
	unzip $(ZIPOPTS) $(DATADIR)/faces.zip -d beginner_source/data/
	unzip $(ZIPOPTS) $(DATADIR)/faces.zip -d recipes_source/recipes/data/
	wget -nv -N https://download.pytorch.org/models/tutorials/4000_checkpoint.tar -P $(DATADIR)
	cp $(DATADIR)/4000_checkpoint.tar beginner_source/data/
	# neural style images
	rm -rf advanced_source/data/images/ || true
	mkdir -p advanced_source/data/images/
	cp -r _static/img/neural-style/ advanced_source/data/images/
	# Download dataset for beginner_source/dcgan_faces_tutorial.py
	wget -nv -N https://s3.amazonaws.com/pytorch-tutorial-assets/img_align_celeba.zip -P $(DATADIR)
	unzip $(ZIPOPTS) $(DATADIR)/img_align_celeba.zip -d beginner_source/data/celeba
	# Download dataset for beginner_source/hybrid_frontend/introduction_to_hybrid_frontend_tutorial.py
	wget -nv -N https://s3.amazonaws.com/pytorch-tutorial-assets/iris.data -P $(DATADIR)
	cp $(DATADIR)/iris.data beginner_source/data/
	# Download dataset for beginner_source/chatbot_tutorial.py
	wget -nv -N https://s3.amazonaws.com/pytorch-tutorial-assets/cornell_movie_dialogs_corpus_v2.zip -P $(DATADIR)
	unzip $(ZIPOPTS) $(DATADIR)/cornell_movie_dialogs_corpus_v2.zip -d beginner_source/data/
	# Download dataset for beginner_source/audio_classifier_tutorial.py
	wget -nv -N https://s3.amazonaws.com/pytorch-tutorial-assets/UrbanSound8K.tar.gz -P $(DATADIR)
	tar $(TAROPTS) -xzf $(DATADIR)/UrbanSound8K.tar.gz -C ./beginner_source/data/
	# Download model for beginner_source/fgsm_tutorial.py
	wget -nv -N https://s3.amazonaws.com/pytorch-tutorial-assets/lenet_mnist_model.pth -P $(DATADIR)
	cp $(DATADIR)/lenet_mnist_model.pth ./beginner_source/data/lenet_mnist_model.pth
	# Download model for advanced_source/dynamic_quantization_tutorial.py
	wget -nv -N https://s3.amazonaws.com/pytorch-tutorial-assets/word_language_model_quantize.pth -P $(DATADIR)
	cp $(DATADIR)/word_language_model_quantize.pth advanced_source/data/word_language_model_quantize.pth
	# Download data for advanced_source/dynamic_quantization_tutorial.py
	wget -nv -N https://s3.amazonaws.com/pytorch-tutorial-assets/wikitext-2.zip -P $(DATADIR)
	unzip $(ZIPOPTS) $(DATADIR)/wikitext-2.zip -d advanced_source/data/
	# Download model for advanced_source/static_quantization_tutorial.py
	wget -nv -N https://download.pytorch.org/models/mobilenet_v2-b0353104.pth -P $(DATADIR)
	cp $(DATADIR)/mobilenet_v2-b0353104.pth advanced_source/data/mobilenet_pretrained_float.pth
	# Download model for prototype_source/graph_mode_static_quantization_tutorial.py
	wget -nv -N https://download.pytorch.org/models/resnet18-5c106cde.pth -P $(DATADIR)
	cp $(DATADIR)/resnet18-5c106cde.pth prototype_source/data/resnet18_pretrained_float.pth
	# Download vocab for beginner_source/flava_finetuning_tutorial.py
	wget -nv -N http://dl.fbaipublicfiles.com/pythia/data/vocab.tar.gz -P $(DATADIR)
	tar $(TAROPTS) -xzf $(DATADIR)/vocab.tar.gz -C ./beginner_source/data/
	# Download spaCy language models for beginner_source/translation_transformer.py
	python -m spacy download en_core_web_sm
	python -m spacy download de_core_news_sm
docs:
	make download
	make html
	rm -rf docs
	cp -r $(BUILDDIR)/html docs
	cp CNAME docs/CNAME
	cp robots.txt docs/robots.txt
	touch docs/.nojekyll
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
html-noplot:
	$(SPHINXBUILD) -D plot_gallery=0 -b html $(SPHINXOPTS) "$(SOURCEDIR)" "$(BUILDDIR)/html"
	# bash .jenkins/remove_invisible_code_block_batch.sh "$(BUILDDIR)/html"
	@echo
	@echo "HTML-ONLY build finished. The HTML pages are in $(BUILDDIR)/html."
clean-cache:
	make clean
	rm -rf advanced beginner intermediate recipes