<!DOCTYPE html>
<html lang="en">
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>VQA: Visual Question Answering</title>
<link rel="stylesheet" href="./static/css/foundation.css">
<link rel="stylesheet" href="./static/css/main.css">
<script src="./static/js/vendor/jquery.js"></script>
<script src="./static/js/jquery-2.1.3.min.js"></script>
<script type="text/javascript" src="./static/js/jquery.min.js"></script>
<script type="text/javascript" src="./static/js/jquery.countdown.min.js"></script>
<script type="text/javascript" src="./static/js/moment.min.js"></script>
<script type="text/javascript" src="./static/js/moment-timezone-with-data.min.js"></script>
<script type="text/javascript" src="./static/js/main-vqa.js"></script>
<script type="text/javascript" src="./static/js/main-visdial.js"></script>
</head>
<body class="off-canvas hide-extras" style="min-width:1200px; min-height:750px;">
<header>
<div class="row">
<a href="http://visualqa.org/"><img style="height: 100px; position:absolute; top:4px; left:0px;" src="./static/img/main.png" alt="logo" /></a>
<h1><img style="height: 90px;" src="./static/img/logo.png" alt="logo" /><br></h1>
<br>
</div>
</header>
<div class="contain-to-grid">
<nav class="top-bar" data-topbar>
<section class="top-bar-section">
<!-- Right Nav Section -->
<ul class="right">
<li><a href="index.html">Home</a></li>
<li><a href="people.html">People</a></li>
<li><a href="code.html">Code</a></li>
<li><a href="http://vqa.cloudcv.org/" onClick="ga('send', 'event', { eventCategory: 'Outgoing Link', eventAction: 'Demo', eventLabel: 'Demo'});">Demo</a></li>
<li class="has-dropdown"><a href="download.html">Download</a>
<ul class="dropdown">
<li><a href="download.html">VQA v2</a></li>
<li><a href="vqa_v1_download.html">VQA v1</a></li>
</ul>
</li>
<li><a href="evaluation.html">Evaluation</a></li>
<li class="active has-dropdown"><a href="challenge.html">Challenge</a>
<ul class="dropdown">
<li><a href="challenge.html">2021</a></li>
<li><a href="challenge_2020.html">2020</a></li>
<li><a href="challenge_2019.html">2019</a></li>
<li><a href="challenge_2018.html">2018</a></li>
<li><a href="challenge_2017.html">2017</a></li>
<li><a href="challenge_2016.html">2016</a></li>
</ul>
</li>
<li class="has-dropdown"><a href="http://visualqa.org/vqa_v2_teaser.html">Browse</a>
<ul class="dropdown">
<li><a href="http://visualqa.org/vqa_v2_teaser.html">VQA v2</a></li>
<li><a href="https://vqabrowser.cloudcv.org/">VQA v1</a></li>
</ul>
</li>
<li><a href="http://visualqa.org/visualize/">Visualize</a></li>
<!-- <li class="has-dropdown"><a href="http://visualqa.org/visualize/">Visualize</a>
<ul class="dropdown">
<li><a href="http://visualqa.org/visualize/">VQA v2</a></li>
<li><a href="http://visualqa.org/visualize/">VQA v1</a></li>
</ul>
</li> -->
<li class="has-dropdown"><a href="workshop.html">Workshop</a>
<ul class="dropdown">
<li><a href="workshop.html">2021</a></li>
<li><a href="workshop_2020.html">2020</a></li>
<li><a href="workshop_2019.html">2019</a></li>
<li><a href="workshop_2018.html">2018</a></li>
<li><a href="workshop_2017.html">2017</a></li>
<li><a href="workshop_2016.html">2016</a></li>
</ul>
</li>
<li><a href="sponsors.html">Sponsors</a></li>
<li><a href="terms.html">Terms</a></li>
<li><a href="external.html">External</a></li>
</ul>
</section>
</nav>
</div>
<section role="main" style="padding: 1em;">
<div class="row">
<p style="font-size:30px; color:black; font-weight: 50" align=center>Welcome to the VQA Challenge 2019!</p>
<p style="margin-top:-30px; font-size:20px; color:black; font-weight: 50;" align=center>Deadline: May 10, 2019 23:59:59 GMT</p>
<p style="font-size:20px; color:black; font-weight: 50" align=center>
<a href="#overview" style="padding:13px">Overview</a>
<a href="#guidelines" style="padding:13px">Challenge Guidelines</a>
<a href="https://evalai.cloudcv.org/web/challenges/challenge-page/163/overview" style="padding:13px">Evaluation Server</a>
<a href="roe_2019.html">Leaderboard</a>
<!-- <a href="#" data-dropdown="hover1" data-options="is_hover:true; hover_timeout:5000">Leaderboards ↓</a>
<ul id="hover1" class="f-dropdown" data-dropdown-content>
<li><a href="roe.html">Real Open-Ended</a></li>
<li><a href="aoe.html">Abstract Open-Ended</a></li>
<li><a href="rmc.html">Real Multiple-Choice</a></li>
<li><a href="amc.html">Abstract Multiple-Choice</a></li>
</ul> -->
</p>
<hr>
<div class="large-12 columns">
<img src="./static/img/challenge.png" height="900" width="500" style="display:block; margin:auto;" frameBorder="0">
</div>
</div>
<br>
<br>
<div class="row">
<div class="large-12 columns">
<br>
<p align=left>Papers reporting results on the VQA v2.0 dataset should:
</p><br>
<p align=left style="margin-top:-10px;">1) Report test-standard accuracies, which can be calculated using
either of the non-test-dev phases, i.e., <a href="https://evalai.cloudcv.org/web/challenges/challenge-page/163/phases">"Test-Standard"
or "Test-Challenge"</a>.
<br>
<br> 2) Compare their test-standard accuracies with those on the <a href="https://evalai.cloudcv.org/featured-challenges/80/leaderboard/124">test2018
leaderboard</a> and <a href="https://evalai.cloudcv.org/featured-challenges/1/leaderboard/3">test2017
leaderboard</a>.
</p>
</div>
<hr>
</div>
<div class="row" id="overview">
<h1 style="font-size:30px; color:grey; font-weight: 200">Overview</h1>
<div class="large-12 columns" style="text-align:left;">
<p style="font-size:15px; font-weight: 400; text-align:left">
We are pleased to announce the Visual Question Answering (VQA) Challenge 2019. Given an image and a natural language question about the image, the task is to provide an accurate natural language answer. <!--Visual questions selectively target different areas of an image, including background details and underlying context.-->
The challenge is hosted on <a target="_blank" href="http://evalai.cloudcv.org/">EvalAI</a>. Challenge link: <a href="https://evalai.cloudcv.org/web/challenges/challenge-page/163/overview">https://evalai.cloudcv.org/web/challenges/challenge-page/163/overview</a>
<br>
<!-- Teams are encouraged to compete in the following VQA challenge: -->
</p>
<!-- <div class="large-12 columns">
<ul style="font-size:15px; font-weight: 400; display: inline;" align=left>
<li>Balanced Real images</li>
<li>Balanced Binary Abstract scenes</li>
<li>Abstract scenes (same as VQA Challenge 2016)</li> -->
<!-- <li>Open-Ended for real images: <a href="https://evalai.cloudcv.org/web/challenges/challenge-page/80/participate">Submission</a> and <a href="https://evalai.cloudcv.org/web/challenges/challenge-page/80/leaderboard/124">Leaderboard</a></li> -->
<!-- <li>Open-Ended for abstract scenes: <a href="https://www.codalab.org/competitions/6981">Submission</a> and <a href="https://www.codalab.org/competitions/6981#results">Leaderboard</a></li>
<li>Multiple-Choice for real images: <a href="https://www.codalab.org/competitions/6971">Submission</a> and <a href="https://www.codalab.org/competitions/6971#results">Leaderboard</a></li>
<li>Multiple-Choice for abstract scenes: <a href="https://www.codalab.org/competitions/6991">Submission</a> and <a href="https://www.codalab.org/competitions/6991#results">Leaderboard</a></li>
</ul>
<br>
</div> -->
<p style="font-size:15px; font-weight: 400; text-align:left">
The VQA v2.0 train, validation, and test sets, containing more than 250K images and 1.1M questions, are available on the <a href="download.html">download</a> page. Each question is annotated with 10 concise, open-ended answers. Annotations for the training and validation sets are publicly available.
</p>
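<p style="font-size:15px; font-weight: 400; text-align:left">
For reference, the questions and annotations are distributed as plain JSON. The sketch below shows one way to peek at the training data in Python; the file names follow the v2.0 download naming and are placeholders for your local copies.
</p>
<pre style="font-size:13px; text-align:left; background-color:#f5f5f5; padding:10px;">
# Sketch: inspect the VQA v2.0 train questions and annotations (paths are placeholders).
import json

with open("v2_OpenEnded_mscoco_train2014_questions.json") as f:
    questions = json.load(f)["questions"]        # list of {image_id, question, question_id}
with open("v2_mscoco_train2014_annotations.json") as f:
    annotations = json.load(f)["annotations"]    # list of {question_id, answers, ...}

print(len(questions), "questions")
print(questions[0]["question"])
print([a["answer"] for a in annotations[0]["answers"]])  # the 10 human answers
</pre>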
<p style="font-size:15px; font-weight: 400; text-align:left">
VQA Challenge 2019 is the fourth edition of the VQA Challenge. The previous three editions were organized over the past three years, with results announced at the VQA Challenge Workshops at CVPR 2018, CVPR 2017 and CVPR 2016. More details about past challenges can be found here: <a href="challenge_2018.html">VQA Challenge 2018</a>, <a href="challenge_2017.html">VQA Challenge 2017</a> and <a href="vqa_v1_challenge.html">VQA Challenge 2016</a>.
</p>
<p style="font-size:15px; font-weight: 400; text-align:left">
Answers to some common questions about the challenge can be found in <a href="#faq">the FAQ section</a>.
</p>
</div>
<hr>
</div>
<div class="row">
<h1 style="font-size:30px; color:grey; font-weight: 200">Dates</h1>
<div class="large-12 columns" style="text-align:left;">
<p style="font-size:15px; font-weight: 200; border-style: solid;
border-width: 1px; text-align:justify; padding:5px; width:99%">
<code>
<span style="width:20%; margin:5px; display:inline-block;">Jan 28, 2019 </span> <span style="display:inline-block; margin:0px;"><a target="_blank" href="https://evalai.cloudcv.org/web/challenges/challenge-page/163/overview">VQA Challenge 2019 launched!</a></span><br> <!-- #TODO: add evalai link -->
<span style="width:20%; margin:5px; display:inline-block;">May 10, 2019 </span> <span style="display:inline-block; margin:0px;">Submission deadline at 23:59:59 UTC</span><br>
<span style="width:20%; margin:5px; display:inline-block;">Jun, 2019 </span> <span style="display:inline-block; margin:0px;">Winners' announcement at the <a href="workshop.html">VQA and Dialog Workshop, CVPR 2019</a> </span>
<!-- <span style="background-color:red; color:white;">Extended</span> -->
</code></p>
</div>
<div class="large-12 columns" style="text-align:left;">
<p style="font-size:15px; font-weight: 400; text-align:justify; padding:5px; width:99%">
After the challenge deadline, all challenge participants' results on the test-standard split will be made public on the test-standard leaderboard.
</p>
</div>
<hr>
</div>
<div class="row">
<h1 style="font-size:30px; color:grey; font-weight: 200">Prize</h1>
<div class="large-12 columns" style="text-align:left;">
<p style="font-size:15px; font-weight: 400; text-align:left">
The winning team will receive <a target="_blank" href="https://cloud.google.com/">Google Cloud Platform</a> credits worth $10k!
<br>
</p>
</div>
<hr>
</div>
<div class="row" id="guidelines">
<h1 style="font-size:30px; color:grey; font-weight: 200">Challenge Guidelines</h1>
<div class="large-12 columns" style="text-align:left;">
<p style="font-size:15px; font-weight: 400; text-align:left">
Following <a href="http://mscoco.org/dataset/">COCO</a>, we have divided the test set for VQA v2.0 into a
number of splits, including test-dev, test-standard, test-challenge, and test-reserve, to limit overfitting
while giving researchers more flexibility to test their systems. Test-dev is used for debugging and
validation experiments and allows for a maximum of 10 submissions per day (according to the UTC timezone).
Test-standard is the default test data for the VQA competition. When comparing to the state of the art
(e.g., in papers), results should be reported on test-standard. Test-standard is also used to maintain a
public leaderboard that is updated upon submission. Test-reserve is used to protect against possible
overfitting. If there are substantial differences between a method's scores on test-standard and
test-reserve, this will raise a red flag and prompt further investigation. Results on test-reserve will not
be publicly revealed. Finally, test-challenge is used to determine the winners of the challenge.
</p>
<p style="font-size:15px; font-weight: 400; text-align:left">
The <a href="evaluation.html">evaluation</a> page lists detailed information regarding how submissions will
be scored. The evaluation servers are open. As in previous years, we are hosting the evaluation servers on <a href="https://evalai.cloudcv.org">EvalAI</a>, developed by the <a href="https://cloudcv.org">CloudCV</a>
team. EvalAI is an <a href="https://github.com/Cloud-CV/EvalAI">open-source</a> web platform designed for
organizing and participating in challenges to push the state of the art on AI tasks. We encourage participants to first submit to the "Test-Dev" phase to make sure
they understand the submission procedure, as it is identical to the full test set submission procedure. <b>Note
that the "Test-Dev" and "Test-Challenge" evaluation servers do not have public leaderboards.</b>
</p>
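<p style="font-size:15px; font-weight: 400; text-align:left">
As a quick reminder of the metric detailed on the <a href="evaluation.html">evaluation</a> page, each predicted answer is scored against the 10 human answers for that question. The snippet below is only a rough sketch of that per-question accuracy (the standard answer normalization performed by the evaluation code is omitted):
</p>
<pre style="font-size:13px; text-align:left; background-color:#f5f5f5; padding:10px;">
# Rough sketch of the per-question VQA accuracy (answer normalization omitted):
# the predicted answer is compared against every set of 9 of the 10 human answers,
# each set contributes min(#matches / 3, 1), and the scores are averaged.
def vqa_accuracy(predicted, human_answers):
    scores = []
    for i in range(len(human_answers)):
        others = human_answers[:i] + human_answers[i + 1:]
        matches = sum(1 for a in others if a == predicted)
        scores.append(min(matches / 3.0, 1.0))
    return sum(scores) / len(scores)

# e.g. vqa_accuracy("2", ["2"] * 8 + ["3", "4"]) == 1.0
</pre>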
<p style="font-size:15px; font-weight: 400; text-align:left">
To <b>enter the competition</b>, first you need to create an account on <a href="https://evalai.cloudcv.org/auth/signup">EvalAI</a>.
<!-- From your account you will be able to participate in all VQA challenges. --> We allow people to enter
our challenge either privately or publicly. Any submissions to the "Test-Challenge" phase will be
considered to be participating in the challenge. For submissions to the "Test-Standard" phase, only those that
were submitted before the challenge deadline and posted to the public leaderboard will be considered to be
participating in the challenge.
<br>
<br> Before uploading your results to EvalAI, you will need to create a JSON file containing your results
in the correct format as described on the <a href="evaluation.html">evaluation</a> page.
</p>
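<p style="font-size:15px; font-weight: 400; text-align:left">
As a concrete illustration (see the <a href="evaluation.html">evaluation</a> page for the authoritative format), a results file is a single JSON list with one entry per test question. A minimal sketch, with made-up question IDs:
</p>
<pre style="font-size:13px; text-align:left; background-color:#f5f5f5; padding:10px;">
# Sketch: write a results file for the evaluation server (question IDs are made up).
import json

results = [
    {"question_id": 1, "answer": "yes"},
    {"question_id": 2, "answer": "2"},
]
with open("vqa_results.json", "w") as f:
    json.dump(results, f)
</pre>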
<p style="font-size:15px; font-weight: 400; text-align:left">
To <b>submit</b> your JSON file to the VQA evaluation servers, click on the “Submit” tab on the <a href="https://evalai.cloudcv.org/web/challenges/challenge-page/163/overview">VQA
Challenge 2019</a> page on EvalAI. Select the phase ("Test-Dev", "Test-Standard", or "Test-Challenge").
<b>Select the JSON file to upload, fill in the required fields such as "method name"
and "method description", and click “Submit”</b>. After the file is uploaded, the evaluation server will
begin processing. To view the status of your submission please go to <a href="https://evalai.cloudcv.org/web/challenges/challenge-page/163/my-submission">“My
Submissions”</a> tab and choose the phase to which the results file was uploaded. Please be patient,
the evaluation may take quite some time to complete
<!--(~1min on test-dev and ~3min on the full test set)--> (~4 min). If the status of your submission is
“Failed”, please check the "Stderr File" for the corresponding submission.
</p>
<p style="font-size:15px; font-weight: 400; text-align:left">
After evaluation is complete and the server shows a status of “Finished”, you will have the option to
download your evaluation results by selecting “Result File” for the corresponding submission. The "Result
File" will contain the aggregated accuracy on the corresponding test-split (test-dev split for
"Test-Dev" phase, test-standard and test-dev splits for both "Test-Standard" and "Test-Challenge"
phases). If you want your submission to appear on the public leaderboard, please submit to the "Test-Standard" phase
and check the box under "Show on Leaderboard" for the corresponding submission.
</p>
<p style="font-size:15px; font-weight: 400; text-align:left">
Please limit the number of entries to the challenge evaluation server to a reasonable number, e.g., one
entry per paper. To avoid overfitting, the number of submissions per user is limited to 1 upload per day
(according to UTC timezone) and a maximum of 5 submissions per user. It is not acceptable to create
multiple accounts for a single project to circumvent this limit. The exception is if a group
publishes two papers describing unrelated methods, in which case both sets of results can be submitted for
evaluation. However, Test-Dev allows for 10 submissions per day.
<!--Please refer to the section on "Test-Dev Best Practices" in the <a href="http://mscoco.org/dataset/#detections-upload">MSCOCO detection challenge</a> page for more information about the test-dev set. -->
</p>
<p style="font-size:15px; font-weight: 400; text-align:left">
<!-- Competitors are recommended but not restricted to train their algorithms on VQA train and val sets. -->
The <a href="download.html">download</a> page contains links to all VQA v2.0 train/val/test images,
questions, and associated annotations (for train/val only). Please specify any and all external data used
for training in the "method description" when uploading results to the evaluation server.
</p>
<p style="font-size:15px; font-weight: 400; text-align:left">
Results must be submitted to the evaluation server by the challenge deadline. Competitors' algorithms will
be evaluated according to the rules described on the <a href="evaluation.html">evaluation</a> page.
Challenge participants with the most successful and innovative methods will be invited to present at the <a href="workshop.html">VQA and Dialog Workshop at CVPR 2019</a>.
</p>
</div>
<hr>
</div>
<div class="row">
<h1 style="font-size:30px; color:grey; font-weight: 200">Tools and Instructions</h1>
<div class="large-12 columns" style="text-align:left;">
<p style="font-size:15px; font-weight: 400; text-align:left">
We provide API support for the VQA annotations and evaluation code. To download the VQA API, please visit our <a href="https://github.com/GT-vision-lab/VQA">GitHub repository</a>. For an overview of how to use the API, please visit the <a href="download.html">download</a> page and consult the section entitled VQA API. To obtain API support for COCO images, please visit the <a href="http://mscoco.org/dataset/#download">COCO download</a> page. To obtain API support for abstract scenes, please visit the <a href="https://github.com/VT-vision-lab/abstract_scenes_v002">GitHub repository</a>.
</p>
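<p style="font-size:15px; font-weight: 400; text-align:left">
A rough sketch of evaluating a results file offline with the Python API is shown below. Class and method names follow the demo scripts in the GT-vision-lab/VQA repository; module import paths and file paths are placeholders and may need adjusting to your checkout.
</p>
<pre style="font-size:13px; text-align:left; background-color:#f5f5f5; padding:10px;">
# Sketch: offline evaluation with the VQA Python API (paths are placeholders;
# see vqaEvalDemo.py in the repository for the full, authoritative demo).
from vqaTools.vqa import VQA
from vqaEvaluation.vqaEval import VQAEval

ann_file  = "v2_mscoco_val2014_annotations.json"
ques_file = "v2_OpenEnded_mscoco_val2014_questions.json"
res_file  = "vqa_results.json"

vqa      = VQA(ann_file, ques_file)           # ground-truth annotations and questions
vqa_res  = vqa.loadRes(res_file, ques_file)   # your results, wrapped as a VQA object
vqa_eval = VQAEval(vqa, vqa_res, n=2)         # n = number of decimal places in accuracies
vqa_eval.evaluate()
print("Overall accuracy:", vqa_eval.accuracy["overall"])
</pre>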
<p>
<!-- Due to the large size of the VQA dataset and the complexity of this challenge, the process of competing in this challenge may not seem simple.
To help guide competitors to victory, we provide explanations and instructions for each step of the process on the download, evaluation pages. -->
For challenge related questions, please contact <a href="mailto:[email protected]" target="_top">[email protected]</a>. In case of technical questions related to <a href="https://evalai.cloudcv.org">EvalAI</a>, please post on the <a href="https://evalai-forum.cloudcv.org/c/vqa-2019">VQA Challenge forum</a>.
</p>
</div>
<hr>
</div>
<div class="row">
<a name="faq"></a>
<h1 style="font-size:30px; color:grey; font-weight: 200">Frequently Asked Questions (FAQ)</h1>
<div class="large-12 columns" style="text-align:left;">
<p style="font-size:15px; font-weight: 400; text-align:left">
As a reminder, any submission made before the challenge deadline that is either shown publicly on the
"Test-Standard" leaderboard <em><b>OR</b></em> submitted to the "Test-Challenge" phase will be enrolled
in the challenge. For further clarity, we answer some common questions below:
</p>
<div class="large-12 columns">
<ul style="font-size:15px; font-weight: 400; display: inline;" align=left>
<li>
<b>Q:</b> What do I do if I want to make my test-standard results public <em><b>and</b></em>
participate in the challenge?
<br>
<b>A:</b> Making your results public (<i>i.e.</i>, visible on the leaderboard) on the
"Test-Standard" phase implies that you are participating in the challenge.
</li>
<li>
<b>Q:</b> What do I do if I want to make my test-standard results public, but I do not want to
participate in the challenge?
<br>
<b>A:</b> We do not allow for this option.
</li>
<li>
<b>Q:</b> What do I do if I want to participate in the challenge, but I do not want to make my
test-standard results public yet?
<br>
<b>A:</b> Submit to the "Test-Challenge" phase. This phase was created for this scenario.
</li>
<li>
<b>Q:</b> When will I find out my test-challenge accuracies?
<br>
<b>A:</b> We will reveal challenge results some time after the deadline. Results will first be
announced at our <a href="http://www.visualqa.org/workshop.html">Visual Question Answering and
Dialog Workshop at CVPR 2019</a>.
</li>
<li>
<b>Q:</b> Can I participate in the VQA challenge as a member of more than one EvalAI team?
<br>
<b>A:</b> No, you may participate as part of only one team.
</li>
<li>
<b>Q:</b> Can I add other members to my EvalAI team?
<br>
<b>A:</b> Yes.
</li>
<li>
<b>Q:</b> Is the daily/overall submission limit for a user or for a team?
<br>
<b>A:</b> It is for a team.
</li>
</ul>
</div>
</div>
<hr>
</div>
<div class="row">
<h1 style="font-size:30px; color:grey; font-weight: 200">Organizers</h1>
<div class="team" id="people">
<div class="row">
<!-- <div class="large-2 columns" style="padding-left:100px">
<p></p>
</div> -->
<div class="large-2 columns">
<a href="https://ayshrv.github.io/"><img src="./static/img/ayush.jpg" class="home_team_picture" style="width: 100px; height:100px;">
<br>
</a>
<p style="font-weight: 200;">Ayush Shrivastava
<br>Georgia Tech</p>
</div>
<div class="large-2 columns">
<a href="https://kdexd.github.io/"><img src="./static/img/karan.jpg" class="home_team_picture" style="width: 100px; height:100px;">
<br>
</a>
<p style="font-weight: 200;">Karan Desai
<br>Georgia Tech</p>
</div>
<div class="large-2 columns">
<a href="https://www.cc.gatech.edu/~ygoyal3/"><img src="./static/img/yash.jpg" class="home_team_picture" style="width: 100px; height:100px;">
<br>
</a>
<p style="font-weight: 200;">Yash Goyal
<br>Georgia Tech</p>
</div>
<div class="large-2 columns">
<a href="https://www.cc.gatech.edu/~aagrawal307/"><img src="./static/img/aishwarya.jpg" class="home_team_picture" style="width: 100px; height:100px;">
<br>
</a>
<p style="font-weight: 200; margin-left:-20px;">Aishwarya Agrawal
<br>Georgia Tech</p>
</div>
<!-- <div class="large-2 columns">
<a href="https://tejaskhot.github.io/"><img src="./static/img/tejas.jpg" class="home_team_picture" style="width: 100px; height:100px;">
<br>
</a>
<p style="font-weight: 200;">Tejas Khot
<br>Virginia Tech</p>
</div> -->
<div class="large-2 columns">
<a href="https://www.cc.gatech.edu/~dbatra/"><img src="./static/img/dhruv.jpg" class="home_team_picture" style="width: 100px; height:100px;">
<br>
</a>
<p style="font-weight: 200;">Dhruv Batra
<br>Georgia Tech / Facebook AI Research</p>
</div>
<div class="large-2 columns">
<a href="https://www.cc.gatech.edu/~parikh/"><img src="./static/img/devi.jpg" class="home_team_picture" style="width: 100px; height:100px;">
<br>
</a>
<p style="font-weight: 200;">Devi Parikh
<br>Georgia Tech / Facebook AI Research</p>
</div>
<!-- <div class="large-4 columns">
<p>*equal contribution</p>
</div> -->
<hr>
</div>
</div>
</div>
<script>
(function(i, s, o, g, r, a, m) {
i['GoogleAnalyticsObject'] = r;
i[r] = i[r] || function() {
(i[r].q = i[r].q || []).push(arguments)
}, i[r].l = 1 * new Date();
a = s.createElement(o),
m = s.getElementsByTagName(o)[0];
a.async = 1;
a.src = g;
m.parentNode.insertBefore(a, m)
})(window, document, 'script', '//www.google-analytics.com/analytics.js', 'ga');
ga('create', 'UA-63638588-1', 'auto');
ga('send', 'pageview');
</script>
<script src="./static/js/foundation.js"></script>
<script src="./static/js/foundation.dropdown.js"></script>
<script>
$(document).foundation();
</script>
<!-- jquery smooth scroll to id's
<script>
$(function() {
$('a[href*=#]:not([href=#])').click(function() {
if (location.pathname.replace(/^\//, '') == this.pathname.replace(/^\//, '') && location.hostname == this.hostname) {
var target = $(this.hash);
target = target.length ? target : $('[name=' + this.hash.slice(1) + ']');
if (target.length) {
$('html,body').animate({
scrollTop: target.offset().top
}, 1000);
return false;
}
}
});
});
</script>
</body>
</html>