forked from hoch/webvr-samples
-
Notifications
You must be signed in to change notification settings - Fork 0
/
04-simple-mirroring.html
261 lines (223 loc) · 9.8 KB
/
04-simple-mirroring.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
<!doctype html>
<!--
Copyright 2016 The Chromium Authors. All rights reserved.
Use of this source code is governed by a BSD-style license that can be
found in the LICENSE file.
-->
<html>
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, user-scalable=no">
<meta name="mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-capable" content="yes">
<title>04 - Simple Mirroring</title>
<!--
This sample demonstrates how to mirror content to an external display
while presenting to a VRDisplay.
-->
<style>
#webgl-canvas {
box-sizing: border-box;
height: 100%;
left: 0;
margin: 0;
position: absolute;
top: 0;
width: 100%;
}
</style>
<!-- This entire block is only to facilitate dynamically enabling and
disabling the WebVR polyfill, and is not necessary for most WebVR apps.
If you want to use the polyfill in your app, just include the js file and
everything will work the way you want it to by default. -->
<script>
// Configuration consumed by webvr-polyfill.js when it initializes.
var WebVRConfig = {
// Prevents the polyfill from initializing automatically.
DEFER_INITIALIZATION: true,
// Polyfill optimizations
// Only update the render target bindings when submitFrame is called,
// rather than on every GL call the polyfill intercepts.
DIRTY_SUBMIT_FRAME_BINDINGS: true,
// Render at 75% of the recommended buffer size to trade sharpness for speed.
BUFFER_SCALE: 0.75,
};
</script>
<script src="js/third-party/webvr-polyfill.js"></script>
<script src="js/third-party/wglu/wglu-url.js"></script>
<script>
// Dynamically turn the polyfill on if requested by the query args.
// (e.g. loading the page with ?polyfill=1 forces the polyfill on;
// DEFER_INITIALIZATION above stops it from starting on its own.)
if (WGLUUrl.getBool('polyfill', false)) { InitializeWebVRPolyfill(); }
</script>
<!-- End sample polyfill enabling logic -->
<script src="js/third-party/gl-matrix-min.js"></script>
<script src="js/third-party/wglu/wglu-program.js"></script>
<script src="js/third-party/wglu/wglu-stats.js"></script>
<script src="js/third-party/wglu/wglu-texture.js"></script>
<script src="js/vr-cube-sea.js"></script>
<script src="js/vr-samples-util.js"></script>
</head>
<body>
<canvas id="webgl-canvas"></canvas>
<script>
/* global mat4, VRCubeSea, WGLUStats, WGLUTextureLoader, VRSamplesUtil */
(function () {
"use strict";
// The VRDisplay we will present to; stays null if none is found.
var vrDisplay = null;
// Scratch matrices reused every frame to avoid per-frame allocation.
var projectionMat = mat4.create();
var viewMat = mat4.create();
// The Enter/Exit VR button, so onVRPresentChange can swap it out.
var vrPresentButton = null;
// ================================
// WebVR-specific code begins here.
// ================================
// WebGL setup.
var webglCanvas = document.getElementById("webgl-canvas");
// Populated lazily by initWebGL, once we know which display we have.
var gl = null;
var cubeSea = null;
var stats = null;
// Creates the WebGL context, scene, and stats display, then starts the
// render loop. `preserveDrawingBuffer` controls the context attribute of
// the same name (see comment below for why it is conditional).
function initWebGL (preserveDrawingBuffer) {
  // Setting preserveDrawingBuffer to true prevents the canvas from being
  // implicitly cleared when calling submitFrame or compositing the canvas
  // on the document. For the simplest form of mirroring we want to create
  // the canvas with that option enabled. Note that this may incur a
  // performance penalty, as it may imply that additional copies of the
  // canvas backbuffer need to be made. As a result, we ONLY want to set
  // that if we know the VRDisplay has an external display, which is why
  // we defer WebGL initialization until after we've gotten results back
  // from navigator.getVRDisplays and know which device we'll be
  // presenting with.
  var glAttribs = {
    alpha: false,
    antialias: !VRSamplesUtil.isMobile(),
    preserveDrawingBuffer: preserveDrawingBuffer
  };
  gl = webglCanvas.getContext("webgl", glAttribs);
  if (!gl) {
    // Some older browsers only expose WebGL 1 under the prefixed name.
    gl = webglCanvas.getContext("experimental-webgl", glAttribs);
  }
  if (!gl) {
    // FIX: previously a missing WebGL context caused an uncaught
    // TypeError on the first gl call; show a visible error instead.
    VRSamplesUtil.addError("Your browser does not support WebGL.");
    return;
  }
  gl.clearColor(0.1, 0.2, 0.3, 1.0);
  gl.enable(gl.DEPTH_TEST);
  gl.enable(gl.CULL_FACE);
  var textureLoader = new WGLUTextureLoader(gl);
  var texture = textureLoader.loadTexture("media/textures/cube-sea.png");
  cubeSea = new VRCubeSea(gl, texture);
  stats = new WGLUStats(gl);
  // Wait until we have a WebGL context to resize and start rendering.
  window.addEventListener("resize", onResize, false);
  onResize();
  window.requestAnimationFrame(onAnimationFrame);
}
// Asks the VRDisplay to begin presenting our canvas. On success the
// 'vrdisplaypresentchange' event handles all UI updates, so the fulfill
// callback is intentionally empty.
function onVRRequestPresent () {
  var presentPromise = vrDisplay.requestPresent([{ source: webglCanvas }]);
  presentPromise.then(
    function () { /* handled by the vrdisplaypresentchange listener */ },
    function () {
      VRSamplesUtil.addError("requestPresent failed.", 2000);
    }
  );
}
// Stops presenting to the VRDisplay, if we currently are. Exiting while
// not presenting would reject, so guard against that up front.
function onVRExitPresent () {
  if (!vrDisplay.isPresenting) {
    return;
  }
  var exitPromise = vrDisplay.exitPresent();
  exitPromise.then(
    function () { /* handled by the vrdisplaypresentchange listener */ },
    function () {
      VRSamplesUtil.addError("exitPresent failed.", 2000);
    }
  );
}
// Fired whenever presentation starts or stops. Resizes the canvas for the
// new mode and, on displays with an external screen (where the page stays
// visible during presentation), swaps the Enter/Exit VR button.
function onVRPresentChange () {
  // Presenting and mirroring want different drawing buffer sizes.
  onResize();
  if (!vrDisplay.capabilities.hasExternalDisplay) {
    return;
  }
  VRSamplesUtil.removeButton(vrPresentButton);
  if (vrDisplay.isPresenting) {
    vrPresentButton = VRSamplesUtil.addButton("Exit VR", "E", "media/icons/cardboard64.png", onVRExitPresent);
  } else {
    vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
  }
}
// Enumerate VR displays and initialize WebGL once we know whether an
// external display is available (which decides preserveDrawingBuffer).
if (navigator.getVRDisplays) {
  navigator.getVRDisplays().then(function (displays) {
    if (displays.length > 0) {
      vrDisplay = displays[0];
      VRSamplesUtil.addButton("Reset Pose", "R", null, function () { vrDisplay.resetPose(); });
      if (vrDisplay.capabilities.canPresent)
        vrPresentButton = VRSamplesUtil.addButton("Enter VR", "E", "media/icons/cardboard64.png", onVRRequestPresent);
      window.addEventListener('vrdisplaypresentchange', onVRPresentChange, false);
      window.addEventListener('vrdisplayactivated', onVRRequestPresent, false);
      window.addEventListener('vrdisplaydeactivated', onVRExitPresent, false);
      // Only use preserveDrawingBuffer if we have an external display to
      // mirror to.
      initWebGL(vrDisplay.capabilities.hasExternalDisplay);
    } else {
      initWebGL(false);
    }
  }, function () {
    // FIX: the promise previously had no rejection handler; a failed
    // enumeration left a blank page with no content and no explanation.
    initWebGL(false);
    VRSamplesUtil.addError("VR displays could not be enumerated.");
  });
} else if (navigator.getVRDevices) {
  initWebGL(false);
  VRSamplesUtil.addError("Your browser supports WebVR but not the latest version. See <a href='http://webvr.info'>webvr.info</a> for more info.");
} else {
  // No VR means no mirroring, so create WebGL content without
  // preserveDrawingBuffer
  initWebGL(false);
  VRSamplesUtil.addError("Your browser does not support WebVR. See <a href='http://webvr.info'>webvr.info</a> for assistance.");
}
// Sizes the canvas drawing buffer for the current mode: the display's
// recommended per-eye size while presenting, the window size otherwise.
function onResize () {
  var presenting = vrDisplay && vrDisplay.isPresenting;
  if (!presenting) {
    // We only want to change the size of the canvas drawing buffer to
    // match the window dimensions when we're not presenting, scaled up
    // for high-DPI screens.
    webglCanvas.width = webglCanvas.offsetWidth * window.devicePixelRatio;
    webglCanvas.height = webglCanvas.offsetHeight * window.devicePixelRatio;
    return;
  }
  // If we're presenting we want to use the drawing buffer size
  // recommended by the VRDevice, since that will ensure the best
  // results post-distortion.
  var leftEye = vrDisplay.getEyeParameters("left");
  var rightEye = vrDisplay.getEyeParameters("right");
  // For simplicity we're going to render both eyes at the same size,
  // even if one eye needs less resolution. You can render each eye at
  // the exact size it needs, but you'll need to adjust the viewports to
  // account for that.
  webglCanvas.width = Math.max(leftEye.renderWidth, rightEye.renderWidth) * 2;
  webglCanvas.height = Math.max(leftEye.renderHeight, rightEye.renderHeight);
}
// Renders one view of the cube sea for the given head pose. When `eye`
// is provided the projection and view matrices come from that eye's
// parameters; when it is null a monoscopic window-sized view is drawn.
function renderSceneView (pose, eye) {
  // Fall back to an identity pose when tracking data is unavailable.
  var orientation = pose.orientation || [0, 0, 0, 1];
  var position = pose.position || [0, 0, 0];
  if (eye) {
    mat4.perspectiveFromFieldOfView(projectionMat, eye.fieldOfView, 0.1, 1024.0);
  } else {
    mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
  }
  // Build the head transform, then shift by the eye offset (if any) and
  // invert to get the view matrix.
  mat4.fromRotationTranslation(viewMat, orientation, position);
  if (eye) {
    mat4.translate(viewMat, viewMat, eye.offset);
  }
  mat4.invert(viewMat, viewMat);
  cubeSea.render(projectionMat, viewMat, stats);
}
// Per-frame callback. Draws side-by-side stereo views and submits them to
// the display while presenting; otherwise draws a single monoscopic view.
// Note the rAF source: the VRDisplay's loop while one exists, the window's
// otherwise.
function onAnimationFrame (t) {
// Bracket the whole frame so the stats graph measures total frame time.
stats.begin();
gl.clear(gl.COLOR_BUFFER_BIT | gl.DEPTH_BUFFER_BIT);
if (vrDisplay) {
// Schedule the next frame on the display's own loop, which runs at the
// headset's refresh rate while presenting.
vrDisplay.requestAnimationFrame(onAnimationFrame);
// Sample the head pose once and reuse it for both eyes so they agree.
// NOTE(review): getPose() is the original WebVR 1.0 API; later revisions
// replaced it with getFrameData() — confirm against the polyfill in use.
var pose = vrDisplay.getPose();
if (vrDisplay.isPresenting) {
// Left eye renders into the left half of the canvas...
gl.viewport(0, 0, webglCanvas.width * 0.5, webglCanvas.height);
renderSceneView(pose, vrDisplay.getEyeParameters("left"));
// ...and the right eye into the right half.
gl.viewport(webglCanvas.width * 0.5, 0, webglCanvas.width * 0.5, webglCanvas.height);
renderSceneView(pose, vrDisplay.getEyeParameters("right"));
// Hand the finished frame (and the pose it was rendered with) to the
// display. With preserveDrawingBuffer enabled the canvas keeps this
// content, which is what makes the on-page mirror work.
vrDisplay.submitFrame(pose);
} else {
// Display present but not presenting: draw a single full-canvas view
// that still tracks the headset's pose.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
renderSceneView(pose, null);
stats.renderOrtho();
}
} else {
window.requestAnimationFrame(onAnimationFrame);
// No VRDisplay found.
gl.viewport(0, 0, webglCanvas.width, webglCanvas.height);
mat4.perspective(projectionMat, Math.PI*0.4, webglCanvas.width / webglCanvas.height, 0.1, 1024.0);
mat4.identity(viewMat);
cubeSea.render(projectionMat, viewMat, stats);
stats.renderOrtho();
}
stats.end();
}
})();
</script>
</body>
</html>