利用web技术实现AR

昨晚同事发我一个链接《Web前端也能做的AR互动》,由腾讯游戏官方手机团队出品,发现原来web也能实现AR效果,之前还一直跟人家说这个实现不了,瞬间打脸,感觉到技术储备的重要性了。

AR效果大家都体验过,关键点是摄像头的实时视频流与3D模型结合到一起。
摄像头的实时视频流处理我们可以用getUserMedia(),并且将获取到的数据流放到video中展示,之后在video层上叠加任何我们需要的内容和操作,就可以达到AR效果。
目前只有Android设备下微信、手Q支持getUserMedia()网页拉起摄像头。那么在ios下我们只能通过建立全景图来做效果了。

关键代码:

html:

1
2
3
4
<!-- Container the three.js renderer canvas is injected into; it is
     absolutely positioned above the video layer (see CSS z-index). -->
<div id="WebGL-output"></div>
<!-- Full-screen wrapper; overflow:hidden crops the centered video. -->
<div id="videoWrap">
<!-- muted + playsinline are required by mobile autoplay policies:
     without them iOS/Android browsers refuse to auto-start playback
     and iOS forces the video into native fullscreen. -->
<video src="" id="videoBox" autoplay muted playsinline></video>
</div>

css:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
html,
body {
  /* Full-height page so percentage heights below resolve. */
  margin: 0;
  padding: 0;
  height: 100%;
}
video {
  /*filter: hue-rotate(180deg) saturate(200%);*/
  /*-moz-filter: hue-rotate(180deg) saturate(200%);*/
  /*-webkit-filter: hue-rotate(180deg) saturate(200%);*/
  /*max-width: 100%;*/
  /*width: 320px;*/
}
#videoWrap {
  /* Full-screen wrapper: overflow:hidden crops the centered video,
     which is the "fake fullscreen video" trick described below. */
  position: relative;
  width: 100%;
  height: 100%;
  left: 0;
  top: 0;
  background: #4CAABE;
  overflow: hidden;
}
#videoBox {
  position: absolute;
  /*width: 100%;*/
  height: 100%;
  left: 50%;
  top: 50%;
  z-index: 1;
  /* Center the video in the wrapper. The unprefixed properties are
     required: -webkit- alone leaves the video uncentered on Firefox
     and other non-WebKit engines. */
  -webkit-transform: translate(-50%, -50%);
  -webkit-transform-origin: 50% 50%;
  transform: translate(-50%, -50%);
  transform-origin: 50% 50%;
}
#WebGL-output {
  /* Transparent WebGL canvas stacked above the video (z-index 2 > 1). */
  position: absolute;
  z-index: 2;
  left: 0;
  top: 0;
  width: 100%;
  height: 100%;
}

css中有个技巧,就是让video全屏的方法是,在video外层包一个div,这个div设置绝对定位,高度设置为100%,并且设置overflow:hidden,video就会自动全屏。

js:

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
// Camera capture. The original code relied on two APIs that browsers have
// since removed: MediaStreamTrack.getSources() (superseded by the
// facingMode constraint) and URL.createObjectURL(MediaStream) (superseded
// by HTMLMediaElement.srcObject). Prefer the standards-track
// navigator.mediaDevices.getUserMedia() and keep the prefixed legacy
// callback API only as a fallback for old Android WebViews.
navigator.getUserMedia = navigator.getUserMedia ||
navigator.webkitGetUserMedia || navigator.mozGetUserMedia;
var constraints = {
audio: false,
// facingMode: 'environment' requests the rear camera — what an AR overlay
// needs. Browsers that cannot honor it fall back to a default camera.
video: { facingMode: 'environment' }
};
var video = document.getElementById('videoBox');
function successCallback(stream) {
window.stream = stream; // keep a reference for debugging from the console
if ('srcObject' in video) {
// Modern path: attach the MediaStream directly to the element.
video.srcObject = stream;
} else if (window.URL) {
// Legacy path: createObjectURL(MediaStream) was removed from modern
// browsers but still works in old WebViews.
video.src = window.URL.createObjectURL(stream);
} else {
video.src = stream;
}
}
function errorCallback(error) {
alert('navigator.getUserMedia error: ' + error);
}
function getMedia() {
if (navigator.mediaDevices && navigator.mediaDevices.getUserMedia) {
navigator.mediaDevices.getUserMedia(constraints)
.then(successCallback)
.catch(errorCallback);
} else if (navigator.getUserMedia) {
navigator.getUserMedia(constraints, successCallback, errorCallback);
} else {
errorCallback(new Error('getUserMedia is not supported in this browser'));
}
}
getMedia();
// Below: three.js code that loads the 3D model layered over the video.
// Once everything is loaded, we run our Three.js stuff.
function init() {
var stats = initStats();
// The scene holds all our elements: objects, cameras and lights.
var scene = new THREE.Scene();
// Perspective camera: 45° fov, window aspect, near 0.1 / far 1000.
var camera = new THREE.PerspectiveCamera(45, window.innerWidth / window.innerHeight, 0.1, 1000);
// alpha: true keeps the WebGL canvas transparent so the <video> layer
// underneath stays visible — this is what produces the AR effect.
var webGLRenderer = new THREE.WebGLRenderer({alpha: true});
webGLRenderer.setSize(window.innerWidth, window.innerHeight);
webGLRenderer.shadowMapEnabled = true;
// Position and point the camera to the center of the scene.
camera.position.x = -30;
camera.position.y = 40;
camera.position.z = 50;
camera.lookAt(new THREE.Vector3(0, 10, 0));
// Add a spotlight so the model is lit.
var spotLight = new THREE.SpotLight(0xffffff);
spotLight.position.set(0, 50, 30);
spotLight.intensity = 2;
scene.add(spotLight);
// Attach the renderer's canvas on top of the video layer.
document.getElementById("WebGL-output").appendChild(webGLRenderer.domElement);
// The model loads asynchronously; render() starts immediately and simply
// skips the mesh until the load callback has added it to the scene.
var mesh;
var loader = new THREE.JSONLoader();
loader.load('../assets/models/misc_chair01.js', function(geometry, mat) {
// We need the first material, since it's a multimaterial.
mesh = new THREE.Mesh(geometry, mat[0]);
mesh.scale.x = 15;
mesh.scale.y = 15;
mesh.scale.z = 15;
scene.add(mesh);
}, '../assets/models/');
render();
function render() {
stats.update();
if (mesh) {
mesh.rotation.y += 0.02; // slow continuous spin
}
// Render using requestAnimationFrame.
requestAnimationFrame(render);
webGLRenderer.render(scene, camera);
}
function initStats() {
var stats = new Stats();
stats.setMode(0); // 0: fps, 1: ms
// Align top-left.
stats.domElement.style.position = 'absolute';
stats.domElement.style.left = '0px';
stats.domElement.style.top = '0px';
// The page's HTML has no #Stats-output element, so the original
// appendChild crashed on null — fall back to <body> instead.
var container = document.getElementById("Stats-output") || document.body;
container.appendChild(stats.domElement);
return stats;
}
}
window.onload = init;

还有一点要注意的是,Android下只有https链接才可以访问摄像头。

demo地址:WebVR demo
扫码体验demo:
WebVR demo

参考
Web前端也能做的AR互动
MediaDevices.getUserMedia()

分享到