1 | /**************************************************************************/ |
2 | /* mobile_vr_interface.cpp */ |
3 | /**************************************************************************/ |
4 | /* This file is part of: */ |
5 | /* GODOT ENGINE */ |
6 | /* https://godotengine.org */ |
7 | /**************************************************************************/ |
8 | /* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */ |
9 | /* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */ |
10 | /* */ |
11 | /* Permission is hereby granted, free of charge, to any person obtaining */ |
12 | /* a copy of this software and associated documentation files (the */ |
13 | /* "Software"), to deal in the Software without restriction, including */ |
14 | /* without limitation the rights to use, copy, modify, merge, publish, */ |
15 | /* distribute, sublicense, and/or sell copies of the Software, and to */ |
16 | /* permit persons to whom the Software is furnished to do so, subject to */ |
17 | /* the following conditions: */ |
18 | /* */ |
19 | /* The above copyright notice and this permission notice shall be */ |
20 | /* included in all copies or substantial portions of the Software. */ |
21 | /* */ |
22 | /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */ |
23 | /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */ |
24 | /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */ |
25 | /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */ |
26 | /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */ |
27 | /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */ |
28 | /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ |
29 | /**************************************************************************/ |
30 | |
31 | #include "mobile_vr_interface.h" |
32 | |
33 | #include "core/input/input.h" |
34 | #include "core/os/os.h" |
35 | #include "servers/display_server.h" |
36 | #include "servers/rendering/rendering_server_globals.h" |
37 | |
38 | StringName MobileVRInterface::get_name() const { |
39 | return "Native mobile" ; |
40 | }; |
41 | |
42 | uint32_t MobileVRInterface::get_capabilities() const { |
43 | return XRInterface::XR_STEREO; |
44 | }; |
45 | |
46 | Vector3 MobileVRInterface::scale_magneto(const Vector3 &p_magnetometer) { |
47 | // Our magnetometer doesn't give us nice clean data. |
	// It may on macOS, where the current implementation gives us a calibrated value, but on Android we're getting raw data.
	// This is a fairly simple adjustment we can do to correct for the magnetometer data being elliptical.
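	// Rough idea (illustrative numbers, not from a real device): if the raw X readings over a window
	// run from -20 to 60, their centre is 20; subtracting that centre and rescaling by the observed
	// spread pulls the elliptical response back towards a sphere so a heading can be derived from it.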
50 | |
51 | Vector3 mag_raw = p_magnetometer; |
52 | Vector3 mag_scaled = p_magnetometer; |
53 | |
	// roll our min/max calibration window over every 20 samples or so
55 | if (mag_count > 20) { |
56 | mag_current_min = mag_next_min; |
57 | mag_current_max = mag_next_max; |
58 | mag_count = 0; |
59 | } else { |
60 | mag_count++; |
61 | }; |
62 | |
63 | // adjust our min and max |
64 | if (mag_raw.x > mag_next_max.x) { |
65 | mag_next_max.x = mag_raw.x; |
66 | } |
67 | if (mag_raw.y > mag_next_max.y) { |
68 | mag_next_max.y = mag_raw.y; |
69 | } |
70 | if (mag_raw.z > mag_next_max.z) { |
71 | mag_next_max.z = mag_raw.z; |
72 | } |
73 | |
74 | if (mag_raw.x < mag_next_min.x) { |
75 | mag_next_min.x = mag_raw.x; |
76 | } |
77 | if (mag_raw.y < mag_next_min.y) { |
78 | mag_next_min.y = mag_raw.y; |
79 | } |
80 | if (mag_raw.z < mag_next_min.z) { |
81 | mag_next_min.z = mag_raw.z; |
82 | } |
83 | |
	// scale our x, y and z so each axis roughly covers the same range
	// (only when we've actually observed a usable range, so we don't divide by zero)
	if ((mag_current_max.x - mag_current_min.x) > 0.0) {
		mag_raw.x -= (mag_current_min.x + mag_current_max.x) / 2.0;
		mag_scaled.x = (mag_raw.x - mag_current_min.x) / ((mag_current_max.x - mag_current_min.x) * 2.0 - 1.0);
	};

	if ((mag_current_max.y - mag_current_min.y) > 0.0) {
		mag_raw.y -= (mag_current_min.y + mag_current_max.y) / 2.0;
		mag_scaled.y = (mag_raw.y - mag_current_min.y) / ((mag_current_max.y - mag_current_min.y) * 2.0 - 1.0);
	};

	if ((mag_current_max.z - mag_current_min.z) > 0.0) {
		mag_raw.z -= (mag_current_min.z + mag_current_max.z) / 2.0;
		mag_scaled.z = (mag_raw.z - mag_current_min.z) / ((mag_current_max.z - mag_current_min.z) * 2.0 - 1.0);
	};
99 | |
100 | return mag_scaled; |
101 | }; |
102 | |
103 | Basis MobileVRInterface::combine_acc_mag(const Vector3 &p_grav, const Vector3 &p_magneto) { |
104 | // yup, stock standard cross product solution... |
105 | Vector3 up = -p_grav.normalized(); |
106 | |
	Vector3 magneto_east = up.cross(p_magneto.normalized()); // or is this west? either way it should be horizon aligned now
108 | magneto_east.normalize(); |
109 | |
110 | Vector3 magneto = up.cross(magneto_east); // and now we have a horizon aligned north |
111 | magneto.normalize(); |
112 | |
113 | // We use our gravity and magnetometer vectors to construct our matrix |
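	// The three vectors stored below are unit length and mutually perpendicular (X = -east, Y = up,
	// Z = north), so the resulting Basis is (near) orthonormal and serves as the absolute orientation
	// estimate that the gyro-less path in set_position_from_sensors() blends towards.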
114 | Basis acc_mag_m3; |
115 | acc_mag_m3.rows[0] = -magneto_east; |
116 | acc_mag_m3.rows[1] = up; |
117 | acc_mag_m3.rows[2] = magneto; |
118 | |
119 | return acc_mag_m3; |
120 | }; |
121 | |
122 | void MobileVRInterface::set_position_from_sensors() { |
123 | _THREAD_SAFE_METHOD_ |
124 | |
125 | // this is a helper function that attempts to adjust our transform using our 9dof sensors |
	// 9dof is a misleading marketing term coming from 3 accelerometer axes + 3 gyro axes + 3 magnetometer axes = 9 axes,
	// but in reality this only offers 3 dof (yaw, pitch, roll) of orientation
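	// position is not derived here; process() pins the head at (0, eye_height, 0), so this function
	// only updates the rotation part of head_transform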
128 | |
	// start from the orientation we ended up with on the previous update so sensor deltas accumulate
	Basis orientation = head_transform.basis;
130 | |
131 | uint64_t ticks = OS::get_singleton()->get_ticks_usec(); |
132 | uint64_t ticks_elapsed = ticks - last_ticks; |
133 | float delta_time = (double)ticks_elapsed / 1000000.0; |
134 | |
135 | // few things we need |
136 | Input *input = Input::get_singleton(); |
137 | Vector3 down(0.0, -1.0, 0.0); // Down is Y negative |
138 | Vector3 north(0.0, 0.0, 1.0); // North is Z positive |
139 | |
140 | // make copies of our inputs |
141 | bool has_grav = false; |
142 | Vector3 acc = input->get_accelerometer(); |
143 | Vector3 gyro = input->get_gyroscope(); |
144 | Vector3 grav = input->get_gravity(); |
145 | Vector3 magneto = scale_magneto(input->get_magnetometer()); // this may be overkill on iOS because we're already getting a calibrated magnetometer reading |
146 | |
147 | if (sensor_first) { |
148 | sensor_first = false; |
149 | } else { |
150 | acc = scrub(acc, last_accerometer_data, 2, 0.2); |
151 | magneto = scrub(magneto, last_magnetometer_data, 3, 0.3); |
152 | }; |
153 | |
154 | last_accerometer_data = acc; |
155 | last_magnetometer_data = magneto; |
156 | |
157 | if (grav.length() < 0.1) { |
		// not ideal, but fall back to our accelerometer; this will include shaky user movement
		// if the gravity vector isn't available it's probably because the device lacks the gyro sensor
		// needed to work out a stable gravity vector
161 | grav = acc; |
162 | if (grav.length() > 0.1) { |
163 | has_grav = true; |
164 | }; |
165 | } else { |
166 | has_grav = true; |
167 | }; |
168 | |
169 | bool has_magneto = magneto.length() > 0.1; |
170 | if (gyro.length() > 0.1) { |
171 | /* this can return to 0.0 if the user doesn't move the phone, so once on, it's on */ |
172 | has_gyro = true; |
173 | }; |
174 | |
175 | if (has_gyro) { |
176 | // start with applying our gyro (do NOT smooth our gyro!) |
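		// The gyroscope reports angular velocity (in rad/s) around the device's local axes, so each
		// component times delta_time is the small angle rotated since the last update; applying those
		// three incremental rotations around the current basis columns integrates the motion.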
177 | Basis rotate; |
178 | rotate.rotate(orientation.get_column(0), gyro.x * delta_time); |
179 | rotate.rotate(orientation.get_column(1), gyro.y * delta_time); |
180 | rotate.rotate(orientation.get_column(2), gyro.z * delta_time); |
181 | orientation = rotate * orientation; |
182 | |
183 | tracking_state = XRInterface::XR_NORMAL_TRACKING; |
184 | tracking_confidence = XRPose::XR_TRACKING_CONFIDENCE_HIGH; |
185 | }; |
186 | |
	///@TODO improve this, the magnetometer is very fidgety, sometimes flipping the axis for no apparent reason (probably a bug on my part)
	// if you have a gyro + accelerometer that combo tends to be better than combining all three, but without a gyro you need the magnetometer...
189 | if (has_magneto && has_grav && !has_gyro) { |
190 | // convert to quaternions, easier to smooth those out |
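		// A slerp weight of 0.1 moves our orientation only 10% of the way towards the
		// accelerometer/magnetometer estimate per update; this low-pass style blend keeps the noisy
		// magnetometer from snapping the view around.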
191 | Quaternion transform_quat(orientation); |
192 | Quaternion acc_mag_quat(combine_acc_mag(grav, magneto)); |
193 | transform_quat = transform_quat.slerp(acc_mag_quat, 0.1); |
194 | orientation = Basis(transform_quat); |
195 | |
196 | tracking_state = XRInterface::XR_NORMAL_TRACKING; |
197 | tracking_confidence = XRPose::XR_TRACKING_CONFIDENCE_HIGH; |
198 | } else if (has_grav) { |
199 | // use gravity vector to make sure down is down... |
200 | // transform gravity into our world space |
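		// The idea: rotate the measured gravity vector into our current frame; if it doesn't point
		// straight down, rotate around the axis perpendicular to both vectors by a small fraction of
		// the error angle, so the horizon is corrected gradually rather than in one jump.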
201 | grav.normalize(); |
202 | Vector3 grav_adj = orientation.xform(grav); |
203 | float dot = grav_adj.dot(down); |
204 | if ((dot > -1.0) && (dot < 1.0)) { |
205 | // axis around which we have this rotation |
206 | Vector3 axis = grav_adj.cross(down); |
207 | axis.normalize(); |
208 | |
209 | Basis drift_compensation(axis, acos(dot) * delta_time * 10); |
210 | orientation = drift_compensation * orientation; |
211 | }; |
212 | }; |
213 | |
214 | // and copy to our head transform |
215 | head_transform.basis = orientation.orthonormalized(); |
216 | |
217 | last_ticks = ticks; |
218 | }; |
219 | |
220 | void MobileVRInterface::_bind_methods() { |
221 | ClassDB::bind_method(D_METHOD("set_eye_height" , "eye_height" ), &MobileVRInterface::set_eye_height); |
222 | ClassDB::bind_method(D_METHOD("get_eye_height" ), &MobileVRInterface::get_eye_height); |
223 | |
224 | ClassDB::bind_method(D_METHOD("set_iod" , "iod" ), &MobileVRInterface::set_iod); |
225 | ClassDB::bind_method(D_METHOD("get_iod" ), &MobileVRInterface::get_iod); |
226 | |
227 | ClassDB::bind_method(D_METHOD("set_display_width" , "display_width" ), &MobileVRInterface::set_display_width); |
228 | ClassDB::bind_method(D_METHOD("get_display_width" ), &MobileVRInterface::get_display_width); |
229 | |
230 | ClassDB::bind_method(D_METHOD("set_display_to_lens" , "display_to_lens" ), &MobileVRInterface::set_display_to_lens); |
231 | ClassDB::bind_method(D_METHOD("get_display_to_lens" ), &MobileVRInterface::get_display_to_lens); |
232 | |
233 | ClassDB::bind_method(D_METHOD("set_oversample" , "oversample" ), &MobileVRInterface::set_oversample); |
234 | ClassDB::bind_method(D_METHOD("get_oversample" ), &MobileVRInterface::get_oversample); |
235 | |
236 | ClassDB::bind_method(D_METHOD("set_k1" , "k" ), &MobileVRInterface::set_k1); |
237 | ClassDB::bind_method(D_METHOD("get_k1" ), &MobileVRInterface::get_k1); |
238 | |
239 | ClassDB::bind_method(D_METHOD("set_k2" , "k" ), &MobileVRInterface::set_k2); |
240 | ClassDB::bind_method(D_METHOD("get_k2" ), &MobileVRInterface::get_k2); |
241 | |
242 | ADD_PROPERTY(PropertyInfo(Variant::FLOAT, "eye_height" , PROPERTY_HINT_RANGE, "0.0,3.0,0.1" ), "set_eye_height" , "get_eye_height" ); |
243 | ADD_PROPERTY(PropertyInfo(Variant::FLOAT, "iod" , PROPERTY_HINT_RANGE, "4.0,10.0,0.1" ), "set_iod" , "get_iod" ); |
244 | ADD_PROPERTY(PropertyInfo(Variant::FLOAT, "display_width" , PROPERTY_HINT_RANGE, "5.0,25.0,0.1" ), "set_display_width" , "get_display_width" ); |
245 | ADD_PROPERTY(PropertyInfo(Variant::FLOAT, "display_to_lens" , PROPERTY_HINT_RANGE, "5.0,25.0,0.1" ), "set_display_to_lens" , "get_display_to_lens" ); |
246 | ADD_PROPERTY(PropertyInfo(Variant::FLOAT, "oversample" , PROPERTY_HINT_RANGE, "1.0,2.0,0.1" ), "set_oversample" , "get_oversample" ); |
247 | ADD_PROPERTY(PropertyInfo(Variant::FLOAT, "k1" , PROPERTY_HINT_RANGE, "0.1,10.0,0.0001" ), "set_k1" , "get_k1" ); |
248 | ADD_PROPERTY(PropertyInfo(Variant::FLOAT, "k2" , PROPERTY_HINT_RANGE, "0.1,10.0,0.0001" ), "set_k2" , "get_k2" ); |
249 | } |
250 | |
251 | void MobileVRInterface::set_eye_height(const double p_eye_height) { |
252 | eye_height = p_eye_height; |
253 | } |
254 | |
255 | double MobileVRInterface::get_eye_height() const { |
256 | return eye_height; |
257 | } |
258 | |
259 | void MobileVRInterface::set_iod(const double p_iod) { |
260 | intraocular_dist = p_iod; |
261 | }; |
262 | |
263 | double MobileVRInterface::get_iod() const { |
264 | return intraocular_dist; |
265 | }; |
266 | |
267 | void MobileVRInterface::set_display_width(const double p_display_width) { |
268 | display_width = p_display_width; |
269 | }; |
270 | |
271 | double MobileVRInterface::get_display_width() const { |
272 | return display_width; |
273 | }; |
274 | |
275 | void MobileVRInterface::set_display_to_lens(const double p_display_to_lens) { |
276 | display_to_lens = p_display_to_lens; |
277 | }; |
278 | |
279 | double MobileVRInterface::get_display_to_lens() const { |
280 | return display_to_lens; |
281 | }; |
282 | |
283 | void MobileVRInterface::set_oversample(const double p_oversample) { |
284 | oversample = p_oversample; |
285 | }; |
286 | |
287 | double MobileVRInterface::get_oversample() const { |
288 | return oversample; |
289 | }; |
290 | |
291 | void MobileVRInterface::set_k1(const double p_k1) { |
292 | k1 = p_k1; |
293 | }; |
294 | |
295 | double MobileVRInterface::get_k1() const { |
296 | return k1; |
297 | }; |
298 | |
299 | void MobileVRInterface::set_k2(const double p_k2) { |
300 | k2 = p_k2; |
301 | }; |
302 | |
303 | double MobileVRInterface::get_k2() const { |
304 | return k2; |
305 | }; |
306 | |
307 | uint32_t MobileVRInterface::get_view_count() { |
308 | // needs stereo... |
309 | return 2; |
310 | }; |
311 | |
312 | XRInterface::TrackingStatus MobileVRInterface::get_tracking_status() const { |
313 | return tracking_state; |
314 | } |
315 | |
316 | bool MobileVRInterface::is_initialized() const { |
317 | return (initialized); |
318 | }; |
319 | |
320 | bool MobileVRInterface::initialize() { |
321 | XRServer *xr_server = XRServer::get_singleton(); |
322 | ERR_FAIL_NULL_V(xr_server, false); |
323 | |
324 | if (!initialized) { |
325 | // reset our sensor data |
326 | mag_count = 0; |
327 | has_gyro = false; |
328 | sensor_first = true; |
329 | mag_next_min = Vector3(10000, 10000, 10000); |
330 | mag_next_max = Vector3(-10000, -10000, -10000); |
331 | mag_current_min = Vector3(0, 0, 0); |
332 | mag_current_max = Vector3(0, 0, 0); |
333 | head_transform.basis = Basis(); |
334 | head_transform.origin = Vector3(0.0, eye_height, 0.0); |
335 | |
336 | // we must create a tracker for our head |
337 | head.instantiate(); |
338 | head->set_tracker_type(XRServer::TRACKER_HEAD); |
		head->set_tracker_name("head");
		head->set_tracker_desc("Player's head");
341 | xr_server->add_tracker(head); |
342 | |
343 | // make this our primary interface |
344 | xr_server->set_primary_interface(this); |
345 | |
346 | last_ticks = OS::get_singleton()->get_ticks_usec(); |
347 | |
348 | initialized = true; |
349 | }; |
350 | |
351 | return true; |
352 | }; |
353 | |
354 | void MobileVRInterface::uninitialize() { |
355 | if (initialized) { |
356 | // do any cleanup here... |
357 | XRServer *xr_server = XRServer::get_singleton(); |
358 | if (xr_server != nullptr) { |
359 | if (head.is_valid()) { |
360 | xr_server->remove_tracker(head); |
361 | |
362 | head.unref(); |
363 | } |
364 | |
365 | if (xr_server->get_primary_interface() == this) { |
366 | // no longer our primary interface |
367 | xr_server->set_primary_interface(nullptr); |
368 | } |
369 | } |
370 | |
371 | initialized = false; |
372 | }; |
373 | }; |
374 | |
375 | Dictionary MobileVRInterface::get_system_info() { |
376 | Dictionary dict; |
377 | |
378 | dict[SNAME("XRRuntimeName" )] = String("Godot mobile VR interface" ); |
379 | dict[SNAME("XRRuntimeVersion" )] = String("" ); |
380 | |
381 | return dict; |
382 | } |
383 | |
384 | bool MobileVRInterface::supports_play_area_mode(XRInterface::PlayAreaMode p_mode) { |
	// This interface has no positional tracking, so fix this to 3DOF.
386 | return p_mode == XR_PLAY_AREA_3DOF; |
387 | } |
388 | |
389 | XRInterface::PlayAreaMode MobileVRInterface::get_play_area_mode() const { |
390 | return XR_PLAY_AREA_3DOF; |
391 | } |
392 | |
393 | bool MobileVRInterface::set_play_area_mode(XRInterface::PlayAreaMode p_mode) { |
394 | return p_mode == XR_PLAY_AREA_3DOF; |
395 | } |
396 | |
397 | Size2 MobileVRInterface::get_render_target_size() { |
398 | _THREAD_SAFE_METHOD_ |
399 | |
	// per eye we use half our window width (and full height), scaled by the oversample factor
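	// e.g. (illustrative numbers): a 1920x1080 window with oversample at 1.5 would give a 1440x1620
	// render target per eye; the width is halved because each eye only renders to half the screen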
401 | Size2 target_size = DisplayServer::get_singleton()->window_get_size(); |
402 | |
403 | target_size.x *= 0.5 * oversample; |
404 | target_size.y *= oversample; |
405 | |
406 | return target_size; |
407 | }; |
408 | |
409 | Transform3D MobileVRInterface::get_camera_transform() { |
410 | _THREAD_SAFE_METHOD_ |
411 | |
412 | Transform3D transform_for_eye; |
413 | |
414 | XRServer *xr_server = XRServer::get_singleton(); |
415 | ERR_FAIL_NULL_V(xr_server, transform_for_eye); |
416 | |
417 | if (initialized) { |
418 | float world_scale = xr_server->get_world_scale(); |
419 | |
420 | // just scale our origin point of our transform |
421 | Transform3D _head_transform = head_transform; |
422 | _head_transform.origin *= world_scale; |
423 | |
424 | transform_for_eye = (xr_server->get_reference_frame()) * _head_transform; |
425 | } |
426 | |
427 | return transform_for_eye; |
428 | }; |
429 | |
430 | Transform3D MobileVRInterface::get_transform_for_view(uint32_t p_view, const Transform3D &p_cam_transform) { |
431 | _THREAD_SAFE_METHOD_ |
432 | |
433 | Transform3D transform_for_eye; |
434 | |
435 | XRServer *xr_server = XRServer::get_singleton(); |
436 | ERR_FAIL_NULL_V(xr_server, transform_for_eye); |
437 | |
438 | if (initialized) { |
439 | float world_scale = xr_server->get_world_scale(); |
440 | |
		// we don't need to check for the existence of our HMD; it doesn't affect our values...
		// note: * 0.01 to convert cm to m and * 0.5 as we're moving half the distance in each direction...
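		// e.g. with an illustrative intraocular distance of 6.0 cm and a world scale of 1.0,
		// each eye ends up offset 0.03 m (3 cm) to either side of the head transform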
443 | if (p_view == 0) { |
444 | transform_for_eye.origin.x = -(intraocular_dist * 0.01 * 0.5 * world_scale); |
445 | } else if (p_view == 1) { |
446 | transform_for_eye.origin.x = intraocular_dist * 0.01 * 0.5 * world_scale; |
447 | } else { |
			// should not have any other values...
449 | }; |
450 | |
451 | // just scale our origin point of our transform |
452 | Transform3D _head_transform = head_transform; |
453 | _head_transform.origin *= world_scale; |
454 | |
455 | transform_for_eye = p_cam_transform * (xr_server->get_reference_frame()) * _head_transform * transform_for_eye; |
456 | } else { |
		// huh? well, just return what we got...
458 | transform_for_eye = p_cam_transform; |
459 | }; |
460 | |
461 | return transform_for_eye; |
462 | }; |
463 | |
464 | Projection MobileVRInterface::get_projection_for_view(uint32_t p_view, double p_aspect, double p_z_near, double p_z_far) { |
465 | _THREAD_SAFE_METHOD_ |
466 | |
467 | Projection eye; |
468 | |
469 | aspect = p_aspect; |
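	// Projection::set_for_hmd builds an asymmetric (off-axis) frustum for the given eye from the
	// physical headset measurements; views are 0-based here while set_for_hmd's eye parameter
	// starts at 1, hence p_view + 1.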
470 | eye.set_for_hmd(p_view + 1, p_aspect, intraocular_dist, display_width, display_to_lens, oversample, p_z_near, p_z_far); |
471 | |
472 | return eye; |
473 | }; |
474 | |
475 | Vector<BlitToScreen> MobileVRInterface::post_draw_viewport(RID p_render_target, const Rect2 &p_screen_rect) { |
476 | _THREAD_SAFE_METHOD_ |
477 | |
478 | Vector<BlitToScreen> blit_to_screen; |
479 | |
480 | // We must have a valid render target |
481 | ERR_FAIL_COND_V(!p_render_target.is_valid(), blit_to_screen); |
482 | |
483 | // Because we are rendering to our device we must use our main viewport! |
484 | ERR_FAIL_COND_V(p_screen_rect == Rect2(), blit_to_screen); |
485 | |
486 | // and add our blits |
487 | BlitToScreen blit; |
488 | blit.render_target = p_render_target; |
489 | blit.multi_view.use_layer = true; |
490 | blit.lens_distortion.apply = true; |
491 | blit.lens_distortion.k1 = k1; |
492 | blit.lens_distortion.k2 = k2; |
493 | blit.lens_distortion.upscale = oversample; |
494 | blit.lens_distortion.aspect_ratio = aspect; |
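
	// eye_center describes how far the lens centre sits from the middle of each half of the screen,
	// normalized against half the display width. Illustrative numbers: iod = 6.0 cm and
	// display_width = 14.5 cm give (-3.0 + 3.625) / 7.25, roughly 0.086, for the left eye
	// (mirrored for the right).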
495 | |
496 | // left eye |
497 | blit.dst_rect = p_screen_rect; |
498 | blit.dst_rect.size.width *= 0.5; |
499 | blit.multi_view.layer = 0; |
500 | blit.lens_distortion.eye_center.x = ((-intraocular_dist / 2.0) + (display_width / 4.0)) / (display_width / 2.0); |
501 | blit_to_screen.push_back(blit); |
502 | |
503 | // right eye |
504 | blit.dst_rect = p_screen_rect; |
505 | blit.dst_rect.size.width *= 0.5; |
506 | blit.dst_rect.position.x = blit.dst_rect.size.width; |
507 | blit.multi_view.layer = 1; |
508 | blit.lens_distortion.eye_center.x = ((intraocular_dist / 2.0) - (display_width / 4.0)) / (display_width / 2.0); |
509 | blit_to_screen.push_back(blit); |
510 | |
511 | return blit_to_screen; |
512 | } |
513 | |
514 | void MobileVRInterface::process() { |
515 | _THREAD_SAFE_METHOD_ |
516 | |
517 | if (initialized) { |
518 | // update our head transform orientation |
519 | set_position_from_sensors(); |
520 | |
521 | // update our head transform position (should be constant) |
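		// this interface has no positional tracking, so the head simply stays at a fixed eye height
		// above the origin; only the orientation set above changes frame to frame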
522 | head_transform.origin = Vector3(0.0, eye_height, 0.0); |
523 | |
524 | if (head.is_valid()) { |
			// Set our head position; note this is in real space, reference frame and world scale are applied later
526 | head->set_pose("default" , head_transform, Vector3(), Vector3(), tracking_confidence); |
527 | } |
528 | }; |
529 | }; |
530 | |
531 | MobileVRInterface::MobileVRInterface() {} |
532 | |
533 | MobileVRInterface::~MobileVRInterface() { |
534 | // and make sure we cleanup if we haven't already |
535 | if (is_initialized()) { |
536 | uninitialize(); |
537 | }; |
538 | }; |
539 | |