Merge pull request #8567 from BastiaanOlij/ar_vr_server

AR/VR base classes and position tracker support
Rémi Verschelde, 2017-07-31 13:29:18 +02:00 (committed via GitHub)
commit 62464839ad
25 changed files with 1745 additions and 35 deletions


@ -91,6 +91,72 @@ void CameraMatrix::set_perspective(real_t p_fovy_degrees, real_t p_aspect, real_
matrix[3][3] = 0;
}
void CameraMatrix::set_perspective(real_t p_fovy_degrees, real_t p_aspect, real_t p_z_near, real_t p_z_far, bool p_flip_fov, int p_eye, real_t p_intraocular_dist, real_t p_convergence_dist) {
if (p_flip_fov) {
p_fovy_degrees = get_fovy(p_fovy_degrees, 1.0 / p_aspect);
}
real_t left, right, modeltranslation, ymax, xmax, frustumshift;
ymax = p_z_near * tan(p_fovy_degrees * Math_PI / 360.0f);
xmax = ymax * p_aspect;
frustumshift = (p_intraocular_dist / 2.0) * p_z_near / p_convergence_dist;
switch (p_eye) {
case 1: { // left eye
left = -xmax + frustumshift;
right = xmax + frustumshift;
modeltranslation = p_intraocular_dist / 2.0;
}; break;
case 2: { // right eye
left = -xmax - frustumshift;
right = xmax - frustumshift;
modeltranslation = -p_intraocular_dist / 2.0;
}; break;
default: { // mono, should give the same result as set_perspective(p_fovy_degrees,p_aspect,p_z_near,p_z_far,p_flip_fov)
left = -xmax;
right = xmax;
modeltranslation = 0.0;
}; break;
};
set_frustum(left, right, -ymax, ymax, p_z_near, p_z_far);
// translate matrix by (modeltranslation, 0.0, 0.0)
CameraMatrix cm;
cm.set_identity();
cm.matrix[3][0] = modeltranslation;
*this = *this * cm;
}
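For reference, a minimal usage sketch (not part of this patch) of the new stereo overload; the intraocular distance (0.065) and convergence distance (1.0) below are hypothetical example values:

CameraMatrix left_eye, right_eye;
// p_eye == 1 selects the left eye, p_eye == 2 the right eye
left_eye.set_perspective(105.0, 1.0, 0.05, 100.0, false, 1, 0.065, 1.0);
right_eye.set_perspective(105.0, 1.0, 0.05, 100.0, false, 2, 0.065, 1.0);
// each result is an off-axis frustum shifted by +/- frustumshift and then
// translated along X by +/- half the intraocular distance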
void CameraMatrix::set_for_hmd(int p_eye, real_t p_aspect, real_t p_intraocular_dist, real_t p_display_width, real_t p_display_to_lens, real_t p_oversample, real_t p_z_near, real_t p_z_far) {
// we first calculate our base frustum on our values without taking our lens magnification into account.
real_t display_to_eye = 2.0 * p_display_to_lens;
real_t f1 = (p_intraocular_dist * 0.5) / p_display_to_lens;
real_t f2 = ((p_display_width - p_intraocular_dist) * 0.5) / p_display_to_lens;
real_t f3 = (p_display_width / 4.0) / p_display_to_lens;
// now we apply our oversample factor to increase our FOV. how much we oversample is always a balance we strike between performance and how much
// we're willing to sacrifice in FOV.
real_t add = ((f1 + f2) * (p_oversample - 1.0)) / 2.0;
f1 += add;
f2 += add;
// always apply KEEP_WIDTH aspect ratio
f3 *= p_aspect;
switch (p_eye) {
case 1: { // left eye
set_frustum(-f2 * p_z_near, f1 * p_z_near, -f3 * p_z_near, f3 * p_z_near, p_z_near, p_z_far);
}; break;
case 2: { // right eye
set_frustum(-f1 * p_z_near, f2 * p_z_near, -f3 * p_z_near, f3 * p_z_near, p_z_near, p_z_far);
}; break;
default: { // mono, does not apply here!
}; break;
};
};
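To make the factors above concrete, here is an illustrative calculation (not part of the patch) using hypothetical values: intraocular distance 0.065, display width 0.12, display-to-lens distance 0.04, oversample 1.5, aspect 1.0:

f1  = (0.065 * 0.5) / 0.04             = 0.8125   // inner (nose-side) edge factor
f2  = ((0.12 - 0.065) * 0.5) / 0.04    = 0.6875   // outer edge factor
f3  = (0.12 / 4.0) / 0.04              = 0.75     // vertical half-extent factor
add = ((f1 + f2) * (1.5 - 1.0)) / 2.0  = 0.375    // oversampling widens both horizontal edges
f1  = 1.1875, f2 = 1.0625                          // after oversampling
// the final frustum bounds are these factors multiplied by p_z_near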
void CameraMatrix::set_orthogonal(real_t p_left, real_t p_right, real_t p_bottom, real_t p_top, real_t p_znear, real_t p_zfar) {
set_identity();
@ -243,23 +309,44 @@ bool CameraMatrix::get_endpoints(const Transform &p_transform, Vector3 *p_8point
-matrix[15] + matrix[13]);
top_plane.normalize();
Vector3 near_endpoint;
Vector3 far_endpoint;
Vector3 near_endpoint_left, near_endpoint_right;
Vector3 far_endpoint_left, far_endpoint_right;
bool res = near_plane.intersect_3(right_plane, top_plane, &near_endpoint);
bool res = near_plane.intersect_3(right_plane, top_plane, &near_endpoint_right);
ERR_FAIL_COND_V(!res, false);
res = far_plane.intersect_3(right_plane, top_plane, &far_endpoint);
res = far_plane.intersect_3(right_plane, top_plane, &far_endpoint_right);
ERR_FAIL_COND_V(!res, false);
p_8points[0] = p_transform.xform(Vector3(near_endpoint.x, near_endpoint.y, near_endpoint.z));
p_8points[1] = p_transform.xform(Vector3(near_endpoint.x, -near_endpoint.y, near_endpoint.z));
p_8points[2] = p_transform.xform(Vector3(-near_endpoint.x, near_endpoint.y, near_endpoint.z));
p_8points[3] = p_transform.xform(Vector3(-near_endpoint.x, -near_endpoint.y, near_endpoint.z));
p_8points[4] = p_transform.xform(Vector3(far_endpoint.x, far_endpoint.y, far_endpoint.z));
p_8points[5] = p_transform.xform(Vector3(far_endpoint.x, -far_endpoint.y, far_endpoint.z));
p_8points[6] = p_transform.xform(Vector3(-far_endpoint.x, far_endpoint.y, far_endpoint.z));
p_8points[7] = p_transform.xform(Vector3(-far_endpoint.x, -far_endpoint.y, far_endpoint.z));
if ((matrix[8] == 0) && (matrix[9] == 0)) {
near_endpoint_left = near_endpoint_right;
near_endpoint_left.x = -near_endpoint_left.x;
far_endpoint_left = far_endpoint_right;
far_endpoint_left.x = -far_endpoint_left.x;
} else {
///////--- Left Plane ---///////
Plane left_plane = Plane(matrix[0] + matrix[3],
matrix[4] + matrix[7],
matrix[8] + matrix[11],
-matrix[15] - matrix[12]);
left_plane.normalize();
res = near_plane.intersect_3(left_plane, top_plane, &near_endpoint_left);
ERR_FAIL_COND_V(!res, false);
res = far_plane.intersect_3(left_plane, top_plane, &far_endpoint_left);
ERR_FAIL_COND_V(!res, false);
}
p_8points[0] = p_transform.xform(Vector3(near_endpoint_right.x, near_endpoint_right.y, near_endpoint_right.z));
p_8points[1] = p_transform.xform(Vector3(near_endpoint_right.x, -near_endpoint_right.y, near_endpoint_right.z));
p_8points[2] = p_transform.xform(Vector3(near_endpoint_left.x, near_endpoint_left.y, near_endpoint_left.z));
p_8points[3] = p_transform.xform(Vector3(near_endpoint_left.x, -near_endpoint_left.y, near_endpoint_left.z));
p_8points[4] = p_transform.xform(Vector3(far_endpoint_right.x, far_endpoint_right.y, far_endpoint_right.z));
p_8points[5] = p_transform.xform(Vector3(far_endpoint_right.x, -far_endpoint_right.y, far_endpoint_right.z));
p_8points[6] = p_transform.xform(Vector3(far_endpoint_left.x, far_endpoint_left.y, far_endpoint_left.z));
p_8points[7] = p_transform.xform(Vector3(far_endpoint_left.x, -far_endpoint_left.y, far_endpoint_left.z));
return true;
}
@ -546,7 +633,18 @@ real_t CameraMatrix::get_fov() const {
-matrix[15] + matrix[12]);
right_plane.normalize();
return Math::rad2deg(Math::acos(Math::abs(right_plane.normal.x))) * 2.0;
if ((matrix[8] == 0) && (matrix[9] == 0)) {
return Math::rad2deg(Math::acos(Math::abs(right_plane.normal.x))) * 2.0;
} else {
// our frustum is asymmetrical, we need to calculate the left plane's angle separately..
Plane left_plane = Plane(matrix[3] + matrix[0],
matrix[7] + matrix[4],
matrix[11] + matrix[8],
matrix[15] + matrix[12]);
left_plane.normalize();
return Math::rad2deg(Math::acos(Math::abs(left_plane.normal.x))) + Math::rad2deg(Math::acos(Math::abs(right_plane.normal.x)));
}
}
void CameraMatrix::make_scale(const Vector3 &p_scale) {


@ -54,6 +54,8 @@ struct CameraMatrix {
void set_light_bias();
void set_light_atlas_rect(const Rect2 &p_rect);
void set_perspective(real_t p_fovy_degrees, real_t p_aspect, real_t p_z_near, real_t p_z_far, bool p_flip_fov = false);
void set_perspective(real_t p_fovy_degrees, real_t p_aspect, real_t p_z_near, real_t p_z_far, bool p_flip_fov, int p_eye, real_t p_intraocular_dist, real_t p_convergence_dist);
void set_for_hmd(int p_eye, real_t p_aspect, real_t p_intraocular_dist, real_t p_display_width, real_t p_display_to_lens, real_t p_oversample, real_t p_z_near, real_t p_z_far);
void set_orthogonal(real_t p_left, real_t p_right, real_t p_bottom, real_t p_top, real_t p_znear, real_t p_zfar);
void set_orthogonal(real_t p_size, real_t p_aspect, real_t p_znear, real_t p_zfar, bool p_flip_fov = false);
void set_frustum(real_t p_left, real_t p_right, real_t p_bottom, real_t p_top, real_t p_near, real_t p_far);

scene/3d/arvr_nodes.cpp (new file, 313 lines)

@ -0,0 +1,313 @@
/*************************************************************************/
/* arvr_nodes.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "arvr_nodes.h"
#include "core/os/input.h"
#include "servers/arvr/arvr_interface.h"
#include "servers/arvr/arvr_positional_tracker.h"
#include "servers/arvr_server.h"
////////////////////////////////////////////////////////////////////////////////////////////////////
void ARVRCamera::_notification(int p_what) {
switch (p_what) {
case NOTIFICATION_ENTER_TREE: {
// need to find our ARVROrigin parent and let it know we're its camera!
ARVROrigin *origin = get_parent()->cast_to<ARVROrigin>();
if (origin != NULL) {
origin->set_tracked_camera(this);
}
}; break;
case NOTIFICATION_EXIT_TREE: {
// need to find our ARVROrigin parent and let it know we're no longer its camera!
ARVROrigin *origin = get_parent()->cast_to<ARVROrigin>();
if (origin != NULL) {
origin->clear_tracked_camera_if(this);
}
}; break;
};
};
String ARVRCamera::get_configuration_warning() const {
if (!is_visible() || !is_inside_tree())
return String();
// must be child node of ARVROrigin!
ARVROrigin *origin = get_parent()->cast_to<ARVROrigin>();
if (origin == NULL) {
return TTR("ARVRCamera must have an ARVROrigin node as its parent");
};
return String();
};
ARVRCamera::ARVRCamera(){
// nothing to do here yet for now..
};
ARVRCamera::~ARVRCamera(){
// nothing to do here yet for now..
};
////////////////////////////////////////////////////////////////////////////////////////////////////
void ARVRController::_notification(int p_what) {
switch (p_what) {
case NOTIFICATION_ENTER_TREE: {
set_process_internal(true);
}; break;
case NOTIFICATION_EXIT_TREE: {
set_process_internal(false);
}; break;
case NOTIFICATION_INTERNAL_PROCESS: {
// get our ARVRServer
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL(arvr_server);
// find the tracker for our controller
ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, controller_id);
if (tracker == NULL) {
// this controller is currently turned off
is_active = false;
button_states = 0;
} else {
set_transform(tracker->get_transform(true));
int joy_id = tracker->get_joy_id();
if (joy_id >= 0) {
int mask = 1;
// check button states
for (int i = 0; i < 16; i++) {
bool was_pressed = (button_states & mask) == mask; // bitwise test of this button's bit
bool is_pressed = Input::get_singleton()->is_joy_button_pressed(joy_id, i);
if (!was_pressed && is_pressed) {
emit_signal("button_pressed", i);
button_states += mask;
} else if (was_pressed && !is_pressed) {
emit_signal("button_release", i);
button_states -= mask;
};
mask = mask << 1;
};
} else {
button_states = 0;
};
};
}; break;
default:
break;
};
};
void ARVRController::_bind_methods() {
ClassDB::bind_method(D_METHOD("set_controller_id", "controller_id"), &ARVRController::set_controller_id);
ClassDB::bind_method(D_METHOD("get_controller_id"), &ARVRController::get_controller_id);
ADD_PROPERTY(PropertyInfo(Variant::INT, "controller_id"), "set_controller_id", "get_controller_id");
ClassDB::bind_method(D_METHOD("get_controller_name"), &ARVRController::get_controller_name);
// passthroughs to information about our related joystick
ClassDB::bind_method(D_METHOD("get_joystick_id"), &ARVRController::get_joystick_id);
ClassDB::bind_method(D_METHOD("is_button_pressed", "button"), &ARVRController::is_button_pressed);
ClassDB::bind_method(D_METHOD("get_joystick_axis", "axis"), &ARVRController::get_joystick_axis);
ClassDB::bind_method(D_METHOD("get_is_active"), &ARVRController::get_is_active);
ADD_SIGNAL(MethodInfo("button_pressed", PropertyInfo(Variant::INT, "button")));
ADD_SIGNAL(MethodInfo("button_release", PropertyInfo(Variant::INT, "button")));
};
void ARVRController::set_controller_id(int p_controller_id) {
// we don't check any bounds here; this controller may not yet be active and just be a placeholder until it is.
controller_id = p_controller_id;
};
int ARVRController::get_controller_id(void) const {
return controller_id;
};
String ARVRController::get_controller_name(void) const {
// get our ARVRServer
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, String());
ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, controller_id);
if (tracker == NULL) {
return String("Not connected");
};
return tracker->get_name();
};
int ARVRController::get_joystick_id() const {
// get our ARVRServer
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, 0);
ARVRPositionalTracker *tracker = arvr_server->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, controller_id);
if (tracker == NULL) {
return 0;
};
return tracker->get_joy_id();
};
int ARVRController::is_button_pressed(int p_button) const {
int joy_id = get_joystick_id();
if (joy_id == 0) {
return false;
};
return Input::get_singleton()->is_joy_button_pressed(joy_id, p_button);
};
float ARVRController::get_joystick_axis(int p_axis) const {
int joy_id = get_joystick_id();
if (joy_id == 0) {
return 0.0;
};
return Input::get_singleton()->get_joy_axis(joy_id, p_axis);
};
bool ARVRController::get_is_active() const {
return is_active;
};
String ARVRController::get_configuration_warning() const {
if (!is_visible() || !is_inside_tree())
return String();
// must be child node of ARVROrigin!
ARVROrigin *origin = get_parent()->cast_to<ARVROrigin>();
if (origin == NULL) {
return TTR("ARVRController must have an ARVROrigin node as its parent");
};
if (controller_id == 0) {
return TTR("The controller id must not be 0 or this controller will not be bound to an actual controller");
};
return String();
};
ARVRController::ARVRController() {
controller_id = 0;
is_active = true;
};
ARVRController::~ARVRController(){
// nothing to do here yet for now..
};
////////////////////////////////////////////////////////////////////////////////////////////////////
String ARVROrigin::get_configuration_warning() const {
if (!is_visible() || !is_inside_tree())
return String();
if (tracked_camera == NULL)
return TTR("ARVROrigin requires an ARVRCamera child node");
return String();
};
void ARVROrigin::_bind_methods() {
ClassDB::bind_method(D_METHOD("set_world_scale", "world_scale"), &ARVROrigin::set_world_scale);
ClassDB::bind_method(D_METHOD("get_world_scale"), &ARVROrigin::get_world_scale);
ADD_PROPERTY(PropertyInfo(Variant::REAL, "world_scale"), "set_world_scale", "get_world_scale");
};
void ARVROrigin::set_tracked_camera(ARVRCamera *p_tracked_camera) {
tracked_camera = p_tracked_camera;
};
void ARVROrigin::clear_tracked_camera_if(ARVRCamera *p_tracked_camera) {
if (tracked_camera == p_tracked_camera) {
tracked_camera = NULL;
};
};
float ARVROrigin::get_world_scale() const {
// get our ARVRServer
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, 1.0);
return arvr_server->get_world_scale();
};
void ARVROrigin::set_world_scale(float p_world_scale) {
// get our ARVRServer
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL(arvr_server);
arvr_server->set_world_scale(p_world_scale);
};
void ARVROrigin::_notification(int p_what) {
switch (p_what) {
case NOTIFICATION_ENTER_TREE: {
set_process_internal(true);
}; break;
case NOTIFICATION_EXIT_TREE: {
set_process_internal(false);
}; break;
case NOTIFICATION_INTERNAL_PROCESS: {
// get our ARVRServer
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL(arvr_server);
// set our world origin to our node transform
arvr_server->set_world_origin(get_global_transform());
// check if we have a primary interface
Ref<ARVRInterface> arvr_interface = arvr_server->get_primary_interface();
if (arvr_interface.is_valid() && tracked_camera != NULL) {
// get our positioning transform for our headset
Transform t = arvr_interface->get_transform_for_eye(ARVRInterface::EYE_MONO, Transform());
// now apply this to our camera
tracked_camera->set_transform(t);
};
}; break;
default:
break;
};
};
ARVROrigin::ARVROrigin() {
tracked_camera = NULL;
};
ARVROrigin::~ARVROrigin(){
// nothing to do here yet for now..
};

scene/3d/arvr_nodes.h (new file, 126 lines)

@ -0,0 +1,126 @@
/*************************************************************************/
/* arvr_nodes.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#ifndef ARVR_NODES_H
#define ARVR_NODES_H
#include "scene/3d/camera.h"
#include "scene/3d/spatial.h"
/**
@author Bastiaan Olij <mux213@gmail.com>
**/
/*
ARVRCamera is a subclass of Camera that registers itself with its parent ARVROrigin and, as a result, is automatically positioned by the active AR/VR interface.
*/
class ARVRCamera : public Camera {
GDCLASS(ARVRCamera, Camera);
protected:
void _notification(int p_what);
public:
String get_configuration_warning() const;
ARVRCamera();
~ARVRCamera();
};
/*
ARVRController is a helper node that automatically updates its position based on tracker data.
It must be a child node of our ARVROrigin node
*/
class ARVRController : public Spatial {
GDCLASS(ARVRController, Spatial);
private:
int controller_id;
bool is_active;
int button_states;
protected:
void _notification(int p_what);
static void _bind_methods();
public:
void set_controller_id(int p_controller_id);
int get_controller_id(void) const;
String get_controller_name(void) const;
int get_joystick_id() const;
int is_button_pressed(int p_button) const;
float get_joystick_axis(int p_axis) const;
bool get_is_active() const;
String get_configuration_warning() const;
ARVRController();
~ARVRController();
};
/*
ARVROrigin is a special Spatial node that acts as our origin point, mapping the real-world center of our tracking volume into our virtual world.
It is this point that you move around the world as the player 'moves while standing still', i.e. the player moves through teleporting or controller input as opposed to physically moving.
Our camera and controllers will always be child nodes and are thus placed relative to this origin point.
This node will automatically locate any camera child nodes and update their position, while our ARVRController nodes handle tracked controllers.
*/
class ARVROrigin : public Spatial {
GDCLASS(ARVROrigin, Spatial);
private:
ARVRCamera *tracked_camera;
protected:
void _notification(int p_what);
static void _bind_methods();
public:
String get_configuration_warning() const;
void set_tracked_camera(ARVRCamera *p_tracked_camera);
void clear_tracked_camera_if(ARVRCamera *p_tracked_camera);
float get_world_scale() const;
void set_world_scale(float p_world_scale);
ARVROrigin();
~ARVROrigin();
};
#endif /* ARVR_NODES_H */
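To illustrate how these nodes are meant to fit together, here is a sketch (not part of this patch) that builds the expected hierarchy in code; in practice the nodes would normally be added in the editor, and some_parent stands for a hypothetical existing node in the scene tree:

ARVROrigin *origin = memnew(ARVROrigin);
ARVRCamera *camera = memnew(ARVRCamera); // registers itself with the origin on ENTER_TREE
ARVRController *left_hand = memnew(ARVRController);
left_hand->set_controller_id(1); // bound to the first controller tracker reported by the interface
origin->add_child(camera);
origin->add_child(left_hand);
some_parent->add_child(origin); // camera and controllers are positioned relative to the origin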


@ -701,6 +701,16 @@ RID Viewport::get_viewport_rid() const {
return viewport;
}
void Viewport::set_use_arvr(bool p_use_arvr) {
arvr = p_use_arvr;
VS::get_singleton()->viewport_set_use_arvr(viewport, arvr);
}
bool Viewport::use_arvr() {
return arvr;
}
void Viewport::set_size(const Size2 &p_size) {
if (size == p_size.floor())
@ -2543,6 +2553,9 @@ int Viewport::get_render_info(RenderInfo p_info) {
void Viewport::_bind_methods() {
ClassDB::bind_method(D_METHOD("set_use_arvr", "use"), &Viewport::set_use_arvr);
ClassDB::bind_method(D_METHOD("use_arvr"), &Viewport::use_arvr);
ClassDB::bind_method(D_METHOD("set_size", "size"), &Viewport::set_size);
ClassDB::bind_method(D_METHOD("get_size"), &Viewport::get_size);
ClassDB::bind_method(D_METHOD("set_world_2d", "world_2d:World2D"), &Viewport::set_world_2d);
@ -2644,6 +2657,8 @@ void Viewport::_bind_methods() {
ClassDB::bind_method(D_METHOD("set_shadow_atlas_quadrant_subdiv", "quadrant", "subdiv"), &Viewport::set_shadow_atlas_quadrant_subdiv);
ClassDB::bind_method(D_METHOD("get_shadow_atlas_quadrant_subdiv", "quadrant"), &Viewport::get_shadow_atlas_quadrant_subdiv);
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "arvr"), "set_use_arvr", "use_arvr");
ADD_PROPERTY(PropertyInfo(Variant::RECT2, "size"), "set_size", "get_size");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "own_world"), "set_use_own_world", "is_using_own_world");
ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "world", PROPERTY_HINT_RESOURCE_TYPE, "World"), "set_world", "get_world");
@ -2729,6 +2744,7 @@ Viewport::Viewport() {
parent = NULL;
listener = NULL;
camera = NULL;
arvr = false;
size_override = false;
size_override_stretch = false;
size_override_size = Size2(1, 1);
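For context, a minimal sketch (not part of this patch) of how a game might enable AR/VR rendering on a viewport once an interface is up. It assumes the code runs inside a Node (so get_viewport() is available), and the interface name "MyInterface" is hypothetical:

Ref<ARVRInterface> arvr_interface = ARVRServer::get_singleton()->find_interface("MyInterface");
if (arvr_interface.is_valid() && arvr_interface->initialize()) {
	// this viewport now renders through the primary AR/VR interface
	get_viewport()->set_use_arvr(true);
}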


@ -148,6 +148,8 @@ private:
Listener *listener;
Set<Listener *> listeners;
bool arvr;
Camera *camera;
Set<Camera *> cameras;
@ -349,6 +351,9 @@ public:
Listener *get_listener() const;
Camera *get_camera() const;
void set_use_arvr(bool p_use_arvr);
bool use_arvr();
void set_as_audio_listener(bool p_enable);
bool is_audio_listener() const;


@ -199,6 +199,8 @@
#include "scene/3d/camera.h"
#include "scene/3d/listener.h"
#include "scene/3d/arvr_nodes.h"
#include "scene/3d/gi_probe.h"
#include "scene/3d/interpolated_camera.h"
#include "scene/3d/light.h"
@ -405,6 +407,9 @@ void register_scene_types() {
ClassDB::register_virtual_class<VisualInstance>();
ClassDB::register_class<Camera>();
ClassDB::register_class<Listener>();
ClassDB::register_class<ARVRCamera>();
ClassDB::register_class<ARVRController>();
ClassDB::register_class<ARVROrigin>();
ClassDB::register_class<InterpolatedCamera>();
ClassDB::register_class<MeshInstance>();
ClassDB::register_class<ImmediateGeometry>();


@ -7,6 +7,7 @@ env.add_source_files(env.servers_sources, "*.cpp")
Export('env')
SConscript('arvr/SCsub')
SConscript('physics/SCsub')
SConscript('physics_2d/SCsub')
SConscript('visual/SCsub')

servers/arvr/SCsub (new file, 7 lines)

@ -0,0 +1,7 @@
#!/usr/bin/env python
Import('env')
env.add_source_files(env.servers_sources, "*.cpp")
Export('env')


@ -0,0 +1,82 @@
/*************************************************************************/
/* arvr_interface.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "arvr_interface.h"
void ARVRInterface::_bind_methods() {
ClassDB::bind_method(D_METHOD("get_name"), &ARVRInterface::get_name);
ClassDB::bind_method(D_METHOD("is_primary"), &ARVRInterface::is_primary);
ClassDB::bind_method(D_METHOD("set_is_primary", "enable"), &ARVRInterface::set_is_primary);
ClassDB::bind_method(D_METHOD("is_installed"), &ARVRInterface::is_installed);
ClassDB::bind_method(D_METHOD("hmd_is_present"), &ARVRInterface::hmd_is_present);
ClassDB::bind_method(D_METHOD("supports_hmd"), &ARVRInterface::supports_hmd);
ClassDB::bind_method(D_METHOD("is_initialized"), &ARVRInterface::is_initialized);
ClassDB::bind_method(D_METHOD("initialize"), &ARVRInterface::initialize);
ClassDB::bind_method(D_METHOD("uninitialize"), &ARVRInterface::uninitialize);
ClassDB::bind_method(D_METHOD("get_recommended_render_targetsize"), &ARVRInterface::get_recommended_render_targetsize);
// These are now purely used internally, we may expose them again if we expose CameraMatrix through Variant but reduz is not a fan for good reasons :)
// ClassDB::bind_method(D_METHOD("get_transform_for_eye", "eye", "cam_transform"), &ARVRInterface::get_transform_for_eye);
// ClassDB::bind_method(D_METHOD("get_projection_for_eye", "eye"), &ARVRInterface::get_projection_for_eye);
// ClassDB::bind_method(D_METHOD("commit_for_eye", "node:viewport"), &ARVRInterface::commit_for_eye);
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "primary"), "set_is_primary", "is_primary");
BIND_CONSTANT(EYE_MONO);
BIND_CONSTANT(EYE_LEFT);
BIND_CONSTANT(EYE_RIGHT);
};
StringName ARVRInterface::get_name() const {
return "Unknown";
};
bool ARVRInterface::is_primary() {
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, false);
return arvr_server->get_primary_interface() == this;
};
void ARVRInterface::set_is_primary(bool p_is_primary) {
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL(arvr_server);
if (p_is_primary) {
ERR_FAIL_COND(!is_initialized());
ERR_FAIL_COND(!supports_hmd());
arvr_server->set_primary_interface(this);
} else {
arvr_server->clear_primary_interface_if(this);
};
};


@ -0,0 +1,89 @@
/*************************************************************************/
/* arvr_interface.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#ifndef ARVR_INTERFACE_H
#define ARVR_INTERFACE_H
#include "core/math/camera_matrix.h"
#include "os/thread_safe.h"
#include "scene/main/viewport.h"
#include "servers/arvr_server.h"
/**
@author Bastiaan Olij <mux213@gmail.com>
The ARVR interface is a template class on top of which we build interfaces to different AR, VR and tracking SDKs.
The idea is that we subclass this class, implement the logic, and then instantiate a singleton of each interface
when Godot starts. These instances do not initialize themselves but register themselves with the AR/VR server.
If the user wants to enable AR/VR, they choose the interface they want to use and initialize it.
Note that we may make this into a fully instantiable class for GDNative support.
*/
class ARVRInterface : public Reference {
GDCLASS(ARVRInterface, Reference);
protected:
_THREAD_SAFE_CLASS_
static void _bind_methods();
public:
enum Eyes {
EYE_MONO, /* my son says we should call this EYE_CYCLOPS */
EYE_LEFT,
EYE_RIGHT
};
virtual StringName get_name() const;
bool is_primary();
void set_is_primary(bool p_is_primary);
virtual bool is_installed() = 0; /* returns true if the middle ware related to this interface has been installed */
virtual bool hmd_is_present() = 0; /* returns true if our HMD is connected */
virtual bool supports_hmd() = 0; /* returns true if this interface handles output to an HMD, false if it only handles positioning */
virtual bool is_initialized() = 0; /* returns true if we've initialized this interface */
virtual bool initialize() = 0; /* initialize this interface, if this has an HMD it becomes the primary interface */
virtual void uninitialize() = 0; /* deinitialize this interface */
virtual Size2 get_recommended_render_targetsize() = 0; /* returns the recommended render target size per eye for this device */
virtual bool is_stereo() = 0; /* returns true if this interface requires stereo rendering (for VR HMDs), false for mono rendering (e.g. mobile AR) */
virtual Transform get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform) = 0; /* get each eyes camera transform, also implement EYE_MONO */
virtual CameraMatrix get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) = 0; /* get each eyes projection matrix */
virtual void commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) = 0; /* output the left or right eye */
virtual void process() = 0;
};
VARIANT_ENUM_CAST(ARVRInterface::Eyes);
#endif
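As a rough illustration of the subclassing pattern described above (not part of this patch), a hypothetical do-nothing backend could look like the sketch below; a real interface would talk to its SDK in initialize(), process() and commit_for_eye():

class ARVRDummyInterface : public ARVRInterface {
	GDCLASS(ARVRDummyInterface, ARVRInterface);

	bool initialized;

public:
	virtual StringName get_name() const { return "Dummy"; }
	virtual bool is_installed() { return true; }
	virtual bool hmd_is_present() { return false; }
	virtual bool supports_hmd() { return false; }
	virtual bool is_initialized() { return initialized; }
	virtual bool initialize() { initialized = true; return true; }
	virtual void uninitialize() { initialized = false; }
	virtual bool is_stereo() { return false; }
	virtual Size2 get_recommended_render_targetsize() { return Size2(1280, 720); }
	virtual Transform get_transform_for_eye(Eyes p_eye, const Transform &p_cam_transform) { return p_cam_transform; }
	virtual CameraMatrix get_projection_for_eye(Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) {
		CameraMatrix cm;
		cm.set_perspective(60.0, p_aspect, p_z_near, p_z_far); // plain mono projection for this dummy
		return cm;
	}
	virtual void commit_for_eye(Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) {}
	virtual void process() {}

	ARVRDummyInterface() { initialized = false; }
};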


@ -0,0 +1,142 @@
/*************************************************************************/
/* arvr_positional_tracker.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "arvr_positional_tracker.h"
#include "core/os/input.h"
void ARVRPositionalTracker::_bind_methods() {
// this class is read only from GDScript, so we only have access to getters..
ClassDB::bind_method(D_METHOD("get_type"), &ARVRPositionalTracker::get_type);
ClassDB::bind_method(D_METHOD("get_name"), &ARVRPositionalTracker::get_name);
ClassDB::bind_method(D_METHOD("get_joy_id"), &ARVRPositionalTracker::get_joy_id);
ClassDB::bind_method(D_METHOD("get_tracks_orientation"), &ARVRPositionalTracker::get_tracks_orientation);
ClassDB::bind_method(D_METHOD("get_orientation"), &ARVRPositionalTracker::get_orientation);
ClassDB::bind_method(D_METHOD("get_tracks_position"), &ARVRPositionalTracker::get_tracks_position);
ClassDB::bind_method(D_METHOD("get_position"), &ARVRPositionalTracker::get_position);
ClassDB::bind_method(D_METHOD("get_transform", "adjust_by_reference_frame"), &ARVRPositionalTracker::get_transform);
};
void ARVRPositionalTracker::set_type(ARVRServer::TrackerType p_type) {
if (type != p_type) {
type = p_type;
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL(arvr_server);
// get a tracker id for our type
tracker_id = arvr_server->get_free_tracker_id_for_type(p_type);
}
};
ARVRServer::TrackerType ARVRPositionalTracker::get_type() const {
return type;
};
void ARVRPositionalTracker::set_name(const String p_name) {
name = p_name;
};
StringName ARVRPositionalTracker::get_name() const {
return name;
};
int ARVRPositionalTracker::get_tracker_id() const {
return tracker_id;
};
void ARVRPositionalTracker::set_joy_id(int p_joy_id) {
joy_id = p_joy_id;
};
int ARVRPositionalTracker::get_joy_id() const {
return joy_id;
};
bool ARVRPositionalTracker::get_tracks_orientation() const {
return tracks_orientation;
};
void ARVRPositionalTracker::set_orientation(const Basis &p_orientation) {
_THREAD_SAFE_METHOD_
tracks_orientation = true; // obviously we have this
orientation = p_orientation;
};
Basis ARVRPositionalTracker::get_orientation() const {
_THREAD_SAFE_METHOD_
return orientation;
};
bool ARVRPositionalTracker::get_tracks_position() const {
return tracks_position;
};
void ARVRPositionalTracker::set_position(const Vector3 &p_position) {
_THREAD_SAFE_METHOD_
tracks_position = true; // obviously we have this
position = p_position;
};
Vector3 ARVRPositionalTracker::get_position() const {
_THREAD_SAFE_METHOD_
return position;
};
Transform ARVRPositionalTracker::get_transform(bool p_adjust_by_reference_frame) const {
Transform new_transform;
new_transform.basis = get_orientation();
new_transform.origin = get_position();
if (p_adjust_by_reference_frame) {
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, new_transform);
new_transform = arvr_server->get_reference_frame() * new_transform;
};
return new_transform;
};
ARVRPositionalTracker::ARVRPositionalTracker() {
type = ARVRServer::TRACKER_UNKNOWN;
name = "Unknown";
joy_id = -1;
tracker_id = 0;
tracks_orientation = false;
tracks_position = false;
};
ARVRPositionalTracker::~ARVRPositionalTracker(){
};


@ -0,0 +1,85 @@
/*************************************************************************/
/* arvr_positional_tracker.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#ifndef ARVR_POSITIONAL_TRACKER_H
#define ARVR_POSITIONAL_TRACKER_H
#include "os/thread_safe.h"
#include "servers/arvr_server.h"
/**
@author Bastiaan Olij <mux213@gmail.com>
The positional tracker object is an object that represents the position and orientation of a tracked object like a controller or headset.
An AR/VR interface will register the trackers it manages with our AR/VR server and update their position and orientation.
This is also where additional AR/VR interfaces may be active, as there are AR/VR SDKs that deal solely with positional tracking.
@TODO:
- create subclass of spatial node that uses one of our positional trackers to automatically determine its position
*/
class ARVRPositionalTracker : public Object {
GDCLASS(ARVRPositionalTracker, Object);
_THREAD_SAFE_CLASS_
private:
ARVRServer::TrackerType type; // type of tracker
StringName name; // (unique) name of the tracker
int tracker_id; // tracker index id that is unique per type
int joy_id; // if we also have a related joystick entity, the id of the joystick
bool tracks_orientation; // do we track orientation?
Basis orientation; // our orientation
bool tracks_position; // do we track position?
Vector3 position; // our position
protected:
static void _bind_methods();
public:
void set_type(ARVRServer::TrackerType p_type);
ARVRServer::TrackerType get_type() const;
void set_name(const String p_name);
StringName get_name() const;
int get_tracker_id() const;
void set_joy_id(int p_joy_id);
int get_joy_id() const;
bool get_tracks_orientation() const;
void set_orientation(const Basis &p_orientation);
Basis get_orientation() const;
bool get_tracks_position() const;
void set_position(const Vector3 &p_position);
Vector3 get_position() const;
Transform get_transform(bool p_adjust_by_reference_frame) const;
ARVRPositionalTracker();
~ARVRPositionalTracker();
};
#endif
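To show how a backend would use this class, here is an illustrative sketch (not part of this patch); the tracker name and pose values are hypothetical:

ARVRPositionalTracker *tracker = memnew(ARVRPositionalTracker);
tracker->set_type(ARVRServer::TRACKER_CONTROLLER); // also claims a free per-type tracker id
tracker->set_name("Left Hand Controller");
ARVRServer::get_singleton()->add_tracker(tracker);

// each frame, fed from the SDK's pose data:
tracker->set_orientation(Basis());              // marks tracks_orientation = true
tracker->set_position(Vector3(0.0, 1.2, -0.5)); // marks tracks_position = true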


@ -0,0 +1,127 @@
#include "arvr_script_interface.h"
ARVRScriptInterface::ARVRScriptInterface() {
// testing
printf("Construct script interface");
}
ARVRScriptInterface::~ARVRScriptInterface() {
if (is_initialized()) {
uninitialize();
};
// testing
printf("Destruct script interface");
}
StringName ARVRScriptInterface::get_name() const {
if (get_script_instance() && get_script_instance()->has_method("get_name")) {
return get_script_instance()->call("get_name");
} else {
// just return something for now
return "ARVR Script interface";
}
}
bool ARVRScriptInterface::is_installed() {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("is_installed")), false);
return get_script_instance()->call("is_installed");
}
bool ARVRScriptInterface::hmd_is_present() {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("hmd_is_present")), false);
return get_script_instance()->call("hmd_is_present");
}
bool ARVRScriptInterface::supports_hmd() {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("supports_hmd")), false);
return get_script_instance()->call("supports_hmd");
}
bool ARVRScriptInterface::is_stereo() {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("is_stereo")), false);
return get_script_instance()->call("is_stereo");
}
bool ARVRScriptInterface::is_initialized() {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("is_initialized")), false);
return get_script_instance()->call("is_initialized");
}
bool ARVRScriptInterface::initialize() {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("initialize")), false);
return get_script_instance()->call("initialize");
}
void ARVRScriptInterface::uninitialize() {
ARVRServer *arvr_server = ARVRServer::get_singleton();
if (arvr_server != NULL) {
// Whatever happens, make sure this is no longer our primary interface
arvr_server->clear_primary_interface_if(this);
}
ERR_FAIL_COND(!(get_script_instance() && get_script_instance()->has_method("uninitialize")));
get_script_instance()->call("uninitialize");
}
Size2 ARVRScriptInterface::get_recommended_render_targetsize() {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("get_recommended_render_targetsize")), Size2());
return get_script_instance()->call("get_recommended_render_targetsize");
}
Transform ARVRScriptInterface::get_transform_for_eye(Eyes p_eye, const Transform &p_cam_transform) {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("get_transform_for_eye")), Transform());
return get_script_instance()->call("get_transform_for_eye", p_eye, p_cam_transform);
}
// Suggestion from Reduz, as we can't return a CameraMatrix, return a PoolVector with our 16 floats
PoolVector<float> ARVRScriptInterface::_get_projection_for_eye(Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) {
ERR_FAIL_COND_V(!(get_script_instance() && get_script_instance()->has_method("_get_projection_for_eye")), PoolVector<float>());
return get_script_instance()->call("_get_projection_for_eye", p_eye, p_aspect, p_z_near, p_z_far);
}
CameraMatrix ARVRScriptInterface::get_projection_for_eye(Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) {
CameraMatrix cm;
int i = 0;
int j = 0;
PoolVector<float> cm_as_floats = _get_projection_for_eye(p_eye, p_aspect, p_z_near, p_z_far);
for (int k = 0; k < cm_as_floats.size() && i < 4; k++) {
cm.matrix[i][j] = cm_as_floats[k];
j++;
if (j == 4) {
j = 0;
i++;
};
};
return cm;
}
void ARVRScriptInterface::commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) {
ERR_FAIL_COND(!(get_script_instance() && get_script_instance()->has_method("commit_for_eye")));
get_script_instance()->call("commit_for_eye");
}
void ARVRScriptInterface::process() {
ERR_FAIL_COND(!(get_script_instance() && get_script_instance()->has_method("process")));
get_script_instance()->call("process");
}
void ARVRScriptInterface::_bind_methods() {
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "is_installed"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "hmd_is_present"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "supports_hmd"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "is_initialized"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "initialize"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo("uninitialize"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::BOOL, "is_stereo"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::VECTOR2, "get_recommended_render_targetsize"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo(Variant::TRANSFORM, "get_transform_for_eye", PropertyInfo(Variant::INT, "eye"), PropertyInfo(Variant::TRANSFORM, "cam_transform")));
ClassDB::add_virtual_method(get_class_static(), MethodInfo("_get_projection_for_eye"));
ClassDB::add_virtual_method(get_class_static(), MethodInfo("commit_for_eye", PropertyInfo(Variant::INT, "eye"), PropertyInfo(Variant::_RID, "render_target")));
ClassDB::add_virtual_method(get_class_static(), MethodInfo("process"));
}


@ -0,0 +1,47 @@
#ifndef SCRIPT_INTERFACE_H
#define SCRIPT_INTERFACE_H
#include "arvr_interface.h"
/**
@authors Hinsbart & Karroffel
This subclass of our AR/VR interface forms a bridge to GDNative.
*/
class ARVRScriptInterface : public ARVRInterface {
GDCLASS(ARVRScriptInterface, ARVRInterface);
protected:
static void _bind_methods();
public:
ARVRScriptInterface();
~ARVRScriptInterface();
virtual StringName get_name() const;
virtual bool is_installed();
virtual bool hmd_is_present();
virtual bool supports_hmd();
virtual bool is_initialized();
virtual bool initialize();
virtual void uninitialize();
virtual Size2 get_recommended_render_targetsize();
virtual bool is_stereo();
virtual Transform get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform);
// we expose a PoolVector<float> version of this function to GDNative
PoolVector<float> _get_projection_for_eye(Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far);
// and a CameraMatrix version to ARVRServer
virtual CameraMatrix get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far);
virtual void commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect);
virtual void process();
};
#endif // SCRIPT_INTERFACE_H

servers/arvr_server.cpp (new file, 313 lines)

@ -0,0 +1,313 @@
/*************************************************************************/
/* arvr_server.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "arvr_server.h"
#include "arvr/arvr_interface.h"
#include "arvr/arvr_positional_tracker.h"
#include "project_settings.h"
ARVRServer *ARVRServer::singleton = NULL;
ARVRServer *ARVRServer::get_singleton() {
return singleton;
};
void ARVRServer::_bind_methods() {
ClassDB::bind_method(D_METHOD("get_world_scale"), &ARVRServer::get_world_scale);
ClassDB::bind_method(D_METHOD("set_world_scale"), &ARVRServer::set_world_scale);
ClassDB::bind_method(D_METHOD("get_reference_frame"), &ARVRServer::get_reference_frame);
ClassDB::bind_method(D_METHOD("request_reference_frame", "ignore_tilt", "keep_height"), &ARVRServer::request_reference_frame);
ADD_PROPERTY(PropertyInfo(Variant::REAL, "world_scale"), "set_world_scale", "get_world_scale");
ClassDB::bind_method(D_METHOD("get_interface_count"), &ARVRServer::get_interface_count);
ClassDB::bind_method(D_METHOD("get_interface:ARVRInterface", "idx"), &ARVRServer::get_interface);
ClassDB::bind_method(D_METHOD("find_interface:ARVRInterface", "name"), &ARVRServer::find_interface);
ClassDB::bind_method(D_METHOD("get_tracker_count"), &ARVRServer::get_tracker_count);
ClassDB::bind_method(D_METHOD("get_tracker:ARVRPositionalTracker", "idx"), &ARVRServer::get_tracker);
ClassDB::bind_method(D_METHOD("set_primary_interface"), &ARVRServer::set_primary_interface);
ClassDB::bind_method(D_METHOD("add_interface"), &ARVRServer::add_interface);
ClassDB::bind_method(D_METHOD("remove_interface"), &ARVRServer::remove_interface);
BIND_CONSTANT(TRACKER_CONTROLLER);
BIND_CONSTANT(TRACKER_BASESTATION);
BIND_CONSTANT(TRACKER_ANCHOR);
BIND_CONSTANT(TRACKER_UNKNOWN);
BIND_CONSTANT(TRACKER_ANY_KNOWN);
BIND_CONSTANT(TRACKER_ANY);
ADD_SIGNAL(MethodInfo("interface_added", PropertyInfo(Variant::STRING, "name")));
ADD_SIGNAL(MethodInfo("interface_removed", PropertyInfo(Variant::STRING, "name")));
ADD_SIGNAL(MethodInfo("tracker_added", PropertyInfo(Variant::STRING, "name"), PropertyInfo(Variant::INT, "type")));
ADD_SIGNAL(MethodInfo("tracker_removed", PropertyInfo(Variant::STRING, "name")));
};
real_t ARVRServer::get_world_scale() const {
return world_scale;
};
void ARVRServer::set_world_scale(real_t p_world_scale) {
if (p_world_scale < 0.01) {
p_world_scale = 0.01;
} else if (p_world_scale > 1000.0) {
p_world_scale = 1000.0;
};
world_scale = p_world_scale;
};
Transform ARVRServer::get_world_origin() const {
return world_origin;
};
void ARVRServer::set_world_origin(const Transform p_world_origin) {
world_origin = p_world_origin;
};
Transform ARVRServer::get_reference_frame() const {
return reference_frame;
};
void ARVRServer::request_reference_frame(bool p_ignore_tilt, bool p_keep_height) {
if (primary_interface != NULL) {
// clear our current reference frame or we'll end up double adjusting it
reference_frame = Transform();
// requesting our EYE_MONO transform should return our current HMD position
Transform new_reference_frame = primary_interface->get_transform_for_eye(ARVRInterface::EYE_MONO, Transform());
// remove our tilt
if (p_ignore_tilt) {
// take the Y out of our Z
new_reference_frame.basis.set_axis(2, Vector3(new_reference_frame.basis.elements[0][2], 0.0, new_reference_frame.basis.elements[2][2]).normalized());
// Y is straight up
new_reference_frame.basis.set_axis(1, Vector3(0.0, 1.0, 0.0));
// and X is our cross reference
new_reference_frame.basis.set_axis(0, new_reference_frame.basis.get_axis(1).cross(new_reference_frame.basis.get_axis(2)).normalized());
};
// don't negate our height
if (p_keep_height) {
new_reference_frame.origin.y = 0.0;
};
reference_frame = new_reference_frame.inverse();
};
};
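In other words (an illustrative summary, not part of the patch): the stored reference frame is the inverse of the flattened HMD transform, so when ARVRPositionalTracker::get_transform(true) later multiplies by it, the pose captured at the moment of the request maps back to roughly the origin; with p_ignore_tilt the captured pitch/roll is left out of that correction, and with p_keep_height the user's height above the origin is left untouched.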
void ARVRServer::add_interface(const Ref<ARVRInterface> &p_interface) {
ERR_FAIL_COND(p_interface.is_null());
int idx = -1;
for (int i = 0; i < interfaces.size(); i++) {
if (interfaces[i] == p_interface) {
ERR_PRINT("Interface was already added");
return;
};
};
print_line("Registered interface " + p_interface->get_name());
interfaces.push_back(p_interface);
emit_signal("interface_added", p_interface->get_name());
};
void ARVRServer::remove_interface(const Ref<ARVRInterface> &p_interface) {
ERR_FAIL_COND(p_interface.is_null());
int idx = -1;
for (int i = 0; i < interfaces.size(); i++) {
if (interfaces[i] == p_interface) {
idx = i;
break;
};
};
ERR_FAIL_COND(idx == -1);
print_line("Removed interface" + p_interface->get_name());
emit_signal("interface_removed", p_interface->get_name());
interfaces.remove(idx);
};
int ARVRServer::get_interface_count() const {
return interfaces.size();
};
Ref<ARVRInterface> ARVRServer::get_interface(int p_index) const {
ERR_FAIL_INDEX_V(p_index, interfaces.size(), NULL);
return interfaces[p_index];
};
Ref<ARVRInterface> ARVRServer::find_interface(const String &p_name) const {
int idx = -1;
for (int i = 0; i < interfaces.size(); i++) {
if (interfaces[i]->get_name() == p_name) {
idx = i;
break;
};
};
ERR_FAIL_COND_V(idx == -1, NULL);
return interfaces[idx];
};
/*
A little extra info on the tracker ids: these are unique per tracker type so we get some consistency in recognising our trackers, specifically controllers.
The first controller that is turned on will get ID 1, the second will get ID 2, etc.
The magic happens when one of the controllers is turned off: say controller 1 turns off, controller 2 will remain controller 2 and controller 3 will remain controller 3.
If controller number 1 is turned on again it gets ID 1 again, unless another new controller was turned on since.
The most likely scenario however is a controller that runs out of battery and another controller being used to replace it.
Because the controllers are often linked to physical objects, say you're holding a shield in controller 1, your left hand, and a gun in controller 2, your right hand, and controller 1 dies:
- using our tracker index would suddenly make the gun disappear and the shield jump into your right hand because controller 2 becomes controller 1.
- using this approach the shield disappears or is no longer tracked, but the gun stays firmly in your right hand because that is still controller 2; furthermore, if controller 1 is replaced the shield will return.
*/
bool ARVRServer::is_tracker_id_in_use_for_type(TrackerType p_tracker_type, int p_tracker_id) const {
for (int i = 0; i < trackers.size(); i++) {
if (trackers[i]->get_type() == p_tracker_type && trackers[i]->get_tracker_id() == p_tracker_id) {
return true;
};
};
// all good
return false;
};
int ARVRServer::get_free_tracker_id_for_type(TrackerType p_tracker_type) {
// we start checking at 1, 0 means that it's not a controller..
int tracker_id = 1;
while (is_tracker_id_in_use_for_type(p_tracker_type, tracker_id)) {
// try the next one
tracker_id++;
};
return tracker_id;
};
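A small worked example (illustrative, not part of the patch):

// with controller trackers id 1 and id 3 currently registered:
int next_id = get_free_tracker_id_for_type(TRACKER_CONTROLLER);
// next_id == 2: ids are handed out lowest-first, per tracker type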
void ARVRServer::add_tracker(ARVRPositionalTracker *p_tracker) {
ERR_FAIL_NULL(p_tracker);
trackers.push_back(p_tracker);
emit_signal("tracker_added", p_tracker->get_name(), p_tracker->get_type());
};
void ARVRServer::remove_tracker(ARVRPositionalTracker *p_tracker) {
ERR_FAIL_NULL(p_tracker);
int idx = -1;
for (int i = 0; i < trackers.size(); i++) {
if (trackers[i] == p_tracker) {
idx = i;
break;
};
};
ERR_FAIL_COND(idx == -1);
emit_signal("tracker_removed", p_tracker->get_name());
trackers.remove(idx);
};
int ARVRServer::get_tracker_count() const {
return trackers.size();
};
ARVRPositionalTracker *ARVRServer::get_tracker(int p_index) const {
ERR_FAIL_INDEX_V(p_index, trackers.size(), NULL);
return trackers[p_index];
};
ARVRPositionalTracker *ARVRServer::find_by_type_and_id(TrackerType p_tracker_type, int p_tracker_id) const {
ERR_FAIL_COND_V(p_tracker_id == 0, NULL);
for (int i = 0; i < trackers.size(); i++) {
if (trackers[i]->get_type() == p_tracker_type && trackers[i]->get_tracker_id() == p_tracker_id) {
return trackers[i];
};
};
return NULL;
};
Ref<ARVRInterface> ARVRServer::get_primary_interface() const {
return primary_interface;
};
void ARVRServer::set_primary_interface(const Ref<ARVRInterface> &p_primary_interface) {
primary_interface = p_primary_interface;
print_line("Primary interface set to: " + primary_interface->get_name());
};
void ARVRServer::clear_primary_interface_if(const Ref<ARVRInterface> &p_primary_interface) {
if (primary_interface == p_primary_interface) {
print_line("Clearing primary interface");
primary_interface.unref();
};
};
ARVRServer::ARVRServer() {
singleton = this;
world_scale = 1.0;
};
ARVRServer::~ARVRServer() {
primary_interface.unref();
while (interfaces.size() > 0) {
interfaces.remove(0);
}
while (trackers.size() > 0) {
trackers.remove(0);
}
singleton = NULL;
};
servers/arvr_server.h Normal file
@ -0,0 +1,167 @@
/*************************************************************************/
/* arvr_server.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* http://www.godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2017 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2017 Godot Engine contributors (cf. AUTHORS.md) */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#ifndef ARVR_SERVER_H
#define ARVR_SERVER_H
#include "os/thread_safe.h"
#include "reference.h"
#include "rid.h"
#include "variant.h"
class ARVRInterface;
class ARVRPositionalTracker;
/**
@author Bastiaan Olij <mux213@gmail.com>
The ARVR server is a singleton object that gives access to the various
objects and SDKs that are available on the system.
Because there can be multiple SDKs active this is exposed as an array
and our ARVR server object acts as a pass-through.
Each positioning tracker is also accessible from here.
I've added some additional info into this header file that should move
into the documentation; I will do so when we're close to accepting this PR
or as a separate PR once this has been merged into the master branch.
**/
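/*
Illustrative sketch, not part of this patch: enumerating the registered interfaces through the singleton, using only methods declared below.

	ARVRServer *arvr_server = ARVRServer::get_singleton();
	for (int i = 0; i < arvr_server->get_interface_count(); i++) {
		Ref<ARVRInterface> interface = arvr_server->get_interface(i);
		// inspect or initialise the interface here
	};
*/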
class ARVRServer : public Object {
GDCLASS(ARVRServer, Object);
_THREAD_SAFE_CLASS_
public:
enum TrackerType {
TRACKER_CONTROLLER = 0x01, /* tracks a controller */
TRACKER_BASESTATION = 0x02, /* tracks location of a base station */
TRACKER_ANCHOR = 0x04, /* tracks an anchor point, used in AR to track a real world location */
TRACKER_UNKNOWN = 0x80, /* unknown tracker */
TRACKER_ANY_KNOWN = 0x7f, /* all except unknown */
TRACKER_ANY = 0xff /* used by get_connected_trackers to return all types */
};
private:
Vector<Ref<ARVRInterface> > interfaces;
Vector<ARVRPositionalTracker *> trackers;
Ref<ARVRInterface> primary_interface; /* we'll identify one interface as primary, this will be used by our viewports */
real_t world_scale; /* scale by which we multiply our tracker positions */
Transform world_origin; /* our world origin point, maps a location in our virtual world to the origin point in our real world tracking volume */
Transform reference_frame; /* our reference frame */
bool is_tracker_id_in_use_for_type(TrackerType p_tracker_type, int p_tracker_id) const;
protected:
static ARVRServer *singleton;
static void _bind_methods();
public:
static ARVRServer *get_singleton();
/*
World scale allows you to specify a scale factor that is applied to all positioning vectors in our VR world, in essence scaling the world up or down.
For stereoscopic rendering specifically this is very important to give an accurate sense of scale.
Add controllers into the mix and an accurate mapping of real world movement to perceived virtual movement becomes very important.
Most VR platforms assume, as do we, that 1 unit in our virtual world equates to 1 meter in the real world.
This scale basically affects the relationship between unit size and real world size.
I may remove access to this property in GDScript in favour of exposing it on the ARVROrigin node.
*/
real_t get_world_scale() const;
void set_world_scale(real_t p_world_scale);
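/*
Worked example, not part of this patch: with world_scale set to 2.0, a tracker reported at (0.1, 1.5, -0.3) in meters ends up at (0.2, 3.0, -0.6) in world units, since tracker positions are multiplied by this scale.
*/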
/*
The world origin maps the 0,0,0 coordinate of our real world coordinate system for our tracking volume to a location in our
virtual world. It is this origin point that should be moved when the player is moved through the world by controller
actions, be it strafing, teleporting, etc. Movement of the player by moving through the physical space is always tracked
in relation to this point.
Note that the ARVROrigin spatial node in your scene automatically updates this property; it should be used instead of
accessing this property directly, which is why it is not available in GDScript.
Note: this should not be used in AR and should be ignored by an AR based interface, as it would throw what you're looking at
in the real world and in the virtual world out of sync.
*/
Transform get_world_origin() const;
void set_world_origin(const Transform p_origin);
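/*
Illustrative sketch, not part of this patch: a teleport implemented by moving the world origin rather than the camera, as described above. target_position is a hypothetical destination in the virtual world.

	Transform origin = ARVRServer::get_singleton()->get_world_origin();
	origin.origin = target_position;
	ARVRServer::get_singleton()->set_world_origin(origin);
*/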
/*
Requesting a reference frame results in a matrix being calculated that ensures the HMD is positioned at 0,0,0 facing 0,0,-1 (need to verify this direction)
in the virtual world.
Note: this should not be used in AR and should be ignored by an AR based interface, as it would throw what you're looking at
in the real world and in the virtual world out of sync.
*/
Transform get_reference_frame() const;
void request_reference_frame(bool p_ignore_tilt, bool p_keep_height);
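/*
Illustrative sketch, not part of this patch: recentring the player at the start of a seated experience, ignoring head tilt while keeping the player's physical height.

	ARVRServer::get_singleton()->request_reference_frame(true, true);
*/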
/*
Interfaces are objects that 'glue' Godot to an AR or VR SDK such as the Oculus SDK, OpenVR, OpenHMD, etc.
*/
void add_interface(const Ref<ARVRInterface> &p_interface);
void remove_interface(const Ref<ARVRInterface> &p_interface);
int get_interface_count() const;
Ref<ARVRInterface> get_interface(int p_index) const;
Ref<ARVRInterface> find_interface(const String &p_name) const;
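/*
Illustrative sketch, not part of this patch: looking up a specific SDK binding by name. The interface name "OpenVR" and the initialize() call are assumptions, not defined in this header.

	Ref<ARVRInterface> interface = ARVRServer::get_singleton()->find_interface("OpenVR");
	if (interface.is_valid()) {
		interface->initialize();
	};
*/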
/*
Note: more than one interface can technically be active, especially on mobile, but only one interface is used for
rendering. This interface identifies itself by calling set_primary_interface when it is initialized.
*/
Ref<ARVRInterface> get_primary_interface() const;
void set_primary_interface(const Ref<ARVRInterface> &p_primary_interface);
void clear_primary_interface_if(const Ref<ARVRInterface> &p_primary_interface); /* this is automatically called if an interface destructs */
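/*
Illustrative sketch, not part of this patch: a hypothetical interface subclass marking itself as the rendering interface once its SDK is up, as described above.

	void MyVRInterface::initialize() {
		// ... SDK setup ...
		ARVRServer::get_singleton()->set_primary_interface(this);
	};
*/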
/*
Our trackers are objects that expose the orientation and position of physical devices such as controllers, anchor points, etc.
They are created and managed by our active AR/VR interfaces.
Note that tracker ids are kept consistent per tracker type; see the note in arvr_server.cpp on how controller ids are assigned.
*/
int get_free_tracker_id_for_type(TrackerType p_tracker_type);
void add_tracker(ARVRPositionalTracker *p_tracker);
void remove_tracker(ARVRPositionalTracker *p_tracker);
int get_tracker_count() const;
ARVRPositionalTracker *get_tracker(int p_index) const;
ARVRPositionalTracker *find_by_type_and_id(TrackerType p_tracker_type, int p_tracker_id) const;
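/*
Illustrative sketch, not part of this patch: resolving the tracker that drives a controller-bound node, relying on the id consistency explained in arvr_server.cpp.

	ARVRPositionalTracker *tracker = ARVRServer::get_singleton()->find_by_type_and_id(ARVRServer::TRACKER_CONTROLLER, 2);
	if (tracker) {
		// update the node from the tracker's orientation and position here
	};
*/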
ARVRServer();
~ARVRServer();
};
#define ARVR ARVRServer
VARIANT_ENUM_CAST(ARVRServer::TrackerType);
#endif
@ -30,6 +30,10 @@
#include "register_server_types.h"
#include "project_settings.h"
#include "arvr/arvr_interface.h"
#include "arvr/arvr_positional_tracker.h"
#include "arvr/arvr_script_interface.h"
#include "arvr_server.h"
#include "audio/audio_effect.h"
#include "audio/audio_stream.h"
#include "audio/effects/audio_effect_amplify.h"
@ -70,16 +74,23 @@ static void _debugger_get_resource_usage(List<ScriptDebuggerRemote::ResourceUsag
}
ShaderTypes *shader_types = NULL;
ARVRServer *arvr_server = NULL;
void register_server_types() {
arvr_server = memnew(ARVRServer);
ProjectSettings::get_singleton()->add_singleton(ProjectSettings::Singleton("VisualServer", VisualServer::get_singleton()));
ProjectSettings::get_singleton()->add_singleton(ProjectSettings::Singleton("AudioServer", AudioServer::get_singleton()));
ProjectSettings::get_singleton()->add_singleton(ProjectSettings::Singleton("PhysicsServer", PhysicsServer::get_singleton()));
ProjectSettings::get_singleton()->add_singleton(ProjectSettings::Singleton("Physics2DServer", Physics2DServer::get_singleton()));
ProjectSettings::get_singleton()->add_singleton(ProjectSettings::Singleton("ARVRServer", ARVRServer::get_singleton()));
shader_types = memnew(ShaderTypes);
ClassDB::register_virtual_class<ARVRInterface>();
ClassDB::register_class<ARVRPositionalTracker>();
ClassDB::register_class<ARVRScriptInterface>();
ClassDB::register_virtual_class<AudioStream>();
ClassDB::register_virtual_class<AudioStreamPlayback>();
ClassDB::register_class<AudioStreamRandomPitch>();
@ -133,5 +144,9 @@ void register_server_types() {
void unregister_server_types() {
//@TODO move this into iPhone/Android implementation? just have this here for testing...
// mobile_interface = NULL;
memdelete(shader_types);
memdelete(arvr_server);
}
@ -908,6 +908,7 @@ public:
BIND0R(RID, viewport_create)
BIND2(viewport_set_use_arvr, RID, bool)
BIND3(viewport_set_size, RID, int, int)
BIND2(viewport_set_active, RID, bool)
@ -1658,6 +1658,7 @@ void VisualServerScene::_light_instance_update_shadow(Instance *p_instance, cons
}
void VisualServerScene::render_camera(RID p_camera, RID p_scenario, Size2 p_viewport_size, RID p_shadow_atlas) {
// render to mono camera
Camera *camera = camera_owner.getornull(p_camera);
ERR_FAIL_COND(!camera);
@ -1697,6 +1698,25 @@ void VisualServerScene::render_camera(RID p_camera, RID p_scenario, Size2 p_view
_render_scene(camera->transform, camera_matrix, ortho, camera->env, camera->visible_layers, p_scenario, p_shadow_atlas, RID(), -1);
}
void VisualServerScene::render_camera(Ref<ARVRInterface> &p_interface, ARVRInterface::Eyes p_eye, RID p_camera, RID p_scenario, Size2 p_viewport_size, RID p_shadow_atlas) {
// render for AR/VR interface
Camera *camera = camera_owner.getornull(p_camera);
ERR_FAIL_COND(!camera);
/* SETUP CAMERA, we are ignoring type and FOV here */
bool ortho = false;
float aspect = p_viewport_size.width / (float)p_viewport_size.height;
CameraMatrix camera_matrix = p_interface->get_projection_for_eye(p_eye, aspect, camera->znear, camera->zfar);
// We also ignore our camera position; it will have been set with slightly old tracking data.
// Instead we take our origin point and have our ar/vr interface add fresh tracking data! Whoohoo!
Transform world_origin = ARVRServer::get_singleton()->get_world_origin();
Transform cam_transform = p_interface->get_transform_for_eye(p_eye, world_origin);
_render_scene(cam_transform, camera_matrix, ortho, camera->env, camera->visible_layers, p_scenario, p_shadow_atlas, RID(), -1);
};
void VisualServerScene::_render_scene(const Transform p_cam_transform, const CameraMatrix &p_cam_projection, bool p_cam_orthogonal, RID p_force_environment, uint32_t p_visible_layers, RID p_scenario, RID p_shadow_atlas, RID p_reflection_probe, int p_reflection_probe_pass) {
Scenario *scenario = scenario_owner.getornull(p_scenario);
@ -38,6 +38,7 @@
#include "os/semaphore.h"
#include "os/thread.h"
#include "self_list.h"
#include "servers/arvr/arvr_interface.h"
class VisualServerScene {
public:
@ -521,6 +522,7 @@ public:
void render_empty_scene(RID p_scenario, RID p_shadow_atlas);
void render_camera(RID p_camera, RID p_scenario, Size2 p_viewport_size, RID p_shadow_atlas);
void render_camera(Ref<ARVRInterface> &p_interface, ARVRInterface::Eyes p_eye, RID p_camera, RID p_scenario, Size2 p_viewport_size, RID p_shadow_atlas);
void update_dirty_instances();
//probes
@ -33,7 +33,7 @@
#include "visual_server_global.h"
#include "visual_server_scene.h"
void VisualServerViewport::_draw_viewport(Viewport *p_viewport, ARVRInterface::Eyes p_eye) {
/* Camera should always be BEFORE any other 3D */
@ -90,8 +90,13 @@ void VisualServerViewport::_draw_viewport(Viewport *p_viewport) {
}
if (!scenario_draw_canvas_bg && can_draw_3d) {
Ref<ARVRInterface> arvr_interface = ARVRServer::get_singleton()->get_primary_interface();
if (p_viewport->use_arvr && arvr_interface.is_valid()) {
VSG::scene->render_camera(arvr_interface, p_eye, p_viewport->camera, p_viewport->scenario, p_viewport->size, p_viewport->shadow_atlas);
} else {
VSG::scene->render_camera(p_viewport->camera, p_viewport->scenario, p_viewport->size, p_viewport->shadow_atlas);
}
}
if (!p_viewport->hide_canvas) {
@ -260,15 +265,19 @@ void VisualServerViewport::_draw_viewport(Viewport *p_viewport) {
}
void VisualServerViewport::draw_viewports() {
// get our arvr interface in case we need it
Ref<ARVRInterface> arvr_interface = ARVRServer::get_singleton()->get_primary_interface();
if (arvr_interface.is_valid()) {
// update our positioning information as late as possible...
arvr_interface->process();
}
clear_color = GLOBAL_GET("rendering/environment/default_clear_color");
//sort viewports
active_viewports.sort_custom<ViewportSort>();
//draw viewports
for (int i = 0; i < active_viewports.size(); i++) {
Viewport *vp = active_viewports[i];
@ -286,25 +295,47 @@ void VisualServerViewport::draw_viewports() {
VSG::storage->render_target_clear_used(vp->render_target);
if (vp->use_arvr && arvr_interface.is_valid()) {
// override our size, make sure it matches our required size
Size2 size = arvr_interface->get_recommended_render_targetsize();
VSG::storage->render_target_set_size(vp->render_target, size.x, size.y);
// render mono or left eye first
ARVRInterface::Eyes leftOrMono = arvr_interface->is_stereo() ? ARVRInterface::EYE_LEFT : ARVRInterface::EYE_MONO;
VSG::rasterizer->set_current_render_target(vp->render_target);
_draw_viewport(vp, leftOrMono);
arvr_interface->commit_for_eye(leftOrMono, vp->render_target, vp->viewport_to_screen_rect);
// render right eye
if (leftOrMono == ARVRInterface::EYE_LEFT) {
// commit for eye may have changed the render target
VSG::rasterizer->set_current_render_target(vp->render_target);
_draw_viewport(vp, ARVRInterface::EYE_RIGHT);
arvr_interface->commit_for_eye(ARVRInterface::EYE_RIGHT, vp->render_target, vp->viewport_to_screen_rect);
}
} else {
VSG::rasterizer->set_current_render_target(vp->render_target);
VSG::scene_render->set_debug_draw_mode(vp->debug_draw);
VSG::storage->render_info_begin_capture();
// render standard mono camera
_draw_viewport(vp);
VSG::storage->render_info_end_capture();
vp->render_info[VS::VIEWPORT_RENDER_INFO_OBJECTS_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_OBJECTS_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_VERTICES_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_VERTICES_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_MATERIAL_CHANGES_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_MATERIAL_CHANGES_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_SHADER_CHANGES_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_SHADER_CHANGES_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_SURFACE_CHANGES_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_SURFACE_CHANGES_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_DRAW_CALLS_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_DRAW_CALLS_IN_FRAME);
if (vp->viewport_to_screen_rect != Rect2()) {
//copy to screen if set as such
VSG::rasterizer->set_current_render_target(RID());
VSG::rasterizer->blit_render_target_to_screen(vp->render_target, vp->viewport_to_screen_rect, vp->viewport_to_screen);
}
}
if (vp->update_mode == VS::VIEWPORT_UPDATE_ONCE) {
@ -329,6 +360,13 @@ RID VisualServerViewport::viewport_create() {
return rid;
}
void VisualServerViewport::viewport_set_use_arvr(RID p_viewport, bool p_use_arvr) {
Viewport *viewport = viewport_owner.getornull(p_viewport);
ERR_FAIL_COND(!viewport);
viewport->use_arvr = p_use_arvr;
}
void VisualServerViewport::viewport_set_size(RID p_viewport, int p_width, int p_height) {
ERR_FAIL_COND(p_width < 0 && p_height < 0);
@ -32,6 +32,7 @@
#include "rasterizer.h"
#include "self_list.h"
#include "servers/arvr/arvr_interface.h"
#include "servers/visual_server.h"
class VisualServerViewport {
@ -44,6 +45,8 @@ public:
RID self;
RID parent;
bool use_arvr; /* use arvr interface to override camera positioning and projection matrices and control output */
Size2i size;
RID camera;
RID scenario;
@ -107,6 +110,7 @@ public:
for (int i = 0; i < VS::VIEWPORT_RENDER_INFO_MAX; i++) {
render_info[i] = 0;
}
use_arvr = false;
}
};
@ -131,11 +135,13 @@ public:
private:
Color clear_color;
void _draw_viewport(Viewport *p_viewport, ARVRInterface::Eyes p_eye = ARVRInterface::EYE_MONO);
public:
RID viewport_create();
void viewport_set_use_arvr(RID p_viewport, bool p_use_arvr);
void viewport_set_size(RID p_viewport, int p_width, int p_height);
void viewport_attach_to_screen(RID p_viewport, const Rect2 &p_rect = Rect2(), int p_screen = 0);
@ -344,6 +344,8 @@ public:
FUNC0R(RID, viewport_create)
FUNC2(viewport_set_use_arvr, RID, bool)
FUNC3(viewport_set_size, RID, int, int)
FUNC2(viewport_set_active, RID, bool)
@ -532,6 +532,7 @@ public:
virtual RID viewport_create() = 0;
virtual void viewport_set_use_arvr(RID p_viewport, bool p_use_arvr) = 0;
virtual void viewport_set_size(RID p_viewport, int p_width, int p_height) = 0;
virtual void viewport_set_active(RID p_viewport, bool p_active) = 0;
virtual void viewport_set_parent_viewport(RID p_viewport, RID p_parent_viewport) = 0;