Removed the arvr server.

Relintai 2022-03-16 02:39:38 +01:00
parent b633312366
commit 23c332974a
21 changed files with 33 additions and 1444 deletions

View File

@@ -62,7 +62,6 @@
#include "scene/main/viewport.h"
#include "scene/register_scene_types.h"
#include "scene/resources/packed_scene.h"
#include "servers/arvr_server.h"
#include "servers/audio_server.h"
#include "servers/navigation_2d_server.h"
#include "servers/navigation_server.h"
@@ -105,7 +104,6 @@ static MessageQueue *message_queue = nullptr;
// Initialized in setup2()
static AudioServer *audio_server = nullptr;
static ARVRServer *arvr_server = nullptr;
static PhysicsServer *physics_server = nullptr;
static Physics2DServer *physics_2d_server = nullptr;
static VisualServerCallbacks *visual_server_callbacks = nullptr;
@@ -1364,9 +1362,6 @@ Error Main::setup2(Thread::ID p_main_tid_override) {
audio_server = memnew(AudioServer);
audio_server->init();
// also init our arvr_server from here
arvr_server = memnew(ARVRServer);
// and finally setup this property under visual_server
VisualServer::get_singleton()->set_render_loop_enabled(!disable_render_loop);
@@ -2424,11 +2419,6 @@ void Main::cleanup(bool p_force) {
EditorNode::unregister_editor_types();
#endif
if (arvr_server) {
// cleanup now before we pull the rug from underneath...
memdelete(arvr_server);
}
ImageLoader::cleanup();
unregister_driver_types();

View File

@@ -38,6 +38,7 @@
#include "main/main.h"
#include "servers/visual/visual_server_raster.h"
#include "servers/visual/visual_server_wrap_mt.h"
#include "scene/resources/texture.h"
#ifdef HAVE_MNTENT
#include <mntent.h>

View File

@@ -690,16 +690,6 @@ RID Viewport::get_viewport_rid() const {
return viewport;
}
void Viewport::set_use_arvr(bool p_use_arvr) {
arvr = p_use_arvr;
VS::get_singleton()->viewport_set_use_arvr(viewport, arvr);
}
bool Viewport::use_arvr() {
return arvr;
}
void Viewport::update_canvas_items() {
if (!is_inside_tree()) {
return;
@@ -3158,9 +3148,6 @@ void Viewport::_validate_property(PropertyInfo &property) const {
}
void Viewport::_bind_methods() {
ClassDB::bind_method(D_METHOD("set_use_arvr", "use"), &Viewport::set_use_arvr);
ClassDB::bind_method(D_METHOD("use_arvr"), &Viewport::use_arvr);
ClassDB::bind_method(D_METHOD("set_size", "size"), &Viewport::set_size);
ClassDB::bind_method(D_METHOD("get_size"), &Viewport::get_size);
ClassDB::bind_method(D_METHOD("set_world_2d", "world_2d"), &Viewport::set_world_2d);
@@ -3293,9 +3280,7 @@ void Viewport::_bind_methods() {
ClassDB::bind_method(D_METHOD("_own_world_changed"), &Viewport::_own_world_changed);
ClassDB::bind_method(D_METHOD("_process_picking", "ignore_paused"), &Viewport::_process_picking);
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "arvr"), "set_use_arvr", "use_arvr");
ADD_PROPERTY(PropertyInfo(Variant::VECTOR2, "size"), "set_size", "get_size");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "size_override_stretch"), "set_size_override_stretch", "is_size_override_stretch_enabled");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "own_world"), "set_use_own_world", "is_using_own_world");

View File

@@ -438,9 +438,6 @@ public:
void set_camera_override_perspective(float p_fovy_degrees, float p_z_near, float p_z_far);
void set_camera_override_orthogonal(float p_size, float p_z_near, float p_z_far);
void set_use_arvr(bool p_use_arvr);
bool use_arvr();
void set_as_audio_listener(bool p_enable);
bool is_audio_listener() const;

View File

@@ -5,8 +5,6 @@ Import("env")
env.servers_sources = []
env.add_source_files(env.servers_sources, "*.cpp")
SConscript("arvr/SCsub")
SConscript("camera/SCsub")
SConscript("physics/SCsub")
SConscript("physics_2d/SCsub")
SConscript("visual/SCsub")

View File

@@ -1,5 +0,0 @@
#!/usr/bin/env python
Import("env")
env.add_source_files(env.servers_sources, "*.cpp")

View File

@@ -1,149 +0,0 @@
/*************************************************************************/
/* arvr_interface.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2022 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2022 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "arvr_interface.h"
void ARVRInterface::_bind_methods() {
ClassDB::bind_method(D_METHOD("get_name"), &ARVRInterface::get_name);
ClassDB::bind_method(D_METHOD("get_capabilities"), &ARVRInterface::get_capabilities);
ClassDB::bind_method(D_METHOD("is_primary"), &ARVRInterface::is_primary);
ClassDB::bind_method(D_METHOD("set_is_primary", "enable"), &ARVRInterface::set_is_primary);
ClassDB::bind_method(D_METHOD("is_initialized"), &ARVRInterface::is_initialized);
ClassDB::bind_method(D_METHOD("set_is_initialized", "initialized"), &ARVRInterface::set_is_initialized);
ClassDB::bind_method(D_METHOD("initialize"), &ARVRInterface::initialize);
ClassDB::bind_method(D_METHOD("uninitialize"), &ARVRInterface::uninitialize);
ClassDB::bind_method(D_METHOD("get_tracking_status"), &ARVRInterface::get_tracking_status);
ClassDB::bind_method(D_METHOD("get_render_targetsize"), &ARVRInterface::get_render_targetsize);
ClassDB::bind_method(D_METHOD("is_stereo"), &ARVRInterface::is_stereo);
ADD_GROUP("Interface", "interface_");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "interface_is_primary"), "set_is_primary", "is_primary");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "interface_is_initialized"), "set_is_initialized", "is_initialized");
// we don't have any properties specific to VR yet....
// but we do have properties specific to AR....
ClassDB::bind_method(D_METHOD("get_anchor_detection_is_enabled"), &ARVRInterface::get_anchor_detection_is_enabled);
ClassDB::bind_method(D_METHOD("set_anchor_detection_is_enabled", "enable"), &ARVRInterface::set_anchor_detection_is_enabled);
ADD_GROUP("AR", "ar_");
ADD_PROPERTY(PropertyInfo(Variant::BOOL, "ar_is_anchor_detection_enabled"), "set_anchor_detection_is_enabled", "get_anchor_detection_is_enabled");
BIND_ENUM_CONSTANT(ARVR_NONE);
BIND_ENUM_CONSTANT(ARVR_MONO);
BIND_ENUM_CONSTANT(ARVR_STEREO);
BIND_ENUM_CONSTANT(ARVR_AR);
BIND_ENUM_CONSTANT(ARVR_EXTERNAL);
BIND_ENUM_CONSTANT(EYE_MONO);
BIND_ENUM_CONSTANT(EYE_LEFT);
BIND_ENUM_CONSTANT(EYE_RIGHT);
BIND_ENUM_CONSTANT(ARVR_NORMAL_TRACKING);
BIND_ENUM_CONSTANT(ARVR_EXCESSIVE_MOTION);
BIND_ENUM_CONSTANT(ARVR_INSUFFICIENT_FEATURES);
BIND_ENUM_CONSTANT(ARVR_UNKNOWN_TRACKING);
BIND_ENUM_CONSTANT(ARVR_NOT_TRACKING);
};
StringName ARVRInterface::get_name() const {
return "Unknown";
};
bool ARVRInterface::is_primary() {
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, false);
return arvr_server->get_primary_interface() == this;
};
void ARVRInterface::set_is_primary(bool p_is_primary) {
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL(arvr_server);
if (p_is_primary) {
ERR_FAIL_COND(!is_initialized());
arvr_server->set_primary_interface(this);
} else {
arvr_server->clear_primary_interface_if(this);
};
};
void ARVRInterface::set_is_initialized(bool p_initialized) {
if (p_initialized) {
if (!is_initialized()) {
initialize();
};
} else {
if (is_initialized()) {
uninitialize();
};
};
};
ARVRInterface::Tracking_status ARVRInterface::get_tracking_status() const {
return tracking_state;
};
ARVRInterface::ARVRInterface() {
tracking_state = ARVR_UNKNOWN_TRACKING;
};
ARVRInterface::~ARVRInterface(){};
// optional render to external texture which enhances performance on those platforms that require us to submit our end result into special textures.
unsigned int ARVRInterface::get_external_texture_for_eye(ARVRInterface::Eyes p_eye) {
return 0;
};
// optional render to external depth texture which enhances performance on those platforms that require us to submit our end result into special textures.
unsigned int ARVRInterface::get_external_depth_for_eye(ARVRInterface::Eyes p_eye) {
return 0;
};
/** these will only be implemented on AR interfaces, so we want dummies for VR **/
bool ARVRInterface::get_anchor_detection_is_enabled() const {
return false;
};
void ARVRInterface::set_anchor_detection_is_enabled(bool p_enable){
// don't do anything here, this needs to be implemented on AR interface to enable/disable things like plane detection etc.
};
int ARVRInterface::get_camera_feed_id() {
// don't do anything here, this needs to be implemented on the AR interface to return the id of the camera feed it provides.
return 0;
};

View File

@@ -1,127 +0,0 @@
/*************************************************************************/
/* arvr_interface.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2022 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2022 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#ifndef ARVR_INTERFACE_H
#define ARVR_INTERFACE_H
#include "core/math/camera_matrix.h"
#include "core/os/thread_safe.h"
#include "scene/main/viewport.h"
#include "servers/arvr_server.h"
/**
@author Bastiaan Olij <mux213@gmail.com>
The ARVR interface is a template class on top of which we build interfaces to different AR, VR and tracking SDKs.
The idea is that we subclass this class, implement the logic, and then instantiate a singleton of each interface
when Godot starts. These instances do not initialize themselves but register themselves with the AR/VR server.
If the user wants to enable AR/VR, they choose the interface they want to use and initialize it.
Note that we may make this into a fully instantiable class for GDNative support.
*/
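/*
Illustrative sketch (not part of the original source): a platform module would typically
create its interface singleton at startup and register it with the ARVRServer. The
MyVRInterface class and the register/unregister hooks below are hypothetical placeholders
for whatever the module provides.

	static Ref<MyVRInterface> my_vr_interface;

	void register_my_vr_types() {
		my_vr_interface.instance();
		ARVRServer::get_singleton()->add_interface(my_vr_interface);
	}

	void unregister_my_vr_types() {
		if (my_vr_interface.is_valid()) {
			ARVRServer::get_singleton()->remove_interface(my_vr_interface);
			my_vr_interface.unref();
		}
	}
*/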
class ARVRInterface : public Reference {
GDCLASS(ARVRInterface, Reference);
public:
enum Capabilities { /* purely meta data, provides some info about what this interface supports */
ARVR_NONE = 0, /* no capabilities */
ARVR_MONO = 1, /* can be used with mono output */
ARVR_STEREO = 2, /* can be used with stereo output */
ARVR_AR = 4, /* offers a camera feed for AR */
ARVR_EXTERNAL = 8 /* renders to external device */
};
enum Eyes {
EYE_MONO, /* my son says we should call this EYE_CYCLOPS */
EYE_LEFT,
EYE_RIGHT
};
enum Tracking_status { /* tracking status currently based on AR but we can start doing more with this for VR as well */
ARVR_NORMAL_TRACKING,
ARVR_EXCESSIVE_MOTION,
ARVR_INSUFFICIENT_FEATURES,
ARVR_UNKNOWN_TRACKING,
ARVR_NOT_TRACKING
};
protected:
_THREAD_SAFE_CLASS_
Tracking_status tracking_state;
static void _bind_methods();
public:
/** general interface information **/
virtual StringName get_name() const;
virtual int get_capabilities() const = 0;
bool is_primary();
void set_is_primary(bool p_is_primary);
virtual bool is_initialized() const = 0; /* returns true if we've initialized this interface */
void set_is_initialized(bool p_initialized); /* helper function, will call initialize or uninitialize */
virtual bool initialize() = 0; /* initialize this interface, if this has an HMD it becomes the primary interface */
virtual void uninitialize() = 0; /* deinitialize this interface */
Tracking_status get_tracking_status() const; /* get the status of our current tracking */
/** specific to VR **/
// nothing yet
/** specific to AR **/
virtual bool get_anchor_detection_is_enabled() const;
virtual void set_anchor_detection_is_enabled(bool p_enable);
virtual int get_camera_feed_id();
/** rendering and internal **/
virtual Size2 get_render_targetsize() = 0; /* returns the recommended render target size per eye for this device */
virtual bool is_stereo() = 0; /* returns true if this interface requires stereo rendering (for VR HMDs) or mono rendering (for mobile AR) */
virtual Transform get_transform_for_eye(ARVRInterface::Eyes p_eye, const Transform &p_cam_transform) = 0; /* get each eye's camera transform, also implement EYE_MONO */
virtual CameraMatrix get_projection_for_eye(ARVRInterface::Eyes p_eye, real_t p_aspect, real_t p_z_near, real_t p_z_far) = 0; /* get each eye's projection matrix */
virtual unsigned int get_external_texture_for_eye(ARVRInterface::Eyes p_eye); /* if applicable return external texture to render to */
virtual unsigned int get_external_depth_for_eye(ARVRInterface::Eyes p_eye); /* if applicable return external depth texture to render to */
virtual void commit_for_eye(ARVRInterface::Eyes p_eye, RID p_render_target, const Rect2 &p_screen_rect) = 0; /* output the left or right eye */
virtual void process() = 0;
virtual void notification(int p_what) = 0;
ARVRInterface();
~ARVRInterface();
};
VARIANT_ENUM_CAST(ARVRInterface::Capabilities);
VARIANT_ENUM_CAST(ARVRInterface::Eyes);
VARIANT_ENUM_CAST(ARVRInterface::Tracking_status);
#endif

View File

@@ -1,237 +0,0 @@
/*************************************************************************/
/* arvr_positional_tracker.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2022 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2022 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "arvr_positional_tracker.h"
#include "core/os/input.h"
void ARVRPositionalTracker::_bind_methods() {
BIND_ENUM_CONSTANT(TRACKER_HAND_UNKNOWN);
BIND_ENUM_CONSTANT(TRACKER_LEFT_HAND);
BIND_ENUM_CONSTANT(TRACKER_RIGHT_HAND);
// this class is read only from GDScript, so we only have access to getters..
ClassDB::bind_method(D_METHOD("get_type"), &ARVRPositionalTracker::get_type);
ClassDB::bind_method(D_METHOD("get_tracker_id"), &ARVRPositionalTracker::get_tracker_id);
ClassDB::bind_method(D_METHOD("get_name"), &ARVRPositionalTracker::get_name);
ClassDB::bind_method(D_METHOD("get_joy_id"), &ARVRPositionalTracker::get_joy_id);
ClassDB::bind_method(D_METHOD("get_tracks_orientation"), &ARVRPositionalTracker::get_tracks_orientation);
ClassDB::bind_method(D_METHOD("get_orientation"), &ARVRPositionalTracker::get_orientation);
ClassDB::bind_method(D_METHOD("get_tracks_position"), &ARVRPositionalTracker::get_tracks_position);
ClassDB::bind_method(D_METHOD("get_position"), &ARVRPositionalTracker::get_position);
ClassDB::bind_method(D_METHOD("get_hand"), &ARVRPositionalTracker::get_hand);
ClassDB::bind_method(D_METHOD("get_transform", "adjust_by_reference_frame"), &ARVRPositionalTracker::get_transform);
ClassDB::bind_method(D_METHOD("get_mesh"), &ARVRPositionalTracker::get_mesh);
// these functions we don't want to expose to normal users but do need to be callable from GDNative
ClassDB::bind_method(D_METHOD("_set_type", "type"), &ARVRPositionalTracker::set_type);
ClassDB::bind_method(D_METHOD("_set_name", "name"), &ARVRPositionalTracker::set_name);
ClassDB::bind_method(D_METHOD("_set_joy_id", "joy_id"), &ARVRPositionalTracker::set_joy_id);
ClassDB::bind_method(D_METHOD("_set_orientation", "orientation"), &ARVRPositionalTracker::set_orientation);
ClassDB::bind_method(D_METHOD("_set_rw_position", "rw_position"), &ARVRPositionalTracker::set_rw_position);
ClassDB::bind_method(D_METHOD("_set_mesh", "mesh"), &ARVRPositionalTracker::set_mesh);
ClassDB::bind_method(D_METHOD("get_rumble"), &ARVRPositionalTracker::get_rumble);
ClassDB::bind_method(D_METHOD("set_rumble", "rumble"), &ARVRPositionalTracker::set_rumble);
ADD_PROPERTY(PropertyInfo(Variant::REAL, "rumble"), "set_rumble", "get_rumble");
};
void ARVRPositionalTracker::set_type(ARVRServer::TrackerType p_type) {
if (type != p_type) {
type = p_type;
hand = ARVRPositionalTracker::TRACKER_HAND_UNKNOWN;
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL(arvr_server);
// get a tracker id for our type
// note if this is a controller this will be 3 or higher but we may change it later.
tracker_id = arvr_server->get_free_tracker_id_for_type(p_type);
};
};
ARVRServer::TrackerType ARVRPositionalTracker::get_type() const {
return type;
};
void ARVRPositionalTracker::set_name(const String &p_name) {
name = p_name;
};
StringName ARVRPositionalTracker::get_name() const {
return name;
};
int ARVRPositionalTracker::get_tracker_id() const {
return tracker_id;
};
void ARVRPositionalTracker::set_joy_id(int p_joy_id) {
joy_id = p_joy_id;
};
int ARVRPositionalTracker::get_joy_id() const {
return joy_id;
};
bool ARVRPositionalTracker::get_tracks_orientation() const {
return tracks_orientation;
};
void ARVRPositionalTracker::set_orientation(const Basis &p_orientation) {
_THREAD_SAFE_METHOD_
tracks_orientation = true; // obviously we have this
orientation = p_orientation;
};
Basis ARVRPositionalTracker::get_orientation() const {
_THREAD_SAFE_METHOD_
return orientation;
};
bool ARVRPositionalTracker::get_tracks_position() const {
return tracks_position;
};
void ARVRPositionalTracker::set_position(const Vector3 &p_position) {
_THREAD_SAFE_METHOD_
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL(arvr_server);
real_t world_scale = arvr_server->get_world_scale();
ERR_FAIL_COND(world_scale == 0);
tracks_position = true; // obviously we have this
rw_position = p_position / world_scale;
};
Vector3 ARVRPositionalTracker::get_position() const {
_THREAD_SAFE_METHOD_
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, rw_position);
real_t world_scale = arvr_server->get_world_scale();
return rw_position * world_scale;
};
void ARVRPositionalTracker::set_rw_position(const Vector3 &p_rw_position) {
_THREAD_SAFE_METHOD_
tracks_position = true; // obviously we have this
rw_position = p_rw_position;
};
Vector3 ARVRPositionalTracker::get_rw_position() const {
_THREAD_SAFE_METHOD_
return rw_position;
};
void ARVRPositionalTracker::set_mesh(const Ref<Mesh> &p_mesh) {
_THREAD_SAFE_METHOD_
mesh = p_mesh;
};
Ref<Mesh> ARVRPositionalTracker::get_mesh() const {
_THREAD_SAFE_METHOD_
return mesh;
};
ARVRPositionalTracker::TrackerHand ARVRPositionalTracker::get_hand() const {
return hand;
};
void ARVRPositionalTracker::set_hand(const ARVRPositionalTracker::TrackerHand p_hand) {
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL(arvr_server);
if (hand != p_hand) {
// we can only set this if we've previously set this to be a controller!!
ERR_FAIL_COND((type != ARVRServer::TRACKER_CONTROLLER) && (p_hand != ARVRPositionalTracker::TRACKER_HAND_UNKNOWN));
hand = p_hand;
if (hand == ARVRPositionalTracker::TRACKER_LEFT_HAND) {
if (!arvr_server->is_tracker_id_in_use_for_type(type, 1)) {
tracker_id = 1;
};
} else if (hand == ARVRPositionalTracker::TRACKER_RIGHT_HAND) {
if (!arvr_server->is_tracker_id_in_use_for_type(type, 2)) {
tracker_id = 2;
};
};
};
};
Transform ARVRPositionalTracker::get_transform(bool p_adjust_by_reference_frame) const {
Transform new_transform;
new_transform.basis = get_orientation();
new_transform.origin = get_position();
if (p_adjust_by_reference_frame) {
ARVRServer *arvr_server = ARVRServer::get_singleton();
ERR_FAIL_NULL_V(arvr_server, new_transform);
new_transform = arvr_server->get_reference_frame() * new_transform;
};
return new_transform;
};
real_t ARVRPositionalTracker::get_rumble() const {
return rumble;
};
void ARVRPositionalTracker::set_rumble(real_t p_rumble) {
if (p_rumble > 0.0) {
rumble = p_rumble;
} else {
rumble = 0.0;
};
};
ARVRPositionalTracker::ARVRPositionalTracker() {
type = ARVRServer::TRACKER_UNKNOWN;
name = "Unknown";
joy_id = -1;
tracker_id = 0;
tracks_orientation = false;
tracks_position = false;
hand = TRACKER_HAND_UNKNOWN;
rumble = 0.0;
};
ARVRPositionalTracker::~ARVRPositionalTracker(){
};

View File

@@ -1,104 +0,0 @@
/*************************************************************************/
/* arvr_positional_tracker.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2022 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2022 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#ifndef ARVR_POSITIONAL_TRACKER_H
#define ARVR_POSITIONAL_TRACKER_H
#include "core/os/thread_safe.h"
#include "scene/resources/mesh.h"
#include "servers/arvr_server.h"
/**
@author Bastiaan Olij <mux213@gmail.com>
The positional tracker object is an object that represents the position and orientation of a tracked object like a controller or headset.
An AR/VR interface will register the trackers it manages with our AR/VR server and update their position and orientation.
This is also where additional AR/VR interfaces may come into play, as there are AR/VR SDKs that deal solely with positional tracking.
*/
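/*
Illustrative sketch (not part of the original source): once registered, an interface pushes
fresh pose data into its tracker every frame; the sdk_* variables below are hypothetical
stand-ins for data coming from the platform SDK.

	Basis sdk_orientation;  // orientation reported by the SDK
	Vector3 sdk_position;   // position in meters, without world_scale applied

	tracker->set_orientation(sdk_orientation);
	tracker->set_rw_position(sdk_position); // set_position() would divide by world_scale instead

	// nodes such as ARVRController then read the combined pose:
	Transform pose = tracker->get_transform(true); // true = adjust by the server's reference frame
*/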
class ARVRPositionalTracker : public Reference {
GDCLASS(ARVRPositionalTracker, Reference);
_THREAD_SAFE_CLASS_
public:
enum TrackerHand {
TRACKER_HAND_UNKNOWN, /* unknown or not applicable */
TRACKER_LEFT_HAND, /* controller is the left hand controller */
TRACKER_RIGHT_HAND /* controller is the right hand controller */
};
private:
ARVRServer::TrackerType type; // type of tracker
StringName name; // (unique) name of the tracker
int tracker_id; // tracker index id that is unique per type
int joy_id; // if we also have a related joystick entity, the id of the joystick
bool tracks_orientation; // do we track orientation?
Basis orientation; // our orientation
bool tracks_position; // do we track position?
Vector3 rw_position; // our position "in the real world, so without world_scale applied"
Ref<Mesh> mesh; // when available, a mesh that can be used to render this tracker
TrackerHand hand; // if known, the hand this tracker is held in
real_t rumble; // rumble strength, 0.0 is off, 1.0 is maximum, note that we only record here, arvr_interface is responsible for execution
protected:
static void _bind_methods();
public:
void set_type(ARVRServer::TrackerType p_type);
ARVRServer::TrackerType get_type() const;
void set_name(const String &p_name);
StringName get_name() const;
int get_tracker_id() const;
void set_joy_id(int p_joy_id);
int get_joy_id() const;
bool get_tracks_orientation() const;
void set_orientation(const Basis &p_orientation);
Basis get_orientation() const;
bool get_tracks_position() const;
void set_position(const Vector3 &p_position); // set position with world_scale applied
Vector3 get_position() const; // get position with world_scale applied
void set_rw_position(const Vector3 &p_rw_position);
Vector3 get_rw_position() const;
ARVRPositionalTracker::TrackerHand get_hand() const;
void set_hand(const ARVRPositionalTracker::TrackerHand p_hand);
real_t get_rumble() const;
void set_rumble(real_t p_rumble);
void set_mesh(const Ref<Mesh> &p_mesh);
Ref<Mesh> get_mesh() const;
Transform get_transform(bool p_adjust_by_reference_frame) const;
ARVRPositionalTracker();
~ARVRPositionalTracker();
};
VARIANT_ENUM_CAST(ARVRPositionalTracker::TrackerHand);
#endif

View File

@@ -1,385 +0,0 @@
/*************************************************************************/
/* arvr_server.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2022 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2022 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#include "arvr_server.h"
#include "arvr/arvr_interface.h"
#include "arvr/arvr_positional_tracker.h"
#include "core/project_settings.h"
ARVRServer *ARVRServer::singleton = nullptr;
ARVRServer *ARVRServer::get_singleton() {
return singleton;
};
void ARVRServer::_bind_methods() {
ClassDB::bind_method(D_METHOD("get_world_scale"), &ARVRServer::get_world_scale);
ClassDB::bind_method(D_METHOD("set_world_scale"), &ARVRServer::set_world_scale);
ClassDB::bind_method(D_METHOD("get_reference_frame"), &ARVRServer::get_reference_frame);
ClassDB::bind_method(D_METHOD("center_on_hmd", "rotation_mode", "keep_height"), &ARVRServer::center_on_hmd);
ClassDB::bind_method(D_METHOD("get_hmd_transform"), &ARVRServer::get_hmd_transform);
ADD_PROPERTY(PropertyInfo(Variant::REAL, "world_scale"), "set_world_scale", "get_world_scale");
ClassDB::bind_method(D_METHOD("add_interface", "interface"), &ARVRServer::add_interface);
ClassDB::bind_method(D_METHOD("clear_primary_interface_if", "interface"), &ARVRServer::clear_primary_interface_if);
ClassDB::bind_method(D_METHOD("get_interface_count"), &ARVRServer::get_interface_count);
ClassDB::bind_method(D_METHOD("remove_interface", "interface"), &ARVRServer::remove_interface);
ClassDB::bind_method(D_METHOD("get_interface", "idx"), &ARVRServer::get_interface);
ClassDB::bind_method(D_METHOD("get_interfaces"), &ARVRServer::get_interfaces);
ClassDB::bind_method(D_METHOD("find_interface", "name"), &ARVRServer::find_interface);
ClassDB::bind_method(D_METHOD("get_tracker_count"), &ARVRServer::get_tracker_count);
ClassDB::bind_method(D_METHOD("get_tracker", "idx"), &ARVRServer::get_tracker);
ClassDB::bind_method(D_METHOD("add_tracker", "tracker"), &ARVRServer::add_tracker);
ClassDB::bind_method(D_METHOD("remove_tracker", "tracker"), &ARVRServer::remove_tracker);
ClassDB::bind_method(D_METHOD("get_primary_interface"), &ARVRServer::get_primary_interface);
ClassDB::bind_method(D_METHOD("set_primary_interface", "interface"), &ARVRServer::set_primary_interface);
ADD_PROPERTY(PropertyInfo(Variant::OBJECT, "primary_interface"), "set_primary_interface", "get_primary_interface");
ClassDB::bind_method(D_METHOD("get_last_process_usec"), &ARVRServer::get_last_process_usec);
ClassDB::bind_method(D_METHOD("get_last_commit_usec"), &ARVRServer::get_last_commit_usec);
ClassDB::bind_method(D_METHOD("get_last_frame_usec"), &ARVRServer::get_last_frame_usec);
BIND_ENUM_CONSTANT(TRACKER_CONTROLLER);
BIND_ENUM_CONSTANT(TRACKER_BASESTATION);
BIND_ENUM_CONSTANT(TRACKER_ANCHOR);
BIND_ENUM_CONSTANT(TRACKER_ANY_KNOWN);
BIND_ENUM_CONSTANT(TRACKER_UNKNOWN);
BIND_ENUM_CONSTANT(TRACKER_ANY);
BIND_ENUM_CONSTANT(RESET_FULL_ROTATION);
BIND_ENUM_CONSTANT(RESET_BUT_KEEP_TILT);
BIND_ENUM_CONSTANT(DONT_RESET_ROTATION);
ADD_SIGNAL(MethodInfo("interface_added", PropertyInfo(Variant::STRING, "interface_name")));
ADD_SIGNAL(MethodInfo("interface_removed", PropertyInfo(Variant::STRING, "interface_name")));
ADD_SIGNAL(MethodInfo("tracker_added", PropertyInfo(Variant::STRING, "tracker_name"), PropertyInfo(Variant::INT, "type"), PropertyInfo(Variant::INT, "id")));
ADD_SIGNAL(MethodInfo("tracker_removed", PropertyInfo(Variant::STRING, "tracker_name"), PropertyInfo(Variant::INT, "type"), PropertyInfo(Variant::INT, "id")));
};
real_t ARVRServer::get_world_scale() const {
return world_scale;
};
void ARVRServer::set_world_scale(real_t p_world_scale) {
if (p_world_scale < 0.01) {
p_world_scale = 0.01;
} else if (p_world_scale > 1000.0) {
p_world_scale = 1000.0;
}
world_scale = p_world_scale;
};
Transform ARVRServer::get_world_origin() const {
return world_origin;
};
void ARVRServer::set_world_origin(const Transform &p_world_origin) {
world_origin = p_world_origin;
};
Transform ARVRServer::get_reference_frame() const {
return reference_frame;
};
void ARVRServer::center_on_hmd(RotationMode p_rotation_mode, bool p_keep_height) {
if (primary_interface != nullptr) {
// clear our current reference frame or we'll end up double adjusting it
reference_frame = Transform();
// requesting our EYE_MONO transform should return our current HMD position
Transform new_reference_frame = primary_interface->get_transform_for_eye(ARVRInterface::EYE_MONO, Transform());
// remove our tilt
if (p_rotation_mode == 1) {
// take the Y out of our Z
new_reference_frame.basis.set_axis(2, Vector3(new_reference_frame.basis.elements[0][2], 0.0, new_reference_frame.basis.elements[2][2]).normalized());
// Y is straight up
new_reference_frame.basis.set_axis(1, Vector3(0.0, 1.0, 0.0));
// and X is our cross reference
new_reference_frame.basis.set_axis(0, new_reference_frame.basis.get_axis(1).cross(new_reference_frame.basis.get_axis(2)).normalized());
} else if (p_rotation_mode == 2) {
// remove our rotation, we're only interested in centering on position
new_reference_frame.basis = Basis();
};
// don't negate our height
if (p_keep_height) {
new_reference_frame.origin.y = 0.0;
};
reference_frame = new_reference_frame.inverse();
};
};
Transform ARVRServer::get_hmd_transform() {
Transform hmd_transform;
if (primary_interface != nullptr) {
hmd_transform = primary_interface->get_transform_for_eye(ARVRInterface::EYE_MONO, hmd_transform);
};
return hmd_transform;
};
void ARVRServer::add_interface(const Ref<ARVRInterface> &p_interface) {
ERR_FAIL_COND(p_interface.is_null());
for (int i = 0; i < interfaces.size(); i++) {
if (interfaces[i] == p_interface) {
ERR_PRINT("Interface was already added");
return;
};
};
interfaces.push_back(p_interface);
emit_signal("interface_added", p_interface->get_name());
};
void ARVRServer::remove_interface(const Ref<ARVRInterface> &p_interface) {
ERR_FAIL_COND(p_interface.is_null());
int idx = -1;
for (int i = 0; i < interfaces.size(); i++) {
if (interfaces[i] == p_interface) {
idx = i;
break;
};
};
ERR_FAIL_COND(idx == -1);
print_verbose("ARVR: Removed interface " + p_interface->get_name());
emit_signal("interface_removed", p_interface->get_name());
interfaces.remove(idx);
};
int ARVRServer::get_interface_count() const {
return interfaces.size();
};
Ref<ARVRInterface> ARVRServer::get_interface(int p_index) const {
ERR_FAIL_INDEX_V(p_index, interfaces.size(), nullptr);
return interfaces[p_index];
};
Ref<ARVRInterface> ARVRServer::find_interface(const String &p_name) const {
int idx = -1;
for (int i = 0; i < interfaces.size(); i++) {
if (interfaces[i]->get_name() == p_name) {
idx = i;
break;
};
};
ERR_FAIL_COND_V(idx == -1, nullptr);
return interfaces[idx];
};
Array ARVRServer::get_interfaces() const {
Array ret;
for (int i = 0; i < interfaces.size(); i++) {
Dictionary iface_info;
iface_info["id"] = i;
iface_info["name"] = interfaces[i]->get_name();
ret.push_back(iface_info);
};
return ret;
};
/*
A little extra info on the tracker ids: these are unique per tracker type so we get some consistency in recognising our trackers, specifically controllers.
The first controller that is turned on will get ID 1, the second will get ID 2, etc.
The magic happens when one of the controllers is turned off: say controller 1 turns off, then controller 2 will remain controller 2 and controller 3 will remain controller 3.
If controller number 1 is turned on again it gets ID 1 again, unless another new controller was turned on since.
The most likely scenario however is a controller that runs out of battery and another controller being used to replace it.
Because the controllers are often linked to physical objects, say you're holding a shield in controller 1, your left hand, and a gun in controller 2, your right hand, and controller 1 dies:
- using our tracker index would suddenly make the gun disappear and the shield jump into your right hand because controller 2 becomes controller 1.
- using this approach the shield disappears or is no longer tracked, but the gun stays firmly in your right hand because that is still controller 2. Furthermore, if controller 1 is replaced the shield will return.
*/
bool ARVRServer::is_tracker_id_in_use_for_type(TrackerType p_tracker_type, int p_tracker_id) const {
for (int i = 0; i < trackers.size(); i++) {
if (trackers[i]->get_type() == p_tracker_type && trackers[i]->get_tracker_id() == p_tracker_id) {
return true;
};
};
// all good
return false;
};
int ARVRServer::get_free_tracker_id_for_type(TrackerType p_tracker_type) {
// We start checking at 1, 0 means that it's not a controller..
// Note that for controller we reserve:
// - 1 for the left hand controller and
// - 2 for the right hand controller
// so we start at 3 :)
int tracker_id = p_tracker_type == ARVRServer::TRACKER_CONTROLLER ? 3 : 1;
while (is_tracker_id_in_use_for_type(p_tracker_type, tracker_id)) {
// try the next one
tracker_id++;
};
return tracker_id;
};
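/*
Illustrative sketch (not part of the original source) of how an interface would use this id
scheme when a controller connects; the tracker name is a placeholder.

	Ref<ARVRPositionalTracker> tracker;
	tracker.instance();
	tracker->set_type(ARVRServer::TRACKER_CONTROLLER); // grabs a free id, 3 or higher
	tracker->set_hand(ARVRPositionalTracker::TRACKER_LEFT_HAND); // remaps to the reserved id 1 if free
	tracker->set_name("Left Controller");
	ARVRServer::get_singleton()->add_tracker(tracker);
*/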
void ARVRServer::add_tracker(Ref<ARVRPositionalTracker> p_tracker) {
ERR_FAIL_COND(p_tracker.is_null());
trackers.push_back(p_tracker);
emit_signal("tracker_added", p_tracker->get_name(), p_tracker->get_type(), p_tracker->get_tracker_id());
};
void ARVRServer::remove_tracker(Ref<ARVRPositionalTracker> p_tracker) {
ERR_FAIL_COND(p_tracker.is_null());
int idx = -1;
for (int i = 0; i < trackers.size(); i++) {
if (trackers[i] == p_tracker) {
idx = i;
break;
};
};
ERR_FAIL_COND(idx == -1);
emit_signal("tracker_removed", p_tracker->get_name(), p_tracker->get_type(), p_tracker->get_tracker_id());
trackers.remove(idx);
};
int ARVRServer::get_tracker_count() const {
return trackers.size();
};
Ref<ARVRPositionalTracker> ARVRServer::get_tracker(int p_index) const {
ERR_FAIL_INDEX_V(p_index, trackers.size(), Ref<ARVRPositionalTracker>());
return trackers[p_index];
};
Ref<ARVRPositionalTracker> ARVRServer::find_by_type_and_id(TrackerType p_tracker_type, int p_tracker_id) const {
ERR_FAIL_COND_V(p_tracker_id == 0, Ref<ARVRPositionalTracker>());
for (int i = 0; i < trackers.size(); i++) {
if (trackers[i]->get_type() == p_tracker_type && trackers[i]->get_tracker_id() == p_tracker_id) {
return trackers[i];
};
};
return Ref<ARVRPositionalTracker>();
};
Ref<ARVRInterface> ARVRServer::get_primary_interface() const {
return primary_interface;
};
void ARVRServer::set_primary_interface(const Ref<ARVRInterface> &p_primary_interface) {
ERR_FAIL_COND(p_primary_interface.is_null());
primary_interface = p_primary_interface;
print_verbose("ARVR: Primary interface set to: " + primary_interface->get_name());
};
void ARVRServer::clear_primary_interface_if(const Ref<ARVRInterface> &p_primary_interface) {
if (primary_interface == p_primary_interface) {
print_verbose("ARVR: Clearing primary interface");
primary_interface.unref();
};
};
uint64_t ARVRServer::get_last_process_usec() {
return last_process_usec;
};
uint64_t ARVRServer::get_last_commit_usec() {
return last_commit_usec;
};
uint64_t ARVRServer::get_last_frame_usec() {
return last_frame_usec;
};
void ARVRServer::_process() {
/* called from visual_server_viewport.draw_viewports right before we start drawing our viewports */
/* mark for our frame timing */
last_process_usec = OS::get_singleton()->get_ticks_usec();
/* process all active interfaces */
for (int i = 0; i < interfaces.size(); i++) {
if (!interfaces[i].is_valid()) {
// ignore, not a valid reference
} else if (interfaces[i]->is_initialized()) {
interfaces.write[i]->process();
};
};
};
void ARVRServer::_mark_commit() {
/* time this */
last_commit_usec = OS::get_singleton()->get_ticks_usec();
/* now store our difference as we may overwrite last_process_usec before this is accessed */
last_frame_usec = last_commit_usec - last_process_usec;
};
ARVRServer::ARVRServer() {
singleton = this;
world_scale = 1.0;
};
ARVRServer::~ARVRServer() {
primary_interface.unref();
while (interfaces.size() > 0) {
interfaces.remove(0);
}
while (trackers.size() > 0) {
trackers.remove(0);
}
singleton = nullptr;
};

View File

@@ -1,192 +0,0 @@
/*************************************************************************/
/* arvr_server.h */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2022 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2022 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/
#ifndef ARVR_SERVER_H
#define ARVR_SERVER_H
#include "core/os/os.h"
#include "core/os/thread_safe.h"
#include "core/reference.h"
#include "core/rid.h"
#include "core/variant.h"
class ARVRInterface;
class ARVRPositionalTracker;
/**
@author Bastiaan Olij <mux213@gmail.com>
The ARVR server is a singleton object that gives access to the various
objects and SDKs that are available on the system.
Because there can be multiple SDKs active this is exposed as an array
and our ARVR server object acts as a pass-through.
Each positional tracker is also accessible from here.
I've added some additional info into this header file that should move
into the documentation; I will do so when we're close to accepting this PR,
or as a separate PR once this has been merged into the master branch.
**/
class ARVRServer : public Object {
GDCLASS(ARVRServer, Object);
_THREAD_SAFE_CLASS_
public:
enum TrackerType {
TRACKER_CONTROLLER = 0x01, /* tracks a controller */
TRACKER_BASESTATION = 0x02, /* tracks location of a base station */
TRACKER_ANCHOR = 0x04, /* tracks an anchor point, used in AR to track a real live location */
TRACKER_UNKNOWN = 0x80, /* unknown tracker */
TRACKER_ANY_KNOWN = 0x7f, /* all except unknown */
TRACKER_ANY = 0xff /* used by get_connected_trackers to return all types */
};
enum RotationMode {
RESET_FULL_ROTATION = 0, /* we reset the full rotation, regardless of how the HMD is oriented, we're looking dead ahead */
RESET_BUT_KEEP_TILT = 1, /* reset rotation but keep tilt. */
DONT_RESET_ROTATION = 2, /* don't reset the rotation, we will only center on position */
};
private:
Vector<Ref<ARVRInterface>> interfaces;
Vector<Ref<ARVRPositionalTracker>> trackers;
Ref<ARVRInterface> primary_interface; /* we'll identify one interface as primary, this will be used by our viewports */
real_t world_scale; /* scale by which we multiply our tracker positions */
Transform world_origin; /* our world origin point, maps a location in our virtual world to the origin point in our real world tracking volume */
Transform reference_frame; /* our reference frame */
uint64_t last_process_usec; /* for frame timing, usec when we did our processing */
uint64_t last_commit_usec; /* for frame timing, usec when we finished committing both eyes */
uint64_t last_frame_usec; /* time it took between process and committing, we should probably average this over the last x frames */
protected:
static ARVRServer *singleton;
static void _bind_methods();
public:
static ARVRServer *get_singleton();
/*
World scale allows you to specify a scale factor that is applied to all positioning vectors in our VR world in essence scaling up, or scaling down the world.
For stereoscopic rendering specifically this is very important to give an accurate sense of scale.
Add controllers into the mix and an accurate mapping of real world movement to perceived virtual movement becomes very important.
Most VR platforms assume, as do we, that 1 unit in our virtual world equates to 1 meter in the real world.
This scale basically affects the unit size relationship to real world size.
I may remove access to this property in GDScript in favour of exposing it on the ARVROrigin node
*/
real_t get_world_scale() const;
void set_world_scale(real_t p_world_scale);
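/*
For illustration (not part of the original source): with world_scale = 2.0, a tracker whose
raw real-world position is (0, 1.5, 0) reports (0, 3.0, 0) from get_position(), i.e. one
real-world meter maps to two units in the virtual world.

	ARVRServer::get_singleton()->set_world_scale(2.0);
*/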
/*
The world maps the 0,0,0 coordinate of our real world coordinate system for our tracking volume to a location in our
virtual world. It is this origin point that should be moved when the player is moved through the world by controller
actions be it strafing, teleporting, etc. Movement of the player by moving through the physical space is always tracked
in relation to this point.
Note that the ARVROrigin spatial node in your scene automatically updates this property and should be used instead of
accessing this property directly; it is therefore not available in GDScript
Note: this should not be used in AR and should be ignored by an AR based interface as it would throw what you're looking at in the real world
and in the virtual world out of sync
*/
Transform get_world_origin() const;
void set_world_origin(const Transform &p_world_origin);
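/*
Illustrative sketch (not part of the original source): a teleport implemented through the
world origin, normally driven via the ARVROrigin node rather than by calling this directly;
target_position is a placeholder.

	Transform origin = ARVRServer::get_singleton()->get_world_origin();
	origin.origin = target_position; // move the whole tracking volume; the HMD follows
	ARVRServer::get_singleton()->set_world_origin(origin);
*/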
/*
center_on_hmd calculates a new reference frame. This ensures the HMD is positioned to 0,0,0 facing 0,0,-1 (need to verify this direction)
in the virtual world.
You can ignore the tilt of the device ensuring you're looking straight forward even if the player is looking down or sideways.
You can choose to keep the height the tracking provides, which is important for room-scale capable tracking.
Note: this should not be used in AR and should be ignored by an AR based interface as it would throw what you're looking at in the real world
and in the virtual world out of sync
*/
Transform get_reference_frame() const;
void center_on_hmd(RotationMode p_rotation_mode, bool p_keep_height);
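/*
Illustrative usage (not part of the original source), e.g. when the player presses a
"recenter" button:

	// recenter horizontal position and yaw; tilt and the tracked height are kept
	ARVRServer::get_singleton()->center_on_hmd(ARVRServer::RESET_BUT_KEEP_TILT, true);
*/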
/*
get_hmd_transform gets our hmd transform (centered between eyes) with most up to date tracking, relative to the origin
*/
Transform get_hmd_transform();
/*
Interfaces are objects that 'glue' Godot to an AR or VR SDK such as the Oculus SDK, OpenVR, OpenHMD, etc.
*/
void add_interface(const Ref<ARVRInterface> &p_interface);
void remove_interface(const Ref<ARVRInterface> &p_interface);
int get_interface_count() const;
Ref<ARVRInterface> get_interface(int p_index) const;
Ref<ARVRInterface> find_interface(const String &p_name) const;
Array get_interfaces() const;
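/*
Illustrative sketch (not part of the original source) of how a game enabled an interface;
the interface name and the viewport variable are placeholders.

	Ref<ARVRInterface> iface = ARVRServer::get_singleton()->find_interface("OpenVR");
	if (iface.is_valid() && iface->initialize()) {
		// an initialized HMD interface makes itself the primary interface;
		// route the main viewport's output through it
		viewport->set_use_arvr(true);
	}
*/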
/*
note, more than one interface can technically be active, especially on mobile, but only one interface is used for
rendering. This interface identifies itself by calling set_primary_interface when it is initialized
*/
Ref<ARVRInterface> get_primary_interface() const;
void set_primary_interface(const Ref<ARVRInterface> &p_primary_interface);
void clear_primary_interface_if(const Ref<ARVRInterface> &p_primary_interface); /* this is automatically called if an interface destructs */
/*
Our trackers are objects that expose the orientation and position of physical devices such as controller, anchor points, etc.
They are created and managed by our active AR/VR interfaces.
*/
bool is_tracker_id_in_use_for_type(TrackerType p_tracker_type, int p_tracker_id) const;
int get_free_tracker_id_for_type(TrackerType p_tracker_type);
void add_tracker(Ref<ARVRPositionalTracker> p_tracker);
void remove_tracker(Ref<ARVRPositionalTracker> p_tracker);
int get_tracker_count() const;
Ref<ARVRPositionalTracker> get_tracker(int p_index) const;
Ref<ARVRPositionalTracker> find_by_type_and_id(TrackerType p_tracker_type, int p_tracker_id) const;
uint64_t get_last_process_usec();
uint64_t get_last_commit_usec();
uint64_t get_last_frame_usec();
void _process();
void _mark_commit();
ARVRServer();
~ARVRServer();
};
#define ARVR ARVRServer
VARIANT_ENUM_CAST(ARVRServer::TrackerType);
VARIANT_ENUM_CAST(ARVRServer::RotationMode);
#endif

View File

@@ -32,9 +32,6 @@
#include "core/engine.h"
#include "core/project_settings.h"
#include "arvr/arvr_interface.h"
#include "arvr/arvr_positional_tracker.h"
#include "arvr_server.h"
#include "audio/audio_effect.h"
#include "audio/audio_stream.h"
#include "audio/effects/audio_effect_amplify.h"
@@ -114,13 +111,9 @@ void register_server_types() {
ClassDB::register_virtual_class<Physics2DServer>();
ClassDB::register_virtual_class<NavigationServer>();
ClassDB::register_virtual_class<Navigation2DServer>();
ClassDB::register_class<ARVRServer>();
shader_types = memnew(ShaderTypes);
ClassDB::register_virtual_class<ARVRInterface>();
ClassDB::register_class<ARVRPositionalTracker>();
ClassDB::register_virtual_class<AudioStream>();
ClassDB::register_virtual_class<AudioStreamPlayback>();
ClassDB::register_virtual_class<AudioStreamPlaybackResampled>();
@@ -210,5 +203,4 @@ void register_server_singletons() {
Engine::get_singleton()->add_singleton(Engine::Singleton("Physics2DServer", Physics2DServer::get_singleton()));
Engine::get_singleton()->add_singleton(Engine::Singleton("NavigationServer", NavigationServer::get_singleton_mut()));
Engine::get_singleton()->add_singleton(Engine::Singleton("Navigation2DServer", Navigation2DServer::get_singleton_mut()));
Engine::get_singleton()->add_singleton(Engine::Singleton("ARVRServer", ARVRServer::get_singleton()));
}

View File

@@ -380,7 +380,6 @@ public:
BIND0R(RID, viewport_create)
BIND2(viewport_set_use_arvr, RID, bool)
BIND3(viewport_set_size, RID, int, int)
BIND2(viewport_set_active, RID, bool)

View File

@@ -34,6 +34,7 @@
#include "core/os/os.h"
#include "visual_server_globals.h"
#include "visual_server_raster.h"
#include "core/project_settings.h"
#include <new>
@@ -2483,90 +2484,6 @@ void VisualServerScene::render_camera(RID p_camera, RID p_scenario, Size2 p_view
#endif
}
void VisualServerScene::render_camera(Ref<ARVRInterface> &p_interface, ARVRInterface::Eyes p_eye, RID p_camera, RID p_scenario, Size2 p_viewport_size, RID p_shadow_atlas) {
// render for AR/VR interface
Camera *camera = camera_owner.getornull(p_camera);
ERR_FAIL_COND(!camera);
/* SETUP CAMERA, we are ignoring type and FOV here */
float aspect = p_viewport_size.width / (float)p_viewport_size.height;
CameraMatrix camera_matrix = p_interface->get_projection_for_eye(p_eye, aspect, camera->znear, camera->zfar);
// We also ignore our camera position, it will have been positioned with a slightly old tracking position.
// Instead we take our origin point and have our ar/vr interface add fresh tracking data! Whoohoo!
Transform world_origin = ARVRServer::get_singleton()->get_world_origin();
Transform cam_transform = p_interface->get_transform_for_eye(p_eye, world_origin);
// For stereo render we only prepare for our left eye and then reuse the outcome for our right eye
if (p_eye == ARVRInterface::EYE_LEFT) {
///@TODO possibly move responsibility for this into our ARVRServer or ARVRInterface?
// Center our transform, we assume basis is equal.
Transform mono_transform = cam_transform;
Transform right_transform = p_interface->get_transform_for_eye(ARVRInterface::EYE_RIGHT, world_origin);
mono_transform.origin += right_transform.origin;
mono_transform.origin *= 0.5;
// We need to combine our projection frustums for culling.
// Ideally we should use our clipping planes for this and combine them,
// however our shadow map logic uses our projection matrix.
// Note: as our left and right frustums should be mirrored, we don't need our right projection matrix.
// - get some base values we need
float eye_dist = (mono_transform.origin - cam_transform.origin).length();
float z_near = camera_matrix.get_z_near(); // get our near plane
float z_far = camera_matrix.get_z_far(); // get our far plane
float width = (2.0 * z_near) / camera_matrix.matrix[0][0];
float x_shift = width * camera_matrix.matrix[2][0];
float height = (2.0 * z_near) / camera_matrix.matrix[1][1];
float y_shift = height * camera_matrix.matrix[2][1];
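// For an off-center frustum matrix M: M[0][0] = 2*z_near/(right-left) and M[2][0] = (right+left)/(right-left),
// so width = right-left = 2*z_near/M[0][0] and x_shift = width*M[2][0] = right+left, the horizontal
// off-center offset of the near plane; height and y_shift follow the same pattern from M[1][1] and M[2][1].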
// printf("Eye_dist = %f, Near = %f, Far = %f, Width = %f, Shift = %f\n", eye_dist, z_near, z_far, width, x_shift);
// - calculate our near plane size (horizontal only, right_near is mirrored)
float left_near = -eye_dist - ((width - x_shift) * 0.5);
// - calculate our far plane size (horizontal only, right_far is mirrored)
float left_far = -eye_dist - (z_far * (width - x_shift) * 0.5 / z_near);
float left_far_right_eye = eye_dist - (z_far * (width + x_shift) * 0.5 / z_near);
if (left_far > left_far_right_eye) {
// on displays smaller than double our iod, the right eye's far frustum can overtake the left eye's.
left_far = left_far_right_eye;
}
// - figure out required z-shift
float slope = (left_far - left_near) / (z_far - z_near);
float z_shift = (left_near / slope) - z_near;
// - figure out new vertical near plane size (this will be slightly oversized thanks to our z-shift)
float top_near = (height - y_shift) * 0.5;
top_near += (top_near / z_near) * z_shift;
float bottom_near = -(height + y_shift) * 0.5;
bottom_near += (bottom_near / z_near) * z_shift;
// printf("Left_near = %f, Left_far = %f, Top_near = %f, Bottom_near = %f, Z_shift = %f\n", left_near, left_far, top_near, bottom_near, z_shift);
// - generate our frustum
CameraMatrix combined_matrix;
combined_matrix.set_frustum(left_near, -left_near, bottom_near, top_near, z_near + z_shift, z_far + z_shift);
// and finally move our camera back
Transform apply_z_shift;
apply_z_shift.origin = Vector3(0.0, 0.0, z_shift); // z negative is forward so this moves it backwards
mono_transform *= apply_z_shift;
// now prepare our scene with our adjusted transform projection matrix
_prepare_scene(mono_transform, combined_matrix, false, camera->env, camera->visible_layers, p_scenario, p_shadow_atlas, RID(), camera->previous_room_id_hint);
} else if (p_eye == ARVRInterface::EYE_MONO) {
// For mono render, prepare as per usual
_prepare_scene(cam_transform, camera_matrix, false, camera->env, camera->visible_layers, p_scenario, p_shadow_atlas, RID(), camera->previous_room_id_hint);
}
// And render our scene...
_render_scene(cam_transform, camera_matrix, p_eye, false, camera->env, p_scenario, p_shadow_atlas, RID(), -1);
};
void VisualServerScene::_prepare_scene(const Transform p_cam_transform, const CameraMatrix &p_cam_projection, bool p_cam_orthogonal, RID p_force_environment, uint32_t p_visible_layers, RID p_scenario, RID p_shadow_atlas, RID p_reflection_probe, int32_t &r_previous_room_id_hint) {
// Note, in stereo rendering:
// - p_cam_transform will be a transform in the middle of our two eyes

View File

@@ -41,7 +41,6 @@
#include "core/safe_refcount.h"
#include "core/self_list.h"
#include "portals/portal_renderer.h"
#include "servers/arvr/arvr_interface.h"
class VisualServerScene {
public:
@@ -711,7 +710,6 @@ public:
void render_empty_scene(RID p_scenario, RID p_shadow_atlas);
void render_camera(RID p_camera, RID p_scenario, Size2 p_viewport_size, RID p_shadow_atlas);
void render_camera(Ref<ARVRInterface> &p_interface, ARVRInterface::Eyes p_eye, RID p_camera, RID p_scenario, Size2 p_viewport_size, RID p_shadow_atlas);
void update_dirty_instances();
// interpolation

View File

@@ -64,20 +64,11 @@ static Transform2D _canvas_get_transform(VisualServerViewport::Viewport *p_viewp
return xf;
}
void VisualServerViewport::_draw_3d(Viewport *p_viewport, ARVRInterface::Eyes p_eye) {
Ref<ARVRInterface> arvr_interface;
if (ARVRServer::get_singleton() != nullptr) {
arvr_interface = ARVRServer::get_singleton()->get_primary_interface();
}
if (p_viewport->use_arvr && arvr_interface.is_valid()) {
VSG::scene->render_camera(arvr_interface, p_eye, p_viewport->camera, p_viewport->scenario, p_viewport->size, p_viewport->shadow_atlas);
} else {
VSG::scene->render_camera(p_viewport->camera, p_viewport->scenario, p_viewport->size, p_viewport->shadow_atlas);
}
void VisualServerViewport::_draw_3d(Viewport *p_viewport) {
VSG::scene->render_camera(p_viewport->camera, p_viewport->scenario, p_viewport->size, p_viewport->shadow_atlas);
}
void VisualServerViewport::_draw_viewport(Viewport *p_viewport, ARVRInterface::Eyes p_eye) {
void VisualServerViewport::_draw_viewport(Viewport *p_viewport) {
/* Camera should always be BEFORE any other 3D */
bool scenario_draw_canvas_bg = false; //draw canvas, or some layer of it, as BG for 3D instead of in front
@ -103,7 +94,7 @@ void VisualServerViewport::_draw_viewport(Viewport *p_viewport, ARVRInterface::E
}
if (!scenario_draw_canvas_bg && can_draw_3d) {
_draw_3d(p_viewport, p_eye);
_draw_3d(p_viewport);
}
if (!p_viewport->hide_canvas) {
@ -212,7 +203,7 @@ void VisualServerViewport::_draw_viewport(Viewport *p_viewport, ARVRInterface::E
if (!can_draw_3d) {
VSG::scene->render_empty_scene(p_viewport->scenario, p_viewport->shadow_atlas);
} else {
_draw_3d(p_viewport, p_eye);
_draw_3d(p_viewport);
}
scenario_draw_canvas_bg = false;
}
@ -241,7 +232,7 @@ void VisualServerViewport::_draw_viewport(Viewport *p_viewport, ARVRInterface::E
if (!can_draw_3d) {
VSG::scene->render_empty_scene(p_viewport->scenario, p_viewport->shadow_atlas);
} else {
_draw_3d(p_viewport, p_eye);
_draw_3d(p_viewport);
}
scenario_draw_canvas_bg = false;
@ -252,7 +243,7 @@ void VisualServerViewport::_draw_viewport(Viewport *p_viewport, ARVRInterface::E
if (!can_draw_3d) {
VSG::scene->render_empty_scene(p_viewport->scenario, p_viewport->shadow_atlas);
} else {
_draw_3d(p_viewport, p_eye);
_draw_3d(p_viewport);
}
}
@ -261,16 +252,6 @@ void VisualServerViewport::_draw_viewport(Viewport *p_viewport, ARVRInterface::E
}
void VisualServerViewport::draw_viewports() {
// get our arvr interface in case we need it
Ref<ARVRInterface> arvr_interface;
if (ARVRServer::get_singleton() != nullptr) {
arvr_interface = ARVRServer::get_singleton()->get_primary_interface();
// process all our active interfaces
ARVRServer::get_singleton()->_process();
}
if (Engine::get_singleton()->is_editor_hint()) {
clear_color = GLOBAL_GET("rendering/environment/default_clear_color");
}
@ -288,17 +269,6 @@ void VisualServerViewport::draw_viewports() {
ERR_CONTINUE(!vp->render_target.is_valid());
if (vp->use_arvr) {
// In ARVR mode it is our interface that controls our size
if (arvr_interface.is_valid()) {
// override our size, make sure it matches our required size
vp->size = arvr_interface->get_render_targetsize();
} else {
// reset this; we can't render the output without a valid interface (this is likely the case when we're in the editor)
vp->size = Vector2(0, 0);
}
}
bool visible = vp->viewport_to_screen_rect != Rect2() || vp->update_mode == VS::VIEWPORT_UPDATE_ALWAYS || vp->update_mode == VS::VIEWPORT_UPDATE_ONCE || (vp->update_mode == VS::VIEWPORT_UPDATE_WHEN_VISIBLE && VSG::storage->render_target_was_used(vp->render_target));
visible = visible && vp->size.x > 1 && vp->size.y > 1;
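
The one-line visibility test above packs four conditions together; restated as a predicate it is easier to audit. The helper below is illustrative only and is not part of the commit, with the enum comparisons reduced to booleans so the sketch stays self-contained:

// Illustrative restatement of the visibility test above.
static bool viewport_should_draw(bool attached_to_screen, bool update_always, bool update_once,
		bool update_when_visible, bool render_target_was_used, int width, int height) {
	bool visible = attached_to_screen || update_always || update_once ||
			(update_when_visible && render_target_was_used);
	// degenerate sizes are skipped regardless of update mode
	return visible && width > 1 && height > 1;
}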
@ -308,61 +278,29 @@ void VisualServerViewport::draw_viewports() {
VSG::storage->render_target_clear_used(vp->render_target);
if (vp->use_arvr && arvr_interface.is_valid()) {
VSG::storage->render_target_set_size(vp->render_target, vp->size.x, vp->size.y);
VSG::storage->render_target_set_external_texture(vp->render_target, 0, 0);
VSG::rasterizer->set_current_render_target(vp->render_target);
// render mono or left eye first
ARVRInterface::Eyes leftOrMono = arvr_interface->is_stereo() ? ARVRInterface::EYE_LEFT : ARVRInterface::EYE_MONO;
VSG::scene_render->set_debug_draw_mode(vp->debug_draw);
VSG::storage->render_info_begin_capture();
// check for an external texture destination for our left eye/mono
VSG::storage->render_target_set_external_texture(vp->render_target, arvr_interface->get_external_texture_for_eye(leftOrMono), arvr_interface->get_external_depth_for_eye(leftOrMono));
// render standard mono camera
_draw_viewport(vp);
// set our render target as current
VSG::rasterizer->set_current_render_target(vp->render_target);
VSG::storage->render_info_end_capture();
vp->render_info[VS::VIEWPORT_RENDER_INFO_OBJECTS_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_OBJECTS_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_VERTICES_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_VERTICES_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_MATERIAL_CHANGES_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_MATERIAL_CHANGES_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_SHADER_CHANGES_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_SHADER_CHANGES_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_SURFACE_CHANGES_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_SURFACE_CHANGES_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_DRAW_CALLS_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_DRAW_CALLS_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_2D_ITEMS_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_2D_ITEMS_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_2D_DRAW_CALLS_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_2D_DRAW_CALLS_IN_FRAME);
// and draw left eye/mono
_draw_viewport(vp, leftOrMono);
arvr_interface->commit_for_eye(leftOrMono, vp->render_target, vp->viewport_to_screen_rect);
// render right eye
if (leftOrMono == ARVRInterface::EYE_LEFT) {
// check for an external texture destination for our right eye
VSG::storage->render_target_set_external_texture(vp->render_target, arvr_interface->get_external_texture_for_eye(ARVRInterface::EYE_RIGHT), arvr_interface->get_external_depth_for_eye(ARVRInterface::EYE_RIGHT));
// commit for eye may have changed the render target
VSG::rasterizer->set_current_render_target(vp->render_target);
_draw_viewport(vp, ARVRInterface::EYE_RIGHT);
arvr_interface->commit_for_eye(ARVRInterface::EYE_RIGHT, vp->render_target, vp->viewport_to_screen_rect);
}
// and for our frame timing, mark when we've finished committing our eyes
ARVRServer::get_singleton()->_mark_commit();
} else {
VSG::storage->render_target_set_external_texture(vp->render_target, 0, 0);
VSG::rasterizer->set_current_render_target(vp->render_target);
VSG::scene_render->set_debug_draw_mode(vp->debug_draw);
VSG::storage->render_info_begin_capture();
// render standard mono camera
_draw_viewport(vp);
VSG::storage->render_info_end_capture();
vp->render_info[VS::VIEWPORT_RENDER_INFO_OBJECTS_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_OBJECTS_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_VERTICES_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_VERTICES_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_MATERIAL_CHANGES_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_MATERIAL_CHANGES_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_SHADER_CHANGES_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_SHADER_CHANGES_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_SURFACE_CHANGES_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_SURFACE_CHANGES_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_DRAW_CALLS_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_DRAW_CALLS_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_2D_ITEMS_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_2D_ITEMS_IN_FRAME);
vp->render_info[VS::VIEWPORT_RENDER_INFO_2D_DRAW_CALLS_IN_FRAME] = VSG::storage->get_captured_render_info(VS::INFO_2D_DRAW_CALLS_IN_FRAME);
if (vp->viewport_to_screen_rect != Rect2() && (!vp->viewport_render_direct_to_screen || !VSG::rasterizer->is_low_end())) {
//copy to screen if set as such
VSG::rasterizer->set_current_render_target(RID());
VSG::rasterizer->blit_render_target_to_screen(vp->render_target, vp->viewport_to_screen_rect, vp->viewport_to_screen);
}
if (vp->viewport_to_screen_rect != Rect2() && (!vp->viewport_render_direct_to_screen || !VSG::rasterizer->is_low_end())) {
//copy to screen if set as such
VSG::rasterizer->set_current_render_target(RID());
VSG::rasterizer->blit_render_target_to_screen(vp->render_target, vp->viewport_to_screen_rect, vp->viewport_to_screen);
}
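
The render-info bracket kept in the mono path is easy to break once extra branches or early exits creep in, which is exactly what the removed ARVR branch had to duplicate. As an aside, a hypothetical RAII wrapper would make the pairing implicit; this uses nothing beyond the two capture calls already visible above and is not part of this commit or the engine:

// Hypothetical helper: scopes the render_info_begin_capture()/render_info_end_capture()
// pair so every exit path ends the capture.
struct ScopedRenderInfoCapture {
	ScopedRenderInfoCapture() { VSG::storage->render_info_begin_capture(); }
	~ScopedRenderInfoCapture() { VSG::storage->render_info_end_capture(); }
};
// Usage sketch:
//   ScopedRenderInfoCapture capture;
//   _draw_viewport(vp); // counters are read back with get_captured_render_info() afterwards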
if (vp->update_mode == VS::VIEWPORT_UPDATE_ONCE) {
@ -387,21 +325,6 @@ RID VisualServerViewport::viewport_create() {
return rid;
}
void VisualServerViewport::viewport_set_use_arvr(RID p_viewport, bool p_use_arvr) {
Viewport *viewport = viewport_owner.getornull(p_viewport);
ERR_FAIL_COND(!viewport);
if (viewport->use_arvr == p_use_arvr) {
return;
}
viewport->use_arvr = p_use_arvr;
if (!viewport->use_arvr && viewport->size.width > 0 && viewport->size.height > 0) {
// No longer controlled by our XR server, make sure we reset it
VSG::storage->render_target_set_size(viewport->render_target, viewport->size.width, viewport->size.height);
}
}
void VisualServerViewport::viewport_set_size(RID p_viewport, int p_width, int p_height) {
ERR_FAIL_COND(p_width < 0 && p_height < 0);
@ -409,10 +332,8 @@ void VisualServerViewport::viewport_set_size(RID p_viewport, int p_width, int p_
ERR_FAIL_COND(!viewport);
viewport->size = Size2(p_width, p_height);
if (!viewport->use_arvr) {
// Only update if this is not controlled by our XR server
VSG::storage->render_target_set_size(viewport->render_target, p_width, p_height);
}
VSG::storage->render_target_set_size(viewport->render_target, p_width, p_height);
}
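
With the use_arvr guard gone, viewport_set_size drives the render target unconditionally, so code that previously left sizing to the ARVR interface now has to size the viewport itself. A hedged example of a plain caller after this change (the helper name and the 1280x720 resolution are placeholders):

// Minimal caller sketch after this commit: the render target simply follows the viewport size.
static RID create_plain_viewport() {
	VisualServer *vs = VisualServer::get_singleton();
	RID viewport = vs->viewport_create();
	vs->viewport_set_size(viewport, 1280, 720); // placeholder resolution
	vs->viewport_set_active(viewport, true);
	return viewport;
}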
void VisualServerViewport::viewport_set_active(RID p_viewport, bool p_active) {

View File

@ -33,7 +33,6 @@
#include "core/self_list.h"
#include "rasterizer.h"
#include "servers/arvr/arvr_interface.h"
#include "servers/visual_server.h"
class VisualServerViewport {
@ -45,8 +44,6 @@ public:
RID self;
RID parent;
bool use_arvr; /* use arvr interface to override camera positioning and projection matrices and control output */
Size2i size;
RID camera;
RID scenario;
@ -121,7 +118,6 @@ public:
for (int i = 0; i < VS::VIEWPORT_RENDER_INFO_MAX; i++) {
render_info[i] = 0;
}
use_arvr = false;
}
};
@ -143,14 +139,12 @@ public:
private:
Color clear_color;
void _draw_3d(Viewport *p_viewport, ARVRInterface::Eyes p_eye);
void _draw_viewport(Viewport *p_viewport, ARVRInterface::Eyes p_eye = ARVRInterface::EYE_MONO);
void _draw_3d(Viewport *p_viewport);
void _draw_viewport(Viewport *p_viewport);
public:
RID viewport_create();
void viewport_set_use_arvr(RID p_viewport, bool p_use_arvr);
void viewport_set_size(RID p_viewport, int p_width, int p_height);
void viewport_attach_to_screen(RID p_viewport, const Rect2 &p_rect = Rect2(), int p_screen = 0);

View File

@ -302,8 +302,6 @@ public:
FUNCRID(viewport)
FUNC2(viewport_set_use_arvr, RID, bool)
FUNC3(viewport_set_size, RID, int, int)
FUNC2(viewport_set_active, RID, bool)
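
FUNCRID/FUNC2/FUNC3 here are the wrap_mt marshalling macros, so dropping FUNC2(viewport_set_use_arvr, RID, bool) also drops the cross-thread forwarder for the method. Roughly, each FUNC2 expands to a forwarder of the following shape; this is a simplified illustration of the pattern, not the actual macro body:

// Simplified illustration of what the removed FUNC2(viewport_set_use_arvr, RID, bool) forwarder did:
// queue the call when invoked from another thread, otherwise call the wrapped server directly.
void viewport_set_use_arvr(RID p_viewport, bool p_use) {
	if (Thread::get_caller_id() != server_thread) {
		command_queue.push(visual_server, &VisualServer::viewport_set_use_arvr, p_viewport, p_use);
	} else {
		visual_server->viewport_set_use_arvr(p_viewport, p_use);
	}
}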

View File

@ -2000,7 +2000,6 @@ void VisualServer::_bind_methods() {
ClassDB::bind_method(D_METHOD("camera_set_use_vertical_aspect", "camera", "enable"), &VisualServer::camera_set_use_vertical_aspect);
ClassDB::bind_method(D_METHOD("viewport_create"), &VisualServer::viewport_create);
ClassDB::bind_method(D_METHOD("viewport_set_use_arvr", "viewport", "use_arvr"), &VisualServer::viewport_set_use_arvr);
ClassDB::bind_method(D_METHOD("viewport_set_size", "viewport", "width", "height"), &VisualServer::viewport_set_size);
ClassDB::bind_method(D_METHOD("viewport_set_active", "viewport", "active"), &VisualServer::viewport_set_active);
ClassDB::bind_method(D_METHOD("viewport_set_parent_viewport", "viewport", "parent_viewport"), &VisualServer::viewport_set_parent_viewport);

View File

@ -543,7 +543,6 @@ public:
virtual RID viewport_create() = 0;
virtual void viewport_set_use_arvr(RID p_viewport, bool p_use_arvr) = 0;
virtual void viewport_set_size(RID p_viewport, int p_width, int p_height) = 0;
virtual void viewport_set_active(RID p_viewport, bool p_active) = 0;
virtual void viewport_set_parent_viewport(RID p_viewport, RID p_parent_viewport) = 0;