/*
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 */

/** \file
 * \ingroup wm
 *
 * \name Window-Manager XR Drawing
 *
 * Implements Blender specific drawing functionality for use with the Ghost-XR API.
 */

#include <string.h>

#include "BKE_context.h"

#include "BLI_listbase.h"
#include "BLI_math.h"

#include "ED_view3d_offscreen.h"

#include "GHOST_C-api.h"

#include "GPU_batch_presets.h"
#include "GPU_immediate.h"
#include "GPU_matrix.h"
#include "GPU_viewport.h"

#include "WM_api.h"

#include "wm_surface.h"
#include "wm_xr_intern.h"

void wm_xr_pose_to_mat(const GHOST_XrPose *pose, float r_mat[4][4])
{
  quat_to_mat4(r_mat, pose->orientation_quat);
  copy_v3_v3(r_mat[3], pose->position);
}

void wm_xr_pose_scale_to_mat(const GHOST_XrPose *pose, float scale, float r_mat[4][4])
{
  wm_xr_pose_to_mat(pose, r_mat);

  BLI_assert(scale > 0.0f);
  mul_v3_fl(r_mat[0], scale);
  mul_v3_fl(r_mat[1], scale);
  mul_v3_fl(r_mat[2], scale);
}

void wm_xr_pose_to_imat(const GHOST_XrPose *pose, float r_imat[4][4])
{
  float iquat[4];
  invert_qt_qt_normalized(iquat, pose->orientation_quat);
  quat_to_mat4(r_imat, iquat);
  translate_m4(r_imat, -pose->position[0], -pose->position[1], -pose->position[2]);
}

void wm_xr_pose_scale_to_imat(const GHOST_XrPose *pose, float scale, float r_imat[4][4])
{
  float iquat[4];
  invert_qt_qt_normalized(iquat, pose->orientation_quat);
  quat_to_mat4(r_imat, iquat);

  BLI_assert(scale > 0.0f);
  scale = 1.0f / scale;
  mul_v3_fl(r_imat[0], scale);
  mul_v3_fl(r_imat[1], scale);
  mul_v3_fl(r_imat[2], scale);

  translate_m4(r_imat, -pose->position[0], -pose->position[1], -pose->position[2]);
}

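/* Illustration (not part of the original file): wm_xr_pose_scale_to_mat() and
 * wm_xr_pose_scale_to_imat() build a transform and its exact inverse, so composing the two
 * yields the identity matrix. A minimal, hypothetical sanity check sketching that relationship
 * is shown below; it is disabled and the helper name is made up for illustration. */
#if 0
static void wm_xr_pose_scale_mat_imat_check(const GHOST_XrPose *pose, float scale)
{
  float mat[4][4], imat[4][4], result[4][4], unit[4][4];
  wm_xr_pose_scale_to_mat(pose, scale, mat);
  wm_xr_pose_scale_to_imat(pose, scale, imat);
  mul_m4_m4m4(result, mat, imat);
  unit_m4(unit);
  BLI_assert(compare_m4m4(result, unit, 1e-5f));
}
#endif
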
static void wm_xr_draw_matrices_create(const wmXrDrawData *draw_data,
                                       const GHOST_XrDrawViewInfo *draw_view,
                                       const XrSessionSettings *session_settings,
                                       const wmXrSessionState *session_state,
                                       float r_viewmat[4][4],
                                       float r_projmat[4][4])
{
  GHOST_XrPose eye_pose;
  float eye_inv[4][4], base_inv[4][4], nav_inv[4][4], m[4][4];

  /* Calculate inverse eye matrix. */
  copy_qt_qt(eye_pose.orientation_quat, draw_view->eye_pose.orientation_quat);
  copy_v3_v3(eye_pose.position, draw_view->eye_pose.position);
  if ((session_settings->flag & XR_SESSION_USE_POSITION_TRACKING) == 0) {
    sub_v3_v3(eye_pose.position, draw_view->local_pose.position);
  }
  if ((session_settings->flag & XR_SESSION_USE_ABSOLUTE_TRACKING) == 0) {
    sub_v3_v3(eye_pose.position, draw_data->eye_position_ofs);
  }

  wm_xr_pose_to_imat(&eye_pose, eye_inv);

  /* Apply base pose and navigation. */
  wm_xr_pose_scale_to_imat(&draw_data->base_pose, draw_data->base_scale, base_inv);
  wm_xr_pose_scale_to_imat(&session_state->nav_pose_prev, session_state->nav_scale_prev, nav_inv);
  mul_m4_m4m4(m, eye_inv, base_inv);
  mul_m4_m4m4(r_viewmat, m, nav_inv);

  perspective_m4_fov(r_projmat,
                     draw_view->fov.angle_left,
                     draw_view->fov.angle_right,
                     draw_view->fov.angle_up,
                     draw_view->fov.angle_down,
                     session_settings->clip_start,
                     session_settings->clip_end);
}

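/* Illustration (not part of the original file): the view matrix built above is
 * eye_inv * base_inv * nav_inv, i.e. a world-space point is brought into eye space by undoing
 * navigation first, then the base pose, then the per-eye pose. The navigation transform
 * (nav_pose_prev / nav_scale_prev) is what lets the viewer move and scale through the viewport
 * without physically walking. A hypothetical helper using the resulting matrix might look like
 * this disabled sketch. */
#if 0
static void wm_xr_example_world_to_eye(const float viewmat[4][4],
                                       const float co_world[3],
                                       float r_co_eye[3])
{
  /* Applies rotation, scale and translation of the combined view matrix. */
  mul_v3_m4v3(r_co_eye, viewmat, co_world);
}
#endif
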
static void wm_xr_draw_viewport_buffers_to_active_framebuffer(
    const wmXrRuntimeData *runtime_data,
    const wmXrSurfaceData *surface_data,
    const GHOST_XrDrawViewInfo *draw_view)
{
  const wmXrViewportPair *vp = BLI_findlink(&surface_data->viewports, draw_view->view_idx);
  BLI_assert(vp && vp->viewport);

  const bool is_upside_down = GHOST_XrSessionNeedsUpsideDownDrawing(runtime_data->context);
  rcti rect = {.xmin = 0, .ymin = 0, .xmax = draw_view->width - 1, .ymax = draw_view->height - 1};

  wmViewport(&rect);

  /* For upside down contexts, draw with inverted y-values. */
  if (is_upside_down) {
    SWAP(int, rect.ymin, rect.ymax);
  }
  GPU_viewport_draw_to_screen_ex(vp->viewport, 0, &rect, draw_view->expects_srgb_buffer, true);
}

/**
 * \brief Draw a viewport for a single eye.
 *
 * This is the main viewport drawing function for VR sessions. It's assigned to Ghost-XR as a
 * callback (see GHOST_XrDrawViewFunc()) and executed for each view (read: eye).
 */
void wm_xr_draw_view(const GHOST_XrDrawViewInfo *draw_view, void *customdata)
{
  wmXrDrawData *draw_data = customdata;
  wmXrData *xr_data = draw_data->xr_data;
  wmXrSurfaceData *surface_data = draw_data->surface_data;
  wmXrSessionState *session_state = &xr_data->runtime->session_state;
  XrSessionSettings *settings = &xr_data->session_settings;

  const int display_flags = V3D_OFSDRAW_OVERRIDE_SCENE_SETTINGS | settings->draw_flags;

  float viewmat[4][4], winmat[4][4];

  BLI_assert(WM_xr_session_is_ready(xr_data));

  wm_xr_session_draw_data_update(session_state, settings, draw_view, draw_data);
  wm_xr_draw_matrices_create(draw_data, draw_view, settings, session_state, viewmat, winmat);
  wm_xr_session_state_update(settings, draw_data, draw_view, session_state);

  if (!wm_xr_session_surface_offscreen_ensure(surface_data, draw_view)) {
    return;
  }

  const wmXrViewportPair *vp = BLI_findlink(&surface_data->viewports, draw_view->view_idx);
  BLI_assert(vp && vp->offscreen && vp->viewport);

  /* In case a framebuffer is still bound from drawing the last eye. */
  GPU_framebuffer_restore();
  /* Some systems have drawing glitches without this. */
  GPU_clear_depth(1.0f);

  /* Draws the view into the surface_data->viewport's frame-buffers. */
  ED_view3d_draw_offscreen_simple(draw_data->depsgraph,
                                  draw_data->scene,
                                  &settings->shading,
                                  (eDrawType)settings->shading.type,
                                  draw_view->width,
                                  draw_view->height,
                                  display_flags,
                                  viewmat,
                                  winmat,
                                  settings->clip_start,
                                  settings->clip_end,
                                  true,
                                  false,
                                  true,
                                  NULL,
                                  false,
                                  vp->offscreen,
                                  vp->viewport);

  /* The draw-manager uses both GPUOffscreen and GPUViewport to manage frame and texture buffers.
   * A call to GPU_viewport_draw_to_screen() is still needed to get the final result from the
   * viewport buffers composited together and potentially color managed for display on screen.
   * It needs a bound frame-buffer to draw into, for which we simply reuse the GPUOffscreen one.
   *
   * In a next step, Ghost-XR will use the currently bound frame-buffer to retrieve the image
   * to be submitted to the OpenXR swap-chain. So do not un-bind the off-screen yet! */
  GPU_offscreen_bind(vp->offscreen, false);

  wm_xr_draw_viewport_buffers_to_active_framebuffer(xr_data->runtime, surface_data, draw_view);
}

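/* Illustration (not part of the original file): wm_xr_draw_view() is not called directly. It is
 * handed to Ghost-XR as the per-view draw callback and invoked once per eye while the session
 * surface draws. The disabled sketch below only hints at that wiring; the real registration and
 * draw call live elsewhere (wm_xr.c / wm_xr_session.c), and the exact API usage there may
 * differ. */
#if 0
static void wm_xr_surface_draw_sketch(wmXrData *xr, wmXrDrawData *draw_data)
{
  /* Register the per-view callback (name taken from the doc-comment above). */
  GHOST_XrDrawViewFunc(xr->runtime->context, wm_xr_draw_view);
  /* Let Ghost-XR invoke the callback once per view for the current frame. */
  GHOST_XrSessionDrawViews(xr->runtime->context, draw_data);
}
#endif
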
static GPUBatch *wm_xr_controller_model_batch_create(GHOST_XrContextHandle xr_context,
                                                     const char *subaction_path)
{
  GHOST_XrControllerModelData model_data;

  if (!GHOST_XrGetControllerModelData(xr_context, subaction_path, &model_data) ||
      model_data.count_vertices < 1) {
    return NULL;
  }

  GPUVertFormat format = {0};
  GPU_vertformat_attr_add(&format, "pos", GPU_COMP_F32, 3, GPU_FETCH_FLOAT);
  GPU_vertformat_attr_add(&format, "nor", GPU_COMP_F32, 3, GPU_FETCH_FLOAT);

  GPUVertBuf *vbo = GPU_vertbuf_create_with_format(&format);
  GPU_vertbuf_data_alloc(vbo, model_data.count_vertices);
  void *vbo_data = GPU_vertbuf_get_data(vbo);
  memcpy(
      vbo_data, model_data.vertices, model_data.count_vertices * sizeof(model_data.vertices[0]));

  GPUIndexBuf *ibo = NULL;
  if (model_data.count_indices > 0 && ((model_data.count_indices % 3) == 0)) {
    GPUIndexBufBuilder ibo_builder;
    const unsigned int prim_len = model_data.count_indices / 3;
    GPU_indexbuf_init(&ibo_builder, GPU_PRIM_TRIS, prim_len, model_data.count_vertices);
    for (unsigned int i = 0; i < prim_len; ++i) {
      const uint32_t *idx = &model_data.indices[i * 3];
      GPU_indexbuf_add_tri_verts(&ibo_builder, idx[0], idx[1], idx[2]);
    }
    ibo = GPU_indexbuf_build(&ibo_builder);
  }

  return GPU_batch_create_ex(GPU_PRIM_TRIS, vbo, ibo, GPU_BATCH_OWNS_VBO | GPU_BATCH_OWNS_INDEX);
}

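/* Illustration (not part of the original file): the batch returned above is created with
 * GPU_BATCH_OWNS_VBO | GPU_BATCH_OWNS_INDEX, so discarding it also frees its vertex and index
 * buffers. The cached controller->model is assumed to be released elsewhere (e.g. on session
 * end), roughly like this disabled sketch with a made-up helper name. */
#if 0
static void wm_xr_controller_model_free_sketch(wmXrController *controller)
{
  if (controller->model) {
    GPU_batch_discard(controller->model);
    controller->model = NULL;
  }
}
#endif
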
static void wm_xr_controller_model_draw(const XrSessionSettings *settings,
                                        GHOST_XrContextHandle xr_context,
                                        wmXrSessionState *state)
{
  GHOST_XrControllerModelData model_data;

  float color[4];
  switch (settings->controller_draw_style) {
    case XR_CONTROLLER_DRAW_DARK:
    case XR_CONTROLLER_DRAW_DARK_RAY:
      color[0] = color[1] = color[2] = 0.0f, color[3] = 0.4f;
      break;
    case XR_CONTROLLER_DRAW_LIGHT:
    case XR_CONTROLLER_DRAW_LIGHT_RAY:
      color[0] = 0.422f, color[1] = 0.438f, color[2] = 0.446f, color[3] = 0.4f;
      break;
  }

  GPU_depth_test(GPU_DEPTH_NONE);
  GPU_blend(GPU_BLEND_ALPHA);

  LISTBASE_FOREACH (wmXrController *, controller, &state->controllers) {
    GPUBatch *model = controller->model;
    if (!model) {
      model = controller->model = wm_xr_controller_model_batch_create(xr_context,
                                                                      controller->subaction_path);
    }

    if (model &&
        GHOST_XrGetControllerModelData(xr_context, controller->subaction_path, &model_data) &&
        model_data.count_components > 0) {
      GPU_batch_program_set_builtin(model, GPU_SHADER_3D_UNIFORM_COLOR);
      GPU_batch_uniform_4fv(model, "color", color);

      GPU_matrix_push();
      GPU_matrix_mul(controller->grip_mat);
      for (unsigned int component_idx = 0; component_idx < model_data.count_components;
           ++component_idx) {
        const GHOST_XrControllerModelComponent *component = &model_data.components[component_idx];
        GPU_matrix_push();
        GPU_matrix_mul(component->transform);
        GPU_batch_draw_range(model,
                             model->elem ? component->index_offset : component->vertex_offset,
                             model->elem ? component->index_count : component->vertex_count);
        GPU_matrix_pop();
      }
      GPU_matrix_pop();
    }
    else {
      /* Fallback. */
      const float scale = 0.05f;
      GPUBatch *sphere = GPU_batch_preset_sphere(2);
      GPU_batch_program_set_builtin(sphere, GPU_SHADER_3D_UNIFORM_COLOR);
      GPU_batch_uniform_4fv(sphere, "color", color);

      GPU_matrix_push();
      GPU_matrix_mul(controller->grip_mat);
      GPU_matrix_scale_1f(scale);
      GPU_batch_draw(sphere);
      GPU_matrix_pop();
    }
  }
}

static void wm_xr_controller_aim_draw(const XrSessionSettings *settings, wmXrSessionState *state)
{
  bool draw_ray;
  switch (settings->controller_draw_style) {
    case XR_CONTROLLER_DRAW_DARK:
    case XR_CONTROLLER_DRAW_LIGHT:
      draw_ray = false;
      break;
    case XR_CONTROLLER_DRAW_DARK_RAY:
    case XR_CONTROLLER_DRAW_LIGHT_RAY:
      draw_ray = true;
      break;
  }

  GPUVertFormat *format = immVertexFormat();
  uint pos = GPU_vertformat_attr_add(format, "pos", GPU_COMP_F32, 3, GPU_FETCH_FLOAT);
  uint col = GPU_vertformat_attr_add(format, "color", GPU_COMP_U8, 4, GPU_FETCH_INT_TO_FLOAT_UNIT);
  immBindBuiltinProgram(GPU_SHADER_3D_POLYLINE_FLAT_COLOR);

  float viewport[4];
  GPU_viewport_size_get_f(viewport);
  immUniform2fv("viewportSize", &viewport[2]);

  immUniform1f("lineWidth", 3.0f * U.pixelsize);

  if (draw_ray) {
    const uchar color[4] = {89, 89, 255, 127};
    const float scale = settings->clip_end;
    float ray[3];

    GPU_depth_test(GPU_DEPTH_LESS_EQUAL);
    GPU_blend(GPU_BLEND_ALPHA);

    immBegin(GPU_PRIM_LINES, (uint)BLI_listbase_count(&state->controllers) * 2);

    LISTBASE_FOREACH (wmXrController *, controller, &state->controllers) {
      const float(*mat)[4] = controller->aim_mat;
      madd_v3_v3v3fl(ray, mat[3], mat[2], -scale);

      immAttrSkip(col);
      immVertex3fv(pos, mat[3]);
      immAttr4ubv(col, color);
      immVertex3fv(pos, ray);
    }

    immEnd();
  }
  else {
    const uchar r[4] = {255, 51, 82, 255};
    const uchar g[4] = {139, 220, 0, 255};
    const uchar b[4] = {40, 144, 255, 255};
    const float scale = 0.01f;
    float x_axis[3], y_axis[3], z_axis[3];

    GPU_depth_test(GPU_DEPTH_NONE);
    GPU_blend(GPU_BLEND_NONE);

    immBegin(GPU_PRIM_LINES, (uint)BLI_listbase_count(&state->controllers) * 6);

    LISTBASE_FOREACH (wmXrController *, controller, &state->controllers) {
      const float(*mat)[4] = controller->aim_mat;
      madd_v3_v3v3fl(x_axis, mat[3], mat[0], scale);
      madd_v3_v3v3fl(y_axis, mat[3], mat[1], scale);
      madd_v3_v3v3fl(z_axis, mat[3], mat[2], scale);

      immAttrSkip(col);
      immVertex3fv(pos, mat[3]);
      immAttr4ubv(col, r);
      immVertex3fv(pos, x_axis);

      immAttrSkip(col);
      immVertex3fv(pos, mat[3]);
      immAttr4ubv(col, g);
      immVertex3fv(pos, y_axis);

      immAttrSkip(col);
      immVertex3fv(pos, mat[3]);
      immAttr4ubv(col, b);
      immVertex3fv(pos, z_axis);
    }

    immEnd();
  }

  immUnbindProgram();
}

void wm_xr_draw_controllers(const bContext *UNUSED(C), ARegion *UNUSED(region), void *customdata)
{
  wmXrData *xr = customdata;
  const XrSessionSettings *settings = &xr->session_settings;
  GHOST_XrContextHandle xr_context = xr->runtime->context;
  wmXrSessionState *state = &xr->runtime->session_state;

  wm_xr_controller_model_draw(settings, xr_context, state);
  wm_xr_controller_aim_draw(settings, state);
}