/*
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * The Original Code is Copyright (C) 2016 by Mike Erwin.
 * All rights reserved.
 */

/** \file
 * \ingroup gpu
 *
 * Manage GL vertex array IDs in a thread-safe way.
 * Use these instead of glGenBuffers & its friends:
 * - alloc must be called from a thread that is bound to the context
 *   that will be used for drawing with this VAO.
 * - free can be called from any thread.
 */
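
/* Usage sketch (illustrative only; `ghost_window` stands for whatever GHOST
 * window handle the caller owns): allocation happens on the thread bound to
 * the drawing context, while the free may be issued from any thread because
 * it is routed through the owning context.
 *
 *   GPUContext *ctx = GPU_context_create(ghost_window); // drawing thread
 *   GLuint vao = GPU_vao_alloc();                       // same thread, ctx active
 *   // ... draw ...
 *   GPU_vao_free(vao, ctx);                             // any thread
 *   GPU_context_discard(ctx);                           // drawing thread, ctx active
 */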

/* TODO: Create CMake option. */
#define WITH_OPENGL_BACKEND 1

#include "BLI_assert.h"
#include "BLI_utildefines.h"

#include "GPU_context.h"
#include "GPU_framebuffer.h"

#include "GHOST_C-api.h"

#include "gpu_backend.hh"
#include "gpu_batch_private.h"
#include "gpu_context_private.hh"
#include "gpu_matrix_private.h"

#ifdef WITH_OPENGL_BACKEND
#  include "gl_backend.hh"
#  include "gl_context.hh"
#endif

#include <mutex>
#include <vector>

using namespace blender::gpu;

static thread_local GPUContext *active_ctx = NULL;

/* -------------------------------------------------------------------- */
/** \name GPUContext methods
 * \{ */

GPUContext::GPUContext()
{
  thread_ = pthread_self();
  is_active_ = false;
  matrix_state = GPU_matrix_state_create();
}

GPUContext::~GPUContext()
{
  GPU_matrix_state_discard(matrix_state);
}

bool GPUContext::is_active_on_thread(void)
{
  return (this == active_ctx) && pthread_equal(pthread_self(), thread_);
}

/** \} */

/* -------------------------------------------------------------------- */

GPUContext *GPU_context_create(void *ghost_window)
{
  if (gpu_backend_get() == NULL) {
    /* TODO: move where it makes sense. */
    GPU_backend_init(GPU_BACKEND_OPENGL);
  }

  GPUContext *ctx = gpu_backend_get()->context_alloc(ghost_window);

  GPU_context_active_set(ctx);
  return ctx;
}

/* To be called after GPU_context_active_set(ctx_to_destroy). */
void GPU_context_discard(GPUContext *ctx)
{
  delete ctx;
  active_ctx = NULL;
}

/* ctx can be NULL */
void GPU_context_active_set(GPUContext *ctx)
{
  if (active_ctx) {
    active_ctx->deactivate();
  }

  active_ctx = ctx;

  if (ctx) {
    ctx->activate();
  }
}

GPUContext *GPU_context_active_get(void)
{
  return active_ctx;
}

GLuint GPU_vao_default(void)
{
  BLI_assert(active_ctx); /* need at least an active context */
  return static_cast<GLContext *>(active_ctx)->default_vao_;
}

GLuint GPU_framebuffer_default(void)
{
  BLI_assert(active_ctx); /* need at least an active context */
  return static_cast<GLContext *>(active_ctx)->default_framebuffer_;
}

GLuint GPU_vao_alloc(void)
{
  GLuint new_vao_id = 0;
  glGenVertexArrays(1, &new_vao_id);
  return new_vao_id;
}

GLuint GPU_fbo_alloc(void)
{
  GLuint new_fbo_id = 0;
  glGenFramebuffers(1, &new_fbo_id);
  return new_fbo_id;
}

GLuint GPU_buf_alloc(void)
{
  GLuint new_buffer_id = 0;
  glGenBuffers(1, &new_buffer_id);
  return new_buffer_id;
}

GLuint GPU_tex_alloc(void)
{
  GLuint new_texture_id = 0;
  glGenTextures(1, &new_texture_id);
  return new_texture_id;
}

void GPU_vao_free(GLuint vao_id, GPUContext *ctx)
{
  static_cast<GLContext *>(ctx)->vao_free(vao_id);
}

void GPU_fbo_free(GLuint fbo_id, GPUContext *ctx)
{
  static_cast<GLContext *>(ctx)->fbo_free(fbo_id);
}

void GPU_buf_free(GLuint buf_id)
{
  /* TODO: avoid using backend. */
  GPUBackend *backend = gpu_backend_get();
  static_cast<GLBackend *>(backend)->buf_free(buf_id);
}

void GPU_tex_free(GLuint tex_id)
{
  /* TODO: avoid using backend. */
  GPUBackend *backend = gpu_backend_get();
  static_cast<GLBackend *>(backend)->tex_free(tex_id);
}

/* GPUBatch & GPUFrameBuffer contain, respectively, VAO & FBO indices
 * which are not shared across contexts. So we need to keep track of
 * ownership. */
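
/* Illustrative call pattern (internal; names other than the functions below
 * are hypothetical): a batch registered with context A owns a VAO id that is
 * only valid in A, so the id must be released through A even if the request
 * comes from elsewhere.
 *
 *   gpu_context_add_batch(ctx_a, batch);  // VAO belongs to ctx_a
 *   // ... later, possibly from another thread ...
 *   GPU_vao_free(batch_vao_id, ctx_a);    // free through the owning context
 *   gpu_context_remove_batch(ctx_a, batch);
 */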

void gpu_context_add_batch(GPUContext *ctx, GPUBatch *batch)
{
  BLI_assert(ctx);
  static_cast<GLContext *>(ctx)->batch_register(batch);
}

void gpu_context_remove_batch(GPUContext *ctx, GPUBatch *batch)
{
  BLI_assert(ctx);
  static_cast<GLContext *>(ctx)->batch_unregister(batch);
}

void gpu_context_add_framebuffer(GPUContext *ctx, GPUFrameBuffer *fb)
{
#ifdef DEBUG
  BLI_assert(ctx);
  static_cast<GLContext *>(ctx)->framebuffer_register(fb);
#else
  UNUSED_VARS(ctx, fb);
#endif
}

void gpu_context_remove_framebuffer(GPUContext *ctx, GPUFrameBuffer *fb)
{
#ifdef DEBUG
  BLI_assert(ctx);
  static_cast<GLContext *>(ctx)->framebuffer_unregister(fb);
#else
  UNUSED_VARS(ctx, fb);
#endif
}

void gpu_context_active_framebuffer_set(GPUContext *ctx, GPUFrameBuffer *fb)
{
  ctx->current_fbo = fb;
}

GPUFrameBuffer *gpu_context_active_framebuffer_get(GPUContext *ctx)
{
  return ctx->current_fbo;
}

struct GPUMatrixState *gpu_context_active_matrix_state_get()
{
  BLI_assert(active_ctx);
  return active_ctx->matrix_state;
}

/* -------------------------------------------------------------------- */
/** \name Main context global mutex
 *
 * Used to avoid crashes on some old drivers.
 * \{ */

static std::mutex main_context_mutex;

void GPU_context_main_lock(void)
{
  main_context_mutex.lock();
}

void GPU_context_main_unlock(void)
{
  main_context_mutex.unlock();
}
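
/* Sketch of the intended locking pattern (the real call sites live outside
 * this file; `main_ctx` is a placeholder for the main window's context):
 *
 *   GPU_context_main_lock();
 *   GPU_context_active_set(main_ctx);
 *   // ... submit GPU work ...
 *   GPU_context_active_set(NULL);
 *   GPU_context_main_unlock();
 */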

/** \} */

/* -------------------------------------------------------------------- */
/** \name Backend selection
 * \{ */

static GPUBackend *g_backend;

void GPU_backend_init(eGPUBackendType backend_type)
{
  BLI_assert(g_backend == NULL);

  switch (backend_type) {
#if WITH_OPENGL_BACKEND
    case GPU_BACKEND_OPENGL:
      g_backend = new GLBackend;
      break;
#endif
    default:
      BLI_assert(0);
      break;
  }
}

void GPU_backend_exit(void)
{
  if (g_backend) {
    /* TODO: assert that no resources are left. Currently UI textures are still not freed in
     * their context correctly. */
    delete g_backend;
  }
}
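
/* Lifecycle sketch (assumed ordering; `ghost_window` is a placeholder): the
 * backend is currently created lazily by GPU_context_create() when none
 * exists, but it can also be driven explicitly.
 *
 *   GPU_backend_init(GPU_BACKEND_OPENGL);
 *   GPUContext *ctx = GPU_context_create(ghost_window);
 *   // ... use the context ...
 *   GPU_context_active_set(ctx);
 *   GPU_context_discard(ctx);
 *   GPU_backend_exit();
 */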

GPUBackend *gpu_backend_get(void)
{
  return g_backend;
}

/** \} */