/*
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 *
 * Copyright 2016, Blender Foundation.
 */

/** \file
 * \ingroup draw
 */
/**
 * DRW Instance Data Manager
 *
 * This is a special memory manager that keeps memory blocks ready to send as VBO data in one
 * continuous allocation. This way we avoid feeding #GPUBatch each instance data one by one and
 * unnecessary memcpy. Since we lose which memory block was used for each #DRWShadingGroup we need
 * to redistribute them in the same order/size to avoid a realloc each frame. This is why
 * #DRWInstanceDatas are sorted in a list for each different data size.
 */
|
|
#include "draw_instance_data.h"
|
|
|
|
#include "DRW_engine.h"
|
2018-02-14 18:16:52 +01:00
|
|
|
#include "DRW_render.h" /* For DRW_shgroup_get_instance_count() */
|
2018-01-09 14:09:14 +01:00
|
|
|
|
2019-05-13 17:56:20 +02:00
|
|
|
#include "BLI_memblock.h"
|
2020-03-19 09:33:03 +01:00
|
|
|
#include "BLI_mempool.h"
|
|
|
|
#include "BLI_utildefines.h"
|
|
|
|
#include "MEM_guardedalloc.h"
|
2018-01-09 14:09:14 +01:00
|
|
|
|
|
|
|
struct DRWInstanceData {
|
|
|
|
struct DRWInstanceData *next;
|
|
|
|
bool used; /* If this data is used or not. */
|
|
|
|
size_t data_size; /* Size of one instance data. */
|
2018-05-30 12:19:20 +02:00
|
|
|
BLI_mempool *mempool;
|
2018-01-09 14:09:14 +01:00
|
|
|
};
|
|
|
|
|
|
|
|
struct DRWInstanceDataList {
|
2018-02-20 01:55:19 +01:00
|
|
|
struct DRWInstanceDataList *next, *prev;
|
2018-01-09 14:09:14 +01:00
|
|
|
/* Linked lists for all possible data pool size */
|
|
|
|
DRWInstanceData *idata_head[MAX_INSTANCE_DATA_SIZE];
|
|
|
|
DRWInstanceData *idata_tail[MAX_INSTANCE_DATA_SIZE];
|
2018-02-14 18:16:52 +01:00
|
|
|
|
2019-05-13 17:56:20 +02:00
|
|
|
BLI_memblock *pool_instancing;
|
|
|
|
BLI_memblock *pool_batching;
|
|
|
|
BLI_memblock *pool_buffers;
|
2018-01-09 14:09:14 +01:00
|
|
|
};
|
|
|
|
|
2019-05-13 17:56:20 +02:00
|
|
|
typedef struct DRWTempBufferHandle {
|
2020-08-10 01:43:50 +02:00
|
|
|
GPUVertBuf *buf;
|
2019-05-13 17:56:20 +02:00
|
|
|
/** Format pointer for reuse. */
|
|
|
|
GPUVertFormat *format;
|
|
|
|
/** Touched vertex length for resize. */
|
2019-05-31 01:45:41 +02:00
|
|
|
int *vert_len;
|
2019-05-13 17:56:20 +02:00
|
|
|
} DRWTempBufferHandle;
|
|
|
|
|
2020-08-10 01:43:50 +02:00
|
|
|
typedef struct DRWTempInstancingHandle {
|
|
|
|
/** Copy of geom but with the per-instance attributes. */
|
|
|
|
GPUBatch *batch;
|
|
|
|
/** Batch containing instancing attributes. */
|
|
|
|
GPUBatch *instancer;
|
|
|
|
/** Callbuffer to be used instead of instancer . */
|
|
|
|
GPUVertBuf *buf;
|
|
|
|
/** Original non-instanced batch pointer. */
|
|
|
|
GPUBatch *geom;
|
|
|
|
} DRWTempInstancingHandle;
|
2018-02-20 01:55:19 +01:00
|
|
|
|
2020-08-10 01:43:50 +02:00
|
|
|
static ListBase g_idatalists = {NULL, NULL};
|
2018-02-14 18:16:52 +01:00
|
|
|
|
2020-08-10 01:43:50 +02:00
|
|
|
static void instancing_batch_references_add(GPUBatch *batch)
|
2018-02-14 18:16:52 +01:00
|
|
|
{
|
2020-08-10 01:43:50 +02:00
|
|
|
for (int i = 0; i < GPU_BATCH_VBO_MAX_LEN && batch->verts[i]; i++) {
|
|
|
|
GPU_vertbuf_handle_ref_add(batch->verts[i]);
|
|
|
|
}
|
|
|
|
for (int i = 0; i < GPU_BATCH_INST_VBO_MAX_LEN && batch->inst[i]; i++) {
|
|
|
|
GPU_vertbuf_handle_ref_add(batch->inst[i]);
|
2018-12-18 13:08:08 +01:00
|
|
|
}
|
2020-08-10 01:43:50 +02:00
|
|
|
}
|
2019-05-13 17:56:20 +02:00
|
|
|
|
2020-08-10 01:43:50 +02:00
|
|
|
static void instancing_batch_references_remove(GPUBatch *batch)
|
|
|
|
{
|
|
|
|
for (int i = 0; i < GPU_BATCH_VBO_MAX_LEN && batch->verts[i]; i++) {
|
|
|
|
GPU_vertbuf_handle_ref_remove(batch->verts[i]);
|
|
|
|
}
|
|
|
|
for (int i = 0; i < GPU_BATCH_INST_VBO_MAX_LEN && batch->inst[i]; i++) {
|
|
|
|
GPU_vertbuf_handle_ref_remove(batch->inst[i]);
|
2018-02-20 01:55:19 +01:00
|
|
|
}
|
|
|
|
}
|
2018-02-14 18:16:52 +01:00
|
|
|
|
2020-08-10 01:43:50 +02:00
|
|
|
/* -------------------------------------------------------------------- */
/** \name Instance Buffer Management
 * \{ */
|
|
|
|
|
2019-05-13 17:56:20 +02:00
|
|
|
/**
|
|
|
|
* This manager allows to distribute existing batches for instancing
|
|
|
|
* attributes. This reduce the number of batches creation.
|
|
|
|
* Querying a batch is done with a vertex format. This format should
|
2020-10-19 08:12:33 -07:00
|
|
|
* be static so that its pointer never changes (because we are using
|
2019-05-13 17:56:20 +02:00
|
|
|
* this pointer as identifier [we don't want to check the full format
|
|
|
|
* that would be too slow]).
|
|
|
|
*/
|
|
|
|
GPUVertBuf *DRW_temp_buffer_request(DRWInstanceDataList *idatalist,
|
|
|
|
GPUVertFormat *format,
|
2019-05-31 01:45:41 +02:00
|
|
|
int *vert_len)
|
2018-02-20 01:55:19 +01:00
|
|
|
{
|
2019-05-13 17:56:20 +02:00
|
|
|
BLI_assert(format != NULL);
|
|
|
|
BLI_assert(vert_len != NULL);
|
|
|
|
|
|
|
|
DRWTempBufferHandle *handle = BLI_memblock_alloc(idatalist->pool_buffers);
|
|
|
|
|
|
|
|
if (handle->format != format) {
|
|
|
|
handle->format = format;
|
2020-08-10 01:43:50 +02:00
|
|
|
GPU_VERTBUF_DISCARD_SAFE(handle->buf);
|
|
|
|
|
2020-09-06 22:09:51 +02:00
|
|
|
GPUVertBuf *vert = GPU_vertbuf_calloc();
|
2019-05-13 17:56:20 +02:00
|
|
|
GPU_vertbuf_init_with_format_ex(vert, format, GPU_USAGE_DYNAMIC);
|
|
|
|
GPU_vertbuf_data_alloc(vert, DRW_BUFFER_VERTS_CHUNK);
|
2020-08-10 01:43:50 +02:00
|
|
|
|
|
|
|
handle->buf = vert;
|
2018-02-20 01:55:19 +01:00
|
|
|
}
|
2020-08-10 01:43:50 +02:00
|
|
|
handle->vert_len = vert_len;
|
|
|
|
return handle->buf;
|
2018-02-20 01:55:19 +01:00
|
|
|
}
|
2018-02-14 18:16:52 +01:00
|
|
|
|
2020-08-10 01:43:50 +02:00
|
|
|
/* NOTE: Does not return a valid drawable batch until DRW_instance_buffer_finish has run.
|
|
|
|
* Initialization is delayed because instancer or geom could still not be initialized. */
|
2019-05-13 17:56:20 +02:00
|
|
|
GPUBatch *DRW_temp_batch_instance_request(DRWInstanceDataList *idatalist,
|
|
|
|
GPUVertBuf *buf,
|
2019-12-02 01:40:58 +01:00
|
|
|
GPUBatch *instancer,
|
2019-05-13 17:56:20 +02:00
|
|
|
GPUBatch *geom)
|
2018-02-20 01:55:19 +01:00
|
|
|
{
|
2019-05-13 17:56:20 +02:00
|
|
|
/* Do not call this with a batch that is already an instancing batch. */
|
2019-12-02 01:40:58 +01:00
|
|
|
BLI_assert(geom->inst[0] == NULL);
|
|
|
|
/* Only call with one of them. */
|
|
|
|
BLI_assert((instancer != NULL) != (buf != NULL));
|
2019-05-13 17:56:20 +02:00
|
|
|
|
2020-08-10 01:43:50 +02:00
|
|
|
DRWTempInstancingHandle *handle = BLI_memblock_alloc(idatalist->pool_instancing);
|
|
|
|
if (handle->batch == NULL) {
|
2020-08-10 11:41:22 +02:00
|
|
|
handle->batch = GPU_batch_calloc();
|
2020-08-01 12:57:17 +02:00
|
|
|
}
|
|
|
|
|
2020-08-10 01:43:50 +02:00
|
|
|
GPUBatch *batch = handle->batch;
|
2020-09-06 16:40:07 +02:00
|
|
|
bool instancer_compat = buf ? ((batch->inst[0] == buf) &&
|
|
|
|
(GPU_vertbuf_get_status(buf) & GPU_VERTBUF_DATA_UPLOADED)) :
|
2020-08-10 01:43:50 +02:00
|
|
|
((batch->inst[0] == instancer->verts[0]) &&
|
|
|
|
(batch->inst[1] == instancer->verts[1]));
|
2020-08-08 23:37:43 +02:00
|
|
|
bool is_compatible = (batch->prim_type == geom->prim_type) && instancer_compat &&
|
2020-08-10 03:17:35 +02:00
|
|
|
(batch->flag & GPU_BATCH_BUILDING) == 0 && (batch->elem == geom->elem);
|
2019-05-13 17:56:20 +02:00
|
|
|
for (int i = 0; i < GPU_BATCH_VBO_MAX_LEN && is_compatible; i++) {
|
|
|
|
if (batch->verts[i] != geom->verts[i]) {
|
|
|
|
is_compatible = false;
|
2018-02-14 18:16:52 +01:00
|
|
|
}
|
|
|
|
}
|
2019-05-13 17:56:20 +02:00
|
|
|
|
|
|
|
if (!is_compatible) {
|
2020-08-10 01:43:50 +02:00
|
|
|
instancing_batch_references_remove(batch);
|
2019-05-13 17:56:20 +02:00
|
|
|
GPU_batch_clear(batch);
|
2020-08-10 01:43:50 +02:00
|
|
|
/* Save args and init later. */
|
2020-08-10 03:17:35 +02:00
|
|
|
batch->flag = GPU_BATCH_BUILDING;
|
2020-08-10 01:43:50 +02:00
|
|
|
handle->buf = buf;
|
|
|
|
handle->instancer = instancer;
|
|
|
|
handle->geom = geom;
|
2018-02-20 01:55:19 +01:00
|
|
|
}
|
2019-05-13 17:56:20 +02:00
|
|
|
return batch;
|
|
|
|
}
|
|
|
|
|
|
|
|
/* NOTE: Use only with buf allocated via DRW_temp_buffer_request. */
|
|
|
|
GPUBatch *DRW_temp_batch_request(DRWInstanceDataList *idatalist,
|
|
|
|
GPUVertBuf *buf,
|
|
|
|
GPUPrimType prim_type)
|
|
|
|
{
|
2020-08-10 01:43:50 +02:00
|
|
|
GPUBatch **batch_ptr = BLI_memblock_alloc(idatalist->pool_batching);
|
2020-08-01 12:57:17 +02:00
|
|
|
if (*batch_ptr == NULL) {
|
2020-08-10 11:41:22 +02:00
|
|
|
*batch_ptr = GPU_batch_calloc();
|
2020-08-01 12:57:17 +02:00
|
|
|
}
|
|
|
|
|
|
|
|
GPUBatch *batch = *batch_ptr;
|
2020-09-06 16:40:07 +02:00
|
|
|
bool is_compatible = (batch->verts[0] == buf) && (batch->prim_type == prim_type) &&
|
|
|
|
(GPU_vertbuf_get_status(buf) & GPU_VERTBUF_DATA_UPLOADED);
|
2019-05-13 17:56:20 +02:00
|
|
|
if (!is_compatible) {
|
|
|
|
GPU_batch_clear(batch);
|
|
|
|
GPU_batch_init(batch, prim_type, buf, NULL);
|
2018-02-14 18:16:52 +01:00
|
|
|
}
|
2019-05-13 17:56:20 +02:00
|
|
|
return batch;
|
|
|
|
}
|
|
|
|
|
|
|
|
static void temp_buffer_handle_free(DRWTempBufferHandle *handle)
|
|
|
|
{
|
|
|
|
handle->format = NULL;
|
2020-08-10 01:43:50 +02:00
|
|
|
GPU_VERTBUF_DISCARD_SAFE(handle->buf);
|
|
|
|
}
|
|
|
|
|
|
|
|
static void temp_instancing_handle_free(DRWTempInstancingHandle *handle)
|
|
|
|
{
|
|
|
|
instancing_batch_references_remove(handle->batch);
|
|
|
|
GPU_BATCH_DISCARD_SAFE(handle->batch);
|
2018-02-14 18:16:52 +01:00
|
|
|
}
|
|
|
|
|
2020-08-01 12:57:17 +02:00
|
|
|
static void temp_batch_free(GPUBatch **batch)
|
|
|
|
{
|
|
|
|
GPU_BATCH_DISCARD_SAFE(*batch);
|
|
|
|
}
|
|
|
|
|
2018-02-14 18:16:52 +01:00
|
|
|
void DRW_instance_buffer_finish(DRWInstanceDataList *idatalist)
|
|
|
|
{
|
2019-05-13 17:56:20 +02:00
|
|
|
/* Resize down buffers in use and send data to GPU. */
|
|
|
|
BLI_memblock_iter iter;
|
|
|
|
DRWTempBufferHandle *handle;
|
|
|
|
BLI_memblock_iternew(idatalist->pool_buffers, &iter);
|
|
|
|
while ((handle = BLI_memblock_iterstep(&iter))) {
|
|
|
|
if (handle->vert_len != NULL) {
|
|
|
|
uint vert_len = *(handle->vert_len);
|
|
|
|
uint target_buf_size = ((vert_len / DRW_BUFFER_VERTS_CHUNK) + 1) * DRW_BUFFER_VERTS_CHUNK;
|
2020-09-06 16:40:07 +02:00
|
|
|
if (target_buf_size < GPU_vertbuf_get_vertex_alloc(handle->buf)) {
|
2020-08-10 01:43:50 +02:00
|
|
|
GPU_vertbuf_data_resize(handle->buf, target_buf_size);
|
2018-02-20 01:55:19 +01:00
|
|
|
}
|
2020-08-10 01:43:50 +02:00
|
|
|
GPU_vertbuf_data_len_set(handle->buf, vert_len);
|
|
|
|
GPU_vertbuf_use(handle->buf); /* Send data. */
|
2018-02-20 01:55:19 +01:00
|
|
|
}
|
|
|
|
}
|
2019-05-13 17:56:20 +02:00
|
|
|
/* Finish pending instancing batches. */
|
2020-08-10 01:43:50 +02:00
|
|
|
DRWTempInstancingHandle *handle_inst;
|
2019-05-13 17:56:20 +02:00
|
|
|
BLI_memblock_iternew(idatalist->pool_instancing, &iter);
|
2020-08-10 01:43:50 +02:00
|
|
|
while ((handle_inst = BLI_memblock_iterstep(&iter))) {
|
|
|
|
GPUBatch *batch = handle_inst->batch;
|
2020-08-10 03:17:35 +02:00
|
|
|
if (batch && batch->flag == GPU_BATCH_BUILDING) {
|
2020-08-10 01:43:50 +02:00
|
|
|
GPUVertBuf *inst_buf = handle_inst->buf;
|
|
|
|
GPUBatch *inst_batch = handle_inst->instancer;
|
|
|
|
GPUBatch *geom = handle_inst->geom;
|
2019-05-13 17:56:20 +02:00
|
|
|
GPU_batch_copy(batch, geom);
|
2019-12-02 01:40:58 +01:00
|
|
|
if (inst_batch != NULL) {
|
|
|
|
for (int i = 0; i < GPU_BATCH_INST_VBO_MAX_LEN && inst_batch->verts[i]; i++) {
|
|
|
|
GPU_batch_instbuf_add_ex(batch, inst_batch->verts[i], false);
|
|
|
|
}
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
GPU_batch_instbuf_add_ex(batch, inst_buf, false);
|
|
|
|
}
|
2020-08-10 01:43:50 +02:00
|
|
|
/* Add reference to avoid comparing pointers (in DRW_temp_batch_request) that could
|
|
|
|
* potentially be the same. This will delay the freeing of the GPUVertBuf itself. */
|
|
|
|
instancing_batch_references_add(batch);
|
2019-04-17 06:17:24 +02:00
|
|
|
}
|
2018-02-14 18:16:52 +01:00
|
|
|
}
|
2019-05-13 17:56:20 +02:00
|
|
|
/* Resize pools and free unused. */
|
|
|
|
BLI_memblock_clear(idatalist->pool_buffers, (MemblockValFreeFP)temp_buffer_handle_free);
|
2020-08-10 01:43:50 +02:00
|
|
|
BLI_memblock_clear(idatalist->pool_instancing, (MemblockValFreeFP)temp_instancing_handle_free);
|
2020-08-01 12:57:17 +02:00
|
|
|
BLI_memblock_clear(idatalist->pool_batching, (MemblockValFreeFP)temp_batch_free);
|
2018-02-14 18:16:52 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/** \} */
|
|
|
|
|
|
|
|
/* -------------------------------------------------------------------- */
/** \name Instance Data (DRWInstanceData)
 * \{ */
|
|
|
|
|
2019-01-29 07:46:25 +11:00
|
|
|
static DRWInstanceData *drw_instance_data_create(DRWInstanceDataList *idatalist, uint attr_size)
|
2018-01-09 14:09:14 +01:00
|
|
|
{
|
2018-02-20 01:55:19 +01:00
|
|
|
DRWInstanceData *idata = MEM_callocN(sizeof(DRWInstanceData), "DRWInstanceData");
|
2018-01-09 14:09:14 +01:00
|
|
|
idata->next = NULL;
|
|
|
|
idata->used = true;
|
2019-01-29 07:46:25 +11:00
|
|
|
idata->data_size = attr_size;
|
2018-05-30 12:19:20 +02:00
|
|
|
idata->mempool = BLI_mempool_create(sizeof(float) * idata->data_size, 0, 16, 0);
|
2019-04-17 06:17:24 +02:00
|
|
|
|
2019-01-29 07:46:25 +11:00
|
|
|
BLI_assert(attr_size > 0);
|
2019-04-17 06:17:24 +02:00
|
|
|
|
2018-01-09 14:09:14 +01:00
|
|
|
/* Push to linked list. */
|
2019-01-29 07:46:25 +11:00
|
|
|
if (idatalist->idata_head[attr_size - 1] == NULL) {
|
|
|
|
idatalist->idata_head[attr_size - 1] = idata;
|
2018-01-09 14:09:14 +01:00
|
|
|
}
|
|
|
|
else {
|
2019-01-29 07:46:25 +11:00
|
|
|
idatalist->idata_tail[attr_size - 1]->next = idata;
|
2018-01-09 14:09:14 +01:00
|
|
|
}
|
2019-01-29 07:46:25 +11:00
|
|
|
idatalist->idata_tail[attr_size - 1] = idata;
|
2019-04-17 06:17:24 +02:00
|
|
|
|
2018-01-09 14:09:14 +01:00
|
|
|
return idata;
|
|
|
|
}
|
|
|
|
|
|
|
|
static void DRW_instance_data_free(DRWInstanceData *idata)
|
|
|
|
{
|
2018-05-30 12:19:20 +02:00
|
|
|
BLI_mempool_destroy(idata->mempool);
|
2018-01-09 14:09:14 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/**
|
|
|
|
* Return a pointer to the next instance data space.
|
2019-03-19 15:17:46 +11:00
|
|
|
*/
|
2018-01-09 14:09:14 +01:00
|
|
|
void *DRW_instance_data_next(DRWInstanceData *idata)
|
|
|
|
{
|
2018-05-30 12:19:20 +02:00
|
|
|
return BLI_mempool_alloc(idata->mempool);
|
2018-01-09 14:09:14 +01:00
|
|
|
}
|
|
|
|
|
2019-01-29 07:46:25 +11:00
|
|
|
DRWInstanceData *DRW_instance_data_request(DRWInstanceDataList *idatalist, uint attr_size)
|
2018-01-09 14:09:14 +01:00
|
|
|
{
|
2019-01-29 07:46:25 +11:00
|
|
|
BLI_assert(attr_size > 0 && attr_size <= MAX_INSTANCE_DATA_SIZE);
|
2018-01-09 14:09:14 +01:00
|
|
|
|
2019-01-29 07:46:25 +11:00
|
|
|
DRWInstanceData *idata = idatalist->idata_head[attr_size - 1];
|
2018-01-09 14:09:14 +01:00
|
|
|
|
|
|
|
/* Search for an unused data chunk. */
|
|
|
|
for (; idata; idata = idata->next) {
|
|
|
|
if (idata->used == false) {
|
|
|
|
idata->used = true;
|
|
|
|
return idata;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-01-29 07:46:25 +11:00
|
|
|
return drw_instance_data_create(idatalist, attr_size);
|
2018-01-09 14:09:14 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
/** \} */
|
|
|
|
|
|
|
|
/* -------------------------------------------------------------------- */
|
|
|
|
/** \name Instance Data List (DRWInstanceDataList)
|
|
|
|
* \{ */
|
|
|
|
|
|
|
|
DRWInstanceDataList *DRW_instance_data_list_create(void)
|
|
|
|
{
|
2018-02-14 18:16:52 +01:00
|
|
|
DRWInstanceDataList *idatalist = MEM_callocN(sizeof(DRWInstanceDataList), "DRWInstanceDataList");
|
2019-05-13 17:56:20 +02:00
|
|
|
|
2020-08-01 12:57:17 +02:00
|
|
|
idatalist->pool_batching = BLI_memblock_create(sizeof(GPUBatch *));
|
2020-08-10 01:43:50 +02:00
|
|
|
idatalist->pool_instancing = BLI_memblock_create(sizeof(DRWTempInstancingHandle));
|
2019-05-21 00:54:03 +02:00
|
|
|
idatalist->pool_buffers = BLI_memblock_create(sizeof(DRWTempBufferHandle));
|
2018-02-20 01:55:19 +01:00
|
|
|
|
|
|
|
BLI_addtail(&g_idatalists, idatalist);
|
2018-02-14 18:16:52 +01:00
|
|
|
|
|
|
|
return idatalist;
|
2018-01-09 14:09:14 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
void DRW_instance_data_list_free(DRWInstanceDataList *idatalist)
|
|
|
|
{
|
|
|
|
DRWInstanceData *idata, *next_idata;
|
2019-04-17 06:17:24 +02:00
|
|
|
|
2019-09-08 00:12:26 +10:00
|
|
|
for (int i = 0; i < MAX_INSTANCE_DATA_SIZE; i++) {
|
2018-01-09 14:09:14 +01:00
|
|
|
for (idata = idatalist->idata_head[i]; idata; idata = next_idata) {
|
|
|
|
next_idata = idata->next;
|
|
|
|
DRW_instance_data_free(idata);
|
|
|
|
MEM_freeN(idata);
|
|
|
|
}
|
|
|
|
idatalist->idata_head[i] = NULL;
|
|
|
|
idatalist->idata_tail[i] = NULL;
|
|
|
|
}
|
2019-04-17 06:17:24 +02:00
|
|
|
|
2019-05-13 17:56:20 +02:00
|
|
|
BLI_memblock_destroy(idatalist->pool_buffers, (MemblockValFreeFP)temp_buffer_handle_free);
|
2020-08-10 01:43:50 +02:00
|
|
|
BLI_memblock_destroy(idatalist->pool_instancing, (MemblockValFreeFP)temp_instancing_handle_free);
|
2020-08-01 12:57:17 +02:00
|
|
|
BLI_memblock_destroy(idatalist->pool_batching, (MemblockValFreeFP)temp_batch_free);
|
2019-04-17 06:17:24 +02:00
|
|
|
|
2018-02-20 01:55:19 +01:00
|
|
|
BLI_remlink(&g_idatalists, idatalist);
|
2018-01-09 14:09:14 +01:00
|
|
|
}
|
|
|
|
|
|
|
|
void DRW_instance_data_list_reset(DRWInstanceDataList *idatalist)
|
|
|
|
{
|
|
|
|
DRWInstanceData *idata;
|
|
|
|
|
2019-09-08 00:12:26 +10:00
|
|
|
for (int i = 0; i < MAX_INSTANCE_DATA_SIZE; i++) {
|
2018-01-09 14:09:14 +01:00
|
|
|
for (idata = idatalist->idata_head[i]; idata; idata = idata->next) {
|
|
|
|
idata->used = false;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void DRW_instance_data_list_free_unused(DRWInstanceDataList *idatalist)
|
|
|
|
{
|
|
|
|
DRWInstanceData *idata, *next_idata;
|
2019-04-17 06:17:24 +02:00
|
|
|
|
2018-01-09 14:09:14 +01:00
|
|
|
/* Remove unused data blocks and sanitize each list. */
|
2019-09-08 00:12:26 +10:00
|
|
|
for (int i = 0; i < MAX_INSTANCE_DATA_SIZE; i++) {
|
2018-01-09 14:09:14 +01:00
|
|
|
idatalist->idata_tail[i] = NULL;
|
|
|
|
for (idata = idatalist->idata_head[i]; idata; idata = next_idata) {
|
|
|
|
next_idata = idata->next;
|
|
|
|
if (idata->used == false) {
|
|
|
|
if (idatalist->idata_head[i] == idata) {
|
|
|
|
idatalist->idata_head[i] = next_idata;
|
|
|
|
}
|
|
|
|
else {
|
2018-09-19 18:19:49 +02:00
|
|
|
/* idatalist->idata_tail[i] is guaranteed not to be null in this case. */
|
2018-01-09 14:09:14 +01:00
|
|
|
idatalist->idata_tail[i]->next = next_idata;
|
|
|
|
}
|
|
|
|
DRW_instance_data_free(idata);
|
|
|
|
MEM_freeN(idata);
|
|
|
|
}
|
|
|
|
else {
|
|
|
|
if (idatalist->idata_tail[i] != NULL) {
|
|
|
|
idatalist->idata_tail[i]->next = idata;
|
|
|
|
}
|
|
|
|
idatalist->idata_tail[i] = idata;
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
void DRW_instance_data_list_resize(DRWInstanceDataList *idatalist)
|
|
|
|
{
|
|
|
|
DRWInstanceData *idata;
|
|
|
|
|
2019-09-08 00:12:26 +10:00
|
|
|
for (int i = 0; i < MAX_INSTANCE_DATA_SIZE; i++) {
|
2018-01-09 14:09:14 +01:00
|
|
|
for (idata = idatalist->idata_head[i]; idata; idata = idata->next) {
|
2018-05-30 12:19:20 +02:00
|
|
|
BLI_mempool_clear_ex(idata->mempool, BLI_mempool_len(idata->mempool));
|
2018-01-09 14:09:14 +01:00
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/** \} */
|