Merge branch 'blender-v2.90-release'

# Conflicts:
#	source/blender/draw/engines/eevee/eevee_motion_blur.c
2020-08-12 18:16:47 +02:00
4 changed files with 125 additions and 90 deletions


@@ -29,6 +29,7 @@
#include "BKE_duplilist.h"
#include "BKE_modifier.h"
#include "BKE_object.h"
#include "DEG_depsgraph_query.h"
@@ -42,15 +43,20 @@
static void eevee_motion_blur_mesh_data_free(void *val)
{
EEVEE_GeometryMotionData *geom_mb = (EEVEE_GeometryMotionData *)val;
EEVEE_HairMotionData *hair_mb = (EEVEE_HairMotionData *)val;
switch (geom_mb->type) {
case EEVEE_HAIR_GEOM_MOTION_DATA:
for (int i = 0; i < ARRAY_SIZE(geom_mb->vbo); i++) {
GPU_VERTBUF_DISCARD_SAFE(geom_mb->hair_pos[i]);
DRW_TEXTURE_FREE_SAFE(geom_mb->hair_pos_tx[i]);
case EEVEE_MOTION_DATA_HAIR:
for (int j = 0; j < hair_mb->psys_len; j++) {
for (int i = 0; i < ARRAY_SIZE(hair_mb->psys[0].hair_pos); i++) {
GPU_VERTBUF_DISCARD_SAFE(hair_mb->psys[j].hair_pos[i]);
}
for (int i = 0; i < ARRAY_SIZE(hair_mb->psys[0].hair_pos); i++) {
DRW_TEXTURE_FREE_SAFE(hair_mb->psys[j].hair_pos_tx[i]);
}
}
break;
case EEVEE_MESH_GEOM_MOTION_DATA:
case EEVEE_MOTION_DATA_MESH:
for (int i = 0; i < ARRAY_SIZE(geom_mb->vbo); i++) {
GPU_VERTBUF_DISCARD_SAFE(geom_mb->vbo[i]);
}
@@ -64,7 +70,7 @@ static uint eevee_object_key_hash(const void *key)
EEVEE_ObjectKey *ob_key = (EEVEE_ObjectKey *)key;
uint hash = BLI_ghashutil_ptrhash(ob_key->ob);
hash = BLI_ghashutil_combine_hash(hash, BLI_ghashutil_ptrhash(ob_key->parent));
for (int i = 0; i < 16; i++) {
for (int i = 0; i < MAX_DUPLI_RECUR; i++) {
if (ob_key->id[i] != 0) {
hash = BLI_ghashutil_combine_hash(hash, BLI_ghashutil_inthash(ob_key->id[i]));
}
@@ -148,18 +154,40 @@ EEVEE_ObjectMotionData *EEVEE_motion_blur_object_data_get(EEVEE_MotionBlurData *
return ob_step;
}
static EEVEE_GeometryMotionData *motion_blur_geometry_data_get(EEVEE_MotionBlurData *mb,
void *key,
bool hair)
static void *motion_blur_deform_data_get(EEVEE_MotionBlurData *mb, Object *ob, bool hair)
{
if (mb->geom == NULL) {
return NULL;
}
DupliObject *dup = DRW_object_get_dupli(ob);
void *key;
if (dup) {
key = dup->ob;
}
else {
key = ob;
}
/* Only use data for objects that have no modifiers. */
if (!BKE_object_is_modified(DRW_context_state_get()->scene, ob)) {
key = ob->data;
}
key = (char *)key + (int)hair;
EEVEE_GeometryMotionData *geom_step = BLI_ghash_lookup(mb->geom, key);
if (geom_step == NULL) {
geom_step = MEM_callocN(sizeof(EEVEE_GeometryMotionData), __func__);
geom_step->type = hair ? EEVEE_HAIR_GEOM_MOTION_DATA : EEVEE_MESH_GEOM_MOTION_DATA;
if (hair) {
EEVEE_HairMotionData *hair_step;
/* Ugly, we allocate for each modifier and just fill based on the modifier index in the list. */
int psys_len = (ob->type != OB_HAIR) ? BLI_listbase_count(&ob->modifiers) : 1;
hair_step = MEM_callocN(sizeof(EEVEE_HairMotionData) + sizeof(hair_step->psys[0]) * psys_len,
__func__);
hair_step->psys_len = psys_len;
geom_step = (EEVEE_GeometryMotionData *)hair_step;
geom_step->type = EEVEE_MOTION_DATA_HAIR;
}
else {
geom_step = MEM_callocN(sizeof(EEVEE_GeometryMotionData), __func__);
geom_step->type = EEVEE_MOTION_DATA_MESH;
}
BLI_ghash_insert(mb->geom, key, geom_step);
}
return geom_step;
@@ -167,25 +195,12 @@ static EEVEE_GeometryMotionData *motion_blur_geometry_data_get(EEVEE_MotionBlurD
EEVEE_GeometryMotionData *EEVEE_motion_blur_geometry_data_get(EEVEE_MotionBlurData *mb, Object *ob)
{
/* Use original data as key to ensure matching across updates. */
return motion_blur_geometry_data_get(mb, DEG_get_original_object(ob)->data, false);
return motion_blur_deform_data_get(mb, ob, false);
}
EEVEE_GeometryMotionData *EEVEE_motion_blur_hair_data_get(EEVEE_MotionBlurData *mb,
Object *ob,
ModifierData *md)
EEVEE_HairMotionData *EEVEE_motion_blur_hair_data_get(EEVEE_MotionBlurData *mb, Object *ob)
{
void *key;
if (md) {
/* Particle system. */
key = BKE_modifier_get_original(md);
}
else {
/* Hair object. */
key = DEG_get_original_object(ob)->data;
}
return motion_blur_geometry_data_get(mb, key, true);
return motion_blur_deform_data_get(mb, ob, true);
}
/* View Layer data. */
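
A note on the lookup key built in motion_blur_deform_data_get() above: mesh and hair motion data for the same object are stored in the same GHash, and an object without modifiers is keyed by its object data so unmodified copies can share one entry. The hair entry is kept distinct by offsetting the key pointer by one byte; the pointer is only compared, never dereferenced. A minimal sketch of the scheme (the helper name is illustrative, the calls are the ones used above):

/* Sketch only: derive a stable, distinct hash key for the mesh or hair
 * motion data of one object. The result is used purely as a GHash key. */
static void *deform_key_sketch(Object *ob, bool hair)
{
  DupliObject *dup = DRW_object_get_dupli(ob);
  void *key = dup ? (void *)dup->ob : (void *)ob;
  if (!BKE_object_is_modified(DRW_context_state_get()->scene, ob)) {
    key = ob->data; /* Unmodified copies can share the same deform data. */
  }
  /* A one-byte offset separates the hair entry from the mesh entry. */
  return (char *)key + (hair ? 1 : 0);
}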


@@ -289,8 +289,14 @@ void EEVEE_motion_blur_hair_cache_populate(EEVEE_ViewLayerData *UNUSED(sldata),
/* Store transform */
DRW_hair_duplimat_get(ob, psys, md, mb_data->obmat[mb_step]);
EEVEE_GeometryMotionData *mb_geom = EEVEE_motion_blur_hair_data_get(
&effects->motion_blur, ob, md);
EEVEE_HairMotionData *mb_hair = EEVEE_motion_blur_hair_data_get(&effects->motion_blur, ob);
int psys_id = (md != NULL) ? BLI_findindex(&ob->modifiers, md) : 0;
if (psys_id >= mb_hair->psys_len) {
/* This should never happen. It means the modifier list was changed by frame evaluation. */
BLI_assert(0);
return;
}
if (mb_step == MB_CURR) {
/* Fill missing matrices if the object was hidden in previous or next frame. */
@@ -301,18 +307,21 @@ void EEVEE_motion_blur_hair_cache_populate(EEVEE_ViewLayerData *UNUSED(sldata),
copy_m4_m4(mb_data->obmat[MB_NEXT], mb_data->obmat[MB_CURR]);
}
GPUTexture *tex_prev = mb_hair->psys[psys_id].hair_pos_tx[MB_PREV];
GPUTexture *tex_next = mb_hair->psys[psys_id].hair_pos_tx[MB_NEXT];
grp = DRW_shgroup_hair_create_sub(ob, psys, md, effects->motion_blur.hair_grp);
DRW_shgroup_uniform_mat4(grp, "prevModelMatrix", mb_data->obmat[MB_PREV]);
DRW_shgroup_uniform_mat4(grp, "currModelMatrix", mb_data->obmat[MB_CURR]);
DRW_shgroup_uniform_mat4(grp, "nextModelMatrix", mb_data->obmat[MB_NEXT]);
DRW_shgroup_uniform_texture(grp, "prvBuffer", mb_geom->hair_pos_tx[MB_PREV]);
DRW_shgroup_uniform_texture(grp, "nxtBuffer", mb_geom->hair_pos_tx[MB_NEXT]);
DRW_shgroup_uniform_bool(grp, "useDeform", &mb_geom->use_deform, 1);
DRW_shgroup_uniform_texture(grp, "prvBuffer", tex_prev);
DRW_shgroup_uniform_texture(grp, "nxtBuffer", tex_next);
DRW_shgroup_uniform_bool(grp, "useDeform", &mb_hair->use_deform, 1);
}
else {
/* Store vertex position buffer. */
mb_geom->hair_pos[mb_step] = DRW_hair_pos_buffer_get(ob, psys, md);
mb_geom->use_deform = true;
mb_hair->psys[psys_id].hair_pos[mb_step] = DRW_hair_pos_buffer_get(ob, psys, md);
mb_hair->use_deform = true;
}
}
}
@@ -339,7 +348,8 @@ void EEVEE_motion_blur_cache_populate(EEVEE_ViewLayerData *UNUSED(sldata),
const bool is_dupli = (ob->base_flag & BASE_FROM_DUPLI) != 0;
const bool object_moves = is_dupli || has_rigidbody || BKE_object_moves_in_time(ob, true);
#else
/* BKE_object_moves_in_time does not work in some cases. Better */
/* BKE_object_moves_in_time does not work in some cases.
* Better to detect non-moving objects after evaluation. */
const bool object_moves = true;
#endif
const bool is_deform = BKE_object_is_deform_modified(DRW_context_state_get()->scene, ob) ||
@@ -375,17 +385,6 @@ void EEVEE_motion_blur_cache_populate(EEVEE_ViewLayerData *UNUSED(sldata),
}
if (mb_geom->use_deform) {
EEVEE_ObjectEngineData *oedata = EEVEE_object_data_ensure(ob);
if (!oedata->geom_update) {
/* FIXME(fclem) There can be false positives where the actual mesh is not updated.
* This avoids a crash but removes the motion blur from some objects.
* Maybe an issue with depsgraph tagging. */
mb_geom->use_deform = false;
oedata->geom_update = false;
GPU_VERTBUF_DISCARD_SAFE(mb_geom->vbo[MB_PREV]);
GPU_VERTBUF_DISCARD_SAFE(mb_geom->vbo[MB_NEXT]);
}
/* Keep to modify later (after init). */
mb_geom->batch = batch;
}
@@ -445,29 +444,36 @@ void EEVEE_motion_blur_cache_finish(EEVEE_Data *vedata)
BLI_ghashIterator_done(&ghi) == false;
BLI_ghashIterator_step(&ghi)) {
EEVEE_GeometryMotionData *mb_geom = BLI_ghashIterator_getValue(&ghi);
EEVEE_HairMotionData *mb_hair = (EEVEE_HairMotionData *)mb_geom;
if (!mb_geom->use_deform) {
continue;
}
switch (mb_geom->type) {
case EEVEE_HAIR_GEOM_MOTION_DATA:
case EEVEE_MOTION_DATA_HAIR:
if (mb_step == MB_CURR) {
/* TODO(fclem) Check for vertex count mismatch. */
mb_geom->use_deform = true;
mb_hair->use_deform = true;
}
else {
mb_geom->hair_pos[mb_step] = GPU_vertbuf_duplicate(mb_geom->hair_pos[mb_step]);
for (int i = 0; i < mb_hair->psys_len; i++) {
if (mb_hair->psys[i].hair_pos[mb_step] == NULL) {
continue;
}
mb_hair->psys[i].hair_pos[mb_step] = GPU_vertbuf_duplicate(
mb_hair->psys[i].hair_pos[mb_step]);
/* Create vbo immediately to bind to texture buffer. */
GPU_vertbuf_use(mb_geom->hair_pos[mb_step]);
/* Create vbo immediately to bind to texture buffer. */
GPU_vertbuf_use(mb_hair->psys[i].hair_pos[mb_step]);
mb_geom->hair_pos_tx[mb_step] = GPU_texture_create_from_vertbuf(
mb_geom->hair_pos[mb_step]);
mb_hair->psys[i].hair_pos_tx[mb_step] = GPU_texture_create_from_vertbuf(
mb_hair->psys[i].hair_pos[mb_step]);
}
}
break;
case EEVEE_MESH_GEOM_MOTION_DATA:
case EEVEE_MOTION_DATA_MESH:
if (mb_step == MB_CURR) {
/* Modify batch to have data from adjacent frames. */
GPUBatch *batch = mb_geom->batch;
@@ -485,15 +491,7 @@ void EEVEE_motion_blur_cache_finish(EEVEE_Data *vedata)
break;
}
/* Modify the batch to include the previous & next position. */
if (i == MB_PREV) {
GPU_batch_vertbuf_add_ex(batch, vbo, true);
mb_geom->vbo[i] = NULL;
}
else {
/* This VBO can be reused by the next time step. Don't pass ownership. */
GPU_batch_vertbuf_add_ex(batch, vbo, false);
}
GPU_batch_vertbuf_add_ex(batch, vbo, false);
}
}
}
@@ -548,16 +546,28 @@ void EEVEE_motion_blur_swap_data(EEVEE_Data *vedata)
BLI_ghashIterator_done(&ghi) == false;
BLI_ghashIterator_step(&ghi)) {
EEVEE_GeometryMotionData *mb_geom = BLI_ghashIterator_getValue(&ghi);
EEVEE_HairMotionData *mb_hair = (EEVEE_HairMotionData *)mb_geom;
switch (mb_geom->type) {
case EEVEE_HAIR_GEOM_MOTION_DATA:
GPU_VERTBUF_DISCARD_SAFE(mb_geom->hair_pos[MB_PREV]);
DRW_TEXTURE_FREE_SAFE(mb_geom->hair_pos_tx[MB_PREV]);
mb_geom->hair_pos[MB_PREV] = mb_geom->hair_pos[MB_NEXT];
mb_geom->hair_pos_tx[MB_PREV] = mb_geom->hair_pos_tx[MB_NEXT];
case EEVEE_MOTION_DATA_HAIR:
for (int i = 0; i < mb_hair->psys_len; i++) {
GPU_VERTBUF_DISCARD_SAFE(mb_hair->psys[i].hair_pos[MB_PREV]);
DRW_TEXTURE_FREE_SAFE(mb_hair->psys[i].hair_pos_tx[MB_PREV]);
mb_hair->psys[i].hair_pos[MB_PREV] = mb_hair->psys[i].hair_pos[MB_NEXT];
mb_hair->psys[i].hair_pos_tx[MB_PREV] = mb_hair->psys[i].hair_pos_tx[MB_NEXT];
}
break;
case EEVEE_MESH_GEOM_MOTION_DATA:
case EEVEE_MOTION_DATA_MESH:
if (mb_geom->batch != NULL) {
for (int i = 0; i < GPU_BATCH_VBO_MAX_LEN; i++) {
if (mb_geom->batch->verts[i] == mb_geom->vbo[MB_PREV] ||
mb_geom->batch->verts[i] == mb_geom->vbo[MB_NEXT]) {
/* Avoid double reference of the VBOs. */
mb_geom->batch->verts[i] = NULL;
}
}
}
GPU_VERTBUF_DISCARD_SAFE(mb_geom->vbo[MB_PREV]);
mb_geom->vbo[MB_PREV] = mb_geom->vbo[MB_NEXT];
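
The subtle part of the two hunks above is VBO ownership: EEVEE_motion_blur_cache_finish() now always adds the step VBOs to the batch without passing ownership, so the same GPUVertBuf stays reachable from both mb_geom->vbo[] and batch->verts[]. EEVEE_motion_blur_swap_data() therefore clears any aliasing batch slot before discarding the previous-step buffer, otherwise the batch would be left with a dangling pointer or the buffer would be freed twice. A reduced sketch of that invariant, assuming only the fields shown above:

/* Sketch only: drop aliased batch references before freeing the old step VBO. */
static void discard_prev_step_vbo_sketch(EEVEE_GeometryMotionData *mb_geom)
{
  if (mb_geom->batch != NULL) {
    for (int i = 0; i < GPU_BATCH_VBO_MAX_LEN; i++) {
      if (mb_geom->batch->verts[i] == mb_geom->vbo[MB_PREV] ||
          mb_geom->batch->verts[i] == mb_geom->vbo[MB_NEXT]) {
        mb_geom->batch->verts[i] = NULL; /* The batch never owned these. */
      }
    }
  }
  GPU_VERTBUF_DISCARD_SAFE(mb_geom->vbo[MB_PREV]);
  mb_geom->vbo[MB_PREV] = mb_geom->vbo[MB_NEXT];
  mb_geom->vbo[MB_NEXT] = NULL;
}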


@@ -595,25 +595,30 @@ typedef struct EEVEE_ObjectMotionData {
} EEVEE_ObjectMotionData;
typedef enum eEEVEEMotionData {
EEVEE_MESH_GEOM_MOTION_DATA = 0,
EEVEE_HAIR_GEOM_MOTION_DATA,
EEVEE_MOTION_DATA_MESH = 0,
EEVEE_MOTION_DATA_HAIR,
} eEEVEEMotionData;
typedef struct EEVEE_GeometryMotionData {
typedef struct EEVEE_HairMotionData {
/** Needs to be first to ensure casting. */
eEEVEEMotionData type;
int use_deform; /* To disable deform mb if vertcount mismatch. */
union {
struct {
/* Mesh */
struct GPUBatch *batch; /* Batch for time = t. */
struct GPUVertBuf *vbo[2]; /* Vbo for time = t +/- step. */
};
struct {
/* Hair */
struct GPUVertBuf *hair_pos[2]; /* Position buffer for time = t +/- step. */
struct GPUTexture *hair_pos_tx[2]; /* Buffer Texture of the corresponding VBO. */
};
};
int use_deform;
/** The allocator reserves enough slots for all particle systems, or 1 if it's a hair object. */
int psys_len;
struct {
struct GPUVertBuf *hair_pos[2]; /* Position buffer for time = t +/- step. */
struct GPUTexture *hair_pos_tx[2]; /* Buffer Texture of the corresponding VBO. */
} psys[0];
} EEVEE_HairMotionData;
typedef struct EEVEE_GeometryMotionData {
/** Needs to be first to ensure casting. */
eEEVEEMotionData type;
/** To disable deform motion blur if the vertex count mismatches. */
int use_deform;
struct GPUBatch *batch; /* Batch for time = t. */
struct GPUVertBuf *vbo[2]; /* Vbo for time = t +/- step. */
} EEVEE_GeometryMotionData;
/* ************ EFFECTS DATA ************* */
@@ -914,6 +919,9 @@ typedef struct EEVEE_PrivateData {
float camtexcofac[4];
float size_orig[2];
/* Cached original camera when rendering for motion blur (see T79637). */
struct Object *cam_original_ob;
/* Mist Settings */
float mist_start, mist_inv_dist, mist_falloff;
@@ -971,9 +979,7 @@ EEVEE_ObjectMotionData *EEVEE_motion_blur_object_data_get(EEVEE_MotionBlurData *
bool hair);
EEVEE_GeometryMotionData *EEVEE_motion_blur_geometry_data_get(EEVEE_MotionBlurData *mb,
Object *ob);
EEVEE_GeometryMotionData *EEVEE_motion_blur_hair_data_get(EEVEE_MotionBlurData *mb,
Object *ob,
struct ModifierData *md);
EEVEE_HairMotionData *EEVEE_motion_blur_hair_data_get(EEVEE_MotionBlurData *mb, Object *ob);
EEVEE_LightProbeEngineData *EEVEE_lightprobe_data_get(Object *ob);
EEVEE_LightProbeEngineData *EEVEE_lightprobe_data_ensure(Object *ob);
EEVEE_LightEngineData *EEVEE_light_data_get(Object *ob);
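
The header changes above replace the old union-based EEVEE_GeometryMotionData with two separate structs. Both keep the eEEVEEMotionData type field as their first member, so a pointer pulled out of the GHash can be inspected and then cast to the hair variant, whose per-particle-system slots live in a trailing zero-length psys[] array sized at allocation time. A minimal sketch of how such a struct is allocated and dispatched, assuming the definitions above:

/* Sketch only: allocate per-particle-system hair motion data and dispatch
 * on the shared leading `type` member. */
static EEVEE_HairMotionData *hair_motion_data_alloc_sketch(int psys_len)
{
  EEVEE_HairMotionData *hair = MEM_callocN(
      sizeof(EEVEE_HairMotionData) + sizeof(hair->psys[0]) * psys_len, __func__);
  hair->type = EEVEE_MOTION_DATA_HAIR;
  hair->psys_len = psys_len;
  return hair;
}

static void motion_data_free_sketch(EEVEE_GeometryMotionData *geom)
{
  if (geom->type == EEVEE_MOTION_DATA_HAIR) {
    EEVEE_HairMotionData *hair = (EEVEE_HairMotionData *)geom;
    for (int i = 0; i < hair->psys_len; i++) {
      for (int j = 0; j < ARRAY_SIZE(hair->psys[i].hair_pos); j++) {
        GPU_VERTBUF_DISCARD_SAFE(hair->psys[i].hair_pos[j]);
        DRW_TEXTURE_FREE_SAFE(hair->psys[i].hair_pos_tx[j]);
      }
    }
  }
  MEM_freeN(geom);
}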


@@ -126,6 +126,9 @@ bool EEVEE_render_init(EEVEE_Data *ved, RenderEngine *engine, struct Depsgraph *
GPU_framebuffer_ensure_config(&fbl->main_color_fb,
{GPU_ATTACHMENT_NONE, GPU_ATTACHMENT_TEXTURE(txl->color)});
/* The camera could change because of motion blur. */
g_data->cam_original_ob = RE_GetCamera(engine->re);
return true;
}
@@ -135,9 +138,10 @@ void EEVEE_render_modules_init(EEVEE_Data *vedata,
{
EEVEE_ViewLayerData *sldata = EEVEE_view_layer_data_ensure();
EEVEE_StorageList *stl = vedata->stl;
EEVEE_PrivateData *g_data = vedata->stl->g_data;
EEVEE_FramebufferList *fbl = vedata->fbl;
/* TODO(sergey): Should the render hold a pointer to an evaluated camera instead? */
struct Object *ob_camera_eval = DEG_get_evaluated_object(depsgraph, RE_GetCamera(engine->re));
struct Object *ob_camera_eval = DEG_get_evaluated_object(depsgraph, g_data->cam_original_ob);
EEVEE_render_view_sync(vedata, engine, depsgraph);
/* `EEVEE_renderpasses_init` will set the active render passes used by `EEVEE_effects_init`.
@@ -156,7 +160,7 @@ void EEVEE_render_view_sync(EEVEE_Data *vedata, RenderEngine *engine, struct Dep
/* Set the pers & view matrix. */
float winmat[4][4], viewmat[4][4], viewinv[4][4];
/* TODO(sergey): Should the render hold a pointer to an evaluated camera instead? */
struct Object *ob_camera_eval = DEG_get_evaluated_object(depsgraph, RE_GetCamera(engine->re));
struct Object *ob_camera_eval = DEG_get_evaluated_object(depsgraph, g_data->cam_original_ob);
RE_GetCameraWindow(engine->re, ob_camera_eval, winmat);
RE_GetCameraWindowWithOverscan(engine->re, g_data->overscan, winmat);