Add Support for Geometry Node Cache #92890

Open
Jonas Dichelle wants to merge 14 commits from JonasDichelle/blender-asset-tracer:geonodes_support into main

2 changed files with 54 additions and 48 deletions
Showing only changes of commit 7edf7549c3.

View File

@@ -70,3 +70,28 @@ def modifiers(object_block: BlendFileBlock) -> typing.Iterator[BlendFileBlock]:
     # 'ob->modifiers[...]'
     mods = object_block.get_pointer((b"modifiers", b"first"))
     yield from listbase(mods, next_path=(b"modifier", b"next"))
+
+
+def copy_block(block: BlendFileBlock) -> BlendFileBlock:
JonasDichelle marked this conversation as resolved

I think it's fine to make this function a method on `BlendFileBlock`. It could then simply be named `.clone()` or `.copy()`.

Then again, is there any reason to not use [copy.copy()](https://docs.python.org/3/library/copy.html#copy.copy) and avoid the need for this function altogether?

Yes, `copy` should work fine for this too.
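One way to read that, purely as a sketch: since `copy_block()` just mirrors every `__slots__` value onto a fresh instance, a shallow `copy.copy()` call at the call site in `dynamic_array()` should produce the same result (assuming `BlendFileBlock` keeps all of its per-block state in `__slots__`, as the slot-copying loop below implies):

```python
import copy

# Inside dynamic_array(): copy.copy() builds a new BlendFileBlock and copies every
# slot value, so the dedicated copy_block() helper (and its isinstance check) would
# no longer be needed.
new_block = copy.copy(block)
new_block.file_offset = offset
```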
"""Create a new BlendFileBlock instance with the same slot data as the provided block."""
if not isinstance(block, BlendFileBlock):
raise ValueError("The existing_block must be an instance of BlendFileBlock")
new_block = BlendFileBlock(block.bfile)
for slot in BlendFileBlock.__slots__:
setattr(new_block, slot, getattr(block, slot))
return new_block
+
+
+def dynamic_array(block: BlendFileBlock) -> typing.Iterator[BlendFileBlock]:
+    """Generator, yields all blocks in a block that is a dynamic array."""

Please add a bit more explanation to this documentation. This first line is fine, but then I would love to see some explanation of what a "dynamic array" is in this context.
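As a possible direction (inferred from how the function is used for the Nodes modifier's `bakes` array later in this PR, so treat the wording as a suggestion only), the docstring could spell that out:

```python
def dynamic_array(block: BlendFileBlock) -> typing.Iterator[BlendFileBlock]:
    """Generator, yields all elements of a block that is a dynamic array.

    A "dynamic array" here is a single data block that stores `block.count`
    consecutive elements of the same DNA struct (for example the `bakes` array
    of a geometry nodes modifier). Each yielded block is a view onto one
    element, with its file_offset and size adjusted accordingly.
    """
```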
+    offset = block.file_offset
+    for i in range(block.count):
+        new_block = copy_block(block)
+        new_block.file_offset = offset
+        new_block.size = block.dna_type.size // block.count
JonasDichelle marked this conversation as resolved Outdated

Instead of making Python do the `block.dna_type.size` lookup twice per loop, do that once outside the loop and store the value in a local variable.

```
block_size = block.dna_type.size // block.count
```
+        yield new_block
+        offset += block.dna_type.size
JonasDichelle marked this conversation as resolved Outdated

Either `block.dna_type.size` is the total size of the entire array (in which case this needs the `// block.count`) or it is the size of a single element in the array (in which case the `// block.count` above is incorrect).

It's the size of an element, so the division there is incorrect.
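Taking both review points together — hoisting the attribute lookup and dropping the division, since `dna_type.size` is the size of a single element — the loop might end up looking like this (a sketch only, not the author's follow-up commit):

```python
def dynamic_array(block: BlendFileBlock) -> typing.Iterator[BlendFileBlock]:
    """Generator, yields all blocks in a block that is a dynamic array."""
    element_size = block.dna_type.size  # looked up once; size of a single element
    offset = block.file_offset
    for _ in range(block.count):
        new_block = copy_block(block)
        new_block.file_offset = offset
        new_block.size = element_size  # no // block.count: already per element
        yield new_block
        offset += element_size
```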

View File

@@ -25,7 +25,6 @@ files used by the modifiers.
 import logging
 import typing
 
-from blender_asset_tracer.blendfile.dna import Struct
 from blender_asset_tracer import blendfile, bpathlib, cdefs
 
 from . import result
@@ -320,60 +319,42 @@ def modifier_cloth(
     )
 
 
-def split_bytes_array(block: blendfile.BlendFileBlock) -> typing.Iterator[bytes]:
-    """Split a bytes array into parts based on the Struct definition."""
-    raw_data = block.raw_data()
-    item_size = block.dna_type.size
-    for i in range(0, len(raw_data), item_size):
-        data_part = raw_data[i : i + item_size]
-        yield data_part
-
-
-def bytes_to_struct(
-    data_bytes: bytes, struct_type: Struct, block: blendfile.BlendFileBlock
-) -> dict:
-    """Convert raw bytes into a struct based on the provided Struct definition."""
-    struct_instance = {}
-    for field in struct_type.fields:
-        start = field.offset
-        end = start + field.size
-        field_data = data_bytes[start:end]
-        if field.name.name_only == b"directory":
-            directory_pointer = int.from_bytes(field_data, "little")
-            directory = block.bfile.dereference_pointer(directory_pointer).as_string()
-            struct_instance["directory"] = directory
-        if field.name.name_only == b"flag":
-            flag_bin = bin(int.from_bytes(field_data, "little"))
-            flag_bin_padded = flag_bin[2:].zfill(2)
-            use_custom_directory = flag_bin_padded[0]
-            struct_instance["use_custom_directory"] = use_custom_directory
-    return struct_instance
-
-
 @mod_handler(cdefs.eModifierType_Nodes)
 def modifier_nodes(
     ctx: ModifierContext, modifier: blendfile.BlendFileBlock, block_name: bytes
 ) -> typing.Iterator[result.BlockUsage]:
-    bake_directory = modifier.get_pointer(b"simulation_bake_directory")
-    bake_directory = bake_directory.as_string()
+    mod_directory_ptr, mod_directory_field = modifier.get(
+        b"simulation_bake_directory", return_field=True
+    )
     bakes = modifier.get_pointer(b"bakes")
-    dna_type = bakes.dna_type
-    bakes_split = split_bytes_array(bakes)
-    for bake_bytes in bakes_split:
-        bake_struct = bytes_to_struct(bake_bytes, dna_type, modifier)
-        if bake_struct["use_custom_directory"] == "1":
-            directory = bake_struct["directory"]
+    for bake_idx, bake in enumerate(blendfile.iterators.dynamic_array(bakes)):
+        bake_directory_ptr, bake_directory_field = bake.get(
+            b"directory", return_field=True
+        )
+        flag = bake.get(b"flag")
+        flag_bin = bin(flag)
+        flag_bin_padded = flag_bin[2:].zfill(2)
+        use_custom_directory = flag_bin_padded[0] == "1"
+        if use_custom_directory:
+            directory_ptr = bake_directory_ptr
+            field = bake_directory_field
         else:
-            directory = bake_directory
-        print(directory)
+            directory_ptr = mod_directory_ptr
+            field = mod_directory_field
+        if directory_ptr == 0:
+            continue
+        directory = bake.bfile.dereference_pointer(directory_ptr)
+        if not directory:
+            continue
         # yield from _walk_point_cache(
         #     ctx, block_name, modifier.bfile, pointcache, cdefs.PTCACHE_EXT
         # )
+        bpath = bytes(directory.as_string(), "utf-8")
+        bake_block_name = block_name + b".bakes[%d]" % bake_idx
+        yield result.BlockUsage(
+            modifier, bpath, block_name=bake_block_name, path_full_field=field
+        )