Brush Assets: Support adding shortcut to asset shelf items #117861

Hans Goudey merged 15 commits from HooglyBoogly/blender:brush-assets-shortcut into brush-assets-project 2024-02-07 19:02:47 +01:00
45 changed files with 428 additions and 470 deletions
Showing only changes of commit eb02a851c9

View File

@@ -517,9 +517,9 @@ set(MATERIALX_HASH fad8f4e19305fb2ee920cbff638f3560)
set(MATERIALX_HASH_TYPE MD5)
set(MATERIALX_FILE materialx-v${MATERIALX_VERSION}.tar.gz)
set(OIDN_VERSION 2.1.0)
set(OIDN_VERSION 2.2.0-rc)
set(OIDN_URI https://github.com/OpenImageDenoise/oidn/releases/download/v${OIDN_VERSION}/oidn-${OIDN_VERSION}.src.tar.gz)
set(OIDN_HASH 997251847c49ce0f3ab21c7fc712bfb4)
set(OIDN_HASH 896d43b65c3fe71144914a1d6b8a5bfb)
set(OIDN_HASH_TYPE MD5)
set(OIDN_FILE oidn-${OIDN_VERSION}.src.tar.gz)
@@ -634,9 +634,9 @@ set(OPENPGL_HASH 1ec806d434d45e43e098f82ee9be0cb74928343898c57490b34ff80584e9805
set(OPENPGL_HASH_TYPE SHA256)
set(OPENPGL_FILE openpgl-${OPENPGL_VERSION}.tar.gz)
set(LEVEL_ZERO_VERSION v1.8.8)
set(LEVEL_ZERO_URI https://github.com/oneapi-src/level-zero/archive/refs/tags/${LEVEL_ZERO_VERSION}.tar.gz)
set(LEVEL_ZERO_HASH 3553ae8fa0d2d69c4210a8f3428bd6612bd8bb8a627faf52c3658a01851e66d2)
set(LEVEL_ZERO_VERSION 1.15.8)
set(LEVEL_ZERO_URI https://codeload.github.com/oneapi-src/level-zero/tar.gz/refs/tags/v${LEVEL_ZERO_VERSION})
set(LEVEL_ZERO_HASH 80663dbd4d01d9519185c6e568f2e836bfea7484363f4da8cf5cf77c3bf58602)
set(LEVEL_ZERO_HASH_TYPE SHA256)
set(LEVEL_ZERO_FILE level-zero-${LEVEL_ZERO_VERSION}.tar.gz)

View File

@@ -28,5 +28,5 @@ install(TARGETS ${PROJECT_NAME}
RUNTIME DESTINATION bin
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
PUBLIC_HEADER DESTINATION include/pystring
PUBLIC_HEADER DESTINATION include
)

View File

@@ -1,13 +1,17 @@
diff -Naur external_levelzero_org/CMakeLists.txt external_levelzero/CMakeLists.txt
--- external_levelzero_org/CMakeLists.txt 2022-03-07 13:22:11 -0700
+++ external_levelzero/CMakeLists.txt 2022-03-29 13:22:15 -0600
@@ -77,9 +77,6 @@
#enabling Control Flow Guard
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /guard:cf")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /guard:cf")
- # enable Spectre Mitigation
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Qspectre")
- set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /Qspectre")
@@ -81,13 +81,6 @@ if(MSVC)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /DYNAMICBASE")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /DYNAMICBASE")
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /guard:cf")
- # enable Spectre Mitigation, not supported by clang-cl
- if(NOT CMAKE_CXX_COMPILER_ID STREQUAL Clang)
- set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Qspectre")
- endif()
- if(NOT CMAKE_C_COMPILER_ID STREQUAL Clang)
- set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /Qspectre")
- endif()
endif()
#CXX compiler support

View File

@@ -182,11 +182,11 @@ add_library(bli_lib
"../../../source/blender/blenlib/intern/path_util.c"
"../../../source/blender/blenlib/intern/BLI_dynstr.c"
"../../../source/blender/blenlib/intern/BLI_ghash.c"
"../../../source/blender/blenlib/intern/BLI_ghash_utils.c"
"../../../source/blender/blenlib/intern/BLI_ghash_utils.cc"
"../../../source/blender/blenlib/intern/BLI_linklist.c"
"../../../source/blender/blenlib/intern/BLI_memarena.c"
"../../../source/blender/blenlib/intern/BLI_mempool.c"
"../../../source/blender/blenlib/intern/hash_mm2a.c"
"../../../source/blender/blenlib/intern/hash_mm2a.cc"
"../../../source/blender/blenlib/intern/string_utils.c"
"../../../source/blender/blenlib/intern/system.c"
)

View File

@@ -58,7 +58,7 @@ struct NodeBakeCache {
/** Where to load blobs from disk when loading the baked data lazily. */
std::optional<std::string> blobs_dir;
/** Used to avoid reading blobs multiple times for different frames. */
std::unique_ptr<BlobSharing> blob_sharing;
std::unique_ptr<BlobReadSharing> blob_sharing;
/** Used to avoid checking if a bake exists many times. */
bool failed_finding_bake = false;

View File

@@ -48,10 +48,9 @@ class BlobWriter {
};
/**
* Allows for simple data deduplication when writing or reading data by making use of implicit
* sharing.
* Allows deduplicating data before it's written.
*/
class BlobSharing {
class BlobWriteSharing : NonCopyable, NonMovable {
private:
struct StoredByRuntimeValue {
/**
@@ -73,6 +72,25 @@ class BlobSharing {
*/
Map<const ImplicitSharingInfo *, StoredByRuntimeValue> stored_by_runtime_;
public:
~BlobWriteSharing();
/**
* Check if the data referenced by `sharing_info` has been written before. If yes, return the
* identifier for the previously written data. Otherwise, write the data now and store the
* identifier for later use.
* \return Identifier that indicates from where the data has been written.
*/
[[nodiscard]] std::shared_ptr<io::serialize::DictionaryValue> write_implicitly_shared(
const ImplicitSharingInfo *sharing_info,
FunctionRef<std::shared_ptr<io::serialize::DictionaryValue>()> write_fn);
};
/**
* Avoids loading the same data multiple times by caching and sharing previously read buffers.
*/
class BlobReadSharing : NonCopyable, NonMovable {
private:
/**
* Use a mutex so that #read_shared can be implemented in a thread-safe way.
*/
@@ -84,17 +102,7 @@ class BlobSharing {
mutable Map<std::string, ImplicitSharingInfoAndData> runtime_by_stored_;
public:
~BlobSharing();
/**
* Check if the data referenced by `sharing_info` has been written before. If yes, return the
* identifier for the previously written data. Otherwise, write the data now and store the
* identifier for later use.
* \return Identifier that indicates from where the data has been written.
*/
[[nodiscard]] std::shared_ptr<io::serialize::DictionaryValue> write_shared(
const ImplicitSharingInfo *sharing_info,
FunctionRef<std::shared_ptr<io::serialize::DictionaryValue>()> write_fn);
~BlobReadSharing();
/**
* Check if the data identified by `io_data` has been read before or load it now.
@@ -139,11 +147,11 @@ class DiskBlobWriter : public BlobWriter {
void serialize_bake(const BakeState &bake_state,
BlobWriter &blob_writer,
BlobSharing &blob_sharing,
BlobWriteSharing &blob_sharing,
std::ostream &r_stream);
std::optional<BakeState> deserialize_bake(std::istream &stream,
const BlobReader &blob_reader,
const BlobSharing &blob_sharing);
const BlobReadSharing &blob_sharing);
} // namespace blender::bke::bake
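
To make the renamed pair above concrete, here is a minimal usage sketch (not code from this commit; it only combines declarations from this header): writing pairs a BlobWriter with a BlobWriteSharing, reading pairs a BlobReader with a BlobReadSharing, and the read-side cache must outlive the loaded state so shared buffers stay valid.

using namespace blender::bke::bake;

/* Write side: BlobWriteSharing deduplicates implicitly-shared buffers while
 * the blobs are streamed out through the writer. */
void write_bake(const BakeState &state, BlobWriter &writer, std::ostream &meta_stream)
{
  BlobWriteSharing write_sharing;
  serialize_bake(state, writer, write_sharing, meta_stream);
}

/* Read side: BlobReadSharing caches previously read buffers so multiple
 * frames can share the same memory. */
std::optional<BakeState> read_bake(std::istream &meta_stream,
                                   const BlobReader &reader,
                                   const BlobReadSharing &read_sharing)
{
  return deserialize_bake(meta_stream, reader, read_sharing);
}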

View File

@@ -14,7 +14,7 @@
#include "BKE_cryptomatte.h"
#include "BLI_hash_mm3.h"
#include "BLI_hash_mm3.hh"
#include "BLI_map.hh"
#include "BLI_string_ref.hh"

View File

@@ -15,7 +15,7 @@
#include "BLI_binary_search.hh"
#include "BLI_fileops.hh"
#include "BLI_hash_md5.h"
#include "BLI_hash_md5.hh"
#include "BLI_path_util.h"
#include "BLI_string.h"
#include "BLI_string_utils.hh"

View File

@@ -87,11 +87,15 @@ BlobSlice DiskBlobWriter::write(const void *data, const int64_t size)
return {blob_name_, {old_offset, size}};
}
BlobSharing::~BlobSharing()
BlobWriteSharing::~BlobWriteSharing()
{
for (const ImplicitSharingInfo *sharing_info : stored_by_runtime_.keys()) {
sharing_info->remove_weak_user_and_delete_if_last();
}
}
BlobReadSharing::~BlobReadSharing()
{
for (const ImplicitSharingInfoAndData &value : runtime_by_stored_.values()) {
if (value.sharing_info) {
value.sharing_info->remove_user_and_delete_if_last();
@@ -99,8 +103,8 @@ BlobSharing::~BlobSharing()
}
}
DictionaryValuePtr BlobSharing::write_shared(const ImplicitSharingInfo *sharing_info,
FunctionRef<DictionaryValuePtr()> write_fn)
DictionaryValuePtr BlobWriteSharing::write_implicitly_shared(
const ImplicitSharingInfo *sharing_info, FunctionRef<DictionaryValuePtr()> write_fn)
{
if (sharing_info == nullptr) {
return write_fn();
@@ -127,7 +131,7 @@ DictionaryValuePtr BlobSharing::write_shared(const ImplicitSharingInfo *sharing_
});
}
std::optional<ImplicitSharingInfoAndData> BlobSharing::read_shared(
std::optional<ImplicitSharingInfoAndData> BlobReadSharing::read_shared(
const DictionaryValue &io_data,
FunctionRef<std::optional<ImplicitSharingInfoAndData>()> read_fn) const
{
@@ -318,18 +322,18 @@ static std::shared_ptr<DictionaryValue> write_blob_simple_gspan(BlobWriter &blob
static std::shared_ptr<DictionaryValue> write_blob_shared_simple_gspan(
BlobWriter &blob_writer,
BlobSharing &blob_sharing,
BlobWriteSharing &blob_sharing,
const GSpan data,
const ImplicitSharingInfo *sharing_info)
{
return blob_sharing.write_shared(sharing_info,
[&]() { return write_blob_simple_gspan(blob_writer, data); });
return blob_sharing.write_implicitly_shared(
sharing_info, [&]() { return write_blob_simple_gspan(blob_writer, data); });
}
[[nodiscard]] static const void *read_blob_shared_simple_gspan(
const DictionaryValue &io_data,
const BlobReader &blob_reader,
const BlobSharing &blob_sharing,
const BlobReadSharing &blob_sharing,
const CPPType &cpp_type,
const int size,
const ImplicitSharingInfo **r_sharing_info)
@@ -355,7 +359,7 @@ static std::shared_ptr<DictionaryValue> write_blob_shared_simple_gspan(
template<typename T>
[[nodiscard]] static bool read_blob_shared_simple_span(const DictionaryValue &io_data,
const BlobReader &blob_reader,
const BlobSharing &blob_sharing,
const BlobReadSharing &blob_sharing,
const int size,
T **r_data,
const ImplicitSharingInfo **r_sharing_info)
@@ -394,7 +398,7 @@ template<typename T>
[[nodiscard]] static bool load_attributes(const io::serialize::ArrayValue &io_attributes,
MutableAttributeAccessor &attributes,
const BlobReader &blob_reader,
const BlobSharing &blob_sharing)
const BlobReadSharing &blob_sharing)
{
for (const auto &io_attribute_value : io_attributes.elements()) {
const auto *io_attribute = io_attribute_value->as_dictionary_value();
@@ -453,7 +457,7 @@ template<typename T>
static PointCloud *try_load_pointcloud(const DictionaryValue &io_geometry,
const BlobReader &blob_reader,
const BlobSharing &blob_sharing)
const BlobReadSharing &blob_sharing)
{
const DictionaryValue *io_pointcloud = io_geometry.lookup_dict("pointcloud");
if (!io_pointcloud) {
@@ -487,7 +491,7 @@ static PointCloud *try_load_pointcloud(const DictionaryValue &io_geometry,
static Curves *try_load_curves(const DictionaryValue &io_geometry,
const BlobReader &blob_reader,
const BlobSharing &blob_sharing)
const BlobReadSharing &blob_sharing)
{
const DictionaryValue *io_curves = io_geometry.lookup_dict("curves");
if (!io_curves) {
@@ -544,7 +548,7 @@ static Curves *try_load_curves(const DictionaryValue &io_geometry,
static Mesh *try_load_mesh(const DictionaryValue &io_geometry,
const BlobReader &blob_reader,
const BlobSharing &blob_sharing)
const BlobReadSharing &blob_sharing)
{
const DictionaryValue *io_mesh = io_geometry.lookup_dict("mesh");
if (!io_mesh) {
@@ -603,11 +607,11 @@ static Mesh *try_load_mesh(const DictionaryValue &io_geometry,
static GeometrySet load_geometry(const DictionaryValue &io_geometry,
const BlobReader &blob_reader,
const BlobSharing &blob_sharing);
const BlobReadSharing &blob_sharing);
static std::unique_ptr<Instances> try_load_instances(const DictionaryValue &io_geometry,
const BlobReader &blob_reader,
const BlobSharing &blob_sharing)
const BlobReadSharing &blob_sharing)
{
const DictionaryValue *io_instances = io_geometry.lookup_dict("instances");
if (!io_instances) {
@@ -664,7 +668,7 @@ static std::unique_ptr<Instances> try_load_instances(const DictionaryValue &io_g
static GeometrySet load_geometry(const DictionaryValue &io_geometry,
const BlobReader &blob_reader,
const BlobSharing &blob_sharing)
const BlobReadSharing &blob_sharing)
{
GeometrySet geometry;
geometry.replace_mesh(try_load_mesh(io_geometry, blob_reader, blob_sharing));
@@ -699,7 +703,7 @@ static std::shared_ptr<io::serialize::ArrayValue> serialize_materials(
static std::shared_ptr<io::serialize::ArrayValue> serialize_attributes(
const AttributeAccessor &attributes,
BlobWriter &blob_writer,
BlobSharing &blob_sharing,
BlobWriteSharing &blob_sharing,
const Set<std::string> &attributes_to_ignore)
{
auto io_attributes = std::make_shared<io::serialize::ArrayValue>();
@@ -734,7 +738,7 @@ static std::shared_ptr<io::serialize::ArrayValue> serialize_attributes(
static std::shared_ptr<DictionaryValue> serialize_geometry_set(const GeometrySet &geometry,
BlobWriter &blob_writer,
BlobSharing &blob_sharing)
BlobWriteSharing &blob_sharing)
{
auto io_geometry = std::make_shared<DictionaryValue>();
if (geometry.has_mesh()) {
@@ -1007,7 +1011,7 @@ template<typename T>
static void serialize_bake_item(const BakeItem &item,
BlobWriter &blob_writer,
BlobSharing &blob_sharing,
BlobWriteSharing &blob_sharing,
DictionaryValue &r_io_item)
{
if (!item.name.empty()) {
@@ -1046,7 +1050,7 @@ static void serialize_bake_item(const BakeItem &item,
static std::unique_ptr<BakeItem> deserialize_bake_item(const DictionaryValue &io_item,
const BlobReader &blob_reader,
const BlobSharing &blob_sharing)
const BlobReadSharing &blob_sharing)
{
const std::optional<StringRefNull> state_item_type = io_item.lookup_str("type");
@@ -1115,7 +1119,7 @@ static constexpr int bake_file_version = 3;
void serialize_bake(const BakeState &bake_state,
BlobWriter &blob_writer,
BlobSharing &blob_sharing,
BlobWriteSharing &blob_sharing,
std::ostream &r_stream)
{
io::serialize::DictionaryValue io_root;
@@ -1132,7 +1136,7 @@ void serialize_bake(const BakeState &bake_state,
std::optional<BakeState> deserialize_bake(std::istream &stream,
const BlobReader &blob_reader,
const BlobSharing &blob_sharing)
const BlobReadSharing &blob_sharing)
{
JsonFormatter formatter;
std::unique_ptr<io::serialize::Value> io_root_value;

View File

@@ -19,7 +19,7 @@
#include "BLI_compiler_attrs.h"
#include "BLI_dynstr.h"
#include "BLI_hash_mm3.h"
#include "BLI_hash_mm3.hh"
#include "BLI_listbase.h"
#include "BLI_string.h"

View File

@@ -548,7 +548,7 @@ double BLI_gset_calc_quality(GSet *gs);
/* -------------------------------------------------------------------- */
/** \name GHash/GSet Utils
*
* Defined in `BLI_ghash_utils.c`
* Defined in `BLI_ghash_utils.cc`
* \{ */
/**

View File

@@ -8,9 +8,9 @@
* \ingroup bli
*/
#ifdef __cplusplus
extern "C" {
#endif
#include <cstdio>
#include "BLI_sys_types.h"
/**
* Compute MD5 message digest for 'len' bytes beginning at 'buffer'.
@@ -27,7 +27,3 @@ void *BLI_hash_md5_buffer(const char *buffer, size_t len, void *resblock);
int BLI_hash_md5_stream(FILE *stream, void *resblock);
char *BLI_hash_md5_to_hexdigest(const void *resblock, char r_hex_digest[33]);
#ifdef __cplusplus
}
#endif

View File

@@ -10,10 +10,6 @@
#include "BLI_sys_types.h"
#ifdef __cplusplus
extern "C" {
#endif
typedef struct BLI_HashMurmur2A {
uint32_t hash;
uint32_t tail;
@@ -33,7 +29,3 @@ uint32_t BLI_hash_mm2a_end(BLI_HashMurmur2A *mm2);
* Non-incremental version, quicker for small keys.
*/
uint32_t BLI_hash_mm2(const unsigned char *data, size_t len, uint32_t seed);
#ifdef __cplusplus
}
#endif

View File

@@ -10,12 +10,4 @@
#include "BLI_sys_types.h"
#ifdef __cplusplus
extern "C" {
#endif
uint32_t BLI_hash_mm3(const unsigned char *data, size_t len, uint32_t seed);
#ifdef __cplusplus
}
#endif

View File

@@ -449,7 +449,10 @@ inline StringRefNull::StringRefNull(const char *str) : StringRefBase(str, int64_
* Reference a std::string. Remember that when the std::string is destructed, the StringRefNull
* will point to uninitialized memory.
*/
inline StringRefNull::StringRefNull(const std::string &str) : StringRefNull(str.c_str()) {}
inline StringRefNull::StringRefNull(const std::string &str)
: StringRefNull(str.c_str(), int64_t(str.size()))
{
}
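
The effect of this change, as a usage sketch (not part of the diff): constructing a StringRefNull from a std::string now passes the string's known size through the two-argument constructor instead of delegating to the const char * constructor, which would recompute the length with strlen().

std::string name = "SculptDraw";
/* Uses name.size() directly; previously this called strlen(name.c_str()). */
blender::StringRefNull ref(name);
BLI_assert(ref.size() == int64_t(name.size()));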
/**
* Get the char at the given index.

View File

@@ -29,7 +29,7 @@ set(SRC
intern/BLI_dynstr.c
intern/BLI_filelist.cc
intern/BLI_ghash.c
intern/BLI_ghash_utils.c
intern/BLI_ghash_utils.cc
intern/BLI_heap.c
intern/BLI_heap_simple.c
intern/BLI_index_range.cc
@@ -77,9 +77,9 @@ set(SRC
intern/generic_virtual_array.cc
intern/generic_virtual_vector_array.cc
intern/gsqueue.c
intern/hash_md5.c
intern/hash_mm2a.c
intern/hash_mm3.c
intern/hash_md5.cc
intern/hash_mm2a.cc
intern/hash_mm3.cc
intern/hash_tables.cc
intern/implicit_sharing.cc
intern/index_mask.cc
@@ -242,9 +242,9 @@ set(SRC
BLI_gsqueue.h
BLI_hash.h
BLI_hash.hh
BLI_hash_md5.h
BLI_hash_mm2a.h
BLI_hash_mm3.h
BLI_hash_md5.hh
BLI_hash_mm2a.hh
BLI_hash_mm3.hh
BLI_hash_tables.hh
BLI_heap.h
BLI_heap_simple.h

View File

@@ -14,7 +14,7 @@
#include "MEM_guardedalloc.h"
#include "BLI_ghash.h" /* own include */
#include "BLI_hash_mm2a.h"
#include "BLI_hash_mm2a.hh"
#include "BLI_utildefines.h"
/* keep last */
@@ -136,7 +136,7 @@ uint BLI_ghashutil_strhash_p(const void *ptr)
const signed char *p;
uint h = 5381;
for (p = ptr; *p != '\0'; p++) {
for (p = static_cast<const signed char *>(ptr); *p != '\0'; p++) {
h = (uint)((h << 5) + h) + (uint)*p;
}
@@ -144,18 +144,18 @@
}
uint BLI_ghashutil_strhash_p_murmur(const void *ptr)
{
const uchar *key = ptr;
const uchar *key = static_cast<const uchar *>(ptr);
return BLI_hash_mm2(key, strlen((const char *)key) + 1, 0);
}
bool BLI_ghashutil_strcmp(const void *a, const void *b)
{
return (a == b) ? false : !STREQ(a, b);
return (a == b) ? false : !STREQ(static_cast<const char *>(a), static_cast<const char *>(b));
}
GHashPair *BLI_ghashutil_pairalloc(const void *first, const void *second)
{
GHashPair *pair = MEM_mallocN(sizeof(GHashPair), "GHashPair");
GHashPair *pair = static_cast<GHashPair *>(MEM_mallocN(sizeof(GHashPair), "GHashPair"));
pair->first = first;
pair->second = second;
return pair;
@@ -163,15 +163,15 @@ GHashPair *BLI_ghashutil_pairalloc(const void *first, const void *second)
uint BLI_ghashutil_pairhash(const void *ptr)
{
const GHashPair *pair = ptr;
const GHashPair *pair = static_cast<const GHashPair *>(ptr);
uint hash = BLI_ghashutil_ptrhash(pair->first);
return hash ^ BLI_ghashutil_ptrhash(pair->second);
}
bool BLI_ghashutil_paircmp(const void *a, const void *b)
{
const GHashPair *A = a;
const GHashPair *B = b;
const GHashPair *A = static_cast<const GHashPair *>(a);
const GHashPair *B = static_cast<const GHashPair *>(b);
return ((A->first != B->first) || (A->second != B->second));
}
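
For context on the static_cast additions throughout this file, a standalone sketch (hypothetical function, not Blender code): C implicitly converts void * to any object pointer type, so the original .c file compiled without casts, while C++ rejects the implicit conversion and requires them.

static unsigned djb2_hash(const void *ptr)
{
  /* In C, `p = ptr;` would be valid; C++ requires the explicit cast. */
  const char *p = static_cast<const char *>(ptr);
  unsigned h = 5381;
  for (; *p != '\0'; p++) {
    h = (h << 5) + h + unsigned(*p);
  }
  return h;
}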

View File

@@ -3,7 +3,7 @@
* SPDX-License-Identifier: GPL-2.0-or-later */
#include "BLI_compute_context.hh"
#include "BLI_hash_md5.h"
#include "BLI_hash_md5.hh"
#include <sstream>
namespace blender {

View File

@@ -15,7 +15,7 @@
#include <string.h>
#include <sys/types.h>
#include "BLI_hash_md5.h" /* own include */
#include "BLI_hash_md5.hh" /* own include */
#if defined HAVE_LIMITS_H || defined _LIBC
# include <limits.h>
@@ -114,7 +114,7 @@ static void md5_process_block(const void *buffer, size_t len, struct md5_ctx *ct
#define CYCLIC(w, s) (w = (w << s) | (w >> (32 - s)))
md5_uint32 correct_words[16];
const md5_uint32 *words = buffer;
const md5_uint32 *words = static_cast<const md5_uint32 *>(buffer);
size_t nwords = len / sizeof(md5_uint32);
const md5_uint32 *endp = words + nwords;
md5_uint32 A = ctx->A;
@@ -259,7 +259,7 @@ static void md5_process_block(const void *buffer, size_t len, struct md5_ctx *ct
*/
static void *md5_read_ctx(const struct md5_ctx *ctx, void *resbuf)
{
md5_uint32 *digest = resbuf;
md5_uint32 *digest = static_cast<md5_uint32 *>(resbuf);
digest[0] = SWAP(ctx->A);
digest[1] = SWAP(ctx->B);
digest[2] = SWAP(ctx->C);
@@ -268,7 +268,8 @@ static void *md5_read_ctx(const struct md5_ctx *ctx, void *resbuf)
return resbuf;
}
/* Top level public functions. */
/*
Top level public functions. */
int BLI_hash_md5_stream(FILE *stream, void *resblock)
{

View File

@@ -20,7 +20,7 @@
#include "BLI_compiler_attrs.h"
#include "BLI_hash_mm2a.h" /* own include */
#include "BLI_hash_mm2a.hh" /* own include */
/* Helpers. */
#define MM2A_M 0x5bd1e995

View File

@@ -15,7 +15,7 @@
#include "BLI_compiler_attrs.h"
#include "BLI_compiler_compat.h"
#include "BLI_hash_mm3.h" /* own include */
#include "BLI_hash_mm3.hh" /* own include */
#if defined(_MSC_VER)
# include <stdlib.h>

View File

@@ -4,7 +4,7 @@
#include "testing/testing.h"
#include "BLI_hash_mm2a.h"
#include "BLI_hash_mm2a.hh"
/* NOTE: Reference results are taken from reference implementation
* (cpp code, CMurmurHash2A variant):

View File

@@ -1008,6 +1008,28 @@ void ShadowModule::end_sync()
{
Manager &manager = *inst_.manager;
{
/* Mark for update all shadow pages touching an updated shadow caster. */
PassSimple &pass = caster_update_ps_;
pass.init();
pass.shader_set(inst_.shaders.static_shader_get(SHADOW_TILEMAP_TAG_UPDATE));
pass.bind_ssbo("tilemaps_buf", tilemap_pool.tilemaps_data);
pass.bind_ssbo("tiles_buf", tilemap_pool.tiles_data);
/* Past caster transforms. */
if (past_casters_updated_.size() > 0) {
pass.bind_ssbo("bounds_buf", &manager.bounds_buf.previous());
pass.bind_ssbo("resource_ids_buf", past_casters_updated_);
pass.dispatch(int3(past_casters_updated_.size(), 1, tilemap_pool.tilemaps_data.size()));
}
/* Current caster transforms. */
if (curr_casters_updated_.size() > 0) {
pass.bind_ssbo("bounds_buf", &manager.bounds_buf.current());
pass.bind_ssbo("resource_ids_buf", curr_casters_updated_);
pass.dispatch(int3(curr_casters_updated_.size(), 1, tilemap_pool.tilemaps_data.size()));
}
pass.barrier(GPU_BARRIER_SHADER_STORAGE);
}
{
PassSimple &pass = tilemap_setup_ps_;
pass.init();
@@ -1055,26 +1077,6 @@ void ShadowModule::end_sync()
}
sub.barrier(GPU_BARRIER_SHADER_STORAGE);
}
{
/* Mark for update all shadow pages touching an updated shadow caster. */
PassSimple::Sub &sub = pass.sub("CasterUpdate");
sub.shader_set(inst_.shaders.static_shader_get(SHADOW_TILEMAP_TAG_UPDATE));
sub.bind_ssbo("tilemaps_buf", tilemap_pool.tilemaps_data);
sub.bind_ssbo("tiles_buf", tilemap_pool.tiles_data);
/* Past caster transforms. */
if (past_casters_updated_.size() > 0) {
sub.bind_ssbo("bounds_buf", &manager.bounds_buf.previous());
sub.bind_ssbo("resource_ids_buf", past_casters_updated_);
sub.dispatch(int3(past_casters_updated_.size(), 1, tilemap_pool.tilemaps_data.size()));
}
/* Current caster transforms. */
if (curr_casters_updated_.size() > 0) {
sub.bind_ssbo("bounds_buf", &manager.bounds_buf.current());
sub.bind_ssbo("resource_ids_buf", curr_casters_updated_);
sub.dispatch(int3(curr_casters_updated_.size(), 1, tilemap_pool.tilemaps_data.size()));
}
sub.barrier(GPU_BARRIER_SHADER_STORAGE);
}
}
/* Non volume usage tagging happens between these two steps.
@@ -1275,7 +1277,7 @@ bool ShadowModule::shadow_update_finished()
return true;
}
int max_updated_view_count = tilemap_pool.tilemaps_data.size();
int max_updated_view_count = tilemap_pool.tilemaps_data.size() * SHADOW_TILEMAP_LOD;
if (max_updated_view_count <= SHADOW_VIEW_MAX) {
/* There is enough shadow views to cover all tilemap updates.
* No readback needed as it is guaranteed that all of them will be updated. */
@@ -1283,6 +1285,7 @@ bool ShadowModule::shadow_update_finished()
}
/* Read back and check if there is still tile-map to update. */
statistics_buf_.current().async_flush_to_host();
statistics_buf_.current().read();
ShadowStatistics stats = statistics_buf_.current();
/* Rendering is finished if we rendered all the remaining pages. */
@@ -1335,6 +1338,13 @@ void ShadowModule::set_view(View &view, GPUTexture *depth_tx)
inst_.hiz_buffer.update();
/* Run caster update once and before the update loop.
* This is valid even before the view update since only the static tilemaps
* are concerned about this tagging. */
/* TODO(fclem): There is an optimization opportunity here where we can
* test casters only against the static tilemaps instead of all of them. */
inst_.manager->submit(caster_update_ps_, view);
do {
DRW_stats_group_start("Shadow");
{

View File

@@ -228,6 +228,7 @@ class ShadowModule {
Framebuffer usage_tag_fb;
PassSimple caster_update_ps_ = {"CasterUpdate"};
/** List of Resource IDs (to get bounds) for tagging passes. */
StorageVectorBuffer<uint, 128> past_casters_updated_ = {"PastCastersUpdated"};
StorageVectorBuffer<uint, 128> curr_casters_updated_ = {"CurrCastersUpdated"};

View File

@@ -123,13 +123,12 @@ GPUBatch *point_cloud_sub_pass_setup_implementation(PassT &sub_ps,
if (gpu_material != nullptr) {
ListBase gpu_attrs = GPU_material_attributes(gpu_material);
LISTBASE_FOREACH (GPUMaterialAttribute *, gpu_attr, &gpu_attrs) {
char sampler_name[32];
/** NOTE: Reusing curve attribute function. */
drw_curves_get_attribute_sampler_name(gpu_attr->name, sampler_name);
GPUVertBuf **attribute_buf = DRW_pointcloud_evaluated_attribute(&pointcloud, gpu_attr->name);
if (attribute_buf) {
char sampler_name[32];
/** NOTE: Reusing curve attribute function. */
drw_curves_get_attribute_sampler_name(gpu_attr->name, sampler_name);
sub_ps.bind_texture(sampler_name, attribute_buf);
}
sub_ps.bind_texture(sampler_name, (attribute_buf) ? attribute_buf : &g_dummy_vbo);
}
}

View File

@@ -1168,7 +1168,8 @@ static int move_to_collection_regular_invoke(bContext *C, wmOperator *op)
static int move_to_new_collection_invoke(bContext *C, wmOperator *op)
{
return WM_operator_props_dialog_popup(C, op, 200);
return WM_operator_props_dialog_popup(
C, op, 200, IFACE_("Move to New Collection"), IFACE_("Move"));
}
static int move_to_collection_invoke(bContext *C, wmOperator *op, const wmEvent * /*event*/)

View File

@@ -237,7 +237,8 @@ static int pose_calculate_paths_invoke(bContext *C, wmOperator *op, const wmEven
/* show popup dialog to allow editing of range... */
/* FIXME: hard-coded dimensions here are just arbitrary. */
return WM_operator_props_dialog_popup(C, op, 270);
return WM_operator_props_dialog_popup(
C, op, 270, IFACE_("Calculate Paths for the Selected Bones"), IFACE_("Calculate"));
}
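
Both updated call sites illustrate the extended WM_operator_props_dialog_popup signature, which now takes a dialog title and a confirm-button label after the width. A hypothetical further call site would follow the same pattern:

/* Width, then dialog title, then confirm-button label. */
return WM_operator_props_dialog_popup(
    C, op, 300, IFACE_("My Dialog Title"), IFACE_("Apply"));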
/**

View File

@@ -180,7 +180,7 @@ class AbstractTreeViewItem : public AbstractViewItem, public TreeViewItemContain
protected:
/** This label is used as the default way of identifying an item within its parent. */
std::string label_{};
std::string label_;
public:
/* virtual */ ~AbstractTreeViewItem() override = default;

View File

@@ -26,6 +26,8 @@
namespace blender::ui {
#define UI_TREEVIEW_INDENT short(0.7f * UI_UNIT_X)
static int unpadded_item_height()
{
return UI_UNIT_Y;
@@ -162,14 +164,14 @@ void AbstractTreeView::draw_hierarchy_lines_recursive(const ARegion &region,
rcti last_child_rect;
ui_but_to_pixelrect(&last_child_rect, &region, block, &last_child_but);
/* Small vertical padding. */
const short line_padding = UI_UNIT_Y / 4.0f / aspect;
const float x = first_child_rect.xmin + ((first_descendant->indent_width() -
(0.5f * UI_ICON_SIZE) + U.pixelsize + UI_SCALE_FAC) /
aspect);
const int first_child_top = first_child_rect.ymax - (2.0f * UI_SCALE_FAC / aspect);
const int last_child_bottom = last_child_rect.ymin + (4.0f * UI_SCALE_FAC / aspect);
immBegin(GPU_PRIM_LINES, 2);
immVertex2f(pos, x, first_child_rect.ymax - line_padding);
immVertex2f(pos, x, last_child_rect.ymin + line_padding);
immVertex2f(pos, x, first_child_top);
immVertex2f(pos, x, last_child_bottom);
immEnd();
}
}
@@ -180,20 +182,8 @@ void AbstractTreeView::draw_hierarchy_lines(const ARegion &region) const
GPUVertFormat *format = immVertexFormat();
uint pos = GPU_vertformat_attr_add(format, "pos", GPU_COMP_F32, 2, GPU_FETCH_FLOAT);
uchar col[4];
immBindBuiltinProgram(GPU_SHADER_3D_LINE_DASHED_UNIFORM_COLOR);
float viewport_size[4];
GPU_viewport_size_get_f(viewport_size);
immUniform2f("viewport_size", viewport_size[2] / UI_SCALE_FAC, viewport_size[3] / UI_SCALE_FAC);
immUniform1i("colors_len", 0); /* "simple" mode */
immUniform1f("dash_width", 8.0f);
/* >= is 1.0 for un-dashed lines. */
immUniform1f("udash_factor", 1.0f);
UI_GetThemeColorBlend3ubv(TH_BACK, TH_TEXT, 0.4f, col);
col[3] = 255;
immUniformColor4ubv(col);
immBindBuiltinProgram(GPU_SHADER_3D_UNIFORM_COLOR);
immUniformThemeColorAlpha(TH_TEXT, 0.2f);
GPU_line_width(1.0f / aspect);
GPU_blend(GPU_BLEND_ALPHA);
@@ -317,7 +307,7 @@ void AbstractTreeViewItem::add_treerow_button(uiBlock &block)
int AbstractTreeViewItem::indent_width() const
{
return count_parents() * UI_ICON_SIZE;
return count_parents() * UI_TREEVIEW_INDENT;
}
void AbstractTreeViewItem::add_indent(uiLayout &row) const
@@ -330,8 +320,9 @@ void AbstractTreeViewItem::add_indent(uiLayout &row) const
/* Indent items without collapsing icon some more within their parent. Makes it clear that they
* are actually nested and not just a row at the same level without a chevron. */
if (!is_collapsible() && parent_) {
uiDefBut(block, UI_BTYPE_SEPR, 0, "", 0, 0, 0.2f * UI_UNIT_X, 0, nullptr, 0.0, 0.0, 0, 0, "");
if (!is_collapsible()) {
uiDefBut(
block, UI_BTYPE_SEPR, 0, "", 0, 0, UI_TREEVIEW_INDENT, 0, nullptr, 0.0, 0.0, 0, 0, "");
}
/* Restore. */
@@ -369,9 +360,20 @@ void AbstractTreeViewItem::add_collapse_chevron(uiBlock &block) const
}
const BIFIconID icon = is_collapsed() ? ICON_RIGHTARROW : ICON_DOWNARROW_HLT;
uiBut *but = uiDefIconBut(
&block, UI_BTYPE_BUT_TOGGLE, 0, icon, 0, 0, UI_UNIT_X, UI_UNIT_Y, nullptr, 0, 0, 0, 0, "");
/* Note that we're passing the tree-row button here, not the chevron one. */
uiBut *but = uiDefIconBut(&block,
UI_BTYPE_BUT_TOGGLE,
0,
icon,
0,
0,
UI_TREEVIEW_INDENT,
UI_UNIT_Y,
nullptr,
0,
0,
0,
0,
"");
UI_but_func_set(but, collapse_chevron_click_fn, nullptr, nullptr);
UI_but_flag_disable(but, UI_BUT_UNDO);
}
@@ -524,8 +526,8 @@ bool AbstractTreeViewItem::set_collapsed(const bool collapsed)
bool AbstractTreeViewItem::is_collapsible() const
{
BLI_assert_msg(get_tree_view().is_reconstructed(),
"State can't be queried until reconstruction is completed");
// BLI_assert_msg(get_tree_view().is_reconstructed(),
// "State can't be queried until reconstruction is completed");
if (children_.is_empty()) {
return false;
}

View File

@@ -228,7 +228,7 @@ struct NodeBakeRequest {
bake::BakePath path;
int frame_start;
int frame_end;
std::unique_ptr<bake::BlobSharing> blob_sharing;
std::unique_ptr<bake::BlobWriteSharing> blob_sharing;
};
struct BakeGeometryNodesJob {
@@ -481,7 +481,7 @@ static Vector<NodeBakeRequest> collect_simulations_to_bake(Main &bmain,
request.nmd = nmd;
request.bake_id = id;
request.node_type = node->type;
request.blob_sharing = std::make_unique<bake::BlobSharing>();
request.blob_sharing = std::make_unique<bake::BlobWriteSharing>();
std::optional<bake::BakePath> path = bake::get_node_bake_path(bmain, *object, *nmd, id);
if (!path) {
continue;
@@ -832,7 +832,7 @@ static Vector<NodeBakeRequest> bake_single_node_gather_bake_request(bContext *C,
request.nmd = &nmd;
request.bake_id = bake_id;
request.node_type = node->type;
request.blob_sharing = std::make_unique<bake::BlobSharing>();
request.blob_sharing = std::make_unique<bake::BlobWriteSharing>();
const NodesModifierBake *bake = nmd.find_bake(bake_id);
if (!bake) {

View File

@@ -315,21 +315,6 @@ static bool paint_brush_update(bContext *C,
copy_v2_v2(ups->mask_tex_mouse, mouse);
stroke->cached_size_pressure = pressure;
ups->do_linear_conversion = false;
ups->colorspace = nullptr;
/* check here if color sampling the main brush should do color conversion. This is done here
* to avoid locking up to get the image buffer during sampling */
if (brush->mtex.tex && brush->mtex.tex->type == TEX_IMAGE && brush->mtex.tex->ima) {
ImBuf *tex_ibuf = BKE_image_pool_acquire_ibuf(
brush->mtex.tex->ima, &brush->mtex.tex->iuser, nullptr);
if (tex_ibuf && tex_ibuf->float_buffer.data == nullptr) {
ups->do_linear_conversion = true;
ups->colorspace = tex_ibuf->byte_buffer.colorspace;
}
BKE_image_pool_release_ibuf(brush->mtex.tex->ima, tex_ibuf, nullptr);
}
stroke->brush_init = true;
}
@@ -929,6 +914,20 @@ PaintStroke *paint_stroke_new(bContext *C,
get_imapaint_zoom(C, &zoomx, &zoomy);
stroke->zoom_2d = max_ff(zoomx, zoomy);
/* Check here if color sampling the main brush should do color conversion. This is done here
* to avoid locking up to get the image buffer during sampling. */
ups->do_linear_conversion = false;
ups->colorspace = nullptr;
if (br->mtex.tex && br->mtex.tex->type == TEX_IMAGE && br->mtex.tex->ima) {
ImBuf *tex_ibuf = BKE_image_pool_acquire_ibuf(br->mtex.tex->ima, &br->mtex.tex->iuser, NULL);
if (tex_ibuf && tex_ibuf->float_buffer.data == nullptr) {
ups->do_linear_conversion = true;
ups->colorspace = tex_ibuf->byte_buffer.colorspace;
}
BKE_image_pool_release_ibuf(br->mtex.tex->ima, tex_ibuf, nullptr);
}
if (stroke->stroke_mode == BRUSH_STROKE_INVERT) {
if (br->flag & BRUSH_CURVE) {
RNA_enum_set(op->ptr, "mode", BRUSH_STROKE_NORMAL);

View File

@@ -10,14 +10,13 @@
#include "BPy_Convert.h"
#include "BLI_hash_mm2a.hh"
#include "BLI_math_vector.h"
#ifdef __cplusplus
extern "C" {
#endif
#include "BLI_hash_mm2a.h"
using namespace Freestyle;
///////////////////////////////////////////////////////////////////////////////////////////

View File

@@ -15,7 +15,7 @@
#include "DNA_material_types.h"
#include "BLI_ghash.h"
#include "BLI_hash_mm2a.h"
#include "BLI_hash_mm2a.hh"
#include "BLI_link_utils.h"
#include "BLI_listbase.h"
#include "BLI_string.h"

View File

@@ -16,7 +16,7 @@
#include "BLI_fileops.h"
#include "BLI_ghash.h"
#include "BLI_hash_md5.h"
#include "BLI_hash_md5.hh"
#include "BLI_path_util.h"
#include "BLI_string.h"
#include "BLI_string_utils.hh"

View File

@@ -7,7 +7,7 @@
*/
#include "BLI_fileops.h"
#include "BLI_hash_md5.h"
#include "BLI_hash_md5.hh"
#include "BLI_utildefines.h"
#include "IMB_imbuf.hh"

View File

@@ -5,7 +5,7 @@
#include "testing/testing.h"
#include "BLI_fileops.hh"
#include "BLI_hash_mm2a.h"
#include "BLI_hash_mm2a.hh"
#include "ply_import.hh"
#include "ply_import_buffer.hh"

View File

@@ -48,7 +48,7 @@ file(GENERATE OUTPUT ${dna_header_string_file} CONTENT "${DNA_FILE_LIST}")
set(SRC_BLENLIB
../../blenlib/intern/BLI_assert.c
../../blenlib/intern/BLI_ghash.c
../../blenlib/intern/BLI_ghash_utils.c
../../blenlib/intern/BLI_ghash_utils.cc
../../blenlib/intern/BLI_linklist.c
../../blenlib/intern/BLI_memarena.c
../../blenlib/intern/BLI_mempool.c
@@ -57,7 +57,7 @@ set(SRC_BLENLIB
../../blenlib/intern/string.c
# Dependency of BLI_ghash.c
../../blenlib/intern/hash_mm2a.c
../../blenlib/intern/hash_mm2a.cc
# Dependencies of BLI_mempool.c when ASAN is enabled.
../../blenlib/intern/gsqueue.c

View File

@@ -979,7 +979,7 @@ static bool try_find_baked_data(bake::NodeBakeCache &bake,
bake.frames.append(std::move(frame_cache));
}
bake.blobs_dir = bake_path->blobs_dir;
bake.blob_sharing = std::make_unique<bake::BlobSharing>();
bake.blob_sharing = std::make_unique<bake::BlobReadSharing>();
return true;
}

View File

@@ -12,7 +12,7 @@
#include "BLI_assert.h"
#include "BLI_dynstr.h"
#include "BLI_hash_mm3.h"
#include "BLI_hash_mm3.hh"
#include "BLI_listbase.h"
#include "BLI_math_vector.h"
#include "BLI_math_vector_types.hh"

View File

@@ -31,7 +31,7 @@
#include "BLI_cpp_types.hh"
#include "BLI_dot_export.hh"
#include "BLI_hash.h"
#include "BLI_hash_md5.h"
#include "BLI_hash_md5.hh"
#include "BLI_lazy_threading.hh"
#include "BLI_map.hh"

View File

@@ -8735,7 +8735,9 @@ static int bpy_class_call(bContext *C, PointerRNA *ptr, FunctionRNA *func, Param
if (err != -1 && (is_staticmethod || is_classmethod || py_class_instance)) {
PyObject *item = PyObject_GetAttrString((PyObject *)py_class, RNA_function_identifier(func));
if (item) {
const bool item_type_valid = (item != nullptr) &&
(is_staticmethod ? PyMethod_Check(item) : PyFunction_Check(item));
if (item_type_valid) {
funcptr = RNA_pointer_create(nullptr, &RNA_Function, func);
if (is_staticmethod) {

View File

@@ -14,7 +14,7 @@
#include "MEM_guardedalloc.h"
#include "BLI_ghash.h"
#include "BLI_hash_md5.h"
#include "BLI_hash_md5.hh"
#include "BLI_implicit_sharing.hh"
#include "BLI_listbase.h"
#include "BLI_path_util.h"

View File

@@ -147,6 +147,8 @@ def main():
test_dir_name = Path(test_dir).name
if test_dir_name in ('motion_blur', 'integrator', ):
report.set_fail_threshold(0.032)
if test_dir_name == "denoise":
report.set_fail_threshold(0.25)
ok = report.run(test_dir, blender, get_arguments, batch=args.batch)

View File

@@ -0,0 +1,219 @@
#!/usr/bin/env python3
# SPDX-FileCopyrightText: 2023 Blender Authors
#
# SPDX-License-Identifier: GPL-2.0-or-later
"""
This script is to validate the markdown page that documents Blender's file-structure, see:
https://developer.blender.org/docs/features/code_layout/
It can run without any arguments, in which case it downloads the markdown to Blender's source root.
Alternatively, you may pass the markdown text as an argument, e.g.
check_docs_code_layout.py --markdown=markdown.txt
"""
import os
import argparse
from typing import (
List,
Optional,
)
# -----------------------------------------------------------------------------
# Constants
CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
SOURCE_DIR = os.path.normpath(os.path.join(CURRENT_DIR, "..", ".."))
MARKDOWN_URL = "https://projects.blender.org/blender/blender-developer-docs/raw/branch/main/docs/features/code_layout.md"
# -----------------------------------------------------------------------------
# HTML Utilities
def text_with_title_underline(text: str, underline: str = "=") -> str:
return "\n{:s}\n{:s}\n".format(text, len(text) * underline)
def html_extract_markdown_from_url(url: str) -> Optional[str]:
"""
Download
"""
import urllib.request
req = urllib.request.Request(url=url)
with urllib.request.urlopen(req) as fh:
data = fh.read().decode('utf-8')
return data
# -----------------------------------------------------------------------------
# markdown Text Parsing
def markdown_to_paths(markdown: str) -> List[str]:
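# Extract documented paths from the markdown table. For example, a row like
#     <td markdown>/source/blender/blenlib/</td>
# yields the path "source/blender/blenlib" (tags are stripped and the
# trailing slash removed below).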
file_paths = []
markdown = markdown.replace("<p>", "")
markdown = markdown.replace("</p>", "")
markdown = markdown.replace("<strong>", "")
markdown = markdown.replace("</strong>", "")
markdown = markdown.replace("</td>", "")
path_prefix = "<td markdown>/"
for line in markdown.splitlines():
line = line.strip()
if line.startswith(path_prefix):
file_path = line[len(path_prefix):]
file_path = file_path.rstrip("/")
file_paths.append(file_path)
return file_paths
# -----------------------------------------------------------------------------
# Reporting
def report_known_markdown_paths(file_paths: List[str]) -> None:
heading = "Paths Found in markdown Table"
print(text_with_title_underline(heading))
for p in file_paths:
print("-", p)
def report_missing_source(file_paths: List[str]) -> int:
heading = "Missing in Source Dir"
test = [p for p in file_paths if not os.path.exists(os.path.join(SOURCE_DIR, p))]
amount = str(len(test)) if test else "none found"
print(text_with_title_underline("{:s} ({:s})".format(heading, amount)))
if not test:
return 0
print("The following paths were found in the markdown\n"
"but were not found in Blender's source directory:\n")
for p in test:
print("-", p)
return len(test)
def report_incomplete(file_paths: List[str]) -> int:
heading = "Missing Documentation"
test = []
basedirs = {os.path.dirname(p) for p in file_paths}
for base in sorted(basedirs):
base_abs = os.path.join(SOURCE_DIR, base)
if os.path.exists(base_abs):
for p in os.listdir(base_abs):
if not p.startswith("."):
p_abs = os.path.join(base_abs, p)
if os.path.isdir(p_abs):
p_rel = os.path.join(base, p)
if p_rel not in file_paths:
test.append(p_rel)
amount = str(len(test)) if test else "none found"
print(text_with_title_underline("{:s} ({:s})".format(heading, amount)))
if not test:
return 0
print("The following paths were found in Blender's source directory\n"
"but are missing from the markdown:\n")
for p in sorted(test):
print("-", p)
return len(test)
def report_alphabetical_order(file_paths: List[str]) -> int:
heading = "Non-Alphabetically Ordered"
test = []
p_prev = ""
p_prev_dir = ""
for p in file_paths:
p_dir = os.path.dirname(p)
if p_prev:
if p_dir == p_prev_dir:
if p < p_prev:
test.append((p_prev, p))
p_prev_dir = p_dir
p_prev = p
amount = str(len(test)) if test else "none found"
print(text_with_title_underline("{:s} ({:s})".format(heading, amount)))
if not test:
return 0
for p_prev, p in test:
print("-", p, "(should be before)\n ", p_prev)
return len(test)
# -----------------------------------------------------------------------------
# Argument Parser
def create_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"-m",
"--markdown",
dest="markdown",
metavar='PATH',
default=os.path.join(SOURCE_DIR, "markdown_file_structure.txt"),
help="markdown text file path, NOTE: this will be downloaded if not found!",
)
return parser
# -----------------------------------------------------------------------------
# Main Function
def main() -> None:
parser = create_parser()
args = parser.parse_args()
if os.path.exists(args.markdown):
print("Using existing markdown text:", args.markdown)
else:
data = html_extract_markdown_from_url(MARKDOWN_URL)
if data is not None:
with open(args.markdown, 'w', encoding='utf-8') as fh:
fh.write(data)
print("Downloaded markdown text to:", args.markdown)
print("Update and save to:", MARKDOWN_URL)
else:
print("Failed to downloaded or extract markdown text, aborting!")
return
with open(args.markdown, 'r', encoding='utf-8') as fh:
file_paths = markdown_to_paths(fh.read())
# Disabled by default; mostly useful when debugging why paths might not be found.
# report_known_markdown_paths(file_paths)
issues = 0
issues += report_missing_source(file_paths)
issues += report_incomplete(file_paths)
issues += report_alphabetical_order(file_paths)
if issues:
print("Warning, found {:d} issues!\n".format(issues))
else:
print("Success! The markdown text is up to date with Blender's source tree!\n")
if __name__ == "__main__":
main()

View File

@@ -1,277 +0,0 @@
#!/usr/bin/env python3
# SPDX-FileCopyrightText: 2023 Blender Authors
#
# SPDX-License-Identifier: GPL-2.0-or-later
"""
This script is to validate the WIKI page that documents Blender's file-structure, see:
https://wiki.blender.org/wiki/Source/File_Structure
It can run without any arguments, where it will download the WIKI to Blender's source root:
You may pass the wiki text as an argument, e.g.
check_wiki_file_structure.py --wiki=wiki.txt
"""
import os
import re
import argparse
from typing import (
List,
Optional,
Tuple,
)
# -----------------------------------------------------------------------------
# Constants
CURRENT_DIR = os.path.abspath(os.path.dirname(__file__))
SOURCE_DIR = os.path.normpath(os.path.join(CURRENT_DIR, "..", ".."))
WIKI_URL = "https://wiki.blender.org/wiki/Source/File_Structure"
WIKI_URL_EDIT = "https://wiki.blender.org/w/index.php?title=Source/File_Structure&action=edit"
# -----------------------------------------------------------------------------
# HTML Utilities
def text_with_title_underline(text: str, underline: str = "=") -> str:
return "\n{:s}\n{:s}\n".format(text, len(text) * underline)
def html_extract_first_textarea(data: str) -> Optional[str]:
"""
Extract and escape text within the first
``<textarea ...> ... </textarea>`` found in the HTML text.
"""
beg = data.find("<textarea")
if beg == -1:
print("Failed to extract <textarea ...> start")
return None
beg = data.find(">", beg)
if beg == -1:
print("Failed to extract <textarea ...> end")
return None
beg += 1
end = data.find("</textarea>", beg)
if end == -1:
print("Failed to extract </textarea>")
return None
data = data[beg:end]
for (src, dst) in (
("&lt;", "<"),
("&gt;", ">"),
("&amp;", "&"),
("&quot;", "\""),
):
data = data.replace(src, dst)
return data
def html_extract_first_textarea_from_url(url: str) -> Optional[str]:
"""
Download
"""
import urllib.request
req = urllib.request.Request(url=url)
with urllib.request.urlopen(req) as fh:
data = fh.read().decode('utf-8')
return html_extract_first_textarea(data)
# -----------------------------------------------------------------------------
# WIKI Text Parsing
def wiki_to_paths_and_docstrings(wiki_text: str) -> Tuple[List[str], List[str]]:
file_paths = []
file_paths_docstring = []
lines = wiki_text.split("\n")
i = 0
while i < len(lines):
if lines[i].startswith("| /"):
# Convert:
# `| /source/'''blender/'''` -> `/source/blender`.
p = lines[i][3:].replace("'''", "").split(" ", 1)[0].rstrip("/")
file_paths.append(p)
body = []
i += 1
while lines[i].strip() not in {"|-", "|}"}:
body.append(lines[i].lstrip("| "))
i += 1
i -= 1
file_paths_docstring.append("\n".join(body))
i += 1
return file_paths, file_paths_docstring
# -----------------------------------------------------------------------------
# Reporting
def report_known_wiki_paths(file_paths: List[str]) -> None:
heading = "Paths Found in WIKI Table"
print(text_with_title_underline(heading))
for p in file_paths:
print("-", p)
def report_missing_source(file_paths: List[str]) -> int:
heading = "Missing in Source Dir"
test = [p for p in file_paths if not os.path.exists(os.path.join(SOURCE_DIR, p))]
amount = str(len(test)) if test else "none found"
print(text_with_title_underline("{:s} ({:s})".format(heading, amount)))
if not test:
return 0
print("The following paths were found in the WIKI text\n"
"but were not found in Blender's source directory:\n")
for p in test:
print("-", p)
return len(test)
def report_incomplete(file_paths: List[str]) -> int:
heading = "Missing Documentation"
test = []
basedirs = {os.path.dirname(p) for p in file_paths}
for base in sorted(basedirs):
base_abs = os.path.join(SOURCE_DIR, base)
for p in os.listdir(base_abs):
if not p.startswith("."):
p_abs = os.path.join(base_abs, p)
if os.path.isdir(p_abs):
p_rel = os.path.join(base, p)
if p_rel not in file_paths:
test.append(p_rel)
amount = str(len(test)) if test else "none found"
print(text_with_title_underline("{:s} ({:s})".format(heading, amount)))
if not test:
return 0
print("The following paths were found in Blender's source directory\n"
"but are missing from the WIKI text:\n")
for p in sorted(test):
print("-", p)
return len(test)
def report_alphabetical_order(file_paths: List[str]) -> int:
heading = "Non-Alphabetically Ordered"
test = []
p_prev = ""
p_prev_dir = ""
for p in file_paths:
p_dir = os.path.dirname(p)
if p_prev:
if p_dir == p_prev_dir:
if p < p_prev:
test.append((p_prev, p))
p_prev_dir = p_dir
p_prev = p
amount = str(len(test)) if test else "none found"
print(text_with_title_underline("{:s} ({:s})".format(heading, amount)))
if not test:
return 0
for p_prev, p in test:
print("-", p, "(should be before)\n ", p_prev)
return len(test)
def report_todo_in_docstrings(file_paths: List[str], file_paths_docstring: List[str]) -> int:
heading = "Marked as TODO"
test = []
re_todo = re.compile(r"\bTODO\b")
for p, docstring in zip(file_paths, file_paths_docstring):
if re_todo.match(docstring):
test.append(p)
amount = str(len(test)) if test else "none found"
print(text_with_title_underline("{:s} ({:s})".format(heading, amount)))
if not test:
return 0
for p in test:
print("-", p)
return len(test)
# -----------------------------------------------------------------------------
# Argument Parser
def create_parser() -> argparse.ArgumentParser:
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument(
"-w",
"--wiki",
dest="wiki_text",
metavar='PATH',
default=os.path.join(SOURCE_DIR, "wiki_file_structure.txt"),
help="WIKI text file path, NOTE: this will be downloaded if not found!",
)
return parser
# -----------------------------------------------------------------------------
# Main Function
def main() -> None:
parser = create_parser()
args = parser.parse_args()
if os.path.exists(args.wiki_text):
print("Using existing WIKI text:", args.wiki_text)
else:
data = html_extract_first_textarea_from_url(WIKI_URL_EDIT)
if data is not None:
with open(args.wiki_text, 'w', encoding='utf-8') as fh:
fh.write(data)
print("Downloaded WIKI text to:", args.wiki_text)
print("Update and save to:", WIKI_URL)
else:
print("Failed to downloaded or extract WIKI text, aborting!")
return
with open(args.wiki_text, 'r', encoding='utf-8') as fh:
file_paths, file_paths_docstring = wiki_to_paths_and_docstrings(fh.read())
# Disable, mostly useful when debugging why paths might not be found.
# report_known_wiki_paths()
issues = 0
issues += report_missing_source(file_paths)
issues += report_incomplete(file_paths)
issues += report_alphabetical_order(file_paths)
issues += report_todo_in_docstrings(file_paths, file_paths_docstring)
if issues:
print("Warning, found {:d} issues!\n".format(issues))
else:
print("Success! The WIKI text is up to date with Blender's source tree!\n")
if __name__ == "__main__":
main()