WIP: Brush assets project #106303
|
@ -8,7 +8,6 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <map>
|
||||
#include <memory>
|
||||
#include <mutex>
|
||||
#include <set>
|
||||
|
@ -17,7 +16,6 @@
|
|||
#include "BLI_function_ref.hh"
|
||||
#include "BLI_map.hh"
|
||||
#include "BLI_set.hh"
|
||||
#include "BLI_string_ref.hh"
|
||||
#include "BLI_uuid.h"
|
||||
#include "BLI_vector.hh"
|
||||
|
||||
|
@ -279,120 +277,6 @@ class AssetCatalogService {
|
|||
const OwningAssetCatalogMap &get_deleted_catalogs() const;
|
||||
};
|
||||
|
||||
/**
 * All catalogs that are owned by a single asset library, and managed by a single instance of
 * #AssetCatalogService. The undo system for asset catalog edits contains historical copies of this
 * struct.
 */
class AssetCatalogCollection {
 protected:
  /** All catalogs known, except the known-but-deleted ones. */
  OwningAssetCatalogMap catalogs_;

  /** Catalogs that have been deleted. They are kept around so that the load-merge-save of catalog
   * definition files can actually delete them if they already existed on disk (instead of the
   * merge operation resurrecting them). */
  OwningAssetCatalogMap deleted_catalogs_;

  /* For now only a single catalog definition file is supported.
   * The aim is to support an arbitrary number of such files per asset library in the future. */
  std::unique_ptr<AssetCatalogDefinitionFile> catalog_definition_file_;

  /** Whether any of the catalogs have unsaved changes. */
  bool has_unsaved_changes_ = false;

  /* The service manages collections and needs direct access to their internals. */
  friend AssetCatalogService;

 public:
  AssetCatalogCollection() = default;
  /* Implicit copying is forbidden; use the explicit #deep_copy() instead. */
  AssetCatalogCollection(const AssetCatalogCollection &other) = delete;
  AssetCatalogCollection(AssetCatalogCollection &&other) noexcept = default;

  /** Return an independent copy: catalogs, deleted catalogs and (when present) the catalog
   * definition file, remapped to reference the copied catalogs. */
  std::unique_ptr<AssetCatalogCollection> deep_copy() const;

  /** Callback invoked for each catalog that is skipped because its ID already exists in the
   * destination collection. */
  using OnDuplicateCatalogIdFn =
      FunctionRef<void(const AssetCatalog &existing, const AssetCatalog &to_be_ignored)>;
  /**
   * Copy the catalogs from \a other and append them to this collection. Copies no other data
   * otherwise.
   *
   * \note If a catalog from \a other already exists in this collection (identified by catalog ID),
   * it will be skipped and \a on_duplicate_items will be called.
   */
  void add_catalogs_from_existing(const AssetCatalogCollection &other,
                                  OnDuplicateCatalogIdFn on_duplicate_items);

 protected:
  /** Deep-copy \a orig into a new map, duplicating each catalog. */
  static OwningAssetCatalogMap copy_catalog_map(const OwningAssetCatalogMap &orig);
};
|
||||
|
||||
/**
 * Keeps track of which catalogs are defined in a certain file on disk.
 * Only contains non-owning pointers to the #AssetCatalog instances, so ensure the lifetime of this
 * class is shorter than that of the #`AssetCatalog`s themselves.
 */
class AssetCatalogDefinitionFile {
 protected:
  /* Catalogs stored in this file. They are mapped by ID to make it possible to query whether a
   * catalog is already known, without having to find the corresponding `AssetCatalog*`. */
  Map<CatalogID, AssetCatalog *> catalogs_;

 public:
  /* For now this is the only version of the catalog definition files that is supported.
   * Later versioning code may be added to handle older files. */
  const static int SUPPORTED_VERSION;
  /* String that's matched in the catalog definition file to know that the line is the version
   * declaration. It has to start with a space to ensure it won't match any hypothetical future
   * field that starts with "VERSION". */
  const static std::string VERSION_MARKER;
  /* Comment block written at the top of every saved catalog definition file. */
  const static std::string HEADER;

  /* On-disk location of this file; used by the parameterless #write_to_disk(). */
  CatalogFilePath file_path;

 public:
  AssetCatalogDefinitionFile() = default;

  /**
   * Write the catalog definitions to the same file they were read from.
   * Return true when the file was written correctly, false when there was a problem.
   */
  bool write_to_disk() const;
  /**
   * Write the catalog definitions to an arbitrary file path.
   *
   * Any existing file is backed up to "filename~". Any previously existing backup is overwritten.
   *
   * Return true when the file was written correctly, false when there was a problem.
   */
  bool write_to_disk(const CatalogFilePath &dest_file_path) const;

  /** Return whether a catalog with the given ID is stored in this file. */
  bool contains(CatalogID catalog_id) const;
  /** Add a catalog, overwriting the one with the same catalog ID. */
  void add_overwrite(AssetCatalog *catalog);
  /** Add a new catalog. Undefined behavior if a catalog with the same ID was already added. */
  void add_new(AssetCatalog *catalog);

  /** Remove the catalog from the collection of catalogs stored in this file. */
  void forget(CatalogID catalog_id);

  /** Callback for #parse_catalog_file: return true to keep the parsed catalog registered in
   * this file, false to drop it. */
  using AssetCatalogParsedFn = FunctionRef<bool(std::unique_ptr<AssetCatalog>)>;
  /** Read and parse the file at the given path, invoking \a callback for each parsed catalog. */
  void parse_catalog_file(const CatalogFilePath &catalog_definition_file_path,
                          AssetCatalogParsedFn callback);

  /** Copy this file, remapping its non-owning catalog pointers into the given (regular or
   * deleted) catalog maps. */
  std::unique_ptr<AssetCatalogDefinitionFile> copy_and_remap(
      const OwningAssetCatalogMap &catalogs, const OwningAssetCatalogMap &deleted_catalogs) const;

 protected:
  /** Return whether \a line declares a supported file format version. */
  bool parse_version_line(StringRef line);
  /** Parse one "UUID:path:simple name" line; null when the line should be ignored. */
  std::unique_ptr<AssetCatalog> parse_catalog_line(StringRef line);

  /**
   * Write the catalog definitions to the given file path.
   * Return true when the file was written correctly, false when there was a problem.
   */
  bool write_to_disk_unsafe(const CatalogFilePath &dest_file_path) const;
  /** Create \a directory_path (recursively) when missing; return true on success. */
  bool ensure_directory_exists(const CatalogFilePath directory_path) const;
};
|
||||
|
||||
/**
|
||||
* Asset Catalog definition, containing a symbolic ID and a path that points to a node in the
|
||||
* catalog hierarchy.
|
||||
|
|
|
@ -16,6 +16,8 @@
|
|||
|
||||
#pragma once
|
||||
|
||||
#include <map>
|
||||
|
||||
#include "AS_asset_catalog.hh"
|
||||
|
||||
namespace blender::asset_system {
|
||||
|
|
|
@ -69,6 +69,8 @@ class AssetLibrary {
|
|||
std::unique_ptr<AssetStorage> asset_storage_;
|
||||
|
||||
protected:
|
||||
std::unique_ptr<AssetCatalogService> catalog_service_;
|
||||
|
||||
std::optional<eAssetImportMethod> import_method_;
|
||||
/** Assets owned by this library may be imported with a different method than set in
|
||||
* #import_method_ above, it's just a default. */
|
||||
|
@ -83,8 +85,6 @@ class AssetLibrary {
|
|||
* for managing the "Save Catalog Changes" in the quit-confirmation dialog box. */
|
||||
static bool save_catalogs_when_file_is_saved;
|
||||
|
||||
std::unique_ptr<AssetCatalogService> catalog_service;
|
||||
|
||||
friend class AssetLibraryService;
|
||||
friend class AssetRepresentation;
|
||||
|
||||
|
@ -114,6 +114,8 @@ class AssetLibrary {
|
|||
|
||||
void load_catalogs();
|
||||
|
||||
AssetCatalogService &catalog_service() const;
|
||||
|
||||
/**
|
||||
* Create a representation of an asset to be considered part of this library. Once the
|
||||
* representation is not needed anymore, it must be freed using #remove_asset(), or there will be
|
||||
|
|
|
@ -13,6 +13,8 @@ set(INC_SYS
|
|||
|
||||
set(SRC
|
||||
intern/asset_catalog.cc
|
||||
intern/asset_catalog_collection.cc
|
||||
intern/asset_catalog_definition_file.cc
|
||||
intern/asset_catalog_path.cc
|
||||
intern/asset_catalog_tree.cc
|
||||
intern/asset_identifier.cc
|
||||
|
@ -34,6 +36,8 @@ set(SRC
|
|||
AS_asset_library.hh
|
||||
AS_asset_representation.hh
|
||||
AS_essentials_library.hh
|
||||
intern/asset_catalog_collection.hh
|
||||
intern/asset_catalog_definition_file.hh
|
||||
intern/asset_library_all.hh
|
||||
intern/asset_library_essentials.hh
|
||||
intern/asset_library_from_preferences.hh
|
||||
|
|
|
@ -6,15 +6,16 @@
|
|||
* \ingroup asset_system
|
||||
*/
|
||||
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <set>
|
||||
|
||||
#include "AS_asset_catalog.hh"
|
||||
#include "AS_asset_catalog_tree.hh"
|
||||
#include "AS_asset_library.hh"
|
||||
#include "asset_catalog_collection.hh"
|
||||
#include "asset_catalog_definition_file.hh"
|
||||
|
||||
#include "BLI_fileops.hh"
|
||||
#include "BLI_fileops.h"
|
||||
#include "BLI_path_util.h"
|
||||
|
||||
/* For S_ISREG() and S_ISDIR() on Windows. */
|
||||
|
@ -32,16 +33,6 @@ namespace blender::asset_system {
|
|||
|
||||
const CatalogFilePath AssetCatalogService::DEFAULT_CATALOG_FILENAME = "blender_assets.cats.txt";
|
||||
|
||||
const int AssetCatalogDefinitionFile::SUPPORTED_VERSION = 1;
|
||||
const std::string AssetCatalogDefinitionFile::VERSION_MARKER = "VERSION ";
|
||||
|
||||
const std::string AssetCatalogDefinitionFile::HEADER =
|
||||
"# This is an Asset Catalog Definition file for Blender.\n"
|
||||
"#\n"
|
||||
"# Empty lines and lines starting with `#` will be ignored.\n"
|
||||
"# The first non-ignored line should be the version indicator.\n"
|
||||
"# Other lines are of the format \"UUID:catalog/path/for/assets:simple catalog name\"\n";
|
||||
|
||||
AssetCatalogService::AssetCatalogService()
|
||||
: catalog_collection_(std::make_unique<AssetCatalogCollection>())
|
||||
{
|
||||
|
@ -689,313 +680,6 @@ void AssetCatalogService::undo_push()
|
|||
|
||||
/* ---------------------------------------------------------------------- */
|
||||
|
||||
std::unique_ptr<AssetCatalogCollection> AssetCatalogCollection::deep_copy() const
|
||||
{
|
||||
auto copy = std::make_unique<AssetCatalogCollection>();
|
||||
|
||||
copy->has_unsaved_changes_ = this->has_unsaved_changes_;
|
||||
copy->catalogs_ = this->copy_catalog_map(this->catalogs_);
|
||||
copy->deleted_catalogs_ = this->copy_catalog_map(this->deleted_catalogs_);
|
||||
|
||||
if (catalog_definition_file_) {
|
||||
copy->catalog_definition_file_ = catalog_definition_file_->copy_and_remap(
|
||||
copy->catalogs_, copy->deleted_catalogs_);
|
||||
}
|
||||
|
||||
return copy;
|
||||
}
|
||||
|
||||
static void copy_catalog_map_into_existing(
|
||||
const OwningAssetCatalogMap &source,
|
||||
OwningAssetCatalogMap &dest,
|
||||
AssetCatalogCollection::OnDuplicateCatalogIdFn on_duplicate_items)
|
||||
{
|
||||
for (const auto &orig_catalog_uptr : source.values()) {
|
||||
if (dest.contains(orig_catalog_uptr->catalog_id)) {
|
||||
if (on_duplicate_items) {
|
||||
on_duplicate_items(*dest.lookup(orig_catalog_uptr->catalog_id), *orig_catalog_uptr);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
auto copy_catalog_uptr = std::make_unique<AssetCatalog>(*orig_catalog_uptr);
|
||||
dest.add_new(copy_catalog_uptr->catalog_id, std::move(copy_catalog_uptr));
|
||||
}
|
||||
}
|
||||
|
||||
void AssetCatalogCollection::add_catalogs_from_existing(
    const AssetCatalogCollection &other,
    AssetCatalogCollection::OnDuplicateCatalogIdFn on_duplicate_items)
{
  /* Only the regular catalogs of `other` are merged in; its deleted catalogs and catalog
   * definition file are intentionally left alone. */
  copy_catalog_map_into_existing(other.catalogs_, catalogs_, on_duplicate_items);
}
|
||||
|
||||
/** Deep-copy \a orig into a freshly created map. */
OwningAssetCatalogMap AssetCatalogCollection::copy_catalog_map(const OwningAssetCatalogMap &orig)
{
  OwningAssetCatalogMap copied_catalogs;
  const auto report_duplicate = [](const AssetCatalog & /*existing*/,
                                   const AssetCatalog & /*to_be_ignored*/) {
    /* The destination map starts out empty, so a duplicate here would mean `orig` itself
     * contained duplicate IDs, which should've been caught already. */
    BLI_assert_unreachable();
  };
  copy_catalog_map_into_existing(orig, copied_catalogs, report_duplicate);
  return copied_catalogs;
}
|
||||
|
||||
/* ---------------------------------------------------------------------- */
|
||||
|
||||
/** Return whether a catalog with the given ID is stored in this definition file. */
bool AssetCatalogDefinitionFile::contains(const CatalogID catalog_id) const
{
  return catalogs_.contains(catalog_id);
}
|
||||
|
||||
/** Add a new catalog. Undefined behavior if a catalog with the same ID was already added
 * (Map::add_new() requires the key to be absent). The pointer is non-owning. */
void AssetCatalogDefinitionFile::add_new(AssetCatalog *catalog)
{
  catalogs_.add_new(catalog->catalog_id, catalog);
}
|
||||
|
||||
/** Add a catalog, replacing any previously stored one with the same ID. The pointer is
 * non-owning. */
void AssetCatalogDefinitionFile::add_overwrite(AssetCatalog *catalog)
{
  catalogs_.add_overwrite(catalog->catalog_id, catalog);
}
|
||||
|
||||
/** Remove the catalog from this file's bookkeeping; the catalog itself is owned elsewhere and
 * is not freed here. */
void AssetCatalogDefinitionFile::forget(CatalogID catalog_id)
{
  catalogs_.remove(catalog_id);
}
|
||||
|
||||
void AssetCatalogDefinitionFile::parse_catalog_file(
|
||||
const CatalogFilePath &catalog_definition_file_path,
|
||||
AssetCatalogParsedFn catalog_loaded_callback)
|
||||
{
|
||||
fstream infile(catalog_definition_file_path, std::ios::in);
|
||||
|
||||
if (!infile.is_open()) {
|
||||
CLOG_ERROR(&LOG, "%s: unable to open file", catalog_definition_file_path.c_str());
|
||||
return;
|
||||
}
|
||||
bool seen_version_number = false;
|
||||
std::string line;
|
||||
while (std::getline(infile, line)) {
|
||||
const StringRef trimmed_line = StringRef(line).trim();
|
||||
if (trimmed_line.is_empty() || trimmed_line[0] == '#') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!seen_version_number) {
|
||||
/* The very first non-ignored line should be the version declaration. */
|
||||
const bool is_valid_version = this->parse_version_line(trimmed_line);
|
||||
if (!is_valid_version) {
|
||||
std::cerr << catalog_definition_file_path
|
||||
<< ": first line should be version declaration; ignoring file." << std::endl;
|
||||
break;
|
||||
}
|
||||
seen_version_number = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
std::unique_ptr<AssetCatalog> catalog = this->parse_catalog_line(trimmed_line);
|
||||
if (!catalog) {
|
||||
continue;
|
||||
}
|
||||
|
||||
AssetCatalog *non_owning_ptr = catalog.get();
|
||||
const bool keep_catalog = catalog_loaded_callback(std::move(catalog));
|
||||
if (!keep_catalog) {
|
||||
continue;
|
||||
}
|
||||
|
||||
/* The AssetDefinitionFile should include this catalog when writing it back to disk. */
|
||||
this->add_overwrite(non_owning_ptr);
|
||||
}
|
||||
}
|
||||
|
||||
bool AssetCatalogDefinitionFile::parse_version_line(const StringRef line)
|
||||
{
|
||||
if (!line.startswith(VERSION_MARKER)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const std::string version_string = line.substr(VERSION_MARKER.length());
|
||||
const int file_version = std::atoi(version_string.c_str());
|
||||
|
||||
/* No versioning, just a blunt check whether it's the right one. */
|
||||
return file_version == SUPPORTED_VERSION;
|
||||
}
|
||||
|
||||
std::unique_ptr<AssetCatalog> AssetCatalogDefinitionFile::parse_catalog_line(const StringRef line)
|
||||
{
|
||||
const char delim = ':';
|
||||
const int64_t first_delim = line.find_first_of(delim);
|
||||
if (first_delim == StringRef::not_found) {
|
||||
std::cerr << "Invalid catalog line in " << this->file_path << ": " << line << std::endl;
|
||||
return std::unique_ptr<AssetCatalog>(nullptr);
|
||||
}
|
||||
|
||||
/* Parse the catalog ID. */
|
||||
const std::string id_as_string = line.substr(0, first_delim).trim();
|
||||
bUUID catalog_id;
|
||||
const bool uuid_parsed_ok = BLI_uuid_parse_string(&catalog_id, id_as_string.c_str());
|
||||
if (!uuid_parsed_ok) {
|
||||
std::cerr << "Invalid UUID in " << this->file_path << ": " << line << std::endl;
|
||||
return std::unique_ptr<AssetCatalog>(nullptr);
|
||||
}
|
||||
|
||||
/* Parse the path and simple name. */
|
||||
const StringRef path_and_simple_name = line.substr(first_delim + 1);
|
||||
const int64_t second_delim = path_and_simple_name.find_first_of(delim);
|
||||
|
||||
std::string path_in_file;
|
||||
std::string simple_name;
|
||||
if (second_delim == 0) {
|
||||
/* Delimiter as first character means there is no path. These lines are to be ignored. */
|
||||
return std::unique_ptr<AssetCatalog>(nullptr);
|
||||
}
|
||||
|
||||
if (second_delim == StringRef::not_found) {
|
||||
/* No delimiter means no simple name, just treat it as all "path". */
|
||||
path_in_file = path_and_simple_name;
|
||||
simple_name = "";
|
||||
}
|
||||
else {
|
||||
path_in_file = path_and_simple_name.substr(0, second_delim);
|
||||
simple_name = path_and_simple_name.substr(second_delim + 1).trim();
|
||||
}
|
||||
|
||||
AssetCatalogPath catalog_path = path_in_file;
|
||||
return std::make_unique<AssetCatalog>(catalog_id, catalog_path.cleanup(), simple_name);
|
||||
}
|
||||
|
||||
/**
 * Write the catalog definitions back to #file_path, the path this file belongs to.
 * Return true when the file was written correctly, false when there was a problem.
 */
bool AssetCatalogDefinitionFile::write_to_disk() const
{
  BLI_assert_msg(!this->file_path.empty(), "Writing to CDF requires its file path to be known");
  return this->write_to_disk(this->file_path);
}
|
||||
|
||||
bool AssetCatalogDefinitionFile::write_to_disk(const CatalogFilePath &dest_file_path) const
|
||||
{
|
||||
const CatalogFilePath writable_path = dest_file_path + ".writing";
|
||||
const CatalogFilePath backup_path = dest_file_path + "~";
|
||||
|
||||
if (!this->write_to_disk_unsafe(writable_path)) {
|
||||
/* TODO: communicate what went wrong. */
|
||||
return false;
|
||||
}
|
||||
if (BLI_exists(dest_file_path.c_str())) {
|
||||
if (BLI_rename_overwrite(dest_file_path.c_str(), backup_path.c_str())) {
|
||||
/* TODO: communicate what went wrong. */
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (BLI_rename_overwrite(writable_path.c_str(), dest_file_path.c_str())) {
|
||||
/* TODO: communicate what went wrong. */
|
||||
return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool AssetCatalogDefinitionFile::write_to_disk_unsafe(const CatalogFilePath &dest_file_path) const
|
||||
{
|
||||
char directory[PATH_MAX];
|
||||
BLI_path_split_dir_part(dest_file_path.c_str(), directory, sizeof(directory));
|
||||
if (!ensure_directory_exists(directory)) {
|
||||
/* TODO(Sybren): pass errors to the UI somehow. */
|
||||
return false;
|
||||
}
|
||||
|
||||
fstream output(dest_file_path, std::ios::out);
|
||||
|
||||
/* TODO(@sybren): remember the line ending style that was originally read, then use that to write
|
||||
* the file again. */
|
||||
|
||||
/* Write the header. */
|
||||
output << HEADER;
|
||||
output << "" << std::endl;
|
||||
output << VERSION_MARKER << SUPPORTED_VERSION << std::endl;
|
||||
output << "" << std::endl;
|
||||
|
||||
/* Write the catalogs, ordered by path (primary) and UUID (secondary). */
|
||||
AssetCatalogOrderedSet catalogs_by_path;
|
||||
for (const AssetCatalog *catalog : catalogs_.values()) {
|
||||
if (catalog->flags.is_deleted) {
|
||||
continue;
|
||||
}
|
||||
catalogs_by_path.insert(catalog);
|
||||
}
|
||||
|
||||
for (const AssetCatalog *catalog : catalogs_by_path) {
|
||||
output << catalog->catalog_id << ":" << catalog->path << ":" << catalog->simple_name
|
||||
<< std::endl;
|
||||
}
|
||||
output.close();
|
||||
return !output.bad();
|
||||
}
|
||||
|
||||
bool AssetCatalogDefinitionFile::ensure_directory_exists(
|
||||
const CatalogFilePath directory_path) const
|
||||
{
|
||||
/* TODO(@sybren): design a way to get such errors presented to users (or ensure that they never
|
||||
* occur). */
|
||||
if (directory_path.empty()) {
|
||||
std::cerr
|
||||
<< "AssetCatalogService: no asset library root configured, unable to ensure it exists."
|
||||
<< std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
if (BLI_exists(directory_path.data())) {
|
||||
if (!BLI_is_dir(directory_path.data())) {
|
||||
std::cerr << "AssetCatalogService: " << directory_path
|
||||
<< " exists but is not a directory, this is not a supported situation."
|
||||
<< std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Root directory exists, work is done. */
|
||||
return true;
|
||||
}
|
||||
|
||||
/* Ensure the root directory exists. */
|
||||
std::error_code err_code;
|
||||
if (!BLI_dir_create_recursive(directory_path.data())) {
|
||||
std::cerr << "AssetCatalogService: error creating directory " << directory_path << ": "
|
||||
<< err_code << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Root directory has been created, work is done. */
|
||||
return true;
|
||||
}
|
||||
|
||||
std::unique_ptr<AssetCatalogDefinitionFile> AssetCatalogDefinitionFile::copy_and_remap(
|
||||
const OwningAssetCatalogMap &catalogs, const OwningAssetCatalogMap &deleted_catalogs) const
|
||||
{
|
||||
auto copy = std::make_unique<AssetCatalogDefinitionFile>(*this);
|
||||
copy->catalogs_.clear();
|
||||
|
||||
/* Remap pointers of the copy from the original AssetCatalogCollection to the given one. */
|
||||
for (CatalogID catalog_id : catalogs_.keys()) {
|
||||
/* The catalog can be in the regular or the deleted map. */
|
||||
const std::unique_ptr<AssetCatalog> *remapped_catalog_uptr_ptr = catalogs.lookup_ptr(
|
||||
catalog_id);
|
||||
if (remapped_catalog_uptr_ptr) {
|
||||
copy->catalogs_.add_new(catalog_id, remapped_catalog_uptr_ptr->get());
|
||||
continue;
|
||||
}
|
||||
|
||||
remapped_catalog_uptr_ptr = deleted_catalogs.lookup_ptr(catalog_id);
|
||||
if (remapped_catalog_uptr_ptr) {
|
||||
copy->catalogs_.add_new(catalog_id, remapped_catalog_uptr_ptr->get());
|
||||
continue;
|
||||
}
|
||||
|
||||
BLI_assert(!"A CDF should only reference known catalogs.");
|
||||
}
|
||||
|
||||
return copy;
|
||||
}
|
||||
|
||||
AssetCatalog::AssetCatalog(const CatalogID catalog_id,
|
||||
const AssetCatalogPath &path,
|
||||
const std::string &simple_name)
|
||||
|
@ -1030,6 +714,8 @@ std::string AssetCatalog::sensible_simple_name_for_path(const AssetCatalogPath &
|
|||
return "..." + name.substr(name.length() - 60);
|
||||
}
|
||||
|
||||
/* ---------------------------------------------------------------------- */
|
||||
|
||||
AssetCatalogFilter::AssetCatalogFilter(Set<CatalogID> &&matching_catalog_ids,
|
||||
Set<CatalogID> &&known_catalog_ids)
|
||||
: matching_catalog_ids_(std::move(matching_catalog_ids)),
|
||||
|
|
|
@ -0,0 +1,68 @@
|
|||
/* SPDX-FileCopyrightText: 2023 Blender Authors
|
||||
*
|
||||
* SPDX-License-Identifier: GPL-2.0-or-later */
|
||||
|
||||
/** \file
|
||||
* \ingroup asset_system
|
||||
*/
|
||||
|
||||
#include "asset_catalog_definition_file.hh"
|
||||
|
||||
#include "asset_catalog_collection.hh"
|
||||
|
||||
namespace blender::asset_system {
|
||||
|
||||
std::unique_ptr<AssetCatalogCollection> AssetCatalogCollection::deep_copy() const
|
||||
{
|
||||
auto copy = std::make_unique<AssetCatalogCollection>();
|
||||
|
||||
copy->has_unsaved_changes_ = this->has_unsaved_changes_;
|
||||
copy->catalogs_ = this->copy_catalog_map(this->catalogs_);
|
||||
copy->deleted_catalogs_ = this->copy_catalog_map(this->deleted_catalogs_);
|
||||
|
||||
if (catalog_definition_file_) {
|
||||
copy->catalog_definition_file_ = catalog_definition_file_->copy_and_remap(
|
||||
copy->catalogs_, copy->deleted_catalogs_);
|
||||
}
|
||||
|
||||
return copy;
|
||||
}
|
||||
|
||||
static void copy_catalog_map_into_existing(
|
||||
const OwningAssetCatalogMap &source,
|
||||
OwningAssetCatalogMap &dest,
|
||||
AssetCatalogCollection::OnDuplicateCatalogIdFn on_duplicate_items)
|
||||
{
|
||||
for (const auto &orig_catalog_uptr : source.values()) {
|
||||
if (dest.contains(orig_catalog_uptr->catalog_id)) {
|
||||
if (on_duplicate_items) {
|
||||
on_duplicate_items(*dest.lookup(orig_catalog_uptr->catalog_id), *orig_catalog_uptr);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
auto copy_catalog_uptr = std::make_unique<AssetCatalog>(*orig_catalog_uptr);
|
||||
dest.add_new(copy_catalog_uptr->catalog_id, std::move(copy_catalog_uptr));
|
||||
}
|
||||
}
|
||||
|
||||
void AssetCatalogCollection::add_catalogs_from_existing(
    const AssetCatalogCollection &other,
    AssetCatalogCollection::OnDuplicateCatalogIdFn on_duplicate_items)
{
  /* Only the regular catalogs of `other` are merged in; its deleted catalogs and catalog
   * definition file are intentionally left alone. */
  copy_catalog_map_into_existing(other.catalogs_, catalogs_, on_duplicate_items);
}
|
||||
|
||||
/** Deep-copy \a orig into a freshly created map. */
OwningAssetCatalogMap AssetCatalogCollection::copy_catalog_map(const OwningAssetCatalogMap &orig)
{
  OwningAssetCatalogMap copied_catalogs;
  const auto report_duplicate = [](const AssetCatalog & /*existing*/,
                                   const AssetCatalog & /*to_be_ignored*/) {
    /* The destination map starts out empty, so a duplicate here would mean `orig` itself
     * contained duplicate IDs, which should've been caught already. */
    BLI_assert_unreachable();
  };
  copy_catalog_map_into_existing(orig, copied_catalogs, report_duplicate);
  return copied_catalogs;
}
|
||||
|
||||
} // namespace blender::asset_system
|
|
@ -0,0 +1,61 @@
|
|||
/* SPDX-FileCopyrightText: 2023 Blender Authors
|
||||
*
|
||||
* SPDX-License-Identifier: GPL-2.0-or-later */
|
||||
|
||||
/** \file
|
||||
* \ingroup asset_system
|
||||
*/
|
||||
|
||||
#pragma once
|
||||
|
||||
#include "AS_asset_catalog.hh"
|
||||
|
||||
namespace blender::asset_system {
|
||||
|
||||
/**
 * All catalogs that are owned by a single asset library, and managed by a single instance of
 * #AssetCatalogService. The undo system for asset catalog edits contains historical copies of this
 * struct.
 */
class AssetCatalogCollection {
 protected:
  /** All catalogs known, except the known-but-deleted ones. */
  OwningAssetCatalogMap catalogs_;

  /** Catalogs that have been deleted. They are kept around so that the load-merge-save of catalog
   * definition files can actually delete them if they already existed on disk (instead of the
   * merge operation resurrecting them). */
  OwningAssetCatalogMap deleted_catalogs_;

  /* For now only a single catalog definition file is supported.
   * The aim is to support an arbitrary number of such files per asset library in the future. */
  std::unique_ptr<AssetCatalogDefinitionFile> catalog_definition_file_;

  /** Whether any of the catalogs have unsaved changes. */
  bool has_unsaved_changes_ = false;

  /* The service manages collections and needs direct access to their internals. */
  friend AssetCatalogService;

 public:
  AssetCatalogCollection() = default;
  /* Implicit copying is forbidden; use the explicit #deep_copy() instead. */
  AssetCatalogCollection(const AssetCatalogCollection &other) = delete;
  AssetCatalogCollection(AssetCatalogCollection &&other) noexcept = default;

  /** Return an independent copy: catalogs, deleted catalogs and (when present) the catalog
   * definition file, remapped to reference the copied catalogs. */
  std::unique_ptr<AssetCatalogCollection> deep_copy() const;

  /** Callback invoked for each catalog that is skipped because its ID already exists in the
   * destination collection. */
  using OnDuplicateCatalogIdFn =
      FunctionRef<void(const AssetCatalog &existing, const AssetCatalog &to_be_ignored)>;
  /**
   * Copy the catalogs from \a other and append them to this collection. Copies no other data
   * otherwise.
   *
   * \note If a catalog from \a other already exists in this collection (identified by catalog ID),
   * it will be skipped and \a on_duplicate_items will be called.
   */
  void add_catalogs_from_existing(const AssetCatalogCollection &other,
                                  OnDuplicateCatalogIdFn on_duplicate_items);

 protected:
  /** Deep-copy \a orig into a new map, duplicating each catalog. */
  static OwningAssetCatalogMap copy_catalog_map(const OwningAssetCatalogMap &orig);
};
|
||||
|
||||
}
|
|
@ -0,0 +1,284 @@
|
|||
/* SPDX-FileCopyrightText: 2023 Blender Authors
|
||||
*
|
||||
* SPDX-License-Identifier: GPL-2.0-or-later */
|
||||
|
||||
/** \file
|
||||
* \ingroup asset_system
|
||||
*/
|
||||
|
||||
#include <iostream>
|
||||
|
||||
#include "BLI_fileops.hh"
|
||||
#include "BLI_path_util.h"
|
||||
|
||||
#include "CLG_log.h"
|
||||
|
||||
#include "asset_catalog_definition_file.hh"
|
||||
|
||||
static CLG_LogRef LOG = {"asset_system.asset_catalog_definition_file"};
|
||||
|
||||
namespace blender::asset_system {
|
||||
|
||||
/* Only version 1 of the catalog definition file format exists so far. */
const int AssetCatalogDefinitionFile::SUPPORTED_VERSION = 1;
/* Prefix of the version declaration line; the trailing space keeps it from matching any
 * hypothetical future field that merely starts with "VERSION". */
const std::string AssetCatalogDefinitionFile::VERSION_MARKER = "VERSION ";

/* Human-readable explanation written at the top of every saved catalog definition file. */
const std::string AssetCatalogDefinitionFile::HEADER =
    "# This is an Asset Catalog Definition file for Blender.\n"
    "#\n"
    "# Empty lines and lines starting with `#` will be ignored.\n"
    "# The first non-ignored line should be the version indicator.\n"
    "# Other lines are of the format \"UUID:catalog/path/for/assets:simple catalog name\"\n";
|
||||
|
||||
/** Return whether a catalog with the given ID is stored in this definition file. */
bool AssetCatalogDefinitionFile::contains(const CatalogID catalog_id) const
{
  return catalogs_.contains(catalog_id);
}
|
||||
|
||||
/** Add a new catalog. Undefined behavior if a catalog with the same ID was already added
 * (Map::add_new() requires the key to be absent). The pointer is non-owning. */
void AssetCatalogDefinitionFile::add_new(AssetCatalog *catalog)
{
  catalogs_.add_new(catalog->catalog_id, catalog);
}
|
||||
|
||||
/** Add a catalog, replacing any previously stored one with the same ID. The pointer is
 * non-owning. */
void AssetCatalogDefinitionFile::add_overwrite(AssetCatalog *catalog)
{
  catalogs_.add_overwrite(catalog->catalog_id, catalog);
}
|
||||
|
||||
/** Remove the catalog from this file's bookkeeping; the catalog itself is owned elsewhere and
 * is not freed here. */
void AssetCatalogDefinitionFile::forget(CatalogID catalog_id)
{
  catalogs_.remove(catalog_id);
}
|
||||
|
||||
void AssetCatalogDefinitionFile::parse_catalog_file(
|
||||
const CatalogFilePath &catalog_definition_file_path,
|
||||
AssetCatalogParsedFn catalog_loaded_callback)
|
||||
{
|
||||
fstream infile(catalog_definition_file_path, std::ios::in);
|
||||
|
||||
if (!infile.is_open()) {
|
||||
CLOG_ERROR(&LOG, "%s: unable to open file", catalog_definition_file_path.c_str());
|
||||
return;
|
||||
}
|
||||
bool seen_version_number = false;
|
||||
std::string line;
|
||||
while (std::getline(infile, line)) {
|
||||
const StringRef trimmed_line = StringRef(line).trim();
|
||||
if (trimmed_line.is_empty() || trimmed_line[0] == '#') {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!seen_version_number) {
|
||||
/* The very first non-ignored line should be the version declaration. */
|
||||
const bool is_valid_version = this->parse_version_line(trimmed_line);
|
||||
if (!is_valid_version) {
|
||||
std::cerr << catalog_definition_file_path
|
||||
<< ": first line should be version declaration; ignoring file." << std::endl;
|
||||
break;
|
||||
}
|
||||
seen_version_number = true;
|
||||
continue;
|
||||
}
|
||||
|
||||
std::unique_ptr<AssetCatalog> catalog = this->parse_catalog_line(trimmed_line);
|
||||
if (!catalog) {
|
||||
continue;
|
||||
}
|
||||
|
||||
AssetCatalog *non_owning_ptr = catalog.get();
|
||||
const bool keep_catalog = catalog_loaded_callback(std::move(catalog));
|
||||
if (!keep_catalog) {
|
||||
continue;
|
||||
}
|
||||
|
||||
/* The AssetDefinitionFile should include this catalog when writing it back to disk. */
|
||||
this->add_overwrite(non_owning_ptr);
|
||||
}
|
||||
}
|
||||
|
||||
bool AssetCatalogDefinitionFile::parse_version_line(const StringRef line)
|
||||
{
|
||||
if (!line.startswith(VERSION_MARKER)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const std::string version_string = line.substr(VERSION_MARKER.length());
|
||||
const int file_version = std::atoi(version_string.c_str());
|
||||
|
||||
/* No versioning, just a blunt check whether it's the right one. */
|
||||
return file_version == SUPPORTED_VERSION;
|
||||
}
|
||||
|
||||
std::unique_ptr<AssetCatalog> AssetCatalogDefinitionFile::parse_catalog_line(const StringRef line)
|
||||
{
|
||||
const char delim = ':';
|
||||
const int64_t first_delim = line.find_first_of(delim);
|
||||
if (first_delim == StringRef::not_found) {
|
||||
std::cerr << "Invalid catalog line in " << this->file_path << ": " << line << std::endl;
|
||||
return std::unique_ptr<AssetCatalog>(nullptr);
|
||||
}
|
||||
|
||||
/* Parse the catalog ID. */
|
||||
const std::string id_as_string = line.substr(0, first_delim).trim();
|
||||
bUUID catalog_id;
|
||||
const bool uuid_parsed_ok = BLI_uuid_parse_string(&catalog_id, id_as_string.c_str());
|
||||
if (!uuid_parsed_ok) {
|
||||
std::cerr << "Invalid UUID in " << this->file_path << ": " << line << std::endl;
|
||||
return std::unique_ptr<AssetCatalog>(nullptr);
|
||||
}
|
||||
|
||||
/* Parse the path and simple name. */
|
||||
const StringRef path_and_simple_name = line.substr(first_delim + 1);
|
||||
const int64_t second_delim = path_and_simple_name.find_first_of(delim);
|
||||
|
||||
std::string path_in_file;
|
||||
std::string simple_name;
|
||||
if (second_delim == 0) {
|
||||
/* Delimiter as first character means there is no path. These lines are to be ignored. */
|
||||
return std::unique_ptr<AssetCatalog>(nullptr);
|
||||
}
|
||||
|
||||
if (second_delim == StringRef::not_found) {
|
||||
/* No delimiter means no simple name, just treat it as all "path". */
|
||||
path_in_file = path_and_simple_name;
|
||||
simple_name = "";
|
||||
}
|
||||
else {
|
||||
path_in_file = path_and_simple_name.substr(0, second_delim);
|
||||
simple_name = path_and_simple_name.substr(second_delim + 1).trim();
|
||||
}
|
||||
|
||||
AssetCatalogPath catalog_path = path_in_file;
|
||||
return std::make_unique<AssetCatalog>(catalog_id, catalog_path.cleanup(), simple_name);
|
||||
}
|
||||
|
||||
/**
 * Write the catalog definitions back to the file they were loaded from.
 * Asserts (rather than failing gracefully) when no file path is known for this CDF.
 */
bool AssetCatalogDefinitionFile::write_to_disk() const
{
  BLI_assert_msg(!this->file_path.empty(), "Writing to CDF requires its file path to be known");
  return this->write_to_disk(this->file_path);
}
|
||||
|
||||
/**
 * Write the catalog definitions to \a dest_file_path using a write-then-rename scheme:
 * the data is first written to "<dest>.writing", any pre-existing destination is moved to
 * "<dest>~" (overwriting an older backup), and finally the fresh file is renamed into place.
 * This keeps the destination intact when writing fails half-way.
 */
bool AssetCatalogDefinitionFile::write_to_disk(const CatalogFilePath &dest_file_path) const
{
  const CatalogFilePath writable_path = dest_file_path + ".writing";
  const CatalogFilePath backup_path = dest_file_path + "~";

  /* Write the new contents to the temporary path first. */
  if (!this->write_to_disk_unsafe(writable_path)) {
    /* TODO: communicate what went wrong. */
    return false;
  }
  /* Move any pre-existing file out of the way as a backup. A non-zero return from
   * BLI_rename_overwrite indicates failure. */
  if (BLI_exists(dest_file_path.c_str())) {
    if (BLI_rename_overwrite(dest_file_path.c_str(), backup_path.c_str())) {
      /* TODO: communicate what went wrong. */
      return false;
    }
  }
  /* Atomically (per the filesystem's rename semantics) put the new file in place. */
  if (BLI_rename_overwrite(writable_path.c_str(), dest_file_path.c_str())) {
    /* TODO: communicate what went wrong. */
    return false;
  }

  return true;
}
|
||||
|
||||
bool AssetCatalogDefinitionFile::write_to_disk_unsafe(const CatalogFilePath &dest_file_path) const
|
||||
{
|
||||
char directory[PATH_MAX];
|
||||
BLI_path_split_dir_part(dest_file_path.c_str(), directory, sizeof(directory));
|
||||
if (!ensure_directory_exists(directory)) {
|
||||
/* TODO(Sybren): pass errors to the UI somehow. */
|
||||
return false;
|
||||
}
|
||||
|
||||
fstream output(dest_file_path, std::ios::out);
|
||||
|
||||
/* TODO(@sybren): remember the line ending style that was originally read, then use that to write
|
||||
* the file again. */
|
||||
|
||||
/* Write the header. */
|
||||
output << HEADER;
|
||||
output << "" << std::endl;
|
||||
output << VERSION_MARKER << SUPPORTED_VERSION << std::endl;
|
||||
output << "" << std::endl;
|
||||
|
||||
/* Write the catalogs, ordered by path (primary) and UUID (secondary). */
|
||||
AssetCatalogOrderedSet catalogs_by_path;
|
||||
for (const AssetCatalog *catalog : catalogs_.values()) {
|
||||
if (catalog->flags.is_deleted) {
|
||||
continue;
|
||||
}
|
||||
catalogs_by_path.insert(catalog);
|
||||
}
|
||||
|
||||
for (const AssetCatalog *catalog : catalogs_by_path) {
|
||||
output << catalog->catalog_id << ":" << catalog->path << ":" << catalog->simple_name
|
||||
<< std::endl;
|
||||
}
|
||||
output.close();
|
||||
return !output.bad();
|
||||
}
|
||||
|
||||
bool AssetCatalogDefinitionFile::ensure_directory_exists(
|
||||
const CatalogFilePath directory_path) const
|
||||
{
|
||||
/* TODO(@sybren): design a way to get such errors presented to users (or ensure that they never
|
||||
* occur). */
|
||||
if (directory_path.empty()) {
|
||||
std::cerr
|
||||
<< "AssetCatalogService: no asset library root configured, unable to ensure it exists."
|
||||
<< std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
if (BLI_exists(directory_path.data())) {
|
||||
if (!BLI_is_dir(directory_path.data())) {
|
||||
std::cerr << "AssetCatalogService: " << directory_path
|
||||
<< " exists but is not a directory, this is not a supported situation."
|
||||
<< std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Root directory exists, work is done. */
|
||||
return true;
|
||||
}
|
||||
|
||||
/* Ensure the root directory exists. */
|
||||
std::error_code err_code;
|
||||
if (!BLI_dir_create_recursive(directory_path.data())) {
|
||||
std::cerr << "AssetCatalogService: error creating directory " << directory_path << ": "
|
||||
<< err_code << std::endl;
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Root directory has been created, work is done. */
|
||||
return true;
|
||||
}
|
||||
|
||||
std::unique_ptr<AssetCatalogDefinitionFile> AssetCatalogDefinitionFile::copy_and_remap(
|
||||
const OwningAssetCatalogMap &catalogs, const OwningAssetCatalogMap &deleted_catalogs) const
|
||||
{
|
||||
auto copy = std::make_unique<AssetCatalogDefinitionFile>(*this);
|
||||
copy->catalogs_.clear();
|
||||
|
||||
/* Remap pointers of the copy from the original AssetCatalogCollection to the given one. */
|
||||
for (CatalogID catalog_id : catalogs_.keys()) {
|
||||
/* The catalog can be in the regular or the deleted map. */
|
||||
const std::unique_ptr<AssetCatalog> *remapped_catalog_uptr_ptr = catalogs.lookup_ptr(
|
||||
catalog_id);
|
||||
if (remapped_catalog_uptr_ptr) {
|
||||
copy->catalogs_.add_new(catalog_id, remapped_catalog_uptr_ptr->get());
|
||||
continue;
|
||||
}
|
||||
|
||||
remapped_catalog_uptr_ptr = deleted_catalogs.lookup_ptr(catalog_id);
|
||||
if (remapped_catalog_uptr_ptr) {
|
||||
copy->catalogs_.add_new(catalog_id, remapped_catalog_uptr_ptr->get());
|
||||
continue;
|
||||
}
|
||||
|
||||
BLI_assert(!"A CDF should only reference known catalogs.");
|
||||
}
|
||||
|
||||
return copy;
|
||||
}
|
||||
|
||||
} // namespace blender::asset_system
|
|
@ -0,0 +1,87 @@
|
|||
/* SPDX-FileCopyrightText: 2023 Blender Authors
 *
 * SPDX-License-Identifier: GPL-2.0-or-later */

/** \file
 * \ingroup asset_system
 *
 * Classes internal to the asset system for asset catalog management.
 */

#pragma once

#include "AS_asset_catalog.hh"

#include "BLI_string_ref.hh"

namespace blender::asset_system {

/**
 * Keeps track of which catalogs are defined in a certain file on disk.
 * Only contains non-owning pointers to the #AssetCatalog instances, so ensure the lifetime of this
 * class is shorter than that of the #`AssetCatalog`s themselves.
 */
class AssetCatalogDefinitionFile {
 protected:
  /* Catalogs stored in this file. They are mapped by ID to make it possible to query whether a
   * catalog is already known, without having to find the corresponding `AssetCatalog*`. */
  Map<CatalogID, AssetCatalog *> catalogs_;

 public:
  /* For now this is the only version of the catalog definition files that is supported.
   * Later versioning code may be added to handle older files. */
  const static int SUPPORTED_VERSION;
  /* String that's matched in the catalog definition file to know that the line is the version
   * declaration. It has to start with a space to ensure it won't match any hypothetical future
   * field that starts with "VERSION". */
  const static std::string VERSION_MARKER;
  /* Text written at the top of every catalog definition file. */
  const static std::string HEADER;

  /* Path of the file these catalog definitions were read from / are written to. */
  CatalogFilePath file_path;

 public:
  AssetCatalogDefinitionFile() = default;

  /**
   * Write the catalog definitions to the same file they were read from.
   * Return true when the file was written correctly, false when there was a problem.
   */
  bool write_to_disk() const;
  /**
   * Write the catalog definitions to an arbitrary file path.
   *
   * Any existing file is backed up to "filename~". Any previously existing backup is overwritten.
   *
   * Return true when the file was written correctly, false when there was a problem.
   */
  bool write_to_disk(const CatalogFilePath &dest_file_path) const;

  /** Return whether a catalog with the given ID is stored in this file. */
  bool contains(CatalogID catalog_id) const;
  /** Add a catalog, overwriting the one with the same catalog ID. */
  void add_overwrite(AssetCatalog *catalog);
  /** Add a new catalog. Undefined behavior if a catalog with the same ID was already added. */
  void add_new(AssetCatalog *catalog);

  /** Remove the catalog from the collection of catalogs stored in this file. */
  void forget(CatalogID catalog_id);

  /* Callback invoked for every catalog parsed from the file. The meaning of the bool return
   * value is defined by the caller of #parse_catalog_file (not visible in this header). */
  using AssetCatalogParsedFn = FunctionRef<bool(std::unique_ptr<AssetCatalog>)>;
  /** Read the file at the given path, invoking \a callback for every parsed catalog. */
  void parse_catalog_file(const CatalogFilePath &catalog_definition_file_path,
                          AssetCatalogParsedFn callback);

  /** Copy this CDF, remapping its non-owning catalog pointers into the given maps. */
  std::unique_ptr<AssetCatalogDefinitionFile> copy_and_remap(
      const OwningAssetCatalogMap &catalogs, const OwningAssetCatalogMap &deleted_catalogs) const;

 protected:
  /* Parse the "VERSION <n>" declaration line; see #VERSION_MARKER. */
  bool parse_version_line(StringRef line);
  /* Parse a single "UUID:path:simple name" catalog line; nullptr on invalid/ignorable lines. */
  std::unique_ptr<AssetCatalog> parse_catalog_line(StringRef line);

  /**
   * Write the catalog definitions to the given file path.
   * Return true when the file was written correctly, false when there was a problem.
   */
  bool write_to_disk_unsafe(const CatalogFilePath &dest_file_path) const;
  /* Create \a directory_path (recursively) when it does not exist yet. */
  bool ensure_directory_exists(const CatalogFilePath directory_path) const;
};

}  // namespace blender::asset_system
|
|
@ -24,6 +24,8 @@
|
|||
|
||||
#include "DNA_userdef_types.h"
|
||||
|
||||
#include "asset_catalog_collection.hh"
|
||||
#include "asset_catalog_definition_file.hh"
|
||||
#include "asset_library_service.hh"
|
||||
#include "asset_storage.hh"
|
||||
#include "utils.hh"
|
||||
|
@ -167,7 +169,7 @@ AssetLibrary::AssetLibrary(eAssetLibraryType library_type, StringRef name, Strin
|
|||
name_(name),
|
||||
root_path_(std::make_shared<std::string>(utils::normalize_directory_path(root_path))),
|
||||
asset_storage_(std::make_unique<AssetStorage>()),
|
||||
catalog_service(std::make_unique<AssetCatalogService>())
|
||||
catalog_service_(std::make_unique<AssetCatalogService>())
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -189,7 +191,12 @@ void AssetLibrary::load_catalogs()
|
|||
{
|
||||
auto catalog_service = std::make_unique<AssetCatalogService>(root_path());
|
||||
catalog_service->load_from_disk();
|
||||
this->catalog_service = std::move(catalog_service);
|
||||
this->catalog_service_ = std::move(catalog_service);
|
||||
}
|
||||
|
||||
/* Accessor for the library's catalog service. Callers assume the service always exists;
 * dereferences the owning pointer unconditionally. */
AssetCatalogService &AssetLibrary::catalog_service() const
{
  return *catalog_service_;
}
|
||||
|
||||
/* Deliberately a no-op in the base class; subclasses (e.g. on-disk libraries) override this to
 * reload their catalogs. */
void AssetLibrary::refresh_catalogs() {}
|
||||
|
@ -254,12 +261,8 @@ void AssetLibrary::on_blend_save_post(Main *main,
|
|||
PointerRNA ** /*pointers*/,
|
||||
const int /*num_pointers*/)
|
||||
{
|
||||
if (this->catalog_service == nullptr) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (save_catalogs_when_file_is_saved) {
|
||||
this->catalog_service->write_to_disk(main->filepath);
|
||||
this->catalog_service().write_to_disk(main->filepath);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -281,7 +284,7 @@ void AssetLibrary::refresh_catalog_simplename(AssetMetaData *asset_data)
|
|||
asset_data->catalog_simple_name[0] = '\0';
|
||||
return;
|
||||
}
|
||||
const AssetCatalog *catalog = this->catalog_service->find_catalog(asset_data->catalog_id);
|
||||
const AssetCatalog *catalog = this->catalog_service().find_catalog(asset_data->catalog_id);
|
||||
if (catalog == nullptr) {
|
||||
/* No-op if the catalog cannot be found. This could be the kind of "the catalog definition file
|
||||
* is corrupt/lost" scenario that the simple name is meant to help recover from. */
|
||||
|
|
|
@ -9,6 +9,8 @@
|
|||
#include <memory>
|
||||
|
||||
#include "AS_asset_catalog_tree.hh"
|
||||
#include "asset_catalog_collection.hh"
|
||||
#include "asset_catalog_definition_file.hh"
|
||||
|
||||
#include "asset_library_all.hh"
|
||||
|
||||
|
@ -30,11 +32,11 @@ void AllAssetLibrary::rebuild_catalogs_from_nested(const bool reload_nested_cata
|
|||
AssetLibrary::foreach_loaded(
|
||||
[&](AssetLibrary &nested) {
|
||||
if (reload_nested_catalogs) {
|
||||
nested.catalog_service->reload_catalogs();
|
||||
nested.catalog_service().reload_catalogs();
|
||||
}
|
||||
|
||||
new_catalog_service->add_from_existing(
|
||||
*nested.catalog_service,
|
||||
nested.catalog_service(),
|
||||
/*on_duplicate_items=*/[](const AssetCatalog &existing,
|
||||
const AssetCatalog &to_be_ignored) {
|
||||
if (existing.path == to_be_ignored.path) {
|
||||
|
@ -56,7 +58,7 @@ void AllAssetLibrary::rebuild_catalogs_from_nested(const bool reload_nested_cata
|
|||
},
|
||||
false);
|
||||
|
||||
this->catalog_service = std::move(new_catalog_service);
|
||||
this->catalog_service_ = std::move(new_catalog_service);
|
||||
catalogs_dirty_ = false;
|
||||
}
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ OnDiskAssetLibrary::OnDiskAssetLibrary(eAssetLibraryType library_type,
|
|||
|
||||
void OnDiskAssetLibrary::refresh_catalogs()
|
||||
{
|
||||
catalog_service->reload_catalogs();
|
||||
catalog_service().reload_catalogs();
|
||||
}
|
||||
|
||||
} // namespace blender::asset_system
|
||||
|
|
|
@ -503,7 +503,7 @@ bool AssetLibraryService::has_any_unsaved_catalogs() const
|
|||
|
||||
foreach_loaded_asset_library(
|
||||
[&has_unsaved_changes](AssetLibrary &library) {
|
||||
if (library.catalog_service->has_unsaved_changes()) {
|
||||
if (library.catalog_service().has_unsaved_changes()) {
|
||||
has_unsaved_changes = true;
|
||||
}
|
||||
},
|
||||
|
|
|
@ -4,6 +4,8 @@
|
|||
|
||||
#include "AS_asset_catalog.hh"
|
||||
#include "AS_asset_catalog_tree.hh"
|
||||
#include "asset_catalog_collection.hh"
|
||||
#include "asset_catalog_definition_file.hh"
|
||||
|
||||
#include "BKE_preferences.h"
|
||||
|
||||
|
|
|
@ -4,6 +4,8 @@
|
|||
|
||||
#include "AS_asset_catalog.hh"
|
||||
#include "AS_asset_catalog_tree.hh"
|
||||
#include "asset_catalog_collection.hh"
|
||||
#include "asset_catalog_definition_file.hh"
|
||||
|
||||
#include "BLI_path_util.h"
|
||||
|
||||
|
|
|
@ -173,10 +173,10 @@ TEST_F(AssetLibraryServiceTest, catalogs_loaded)
|
|||
AssetLibraryService *const service = AssetLibraryService::get();
|
||||
AssetLibrary *const lib = service->get_asset_library_on_disk_custom(__func__,
|
||||
asset_library_root_);
|
||||
AssetCatalogService *const cat_service = lib->catalog_service.get();
|
||||
AssetCatalogService &cat_service = lib->catalog_service();
|
||||
|
||||
const bUUID UUID_POSES_ELLIE("df60e1f6-2259-475b-93d9-69a1b4a8db78");
|
||||
EXPECT_NE(nullptr, cat_service->find_catalog(UUID_POSES_ELLIE))
|
||||
EXPECT_NE(nullptr, cat_service.find_catalog(UUID_POSES_ELLIE))
|
||||
<< "Catalogs should be loaded after getting an asset library from disk.";
|
||||
}
|
||||
|
||||
|
@ -188,21 +188,21 @@ TEST_F(AssetLibraryServiceTest, has_any_unsaved_catalogs)
|
|||
|
||||
AssetLibrary *const lib = service->get_asset_library_on_disk_custom(__func__,
|
||||
asset_library_root_);
|
||||
AssetCatalogService *const cat_service = lib->catalog_service.get();
|
||||
AssetCatalogService &cat_service = lib->catalog_service();
|
||||
EXPECT_FALSE(service->has_any_unsaved_catalogs())
|
||||
<< "Unchanged AssetLibrary should have no unsaved catalogs";
|
||||
|
||||
const bUUID UUID_POSES_ELLIE("df60e1f6-2259-475b-93d9-69a1b4a8db78");
|
||||
cat_service->prune_catalogs_by_id(UUID_POSES_ELLIE);
|
||||
cat_service.prune_catalogs_by_id(UUID_POSES_ELLIE);
|
||||
EXPECT_FALSE(service->has_any_unsaved_catalogs())
|
||||
<< "Deletion of catalogs via AssetCatalogService should not automatically tag as 'unsaved "
|
||||
"changes'.";
|
||||
|
||||
const bUUID UUID_POSES_RUZENA("79a4f887-ab60-4bd4-94da-d572e27d6aed");
|
||||
AssetCatalog *cat = cat_service->find_catalog(UUID_POSES_RUZENA);
|
||||
AssetCatalog *cat = cat_service.find_catalog(UUID_POSES_RUZENA);
|
||||
ASSERT_NE(nullptr, cat) << "Catalog " << UUID_POSES_RUZENA << " should be known";
|
||||
|
||||
cat_service->tag_has_unsaved_changes(cat);
|
||||
cat_service.tag_has_unsaved_changes(cat);
|
||||
EXPECT_TRUE(service->has_any_unsaved_catalogs())
|
||||
<< "Tagging as having unsaved changes of a single catalog service should result in unsaved "
|
||||
"changes being reported.";
|
||||
|
@ -224,17 +224,17 @@ TEST_F(AssetLibraryServiceTest, has_any_unsaved_catalogs_after_write)
|
|||
EXPECT_FALSE(service->has_any_unsaved_catalogs())
|
||||
<< "Unchanged AssetLibrary should have no unsaved catalogs";
|
||||
|
||||
AssetCatalogService *const cat_service = lib->catalog_service.get();
|
||||
AssetCatalog *cat = cat_service->find_catalog(UUID_POSES_ELLIE);
|
||||
AssetCatalogService &cat_service = lib->catalog_service();
|
||||
AssetCatalog *cat = cat_service.find_catalog(UUID_POSES_ELLIE);
|
||||
|
||||
cat_service->tag_has_unsaved_changes(cat);
|
||||
cat_service.tag_has_unsaved_changes(cat);
|
||||
|
||||
EXPECT_TRUE(service->has_any_unsaved_catalogs())
|
||||
<< "Tagging as having unsaved changes of a single catalog service should result in unsaved "
|
||||
"changes being reported.";
|
||||
EXPECT_TRUE(cat->flags.has_unsaved_changes);
|
||||
|
||||
cat_service->write_to_disk(writable_dir + "dummy_path.blend");
|
||||
cat_service.write_to_disk(writable_dir + "dummy_path.blend");
|
||||
EXPECT_FALSE(service->has_any_unsaved_catalogs())
|
||||
<< "Written AssetCatalogService should have no unsaved catalogs";
|
||||
EXPECT_FALSE(cat->flags.has_unsaved_changes);
|
||||
|
|
|
@ -47,14 +47,13 @@ TEST_F(AssetLibraryTest, AS_asset_library_load)
|
|||
ASSERT_NE(nullptr, library);
|
||||
|
||||
/* Check that it can be cast to the C++ type and has a Catalog Service. */
|
||||
AssetCatalogService *service = library->catalog_service.get();
|
||||
ASSERT_NE(nullptr, service);
|
||||
const AssetCatalogService &service = library->catalog_service();
|
||||
|
||||
/* Check that the catalogs defined in the library are actually loaded. This just tests one single
|
||||
* catalog, as that indicates the file has been loaded. Testing that loading went OK is for
|
||||
* the asset catalog service tests. */
|
||||
const bUUID uuid_poses_ellie("df60e1f6-2259-475b-93d9-69a1b4a8db78");
|
||||
AssetCatalog *poses_ellie = service->find_catalog(uuid_poses_ellie);
|
||||
AssetCatalog *poses_ellie = service.find_catalog(uuid_poses_ellie);
|
||||
ASSERT_NE(nullptr, poses_ellie) << "unable to find POSES_ELLIE catalog";
|
||||
EXPECT_EQ("character/Ellie/poselib", poses_ellie->path.str());
|
||||
}
|
||||
|
@ -73,11 +72,9 @@ TEST_F(AssetLibraryTest, load_nonexistent_directory)
|
|||
ASSERT_NE(nullptr, library);
|
||||
|
||||
/* Check that it can be cast to the C++ type and has a Catalog Service. */
|
||||
AssetCatalogService *service = library->catalog_service.get();
|
||||
ASSERT_NE(nullptr, service);
|
||||
|
||||
AssetCatalogService &service = library->catalog_service();
|
||||
/* Check that the catalog service doesn't have any catalogs. */
|
||||
EXPECT_TRUE(service->is_empty());
|
||||
EXPECT_TRUE(service.is_empty());
|
||||
}
|
||||
|
||||
} // namespace blender::asset_system::tests
|
||||
|
|
|
@ -1541,9 +1541,11 @@ void CurvesGeometry::blend_read(BlendDataReader &reader)
|
|||
CustomData_blend_read(&reader, &this->curve_data, this->curve_num);
|
||||
|
||||
if (this->curve_offsets) {
|
||||
BLO_read_int32_array(&reader, this->curve_num + 1, &this->curve_offsets);
|
||||
this->runtime->curve_offsets_sharing_info = implicit_sharing::info_for_mem_free(
|
||||
this->curve_offsets);
|
||||
this->runtime->curve_offsets_sharing_info = BLO_read_shared(
|
||||
&reader, &this->curve_offsets, [&]() {
|
||||
BLO_read_int32_array(&reader, this->curve_num + 1, &this->curve_offsets);
|
||||
return implicit_sharing::info_for_mem_free(this->curve_offsets);
|
||||
});
|
||||
}
|
||||
|
||||
BLO_read_list(&reader, &this->vertex_group_names);
|
||||
|
@ -1569,7 +1571,14 @@ void CurvesGeometry::blend_write(BlendWriter &writer,
|
|||
CustomData_blend_write(
|
||||
&writer, &this->curve_data, write_data.curve_layers, this->curve_num, CD_MASK_ALL, &id);
|
||||
|
||||
BLO_write_int32_array(&writer, this->curve_num + 1, this->curve_offsets);
|
||||
if (this->curve_offsets) {
|
||||
BLO_write_shared(
|
||||
&writer,
|
||||
this->curve_offsets,
|
||||
sizeof(int) * (this->curve_num + 1),
|
||||
this->runtime->curve_offsets_sharing_info,
|
||||
[&]() { BLO_write_int32_array(&writer, this->curve_num + 1, this->curve_offsets); });
|
||||
}
|
||||
|
||||
BKE_defbase_blend_write(&writer, &this->vertex_group_names);
|
||||
}
|
||||
|
|
|
@ -5374,7 +5374,10 @@ void CustomData_blend_write(BlendWriter *writer,
|
|||
writer, CustomDataLayer, data->totlayer, data->layers, layers_to_write.data());
|
||||
|
||||
for (const CustomDataLayer &layer : layers_to_write) {
|
||||
blend_write_layer_data(writer, layer, count);
|
||||
const size_t size_in_bytes = CustomData_sizeof(eCustomDataType(layer.type)) * count;
|
||||
BLO_write_shared(writer, layer.data, size_in_bytes, layer.sharing_info, [&]() {
|
||||
blend_write_layer_data(writer, layer, count);
|
||||
});
|
||||
}
|
||||
|
||||
if (data->external) {
|
||||
|
@ -5430,11 +5433,6 @@ static void blend_read_paint_mask(BlendDataReader *reader,
|
|||
static void blend_read_layer_data(BlendDataReader *reader, CustomDataLayer &layer, const int count)
|
||||
{
|
||||
BLO_read_data_address(reader, &layer.data);
|
||||
if (layer.data != nullptr) {
|
||||
/* Make layer data shareable. */
|
||||
layer.sharing_info = make_implicit_sharing_info_for_layer(
|
||||
eCustomDataType(layer.type), layer.data, count);
|
||||
}
|
||||
if (CustomData_layer_ensure_data_exists(&layer, count)) {
|
||||
/* Under normal operations, this shouldn't happen, but...
|
||||
* For a CD_PROP_BOOL example, see #84935.
|
||||
|
@ -5479,7 +5477,12 @@ void CustomData_blend_read(BlendDataReader *reader, CustomData *data, const int
|
|||
layer->sharing_info = nullptr;
|
||||
|
||||
if (CustomData_verify_versions(data, i)) {
|
||||
blend_read_layer_data(reader, *layer, count);
|
||||
layer->sharing_info = BLO_read_shared(reader, &layer->data, [&]() {
|
||||
blend_read_layer_data(reader, *layer, count);
|
||||
return layer->data ? make_implicit_sharing_info_for_layer(
|
||||
eCustomDataType(layer->type), layer->data, count) :
|
||||
nullptr;
|
||||
});
|
||||
i++;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -292,6 +292,7 @@ static void mesh_blend_write(BlendWriter *writer, ID *id, const void *id_address
|
|||
}
|
||||
}
|
||||
|
||||
const blender::bke::MeshRuntime *mesh_runtime = mesh->runtime;
|
||||
mesh->runtime = nullptr;
|
||||
|
||||
BLO_write_id_struct(writer, Mesh, id_address, &mesh->id);
|
||||
|
@ -317,7 +318,12 @@ static void mesh_blend_write(BlendWriter *writer, ID *id, const void *id_address
|
|||
writer, &mesh->face_data, face_layers, mesh->faces_num, CD_MASK_MESH.pmask, &mesh->id);
|
||||
|
||||
if (mesh->face_offset_indices) {
|
||||
BLO_write_int32_array(writer, mesh->faces_num + 1, mesh->face_offset_indices);
|
||||
BLO_write_shared(
|
||||
writer,
|
||||
mesh->face_offset_indices,
|
||||
sizeof(int) * mesh->faces_num,
|
||||
mesh_runtime->face_offsets_sharing_info,
|
||||
[&]() { BLO_write_int32_array(writer, mesh->faces_num + 1, mesh->face_offset_indices); });
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -363,9 +369,11 @@ static void mesh_blend_read_data(BlendDataReader *reader, ID *id)
|
|||
mesh->runtime = new blender::bke::MeshRuntime();
|
||||
|
||||
if (mesh->face_offset_indices) {
|
||||
BLO_read_int32_array(reader, mesh->faces_num + 1, &mesh->face_offset_indices);
|
||||
mesh->runtime->face_offsets_sharing_info = blender::implicit_sharing::info_for_mem_free(
|
||||
mesh->face_offset_indices);
|
||||
mesh->runtime->face_offsets_sharing_info = BLO_read_shared(
|
||||
reader, &mesh->face_offset_indices, [&]() {
|
||||
BLO_read_int32_array(reader, mesh->faces_num + 1, &mesh->face_offset_indices);
|
||||
return blender::implicit_sharing::info_for_mem_free(mesh->face_offset_indices);
|
||||
});
|
||||
}
|
||||
|
||||
if (mesh->mselect == nullptr) {
|
||||
|
|
|
@ -125,6 +125,11 @@ class ImplicitSharingInfo : NonCopyable, NonMovable {
|
|||
return version_.load(std::memory_order_acquire);
|
||||
}
|
||||
|
||||
int strong_users() const
|
||||
{
|
||||
return strong_users_.load(std::memory_order_acquire);
|
||||
}
|
||||
|
||||
/**
|
||||
* Call when the data is no longer needed. This might just decrement the user count, or it might
|
||||
* also delete the data if this was the last user.
|
||||
|
|
|
@ -33,6 +33,11 @@
|
|||
|
||||
#include "DNA_windowmanager_types.h" /* for eReportType */
|
||||
|
||||
#include "BLI_function_ref.hh"
|
||||
|
||||
namespace blender {
|
||||
class ImplicitSharingInfo;
|
||||
}
|
||||
struct BlendDataReader;
|
||||
struct BlendFileReadReport;
|
||||
struct BlendLibReader;
|
||||
|
@ -182,6 +187,21 @@ void BLO_write_string(BlendWriter *writer, const char *data_ptr);
|
|||
|
||||
/* Misc. */
|
||||
|
||||
/**
|
||||
* Check if the data can be written more efficiently by making use of implicit-sharing. If yes, the
|
||||
* user count of the sharing-info is increased making the data immutable. The provided callback
|
||||
* should serialize the potentially shared data. It is only called when necessary.
|
||||
*
|
||||
* \param approximate_size_in_bytes: Used to be able to approximate how large the undo step is in
|
||||
* total.
|
||||
* \param write_fn: Use the #BlendWrite to serialize the potentially shared data.
|
||||
*/
|
||||
void BLO_write_shared(BlendWriter *writer,
|
||||
const void *data,
|
||||
size_t approximate_size_in_bytes,
|
||||
const blender::ImplicitSharingInfo *sharing_info,
|
||||
blender::FunctionRef<void()> write_fn);
|
||||
|
||||
/**
|
||||
* Sometimes different data is written depending on whether the file is saved to disk or used for
|
||||
* undo. This function returns true when the current file-writing is done for undo.
|
||||
|
@ -245,6 +265,26 @@ void BLO_read_pointer_array(BlendDataReader *reader, void **ptr_p);
|
|||
|
||||
/* Misc. */
|
||||
|
||||
void blo_read_shared_impl(BlendDataReader *reader,
|
||||
void *data,
|
||||
const blender::ImplicitSharingInfo **r_sharing_info,
|
||||
blender::FunctionRef<const blender::ImplicitSharingInfo *()> read_fn);
|
||||
|
||||
/**
|
||||
* Check if there is any shared data for the given data pointer. If yes, return the existing
|
||||
* sharing-info. If not, call the provided function to actually read the data now.
|
||||
*/
|
||||
/* Typed convenience wrapper: forwards to #blo_read_shared_impl, which either reuses
 * already-shared data (undo) or invokes \a read_fn to read it now. */
template<typename T>
const blender::ImplicitSharingInfo *BLO_read_shared(
    BlendDataReader *reader,
    T **data_ptr,
    blender::FunctionRef<const blender::ImplicitSharingInfo *()> read_fn)
{
  /* The impl decides whether the data pointed at by *data_ptr is already owned by the undo
   * step's shared storage; the returned sharing-info (ownership) is passed to the caller. */
  const blender::ImplicitSharingInfo *sharing_info;
  blo_read_shared_impl(reader, *data_ptr, &sharing_info, read_fn);
  return sharing_info;
}
|
||||
|
||||
int BLO_read_fileversion_get(BlendDataReader *reader);
|
||||
bool BLO_read_requires_endian_switch(BlendDataReader *reader);
|
||||
bool BLO_read_data_is_undo(BlendDataReader *reader);
|
||||
|
|
|
@ -13,9 +13,22 @@
|
|||
#include "BLI_listbase.h"
|
||||
#include "BLI_map.hh"
|
||||
|
||||
namespace blender {
|
||||
class ImplicitSharingInfo;
|
||||
}
|
||||
struct GHash;
|
||||
struct Main;
|
||||
struct Scene;
|
||||
|
||||
struct MemFileSharedStorage {
|
||||
/**
|
||||
* Maps the data pointer to the sharing info that it is owned by.
|
||||
*/
|
||||
blender::Map<const void *, const blender::ImplicitSharingInfo *> map;
|
||||
|
||||
~MemFileSharedStorage();
|
||||
};
|
||||
|
||||
struct MemFileChunk {
|
||||
void *next, *prev;
|
||||
const char *buf;
|
||||
|
@ -35,6 +48,11 @@ struct MemFileChunk {
|
|||
struct MemFile {
|
||||
ListBase chunks;
|
||||
size_t size;
|
||||
/**
|
||||
* Some data is not serialized into a new buffer because the undo-step can take ownership of it
|
||||
* without making a copy. This is faster and requires less memory.
|
||||
*/
|
||||
MemFileSharedStorage *shared_storage;
|
||||
};
|
||||
|
||||
struct MemFileWriteData {
|
||||
|
@ -94,11 +112,5 @@ void BLO_memfile_clear_future(MemFile *memfile);
|
|||
/* Utilities. */
|
||||
|
||||
Main *BLO_memfile_main_get(MemFile *memfile, Main *bmain, Scene **r_scene);
|
||||
/**
|
||||
* Saves .blend using undo buffer.
|
||||
*
|
||||
* \return success.
|
||||
*/
|
||||
bool BLO_memfile_write_file(MemFile *memfile, const char *filepath);
|
||||
|
||||
FileReader *BLO_memfile_new_filereader(MemFile *memfile, int undo_direction);
|
||||
|
|
|
@ -4964,6 +4964,32 @@ void BLO_read_pointer_array(BlendDataReader *reader, void **ptr_p)
|
|||
*ptr_p = final_array;
|
||||
}
|
||||
|
||||
/**
 * Implementation of #BLO_read_shared. When reading an undo step, first check whether the data
 * pointer was stored with sharing-info in the memfile's shared storage; if so, reuse it (adding
 * a user) instead of re-reading. Otherwise fall back to \a read_fn to actually read the data.
 */
void blo_read_shared_impl(
    BlendDataReader *reader,
    void *data,
    const blender::ImplicitSharingInfo **r_sharing_info,
    const blender::FunctionRef<const blender::ImplicitSharingInfo *()> read_fn)
{
  if (BLO_read_data_is_undo(reader)) {
    if (reader->fd->flags & FD_FLAGS_IS_MEMFILE) {
      UndoReader *undo_reader = reinterpret_cast<UndoReader *>(reader->fd->file);
      MemFile &memfile = *undo_reader->memfile;
      if (memfile.shared_storage) {
        /* Check if the data was saved with sharing-info. */
        if (const blender::ImplicitSharingInfo *sharing_info =
                memfile.shared_storage->map.lookup_default(data, nullptr))
        {
          /* Add a new owner of the data that is passed to the caller. */
          sharing_info->add_user();
          *r_sharing_info = sharing_info;
          return;
        }
      }
    }
  }
  /* Not an undo read, or the data was not shared: read it now. The callback returns the
   * sharing-info whose ownership is handed to the caller. */
  *r_sharing_info = read_fn();
}
|
||||
|
||||
bool BLO_read_data_is_undo(BlendDataReader *reader)
|
||||
{
|
||||
return (reader->fd->flags & FD_FLAGS_IS_MEMFILE);
|
||||
|
|
|
@ -25,6 +25,7 @@
|
|||
#include "DNA_listBase.h"
|
||||
|
||||
#include "BLI_blenlib.h"
|
||||
#include "BLI_implicit_sharing.hh"
|
||||
|
||||
#include "BLO_readfile.hh"
|
||||
#include "BLO_undofile.hh"
|
||||
|
@ -45,9 +46,19 @@ void BLO_memfile_free(MemFile *memfile)
|
|||
}
|
||||
MEM_freeN(chunk);
|
||||
}
|
||||
MEM_delete(memfile->shared_storage);
|
||||
memfile->shared_storage = nullptr;
|
||||
memfile->size = 0;
|
||||
}
|
||||
|
||||
/* Release the undo step's (shared) ownership of every piece of data it kept alive. */
MemFileSharedStorage::~MemFileSharedStorage()
{
  for (const blender::ImplicitSharingInfo *sharing_info : map.values()) {
    /* Removing the user makes sure shared data is freed when the undo step was its last owner. */
    sharing_info->remove_user_and_delete_if_last();
  }
}
|
||||
|
||||
void BLO_memfile_merge(MemFile *first, MemFile *second)
|
||||
{
|
||||
/* We use this mapping to store the memory buffers from second memfile chunks which are not owned
|
||||
|
@ -175,61 +186,6 @@ Main *BLO_memfile_main_get(MemFile *memfile, Main *bmain, Scene **r_scene)
|
|||
return bmain_undo;
|
||||
}
|
||||
|
||||
/**
 * Save the in-memory undo file to disk by writing its chunks sequentially to \a filepath.
 * Used for auto-save and `quit.blend`.
 *
 * \return true on success; on failure an error is printed to stderr and false is returned.
 */
bool BLO_memfile_write_file(MemFile *memfile, const char *filepath)
{
  MemFileChunk *chunk;
  int file, oflags;

  /* NOTE: This is currently used for auto-save and `quit.blend`,
   * where _not_ following symbolic-links is OK,
   * however if this is ever executed explicitly by the user,
   * we may want to allow writing to symbolic-links. */

  oflags = O_BINARY | O_WRONLY | O_CREAT | O_TRUNC;
#ifdef O_NOFOLLOW
  /* use O_NOFOLLOW to avoid writing to a symlink - use 'O_EXCL' (CVE-2008-1103) */
  oflags |= O_NOFOLLOW;
#else
  /* TODO(sergey): How to deal with symlinks on windows? */
# ifndef _MSC_VER
# warning "Symbolic links will be followed on undo save, possibly causing CVE-2008-1103"
# endif
#endif
  file = BLI_open(filepath, oflags, 0666);

  if (file == -1) {
    fprintf(stderr,
            "Unable to save '%s': %s\n",
            filepath,
            errno ? strerror(errno) : "Unknown error opening file");
    return false;
  }

  /* Write every chunk in order; stop at the first short write (chunk remains non-null then). */
  for (chunk = static_cast<MemFileChunk *>(memfile->chunks.first); chunk;
       chunk = static_cast<MemFileChunk *>(chunk->next))
  {
#ifdef _WIN32
    if (size_t(write(file, chunk->buf, uint(chunk->size))) != chunk->size)
#else
    if (size_t(write(file, chunk->buf, chunk->size)) != chunk->size)
#endif
    {
      break;
    }
  }

  close(file);

  /* A non-null chunk here means the loop above broke out early on a failed write. */
  if (chunk) {
    fprintf(stderr,
            "Unable to save '%s': %s\n",
            filepath,
            errno ? strerror(errno) : "Unknown error writing file");
    return false;
  }
  return true;
}
|
||||
|
||||
static int64_t undo_read(FileReader *reader, void *buffer, size_t size)
|
||||
{
|
||||
UndoReader *undo = (UndoReader *)reader;
|
||||
|
|
|
@ -91,6 +91,7 @@
|
|||
#include "BLI_blenlib.h"
|
||||
#include "BLI_endian_defines.h"
|
||||
#include "BLI_endian_switch.h"
|
||||
#include "BLI_implicit_sharing.hh"
|
||||
#include "BLI_link_utils.h"
|
||||
#include "BLI_linklist.h"
|
||||
#include "BLI_math_base.h"
|
||||
|
@ -1833,6 +1834,33 @@ void BLO_write_string(BlendWriter *writer, const char *data_ptr)
|
|||
}
|
||||
}
|
||||
|
||||
void BLO_write_shared(BlendWriter *writer,
|
||||
const void *data,
|
||||
const size_t approximate_size_in_bytes,
|
||||
const blender::ImplicitSharingInfo *sharing_info,
|
||||
const blender::FunctionRef<void()> write_fn)
|
||||
{
|
||||
if (data == nullptr) {
|
||||
return;
|
||||
}
|
||||
if (BLO_write_is_undo(writer)) {
|
||||
MemFile &memfile = *writer->wd->mem.written_memfile;
|
||||
if (sharing_info != nullptr) {
|
||||
if (memfile.shared_storage == nullptr) {
|
||||
memfile.shared_storage = MEM_new<MemFileSharedStorage>(__func__);
|
||||
}
|
||||
if (memfile.shared_storage->map.add(data, sharing_info)) {
|
||||
/* The undo-step takes (shared) ownership of the data, which also makes it immutable. */
|
||||
sharing_info->add_user();
|
||||
/* This size is an estimate, but good enough to count data with many users less. */
|
||||
memfile.size += approximate_size_in_bytes / sharing_info->strong_users();
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
write_fn();
|
||||
}
|
||||
|
||||
bool BLO_write_is_undo(BlendWriter *writer)
|
||||
{
|
||||
return writer->wd->use_memfile;
|
||||
|
|
|
@ -454,7 +454,8 @@ vec3 shadow_pcf_offset(LightData light, const bool is_directional, vec3 P, vec3
|
|||
BP = line_plane_intersect(light._position, normalize(BP - light._position), P, Ng);
|
||||
}
|
||||
|
||||
mat2x3 TB = mat2x3(TP - P, BP - P);
|
||||
/* TODO: Use a mat2x3 (Currently not supported by the Metal backend). */
|
||||
mat3 TBN = mat3(TP - P, BP - P, Ng);
|
||||
|
||||
/* Compute the actual offset. */
|
||||
|
||||
|
@ -466,7 +467,7 @@ vec3 shadow_pcf_offset(LightData light, const bool is_directional, vec3 P, vec3
|
|||
pcf_offset = pcf_offset * 2.0 - 1.0;
|
||||
pcf_offset *= light.pcf_radius;
|
||||
|
||||
return TB * pcf_offset;
|
||||
return TBN * vec3(pcf_offset, 0.0);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
@ -28,8 +28,8 @@ using namespace blender::asset_system;
|
|||
|
||||
bool catalogs_read_only(const AssetLibrary &library)
|
||||
{
|
||||
asset_system::AssetCatalogService *catalog_service = library.catalog_service.get();
|
||||
return catalog_service->is_read_only();
|
||||
const asset_system::AssetCatalogService &catalog_service = library.catalog_service();
|
||||
return catalog_service.is_read_only();
|
||||
}
|
||||
|
||||
struct CatalogUniqueNameFnData {
|
||||
|
@ -61,23 +61,20 @@ asset_system::AssetCatalog *catalog_add(AssetLibrary *library,
|
|||
StringRefNull name,
|
||||
StringRef parent_path)
|
||||
{
|
||||
asset_system::AssetCatalogService *catalog_service = library->catalog_service.get();
|
||||
if (!catalog_service) {
|
||||
return nullptr;
|
||||
}
|
||||
if (catalog_service->is_read_only()) {
|
||||
asset_system::AssetCatalogService &catalog_service = library->catalog_service();
|
||||
if (catalog_service.is_read_only()) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
std::string unique_name = catalog_name_ensure_unique(*catalog_service, name, parent_path);
|
||||
std::string unique_name = catalog_name_ensure_unique(catalog_service, name, parent_path);
|
||||
AssetCatalogPath fullpath = AssetCatalogPath(parent_path) / unique_name;
|
||||
|
||||
catalog_service->undo_push();
|
||||
asset_system::AssetCatalog *new_catalog = catalog_service->create_catalog(fullpath);
|
||||
catalog_service.undo_push();
|
||||
asset_system::AssetCatalog *new_catalog = catalog_service.create_catalog(fullpath);
|
||||
if (!new_catalog) {
|
||||
return nullptr;
|
||||
}
|
||||
catalog_service->tag_has_unsaved_changes(new_catalog);
|
||||
catalog_service.tag_has_unsaved_changes(new_catalog);
|
||||
|
||||
WM_main_add_notifier(NC_SPACE | ND_SPACE_ASSET_PARAMS, nullptr);
|
||||
return new_catalog;
|
||||
|
@ -85,18 +82,14 @@ asset_system::AssetCatalog *catalog_add(AssetLibrary *library,
|
|||
|
||||
void catalog_remove(AssetLibrary *library, const CatalogID &catalog_id)
|
||||
{
|
||||
asset_system::AssetCatalogService *catalog_service = library->catalog_service.get();
|
||||
if (!catalog_service) {
|
||||
BLI_assert_unreachable();
|
||||
return;
|
||||
}
|
||||
if (catalog_service->is_read_only()) {
|
||||
asset_system::AssetCatalogService &catalog_service = library->catalog_service();
|
||||
if (catalog_service.is_read_only()) {
|
||||
return;
|
||||
}
|
||||
|
||||
catalog_service->undo_push();
|
||||
catalog_service->tag_has_unsaved_changes(nullptr);
|
||||
catalog_service->prune_catalogs_by_id(catalog_id);
|
||||
catalog_service.undo_push();
|
||||
catalog_service.tag_has_unsaved_changes(nullptr);
|
||||
catalog_service.prune_catalogs_by_id(catalog_id);
|
||||
WM_main_add_notifier(NC_SPACE | ND_SPACE_ASSET_PARAMS, nullptr);
|
||||
}
|
||||
|
||||
|
@ -104,16 +97,12 @@ void catalog_rename(AssetLibrary *library,
|
|||
const CatalogID catalog_id,
|
||||
const StringRefNull new_name)
|
||||
{
|
||||
asset_system::AssetCatalogService *catalog_service = library->catalog_service.get();
|
||||
if (!catalog_service) {
|
||||
BLI_assert_unreachable();
|
||||
return;
|
||||
}
|
||||
if (catalog_service->is_read_only()) {
|
||||
asset_system::AssetCatalogService &catalog_service = library->catalog_service();
|
||||
if (catalog_service.is_read_only()) {
|
||||
return;
|
||||
}
|
||||
|
||||
AssetCatalog *catalog = catalog_service->find_catalog(catalog_id);
|
||||
AssetCatalog *catalog = catalog_service.find_catalog(catalog_id);
|
||||
|
||||
const AssetCatalogPath new_path = catalog->path.parent() / StringRef(new_name);
|
||||
const AssetCatalogPath clean_new_path = new_path.cleanup();
|
||||
|
@ -123,9 +112,9 @@ void catalog_rename(AssetLibrary *library,
|
|||
return;
|
||||
}
|
||||
|
||||
catalog_service->undo_push();
|
||||
catalog_service->tag_has_unsaved_changes(catalog);
|
||||
catalog_service->update_catalog_path(catalog_id, clean_new_path);
|
||||
catalog_service.undo_push();
|
||||
catalog_service.tag_has_unsaved_changes(catalog);
|
||||
catalog_service.update_catalog_path(catalog_id, clean_new_path);
|
||||
WM_main_add_notifier(NC_SPACE | ND_SPACE_ASSET_PARAMS, nullptr);
|
||||
}
|
||||
|
||||
|
@ -133,22 +122,18 @@ void catalog_move(AssetLibrary *library,
|
|||
const CatalogID src_catalog_id,
|
||||
const std::optional<CatalogID> dst_parent_catalog_id)
|
||||
{
|
||||
asset_system::AssetCatalogService *catalog_service = library->catalog_service.get();
|
||||
if (!catalog_service) {
|
||||
BLI_assert_unreachable();
|
||||
return;
|
||||
}
|
||||
if (catalog_service->is_read_only()) {
|
||||
asset_system::AssetCatalogService &catalog_service = library->catalog_service();
|
||||
if (catalog_service.is_read_only()) {
|
||||
return;
|
||||
}
|
||||
|
||||
AssetCatalog *src_catalog = catalog_service->find_catalog(src_catalog_id);
|
||||
AssetCatalog *src_catalog = catalog_service.find_catalog(src_catalog_id);
|
||||
if (!src_catalog) {
|
||||
BLI_assert_unreachable();
|
||||
return;
|
||||
}
|
||||
AssetCatalog *dst_catalog = dst_parent_catalog_id ?
|
||||
catalog_service->find_catalog(*dst_parent_catalog_id) :
|
||||
catalog_service.find_catalog(*dst_parent_catalog_id) :
|
||||
nullptr;
|
||||
if (!dst_catalog && dst_parent_catalog_id) {
|
||||
BLI_assert_unreachable();
|
||||
|
@ -156,7 +141,7 @@ void catalog_move(AssetLibrary *library,
|
|||
}
|
||||
|
||||
std::string unique_name = catalog_name_ensure_unique(
|
||||
*catalog_service, src_catalog->path.name(), dst_catalog ? dst_catalog->path.c_str() : "");
|
||||
catalog_service, src_catalog->path.name(), dst_catalog ? dst_catalog->path.c_str() : "");
|
||||
/* If a destination catalog was given, construct the path using that. Otherwise, the path is just
|
||||
* the name of the catalog to be moved, which means it ends up at the root level. */
|
||||
const AssetCatalogPath new_path = dst_catalog ? (dst_catalog->path / unique_name) :
|
||||
|
@ -168,27 +153,23 @@ void catalog_move(AssetLibrary *library,
|
|||
return;
|
||||
}
|
||||
|
||||
catalog_service->undo_push();
|
||||
catalog_service->tag_has_unsaved_changes(src_catalog);
|
||||
catalog_service->update_catalog_path(src_catalog_id, clean_new_path);
|
||||
catalog_service.undo_push();
|
||||
catalog_service.tag_has_unsaved_changes(src_catalog);
|
||||
catalog_service.update_catalog_path(src_catalog_id, clean_new_path);
|
||||
WM_main_add_notifier(NC_SPACE | ND_SPACE_ASSET_PARAMS, nullptr);
|
||||
}
|
||||
|
||||
void catalogs_save_from_main_path(AssetLibrary *library, const Main *bmain)
|
||||
{
|
||||
asset_system::AssetCatalogService *catalog_service = library->catalog_service.get();
|
||||
if (!catalog_service) {
|
||||
BLI_assert_unreachable();
|
||||
return;
|
||||
}
|
||||
if (catalog_service->is_read_only()) {
|
||||
asset_system::AssetCatalogService &catalog_service = library->catalog_service();
|
||||
if (catalog_service.is_read_only()) {
|
||||
return;
|
||||
}
|
||||
|
||||
/* Since writing to disk also means loading any on-disk changes, it may be a good idea to store
|
||||
* an undo step. */
|
||||
catalog_service->undo_push();
|
||||
catalog_service->write_to_disk(bmain->filepath);
|
||||
catalog_service.undo_push();
|
||||
catalog_service.write_to_disk(bmain->filepath);
|
||||
}
|
||||
|
||||
void catalogs_set_save_catalogs_when_file_is_saved(const bool should_save)
|
||||
|
|
|
@ -64,7 +64,7 @@ asset_system::AssetCatalogTree build_filtered_catalog_tree(
|
|||
return true;
|
||||
}
|
||||
|
||||
const asset_system::AssetCatalog *catalog = library.catalog_service->find_catalog(
|
||||
const asset_system::AssetCatalog *catalog = library.catalog_service().find_catalog(
|
||||
meta_data.catalog_id);
|
||||
if (catalog == nullptr) {
|
||||
return true;
|
||||
|
@ -75,13 +75,13 @@ asset_system::AssetCatalogTree build_filtered_catalog_tree(
|
|||
|
||||
/* Build catalog tree. */
|
||||
asset_system::AssetCatalogTree filtered_tree;
|
||||
const asset_system::AssetCatalogTree &full_tree = library.catalog_service->catalog_tree();
|
||||
const asset_system::AssetCatalogTree &full_tree = library.catalog_service().catalog_tree();
|
||||
full_tree.foreach_item([&](const asset_system::AssetCatalogTreeItem &item) {
|
||||
if (!known_paths.contains(item.catalog_path().str())) {
|
||||
return;
|
||||
}
|
||||
|
||||
asset_system::AssetCatalog *catalog = library.catalog_service->find_catalog(
|
||||
asset_system::AssetCatalog *catalog = library.catalog_service().find_catalog(
|
||||
item.get_catalog_id());
|
||||
if (catalog == nullptr) {
|
||||
return;
|
||||
|
@ -122,7 +122,7 @@ AssetItemTree build_filtered_all_catalog_tree(
|
|||
return true;
|
||||
}
|
||||
|
||||
const asset_system::AssetCatalog *catalog = library->catalog_service->find_catalog(
|
||||
const asset_system::AssetCatalog *catalog = library->catalog_service().find_catalog(
|
||||
meta_data.catalog_id);
|
||||
if (catalog == nullptr) {
|
||||
/* Also include assets with catalogs we're unable to find (e.g. the catalog was deleted) in
|
||||
|
@ -135,12 +135,12 @@ AssetItemTree build_filtered_all_catalog_tree(
|
|||
});
|
||||
|
||||
asset_system::AssetCatalogTree catalogs_with_node_assets;
|
||||
const asset_system::AssetCatalogTree &catalog_tree = library->catalog_service->catalog_tree();
|
||||
const asset_system::AssetCatalogTree &catalog_tree = library->catalog_service().catalog_tree();
|
||||
catalog_tree.foreach_item([&](const asset_system::AssetCatalogTreeItem &item) {
|
||||
if (assets_per_path.lookup(item.catalog_path()).is_empty()) {
|
||||
return;
|
||||
}
|
||||
asset_system::AssetCatalog *catalog = library->catalog_service->find_catalog(
|
||||
asset_system::AssetCatalog *catalog = library->catalog_service().find_catalog(
|
||||
item.get_catalog_id());
|
||||
if (catalog == nullptr) {
|
||||
return;
|
||||
|
|
|
@ -140,7 +140,7 @@ PointerRNA persistent_catalog_path_rna_pointer(const bScreen &owner_screen,
|
|||
const asset_system::AssetLibrary &library,
|
||||
const asset_system::AssetCatalogTreeItem &item)
|
||||
{
|
||||
const asset_system::AssetCatalog *catalog = library.catalog_service->find_catalog_by_path(
|
||||
const asset_system::AssetCatalog *catalog = library.catalog_service().find_catalog_by_path(
|
||||
item.catalog_path());
|
||||
if (!catalog) {
|
||||
return PointerRNA_NULL;
|
||||
|
|
|
@ -587,7 +587,7 @@ static asset_system::AssetCatalogService *get_catalog_service(bContext *C)
|
|||
}
|
||||
|
||||
asset_system::AssetLibrary *asset_lib = ED_fileselect_active_asset_library_get(sfile);
|
||||
return asset_lib->catalog_service.get();
|
||||
return &asset_lib->catalog_service();
|
||||
}
|
||||
|
||||
static int asset_catalog_undo_exec(bContext *C, wmOperator * /*op*/)
|
||||
|
|
|
@ -181,13 +181,13 @@ static std::optional<asset_system::AssetCatalogFilter> catalog_filter_from_shelf
|
|||
return {};
|
||||
}
|
||||
|
||||
asset_system::AssetCatalog *active_catalog = library.catalog_service->find_catalog_by_path(
|
||||
asset_system::AssetCatalog *active_catalog = library.catalog_service().find_catalog_by_path(
|
||||
shelf_settings.active_catalog_path);
|
||||
if (!active_catalog) {
|
||||
return {};
|
||||
}
|
||||
|
||||
return library.catalog_service->create_catalog_filter(active_catalog->catalog_id);
|
||||
return library.catalog_service().create_catalog_filter(active_catalog->catalog_id);
|
||||
}
|
||||
|
||||
/* ---------------------------------------------------------------------- */
|
||||
|
|
|
@ -183,8 +183,8 @@ AssetCatalogTreeView::AssetCatalogTreeView(asset_system::AssetLibrary *library,
|
|||
SpaceFile &space_file)
|
||||
: asset_library_(library), params_(params), space_file_(space_file)
|
||||
{
|
||||
if (library && library->catalog_service) {
|
||||
catalog_tree_ = &library->catalog_service->catalog_tree();
|
||||
if (library) {
|
||||
catalog_tree_ = &library->catalog_service().catalog_tree();
|
||||
}
|
||||
else {
|
||||
catalog_tree_ = nullptr;
|
||||
|
@ -503,10 +503,10 @@ AssetCatalog *AssetCatalogDropTarget::get_drag_catalog(
|
|||
if (drag.type != WM_DRAG_ASSET_CATALOG) {
|
||||
return nullptr;
|
||||
}
|
||||
const AssetCatalogService *catalog_service = asset_library.catalog_service.get();
|
||||
const AssetCatalogService &catalog_service = asset_library.catalog_service();
|
||||
const wmDragAssetCatalog *catalog_drag = WM_drag_get_asset_catalog_data(&drag);
|
||||
|
||||
return catalog_service->find_catalog(catalog_drag->drag_catalog_id);
|
||||
return catalog_service.find_catalog(catalog_drag->drag_catalog_id);
|
||||
}
|
||||
|
||||
bool AssetCatalogDropTarget::has_droppable_asset(const wmDrag &drag, const char **r_disabled_hint)
|
||||
|
@ -744,11 +744,11 @@ void file_ensure_updated_catalog_filter_data(
|
|||
{
|
||||
AssetCatalogFilterSettings *filter_settings = reinterpret_cast<AssetCatalogFilterSettings *>(
|
||||
filter_settings_handle);
|
||||
const AssetCatalogService *catalog_service = asset_library->catalog_service.get();
|
||||
const AssetCatalogService &catalog_service = asset_library->catalog_service();
|
||||
|
||||
if (filter_settings->asset_catalog_visibility != FILE_SHOW_ASSETS_ALL_CATALOGS) {
|
||||
filter_settings->catalog_filter = std::make_unique<AssetCatalogFilter>(
|
||||
catalog_service->create_catalog_filter(filter_settings->asset_catalog_id));
|
||||
catalog_service.create_catalog_filter(filter_settings->asset_catalog_id));
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -18,14 +18,18 @@ namespace blender::geometry {
|
|||
* \note this uses a trivial implementation currently that has to be replaced.
|
||||
*/
|
||||
class ReverseUVSampler {
|
||||
public:
|
||||
struct LookupGrid;
|
||||
|
||||
private:
|
||||
Span<float2> uv_map_;
|
||||
Span<int3> corner_tris_;
|
||||
int resolution_;
|
||||
MultiValueMap<int2, int> corner_tris_by_cell_;
|
||||
std::unique_ptr<LookupGrid> lookup_grid_;
|
||||
|
||||
public:
|
||||
ReverseUVSampler(Span<float2> uv_map, Span<int3> corner_tris);
|
||||
~ReverseUVSampler();
|
||||
|
||||
enum class ResultType {
|
||||
None,
|
||||
|
|
|
@ -3,51 +3,232 @@
|
|||
* SPDX-License-Identifier: GPL-2.0-or-later */
|
||||
|
||||
#include <algorithm>
|
||||
#include <fmt/format.h>
|
||||
|
||||
#include "GEO_reverse_uv_sampler.hh"
|
||||
|
||||
#include "BLI_bounds.hh"
|
||||
#include "BLI_enumerable_thread_specific.hh"
|
||||
#include "BLI_index_mask.hh"
|
||||
#include "BLI_linear_allocator_chunked_list.hh"
|
||||
#include "BLI_math_geom.h"
|
||||
#include "BLI_math_vector.hh"
|
||||
#include "BLI_offset_indices.hh"
|
||||
#include "BLI_task.hh"
|
||||
#include "BLI_timeit.hh"
|
||||
|
||||
namespace blender::geometry {
|
||||
|
||||
static int2 uv_to_cell_key(const float2 &uv, const int resolution)
|
||||
struct Row {
|
||||
/** The min and max horizontal cell index that is used in this row. */
|
||||
int x_min = 0;
|
||||
int x_max = 0;
|
||||
/** Offsets into the array of indices below. Also see #OffsetIndices. */
|
||||
Array<int> offsets;
|
||||
/** A flat array containing the triangle indices contained in each cell. */
|
||||
Array<int> tri_indices;
|
||||
};
|
||||
|
||||
struct ReverseUVSampler::LookupGrid {
|
||||
/** Minimum vertical cell index that contains triangles. */
|
||||
int y_min = 0;
|
||||
/** Information about all rows starting at `y_min`. */
|
||||
Array<Row> rows;
|
||||
};
|
||||
|
||||
struct TriWithRange {
|
||||
int tri_index;
|
||||
int x_min;
|
||||
int x_max;
|
||||
};
|
||||
|
||||
struct LocalRowData {
|
||||
linear_allocator::ChunkedList<TriWithRange, 8> tris;
|
||||
int x_min = INT32_MAX;
|
||||
int x_max = INT32_MIN;
|
||||
};
|
||||
|
||||
struct LocalData {
|
||||
LinearAllocator<> allocator;
|
||||
Map<int, destruct_ptr<LocalRowData>> rows;
|
||||
};
|
||||
|
||||
static int2 uv_to_cell(const float2 &uv, const int resolution)
|
||||
{
|
||||
return int2{uv * resolution};
|
||||
}
|
||||
|
||||
ReverseUVSampler::ReverseUVSampler(const Span<float2> uv_map, const Span<int3> corner_tris)
|
||||
: uv_map_(uv_map), corner_tris_(corner_tris)
|
||||
static Bounds<int2> tri_to_cell_bounds(const int3 &tri,
|
||||
const int resolution,
|
||||
const Span<float2> uv_map)
|
||||
{
|
||||
resolution_ = std::max<int>(3, std::sqrt(corner_tris.size()) * 2);
|
||||
const float2 &uv_0 = uv_map[tri[0]];
|
||||
const float2 &uv_1 = uv_map[tri[1]];
|
||||
const float2 &uv_2 = uv_map[tri[2]];
|
||||
|
||||
for (const int tri_i : corner_tris.index_range()) {
|
||||
const int3 &tri = corner_tris[tri_i];
|
||||
const float2 &uv_0 = uv_map_[tri[0]];
|
||||
const float2 &uv_1 = uv_map_[tri[1]];
|
||||
const float2 &uv_2 = uv_map_[tri[2]];
|
||||
const int2 cell_0 = uv_to_cell(uv_0, resolution);
|
||||
const int2 cell_1 = uv_to_cell(uv_1, resolution);
|
||||
const int2 cell_2 = uv_to_cell(uv_2, resolution);
|
||||
|
||||
const int2 key_0 = uv_to_cell_key(uv_0, resolution_);
|
||||
const int2 key_1 = uv_to_cell_key(uv_1, resolution_);
|
||||
const int2 key_2 = uv_to_cell_key(uv_2, resolution_);
|
||||
const int2 min_cell = math::min(math::min(cell_0, cell_1), cell_2);
|
||||
const int2 max_cell = math::max(math::max(cell_0, cell_1), cell_2);
|
||||
|
||||
const int2 min_key = math::min(math::min(key_0, key_1), key_2);
|
||||
const int2 max_key = math::max(math::max(key_0, key_1), key_2);
|
||||
return {min_cell, max_cell};
|
||||
}
|
||||
|
||||
for (int key_x = min_key.x; key_x <= max_key.x; key_x++) {
|
||||
for (int key_y = min_key.y; key_y <= max_key.y; key_y++) {
|
||||
const int2 key{key_x, key_y};
|
||||
corner_tris_by_cell_.add(key, tri_i);
|
||||
/**
|
||||
* Add each triangle to the rows that it is in. After this, the information about each row is still
|
||||
* scattered across multiple thread-specific lists. Those separate lists are then joined in a
|
||||
* separate step.
|
||||
*/
|
||||
static void sort_tris_into_rows(const Span<float2> uv_map,
|
||||
const Span<int3> corner_tris,
|
||||
const int resolution,
|
||||
threading::EnumerableThreadSpecific<LocalData> &data_per_thread)
|
||||
{
|
||||
threading::parallel_for(corner_tris.index_range(), 256, [&](const IndexRange tris_range) {
|
||||
LocalData &local_data = data_per_thread.local();
|
||||
for (const int tri_i : tris_range) {
|
||||
const int3 &tri = corner_tris[tri_i];
|
||||
|
||||
/* Compute the cells that the triangle touches approximately. */
|
||||
const Bounds<int2> cell_bounds = tri_to_cell_bounds(tri, resolution, uv_map);
|
||||
const TriWithRange tri_with_range{tri_i, cell_bounds.min.x, cell_bounds.max.x};
|
||||
|
||||
/* Go over each row that the triangle is in. */
|
||||
for (int cell_y = cell_bounds.min.y; cell_y <= cell_bounds.max.y; cell_y++) {
|
||||
LocalRowData &row = *local_data.rows.lookup_or_add_cb(
|
||||
cell_y, [&]() { return local_data.allocator.construct<LocalRowData>(); });
|
||||
row.tris.append(local_data.allocator, tri_with_range);
|
||||
row.x_min = std::min<int>(row.x_min, cell_bounds.min.x);
|
||||
row.x_max = std::max<int>(row.x_max, cell_bounds.max.x);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Consolidates the data that has been gather for each row so that it is each to look up which
|
||||
* triangles are in each cell.
|
||||
*/
|
||||
static void finish_rows(const Span<int> all_ys,
|
||||
const Span<const LocalData *> local_data_vec,
|
||||
const Bounds<int> y_bounds,
|
||||
ReverseUVSampler::LookupGrid &lookup_grid)
|
||||
{
|
||||
threading::parallel_for(all_ys.index_range(), 8, [&](const IndexRange all_ys_range) {
|
||||
Vector<const LocalRowData *, 32> local_rows;
|
||||
for (const int y : all_ys.slice(all_ys_range)) {
|
||||
Row &row = lookup_grid.rows[y - y_bounds.min];
|
||||
|
||||
local_rows.clear();
|
||||
for (const LocalData *local_data : local_data_vec) {
|
||||
if (const destruct_ptr<LocalRowData> *local_row = local_data->rows.lookup_ptr(y)) {
|
||||
local_rows.append(local_row->get());
|
||||
}
|
||||
}
|
||||
|
||||
int x_min = INT32_MAX;
|
||||
int x_max = INT32_MIN;
|
||||
for (const LocalRowData *local_row : local_rows) {
|
||||
x_min = std::min(x_min, local_row->x_min);
|
||||
x_max = std::max(x_max, local_row->x_max);
|
||||
}
|
||||
|
||||
const int x_num = x_max - x_min + 1;
|
||||
row.offsets.reinitialize(x_num + 1);
|
||||
{
|
||||
/* Count how many triangles are in each cell in the current row. */
|
||||
MutableSpan<int> counts = row.offsets;
|
||||
counts.fill(0);
|
||||
for (const LocalRowData *local_row : local_rows) {
|
||||
for (const TriWithRange &tri_with_range : local_row->tris) {
|
||||
for (int x = tri_with_range.x_min; x <= tri_with_range.x_max; x++) {
|
||||
counts[x - x_min]++;
|
||||
}
|
||||
}
|
||||
}
|
||||
offset_indices::accumulate_counts_to_offsets(counts);
|
||||
}
|
||||
const int tri_indices_num = row.offsets.last();
|
||||
row.tri_indices.reinitialize(tri_indices_num);
|
||||
|
||||
/* Populate the array containing all triangle indices in all cells in this row. */
|
||||
Array<int, 1000> current_offsets(x_num, 0);
|
||||
for (const LocalRowData *local_row : local_rows) {
|
||||
for (const TriWithRange &tri_with_range : local_row->tris) {
|
||||
for (int x = tri_with_range.x_min; x <= tri_with_range.x_max; x++) {
|
||||
const int offset_x = x - x_min;
|
||||
row.tri_indices[row.offsets[offset_x] + current_offsets[offset_x]] =
|
||||
tri_with_range.tri_index;
|
||||
current_offsets[offset_x]++;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
row.x_min = x_min;
|
||||
row.x_max = x_max;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
ReverseUVSampler::ReverseUVSampler(const Span<float2> uv_map, const Span<int3> corner_tris)
|
||||
: uv_map_(uv_map), corner_tris_(corner_tris), lookup_grid_(std::make_unique<LookupGrid>())
|
||||
{
|
||||
/* A lower resolution means that there will be fewer cells and more triangles in each cell. Fewer
|
||||
* cells make construction faster, but more triangles per cell make lookup slower. This value
|
||||
* needs to be determined experimentally. */
|
||||
resolution_ = std::max<int>(3, std::sqrt(corner_tris.size()) * 3);
|
||||
if (corner_tris.is_empty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
threading::EnumerableThreadSpecific<LocalData> data_per_thread;
|
||||
sort_tris_into_rows(uv_map_, corner_tris_, resolution_, data_per_thread);
|
||||
|
||||
VectorSet<int> all_ys;
|
||||
Vector<const LocalData *> local_data_vec;
|
||||
for (const LocalData &local_data : data_per_thread) {
|
||||
local_data_vec.append(&local_data);
|
||||
for (const int y : local_data.rows.keys()) {
|
||||
all_ys.add(y);
|
||||
}
|
||||
}
|
||||
|
||||
const Bounds<int> y_bounds = *bounds::min_max(all_ys.as_span());
|
||||
lookup_grid_->y_min = y_bounds.min;
|
||||
|
||||
const int rows_num = y_bounds.max - y_bounds.min + 1;
|
||||
lookup_grid_->rows.reinitialize(rows_num);
|
||||
|
||||
finish_rows(all_ys, local_data_vec, y_bounds, *lookup_grid_);
|
||||
}
|
||||
|
||||
static Span<int> lookup_tris_in_cell(const int2 cell,
|
||||
const ReverseUVSampler::LookupGrid &lookup_grid)
|
||||
{
|
||||
if (cell.y < lookup_grid.y_min) {
|
||||
return {};
|
||||
}
|
||||
if (cell.y >= lookup_grid.y_min + lookup_grid.rows.size()) {
|
||||
return {};
|
||||
}
|
||||
const Row &row = lookup_grid.rows[cell.y - lookup_grid.y_min];
|
||||
if (cell.x < row.x_min) {
|
||||
return {};
|
||||
}
|
||||
if (cell.x > row.x_max) {
|
||||
return {};
|
||||
}
|
||||
const int offset = row.offsets[cell.x - row.x_min];
|
||||
const int tris_num = row.offsets[cell.x - row.x_min + 1] - offset;
|
||||
return row.tri_indices.as_span().slice(offset, tris_num);
|
||||
}
|
||||
|
||||
ReverseUVSampler::Result ReverseUVSampler::sample(const float2 &query_uv) const
|
||||
{
|
||||
const int2 cell_key = uv_to_cell_key(query_uv, resolution_);
|
||||
const Span<int> tri_indices = corner_tris_by_cell_.lookup(cell_key);
|
||||
const int2 cell = uv_to_cell(query_uv, resolution_);
|
||||
const Span<int> tri_indices = lookup_tris_in_cell(cell, *lookup_grid_);
|
||||
|
||||
float best_dist = FLT_MAX;
|
||||
float3 best_bary_weights;
|
||||
|
@ -100,6 +281,8 @@ ReverseUVSampler::Result ReverseUVSampler::sample(const float2 &query_uv) const
|
|||
return Result{};
|
||||
}
|
||||
|
||||
ReverseUVSampler::~ReverseUVSampler() = default;
|
||||
|
||||
void ReverseUVSampler::sample_many(const Span<float2> query_uvs,
|
||||
MutableSpan<Result> r_results) const
|
||||
{
|
||||
|
|
|
@ -2,11 +2,11 @@
|
|||
*
|
||||
* SPDX-License-Identifier: GPL-2.0-or-later */
|
||||
|
||||
#include "BLI_task.hh"
|
||||
|
||||
#include "BKE_curves.hh"
|
||||
#include "BKE_grease_pencil.hh"
|
||||
#include "BKE_instances.hh"
|
||||
#include "BKE_mesh.hh"
|
||||
#include "BKE_pointcloud.hh"
|
||||
|
||||
#include "node_geometry_util.hh"
|
||||
|
||||
|
@ -21,214 +21,137 @@ static void node_declare(NodeDeclarationBuilder &b)
|
|||
b.add_output<decl::Geometry>("Geometry").propagate_all();
|
||||
}
|
||||
|
||||
constexpr GrainSize grain_size{10000};
|
||||
|
||||
static bool check_positions_are_original(const AttributeAccessor &attributes,
|
||||
const VArray<float3> &in_positions)
|
||||
static const auto &get_add_fn()
|
||||
{
|
||||
const bke::AttributeReader positions_read_only = attributes.lookup<float3>("position");
|
||||
if (positions_read_only.varray.is_span() && in_positions.is_span()) {
|
||||
return positions_read_only.varray.get_internal_span().data() ==
|
||||
in_positions.get_internal_span().data();
|
||||
}
|
||||
return false;
|
||||
static const auto fn = mf::build::SI2_SO<float3, float3, float3>(
|
||||
"Add",
|
||||
[](const float3 a, const float3 b) { return a + b; },
|
||||
mf::build::exec_presets::AllSpanOrSingle());
|
||||
return fn;
|
||||
}
|
||||
|
||||
static void write_offset_positions(const bool positions_are_original,
|
||||
const IndexMask &selection,
|
||||
const VArray<float3> &in_positions,
|
||||
const VArray<float3> &in_offsets,
|
||||
VMutableArray<float3> &out_positions)
|
||||
static const auto &get_sub_fn()
|
||||
{
|
||||
if (positions_are_original) {
|
||||
if (const std::optional<float3> offset = in_offsets.get_if_single()) {
|
||||
if (math::is_zero(*offset)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
MutableVArraySpan<float3> out_positions_span = out_positions;
|
||||
if (positions_are_original) {
|
||||
devirtualize_varray(in_offsets, [&](const auto in_offsets) {
|
||||
selection.foreach_index_optimized<int>(
|
||||
grain_size, [&](const int i) { out_positions_span[i] += in_offsets[i]; });
|
||||
});
|
||||
}
|
||||
else {
|
||||
devirtualize_varray2(
|
||||
in_positions, in_offsets, [&](const auto in_positions, const auto in_offsets) {
|
||||
selection.foreach_index_optimized<int>(grain_size, [&](const int i) {
|
||||
out_positions_span[i] = in_positions[i] + in_offsets[i];
|
||||
});
|
||||
});
|
||||
}
|
||||
out_positions_span.save();
|
||||
static const auto fn = mf::build::SI2_SO<float3, float3, float3>(
|
||||
"Add",
|
||||
[](const float3 a, const float3 b) { return a - b; },
|
||||
mf::build::exec_presets::AllSpanOrSingle());
|
||||
return fn;
|
||||
}
|
||||
|
||||
static void set_computed_position_and_offset(GeometryComponent &component,
|
||||
const VArray<float3> &in_positions,
|
||||
const VArray<float3> &in_offsets,
|
||||
const IndexMask &selection,
|
||||
MutableAttributeAccessor attributes)
|
||||
static void set_points_position(bke::MutableAttributeAccessor attributes,
|
||||
const fn::FieldContext &field_context,
|
||||
const Field<bool> &selection_field,
|
||||
const Field<float3> &position_field)
|
||||
{
|
||||
/* Optimize the case when `in_positions` references the original positions array. */
|
||||
switch (component.type()) {
|
||||
case GeometryComponent::Type::Curve: {
|
||||
if (attributes.contains("handle_right") && attributes.contains("handle_left")) {
|
||||
CurveComponent &curve_component = static_cast<CurveComponent &>(component);
|
||||
Curves &curves_id = *curve_component.get_for_write();
|
||||
bke::CurvesGeometry &curves = curves_id.geometry.wrap();
|
||||
SpanAttributeWriter<float3> handle_right_attribute =
|
||||
attributes.lookup_or_add_for_write_span<float3>("handle_right", AttrDomain::Point);
|
||||
SpanAttributeWriter<float3> handle_left_attribute =
|
||||
attributes.lookup_or_add_for_write_span<float3>("handle_left", AttrDomain::Point);
|
||||
|
||||
AttributeWriter<float3> positions = attributes.lookup_for_write<float3>("position");
|
||||
MutableVArraySpan<float3> out_positions_span = positions.varray;
|
||||
devirtualize_varray2(
|
||||
in_positions, in_offsets, [&](const auto in_positions, const auto in_offsets) {
|
||||
selection.foreach_index_optimized<int>(grain_size, [&](const int i) {
|
||||
const float3 new_position = in_positions[i] + in_offsets[i];
|
||||
const float3 delta = new_position - out_positions_span[i];
|
||||
handle_right_attribute.span[i] += delta;
|
||||
handle_left_attribute.span[i] += delta;
|
||||
out_positions_span[i] = new_position;
|
||||
});
|
||||
});
|
||||
|
||||
out_positions_span.save();
|
||||
positions.finish();
|
||||
handle_right_attribute.finish();
|
||||
handle_left_attribute.finish();
|
||||
|
||||
/* Automatic Bezier handles must be recalculated based on the new positions. */
|
||||
curves.calculate_bezier_auto_handles();
|
||||
break;
|
||||
}
|
||||
AttributeWriter<float3> positions = attributes.lookup_for_write<float3>("position");
|
||||
write_offset_positions(check_positions_are_original(attributes, in_positions),
|
||||
selection,
|
||||
in_positions,
|
||||
in_offsets,
|
||||
positions.varray);
|
||||
positions.finish();
|
||||
break;
|
||||
}
|
||||
case GeometryComponent::Type::Instance: {
|
||||
/* Special case for "position" which is no longer an attribute on instances. */
|
||||
auto &instances_component = reinterpret_cast<bke::InstancesComponent &>(component);
|
||||
bke::Instances &instances = *instances_component.get_for_write();
|
||||
VMutableArray<float3> positions = bke::instance_position_varray_for_write(instances);
|
||||
write_offset_positions(false, selection, in_positions, in_offsets, positions);
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
AttributeWriter<float3> positions = attributes.lookup_for_write<float3>("position");
|
||||
write_offset_positions(check_positions_are_original(attributes, in_positions),
|
||||
selection,
|
||||
in_positions,
|
||||
in_offsets,
|
||||
positions.varray);
|
||||
positions.finish();
|
||||
break;
|
||||
}
|
||||
}
|
||||
bke::try_capture_field_on_geometry(attributes,
|
||||
field_context,
|
||||
"position",
|
||||
bke::AttrDomain::Point,
|
||||
selection_field,
|
||||
position_field);
|
||||
}
|
||||
|
||||
static void set_position_in_grease_pencil(GreasePencilComponent &grease_pencil_component,
|
||||
static void set_curves_position(bke::CurvesGeometry &curves,
|
||||
const fn::FieldContext &field_context,
|
||||
const Field<bool> &selection_field,
|
||||
const Field<float3> &position_field)
|
||||
{
|
||||
MutableAttributeAccessor attributes = curves.attributes_for_write();
|
||||
if (attributes.contains("handle_right") && attributes.contains("handle_left")) {
|
||||
fn::Field<float3> delta(fn::FieldOperation::Create(
|
||||
get_sub_fn(), {position_field, bke::AttributeFieldInput::Create<float3>("position")}));
|
||||
for (const StringRef name : {"handle_left", "handle_right"}) {
|
||||
bke::try_capture_field_on_geometry(
|
||||
attributes,
|
||||
field_context,
|
||||
name,
|
||||
bke::AttrDomain::Point,
|
||||
selection_field,
|
||||
Field<float3>(fn::FieldOperation::Create(
|
||||
get_add_fn(), {bke::AttributeFieldInput::Create<float3>(name), delta})));
|
||||
}
|
||||
}
|
||||
set_points_position(attributes, field_context, selection_field, position_field);
|
||||
curves.calculate_bezier_auto_handles();
|
||||
}
|
||||
|
||||
static void set_position_in_grease_pencil(GreasePencil &grease_pencil,
|
||||
const Field<bool> &selection_field,
|
||||
const Field<float3> &position_field,
|
||||
const Field<float3> &offset_field)
|
||||
const Field<float3> &position_field)
|
||||
{
|
||||
using namespace blender::bke::greasepencil;
|
||||
GreasePencil &grease_pencil = *grease_pencil_component.get_for_write();
|
||||
/* Set position for each layer. */
|
||||
for (const int layer_index : grease_pencil.layers().index_range()) {
|
||||
Drawing *drawing = bke::greasepencil::get_eval_grease_pencil_layer_drawing_for_write(
|
||||
grease_pencil, layer_index);
|
||||
if (drawing == nullptr || drawing->strokes().points_num() == 0) {
|
||||
continue;
|
||||
}
|
||||
bke::GreasePencilLayerFieldContext field_context(
|
||||
grease_pencil, AttrDomain::Point, layer_index);
|
||||
fn::FieldEvaluator evaluator{field_context, drawing->strokes().points_num()};
|
||||
evaluator.set_selection(selection_field);
|
||||
evaluator.add(position_field);
|
||||
evaluator.add(offset_field);
|
||||
evaluator.evaluate();
|
||||
|
||||
const IndexMask selection = evaluator.get_evaluated_selection_as_mask();
|
||||
if (selection.is_empty()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
MutableAttributeAccessor attributes = drawing->strokes_for_write().attributes_for_write();
|
||||
const VArray<float3> positions_input = evaluator.get_evaluated<float3>(0);
|
||||
const VArray<float3> offsets_input = evaluator.get_evaluated<float3>(1);
|
||||
set_computed_position_and_offset(
|
||||
grease_pencil_component, positions_input, offsets_input, selection, attributes);
|
||||
set_curves_position(
|
||||
drawing->strokes_for_write(),
|
||||
bke::GreasePencilLayerFieldContext(grease_pencil, bke::AttrDomain::Point, layer_index),
|
||||
selection_field,
|
||||
position_field);
|
||||
drawing->tag_positions_changed();
|
||||
}
|
||||
}
|
||||
|
||||
static void set_position_in_component(GeometrySet &geometry,
|
||||
GeometryComponent::Type component_type,
|
||||
const Field<bool> &selection_field,
|
||||
const Field<float3> &position_field,
|
||||
const Field<float3> &offset_field)
|
||||
static void set_instances_position(bke::Instances &instances,
|
||||
const Field<bool> &selection_field,
|
||||
const Field<float3> &position_field)
|
||||
{
|
||||
const GeometryComponent &component = *geometry.get_component(component_type);
|
||||
const AttrDomain domain = component.type() == GeometryComponent::Type::Instance ?
|
||||
AttrDomain::Instance :
|
||||
AttrDomain::Point;
|
||||
const int domain_size = component.attribute_domain_size(domain);
|
||||
if (domain_size == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
bke::GeometryFieldContext field_context{component, domain};
|
||||
fn::FieldEvaluator evaluator{field_context, domain_size};
|
||||
const bke::InstancesFieldContext context(instances);
|
||||
fn::FieldEvaluator evaluator(context, instances.instances_num());
|
||||
evaluator.set_selection(selection_field);
|
||||
evaluator.add(position_field);
|
||||
evaluator.add(offset_field);
|
||||
|
||||
/* Use a temporary array for the output to avoid potentially reading from freed memory if
|
||||
* retrieving the transforms has to make a mutable copy (then we can't depend on the user count
|
||||
* of the original read-only data). */
|
||||
Array<float3> result(instances.instances_num());
|
||||
evaluator.add_with_destination(position_field, result.as_mutable_span());
|
||||
evaluator.evaluate();
|
||||
|
||||
const IndexMask selection = evaluator.get_evaluated_selection_as_mask();
|
||||
if (selection.is_empty()) {
|
||||
return;
|
||||
}
|
||||
|
||||
GeometryComponent &mutable_component = geometry.get_component_for_write(component_type);
|
||||
MutableAttributeAccessor attributes = *mutable_component.attributes_for_write();
|
||||
const VArray<float3> positions_input = evaluator.get_evaluated<float3>(0);
|
||||
const VArray<float3> offsets_input = evaluator.get_evaluated<float3>(1);
|
||||
set_computed_position_and_offset(
|
||||
mutable_component, positions_input, offsets_input, selection, attributes);
|
||||
MutableSpan<float4x4> transforms = instances.transforms_for_write();
|
||||
threading::parallel_for(transforms.index_range(), 2048, [&](const IndexRange range) {
|
||||
for (const int i : range) {
|
||||
transforms[i].location() = result[i];
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
static void node_geo_exec(GeoNodeExecParams params)
|
||||
{
|
||||
GeometrySet geometry = params.extract_input<GeometrySet>("Geometry");
|
||||
Field<bool> selection_field = params.extract_input<Field<bool>>("Selection");
|
||||
Field<float3> offset_field = params.extract_input<Field<float3>>("Offset");
|
||||
Field<float3> position_field = params.extract_input<Field<float3>>("Position");
|
||||
const Field<bool> selection_field = params.extract_input<Field<bool>>("Selection");
|
||||
const fn::Field<float3> position_field(
|
||||
fn::FieldOperation::Create(get_add_fn(),
|
||||
{params.extract_input<Field<float3>>("Position"),
|
||||
params.extract_input<Field<float3>>("Offset")}));
|
||||
|
||||
if (geometry.has_grease_pencil()) {
|
||||
set_position_in_grease_pencil(geometry.get_component_for_write<GreasePencilComponent>(),
|
||||
selection_field,
|
||||
position_field,
|
||||
offset_field);
|
||||
if (Mesh *mesh = geometry.get_mesh_for_write()) {
|
||||
set_points_position(mesh->attributes_for_write(),
|
||||
bke::MeshFieldContext(*mesh, bke::AttrDomain::Point),
|
||||
selection_field,
|
||||
position_field);
|
||||
}
|
||||
|
||||
for (const GeometryComponent::Type type : {GeometryComponent::Type::Mesh,
|
||||
GeometryComponent::Type::PointCloud,
|
||||
GeometryComponent::Type::Curve,
|
||||
GeometryComponent::Type::Instance})
|
||||
{
|
||||
if (geometry.has(type)) {
|
||||
set_position_in_component(geometry, type, selection_field, position_field, offset_field);
|
||||
}
|
||||
if (PointCloud *point_cloud = geometry.get_pointcloud_for_write()) {
|
||||
set_points_position(point_cloud->attributes_for_write(),
|
||||
bke::PointCloudFieldContext(*point_cloud),
|
||||
selection_field,
|
||||
position_field);
|
||||
}
|
||||
if (Curves *curves_id = geometry.get_curves_for_write()) {
|
||||
bke::CurvesGeometry &curves = curves_id->geometry.wrap();
|
||||
set_curves_position(curves,
|
||||
bke::CurvesFieldContext(curves, bke::AttrDomain::Point),
|
||||
selection_field,
|
||||
position_field);
|
||||
}
|
||||
if (GreasePencil *grease_pencil = geometry.get_grease_pencil_for_write()) {
|
||||
set_position_in_grease_pencil(*grease_pencil, selection_field, position_field);
|
||||
}
|
||||
if (bke::Instances *instances = geometry.get_instances_for_write()) {
|
||||
set_instances_position(*instances, selection_field, position_field);
|
||||
}
|
||||
|
||||
params.set_output("Geometry", std::move(geometry));
|
||||
|
|
|
@ -514,7 +514,7 @@ class RealtimeCompositor {
|
|||
* However, while this has been tested on Linux and works well, on macOS it causes to
|
||||
* spontaneous invalid colors in the composite output. The Windows has not been extensively
|
||||
* tested yet. */
|
||||
#if defined(__linux__)
|
||||
#if defined(__linux__) || defined(_WIN32)
|
||||
if (G.background) {
|
||||
/* In the background mode the system context of the render engine might be nullptr, which
|
||||
* forces some code paths which more tightly couple it with the draw manager.
|
||||
|
@ -546,7 +546,7 @@ class RealtimeCompositor {
|
|||
context_->viewer_output_to_viewer_image();
|
||||
texture_pool_->free_unused_and_reset();
|
||||
|
||||
#if defined(__linux__)
|
||||
#if defined(__linux__) || defined(_WIN32)
|
||||
GPU_flush();
|
||||
GPU_render_end();
|
||||
GPU_context_active_set(nullptr);
|
||||
|
|
|
@ -2115,9 +2115,12 @@ static bool wm_autosave_write_try(Main *bmain, wmWindowManager *wm)
|
|||
|
||||
wm_autosave_location(filepath);
|
||||
|
||||
if (MemFile *memfile = ED_undosys_stack_memfile_get_if_active(wm->undo_stack)) {
|
||||
/* Fast save of last undo-buffer, now with UI. */
|
||||
BLO_memfile_write_file(memfile, filepath);
|
||||
/* Technically, we could always just save here, but that would cause performance regressions
|
||||
* compared to when the #MemFile undo step was used for saving undo-steps. So for now just skip
|
||||
* auto-save when we are in a mode where auto-save wouldn't have worked previously anyway. This
|
||||
* check can be removed once the performance regressions have been solved. */
|
||||
if (ED_undosys_stack_memfile_get_if_active(wm->undo_stack) != nullptr) {
|
||||
WM_autosave_write(wm, bmain);
|
||||
return true;
|
||||
}
|
||||
if ((U.uiflag & USER_GLOBALUNDO) == 0) {
|
||||
|
|
|
@ -473,26 +473,19 @@ void WM_exit_ex(bContext *C, const bool do_python_exit, const bool do_user_exit_
|
|||
/* NOTE: same code copied in `wm_files.cc`. */
|
||||
if (C && wm) {
|
||||
if (do_user_exit_actions) {
|
||||
/* save the undo state as quit.blend */
|
||||
/* Save quit.blend. */
|
||||
Main *bmain = CTX_data_main(C);
|
||||
char filepath[FILE_MAX];
|
||||
BLI_path_join(filepath, sizeof(filepath), BKE_tempdir_base(), BLENDER_QUIT_FILE);
|
||||
MemFile *undo_memfile = wm->undo_stack ?
|
||||
ED_undosys_stack_memfile_get_if_active(wm->undo_stack) :
|
||||
nullptr;
|
||||
/* When true, the `undo_memfile` doesn't contain all information necessary
|
||||
* for writing and up to date blend file. */
|
||||
const bool is_memfile_outdated = ED_editors_flush_edits(bmain);
|
||||
const int fileflags = G.fileflags & ~G_FILE_COMPRESS;
|
||||
|
||||
if (undo_memfile && !is_memfile_outdated) {
|
||||
BLO_memfile_write_file(undo_memfile, filepath);
|
||||
BLI_path_join(filepath, sizeof(filepath), BKE_tempdir_base(), BLENDER_QUIT_FILE);
|
||||
|
||||
ED_editors_flush_edits(bmain);
|
||||
|
||||
BlendFileWriteParams blend_file_write_params{};
|
||||
if (BLO_write_file(bmain, filepath, fileflags, &blend_file_write_params, nullptr)) {
|
||||
printf("Saved session recovery to \"%s\"\n", filepath);
|
||||
}
|
||||
else {
|
||||
const int fileflags = G.fileflags & ~G_FILE_COMPRESS;
|
||||
BlendFileWriteParams blend_file_write_params{};
|
||||
BLO_write_file(bmain, filepath, fileflags, &blend_file_write_params, nullptr);
|
||||
}
|
||||
printf("Saved session recovery to \"%s\"\n", filepath);
|
||||
}
|
||||
|
||||
WM_jobs_kill_all(wm);
|
||||
|
|
|
@ -54,12 +54,6 @@
|
|||
|
||||
# include "creator_intern.h" /* own include */
|
||||
|
||||
// #define USE_WRITE_CRASH_BLEND
|
||||
# ifdef USE_WRITE_CRASH_BLEND
|
||||
# include "BKE_undo_system.hh"
|
||||
# include "BLO_undofile.hh"
|
||||
# endif
|
||||
|
||||
/* set breakpoints here when running in debug mode, useful to catch floating point errors */
|
||||
# if defined(__linux__) || defined(_WIN32) || defined(OSX_SSE_FPE)
|
||||
static void sig_handle_fpe(int /*sig*/)
|
||||
|
@ -97,28 +91,6 @@ static void sig_handle_crash(int signum)
|
|||
|
||||
wmWindowManager *wm = G_MAIN ? static_cast<wmWindowManager *>(G_MAIN->wm.first) : nullptr;
|
||||
|
||||
# ifdef USE_WRITE_CRASH_BLEND
|
||||
if (wm && wm->undo_stack) {
|
||||
struct MemFile *memfile = BKE_undosys_stack_memfile_get_active(wm->undo_stack);
|
||||
if (memfile) {
|
||||
char filepath[FILE_MAX];
|
||||
|
||||
if (!(G_MAIN && G_MAIN->filepath[0])) {
|
||||
BLI_path_join(filepath, sizeof(filepath), BKE_tempdir_base(), "crash.blend");
|
||||
}
|
||||
else {
|
||||
STRNCPY(filepath, G_MAIN->filepath);
|
||||
BLI_path_extension_replace(filepath, sizeof(filepath), ".crash.blend");
|
||||
}
|
||||
|
||||
printf("Writing: %s\n", filepath);
|
||||
fflush(stdout);
|
||||
|
||||
BLO_memfile_write_file(memfile, filepath);
|
||||
}
|
||||
}
|
||||
# endif
|
||||
|
||||
FILE *fp;
|
||||
char header[512];
|
||||
|
||||
|
|
Loading…
Reference in New Issue