Compare commits
40 commits: temp_test_...temp-field

Commits (SHA1):
d26165747e
6dc2045054
362bd7889b
620da869f1
fc4f82d200
8c7c4549d1
11e11c41f2
7da9da2b27
029d042e85
07b482c2ff
1e3c5fdb85
4eba920d15
1f51672d71
738f1dbeff
c5c8c68eec
c773443845
f973e0b75a
c9e835fec1
25aa943e8c
276eebb274
f256bfb3e2
942c471ce9
257c7753e9
69697fcca9
25e548c96b
bdbc7e12a0
970c928f27
2618df7d03
136e357d8d
a229a9dd64
50df35e4a4
6a72188b3e
5d183c5af3
dcf72a30e1
0bec1f5dad
a5fbd81510
234de0bf71
a5b9323fd5
52de232811
1931878f57
@@ -200,13 +200,14 @@ def submodules_update(args, release_version, branch):
if msg:
skip_msg += submodule_path + " skipped: " + msg + "\n"
else:
# We are using `exit_on_error=False` here because sub-modules are allowed to not have requested branch,
# in which case falling back to default back-up branch is fine.
if make_utils.git_branch(args.git_command) != submodule_branch:
call([args.git_command, "fetch", "origin"])
call([args.git_command, "checkout", submodule_branch])
call([args.git_command, "pull", "--rebase", "origin", submodule_branch])
call([args.git_command, "checkout", submodule_branch], exit_on_error=False)
call([args.git_command, "pull", "--rebase", "origin", submodule_branch], exit_on_error=False)
# If we cannot find the specified branch for this submodule, fallback to default one (aka master).
if make_utils.git_branch(args.git_command) != submodule_branch:
call([args.git_command, "fetch", "origin"])
call([args.git_command, "checkout", submodule_branch_fallback])
call([args.git_command, "pull", "--rebase", "origin", submodule_branch_fallback])
finally:

@@ -14,7 +14,7 @@ sound = aud.Sound('music.ogg')
# play the audio, this return a handle to control play/pause
handle = device.play(sound)
# if the audio is not too big and will be used often you can buffer it
sound_buffered = aud.Sound.buffer(sound)
sound_buffered = aud.Sound.cache(sound)
handle_buffered = device.play(sound_buffered)

# stop the sounds (otherwise they play until their ends)

extern/audaspace/CMakeLists.txt (vendored, 6 changed lines)
@@ -152,6 +152,7 @@ set(PUBLIC_HDR
include/devices/ThreadedDevice.h
include/Exception.h
include/file/File.h
include/file/FileInfo.h
include/file/FileManager.h
include/file/FileWriter.h
include/file/IFileInput.h
@@ -960,7 +961,10 @@ endif()
if(BUILD_DEMOS)
include_directories(${INCLUDE})

set(DEMOS audaplay audaconvert audaremap signalgen randsounds dynamicmusic playbackmanager)
set(DEMOS audainfo audaplay audaconvert audaremap signalgen randsounds dynamicmusic playbackmanager)

add_executable(audainfo demos/audainfo.cpp)
target_link_libraries(audainfo audaspace)

add_executable(audaplay demos/audaplay.cpp)
target_link_libraries(audaplay audaspace)

@@ -39,7 +39,7 @@ extern AUD_API void AUD_PlaybackManager_free(AUD_PlaybackManager* manager);
* Plays a sound through the playback manager, adding it into a category.
* \param manager The PlaybackManager object.
* \param sound The sound to be played.
* \param catKey The key of the category into which the sound will be added. If it doesn't exist a new one will be creatd.
* \param catKey The key of the category into which the sound will be added. If it doesn't exist a new one will be created.
*/
extern AUD_API void AUD_PlaybackManager_play(AUD_PlaybackManager* manager, AUD_Sound* sound, unsigned int catKey);

extern/audaspace/bindings/C/AUD_Sound.cpp (vendored, 42 changed lines)
@@ -94,6 +94,36 @@ AUD_API int AUD_Sound_getLength(AUD_Sound* sound)
return (*sound)->createReader()->getLength();
}

AUD_API int AUD_Sound_getFileStreams(AUD_Sound* sound, AUD_StreamInfo **stream_infos)
{
assert(sound);

std::shared_ptr<File> file = std::dynamic_pointer_cast<File>(*sound);

if(file)
{
auto streams = file->queryStreams();

size_t size = sizeof(AUD_StreamInfo) * streams.size();

if(!size)
{
*stream_infos = nullptr;
return 0;
}

*stream_infos = reinterpret_cast<AUD_StreamInfo*>(std::malloc(size));
std::memcpy(*stream_infos, streams.data(), size);

return streams.size();
}
else
{
*stream_infos = nullptr;
return 0;
}
}

AUD_API sample_t* AUD_Sound_data(AUD_Sound* sound, int* length, AUD_Specs* specs)
{
assert(sound);
@@ -252,6 +282,12 @@ AUD_API AUD_Sound* AUD_Sound_bufferFile(unsigned char* buffer, int size)
return new AUD_Sound(new File(buffer, size));
}

AUD_API AUD_Sound* AUD_Sound_bufferFileStream(unsigned char* buffer, int size, int stream)
{
assert(buffer);
return new AUD_Sound(new File(buffer, size, stream));
}

AUD_API AUD_Sound* AUD_Sound_cache(AUD_Sound* sound)
{
assert(sound);
@@ -272,6 +308,12 @@ AUD_API AUD_Sound* AUD_Sound_file(const char* filename)
return new AUD_Sound(new File(filename));
}

AUD_API AUD_Sound* AUD_Sound_fileStream(const char* filename, int stream)
{
assert(filename);
return new AUD_Sound(new File(filename, stream));
}

AUD_API AUD_Sound* AUD_Sound_sawtooth(float frequency, AUD_SampleRate rate)
{
return new AUD_Sound(new Sawtooth(frequency, rate));

extern/audaspace/bindings/C/AUD_Sound.h (vendored, 27 changed lines)
@@ -36,7 +36,15 @@ extern AUD_API AUD_Specs AUD_Sound_getSpecs(AUD_Sound* sound);
* \return The length of the sound in samples.
* \note This function creates a reader from the sound and deletes it again.
*/
extern AUD_API int AUD_getLength(AUD_Sound* sound);
extern AUD_API int AUD_Sound_getLength(AUD_Sound* sound);

/**
* Retrieves the stream infos of a sound file.
* \param sound The sound to retrieve from which must be a file sound.
* \param infos A pointer to a AUD_StreamInfo array that will be allocated and must afterwards be freed by the caller.
* \return The number of items in the infos array.
*/
extern AUD_API int AUD_Sound_getFileStreams(AUD_Sound* sound, AUD_StreamInfo** stream_infos);

/**
* Reads a sound's samples into memory.
@@ -89,6 +97,15 @@ extern AUD_API AUD_Sound* AUD_Sound_buffer(sample_t* data, int length, AUD_Specs
*/
extern AUD_API AUD_Sound* AUD_Sound_bufferFile(unsigned char* buffer, int size);

/**
* Loads a sound file from a memory buffer.
* \param buffer The buffer which contains the sound file.
* \param size The size of the buffer.
* \param stream The index of the audio stream within the file if it contains multiple audio streams.
* \return A handle of the sound file.
*/
extern AUD_API AUD_Sound* AUD_Sound_bufferFileStream(unsigned char* buffer, int size, int stream);

/**
* Caches a sound into a memory buffer.
* \param sound The sound to cache.
@@ -103,6 +120,14 @@ extern AUD_API AUD_Sound* AUD_Sound_cache(AUD_Sound* sound);
*/
extern AUD_API AUD_Sound* AUD_Sound_file(const char* filename);

/**
* Loads a sound file.
* \param filename The filename of the sound file.
* \param stream The index of the audio stream within the file if it contains multiple audio streams.
* \return A handle of the sound file.
*/
extern AUD_API AUD_Sound* AUD_Sound_fileStream(const char* filename, int stream);

/**
* Creates a sawtooth sound.
* \param frequency The frequency of the generated sawtooth sound.

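To make the intent of the new C entry points concrete, here is a minimal usage sketch (not part of the diff). The file name is a placeholder, AUD_Sound_free is assumed from the existing C API, and the caller frees the returned array as the header documentation requires.

#include "AUD_Sound.h"
#include <cstdio>
#include <cstdlib>

// Hypothetical caller: enumerate the audio streams of a container file and open one of them.
void list_and_open_streams()
{
	AUD_Sound* sound = AUD_Sound_file("movie.mkv");

	AUD_StreamInfo* infos = nullptr;
	int count = AUD_Sound_getFileStreams(sound, &infos);

	for(int i = 0; i < count; i++)
		std::printf("stream %d: start %f s, duration %f s\n", i, infos[i].start, infos[i].duration);

	// The array is allocated with std::malloc inside audaspace and owned by the caller.
	std::free(infos);

	// Open the second audio stream, if the file has one.
	if(count > 1)
	{
		AUD_Sound* second = AUD_Sound_fileStream("movie.mkv", 1);
		AUD_Sound_free(second);
	}

	AUD_Sound_free(sound);
}
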
extern/audaspace/bindings/C/AUD_Special.cpp (vendored, 8 changed lines)
@@ -86,7 +86,6 @@ AUD_API AUD_SoundInfo AUD_getInfo(AUD_Sound* sound)
info.specs.channels = AUD_CHANNELS_INVALID;
info.specs.rate = AUD_RATE_INVALID;
info.length = 0.0f;
info.start_offset = 0.0f;

try
{
@@ -96,7 +95,6 @@ AUD_API AUD_SoundInfo AUD_getInfo(AUD_Sound* sound)
{
info.specs = convSpecToC(reader->getSpecs());
info.length = reader->getLength() / (float) info.specs.rate;
info.start_offset = reader->getStartOffset();
}
}
catch(Exception&)
@@ -109,7 +107,7 @@ AUD_API AUD_SoundInfo AUD_getInfo(AUD_Sound* sound)
AUD_API float* AUD_readSoundBuffer(const char* filename, float low, float high,
float attack, float release, float threshold,
int accumulate, int additive, int square,
float sthreshold, double samplerate, int* length)
float sthreshold, double samplerate, int* length, int stream)
{
Buffer buffer;
DeviceSpecs specs;
@@ -117,7 +115,7 @@ AUD_API float* AUD_readSoundBuffer(const char* filename, float low, float high,
specs.rate = (SampleRate)samplerate;
std::shared_ptr<ISound> sound;

std::shared_ptr<ISound> file = std::shared_ptr<ISound>(new File(filename));
std::shared_ptr<ISound> file = std::shared_ptr<ISound>(new File(filename, stream));

int position = 0;

@@ -247,7 +245,7 @@ AUD_API int AUD_readSound(AUD_Sound* sound, float* buffer, int length, int sampl

buffer[i * 3] = min;
buffer[i * 3 + 1] = max;
buffer[i * 3 + 2] = sqrt(power / len); // RMS
buffer[i * 3 + 2] = std::sqrt(power / len);

if(overallmax < max)
overallmax = max;

extern/audaspace/bindings/C/AUD_Special.h (vendored, 2 changed lines)
@@ -37,7 +37,7 @@ extern AUD_API float* AUD_readSoundBuffer(const char* filename, float low, float
float attack, float release, float threshold,
int accumulate, int additive, int square,
float sthreshold, double samplerate,
int* length);
int* length, int stream);

/**
* Pauses a playing sound after a specific amount of time.

extern/audaspace/bindings/C/AUD_Types.h (vendored, 14 changed lines)
@@ -176,5 +176,17 @@ typedef struct
{
AUD_Specs specs;
float length;
double start_offset;
} AUD_SoundInfo;

/// Specification of a sound source.
typedef struct
{
/// Start time in seconds.
double start;

/// Duration in seconds. May be estimated or 0 if unknown.
double duration;

/// Audio data parameters.
AUD_DeviceSpecs specs;
} AUD_StreamInfo;

extern/audaspace/bindings/python/PySound.cpp (vendored, 12 changed lines)
@@ -89,10 +89,11 @@ Sound_new(PyTypeObject* type, PyObject* args, PyObject* kwds)
self = (Sound*)type->tp_alloc(type, 0);
if(self != nullptr)
{
static const char* kwlist[] = {"filename", nullptr};
static const char* kwlist[] = {"filename", "stream", nullptr};
const char* filename = nullptr;
int stream = 0;

if(!PyArg_ParseTupleAndKeywords(args, kwds, "s:Sound", const_cast<char**>(kwlist), &filename))
if(!PyArg_ParseTupleAndKeywords(args, kwds, "s|i:Sound", const_cast<char**>(kwlist), &filename, &stream))
{
Py_DECREF(self);
return nullptr;
@@ -100,7 +101,7 @@ Sound_new(PyTypeObject* type, PyObject* args, PyObject* kwds)

try
{
self->sound = new std::shared_ptr<ISound>(new File(filename));
self->sound = new std::shared_ptr<ISound>(new File(filename, stream));
}
catch(Exception& e)
{
@@ -407,8 +408,9 @@ static PyObject *
Sound_file(PyTypeObject* type, PyObject* args)
{
const char* filename = nullptr;
int stream = 0;

if(!PyArg_ParseTuple(args, "s:file", &filename))
if(!PyArg_ParseTuple(args, "s|i:file", &filename, &stream))
return nullptr;

Sound* self;
@@ -418,7 +420,7 @@ Sound_file(PyTypeObject* type, PyObject* args)
{
try
{
self->sound = new std::shared_ptr<ISound>(new File(filename));
self->sound = new std::shared_ptr<ISound>(new File(filename, stream));
}
catch(Exception& e)
{

extern/audaspace/include/IReader.h (vendored, 6 changed lines)
@@ -70,12 +70,6 @@ public:
*/
virtual int getPosition() const=0;

/**
* Returns the start offset the sound should have to line up with related sources.
* \return The required start offset in seconds.
*/
virtual double getStartOffset() const { return 0.0;}

/**
* Returns the specification of the reader.
* \return The Specs structure.

extern/audaspace/include/file/File.h (vendored, 23 changed lines)
@@ -23,9 +23,11 @@
*/

#include "ISound.h"
#include "FileInfo.h"

#include <string>
#include <memory>
#include <vector>

AUD_NAMESPACE_BEGIN

@@ -48,6 +50,14 @@ private:
*/
std::shared_ptr<Buffer> m_buffer;

/**
* The index of the stream within the file if it contains multiple.
* The first audio stream in the file has index 0 and the index increments by one
* for every other audio stream in the file. Other types of streams in the file
* do not count.
*/
int m_stream;

// delete copy constructor and operator=
File(const File&) = delete;
File& operator=(const File&) = delete;
@@ -57,16 +67,25 @@ public:
* Creates a new sound.
* The file is read from the file system using the given path.
* \param filename The sound file path.
* \param stream The index of the audio stream within the file if it contains multiple audio streams.
*/
File(std::string filename);
File(std::string filename, int stream = 0);

/**
* Creates a new sound.
* The file is read from memory using the supplied buffer.
* \param buffer The buffer to read from.
* \param size The size of the buffer.
* \param stream The index of the audio stream within the file if it contains multiple audio streams.
*/
File(const data_t* buffer, int size);
File(const data_t* buffer, int size, int stream = 0);

/**
* Queries the streams of the file.
* \return A vector with as many streams as there are in the file.
* \exception Exception Thrown if the file specified cannot be read.
*/
std::vector<StreamInfo> queryStreams();

virtual std::shared_ptr<IReader> createReader();
};

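As a usage sketch (not part of the diff), the new constructor parameter and queryStreams() might be exercised like this; the file path is a placeholder and error handling is omitted.

#include "file/File.h"
#include <iostream>

void print_streams()
{
	// Select the second audio stream (index 1) of a hypothetical container file.
	aud::File file("movie.mkv", 1);

	// StreamInfo carries start, duration and the audio specification of each stream.
	for(const aud::StreamInfo& info : file.queryStreams())
		std::cout << "start: " << info.start << " s, duration: " << info.duration
		          << " s, rate: " << info.specs.rate << " Hz" << std::endl;
}
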
extern/audaspace/include/file/FileInfo.h (vendored, new file, 42 changed lines)
@@ -0,0 +1,42 @@
/*******************************************************************************
* Copyright 2009-2016 Jörg Müller
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/

#pragma once

/**
* @file FileInfo.h
* @ingroup file
* The FileInfo data structures.
*/

#include "respec/Specification.h"

AUD_NAMESPACE_BEGIN

/// Specification of a sound source.
struct StreamInfo
{
/// Start time in seconds.
double start;

/// Duration in seconds. May be estimated or 0 if unknown.
double duration;

/// Audio data parameters.
DeviceSpecs specs;
};

AUD_NAMESPACE_END

extern/audaspace/include/file/FileManager.h (vendored, 24 changed lines)
@@ -22,12 +22,14 @@
* The FileManager class.
*/

#include "FileInfo.h"
#include "respec/Specification.h"
#include "IWriter.h"

#include <list>
#include <memory>
#include <string>
#include <vector>

AUD_NAMESPACE_BEGIN

@@ -66,18 +68,36 @@ public:
/**
* Creates a file reader for the given filename if a registed IFileInput is able to read it.
* @param filename The path to the file.
* @param stream The index of the audio stream within the file if it contains multiple audio streams.
* @return The reader created.
* @exception Exception If no file input can read the file an exception is thrown.
*/
static std::shared_ptr<IReader> createReader(std::string filename);
static std::shared_ptr<IReader> createReader(std::string filename, int stream = 0);

/**
* Creates a file reader for the given buffer if a registed IFileInput is able to read it.
* @param buffer The buffer to read the file from.
* @param stream The index of the audio stream within the file if it contains multiple audio streams.
* @return The reader created.
* @exception Exception If no file input can read the file an exception is thrown.
*/
static std::shared_ptr<IReader> createReader(std::shared_ptr<Buffer> buffer);
static std::shared_ptr<IReader> createReader(std::shared_ptr<Buffer> buffer, int stream = 0);

/**
* Queries the streams of a sound file.
* \param filename Path to the file to be read.
* \return A vector with as many streams as there are in the file.
* \exception Exception Thrown if the file specified cannot be read.
*/
static std::vector<StreamInfo> queryStreams(std::string filename);

/**
* Queries the streams of a sound file.
* \param buffer The in-memory file buffer.
* \return A vector with as many streams as there are in the file.
* \exception Exception Thrown if the file specified cannot be read.
*/
static std::vector<StreamInfo> queryStreams(std::shared_ptr<Buffer> buffer);

/**
* Creates a file writer that writes a sound to the given file path.

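A hedged sketch of how the stream-aware FileManager entry points might be combined (not part of the diff; the path is a placeholder and plugin registration is assumed to have happened already):

#include "file/FileManager.h"
#include "IReader.h"
#include <iostream>
#include <memory>
#include <vector>

void probe_file()
{
	// Query stream metadata first, then open a reader for stream 1 if the file has one.
	std::vector<aud::StreamInfo> streams = aud::FileManager::queryStreams("movie.mkv");

	if(streams.size() > 1)
	{
		std::shared_ptr<aud::IReader> reader = aud::FileManager::createReader("movie.mkv", 1);
		std::cout << "length in samples: " << reader->getLength() << std::endl;
	}
}
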
extern/audaspace/include/file/IFileInput.h (vendored, 24 changed lines)
@@ -23,9 +23,11 @@
*/

#include "Audaspace.h"
#include "FileInfo.h"

#include <memory>
#include <string>
#include <vector>

AUD_NAMESPACE_BEGIN

@@ -48,18 +50,36 @@ public:
/**
* Creates a reader for a file to be read.
* \param filename Path to the file to be read.
* \param stream The index of the audio stream within the file if it contains multiple audio streams.
* \return The reader that reads the file.
* \exception Exception Thrown if the file specified cannot be read.
*/
virtual std::shared_ptr<IReader> createReader(std::string filename)=0;
virtual std::shared_ptr<IReader> createReader(std::string filename, int stream = 0)=0;

/**
* Creates a reader for a file to be read from memory.
* \param buffer The in-memory file buffer.
* \param stream The index of the audio stream within the file if it contains multiple audio streams.
* \return The reader that reads the file.
* \exception Exception Thrown if the file specified cannot be read.
*/
virtual std::shared_ptr<IReader> createReader(std::shared_ptr<Buffer> buffer)=0;
virtual std::shared_ptr<IReader> createReader(std::shared_ptr<Buffer> buffer, int stream = 0)=0;

/**
* Queries the streams of a sound file.
* \param filename Path to the file to be read.
* \return A vector with as many streams as there are in the file.
* \exception Exception Thrown if the file specified cannot be read.
*/
virtual std::vector<StreamInfo> queryStreams(std::string filename)=0;

/**
* Queries the streams of a sound file.
* \param buffer The in-memory file buffer.
* \return A vector with as many streams as there are in the file.
* \exception Exception Thrown if the file specified cannot be read.
*/
virtual std::vector<StreamInfo> queryStreams(std::shared_ptr<Buffer> buffer)=0;
};

AUD_NAMESPACE_END

extern/audaspace/include/fx/VolumeReader.h (vendored, 2 changed lines)
@@ -67,4 +67,4 @@ public:
virtual void read(int& length, bool& eos, sample_t* buffer);
};

AUD_NAMESPACE_END
AUD_NAMESPACE_END

extern/audaspace/plugins/ffmpeg/FFMPEG.cpp (vendored, 18 changed lines)
@@ -35,14 +35,24 @@ void FFMPEG::registerPlugin()
FileManager::registerOutput(plugin);
}

std::shared_ptr<IReader> FFMPEG::createReader(std::string filename)
std::shared_ptr<IReader> FFMPEG::createReader(std::string filename, int stream)
{
return std::shared_ptr<IReader>(new FFMPEGReader(filename));
return std::shared_ptr<IReader>(new FFMPEGReader(filename, stream));
}

std::shared_ptr<IReader> FFMPEG::createReader(std::shared_ptr<Buffer> buffer)
std::shared_ptr<IReader> FFMPEG::createReader(std::shared_ptr<Buffer> buffer, int stream)
{
return std::shared_ptr<IReader>(new FFMPEGReader(buffer));
return std::shared_ptr<IReader>(new FFMPEGReader(buffer, stream));
}

std::vector<StreamInfo> FFMPEG::queryStreams(std::string filename)
{
return FFMPEGReader(filename).queryStreams();
}

std::vector<StreamInfo> FFMPEG::queryStreams(std::shared_ptr<Buffer> buffer)
{
return FFMPEGReader(buffer).queryStreams();
}

std::shared_ptr<IWriter> FFMPEG::createWriter(std::string filename, DeviceSpecs specs, Container format, Codec codec, unsigned int bitrate)

extern/audaspace/plugins/ffmpeg/FFMPEG.h (vendored, 6 changed lines)
@@ -52,8 +52,10 @@ public:
*/
static void registerPlugin();

virtual std::shared_ptr<IReader> createReader(std::string filename);
virtual std::shared_ptr<IReader> createReader(std::shared_ptr<Buffer> buffer);
virtual std::shared_ptr<IReader> createReader(std::string filename, int stream = 0);
virtual std::shared_ptr<IReader> createReader(std::shared_ptr<Buffer> buffer, int stream = 0);
virtual std::vector<StreamInfo> queryStreams(std::string filename);
virtual std::vector<StreamInfo> queryStreams(std::shared_ptr<Buffer> buffer);
virtual std::shared_ptr<IWriter> createWriter(std::string filename, DeviceSpecs specs, Container format, Codec codec, unsigned int bitrate);
};

extern/audaspace/plugins/ffmpeg/FFMPEGReader.cpp (vendored, 169 changed lines)
@@ -31,6 +31,25 @@ AUD_NAMESPACE_BEGIN
#define FFMPEG_OLD_CODE
#endif

SampleFormat FFMPEGReader::convertSampleFormat(AVSampleFormat format)
{
switch(av_get_packed_sample_fmt(format))
{
case AV_SAMPLE_FMT_U8:
return FORMAT_U8;
case AV_SAMPLE_FMT_S16:
return FORMAT_S16;
case AV_SAMPLE_FMT_S32:
return FORMAT_S32;
case AV_SAMPLE_FMT_FLT:
return FORMAT_FLOAT32;
case AV_SAMPLE_FMT_DBL:
return FORMAT_FLOAT64;
default:
AUD_THROW(FileException, "FFMPEG sample format unknown.");
}
}

int FFMPEGReader::decode(AVPacket& packet, Buffer& buffer)
{
int buf_size = buffer.getSize();
@@ -68,7 +87,7 @@ int FFMPEGReader::decode(AVPacket& packet, Buffer& buffer)
for(int i = 0; i < m_frame->nb_samples; i++)
{
std::memcpy(((data_t*)buffer.getBuffer()) + buf_pos + ((m_codecCtx->channels * i) + channel) * single_size,
m_frame->data[channel] + i * single_size, single_size);
m_frame->data[channel] + i * single_size, single_size);
}
}
}
@@ -109,7 +128,7 @@ int FFMPEGReader::decode(AVPacket& packet, Buffer& buffer)
for(int i = 0; i < m_frame->nb_samples; i++)
{
std::memcpy(((data_t*)buffer.getBuffer()) + buf_pos + ((m_codecCtx->channels * i) + channel) * single_size,
m_frame->data[channel] + i * single_size, single_size);
m_frame->data[channel] + i * single_size, single_size);
}
}
}
@@ -123,13 +142,10 @@ int FFMPEGReader::decode(AVPacket& packet, Buffer& buffer)
return buf_pos;
}

void FFMPEGReader::init()
void FFMPEGReader::init(int stream)
{
m_position = 0;
m_start_offset = 0.0f;
m_pkgbuf_left = 0;
m_st_time = 0;
m_duration = 0;

if(avformat_find_stream_info(m_formatCtx, nullptr) < 0)
AUD_THROW(FileException, "File couldn't be read, ffmpeg couldn't find the stream info.");
@@ -137,43 +153,22 @@ void FFMPEGReader::init()
// find audio stream and codec
m_stream = -1;

double dur_sec = 0;

for(unsigned int i = 0; i < m_formatCtx->nb_streams; i++)
{
#ifdef FFMPEG_OLD_CODE
if(m_formatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
if((m_formatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
#else
if(m_formatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
if((m_formatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
#endif
&& (m_stream < 0))
{
AVStream *audio_stream = m_formatCtx->streams[i];
double audio_timebase = av_q2d(audio_stream->time_base);

if (audio_stream->start_time != AV_NOPTS_VALUE)
if(stream == 0)
{
m_st_time = audio_stream->start_time;
}

int64_t ctx_start_time = 0;
if (m_formatCtx->start_time != AV_NOPTS_VALUE) {
ctx_start_time = m_formatCtx->start_time;
}

m_start_offset = m_st_time * audio_timebase - (double)ctx_start_time / AV_TIME_BASE;

if(audio_stream->duration != AV_NOPTS_VALUE)
{
dur_sec = audio_stream->duration * audio_timebase;
m_stream=i;
break;
}
else
{
/* If the audio starts after the stream start time, subract this from the total duration. */
dur_sec = (double)m_formatCtx->duration / AV_TIME_BASE - m_start_offset;
}

m_stream=i;
break;
stream--;
}
}

@@ -242,10 +237,9 @@ void FFMPEGReader::init()
}

m_specs.rate = (SampleRate) m_codecCtx->sample_rate;
m_duration = lround(dur_sec * m_codecCtx->sample_rate);
}

FFMPEGReader::FFMPEGReader(std::string filename) :
FFMPEGReader::FFMPEGReader(std::string filename, int stream) :
m_pkgbuf(),
m_formatCtx(nullptr),
m_codecCtx(nullptr),
@@ -259,7 +253,7 @@ FFMPEGReader::FFMPEGReader(std::string filename) :

try
{
init();
init(stream);
}
catch(Exception&)
{
@@ -268,7 +262,7 @@ FFMPEGReader::FFMPEGReader(std::string filename) :
}
}

FFMPEGReader::FFMPEGReader(std::shared_ptr<Buffer> buffer) :
FFMPEGReader::FFMPEGReader(std::shared_ptr<Buffer> buffer, int stream) :
m_pkgbuf(),
m_codecCtx(nullptr),
m_frame(nullptr),
@@ -295,7 +289,7 @@ FFMPEGReader::FFMPEGReader(std::shared_ptr<Buffer> buffer) :

try
{
init();
init(stream);
}
catch(Exception&)
{
@@ -318,6 +312,51 @@ FFMPEGReader::~FFMPEGReader()
avformat_close_input(&m_formatCtx);
}

std::vector<StreamInfo> FFMPEGReader::queryStreams()
{
std::vector<StreamInfo> result;

for(unsigned int i = 0; i < m_formatCtx->nb_streams; i++)
{
#ifdef FFMPEG_OLD_CODE
if(m_formatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO)
#else
if(m_formatCtx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_AUDIO)
#endif
{
StreamInfo info;

double time_base = av_q2d(m_formatCtx->streams[i]->time_base);

if(m_formatCtx->streams[i]->start_time != AV_NOPTS_VALUE)
info.start = m_formatCtx->streams[i]->start_time * time_base;
else
info.start = 0;

if(m_formatCtx->streams[i]->duration != AV_NOPTS_VALUE)
info.duration = m_formatCtx->streams[i]->duration * time_base;
else if(m_formatCtx->duration != AV_NOPTS_VALUE)
info.duration = double(m_formatCtx->duration) / AV_TIME_BASE - info.start;
else
info.duration = 0;

#ifdef FFMPEG_OLD_CODE
info.specs.channels = Channels(m_formatCtx->streams[i]->codec->channels);
info.specs.rate = m_formatCtx->streams[i]->codec->sample_rate;
info.specs.format = convertSampleFormat(m_formatCtx->streams[i]->codec->sample_fmt);
#else
info.specs.channels = Channels(m_formatCtx->streams[i]->codecpar->channels);
info.specs.rate = m_formatCtx->streams[i]->codecpar->sample_rate;
info.specs.format = convertSampleFormat(AVSampleFormat(m_formatCtx->streams[i]->codecpar->format));
#endif

result.emplace_back(info);
}
}

return result;
}

int FFMPEGReader::read_packet(void* opaque, uint8_t* buf, int buf_size)
{
FFMPEGReader* reader = reinterpret_cast<FFMPEGReader*>(opaque);
@@ -368,18 +407,16 @@ void FFMPEGReader::seek(int position)
{
if(position >= 0)
{
double pts_time_base =
av_q2d(m_formatCtx->streams[m_stream]->time_base);
double pts_time_base = av_q2d(m_formatCtx->streams[m_stream]->time_base);

uint64_t seek_pts = (((uint64_t)position) / ((uint64_t)m_specs.rate)) / pts_time_base;
uint64_t st_time = m_formatCtx->streams[m_stream]->start_time;
uint64_t seek_pos = (uint64_t)(position / (pts_time_base * m_specs.rate));

if(m_st_time != AV_NOPTS_VALUE) {
seek_pts += m_st_time;
}
if(st_time != AV_NOPTS_VALUE)
seek_pos += st_time;

// a value < 0 tells us that seeking failed
if(av_seek_frame(m_formatCtx, m_stream, seek_pts,
AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_ANY) >= 0)
if(av_seek_frame(m_formatCtx, m_stream, seek_pos, AVSEEK_FLAG_BACKWARD | AVSEEK_FLAG_ANY) >= 0)
{
avcodec_flush_buffers(m_codecCtx);
m_position = position;
@@ -400,7 +437,7 @@ void FFMPEGReader::seek(int position)
if(packet.pts != AV_NOPTS_VALUE)
{
// calculate real position, and read to frame!
m_position = (packet.pts - m_st_time) * pts_time_base * m_specs.rate;
m_position = (packet.pts - (st_time != AV_NOPTS_VALUE ? st_time : 0)) * pts_time_base * m_specs.rate;

if(m_position < position)
{
@@ -430,8 +467,25 @@ void FFMPEGReader::seek(int position)

int FFMPEGReader::getLength() const
{
auto stream = m_formatCtx->streams[m_stream];

double time_base = av_q2d(stream->time_base);
double duration;

if(stream->duration != AV_NOPTS_VALUE)
duration = stream->duration * time_base;
else if(m_formatCtx->duration != AV_NOPTS_VALUE)
{
duration = float(m_formatCtx->duration) / AV_TIME_BASE;

if(stream->start_time != AV_NOPTS_VALUE)
duration -= stream->start_time * time_base;
}
else
duration = -1;

// return approximated remaning size
return m_duration - m_position;
return (int)(duration * m_codecCtx->sample_rate) - m_position;
}

int FFMPEGReader::getPosition() const
@@ -439,11 +493,6 @@ int FFMPEGReader::getPosition() const
return m_position;
}

double FFMPEGReader::getStartOffset() const
{
return m_start_offset;
}

Specs FFMPEGReader::getSpecs() const
{
return m_specs.specs;
@@ -480,13 +529,11 @@ void FFMPEGReader::read(int& length, bool& eos, sample_t* buffer)
// decode the package
pkgbuf_pos = decode(packet, m_pkgbuf);

if (packet.pts >= m_st_time) {
// copy to output buffer
data_size = std::min(pkgbuf_pos, left * sample_size);
m_convert((data_t*) buf, (data_t*) m_pkgbuf.getBuffer(), data_size / AUD_FORMAT_SIZE(m_specs.format));
buf += data_size / AUD_FORMAT_SIZE(m_specs.format);
left -= data_size / sample_size;
}
// copy to output buffer
data_size = std::min(pkgbuf_pos, left * sample_size);
m_convert((data_t*) buf, (data_t*) m_pkgbuf.getBuffer(), data_size / AUD_FORMAT_SIZE(m_specs.format));
buf += data_size / AUD_FORMAT_SIZE(m_specs.format);
left -= data_size / sample_size;
}
av_packet_unref(&packet);
}

extern/audaspace/plugins/ffmpeg/FFMPEGReader.h (vendored, 42 changed lines)
@@ -29,9 +29,11 @@
#include "respec/ConverterFunctions.h"
#include "IReader.h"
#include "util/Buffer.h"
#include "file/FileInfo.h"

#include <string>
#include <memory>
#include <vector>

struct AVCodecContext;
extern "C" {
@@ -54,22 +56,6 @@ private:
*/
int m_position;

/**
* The start offset in seconds relative to the media container start time.
* IE how much the sound should be delayed to be kept in sync with the rest of the containter streams.
*/
double m_start_offset;

/**
* The start time pts of the stream. All packets before this timestamp shouldn't be played back (only decoded).
*/
int64_t m_st_time;

/**
* The duration of the audio stream in samples.
*/
int64_t m_duration;

/**
* The specification of the audio data.
*/
@@ -135,6 +121,13 @@ private:
*/
bool m_tointerleave;

/**
* Converts an ffmpeg sample format to an audaspace one.
* \param format The AVSampleFormat sample format.
* \return The sample format as SampleFormat.
*/
AUD_LOCAL static SampleFormat convertSampleFormat(AVSampleFormat format);

/**
* Decodes a packet into the given buffer.
* \param packet The AVPacket to decode.
@@ -145,8 +138,9 @@ private:

/**
* Initializes the object.
* \param stream The index of the audio stream within the file if it contains multiple audio streams.
*/
AUD_LOCAL void init();
AUD_LOCAL void init(int stream);

// delete copy constructor and operator=
FFMPEGReader(const FFMPEGReader&) = delete;
@@ -156,24 +150,33 @@ public:
/**
* Creates a new reader.
* \param filename The path to the file to be read.
* \param stream The index of the audio stream within the file if it contains multiple audio streams.
* \exception Exception Thrown if the file specified does not exist or
* cannot be read with ffmpeg.
*/
FFMPEGReader(std::string filename);
FFMPEGReader(std::string filename, int stream = 0);

/**
* Creates a new reader.
* \param buffer The buffer to read from.
* \param stream The index of the audio stream within the file if it contains multiple audio streams.
* \exception Exception Thrown if the buffer specified cannot be read
* with ffmpeg.
*/
FFMPEGReader(std::shared_ptr<Buffer> buffer);
FFMPEGReader(std::shared_ptr<Buffer> buffer, int stream = 0);

/**
* Destroys the reader and closes the file.
*/
virtual ~FFMPEGReader();

/**
* Queries the streams of a sound file.
* \return A vector with as many streams as there are in the file.
* \exception Exception Thrown if the file specified cannot be read.
*/
virtual std::vector<StreamInfo> queryStreams();

/**
* Reads data to a memory buffer.
* This function is used for avio only.
@@ -198,7 +201,6 @@ public:
virtual void seek(int position);
virtual int getLength() const;
virtual int getPosition() const;
virtual double getStartOffset() const;
virtual Specs getSpecs() const;
virtual void read(int& length, bool& eos, sample_t* buffer);
};

extern/audaspace/plugins/libsndfile/SndFile.cpp (vendored, 14 changed lines)
@@ -32,16 +32,26 @@ void SndFile::registerPlugin()
FileManager::registerOutput(plugin);
}

std::shared_ptr<IReader> SndFile::createReader(std::string filename)
std::shared_ptr<IReader> SndFile::createReader(std::string filename, int stream)
{
return std::shared_ptr<IReader>(new SndFileReader(filename));
}

std::shared_ptr<IReader> SndFile::createReader(std::shared_ptr<Buffer> buffer)
std::shared_ptr<IReader> SndFile::createReader(std::shared_ptr<Buffer> buffer, int stream)
{
return std::shared_ptr<IReader>(new SndFileReader(buffer));
}

std::vector<StreamInfo> SndFile::queryStreams(std::string filename)
{
return SndFileReader(filename).queryStreams();
}

std::vector<StreamInfo> SndFile::queryStreams(std::shared_ptr<Buffer> buffer)
{
return SndFileReader(buffer).queryStreams();
}

std::shared_ptr<IWriter> SndFile::createWriter(std::string filename, DeviceSpecs specs, Container format, Codec codec, unsigned int bitrate)
{
return std::shared_ptr<IWriter>(new SndFileWriter(filename, specs, format, codec, bitrate));

@@ -52,8 +52,10 @@ public:
*/
static void registerPlugin();

virtual std::shared_ptr<IReader> createReader(std::string filename);
virtual std::shared_ptr<IReader> createReader(std::shared_ptr<Buffer> buffer);
virtual std::shared_ptr<IReader> createReader(std::string filename, int stream = 0);
virtual std::shared_ptr<IReader> createReader(std::shared_ptr<Buffer> buffer, int stream = 0);
virtual std::vector<StreamInfo> queryStreams(std::string filename);
virtual std::vector<StreamInfo> queryStreams(std::shared_ptr<Buffer> buffer);
virtual std::shared_ptr<IWriter> createWriter(std::string filename, DeviceSpecs specs, Container format, Codec codec, unsigned int bitrate);
};

@@ -118,6 +118,21 @@ SndFileReader::~SndFileReader()
sf_close(m_sndfile);
}

std::vector<StreamInfo> SndFileReader::queryStreams()
{
std::vector<StreamInfo> result;

StreamInfo info;
info.start = 0;
info.duration = double(getLength()) / m_specs.rate;
info.specs.specs = m_specs;
info.specs.format = FORMAT_FLOAT32;

result.emplace_back(info);

return result;
}

bool SndFileReader::isSeekable() const
{
return m_seekable;

@@ -28,9 +28,12 @@
* The SndFileReader class.
*/

#include "file/FileInfo.h"

#include <string>
#include <sndfile.h>
#include <memory>
#include <vector>

AUD_NAMESPACE_BEGIN

@@ -96,6 +99,7 @@ public:
/**
* Creates a new reader.
* \param filename The path to the file to be read.
* \param stream The index of the audio stream within the file if it contains multiple audio streams.
* \exception Exception Thrown if the file specified does not exist or
* cannot be read with libsndfile.
*/
@@ -104,6 +108,7 @@ public:
/**
* Creates a new reader.
* \param buffer The buffer to read from.
* \param stream The index of the audio stream within the file if it contains multiple audio streams.
* \exception Exception Thrown if the buffer specified cannot be read
* with libsndfile.
*/
@@ -114,6 +119,13 @@ public:
*/
virtual ~SndFileReader();

/**
* Queries the streams of a sound file.
* \return A vector with as many streams as there are in the file.
* \exception Exception Thrown if the file specified cannot be read.
*/
virtual std::vector<StreamInfo> queryStreams();

virtual bool isSeekable() const;
virtual void seek(int position);
virtual int getLength() const;

extern/audaspace/src/file/File.cpp (vendored, 20 changed lines)
@@ -23,23 +23,31 @@

AUD_NAMESPACE_BEGIN

File::File(std::string filename) :
m_filename(filename)
File::File(std::string filename, int stream) :
m_filename(filename), m_stream(stream)
{
}

File::File(const data_t* buffer, int size) :
m_buffer(new Buffer(size))
File::File(const data_t* buffer, int size, int stream) :
m_buffer(new Buffer(size)), m_stream(stream)
{
std::memcpy(m_buffer->getBuffer(), buffer, size);
}

std::vector<StreamInfo> File::queryStreams()
{
if(m_buffer.get())
return FileManager::queryStreams(m_buffer);
else
return FileManager::queryStreams(m_filename);
}

std::shared_ptr<IReader> File::createReader()
{
if(m_buffer.get())
return FileManager::createReader(m_buffer);
return FileManager::createReader(m_buffer, m_stream);
else
return FileManager::createReader(m_filename);
return FileManager::createReader(m_filename, m_stream);
}

AUD_NAMESPACE_END

extern/audaspace/src/file/FileManager.cpp (vendored, 36 changed lines)
@@ -43,13 +43,13 @@ void FileManager::registerOutput(std::shared_ptr<aud::IFileOutput> output)
outputs().push_back(output);
}

std::shared_ptr<IReader> FileManager::createReader(std::string filename)
std::shared_ptr<IReader> FileManager::createReader(std::string filename, int stream)
{
for(std::shared_ptr<IFileInput> input : inputs())
{
try
{
return input->createReader(filename);
return input->createReader(filename, stream);
}
catch(Exception&) {}
}
@@ -57,13 +57,41 @@ std::shared_ptr<IReader> FileManager::createReader(std::string filename)
AUD_THROW(FileException, "The file couldn't be read with any installed file reader.");
}

std::shared_ptr<IReader> FileManager::createReader(std::shared_ptr<Buffer> buffer)
std::shared_ptr<IReader> FileManager::createReader(std::shared_ptr<Buffer> buffer, int stream)
{
for(std::shared_ptr<IFileInput> input : inputs())
{
try
{
return input->createReader(buffer);
return input->createReader(buffer, stream);
}
catch(Exception&) {}
}

AUD_THROW(FileException, "The file couldn't be read with any installed file reader.");
}

std::vector<StreamInfo> FileManager::queryStreams(std::string filename)
{
for(std::shared_ptr<IFileInput> input : inputs())
{
try
{
return input->queryStreams(filename);
}
catch(Exception&) {}
}

AUD_THROW(FileException, "The file couldn't be read with any installed file reader.");
}

std::vector<StreamInfo> FileManager::queryStreams(std::shared_ptr<Buffer> buffer)
{
for(std::shared_ptr<IFileInput> input : inputs())
{
try
{
return input->queryStreams(buffer);
}
catch(Exception&) {}
}

extern/audaspace/src/fx/VolumeReader.cpp (vendored, 2 changed lines)
@@ -57,4 +57,4 @@ void VolumeReader::read(int& length, bool& eos, sample_t* buffer)
buffer[i] = buffer[i] * m_volumeStorage->getVolume();
}

AUD_NAMESPACE_END
AUD_NAMESPACE_END

@@ -106,7 +106,7 @@ GHOST_TSuccess GHOST_DisplayManagerSDL::setCurrentDisplaySetting(
* ftp://ftp.idsoftware.com/idstuff/source/q2source-3.21.zip
* See linux/gl_glx.c:GLimp_SetMode
* http://wiki.bzflag.org/BZFlag_Source
* See src/platform/SDLDisplay.cxx:SDLDisplay and createWindow
* See: `src/platform/SDLDisplay.cxx:SDLDisplay` and `createWindow`.
*/
SDL_DisplayMode mode;
const int num_modes = SDL_GetNumDisplayModes(display);

@@ -546,6 +546,7 @@ geometry_node_categories = [
NodeItem("GeometryNodeJoinGeometry"),
NodeItem("GeometryNodeSeparateComponents"),
NodeItem("GeometryNodeSetPosition", poll=geometry_nodes_fields_poll),
NodeItem("GeometryNodeRealizeInstances", poll=geometry_nodes_fields_poll),
]),
GeometryNodeCategory("GEO_INPUT", "Input", items=[
NodeItem("GeometryNodeObjectInfo"),
@@ -603,6 +604,9 @@ geometry_node_categories = [
NodeItem("FunctionNodeFloatToInt"),
NodeItem("GeometryNodeSwitch"),
]),
GeometryNodeCategory("GEO_TEXTURE", "Texture", items=[
NodeItem("ShaderNodeTexNoise", poll=geometry_nodes_fields_poll),
]),
GeometryNodeCategory("GEO_VECTOR", "Vector", items=[
NodeItem("ShaderNodeVectorCurve"),
NodeItem("ShaderNodeSeparateXYZ"),

@@ -591,12 +591,17 @@ class InstancesComponent : public GeometryComponent {

blender::Span<int> almost_unique_ids() const;

int attribute_domain_size(const AttributeDomain domain) const final;

bool is_empty() const final;

bool owns_direct_data() const override;
void ensure_owns_direct_data() override;

static constexpr inline GeometryComponentType static_type = GEO_COMPONENT_TYPE_INSTANCES;

private:
const blender::bke::ComponentAttributeProviders *get_attribute_providers() const final;
};

/** A geometry component that stores volume grids. */

@@ -731,7 +731,7 @@ void nodeSetSocketAvailability(struct bNodeSocket *sock, bool is_available);

int nodeSocketLinkLimit(const struct bNodeSocket *sock);

void nodeDeclarationEnsure(struct bNodeTree *ntree, struct bNode *node);
NodeDeclarationHandle *nodeDeclarationEnsure(struct bNodeTree *ntree, struct bNode *node);

/* Node Clipboard */
void BKE_node_clipboard_init(const struct bNodeTree *ntree);
@@ -1493,6 +1493,7 @@ int ntreeTexExecTree(struct bNodeTree *ntree,
#define GEO_NODE_ATTRIBUTE_CAPTURE 1080
#define GEO_NODE_MATERIAL_SELECTION 1081
#define GEO_NODE_MATERIAL_ASSIGN 1082
#define GEO_NODE_REALIZE_INSTANCES 1083

/** \} */

@@ -96,13 +96,24 @@ typedef struct SoundInfo {
eSoundChannels channels;
} specs;
float length;
double start_offset;
} SoundInfo;

typedef struct SoundStreamInfo {
double duration;
double start;
} SoundStreamInfo;

/* Get information about given sound. Returns truth on success., false if sound can not be loaded
* or if the codes is not supported. */
bool BKE_sound_info_get(struct Main *main, struct bSound *sound, SoundInfo *sound_info);

/* Get information about given sound. Returns truth on success., false if sound can not be loaded
* or if the codes is not supported. */
bool BKE_sound_stream_info_get(struct Main *main,
const char *filepath,
int stream,
SoundStreamInfo *sound_info);

#if defined(WITH_AUDASPACE)
AUD_Device *BKE_sound_mixdown(const struct Scene *scene,
AUD_DeviceSpecs specs,

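A minimal sketch (not part of the diff) of how the new stream query might be called from Blender code; `bmain` and `filepath` are assumed to be provided by the caller.

#include "BKE_sound.h"
#include <stdio.h>

static void report_first_stream(struct Main *bmain, const char *filepath)
{
	SoundStreamInfo stream_info;

	/* Query stream 0; the function returns false if the file cannot be loaded
	 * or the codec is not supported. */
	if (BKE_sound_stream_info_get(bmain, filepath, 0, &stream_info)) {
		printf("stream starts at %f s and lasts %f s\n", stream_info.start, stream_info.duration);
	}
}
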
@@ -110,7 +110,7 @@ void CurveEval::bounds_min_max(float3 &min, float3 &max, const bool use_evaluate
}

/**
* Return the start indices for each of the curve spline's evaluated points, as if they were part
* Return the start indices for each of the curve spline's control points, if they were part
* of a flattened array. This can be used to facilitate parallelism by avoiding the need to
* accumulate an offset while doing more complex calculations.
*

@@ -25,6 +25,8 @@

#include "BKE_geometry_set.hh"

#include "attribute_access_intern.hh"

using blender::float4x4;
using blender::Map;
using blender::MutableSpan;
@@ -225,4 +227,85 @@ blender::Span<int> InstancesComponent::almost_unique_ids() const
return almost_unique_ids_;
}

int InstancesComponent::attribute_domain_size(const AttributeDomain domain) const
{
if (domain != ATTR_DOMAIN_POINT) {
return 0;
}
return this->instances_amount();
}

namespace blender::bke {

static float3 get_transform_position(const float4x4 &transform)
{
return transform.translation();
}

static void set_transform_position(float4x4 &transform, const float3 position)
{
copy_v3_v3(transform.values[3], position);
}

class InstancePositionAttributeProvider final : public BuiltinAttributeProvider {
public:
InstancePositionAttributeProvider()
: BuiltinAttributeProvider(
"position", ATTR_DOMAIN_POINT, CD_PROP_FLOAT3, NonCreatable, Writable, NonDeletable)
{
}

GVArrayPtr try_get_for_read(const GeometryComponent &component) const final
{
const InstancesComponent &instances_component = static_cast<const InstancesComponent &>(
component);
Span<float4x4> transforms = instances_component.instance_transforms();
return std::make_unique<fn::GVArray_For_DerivedSpan<float4x4, float3, get_transform_position>>(
transforms);
}

GVMutableArrayPtr try_get_for_write(GeometryComponent &component) const final
{
InstancesComponent &instances_component = static_cast<InstancesComponent &>(component);
MutableSpan<float4x4> transforms = instances_component.instance_transforms();
return std::make_unique<fn::GVMutableArray_For_DerivedSpan<float4x4,
float3,
get_transform_position,
set_transform_position>>(
transforms);
}

bool try_delete(GeometryComponent &UNUSED(component)) const final
{
return false;
}

bool try_create(GeometryComponent &UNUSED(component),
const AttributeInit &UNUSED(initializer)) const final
{
return false;
}

bool exists(const GeometryComponent &UNUSED(component)) const final
{
return true;
}
};

static ComponentAttributeProviders create_attribute_providers_for_instances()
{
static InstancePositionAttributeProvider position;

return ComponentAttributeProviders({&position}, {});
}
} // namespace blender::bke

const blender::bke::ComponentAttributeProviders *InstancesComponent::get_attribute_providers()
const
{
static blender::bke::ComponentAttributeProviders providers =
blender::bke::create_attribute_providers_for_instances();
return &providers;
}

/** \} */

@@ -29,6 +29,7 @@
#include <cstddef>
#include <cstdlib>
#include <cstring>
#include <queue>

/* Allow using deprecated functionality for .blend file I/O. */
#define DNA_DEPRECATED_ALLOW
@@ -52,9 +53,12 @@
#include "BLI_map.hh"
#include "BLI_math.h"
#include "BLI_path_util.h"
#include "BLI_set.hh"
#include "BLI_stack.hh"
#include "BLI_string.h"
#include "BLI_string_utils.h"
#include "BLI_utildefines.h"
#include "BLI_vector_set.hh"

#include "BLT_translation.h"

@@ -80,6 +84,7 @@
#include "NOD_function.h"
#include "NOD_geometry.h"
#include "NOD_node_declaration.hh"
#include "NOD_node_tree_ref.hh"
#include "NOD_shader.h"
#include "NOD_socket.h"
#include "NOD_texture.h"
@@ -93,6 +98,14 @@

#define NODE_DEFAULT_MAX_WIDTH 700

using blender::Array;
using blender::Set;
using blender::Span;
using blender::Stack;
using blender::Vector;
using blender::VectorSet;
using namespace blender::nodes::node_tree_ref_types;

/* Fallback types for undefined tree, nodes, sockets */
static bNodeTreeType NodeTreeTypeUndefined;
bNodeType NodeTypeUndefined;
@@ -647,6 +660,8 @@ void ntreeBlendReadData(BlendDataReader *reader, bNodeTree *ntree)
ntree->progress = nullptr;
ntree->execdata = nullptr;

ntree->output_field_dependencies = nullptr;

BLO_read_data_address(reader, &ntree->adt);
BKE_animdata_blend_read_data(reader, ntree->adt);

@@ -1015,8 +1030,8 @@ IDTypeInfo IDType_ID_NT = {
static void node_add_sockets_from_type(bNodeTree *ntree, bNode *node, bNodeType *ntype)
{
if (ntype->declare != nullptr) {
nodeDeclarationEnsure(ntree, node);
node->declaration->build(*ntree, *node);
blender::nodes::NodeDeclaration *node_decl = nodeDeclarationEnsure(ntree, node);
node_decl->build(*ntree, *node);
return;
}
bNodeSocketTemplate *sockdef;
@@ -3942,18 +3957,19 @@ int nodeSocketLinkLimit(const bNodeSocket *sock)
* If the node implements a `declare` function, this function makes sure that `node->declaration`
* is up to date.
*/
void nodeDeclarationEnsure(bNodeTree *UNUSED(ntree), bNode *node)
NodeDeclarationHandle *nodeDeclarationEnsure(bNodeTree *UNUSED(ntree), bNode *node)
{
if (node->typeinfo->declare == nullptr) {
return;
return nullptr;
}
if (node->declaration != nullptr) {
return;
return node->declaration;
}

node->declaration = new blender::nodes::NodeDeclaration();
blender::nodes::NodeDeclarationBuilder builder{*node->declaration};
node->typeinfo->declare(builder);
return node->declaration;
}

/* ************** Node Clipboard *********** */
@@ -4468,6 +4484,296 @@ void ntreeUpdateAllUsers(Main *main, ID *id)
}
}

static bool is_field_socket_type(eNodeSocketDatatype type)
{
return ELEM(type, SOCK_FLOAT, SOCK_INT, SOCK_BOOLEAN, SOCK_VECTOR, SOCK_RGBA);
}

static bool sockets_have_links(blender::Span<const SocketRef *> sockets)
{
for (const SocketRef *socket : sockets) {
if (!socket->directly_linked_links().is_empty()) {
return true;
}
}
return false;
}

using OutputFieldDependencies = Vector<std::optional<Vector<int>>>;

static const std::optional<Vector<int>> *get_group_output_field_dependencies(
const OutputSocketRef &output_socket)
{
const NodeRef &node = output_socket.node();
BLI_assert(node.is_group_node());
bNodeTree *group = (bNodeTree *)node.bnode()->id;
if (group == nullptr) {
return nullptr;
}
if (group->output_field_dependencies == nullptr) {
return nullptr;
}
const OutputFieldDependencies *output_field_dependencies = (const OutputFieldDependencies *)
group->output_field_dependencies;
if (output_socket.index() >= output_field_dependencies->size()) {
return nullptr;
}
return &(*output_field_dependencies)[output_socket.index()];
}

static Vector<int> get_linked_field_input_indices(const OutputSocketRef &output_socket)
{
Vector<int> indices;
const NodeRef &node = output_socket.node();
if (node.is_group_node()) {
const std::optional<Vector<int>> *optional_dependencies = get_group_output_field_dependencies(
output_socket);
if (optional_dependencies && optional_dependencies->has_value()) {
indices.extend(**optional_dependencies);
}
}
else {
for (const InputSocketRef *input_socket : output_socket.node().inputs()) {
if (is_field_socket_type((eNodeSocketDatatype)input_socket->typeinfo()->type)) {
indices.append(input_socket->index());
}
}
}
return indices;
}

static Vector<const NodeRef *> toposort_nodes(const NodeTreeRef &tree, bool left_to_right = true)
{
Vector<const NodeRef *> toposort;
toposort.reserve(tree.nodes().size());
Array<bool> node_is_pushed_by_id(tree.nodes().size(), false);
std::queue<const NodeRef *> nodes_to_check;

for (const NodeRef *node : tree.nodes()) {
if (!sockets_have_links(node->inputs_or_outputs(!left_to_right))) {
node_is_pushed_by_id[node->id()] = true;
nodes_to_check.push(node);
}
}

while (!nodes_to_check.empty()) {
const NodeRef *node = nodes_to_check.front();
nodes_to_check.pop();
toposort.append(node);

for (const SocketRef *input_socket : node->inputs_or_outputs(left_to_right)) {
for (const SocketRef *linked_socket : input_socket->directly_linked_sockets()) {
const NodeRef &linked_node = linked_socket->node();
const int linked_node_id = linked_node.id();
if (!node_is_pushed_by_id[linked_node_id]) {
node_is_pushed_by_id[linked_node_id] = true;
nodes_to_check.push(&linked_node);
}
}
}
}

toposort.as_mutable_span().reverse();
return toposort;
}

struct SocketFieldState {
bool is_single = true;
bool is_field_source = false;
bool requires_single = false;
};

static std::optional<Vector<int>> find_dependent_group_input_indices(
const InputSocketRef &group_output_socket,
const Span<SocketFieldState> field_state_by_socket_id)
{
Set<const InputSocketRef *> handled_sockets;
|
||||
Stack<const InputSocketRef *> sockets_to_check;
|
||||
|
||||
handled_sockets.add(&group_output_socket);
|
||||
sockets_to_check.push(&group_output_socket);
|
||||
|
||||
Set<int> found_input_indices;
|
||||
|
||||
while (!sockets_to_check.is_empty()) {
|
||||
const InputSocketRef *input_socket = sockets_to_check.pop();
|
||||
for (const OutputSocketRef *origin_socket : input_socket->logically_linked_sockets()) {
|
||||
const NodeRef &origin_node = origin_socket->node();
|
||||
const SocketFieldState &origin_state = field_state_by_socket_id[origin_socket->id()];
|
||||
if (origin_state.is_field_source) {
|
||||
if (origin_node.is_group_input_node()) {
|
||||
found_input_indices.add(origin_socket->index());
|
||||
}
|
||||
else {
|
||||
return std::nullopt;
|
||||
}
|
||||
}
|
||||
else if (!origin_state.is_single) {
|
||||
const Vector<int> input_socket_indices = get_linked_field_input_indices(*origin_socket);
|
||||
for (const int input_index : input_socket_indices) {
|
||||
const InputSocketRef &origin_input_socket = origin_node.input(input_index);
|
||||
if (!field_state_by_socket_id[origin_input_socket.id()].is_single) {
|
||||
if (handled_sockets.add(&origin_input_socket)) {
|
||||
sockets_to_check.push(&origin_input_socket);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return Vector<int>(found_input_indices.begin(), found_input_indices.end());
|
||||
}
|
||||
|
||||
static void update_socket_shapes_for_fields(bNodeTree &btree)
|
||||
{
|
||||
using namespace blender;
|
||||
using namespace blender::nodes;
|
||||
if (btree.type != NTREE_GEOMETRY) {
|
||||
return;
|
||||
}
|
||||
|
||||
NodeTreeRef tree{&btree};
|
||||
Vector<const NodeRef *> toposort_left_to_right = toposort_nodes(tree, true);
|
||||
Vector<const NodeRef *> toposort_right_to_left = toposort_nodes(tree, false);
|
||||
|
||||
Array<SocketFieldState> field_state_by_socket_id(tree.sockets().size());
|
||||
|
||||
auto check_if_node_is_adaptive = [](const NodeRef &node) {
|
||||
const StringRef node_idname = node.idname();
|
||||
return !node_idname.startswith("GeometryNode");
|
||||
};
|
||||
|
||||
for (const NodeRef *node : toposort_right_to_left) {
|
||||
NodeDeclaration *node_decl = nodeDeclarationEnsure(&btree, node->bnode());
|
||||
|
||||
const bool node_is_adaptive = check_if_node_is_adaptive(*node);
|
||||
|
||||
for (const OutputSocketRef *output_socket : node->outputs()) {
|
||||
SocketFieldState &state = field_state_by_socket_id[output_socket->id()];
|
||||
for (const InputSocketRef *target_socket : output_socket->directly_linked_sockets()) {
|
||||
state.requires_single |= field_state_by_socket_id[target_socket->id()].requires_single;
|
||||
}
|
||||
|
||||
if (state.requires_single) {
|
||||
const Vector<int> input_socket_indices = get_linked_field_input_indices(*output_socket);
|
||||
for (const int input_index : input_socket_indices) {
|
||||
const InputSocketRef &input_socket = node->input(input_index);
|
||||
field_state_by_socket_id[input_socket.id()].requires_single = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const InputSocketRef *input_socket : node->inputs()) {
|
||||
SocketFieldState &state = field_state_by_socket_id[input_socket->id()];
|
||||
if (state.requires_single) {
|
||||
continue;
|
||||
}
|
||||
if (node_decl != nullptr && !node_is_adaptive) {
|
||||
const SocketDeclaration &socket_decl = *node_decl->inputs()[input_socket->index()];
|
||||
state.requires_single |= !socket_decl.is_field();
|
||||
}
|
||||
if (!is_field_socket_type((eNodeSocketDatatype)input_socket->bsocket()->type)) {
|
||||
state.requires_single = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const NodeRef *node : tree.nodes_by_type("NodeGroupInput")) {
|
||||
for (const OutputSocketRef *output_socket : node->outputs().drop_back(1)) {
|
||||
SocketFieldState &state = field_state_by_socket_id[output_socket->id()];
|
||||
if (!state.requires_single) {
|
||||
state.is_single = false;
|
||||
state.is_field_source = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const NodeRef *node : toposort_left_to_right) {
|
||||
NodeDeclaration *node_decl = nodeDeclarationEnsure(&btree, node->bnode());
|
||||
|
||||
for (const InputSocketRef *input_socket : node->inputs()) {
|
||||
SocketFieldState &state = field_state_by_socket_id[input_socket->id()];
|
||||
if (state.requires_single) {
|
||||
state.is_single = true;
|
||||
continue;
|
||||
}
|
||||
state.is_single = true;
|
||||
for (const OutputSocketRef *origin_socket : input_socket->logically_linked_sockets()) {
|
||||
if (!field_state_by_socket_id[origin_socket->id()].is_single) {
|
||||
state.is_single = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const OutputSocketRef *output_socket : node->outputs()) {
|
||||
SocketFieldState &state = field_state_by_socket_id[output_socket->id()];
|
||||
if (node_decl != nullptr) {
|
||||
const SocketDeclaration &socket_decl = *node_decl->outputs()[output_socket->index()];
|
||||
if (socket_decl.is_field()) {
|
||||
state.is_single = false;
|
||||
state.is_field_source = true;
|
||||
}
|
||||
}
|
||||
if (output_socket->node().is_group_node()) {
|
||||
const std::optional<Vector<int>> *optional_dependencies =
|
||||
get_group_output_field_dependencies(*output_socket);
|
||||
if (optional_dependencies && !optional_dependencies->has_value()) {
|
||||
state.is_single = false;
|
||||
state.is_field_source = true;
|
||||
}
|
||||
}
|
||||
const Vector<int> input_socket_indices = get_linked_field_input_indices(*output_socket);
|
||||
for (const int input_index : input_socket_indices) {
|
||||
const InputSocketRef &input_socket = node->input(input_index);
|
||||
if (!field_state_by_socket_id[input_socket.id()].is_single) {
|
||||
state.is_single = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (const NodeRef *group_output_node : tree.nodes_by_type("NodeGroupOutput")) {
|
||||
if (!(group_output_node->bnode()->flag & NODE_DO_OUTPUT)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
OutputFieldDependencies *output_field_dependencies = new OutputFieldDependencies();
|
||||
for (const InputSocketRef *group_output_socket : group_output_node->inputs().drop_back(1)) {
|
||||
std::optional<Vector<int>> dependent_input_indices = find_dependent_group_input_indices(
|
||||
*group_output_socket, field_state_by_socket_id);
|
||||
output_field_dependencies->append(std::move(dependent_input_indices));
|
||||
}
|
||||
if (btree.output_field_dependencies != nullptr) {
|
||||
delete (OutputFieldDependencies *)btree.output_field_dependencies;
|
||||
}
|
||||
btree.output_field_dependencies = output_field_dependencies;
|
||||
break;
|
||||
}
|
||||
|
||||
for (const InputSocketRef *socket : tree.input_sockets()) {
|
||||
bNodeSocket *bsocket = socket->bsocket();
|
||||
const SocketFieldState &state = field_state_by_socket_id[socket->id()];
|
||||
if (state.requires_single) {
|
||||
bsocket->display_shape = SOCK_DISPLAY_SHAPE_CIRCLE;
|
||||
}
|
||||
else {
|
||||
bsocket->display_shape = SOCK_DISPLAY_SHAPE_DIAMOND;
|
||||
}
|
||||
}
|
||||
for (const OutputSocketRef *socket : tree.output_sockets()) {
|
||||
bNodeSocket *bsocket = socket->bsocket();
|
||||
const SocketFieldState &state = field_state_by_socket_id[socket->id()];
|
||||
if (state.is_single) {
|
||||
bsocket->display_shape = SOCK_DISPLAY_SHAPE_CIRCLE;
|
||||
}
|
||||
else {
|
||||
bsocket->display_shape = SOCK_DISPLAY_SHAPE_DIAMOND;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void ntreeUpdateTree(Main *bmain, bNodeTree *ntree)
|
||||
{
|
||||
if (!ntree) {
|
||||
@@ -4519,6 +4825,8 @@ void ntreeUpdateTree(Main *bmain, bNodeTree *ntree)
|
||||
/* update the node level from link dependencies */
|
||||
ntree_update_node_level(ntree);
|
||||
|
||||
update_socket_shapes_for_fields(*ntree);
|
||||
|
||||
/* check link validity */
|
||||
ntree_validate_links(ntree);
|
||||
}
|
||||
@@ -5229,6 +5537,7 @@ static void registerGeometryNodes()
|
||||
register_node_type_geo_point_translate();
|
||||
register_node_type_geo_points_to_volume();
|
||||
register_node_type_geo_raycast();
|
||||
register_node_type_geo_realize_instances();
|
||||
register_node_type_geo_sample_texture();
|
||||
register_node_type_geo_select_by_handle_type();
|
||||
register_node_type_geo_material_selection();
|
||||
|
@@ -1213,7 +1213,6 @@ static bool sound_info_from_playback_handle(void *playback_handle, SoundInfo *so
|
||||
AUD_SoundInfo info = AUD_getInfo(playback_handle);
|
||||
sound_info->specs.channels = (eSoundChannels)info.specs.channels;
|
||||
sound_info->length = info.length;
|
||||
sound_info->start_offset = info.start_offset;
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -1231,6 +1230,44 @@ bool BKE_sound_info_get(struct Main *main, struct bSound *sound, SoundInfo *soun
|
||||
return result;
|
||||
}
|
||||
|
||||
bool BKE_sound_stream_info_get(struct Main *main, const char *filepath, int stream, SoundStreamInfo *sound_info)
|
||||
{
|
||||
const char *path;
|
||||
char str[FILE_MAX];
|
||||
AUD_Sound *sound;
|
||||
AUD_StreamInfo *stream_infos;
|
||||
int stream_count;
|
||||
|
||||
BLI_strncpy(str, filepath, sizeof(str));
|
||||
path = BKE_main_blendfile_path(main);
|
||||
BLI_path_abs(str, path);
|
||||
|
||||
sound = AUD_Sound_file(str);
|
||||
if (!sound) {
|
||||
return false;
|
||||
}
|
||||
|
||||
stream_count = AUD_Sound_getFileStreams(sound, &stream_infos);
|
||||
|
||||
AUD_Sound_free(sound);
|
||||
|
||||
if (!stream_infos) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if ((stream < 0) || (stream >= stream_count)) {
|
||||
free(stream_infos);
|
||||
return false;
|
||||
}
|
||||
|
||||
sound_info->start = stream_infos[stream].start;
|
||||
sound_info->duration = stream_infos[stream].duration;
|
||||
|
||||
free(stream_infos);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
#else /* WITH_AUDASPACE */
|
||||
|
||||
# include "BLI_utildefines.h"
|
||||
@@ -1400,6 +1437,14 @@ bool BKE_sound_info_get(struct Main *UNUSED(main),
|
||||
return false;
|
||||
}
|
||||
|
||||
bool BKE_sound_stream_info_get(struct Main *UNUSED(main),
|
||||
const char *UNUSED(filepath),
|
||||
int UNUSED(stream),
|
||||
SoundStreamInfo *UNUSED(sound_info))
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
#endif /* WITH_AUDASPACE */
|
||||
|
||||
void BKE_sound_reset_scene_runtime(Scene *scene)
|
||||
|
@@ -142,7 +142,8 @@ void Spline::reverse()
|
||||
int Spline::evaluated_edges_size() const
|
||||
{
|
||||
const int eval_size = this->evaluated_points_size();
|
||||
if (eval_size == 1) {
|
||||
if (eval_size < 2) {
|
||||
/* Two points are required for an edge. */
|
||||
return 0;
|
||||
}
|
||||
|
||||
@@ -190,7 +191,7 @@ static void accumulate_lengths(Span<float3> positions,
|
||||
* Return non-owning access to the cache of accumulated lengths along the spline. Each item is the
|
||||
* length of the subsequent segment, i.e. the first value is the length of the first segment rather
|
||||
* than 0. This calculation is rather trivial, and only depends on the evaluated positions.
|
||||
* However, the results are used often, so it makes sense to cache it.
|
||||
* However, the results are used often, and it is necessarily single threaded, so it is cached.
|
||||
*/
|
||||
Span<float> Spline::evaluated_lengths() const
|
||||
{
|
||||
@@ -205,9 +206,10 @@ Span<float> Spline::evaluated_lengths() const
|
||||
|
||||
const int total = evaluated_edges_size();
|
||||
evaluated_lengths_cache_.resize(total);
|
||||
|
||||
Span<float3> positions = this->evaluated_positions();
|
||||
accumulate_lengths(positions, is_cyclic_, evaluated_lengths_cache_);
|
||||
if (total != 0) {
|
||||
Span<float3> positions = this->evaluated_positions();
|
||||
accumulate_lengths(positions, is_cyclic_, evaluated_lengths_cache_);
|
||||
}
|
||||
|
||||
length_cache_dirty_ = false;
|
||||
return evaluated_lengths_cache_;
|
||||
|
@@ -37,7 +37,15 @@ extern "C" {
|
||||
* This function is not thread-safe. */
|
||||
UUID BLI_uuid_generate_random(void);
|
||||
|
||||
/** Compare two UUIDs, return true iff they are equal. */
|
||||
/**
|
||||
* Return the UUID nil value, consisting of all-zero fields.
|
||||
*/
|
||||
UUID BLI_uuid_nil(void);
|
||||
|
||||
/** Return true iff this is the nil UUID. */
|
||||
bool BLI_uuid_is_nil(UUID uuid);
|
||||
|
||||
/** Compare two UUIDs, return true if they are equal. */
|
||||
bool BLI_uuid_equal(UUID uuid1, UUID uuid2);
|
||||
|
||||
/**
|
||||
@@ -48,7 +56,7 @@ void BLI_uuid_format(char *buffer, UUID uuid) ATTR_NONNULL();
|
||||
|
||||
/**
|
||||
* Parse a string as UUID.
|
||||
* The string MUST be in the format xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx,
|
||||
* The string MUST be in the format `xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx`,
|
||||
* as produced by #BLI_uuid_format().
|
||||
*
|
||||
* Return true if the string could be parsed, and false otherwise. In the latter case, the UUID may
|
||||
|
@@ -22,7 +22,9 @@
|
||||
|
||||
#include <cstdio>
|
||||
#include <cstring>
|
||||
#include <ctime>
|
||||
#include <random>
|
||||
#include <string>
|
||||
|
||||
/* Ensure the UUID struct doesn't have any padding, to be compatible with memcmp(). */
|
||||
static_assert(sizeof(UUID) == 16, "expect UUIDs to be 128 bit exactly");
|
||||
@@ -37,8 +39,20 @@ UUID BLI_uuid_generate_random()
|
||||
static_assert(std::mt19937_64::max() == 0xffffffffffffffffLL);
|
||||
|
||||
struct timespec ts;
|
||||
#ifdef __APPLE__
|
||||
/* `timespec_get()` is only available on macOS 10.15+, so until that's the minimum version
|
||||
* supported by Blender, use another function to get the timespec.
|
||||
*
|
||||
* `clock_gettime()` is only available on POSIX, so not on Windows; Linux uses the newer C++11
|
||||
* function `timespec_get()` as well. */
|
||||
clock_gettime(CLOCK_REALTIME, &ts);
|
||||
#else
|
||||
timespec_get(&ts, TIME_UTC);
|
||||
rng.seed(ts.tv_nsec);
|
||||
#endif
|
||||
/* XOR the nanosecond and second fields, just in case the clock only has seconds resolution. */
|
||||
uint64_t seed = ts.tv_nsec;
|
||||
seed ^= ts.tv_sec;
|
||||
rng.seed(seed);
|
||||
|
||||
return rng;
|
||||
}();
|
||||
@@ -64,6 +78,17 @@ UUID BLI_uuid_generate_random()
|
||||
return uuid;
|
||||
}
|
||||
|
||||
UUID BLI_uuid_nil(void)
|
||||
{
|
||||
const UUID nil = {0, 0, 0, 0, 0, 0};
|
||||
return nil;
|
||||
}
|
||||
|
||||
bool BLI_uuid_is_nil(UUID uuid)
|
||||
{
|
||||
return BLI_uuid_equal(BLI_uuid_nil(), uuid);
|
||||
}
|
||||
|
||||
bool BLI_uuid_equal(const UUID uuid1, const UUID uuid2)
|
||||
{
|
||||
return std::memcmp(&uuid1, &uuid2, sizeof(uuid1)) == 0;
|
||||
|
@@ -48,6 +48,19 @@ TEST(BLI_uuid, generate_many_random)
|
||||
}
|
||||
}
|
||||
|
||||
TEST(BLI_uuid, nil_value)
|
||||
{
|
||||
const UUID nil_uuid = BLI_uuid_nil();
|
||||
const UUID zeroes_uuid = {0, 0, 0, 0, 0, 0};
|
||||
|
||||
EXPECT_TRUE(BLI_uuid_equal(nil_uuid, zeroes_uuid));
|
||||
EXPECT_TRUE(BLI_uuid_is_nil(nil_uuid));
|
||||
|
||||
std::string buffer(36, '\0');
|
||||
BLI_uuid_format(buffer.data(), nil_uuid);
|
||||
EXPECT_EQ("00000000-0000-0000-0000-000000000000", buffer);
|
||||
}
|
||||
|
||||
TEST(BLI_uuid, equality)
|
||||
{
|
||||
const UUID uuid1 = BLI_uuid_generate_random();
|
||||
@@ -78,13 +91,13 @@ TEST(BLI_uuid, string_formatting)
|
||||
EXPECT_EQ("00000001-0002-0003-0405-060000000007", buffer);
|
||||
|
||||
/* Somewhat more complex bit patterns. This is a version 1 UUID generated from Python. */
|
||||
const UUID uuid1 = {3540651616, 5282, 4588, 139, 153, 0xf7, 0x73, 0x69, 0x44, 0xdb, 0x8b};
|
||||
const UUID uuid1 = {3540651616, 5282, 4588, 139, 153, {0xf7, 0x73, 0x69, 0x44, 0xdb, 0x8b}};
|
||||
BLI_uuid_format(buffer.data(), uuid1);
|
||||
EXPECT_EQ("d30a0e60-14a2-11ec-8b99-f7736944db8b", buffer);
|
||||
|
||||
/* Namespace UUID, example listed in RFC4211. */
|
||||
const UUID namespace_dns = {
|
||||
0x6ba7b810, 0x9dad, 0x11d1, 0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8};
|
||||
0x6ba7b810, 0x9dad, 0x11d1, 0x80, 0xb4, {0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8}};
|
||||
BLI_uuid_format(buffer.data(), namespace_dns);
|
||||
EXPECT_EQ("6ba7b810-9dad-11d1-80b4-00c04fd430c8", buffer);
|
||||
}
|
||||
@@ -126,7 +139,7 @@ TEST(BLI_uuid, string_parsing_fail)
|
||||
TEST(BLI_uuid, stream_operator)
|
||||
{
|
||||
std::stringstream ss;
|
||||
const UUID uuid = {3540651616, 5282, 4588, 139, 153, 0xf7, 0x73, 0x69, 0x44, 0xdb, 0x8b};
|
||||
const UUID uuid = {3540651616, 5282, 4588, 139, 153, {0xf7, 0x73, 0x69, 0x44, 0xdb, 0x8b}};
|
||||
ss << uuid;
|
||||
EXPECT_EQ(ss.str(), "d30a0e60-14a2-11ec-8b99-f7736944db8b");
|
||||
}
|
||||
|
@@ -468,11 +468,13 @@ static std::string get_operations_export_dir()
|
||||
|
||||
void DebugInfo::export_operation(const NodeOperation *op, MemoryBuffer *render)
|
||||
{
|
||||
ImBuf *ibuf = IMB_allocFromBuffer(nullptr,
|
||||
render->getBuffer(),
|
||||
render->getWidth(),
|
||||
render->getHeight(),
|
||||
render->get_num_channels());
|
||||
const int width = render->getWidth();
|
||||
const int height = render->getHeight();
|
||||
const int num_channels = render->get_num_channels();
|
||||
|
||||
ImBuf *ibuf = IMB_allocImBuf(width, height, 8 * num_channels, IB_rectfloat);
|
||||
MemoryBuffer mem_ibuf(ibuf->rect_float, 4, width, height);
|
||||
mem_ibuf.copy_from(render, render->get_rect(), 0, num_channels, 0);
|
||||
|
||||
const std::string file_name = operation_class_name(op) + "_" + std::to_string(op->get_id()) +
|
||||
".png";
|
||||
|
@@ -32,7 +32,7 @@
|
||||
BLI_assert((buf)->get_rect().ymax >= (y) + BLI_rcti_size_y(&(area)))
|
||||
|
||||
#define ASSERT_VALID_ELEM_SIZE(buf, channel_offset, elem_size) \
|
||||
BLI_assert((buf)->get_num_channels() <= (channel_offset) + (elem_size))
|
||||
BLI_assert((buf)->get_num_channels() >= (channel_offset) + (elem_size))
|
||||
|
||||
namespace blender::compositor {
|
||||
|
||||
|
@@ -31,6 +31,12 @@ DenoiseNode::DenoiseNode(bNode *editorNode) : Node(editorNode)
|
||||
void DenoiseNode::convertToOperations(NodeConverter &converter,
|
||||
const CompositorContext & /*context*/) const
|
||||
{
|
||||
if (!COM_is_denoise_supported()) {
|
||||
converter.mapOutputSocket(getOutputSocket(0),
|
||||
converter.addInputProxy(getInputSocket(0), false));
|
||||
return;
|
||||
}
|
||||
|
||||
bNode *node = this->getbNode();
|
||||
NodeDenoise *denoise = (NodeDenoise *)node->storage;
|
||||
|
||||
@@ -39,8 +45,28 @@ void DenoiseNode::convertToOperations(NodeConverter &converter,
|
||||
operation->setDenoiseSettings(denoise);
|
||||
|
||||
converter.mapInputSocket(getInputSocket(0), operation->getInputSocket(0));
|
||||
converter.mapInputSocket(getInputSocket(1), operation->getInputSocket(1));
|
||||
converter.mapInputSocket(getInputSocket(2), operation->getInputSocket(2));
|
||||
if (denoise && denoise->prefilter == CMP_NODE_DENOISE_PREFILTER_ACCURATE) {
|
||||
{
|
||||
DenoisePrefilterOperation *normal_prefilter = new DenoisePrefilterOperation(
|
||||
DataType::Vector);
|
||||
normal_prefilter->set_image_name("normal");
|
||||
converter.addOperation(normal_prefilter);
|
||||
converter.mapInputSocket(getInputSocket(1), normal_prefilter->getInputSocket(0));
|
||||
converter.addLink(normal_prefilter->getOutputSocket(), operation->getInputSocket(1));
|
||||
}
|
||||
{
|
||||
DenoisePrefilterOperation *albedo_prefilter = new DenoisePrefilterOperation(DataType::Color);
|
||||
albedo_prefilter->set_image_name("albedo");
|
||||
converter.addOperation(albedo_prefilter);
|
||||
converter.mapInputSocket(getInputSocket(2), albedo_prefilter->getInputSocket(0));
|
||||
converter.addLink(albedo_prefilter->getOutputSocket(), operation->getInputSocket(2));
|
||||
}
|
||||
}
|
||||
else {
|
||||
converter.mapInputSocket(getInputSocket(1), operation->getInputSocket(1));
|
||||
converter.mapInputSocket(getInputSocket(2), operation->getInputSocket(2));
|
||||
}
|
||||
|
||||
converter.mapOutputSocket(getOutputSocket(0), operation->getOutputSocket(0));
|
||||
}
|
||||
|
||||
|
@@ -62,7 +62,8 @@ void AlphaOverPremultiplyOperation::update_memory_buffer_row(PixelCursor &p)
|
||||
const float *over_color = p.color2;
|
||||
const float value = *p.value;
|
||||
|
||||
if (over_color[3] <= 0.0f) {
|
||||
/* Zero alpha values should still permit an add of RGB data. */
|
||||
if (over_color[3] < 0.0f) {
|
||||
copy_v4_v4(p.out, color1);
|
||||
}
|
||||
else if (value == 1.0f && over_color[3] >= 1.0f) {
|
||||
|
@@ -28,6 +28,137 @@ static pthread_mutex_t oidn_lock = BLI_MUTEX_INITIALIZER;
|
||||
|
||||
namespace blender::compositor {
|
||||
|
||||
bool COM_is_denoise_supported()
|
||||
{
|
||||
#ifdef WITH_OPENIMAGEDENOISE
|
||||
/* Always supported through Accelerate framework BNNS on macOS. */
|
||||
# ifdef __APPLE__
|
||||
return true;
|
||||
# else
|
||||
return BLI_cpu_support_sse41();
|
||||
# endif
|
||||
|
||||
#else
|
||||
return false;
|
||||
#endif
|
||||
}
|
||||
|
||||
class DenoiseFilter {
|
||||
private:
|
||||
#ifdef WITH_OPENIMAGEDENOISE
|
||||
oidn::DeviceRef device;
|
||||
oidn::FilterRef filter;
|
||||
#endif
|
||||
bool initialized_ = false;
|
||||
|
||||
public:
|
||||
~DenoiseFilter()
|
||||
{
|
||||
BLI_assert(!initialized_);
|
||||
}
|
||||
|
||||
#ifdef WITH_OPENIMAGEDENOISE
|
||||
void init_and_lock_denoiser(MemoryBuffer *output)
|
||||
{
|
||||
/* Since it's memory intensive, it's better to run only one instance of OIDN at a time.
|
||||
* OpenImageDenoise is multithreaded internally and should use all available cores
|
||||
* nonetheless. */
|
||||
BLI_mutex_lock(&oidn_lock);
|
||||
|
||||
device = oidn::newDevice();
|
||||
device.commit();
|
||||
filter = device.newFilter("RT");
|
||||
initialized_ = true;
|
||||
set_image("output", output);
|
||||
}
|
||||
|
||||
void deinit_and_unlock_denoiser()
|
||||
{
|
||||
BLI_mutex_unlock(&oidn_lock);
|
||||
initialized_ = false;
|
||||
}
|
||||
|
||||
void set_image(const StringRef name, MemoryBuffer *buffer)
|
||||
{
|
||||
BLI_assert(initialized_);
|
||||
BLI_assert(!buffer->is_a_single_elem());
|
||||
filter.setImage(name.data(),
|
||||
buffer->getBuffer(),
|
||||
oidn::Format::Float3,
|
||||
buffer->getWidth(),
|
||||
buffer->getHeight(),
|
||||
0,
|
||||
buffer->get_elem_bytes_len());
|
||||
}
|
||||
|
||||
template<typename T> void set(const StringRef option_name, T value)
|
||||
{
|
||||
BLI_assert(initialized_);
|
||||
filter.set(option_name.data(), value);
|
||||
}
|
||||
|
||||
void execute()
|
||||
{
|
||||
BLI_assert(initialized_);
|
||||
filter.commit();
|
||||
filter.execute();
|
||||
}
|
||||
|
||||
#else
|
||||
void init_and_lock_denoiser(MemoryBuffer *UNUSED(output))
|
||||
{
|
||||
}
|
||||
|
||||
void deinit_and_unlock_denoiser()
|
||||
{
|
||||
}
|
||||
|
||||
void set_image(const StringRef UNUSED(name), MemoryBuffer *UNUSED(buffer))
|
||||
{
|
||||
}
|
||||
|
||||
template<typename T> void set(const StringRef UNUSED(option_name), T UNUSED(value))
|
||||
{
|
||||
}
|
||||
|
||||
void execute()
|
||||
{
|
||||
}
|
||||
#endif
|
||||
};
|
||||
|
||||
DenoiseBaseOperation::DenoiseBaseOperation()
|
||||
{
|
||||
flags.is_fullframe_operation = true;
|
||||
output_rendered_ = false;
|
||||
}
|
||||
|
||||
bool DenoiseBaseOperation::determineDependingAreaOfInterest(rcti * /*input*/,
|
||||
ReadBufferOperation *readOperation,
|
||||
rcti *output)
|
||||
{
|
||||
if (isCached()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
rcti newInput;
|
||||
newInput.xmax = this->getWidth();
|
||||
newInput.xmin = 0;
|
||||
newInput.ymax = this->getHeight();
|
||||
newInput.ymin = 0;
|
||||
return NodeOperation::determineDependingAreaOfInterest(&newInput, readOperation, output);
|
||||
}
|
||||
|
||||
void DenoiseBaseOperation::get_area_of_interest(const int UNUSED(input_idx),
|
||||
const rcti &UNUSED(output_area),
|
||||
rcti &r_input_area)
|
||||
{
|
||||
r_input_area.xmin = 0;
|
||||
r_input_area.xmax = this->getWidth();
|
||||
r_input_area.ymin = 0;
|
||||
r_input_area.ymax = this->getHeight();
|
||||
}
|
||||
|
||||
DenoiseOperation::DenoiseOperation()
|
||||
{
|
||||
this->addInputSocket(DataType::Color);
|
||||
@@ -35,8 +166,6 @@ DenoiseOperation::DenoiseOperation()
|
||||
this->addInputSocket(DataType::Color);
|
||||
this->addOutputSocket(DataType::Color);
|
||||
this->m_settings = nullptr;
|
||||
flags.is_fullframe_operation = true;
|
||||
output_rendered_ = false;
|
||||
}
|
||||
void DenoiseOperation::initExecution()
|
||||
{
|
||||
@@ -54,6 +183,25 @@ void DenoiseOperation::deinitExecution()
|
||||
SingleThreadedOperation::deinitExecution();
|
||||
}
|
||||
|
||||
static bool are_guiding_passes_noise_free(NodeDenoise *settings)
|
||||
{
|
||||
switch (settings->prefilter) {
|
||||
case CMP_NODE_DENOISE_PREFILTER_NONE:
|
||||
case CMP_NODE_DENOISE_PREFILTER_ACCURATE: /* Prefiltered with #DenoisePrefilterOperation. */
|
||||
return true;
|
||||
case CMP_NODE_DENOISE_PREFILTER_FAST:
|
||||
default:
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
void DenoiseOperation::hash_output_params()
|
||||
{
|
||||
if (m_settings) {
|
||||
hash_params((int)m_settings->hdr, are_guiding_passes_noise_free(m_settings));
|
||||
}
|
||||
}
|
||||
|
||||
MemoryBuffer *DenoiseOperation::createMemoryBuffer(rcti *rect2)
|
||||
{
|
||||
MemoryBuffer *tileColor = (MemoryBuffer *)this->m_inputProgramColor->initializeTileData(rect2);
|
||||
@@ -69,22 +217,6 @@ MemoryBuffer *DenoiseOperation::createMemoryBuffer(rcti *rect2)
|
||||
return result;
|
||||
}
|
||||
|
||||
bool DenoiseOperation::determineDependingAreaOfInterest(rcti * /*input*/,
|
||||
ReadBufferOperation *readOperation,
|
||||
rcti *output)
|
||||
{
|
||||
if (isCached()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
rcti newInput;
|
||||
newInput.xmax = this->getWidth();
|
||||
newInput.xmin = 0;
|
||||
newInput.ymax = this->getHeight();
|
||||
newInput.ymin = 0;
|
||||
return NodeOperation::determineDependingAreaOfInterest(&newInput, readOperation, output);
|
||||
}
|
||||
|
||||
void DenoiseOperation::generateDenoise(MemoryBuffer *output,
|
||||
MemoryBuffer *input_color,
|
||||
MemoryBuffer *input_normal,
|
||||
@@ -96,104 +228,46 @@ void DenoiseOperation::generateDenoise(MemoryBuffer *output,
|
||||
return;
|
||||
}
|
||||
|
||||
#ifdef WITH_OPENIMAGEDENOISE
|
||||
/* Always supported through Accelerate framework BNNS on macOS. */
|
||||
# ifndef __APPLE__
|
||||
if (BLI_cpu_support_sse41())
|
||||
# endif
|
||||
{
|
||||
/* OpenImageDenoise needs full buffers. */
|
||||
MemoryBuffer *buf_color = input_color->is_a_single_elem() ? input_color->inflate() :
|
||||
input_color;
|
||||
MemoryBuffer *buf_normal = input_normal && input_normal->is_a_single_elem() ?
|
||||
input_normal->inflate() :
|
||||
input_normal;
|
||||
MemoryBuffer *buf_albedo = input_albedo && input_albedo->is_a_single_elem() ?
|
||||
input_albedo->inflate() :
|
||||
input_albedo;
|
||||
BLI_assert(COM_is_denoise_supported());
|
||||
/* OpenImageDenoise needs full buffers. */
|
||||
MemoryBuffer *buf_color = input_color->is_a_single_elem() ? input_color->inflate() : input_color;
|
||||
MemoryBuffer *buf_normal = input_normal && input_normal->is_a_single_elem() ?
|
||||
input_normal->inflate() :
|
||||
input_normal;
|
||||
MemoryBuffer *buf_albedo = input_albedo && input_albedo->is_a_single_elem() ?
|
||||
input_albedo->inflate() :
|
||||
input_albedo;
|
||||
|
||||
/* Since it's memory intensive, it's better to run only one instance of OIDN at a time.
|
||||
* OpenImageDenoise is multithreaded internally and should use all available cores nonetheless.
|
||||
*/
|
||||
BLI_mutex_lock(&oidn_lock);
|
||||
DenoiseFilter filter;
|
||||
filter.init_and_lock_denoiser(output);
|
||||
|
||||
oidn::DeviceRef device = oidn::newDevice();
|
||||
device.commit();
|
||||
filter.set_image("color", buf_color);
|
||||
filter.set_image("normal", buf_normal);
|
||||
filter.set_image("albedo", buf_albedo);
|
||||
|
||||
oidn::FilterRef filter = device.newFilter("RT");
|
||||
filter.setImage("color",
|
||||
buf_color->getBuffer(),
|
||||
oidn::Format::Float3,
|
||||
buf_color->getWidth(),
|
||||
buf_color->getHeight(),
|
||||
0,
|
||||
sizeof(float[4]));
|
||||
if (buf_normal && buf_normal->getBuffer()) {
|
||||
filter.setImage("normal",
|
||||
buf_normal->getBuffer(),
|
||||
oidn::Format::Float3,
|
||||
buf_normal->getWidth(),
|
||||
buf_normal->getHeight(),
|
||||
0,
|
||||
sizeof(float[3]));
|
||||
}
|
||||
if (buf_albedo && buf_albedo->getBuffer()) {
|
||||
filter.setImage("albedo",
|
||||
buf_albedo->getBuffer(),
|
||||
oidn::Format::Float3,
|
||||
buf_albedo->getWidth(),
|
||||
buf_albedo->getHeight(),
|
||||
0,
|
||||
sizeof(float[4]));
|
||||
}
|
||||
filter.setImage("output",
|
||||
output->getBuffer(),
|
||||
oidn::Format::Float3,
|
||||
buf_color->getWidth(),
|
||||
buf_color->getHeight(),
|
||||
0,
|
||||
sizeof(float[4]));
|
||||
|
||||
BLI_assert(settings);
|
||||
if (settings) {
|
||||
filter.set("hdr", settings->hdr);
|
||||
filter.set("srgb", false);
|
||||
}
|
||||
|
||||
filter.commit();
|
||||
filter.execute();
|
||||
BLI_mutex_unlock(&oidn_lock);
|
||||
|
||||
/* Copy the alpha channel, OpenImageDenoise currently only supports RGB. */
|
||||
output->copy_from(input_color, input_color->get_rect(), 3, COM_DATA_TYPE_VALUE_CHANNELS, 3);
|
||||
|
||||
/* Delete inflated buffers. */
|
||||
if (input_color->is_a_single_elem()) {
|
||||
delete buf_color;
|
||||
}
|
||||
if (input_normal && input_normal->is_a_single_elem()) {
|
||||
delete buf_normal;
|
||||
}
|
||||
if (input_albedo && input_albedo->is_a_single_elem()) {
|
||||
delete buf_albedo;
|
||||
}
|
||||
|
||||
return;
|
||||
BLI_assert(settings);
|
||||
if (settings) {
|
||||
filter.set("hdr", settings->hdr);
|
||||
filter.set("srgb", false);
|
||||
filter.set("cleanAux", are_guiding_passes_noise_free(settings));
|
||||
}
|
||||
#endif
|
||||
/* If built without OIDN or running on an unsupported CPU, just pass through. */
|
||||
UNUSED_VARS(input_albedo, input_normal, settings);
|
||||
output->copy_from(input_color, input_color->get_rect());
|
||||
}
|
||||
|
||||
void DenoiseOperation::get_area_of_interest(const int UNUSED(input_idx),
|
||||
const rcti &UNUSED(output_area),
|
||||
rcti &r_input_area)
|
||||
{
|
||||
r_input_area.xmin = 0;
|
||||
r_input_area.xmax = this->getWidth();
|
||||
r_input_area.ymin = 0;
|
||||
r_input_area.ymax = this->getHeight();
|
||||
filter.execute();
|
||||
filter.deinit_and_unlock_denoiser();
|
||||
|
||||
/* Copy the alpha channel, OpenImageDenoise currently only supports RGB. */
|
||||
output->copy_from(input_color, input_color->get_rect(), 3, COM_DATA_TYPE_VALUE_CHANNELS, 3);
|
||||
|
||||
/* Delete inflated buffers. */
|
||||
if (input_color->is_a_single_elem()) {
|
||||
delete buf_color;
|
||||
}
|
||||
if (input_normal && input_normal->is_a_single_elem()) {
|
||||
delete buf_normal;
|
||||
}
|
||||
if (input_albedo && input_albedo->is_a_single_elem()) {
|
||||
delete buf_albedo;
|
||||
}
|
||||
}
|
||||
|
||||
void DenoiseOperation::update_memory_buffer(MemoryBuffer *output,
|
||||
@@ -206,4 +280,57 @@ void DenoiseOperation::update_memory_buffer(MemoryBuffer *output,
|
||||
}
|
||||
}
|
||||
|
||||
DenoisePrefilterOperation::DenoisePrefilterOperation(DataType data_type)
|
||||
{
|
||||
this->addInputSocket(data_type);
|
||||
this->addOutputSocket(data_type);
|
||||
image_name_ = "";
|
||||
}
|
||||
|
||||
void DenoisePrefilterOperation::hash_output_params()
|
||||
{
|
||||
hash_param(image_name_);
|
||||
}
|
||||
|
||||
MemoryBuffer *DenoisePrefilterOperation::createMemoryBuffer(rcti *rect2)
|
||||
{
|
||||
MemoryBuffer *input = (MemoryBuffer *)this->get_input_operation(0)->initializeTileData(rect2);
|
||||
rcti rect;
|
||||
BLI_rcti_init(&rect, 0, getWidth(), 0, getHeight());
|
||||
|
||||
MemoryBuffer *result = new MemoryBuffer(getOutputSocket()->getDataType(), rect);
|
||||
generate_denoise(result, input);
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
void DenoisePrefilterOperation::generate_denoise(MemoryBuffer *output, MemoryBuffer *input)
|
||||
{
|
||||
BLI_assert(COM_is_denoise_supported());
|
||||
|
||||
/* Denoising needs full buffers. */
|
||||
MemoryBuffer *input_buf = input->is_a_single_elem() ? input->inflate() : input;
|
||||
|
||||
DenoiseFilter filter;
|
||||
filter.init_and_lock_denoiser(output);
|
||||
filter.set_image(image_name_, input_buf);
|
||||
filter.execute();
|
||||
filter.deinit_and_unlock_denoiser();
|
||||
|
||||
/* Delete inflated buffers. */
|
||||
if (input->is_a_single_elem()) {
|
||||
delete input_buf;
|
||||
}
|
||||
}
|
||||
|
||||
void DenoisePrefilterOperation::update_memory_buffer(MemoryBuffer *output,
|
||||
const rcti &UNUSED(area),
|
||||
Span<MemoryBuffer *> inputs)
|
||||
{
|
||||
if (!output_rendered_) {
|
||||
this->generate_denoise(output, inputs[0]);
|
||||
output_rendered_ = true;
|
||||
}
|
||||
}
|
||||
|
||||
} // namespace blender::compositor
|
||||
|
@@ -23,7 +23,24 @@
|
||||
|
||||
namespace blender::compositor {
|
||||
|
||||
class DenoiseOperation : public SingleThreadedOperation {
|
||||
bool COM_is_denoise_supported();
|
||||
|
||||
class DenoiseBaseOperation : public SingleThreadedOperation {
|
||||
protected:
|
||||
bool output_rendered_;
|
||||
|
||||
protected:
|
||||
DenoiseBaseOperation();
|
||||
|
||||
public:
|
||||
bool determineDependingAreaOfInterest(rcti *input,
|
||||
ReadBufferOperation *readOperation,
|
||||
rcti *output) override;
|
||||
|
||||
void get_area_of_interest(int input_idx, const rcti &output_area, rcti &r_input_area) override;
|
||||
};
|
||||
|
||||
class DenoiseOperation : public DenoiseBaseOperation {
|
||||
private:
|
||||
/**
|
||||
* \brief Cached reference to the input programs
|
||||
@@ -37,8 +54,6 @@ class DenoiseOperation : public SingleThreadedOperation {
|
||||
*/
|
||||
NodeDenoise *m_settings;
|
||||
|
||||
bool output_rendered_;
|
||||
|
||||
public:
|
||||
DenoiseOperation();
|
||||
/**
|
||||
@@ -55,16 +70,13 @@ class DenoiseOperation : public SingleThreadedOperation {
|
||||
{
|
||||
this->m_settings = settings;
|
||||
}
|
||||
bool determineDependingAreaOfInterest(rcti *input,
|
||||
ReadBufferOperation *readOperation,
|
||||
rcti *output) override;
|
||||
|
||||
void get_area_of_interest(int input_idx, const rcti &output_area, rcti &r_input_area) override;
|
||||
void update_memory_buffer(MemoryBuffer *output,
|
||||
const rcti &area,
|
||||
Span<MemoryBuffer *> inputs) override;
|
||||
|
||||
protected:
|
||||
void hash_output_params() override;
|
||||
void generateDenoise(MemoryBuffer *output,
|
||||
MemoryBuffer *input_color,
|
||||
MemoryBuffer *input_normal,
|
||||
@@ -74,4 +86,28 @@ class DenoiseOperation : public SingleThreadedOperation {
|
||||
MemoryBuffer *createMemoryBuffer(rcti *rect) override;
|
||||
};
|
||||
|
||||
class DenoisePrefilterOperation : public DenoiseBaseOperation {
|
||||
private:
|
||||
std::string image_name_;
|
||||
|
||||
public:
|
||||
DenoisePrefilterOperation(DataType data_type);
|
||||
|
||||
void set_image_name(StringRef name)
|
||||
{
|
||||
image_name_ = name;
|
||||
}
|
||||
|
||||
void update_memory_buffer(MemoryBuffer *output,
|
||||
const rcti &area,
|
||||
Span<MemoryBuffer *> inputs) override;
|
||||
|
||||
protected:
|
||||
void hash_output_params() override;
|
||||
MemoryBuffer *createMemoryBuffer(rcti *rect) override;
|
||||
|
||||
private:
|
||||
void generate_denoise(MemoryBuffer *output, MemoryBuffer *input);
|
||||
};
|
||||
|
||||
} // namespace blender::compositor
|
||||
|
@@ -31,6 +31,7 @@
|
||||
|
||||
#include "BLT_translation.h"
|
||||
|
||||
#include "ED_asset.h"
|
||||
#include "ED_screen.h"
|
||||
|
||||
#include "MEM_guardedalloc.h"
|
||||
@@ -216,7 +217,18 @@ static void uilist_filter_items_default(struct uiList *ui_list,
|
||||
RNA_PROP_BEGIN (dataptr, itemptr, prop) {
|
||||
bool do_order = false;
|
||||
|
||||
char *namebuf = RNA_struct_name_get_alloc(&itemptr, nullptr, 0, nullptr);
|
||||
char *namebuf;
|
||||
if (RNA_struct_is_a(itemptr.type, &RNA_AssetHandle)) {
|
||||
/* XXX The AssetHandle design is hacky and meant to be temporary. It can't have a proper
|
||||
* name property, so for now this hardcoded exception is needed. */
|
||||
AssetHandle *asset_handle = (AssetHandle *)itemptr.data;
|
||||
const char *asset_name = ED_asset_handle_get_name(asset_handle);
|
||||
namebuf = BLI_strdup(asset_name);
|
||||
}
|
||||
else {
|
||||
namebuf = RNA_struct_name_get_alloc(&itemptr, nullptr, 0, nullptr);
|
||||
}
|
||||
|
||||
const char *name = namebuf ? namebuf : "";
|
||||
|
||||
if (filter[0]) {
|
||||
|
@@ -270,9 +270,10 @@ void SCREEN_OT_screenshot(wmOperatorType *ot)
|
||||
|
||||
void SCREEN_OT_screenshot_area(wmOperatorType *ot)
|
||||
{
|
||||
ot->name = "Save Screenshot (Area)";
|
||||
/* NOTE: the term "area" is a Blender internal name, "Editor" makes more sense for the UI. */
|
||||
ot->name = "Save Screenshot (Editor)";
|
||||
ot->idname = "SCREEN_OT_screenshot_area";
|
||||
ot->description = "Capture a picture of the active area";
|
||||
ot->description = "Capture a picture of an editor";
|
||||
|
||||
screen_screenshot_impl(ot);
|
||||
|
||||
|
@@ -1098,7 +1098,8 @@ static int graphkeys_sound_bake_exec(bContext *C, wmOperator *op)
|
||||
RNA_boolean_get(op->ptr, "use_square"),
|
||||
RNA_float_get(op->ptr, "sthreshold"),
|
||||
FPS,
|
||||
&sbi.length);
|
||||
&sbi.length,
|
||||
0);
|
||||
|
||||
if (sbi.samples == NULL) {
|
||||
BKE_report(op->reports, RPT_ERROR, "Unsupported audio format");
|
||||
|
@@ -2865,6 +2865,8 @@ static void node_composit_buts_denoise(uiLayout *layout, bContext *UNUSED(C), Po
|
||||
# endif
|
||||
#endif
|
||||
|
||||
uiItemL(layout, IFACE_("Prefilter:"), ICON_NONE);
|
||||
uiItemR(layout, ptr, "prefilter", DEFAULT_FLAGS, nullptr, ICON_NONE);
|
||||
uiItemR(layout, ptr, "use_hdr", DEFAULT_FLAGS, nullptr, ICON_NONE);
|
||||
}
|
||||
|
||||
@@ -4280,6 +4282,12 @@ void node_draw_link(View2D *v2d, SpaceNode *snode, bNodeLink *link)
|
||||
// th_col3 = -1; /* no shadow */
|
||||
}
|
||||
}
|
||||
if (snode->edittree->type == NTREE_GEOMETRY) {
|
||||
if ((link->fromsock && link->fromsock->display_shape == SOCK_DISPLAY_SHAPE_DIAMOND) &&
|
||||
(link->tosock && link->tosock->display_shape == SOCK_DISPLAY_SHAPE_CIRCLE)) {
|
||||
th_col1 = th_col2 = th_col3 = TH_REDALERT;
|
||||
}
|
||||
}
|
||||
|
||||
node_draw_link_bezier(v2d, snode, link, th_col1, th_col2, th_col3);
|
||||
}
|
||||
|
@@ -47,6 +47,7 @@
|
||||
#include "BKE_mask.h"
|
||||
#include "BKE_movieclip.h"
|
||||
#include "BKE_report.h"
|
||||
#include "BKE_sound.h"
|
||||
|
||||
#include "IMB_imbuf.h"
|
||||
|
||||
@@ -643,7 +644,15 @@ static void sequencer_add_movie_multiple_strips(bContext *C,
|
||||
BLI_strncpy(load_data->name, file_only, sizeof(load_data->name));
|
||||
Sequence *seq_movie = NULL;
|
||||
Sequence *seq_sound = NULL;
|
||||
double video_start_offset;
|
||||
double video_start_offset = -1;
|
||||
double audio_start_offset = 0;
|
||||
|
||||
if (RNA_boolean_get(op->ptr, "sound")) {
|
||||
SoundStreamInfo sound_info;
|
||||
if (BKE_sound_stream_info_get(bmain, load_data->path, 0, &sound_info)) {
|
||||
audio_start_offset = video_start_offset = sound_info.start;
|
||||
}
|
||||
}
|
||||
|
||||
load_data->channel++;
|
||||
seq_movie = SEQ_add_movie_strip(bmain, scene, ed->seqbasep, load_data, &video_start_offset);
|
||||
@@ -653,9 +662,30 @@ static void sequencer_add_movie_multiple_strips(bContext *C,
|
||||
}
|
||||
else {
|
||||
if (RNA_boolean_get(op->ptr, "sound")) {
|
||||
seq_sound = SEQ_add_sound_strip(bmain, scene, ed->seqbasep, load_data, video_start_offset);
|
||||
int minimum_frame_offset = MIN2(video_start_offset, audio_start_offset) * FPS;
|
||||
|
||||
int video_frame_offset = video_start_offset * FPS;
|
||||
int audio_frame_offset = audio_start_offset * FPS;
|
||||
|
||||
double video_frame_remainder = video_start_offset * FPS - video_frame_offset;
|
||||
double audio_frame_remainder = audio_start_offset * FPS - audio_frame_offset;
|
||||
|
||||
double audio_skip = (video_frame_remainder - audio_frame_remainder) / FPS;
|
||||
|
||||
video_frame_offset -= minimum_frame_offset;
|
||||
audio_frame_offset -= minimum_frame_offset;
|
||||
|
||||
load_data->start_frame += audio_frame_offset;
|
||||
seq_sound = SEQ_add_sound_strip(bmain, scene, ed->seqbasep, load_data, audio_skip);
|
||||
|
||||
int min_startdisp = MIN2(seq_movie->startdisp, seq_sound->startdisp);
|
||||
int max_enddisp = MAX2(seq_movie->enddisp, seq_sound->enddisp);
|
||||
|
||||
load_data->start_frame += max_enddisp - min_startdisp - audio_frame_offset;
|
||||
}
|
||||
else {
|
||||
load_data->start_frame += seq_movie->enddisp - seq_movie->startdisp;
|
||||
}
|
||||
load_data->start_frame += seq_movie->enddisp - seq_movie->startdisp;
|
||||
seq_load_apply_generic_options(C, op, seq_sound);
|
||||
seq_load_apply_generic_options(C, op, seq_movie);
|
||||
seq_build_proxy(C, seq_movie);
|
||||
@@ -672,7 +702,15 @@ static bool sequencer_add_movie_single_strip(bContext *C, wmOperator *op, SeqLoa
|
||||
|
||||
Sequence *seq_movie = NULL;
|
||||
Sequence *seq_sound = NULL;
|
||||
double video_start_offset;
|
||||
double video_start_offset = -1;
|
||||
double audio_start_offset = 0;
|
||||
|
||||
if (RNA_boolean_get(op->ptr, "sound")) {
|
||||
SoundStreamInfo sound_info;
|
||||
if (BKE_sound_stream_info_get(bmain, load_data->path, 0, &sound_info)) {
|
||||
audio_start_offset = video_start_offset = sound_info.start;
|
||||
}
|
||||
}
|
||||
|
||||
load_data->channel++;
|
||||
seq_movie = SEQ_add_movie_strip(bmain, scene, ed->seqbasep, load_data, &video_start_offset);
|
||||
@@ -683,7 +721,21 @@ static bool sequencer_add_movie_single_strip(bContext *C, wmOperator *op, SeqLoa
|
||||
return false;
|
||||
}
|
||||
if (RNA_boolean_get(op->ptr, "sound")) {
|
||||
seq_sound = SEQ_add_sound_strip(bmain, scene, ed->seqbasep, load_data, video_start_offset);
|
||||
int minimum_frame_offset = MIN2(video_start_offset, audio_start_offset) * FPS;
|
||||
|
||||
int video_frame_offset = video_start_offset * FPS;
|
||||
int audio_frame_offset = audio_start_offset * FPS;
|
||||
|
||||
double video_frame_remainder = video_start_offset * FPS - video_frame_offset;
|
||||
double audio_frame_remainder = audio_start_offset * FPS - audio_frame_offset;
|
||||
|
||||
double audio_skip = (video_frame_remainder - audio_frame_remainder) / FPS;
|
||||
|
||||
video_frame_offset -= minimum_frame_offset;
|
||||
audio_frame_offset -= minimum_frame_offset;
|
||||
|
||||
load_data->start_frame += audio_frame_offset;
|
||||
seq_sound = SEQ_add_sound_strip(bmain, scene, ed->seqbasep, load_data, audio_skip);
|
||||
}
|
||||
seq_load_apply_generic_options(C, op, seq_sound);
|
||||
seq_load_apply_generic_options(C, op, seq_movie);
|
||||
|
@@ -420,6 +420,10 @@ static void draw_seq_waveform_overlay(View2D *v2d,
|
||||
float sample_offset = start_sample + i * samples_per_pix;
|
||||
int p = sample_offset;
|
||||
|
||||
if (p < 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (p >= waveform->length) {
|
||||
break;
|
||||
}
|
||||
|
@@ -272,6 +272,22 @@ class MFParams {
|
||||
return span;
|
||||
}
|
||||
|
||||
/**
|
||||
* Same as #uninitialized_single_output, but returns an empty span when the output is not
|
||||
* required.
|
||||
*/
|
||||
template<typename T>
|
||||
MutableSpan<T> uninitialized_single_output_if_required(int param_index, StringRef name = "")
|
||||
{
|
||||
return this->uninitialized_single_output_if_required(param_index, name).typed<T>();
|
||||
}
|
||||
GMutableSpan uninitialized_single_output_if_required(int param_index, StringRef name = "")
|
||||
{
|
||||
this->assert_correct_param(param_index, name, MFParamType::SingleOutput);
|
||||
int data_index = builder_->signature_->data_index(param_index);
|
||||
return builder_->mutable_spans_[data_index];
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
const VVectorArray<T> &readonly_vector_input(int param_index, StringRef name = "")
|
||||
{
|
||||
|
@@ -664,11 +664,6 @@ static int startffmpeg(struct anim *anim)
|
||||
anim->duration_in_frames = (int)(stream_dur * av_q2d(frame_rate) + 0.5f);
|
||||
}
|
||||
|
||||
double ctx_start = 0;
|
||||
if (pFormatCtx->start_time != AV_NOPTS_VALUE) {
|
||||
ctx_start = (double)pFormatCtx->start_time / AV_TIME_BASE;
|
||||
}
|
||||
|
||||
frs_num = frame_rate.num;
|
||||
frs_den = frame_rate.den;
|
||||
|
||||
@@ -683,7 +678,7 @@ static int startffmpeg(struct anim *anim)
|
||||
anim->frs_sec_base = frs_den;
|
||||
/* Save the relative start time for the video. IE the start time in relation to where playback
|
||||
* starts. */
|
||||
anim->start_offset = video_start - ctx_start;
|
||||
anim->start_offset = video_start;
|
||||
|
||||
anim->params = 0;
|
||||
|
||||
|
@@ -481,6 +481,8 @@ typedef struct bNodeTree {
|
||||
float view_center[2];
|
||||
|
||||
ListBase nodes, links;
|
||||
/* Vector<std::optional<Vector<int>>>. */
|
||||
void *output_field_dependencies;
|
||||
|
||||
/** Set init on fileread. */
|
||||
int type, init;
|
||||
@@ -1167,6 +1169,7 @@ typedef struct NodeCryptomatte {
|
||||
|
||||
typedef struct NodeDenoise {
|
||||
char hdr;
|
||||
char prefilter;
|
||||
} NodeDenoise;
|
||||
|
||||
typedef struct NodeAttributeClamp {
|
||||
@@ -1432,7 +1435,7 @@ typedef struct NodeGeometryCurvePrimitiveQuad {
|
||||
} NodeGeometryCurvePrimitiveQuad;
|
||||
|
||||
typedef struct NodeGeometryCurveResample {
|
||||
/* GeometryNodeCurveSampleMode. */
|
||||
/* GeometryNodeCurveResampleMode. */
|
||||
uint8_t mode;
|
||||
} NodeGeometryCurveResample;
|
||||
|
||||
@@ -1442,12 +1445,12 @@ typedef struct NodeGeometryCurveSubdivide {
|
||||
} NodeGeometryCurveSubdivide;
|
||||
|
||||
typedef struct NodeGeometryCurveTrim {
|
||||
/* GeometryNodeCurveInterpolateMode. */
|
||||
/* GeometryNodeCurveSampleMode. */
|
||||
uint8_t mode;
|
||||
} NodeGeometryCurveTrim;
|
||||
|
||||
typedef struct NodeGeometryCurveToPoints {
|
||||
/* GeometryNodeCurveSampleMode. */
|
||||
/* GeometryNodeCurveResampleMode. */
|
||||
uint8_t mode;
|
||||
} NodeGeometryCurveToPoints;
|
||||
|
||||
@@ -1840,6 +1843,14 @@ typedef enum CMPNodeSetAlphaMode {
|
||||
CMP_NODE_SETALPHA_MODE_REPLACE_ALPHA = 1,
|
||||
} CMPNodeSetAlphaMode;
|
||||
|
||||
/* Denoise Node. */
|
||||
/* `NodeDenoise.prefilter` */
|
||||
typedef enum CMPNodeDenoisePrefilter {
|
||||
CMP_NODE_DENOISE_PREFILTER_FAST = 0,
|
||||
CMP_NODE_DENOISE_PREFILTER_NONE = 1,
|
||||
CMP_NODE_DENOISE_PREFILTER_ACCURATE = 2
|
||||
} CMPNodeDenoisePrefilter;
|
||||
|
||||
#define CMP_NODE_PLANETRACKDEFORM_MBLUR_SAMPLES_MAX 64
|
||||
|
||||
/* Point Density shader node */
|
||||
@@ -2029,16 +2040,16 @@ typedef enum GeometryNodeCurvePrimitiveBezierSegmentMode {
|
||||
GEO_NODE_CURVE_PRIMITIVE_BEZIER_SEGMENT_OFFSET = 1,
|
||||
} GeometryNodeCurvePrimitiveBezierSegmentMode;
|
||||
|
||||
typedef enum GeometryNodeCurveSampleMode {
|
||||
GEO_NODE_CURVE_SAMPLE_COUNT = 0,
|
||||
GEO_NODE_CURVE_SAMPLE_LENGTH = 1,
|
||||
GEO_NODE_CURVE_SAMPLE_EVALUATED = 2,
|
||||
} GeometryNodeCurveSampleMode;
|
||||
typedef enum GeometryNodeCurveResampleMode {
|
||||
GEO_NODE_CURVE_RESAMPLE_COUNT = 0,
|
||||
GEO_NODE_CURVE_RESAMPLE_LENGTH = 1,
|
||||
GEO_NODE_CURVE_RESAMPLE_EVALUATED = 2,
|
||||
} GeometryNodeCurveResampleMode;
|
||||
|
||||
typedef enum GeometryNodeCurveInterpolateMode {
|
||||
GEO_NODE_CURVE_INTERPOLATE_FACTOR = 0,
|
||||
GEO_NODE_CURVE_INTERPOLATE_LENGTH = 1,
|
||||
} GeometryNodeCurveInterpolateMode;
|
||||
typedef enum GeometryNodeCurveSampleMode {
|
||||
GEO_NODE_CURVE_SAMPLE_FACTOR = 0,
|
||||
GEO_NODE_CURVE_SAMPLE_LENGTH = 1,
|
||||
} GeometryNodeCurveSampleMode;
|
||||
|
||||
typedef enum GeometryNodeAttributeTransferMapMode {
|
||||
GEO_NODE_ATTRIBUTE_TRANSFER_NEAREST_FACE_INTERPOLATED = 0,
|
||||
|
@@ -1134,7 +1134,7 @@ static int calculate_struct_sizes(int firststruct, FILE *file_verify, const char
|
||||
* to the struct to resolve the problem. */
|
||||
if ((size_64 % max_align_64 == 0) && (size_32 % max_align_32 == 4)) {
|
||||
fprintf(stderr,
|
||||
"Sizeerror in 32 bit struct: %s (add paddding pointer)\n",
|
||||
"Sizeerror in 32 bit struct: %s (add padding pointer)\n",
|
||||
types[structtype]);
|
||||
}
|
||||
else {
|
||||
|
@@ -843,17 +843,17 @@ static float rna_GPencilStrokePoints_weight_get(bGPDstroke *stroke,
|
||||
return -1.0f;
|
||||
}
|
||||
|
||||
if (dvert->totweight <= vertex_group_index || vertex_group_index < 0) {
|
||||
BKE_report(reports, RPT_ERROR, "Groups: index out of range");
|
||||
return -1.0f;
|
||||
}
|
||||
|
||||
if (stroke->totpoints <= point_index || point_index < 0) {
|
||||
BKE_report(reports, RPT_ERROR, "GPencilStrokePoints: index out of range");
|
||||
return -1.0f;
|
||||
}
|
||||
|
||||
MDeformVert *pt_dvert = stroke->dvert + point_index;
|
||||
if ((pt_dvert) && (pt_dvert->totweight <= vertex_group_index || vertex_group_index < 0)) {
|
||||
BKE_report(reports, RPT_ERROR, "Groups: index out of range");
|
||||
return -1.0f;
|
||||
}
|
||||
|
||||
MDeformWeight *dw = BKE_defvert_find_index(pt_dvert, vertex_group_index);
|
||||
if (dw) {
|
||||
return dw->weight;
|
||||
|
@@ -8890,12 +8890,37 @@ static void def_cmp_denoise(StructRNA *srna)
|
||||
{
|
||||
PropertyRNA *prop;
|
||||
|
||||
static const EnumPropertyItem prefilter_items[] = {
|
||||
{CMP_NODE_DENOISE_PREFILTER_NONE,
|
||||
"NONE",
|
||||
0,
|
||||
"None",
|
||||
"No prefiltering, use when guiding passes are noise-free"},
|
||||
{CMP_NODE_DENOISE_PREFILTER_FAST,
|
||||
"FAST",
|
||||
0,
|
||||
"Fast",
|
||||
"Denoise image and guiding passes together. Improves quality when guiding passes are noisy "
|
||||
"using least amount of extra processing time"},
|
||||
{CMP_NODE_DENOISE_PREFILTER_ACCURATE,
|
||||
"ACCURATE",
|
||||
0,
|
||||
"Accurate",
|
||||
"Prefilter noisy guiding passes before denoising image. Improves quality when guiding "
|
||||
"passes are noisy using extra processing time"},
|
||||
{0, NULL, 0, NULL, NULL}};
|
||||
|
||||
RNA_def_struct_sdna_from(srna, "NodeDenoise", "storage");
|
||||
|
||||
prop = RNA_def_property(srna, "use_hdr", PROP_BOOLEAN, PROP_NONE);
|
||||
RNA_def_property_boolean_sdna(prop, NULL, "hdr", 0);
|
||||
RNA_def_property_ui_text(prop, "HDR", "Process HDR images");
|
||||
RNA_def_property_update(prop, NC_NODE | NA_EDITED, "rna_Node_update");
|
||||
|
||||
prop = RNA_def_property(srna, "prefilter", PROP_ENUM, PROP_NONE);
|
||||
RNA_def_property_enum_items(prop, prefilter_items);
|
||||
RNA_def_property_ui_text(prop, "", "Denoising prefilter");
|
||||
RNA_def_property_update(prop, NC_NODE | NA_EDITED, "rna_Node_update");
|
||||
}
|
||||
|
||||
static void def_cmp_antialiasing(StructRNA *srna)
|
||||
@@ -10091,18 +10116,18 @@ static void def_geo_curve_resample(StructRNA *srna)
|
||||
PropertyRNA *prop;
|
||||
|
||||
static EnumPropertyItem mode_items[] = {
|
||||
{GEO_NODE_CURVE_SAMPLE_EVALUATED,
|
||||
{GEO_NODE_CURVE_RESAMPLE_EVALUATED,
|
||||
"EVALUATED",
|
||||
0,
|
||||
"Evaluated",
|
||||
"Output the input spline's evaluated points, based on the resolution attribute for NURBS "
|
||||
"and Bezier splines. Poly splines are unchanged"},
|
||||
{GEO_NODE_CURVE_SAMPLE_COUNT,
|
||||
{GEO_NODE_CURVE_RESAMPLE_COUNT,
|
||||
"COUNT",
|
||||
0,
|
||||
"Count",
|
||||
"Sample the specified number of points along each spline"},
|
||||
{GEO_NODE_CURVE_SAMPLE_LENGTH,
|
||||
{GEO_NODE_CURVE_RESAMPLE_LENGTH,
|
||||
"LENGTH",
|
||||
0,
|
||||
"Length",
|
||||
@@ -10136,18 +10161,18 @@ static void def_geo_curve_to_points(StructRNA *srna)
|
||||
PropertyRNA *prop;
|
||||
|
||||
static EnumPropertyItem mode_items[] = {
|
||||
{GEO_NODE_CURVE_SAMPLE_EVALUATED,
|
||||
{GEO_NODE_CURVE_RESAMPLE_EVALUATED,
|
||||
"EVALUATED",
|
||||
0,
|
||||
"Evaluated",
|
||||
"Create points from the curve's evaluated points, based on the resolution attribute for "
|
||||
"NURBS and Bezier splines"},
|
||||
{GEO_NODE_CURVE_SAMPLE_COUNT,
|
||||
{GEO_NODE_CURVE_RESAMPLE_COUNT,
|
||||
"COUNT",
|
||||
0,
|
||||
"Count",
|
||||
"Sample each spline by evenly distributing the specified number of points"},
|
||||
{GEO_NODE_CURVE_SAMPLE_LENGTH,
|
||||
{GEO_NODE_CURVE_RESAMPLE_LENGTH,
|
||||
"LENGTH",
|
||||
0,
|
||||
"Length",
|
||||
@@ -10168,12 +10193,12 @@ static void def_geo_curve_trim(StructRNA *srna)
|
||||
PropertyRNA *prop;
|
||||
|
||||
static EnumPropertyItem mode_items[] = {
|
||||
{GEO_NODE_CURVE_INTERPOLATE_FACTOR,
|
||||
{GEO_NODE_CURVE_SAMPLE_FACTOR,
|
||||
"FACTOR",
|
||||
0,
|
||||
"Factor",
|
||||
"Find the endpoint positions using a factor of each spline's length"},
|
||||
{GEO_NODE_CURVE_INTERPOLATE_LENGTH,
|
||||
{GEO_NODE_CURVE_RESAMPLE_LENGTH,
|
||||
"LENGTH",
|
||||
0,
|
||||
"Length",
|
||||
|
@@ -323,8 +323,8 @@ static Sequence *rna_Sequences_new_movie(ID *id,
|
||||
SEQ_add_load_data_init(&load_data, name, file, frame_start, channel);
|
||||
load_data.fit_method = fit_method;
|
||||
load_data.allow_invalid_file = true;
|
||||
double video_start_offset;
|
||||
Sequence *seq = SEQ_add_movie_strip(bmain, scene, seqbase, &load_data, &video_start_offset);
|
||||
double start_offset = -1;
|
||||
Sequence *seq = SEQ_add_movie_strip(bmain, scene, seqbase, &load_data, &start_offset);
|
||||
|
||||
DEG_relations_tag_update(bmain);
|
||||
DEG_id_tag_update(&scene->id, ID_RECALC_SEQUENCER_STRIPS);
|
||||
|
@@ -218,6 +218,7 @@ set(SRC
geometry/nodes/node_geo_point_translate.cc
geometry/nodes/node_geo_points_to_volume.cc
geometry/nodes/node_geo_raycast.cc
geometry/nodes/node_geo_realize_instances.cc
geometry/nodes/node_geo_separate_components.cc
geometry/nodes/node_geo_set_position.cc
geometry/nodes/node_geo_subdivision_surface.cc
@@ -103,6 +103,7 @@ void register_node_type_geo_point_separate(void);
void register_node_type_geo_point_translate(void);
void register_node_type_geo_points_to_volume(void);
void register_node_type_geo_raycast(void);
void register_node_type_geo_realize_instances(void);
void register_node_type_geo_sample_texture(void);
void register_node_type_geo_select_by_handle_type(void);
void register_node_type_geo_separate_components(void);
@@ -37,6 +37,7 @@ class SocketDeclaration {
bool hide_label_ = false;
bool hide_value_ = false;
bool is_multi_input_ = false;
bool is_field_ = false;

friend NodeDeclarationBuilder;
template<typename SocketDecl> friend class SocketDeclarationBuilder;

@@ -51,6 +52,8 @@ class SocketDeclaration {
StringRefNull name() const;
StringRefNull identifier() const;

bool is_field() const;

protected:
void set_common_flags(bNodeSocket &socket) const;
bool matches_common_data(const bNodeSocket &socket) const;

@@ -93,6 +96,12 @@ class SocketDeclarationBuilder : public BaseSocketDeclarationBuilder {
decl_->is_multi_input_ = value;
return *(Self *)this;
}

Self &is_field(bool value = true)
{
decl_->is_field_ = value;
return *(Self *)this;
}
};

using SocketDeclarationPtr = std::unique_ptr<SocketDeclaration>;

@@ -148,6 +157,11 @@ inline StringRefNull SocketDeclaration::identifier() const
return identifier_;
}

inline bool SocketDeclaration::is_field() const
{
return is_field_;
}

/* --------------------------------------------------------------------
 * NodeDeclarationBuilder inline methods.
 */
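As a point of reference, this is how a node's declare function can mark sockets with the new flag; the node and its sockets below are hypothetical, only the is_field() builder call and the decl::Float declaration type come from the code above:

static void geo_node_example_declare(NodeDeclarationBuilder &b)
{
  /* Hypothetical sockets: a float input and output that operate as fields. */
  b.add_input<decl::Float>("Value").is_field();
  b.add_output<decl::Float>("Result").is_field();
}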
@@ -181,6 +181,7 @@ class NodeRef : NonCopyable, NonMovable {

Span<const InputSocketRef *> inputs() const;
Span<const OutputSocketRef *> outputs() const;
Span<const SocketRef *> inputs_or_outputs(bool get_inputs) const;
Span<const InternalLinkRef *> internal_links() const;

const InputSocketRef &input(int index) const;

@@ -496,6 +497,12 @@ inline Span<const OutputSocketRef *> NodeRef::outputs() const
return outputs_;
}

inline Span<const SocketRef *> NodeRef::inputs_or_outputs(bool get_inputs) const
{
return get_inputs ? inputs_.as_span().cast<const SocketRef *>() :
outputs_.as_span().cast<const SocketRef *>();
}

inline Span<const InternalLinkRef *> NodeRef::internal_links() const
{
return internal_links_;
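A possible caller of the new inputs_or_outputs() accessor, sketched here only to illustrate why a single span type for both sides is convenient; the helper itself is not part of the change:

static void collect_socket_names(const NodeRef &node,
                                 const bool get_inputs,
                                 Vector<StringRefNull> &r_names)
{
  /* One loop body serves both input and output sockets. */
  for (const SocketRef *socket : node.inputs_or_outputs(get_inputs)) {
    r_names.append(socket->name());
  }
}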
@@ -173,6 +173,12 @@ class Bool : public SocketDeclaration {
public:
using Builder = BoolBuilder;

Bool &is_field(bool value)
{
is_field_ = value;
return *this;
}

bNodeSocket &build(bNodeTree &ntree, bNode &node, eNodeSocketInOut in_out) const override;
bool matches(const bNodeSocket &socket) const override;
};
@@ -345,6 +345,7 @@ DefNode(GeometryNode, GEO_NODE_MESH_PRIMITIVE_LINE, def_geo_mesh_line, "MESH_PRI
DefNode(GeometryNode, GEO_NODE_MESH_PRIMITIVE_UV_SPHERE, 0, "MESH_PRIMITIVE_UV_SPHERE", MeshUVSphere, "UV Sphere", "")
DefNode(GeometryNode, GEO_NODE_MESH_SUBDIVIDE, 0, "MESH_SUBDIVIDE", MeshSubdivide, "Mesh Subdivide", "")
DefNode(GeometryNode, GEO_NODE_OBJECT_INFO, def_geo_object_info, "OBJECT_INFO", ObjectInfo, "Object Info", "")
DefNode(GeometryNode, GEO_NODE_REALIZE_INSTANCES, 0, "REALIZE_INSTANCES", RealizeInstances, "Realize Instances", "")
DefNode(GeometryNode, GEO_NODE_SEPARATE_COMPONENTS, 0, "SEPARATE_COMPONENTS", SeparateComponents, "Separate Components", "")
DefNode(GeometryNode, GEO_NODE_SET_POSITION, 0, "SET_POSITION", SetPosition, "Set Position", "")
DefNode(GeometryNode, GEO_NODE_SUBDIVISION_SURFACE, def_geo_subdivision_surface, "SUBDIVISION_SURFACE", SubdivisionSurface, "Subdivision Surface", "")
@@ -36,6 +36,7 @@ static void node_composit_init_denonise(bNodeTree *UNUSED(ntree), bNode *node)
{
NodeDenoise *ndg = MEM_callocN(sizeof(NodeDenoise), "node denoise data");
ndg->hdr = true;
ndg->prefilter = CMP_NODE_DENOISE_PREFILTER_ACCURATE;
node->storage = ndg;
}
@@ -66,8 +66,8 @@ static void geo_node_attribute_capture_update(bNodeTree *UNUSED(ntree), bNode *n
node->storage;
const CustomDataType data_type = static_cast<CustomDataType>(storage.data_type);

bNodeSocket *socket_value_attribute_name = (bNodeSocket *)node->inputs.first;
bNodeSocket *socket_value_vector = socket_value_attribute_name->next;
bNodeSocket *socket_value_geometry = (bNodeSocket *)node->inputs.first;
bNodeSocket *socket_value_vector = socket_value_geometry->next;
bNodeSocket *socket_value_float = socket_value_vector->next;
bNodeSocket *socket_value_color4f = socket_value_float->next;
bNodeSocket *socket_value_boolean = socket_value_color4f->next;

@@ -79,8 +79,8 @@ static void geo_node_attribute_capture_update(bNodeTree *UNUSED(ntree), bNode *n
nodeSetSocketAvailability(socket_value_boolean, data_type == CD_PROP_BOOL);
nodeSetSocketAvailability(socket_value_int32, data_type == CD_PROP_INT32);

bNodeSocket *out_socket_value_attribute_name = (bNodeSocket *)node->outputs.first;
bNodeSocket *out_socket_value_vector = out_socket_value_attribute_name->next;
bNodeSocket *out_socket_value_geometry = (bNodeSocket *)node->outputs.first;
bNodeSocket *out_socket_value_vector = out_socket_value_geometry->next;
bNodeSocket *out_socket_value_float = out_socket_value_vector->next;
bNodeSocket *out_socket_value_color4f = out_socket_value_float->next;
bNodeSocket *out_socket_value_boolean = out_socket_value_color4f->next;
@@ -50,24 +50,24 @@ static void geo_node_curve_resample_init(bNodeTree *UNUSED(tree), bNode *node)
NodeGeometryCurveResample *data = (NodeGeometryCurveResample *)MEM_callocN(
sizeof(NodeGeometryCurveResample), __func__);

data->mode = GEO_NODE_CURVE_SAMPLE_COUNT;
data->mode = GEO_NODE_CURVE_RESAMPLE_COUNT;
node->storage = data;
}

static void geo_node_curve_resample_update(bNodeTree *UNUSED(ntree), bNode *node)
{
NodeGeometryCurveResample &node_storage = *(NodeGeometryCurveResample *)node->storage;
const GeometryNodeCurveSampleMode mode = (GeometryNodeCurveSampleMode)node_storage.mode;
const GeometryNodeCurveResampleMode mode = (GeometryNodeCurveResampleMode)node_storage.mode;

bNodeSocket *count_socket = ((bNodeSocket *)node->inputs.first)->next;
bNodeSocket *length_socket = count_socket->next;

nodeSetSocketAvailability(count_socket, mode == GEO_NODE_CURVE_SAMPLE_COUNT);
nodeSetSocketAvailability(length_socket, mode == GEO_NODE_CURVE_SAMPLE_LENGTH);
nodeSetSocketAvailability(count_socket, mode == GEO_NODE_CURVE_RESAMPLE_COUNT);
nodeSetSocketAvailability(length_socket, mode == GEO_NODE_CURVE_RESAMPLE_LENGTH);
}

struct SampleModeParam {
GeometryNodeCurveSampleMode mode;
GeometryNodeCurveResampleMode mode;
std::optional<float> length;
std::optional<int> count;
};

@@ -172,7 +172,7 @@ static std::unique_ptr<CurveEval> resample_curve(const CurveEval &input_curve,
output_curve->resize(input_splines.size());
MutableSpan<SplinePtr> output_splines = output_curve->splines();

if (mode_param.mode == GEO_NODE_CURVE_SAMPLE_COUNT) {
if (mode_param.mode == GEO_NODE_CURVE_RESAMPLE_COUNT) {
threading::parallel_for(input_splines.index_range(), 128, [&](IndexRange range) {
for (const int i : range) {
BLI_assert(mode_param.count);

@@ -180,7 +180,7 @@ static std::unique_ptr<CurveEval> resample_curve(const CurveEval &input_curve,
}
});
}
else if (mode_param.mode == GEO_NODE_CURVE_SAMPLE_LENGTH) {
else if (mode_param.mode == GEO_NODE_CURVE_RESAMPLE_LENGTH) {
threading::parallel_for(input_splines.index_range(), 128, [&](IndexRange range) {
for (const int i : range) {
const float length = input_splines[i]->length();

@@ -189,7 +189,7 @@ static std::unique_ptr<CurveEval> resample_curve(const CurveEval &input_curve,
}
});
}
else if (mode_param.mode == GEO_NODE_CURVE_SAMPLE_EVALUATED) {
else if (mode_param.mode == GEO_NODE_CURVE_RESAMPLE_EVALUATED) {
threading::parallel_for(input_splines.index_range(), 128, [&](IndexRange range) {
for (const int i : range) {
output_splines[i] = resample_spline_evaluated(*input_splines[i]);

@@ -215,10 +215,10 @@ static void geo_node_resample_exec(GeoNodeExecParams params)

const CurveEval &input_curve = *geometry_set.get_curve_for_read();
NodeGeometryCurveResample &node_storage = *(NodeGeometryCurveResample *)params.node().storage;
const GeometryNodeCurveSampleMode mode = (GeometryNodeCurveSampleMode)node_storage.mode;
const GeometryNodeCurveResampleMode mode = (GeometryNodeCurveResampleMode)node_storage.mode;
SampleModeParam mode_param;
mode_param.mode = mode;
if (mode == GEO_NODE_CURVE_SAMPLE_COUNT) {
if (mode == GEO_NODE_CURVE_RESAMPLE_COUNT) {
const int count = params.extract_input<int>("Count");
if (count < 1) {
params.set_output("Geometry", GeometrySet());

@@ -226,7 +226,7 @@ static void geo_node_resample_exec(GeoNodeExecParams params)
}
mode_param.count.emplace(count);
}
else if (mode == GEO_NODE_CURVE_SAMPLE_LENGTH) {
else if (mode == GEO_NODE_CURVE_RESAMPLE_LENGTH) {
/* Don't allow asymptotic count increase for low resolution values. */
const float resolution = std::max(params.extract_input<float>("Length"), 0.0001f);
mode_param.length.emplace(resolution);
@@ -339,7 +339,7 @@ struct ResultAttributes {

/**
* Result attributes corresponding the attributes on the profile input, in the same order. The
* attributes are optional in case the attribute names correspond to a namse used by the curve
* attributes are optional in case the attribute names correspond to a names used by the curve
* input, in which case the curve input attributes take precedence.
*/
Vector<std::optional<ResultAttributeData>> profile_point_attributes;
@@ -46,20 +46,20 @@ static void geo_node_curve_to_points_init(bNodeTree *UNUSED(tree), bNode *node)
NodeGeometryCurveToPoints *data = (NodeGeometryCurveToPoints *)MEM_callocN(
sizeof(NodeGeometryCurveToPoints), __func__);

data->mode = GEO_NODE_CURVE_SAMPLE_COUNT;
data->mode = GEO_NODE_CURVE_RESAMPLE_COUNT;
node->storage = data;
}

static void geo_node_curve_to_points_update(bNodeTree *UNUSED(ntree), bNode *node)
{
NodeGeometryCurveToPoints &node_storage = *(NodeGeometryCurveToPoints *)node->storage;
const GeometryNodeCurveSampleMode mode = (GeometryNodeCurveSampleMode)node_storage.mode;
const GeometryNodeCurveResampleMode mode = (GeometryNodeCurveResampleMode)node_storage.mode;

bNodeSocket *count_socket = ((bNodeSocket *)node->inputs.first)->next;
bNodeSocket *length_socket = count_socket->next;

nodeSetSocketAvailability(count_socket, mode == GEO_NODE_CURVE_SAMPLE_COUNT);
nodeSetSocketAvailability(length_socket, mode == GEO_NODE_CURVE_SAMPLE_LENGTH);
nodeSetSocketAvailability(count_socket, mode == GEO_NODE_CURVE_RESAMPLE_COUNT);
nodeSetSocketAvailability(length_socket, mode == GEO_NODE_CURVE_RESAMPLE_LENGTH);
}

/**

@@ -77,13 +77,13 @@ static void evaluate_splines(Span<SplinePtr> splines)
}

static Array<int> calculate_spline_point_offsets(GeoNodeExecParams &params,
const GeometryNodeCurveSampleMode mode,
const GeometryNodeCurveResampleMode mode,
const CurveEval &curve,
const Span<SplinePtr> splines)
{
const int size = curve.splines().size();
switch (mode) {
case GEO_NODE_CURVE_SAMPLE_COUNT: {
case GEO_NODE_CURVE_RESAMPLE_COUNT: {
const int count = params.extract_input<int>("Count");
if (count < 1) {
return {0};

@@ -94,7 +94,7 @@ static Array<int> calculate_spline_point_offsets(GeoNodeExecParams &params,
}
return offsets;
}
case GEO_NODE_CURVE_SAMPLE_LENGTH: {
case GEO_NODE_CURVE_RESAMPLE_LENGTH: {
/* Don't allow asymptotic count increase for low resolution values. */
const float resolution = std::max(params.extract_input<float>("Length"), 0.0001f);
Array<int> offsets(size + 1);

@@ -106,7 +106,7 @@ static Array<int> calculate_spline_point_offsets(GeoNodeExecParams &params,
offsets.last() = offset;
return offsets;
}
case GEO_NODE_CURVE_SAMPLE_EVALUATED: {
case GEO_NODE_CURVE_RESAMPLE_EVALUATED: {
return curve.evaluated_point_offsets();
}
}

@@ -301,7 +301,7 @@ void curve_create_default_rotation_attribute(Span<float3> tangents,
static void geo_node_curve_to_points_exec(GeoNodeExecParams params)
{
NodeGeometryCurveToPoints &node_storage = *(NodeGeometryCurveToPoints *)params.node().storage;
const GeometryNodeCurveSampleMode mode = (GeometryNodeCurveSampleMode)node_storage.mode;
const GeometryNodeCurveResampleMode mode = (GeometryNodeCurveResampleMode)node_storage.mode;
GeometrySet geometry_set = params.extract_input<GeometrySet>("Geometry");

geometry_set = bke::geometry_set_realize_instances(geometry_set);

@@ -331,11 +331,11 @@ static void geo_node_curve_to_points_exec(GeoNodeExecParams params)
CurveToPointsResults new_attributes = curve_to_points_create_result_attributes(point_component,
curve);
switch (mode) {
case GEO_NODE_CURVE_SAMPLE_COUNT:
case GEO_NODE_CURVE_SAMPLE_LENGTH:
case GEO_NODE_CURVE_RESAMPLE_COUNT:
case GEO_NODE_CURVE_RESAMPLE_LENGTH:
copy_uniform_sample_point_attributes(splines, offsets, new_attributes);
break;
case GEO_NODE_CURVE_SAMPLE_EVALUATED:
case GEO_NODE_CURVE_RESAMPLE_EVALUATED:
copy_evaluated_point_attributes(splines, offsets, new_attributes);
break;
}
@@ -30,9 +30,9 @@ static void geo_node_curve_trim_declare(NodeDeclarationBuilder &b)
{
b.add_input<decl::Geometry>("Curve");
b.add_input<decl::Float>("Start").min(0.0f).max(1.0f).subtype(PROP_FACTOR);
b.add_input<decl::Float>("End").min(0.0f).max(1.0f).subtype(PROP_FACTOR);
b.add_input<decl::Float>("End").min(0.0f).max(1.0f).default_value(1.0f).subtype(PROP_FACTOR);
b.add_input<decl::Float>("Start", "Start_001").min(0.0f).subtype(PROP_DISTANCE);
b.add_input<decl::Float>("End", "End_001").min(0.0f).subtype(PROP_DISTANCE);
b.add_input<decl::Float>("End", "End_001").min(0.0f).default_value(1.0f).subtype(PROP_DISTANCE);
b.add_output<decl::Geometry>("Curve");
}

@@ -46,25 +46,24 @@ static void geo_node_curve_trim_init(bNodeTree *UNUSED(tree), bNode *node)
NodeGeometryCurveTrim *data = (NodeGeometryCurveTrim *)MEM_callocN(sizeof(NodeGeometryCurveTrim),
__func__);

data->mode = GEO_NODE_CURVE_INTERPOLATE_FACTOR;
data->mode = GEO_NODE_CURVE_SAMPLE_FACTOR;
node->storage = data;
}

static void geo_node_curve_trim_update(bNodeTree *UNUSED(ntree), bNode *node)
{
const NodeGeometryCurveTrim &node_storage = *(NodeGeometryCurveTrim *)node->storage;
const GeometryNodeCurveInterpolateMode mode = (GeometryNodeCurveInterpolateMode)
node_storage.mode;
const GeometryNodeCurveSampleMode mode = (GeometryNodeCurveSampleMode)node_storage.mode;

bNodeSocket *start_fac = ((bNodeSocket *)node->inputs.first)->next;
bNodeSocket *end_fac = start_fac->next;
bNodeSocket *start_len = end_fac->next;
bNodeSocket *end_len = start_len->next;

nodeSetSocketAvailability(start_fac, mode == GEO_NODE_CURVE_INTERPOLATE_FACTOR);
nodeSetSocketAvailability(end_fac, mode == GEO_NODE_CURVE_INTERPOLATE_FACTOR);
nodeSetSocketAvailability(start_len, mode == GEO_NODE_CURVE_INTERPOLATE_LENGTH);
nodeSetSocketAvailability(end_len, mode == GEO_NODE_CURVE_INTERPOLATE_LENGTH);
nodeSetSocketAvailability(start_fac, mode == GEO_NODE_CURVE_SAMPLE_FACTOR);
nodeSetSocketAvailability(end_fac, mode == GEO_NODE_CURVE_SAMPLE_FACTOR);
nodeSetSocketAvailability(start_len, mode == GEO_NODE_CURVE_SAMPLE_LENGTH);
nodeSetSocketAvailability(end_len, mode == GEO_NODE_CURVE_SAMPLE_LENGTH);
}

struct TrimLocation {

@@ -324,8 +323,7 @@ static void trim_bezier_spline(Spline &spline,
static void geo_node_curve_trim_exec(GeoNodeExecParams params)
{
const NodeGeometryCurveTrim &node_storage = *(NodeGeometryCurveTrim *)params.node().storage;
const GeometryNodeCurveInterpolateMode mode = (GeometryNodeCurveInterpolateMode)
node_storage.mode;
const GeometryNodeCurveSampleMode mode = (GeometryNodeCurveSampleMode)node_storage.mode;

GeometrySet geometry_set = params.extract_input<GeometrySet>("Curve");
geometry_set = bke::geometry_set_realize_instances(geometry_set);

@@ -338,12 +336,11 @@ static void geo_node_curve_trim_exec(GeoNodeExecParams params)
CurveEval &curve = *curve_component.get_for_write();
MutableSpan<SplinePtr> splines = curve.splines();

const float start = mode == GEO_NODE_CURVE_INTERPOLATE_FACTOR ?
const float start = mode == GEO_NODE_CURVE_SAMPLE_FACTOR ?
params.extract_input<float>("Start") :
params.extract_input<float>("Start_001");
const float end = mode == GEO_NODE_CURVE_INTERPOLATE_FACTOR ?
params.extract_input<float>("End") :
params.extract_input<float>("End_001");
const float end = mode == GEO_NODE_CURVE_SAMPLE_FACTOR ? params.extract_input<float>("End") :
params.extract_input<float>("End_001");

threading::parallel_for(splines.index_range(), 128, [&](IndexRange range) {
for (const int i : range) {

@@ -362,11 +359,11 @@ static void geo_node_curve_trim_exec(GeoNodeExecParams params)
}

const Spline::LookupResult start_lookup =
(mode == GEO_NODE_CURVE_INTERPOLATE_LENGTH) ?
(mode == GEO_NODE_CURVE_SAMPLE_LENGTH) ?
spline.lookup_evaluated_length(std::clamp(start, 0.0f, spline.length())) :
spline.lookup_evaluated_factor(std::clamp(start, 0.0f, 1.0f));
const Spline::LookupResult end_lookup =
(mode == GEO_NODE_CURVE_INTERPOLATE_LENGTH) ?
(mode == GEO_NODE_CURVE_SAMPLE_LENGTH) ?
spline.lookup_evaluated_length(std::clamp(end, 0.0f, spline.length())) :
spline.lookup_evaluated_factor(std::clamp(end, 0.0f, 1.0f));
@@ -20,7 +20,7 @@ namespace blender::nodes {

static void geo_node_input_index_declare(NodeDeclarationBuilder &b)
{
b.add_output<decl::Int>("Index");
b.add_output<decl::Int>("Index").is_field();
}

class IndexFieldInput final : public fn::FieldInput {
@@ -25,7 +25,7 @@ namespace blender::nodes {

static void geo_node_input_normal_declare(NodeDeclarationBuilder &b)
{
b.add_output<decl::Vector>("Normal");
b.add_output<decl::Vector>("Normal").is_field();
}

static GVArrayPtr mesh_face_normals(const Mesh &mesh,
@@ -20,7 +20,7 @@ namespace blender::nodes {

static void geo_node_input_position_declare(NodeDeclarationBuilder &b)
{
b.add_output<decl::Vector>("Position");
b.add_output<decl::Vector>("Position").is_field();
}

static void geo_node_input_position_exec(GeoNodeExecParams params)
@@ -0,0 +1,48 @@
/*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/

#include "node_geometry_util.hh"

#include "UI_interface.h"
#include "UI_resources.h"

namespace blender::nodes {

static void geo_node_realize_instances_declare(NodeDeclarationBuilder &b)
{
b.add_input<decl::Geometry>("Geometry");
b.add_output<decl::Geometry>("Geometry");
}

static void geo_node_realize_instances_exec(GeoNodeExecParams params)
{
GeometrySet geometry_set = params.extract_input<GeometrySet>("Geometry");
geometry_set = bke::geometry_set_realize_instances(geometry_set);
params.set_output("Geometry", std::move(geometry_set));
}

} // namespace blender::nodes

void register_node_type_geo_realize_instances()
{
static bNodeType ntype;

geo_node_type_base(
&ntype, GEO_NODE_REALIZE_INSTANCES, "Realize Instances", NODE_CLASS_GEOMETRY, 0);
ntype.declare = blender::nodes::geo_node_realize_instances_declare;
ntype.geometry_node_execute = blender::nodes::geo_node_realize_instances_exec;
nodeRegisterType(&ntype);
}
@@ -23,8 +23,8 @@ namespace blender::nodes {
static void geo_node_set_position_declare(NodeDeclarationBuilder &b)
{
b.add_input<decl::Geometry>("Geometry");
b.add_input<decl::Vector>("Position");
b.add_input<decl::Bool>("Selection").default_value(true).hide_value();
b.add_input<decl::Vector>("Position").is_field();
b.add_input<decl::Bool>("Selection").default_value(true).hide_value().is_field();
b.add_output<decl::Geometry>("Geometry");
}

@@ -51,12 +51,13 @@ static void set_position_in_component(GeometryComponent &component,
static void geo_node_set_position_exec(GeoNodeExecParams params)
{
GeometrySet geometry = params.extract_input<GeometrySet>("Geometry");
geometry = geometry_set_realize_instances(geometry);
Field<bool> selection_field = params.extract_input<Field<bool>>("Selection");
Field<float3> position_field = params.extract_input<Field<float3>>("Position");

for (const GeometryComponentType type :
{GEO_COMPONENT_TYPE_MESH, GEO_COMPONENT_TYPE_POINT_CLOUD, GEO_COMPONENT_TYPE_CURVE}) {
for (const GeometryComponentType type : {GEO_COMPONENT_TYPE_MESH,
GEO_COMPONENT_TYPE_POINT_CLOUD,
GEO_COMPONENT_TYPE_CURVE,
GEO_COMPONENT_TYPE_INSTANCES}) {
if (geometry.has(type)) {
set_position_in_component(
geometry.get_component_for_write(type), selection_field, position_field);
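The component loop above could also be written as a small generic helper; the helper name and its template parameter are assumptions made for illustration, while the component types and accessors are exactly the ones used by the node:

template<typename Fn> static void for_each_present_component(GeometrySet &geometry, Fn fn)
{
  /* Visit every component type the Set Position node now handles, including instances. */
  for (const GeometryComponentType type : {GEO_COMPONENT_TYPE_MESH,
                                           GEO_COMPONENT_TYPE_POINT_CLOUD,
                                           GEO_COMPONENT_TYPE_CURVE,
                                           GEO_COMPONENT_TYPE_INSTANCES}) {
    if (geometry.has(type)) {
      fn(geometry.get_component_for_write(type));
    }
  }
}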
@@ -19,6 +19,8 @@

#include "../node_shader_util.h"

#include "BLI_noise.hh"

/* **************** NOISE ******************** */

static bNodeSocketTemplate sh_node_tex_noise_in[] = {

@@ -90,18 +92,173 @@ static void node_shader_update_tex_noise(bNodeTree *UNUSED(ntree), bNode *node)
nodeSetSocketAvailability(sockW, tex->dimensions == 1 || tex->dimensions == 4);
}

namespace blender::nodes {

class NoiseFunction : public fn::MultiFunction {
private:
int dimensions_;

public:
NoiseFunction(int dimensions) : dimensions_(dimensions)
{
BLI_assert(dimensions >= 1 && dimensions <= 4);
static std::array<fn::MFSignature, 4> signatures{
create_signature(1),
create_signature(2),
create_signature(3),
create_signature(4),
};
this->set_signature(&signatures[dimensions - 1]);
}

static fn::MFSignature create_signature(int dimensions)
{
fn::MFSignatureBuilder signature{"Noise"};

if (ELEM(dimensions, 2, 3, 4)) {
signature.single_input<float3>("Vector");
}
if (ELEM(dimensions, 1, 4)) {
signature.single_input<float>("W");
}

signature.single_input<float>("Scale");
signature.single_input<float>("Detail");
signature.single_input<float>("Roughness");
signature.single_input<float>("Distortion");

signature.single_output<float>("Fac");
signature.single_output<ColorGeometry4f>("Color");

return signature.build();
}

void call(IndexMask mask, fn::MFParams params, fn::MFContext UNUSED(context)) const override
{
int param = ELEM(dimensions_, 2, 3, 4) + ELEM(dimensions_, 1, 4);
const VArray<float> &scale = params.readonly_single_input<float>(param++, "Scale");
const VArray<float> &detail = params.readonly_single_input<float>(param++, "Detail");
const VArray<float> &roughness = params.readonly_single_input<float>(param++, "Roughness");
const VArray<float> &distortion = params.readonly_single_input<float>(param++, "Distortion");

MutableSpan<float> r_factor = params.uninitialized_single_output_if_required<float>(param++,
"Fac");
MutableSpan<ColorGeometry4f> r_color =
params.uninitialized_single_output_if_required<ColorGeometry4f>(param++, "Color");

const bool compute_factor = !r_factor.is_empty();
const bool compute_color = !r_color.is_empty();

switch (dimensions_) {
case 1: {
const VArray<float> &w = params.readonly_single_input<float>(0, "W");
if (compute_factor) {
for (int64_t i : mask) {
const float position = w[i] * scale[i];
r_factor[i] = noise::perlin_fractal_distorted(
position, detail[i], roughness[i], distortion[i]);
}
}
if (compute_color) {
for (int64_t i : mask) {
const float position = w[i] * scale[i];
const float3 c = noise::perlin_float3_fractal_distorted(
position, detail[i], roughness[i], distortion[i]);
r_color[i] = ColorGeometry4f(c[0], c[1], c[2], 1.0f);
}
}
break;
}
case 2: {
const VArray<float3> &vector = params.readonly_single_input<float3>(0, "Vector");
if (compute_factor) {
for (int64_t i : mask) {
const float2 position = vector[i] * scale[i];
r_factor[i] = noise::perlin_fractal_distorted(
position, detail[i], roughness[i], distortion[i]);
}
}
if (compute_color) {
for (int64_t i : mask) {
const float2 position = vector[i] * scale[i];
const float3 c = noise::perlin_float3_fractal_distorted(
position, detail[i], roughness[i], distortion[i]);
r_color[i] = ColorGeometry4f(c[0], c[1], c[2], 1.0f);
}
}
break;
}
case 3: {
const VArray<float3> &vector = params.readonly_single_input<float3>(0, "Vector");
if (compute_factor) {
for (int64_t i : mask) {
const float3 position = vector[i] * scale[i];
r_factor[i] = noise::perlin_fractal_distorted(
position, detail[i], roughness[i], distortion[i]);
}
}
if (compute_color) {
for (int64_t i : mask) {
const float3 position = vector[i] * scale[i];
const float3 c = noise::perlin_float3_fractal_distorted(
position, detail[i], roughness[i], distortion[i]);
r_color[i] = ColorGeometry4f(c[0], c[1], c[2], 1.0f);
}
}
break;
}
case 4: {
const VArray<float3> &vector = params.readonly_single_input<float3>(0, "Vector");
const VArray<float> &w = params.readonly_single_input<float>(1, "W");
if (compute_factor) {
for (int64_t i : mask) {
const float3 position_vector = vector[i] * scale[i];
const float position_w = w[i] * scale[i];
const float4 position{
position_vector[0], position_vector[1], position_vector[2], position_w};
r_factor[i] = noise::perlin_fractal_distorted(
position, detail[i], roughness[i], distortion[i]);
}
}
if (compute_color) {
for (int64_t i : mask) {
const float3 position_vector = vector[i] * scale[i];
const float position_w = w[i] * scale[i];
const float4 position{
position_vector[0], position_vector[1], position_vector[2], position_w};
const float3 c = noise::perlin_float3_fractal_distorted(
position, detail[i], roughness[i], distortion[i]);
r_color[i] = ColorGeometry4f(c[0], c[1], c[2], 1.0f);
}
}
break;
}
}
}
};

static void sh_node_noise_build_multi_function(blender::nodes::NodeMultiFunctionBuilder &builder)
{
bNode &node = builder.node();
NodeTexNoise *tex = (NodeTexNoise *)node.storage;
builder.construct_and_set_matching_fn<NoiseFunction>(tex->dimensions);
}

} // namespace blender::nodes

/* node type definition */
void register_node_type_sh_tex_noise(void)
{
static bNodeType ntype;

sh_node_type_base(&ntype, SH_NODE_TEX_NOISE, "Noise Texture", NODE_CLASS_TEXTURE, 0);
sh_fn_node_type_base(&ntype, SH_NODE_TEX_NOISE, "Noise Texture", NODE_CLASS_TEXTURE, 0);
node_type_socket_templates(&ntype, sh_node_tex_noise_in, sh_node_tex_noise_out);
node_type_init(&ntype, node_shader_init_tex_noise);
node_type_storage(
&ntype, "NodeTexNoise", node_free_standard_storage, node_copy_standard_storage);
node_type_gpu(&ntype, node_shader_gpu_tex_noise);
node_type_update(&ntype, node_shader_update_tex_noise);
ntype.build_multi_function = blender::nodes::sh_node_noise_build_multi_function;

nodeRegisterType(&ntype);
}
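A note on the parameter indexing in NoiseFunction::call: ELEM(dimensions_, 2, 3, 4) + ELEM(dimensions_, 1, 4) is 1 for the 1D case (only "W"), 1 for 2D/3D (only "Vector") and 2 for 4D ("Vector" and "W"), so the Scale/Detail/Roughness/Distortion inputs always follow the positional ones. For readers unfamiliar with the multi-function API, a stripped-down function following the same signature/call pattern might look like this (the function itself is hypothetical, not part of the change):

class AddConstantFunction : public fn::MultiFunction {
 private:
  float constant_;

 public:
  AddConstantFunction(float constant) : constant_(constant)
  {
    /* Build the signature once and reuse it, as NoiseFunction does. */
    static fn::MFSignature signature = create_signature();
    this->set_signature(&signature);
  }

  static fn::MFSignature create_signature()
  {
    fn::MFSignatureBuilder signature{"Add Constant"};
    signature.single_input<float>("Value");
    signature.single_output<float>("Result");
    return signature.build();
  }

  void call(IndexMask mask, fn::MFParams params, fn::MFContext UNUSED(context)) const override
  {
    /* Parameter indices follow the order used in create_signature(). */
    const VArray<float> &values = params.readonly_single_input<float>(0, "Value");
    MutableSpan<float> results = params.uninitialized_single_output<float>(1, "Result");
    for (int64_t i : mask) {
      results[i] = values[i] + constant_;
    }
  }
};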
@@ -88,7 +88,7 @@ struct Sequence *SEQ_add_movie_strip(struct Main *bmain,
struct Scene *scene,
struct ListBase *seqbase,
struct SeqLoadData *load_data,
double *r_video_start_offset);
double *r_start_offset);
struct Sequence *SEQ_add_scene_strip(struct Scene *scene,
struct ListBase *seqbase,
struct SeqLoadData *load_data);
@@ -403,26 +403,8 @@ Sequence *SEQ_add_sound_strip(Main *bmain,
return NULL;
}

/* If this sound it part of a video, then the sound might start after the video.
* In this case we need to then offset the start frame of the audio so it syncs up
* properly with the video.
*/
int start_frame_offset = info.start_offset * FPS;
double start_frame_offset_remainer = (info.start_offset * FPS - start_frame_offset) / FPS;

if (start_frame_offset_remainer > FLT_EPSILON) {
/* We can't represent a fraction of a frame, so skip the first frame fraction of sound so we
* start on a "whole" frame.
*/
start_frame_offset++;
}

sound->offset_time += start_frame_offset_remainer;

Sequence *seq = SEQ_sequence_alloc(seqbase,
load_data->start_frame + start_frame_offset,
load_data->channel,
SEQ_TYPE_SOUND_RAM);
Sequence *seq = SEQ_sequence_alloc(
seqbase, load_data->start_frame, load_data->channel, SEQ_TYPE_SOUND_RAM);
seq->sound = sound;
seq->scene_sound = NULL;

@@ -508,7 +490,7 @@ Sequence *SEQ_add_movie_strip(Main *bmain,
Scene *scene,
ListBase *seqbase,
SeqLoadData *load_data,
double *r_video_start_offset)
double *r_start_offset)
{
char path[sizeof(load_data->path)];
BLI_strncpy(path, load_data->path, sizeof(path));

@@ -554,8 +536,40 @@ Sequence *SEQ_add_movie_strip(Main *bmain,
return NULL;
}

int video_frame_offset = 0;
float video_fps = 0.0f;

if (anim_arr[0] != NULL) {
short fps_denom;
float fps_num;

IMB_anim_get_fps(anim_arr[0], &fps_denom, &fps_num, true);

video_fps = fps_denom / fps_num;

/* Adjust scene's frame rate settings to match. */
if (load_data->flags & SEQ_LOAD_MOVIE_SYNC_FPS) {
scene->r.frs_sec = fps_denom;
scene->r.frs_sec_base = fps_num;
}

double video_start_offset = IMD_anim_get_offset(anim_arr[0]);
int minimum_frame_offset;

if (*r_start_offset >= 0) {
minimum_frame_offset = MIN2(video_start_offset, *r_start_offset) * FPS;
}
else {
minimum_frame_offset = video_start_offset * FPS;
}

video_frame_offset = video_start_offset * FPS - minimum_frame_offset;

*r_start_offset = video_start_offset;
}

Sequence *seq = SEQ_sequence_alloc(
seqbase, load_data->start_frame, load_data->channel, SEQ_TYPE_MOVIE);
seqbase, load_data->start_frame + video_frame_offset, load_data->channel, SEQ_TYPE_MOVIE);

/* Multiview settings. */
if (load_data->use_multiview) {

@@ -579,27 +593,11 @@ Sequence *SEQ_add_movie_strip(Main *bmain,

seq->blend_mode = SEQ_TYPE_CROSS; /* so alpha adjustment fade to the strip below */

float video_fps = 0.0f;

if (anim_arr[0] != NULL) {
seq->len = IMB_anim_get_duration(anim_arr[0], IMB_TC_RECORD_RUN);
*r_video_start_offset = IMD_anim_get_offset(anim_arr[0]);

IMB_anim_load_metadata(anim_arr[0]);

short fps_denom;
float fps_num;

IMB_anim_get_fps(anim_arr[0], &fps_denom, &fps_num, true);

video_fps = fps_denom / fps_num;

/* Adjust scene's frame rate settings to match. */
if (load_data->flags & SEQ_LOAD_MOVIE_SYNC_FPS) {
scene->r.frs_sec = fps_denom;
scene->r.frs_sec_base = fps_num;
}

/* Set initial scale based on load_data->fit_method. */
orig_width = IMB_anim_get_image_width(anim_arr[0]);
orig_height = IMB_anim_get_image_height(anim_arr[0]);
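To make the new offset handling in SEQ_add_movie_strip concrete, a worked example with assumed numbers (the values are illustrative, not from the patch):

/* Assume a 24 fps scene, a video stream that starts 0.5 s into the container,
 * and *r_start_offset arriving as 0.2 s (the offset already seen for the file's
 * audio stream; a negative value, as initialized by rna_Sequences_new_movie,
 * means no other offset is known yet). Then:
 *
 *   minimum_frame_offset = (int)(MIN2(0.5, 0.2) * 24) = 4
 *   video_frame_offset   = (int)(0.5 * 24) - 4        = 8
 *
 * so the movie strip is allocated at load_data->start_frame + 8, and
 * *r_start_offset is updated to 0.5 for the caller. */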