
/*
* ***** BEGIN GPL LICENSE BLOCK *****
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* The Original Code is Copyright (C) 2009 Blender Foundation.
* All rights reserved.
*
*
* Contributor(s): Arystanbek Dyussenov
*
* ***** END GPL LICENSE BLOCK *****
*/
/** \file blender/makesrna/intern/rna_image_api.c
* \ingroup RNA
*/
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <time.h>
#include "DNA_packedFile_types.h"
#include "BLI_utildefines.h"
#include "BLI_path_util.h"
#include "BIF_gl.h"
#include "RNA_define.h"
#include "RNA_enum_types.h"
#include "rna_internal.h" /* own include */
#ifdef RNA_RUNTIME
#include "BKE_image.h"
#include "BKE_packedFile.h"
#include "BKE_main.h"
#include "IMB_imbuf.h"
#include "IMB_colormanagement.h"
#include "GPU_draw.h"
#include "GPU_debug.h"
#include "DNA_image_types.h"
#include "DNA_scene_types.h"
2009-07-24 15:27:59 +00:00
#include "MEM_guardedalloc.h"
static void rna_ImagePackedFile_save(ImagePackedFile *imapf, ReportList *reports)
{
if (writePackedFile(reports, imapf->filepath, imapf->packedfile, 0) != RET_OK) {
BKE_reportf(reports, RPT_ERROR, "Image could not save packed file to '%s'",
imapf->filepath);
}
}
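
/* Save the image to 'path', applying the scene's color management and render
 * format settings (the scene from context is used when none is passed in). */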
static void rna_Image_save_render(Image *image, bContext *C, ReportList *reports, const char *path, Scene *scene)
{
ImBuf *ibuf;
if (scene == NULL) {
scene = CTX_data_scene(C);
}
if (scene) {
ImageUser iuser = {NULL};
void *lock;
iuser.scene = scene;
iuser.ok = 1;
ibuf = BKE_image_acquire_ibuf(image, &iuser, &lock);
if (ibuf == NULL) {
BKE_report(reports, RPT_ERROR, "Could not acquire buffer from image");
}
else {
ImBuf *write_ibuf;
write_ibuf = IMB_colormanagement_imbuf_for_write(ibuf, true, true, &scene->view_settings,
&scene->display_settings, &scene->r.im_format);
write_ibuf->planes = scene->r.im_format.planes;
write_ibuf->dither = scene->r.dither_intensity;
if (!BKE_imbuf_write(write_ibuf, path, &scene->r.im_format)) {
BKE_reportf(reports, RPT_ERROR, "Could not write image '%s'", path);
}
if (write_ibuf != ibuf)
IMB_freeImBuf(write_ibuf);
}
BKE_image_release_ibuf(image, ibuf, lock);
}
else {
BKE_report(reports, RPT_ERROR, "Scene not in context, could not get save parameters");
}
}
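
/* Save the image to its source path on disk, using the image buffer's own
 * file-format flags. */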
static void rna_Image_save(Image *image, Main *bmain, bContext *C, ReportList *reports)
{
ImBuf *ibuf = BKE_image_acquire_ibuf(image, NULL, NULL);
if (ibuf) {
char filename[FILE_MAX];
BLI_strncpy(filename, image->name, sizeof(filename));
BLI_path_abs(filename, ID_BLEND_PATH(bmain, &image->id));
/* note, we purposefully ignore packed files here,
* developers need to explicitly write them via 'packed_files' */
if (IMB_saveiff(ibuf, filename, ibuf->flags)) {
image->type = IMA_TYPE_IMAGE;
if (image->source == IMA_SRC_GENERATED)
image->source = IMA_SRC_FILE;
IMB_colormanagment_colorspace_from_ibuf_ftype(&image->colorspace_settings, ibuf);
ibuf->userflags &= ~IB_BITMAPDIRTY;
}
else {
BKE_reportf(reports, RPT_ERROR, "Image '%s' could not be saved to '%s'", image->id.name + 2, image->name);
}
}
else {
BKE_reportf(reports, RPT_ERROR, "Image '%s' does not have any image data", image->id.name + 2);
}
BKE_image_release_ibuf(image, ibuf, NULL);
WM_event_add_notifier(C, NC_IMAGE | NA_EDITED, image);
}
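
/* Pack the image into the .blend file: from the given raw 'data', as an
 * in-memory PNG when 'as_png' is set, or from the image's file on disk. */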
static void rna_Image_pack(
Image *image, Main *bmain, bContext *C, ReportList *reports,
int as_png, const char *data, int data_len)
{
ImBuf *ibuf = BKE_image_acquire_ibuf(image, NULL, NULL);
if (!as_png && (ibuf && (ibuf->userflags & IB_BITMAPDIRTY))) {
BKE_report(reports, RPT_ERROR, "Cannot pack edited image from disk, only as internal PNG");
}
else {
BKE_image_free_packedfiles(image);
if (as_png) {
BKE_image_memorypack(image);
}
else if (data) {
char *data_dup = MEM_mallocN(sizeof(*data_dup) * (size_t)data_len, __func__);
memcpy(data_dup, data, (size_t)data_len);
BKE_image_packfiles_from_mem(reports, image, data_dup, (size_t)data_len);
}
else {
BKE_image_packfiles(reports, image, ID_BLEND_PATH(bmain, &image->id));
}
}
BKE_image_release_ibuf(image, ibuf, NULL);
WM_event_add_notifier(C, NC_IMAGE | NA_EDITED, image);
}
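
/* Unpack the image to disk with the given method; movies and image sequences
 * cannot be unpacked. */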
static void rna_Image_unpack(Image *image, ReportList *reports, int method)
{
if (!BKE_image_has_packedfile(image)) {
BKE_report(reports, RPT_ERROR, "Image not packed");
}
else if (BKE_image_is_animated(image)) {
BKE_report(reports, RPT_ERROR, "Unpacking movies or image sequences not supported");
return;
}
else {
/* reports its own error on failure */
unpackImage(reports, image, method);
}
}
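
/* Signal the image to reload its contents from the source path. */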
static void rna_Image_reload(Image *image)
{
BKE_image_signal(image, NULL, IMA_SIGNAL_RELOAD);
}
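
/* Refresh the byte (display) buffer from the float buffer when one exists and
 * tag the display buffer as invalid so it gets rebuilt. */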
static void rna_Image_update(Image *image, ReportList *reports)
{
ImBuf *ibuf = BKE_image_acquire_ibuf(image, NULL, NULL);
if (ibuf == NULL) {
BKE_reportf(reports, RPT_ERROR, "Image '%s' does not have any image data", image->id.name + 2);
return;
}
if (ibuf->rect)
IMB_rect_from_float(ibuf);
ibuf->userflags |= IB_DISPLAY_BUFFER_INVALID;
BKE_image_release_ibuf(image, ibuf, NULL);
}
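
/* Scale the image to the given width/height in pixels. */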
static void rna_Image_scale(Image *image, ReportList *reports, int width, int height)
{
if (!BKE_image_scale(image, width, height)) {
BKE_reportf(reports, RPT_ERROR, "Image '%s' does not have any image data", image->id.name + 2);
}
}
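
/* Upload the image into an OpenGL texture (bindcode stored on the image) and
 * return the resulting GL error code, GL_NO_ERROR on success. */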
static int rna_Image_gl_load(Image *image, ReportList *reports, int frame, int filter, int mag)
{
ImBuf *ibuf;
unsigned int *bind = &image->bindcode;
int error = GL_NO_ERROR;
ImageUser iuser = {NULL};
void *lock;
if (*bind)
return error;
iuser.framenr = frame;
iuser.ok = true;
ibuf = BKE_image_acquire_ibuf(image, &iuser, &lock);
/* clean glError buffer */
while (glGetError() != GL_NO_ERROR) {}
if (ibuf == NULL || ibuf->rect == NULL) {
BKE_reportf(reports, RPT_ERROR, "Image '%s' does not have any image data", image->id.name + 2);
BKE_image_release_ibuf(image, ibuf, NULL);
return (int)GL_INVALID_OPERATION;
}
GPU_create_gl_tex(bind, ibuf->rect, ibuf->rect_float, ibuf->x, ibuf->y,
(filter != GL_NEAREST && filter != GL_LINEAR), false, image);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, (GLint)filter);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, (GLint)mag);
glTexEnvi(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
error = glGetError();
if (error) {
glDeleteTextures(1, (GLuint *)bind);
image->bindcode = 0;
}
BKE_image_release_ibuf(image, ibuf, NULL);
return error;
}
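
/* Mark the image as recently used so its GL texture is not evicted, loading
 * it first if it is not bound yet. */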
static int rna_Image_gl_touch(Image *image, ReportList *reports, int frame, int filter, int mag)
{
unsigned int *bind = &image->bindcode;
int error = GL_NO_ERROR;
BKE_image_tag_time(image);
if (*bind == 0)
error = rna_Image_gl_load(image, reports, frame, filter, mag);
return error;
}
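
/* Free the image's OpenGL texture and allow the image to be garbage collected again. */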
static void rna_Image_gl_free(Image *image)
{
GPU_free_image(image);
/* remove the nocollect flag, image is available for garbage collection again */
image->flag &= ~IMA_NOCOLLECT;
}
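
/* Resolve the file path of the frame specified by the given image user. */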
static void rna_Image_filepath_from_user(Image *image, ImageUser *image_user, char *filepath)
{
BKE_image_user_file_path(image_user, image, filepath);
}
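
/* Free the image buffers from memory. */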
static void rna_Image_buffers_free(Image *image)
{
BKE_image_free_buffers(image);
}
#else
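
/* Exposed from Python as ImagePackedFile.save(). */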
void RNA_api_image_packed_file(StructRNA *srna)
{
FunctionRNA *func;
func = RNA_def_function(srna, "save", "rna_ImagePackedFile_save");
RNA_def_function_ui_description(func, "Save the packed file to its filepath");
RNA_def_function_flag(func, FUNC_USE_REPORTS);
}
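
/* Image API functions, exposed from Python as methods on bpy.types.Image.
 * Illustrative usage from Python (the image name here is only an example):
 *
 *   img = bpy.data.images["MyImage"]
 *   img.save_render("/tmp/out.png")  # uses the current scene's render settings
 *   err = img.gl_load()              # 0 (GL_NO_ERROR) on success
 */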
void RNA_api_image(StructRNA *srna)
{
FunctionRNA *func;
PropertyRNA *parm;
func = RNA_def_function(srna, "save_render", "rna_Image_save_render");
RNA_def_function_ui_description(func, "Save image to a specific path using a scenes render settings");
RNA_def_function_flag(func, FUNC_USE_CONTEXT | FUNC_USE_REPORTS);
parm = RNA_def_string_file_path(func, "filepath", NULL, 0, "", "Save path");
RNA_def_property_flag(parm, PROP_REQUIRED);
RNA_def_pointer(func, "scene", "Scene", "", "Scene to take image parameters from");
func = RNA_def_function(srna, "save", "rna_Image_save");
RNA_def_function_ui_description(func, "Save image to its source path");
RNA_def_function_flag(func, FUNC_USE_MAIN | FUNC_USE_CONTEXT | FUNC_USE_REPORTS);
func = RNA_def_function(srna, "pack", "rna_Image_pack");
RNA_def_function_ui_description(func, "Pack an image as embedded data into the .blend file");
RNA_def_function_flag(func, FUNC_USE_MAIN | FUNC_USE_CONTEXT | FUNC_USE_REPORTS);
RNA_def_boolean(func, "as_png", 0, "as_png", "Pack the image as PNG (needed for generated/dirty images)");
parm = RNA_def_property(func, "data", PROP_STRING, PROP_BYTESTRING);
RNA_def_property_ui_text(parm, "data", "Raw data (bytes, exact content of the embedded file)");
RNA_def_int(func, "data_len", 0, 0, INT_MAX,
"data_len", "length of given data (mandatory if data is provided)", 0, INT_MAX);
func = RNA_def_function(srna, "unpack", "rna_Image_unpack");
RNA_def_function_ui_description(func, "Save an image packed in the .blend file to disk");
RNA_def_function_flag(func, FUNC_USE_REPORTS);
RNA_def_enum(func, "method", unpack_method_items, PF_USE_LOCAL, "method", "How to unpack");
func = RNA_def_function(srna, "reload", "rna_Image_reload");
RNA_def_function_ui_description(func, "Reload the image from its source path");
func = RNA_def_function(srna, "update", "rna_Image_update");
RNA_def_function_ui_description(func, "Update the display image from the floating point buffer");
RNA_def_function_flag(func, FUNC_USE_REPORTS);
func = RNA_def_function(srna, "scale", "rna_Image_scale");
RNA_def_function_ui_description(func, "Scale the image in pixels");
RNA_def_function_flag(func, FUNC_USE_REPORTS);
parm = RNA_def_int(func, "width", 1, 1, 10000, "", "Width", 1, 10000);
RNA_def_property_flag(parm, PROP_REQUIRED);
parm = RNA_def_int(func, "height", 1, 1, 10000, "", "Height", 1, 10000);
RNA_def_property_flag(parm, PROP_REQUIRED);
func = RNA_def_function(srna, "gl_touch", "rna_Image_gl_touch");
RNA_def_function_ui_description(func, "Delay the image from being cleaned from the cache due inactivity");
RNA_def_function_flag(func, FUNC_USE_REPORTS);
RNA_def_int(func, "frame", 0, 0, INT_MAX, "Frame",
"Frame of image sequence or movie", 0, INT_MAX);
RNA_def_int(func, "filter", GL_LINEAR_MIPMAP_NEAREST, -INT_MAX, INT_MAX, "Filter",
"The texture minifying function to use if the image wasn't loaded", -INT_MAX, INT_MAX);
RNA_def_int(func, "mag", GL_LINEAR, -INT_MAX, INT_MAX, "Magnification",
"The texture magnification function to use if the image wasn't loaded", -INT_MAX, INT_MAX);
/* return value */
parm = RNA_def_int(func, "error", 0, -INT_MAX, INT_MAX, "Error", "OpenGL error value", -INT_MAX, INT_MAX);
RNA_def_function_return(func, parm);
func = RNA_def_function(srna, "gl_load", "rna_Image_gl_load");
RNA_def_function_ui_description(func, "Load the image into OpenGL graphics memory");
RNA_def_function_flag(func, FUNC_USE_REPORTS);
RNA_def_int(func, "frame", 0, 0, INT_MAX, "Frame",
"Frame of image sequence or movie", 0, INT_MAX);
RNA_def_int(func, "filter", GL_LINEAR_MIPMAP_NEAREST, -INT_MAX, INT_MAX, "Filter",
"The texture minifying function", -INT_MAX, INT_MAX);
RNA_def_int(func, "mag", GL_LINEAR, -INT_MAX, INT_MAX, "Magnification",
"The texture magnification function", -INT_MAX, INT_MAX);
/* return value */
parm = RNA_def_int(func, "error", 0, -INT_MAX, INT_MAX, "Error", "OpenGL error value", -INT_MAX, INT_MAX);
RNA_def_function_return(func, parm);
func = RNA_def_function(srna, "gl_free", "rna_Image_gl_free");
RNA_def_function_ui_description(func, "Free the image from OpenGL graphics memory");
/* path to a frame specified by the image user */
func = RNA_def_function(srna, "filepath_from_user", "rna_Image_filepath_from_user");
RNA_def_function_ui_description(func, "Return the absolute path to the filepath of an image frame specified by the image user");
RNA_def_pointer(func, "image_user", "ImageUser", "", "Image user of the image to get filepath for");
parm = RNA_def_string_file_path(func, "filepath", NULL, FILE_MAX, "File Path",
"The resulting filepath from the image and it's user");
RNA_def_property_flag(parm, PROP_THICK_WRAP); /* needed for string return value */
RNA_def_function_output(func, parm);
func = RNA_def_function(srna, "buffers_free", "rna_Image_buffers_free");
RNA_def_function_ui_description(func, "Free the image buffers from memory");
/* TODO, pack/unpack, maybe should be generic functions? */
}
#endif