OpenGL: use sized texture internal formats

Maybe this is pedantic, but I've read it's best to explicitly set the
desired component size.

Also append “_ARB” to the float texture formats, since those come from
the ARB_texture_float extension in GL 2.1.
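
For context, a minimal standalone sketch of the pattern described above, assuming GLEW for the extension check (the helper name and signature are illustrative, not part of this patch):

#include <GL/glew.h>

/* Upload an RGBA image with an explicitly sized internal format.
 * On GL 2.1 the half-float format comes from ARB_texture_float,
 * hence the _ARB suffix; otherwise fall back to 16-bit normalized. */
static void upload_rgba_texture(int w, int h, const void *pixels, int is_float)
{
	if (is_float) {
		if (GLEW_ARB_texture_float)
			glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA16F_ARB, w, h, 0, GL_RGBA, GL_FLOAT, pixels);
		else
			glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA16, w, h, 0, GL_RGBA, GL_FLOAT, pixels);
	}
	else {
		/* sized GL_RGBA8 instead of the unsized GL_RGBA */
		glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, w, h, 0, GL_RGBA, GL_UNSIGNED_BYTE, pixels);
	}
}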
commit 6006173f4a
parent c013d64a0a
Date: 2015-12-08 01:19:08 -05:00
7 changed files with 21 additions and 17 deletions


@@ -755,12 +755,12 @@ void GPU_create_gl_tex(unsigned int *bind, unsigned int *rect, float *frect, int
 	if (use_high_bit_depth) {
 		if (GLEW_ARB_texture_float)
-			glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA16F, rectw, recth, 0, GL_RGBA, GL_FLOAT, frect);
+			glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA16F_ARB, rectw, recth, 0, GL_RGBA, GL_FLOAT, frect);
 		else
 			glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA16, rectw, recth, 0, GL_RGBA, GL_FLOAT, frect);
 	}
 	else
-		glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, rectw, recth, 0, GL_RGBA, GL_UNSIGNED_BYTE, rect);
+		glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA8, rectw, recth, 0, GL_RGBA, GL_UNSIGNED_BYTE, rect);
 	glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, gpu_get_mipmap_filter(1));
@@ -786,12 +786,12 @@ void GPU_create_gl_tex(unsigned int *bind, unsigned int *rect, float *frect, int
 			ImBuf *mip = ibuf->mipmap[i - 1];
 			if (use_high_bit_depth) {
 				if (GLEW_ARB_texture_float)
-					glTexImage2D(GL_TEXTURE_2D, i, GL_RGBA16F, mip->x, mip->y, 0, GL_RGBA, GL_FLOAT, mip->rect_float);
+					glTexImage2D(GL_TEXTURE_2D, i, GL_RGBA16F_ARB, mip->x, mip->y, 0, GL_RGBA, GL_FLOAT, mip->rect_float);
 				else
 					glTexImage2D(GL_TEXTURE_2D, i, GL_RGBA16, mip->x, mip->y, 0, GL_RGBA, GL_FLOAT, mip->rect_float);
 			}
 			else {
-				glTexImage2D(GL_TEXTURE_2D, i, GL_RGBA, mip->x, mip->y, 0, GL_RGBA, GL_UNSIGNED_BYTE, mip->rect);
+				glTexImage2D(GL_TEXTURE_2D, i, GL_RGBA8, mip->x, mip->y, 0, GL_RGBA, GL_UNSIGNED_BYTE, mip->rect);
 			}
 		}
 	}