Move GPU_code_generate_glsl_lib to GPU_extensions_init. This makes more sense, as we clean up the library string in GPU_extensions_exit. Thanks to Mitchel Stokes for reporting and proposing this.
This commit is contained in:
@@ -112,6 +112,10 @@ void GPU_extensions_init(void)
 	if (!GLEW_ARB_vertex_shader) GG.glslsupport = 0;
 	if (!GLEW_ARB_fragment_shader) GG.glslsupport = 0;
 
+	if(GG.glslsupport){
+		GPU_code_generate_glsl_lib();
+	}
+
 	glGetIntegerv(GL_RED_BITS, &r);
 	glGetIntegerv(GL_GREEN_BITS, &g);
 	glGetIntegerv(GL_BLUE_BITS, &b);
@@ -176,7 +176,6 @@ void WM_init(bContext *C, int argc, const char **argv)
 	GPU_extensions_init();
 	GPU_set_mipmap(!(U.gameflags & USER_DISABLE_MIPMAP));
 	GPU_set_anisotropic(U.anisotropic_filter);
-	GPU_code_generate_glsl_lib();
 
 	UI_init();
 }
Reference in New Issue
Block a user