make glsupport.d compile with ldc2

vennos5 2017-11-06 22:08:00 +02:00
parent 48649f51ae
commit 565d0ab133
1 changed file with 5 additions and 4 deletions


@@ -700,7 +700,7 @@ bool initGLSupport(bool legacy = false) {
     }
     if (!_glSupport) {
         Log.d("glSupport not initialized: trying to create");
-        int major = to!int(glGetString(GL_VERSION)[0 .. 1]);
+        int major = *cast(int*)(glGetString(GL_VERSION)[0 .. 1].ptr);
         legacy = legacy || (major < 3);
         _glSupport = new GLSupport(legacy);
         if (!_glSupport.valid) {
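
The only change in initGLSupport replaces a std.conv.to call on a one-character slice of the GL_VERSION string with a pointer reinterpretation of its leading bytes. For comparison only, here is a minimal sketch (not part of this commit) of extracting the major version through std.conv, assuming glGetString returns a NUL-terminated const(char)* as in the Derelict bindings; the helper name parseMajorVersion is hypothetical:

    import std.conv : to;
    import std.string : fromStringz;

    // Hypothetical helper, not in the repository: take the leading digits of a
    // GL version string such as "3.3.0 NVIDIA 384.90" and convert them to int.
    int parseMajorVersion(const(char)* versionString) {
        auto s = versionString.fromStringz;           // slice up to the terminating NUL
        size_t i;
        while (i < s.length && s[i] >= '0' && s[i] <= '9')
            ++i;
        return i > 0 ? to!int(s[0 .. i]) : 0;         // "3" -> 3, unrecognized format -> 0
    }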
@@ -835,11 +835,12 @@ final class GLSupport {
     /// This function is needed to draw custom OpenGL scene correctly (especially on legacy API)
     private void resetBindings() {
-        if (glUseProgram)
+        import std.traits : isFunction;
+        if (isFunction!glUseProgram)
             GLProgram.unbind();
-        if (glBindVertexArray)
+        if (isFunction!glBindVertexArray)
             VAO.unbind();
-        if (glBindBuffer)
+        if (isFunction!glBindBuffer)
             VBO.unbind();
     }
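
The resetBindings change swaps the runtime null checks on the GL entry points for a compile-time std.traits.isFunction test. As a small illustration (not from the repository), isFunction is true for an actual function declaration and false for a variable holding a function pointer, which is how dynamically loaded GL entry points are typically declared:

    import std.traits : isFunction;

    void realFunc() {}          // a real function symbol
    void function() funcPtr;    // a function-pointer variable, e.g. a dynamically loaded entry point

    static assert( isFunction!realFunc);   // true: the symbol is a function
    static assert(!isFunction!funcPtr);    // false: the symbol is a variable of function-pointer type

With this change the condition is resolved at compile time, so whether the unbind calls are emitted depends on how the OpenGL symbols are declared by the bindings rather than on a runtime null test.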