OpenGLRenderer: Fix version parsing
Some OpenGL ES 3.0 implementations don't seem to follow the spec for the GL_VERSION string format; query GL_MAJOR_VERSION and GL_MINOR_VERSION via glGetIntegerv instead of parsing the string.
This commit is contained in:
parent
8f04412a3f
commit
a73251f2df
|
|
@ -185,14 +185,14 @@ namespace Nz::GL
|
||||||
return -1;
|
return -1;
|
||||||
};
|
};
|
||||||
|
|
||||||
std::string_view versionString = reinterpret_cast<const char*>(glGetString(GL_VERSION));
|
GLint majorVersion = 0;
|
||||||
if (versionString.size() > 2 && DecodeDigit(versionString[0]) >= 0 && versionString[1] == '.' && DecodeDigit(versionString[2]) >= 0)
|
glGetIntegerv(GL_MAJOR_VERSION, &majorVersion);
|
||||||
{
|
|
||||||
m_params.glMajorVersion = DecodeDigit(versionString[0]);
|
GLint minorVersion = 0;
|
||||||
m_params.glMinorVersion = DecodeDigit(versionString[2]);
|
glGetIntegerv(GL_MINOR_VERSION, &minorVersion);
|
||||||
}
|
|
||||||
else
|
m_params.glMajorVersion = majorVersion;
|
||||||
NazaraWarning("Failed to decode OpenGL version: " + std::string(versionString));
|
m_params.glMinorVersion = minorVersion;
|
||||||
|
|
||||||
// Load extensions
|
// Load extensions
|
||||||
GLint extensionCount = 0;
|
GLint extensionCount = 0;
|
||||||
|
|
|
||||||
Loading…
Reference in New Issue