Merge pull request #1475 from ksuprynowicz/auto_texture_fix

Fix automatic texture memory bug.
Dale Glass, 2021-11-25 23:11:23 +01:00 (committed by GitHub)
commit 113223cdee

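The change widens each queried value to size_t before multiplying by BYTES_PER_KIB / BYTES_PER_MIB. The GL queries report memory as a 32-bit GLint in KiB (or MiB), so if the multiplication happens in 32-bit arithmetic the byte count wraps around on cards with more than a few GiB; widening first makes it a 64-bit multiply. A minimal standalone sketch of the wrap-around (hypothetical values, not repository code; written with unsigned arithmetic so the broken case is well-defined, whereas overflowing a signed GLint is undefined behavior):

    #include <cstdint>
    #include <cstdio>

    int main() {
        // An 8 GiB card reports its memory in KiB through a 32-bit GL integer.
        uint32_t reportedKib = 8u * 1024u * 1024u;       // 8388608 KiB

        // Multiplying in 32 bits wraps before the result is widened for storage.
        uint64_t broken = reportedKib * 1024u;           // 32-bit multiply: wraps to 0
        // Widening first keeps the full 8589934592-byte value.
        uint64_t fixed = (uint64_t)reportedKib * 1024u;

        printf("broken: %llu bytes\n", (unsigned long long)broken);
        printf("fixed:  %llu bytes\n", (unsigned long long)fixed);
        return 0;
    }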

@@ -155,7 +155,7 @@ void GLBackend::init() {
 #if defined(Q_OS_ANDROID) || defined(USE_GLES) || defined(Q_OS_DARWIN)
 qCDebug(gpugllogging) << "Automatic texture memory not supported in this configuration";
 _videoCard = Unknown;
-_dedicatedMemory = gpu->getMemory() * BYTES_PER_MIB;
+_dedicatedMemory = (size_t)(gpu->getMemory()) * BYTES_PER_MIB;
 _totalMemory = _dedicatedMemory;
 #endif
@@ -171,8 +171,8 @@ void GLBackend::init() {
 qCDebug(gpugllogging) << "GPU_MEMORY_INFO_TOTAL_AVAILABLE_MEMORY_NVX: " << GPU_MEMORY_INFO_TOTAL_AVAILABLE_MEMORY_NVX;
 qCDebug(gpugllogging) << "GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX: " << GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX;
-_totalMemory = GPU_MEMORY_INFO_TOTAL_AVAILABLE_MEMORY_NVX * BYTES_PER_KIB;
-_dedicatedMemory = GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX * BYTES_PER_KIB;
+_totalMemory = (size_t)(GPU_MEMORY_INFO_TOTAL_AVAILABLE_MEMORY_NVX) * BYTES_PER_KIB;
+_dedicatedMemory = (size_t)(GPU_MEMORY_INFO_DEDICATED_VIDMEM_NVX) * BYTES_PER_KIB;
 _videoCard = NVIDIA;
@@ -182,20 +182,20 @@ void GLBackend::init() {
 GL_GET_INTEGER(TEXTURE_FREE_MEMORY_ATI);
 // We are actually getting free memory instead of total memory
-_totalMemory = TEXTURE_FREE_MEMORY_ATI * BYTES_PER_KIB;
+_totalMemory = (size_t)(TEXTURE_FREE_MEMORY_ATI) * BYTES_PER_KIB;
 _dedicatedMemory = _totalMemory;
 _videoCard = ATI;
 } else if ( ::gl::queryCurrentRendererIntegerMESA(GLX_RENDERER_VIDEO_MEMORY_MESA, &mem) ) {
 // This works only on Linux. queryCurrentRendererIntegerMESA will return false if the
 // function is not supported because we're not on Linux, or for any other reason.
 qCDebug(gpugllogging) << "MESA card detected";
-_totalMemory = mem * BYTES_PER_MIB;
+_totalMemory = (size_t)(mem) * BYTES_PER_MIB;
 _dedicatedMemory = _totalMemory;
 _videoCard = MESA;
 } else {
 qCCritical(gpugllogging) << "Don't know how to get memory for OpenGL vendor " << vendor << "; renderer " << renderer << ", trying fallback";
 _videoCard = Unknown;
-_dedicatedMemory = gpu->getMemory() * BYTES_PER_MIB;
+_dedicatedMemory = (size_t)(gpu->getMemory()) * BYTES_PER_MIB;
 _totalMemory = _dedicatedMemory;
 }
 #endif
@@ -237,12 +237,12 @@ size_t GLBackend::getAvailableMemory() {
 #if !defined(Q_OS_ANDROID) && !defined(USE_GLES)
 glGetIntegerv(GL_GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX, &mem[0]);
 #endif
-return mem[0] * BYTES_PER_KIB;
+return (size_t)(mem[0]) * BYTES_PER_KIB;
 case ATI:
 #if !defined(Q_OS_ANDROID) && !defined(USE_GLES)
 glGetIntegerv(GL_TEXTURE_FREE_MEMORY_ATI, &mem[0]);
 #endif
-return mem[0] * BYTES_PER_KIB;
+return (size_t)(mem[0]) * BYTES_PER_KIB;
 case MESA:
 return 0; // Don't know the current value
 case Unknown:
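
For reference, a minimal sketch of the NVX branch of getAvailableMemory() with the same widening applied; this is not the repository's code and assumes a current desktop OpenGL context plus the GL_NVX_gpu_memory_info extension (the enum is defined locally in case the GL header in use lacks it, and the include line varies by platform/loader):

    #include <cstddef>
    #include <GL/gl.h>

    #ifndef GL_GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX
    #define GL_GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX 0x9049
    #endif

    // Returns the currently available video memory in bytes, or 0 if the
    // query is unsupported (glGetIntegerv leaves the output untouched then).
    static size_t availableVideoMemoryNvx() {
        GLint kib = 0;  // the extension reports the value in KiB
        glGetIntegerv(GL_GPU_MEMORY_INFO_CURRENT_AVAILABLE_VIDMEM_NVX, &kib);
        if (kib <= 0) {
            return 0;
        }
        // Widen before multiplying so a few GiB of free memory does not
        // overflow 32-bit GLint arithmetic.
        return (size_t)kib * 1024;
    }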