GLint level = 0;
GLint internalformat = GL_NONE;
_glGetTexLevelParameteriv(target, level, GL_TEXTURE_INTERNAL_FORMAT, &internalformat);
+ // XXX: GL_TEXTURE_INTERNAL_FORMAT cannot be trusted on NVIDIA
+ // -- it sometimes returns GL_BGRA, even though GL_BGR/BGRA is
+ // not a valid internal format.
+ switch (internalformat) {
+ case GL_BGR:
+ internalformat = GL_RGB;
+ break;
+ case GL_BGRA:
+ internalformat = GL_RGBA;
+ break;
+ }
GLint width = 0;
_glGetTexLevelParameteriv(target, level, GL_TEXTURE_WIDTH, &width);
GLint height = 0;
_glGetTexLevelParameteriv(target, level, GL_TEXTURE_HEIGHT, &height);
GLint border = 0;
- unsigned glx_format = 0;
- _glXQueryDrawable(display, drawable, GLX_TEXTURE_FORMAT_EXT, &glx_format);
- GLenum format;
- switch (glx_format) {
- case GLX_TEXTURE_FORMAT_RGB_EXT:
- format = GL_RGB;
- break;
- case GLX_TEXTURE_FORMAT_RGBA_EXT:
- format = GL_RGBA;
- break;
- case GLX_TEXTURE_FORMAT_NONE_EXT:
- // XXX: This really shouldn't happen but some
- // implementations (Mesa) appear return bogus results to
- // the GLX_TEXTURE_FORMAT_EXT query
- default:
- //os::log("apitrace: warning: %s: unexpected GLX_TEXTURE_FORMAT_EXT 0x%u\n", __FUNCTION__, glx_format);
- format = GL_RGBA;
- break;
- }
+ // XXX: We always use GL_RGBA format to read the pixels because:
+ // - some implementations (Mesa) seem to return bogus results
+ // for GLX_TEXTURE_FORMAT_EXT
+ // - hardware usually stores GL_RGB with 32bpp, so it should be
+ // faster to read/write
+ // - it is more robust against GL_(UN)PACK_ALIGNMENT state
+ // changes
+ // The drawback is that traces will be slightly bigger.
+ GLenum format = GL_RGBA;
GLenum type = GL_UNSIGNED_BYTE;
- if (target && internalformat && height && width && format) {
- GLint channels = _gl_format_channels(format);
- // FIXME: This assumes (UN)PACK state is set to its
- // defaults. We really should temporarily reset the state
- // here (and emit according fake calls) to cope when its
- // not. At very least we need a heads up warning that this
- // will cause problems.
+ if (target && internalformat && height && width) {
+ // FIXME: This assumes (UN)PACK state (in particular
+ // GL_(UN)PACK_ROW_LENGTH) is set to its defaults. We
+ // really should temporarily reset the state here (and emit
+ // according fake calls) to cope when it's not. At very
+ // least we need a heads up warning that this will cause
+ // problems.
GLint alignment = 4;
- GLint stride = _align(width * channels, alignment);
- GLvoid * pixels = malloc(height * stride);
+ GLint row_stride = _align(width * 4, alignment);
+ GLvoid * pixels = malloc(height * row_stride);
_glGetTexImage(target, level, format, type, pixels);
'''
self.emitFakeTexture2D()
print '#include <stdlib.h>'
print '#include <string.h>'
print
- print '#include <dlfcn.h>'
- print
print '#include "trace_writer_local.hpp"'
print
print '// To validate our prototypes'
print '#define GL_GLEXT_PROTOTYPES'
print '#define GLX_GLXEXT_PROTOTYPES'
print
+ print '#include "dlopen.hpp"'
print '#include "glproc.hpp"'
print '#include "glsize.hpp"'
print
print r'''
-/*
- * Invoke the true dlopen() function.
- */
-static void *_dlopen(const char *filename, int flag)
-{
- typedef void * (*PFN_DLOPEN)(const char *, int);
- static PFN_DLOPEN dlopen_ptr = NULL;
-
- if (!dlopen_ptr) {
- dlopen_ptr = (PFN_DLOPEN)dlsym(RTLD_NEXT, "dlopen");
- if (!dlopen_ptr) {
- os::log("apitrace: error: dlsym(RTLD_NEXT, \"dlopen\") failed\n");
- return NULL;
- }
- }
-
- return dlopen_ptr(filename, flag);
-}
-
-
/*
* Several applications, such as Quake3, use dlopen("libGL.so.1"), but
* LD_PRELOAD does not intercept symbols obtained via dlopen/dlsym, therefore