git.cworth.org Git - apitrace/blobdiff - wrappers/glxtrace.py
Replace dynamic_cast with virtual functions.
[apitrace] / wrappers / glxtrace.py
index 21bf8f144cea89f92f1588720a804da72aa1f748..e9c43a9c65132e69ad0f34cb7697616b2a4f1a5e 100644 (file)
@@ -110,34 +110,42 @@ class GlxTracer(GlTracer):
                 GLint level = 0;
                 GLint internalformat = GL_NONE;
                 _glGetTexLevelParameteriv(target, level, GL_TEXTURE_INTERNAL_FORMAT, &internalformat);
+                // XXX: GL_TEXTURE_INTERNAL_FORMAT cannot be trusted on NVIDIA
+                // -- it sometimes returns GL_BGRA, even though GL_BGR/BGRA is
+                // not a valid internal format.
+                switch (internalformat) {
+                case GL_BGR:
+                    internalformat = GL_RGB;
+                    break;
+                case GL_BGRA:
+                    internalformat = GL_RGBA;
+                    break;
+                }
                 GLint width = 0;
                 _glGetTexLevelParameteriv(target, level, GL_TEXTURE_WIDTH, &width);
                 GLint height = 0;
                 _glGetTexLevelParameteriv(target, level, GL_TEXTURE_HEIGHT, &height);
                 GLint border = 0;
-                unsigned glx_format = 0;
-                _glXQueryDrawable(display, drawable, GLX_TEXTURE_FORMAT_EXT, &glx_format);
-                GLenum format;
-                switch (glx_format) {
-                case GLX_TEXTURE_FORMAT_RGB_EXT:
-                    format = GL_RGB;
-                    break;
-                case GLX_TEXTURE_FORMAT_RGBA_EXT:
-                    format = GL_RGBA;
-                    break;
-                case GLX_TEXTURE_FORMAT_NONE_EXT:
-                    // XXX: This really shouldn't happen but some
-                    // implementations (Mesa) appear return bogus results to
-                    // the GLX_TEXTURE_FORMAT_EXT query
-                default:
-                    //os::log("apitrace: warning: %s: unexpected GLX_TEXTURE_FORMAT_EXT 0x%u\n", __FUNCTION__, glx_format);
-                    format = GL_RGBA;
-                    break;
-                }
+                // XXX: We always use GL_RGBA format to read the pixels because:
+                // - some implementations (Mesa) seem to return bogus results
+                //   for GLX_TEXTURE_FORMAT_EXT
+                // - hardware usually stores GL_RGB with 32bpp, so it should be
+                //   faster to read/write
+                // - it is more robust against GL_(UN)PACK_ALIGNMENT state
+                //   changes
+                // The drawback is that traces will be slightly bigger.
+                GLenum format = GL_RGBA;
                 GLenum type = GL_UNSIGNED_BYTE;
-                if (target && internalformat && height && width && format) {
-                    GLint channels = _gl_format_channels(format);
-                    GLvoid * pixels = malloc(height * width * channels);
+                if (target && internalformat && height && width) {
+                    // FIXME: This assumes (UN)PACK state (in particular
+                    // GL_(UN)PACK_ROW_LENGTH) is set to its defaults. We
+                    // really should temporarily reset the state here (and emit
+                    // according fake calls) to cope when its not. At very
+                    // least we need a heads up warning that this will cause
+                    // problems.
+                    GLint alignment = 4;
+                    GLint row_stride = _align(width * 4, alignment);
+                    GLvoid * pixels = malloc(height * row_stride);
                     _glGetTexImage(target, level, format, type, pixels);
             '''
             self.emitFakeTexture2D()
@@ -152,14 +160,13 @@ if __name__ == '__main__':
     print '#include <stdlib.h>'
     print '#include <string.h>'
     print
-    print '#include <dlfcn.h>'
-    print
     print '#include "trace_writer_local.hpp"'
     print
     print '// To validate our prototypes'
     print '#define GL_GLEXT_PROTOTYPES'
     print '#define GLX_GLXEXT_PROTOTYPES'
     print
+    print '#include "dlopen.hpp"'
     print '#include "glproc.hpp"'
     print '#include "glsize.hpp"'
     print
@@ -175,26 +182,6 @@ if __name__ == '__main__':
     print r'''
 
 
-/*
- * Invoke the true dlopen() function.
- */
-static void *_dlopen(const char *filename, int flag)
-{
-    typedef void * (*PFN_DLOPEN)(const char *, int);
-    static PFN_DLOPEN dlopen_ptr = NULL;
-
-    if (!dlopen_ptr) {
-        dlopen_ptr = (PFN_DLOPEN)dlsym(RTLD_NEXT, "dlopen");
-        if (!dlopen_ptr) {
-            os::log("apitrace: error: dlsym(RTLD_NEXT, \"dlopen\") failed\n");
-            return NULL;
-        }
-    }
-
-    return dlopen_ptr(filename, flag);
-}
-
-
 /*
  * Several applications, such as Quake3, use dlopen("libGL.so.1"), but
  * LD_PRELOAD does not intercept symbols obtained via dlopen/dlsym, therefore