I’ve rewritten this “test” in pyglet. I know it’s Python, there is no vertex buffer, etc…
# Standard-library imports.
import sys
import time
import ctypes
# pyglet / OpenGL imports.
import pyglet
# Disable pyglet's per-call GL error checking so it doesn't skew the benchmark.
pyglet.options['debug_gl'] = False
from pyglet.gl import *
from pyglet.app import *
# GLSL vertex shader source.  The commented-out lines are the test toggles:
# uniform color, per-vertex color, or fixed-function transform.
VERTEX_SHADER = """
uniform vec4 myColor;
void main() {
//gl_FrontColor = myColor;
//gl_FrontColor = gl_Color;
//gl_Position = gl_ModelViewProjectionMatrix * gl_Vertex;
}
"""
# GLSL fragment shader source; same toggle scheme as the vertex shader.
FRAGMENT_SHADER = """
void main() {
//gl_FragColor = gl_Color;
//gl_FragColor = vec4(1.0, 0.0, 1.0, 1.0);
}
"""
# The scene is a SIZE x SIZE grid of quads.
SIZE = 20
# FPS bookkeeping, updated once per frame in on_draw().
fpsStart = time.time()
fpsCount = 0
# vsync off so the frame rate is not capped at the display refresh rate.
window = pyglet.window.Window(width = 800, height = 600, vsync = False)
def compile(source, type):
    """Compile a GLSL shader and return its GL handle.

    NOTE(review): shadows the built-in compile(); the name is kept because
    module-level callers below use it.

    source -- GLSL source text (str; this is Python 2 byte-string code)
    type   -- GL_VERTEX_SHADER or GL_FRAGMENT_SHADER
    Raises RuntimeError if compilation fails (the original silently
    returned a broken shader handle).
    """
    shader = glCreateShader(type)
    buff = ctypes.create_string_buffer(source)
    # glShaderSource wants a GLchar** -> cast a pointer-to-pointer-to-buffer.
    c_text = ctypes.cast(ctypes.pointer(ctypes.pointer(buff)),
                         ctypes.POINTER(ctypes.POINTER(GLchar)))
    glShaderSource(shader, 1, c_text, None)
    glCompileShader(shader)
    # Fail loudly instead of drawing with a silently-broken shader.
    status = GLint(0)
    glGetShaderiv(shader, GL_COMPILE_STATUS, ctypes.byref(status))
    if not status.value:
        log_len = GLint(0)
        glGetShaderiv(shader, GL_INFO_LOG_LENGTH, ctypes.byref(log_len))
        log = ctypes.create_string_buffer(max(log_len.value, 1))
        glGetShaderInfoLog(shader, log_len, None, log)
        raise RuntimeError("Shader compile failed: %s" % log.value)
    return shader
# Build and link the shader program.  It is compiled but NOT activated by
# default; uncommenting glUseProgram below switches from fixed-function to
# the shader path.
vertexShader = compile(VERTEX_SHADER, GL_VERTEX_SHADER)
fragmentShader = compile(FRAGMENT_SHADER, GL_FRAGMENT_SHADER)
program = glCreateProgram()
glAttachShader(program, vertexShader)
glAttachShader(program, fragmentShader)
glLinkProgram(program)
# Location of the 'myColor' uniform, used by the glUniform4f test case.
p = glGetUniformLocation(program, "myColor")
#glUseProgram(program)
def on_draw():
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
glMatrixMode(GL_MODELVIEW)
glLoadIdentity()
glTranslatef((SIZE - 1) * -0.5, (SIZE - 1) * -0.5, -50.0)
for y in range(SIZE):
for x in range(SIZE):
glPushMatrix()
glTranslatef(x, y, 0.0)
#glColor4f(x / (SIZE - 1.0), y / (SIZE - 1.0), 1.0, 1.0)
#glUniform4f(p, x / (SIZE - 1.0), y / (SIZE - 1.0), 1.0, 1.0)
glBegin(GL_QUADS)
glVertex3f(-0.45, 0.45, 0.0)
glVertex3f(0.45, 0.45, 0.0)
glVertex3f(0.45, -0.45, 0.0)
glVertex3f(-0.45, -0.45, 0.0)
glEnd()
glPopMatrix()
global fpsCount, fpsStart
fpsCount += 1
fpsCurrent = time.time()
fpsDelta = fpsCurrent - fpsStart
if (fpsDelta) > 1.0:
print "FPS", fpsCount / fpsDelta
fpsStart = fpsCurrent
fpsCount = 0
def on_resize(width, height):
    """Set up GL state and a 45-degree perspective projection.

    width, height -- new window size in pixels.
    Returns pyglet.event.EVENT_HANDLED so pyglet's default resize
    handler (an orthographic projection) is suppressed.
    """
    glClearColor(0.0, 0.0, 0.2, 0.0)
    glEnable(GL_DEPTH_TEST)
    glViewport(0, 0, width, height)
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    aspect = float(width) / float(height)
    gluPerspective(45.0, aspect, 0.1, 100.0)
    return pyglet.event.EVENT_HANDLED
# Configure the projection once, then drive 1000 frames back-to-back as
# fast as possible; on_draw() prints the FPS about once a second.
on_resize(800, 600)
for _frame in range(1000):
    on_draw()
    window.flip()
There are several commented-out lines in both the Python and the GLSL code. By uncommenting the right ones you can test the different cases.
On my NVIDIA card I get the following result:
Fixed Function, No Colors: 202
Fixed Function, Colors: 150
Shader, No Colors: 200
Shader, (glColor4f) Colors: 158
Shader, (glUniform4f) Colors: 142
It may be acceptable that the frame rate drops when I have to call glColor4f every quad (Python call overhead, …). But the fact that there is a difference between glColor4f and glUniform4f is something I really, really cannot understand (bashing my head on the desk).
I first tested it on the ATI card but the results are worse. The framerate drops if I only activate the shader without doing anything.
Maybe time to say once more that Panda3D makes the most of this disaster.