Hi,
I'm trying to get LWJGL to work under OS X (10.7.3) using OpenGL 3.2 with the core profile.
The following code works flawlessly under both 2.1 and 3.2 on Windows and Linux, but only under 2.1 on OS X.
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL15;
import org.lwjgl.opengl.GL20;

import java.nio.FloatBuffer;
import java.nio.IntBuffer;

public class VboTest {

    private final FloatBuffer vertexBuffer;
    private final IntBuffer indices;
    private final int shaderId;
    private int vboId;
    private int indicesId;

    public VboTest(FloatBuffer vertexBuffer, IntBuffer indices, int shaderId) {
        this.vertexBuffer = vertexBuffer;
        this.indices = indices;
        this.shaderId = shaderId;
    }

    private void setupVbo() {
        // Upload the vertex data.
        vboId = GL15.glGenBuffers();
        GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vboId);
        GL15.glBufferData(GL15.GL_ARRAY_BUFFER, vertexBuffer, GL15.GL_STATIC_DRAW);
        // Upload the index data.
        indicesId = GL15.glGenBuffers();
        GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, indicesId);
        GL15.glBufferData(GL15.GL_ELEMENT_ARRAY_BUFFER, indices, GL15.GL_STATIC_DRAW);
        GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
        GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
    }

    private void render(FloatBuffer projection, FloatBuffer modelView) {
        GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, vboId);
        GL20.glUseProgram(shaderId);
        setUniformMatrix4("projectionMatrix", projection);
        setUniformMatrix4("modelViewMatrix", modelView);
        // Point the "vertex" attribute at the bound VBO: 3 floats per vertex.
        int location = GL20.glGetAttribLocation(shaderId, "vertex");
        GL20.glEnableVertexAttribArray(location);
        GL20.glVertexAttribPointer(location, 3, GL11.GL_FLOAT, true, 3 * 4, 0);
        GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, indicesId);
        GL11.glDrawElements(GL11.GL_TRIANGLES, indices.limit(), GL11.GL_UNSIGNED_INT, 0);
        GL15.glBindBuffer(GL15.GL_ARRAY_BUFFER, 0);
        GL15.glBindBuffer(GL15.GL_ELEMENT_ARRAY_BUFFER, 0);
        GL20.glUseProgram(0);
    }

    private void setUniformMatrix4(String name, FloatBuffer buffer) {
        int location = GL20.glGetUniformLocation(shaderId, name);
        GL20.glUniformMatrix4(location, false, buffer);
    }
}
The following shaders are used for 2.1:
Vertex shader:
#version 120
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
attribute vec3 vertex;
varying vec4 vertColor;

void main() {
    vec4 modelViewPosition = modelViewMatrix * vec4(vertex, 1.0);
    gl_Position = projectionMatrix * modelViewPosition;
    vertColor = vec4(vertex.xyz, 1.0);
}
Fragment shader:
#version 120
varying vec4 vertColor;

void main() {
    gl_FragColor = vertColor;
}
The following shaders are used for 3.2:
Vertex shader:
#version 150
uniform mat4 projectionMatrix;
uniform mat4 modelViewMatrix;
in vec3 vertex;
out vec4 vertColor;

void main() {
    vec4 modelViewPosition = modelViewMatrix * vec4(vertex, 1.0);
    gl_Position = projectionMatrix * modelViewPosition;
    vertColor = vec4(vertex.xyz, 1.0);
}
Fragment shader:
#version 150
in vec4 vertColor;
out vec4 fragColor;

void main() {
    fragColor = vertColor;
}
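For completeness, the shaders are compiled and linked in the usual GL20 way, roughly like this (a sketch; the ShaderLoader name and the exception-based error handling are just how I've written it up here):

import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL20;

public final class ShaderLoader {

    // Compiles a vertex/fragment pair and links them into a program; returns the program id.
    public static int buildProgram(String vertexSource, String fragmentSource) {
        int vs = compile(GL20.GL_VERTEX_SHADER, vertexSource);
        int fs = compile(GL20.GL_FRAGMENT_SHADER, fragmentSource);
        int program = GL20.glCreateProgram();
        GL20.glAttachShader(program, vs);
        GL20.glAttachShader(program, fs);
        GL20.glLinkProgram(program);
        if (GL20.glGetProgrami(program, GL20.GL_LINK_STATUS) == GL11.GL_FALSE) {
            throw new RuntimeException(GL20.glGetProgramInfoLog(program, 4096));
        }
        return program;
    }

    private static int compile(int type, String source) {
        int shader = GL20.glCreateShader(type);
        GL20.glShaderSource(shader, source);
        GL20.glCompileShader(shader);
        if (GL20.glGetShaderi(shader, GL20.GL_COMPILE_STATUS) == GL11.GL_FALSE) {
            throw new RuntimeException(GL20.glGetShaderInfoLog(shader, 4096));
        }
        return shader;
    }
}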
To force OS X to use 3.2 (core) I use:
Display.create(new PixelFormat(8, 24, 0), new ContextAttribs(3, 2).withProfileCore(true));
but then the screen stays black (while the exact same code works perfectly under Linux and Windows).
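For reference, the surrounding context setup looks roughly like this (a condensed sketch; the window size and the loop body are placeholders):

import org.lwjgl.LWJGLException;
import org.lwjgl.opengl.ContextAttribs;
import org.lwjgl.opengl.Display;
import org.lwjgl.opengl.DisplayMode;
import org.lwjgl.opengl.PixelFormat;

public class ContextSetup {
    public static void main(String[] args) throws LWJGLException {
        Display.setDisplayMode(new DisplayMode(800, 600));
        // Request a 3.2 core profile context (OS X 10.7 only exposes GL above 2.1 via the core profile).
        Display.create(new PixelFormat(8, 24, 0), new ContextAttribs(3, 2).withProfileCore(true));
        while (!Display.isCloseRequested()) {
            // setupVbo()/render() from the class above are driven from here.
            Display.update();
        }
        Display.destroy();
    }
}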
glGetString(GL11.GL_VERSION) reports the correct version of OpenGL as well.
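For debugging, a GL error check along these lines can be dropped in after each stage (a sketch; the GlDebug/checkError names are just mine for illustration):

import org.lwjgl.opengl.GL11;

public final class GlDebug {
    // Drain and print any pending GL errors; "where" is only a label for the log output.
    public static void checkError(String where) {
        int error;
        while ((error = GL11.glGetError()) != GL11.GL_NO_ERROR) {
            System.err.println("GL error at " + where + ": 0x" + Integer.toHexString(error));
        }
    }
}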
Any idea what the problem could be?