Tangent space bug

Started by thepalacsinta007, November 25, 2017, 17:17:15


thepalacsinta007

Hi,

I'm working on a 3D OpenGL engine with LWJGL. I managed to do the model importing with Assimp, and now I'm working on normal mapping.
Luckily Assimp can calculate the tangent space for me, but it isn't working correctly.
Here is a picture of how it looks on the Sponza model:

This is the T (tangent) from the TBN matrix, visualized:

The code looks like this in the vertex shader:
mat3 normalMatrix = transpose(inverse(mat3(transformationMatrix)));
vec3 T = normalize(normalMatrix * tangent);
vec3 N = normalize(normalMatrix * normal);
T = normalize(T - dot(T, N) * N); // Gram-Schmidt: re-orthogonalize T against N
vec3 B = normalize(normalMatrix * bitangent);

// handedness check: mirrored UVs flip the tangent frame, so flip T back
if (dot(cross(N, T), B) < 0.0) {
	T = T * -1.0;
}
TBN = transpose(mat3(T, B, N));


It checks whether the model has mirrored (symmetric) UVs and flips T to fix the handedness.
I think this is part of the problem, because the B from the TBN matrix looks even worse.
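
As far as I understand, the idea is that a mirrored UV island flips the handedness of the tangent frame, so the sign of dot(cross(N, T), B) tells you whether T has to be flipped. I've also seen people bake that sign into the vertex data instead of branching in the shader; something like this on the CPU side (untested sketch, assuming JOML vectors and a made-up helper name):

import org.joml.Vector3f;

// Hypothetical helper: +1 for a right-handed tangent frame, -1 for a mirrored one.
// The sign could be stored per vertex (e.g. as tangent.w), and the shader would
// then reconstruct the bitangent as cross(N, T) * w.
static float handedness(Vector3f normal, Vector3f tangent, Vector3f bitangent) {
	Vector3f nCrossT = new Vector3f(normal).cross(tangent); // copy first, JOML cross() mutates
	return nCrossT.dot(bitangent) < 0.0f ? -1.0f : 1.0f;
}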


After weeks of this I tried to calculate the tangents and bitangents myself, but my bitangents look even worse than Assimp's:


private void generateTangentBitangent() {
	for (int i = 0; i < indices.size(); i += 3) {
		// positions and UVs of the triangle's three vertices
		Vector3f v0 = new Vector3f(vertices.get(indices.get(i)).getPos());
		Vector3f v1 = new Vector3f(vertices.get(indices.get(i + 1)).getPos());
		Vector3f v2 = new Vector3f(vertices.get(indices.get(i + 2)).getPos());

		Vector2f uv0 = new Vector2f(vertices.get(indices.get(i)).getTextureCoord());
		Vector2f uv1 = new Vector2f(vertices.get(indices.get(i + 1)).getTextureCoord());
		Vector2f uv2 = new Vector2f(vertices.get(indices.get(i + 2)).getTextureCoord());

		// triangle edges and the corresponding UV deltas
		Vector3f e1 = new Vector3f(v1.sub(v0));
		Vector3f e2 = new Vector3f(v2.sub(v0));

		Vector2f deltaUV1 = new Vector2f(uv1.sub(uv0));
		Vector2f deltaUV2 = new Vector2f(uv2.sub(uv0));

		float r = 1.0f / (deltaUV1.x() * deltaUV2.y() - deltaUV1.y() * deltaUV2.x());

		Vector3f tangent = new Vector3f((e1.mul(deltaUV2.y()).sub(e2.mul(deltaUV1.y()))).mul(r));
		Vector3f bitangent = new Vector3f((e2.mul(deltaUV1.x()).sub(e1.mul(deltaUV2.x()))).mul(r));

		// lazily initialize the per-vertex accumulators
		for (int j = 0; j < 3; j++) {
			if (vertices.get(indices.get(i + j)).getTangent() == null)
				vertices.get(indices.get(i + j)).setTangent(new Vector3f(0, 0, 0));
			if (vertices.get(indices.get(i + j)).getBitangent() == null)
				vertices.get(indices.get(i + j)).setBitangent(new Vector3f(0, 0, 0));
		}

		// accumulate the face tangent/bitangent into all three vertices
		for (int j = 0; j < 3; j++) {
			vertices.get(indices.get(i + j)).getTangent().add(tangent);
			vertices.get(indices.get(i + j)).getBitangent().add(bitangent);
		}
	}

	// average by normalizing the accumulated vectors
	for (int i = 0; i < vertices.size(); i++) {
		vertices.get(i).getTangent().normalize();
		vertices.get(i).getBitangent().normalize();
	}
}
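
One thing I'm not sure about: if my Vector3f/Vector2f behave like JOML's (where sub()/mul() without a dest argument modify the receiver), then by the time the bitangent line runs, e1 and e2 have already been scaled by the tangent line above. If that matters, the per-triangle part would have to work on copies; a rough sketch of what I mean (untested, assuming JOML, same inputs as above):

import org.joml.Vector2f;
import org.joml.Vector3f;

// Untested sketch (assuming JOML): per-triangle tangent/bitangent where the
// edge vectors e1/e2 are never modified, so the bitangent is built from the
// original edges and not from values already scaled by the tangent step.
static void triangleTangentBitangent(Vector3f v0, Vector3f v1, Vector3f v2,
		Vector2f uv0, Vector2f uv1, Vector2f uv2,
		Vector3f outTangent, Vector3f outBitangent) {
	Vector3f e1 = v1.sub(v0, new Vector3f());     // v1 - v0 into a fresh vector
	Vector3f e2 = v2.sub(v0, new Vector3f());     // v2 - v0
	Vector2f dUV1 = uv1.sub(uv0, new Vector2f()); // UV delta along e1
	Vector2f dUV2 = uv2.sub(uv0, new Vector2f()); // UV delta along e2

	float r = 1.0f / (dUV1.x() * dUV2.y() - dUV1.y() * dUV2.x());

	// tangent = (e1 * dUV2.y - e2 * dUV1.y) * r
	outTangent.set(new Vector3f(e1).mul(dUV2.y())
			.sub(new Vector3f(e2).mul(dUV1.y()))
			.mul(r));
	// bitangent = (e2 * dUV1.x - e1 * dUV2.x) * r
	outBitangent.set(new Vector3f(e2).mul(dUV1.x())
			.sub(new Vector3f(e1).mul(dUV2.x()))
			.mul(r));
}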


Can someone please help me figure out what causes the problem?