Commits

Cliff Biffle committed 22fef44

Split the vertex shader out into a separate file and cleaned it up a little.

  • Participants
  • Parent commits 5a6f5dc
  • Branches shaders

Comments (0)

Files changed (3)

 
 #pragma mark --- GL Stuff ---
 
-static const char *vertex_shader_source[] = {
-  "const float halfFov = (57.0 / 180.0 * 3.14159265358) / 2.0;",
-  "vec3 kinect_unproject(vec3 point) {",
-  "  float linearZ = -325.616 / (point.z + -1084.61);",
-  "  vec2 angles = point.xy * halfFov;",
-  "  vec2 xyPrime = linearZ * sin(angles);",
-  "  return vec3(xyPrime, linearZ);"
-  "}",
+- (void) prepareOpenGL {
+  NSData *vertexShaderSource = [NSData dataWithContentsOfFile: [[NSBundle mainBundle] pathForResource: @"kinect_project" ofType: @"vs"]];
   
-  "void main() {",
-  // Undo the Kinect's perspective projection
-  "  float linearZ = -325.616 / (gl_Vertex.z + -1084.61);",
-  "  vec2 angles = gl_Vertex.xy * halfFov;",
-  "  vec2 xyPrime = linearZ * sin(angles);",
-  "  vec4 vertex = vec4(kinect_unproject(gl_Vertex.xyz), gl_Vertex.w);",
-  "  gl_Position = gl_ModelViewProjectionMatrix * vertex;",
-
-  // Unproject the normal too
-  "  vec3 normalEnd = normalize(gl_Normal) + gl_Vertex.xyz;",
-  "  vec3 unprojectedNormalEnd = kinect_unproject(normalEnd);",
-  "  vec3 correctedNormal = unprojectedNormalEnd - vertex.xyz;",
-  
-  // Compute the angle between the normal and the light
-  "  vec3 normal = normalize(gl_NormalMatrix * correctedNormal);",
-  "  vec3 lightDir = normalize(vec3(gl_LightSource[0].position));",
-  "  float NdotL = max(dot(normal, lightDir), 0.0);",
-  
-  // Compute the diffuse term
-  "  vec4 diffuse = gl_FrontMaterial.diffuse * gl_LightSource[0].diffuse;",
-  
-  // Compute the ambient terms
-  "  vec4 ambient = gl_FrontMaterial.ambient * gl_LightSource[0].ambient;",
-  "  vec4 globalAmbient = gl_LightModel.ambient * gl_FrontMaterial.ambient;",
-  
-  // Compute the specular term using Blinn-Phong
-  "  vec4 specular = vec4(0,0,0,0);",
-  "  if (NdotL > 0.0) {",
-  "    float NdotHV = max(dot(normal, gl_LightSource[0].halfVector.xyz), 0.0);",
-  "    specular = gl_FrontMaterial.specular * gl_LightSource[0].specular",
-  "        * pow(NdotHV, gl_FrontMaterial.shininess);",
-  "  }",
-  
-  // Apply the color
-  "  gl_FrontColor = NdotL * diffuse + globalAmbient + ambient + specular;",
-  "}",
-};
-
-- (void) prepareOpenGL {
   vertexShader = glCreateShader(GL_VERTEX_SHADER);
-  glShaderSource(vertexShader, sizeof(vertex_shader_source) / sizeof(char *), vertex_shader_source, NULL);
+  const char *sourcePointer = [vertexShaderSource bytes];
+  GLint sourceLength = (GLint) [vertexShaderSource length];
+  glShaderSource(vertexShader, 1, &sourcePointer, &sourceLength);
   glCompileShader(vertexShader);
   
   program = glCreateProgram();

KinectViewer.xcodeproj/project.pbxproj

 		C7759B4612A8B2D3003479EF /* KVKinectHardware.m in Sources */ = {isa = PBXBuildFile; fileRef = C7759B4512A8B2D3003479EF /* KVKinectHardware.m */; };
 		C7759C0712A97873003479EF /* KVDepthRecorder.m in Sources */ = {isa = PBXBuildFile; fileRef = C7759C0612A97873003479EF /* KVDepthRecorder.m */; };
 		C7759C1012A97A3B003479EF /* KVDepthPlayer.m in Sources */ = {isa = PBXBuildFile; fileRef = C7759C0F12A97A3B003479EF /* KVDepthPlayer.m */; };
+		C775A0F812AC111E003479EF /* kinect_project.vs in Resources */ = {isa = PBXBuildFile; fileRef = C775A0D512AC0942003479EF /* kinect_project.vs */; };
 /* End PBXBuildFile section */
 
 /* Begin PBXContainerItemProxy section */
 		C7759C0612A97873003479EF /* KVDepthRecorder.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = KVDepthRecorder.m; sourceTree = "<group>"; };
 		C7759C0E12A97A3B003479EF /* KVDepthPlayer.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = KVDepthPlayer.h; sourceTree = "<group>"; };
 		C7759C0F12A97A3B003479EF /* KVDepthPlayer.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = KVDepthPlayer.m; sourceTree = "<group>"; };
+		C775A0D512AC0942003479EF /* kinect_project.vs */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = kinect_project.vs; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
 /* Begin PBXFrameworksBuildPhase section */
 				C70E6683129C8FD5004A44B3 /* driver */,
 				C7759C0012A8CBE9003479EF /* Utils */,
 				C7F36B8812A19EBD00EBC3AB /* Model */,
+				C775A0D312AC08D4003479EF /* Shaders */,
 				080E96DDFE201D6D7F000001 /* App */,
 				29B97315FDCFA39411CA2CEA /* Other Sources */,
 				29B97317FDCFA39411CA2CEA /* Resources */,
 			name = Utils;
 			sourceTree = "<group>";
 		};
+		C775A0D312AC08D4003479EF /* Shaders */ = {
+			isa = PBXGroup;
+			children = (
+				C775A0D512AC0942003479EF /* kinect_project.vs */,
+			);
+			name = Shaders;
+			sourceTree = "<group>";
+		};
 		C7F36B8812A19EBD00EBC3AB /* Model */ = {
 			isa = PBXGroup;
 			children = (
 			isa = PBXResourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				C775A0F812AC111E003479EF /* kinect_project.vs in Resources */,
 				8D11072B0486CEB800E47090 /* InfoPlist.strings in Resources */,
 				1DDD58160DA1D0A300B32029 /* MainMenu.xib in Resources */,
 			);

kinect_project.vs

+/*
+ * Copyright 2010 Cliff L. Biffle.  All Rights Reserved.
+ * Use of this source code is governed by the Apache License 2.0,
+ * which can be found in the LICENSE file.
+ */
+ 
+/*
+ * Kinect Projection Vertex Shader
+ *
+ * Since the Kinect is, fundamentally, a camera, its data has already
+ * undergone perspective projection.  Closer objects seem relatively
+ * larger, and distant objects seem small.  If we left the POV fixed,
+ * we could simply display the Kinect's raw data using an orthographic
+ * projection -- but what's the fun in that?
+ *
+ * This shader maps from one perspective space (the Kinect's) into
+ * another (GL's modelview/projection space).  We receive unprocessed
+ * vertices from the CPU, which are structured like this:
+ *   X and Y: -1.0 .. 1.0, normalized pixel location in the input.
+ *   Z: the Kinect's raw 11-bit non-linear depth sample.
+ *
+ * (Note: you might expect the Kinect's raw depth samples to measure
+ * distance from the Kinect -- i.e. the length of the ray cast from
+ * the lens to the object.  Not so!  The Kinect pre-processes depths,
+ * and by the time we receive them, they are the distance from the
+ * object to the nearest point on the camera's XY plane.)
+ *
+ * Our job is to:
+ *  1. Linearize and scale the depth.  In this case, we convert it
+ *     such that 1 GL Unit = 1 meter.  Because depths are already
+ *     measured from the XY plane, we don't have to do any raycasting.
+ *  2. Reverse the perspective warping of the XY data, using what we
+ *     know about the Kinect's optics.
+ */
+
+// The Kinect's lens appears to have a 57-degree horizontal FOV.
+// (Empirical value; TODO confirm against official hardware specs.)
+const float kinectFovXDegrees = 57.0;
+
+// Multiplying this by our normalized XY gives us ray angles.
+// (3.14159265358 is pi, truncated; GLSL provides no pi constant.)
+const float halfFov = (kinectFovXDegrees / 180.0 * 3.14159265358) / 2.0;
+
+/*
+ * Reverses the Kinect's perspective projection.  The input point must
+ * be in our "raw" format, that is:
+ *   X and Y: -1.0 .. 1.0, normalized pixel location in the input.
+ *   Z: the Kinect's raw 11-bit non-linear depth sample.
+ *
+ * The result is the point's original location in orthonormal space,
+ * with 1 GL unit per meter.
+ */
+vec3 kinect_unproject(vec3 point) {
+  // Linearization equation derived by the ROS folks at CCNY.
+  // The constants -325.616 and -1084.61 are empirical calibration
+  // values for the raw 11-bit depth; presumably per-device variation
+  // exists -- TODO confirm if precision matters.
+  float linearZ = -325.616 / (point.z + -1084.61);
+  
+  // Compute the psi/theta angles of the ray through this pixel.
+  vec2 angles = point.xy * halfFov;
+  // Find the intersection of that ray with the plane at linearZ.
+  // NOTE(review): a true ray/plane intersection would use tan(angles);
+  // sin() is a small-angle approximation (or models a spherical
+  // sensor surface) -- confirm which matches the Kinect optics.
+  vec2 intersection = linearZ * sin(angles);
+  
+  return vec3(intersection, linearZ);
+}
+
+/*
+ * In the incoming data, normals have been computed in Kinect-space.
+ * This function converts them into GL space.  I am not spectacularly
+ * happy with the approach and may try again later.
+ *
+ * Works by unprojecting the point at the tip of the normal and
+ * re-deriving the direction from the already-unprojected base point:
+ *
+ *   normal       - Kinect-space normal; callers pass it pre-normalized.
+ *   cameraOrigin - the vertex position in raw Kinect coordinates.
+ *   glOrigin     - the same vertex after kinect_unproject.
+ *
+ * Note that the result may not, itself, be normalized.
+ */
+vec3 kinect_unproject_normal(vec3 normal, vec3 cameraOrigin, vec3 glOrigin) {
+  return kinect_unproject(normal + cameraOrigin) - glOrigin;
+}
+
+/*
+ * Computes lighting per-vertex using Blinn-Phong, plus diffuse/ambient.
+ *
+ *   normal - vertex normal in model space (pre-gl_NormalMatrix); it
+ *            need not be unit length, since it is normalized after the
+ *            matrix transform anyway.
+ *
+ * Only gl_LightSource[0] contributes; other enabled lights are ignored.
+ */
+vec4 light_vertex(vec3 normal) {
+  vec3 eyeSpaceNormal = normalize(gl_NormalMatrix * normal);
+  // Note: this forces all lights to be directional lights.
+  vec3 lightDir = normalize(vec3(gl_LightSource[0].position));
+  float NdotL = max(dot(eyeSpaceNormal, lightDir), 0.0);
+  
+  // Diffuse
+  vec4 diffuse = gl_FrontMaterial.diffuse * gl_LightSource[0].diffuse;
+  // Ambient: per-light ambient plus the global scene ambient.
+  vec4 ambient = gl_FrontMaterial.ambient * gl_LightSource[0].ambient
+               + gl_FrontMaterial.ambient * gl_LightModel.ambient;
+  // Specular contributes only where the surface faces the light.
+  vec4 specular = vec4(0, 0, 0, 0);
+  if (NdotL > 0.0) {
+    // gl_LightSource[0].halfVector is GL's precomputed Blinn half
+    // vector; NOTE(review): GL computes it assuming an infinite
+    // viewer unless LIGHT_MODEL_LOCAL_VIEWER is set -- confirm that
+    // is acceptable here.
+    float NdotHV = max(dot(eyeSpaceNormal, gl_LightSource[0].halfVector.xyz), 0.0);
+    specular = gl_FrontMaterial.specular * gl_LightSource[0].specular
+             * pow(NdotHV, gl_FrontMaterial.shininess);
+  }
+  
+  return NdotL * diffuse + ambient + specular;
+}
+
+/*
+ * Vertex Shader entry point.
+ */
+void main() {
+  // Undo the Kinect's perspective projection, leaving the homogeneous term.
+  // (gl_Vertex.w passes through untouched; presumably always 1.0.)
+  vec4 vertex = vec4(kinect_unproject(gl_Vertex.xyz), gl_Vertex.w);
+  
+  // Project the point using GL's perspective settings.
+  gl_Position = gl_ModelViewProjectionMatrix * vertex;
+
+  // The normal was computed in Kinect-space.  Unproject it too.
+  vec3 normal = kinect_unproject_normal(normalize(gl_Normal), gl_Vertex.xyz, vertex.xyz);
+  
+  // Lighting is evaluated here, per-vertex (Gouraud shading).
+  gl_FrontColor = light_vertex(normal);
+}