16 #ifndef _vtkVolumeShaderComposer_h
17 #define _vtkVolumeShaderComposer_h
36 std::string::size_type pos = 0;
38 while ((pos = source.find(search, 0)) != std::string::npos)
40 source.replace(pos, search.length(),
replace);
41 pos += search.length();
60 \n vec4 pos = in_projectionMatrix * in_modelViewMatrix *\
61 \n in_volumeMatrix * vec4(in_vertexPos.xyz, 1.0);\
62 \n gl_Position = pos;"
72 "\n // Assuming point data only. Also, we offset the texture coordinate\
73 \n // to account for OpenGL treating voxel at the center of the cell.\
74 \n vec3 uvx = (in_vertexPos - in_volumeExtentsMin) /\
75 \n (in_volumeExtentsMax - in_volumeExtentsMin);\
76 \n vec3 delta = in_textureExtentsMax - in_textureExtentsMin;\
77 \n ip_textureCoords = (uvx * (delta - vec3(1.0)) + vec3(0.5)) / delta;"
87 \n uniform mat4 in_modelViewMatrix;\
88 \n uniform mat4 in_projectionMatrix;\
89 \n uniform mat4 in_volumeMatrix;\
91 \n uniform vec3 in_volumeExtentsMin;\
92 \n uniform vec3 in_volumeExtentsMax;\
94 \n uniform vec3 in_textureExtentsMax;\
95 \n uniform vec3 in_textureExtentsMin;"
104 int lightingComplexity,
106 int independentComponents)
110 \nuniform sampler3D in_volume;\
111 \nuniform int in_noOfComponents;\
112 \nuniform int in_independentComponents;\
114 \nuniform sampler2D in_noiseSampler;\
115 \nuniform sampler2D in_depthSampler;\
117 \n// Camera position\
118 \nuniform vec3 in_cameraPos;\
120 \n// view and model matrices\
121 \nuniform mat4 in_volumeMatrix;\
122 \nuniform mat4 in_inverseVolumeMatrix;\
123 \nuniform mat4 in_projectionMatrix;\
124 \nuniform mat4 in_inverseProjectionMatrix;\
125 \nuniform mat4 in_modelViewMatrix;\
126 \nuniform mat4 in_inverseModelViewMatrix;\
127 \nuniform mat4 in_textureDatasetMatrix;\
128 \nuniform mat4 in_inverseTextureDatasetMatrix;\
129 \nuniform mat3 in_texureToEyeIt;\
132 \nuniform vec3 in_cellStep;\
133 \nuniform vec2 in_scalarsRange;\
134 \nuniform vec3 in_cellSpacing;\
136 \n// Sample distance\
137 \nuniform float in_sampleDistance;\
140 \nuniform vec3 in_cellScale;\
141 \nuniform vec2 in_windowLowerLeftCorner;\
142 \nuniform vec2 in_inverseOriginalWindowSize;\
143 \nuniform vec2 in_inverseWindowSize;\
144 \nuniform vec3 in_textureExtentsMax;\
145 \nuniform vec3 in_textureExtentsMin;\
146 \nuniform float in_volumeScale;\
147 \nuniform float in_volumeBias;\
149 \n// Material and lighting\
150 \nuniform vec3 in_diffuse;\
151 \nuniform vec3 in_ambient;\
152 \nuniform vec3 in_specular;\
153 \nuniform float in_shininess;\
156 if (lightingComplexity > 0)
159 \nuniform bool in_twoSidedLighting;"
163 if (lightingComplexity == 3)
166 \nuniform int in_numberOfLights;\
167 \nuniform vec3 in_lightAmbientColor[6];\
168 \nuniform vec3 in_lightDiffuseColor[6];\
169 \nuniform vec3 in_lightSpecularColor[6];\
170 \nuniform vec3 in_lightDirection[6];\
171 \nuniform vec3 in_lightPosition[6];\
172 \nuniform vec3 in_lightAttenuation[6];\
173 \nuniform float in_lightConeAngle[6];\
174 \nuniform float in_lightExponent[6];\
175 \nuniform int in_lightPositional[6];\
178 else if (lightingComplexity == 2)
181 \nuniform int in_numberOfLights;\
182 \nuniform vec3 in_lightAmbientColor[6];\
183 \nuniform vec3 in_lightDiffuseColor[6];\
184 \nuniform vec3 in_lightSpecularColor[6];\
185 \nuniform vec3 in_lightDirection[6];\
191 \nuniform vec3 in_lightAmbientColor[1];\
192 \nuniform vec3 in_lightDiffuseColor[1];\
193 \nuniform vec3 in_lightSpecularColor[1];\
197 if (noOfComponents > 1 && independentComponents)
200 \nuniform vec4 in_componentWeight;");
212 \n // Get the 3D texture coordinates for lookup into the in_volume dataset\
213 \n g_dataPos = ip_textureCoords.xyz;\
215 \n // Eye position in object space\
216 \n g_eyePosObj = (in_inverseVolumeMatrix * vec4(in_cameraPos, 1.0));\
217 \n if (g_eyePosObj.w != 0.0)\
219 \n g_eyePosObj.x /= g_eyePosObj.w;\
220 \n g_eyePosObj.y /= g_eyePosObj.w;\
221 \n g_eyePosObj.z /= g_eyePosObj.w;\
222 \n g_eyePosObj.w = 1.0;\
225 \n // Getting the ray marching direction (in object space);\
226 \n vec3 rayDir = computeRayDirection();\
228 \n // Multiply the raymarching direction with the step size to get the\
229 \n // sub-step size we need to take at each raymarching step\
230 \n g_dirStep = (in_inverseTextureDatasetMatrix *\
231 \n vec4(rayDir, 0.0)).xyz * in_sampleDistance;\
233 \n g_dataPos += g_dirStep * (texture2D(in_noiseSampler, g_dataPos.xy).x);\
235 \n // Flag to deternmine if voxel should be considered for the rendering\
236 \n bool l_skip = false;");
262 int independentComponents,
263 std::map<int, std::string> gradientTableMap)
269 \nuniform sampler1D in_gradientTransferFunc;\
270 \nfloat computeGradientOpacity(vec4 grad)\
272 \n return texture1D(in_gradientTransferFunc, grad.w).w;\
276 else if (noOfComponents > 1 && independentComponents &&
279 for (
int i = 0; i < noOfComponents; ++i)
281 shaderStr +=
std::string(
"\n uniform sampler1D ") +
286 \nfloat computeGradientOpacity(vec4 grad, int component)\
288 \n if (component == 0)\
290 \n return texture1D(in_gradientTransferFunc, grad.w).w;\
292 \n if (component == 1)\
294 \n return texture1D(in_gradientTransferFunc1, grad.w).w;\
296 \n if (component == 2)\
298 \n return texture1D(in_gradientTransferFunc2, grad.w).w;\
300 \n if (component == 3)\
302 \n return texture1D(in_gradientTransferFunc3, grad.w).w;\
312 \nvec4 computeGradient()\
316 \n vec3 xvec = vec3(in_cellStep[0], 0.0, 0.0);\
317 \n vec3 yvec = vec3(0.0, in_cellStep[1], 0.0);\
318 \n vec3 zvec = vec3(0.0, 0.0, in_cellStep[2]);\
319 \n g1.x = texture3D(in_volume, vec3(g_dataPos + xvec)).x;\
320 \n g1.y = texture3D(in_volume, vec3(g_dataPos + yvec)).x;\
321 \n g1.z = texture3D(in_volume, vec3(g_dataPos + zvec)).x;\
322 \n g2.x = texture3D(in_volume, vec3(g_dataPos - xvec)).x;\
323 \n g2.y = texture3D(in_volume, vec3(g_dataPos - yvec)).x;\
324 \n g2.z = texture3D(in_volume, vec3(g_dataPos - zvec)).x;\
325 \n return vec4((g1 - g2), -1.0);\
333 \nvec4 computeGradient()\
337 \n vec3 xvec = vec3(in_cellStep[0], 0.0, 0.0);\
338 \n vec3 yvec = vec3(0.0, in_cellStep[1], 0.0);\
339 \n vec3 zvec = vec3(0.0, 0.0, in_cellStep[2]);\
340 \n g1.x = texture3D(in_volume, vec3(g_dataPos + xvec)).x;\
341 \n g1.y = texture3D(in_volume, vec3(g_dataPos + yvec)).x;\
342 \n g1.z = texture3D(in_volume, vec3(g_dataPos + zvec)).x;\
343 \n g2.x = texture3D(in_volume, vec3(g_dataPos - xvec)).x;\
344 \n g2.y = texture3D(in_volume, vec3(g_dataPos - yvec)).x;\
345 \n g2.z = texture3D(in_volume, vec3(g_dataPos - zvec)).x;\
346 \n g1.x = in_scalarsRange[0] + (\
347 \n in_scalarsRange[1] - in_scalarsRange[0]) * g1.x;\
348 \n g1.y = in_scalarsRange[0] + (\
349 \n in_scalarsRange[1] - in_scalarsRange[0]) * g1.y;\
350 \n g1.z = in_scalarsRange[0] + (\
351 \n in_scalarsRange[1] - in_scalarsRange[0]) * g1.z;\
352 \n g2.x = in_scalarsRange[0] + (\
353 \n in_scalarsRange[1] - in_scalarsRange[0]) * g2.x;\
354 \n g2.y = in_scalarsRange[0] + (\
355 \n in_scalarsRange[1] - in_scalarsRange[0]) * g2.y;\
356 \n g2.z = in_scalarsRange[0] + (\
357 \n in_scalarsRange[1] - in_scalarsRange[0]) * g2.z;\
358 \n g2.xyz = g1 - g2.xyz;\
359 \n vec3 cellSpacing = vec3(in_cellSpacing[0],\
360 \n in_cellSpacing[1],\
361 \n in_cellSpacing[2]);\
363 \n float avgSpacing = (cellSpacing[0] +\
365 \n cellSpacing[2])/3.0;\
366 \n // Adjust the aspect\
367 \n aspect.x = cellSpacing[0] * 2.0 / avgSpacing;\
368 \n aspect.y = cellSpacing[1] * 2.0 / avgSpacing;\
369 \n aspect.z = cellSpacing[2] * 2.0 / avgSpacing;\
370 \n g2.x /= aspect.x;\
371 \n g2.y /= aspect.y;\
372 \n g2.z /= aspect.z;\
373 \n float grad_mag = sqrt(g2.x * g2.x +\
376 \n if (grad_mag > 0.0)\
378 \n g2.x /= grad_mag;\
379 \n g2.y /= grad_mag;\
380 \n g2.z /= grad_mag;\
384 \n g2.xyz = vec3(0.0, 0.0, 0.0);\
386 \n grad_mag = grad_mag * 1.0 / (0.25 * (in_scalarsRange[1] -\
387 \n (in_scalarsRange[0])));\
388 \n grad_mag = clamp(grad_mag, 0.0, 1.0);\
397 \nvec4 computeGradient()\
399 \n return vec4(0.0);\
411 int independentComponents,
413 int lightingComplexity)
417 \nvec4 computeLighting(vec4 color)\
424 \n // Compute gradient function only once\
425 \n vec4 gradient = computeGradient();"
431 if (lightingComplexity == 1)
434 \n // Light position in object space\
435 \n vec4 lightPosObj = (in_inverseVolumeMatrix *\
436 \n vec4(in_cameraPos, 1.0));\
437 \n if (lightPosObj.w != 0.0)\
439 \n lightPosObj.x /= lightPosObj.w;\
440 \n lightPosObj.y /= lightPosObj.w;\
441 \n lightPosObj.z /= lightPosObj.w;\
442 \n lightPosObj.w = 1.0;\
444 \n vec3 diffuse = vec3(0.0);\
445 \n vec3 specular = vec3(0.0);\
446 \n vec3 ldir = normalize(lightPosObj.xyz - ip_vertexPos);\
447 \n vec3 vdir = normalize(g_eyePosObj.xyz - ip_vertexPos);\
448 \n vec3 h = normalize(ldir + vdir);\
449 \n vec3 g2 = gradient.xyz;\
450 \n g2 = (1.0/in_cellSpacing) * g2;\
451 \n float normalLength = length(g2);\
452 \n if (normalLength > 0.0)\
454 \n g2 = normalize(g2);\
458 \n g2 = vec3(0.0, 0.0, 0.0);\
460 \n float nDotL = dot(g2, ldir);\
461 \n float nDotH = dot(g2, h);\
462 \n if (nDotL < 0.0 && in_twoSidedLighting)\
466 \n if (nDotH < 0.0 && in_twoSidedLighting)\
472 \n diffuse = nDotL * in_diffuse * in_lightDiffuseColor[0]\
477 \n specular = pow(nDotH, in_shininess) * in_specular *\
478 \n in_lightSpecularColor[0];\
480 \n // For the headlight, ignore the light's ambient color\
481 \n // for now as it is causing the old mapper tests to fail\
482 \n vec3 finalColor = (in_ambient * color.rgb +\
483 \n diffuse + specular);"
486 else if (lightingComplexity == 2)
489 \n vec4 fragWorldPos = in_modelViewMatrix * in_volumeMatrix *\
490 \n in_textureDatasetMatrix * vec4(-g_dataPos, 1.0);\
491 \n if (fragWorldPos.w != 0.0)\
493 \n fragWorldPos /= fragWorldPos.w;\
495 \n vec3 vdir = normalize(fragWorldPos.xyz);\
496 \n vec3 normal = gradient.xyz;\
497 \n vec3 ambient = vec3(0.0);\
498 \n vec3 diffuse = vec3(0.0);\
499 \n vec3 specular = vec3(0.0);\
500 \n float normalLength = length(normal);\
501 \n if (normalLength > 0.0)\
503 \n normal = normalize(in_texureToEyeIt * normal);\
507 \n normal = vec3(0.0, 0.0, 0.0);\
509 \n for (int lightNum = 0; lightNum < in_numberOfLights; lightNum++)\
511 \n vec3 ldir = in_lightDirection[lightNum].xyz;\
512 \n vec3 h = normalize(ldir + vdir);\
513 \n float nDotH = dot(normal, h);\
514 \n if (nDotH < 0.0 && in_twoSidedLighting)\
518 \n float nDotL = dot(normal, ldir);\
519 \n if (nDotL < 0.0 && in_twoSidedLighting)\
525 \n diffuse += in_lightDiffuseColor[lightNum] * nDotL;\
529 \n specular += in_lightSpecularColor[lightNum] * pow(nDotH, in_shininess);\
531 \n ambient += in_lightAmbientColor[lightNum];\
533 \n vec3 finalColor = in_ambient * ambient +\
534 \n in_diffuse * diffuse * color.rgb +\
535 \n in_specular * specular;"
538 else if (lightingComplexity == 3)
541 \n vec4 fragWorldPos = in_modelViewMatrix * in_volumeMatrix *\
542 \n in_textureDatasetMatrix * vec4(g_dataPos, 1.0);\
543 \n if (fragWorldPos.w != 0.0)\
545 \n fragWorldPos /= fragWorldPos.w;\
547 \n vec3 viewDirection = normalize(-fragWorldPos.xyz);\
548 \n vec3 ambient = vec3(0,0,0);\
549 \n vec3 diffuse = vec3(0,0,0);\
550 \n vec3 specular = vec3(0,0,0);\
551 \n vec3 vertLightDirection;\
552 \n vec3 normal = normalize(in_texureToEyeIt * gradient.xyz);\
554 \n for (int lightNum = 0; lightNum < in_numberOfLights; lightNum++)\
556 \n float attenuation = 1.0;\
558 \n lightDir = in_lightDirection[lightNum];\
559 \n if (in_lightPositional[lightNum] == 0)\
561 \n vertLightDirection = lightDir;\
565 \n vertLightDirection = (fragWorldPos.xyz - in_lightPosition[lightNum]);\
566 \n float distance = length(vertLightDirection);\
567 \n vertLightDirection = normalize(vertLightDirection);\
568 \n attenuation = 1.0 /\
569 \n (in_lightAttenuation[lightNum].x\
570 \n + in_lightAttenuation[lightNum].y * distance\
571 \n + in_lightAttenuation[lightNum].z * distance * distance);\
572 \n // per OpenGL standard cone angle is 90 or less for a spot light\
573 \n if (in_lightConeAngle[lightNum] <= 90.0)\
575 \n float coneDot = dot(vertLightDirection, lightDir);\
576 \n // if inside the cone\
577 \n if (coneDot >= cos(radians(in_lightConeAngle[lightNum])))\
579 \n attenuation = attenuation * pow(coneDot, in_lightExponent[lightNum]);\
583 \n attenuation = 0.0;\
587 \n // diffuse and specular lighting\
588 \n float nDotL = dot(normal, vertLightDirection);\
589 \n if (nDotL < 0.0 && in_twoSidedLighting)\
595 \n float df = max(0.0, attenuation * nDotL);\
596 \n diffuse += (df * in_lightDiffuseColor[lightNum]);\
598 \n vec3 h = normalize(vertLightDirection + viewDirection);\
599 \n float nDotH = dot(normal, h);\
600 \n if (nDotH < 0.0 && in_twoSidedLighting)\
606 \n float sf = attenuation * pow(nDotH, in_shininess);\
607 \n specular += (sf * in_lightSpecularColor[lightNum]);\
609 \n ambient += in_lightAmbientColor[lightNum];\
611 \n vec3 finalColor = in_ambient * ambient + in_diffuse *\
612 \n diffuse * color.rgb + in_specular * specular;\
619 "\n vec3 finalColor = color.rgb;"
626 \n if (gradient.w >= 0.0)\
628 \n color.a = color.a *\
629 \n computeGradientOpacity(gradient);\
633 else if (noOfComponents > 1 && independentComponents &&
637 \n if (gradient.w >= 0.0)\
639 \n for (int i = 0; i < in_noOfComponents; ++i)\
641 \n color.a = color.a *\
642 \n computeGradientOpacity(gradient, i) * in_componentWeight[i];\
648 \n return vec4(finalColor, color.a);\
664 \nvec3 computeRayDirection()\
666 \n return normalize(ip_vertexPos.xyz - g_eyePosObj.xyz);\
672 \nuniform vec3 in_projectionDirection;\
673 \nvec3 computeRayDirection()\
675 \n return normalize((in_inverseVolumeMatrix *\
676 \n vec4(in_projectionDirection, 0.0)).xyz);\
686 int independentComponents,
687 std::map<int, std::string> colorTableMap)
689 if (noOfComponents == 1)
692 \nuniform sampler1D in_colorTransferFunc;\
693 \nvec4 computeColor(vec4 scalar)\
695 \n return computeLighting(vec4(texture1D(in_colorTransferFunc,\
697 \n computeOpacity(scalar)));\
700 else if (noOfComponents > 1 && independentComponents)
703 for (
int i = 0; i < noOfComponents; ++i)
705 shaderStr +=
std::string(
"\n uniform sampler1D ") +
710 \nvec4 computeColor(vec4 scalar, int component)\
712 \n if (component == 0)\
714 \n return computeLighting(vec4(texture1D(\
715 \n in_colorTransferFunc,\
716 \n scalar[component]).xyz,\
717 \n computeOpacity(scalar, component)));\
719 \n if (component == 1)\
721 \n return computeLighting(vec4(texture1D(\
722 \n in_colorTransferFunc1,\
723 \n scalar[component]).xyz,\
724 \n computeOpacity(scalar, component)));\
726 \n if (component == 2)\
728 \n return computeLighting(vec4(texture1D(\
729 \n in_colorTransferFunc2,\
730 \n scalar[component]).xyz,\
731 \n computeOpacity(scalar, component)));\
733 \n if (component == 3)\
735 \n return computeLighting(vec4(texture1D(\
736 \n in_colorTransferFunc3,\
737 \n scalar[component]).xyz,\
738 \n computeOpacity(scalar, component)));\
746 \nvec4 computeColor(vec4 scalar)\
748 \n return computeLighting(vec4(scalar.xyz, computeOpacity(scalar)));\
757 int independentComponents,
758 std::map<int, std::string> opacityTableMap)
760 if (noOfComponents > 1 && independentComponents)
763 for (
int i = 0; i < noOfComponents; ++i)
765 shaderStr +=
std::string(
"\n uniform sampler1D ") +
770 \nfloat computeOpacity(vec4 scalar, int component)\
772 \n if (component == 0)\
774 \n return texture1D(in_opacityTransferFunc,\
775 \n scalar[component]).w;\
777 \n if (component == 1)\
779 \n return texture1D(in_opacityTransferFunc1,\
780 \n scalar[component]).w;\
782 \n if (component == 2)\
784 \n return texture1D(in_opacityTransferFunc2,\
785 \n scalar[component]).w;\
787 \n if (component == 3)\
789 \n return texture1D(in_opacityTransferFunc3,\
790 \n scalar[component]).w;\
799 \nuniform sampler1D in_opacityTransferFunc;\
800 \nfloat computeOpacity(vec4 scalar)\
802 \n return texture1D(in_opacityTransferFunc, scalar.w).w;\
831 \n // We get data between 0.0 - 1.0 range\
832 \n bool l_firstValue = true;\
833 \n vec4 l_maxValue = vec4(0.0);"
839 \n //We get data between 0.0 - 1.0 range\
840 \n bool l_firstValue = true;\
841 \n vec4 l_minValue = vec4(1.0);"
847 \n //We get data between 0.0 - 1.0 range\
848 \n float l_sumValue = 0.0;"
864 int independentComponents = 0)
873 if (noOfComponents == 4)
876 \n vec4 scalar = texture3D(in_volume, g_dataPos);\
877 \n if (l_maxValue.w < scalar.w || l_firstValue)\
879 \n l_maxValue = scalar;\
882 \n if (l_firstValue)\
884 \n l_firstValue = false;\
891 \n vec4 scalar = texture3D(in_volume, g_dataPos);\
892 \n if (l_maxValue.w < scalar.x || l_firstValue)\
894 \n l_maxValue.w = scalar.x;\
897 \n if (l_firstValue)\
899 \n l_firstValue = false;\
906 if (noOfComponents == 4)
909 \n vec4 scalar = texture3D(in_volume, g_dataPos);\
910 \n if (l_minValue.w > scalar.w || l_firstValue)\
912 \n l_minValue = scalar;\
915 \n if (l_firstValue)\
917 \n l_firstValue = false;\
924 \n vec4 scalar = texture3D(in_volume, g_dataPos);\
925 \n if (l_minValue.w > scalar.x || l_firstValue)\
927 \n l_minValue.w = scalar.x;\
930 \n if (l_firstValue)\
932 \n l_firstValue = false;\
940 \n vec4 scalar = texture3D(in_volume, g_dataPos);\
941 \n float opacity = computeOpacity(scalar);\
942 \n l_sumValue = l_sumValue + opacity * scalar.x;"
947 if (noOfComponents > 1 && independentComponents)
950 \n vec4 color[4]; vec4 tmp = vec4(0.0);\
951 \n float totalAlpha = 0.0;\
952 \n vec4 scalar = texture3D(in_volume, g_dataPos);\
953 \n for (int i = 0; i < in_noOfComponents; ++i)\
956 if (!mask || !maskInput ||
960 \n // Data fetching from the red channel of volume texture\
961 \n color[i] = vec4(computeColor(scalar, i));\
962 \n totalAlpha += color[i][3] * in_componentWeight[i];\
964 \n if (totalAlpha > 0.0)\
966 \n for (int i = 0; i < in_noOfComponents; ++i)\
968 \n tmp.x += color[i].x * color[i].w * in_componentWeight[i] ;\
969 \n tmp.y += color[i].y * color[i].w * in_componentWeight[i];\
970 \n tmp.z += color[i].z * color[i].w * in_componentWeight[i];\
971 \n tmp.w += ((color[i].w * color[i].w)/totalAlpha);\
974 \n g_fragColor = (1.0f - g_fragColor.a) * tmp + g_fragColor;"
980 if (!mask || !maskInput ||
984 \n // Data fetching from the red channel of volume texture\
985 \n vec4 scalar = texture3D(in_volume, g_dataPos);\
986 \n vec4 g_srcColor = computeColor(scalar);"
991 \n // Opacity calculation using compositing:\
992 \n // here we use front to back compositing scheme whereby the current\
993 \n // sample value is multiplied to the currently accumulated alpha\
994 \n // and then this product is subtracted from the sample value to\
995 \n // get the alpha from the previous steps.\
996 \n // Next, this alpha is multiplied with the current sample colour\
997 \n // and accumulated to the composited colour. The alpha value from\
998 \n // the previous steps is then accumulated to the composited colour\
1000 \n g_srcColor.rgb *= g_srcColor.a;\
1001 \n g_fragColor = (1.0f - g_fragColor.a) * g_srcColor + g_fragColor;"
1021 int independentComponents = 0)
1025 if (noOfComponents > 1 && independentComponents)
1028 \n vec4 g_srcColor = vec4(0);\
1029 \n for (int i = 0; i < in_noOfComponents; ++i)\
1031 \n vec4 tmp = computeColor(l_maxValue, i);\
1032 \n g_srcColor[0] += tmp[0] * tmp[3] * in_componentWeight[i];\
1033 \n g_srcColor[1] += tmp[1] * tmp[3] * in_componentWeight[i];\
1034 \n g_srcColor[2] += tmp[2] * tmp[3] * in_componentWeight[i];\
1035 \n g_srcColor[3] += tmp[3] * in_componentWeight[i];\
1037 \n g_fragColor = g_srcColor;"
1043 \n vec4 g_srcColor = vec4(computeColor(l_maxValue).xyz,\
1044 \n computeOpacity(l_maxValue));\
1045 \n g_fragColor.rgb = g_srcColor.rgb * g_srcColor.a;\
1046 \n g_fragColor.a = g_srcColor.a;"
// NOTE(review): the per-component blend below previously wrote
// `tmp[3] * tmp[3]` into g_srcColor[2] a second time and never
// accumulated alpha into g_srcColor[3]; fixed to mirror the
// MAX_INTENSITY branch (g_srcColor[3] += tmp[3] * weight).
1052 if (noOfComponents > 1 && independentComponents)
1055 \n vec4 g_srcColor = vec4(0);\
1056 \n for (int i = 0; i < in_noOfComponents; ++i)\
1058 \n vec4 tmp = computeColor(l_minValue, i);\
1059 \n g_srcColor[0] += tmp[0] * tmp[3] * in_componentWeight[i];\
1060 \n g_srcColor[1] += tmp[1] * tmp[3] * in_componentWeight[i];\
1061 \n g_srcColor[2] += tmp[2] * tmp[3] * in_componentWeight[i];\
1062 \n g_srcColor[3] += tmp[3] * in_componentWeight[i];\
1064 \n g_fragColor = g_srcColor;"
1070 \n vec4 g_srcColor = vec4(computeColor(l_minValue).xyz,\
1071 \n computeOpacity(l_minValue));\
1072 \n g_fragColor.rgb = g_srcColor.rgb * g_srcColor.a;\
1073 \n g_fragColor.a = g_srcColor.a;"
1080 \n l_sumValue = clamp(l_sumValue, 0.0, 1.0);\
1081 \n g_fragColor = vec4(vec3(l_sumValue), 1.0);"
1112 \n // Minimum texture access coordinate\
1113 \n const vec3 l_tex_min = vec3(0);\
1115 \n // Maximum texture access coordinate\
1116 \n const vec3 l_tex_max = vec3(1);\
1118 \n // Flag to indicate if the raymarch loop should terminate \
1119 \n bool stop = false;\
1121 \n // 2D Texture fragment coordinates [0,1] from fragment coordinates \
1122 \n // the frame buffer texture has the size of the plain buffer but \
1123 \n // we use a fraction of it. The texture coordinates is less than 1 if \
1124 \n // the reduction factor is less than 1. \
1125 \n // Device coordinates are between -1 and 1. We need texture \
1126 \n // coordinates between 0 and 1 the in_depthSampler buffer has the \
1127 \n // original size buffer. \
1128 \n vec2 fragTexCoord = (gl_FragCoord.xy - in_windowLowerLeftCorner) *\
1129 \n in_inverseWindowSize;\
1130 \n vec4 l_depthValue = texture2D(in_depthSampler, fragTexCoord);\
1131 \n float l_terminatePointMax = 0.0;\
1134 \n if(gl_FragCoord.z >= l_depthValue.x)\
1139 \n // color buffer or max scalar buffer have a reduced size.\
1140 \n fragTexCoord = (gl_FragCoord.xy - in_windowLowerLeftCorner) *\
1141 \n in_inverseOriginalWindowSize;\
1143 \n // Compute max number of iterations it will take before we hit\
1144 \n // the termination point\
1146 \n // Abscissa of the point on the depth buffer along the ray.\
1147 \n // point in texture coordinates\
1148 \n vec4 terminatePoint;\
1149 \n terminatePoint.x = (gl_FragCoord.x - in_windowLowerLeftCorner.x) * 2.0 *\
1150 \n in_inverseWindowSize.x - 1.0;\
1151 \n terminatePoint.y = (gl_FragCoord.y - in_windowLowerLeftCorner.y) * 2.0 *\
1152 \n in_inverseWindowSize.y - 1.0;\
1153 \n terminatePoint.z = (2.0 * l_depthValue.x - (gl_DepthRange.near +\
1154 \n gl_DepthRange.far)) / gl_DepthRange.diff;\
1155 \n terminatePoint.w = 1.0;\
1157 \n // From normalized device coordinates to eye coordinates.\
1158 \n // in_projectionMatrix is inversed because of way VT\
1159 \n // From eye coordinates to texture coordinates\
1160 \n terminatePoint = in_inverseTextureDatasetMatrix *\
1161 \n in_inverseVolumeMatrix *\
1162 \n in_inverseModelViewMatrix *\
1163 \n in_inverseProjectionMatrix *\
1165 \n terminatePoint /= terminatePoint.w;\
1167 \n l_terminatePointMax = length(terminatePoint.xyz - g_dataPos.xyz) /\
1168 \n length(g_dirStep);\
1169 \n float l_currentT = 0.0;");
1178 \n // The two constants l_tex_min and l_tex_max have a value of\
1179 \n // vec3(-1,-1,-1) and vec3(1,1,1) respectively. To determine if the\
1180 \n // data value is outside the in_volume data, we use the sign function.\
1181 \n // The sign function return -1 if the value is less than 0, 0 if the\
1182 \n // value is equal to 0 and 1 if value is greater than 0. Hence, the\
1183 \n // sign function for the calculation (sign(g_dataPos-l_tex_min) and\
1184 \n // sign (l_tex_max-g_dataPos)) will give us vec3(1,1,1) at the\
1185 \n // possible minimum and maximum position.\
1186 \n // When we do a dot product between two vec3(1,1,1) we get answer 3.\
1187 \n // So to be within the dataset limits, the dot product will return a\
1188 \n // value less than 3. If it is greater than 3, we are already out of\
1189 \n // the in_volume dataset\
1190 \n stop = dot(sign(g_dataPos - l_tex_min), sign(l_tex_max - g_dataPos))\
1193 \n // If the stopping condition is true we brek out of the ray marching\
1199 \n // Early ray termination\
1200 \n // if the currently composited colour alpha is already fully saturated\
1201 \n // we terminated the loop or if we have hit an obstacle in the\
1202 \n // direction of they ray (using depth buffer) we terminate as well.\
1203 \n if((g_fragColor.a > (1 - 1/255.0)) || \
1204 \n l_currentT >= l_terminatePointMax)\
1238 \nuniform float cropping_planes[6];\
1239 \nuniform int cropping_flags [32];\
1240 \n// X: axis = 0, Y: axis = 1, Z: axis = 2\
1241 \n// cp Cropping plane bounds (minX, maxX, minY, maxY, minZ, maxZ)\
1242 \nint computeRegionCoord(float cp[6], vec3 pos, int axis)\
1244 \n int cpmin = axis * 2;\
1245 \n int cpmax = cpmin + 1;\
1247 \n if (pos[axis] < cp[cpmin])\
1251 \n else if (pos[axis] >= cp[cpmin] &&\
1252 \n pos[axis] < cp[cpmax])\
1256 \n else if (pos[axis] >= cp[cpmax])\
1263 \nint computeRegion(float cp[6], vec3 pos)\
1265 \n return (computeRegionCoord(cp, pos, 0) +\
1266 \n (computeRegionCoord(cp, pos, 1) - 1) * 3 +\
1267 \n (computeRegionCoord(cp, pos, 2) - 1) * 9);\
1282 \n // Convert cropping region to texture space\
1283 \n float cropping_planes_ts[6];\
1284 \n mat4 datasetToTextureMat = in_inverseTextureDatasetMatrix;\
1285 \n vec4 temp = vec4(cropping_planes[0], cropping_planes[1], 0.0, 1.0);\
1286 \n temp = datasetToTextureMat * temp;\
1287 \n if (temp[3] != 0.0)\
1289 \n temp[0] /= temp[3]; temp[1] /= temp[3];\
1291 \n cropping_planes_ts[0] = temp[0];\
1292 \n cropping_planes_ts[1] = temp[1];\
1294 \n temp = vec4(cropping_planes[2], cropping_planes[3], 0.0, 1.0);\
1295 \n temp = datasetToTextureMat * temp;\
1296 \n if (temp[3] != 0.0)\
1298 \n temp[0] /= temp[3]; temp[1] /= temp[3];\
1300 \n cropping_planes_ts[2] = temp[0];\
1301 \n cropping_planes_ts[3] = temp[1];\
1303 \n temp = vec4(cropping_planes[4], cropping_planes[5], 0.0, 1.0);\
1304 \n temp = datasetToTextureMat * temp;\
1305 \n if (temp[3] != 0.0)\
1307 \n temp[0] /= temp[3]; temp[1] /= temp[3];\
1309 \n cropping_planes_ts[4] = temp[0];\
1310 \n cropping_planes_ts[5] = temp[1];"
1324 \n // Determine region\
1325 \n int regionNo = computeRegion(cropping_planes_ts, g_dataPos);\
1327 \n // Do & operation with cropping flags\
1328 \n // Pass the flag that its Ok to sample or not to sample\
1329 \n if (cropping_flags[regionNo] == 0)\
1331 \n // Skip this voxel\
1373 \nfloat clippingPlanesTexture[48];\
1374 \nint clippingPlanesSize = int(in_clippingPlanes[0]);\
1376 \nmat4 world_to_texture_mat = in_inverseTextureDatasetMatrix *\
1377 \n in_inverseVolumeMatrix;\
1378 \nfor (int i = 0; i < clippingPlanesSize; i = i + 6)\
1380 \n vec4 origin = vec4(in_clippingPlanes[i + 1],\
1381 \n in_clippingPlanes[i + 2],\
1382 \n in_clippingPlanes[i + 3], 1.0);\
1383 \n vec4 normal = vec4(in_clippingPlanes[i + 4],\
1384 \n in_clippingPlanes[i + 5],\
1385 \n in_clippingPlanes[i + 6], 0.0);\
1387 \n origin = world_to_texture_mat * origin;\
1388 \n normal = world_to_texture_mat * normal;\
1390 \n if (origin[3] != 0.0)\
1392 \n origin[0] = origin[0] / origin[3];\
1393 \n origin[1] = origin[1] / origin[3];\
1394 \n origin[2] = origin[2] / origin[3];\
1396 \n if (normal[3] != 0.0)\
1398 \n normal[0] = normal[0] / normal[3];\
1399 \n normal[1] = normal[1] / normal[3];\
1400 \n normal[2] = normal[2] / normal[3];\
1403 \n clippingPlanesTexture[i] = origin[0];\
1404 \n clippingPlanesTexture[i + 1] = origin[1];\
1405 \n clippingPlanesTexture[i + 2] = origin[2];\
1407 \n clippingPlanesTexture[i + 3] = normal[0];\
1408 \n clippingPlanesTexture[i + 4] = normal[1];\
1409 \n clippingPlanesTexture[i + 5] = normal[2];\
1427 \n for (int i = 0; i < (clippingPlanesSize) && !l_skip; i = i + 6)\
1429 \n if (dot(vec3(g_dataPos - vec3(clippingPlanesTexture[i],\
1430 \n clippingPlanesTexture[i + 1],\
1431 \n clippingPlanesTexture[i + 2])),\
1432 \n vec3(clippingPlanesTexture[i + 3],\
1433 \n clippingPlanesTexture[i + 4],\
1434 \n clippingPlanesTexture[i + 5])) < 0)\
1460 if (!mask || !maskInput)
1478 if (!mask || !maskInput ||
1486 \nvec4 maskValue = texture3D(in_mask, g_dataPos);\
1487 \nif(maskValue.a <= 0.0)\
1503 if (!mask || !maskInput ||
1511 \nuniform float in_maskBlendFactor;\
1512 \nuniform sampler1D in_mask1;\
1513 \nuniform sampler1D in_mask2;"
1526 if (!mask || !maskInput ||
1534 \nvec4 scalar = texture3D(in_volume, g_dataPos);\
1535 \nif (in_maskBlendFactor == 0.0)\
1537 \n g_srcColor = computeColor(scalar);\
1541 \n // Get the mask value at this same location\
1542 \n vec4 maskValue = texture3D(in_mask, g_dataPos);\
1543 \n if(maskValue.a == 0.0)\
1545 \n g_srcColor = computeColor(scalar);\
1549 \n if (maskValue.a == 1.0/255.0)\
1551 \n g_srcColor = texture1D(in_mask1, scalar.w);\
1555 \n // maskValue.a == 2.0/255.0\
1556 \n g_srcColor = texture1D(in_mask2, scalar.w);\
1558 \n g_srcColor.a = 1.0;\
1559 \n if(in_maskBlendFactor < 1.0)\
1561 \n g_srcColor = (1.0 - in_maskBlendFactor) * computeColor(scalar)\
1562 \n + in_maskBlendFactor * g_srcColor;\
1565 \n g_srcColor.a = computeOpacity(scalar);\
1572 #endif // _vtkVolumeShaderComposer_h
std::string ShadingExit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol), int noOfComponents, int independentComponents=0)
represents a volume (data & properties) in a rendered scene
Abstract class for a volume mapper.
std::string ClippingIncrement(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol))
virtual int GetBlendMode()
std::string ComputeTextureCoords(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string CompositeMaskGlobalsFrag(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), vtkImageData *maskInput, vtkVolumeMask *mask, int maskType)
std::string CompositeMaskIncrement(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), vtkImageData *maskInput, vtkVolumeMask *mask, int maskType)
abstract specification for renderers
std::string TerminationExit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string GradientsComputeFunc(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vol, int noOfComponents, int independentComponents, std::map< int, std::string > gradientTableMap)
std::string BaseInit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string TerminationIncrement(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
virtual vtkPlaneCollection * GetClippingPlanes()
vtkCamera * GetActiveCamera()
std::string CroppingGlobalsFrag(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol))
std::string BaseGlobalsVert(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string CroppingGlobalsVert(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string BinaryMaskIncrement(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), vtkImageData *maskInput, vtkVolumeMask *mask, int maskType)
std::string TerminationInit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ClippingExit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string RayDirectionFunc(vtkRenderer *ren, vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), int vtkNotUsed(noOfComponents))
topologically and geometrically regular array of data
std::string ClippingGlobalsVert(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
bool HasGradientOpacity(int index=0)
std::string ColorTransferFunc(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), int noOfComponents, int independentComponents, std::map< int, std::string > colorTableMap)
std::string ShadingGlobalsFrag(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ShadingGlobalsVert(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string OpacityTransferFunc(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), int noOfComponents, int independentComponents, std::map< int, std::string > opacityTableMap)
std::string CroppingIncrement(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol))
virtual int GetParallelProjection()
std::string BaseIncrement(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ShadingInit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol))
represents the common properties for rendering a volume.
std::string CroppingExit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ClippingGlobalsFrag(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string TerminationGlobalsVert(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ClippingInit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol))
boost::graph_traits< vtkGraph * >::vertex_descriptor source(boost::graph_traits< vtkGraph * >::edge_descriptor e, vtkGraph *)
std::string ShadingIncrement(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol), vtkImageData *maskInput, vtkVolumeMask *mask, int maskType, int noOfComponents, int independentComponents=0)
std::string CroppingInit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol))
vtkVolumeProperty * GetProperty()
virtual int GetCropping()
std::string replace(std::string source, const std::string &search, const std::string replace, bool all)
std::string TerminationGlobalsFrag(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string BaseGlobalsFrag(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), int vtkNotUsed(numberOfLights), int lightingComplexity, int noOfComponents, int independentComponents)
std::string BaseExit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
GLsizei const GLchar ** string
std::string LightComputeFunc(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vol, int noOfComponents, int independentComponents, int vtkNotUsed(numberOfLights), int lightingComplexity)
std::string ComputeClip(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string BinaryMaskGlobalsFrag(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), vtkImageData *maskInput, vtkVolumeMask *mask, int vtkNotUsed(maskType))