16 #ifndef _vtkVolumeShaderComposer_h
17 #define _vtkVolumeShaderComposer_h
42 std::string::size_type pos = 0;
44 while ((pos = source.find(search, 0)) != std::string::npos)
46 source.replace(pos, search.length(),
replace);
47 pos += search.length();
66 \n vec4 pos = in_projectionMatrix * in_modelViewMatrix *\
67 \n in_volumeMatrix * vec4(in_vertexPos.xyz, 1.0);\
68 \n gl_Position = pos;"
78 "\n // Assuming point data only. Also, we offset the texture coordinate\
79 \n // to account for OpenGL treating voxel at the center of the cell.\
80 \n vec3 uvx = (in_vertexPos - in_volumeExtentsMin) /\
81 \n (in_volumeExtentsMax - in_volumeExtentsMin);\
82 \n vec3 delta = in_textureExtentsMax - in_textureExtentsMin;\
83 \n ip_textureCoords = (uvx * (delta - vec3(1.0)) + vec3(0.5)) / delta;"
93 \n uniform mat4 in_modelViewMatrix;\
94 \n uniform mat4 in_projectionMatrix;\
95 \n uniform mat4 in_volumeMatrix;\
97 \n uniform vec3 in_volumeExtentsMin;\
98 \n uniform vec3 in_volumeExtentsMax;\
100 \n uniform vec3 in_textureExtentsMax;\
101 \n uniform vec3 in_textureExtentsMin;"
109 int vtkNotUsed(numberOfLights),
110 int lightingComplexity,
112 int independentComponents)
116 \nuniform sampler3D in_volume;\
117 \nuniform int in_noOfComponents;\
118 \nuniform int in_independentComponents;\
120 \nuniform sampler2D in_noiseSampler;\
121 \nuniform sampler2D in_depthSampler;\
123 \n// Camera position\
124 \nuniform vec3 in_cameraPos;\
126 \n// view and model matrices\
127 \nuniform mat4 in_volumeMatrix;\
128 \nuniform mat4 in_inverseVolumeMatrix;\
129 \nuniform mat4 in_projectionMatrix;\
130 \nuniform mat4 in_inverseProjectionMatrix;\
131 \nuniform mat4 in_modelViewMatrix;\
132 \nuniform mat4 in_inverseModelViewMatrix;\
133 \nuniform mat4 in_textureDatasetMatrix;\
134 \nuniform mat4 in_inverseTextureDatasetMatrix;\
135 \nuniform mat3 in_texureToEyeIt;\
138 \nuniform vec3 in_cellStep;\
139 \nuniform vec2 in_scalarsRange;\
140 \nuniform vec3 in_cellSpacing;\
142 \n// Sample distance\
143 \nuniform float in_sampleDistance;\
146 \nuniform vec3 in_cellScale;\
147 \nuniform vec2 in_windowLowerLeftCorner;\
148 \nuniform vec2 in_inverseOriginalWindowSize;\
149 \nuniform vec2 in_inverseWindowSize;\
150 \nuniform vec3 in_textureExtentsMax;\
151 \nuniform vec3 in_textureExtentsMin;\
153 \n// Material and lighting\
154 \nuniform vec3 in_diffuse;\
155 \nuniform vec3 in_ambient;\
156 \nuniform vec3 in_specular;\
157 \nuniform float in_shininess;\
160 if (lightingComplexity > 0)
163 \nuniform bool in_twoSidedLighting;\
167 \nvec3 g_cellSpacing;\
168 \nfloat g_avgSpacing;"
172 if (lightingComplexity == 3)
175 \nvec4 g_fragWorldPos;\
176 \nuniform int in_numberOfLights;\
177 \nuniform vec3 in_lightAmbientColor[6];\
178 \nuniform vec3 in_lightDiffuseColor[6];\
179 \nuniform vec3 in_lightSpecularColor[6];\
180 \nuniform vec3 in_lightDirection[6];\
181 \nuniform vec3 in_lightPosition[6];\
182 \nuniform vec3 in_lightAttenuation[6];\
183 \nuniform float in_lightConeAngle[6];\
184 \nuniform float in_lightExponent[6];\
185 \nuniform int in_lightPositional[6];\
188 else if (lightingComplexity == 2)
191 \nvec4 g_fragWorldPos;\
192 \nuniform int in_numberOfLights;\
193 \nuniform vec3 in_lightAmbientColor[6];\
194 \nuniform vec3 in_lightDiffuseColor[6];\
195 \nuniform vec3 in_lightSpecularColor[6];\
196 \nuniform vec3 in_lightDirection[6];\
202 \nuniform vec3 in_lightAmbientColor[1];\
203 \nuniform vec3 in_lightDiffuseColor[1];\
204 \nuniform vec3 in_lightSpecularColor[1];\
205 \nvec4 g_lightPosObj;\
213 if (noOfComponents > 1 && independentComponents)
216 \nuniform vec4 in_componentWeight;");
226 int lightingComplexity)
229 \n // Get the 3D texture coordinates for lookup into the in_volume dataset\
230 \n g_dataPos = ip_textureCoords.xyz;\
232 \n // Eye position in object space\
233 \n g_eyePosObj = (in_inverseVolumeMatrix * vec4(in_cameraPos, 1.0));\
234 \n if (g_eyePosObj.w != 0.0)\
236 \n g_eyePosObj.x /= g_eyePosObj.w;\
237 \n g_eyePosObj.y /= g_eyePosObj.w;\
238 \n g_eyePosObj.z /= g_eyePosObj.w;\
239 \n g_eyePosObj.w = 1.0;\
242 \n // Getting the ray marching direction (in object space);\
243 \n vec3 rayDir = computeRayDirection();\
245 \n // Multiply the raymarching direction with the step size to get the\
246 \n // sub-step size we need to take at each raymarching step\
247 \n g_dirStep = (in_inverseTextureDatasetMatrix *\
248 \n vec4(rayDir, 0.0)).xyz * in_sampleDistance;\
250 \n g_dataPos += g_dirStep * (texture2D(in_noiseSampler, g_dataPos.xy).x);\
252 \n // Flag to deternmine if voxel should be considered for the rendering\
253 \n bool l_skip = false;");
258 \n // Light position in object space\
259 \n g_lightPosObj = (in_inverseVolumeMatrix *\
260 \n vec4(in_cameraPos, 1.0));\
261 \n if (g_lightPosObj.w != 0.0)\
263 \n g_lightPosObj.x /= g_lightPosObj.w;\
264 \n g_lightPosObj.y /= g_lightPosObj.w;\
265 \n g_lightPosObj.z /= g_lightPosObj.w;\
266 \n g_lightPosObj.w = 1.0;\
268 \n g_ldir = normalize(g_lightPosObj.xyz - ip_vertexPos);\
269 \n g_vdir = normalize(g_eyePosObj.xyz - ip_vertexPos);\
270 \n g_h = normalize(g_ldir + g_vdir);\
271 \n g_cellSpacing = vec3(in_cellSpacing[0],\
272 \n in_cellSpacing[1],\
273 \n in_cellSpacing[2]);\
274 \n g_avgSpacing = (g_cellSpacing[0] +\
275 \n g_cellSpacing[1] +\
276 \n g_cellSpacing[2])/3.0;\
277 \n // Adjust the aspect\
278 \n g_aspect.x = g_cellSpacing[0] * 2.0 / g_avgSpacing;\
279 \n g_aspect.y = g_cellSpacing[1] * 2.0 / g_avgSpacing;\
280 \n g_aspect.z = g_cellSpacing[2] * 2.0 / g_avgSpacing;"
286 \n g_xvec = vec3(in_cellStep[0], 0.0, 0.0);\
287 \n g_yvec = vec3(0.0, in_cellStep[1], 0.0);\
288 \n g_zvec = vec3(0.0, 0.0, in_cellStep[2]);"
317 int independentComponents,
318 std::map<int, std::string> gradientTableMap)
324 \nuniform sampler1D in_gradientTransferFunc;\
325 \nfloat computeGradientOpacity(vec4 grad)\
327 \n return texture1D(in_gradientTransferFunc, grad.w).r;\
331 else if (noOfComponents > 1 && independentComponents &&
334 for (
int i = 0; i < noOfComponents; ++i)
336 shaderStr +=
std::string(
"\n uniform sampler1D ") +
341 \nfloat computeGradientOpacity(vec4 grad, int component)\
343 \n if (component == 0)\
345 \n return texture1D(in_gradientTransferFunc, grad.w).r;\
347 \n if (component == 1)\
349 \n return texture1D(in_gradientTransferFunc1, grad.w).r;\
351 \n if (component == 2)\
353 \n return texture1D(in_gradientTransferFunc2, grad.w).r;\
355 \n if (component == 3)\
357 \n return texture1D(in_gradientTransferFunc3, grad.w).r;\
367 \nvec4 computeGradient()\
371 \n g1.x = texture3D(in_volume, vec3(g_dataPos + g_xvec)).x;\
372 \n g1.y = texture3D(in_volume, vec3(g_dataPos + g_yvec)).x;\
373 \n g1.z = texture3D(in_volume, vec3(g_dataPos + g_zvec)).x;\
374 \n g2.x = texture3D(in_volume, vec3(g_dataPos - g_xvec)).x;\
375 \n g2.y = texture3D(in_volume, vec3(g_dataPos - g_yvec)).x;\
376 \n g2.z = texture3D(in_volume, vec3(g_dataPos - g_zvec)).x;\
377 \n g1 = g1 * in_volume_scale.r + in_volume_bias.r;\
378 \n g2 = g2 * in_volume_scale.r + in_volume_bias.r;\
379 \n return vec4((g1 - g2), -1.0);\
387 \nvec4 computeGradient()\
391 \n g1.x = texture3D(in_volume, vec3(g_dataPos + g_xvec)).x;\
392 \n g1.y = texture3D(in_volume, vec3(g_dataPos + g_yvec)).x;\
393 \n g1.z = texture3D(in_volume, vec3(g_dataPos + g_zvec)).x;\
394 \n g2.x = texture3D(in_volume, vec3(g_dataPos - g_xvec)).x;\
395 \n g2.y = texture3D(in_volume, vec3(g_dataPos - g_yvec)).x;\
396 \n g2.z = texture3D(in_volume, vec3(g_dataPos - g_zvec)).x;\
397 \n g1 = g1*in_volume_scale.r + in_volume_bias.r;\
398 \n g2 = g2*in_volume_scale.r + in_volume_bias.r;\
399 \n g1.x = in_scalarsRange[0] + (\
400 \n in_scalarsRange[1] - in_scalarsRange[0]) * g1.x;\
401 \n g1.y = in_scalarsRange[0] + (\
402 \n in_scalarsRange[1] - in_scalarsRange[0]) * g1.y;\
403 \n g1.z = in_scalarsRange[0] + (\
404 \n in_scalarsRange[1] - in_scalarsRange[0]) * g1.z;\
405 \n g2.x = in_scalarsRange[0] + (\
406 \n in_scalarsRange[1] - in_scalarsRange[0]) * g2.x;\
407 \n g2.y = in_scalarsRange[0] + (\
408 \n in_scalarsRange[1] - in_scalarsRange[0]) * g2.y;\
409 \n g2.z = in_scalarsRange[0] + (\
410 \n in_scalarsRange[1] - in_scalarsRange[0]) * g2.z;\
411 \n g2.xyz = g1 - g2.xyz;\
412 \n g2.x /= g_aspect.x;\
413 \n g2.y /= g_aspect.y;\
414 \n g2.z /= g_aspect.z;\
415 \n float grad_mag = sqrt(g2.x * g2.x +\
418 \n if (grad_mag > 0.0)\
420 \n g2.x /= grad_mag;\
421 \n g2.y /= grad_mag;\
422 \n g2.z /= grad_mag;\
426 \n g2.xyz = vec3(0.0, 0.0, 0.0);\
428 \n grad_mag = grad_mag * 1.0 / (0.25 * (in_scalarsRange[1] -\
429 \n (in_scalarsRange[0])));\
430 \n grad_mag = clamp(grad_mag, 0.0, 1.0);\
439 \nvec4 computeGradient()\
441 \n return vec4(0.0);\
453 int independentComponents,
454 int vtkNotUsed(numberOfLights),
455 int lightingComplexity)
459 \nvec4 computeLighting(vec4 color)\
461 \n vec4 finalColor = vec4(0.0);"
467 \n // Compute gradient function only once\
468 \n vec4 gradient = computeGradient();"
474 if (lightingComplexity == 1)
477 \n vec3 diffuse = vec3(0.0);\
478 \n vec3 specular = vec3(0.0);\
479 \n vec3 normal = gradient.xyz / in_cellSpacing;\
480 \n float normalLength = length(normal);\
481 \n if (normalLength > 0.0)\
483 \n normal = normalize(normal);\
487 \n normal = vec3(0.0, 0.0, 0.0);\
489 \n float nDotL = dot(normal, g_ldir);\
490 \n float nDotH = dot(normal, g_h);\
491 \n if (nDotL < 0.0 && in_twoSidedLighting)\
495 \n if (nDotH < 0.0 && in_twoSidedLighting)\
501 \n diffuse = nDotL * in_diffuse * in_lightDiffuseColor[0]\
504 \n specular = pow(nDotH, in_shininess) * in_specular *\
505 \n in_lightSpecularColor[0];\
506 \n // For the headlight, ignore the light's ambient color\
507 \n // for now as it is causing the old mapper tests to fail\
508 \n finalColor.xyz = in_ambient * color.rgb + diffuse + specular;"
511 else if (lightingComplexity == 2)
514 \n g_fragWorldPos = in_modelViewMatrix * in_volumeMatrix *\
515 \n in_textureDatasetMatrix * vec4(-g_dataPos, 1.0);\
516 \n if (g_fragWorldPos.w != 0.0)\
518 \n g_fragWorldPos /= g_fragWorldPos.w;\
520 \n vec3 vdir = normalize(g_fragWorldPos.xyz);\
521 \n vec3 normal = gradient.xyz;\
522 \n vec3 ambient = vec3(0.0);\
523 \n vec3 diffuse = vec3(0.0);\
524 \n vec3 specular = vec3(0.0);\
525 \n float normalLength = length(normal);\
526 \n if (normalLength > 0.0)\
528 \n normal = normalize(in_texureToEyeIt * normal);\
532 \n normal = vec3(0.0, 0.0, 0.0);\
534 \n for (int lightNum = 0; lightNum < in_numberOfLights; lightNum++)\
536 \n vec3 ldir = in_lightDirection[lightNum].xyz;\
537 \n vec3 h = normalize(ldir + vdir);\
538 \n float nDotH = dot(normal, h);\
539 \n if (nDotH < 0.0 && in_twoSidedLighting)\
543 \n float nDotL = dot(normal, ldir);\
544 \n if (nDotL < 0.0 && in_twoSidedLighting)\
550 \n diffuse += in_lightDiffuseColor[lightNum] * nDotL;\
554 \n specular = in_lightSpecularColor[lightNum] * pow(nDotH, in_shininess);\
556 \n ambient += in_lightAmbientColor[lightNum];\
558 \n finalColor.xyz = in_ambient * ambient +\
559 \n in_diffuse * diffuse * color.rgb +\
560 \n in_specular * specular;"
563 else if (lightingComplexity == 3)
566 \n g_fragWorldPos = in_modelViewMatrix * in_volumeMatrix *\
567 \n in_textureDatasetMatrix * vec4(g_dataPos, 1.0);\
568 \n if (g_fragWorldPos.w != 0.0)\
570 \n g_fragWorldPos /= g_fragWorldPos.w;\
572 \n vec3 viewDirection = normalize(-g_fragWorldPos.xyz);\
573 \n vec3 ambient = vec3(0,0,0);\
574 \n vec3 diffuse = vec3(0,0,0);\
575 \n vec3 specular = vec3(0,0,0);\
576 \n vec3 vertLightDirection;\
577 \n vec3 normal = normalize(in_texureToEyeIt * gradient.xyz);\
579 \n for (int lightNum = 0; lightNum < in_numberOfLights; lightNum++)\
581 \n float attenuation = 1.0;\
583 \n lightDir = in_lightDirection[lightNum];\
584 \n if (in_lightPositional[lightNum] == 0)\
586 \n vertLightDirection = lightDir;\
590 \n vertLightDirection = (g_fragWorldPos.xyz - in_lightPosition[lightNum]);\
591 \n float distance = length(vertLightDirection);\
592 \n vertLightDirection = normalize(vertLightDirection);\
593 \n attenuation = 1.0 /\
594 \n (in_lightAttenuation[lightNum].x\
595 \n + in_lightAttenuation[lightNum].y * distance\
596 \n + in_lightAttenuation[lightNum].z * distance * distance);\
597 \n // per OpenGL standard cone angle is 90 or less for a spot light\
598 \n if (in_lightConeAngle[lightNum] <= 90.0)\
600 \n float coneDot = dot(vertLightDirection, lightDir);\
601 \n // if inside the cone\
602 \n if (coneDot >= cos(radians(in_lightConeAngle[lightNum])))\
604 \n attenuation = attenuation * pow(coneDot, in_lightExponent[lightNum]);\
608 \n attenuation = 0.0;\
612 \n // diffuse and specular lighting\
613 \n float nDotL = dot(normal, vertLightDirection);\
614 \n if (nDotL < 0.0 && in_twoSidedLighting)\
620 \n float df = max(0.0, attenuation * nDotL);\
621 \n diffuse += (df * in_lightDiffuseColor[lightNum]);\
623 \n vec3 h = normalize(vertLightDirection + viewDirection);\
624 \n float nDotH = dot(normal, h);\
625 \n if (nDotH < 0.0 && in_twoSidedLighting)\
631 \n float sf = attenuation * pow(nDotH, in_shininess);\
632 \n specular += (sf * in_lightSpecularColor[lightNum]);\
634 \n ambient += in_lightAmbientColor[lightNum];\
636 \n finalColor.xyz = in_ambient * ambient + in_diffuse *\
637 \n diffuse * color.rgb + in_specular * specular;\
644 "\n finalColor = vec4(color.rgb, 0.0);"
651 \n if (gradient.w >= 0.0)\
653 \n color.a = color.a *\
654 \n computeGradientOpacity(gradient);\
658 else if (noOfComponents > 1 && independentComponents &&
662 \n if (gradient.w >= 0.0)\
664 \n for (int i = 0; i < in_noOfComponents; ++i)\
666 \n color.a = color.a *\
667 \n computeGradientOpacity(gradient, i) * in_componentWeight[i];\
673 \n finalColor.a = color.a;\
674 \n return finalColor;\
685 int vtkNotUsed(noOfComponents))
690 \nvec3 computeRayDirection()\
692 \n return normalize(ip_vertexPos.xyz - g_eyePosObj.xyz);\
698 \nuniform vec3 in_projectionDirection;\
699 \nvec3 computeRayDirection()\
701 \n return normalize((in_inverseVolumeMatrix *\
702 \n vec4(in_projectionDirection, 0.0)).xyz);\
712 int independentComponents,
713 std::map<int, std::string> colorTableMap)
715 if (noOfComponents == 1)
718 \nuniform sampler1D in_colorTransferFunc;\
719 \nvec4 computeColor(vec4 scalar, float opacity)\
721 \n return computeLighting(vec4(texture1D(in_colorTransferFunc,\
722 \n scalar.w).xyz, opacity));\
725 else if (noOfComponents > 1 && independentComponents)
728 std::ostringstream toString;
729 for (
int i = 0; i < noOfComponents; ++i)
731 shaderStr +=
std::string(
"\n uniform sampler1D ") +
736 \nvec4 computeColor(vec4 scalar, float opacity, int component)\
739 for (
int i = 0; i < noOfComponents; ++i)
743 \n if (component == " + toString.str() +
")");
747 \n return computeLighting(vec4(texture1D(\
748 \n in_colorTransferFunc");
749 shaderStr += (i == 0 ?
"" : toString.str());
751 \n scalar[" + toString.str() +
"]).xyz,\
763 else if (noOfComponents == 2&& !independentComponents)
766 \nuniform sampler1D in_colorTransferFunc;\
767 \nvec4 computeColor(vec4 scalar, float opacity)\
769 \n return computeLighting(vec4(texture1D(in_colorTransferFunc,\
777 \nvec4 computeColor(vec4 scalar, float opacity)\
779 \n return computeLighting(vec4(scalar.xyz, opacity));\
789 int independentComponents,
790 std::map<int, std::string> opacityTableMap)
792 if (noOfComponents > 1 && independentComponents)
795 std::ostringstream toString;
797 for (
int i = 0; i < noOfComponents; ++i)
799 shaderStr +=
std::string(
"\n uniform sampler1D ") +
805 \nfloat computeOpacity(vec4 scalar, int component)\
808 for (
int i = 0; i < noOfComponents; ++i)
812 \n if (component == " + toString.str() +
")");
816 \n return texture1D(in_opacityTransferFunc");
817 shaderStr += (i == 0 ?
"" : toString.str());
818 shaderStr +=
std::string(
",scalar[" + toString.str() +
"]).r;\
829 else if (noOfComponents == 2 && !independentComponents)
832 \nuniform sampler1D in_opacityTransferFunc;\
833 \nfloat computeOpacity(vec4 scalar)\
835 \n return texture1D(in_opacityTransferFunc, scalar.y).r;\
841 \nuniform sampler1D in_opacityTransferFunc;\
842 \nfloat computeOpacity(vec4 scalar)\
844 \n return texture1D(in_opacityTransferFunc, scalar.w).r;\
873 \n // We get data between 0.0 - 1.0 range\
874 \n bool l_firstValue = true;\
875 \n vec4 l_maxValue = vec4(0.0);"
881 \n //We get data between 0.0 - 1.0 range\
882 \n bool l_firstValue = true;\
883 \n vec4 l_minValue = vec4(1.0);"
889 \n //We get data between 0.0 - 1.0 range\
890 \n float l_sumValue = 0.0;"
906 int independentComponents = 0)
911 \n vec4 scalar = texture3D(in_volume, g_dataPos);"
915 if (noOfComponents == 1)
918 \n scalar.r = scalar.r*in_volume_scale.r + in_volume_bias.r;\
919 \n scalar = vec4(scalar.r,scalar.r,scalar.r,scalar.r);"
926 \n scalar = scalar*in_volume_scale + in_volume_bias;"
932 if (noOfComponents > 1)
934 if (!independentComponents)
937 \n if (l_maxValue.w < scalar.w || l_firstValue)\
939 \n l_maxValue = scalar;\
942 \n if (l_firstValue)\
944 \n l_firstValue = false;\
951 \n for (int i = 0; i < in_noOfComponents; ++i)\
953 \n if (l_maxValue[i] < scalar[i] || l_firstValue)\
955 \n l_maxValue[i] = scalar[i];\
958 \n if (l_firstValue)\
960 \n l_firstValue = false;\
968 \n if (l_maxValue.w < scalar.x || l_firstValue)\
970 \n l_maxValue.w = scalar.x;\
973 \n if (l_firstValue)\
975 \n l_firstValue = false;\
982 if (noOfComponents > 1)
984 if (!independentComponents)
987 \n if (l_minValue.w > scalar.w || l_firstValue)\
989 \n l_minValue = scalar;\
992 \n if (l_firstValue)\
994 \n l_firstValue = false;\
1001 \n for (int i = 0; i < in_noOfComponents; ++i)\
1003 \n if (l_minValue[i] < scalar[i] || l_firstValue)\
1005 \n l_minValue[i] = scalar[i];\
1008 \n if (l_firstValue)\
1010 \n l_firstValue = false;\
1018 \n if (l_minValue.w > scalar.x || l_firstValue)\
1020 \n l_minValue.w = scalar.x;\
1023 \n if (l_firstValue)\
1025 \n l_firstValue = false;\
1032 if (noOfComponents > 1)
1034 if (!independentComponents)
1037 \n float opacity = computeOpacity(scalar);\
1038 \n l_sumValue = l_sumValue + opacity * scalar.x;"
1044 \n for (int i = 0; i < in_noOfComponents; ++i)\
1046 \n float opacity = computeOpacity(scalar, i);\
1047 \n l_sumValue[i] = l_sumValue[i] + opacity * scalar[i];\
1055 \n float opacity = computeOpacity(scalar);\
1056 \n l_sumValue = l_sumValue + opacity * scalar.x;"
1062 if (noOfComponents > 1 && independentComponents)
1065 \n vec4 color[4]; vec4 tmp = vec4(0.0);\
1066 \n float totalAlpha = 0.0;\
1067 \n for (int i = 0; i < in_noOfComponents; ++i)\
1070 if (!mask || !maskInput ||
1074 \n // Data fetching from the red channel of volume texture\
1075 \n color[i][3] = computeOpacity(scalar, i);\
1076 \n color[i] = computeColor(scalar, color[i][3], i);\
1077 \n totalAlpha += color[i][3] * in_componentWeight[i];\
1079 \n if (totalAlpha > 0.0)\
1081 \n for (int i = 0; i < in_noOfComponents; ++i)\
1083 \n tmp.x += color[i].x * color[i].w * in_componentWeight[i] ;\
1084 \n tmp.y += color[i].y * color[i].w * in_componentWeight[i];\
1085 \n tmp.z += color[i].z * color[i].w * in_componentWeight[i];\
1086 \n tmp.w += ((color[i].w * color[i].w)/totalAlpha);\
1089 \n g_fragColor = (1.0f - g_fragColor.a) * tmp + g_fragColor;"
1095 if (!mask || !maskInput ||
1099 \n vec4 g_srcColor = vec4(0.0);\
1100 \n g_srcColor.a = computeOpacity(scalar);\
1101 \n if (g_srcColor.a > 0.0)\
1103 \n g_srcColor = computeColor(scalar, g_srcColor.a);"
1108 \n // Opacity calculation using compositing:\
1109 \n // here we use front to back compositing scheme whereby the current\
1110 \n // sample value is multiplied to the currently accumulated alpha\
1111 \n // and then this product is subtracted from the sample value to\
1112 \n // get the alpha from the previous steps.\
1113 \n // Next, this alpha is multiplied with the current sample colour\
1114 \n // and accumulated to the composited colour. The alpha value from\
1115 \n // the previous steps is then accumulated to the composited colour\
1117 \n g_srcColor.rgb *= g_srcColor.a;\
1118 \n g_fragColor = (1.0f - g_fragColor.a) * g_srcColor + g_fragColor;"
1121 if (!mask || !maskInput ||
1146 int independentComponents = 0)
1150 if (noOfComponents > 1 && independentComponents)
1153 \n vec4 g_srcColor = vec4(0);\
1154 \n for (int i = 0; i < in_noOfComponents; ++i)\
1156 \n vec4 tmp = computeColor(l_maxValue, computeOpacity(l_maxValue, i), i);\
1157 \n g_srcColor[0] += tmp[0] * tmp[3] * in_componentWeight[i];\
1158 \n g_srcColor[1] += tmp[1] * tmp[3] * in_componentWeight[i];\
1159 \n g_srcColor[2] += tmp[2] * tmp[3] * in_componentWeight[i];\
1160 \n g_srcColor[3] += tmp[3] * in_componentWeight[i];\
1162 \n g_fragColor = g_srcColor;"
1168 \n vec4 g_srcColor = computeColor(l_maxValue,\
1169 computeOpacity(l_maxValue));\
1170 \n g_fragColor.rgb = g_srcColor.rgb * g_srcColor.a;\
1171 \n g_fragColor.a = g_srcColor.a;"
1177 if (noOfComponents > 1 && independentComponents)
1180 \n vec4 g_srcColor = vec4(0);\
1181 \n for (int i = 0; i < in_noOfComponents; ++i)\
1183 \n vec4 tmp = computeColor(l_minValue, computeOpacity(l_minValue, i), i);\
1184 \n g_srcColor[0] += tmp[0] * tmp[3] * in_componentWeight[i];\
1185 \n g_srcColor[1] += tmp[1] * tmp[3] * in_componentWeight[i];\
1186 \n g_srcColor[2] += tmp[2] * tmp[3] * in_componentWeight[i];\
1187 \n g_srcColor[2] += tmp[3] * tmp[3] * in_componentWeight[i];\
1189 \n g_fragColor = g_srcColor;"
1195 \n vec4 g_srcColor = computeColor(l_minValue,\
1196 \n computeOpacity(l_minValue));\
1197 \n g_fragColor.rgb = g_srcColor.rgb * g_srcColor.a;\
1198 \n g_fragColor.a = g_srcColor.a;"
1204 if (noOfComponents > 1 && independentComponents)
1207 \n l_sumValue = clamp(l_sumValue, 0.0, 1.0);\
1208 \n g_fragColor = vec4(l_sumValue);"
1214 \n l_sumValue = clamp(l_sumValue, 0.0, 1.0);\
1215 \n g_fragColor = vec4(vec3(l_sumValue), 1.0);"
1247 \n // Minimum texture access coordinate\
1248 \n const vec3 l_tex_min = vec3(0);\
1250 \n // Maximum texture access coordinate\
1251 \n const vec3 l_tex_max = vec3(1);\
1253 \n // Flag to indicate if the raymarch loop should terminate \
1254 \n bool stop = false;\
1256 \n // 2D Texture fragment coordinates [0,1] from fragment coordinates \
1257 \n // the frame buffer texture has the size of the plain buffer but \
1258 \n // we use a fraction of it. The texture coordinates is less than 1 if \
1259 \n // the reduction factor is less than 1. \
1260 \n // Device coordinates are between -1 and 1. We need texture \
1261 \n // coordinates between 0 and 1 the in_depthSampler buffer has the \
1262 \n // original size buffer. \
1263 \n vec2 fragTexCoord = (gl_FragCoord.xy - in_windowLowerLeftCorner) *\
1264 \n in_inverseWindowSize;\
1265 \n vec4 l_depthValue = texture2D(in_depthSampler, fragTexCoord);\
1266 \n float l_terminatePointMax = 0.0;\
1269 \n if(gl_FragCoord.z >= l_depthValue.x)\
1274 \n // color buffer or max scalar buffer have a reduced size.\
1275 \n fragTexCoord = (gl_FragCoord.xy - in_windowLowerLeftCorner) *\
1276 \n in_inverseOriginalWindowSize;\
1278 \n // Compute max number of iterations it will take before we hit\
1279 \n // the termination point\
1281 \n // Abscissa of the point on the depth buffer along the ray.\
1282 \n // point in texture coordinates\
1283 \n vec4 terminatePoint;\
1284 \n terminatePoint.x = (gl_FragCoord.x - in_windowLowerLeftCorner.x) * 2.0 *\
1285 \n in_inverseWindowSize.x - 1.0;\
1286 \n terminatePoint.y = (gl_FragCoord.y - in_windowLowerLeftCorner.y) * 2.0 *\
1287 \n in_inverseWindowSize.y - 1.0;\
1288 \n terminatePoint.z = (2.0 * l_depthValue.x - (gl_DepthRange.near +\
1289 \n gl_DepthRange.far)) / gl_DepthRange.diff;\
1290 \n terminatePoint.w = 1.0;\
1292 \n // From normalized device coordinates to eye coordinates.\
1293 \n // in_projectionMatrix is inversed because of way VT\
1294 \n // From eye coordinates to texture coordinates\
1295 \n terminatePoint = in_inverseTextureDatasetMatrix *\
1296 \n in_inverseVolumeMatrix *\
1297 \n in_inverseModelViewMatrix *\
1298 \n in_inverseProjectionMatrix *\
1300 \n terminatePoint /= terminatePoint.w;\
1302 \n l_terminatePointMax = length(terminatePoint.xyz - g_dataPos.xyz) /\
1303 \n length(g_dirStep);\
1304 \n float l_currentT = 0.0;");
1313 \n // The two constants l_tex_min and l_tex_max have a value of\
1314 \n // vec3(-1,-1,-1) and vec3(1,1,1) respectively. To determine if the\
1315 \n // data value is outside the in_volume data, we use the sign function.\
1316 \n // The sign function return -1 if the value is less than 0, 0 if the\
1317 \n // value is equal to 0 and 1 if value is greater than 0. Hence, the\
1318 \n // sign function for the calculation (sign(g_dataPos-l_tex_min) and\
1319 \n // sign (l_tex_max-g_dataPos)) will give us vec3(1,1,1) at the\
1320 \n // possible minimum and maximum position.\
1321 \n // When we do a dot product between two vec3(1,1,1) we get answer 3.\
1322 \n // So to be within the dataset limits, the dot product will return a\
1323 \n // value less than 3. If it is greater than 3, we are already out of\
1324 \n // the in_volume dataset\
1325 \n stop = dot(sign(g_dataPos - l_tex_min), sign(l_tex_max - g_dataPos))\
1328 \n // If the stopping condition is true we brek out of the ray marching\
1334 \n // Early ray termination\
1335 \n // if the currently composited colour alpha is already fully saturated\
1336 \n // we terminated the loop or if we have hit an obstacle in the\
1337 \n // direction of they ray (using depth buffer) we terminate as well.\
1338 \n if((g_fragColor.a > (1 - 1/255.0)) || \
1339 \n l_currentT >= l_terminatePointMax)\
1373 \nuniform float cropping_planes[6];\
1374 \nuniform int cropping_flags [32];\
1375 \n// X: axis = 0, Y: axis = 1, Z: axis = 2\
1376 \n// cp Cropping plane bounds (minX, maxX, minY, maxY, minZ, maxZ)\
1377 \nint computeRegionCoord(float cp[6], vec3 pos, int axis)\
1379 \n int cpmin = axis * 2;\
1380 \n int cpmax = cpmin + 1;\
1382 \n if (pos[axis] < cp[cpmin])\
1386 \n else if (pos[axis] >= cp[cpmin] &&\
1387 \n pos[axis] < cp[cpmax])\
1391 \n else if (pos[axis] >= cp[cpmax])\
1398 \nint computeRegion(float cp[6], vec3 pos)\
1400 \n return (computeRegionCoord(cp, pos, 0) +\
1401 \n (computeRegionCoord(cp, pos, 1) - 1) * 3 +\
1402 \n (computeRegionCoord(cp, pos, 2) - 1) * 9);\
1417 \n // Convert cropping region to texture space\
1418 \n float cropping_planes_ts[6];\
1419 \n mat4 datasetToTextureMat = in_inverseTextureDatasetMatrix;\
1421 \n vec4 temp = vec4(cropping_planes[0], 0.0, 0.0, 1.0);\
1422 \n temp = datasetToTextureMat * temp;\
1423 \n if (temp[3] != 0.0)\
1425 \n temp[0] /= temp[3];\
1427 \n cropping_planes_ts[0] = temp[0];\
1429 \n temp = vec4(cropping_planes[1], 0.0, 0.0, 1.0);\
1430 \n temp = datasetToTextureMat * temp;\
1431 \n if (temp[3] != 0.0)\
1433 \n temp[0] /= temp[3];\
1435 \n cropping_planes_ts[1] = temp[0];\
1437 \n temp = vec4(0.0, cropping_planes[2], 0.0, 1.0);\
1438 \n temp = datasetToTextureMat * temp;\
1439 \n if (temp[3] != 0.0)\
1441 \n temp[1] /= temp[3];\
1443 \n cropping_planes_ts[2] = temp[1];\
1445 \n temp = vec4(0.0, cropping_planes[3], 0.0, 1.0);\
1446 \n temp = datasetToTextureMat * temp;\
1447 \n if (temp[3] != 0.0)\
1449 \n temp[1] /= temp[3];\
1451 \n cropping_planes_ts[3] = temp[1];\
1453 \n temp = vec4(0.0, 0.0, cropping_planes[4], 1.0);\
1454 \n temp = datasetToTextureMat * temp;\
1455 \n if (temp[3] != 0.0)\
1457 \n temp[2] /= temp[3];\
1459 \n cropping_planes_ts[4] = temp[2];\
1461 \n temp = vec4(0.0, 0.0, cropping_planes[5], 1.0);\
1462 \n temp = datasetToTextureMat * temp;\
1463 \n if (temp[3] != 0.0)\
1465 \n temp[2] /= temp[3];\
1467 \n cropping_planes_ts[5] = temp[2];"
1481 \n // Determine region\
1482 \n int regionNo = computeRegion(cropping_planes_ts, g_dataPos);\
1484 \n // Do & operation with cropping flags\
1485 \n // Pass the flag that its Ok to sample or not to sample\
1486 \n if (cropping_flags[regionNo] == 0)\
1488 \n // Skip this voxel\
1530 \nfloat clippingPlanesTexture[48];\
1531 \nint clippingPlanesSize = int(in_clippingPlanes[0]);\
1533 \nmat4 world_to_texture_mat = in_inverseTextureDatasetMatrix *\
1534 \n in_inverseVolumeMatrix;\
1535 \nfor (int i = 0; i < clippingPlanesSize; i = i + 6)\
1537 \n vec4 origin = vec4(in_clippingPlanes[i + 1],\
1538 \n in_clippingPlanes[i + 2],\
1539 \n in_clippingPlanes[i + 3], 1.0);\
1540 \n vec4 normal = vec4(in_clippingPlanes[i + 4],\
1541 \n in_clippingPlanes[i + 5],\
1542 \n in_clippingPlanes[i + 6], 0.0);\
1544 \n origin = world_to_texture_mat * origin;\
1545 \n normal = world_to_texture_mat * normal;\
1547 \n if (origin[3] != 0.0)\
1549 \n origin[0] = origin[0] / origin[3];\
1550 \n origin[1] = origin[1] / origin[3];\
1551 \n origin[2] = origin[2] / origin[3];\
1553 \n if (normal[3] != 0.0)\
1555 \n normal[0] = normal[0] / normal[3];\
1556 \n normal[1] = normal[1] / normal[3];\
1557 \n normal[2] = normal[2] / normal[3];\
1560 \n clippingPlanesTexture[i] = origin[0];\
1561 \n clippingPlanesTexture[i + 1] = origin[1];\
1562 \n clippingPlanesTexture[i + 2] = origin[2];\
1564 \n clippingPlanesTexture[i + 3] = normal[0];\
1565 \n clippingPlanesTexture[i + 4] = normal[1];\
1566 \n clippingPlanesTexture[i + 5] = normal[2];\
1584 \n for (int i = 0; i < (clippingPlanesSize) && !l_skip; i = i + 6)\
1586 \n if (dot(vec3(g_dataPos - vec3(clippingPlanesTexture[i],\
1587 \n clippingPlanesTexture[i + 1],\
1588 \n clippingPlanesTexture[i + 2])),\
1589 \n vec3(clippingPlanesTexture[i + 3],\
1590 \n clippingPlanesTexture[i + 4],\
1591 \n clippingPlanesTexture[i + 5])) < 0)\
1615 int vtkNotUsed(maskType))
1617 if (!mask || !maskInput)
1635 if (!mask || !maskInput ||
1643 \nvec4 maskValue = texture3D(in_mask, g_dataPos);\
1644 \nif(maskValue.r <= 0.0)\
1660 if (!mask || !maskInput ||
1668 \nuniform float in_maskBlendFactor;\
1669 \nuniform sampler1D in_mask1;\
1670 \nuniform sampler1D in_mask2;"
1684 if (!mask || !maskInput ||
1692 \nvec4 scalar = texture3D(in_volume, g_dataPos);");
1695 if (noOfComponents == 1)
1698 \n scalar.r = scalar.r*in_volume_scale.r + in_volume_bias.r;\
1699 \n scalar = vec4(scalar.r,scalar.r,scalar.r,scalar.r);"
1706 \n scalar = scalar*in_volume_scale + in_volume_bias;"
1711 \nif (in_maskBlendFactor == 0.0)\
1713 \n g_srcColor = computeColor(scalar, computeOpacity(scalar));\
1717 \n float opacity = computeOpacity(scalar);\
1718 \n // Get the mask value at this same location\
1719 \n vec4 maskValue = texture3D(in_mask, g_dataPos);\
1720 \n if(maskValue.r == 0.0)\
1722 \n g_srcColor = computeColor(scalar, opacity);\
1726 \n if (maskValue.r == 1.0/255.0)\
1728 \n g_srcColor = texture1D(in_mask1, scalar.w);\
1732 \n // maskValue.r == 2.0/255.0\
1733 \n g_srcColor = texture1D(in_mask2, scalar.w);\
1735 \n g_srcColor.a = 1.0;\
1736 \n if(in_maskBlendFactor < 1.0)\
1738 \n g_srcColor = (1.0 - in_maskBlendFactor) * computeColor(scalar, opacity)\
1739 \n + in_maskBlendFactor * g_srcColor;\
1742 \n g_srcColor.a = opacity;\
1749 #endif // _vtkVolumeShaderComposer_h
std::string ShadingExit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol), int noOfComponents, int independentComponents=0)
std::string BaseImplementation(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
represents a volume (data & properties) in a rendered scene
std::string CroppingDeclarationVertex(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string CompositeMaskImplementation(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), vtkImageData *maskInput, vtkVolumeMask *mask, int maskType, int noOfComponents)
Abstract class for a volume mapper.
std::string BinaryMaskDeclaration(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), vtkImageData *maskInput, vtkVolumeMask *mask, int vtkNotUsed(maskType))
virtual int GetBlendMode()
std::string BaseInit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vol, int lightingComplexity)
abstract specification for renderers
std::string CroppingDeclarationFragment(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol))
std::string ShadingDeclarationFragment(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string TerminationExit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ShadingDeclarationVertex(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string BinaryMaskImplementation(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), vtkImageData *maskInput, vtkVolumeMask *mask, int maskType)
std::string ComputeLightingDeclaration(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vol, int noOfComponents, int independentComponents, int vtkNotUsed(numberOfLights), int lightingComplexity)
virtual vtkPlaneCollection * GetClippingPlanes()
vtkCamera * GetActiveCamera()
std::string ClippingImplementation(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol))
std::string TerminationInit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string CroppingImplementation(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol))
std::string TerminationImplementation(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ClippingDeclarationVertex(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ClippingExit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string CompositeMaskDeclarationFragment(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), vtkImageData *maskInput, vtkVolumeMask *mask, int maskType)
topologically and geometrically regular array of data
bool HasGradientOpacity(int index=0)
std::string ShadingImplementation(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol), vtkImageData *maskInput, vtkVolumeMask *mask, int maskType, int noOfComponents, int independentComponents=0)
virtual int GetParallelProjection()
std::string ComputeClipPositionImplementation(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ComputeRayDirectionDeclaration(vtkRenderer *ren, vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), int vtkNotUsed(noOfComponents))
std::string BaseDeclarationFragment(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), int vtkNotUsed(numberOfLights), int lightingComplexity, int noOfComponents, int independentComponents)
std::string ShadingInit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol))
Represents the common properties for rendering a volume.
std::string CroppingExit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ClippingDeclarationFragment(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ComputeTextureCoordinates(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ClippingInit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol))
boost::graph_traits< vtkGraph * >::vertex_descriptor source(boost::graph_traits< vtkGraph * >::edge_descriptor e, vtkGraph *)
std::string ComputeGradientDeclaration(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vol, int noOfComponents, int independentComponents, std::map< int, std::string > gradientTableMap)
std::string CroppingInit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *mapper, vtkVolume *vtkNotUsed(vol))
vtkVolumeProperty * GetProperty()
virtual int GetCropping()
std::string replace(std::string source, const std::string &search, const std::string replace, bool all)
std::string TerminationDeclarationVertex(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string BaseDeclarationVertex(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string BaseExit(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string TerminationDeclarationFragment(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol))
std::string ComputeOpacityDeclaration(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), int noOfComponents, int independentComponents, std::map< int, std::string > opacityTableMap)
std::string ComputeColorDeclaration(vtkRenderer *vtkNotUsed(ren), vtkVolumeMapper *vtkNotUsed(mapper), vtkVolume *vtkNotUsed(vol), int noOfComponents, int independentComponents, std::map< int, std::string > colorTableMap)