GTAOShader.js 12 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418
  1. import {
  2. DataTexture,
  3. Matrix4,
  4. RepeatWrapping,
  5. Vector2,
  6. Vector3,
  7. } from 'three';
  8. /** @module GTAOShader */
  9. /**
* GTAO shader. Used by {@link GTAOPass}.
  11. *
  12. * References:
  13. * - [Practical Realtime Strategies for Accurate Indirect Occlusion]{@link https://iryoku.com/downloads/Practical-Realtime-Strategies-for-Accurate-Indirect-Occlusion.pdf}.
  14. * - [Horizon-Based Indirect Lighting (HBIL)]{@link https://github.com/Patapom/GodComplex/blob/master/Tests/TestHBIL/2018%20Mayaux%20-%20Horizon-Based%20Indirect%20Lighting%20(HBIL).pdf}
  15. *
  16. * @constant
  17. * @type {Object}
  18. */
const GTAOShader = {

	name: 'GTAOShader',

	// Compile-time switches for the fragment shader below.
	defines: {
		PERSPECTIVE_CAMERA: 1, // 1 = perspective depth linearization, 0 = orthographic (see getViewZ)
		SAMPLES: 16, // total AO samples; split into DIRECTIONS * STEPS inside main()
		NORMAL_VECTOR_TYPE: 1, // 2 = normalize tNormal.rgb as-is, 1 = unpack RGB-packed normals, else reconstruct from depth
		DEPTH_SWIZZLING: 'x', // channel of tDepth that stores depth
		SCREEN_SPACE_RADIUS: 0, // 1 = derive the sampling radius from a fixed pixel size instead of view space
		SCREEN_SPACE_RADIUS_SCALE: 100.0, // pixel radius used when SCREEN_SPACE_RADIUS is enabled
		SCENE_CLIP_BOX: 0, // 1 = fade out / discard AO outside the sceneBoxMin..sceneBoxMax world-space box
	},

	uniforms: {
		tNormal: { value: null }, // normal buffer; interpretation depends on NORMAL_VECTOR_TYPE
		tDepth: { value: null }, // depth buffer; read through the DEPTH_SWIZZLING channel
		tNoise: { value: null }, // repeating noise: xyz -> random rotation vector, w -> per-pixel radius jitter
		resolution: { value: new Vector2() }, // render size in pixels (noise tiling, screen-space radius)
		cameraNear: { value: null },
		cameraFar: { value: null },
		cameraProjectionMatrix: { value: new Matrix4() }, // view -> clip
		cameraProjectionMatrixInverse: { value: new Matrix4() }, // clip -> view
		cameraWorldMatrix: { value: new Matrix4() }, // view -> world; only used for the scene clip box
		radius: { value: 0.25 }, // AO sampling radius (view space unless SCREEN_SPACE_RADIUS == 1)
		distanceExponent: { value: 1. }, // shapes the distribution of step distances along the radius
		thickness: { value: 1. }, // max |view-z delta| for a sample to count as an occluder
		distanceFallOff: { value: 1. }, // blends in per-step attenuation of the horizon update
		scale: { value: 1. }, // exponent applied to the final AO term (ao = pow(ao, scale))
		sceneBoxMin: { value: new Vector3( - 1, - 1, - 1 ) }, // world-space AABB, used when SCENE_CLIP_BOX == 1
		sceneBoxMax: { value: new Vector3( 1, 1, 1 ) },
	},

	// Full-screen pass-through vertex shader.
	vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,

	// Horizon-based AO: for each of a few slice directions, march STEPS samples in
	// both directions, track the two horizon cosines, and integrate the occlusion.
	// FRAGMENT_OUTPUT may be predefined by the caller to change the output packing.
	fragmentShader: /* glsl */`
varying vec2 vUv;
uniform highp sampler2D tNormal;
uniform highp sampler2D tDepth;
uniform sampler2D tNoise;
uniform vec2 resolution;
uniform float cameraNear;
uniform float cameraFar;
uniform mat4 cameraProjectionMatrix;
uniform mat4 cameraProjectionMatrixInverse;
uniform mat4 cameraWorldMatrix;
uniform float radius;
uniform float distanceExponent;
uniform float thickness;
uniform float distanceFallOff;
uniform float scale;
#if SCENE_CLIP_BOX == 1
uniform vec3 sceneBoxMin;
uniform vec3 sceneBoxMax;
#endif
#include <common>
#include <packing>
#ifndef FRAGMENT_OUTPUT
#define FRAGMENT_OUTPUT vec4(vec3(ao), 1.)
#endif
vec3 getViewPosition(const in vec2 screenPosition, const in float depth) {
vec4 clipSpacePosition = vec4(vec3(screenPosition, depth) * 2.0 - 1.0, 1.0);
vec4 viewSpacePosition = cameraProjectionMatrixInverse * clipSpacePosition;
return viewSpacePosition.xyz / viewSpacePosition.w;
}
float getDepth(const vec2 uv) {
return textureLod(tDepth, uv.xy, 0.0).DEPTH_SWIZZLING;
}
float fetchDepth(const ivec2 uv) {
return texelFetch(tDepth, uv.xy, 0).DEPTH_SWIZZLING;
}
float getViewZ(const in float depth) {
#if PERSPECTIVE_CAMERA == 1
return perspectiveDepthToViewZ(depth, cameraNear, cameraFar);
#else
return orthographicDepthToViewZ(depth, cameraNear, cameraFar);
#endif
}
vec3 computeNormalFromDepth(const vec2 uv) {
vec2 size = vec2(textureSize(tDepth, 0));
ivec2 p = ivec2(uv * size);
float c0 = fetchDepth(p);
float l2 = fetchDepth(p - ivec2(2, 0));
float l1 = fetchDepth(p - ivec2(1, 0));
float r1 = fetchDepth(p + ivec2(1, 0));
float r2 = fetchDepth(p + ivec2(2, 0));
float b2 = fetchDepth(p - ivec2(0, 2));
float b1 = fetchDepth(p - ivec2(0, 1));
float t1 = fetchDepth(p + ivec2(0, 1));
float t2 = fetchDepth(p + ivec2(0, 2));
float dl = abs((2.0 * l1 - l2) - c0);
float dr = abs((2.0 * r1 - r2) - c0);
float db = abs((2.0 * b1 - b2) - c0);
float dt = abs((2.0 * t1 - t2) - c0);
vec3 ce = getViewPosition(uv, c0).xyz;
vec3 dpdx = (dl < dr) ? ce - getViewPosition((uv - vec2(1.0 / size.x, 0.0)), l1).xyz : -ce + getViewPosition((uv + vec2(1.0 / size.x, 0.0)), r1).xyz;
vec3 dpdy = (db < dt) ? ce - getViewPosition((uv - vec2(0.0, 1.0 / size.y)), b1).xyz : -ce + getViewPosition((uv + vec2(0.0, 1.0 / size.y)), t1).xyz;
return normalize(cross(dpdx, dpdy));
}
vec3 getViewNormal(const vec2 uv) {
#if NORMAL_VECTOR_TYPE == 2
return normalize(textureLod(tNormal, uv, 0.).rgb);
#elif NORMAL_VECTOR_TYPE == 1
return unpackRGBToNormal(textureLod(tNormal, uv, 0.).rgb);
#else
return computeNormalFromDepth(uv);
#endif
}
vec3 getSceneUvAndDepth(vec3 sampleViewPos) {
vec4 sampleClipPos = cameraProjectionMatrix * vec4(sampleViewPos, 1.);
vec2 sampleUv = sampleClipPos.xy / sampleClipPos.w * 0.5 + 0.5;
float sampleSceneDepth = getDepth(sampleUv);
return vec3(sampleUv, sampleSceneDepth);
}
void main() {
float depth = getDepth(vUv.xy);
if (depth >= 1.0) {
discard;
return;
}
vec3 viewPos = getViewPosition(vUv, depth);
vec3 viewNormal = getViewNormal(vUv);
float radiusToUse = radius;
float distanceFalloffToUse = thickness;
#if SCREEN_SPACE_RADIUS == 1
float radiusScale = getViewPosition(vec2(0.5 + float(SCREEN_SPACE_RADIUS_SCALE) / resolution.x, 0.0), depth).x;
radiusToUse *= radiusScale;
distanceFalloffToUse *= radiusScale;
#endif
#if SCENE_CLIP_BOX == 1
vec3 worldPos = (cameraWorldMatrix * vec4(viewPos, 1.0)).xyz;
float boxDistance = length(max(vec3(0.0), max(sceneBoxMin - worldPos, worldPos - sceneBoxMax)));
if (boxDistance > radiusToUse) {
discard;
return;
}
#endif
vec2 noiseResolution = vec2(textureSize(tNoise, 0));
vec2 noiseUv = vUv * resolution / noiseResolution;
vec4 noiseTexel = textureLod(tNoise, noiseUv, 0.0);
vec3 randomVec = noiseTexel.xyz * 2.0 - 1.0;
vec3 tangent = normalize(vec3(randomVec.xy, 0.));
vec3 bitangent = vec3(-tangent.y, tangent.x, 0.);
mat3 kernelMatrix = mat3(tangent, bitangent, vec3(0., 0., 1.));
const int DIRECTIONS = SAMPLES < 30 ? 3 : 5;
const int STEPS = (SAMPLES + DIRECTIONS - 1) / DIRECTIONS;
float ao = 0.0;
for (int i = 0; i < DIRECTIONS; ++i) {
float angle = float(i) / float(DIRECTIONS) * PI;
vec4 sampleDir = vec4(cos(angle), sin(angle), 0., 0.5 + 0.5 * noiseTexel.w);
sampleDir.xyz = normalize(kernelMatrix * sampleDir.xyz);
vec3 viewDir = normalize(-viewPos.xyz);
vec3 sliceBitangent = normalize(cross(sampleDir.xyz, viewDir));
vec3 sliceTangent = cross(sliceBitangent, viewDir);
vec3 normalInSlice = normalize(viewNormal - sliceBitangent * dot(viewNormal, sliceBitangent));
vec3 tangentToNormalInSlice = cross(normalInSlice, sliceBitangent);
vec2 cosHorizons = vec2(dot(viewDir, tangentToNormalInSlice), dot(viewDir, -tangentToNormalInSlice));
for (int j = 0; j < STEPS; ++j) {
vec3 sampleViewOffset = sampleDir.xyz * radiusToUse * sampleDir.w * pow(float(j + 1) / float(STEPS), distanceExponent);
vec3 sampleSceneUvDepth = getSceneUvAndDepth(viewPos + sampleViewOffset);
vec3 sampleSceneViewPos = getViewPosition(sampleSceneUvDepth.xy, sampleSceneUvDepth.z);
vec3 viewDelta = sampleSceneViewPos - viewPos;
if (abs(viewDelta.z) < thickness) {
float sampleCosHorizon = dot(viewDir, normalize(viewDelta));
cosHorizons.x += max(0., (sampleCosHorizon - cosHorizons.x) * mix(1., 2. / float(j + 2), distanceFallOff));
}
sampleSceneUvDepth = getSceneUvAndDepth(viewPos - sampleViewOffset);
sampleSceneViewPos = getViewPosition(sampleSceneUvDepth.xy, sampleSceneUvDepth.z);
viewDelta = sampleSceneViewPos - viewPos;
if (abs(viewDelta.z) < thickness) {
float sampleCosHorizon = dot(viewDir, normalize(viewDelta));
cosHorizons.y += max(0., (sampleCosHorizon - cosHorizons.y) * mix(1., 2. / float(j + 2), distanceFallOff));
}
}
vec2 sinHorizons = sqrt(1. - cosHorizons * cosHorizons);
float nx = dot(normalInSlice, sliceTangent);
float ny = dot(normalInSlice, viewDir);
float nxb = 1. / 2. * (acos(cosHorizons.y) - acos(cosHorizons.x) + sinHorizons.x * cosHorizons.x - sinHorizons.y * cosHorizons.y);
float nyb = 1. / 2. * (2. - cosHorizons.x * cosHorizons.x - cosHorizons.y * cosHorizons.y);
float occlusion = nx * nxb + ny * nyb;
ao += occlusion;
}
ao = clamp(ao / float(DIRECTIONS), 0., 1.);
#if SCENE_CLIP_BOX == 1
ao = mix(ao, 1., smoothstep(0., radiusToUse, boxDistance));
#endif
ao = pow(ao, scale);
gl_FragColor = FRAGMENT_OUTPUT;
}`
};
  209. /**
* GTAO depth shader. Used by {@link GTAOPass}.
  211. *
  212. * @constant
  213. * @type {Object}
  214. */
const GTAODepthShader = {

	name: 'GTAODepthShader',

	defines: {
		PERSPECTIVE_CAMERA: 1 // 1 = linearize perspective depth before display, 0 = use orthographic depth as-is
	},

	uniforms: {
		tDepth: { value: null }, // depth texture; depth is read from the x channel
		cameraNear: { value: null },
		cameraFar: { value: null },
	},

	// Full-screen pass-through vertex shader.
	vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,

	// Visualizes linear depth as grayscale (near = white, far = black).
	fragmentShader: /* glsl */`
uniform sampler2D tDepth;
uniform float cameraNear;
uniform float cameraFar;
varying vec2 vUv;
#include <packing>
float getLinearDepth( const in vec2 screenPosition ) {
#if PERSPECTIVE_CAMERA == 1
float fragCoordZ = texture2D( tDepth, screenPosition ).x;
float viewZ = perspectiveDepthToViewZ( fragCoordZ, cameraNear, cameraFar );
return viewZToOrthographicDepth( viewZ, cameraNear, cameraFar );
#else
return texture2D( tDepth, screenPosition ).x;
#endif
}
void main() {
float depth = getLinearDepth( vUv );
gl_FragColor = vec4( vec3( 1.0 - depth ), 1.0 );
}`
};
  251. /**
* GTAO blend shader. Used by {@link GTAOPass}.
  253. *
  254. * @constant
  255. * @type {Object}
  256. */
const GTAOBlendShader = {

	name: 'GTAOBlendShader',

	uniforms: {
		tDiffuse: { value: null }, // AO texture to composite
		intensity: { value: 1.0 } // 1 = use the texel unchanged, 0 = output white (AO disabled)
	},

	// Full-screen pass-through vertex shader.
	vertexShader: /* glsl */`
varying vec2 vUv;
void main() {
vUv = uv;
gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
}`,

	// Fades the sampled color toward white by (1 - intensity); alpha is preserved.
	fragmentShader: /* glsl */`
uniform float intensity;
uniform sampler2D tDiffuse;
varying vec2 vUv;
void main() {
vec4 texel = texture2D( tDiffuse, vUv );
gl_FragColor = vec4(mix(vec3(1.), texel.rgb, intensity), texel.a);
}`
};
  278. function generateMagicSquareNoise( size = 5 ) {
  279. const noiseSize = Math.floor( size ) % 2 === 0 ? Math.floor( size ) + 1 : Math.floor( size );
  280. const magicSquare = generateMagicSquare( noiseSize );
  281. const noiseSquareSize = magicSquare.length;
  282. const data = new Uint8Array( noiseSquareSize * 4 );
  283. for ( let inx = 0; inx < noiseSquareSize; ++ inx ) {
  284. const iAng = magicSquare[ inx ];
  285. const angle = ( 2 * Math.PI * iAng ) / noiseSquareSize;
  286. const randomVec = new Vector3(
  287. Math.cos( angle ),
  288. Math.sin( angle ),
  289. 0
  290. ).normalize();
  291. data[ inx * 4 ] = ( randomVec.x * 0.5 + 0.5 ) * 255;
  292. data[ inx * 4 + 1 ] = ( randomVec.y * 0.5 + 0.5 ) * 255;
  293. data[ inx * 4 + 2 ] = 127;
  294. data[ inx * 4 + 3 ] = 255;
  295. }
  296. const noiseTexture = new DataTexture( data, noiseSize, noiseSize );
  297. noiseTexture.wrapS = RepeatWrapping;
  298. noiseTexture.wrapT = RepeatWrapping;
  299. noiseTexture.needsUpdate = true;
  300. return noiseTexture;
  301. }
  302. function generateMagicSquare( size ) {
  303. const noiseSize = Math.floor( size ) % 2 === 0 ? Math.floor( size ) + 1 : Math.floor( size );
  304. const noiseSquareSize = noiseSize * noiseSize;
  305. const magicSquare = Array( noiseSquareSize ).fill( 0 );
  306. let i = Math.floor( noiseSize / 2 );
  307. let j = noiseSize - 1;
  308. for ( let num = 1; num <= noiseSquareSize; ) {
  309. if ( i === - 1 && j === noiseSize ) {
  310. j = noiseSize - 2;
  311. i = 0;
  312. } else {
  313. if ( j === noiseSize ) {
  314. j = 0;
  315. }
  316. if ( i < 0 ) {
  317. i = noiseSize - 1;
  318. }
  319. }
  320. if ( magicSquare[ i * noiseSize + j ] !== 0 ) {
  321. j -= 2;
  322. i ++;
  323. continue;
  324. } else {
  325. magicSquare[ i * noiseSize + j ] = num ++;
  326. }
  327. j ++;
  328. i --;
  329. }
  330. return magicSquare;
  331. }
  332. export { generateMagicSquareNoise, GTAOShader, GTAODepthShader, GTAOBlendShader };
粤ICP备19079148号