
import { DataTexture, RenderTarget, RepeatWrapping, Vector2, Vector3, TempNode, QuadMesh, NodeMaterial, RendererUtils, RedFormat } from 'three/webgpu';
import { reference, logarithmicDepthToViewZ, viewZToPerspectiveDepth, getNormalFromDepth, getScreenPosition, getViewPosition, nodeObject, Fn, float, NodeUpdateType, uv, uniform, Loop, vec2, vec3, vec4, int, dot, max, pow, abs, If, textureSize, sin, cos, PI, texture, passTexture, mat3, add, normalize, mul, cross, div, mix, sqrt, sub, acos, clamp } from 'three/tsl';

const _quadMesh = /*@__PURE__*/ new QuadMesh();
const _size = /*@__PURE__*/ new Vector2();

// From Activision GTAO paper: https://www.activision.com/cdn/research/s2016_pbs_activision_occlusion.pptx
const _temporalRotations = [ 60, 300, 180, 240, 120, 0 ];
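// When temporal filtering is enabled, updateBefore() cycles through these rotations via `frameId % 6`.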

let _rendererState;

/**
 * Post processing node for applying Ground Truth Ambient Occlusion (GTAO) to a scene.
 * ```js
 * const postProcessing = new THREE.PostProcessing( renderer );
 *
 * const scenePass = pass( scene, camera );
 * scenePass.setMRT( mrt( {
 * 	output: output,
 * 	normal: normalView
 * } ) );
 *
 * const scenePassColor = scenePass.getTextureNode( 'output' );
 * const scenePassNormal = scenePass.getTextureNode( 'normal' );
 * const scenePassDepth = scenePass.getTextureNode( 'depth' );
 *
 * const aoPass = ao( scenePassDepth, scenePassNormal, camera );
 *
 * postProcessing.outputNode = aoPass.getTextureNode().mul( scenePassColor );
 * ```
 *
 * Reference: [Practical Real-Time Strategies for Accurate Indirect Occlusion](https://www.activision.com/cdn/research/Practical_Real_Time_Strategies_for_Accurate_Indirect_Occlusion_NEW%20VERSION_COLOR.pdf).
 *
 * @augments TempNode
 * @three_import import { ao } from 'three/addons/tsl/display/GTAONode.js';
 */
class GTAONode extends TempNode {

	static get type() {

		return 'GTAONode';

	}

	/**
	 * Constructs a new GTAO node.
	 *
	 * @param {Node<float>} depthNode - A node that represents the scene's depth.
	 * @param {?Node<vec3>} normalNode - A node that represents the scene's normals.
	 * @param {Camera} camera - The camera the scene is rendered with.
	 */
	constructor( depthNode, normalNode, camera ) {

		super( 'float' );

		/**
		 * A node that represents the scene's depth.
		 *
		 * @type {Node<float>}
		 */
		this.depthNode = depthNode;

		/**
		 * A node that represents the scene's normals. If no normals are passed to the
		 * constructor (because MRT is not available), normals can be automatically
		 * reconstructed from depth values in the shader.
		 *
		 * @type {?Node<vec3>}
		 */
		this.normalNode = normalNode;

		/**
		 * The resolution scale. By default the effect is rendered in full resolution
		 * for best quality but a value of `0.5` should be sufficient for most scenes.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.resolutionScale = 1;

		/**
		 * The `updateBeforeType` is set to `NodeUpdateType.FRAME` since the node renders
		 * its effect once per frame in `updateBefore()`.
		 *
		 * @type {string}
		 * @default 'frame'
		 */
		this.updateBeforeType = NodeUpdateType.FRAME;

		/**
		 * The render target the ambient occlusion is rendered into.
		 *
		 * @private
		 * @type {RenderTarget}
		 */
		this._aoRenderTarget = new RenderTarget( 1, 1, { depthBuffer: false, format: RedFormat } );
		this._aoRenderTarget.texture.name = 'GTAONode.AO';

		// uniforms

		/**
		 * The radius of the ambient occlusion.
		 *
		 * @type {UniformNode<float>}
		 */
		this.radius = uniform( 0.25 );

		/**
		 * The resolution of the effect. Can be scaled via
		 * `resolutionScale`.
		 *
		 * @type {UniformNode<vec2>}
		 */
		this.resolution = uniform( new Vector2() );

		/**
		 * The thickness of the ambient occlusion.
		 *
		 * @type {UniformNode<float>}
		 */
		this.thickness = uniform( 1 );

		/**
		 * The distance exponent of the ambient occlusion. It controls how the
		 * samples are distributed within the occlusion radius and thus how the
		 * AO attenuates with distance. The recommended range is `[1, 2]`.
		 *
		 * @type {UniformNode<float>}
		 */
		this.distanceExponent = uniform( 1 );

		/**
		 * The distance fall off value of the ambient occlusion.
		 * A lower value leads to a larger AO effect. The value
		 * should lie in the range `[0, 1]`.
		 *
		 * @type {UniformNode<float>}
		 */
		this.distanceFallOff = uniform( 1 );

		/**
		 * The scale of the ambient occlusion.
		 *
		 * @type {UniformNode<float>}
		 */
		this.scale = uniform( 1 );

		/**
		 * How many samples are used to compute the AO.
		 * A higher value results in better quality but also
		 * in a more expensive runtime behavior.
		 *
		 * @type {UniformNode<float>}
		 */
		this.samples = uniform( 16 );

		/**
		 * Whether to use temporal filtering or not. Setting this property to
		 * `true` requires the usage of `TRAANode`. This will help to reduce noise
		 * although it introduces typical TAA artifacts like ghosting and temporal
		 * instabilities.
		 *
		 * If setting this property to `false`, a manual denoise via `DenoiseNode`
		 * might be required.
		 *
		 * @type {boolean}
		 * @default false
		 */
		this.useTemporalFiltering = false;

		/**
		 * The node represents the internal noise texture used by the AO.
		 *
		 * @private
		 * @type {TextureNode}
		 */
		this._noiseNode = texture( generateMagicSquareNoise() );

		/**
		 * Represents the projection matrix of the scene's camera.
		 *
		 * @private
		 * @type {UniformNode<mat4>}
		 */
		this._cameraProjectionMatrix = uniform( camera.projectionMatrix );

		/**
		 * Represents the inverse projection matrix of the scene's camera.
		 *
		 * @private
		 * @type {UniformNode<mat4>}
		 */
		this._cameraProjectionMatrixInverse = uniform( camera.projectionMatrixInverse );

		/**
		 * Represents the near value of the scene's camera.
		 *
		 * @private
		 * @type {ReferenceNode<float>}
		 */
		this._cameraNear = reference( 'near', 'float', camera );

		/**
		 * Represents the far value of the scene's camera.
		 *
		 * @private
		 * @type {ReferenceNode<float>}
		 */
		this._cameraFar = reference( 'far', 'float', camera );

		/**
		 * Temporal direction that influences the rotation angle for each slice.
		 *
		 * @private
		 * @type {UniformNode<float>}
		 */
		this._temporalDirection = uniform( 0 );

		/**
		 * The material that is used to render the effect.
		 *
		 * @private
		 * @type {NodeMaterial}
		 */
		this._material = new NodeMaterial();
		this._material.name = 'GTAO';

		/**
		 * The result of the effect is represented as a separate texture node.
		 *
		 * @private
		 * @type {PassTextureNode}
		 */
		this._textureNode = passTexture( this, this._aoRenderTarget.texture );

	}

	/**
	 * Returns the result of the effect as a texture node.
	 *
	 * @return {PassTextureNode} A texture node that represents the result of the effect.
	 */
	getTextureNode() {

		return this._textureNode;

	}

	/**
	 * Sets the size of the effect.
	 *
	 * @param {number} width - The width of the effect.
	 * @param {number} height - The height of the effect.
	 */
	setSize( width, height ) {

		width = Math.round( this.resolutionScale * width );
		height = Math.round( this.resolutionScale * height );

		this.resolution.value.set( width, height );
		this._aoRenderTarget.setSize( width, height );

	}

	/**
	 * This method is used to render the effect once per frame.
	 *
	 * @param {NodeFrame} frame - The current node frame.
	 */
	updateBefore( frame ) {

		const { renderer } = frame;

		_rendererState = RendererUtils.resetRendererState( renderer, _rendererState );

		// update temporal uniforms

		if ( this.useTemporalFiltering === true ) {

			const frameId = frame.frameId;

			this._temporalDirection.value = _temporalRotations[ frameId % 6 ] / 360;

		} else {

			this._temporalDirection.value = 0;

		}

		//

		const size = renderer.getDrawingBufferSize( _size );
		this.setSize( size.width, size.height );

		_quadMesh.material = this._material;
		_quadMesh.name = 'AO';

		// clear

		renderer.setClearColor( 0xffffff, 1 );

		// ao

		renderer.setRenderTarget( this._aoRenderTarget );
		_quadMesh.render( renderer );

		// restore

		RendererUtils.restoreRendererState( renderer, _rendererState );

	}

	/**
	 * This method is used to setup the effect's TSL code.
	 *
	 * @param {NodeBuilder} builder - The current node builder.
	 * @return {PassTextureNode}
	 */
	setup( builder ) {

		const uvNode = uv();

		const sampleDepth = ( uv ) => {

			const depth = this.depthNode.sample( uv ).r;

			if ( builder.renderer.logarithmicDepthBuffer === true ) {

				const viewZ = logarithmicDepthToViewZ( depth, this._cameraNear, this._cameraFar );

				return viewZToPerspectiveDepth( viewZ, this._cameraNear, this._cameraFar );

			}

			return depth;

		};

		const sampleNoise = ( uv ) => this._noiseNode.sample( uv );
		const sampleNormal = ( uv ) => ( this.normalNode !== null ) ? this.normalNode.sample( uv ).rgb.normalize() : getNormalFromDepth( uv, this.depthNode.value, this._cameraProjectionMatrixInverse );

		const ao = Fn( () => {

			const depth = sampleDepth( uvNode ).toVar();

			depth.greaterThanEqual( 1.0 ).discard();

			const viewPosition = getViewPosition( uvNode, depth, this._cameraProjectionMatrixInverse ).toVar();
			const viewNormal = sampleNormal( uvNode ).toVar();

			const radiusToUse = this.radius;

			const noiseResolution = textureSize( this._noiseNode, 0 );
			let noiseUv = vec2( uvNode.x, uvNode.y.oneMinus() );
			noiseUv = noiseUv.mul( this.resolution.div( noiseResolution ) );
			const noiseTexel = sampleNoise( noiseUv );
			const randomVec = noiseTexel.xyz.mul( 2.0 ).sub( 1.0 );
			const tangent = vec3( randomVec.xy, 0.0 ).normalize();
			const bitangent = vec3( tangent.y.mul( - 1.0 ), tangent.x, 0.0 );
			const kernelMatrix = mat3( tangent, bitangent, vec3( 0.0, 0.0, 1.0 ) );

			const DIRECTIONS = this.samples.lessThan( 30 ).select( 3, 5 ).toVar();
			const STEPS = add( this.samples, DIRECTIONS.sub( 1 ) ).div( DIRECTIONS ).toVar();

			const ao = float( 0 ).toVar();

			// Each iteration analyzes one vertical "slice" of the 3D space around the fragment.

			Loop( { start: int( 0 ), end: DIRECTIONS, type: 'int', condition: '<' }, ( { i } ) => {

				const angle = float( i ).div( float( DIRECTIONS ) ).mul( PI ).add( this._temporalDirection ).toVar();
				const sampleDir = vec4( cos( angle ), sin( angle ), 0., add( 0.5, mul( 0.5, noiseTexel.w ) ) );
				sampleDir.xyz = normalize( kernelMatrix.mul( sampleDir.xyz ) );

				const viewDir = normalize( viewPosition.xyz.negate() ).toVar();
				const sliceBitangent = normalize( cross( sampleDir.xyz, viewDir ) ).toVar();
				const sliceTangent = cross( sliceBitangent, viewDir );
				const normalInSlice = normalize( viewNormal.sub( sliceBitangent.mul( dot( viewNormal, sliceBitangent ) ) ) );

				const tangentToNormalInSlice = cross( normalInSlice, sliceBitangent ).toVar();
				const cosHorizons = vec2( dot( viewDir, tangentToNormalInSlice ), dot( viewDir, tangentToNormalInSlice.negate() ) ).toVar();

				// For each slice, the inner loop performs ray marching to find the horizons.

				Loop( { end: STEPS, type: 'int', name: 'j', condition: '<' }, ( { j } ) => {

					const sampleViewOffset = sampleDir.xyz.mul( radiusToUse ).mul( sampleDir.w ).mul( pow( div( float( j ).add( 1.0 ), float( STEPS ) ), this.distanceExponent ) );

					// The loop marches in two opposite directions (x and y) along the slice's line to find the horizon on both sides.

					// x

					const sampleScreenPositionX = getScreenPosition( viewPosition.add( sampleViewOffset ), this._cameraProjectionMatrix ).toVar();
					const sampleDepthX = sampleDepth( sampleScreenPositionX ).toVar();
					const sampleSceneViewPositionX = getViewPosition( sampleScreenPositionX, sampleDepthX, this._cameraProjectionMatrixInverse ).toVar();
					const viewDeltaX = sampleSceneViewPositionX.sub( viewPosition ).toVar();

					If( abs( viewDeltaX.z ).lessThan( this.thickness ), () => {

						const sampleCosHorizon = dot( viewDir, normalize( viewDeltaX ) );
						cosHorizons.x.addAssign( max( 0, mul( sampleCosHorizon.sub( cosHorizons.x ), mix( 1.0, float( 2.0 ).div( float( j ).add( 2 ) ), this.distanceFallOff ) ) ) );

					} );

					// y

					const sampleScreenPositionY = getScreenPosition( viewPosition.sub( sampleViewOffset ), this._cameraProjectionMatrix ).toVar();
					const sampleDepthY = sampleDepth( sampleScreenPositionY ).toVar();
					const sampleSceneViewPositionY = getViewPosition( sampleScreenPositionY, sampleDepthY, this._cameraProjectionMatrixInverse ).toVar();
					const viewDeltaY = sampleSceneViewPositionY.sub( viewPosition ).toVar();

					If( abs( viewDeltaY.z ).lessThan( this.thickness ), () => {

						const sampleCosHorizon = dot( viewDir, normalize( viewDeltaY ) );
						cosHorizons.y.addAssign( max( 0, mul( sampleCosHorizon.sub( cosHorizons.y ), mix( 1.0, float( 2.0 ).div( float( j ).add( 2 ) ), this.distanceFallOff ) ) ) );

					} );

				} );

				// After the horizons are found for a given slice, their contribution to the total occlusion is calculated.

				const sinHorizons = sqrt( sub( 1.0, cosHorizons.mul( cosHorizons ) ) ).toVar();
				const nx = dot( normalInSlice, sliceTangent );
				const ny = dot( normalInSlice, viewDir );
				const nxb = mul( 0.5, acos( cosHorizons.y ).sub( acos( cosHorizons.x ) ).add( sinHorizons.x.mul( cosHorizons.x ).sub( sinHorizons.y.mul( cosHorizons.y ) ) ) );
				const nyb = mul( 0.5, sub( 2.0, cosHorizons.x.mul( cosHorizons.x ) ).sub( cosHorizons.y.mul( cosHorizons.y ) ) );
				const occlusion = nx.mul( nxb ).add( ny.mul( nyb ) );
				ao.addAssign( occlusion );

			} );

			ao.assign( clamp( ao.div( DIRECTIONS ), 0, 1 ) );
			ao.assign( pow( ao, this.scale ) );

			return ao;

		} );

		this._material.fragmentNode = ao().context( builder.getSharedContext() );
		this._material.needsUpdate = true;

		//

		return this._textureNode;

	}

	/**
	 * Frees internal resources. This method should be called
	 * when the effect is no longer required.
	 */
	dispose() {

		this._aoRenderTarget.dispose();
		this._material.dispose();

	}

}

export default GTAONode;

/**
 * Generates the AO's noise texture for the given size.
 *
 * @param {number} [size=5] - The noise size.
 * @return {DataTexture} The generated noise texture.
 */
function generateMagicSquareNoise( size = 5 ) {

	const noiseSize = Math.floor( size ) % 2 === 0 ? Math.floor( size ) + 1 : Math.floor( size );
	const magicSquare = generateMagicSquare( noiseSize );
	const noiseSquareSize = magicSquare.length;

	const data = new Uint8Array( noiseSquareSize * 4 );

	for ( let inx = 0; inx < noiseSquareSize; ++ inx ) {

		const iAng = magicSquare[ inx ];
		const angle = ( 2 * Math.PI * iAng ) / noiseSquareSize;
		const randomVec = new Vector3(
			Math.cos( angle ),
			Math.sin( angle ),
			0
		).normalize();

		data[ inx * 4 ] = ( randomVec.x * 0.5 + 0.5 ) * 255;
		data[ inx * 4 + 1 ] = ( randomVec.y * 0.5 + 0.5 ) * 255;
		data[ inx * 4 + 2 ] = 127;
		data[ inx * 4 + 3 ] = 255;

	}

	const noiseTexture = new DataTexture( data, noiseSize, noiseSize );
	noiseTexture.wrapS = RepeatWrapping;
	noiseTexture.wrapT = RepeatWrapping;
	noiseTexture.needsUpdate = true;

	return noiseTexture;

}

/**
 * Computes an array of magic square values required to generate the noise texture.
 *
 * @param {number} size - The noise size.
 * @return {Array<number>} The magic square values.
 */
function generateMagicSquare( size ) {

	const noiseSize = Math.floor( size ) % 2 === 0 ? Math.floor( size ) + 1 : Math.floor( size );
	const noiseSquareSize = noiseSize * noiseSize;
	const magicSquare = Array( noiseSquareSize ).fill( 0 );

	// Siamese-style construction for odd-order magic squares: starting in the middle of the
	// last column, consecutive numbers are placed while moving one row up and one column to
	// the right (wrapping around the edges). When the target cell is already occupied, the
	// break move `j -= 2; i ++` shifts two columns left and one row down instead.

	let i = Math.floor( noiseSize / 2 );
	let j = noiseSize - 1;

	for ( let num = 1; num <= noiseSquareSize; ) {

		if ( i === - 1 && j === noiseSize ) {

			// stepped off the top-right corner

			j = noiseSize - 2;
			i = 0;

		} else {

			if ( j === noiseSize ) {

				j = 0;

			}

			if ( i < 0 ) {

				i = noiseSize - 1;

			}

		}

		if ( magicSquare[ i * noiseSize + j ] !== 0 ) {

			j -= 2;
			i ++;
			continue;

		} else {

			magicSquare[ i * noiseSize + j ] = num ++;

		}

		j ++;
		i --;

	}

	return magicSquare;

}
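
// Worked example (derived by tracing the loop above): generateMagicSquare( 3 ) yields
//
//   2 7 6
//   9 5 1
//   4 3 8
//
// where every row, column and diagonal sums to 15.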

/**
 * TSL function for creating a Ground Truth Ambient Occlusion (GTAO) effect.
 *
 * @tsl
 * @function
 * @param {Node<float>} depthNode - A node that represents the scene's depth.
 * @param {?Node<vec3>} normalNode - A node that represents the scene's normals.
 * @param {Camera} camera - The camera the scene is rendered with.
 * @returns {GTAONode}
 */
export const ao = ( depthNode, normalNode, camera ) => new GTAONode( nodeObject( depthNode ), nodeObject( normalNode ), camera );
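
// Usage sketch, mirroring the example in the class documentation above. It assumes `pass`,
// `mrt`, `output` and `normalView` are imported from 'three/tsl' alongside a WebGPU renderer.
//
// import { pass, mrt, output, normalView } from 'three/tsl';
// import { ao } from 'three/addons/tsl/display/GTAONode.js';
//
// const postProcessing = new THREE.PostProcessing( renderer );
//
// const scenePass = pass( scene, camera );
// scenePass.setMRT( mrt( { output: output, normal: normalView } ) );
//
// const aoPass = ao( scenePass.getTextureNode( 'depth' ), scenePass.getTextureNode( 'normal' ), camera );
// aoPass.radius.value = 0.5; // tweak the uniforms as needed (radius, thickness, scale, samples)
// aoPass.resolutionScale = 0.5; // render the AO at half resolution for better performance
//
// postProcessing.outputNode = aoPass.getTextureNode().mul( scenePass.getTextureNode( 'output' ) );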