// SSGINode.js

  1. import { RenderTarget, Vector2, TempNode, QuadMesh, NodeMaterial, RendererUtils, MathUtils } from 'three/webgpu';
  2. import { clamp, normalize, reference, nodeObject, Fn, NodeUpdateType, uniform, vec4, passTexture, uv, logarithmicDepthToViewZ, viewZToPerspectiveDepth, getViewPosition, screenCoordinate, float, sub, fract, dot, vec2, rand, vec3, Loop, mul, PI, cos, sin, uint, cross, acos, sign, pow, luminance, If, max, abs, Break, sqrt, HALF_PI, div, ceil, shiftRight, convertToTexture, bool, getNormalFromDepth, countOneBits, interleavedGradientNoise } from 'three/tsl';
  3. const _quadMesh = /*@__PURE__*/ new QuadMesh();
  4. const _size = /*@__PURE__*/ new Vector2();
  5. // From Activision GTAO paper: https://www.activision.com/cdn/research/s2016_pbs_activision_occlusion.pptx
  6. const _temporalRotations = [ 60, 300, 180, 240, 120, 0 ];
  7. const _spatialOffsets = [ 0, 0.5, 0.25, 0.75 ];
  8. let _rendererState;
/**
 * Post processing node for applying Screen Space Global Illumination (SSGI) to a scene.
 *
 * References:
 * - {@link https://github.com/cdrinmatane/SSRT3}.
 * - {@link https://cdrinmatane.github.io/posts/ssaovb-code/}.
 * - {@link https://cdrinmatane.github.io/cgspotlight-slides/ssilvb_slides.pdf}.
 *
 * The quality and performance of the effect mainly depend on `sliceCount` and `stepCount`.
 * The total number of samples taken per pixel is `sliceCount` * `stepCount` * `2`. Here are some
 * recommended presets depending on whether temporal filtering is used or not.
 *
 * With temporal filtering (recommended):
 *
 * - Low: `sliceCount` of `1`, `stepCount` of `12`.
 * - Medium: `sliceCount` of `2`, `stepCount` of `8`.
 * - High: `sliceCount` of `3`, `stepCount` of `16`.
 *
 * Opt for a higher slice count if you notice temporal instabilities like flickering. Reduce the sample
 * count then to mitigate the performance loss.
 *
 * Without temporal filtering:
 *
 * - Low: `sliceCount` of `2`, `stepCount` of `6`.
 * - Medium: `sliceCount` of `3`, `stepCount` of `8`.
 * - High: `sliceCount` of `4`, `stepCount` of `12`.
 *
 * @augments TempNode
 * @three_import import { ssgi } from 'three/addons/tsl/display/SSGINode.js';
 */
  39. class SSGINode extends TempNode {
  40. static get type() {
  41. return 'SSGINode';
  42. }
  43. /**
  44. * Constructs a new SSGI node.
  45. *
  46. * @param {TextureNode} beautyNode - A texture node that represents the beauty or scene pass.
  47. * @param {TextureNode} depthNode - A texture node that represents the scene's depth.
  48. * @param {TextureNode} normalNode - A texture node that represents the scene's normals.
  49. * @param {PerspectiveCamera} camera - The camera the scene is rendered with.
  50. */
  51. constructor( beautyNode, depthNode, normalNode, camera ) {
  52. super( 'vec4' );
  53. /**
  54. * A texture node that represents the beauty or scene pass.
  55. *
  56. * @type {TextureNode}
  57. */
  58. this.beautyNode = beautyNode;
  59. /**
  60. * A node that represents the scene's depth.
  61. *
  62. * @type {TextureNode}
  63. */
  64. this.depthNode = depthNode;
  65. /**
  66. * A node that represents the scene's normals. If no normals are passed to the
  67. * constructor (because MRT is not available), normals can be automatically
  68. * reconstructed from depth values in the shader.
  69. *
  70. * @type {TextureNode}
  71. */
  72. this.normalNode = normalNode;
  73. /**
  74. * The `updateBeforeType` is set to `NodeUpdateType.FRAME` since the node renders
  75. * its effect once per frame in `updateBefore()`.
  76. *
  77. * @type {string}
  78. * @default 'frame'
  79. */
  80. this.updateBeforeType = NodeUpdateType.FRAME;
  81. /**
  82. * Number of per-pixel hemisphere slices. This has a big performance cost and should be kept as low as possible.
  83. * Should be in the range `[1, 4]`.
  84. *
  85. * @type {UniformNode<uint>}
  86. * @default 1
  87. */
  88. this.sliceCount = uniform( 1, 'uint' );
  89. /**
  90. * Number of samples taken along one side of a given hemisphere slice. This has a big performance cost and should
  91. * be kept as low as possible. Should be in the range `[1, 32]`.
  92. *
  93. * @type {UniformNode<uint>}
  94. * @default 12
  95. */
  96. this.stepCount = uniform( 12, 'uint' );
  97. /**
  98. * Power function applied to AO to make it appear darker/lighter. Should be in the range `[0, 4]`.
  99. *
  100. * @type {UniformNode<float>}
  101. * @default 1
  102. */
  103. this.aoIntensity = uniform( 1, 'float' );
  104. /**
  105. * Intensity of the indirect diffuse light. Should be in the range `[0, 100]`.
  106. *
  107. * @type {UniformNode<float>}
  108. * @default 10
  109. */
  110. this.giIntensity = uniform( 10, 'float' );
  111. /**
  112. * Effective sampling radius in world space. AO and GI can only have influence within that radius.
  113. * Should be in the range `[1, 25]`.
  114. *
  115. * @type {UniformNode<float>}
  116. * @default 12
  117. */
  118. this.radius = uniform( 12, 'float' );
  119. /**
  120. * Makes the sample distance in screen space instead of world-space (helps having more detail up close).
  121. *
  122. * @type {UniformNode<bool>}
  123. * @default false
  124. */
  125. this.useScreenSpaceSampling = uniform( true, 'bool' );
  126. /**
  127. * Controls samples distribution. It's an exponent applied at each step get increasing step size over the distance.
  128. * Should be in the range `[1, 3]`.
  129. *
  130. * @type {UniformNode<float>}
  131. * @default 2
  132. */
  133. this.expFactor = uniform( 2, 'float' );
  134. /**
  135. * Constant thickness value of objects on the screen in world units. Allows light to pass behind surfaces past that thickness value.
  136. * Should be in the range `[0.01, 10]`.
  137. *
  138. * @type {UniformNode<float>}
  139. * @default 1
  140. */
  141. this.thickness = uniform( 1, 'float' );
  142. /**
  143. * Whether to increase thickness linearly over distance or not (avoid losing detail over the distance).
  144. *
  145. * @type {UniformNode<bool>}
  146. * @default false
  147. */
  148. this.useLinearThickness = uniform( false, 'bool' );
  149. /**
  150. * How much light backface surfaces emit.
  151. * Should be in the range `[0, 1]`.
  152. *
  153. * @type {UniformNode<float>}
  154. * @default 0
  155. */
  156. this.backfaceLighting = uniform( 0, 'float' );
  157. /**
  158. * Whether to use temporal filtering or not. Setting this property to
  159. * `true` requires the usage of `TRAANode`. This will help to reduce noise
  160. * although it introduces typical TAA artifacts like ghosting and temporal
  161. * instabilities.
  162. *
  163. * If setting this property to `false`, a manual denoise via `DenoiseNode`
  164. * is required.
  165. *
  166. * @type {boolean}
  167. * @default true
  168. */
  169. this.useTemporalFiltering = true;
  170. // private uniforms
  171. /**
  172. * The resolution of the effect.
  173. *
  174. * @private
  175. * @type {UniformNode<vec2>}
  176. */
  177. this._resolution = uniform( new Vector2() );
  178. /**
  179. * Used to compute the effective step radius when viewSpaceSampling is `false`.
  180. *
  181. * @private
  182. * @type {UniformNode<vec2>}
  183. */
  184. this._halfProjScale = uniform( 1 );
  185. /**
  186. * Temporal direction that influences the rotation angle for each slice.
  187. *
  188. * @private
  189. * @type {UniformNode<float>}
  190. */
  191. this._temporalDirection = uniform( 0 );
  192. /**
  193. * Temporal offset added to the initial ray step.
  194. *
  195. * @private
  196. * @type {UniformNode<float>}
  197. */
  198. this._temporalOffset = uniform( 0 );
  199. /**
  200. * Represents the projection matrix of the scene's camera.
  201. *
  202. * @private
  203. * @type {UniformNode<mat4>}
  204. */
  205. this._cameraProjectionMatrix = uniform( camera.projectionMatrix );
  206. /**
  207. * Represents the inverse projection matrix of the scene's camera.
  208. *
  209. * @private
  210. * @type {UniformNode<mat4>}
  211. */
  212. this._cameraProjectionMatrixInverse = uniform( camera.projectionMatrixInverse );
  213. /**
  214. * Represents the near value of the scene's camera.
  215. *
  216. * @private
  217. * @type {ReferenceNode<float>}
  218. */
  219. this._cameraNear = reference( 'near', 'float', camera );
  220. /**
  221. * Represents the far value of the scene's camera.
  222. *
  223. * @private
  224. * @type {ReferenceNode<float>}
  225. */
  226. this._cameraFar = reference( 'far', 'float', camera );
  227. /**
  228. * A reference to the scene's camera.
  229. *
  230. * @private
  231. * @type {PerspectiveCamera}
  232. */
  233. this._camera = camera;
  234. /**
  235. * The render target the GI is rendered into.
  236. *
  237. * @private
  238. * @type {RenderTarget}
  239. */
  240. this._ssgiRenderTarget = new RenderTarget( 1, 1, { depthBuffer: false } );
  241. this._ssgiRenderTarget.texture.name = 'SSGI';
  242. /**
  243. * The material that is used to render the effect.
  244. *
  245. * @private
  246. * @type {NodeMaterial}
  247. */
  248. this._material = new NodeMaterial();
  249. this._material.name = 'SSGI';
  250. /**
  251. * The result of the effect is represented as a separate texture node.
  252. *
  253. * @private
  254. * @type {PassTextureNode}
  255. */
  256. this._textureNode = passTexture( this, this._ssgiRenderTarget.texture );
  257. }
  258. /**
  259. * Returns the result of the effect as a texture node.
  260. *
  261. * @return {PassTextureNode} A texture node that represents the result of the effect.
  262. */
  263. getTextureNode() {
  264. return this._textureNode;
  265. }
  266. /**
  267. * Sets the size of the effect.
  268. *
  269. * @param {number} width - The width of the effect.
  270. * @param {number} height - The height of the effect.
  271. */
  272. setSize( width, height ) {
  273. this._resolution.value.set( width, height );
  274. this._ssgiRenderTarget.setSize( width, height );
  275. this._halfProjScale.value = height / ( Math.tan( this._camera.fov * MathUtils.DEG2RAD * 0.5 ) * 2 ) * 0.5;
  276. }
  277. /**
  278. * This method is used to render the effect once per frame.
  279. *
  280. * @param {NodeFrame} frame - The current node frame.
  281. */
  282. updateBefore( frame ) {
  283. const { renderer } = frame;
  284. _rendererState = RendererUtils.resetRendererState( renderer, _rendererState );
  285. //
  286. const size = renderer.getDrawingBufferSize( _size );
  287. this.setSize( size.width, size.height );
  288. // update temporal uniforms
  289. if ( this.useTemporalFiltering === true ) {
  290. const frameId = frame.frameId;
  291. this._temporalDirection.value = _temporalRotations[ frameId % 6 ] / 360;
  292. this._temporalOffset.value = _spatialOffsets[ frameId % 4 ];
  293. } else {
  294. this._temporalDirection.value = 1;
  295. this._temporalOffset.value = 1;
  296. }
  297. //
  298. _quadMesh.material = this._material;
  299. _quadMesh.name = 'SSGI';
  300. // clear
  301. renderer.setClearColor( 0x000000, 1 );
  302. // gi
  303. renderer.setRenderTarget( this._ssgiRenderTarget );
  304. _quadMesh.render( renderer );
  305. // restore
  306. RendererUtils.restoreRendererState( renderer, _rendererState );
  307. }
  308. /**
  309. * This method is used to setup the effect's TSL code.
  310. *
  311. * @param {NodeBuilder} builder - The current node builder.
  312. * @return {PassTextureNode}
  313. */
  314. setup( builder ) {
  315. const uvNode = uv();
  316. const MAX_RAY = uint( 32 );
  317. const globalOccludedBitfield = uint( 0 );
  318. const sampleDepth = ( uv ) => {
  319. const depth = this.depthNode.sample( uv ).r;
  320. if ( builder.renderer.logarithmicDepthBuffer === true ) {
  321. const viewZ = logarithmicDepthToViewZ( depth, this._cameraNear, this._cameraFar );
  322. return viewZToPerspectiveDepth( viewZ, this._cameraNear, this._cameraFar );
  323. }
  324. return depth;
  325. };
  326. const sampleNormal = ( uv ) => ( this.normalNode !== null ) ? this.normalNode.sample( uv ).rgb.normalize() : getNormalFromDepth( uv, this.depthNode.value, this._cameraProjectionMatrixInverse );
  327. const sampleBeauty = ( uv ) => this.beautyNode.sample( uv );
  328. // From Activision GTAO paper: https://www.activision.com/cdn/research/s2016_pbs_activision_occlusion.pptx
  329. const spatialOffsets = Fn( ( [ position ] ) => {
  330. return float( 0.25 ).mul( sub( position.y, position.x ).bitAnd( 3 ) );
  331. } ).setLayout( {
  332. name: 'spatialOffsets',
  333. type: 'float',
  334. inputs: [
  335. { name: 'position', type: 'vec2' }
  336. ]
  337. } );
  338. const GTAOFastAcos = Fn( ( [ value ] ) => {
  339. const outVal = abs( value ).mul( float( - 0.156583 ) ).add( HALF_PI );
  340. outVal.mulAssign( sqrt( abs( value ).oneMinus() ) );
  341. const x = value.x.greaterThanEqual( 0 ).select( outVal.x, PI.sub( outVal.x ) );
  342. const y = value.y.greaterThanEqual( 0 ).select( outVal.y, PI.sub( outVal.y ) );
  343. return vec2( x, y );
  344. } ).setLayout( {
  345. name: 'GTAOFastAcos',
  346. type: 'vec2',
  347. inputs: [
  348. { name: 'value', type: 'vec2' }
  349. ]
  350. } );
  351. const horizonSampling = Fn( ( [ directionIsRight, RADIUS, viewPosition, slideDirTexelSize, initialRayStep, uvNode, viewDir, viewNormal, n ] ) => {
  352. const STEP_COUNT = this.stepCount.toConst();
  353. const EXP_FACTOR = this.expFactor.toConst();
  354. const THICKNESS = this.thickness.toConst();
  355. const BACKFACE_LIGHTING = this.backfaceLighting.toConst();
  356. const stepRadius = float( 0 );
  357. If( this.useScreenSpaceSampling.equal( true ), () => {
  358. stepRadius.assign( RADIUS.mul( this._resolution.x.div( 2 ) ).div( float( 16 ) ) ); // SSRT3 has a bug where stepRadius is divided by STEP_COUNT twice; fix here
  359. } ).Else( () => {
  360. stepRadius.assign( max( RADIUS.mul( this._halfProjScale ).div( viewPosition.z.negate() ), float( STEP_COUNT ) ) ); // Port note: viewZ is negative so a negate is required
  361. } );
  362. stepRadius.divAssign( float( STEP_COUNT ).add( 1 ) );
  363. const radiusVS = max( 1, float( STEP_COUNT.sub( 1 ) ) ).mul( stepRadius );
  364. const uvDirection = directionIsRight.equal( true ).select( vec2( 1, - 1 ), vec2( - 1, 1 ) ); // Port note: Because of different uv conventions, uv-y has a different sign
  365. const samplingDirection = directionIsRight.equal( true ).select( 1, - 1 );
  366. const color = vec3( 0 );
  367. const lastSampleViewPosition = vec3( viewPosition ).toVar();
  368. Loop( { start: uint( 0 ), end: STEP_COUNT, type: 'uint', condition: '<' }, ( { i } ) => {
  369. const offset = pow( abs( mul( stepRadius, float( i ).add( initialRayStep ) ).div( radiusVS ) ), EXP_FACTOR ).mul( radiusVS ).toConst();
  370. const uvOffset = slideDirTexelSize.mul( max( offset, float( i ).add( 1 ) ) ).toConst();
  371. const sampleUV = uvNode.add( uvOffset.mul( uvDirection ) ).toConst();
  372. If( sampleUV.x.lessThanEqual( 0 ).or( sampleUV.y.lessThanEqual( 0 ) ).or( sampleUV.x.greaterThanEqual( 1 ) ).or( sampleUV.y.greaterThanEqual( 1 ) ), () => {
  373. Break();
  374. } );
  375. const sampleViewPosition = getViewPosition( sampleUV, sampleDepth( sampleUV ), this._cameraProjectionMatrixInverse ).toConst();
  376. const pixelToSample = sampleViewPosition.sub( viewPosition ).normalize().toConst();
  377. const linearThicknessMultiplier = this.useLinearThickness.equal( true ).select( sampleViewPosition.z.negate().div( this._cameraFar ).clamp().mul( 100 ), float( 1 ) );
  378. const pixelToSampleBackface = normalize( sampleViewPosition.sub( linearThicknessMultiplier.mul( viewDir ).mul( THICKNESS ) ).sub( viewPosition ) );
  379. let frontBackHorizon = vec2( dot( pixelToSample, viewDir ), dot( pixelToSampleBackface, viewDir ) );
  380. frontBackHorizon = GTAOFastAcos( clamp( frontBackHorizon, - 1, 1 ) );
  381. frontBackHorizon = clamp( div( mul( samplingDirection, frontBackHorizon.negate() ).sub( n.sub( HALF_PI ) ), PI ) ); // Port note: subtract half pi instead of adding it
  382. frontBackHorizon = directionIsRight.equal( true ).select( frontBackHorizon.yx, frontBackHorizon.xy ); // Front/Back get inverted depending on angle
  383. // inline ComputeOccludedBitfield() for easier debugging
  384. const minHorizon = frontBackHorizon.x.toConst();
  385. const maxHorizon = frontBackHorizon.y.toConst();
  386. const startHorizonInt = uint( frontBackHorizon.mul( float( MAX_RAY ) ) ).toConst();
  387. const angleHorizonInt = uint( ceil( maxHorizon.sub( minHorizon ).mul( float( MAX_RAY ) ) ) ).toConst();
  388. const angleHorizonBitfield = angleHorizonInt.greaterThan( uint( 0 ) ).select( uint( shiftRight( uint( 0xFFFFFFFF ), uint( 32 ).sub( MAX_RAY ).add( MAX_RAY.sub( angleHorizonInt ) ) ) ), uint( 0 ) ).toConst();
  389. let currentOccludedBitfield = angleHorizonBitfield.shiftLeft( startHorizonInt );
  390. currentOccludedBitfield = currentOccludedBitfield.bitAnd( globalOccludedBitfield.bitNot() );
  391. globalOccludedBitfield.assign( globalOccludedBitfield.bitOr( currentOccludedBitfield ) );
  392. const numOccludedZones = countOneBits( currentOccludedBitfield );
  393. //
  394. If( numOccludedZones.greaterThan( 0 ), () => { // If a ray hit the sample, that sample is visible from shading point
  395. const lightColor = sampleBeauty( sampleUV );
  396. If( luminance( lightColor ).greaterThan( 0.001 ), () => { // Continue if there is light at that location (intensity > 0)
  397. const lightDirectionVS = normalize( pixelToSample );
  398. const normalDotLightDirection = clamp( dot( viewNormal, lightDirectionVS ) );
  399. If( normalDotLightDirection.greaterThan( 0.001 ), () => { // Continue if light is facing surface normal
  400. const lightNormalVS = sampleNormal( sampleUV );
  401. // Intensity of outgoing light in the direction of the shading point
  402. let lightNormalDotLightDirection = dot( lightNormalVS, lightDirectionVS.negate() );
  403. const d = sign( lightNormalDotLightDirection ).lessThan( 0 ).select( abs( lightNormalDotLightDirection ).mul( BACKFACE_LIGHTING ), abs( lightNormalDotLightDirection ) );
  404. lightNormalDotLightDirection = BACKFACE_LIGHTING.greaterThan( 0 ).and( dot( lightNormalVS, viewDir ).greaterThan( 0 ) ).select( d, clamp( lightNormalDotLightDirection ) );
  405. color.rgb.addAssign( float( numOccludedZones ).div( float( MAX_RAY ) ).mul( lightColor ).mul( normalDotLightDirection ).mul( lightNormalDotLightDirection ) );
  406. } );
  407. } );
  408. } );
  409. lastSampleViewPosition.assign( sampleViewPosition );
  410. } );
  411. return vec3( color );
  412. } );
  413. const gi = Fn( () => {
  414. const depth = sampleDepth( uvNode ).toVar();
  415. depth.greaterThanEqual( 1.0 ).discard();
  416. const viewPosition = getViewPosition( uvNode, depth, this._cameraProjectionMatrixInverse ).toVar();
  417. const viewNormal = sampleNormal( uvNode ).toVar();
  418. const viewDir = normalize( viewPosition.xyz.negate() ).toVar();
  419. //
  420. const noiseOffset = spatialOffsets( screenCoordinate );
  421. const noiseDirection = interleavedGradientNoise( screenCoordinate );
  422. const noiseJitterIdx = this._temporalDirection.mul( 0.02 ); // Port: Add noiseJitterIdx here for slightly better noise convergence with TRAA (see #31890 for more details)
  423. const initialRayStep = fract( noiseOffset.add( this._temporalOffset ) ).add( rand( uvNode.add( noiseJitterIdx ).mul( 2 ).sub( 1 ) ) );
  424. const ao = float( 0 );
  425. const color = vec3( 0 );
  426. const ROTATION_COUNT = this.sliceCount.toConst();
  427. const AO_INTENSITY = this.aoIntensity.toConst();
  428. const GI_INTENSITY = this.giIntensity.toConst();
  429. const RADIUS = this.radius.toConst();
  430. Loop( { start: uint( 0 ), end: ROTATION_COUNT, type: 'uint', condition: '<' }, ( { i } ) => {
  431. const rotationAngle = mul( float( i ).add( noiseDirection ).add( this._temporalDirection ), PI.div( float( ROTATION_COUNT ) ) ).toConst();
  432. const sliceDir = vec3( vec2( cos( rotationAngle ), sin( rotationAngle ) ), 0 ).toConst();
  433. const slideDirTexelSize = sliceDir.xy.mul( float( 1 ).div( this._resolution ) ).toConst();
  434. const planeNormal = normalize( cross( sliceDir, viewDir ) ).toConst();
  435. const tangent = cross( viewDir, planeNormal ).toConst();
  436. const projectedNormal = viewNormal.sub( planeNormal.mul( dot( viewNormal, planeNormal ) ) ).toConst();
  437. const projectedNormalNormalized = normalize( projectedNormal ).toConst();
  438. const cos_n = clamp( dot( projectedNormalNormalized, viewDir ), - 1, 1 ).toConst();
  439. const n = sign( dot( projectedNormal, tangent ) ).negate().mul( acos( cos_n ) ).toConst();
  440. globalOccludedBitfield.assign( 0 );
  441. color.addAssign( horizonSampling( bool( true ), RADIUS, viewPosition, slideDirTexelSize, initialRayStep, uvNode, viewDir, viewNormal, n ) );
  442. color.addAssign( horizonSampling( bool( false ), RADIUS, viewPosition, slideDirTexelSize, initialRayStep, uvNode, viewDir, viewNormal, n ) );
  443. ao.addAssign( float( countOneBits( globalOccludedBitfield ) ).div( float( MAX_RAY ) ) );
  444. } );
  445. ao.divAssign( float( ROTATION_COUNT ) );
  446. ao.assign( pow( ao.clamp().oneMinus(), AO_INTENSITY ).clamp() );
  447. color.divAssign( float( ROTATION_COUNT ) );
  448. color.mulAssign( GI_INTENSITY );
  449. // scale color based on luminance
  450. const maxLuminance = float( 7 ).toConst(); // 7 represent a HDR luminance value
  451. const currentLuminance = luminance( color );
  452. const scale = currentLuminance.greaterThan( maxLuminance ).select( maxLuminance.div( currentLuminance ), float( 1 ) );
  453. color.mulAssign( scale );
  454. return vec4( color, ao );
  455. } );
  456. this._material.fragmentNode = gi().context( builder.getSharedContext() );
  457. this._material.needsUpdate = true;
  458. //
  459. return this._textureNode;
  460. }
  461. /**
  462. * Frees internal resources. This method should be called
  463. * when the effect is no longer required.
  464. */
  465. dispose() {
  466. this._ssgiRenderTarget.dispose();
  467. this._material.dispose();
  468. }
  469. }
  470. export default SSGINode;
  471. /**
  472. * TSL function for creating a SSGI effect.
  473. *
  474. * @tsl
  475. * @function
  476. * @param {TextureNode} beautyNode - The texture node that represents the input of the effect.
  477. * @param {TextureNode} depthNode - A texture node that represents the scene's depth.
  478. * @param {TextureNode} normalNode - A texture node that represents the scene's normals.
  479. * @param {Camera} camera - The camera the scene is rendered with.
  480. * @returns {SSGINode}
  481. */
  482. export const ssgi = ( beautyNode, depthNode, normalNode, camera ) => nodeObject( new SSGINode( convertToTexture( beautyNode ), depthNode, normalNode, camera ) );