  1. import { AdditiveBlending, Color, Vector2, RendererUtils, PassNode, QuadMesh, NodeMaterial } from 'three/webgpu';
  2. import { nodeObject, uniform, mrt, texture, getTextureIndex } from 'three/tsl';
  3. const _size = /*@__PURE__*/ new Vector2();
  4. let _rendererState;
/**
 * A special render pass node that renders the scene with SSAA (Supersampling Anti-Aliasing).
 * This manual SSAA approach re-renders the scene once for each sample with camera jitter and accumulates the results.
 *
 * This node produces a high-quality anti-aliased output but is also extremely expensive because of
 * its brute-force approach of re-rendering the entire scene multiple times.
 *
 * Reference: {@link https://en.wikipedia.org/wiki/Supersampling}
 *
 * @augments PassNode
 */
  16. class SSAAPassNode extends PassNode {
  17. static get type() {
  18. return 'SSAAPassNode';
  19. }
  20. /**
  21. * Constructs a new SSAA pass node.
  22. *
  23. * @param {Scene} scene - The scene to render.
  24. * @param {Camera} camera - The camera to render the scene with.
  25. */
  26. constructor( scene, camera ) {
  27. super( PassNode.COLOR, scene, camera );
  28. /**
  29. * This flag can be used for type testing.
  30. *
  31. * @type {boolean}
  32. * @readonly
  33. * @default true
  34. */
  35. this.isSSAAPassNode = true;
  36. /**
  37. * The sample level specified as n, where the number of samples is 2^n,
  38. * so sampleLevel = 4, is 2^4 samples, 16.
  39. *
  40. * @type {number}
  41. * @default 4
  42. */
  43. this.sampleLevel = 4;
  44. /**
  45. * Whether rounding errors should be mitigated or not.
  46. *
  47. * @type {boolean}
  48. * @default true
  49. */
  50. this.unbiased = true;
  51. /**
  52. * The clear color of the pass.
  53. *
  54. * @type {Color}
  55. * @default 0x000000
  56. */
  57. this.clearColor = new Color( 0x000000 );
  58. /**
  59. * The clear alpha of the pass.
  60. *
  61. * @type {number}
  62. * @default 0
  63. */
  64. this.clearAlpha = 0;
  65. /**
  66. * A uniform node representing the sample weight.
  67. *
  68. * @type {UniformNode<float>}
  69. * @default 1
  70. */
  71. this.sampleWeight = uniform( 1 );
  72. /**
  73. * Reference to the internal render target that holds the current sample.
  74. *
  75. * @private
  76. * @type {?RenderTarget}
  77. * @default null
  78. */
  79. this._sampleRenderTarget = null;
  80. /**
  81. * Reference to the internal quad mesh.
  82. *
  83. * @private
  84. * @type {QuadMesh}
  85. */
  86. this._quadMesh = new QuadMesh();
  87. }
  88. /**
  89. * This method is used to render the SSAA effect once per frame.
  90. *
  91. * @param {NodeFrame} frame - The current node frame.
  92. */
  93. updateBefore( frame ) {
  94. const { renderer } = frame;
  95. const { scene, camera } = this;
  96. _rendererState = RendererUtils.resetRendererAndSceneState( renderer, scene, _rendererState );
  97. //
  98. this._pixelRatio = renderer.getPixelRatio();
  99. const size = renderer.getSize( _size );
  100. this.setSize( size.width, size.height );
  101. this._sampleRenderTarget.setSize( this.renderTarget.width, this.renderTarget.height );
  102. //
  103. this._cameraNear.value = camera.near;
  104. this._cameraFar.value = camera.far;
  105. renderer.setMRT( this.getMRT() );
  106. renderer.autoClear = false;
  107. const jitterOffsets = _JitterVectors[ Math.max( 0, Math.min( this.sampleLevel, 5 ) ) ];
  108. const baseSampleWeight = 1.0 / jitterOffsets.length;
  109. const roundingRange = 1 / 32;
  110. const viewOffset = {
  111. fullWidth: this.renderTarget.width,
  112. fullHeight: this.renderTarget.height,
  113. offsetX: 0,
  114. offsetY: 0,
  115. width: this.renderTarget.width,
  116. height: this.renderTarget.height
  117. };
  118. const originalViewOffset = Object.assign( {}, camera.view );
  119. if ( originalViewOffset.enabled ) Object.assign( viewOffset, originalViewOffset );
  120. // render the scene multiple times, each slightly jitter offset from the last and accumulate the results.
  121. for ( let i = 0; i < jitterOffsets.length; i ++ ) {
  122. const jitterOffset = jitterOffsets[ i ];
  123. if ( camera.setViewOffset ) {
  124. camera.setViewOffset(
  125. viewOffset.fullWidth, viewOffset.fullHeight,
  126. viewOffset.offsetX + jitterOffset[ 0 ] * 0.0625, viewOffset.offsetY + jitterOffset[ 1 ] * 0.0625, // 0.0625 = 1 / 16
  127. viewOffset.width, viewOffset.height
  128. );
  129. }
  130. this.sampleWeight.value = baseSampleWeight;
  131. if ( this.unbiased ) {
  132. // the theory is that equal weights for each sample lead to an accumulation of rounding errors.
  133. // The following equation varies the sampleWeight per sample so that it is uniformly distributed
  134. // across a range of values whose rounding errors cancel each other out.
  135. const uniformCenteredDistribution = ( - 0.5 + ( i + 0.5 ) / jitterOffsets.length );
  136. this.sampleWeight.value += roundingRange * uniformCenteredDistribution;
  137. }
  138. renderer.setClearColor( this.clearColor, this.clearAlpha );
  139. renderer.setRenderTarget( this._sampleRenderTarget );
  140. renderer.clear();
  141. renderer.render( scene, camera );
  142. // accumulation
  143. renderer.setRenderTarget( this.renderTarget );
  144. if ( i === 0 ) {
  145. renderer.setClearColor( 0x000000, 0.0 );
  146. renderer.clear();
  147. }
  148. this._quadMesh.render( renderer );
  149. }
  150. renderer.copyTextureToTexture( this._sampleRenderTarget.depthTexture, this.renderTarget.depthTexture );
  151. // restore
  152. if ( camera.setViewOffset && originalViewOffset.enabled ) {
  153. camera.setViewOffset(
  154. originalViewOffset.fullWidth, originalViewOffset.fullHeight,
  155. originalViewOffset.offsetX, originalViewOffset.offsetY,
  156. originalViewOffset.width, originalViewOffset.height
  157. );
  158. } else if ( camera.clearViewOffset ) {
  159. camera.clearViewOffset();
  160. }
  161. //
  162. RendererUtils.restoreRendererAndSceneState( renderer, scene, _rendererState );
  163. }
  164. /**
  165. * This method is used to setup the effect's MRT configuration and quad mesh.
  166. *
  167. * @param {NodeBuilder} builder - The current node builder.
  168. * @return {PassTextureNode}
  169. */
  170. setup( builder ) {
  171. if ( this._sampleRenderTarget === null ) {
  172. this._sampleRenderTarget = this.renderTarget.clone();
  173. }
  174. let sampleTexture;
  175. const passMRT = this.getMRT();
  176. if ( passMRT !== null ) {
  177. const outputs = {};
  178. for ( const name in passMRT.outputNodes ) {
  179. const index = getTextureIndex( this._sampleRenderTarget.textures, name );
  180. if ( index >= 0 ) {
  181. outputs[ name ] = texture( this._sampleRenderTarget.textures[ index ] ).mul( this.sampleWeight );
  182. }
  183. }
  184. sampleTexture = mrt( outputs );
  185. } else {
  186. sampleTexture = texture( this._sampleRenderTarget.texture ).mul( this.sampleWeight );
  187. }
  188. this._quadMesh.material = new NodeMaterial();
  189. this._quadMesh.material.fragmentNode = sampleTexture;
  190. this._quadMesh.material.transparent = true;
  191. this._quadMesh.material.depthTest = false;
  192. this._quadMesh.material.depthWrite = false;
  193. this._quadMesh.material.premultipliedAlpha = true;
  194. this._quadMesh.material.blending = AdditiveBlending;
  195. this._quadMesh.material.name = 'SSAA';
  196. return super.setup( builder );
  197. }
  198. /**
  199. * Frees internal resources. This method should be called
  200. * when the pass is no longer required.
  201. */
  202. dispose() {
  203. super.dispose();
  204. if ( this._sampleRenderTarget !== null ) {
  205. this._sampleRenderTarget.dispose();
  206. }
  207. }
  208. }
  209. export default SSAAPassNode;
  210. // These jitter vectors are specified in integers because it is easier.
  211. // I am assuming a [-8,8) integer grid, but it needs to be mapped onto [-0.5,0.5)
  212. // before being used, thus these integers need to be scaled by 1/16.
  213. //
  214. // Sample patterns reference: https://msdn.microsoft.com/en-us/library/windows/desktop/ff476218%28v=vs.85%29.aspx?f=255&MSPPError=-2147217396
  215. const _JitterVectors = [
  216. [
  217. [ 0, 0 ]
  218. ],
  219. [
  220. [ 4, 4 ], [ - 4, - 4 ]
  221. ],
  222. [
  223. [ - 2, - 6 ], [ 6, - 2 ], [ - 6, 2 ], [ 2, 6 ]
  224. ],
  225. [
  226. [ 1, - 3 ], [ - 1, 3 ], [ 5, 1 ], [ - 3, - 5 ],
  227. [ - 5, 5 ], [ - 7, - 1 ], [ 3, 7 ], [ 7, - 7 ]
  228. ],
  229. [
  230. [ 1, 1 ], [ - 1, - 3 ], [ - 3, 2 ], [ 4, - 1 ],
  231. [ - 5, - 2 ], [ 2, 5 ], [ 5, 3 ], [ 3, - 5 ],
  232. [ - 2, 6 ], [ 0, - 7 ], [ - 4, - 6 ], [ - 6, 4 ],
  233. [ - 8, 0 ], [ 7, - 4 ], [ 6, 7 ], [ - 7, - 8 ]
  234. ],
  235. [
  236. [ - 4, - 7 ], [ - 7, - 5 ], [ - 3, - 5 ], [ - 5, - 4 ],
  237. [ - 1, - 4 ], [ - 2, - 2 ], [ - 6, - 1 ], [ - 4, 0 ],
  238. [ - 7, 1 ], [ - 1, 2 ], [ - 6, 3 ], [ - 3, 3 ],
  239. [ - 7, 6 ], [ - 3, 6 ], [ - 5, 7 ], [ - 1, 7 ],
  240. [ 5, - 7 ], [ 1, - 6 ], [ 6, - 5 ], [ 4, - 4 ],
  241. [ 2, - 3 ], [ 7, - 2 ], [ 1, - 1 ], [ 4, - 1 ],
  242. [ 2, 1 ], [ 6, 2 ], [ 0, 4 ], [ 4, 4 ],
  243. [ 2, 5 ], [ 7, 5 ], [ 5, 6 ], [ 3, 7 ]
  244. ]
  245. ];
  246. /**
  247. * TSL function for creating a SSAA pass node for Supersampling Anti-Aliasing.
  248. *
  249. * @tsl
  250. * @function
  251. * @param {Scene} scene - The scene to render.
  252. * @param {Camera} camera - The camera to render the scene with.
  253. * @returns {SSAAPassNode}
  254. */
  255. export const ssaaPass = ( scene, camera ) => nodeObject( new SSAAPassNode( scene, camera ) );