import { NearestFilter, RenderTarget, Vector2, RendererUtils, QuadMesh, TempNode, NodeMaterial, NodeUpdateType } from 'three/webgpu';
import { reference, viewZToPerspectiveDepth, logarithmicDepthToViewZ, getScreenPosition, getViewPosition, sqrt, mul, div, cross, float, Continue, Break, Loop, int, max, abs, sub, If, dot, reflect, normalize, screenCoordinate, nodeObject, Fn, passTexture, uv, uniform, perspectiveDepthToViewZ, orthographicDepthToViewZ, vec2, vec3, vec4 } from 'three/tsl';

const _quadMesh = /*@__PURE__*/ new QuadMesh();
const _size = /*@__PURE__*/ new Vector2();

let _rendererState;

/**
 * Post processing node for computing screen space reflections (SSR).
 *
 * Reference: {@link https://lettier.github.io/3d-game-shaders-for-beginners/screen-space-reflection.html}
 *
 * @augments TempNode
 */
class SSRNode extends TempNode {

	static get type() {

		return 'SSRNode';

	}

	/**
	 * Constructs a new SSR node.
	 *
	 * @param {Node<vec4>} colorNode - The node that represents the beauty pass.
	 * @param {Node<float>} depthNode - A node that represents the beauty pass's depth.
	 * @param {Node<vec3>} normalNode - A node that represents the beauty pass's normals.
	 * @param {Node<float>} metalnessNode - A node that represents the beauty pass's metalness.
	 * @param {Camera} camera - The camera the scene is rendered with.
	 */
	constructor( colorNode, depthNode, normalNode, metalnessNode, camera ) {

		super( 'vec4' );

		/**
		 * The node that represents the beauty pass.
		 *
		 * @type {Node<vec4>}
		 */
		this.colorNode = colorNode;

		/**
		 * A node that represents the beauty pass's depth.
		 *
		 * @type {Node<float>}
		 */
		this.depthNode = depthNode;

		/**
		 * A node that represents the beauty pass's normals.
		 *
		 * @type {Node<vec3>}
		 */
		this.normalNode = normalNode;

		/**
		 * A node that represents the beauty pass's metalness.
		 *
		 * @type {Node<float>}
		 */
		this.metalnessNode = metalnessNode;

		/**
		 * The camera the scene is rendered with.
		 *
		 * @type {Camera}
		 */
		this.camera = camera;

		/**
		 * The resolution scale. By default, SSR reflections
		 * are computed at half resolution. Setting the value
		 * to `1` improves quality but also results in more
		 * computational overhead.
		 *
		 * @type {number}
		 * @default 0.5
		 */
		this.resolutionScale = 0.5;

		/**
		 * The `updateBeforeType` is set to `NodeUpdateType.FRAME` since the node renders
		 * its effect once per frame in `updateBefore()`.
		 *
		 * @type {string}
		 * @default 'frame'
		 */
		this.updateBeforeType = NodeUpdateType.FRAME;

		/**
		 * The render target the SSR is rendered into.
		 *
		 * @private
		 * @type {RenderTarget}
		 */
		this._ssrRenderTarget = new RenderTarget( 1, 1, { depthBuffer: false, minFilter: NearestFilter, magFilter: NearestFilter } );
		this._ssrRenderTarget.texture.name = 'SSRNode.SSR';

		/**
		 * Controls how far a fragment can reflect.
		 *
		 * @type {UniformNode<float>}
		 */
		this.maxDistance = uniform( 1 );

		/**
		 * Controls the cutoff between what counts as a possible reflection hit and what does not.
		 *
		 * @type {UniformNode<float>}
		 */
		this.thickness = uniform( 0.1 );

		/**
		 * Controls the transparency of the reflected colors.
		 *
		 * @type {UniformNode<float>}
		 */
		this.opacity = uniform( 1 );

		/**
		 * Represents the projection matrix of the scene's camera.
		 *
		 * @private
		 * @type {UniformNode<mat4>}
		 */
		this._cameraProjectionMatrix = uniform( camera.projectionMatrix );

		/**
		 * Represents the inverse projection matrix of the scene's camera.
		 *
		 * @private
		 * @type {UniformNode<mat4>}
		 */
		this._cameraProjectionMatrixInverse = uniform( camera.projectionMatrixInverse );

		/**
		 * Represents the near value of the scene's camera.
		 *
		 * @private
		 * @type {ReferenceNode<float>}
		 */
		this._cameraNear = reference( 'near', 'float', camera );

		/**
		 * Represents the far value of the scene's camera.
		 *
		 * @private
		 * @type {ReferenceNode<float>}
		 */
		this._cameraFar = reference( 'far', 'float', camera );

		/**
		 * Whether the scene's camera is perspective or orthographic.
		 *
		 * @private
		 * @type {UniformNode<bool>}
		 */
		this._isPerspectiveCamera = uniform( camera.isPerspectiveCamera ? 1 : 0 );

		/**
		 * The resolution of the pass.
		 *
		 * @private
		 * @type {UniformNode<vec2>}
		 */
		this._resolution = uniform( new Vector2() );

		/**
		 * This value is derived from the resolution and restricts
		 * the maximum raymarching steps in the fragment shader.
		 *
		 * @private
		 * @type {UniformNode<float>}
		 */
		this._maxStep = uniform( 0 );

		/**
		 * The material that is used to render the effect.
		 *
		 * @private
		 * @type {NodeMaterial}
		 */
		this._material = new NodeMaterial();
		this._material.name = 'SSRNode.SSR';

		/**
		 * The result of the effect is represented as a separate texture node.
		 *
		 * @private
		 * @type {PassTextureNode}
		 */
		this._textureNode = passTexture( this, this._ssrRenderTarget.texture );

	}

	/**
	 * Returns the result of the effect as a texture node.
	 *
	 * @return {PassTextureNode} A texture node that represents the result of the effect.
	 */
	getTextureNode() {

		return this._textureNode;

	}

	/**
	 * Sets the size of the effect.
	 *
	 * @param {number} width - The width of the effect.
	 * @param {number} height - The height of the effect.
	 */
	setSize( width, height ) {

		width = Math.round( this.resolutionScale * width );
		height = Math.round( this.resolutionScale * height );

		this._resolution.value.set( width, height );
		this._maxStep.value = Math.round( Math.sqrt( width * width + height * height ) );

		this._ssrRenderTarget.setSize( width, height );

	}

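	// example: with the default resolutionScale of 0.5 and a 1920 x 1080 drawing buffer, setSize()
	// produces a 960 x 540 SSR target and `_maxStep` becomes Math.round( Math.sqrt( 960 * 960 + 540 * 540 ) ) = 1101,
	// i.e. the ray marching loop can at most traverse the full diagonal of the scaled target in pixel-sized steps.
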
	/**
	 * This method is used to render the effect once per frame.
	 *
	 * @param {NodeFrame} frame - The current node frame.
	 */
	updateBefore( frame ) {

		const { renderer } = frame;

		_rendererState = RendererUtils.resetRendererState( renderer, _rendererState );

		const size = renderer.getDrawingBufferSize( _size );

		_quadMesh.material = this._material;

		this.setSize( size.width, size.height );

		// clear
		renderer.setMRT( null );
		renderer.setClearColor( 0x000000, 0 );

		// ssr
		renderer.setRenderTarget( this._ssrRenderTarget );
		_quadMesh.render( renderer );

		// restore
		RendererUtils.restoreRendererState( renderer, _rendererState );

	}

	/**
	 * This method is used to set up the effect's TSL code.
	 *
	 * @param {NodeBuilder} builder - The current node builder.
	 * @return {PassTextureNode}
	 */
	setup( builder ) {

		const uvNode = uv();

		const pointToLineDistance = Fn( ( [ point, linePointA, linePointB ] ) => {

			// https://mathworld.wolfram.com/Point-LineDistance3-Dimensional.html
			return cross( point.sub( linePointA ), point.sub( linePointB ) ).length().div( linePointB.sub( linePointA ).length() );

		} );

		const pointPlaneDistance = Fn( ( [ point, planePoint, planeNormal ] ) => {

			// https://mathworld.wolfram.com/Point-PlaneDistance.html
			// https://en.wikipedia.org/wiki/Plane_(geometry)
			// http://paulbourke.net/geometry/pointlineplane/
			const d = mul( planeNormal.x, planePoint.x ).add( mul( planeNormal.y, planePoint.y ) ).add( mul( planeNormal.z, planePoint.z ) ).negate().toVar();
			const denominator = sqrt( mul( planeNormal.x, planeNormal.x ).add( mul( planeNormal.y, planeNormal.y ) ).add( mul( planeNormal.z, planeNormal.z ) ) ).toVar();
			const distance = div( mul( planeNormal.x, point.x ).add( mul( planeNormal.y, point.y ) ).add( mul( planeNormal.z, point.z ) ).add( d ), denominator );
			return distance;

		} );

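		// in compact form, the two helpers above evaluate:
		// pointToLineDistance: |(p - a) × (p - b)| / |b - a|, the distance from point p to the line through a and b
		// pointPlaneDistance: (n · p + d) / |n| with d = -(n · p0), the signed distance from p to the plane through p0 with normal n
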
		const getViewZ = Fn( ( [ depth ] ) => {

			let viewZNode;

			if ( this.camera.isPerspectiveCamera ) {

				viewZNode = perspectiveDepthToViewZ( depth, this._cameraNear, this._cameraFar );

			} else {

				viewZNode = orthographicDepthToViewZ( depth, this._cameraNear, this._cameraFar );

			}

			return viewZNode;

		} );

		const sampleDepth = ( uv ) => {

			const depth = this.depthNode.sample( uv ).r;

			if ( builder.renderer.logarithmicDepthBuffer === true ) {

				const viewZ = logarithmicDepthToViewZ( depth, this._cameraNear, this._cameraFar );
				return viewZToPerspectiveDepth( viewZ, this._cameraNear, this._cameraFar );

			}

			return depth;

		};

		const ssr = Fn( () => {

			const metalness = this.metalnessNode.sample( uvNode ).r;

			// fragments with no metalness do not reflect their environment
			metalness.equal( 0.0 ).discard();

			// compute some standard FX entities
			const depth = sampleDepth( uvNode ).toVar();
			const viewPosition = getViewPosition( uvNode, depth, this._cameraProjectionMatrixInverse ).toVar();
			const viewNormal = this.normalNode.rgb.normalize().toVar();

			// compute the direction from the position in view space to the camera
			const viewIncidentDir = ( ( this.camera.isPerspectiveCamera ) ? normalize( viewPosition ) : vec3( 0, 0, - 1 ) ).toVar();

			// compute the direction in which the light is reflected on the surface
			const viewReflectDir = reflect( viewIncidentDir, viewNormal ).toVar();

			// adapt maximum distance to the local geometry (see https://www.mathsisfun.com/algebra/vectors-dot-product.html)
			const maxReflectRayLen = this.maxDistance.div( dot( viewIncidentDir.negate(), viewNormal ) ).toVar();

			// compute the maximum point of the reflection ray in view space
			const d1viewPosition = viewPosition.add( viewReflectDir.mul( maxReflectRayLen ) ).toVar();

			// check if d1viewPosition lies behind the camera near plane
			If( this._isPerspectiveCamera.equal( float( 1 ) ).and( d1viewPosition.z.greaterThan( this._cameraNear.negate() ) ), () => {

				// if so, ensure d1viewPosition is clamped on the near plane.
				// this prevents artifacts during the ray marching process
				const t = sub( this._cameraNear.negate(), viewPosition.z ).div( viewReflectDir.z );
				d1viewPosition.assign( viewPosition.add( viewReflectDir.mul( t ) ) );

			} );

			// d0 and d1 are the start and maximum points of the reflection ray in screen space
			const d0 = screenCoordinate.xy.toVar();
			const d1 = getScreenPosition( d1viewPosition, this._cameraProjectionMatrix ).mul( this._resolution ).toVar();

			// below variables are used to control the raymarching process

			// total length of the ray
			const totalLen = d1.sub( d0 ).length().toVar();

			// offset in x and y direction
			const xLen = d1.x.sub( d0.x ).toVar();
			const yLen = d1.y.sub( d0.y ).toVar();

			// determine the larger delta
			// the larger difference will help to determine how much to travel in the X and Y direction each iteration and
			// how many iterations are needed to travel the entire ray
			const totalStep = max( abs( xLen ), abs( yLen ) ).toVar();

			// step sizes in the x and y directions
			const xSpan = xLen.div( totalStep ).toVar();
			const ySpan = yLen.div( totalStep ).toVar();

			const output = vec4( 0 ).toVar();

			// the actual ray marching loop
			// starting from d0, the code gradually travels along the ray and looks for an intersection with the geometry.
			// it does not exceed d1 (the maximum ray extent)
			Loop( { start: int( 0 ), end: int( this._maxStep ), type: 'int', condition: '<' }, ( { i } ) => {

				// TODO: Remove this when Chrome is fixed, see https://issues.chromium.org/issues/372714384#comment14
				If( metalness.equal( 0 ), () => {

					Break();

				} );

				// stop if the maximum number of steps is reached for this specific ray
				If( float( i ).greaterThanEqual( totalStep ), () => {

					Break();

				} );

				// advance on the ray by computing a new position in screen space
				const xy = vec2( d0.x.add( xSpan.mul( float( i ) ) ), d0.y.add( ySpan.mul( float( i ) ) ) ).toVar();

				// stop processing if the new position lies outside of the screen
				If( xy.x.lessThan( 0 ).or( xy.x.greaterThan( this._resolution.x ) ).or( xy.y.lessThan( 0 ) ).or( xy.y.greaterThan( this._resolution.y ) ), () => {

					Break();

				} );

				// compute new uv, depth, viewZ and viewPosition for the new location on the ray
				const uvNode = xy.div( this._resolution );
				const d = sampleDepth( uvNode ).toVar();
				const vZ = getViewZ( d ).toVar();
				const vP = getViewPosition( uvNode, d, this._cameraProjectionMatrixInverse ).toVar();

				const viewReflectRayZ = float( 0 ).toVar();

				// normalized distance between the current position xy and the starting point d0
				const s = xy.sub( d0 ).length().div( totalLen );

				// depending on the camera type, we now compute the z-coordinate of the reflected ray at the current step in view space
				If( this._isPerspectiveCamera.equal( float( 1 ) ), () => {

					const recipVPZ = float( 1 ).div( viewPosition.z ).toVar();
					viewReflectRayZ.assign( float( 1 ).div( recipVPZ.add( s.mul( float( 1 ).div( d1viewPosition.z ).sub( recipVPZ ) ) ) ) );

				} ).Else( () => {

					viewReflectRayZ.assign( viewPosition.z.add( s.mul( d1viewPosition.z.sub( viewPosition.z ) ) ) );

				} );

				// if viewReflectRayZ is less than or equal to the real z-coordinate at this place, it potentially intersects the geometry
				If( viewReflectRayZ.lessThanEqual( vZ ), () => {

					// compute the distance of the new location to the ray in view space
					// note: vP is the fragment's view position, which is not an exact point on the ray
					const away = pointToLineDistance( vP, viewPosition, d1viewPosition ).toVar();

					// compute the minimum thickness between the current fragment and its neighbor in the x-direction.
					const xyNeighbor = vec2( xy.x.add( 1 ), xy.y ).toVar(); // move one pixel
					const uvNeighbor = xyNeighbor.div( this._resolution );
					const vPNeighbor = getViewPosition( uvNeighbor, d, this._cameraProjectionMatrixInverse ).toVar();

					const minThickness = vPNeighbor.x.sub( vP.x ).toVar();
					minThickness.mulAssign( 3 ); // expand a bit to avoid errors

					const tk = max( minThickness, this.thickness ).toVar();

					If( away.lessThanEqual( tk ), () => { // hit

						const vN = this.normalNode.sample( uvNode ).rgb.normalize().toVar();

						If( dot( viewReflectDir, vN ).greaterThanEqual( 0 ), () => {

							// the reflected ray is pointing towards the same side as the fragment's normal (at the current ray position),
							// which means it wouldn't reflect off the surface. The loop continues to the next step for the next ray sample.
							Continue();

						} );

						// this distance represents the depth of the intersection point between the reflected ray and the scene.
						const distance = pointPlaneDistance( vP, viewPosition, viewNormal ).toVar();

						If( distance.greaterThan( this.maxDistance ), () => {

							// distance exceeding limit: the reflection is potentially too far away and
							// might not contribute significantly to the final color
							Break();

						} );

						const op = this.opacity.mul( metalness ).toVar();

						// distance attenuation (the reflection should fade out the farther it is away from the surface)
						const ratio = float( 1 ).sub( distance.div( this.maxDistance ) ).toVar();
						const attenuation = ratio.mul( ratio );
						op.mulAssign( attenuation );

						// fresnel (reflect more light on surfaces that are viewed at grazing angles)
						const fresnelCoe = div( dot( viewIncidentDir, viewReflectDir ).add( 1 ), 2 );
						op.mulAssign( fresnelCoe );

						// output
						const reflectColor = this.colorNode.sample( uvNode );
						output.assign( vec4( reflectColor.rgb, op ) );

						Break();

					} );

				} );

			} );

			return output;

		} );

		this._material.fragmentNode = ssr().context( builder.getSharedContext() );
		this._material.needsUpdate = true;

		//

		return this._textureNode;

	}

	/**
	 * Frees internal resources. This method should be called
	 * when the effect is no longer required.
	 */
	dispose() {

		this._ssrRenderTarget.dispose();
		this._material.dispose();

	}

}

export default SSRNode;

/**
 * TSL function for creating screen space reflections (SSR).
 *
 * @tsl
 * @function
 * @param {Node<vec4>} colorNode - The node that represents the beauty pass.
 * @param {Node<float>} depthNode - A node that represents the beauty pass's depth.
 * @param {Node<vec3>} normalNode - A node that represents the beauty pass's normals.
 * @param {Node<float>} metalnessNode - A node that represents the beauty pass's metalness.
 * @param {Camera} camera - The camera the scene is rendered with.
 * @returns {SSRNode}
 */
export const ssr = ( colorNode, depthNode, normalNode, metalnessNode, camera ) => nodeObject( new SSRNode( nodeObject( colorNode ), nodeObject( depthNode ), nodeObject( normalNode ), nodeObject( metalnessNode ), camera ) );

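// Minimal usage sketch (an assumption, not part of this module): the node is typically wired up
// in a WebGPU post-processing chain where an MRT scene pass provides color, normal and metalness
// attachments. `scene`, `camera` and `postProcessing` are placeholders, and the exact TSL accessor
// names (e.g. `normalView`, `metalness`, `blendColor`) may vary between three.js versions:
//
//   import { pass, mrt, output, normalView, metalness, blendColor } from 'three/tsl';
//
//   const scenePass = pass( scene, camera );
//   scenePass.setMRT( mrt( { output, normal: normalView, metalness } ) );
//
//   const ssrPass = ssr(
//       scenePass.getTextureNode( 'output' ),
//       scenePass.getTextureNode( 'depth' ),
//       scenePass.getTextureNode( 'normal' ),
//       scenePass.getTextureNode( 'metalness' ),
//       camera
//   );
//
//   // composite the alpha-weighted reflections over the beauty pass
//   postProcessing.outputNode = blendColor( scenePass.getTextureNode( 'output' ), ssrPass );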