import { HalfFloatType, RenderTarget, Vector2, RendererUtils, QuadMesh, TempNode, NodeMaterial, NodeUpdateType, LinearFilter, LinearMipmapLinearFilter } from 'three/webgpu';
import { texture, reference, viewZToPerspectiveDepth, logarithmicDepthToViewZ, getScreenPosition, getViewPosition, sqrt, mul, div, cross, float, Continue, Break, Loop, int, max, abs, sub, If, dot, reflect, normalize, screenCoordinate, nodeObject, Fn, passTexture, uv, uniform, perspectiveDepthToViewZ, orthographicDepthToViewZ, vec2, vec3, vec4 } from 'three/tsl';

import { boxBlur } from './boxBlur.js';

const _quadMesh = /*@__PURE__*/ new QuadMesh();
const _size = /*@__PURE__*/ new Vector2();

let _rendererState;

/**
 * Post processing node for computing screen space reflections (SSR).
 *
 * Reference: {@link https://lettier.github.io/3d-game-shaders-for-beginners/screen-space-reflection.html}
 *
 * @augments TempNode
 * @three_import import { ssr } from 'three/addons/tsl/display/SSRNode.js';
 */
class SSRNode extends TempNode {

	static get type() {

		return 'SSRNode';

	}

	/**
	 * Constructs a new SSR node.
	 *
	 * @param {Node<vec4>} colorNode - The node that represents the beauty pass.
	 * @param {Node<float>} depthNode - A node that represents the beauty pass's depth.
	 * @param {Node<vec3>} normalNode - A node that represents the beauty pass's normals.
	 * @param {Node<float>} metalnessNode - A node that represents the beauty pass's metalness.
	 * @param {?Node<float>} [roughnessNode=null] - A node that represents the beauty pass's roughness.
	 * @param {?Camera} [camera=null] - The camera the scene is rendered with.
	 */
	constructor( colorNode, depthNode, normalNode, metalnessNode, roughnessNode = null, camera = null ) {

		super( 'vec4' );

		/**
		 * The node that represents the beauty pass.
		 *
		 * @type {Node<vec4>}
		 */
		this.colorNode = colorNode;

		/**
		 * A node that represents the beauty pass's depth.
		 *
		 * @type {Node<float>}
		 */
		this.depthNode = depthNode;

		/**
		 * A node that represents the beauty pass's normals.
		 *
		 * @type {Node<vec3>}
		 */
		this.normalNode = normalNode;

		/**
		 * A node that represents the beauty pass's metalness.
		 *
		 * @type {Node<float>}
		 */
		this.metalnessNode = metalnessNode;

		/**
		 * A node that represents the beauty pass's roughness. When provided,
		 * the SSR reflections are blurred based on the roughness. Blurring is
		 * a costly operation so leave this at `null` if you encounter
		 * performance issues on certain devices.
		 *
		 * @type {?Node<float>}
		 * @default null
		 */
		this.roughnessNode = roughnessNode;

		/**
		 * The resolution scale. Valid values are in the range
		 * `[0,1]`. `1` means best quality but also results in
		 * more computational overhead. Setting to `0.5` means
		 * the effect is computed in half-resolution.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.resolutionScale = 1;

		/**
		 * The `updateBeforeType` is set to `NodeUpdateType.FRAME` since the node renders
		 * its effect once per frame in `updateBefore()`.
		 *
		 * @type {string}
		 * @default 'frame'
		 */
		this.updateBeforeType = NodeUpdateType.FRAME;

		/**
		 * Controls how far a fragment can reflect. Increasing this value results in more
		 * computational overhead but also increases the reflection distance.
		 *
		 * @type {UniformNode<float>}
		 */
		this.maxDistance = uniform( 1 );

		/**
		 * Controls the cutoff between what counts as a possible reflection hit and what does not.
		 *
		 * @type {UniformNode<float>}
		 */
		this.thickness = uniform( 0.1 );

		/**
		 * Controls how the SSR reflections are blended with the beauty pass.
		 *
		 * @type {UniformNode<float>}
		 */
		this.opacity = uniform( 1 );

		/**
		 * This parameter controls how detailed the raymarching process is.
		 * The value range is `[0,1]`, where `1` means best quality (the maximum number
		 * of raymarching iterations/samples) and `0` means no samples at all.
		 *
		 * A quality of `0.5` is usually sufficient for most use cases. Try to keep
		 * this parameter as low as possible. Larger values result in noticeably more
		 * overhead.
		 *
		 * @type {UniformNode<float>}
		 */
		this.quality = uniform( 0.5 );

		/**
		 * The quality of the blur. Must be an integer in the range `[1,3]`.
		 *
		 * @type {UniformNode<int>}
		 */
		this.blurQuality = uniform( 2 );

		//

		if ( camera === null ) {

			if ( this.colorNode.passNode && this.colorNode.passNode.isPassNode === true ) {

				camera = this.colorNode.passNode.camera;

			} else {

				throw new Error( 'THREE.TSL: No camera found. ssr() requires a camera.' );

			}

		}

		/**
		 * The camera the scene is rendered with.
		 *
		 * @type {Camera}
		 */
		this.camera = camera;

		/**
		 * The spread of the blur. Automatically set when generating mips.
		 *
		 * @private
		 * @type {UniformNode<int>}
		 */
		this._blurSpread = uniform( 1 );

		/**
		 * Represents the projection matrix of the scene's camera.
		 *
		 * @private
		 * @type {UniformNode<mat4>}
		 */
		this._cameraProjectionMatrix = uniform( camera.projectionMatrix );

		/**
		 * Represents the inverse projection matrix of the scene's camera.
		 *
		 * @private
		 * @type {UniformNode<mat4>}
		 */
		this._cameraProjectionMatrixInverse = uniform( camera.projectionMatrixInverse );

		/**
		 * Represents the near value of the scene's camera.
		 *
		 * @private
		 * @type {ReferenceNode<float>}
		 */
		this._cameraNear = reference( 'near', 'float', camera );

		/**
		 * Represents the far value of the scene's camera.
		 *
		 * @private
		 * @type {ReferenceNode<float>}
		 */
		this._cameraFar = reference( 'far', 'float', camera );

		/**
		 * Whether the scene's camera is perspective or orthographic.
		 *
		 * @private
		 * @type {UniformNode<bool>}
		 */
		this._isPerspectiveCamera = uniform( camera.isPerspectiveCamera === true );

		/**
		 * The resolution of the pass.
		 *
		 * @private
		 * @type {UniformNode<vec2>}
		 */
		this._resolution = uniform( new Vector2() );

		/**
		 * The render target the SSR is rendered into.
		 *
		 * @private
		 * @type {RenderTarget}
		 */
		this._ssrRenderTarget = new RenderTarget( 1, 1, { depthBuffer: false, type: HalfFloatType } );
		this._ssrRenderTarget.texture.name = 'SSRNode.SSR';

		/**
		 * The render target for the blurred SSR reflections.
		 *
		 * @private
		 * @type {RenderTarget}
		 */
		this._blurRenderTarget = new RenderTarget( 1, 1, { depthBuffer: false, type: HalfFloatType, minFilter: LinearMipmapLinearFilter, magFilter: LinearFilter } );
		this._blurRenderTarget.texture.name = 'SSRNode.Blur';
		this._blurRenderTarget.texture.mipmaps.push( {}, {}, {}, {}, {} );

		/**
		 * The material that is used to render the effect.
		 *
		 * @private
		 * @type {NodeMaterial}
		 */
		this._ssrMaterial = new NodeMaterial();
		this._ssrMaterial.name = 'SSRNode.SSR';

		/**
		 * The blur material.
		 *
		 * @private
		 * @type {NodeMaterial}
		 */
		this._blurMaterial = new NodeMaterial();
		this._blurMaterial.name = 'SSRNode.Blur';

		/**
		 * The copy material.
		 *
		 * @private
		 * @type {NodeMaterial}
		 */
		this._copyMaterial = new NodeMaterial();
		this._copyMaterial.name = 'SSRNode.Copy';

		/**
		 * The result of the effect is represented as a separate texture node.
		 *
		 * @private
		 * @type {PassTextureNode}
		 */
		this._textureNode = passTexture( this, this._ssrRenderTarget.texture );

		let blurredTextureNode = null;

		if ( this.roughnessNode !== null ) {

			const mips = this._blurRenderTarget.texture.mipmaps.length - 1;
			const lod = float( this.roughnessNode ).mul( mips ).clamp( 0, mips );

			blurredTextureNode = passTexture( this, this._blurRenderTarget.texture ).level( lod );

		}

		/**
		 * Holds the blurred SSR reflections.
		 *
		 * @private
		 * @type {?PassTextureNode}
		 */
		this._blurredTextureNode = blurredTextureNode;

	}

	/**
	 * Returns the result of the effect as a texture node.
	 *
	 * @return {PassTextureNode} A texture node that represents the result of the effect.
	 */
	getTextureNode() {

		return this.roughnessNode !== null ? this._blurredTextureNode : this._textureNode;

	}

	/**
	 * Sets the size of the effect.
	 *
	 * @param {number} width - The width of the effect.
	 * @param {number} height - The height of the effect.
	 */
	setSize( width, height ) {

		width = Math.round( this.resolutionScale * width );
		height = Math.round( this.resolutionScale * height );

		this._resolution.value.set( width, height );

		this._ssrRenderTarget.setSize( width, height );
		this._blurRenderTarget.setSize( width, height );

	}

	/**
	 * This method is used to render the effect once per frame.
	 *
	 * @param {NodeFrame} frame - The current node frame.
	 */
	updateBefore( frame ) {

		const { renderer } = frame;

		_rendererState = RendererUtils.resetRendererState( renderer, _rendererState );

		const ssrRenderTarget = this._ssrRenderTarget;
		const blurRenderTarget = this._blurRenderTarget;

		const size = renderer.getDrawingBufferSize( _size );

		_quadMesh.material = this._ssrMaterial;

		this.setSize( size.width, size.height );

		// clear

		renderer.setMRT( null );
		renderer.setClearColor( 0x000000, 0 );

		// ssr

		renderer.setRenderTarget( ssrRenderTarget );
		_quadMesh.name = 'SSR [ Reflections ]';
		_quadMesh.render( renderer );

		// blur (optional)

		if ( this.roughnessNode !== null ) {

			// blur mips but leave the base mip unblurred

			for ( let i = 0; i < blurRenderTarget.texture.mipmaps.length; i ++ ) {

				_quadMesh.material = ( i === 0 ) ? this._copyMaterial : this._blurMaterial;

				this._blurSpread.value = i;

				renderer.setRenderTarget( blurRenderTarget, 0, i );
				_quadMesh.name = 'SSR [ Blur Level ' + i + ' ]';
				_quadMesh.render( renderer );

			}

		}

		// restore

		RendererUtils.restoreRendererState( renderer, _rendererState );

	}

	/**
	 * This method is used to setup the effect's TSL code.
	 *
	 * @param {NodeBuilder} builder - The current node builder.
	 * @return {PassTextureNode}
	 */
	setup( builder ) {

		const uvNode = uv();

		const pointToLineDistance = Fn( ( [ point, linePointA, linePointB ] ) => {

			// https://mathworld.wolfram.com/Point-LineDistance3-Dimensional.html
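			// distance from point P to the line through A and B: |( P - A ) x ( P - B )| / |B - A|
			// (the cross product's magnitude is twice the area of triangle APB; dividing by the
			// base length |B - A| gives the triangle's height, i.e. the point-line distance)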
			return cross( point.sub( linePointA ), point.sub( linePointB ) ).length().div( linePointB.sub( linePointA ).length() );

		} );

		const pointPlaneDistance = Fn( ( [ point, planePoint, planeNormal ] ) => {

			// https://mathworld.wolfram.com/Point-PlaneDistance.html
			// https://en.wikipedia.org/wiki/Plane_(geometry)
			// http://paulbourke.net/geometry/pointlineplane/
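			// signed distance from point P to the plane with normal N through planePoint:
			// d = -( N . planePoint ), distance = ( N . P + d ) / |N|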
			const d = mul( planeNormal.x, planePoint.x ).add( mul( planeNormal.y, planePoint.y ) ).add( mul( planeNormal.z, planePoint.z ) ).negate().toVar();
			const denominator = sqrt( mul( planeNormal.x, planeNormal.x ).add( mul( planeNormal.y, planeNormal.y ) ).add( mul( planeNormal.z, planeNormal.z ) ) ).toVar();
			const distance = div( mul( planeNormal.x, point.x ).add( mul( planeNormal.y, point.y ) ).add( mul( planeNormal.z, point.z ) ).add( d ), denominator );

			return distance;

		} );

		const getViewZ = Fn( ( [ depth ] ) => {

			let viewZNode;

			if ( this.camera.isPerspectiveCamera ) {

				viewZNode = perspectiveDepthToViewZ( depth, this._cameraNear, this._cameraFar );

			} else {

				viewZNode = orthographicDepthToViewZ( depth, this._cameraNear, this._cameraFar );

			}

			return viewZNode;

		} );

		const sampleDepth = ( uv ) => {

			const depth = this.depthNode.sample( uv ).r;

			if ( builder.renderer.logarithmicDepthBuffer === true ) {

				const viewZ = logarithmicDepthToViewZ( depth, this._cameraNear, this._cameraFar );

				return viewZToPerspectiveDepth( viewZ, this._cameraNear, this._cameraFar );

			}

			return depth;

		};

		const ssr = Fn( () => {

			const metalness = float( this.metalnessNode );

			// fragments with no metalness do not reflect their environment

			metalness.equal( 0.0 ).discard();

			// compute some standard FX entities

			const depth = sampleDepth( uvNode ).toVar();
			const viewPosition = getViewPosition( uvNode, depth, this._cameraProjectionMatrixInverse ).toVar();
			const viewNormal = this.normalNode.rgb.normalize().toVar();

			// compute the direction of the incident ray, i.e. from the camera to the position in view space

			const viewIncidentDir = ( ( this.camera.isPerspectiveCamera ) ? normalize( viewPosition ) : vec3( 0, 0, - 1 ) ).toVar();

			// compute the direction in which the light is reflected on the surface

			const viewReflectDir = reflect( viewIncidentDir, viewNormal ).toVar();

			// adapt maximum distance to the local geometry (see https://www.mathsisfun.com/algebra/vectors-dot-product.html)
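			// dot( -I, N ) is cos( theta ), the cosine between the inverted incident ray and the surface normal.
			// The reflected ray makes the same angle with the normal, so traveling maxDistance / cos( theta )
			// along it moves at most `maxDistance` away from the reflecting surface plane, which is the same
			// limit checked against pointPlaneDistance() further below.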
			const maxReflectRayLen = this.maxDistance.div( dot( viewIncidentDir.negate(), viewNormal ) ).toVar();

			// compute the maximum point of the reflection ray in view space

			const d1viewPosition = viewPosition.add( viewReflectDir.mul( maxReflectRayLen ) ).toVar();

			// check if d1viewPosition lies behind the camera near plane

			If( this._isPerspectiveCamera.and( d1viewPosition.z.greaterThan( this._cameraNear.negate() ) ), () => {

				// if so, ensure d1viewPosition is clamped on the near plane.
				// this prevents artifacts during the ray marching process

				const t = sub( this._cameraNear.negate(), viewPosition.z ).div( viewReflectDir.z );
				d1viewPosition.assign( viewPosition.add( viewReflectDir.mul( t ) ) );

			} );

			// d0 and d1 are the start and maximum points of the reflection ray in screen space

			const d0 = screenCoordinate.xy.toVar();
			const d1 = getScreenPosition( d1viewPosition, this._cameraProjectionMatrix ).mul( this._resolution ).toVar();

			// the variables below are used to control the raymarching process

			// total length of the ray

			const totalLen = d1.sub( d0 ).length().toVar();

			// offset in x and y direction

			const xLen = d1.x.sub( d0.x ).toVar();
			const yLen = d1.y.sub( d0.y ).toVar();

			// determine the larger delta
			// the larger difference will help to determine how much to travel in the x and y direction each iteration and
			// how many iterations are needed to travel the entire ray

			const totalStep = int( max( abs( xLen ), abs( yLen ) ).mul( this.quality.clamp() ) ).toConst();

			// step sizes in the x and y directions

			const xSpan = xLen.div( totalStep ).toVar();
			const ySpan = yLen.div( totalStep ).toVar();

			const output = vec4( 0 ).toVar();

			// the actual ray marching loop
			// starting from d0, the code gradually travels along the ray and looks for an intersection with the geometry.
			// it does not exceed d1 (the maximum ray extent)

			Loop( totalStep, ( { i } ) => {

				// advance on the ray by computing a new position in screen coordinates

				const xy = vec2( d0.x.add( xSpan.mul( float( i ) ) ), d0.y.add( ySpan.mul( float( i ) ) ) ).toVar();

				// stop processing if the new position lies outside of the screen

				If( xy.x.lessThan( 0 ).or( xy.x.greaterThan( this._resolution.x ) ).or( xy.y.lessThan( 0 ) ).or( xy.y.greaterThan( this._resolution.y ) ), () => {

					Break();

				} );

				// compute new uv, depth and viewZ for the next fragment

				const uvNode = xy.div( this._resolution );

				const d = sampleDepth( uvNode ).toVar();
				const vZ = getViewZ( d ).toVar();
				const viewReflectRayZ = float( 0 ).toVar();

				// normalized distance between the current position xy and the starting point d0

				const s = xy.sub( d0 ).length().div( totalLen );

				// depending on the camera type, we now compute the z-coordinate of the reflected ray at the current step in view space
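				// for a perspective camera, view-space depth is not linear in screen space but its reciprocal is,
				// so the ray depth is interpolated as 1 / mix( 1 / z0, 1 / z1, s ); for an orthographic camera,
				// view-space depth itself interpolates linearly: mix( z0, z1, s )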
				If( this._isPerspectiveCamera, () => {

					const recipVPZ = float( 1 ).div( viewPosition.z ).toVar();
					viewReflectRayZ.assign( float( 1 ).div( recipVPZ.add( s.mul( float( 1 ).div( d1viewPosition.z ).sub( recipVPZ ) ) ) ) );

				} ).Else( () => {

					viewReflectRayZ.assign( viewPosition.z.add( s.mul( d1viewPosition.z.sub( viewPosition.z ) ) ) );

				} );

				// if viewReflectRayZ is less than or equal to the real z-coordinate at this place, it potentially intersects the geometry

				If( viewReflectRayZ.lessThanEqual( vZ ), () => {

					// compute the distance of the new location to the ray in view space
					// to clarify: vP is the fragment's view position, which is not an exact point on the ray

					const vP = getViewPosition( uvNode, d, this._cameraProjectionMatrixInverse ).toVar();
					const away = pointToLineDistance( vP, viewPosition, d1viewPosition ).toVar();

					// compute the minimum thickness between the current fragment and its neighbor in the x-direction

					const xyNeighbor = vec2( xy.x.add( 1 ), xy.y ).toVar(); // move one pixel
					const uvNeighbor = xyNeighbor.div( this._resolution );
					const vPNeighbor = getViewPosition( uvNeighbor, d, this._cameraProjectionMatrixInverse ).toVar();

					const minThickness = vPNeighbor.x.sub( vP.x ).toVar();
					minThickness.mulAssign( 3 ); // expand a bit to avoid errors

					const tk = max( minThickness, this.thickness ).toVar();

					If( away.lessThanEqual( tk ), () => { // hit

						const vN = this.normalNode.sample( uvNode ).rgb.normalize().toVar();

						If( dot( viewReflectDir, vN ).greaterThanEqual( 0 ), () => {

							// the reflected ray is pointing towards the same side as the fragment's normal (current ray position),
							// which means it wouldn't reflect off the surface. The loop continues to the next step for the next ray sample.

							Continue();

						} );

						// this distance represents the depth of the intersection point between the reflected ray and the scene

						const distance = pointPlaneDistance( vP, viewPosition, viewNormal ).toVar();

						If( distance.greaterThan( this.maxDistance ), () => {

							// distance exceeding limit: the reflection is potentially too far away and
							// might not contribute significantly to the final color

							Break();

						} );

						const op = this.opacity.mul( metalness ).toVar();

						// distance attenuation (the reflection should fade out the farther it is away from the surface)

						const ratio = float( 1 ).sub( distance.div( this.maxDistance ) ).toVar();
						const attenuation = ratio.mul( ratio );

						op.mulAssign( attenuation );

						// fresnel (reflect more light on surfaces that are viewed at grazing angles)
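						// ( dot( I, R ) + 1 ) / 2 equals 1 - dot( I, N )^2, i.e. 0 at normal incidence and 1 at
						// grazing angles; a cheap view-dependent approximation rather than a Schlick Fresnel term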
						const fresnelCoe = div( dot( viewIncidentDir, viewReflectDir ).add( 1 ), 2 );

						op.mulAssign( fresnelCoe );

						// output

						const reflectColor = this.colorNode.sample( uvNode );
						output.assign( vec4( reflectColor.rgb, op ) );

						Break();

					} );

				} );

			} );

			return output;

		} );

		this._ssrMaterial.fragmentNode = ssr().context( builder.getSharedContext() );
		this._ssrMaterial.needsUpdate = true;

		// the materials below are used for blurring

		const reflectionBuffer = texture( this._ssrRenderTarget.texture );

		this._blurMaterial.fragmentNode = boxBlur( reflectionBuffer, { size: this.blurQuality, separation: this._blurSpread } );
		this._blurMaterial.needsUpdate = true;

		this._copyMaterial.fragmentNode = reflectionBuffer;
		this._copyMaterial.needsUpdate = true;

		//

		return this.getTextureNode();

	}

	/**
	 * Frees internal resources. This method should be called
	 * when the effect is no longer required.
	 */
	dispose() {

		this._ssrRenderTarget.dispose();
		this._blurRenderTarget.dispose();

		this._ssrMaterial.dispose();
		this._blurMaterial.dispose();
		this._copyMaterial.dispose();

	}

}

export default SSRNode;

/**
 * TSL function for creating screen space reflections (SSR).
 *
 * @tsl
 * @function
 * @param {Node<vec4>} colorNode - The node that represents the beauty pass.
 * @param {Node<float>} depthNode - A node that represents the beauty pass's depth.
 * @param {Node<vec3>} normalNode - A node that represents the beauty pass's normals.
 * @param {Node<float>} metalnessNode - A node that represents the beauty pass's metalness.
 * @param {?Node<float>} [roughnessNode=null] - A node that represents the beauty pass's roughness.
 * @param {?Camera} [camera=null] - The camera the scene is rendered with.
 * @returns {SSRNode}
 */
export const ssr = ( colorNode, depthNode, normalNode, metalnessNode, roughnessNode = null, camera = null ) => nodeObject( new SSRNode( nodeObject( colorNode ), nodeObject( depthNode ), nodeObject( normalNode ), nodeObject( metalnessNode ), nodeObject( roughnessNode ), camera ) );
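
// Example usage (an illustrative sketch, not part of this module: it assumes a WebGPU
// post-processing setup where `scene`, `camera` and `postProcessing` already exist, and
// an MRT layout that writes normal, metalness and roughness alongside the beauty output;
// adjust the MRT channel names to your own setup):
//
//	import { pass, mrt, output, normalView, metalness, roughness, blendColor } from 'three/tsl';
//	import { ssr } from 'three/addons/tsl/display/SSRNode.js';
//
//	const scenePass = pass( scene, camera );
//	scenePass.setMRT( mrt( { output, normal: normalView, metalness, roughness } ) );
//
//	const scenePassColor = scenePass.getTextureNode( 'output' );
//	const scenePassNormal = scenePass.getTextureNode( 'normal' );
//	const scenePassDepth = scenePass.getTextureNode( 'depth' );
//	const scenePassMetalness = scenePass.getTextureNode( 'metalness' );
//	const scenePassRoughness = scenePass.getTextureNode( 'roughness' );
//
//	const ssrPass = ssr( scenePassColor, scenePassDepth, scenePassNormal, scenePassMetalness, scenePassRoughness );
//	ssrPass.resolutionScale = 0.5; // optional: trade quality for performance
//
//	// composite the reflections over the beauty pass using the SSR alpha channel
//	postProcessing.outputNode = blendColor( scenePassColor, ssrPass );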