  1. import { HalfFloatType, RenderTarget, Vector2, RendererUtils, QuadMesh, TempNode, NodeMaterial, NodeUpdateType, LinearFilter, LinearMipmapLinearFilter } from 'three/webgpu';
  2. import { texture, reference, viewZToPerspectiveDepth, logarithmicDepthToViewZ, getScreenPosition, getViewPosition, mul, div, cross, float, Continue, Break, Loop, int, max, abs, sub, If, dot, reflect, normalize, screenCoordinate, nodeObject, Fn, passTexture, uv, uniform, perspectiveDepthToViewZ, orthographicDepthToViewZ, vec2, vec3, vec4 } from 'three/tsl';
  3. import { boxBlur } from './boxBlur.js';
// Shared fullscreen quad used to render the SSR, blur and copy passes.
const _quadMesh = /*@__PURE__*/ new QuadMesh();

// Scratch vector for querying the renderer's drawing buffer size.
const _size = /*@__PURE__*/ new Vector2();

// Holds the renderer state captured before rendering the effect so it can be restored afterwards.
let _rendererState;
  7. /**
  8. * Post processing node for computing screen space reflections (SSR).
  9. *
  10. * Reference: {@link https://lettier.github.io/3d-game-shaders-for-beginners/screen-space-reflection.html}
  11. *
  12. * @augments TempNode
  13. * @three_import import { ssr } from 'three/addons/tsl/display/SSRNode.js';
  14. */
class SSRNode extends TempNode {

	static get type() {

		return 'SSRNode';

	}

	/**
	 * Constructs a new SSR node.
	 *
	 * @param {Node<vec4>} colorNode - The node that represents the beauty pass.
	 * @param {Node<float>} depthNode - A node that represents the beauty pass's depth.
	 * @param {Node<vec3>} normalNode - A node that represents the beauty pass's normals.
	 * @param {Node<float>} metalnessNode - A node that represents the beauty pass's metalness.
	 * @param {?Node<float>} [roughnessNode=null] - A node that represents the beauty pass's roughness.
	 * @param {?Camera} [camera=null] - The camera the scene is rendered with.
	 */
	constructor( colorNode, depthNode, normalNode, metalnessNode, roughnessNode = null, camera = null ) {

		super( 'vec4' );

		/**
		 * The node that represents the beauty pass.
		 *
		 * @type {Node<vec4>}
		 */
		this.colorNode = colorNode;

		/**
		 * A node that represents the beauty pass's depth.
		 *
		 * @type {Node<float>}
		 */
		this.depthNode = depthNode;

		/**
		 * A node that represents the beauty pass's normals.
		 *
		 * @type {Node<vec3>}
		 */
		this.normalNode = normalNode;

		/**
		 * A node that represents the beauty pass's metalness.
		 *
		 * @type {Node<float>}
		 */
		this.metalnessNode = metalnessNode;

		/**
		 * A node that represents the beauty pass's roughness. When set, the SSR
		 * reflections are blurred with a strength derived from this value (see
		 * `setup()`). Blurring is a costly operation so pass `null` if you
		 * encounter performance issues on certain devices.
		 *
		 * @type {?Node<float>}
		 * @default null
		 */
		this.roughnessNode = roughnessNode;

		/**
		 * The resolution scale. Valid values are in the range
		 * `[0,1]`. `1` means best quality but also results in
		 * more computational overhead. Setting to `0.5` means
		 * the effect is computed in half-resolution.
		 *
		 * @type {number}
		 * @default 1
		 */
		this.resolutionScale = 1;

		/**
		 * The `updateBeforeType` is set to `NodeUpdateType.FRAME` since the node renders
		 * its effect once per frame in `updateBefore()`.
		 *
		 * @type {string}
		 * @default 'frame'
		 */
		this.updateBeforeType = NodeUpdateType.FRAME;

		/**
		 * Controls how far a fragment can reflect. Increasing this value result in more
		 * computational overhead but also increases the reflection distance.
		 *
		 * @type {UniformNode<float>}
		 */
		this.maxDistance = uniform( 1 );

		/**
		 * Controls the cutoff between what counts as a possible reflection hit and what does not.
		 *
		 * @type {UniformNode<float>}
		 */
		this.thickness = uniform( 0.1 );

		/**
		 * Controls how the SSR reflections are blended with the beauty pass.
		 *
		 * @type {UniformNode<float>}
		 */
		this.opacity = uniform( 1 );

		/**
		 * This parameter controls how detailed the raymarching process works.
		 * The value ranges is `[0,1]` where `1` means best quality (the maximum number
		 * of raymarching iterations/samples) and `0` means no samples at all.
		 *
		 * A quality of `0.5` is usually sufficient for most use cases. Try to keep
		 * this parameter as low as possible. Larger values result in noticeable more
		 * overhead.
		 *
		 * @type {UniformNode<float>}
		 */
		this.quality = uniform( 0.5 );

		/**
		 * The quality of the blur. Must be an integer in the range `[1,3]`.
		 *
		 * @type {UniformNode<int>}
		 */
		this.blurQuality = uniform( 2 );

		// if no camera was given explicitly, try to extract it from the beauty pass node

		if ( camera === null ) {

			if ( this.colorNode.passNode && this.colorNode.passNode.isPassNode === true ) {

				camera = this.colorNode.passNode.camera;

			} else {

				throw new Error( 'THREE.TSL: No camera found. ssr() requires a camera.' );

			}

		}

		/**
		 * The camera the scene is rendered with.
		 *
		 * @type {Camera}
		 */
		this.camera = camera;

		/**
		 * The spread of the blur. Automatically set when generating mips.
		 *
		 * @private
		 * @type {UniformNode<int>}
		 */
		this._blurSpread = uniform( 1 );

		/**
		 * Represents the projection matrix of the scene's camera.
		 *
		 * @private
		 * @type {UniformNode<mat4>}
		 */
		this._cameraProjectionMatrix = uniform( camera.projectionMatrix );

		/**
		 * Represents the inverse projection matrix of the scene's camera.
		 *
		 * @private
		 * @type {UniformNode<mat4>}
		 */
		this._cameraProjectionMatrixInverse = uniform( camera.projectionMatrixInverse );

		/**
		 * Represents the near value of the scene's camera.
		 *
		 * @private
		 * @type {ReferenceNode<float>}
		 */
		this._cameraNear = reference( 'near', 'float', camera );

		/**
		 * Represents the far value of the scene's camera.
		 *
		 * @private
		 * @type {ReferenceNode<float>}
		 */
		this._cameraFar = reference( 'far', 'float', camera );

		/**
		 * Whether the scene's camera is perspective or orthographic.
		 *
		 * @private
		 * @type {UniformNode<bool>}
		 */
		this._isPerspectiveCamera = uniform( camera.isPerspectiveCamera === true );

		/**
		 * The resolution of the pass.
		 *
		 * @private
		 * @type {UniformNode<vec2>}
		 */
		this._resolution = uniform( new Vector2() );

		/**
		 * The render target the SSR is rendered into.
		 *
		 * @private
		 * @type {RenderTarget}
		 */
		this._ssrRenderTarget = new RenderTarget( 1, 1, { depthBuffer: false, type: HalfFloatType } );
		this._ssrRenderTarget.texture.name = 'SSRNode.SSR';

		/**
		 * The render target for the blurred SSR reflections. Uses trilinear
		 * filtering so the blur strength can be selected via mip level.
		 *
		 * @private
		 * @type {RenderTarget}
		 */
		this._blurRenderTarget = new RenderTarget( 1, 1, { depthBuffer: false, type: HalfFloatType, minFilter: LinearMipmapLinearFilter, magFilter: LinearFilter } );
		this._blurRenderTarget.texture.name = 'SSRNode.Blur';
		this._blurRenderTarget.texture.mipmaps.push( {}, {}, {}, {}, {} ); // five custom mip levels, rendered manually in updateBefore()

		/**
		 * The material that is used to render the effect.
		 *
		 * @private
		 * @type {NodeMaterial}
		 */
		this._ssrMaterial = new NodeMaterial();
		this._ssrMaterial.name = 'SSRNode.SSR';

		/**
		 * The blur material.
		 *
		 * @private
		 * @type {NodeMaterial}
		 */
		this._blurMaterial = new NodeMaterial();
		this._blurMaterial.name = 'SSRNode.Blur';

		/**
		 * The copy material.
		 *
		 * @private
		 * @type {NodeMaterial}
		 */
		this._copyMaterial = new NodeMaterial();
		this._copyMaterial.name = 'SSRNode.Copy';

		/**
		 * The result of the effect is represented as a separate texture node.
		 *
		 * @private
		 * @type {PassTextureNode}
		 */
		this._textureNode = passTexture( this, this._ssrRenderTarget.texture );

		let blurredTextureNode = null;

		if ( this.roughnessNode !== null ) {

			// map roughness to a mip level: lod = clamp( roughness^2 * maxMip, 0, maxMip )

			const mips = this._blurRenderTarget.texture.mipmaps.length - 1;

			const r = float( this.roughnessNode );
			const lod = r.mul( r ).mul( mips ).clamp( 0, mips );

			blurredTextureNode = passTexture( this, this._blurRenderTarget.texture ).level( lod );

		}

		/**
		 * Holds the blurred SSR reflections.
		 *
		 * @private
		 * @type {?PassTextureNode}
		 */
		this._blurredTextureNode = blurredTextureNode;

	}

	/**
	 * Returns the result of the effect as a texture node. When a roughness node
	 * was provided, the blurred variant is returned instead of the raw reflections.
	 *
	 * @return {PassTextureNode} A texture node that represents the result of the effect.
	 */
	getTextureNode() {

		return this.roughnessNode !== null ? this._blurredTextureNode : this._textureNode;

	}

	/**
	 * Sets the size of the effect. The given dimensions are scaled by
	 * `resolutionScale` before being applied to the internal render targets.
	 *
	 * @param {number} width - The width of the effect.
	 * @param {number} height - The height of the effect.
	 */
	setSize( width, height ) {

		width = Math.round( this.resolutionScale * width );
		height = Math.round( this.resolutionScale * height );

		this._resolution.value.set( width, height );
		this._ssrRenderTarget.setSize( width, height );
		this._blurRenderTarget.setSize( width, height );

	}

	/**
	 * This method is used to render the effect once per frame.
	 *
	 * @param {NodeFrame} frame - The current node frame.
	 */
	updateBefore( frame ) {

		const { renderer } = frame;

		_rendererState = RendererUtils.resetRendererState( renderer, _rendererState );

		const ssrRenderTarget = this._ssrRenderTarget;
		const blurRenderTarget = this._blurRenderTarget;

		const size = renderer.getDrawingBufferSize( _size );

		_quadMesh.material = this._ssrMaterial;

		this.setSize( size.width, size.height );

		// clear

		renderer.setMRT( null );
		renderer.setClearColor( 0x000000, 0 );

		// ssr

		renderer.setRenderTarget( ssrRenderTarget );
		_quadMesh.name = 'SSR [ Reflections ]';
		_quadMesh.render( renderer );

		// blur (optional)

		if ( this.roughnessNode !== null ) {

			// blur mips but leave the base mip unblurred (mip 0 uses the copy material)

			for ( let i = 0; i < blurRenderTarget.texture.mipmaps.length; i ++ ) {

				_quadMesh.material = ( i === 0 ) ? this._copyMaterial : this._blurMaterial;

				this._blurSpread.value = i;

				renderer.setRenderTarget( blurRenderTarget, 0, i );
				_quadMesh.name = 'SSR [ Blur Level ' + i + ' ]';
				_quadMesh.render( renderer );

			}

		}

		// restore

		RendererUtils.restoreRendererState( renderer, _rendererState );

	}

	/**
	 * This method is used to setup the effect's TSL code.
	 *
	 * @param {NodeBuilder} builder - The current node builder.
	 * @return {PassTextureNode}
	 */
	setup( builder ) {

		const uvNode = uv();

		const pointToLineDistance = Fn( ( [ point, linePointA, linePointB ] ) => {

			// https://mathworld.wolfram.com/Point-LineDistance3-Dimensional.html

			return cross( point.sub( linePointA ), point.sub( linePointB ) ).length().div( linePointB.sub( linePointA ).length() );

		} );

		const pointPlaneDistance = Fn( ( [ point, planePoint, planeNormal ] ) => {

			// https://mathworld.wolfram.com/Point-PlaneDistance.html
			// https://en.wikipedia.org/wiki/Plane_(geometry)
			// http://paulbourke.net/geometry/pointlineplane/

			// planeNormal is already normalized, so denominator is 1

			const d = mul( planeNormal.x, planePoint.x ).add( mul( planeNormal.y, planePoint.y ) ).add( mul( planeNormal.z, planePoint.z ) ).negate().toVar();
			const distance = mul( planeNormal.x, point.x ).add( mul( planeNormal.y, point.y ) ).add( mul( planeNormal.z, point.z ) ).add( d );
			return distance;

		} );

		// converts a depth value into view space z, honoring the camera type

		const getViewZ = Fn( ( [ depth ] ) => {

			let viewZNode;

			if ( this.camera.isPerspectiveCamera ) {

				viewZNode = perspectiveDepthToViewZ( depth, this._cameraNear, this._cameraFar );

			} else {

				viewZNode = orthographicDepthToViewZ( depth, this._cameraNear, this._cameraFar );

			}

			return viewZNode;

		} );

		// samples the depth buffer, converting logarithmic depth to perspective depth when required

		const sampleDepth = ( uv ) => {

			const depth = this.depthNode.sample( uv ).r;

			if ( builder.renderer.logarithmicDepthBuffer === true ) {

				const viewZ = logarithmicDepthToViewZ( depth, this._cameraNear, this._cameraFar );

				return viewZToPerspectiveDepth( viewZ, this._cameraNear, this._cameraFar );

			}

			return depth;

		};

		const ssr = Fn( () => {

			const metalness = float( this.metalnessNode );

			// fragments with no metalness do not reflect their environment

			metalness.equal( 0.0 ).discard();

			// compute some standard FX entities

			const depth = sampleDepth( uvNode ).toVar();
			const viewPosition = getViewPosition( uvNode, depth, this._cameraProjectionMatrixInverse ).toVar();
			const viewNormal = this.normalNode.rgb.normalize().toVar();

			// compute the direction from the position in view space to the camera

			const viewIncidentDir = ( ( this.camera.isPerspectiveCamera ) ? normalize( viewPosition ) : vec3( 0, 0, - 1 ) ).toVar();

			// compute the direction in which the light is reflected on the surface

			const viewReflectDir = reflect( viewIncidentDir, viewNormal ).toVar();

			// adapt maximum distance to the local geometry (see https://www.mathsisfun.com/algebra/vectors-dot-product.html)

			const maxReflectRayLen = this.maxDistance.div( dot( viewIncidentDir.negate(), viewNormal ) ).toVar();

			// compute the maximum point of the reflection ray in view space

			const d1viewPosition = viewPosition.add( viewReflectDir.mul( maxReflectRayLen ) ).toVar();

			// check if d1viewPosition lies behind the camera near plane

			If( this._isPerspectiveCamera.and( d1viewPosition.z.greaterThan( this._cameraNear.negate() ) ), () => {

				// if so, ensure d1viewPosition is clamped on the near plane.
				// this prevents artifacts during the ray marching process

				const t = sub( this._cameraNear.negate(), viewPosition.z ).div( viewReflectDir.z );
				d1viewPosition.assign( viewPosition.add( viewReflectDir.mul( t ) ) );

			} );

			// d0 and d1 are the start and maximum points of the reflection ray in screen space

			const d0 = screenCoordinate.xy.toVar();
			const d1 = getScreenPosition( d1viewPosition, this._cameraProjectionMatrix ).mul( this._resolution ).toVar();

			// below variables are used to control the raymarching process

			// total length of the ray

			const totalLen = d1.sub( d0 ).length().toVar();

			// offset in x and y direction

			const xLen = d1.x.sub( d0.x ).toVar();
			const yLen = d1.y.sub( d0.y ).toVar();

			// determine the larger delta
			// The larger difference will help to determine how much to travel in the X and Y direction each iteration and
			// how many iterations are needed to travel the entire ray

			const totalStep = int( max( abs( xLen ), abs( yLen ) ).mul( this.quality.clamp() ) ).toConst();

			// step sizes in the x and y directions

			const xSpan = xLen.div( totalStep ).toVar();
			const ySpan = yLen.div( totalStep ).toVar();

			const output = vec4( 0 ).toVar();

			// the actual ray marching loop
			// starting from d0, the code gradually travels along the ray and looks for an intersection with the geometry.
			// it does not exceed d1 (the maximum ray extend)

			Loop( totalStep, ( { i } ) => {

				// advance on the ray by computing a new position in screen coordinates

				const xy = vec2( d0.x.add( xSpan.mul( float( i ) ) ), d0.y.add( ySpan.mul( float( i ) ) ) ).toVar();

				// stop processing if the new position lies outside of the screen

				If( xy.x.lessThan( 0 ).or( xy.x.greaterThan( this._resolution.x ) ).or( xy.y.lessThan( 0 ) ).or( xy.y.greaterThan( this._resolution.y ) ), () => {

					Break();

				} );

				// compute new uv, depth and viewZ for the next fragment

				const uvNode = xy.div( this._resolution );
				const d = sampleDepth( uvNode ).toVar();
				const vZ = getViewZ( d ).toVar();
				const viewReflectRayZ = float( 0 ).toVar();

				// normalized distance between the current position xy and the starting point d0

				const s = xy.sub( d0 ).length().div( totalLen );

				// depending on the camera type, we now compute the z-coordinate of the reflected ray at the current step in view space

				If( this._isPerspectiveCamera, () => {

					// perspective: interpolate 1/z linearly in screen space

					const recipVPZ = float( 1 ).div( viewPosition.z ).toVar();
					viewReflectRayZ.assign( float( 1 ).div( recipVPZ.add( s.mul( float( 1 ).div( d1viewPosition.z ).sub( recipVPZ ) ) ) ) );

				} ).Else( () => {

					// orthographic: z interpolates linearly in screen space

					viewReflectRayZ.assign( viewPosition.z.add( s.mul( d1viewPosition.z.sub( viewPosition.z ) ) ) );

				} );

				// if viewReflectRayZ is less or equal than the real z-coordinate at this place, it potentially intersects the geometry

				If( viewReflectRayZ.lessThanEqual( vZ ), () => {

					// compute the distance of the new location to the ray in view space
					// to clarify vP is the fragment's view position which is not an exact point on the ray

					const vP = getViewPosition( uvNode, d, this._cameraProjectionMatrixInverse ).toVar();
					const away = pointToLineDistance( vP, viewPosition, d1viewPosition ).toVar();

					// compute the minimum thickness between the current fragment and its neighbor in the x-direction.

					const xyNeighbor = vec2( xy.x.add( 1 ), xy.y ).toVar(); // move one pixel
					const uvNeighbor = xyNeighbor.div( this._resolution );
					const vPNeighbor = getViewPosition( uvNeighbor, d, this._cameraProjectionMatrixInverse ).toVar();

					const minThickness = vPNeighbor.x.sub( vP.x ).toVar();
					minThickness.mulAssign( 3 ); // expand a bit to avoid errors

					const tk = max( minThickness, this.thickness ).toVar();

					If( away.lessThanEqual( tk ), () => { // hit

						const vN = this.normalNode.sample( uvNode ).rgb.normalize().toVar();

						If( dot( viewReflectDir, vN ).greaterThanEqual( 0 ), () => {

							// the reflected ray is pointing towards the same side as the fragment's normal (current ray position),
							// which means it wouldn't reflect off the surface. The loop continues to the next step for the next ray sample.

							Continue();

						} );

						// this distance represents the depth of the intersection point between the reflected ray and the scene.

						const distance = pointPlaneDistance( vP, viewPosition, viewNormal ).toVar();

						If( distance.greaterThan( this.maxDistance ), () => {

							// Distance exceeding limit: The reflection is potentially too far away and
							// might not contribute significantly to the final color

							Break();

						} );

						const op = this.opacity.mul( metalness ).toVar();

						// distance attenuation (the reflection should fade out the farther it is away from the surface)

						const ratio = float( 1 ).sub( distance.div( this.maxDistance ) ).toVar();
						const attenuation = ratio.mul( ratio );
						op.mulAssign( attenuation );

						// fresnel (reflect more light on surfaces that are viewed at grazing angles)

						const fresnelCoe = div( dot( viewIncidentDir, viewReflectDir ).add( 1 ), 2 );
						op.mulAssign( fresnelCoe );

						// output

						const reflectColor = this.colorNode.sample( uvNode );
						output.assign( vec4( reflectColor.rgb.mul( op ), 1 ) );

						Break();

					} );

				} );

			} );

			return output;

		} );

		this._ssrMaterial.fragmentNode = ssr().context( builder.getSharedContext() );
		this._ssrMaterial.needsUpdate = true;

		// below materials are used for blurring

		const reflectionBuffer = texture( this._ssrRenderTarget.texture );

		this._blurMaterial.fragmentNode = boxBlur( reflectionBuffer, { size: this.blurQuality, separation: this._blurSpread } );
		this._blurMaterial.needsUpdate = true;

		this._copyMaterial.fragmentNode = reflectionBuffer;
		this._copyMaterial.needsUpdate = true;

		//

		return this.getTextureNode();

	}

	/**
	 * Frees internal resources. This method should be called
	 * when the effect is no longer required.
	 */
	dispose() {

		this._ssrRenderTarget.dispose();
		this._blurRenderTarget.dispose();

		this._ssrMaterial.dispose();
		this._blurMaterial.dispose();
		this._copyMaterial.dispose();

	}

}

export default SSRNode;
  471. /**
  472. * TSL function for creating screen space reflections (SSR).
  473. *
  474. * @tsl
  475. * @function
  476. * @param {Node<vec4>} colorNode - The node that represents the beauty pass.
  477. * @param {Node<float>} depthNode - A node that represents the beauty pass's depth.
  478. * @param {Node<vec3>} normalNode - A node that represents the beauty pass's normals.
  479. * @param {Node<float>} metalnessNode - A node that represents the beauty pass's metalness.
  480. * @param {?Node<float>} [roughnessNode=null] - A node that represents the beauty pass's roughness.
  481. * @param {?Camera} [camera=null] - The camera the scene is rendered with.
  482. * @returns {SSRNode}
  483. */
  484. export const ssr = ( colorNode, depthNode, normalNode, metalnessNode, roughnessNode = null, camera = null ) => new SSRNode( nodeObject( colorNode ), nodeObject( depthNode ), nodeObject( normalNode ), nodeObject( metalnessNode ), nodeObject( roughnessNode ), camera );