@@ -63662,6 +63662,252 @@ function WebGLObjects( gl, geometries, attributes, info ) {

 }

+const toneMappingMap = {
+	[ LinearToneMapping ]: 'LINEAR_TONE_MAPPING',
+	[ ReinhardToneMapping ]: 'REINHARD_TONE_MAPPING',
+	[ CineonToneMapping ]: 'CINEON_TONE_MAPPING',
+	[ ACESFilmicToneMapping ]: 'ACES_FILMIC_TONE_MAPPING',
+	[ AgXToneMapping ]: 'AGX_TONE_MAPPING',
+	[ NeutralToneMapping ]: 'NEUTRAL_TONE_MAPPING',
+	[ CustomToneMapping ]: 'CUSTOM_TONE_MAPPING'
+};
+
+function WebGLOutput( type, width, height, depth, stencil ) {
+
+	// render targets for scene and post-processing
+	const targetA = new WebGLRenderTarget( width, height, {
+		type: type,
+		depthBuffer: depth,
+		stencilBuffer: stencil
+	} );
+
+	const targetB = new WebGLRenderTarget( width, height, {
+		type: HalfFloatType,
+		depthBuffer: false,
+		stencilBuffer: false
+	} );
+
+	// create fullscreen triangle geometry
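+	// a single oversized triangle covers the screen without the diagonal seam of a two-triangle quad;
+	// its UVs span [ 0, 2 ] so the visible region maps to [ 0, 1 ]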
+	const geometry = new BufferGeometry();
+	geometry.setAttribute( 'position', new Float32BufferAttribute( [ -1, 3, 0, -1, -1, 0, 3, -1, 0 ], 3 ) );
+	geometry.setAttribute( 'uv', new Float32BufferAttribute( [ 0, 2, 0, 0, 2, 0 ], 2 ) );
+
+	// create output material with tone mapping support
+	const material = new RawShaderMaterial( {
+		uniforms: {
+			tDiffuse: { value: null }
+		},
+		vertexShader: /* glsl */`
+			precision highp float;
+
+			uniform mat4 modelViewMatrix;
+			uniform mat4 projectionMatrix;
+
+			attribute vec3 position;
+			attribute vec2 uv;
+
+			varying vec2 vUv;
+
+			void main() {
+				vUv = uv;
+				gl_Position = projectionMatrix * modelViewMatrix * vec4( position, 1.0 );
+			}`,
+		fragmentShader: /* glsl */`
+			precision highp float;
+
+			uniform sampler2D tDiffuse;
+
+			varying vec2 vUv;
+
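+			// expanded by the renderer's shader preprocessor; they provide the *ToneMapping() and sRGBTransferOETF() functions used below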
+			#include <tonemapping_pars_fragment>
+			#include <colorspace_pars_fragment>
+
+			void main() {
+				gl_FragColor = texture2D( tDiffuse, vUv );
+
+				#ifdef LINEAR_TONE_MAPPING
+					gl_FragColor.rgb = LinearToneMapping( gl_FragColor.rgb );
+				#elif defined( REINHARD_TONE_MAPPING )
+					gl_FragColor.rgb = ReinhardToneMapping( gl_FragColor.rgb );
+				#elif defined( CINEON_TONE_MAPPING )
+					gl_FragColor.rgb = CineonToneMapping( gl_FragColor.rgb );
+				#elif defined( ACES_FILMIC_TONE_MAPPING )
+					gl_FragColor.rgb = ACESFilmicToneMapping( gl_FragColor.rgb );
+				#elif defined( AGX_TONE_MAPPING )
+					gl_FragColor.rgb = AgXToneMapping( gl_FragColor.rgb );
+				#elif defined( NEUTRAL_TONE_MAPPING )
+					gl_FragColor.rgb = NeutralToneMapping( gl_FragColor.rgb );
+				#elif defined( CUSTOM_TONE_MAPPING )
+					gl_FragColor.rgb = CustomToneMapping( gl_FragColor.rgb );
+				#endif
+
+				#ifdef SRGB_TRANSFER
+					gl_FragColor = sRGBTransferOETF( gl_FragColor );
+				#endif
+			}`,
+		depthTest: false,
+		depthWrite: false
+	} );
+
+	const mesh = new Mesh( geometry, material );
+	const camera = new OrthographicCamera( -1, 1, 1, -1, 0, 1 );
+
+	let _outputColorSpace = null;
+	let _outputToneMapping = null;
+	let _isCompositing = false;
+	let _savedToneMapping;
+	let _savedRenderTarget = null;
+	let _effects = [];
+	let _hasRenderPass = false;
+
+	this.setSize = function ( width, height ) {
+
+		targetA.setSize( width, height );
+		targetB.setSize( width, height );
+
+		for ( let i = 0; i < _effects.length; i ++ ) {
+
+			const effect = _effects[ i ];
+			if ( effect.setSize ) effect.setSize( width, height );
+
+		}
+
+	};
+
+	this.setEffects = function ( effects ) {
+
+		_effects = effects;
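+		// a leading RenderPass renders the scene itself, so the renderer skips its own scene render (see hasRenderPass())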
+		_hasRenderPass = _effects.length > 0 && _effects[ 0 ].isRenderPass === true;
+
+		const width = targetA.width;
+		const height = targetA.height;
+
+		for ( let i = 0; i < _effects.length; i ++ ) {
+
+			const effect = _effects[ i ];
+			if ( effect.setSize ) effect.setSize( width, height );
+
+		}
+
+	};
+
+	this.begin = function ( renderer, renderTarget ) {
+
+		// Don't begin during compositing phase (post-processing effects call render())
+		if ( _isCompositing ) return false;
+
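+		// nothing to do: without tone mapping or effects the default framebuffer output is already final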
+		if ( renderer.toneMapping === NoToneMapping && _effects.length === 0 ) return false;
+
+		_savedRenderTarget = renderTarget;
+
+		// resize internal buffers to match render target (e.g. XR resolution)
+		if ( renderTarget !== null ) {
+
+			const width = renderTarget.width;
+			const height = renderTarget.height;
+
+			if ( targetA.width !== width || targetA.height !== height ) {
+
+				this.setSize( width, height );
+
+			}
+
+		}
+
+		// if first effect is a RenderPass, it will set its own render target
+		if ( _hasRenderPass === false ) {
+
+			renderer.setRenderTarget( targetA );
+
+		}
+
+		// disable tone mapping during render - it will be applied in end()
+		_savedToneMapping = renderer.toneMapping;
+		renderer.toneMapping = NoToneMapping;
+
+		return true;
+
+	};
+
+	this.hasRenderPass = function () {
+
+		return _hasRenderPass;
+
+	};
+
+	this.end = function ( renderer, deltaTime ) {
+
+		// restore tone mapping
+		renderer.toneMapping = _savedToneMapping;
+
+		_isCompositing = true;
+
+		// run post-processing effects
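+		// effects ping-pong between targetA and targetB: each reads from readBuffer and writes to writeBuffer, then the buffers swap unless the effect sets needsSwap = false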
+		let readBuffer = targetA;
+		let writeBuffer = targetB;
+
+		for ( let i = 0; i < _effects.length; i ++ ) {
+
+			const effect = _effects[ i ];
+
+			if ( effect.enabled === false ) continue;
+
+			effect.render( renderer, writeBuffer, readBuffer, deltaTime );
+
+			if ( effect.needsSwap !== false ) {
+
+				const temp = readBuffer;
+				readBuffer = writeBuffer;
+				writeBuffer = temp;
+
+			}
+
+		}
+
+		// update output material defines if settings changed
+		if ( _outputColorSpace !== renderer.outputColorSpace || _outputToneMapping !== renderer.toneMapping ) {
+
+			_outputColorSpace = renderer.outputColorSpace;
+			_outputToneMapping = renderer.toneMapping;
+
+			material.defines = {};
+
+			if ( ColorManagement.getTransfer( _outputColorSpace ) === SRGBTransfer ) material.defines.SRGB_TRANSFER = '';
+
+			const toneMapping = toneMappingMap[ _outputToneMapping ];
+			if ( toneMapping ) material.defines[ toneMapping ] = '';
+
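+			// changing defines invalidates the compiled program, so flag the material for recompilation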
+			material.needsUpdate = true;
+
+		}
+
+		// final output to canvas (or XR render target)
+		material.uniforms.tDiffuse.value = readBuffer.texture;
+		renderer.setRenderTarget( _savedRenderTarget );
+		renderer.render( mesh, camera );
+
+		_savedRenderTarget = null;
+		_isCompositing = false;
+
+	};
+
+	this.isCompositing = function () {
+
+		return _isCompositing;
+
+	};
+
+	this.dispose = function () {
+
+		targetA.dispose();
+		targetB.dispose();
+		geometry.dispose();
+		material.dispose();
+
+	};
+
+}
+
/**
 * Uniforms of a program.
 * Those form a tree structure with a special top-level container for the root,
@@ -74428,6 +74674,7 @@ class WebGLRenderer {
			powerPreference = 'default',
			failIfMajorPerformanceCaveat = false,
			reversedDepthBuffer = false,
+			outputBufferType = UnsignedByteType,
		} = parameters;

		/**
@@ -74457,6 +74704,8 @@ class WebGLRenderer {

		}

+		const _outputBufferType = outputBufferType;
+
		const INTEGER_FORMATS = new Set( [
			RGBAIntegerFormat,
			RGIntegerFormat,
@@ -74484,6 +74733,10 @@ class WebGLRenderer {
		const renderListStack = [];
		const renderStateStack = [];

+		// internal render target for non-UnsignedByteType color buffer
+
+		let output = null;
+
		// public properties

		/**
@@ -74879,6 +75132,14 @@ class WebGLRenderer {

		initGLContext();

+		// initialize internal render target for non-UnsignedByteType color buffer
+
+		if ( _outputBufferType !== UnsignedByteType ) {
+
+			output = new WebGLOutput( _outputBufferType, canvas.width, canvas.height, depth, stencil );
+
+		}
+
		// xr

		const xr = new WebXRManager( _this, _gl );
@@ -75001,6 +75262,12 @@ class WebGLRenderer {

			}

+			if ( output !== null ) {
+
+				output.setSize( canvas.width, canvas.height );
+
+			}
+
			this.setViewport( 0, 0, width, height );

		};
@@ -75044,6 +75311,39 @@ class WebGLRenderer {

		};

+		/**
+		 * Sets the post-processing effects to be applied after rendering.
+		 *
+		 * @param {Array} effects - An array of post-processing effects.
+		 */
+		this.setEffects = function ( effects ) {
+
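+			// with the default UnsignedByteType there is no internal output target to run effects against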
+			if ( _outputBufferType === UnsignedByteType ) {
+
+				console.error( 'THREE.WebGLRenderer: setEffects() requires outputBufferType set to HalfFloatType or FloatType.' );
+				return;
+
+			}
+
+			if ( effects ) {
+
+				for ( let i = 0; i < effects.length; i ++ ) {
+
+					if ( effects[ i ].isOutputPass === true ) {
+
+						console.warn( 'THREE.WebGLRenderer: OutputPass is not needed in setEffects(). Tone mapping and color space conversion are applied automatically.' );
+						break;
+
+					}
+
+				}
+
+			}
+
+			output.setEffects( effects || [] );
+
+		};
+
		/**
		 * Returns the current viewport definition.
		 *
@@ -75893,6 +76193,12 @@ class WebGLRenderer {

			if ( _isContextLost === true ) return;

+			// render into the internal output target (outputBufferType) when tone mapping or post-processing effects are active
+
+			const isXRPresenting = xr.enabled === true && xr.isPresenting === true;
+
+			const useOutput = output !== null && ( _currentRenderTarget === null || isXRPresenting ) && output.begin( _this, _currentRenderTarget );
+
			// update scene graph

			if ( scene.matrixWorldAutoUpdate === true ) scene.updateMatrixWorld();
@@ -75901,7 +76207,7 @@ class WebGLRenderer {

			if ( camera.parent === null && camera.matrixWorldAutoUpdate === true ) camera.updateMatrixWorld();

-			if ( xr.enabled === true && xr.isPresenting === true ) {
+			if ( xr.enabled === true && xr.isPresenting === true && ( output === null || output.isCompositing() === false ) ) {

				if ( xr.cameraAutoUpdate === true ) xr.updateCamera( camera );

@@ -75973,46 +76279,52 @@ class WebGLRenderer {

			if ( this.info.autoReset === true ) this.info.reset();

-			// render scene
+			// render scene (skip if first effect is a render pass - it will render the scene itself)

-			const opaqueObjects = currentRenderList.opaque;
-			const transmissiveObjects = currentRenderList.transmissive;
+			const skipSceneRender = useOutput && output.hasRenderPass();

-			currentRenderState.setupLights();
+			if ( skipSceneRender === false ) {

-			if ( camera.isArrayCamera ) {
+				const opaqueObjects = currentRenderList.opaque;
+				const transmissiveObjects = currentRenderList.transmissive;

-				const cameras = camera.cameras;
+				currentRenderState.setupLights();

-				if ( transmissiveObjects.length > 0 ) {
+				if ( camera.isArrayCamera ) {

-					for ( let i = 0, l = cameras.length; i < l; i ++ ) {
+					const cameras = camera.cameras;

-						const camera2 = cameras[ i ];
+					if ( transmissiveObjects.length > 0 ) {
+
+						for ( let i = 0, l = cameras.length; i < l; i ++ ) {

-						renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera2 );
+							const camera2 = cameras[ i ];
+
+							renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera2 );
+
+						}

					}

-				}
+					if ( _renderBackground ) background.render( scene );

-			if ( _renderBackground ) background.render( scene );
+					for ( let i = 0, l = cameras.length; i < l; i ++ ) {

-			for ( let i = 0, l = cameras.length; i < l; i ++ ) {
+						const camera2 = cameras[ i ];

-				const camera2 = cameras[ i ];
+						renderScene( currentRenderList, scene, camera2, camera2.viewport );

-				renderScene( currentRenderList, scene, camera2, camera2.viewport );
+					}

-			}
+				} else {

-			} else {
+					if ( transmissiveObjects.length > 0 ) renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera );

-				if ( transmissiveObjects.length > 0 ) renderTransmissionPass( opaqueObjects, transmissiveObjects, scene, camera );
+					if ( _renderBackground ) background.render( scene );

-				if ( _renderBackground ) background.render( scene );
+					renderScene( currentRenderList, scene, camera );

-				renderScene( currentRenderList, scene, camera );
+				}

			}

@@ -76030,6 +76342,14 @@ class WebGLRenderer {

			}

+			// composite the internal render target to the canvas (or XR framebuffer) using the fullscreen triangle
+
+			if ( useOutput ) {
+
+				output.end( _this );
+
+			}
+
			//

			if ( scene.isScene === true ) scene.onAfterRender( _this, scene, camera );
@@ -76218,9 +76538,11 @@ class WebGLRenderer {

			if ( currentRenderState.state.transmissionRenderTarget[ camera.id ] === undefined ) {

+				const hasHalfFloatSupport = extensions.has( 'EXT_color_buffer_half_float' ) || extensions.has( 'EXT_color_buffer_float' );
+
				currentRenderState.state.transmissionRenderTarget[ camera.id ] = new WebGLRenderTarget( 1, 1, {
					generateMipmaps: true,
-					type: ( extensions.has( 'EXT_color_buffer_half_float' ) || extensions.has( 'EXT_color_buffer_float' ) ) ? HalfFloatType : UnsignedByteType,
+					type: hasHalfFloatSupport ? HalfFloatType : UnsignedByteType,
					minFilter: LinearMipmapLinearFilter,
					samples: capabilities.samples,
					stencilBuffer: stencil,
@@ -77068,7 +77390,6 @@ class WebGLRenderer {
			_currentActiveCubeFace = activeCubeFace;
			_currentActiveMipmapLevel = activeMipmapLevel;

-			let useDefaultFramebuffer = true;
			let framebuffer = null;
			let isCube = false;
			let isRenderTarget3D = false;
@@ -77079,9 +77400,21 @@ class WebGLRenderer {

				if ( renderTargetProperties.__useDefaultFramebuffer !== undefined ) {

-					// We need to make sure to rebind the framebuffer.
-					state.bindFramebuffer( _gl.FRAMEBUFFER, null );
-					useDefaultFramebuffer = false;
+					// Externally-managed framebuffer (e.g. XR)
+					// Bind to the stored framebuffer (may be null for default, or a WebGLFramebuffer)
+					state.bindFramebuffer( _gl.FRAMEBUFFER, renderTargetProperties.__webglFramebuffer );
+
+					_currentViewport.copy( renderTarget.viewport );
+					_currentScissor.copy( renderTarget.scissor );
+					_currentScissorTest = renderTarget.scissorTest;
+
+					state.viewport( _currentViewport );
+					state.scissor( _currentScissor );
+					state.setScissorTest( _currentScissorTest );
+
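+					// reset the cached material id so the next setProgram() call refreshes material state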
+					_currentMaterialId = -1;
+
+					return;

				} else if ( renderTargetProperties.__webglFramebuffer === undefined ) {

@@ -77180,7 +77513,7 @@ class WebGLRenderer {

			const framebufferBound = state.bindFramebuffer( _gl.FRAMEBUFFER, framebuffer );

-			if ( framebufferBound && useDefaultFramebuffer ) {
+			if ( framebufferBound ) {

				state.drawBuffers( renderTarget, framebuffer );