
WebGPURenderer: Persistent video texture approach (#31416)

sunag 6 months ago
parent
commit
b38ac17fe2
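
This change drops the GPUExternalTexture / texture_external path for VideoTexture: the video gets a persistent GPUTexture created like any other 2D texture, and the current video frame is uploaded through the same copy path as regular images. The user-facing API is unchanged; a minimal usage sketch (the asset path is a placeholder):

import * as THREE from 'three/webgpu';

const video = document.createElement( 'video' );
video.src = 'movie.mp4'; // placeholder asset
video.muted = true;
video.loop = true;
video.play();

const texture = new THREE.VideoTexture( video );
texture.colorSpace = THREE.SRGBColorSpace;

// sampled like a regular 2D texture under the persistent approach
const material = new THREE.MeshBasicMaterial( { map: texture } );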

+ 1 - 3
src/renderers/common/SampledTexture.js

@@ -78,8 +78,6 @@ class SampledTexture extends Binding {
 	 */
 	needsBindingsUpdate( generation ) {
 
-		const { texture } = this;
-
 		if ( generation !== this.generation ) {
 
 			this.generation = generation;
@@ -88,7 +86,7 @@ class SampledTexture extends Binding {
 
 		}
 
-		return texture.isVideoTexture;
+		return false;
 
 	}
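
With the texture now persistent, a bound VideoTexture is no longer a reason to rebuild bindings every frame; only a generation change is. A hypothetical caller sketch (helper names are illustrative, not from this commit):

if ( sampledTexture.needsBindingsUpdate( generation ) ) {

	// rebuild only when the binding generation actually changed
	rebuildBindGroup( bindGroup ); // hypothetical helper

}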
 

+ 13 - 3
src/renderers/common/Textures.js

@@ -366,9 +366,19 @@ class Textures extends DataMap {
 
 			if ( image.image !== undefined ) image = image.image;
 
-			target.width = image.width || 1;
-			target.height = image.height || 1;
-			target.depth = texture.isCubeTexture ? 6 : ( image.depth || 1 );
+			if ( image instanceof HTMLVideoElement ) {
+
+				target.width = image.videoWidth || 1;
+				target.height = image.videoHeight || 1;
+				target.depth = 1;
+
+			} else {
+
+				target.width = image.width || 1;
+				target.height = image.height || 1;
+				target.depth = texture.isCubeTexture ? 6 : ( image.depth || 1 );
+
+			}
 
 		} else {
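
An HTMLVideoElement exposes its intrinsic frame size as videoWidth/videoHeight; its width/height reflect layout or attributes and are typically 0 for an off-DOM video, which would collapse the size to the || 1 fallback. The same sizing logic in isolation (illustrative):

const isVideo = image instanceof HTMLVideoElement;

const width = ( isVideo ? image.videoWidth : image.width ) || 1;
const height = ( isVideo ? image.videoHeight : image.height ) || 1;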
 

+ 21 - 49
src/renderers/webgpu/nodes/WGSLNodeBuilder.js

@@ -16,7 +16,7 @@ import { NodeAccess } from '../../../nodes/core/constants.js';
 import VarNode from '../../../nodes/core/VarNode.js';
 import ExpressionNode from '../../../nodes/code/ExpressionNode.js';
 
-import { NoColorSpace, FloatType, RepeatWrapping, ClampToEdgeWrapping, MirroredRepeatWrapping, NearestFilter } from '../../../constants.js';
+import { NoColorSpace, FloatType, RepeatWrapping, ClampToEdgeWrapping, MirroredRepeatWrapping, NearestFilter, SRGBColorSpace } from '../../../constants.js';
 
 // GPUShaderStage is not defined in browsers not supporting WebGPU
 const GPUShaderStage = ( typeof self !== 'undefined' ) ? self.GPUShaderStage : { VERTEX: 1, FRAGMENT: 2, COMPUTE: 4 };
@@ -219,7 +219,14 @@ class WGSLNodeBuilder extends NodeBuilder {
 	 */
 	needsToWorkingColorSpace( texture ) {
 
-		return texture.isVideoTexture === true && texture.colorSpace !== NoColorSpace;
+		if ( texture.isVideoTexture && texture.colorSpace === SRGBColorSpace ) {
+
+			// Video textures are always in sRGB color space, so no conversion is needed
+			return false;
+
+		}
+
+		return texture.colorSpace !== NoColorSpace;
 
 	}
 
@@ -250,30 +257,7 @@ class WGSLNodeBuilder extends NodeBuilder {
 
 		} else {
 
-			return this._generateTextureSampleLevel( texture, textureProperty, uvSnippet, '0', depthSnippet );
-
-		}
-
-	}
-
-	/**
-	 * Generates the WGSL snippet when sampling video textures.
-	 *
-	 * @private
-	 * @param {string} textureProperty - The name of the video texture uniform in the shader.
-	 * @param {string} uvSnippet - A WGSL snippet that represents texture coordinates used for sampling.
-	 * @param {string} [shaderStage=this.shaderStage] - The shader stage this code snippet is generated for.
-	 * @return {string} The WGSL snippet.
-	 */
-	_generateVideoSample( textureProperty, uvSnippet, shaderStage = this.shaderStage ) {
-
-		if ( shaderStage === 'fragment' ) {
-
-			return `textureSampleBaseClampToEdge( ${ textureProperty }, ${ textureProperty }_sampler, vec2<f32>( ${ uvSnippet }.x, 1.0 - ${ uvSnippet }.y ) )`;
-
-		} else {
-
-			console.error( `WebGPURenderer: THREE.VideoTexture does not support ${ shaderStage } shader.` );
+			return this.generateTextureSampleLevel( texture, textureProperty, uvSnippet, '0', depthSnippet );
 
 		}
 
@@ -290,7 +274,7 @@ class WGSLNodeBuilder extends NodeBuilder {
 	 * @param {string} depthSnippet - A WGSL snippet that represents 0-based texture array index to sample.
 	 * @return {string} The WGSL snippet.
 	 */
-	_generateTextureSampleLevel( texture, textureProperty, uvSnippet, levelSnippet, depthSnippet ) {
+	generateTextureSampleLevel( texture, textureProperty, uvSnippet, levelSnippet, depthSnippet ) {
 
 		if ( this.isUnfilterable( texture ) === false ) {
 
@@ -434,7 +418,7 @@ class WGSLNodeBuilder extends NodeBuilder {
 			}
 
 			// Build parameters string based on texture type and multisampling
-			if ( isMultisampled || texture.isVideoTexture || texture.isStorageTexture ) {
+			if ( isMultisampled || texture.isStorageTexture ) {
 
 				textureDimensionsParams = textureProperty;
 
@@ -531,11 +515,7 @@ class WGSLNodeBuilder extends NodeBuilder {
 
 		let snippet;
 
-		if ( texture.isVideoTexture === true ) {
-
-			snippet = `textureLoad( ${ textureProperty }, ${ uvIndexSnippet } )`;
-
-		} else if ( depthSnippet ) {
+		if ( depthSnippet ) {
 
 			snippet = `textureLoad( ${ textureProperty }, ${ uvIndexSnippet }, ${ depthSnippet }, u32( ${ levelSnippet } ) )`;
 
@@ -624,11 +604,7 @@ class WGSLNodeBuilder extends NodeBuilder {
 
 		let snippet = null;
 
-		if ( texture.isVideoTexture === true ) {
-
-			snippet = this._generateVideoSample( textureProperty, uvSnippet, shaderStage );
-
-		} else if ( this.isUnfilterable( texture ) ) {
+		if ( this.isUnfilterable( texture ) ) {
 
 			snippet = this.generateTextureLod( texture, textureProperty, uvSnippet, depthSnippet, '0', shaderStage );
 
@@ -711,22 +687,22 @@ class WGSLNodeBuilder extends NodeBuilder {
 	 * @param {string} [shaderStage=this.shaderStage] - The shader stage this code snippet is generated for.
 	 * @return {string} The WGSL snippet.
 	 */
-	generateTextureLevel( texture, textureProperty, uvSnippet, levelSnippet, depthSnippet, shaderStage = this.shaderStage ) {
+	generateTextureLevel( texture, textureProperty, uvSnippet, levelSnippet, depthSnippet ) {
 
-		let snippet = null;
+		if ( this.isUnfilterable( texture ) === false ) {
 
-		if ( texture.isVideoTexture === true ) {
+			return `textureSampleLevel( ${ textureProperty }, ${ textureProperty }_sampler, ${ uvSnippet }, ${ levelSnippet } )`;
 
-			snippet = this._generateVideoSample( textureProperty, uvSnippet, shaderStage );
+		} else if ( this.isFilteredTexture( texture ) ) {
+
+			return this.generateFilteredTexture( texture, textureProperty, uvSnippet, levelSnippet );
 
 		} else {
 
-			snippet = this._generateTextureSampleLevel( texture, textureProperty, uvSnippet, levelSnippet, depthSnippet );
+			return this.generateTextureLod( texture, textureProperty, uvSnippet, depthSnippet, levelSnippet );
 
 		}
 
-		return snippet;
-
 	}
 
 	/**
@@ -1729,10 +1705,6 @@ ${ flowData.code }
 
 					textureType = 'texture_3d<f32>';
 
-				} else if ( texture.isVideoTexture === true ) {
-
-					textureType = 'texture_external';
-
 				} else {
 
 					const componentPrefix = this.getComponentTypeFromTexture( texture ).charAt( 0 );
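
With texture_external removed, a video texture is declared and sampled as an ordinary texture_2d<f32>, which also lifts the fragment-stage-only restriction of textureSampleBaseClampToEdge. Assumed shape of the generated WGSL, simplified:

const textureProperty = 'map';
const uvSnippet = 'vUv';

// Before (fragment stage only):
//   var map : texture_external;
//   textureSampleBaseClampToEdge( map, map_sampler, vec2<f32>( vUv.x, 1.0 - vUv.y ) )
//
// After (any stage; explicit level 0 outside the fragment stage):
//   var map : texture_2d<f32>;
//   textureSampleLevel( map, map_sampler, vUv, 0 )
const snippet = `textureSampleLevel( ${ textureProperty }, ${ textureProperty }_sampler, ${ uvSnippet }, 0 )`;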

+ 0 - 4
src/renderers/webgpu/utils/WebGPUBindingUtils.js

@@ -113,10 +113,6 @@ class WebGPUBindingUtils {
 
 				bindingGPU.sampler = sampler;
 
-			} else if ( binding.isSampledTexture && binding.texture.isVideoTexture ) {
-
-				bindingGPU.externalTexture = {}; // GPUExternalTextureBindingLayout
-
 			} else if ( binding.isSampledTexture && binding.store ) {
 
 				const storageTexture = {}; // GPUStorageTextureBindingLayout
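
Without the external-texture branch, a video texture falls through to the regular sampled-texture case. Illustrative shape of the resulting GPUBindGroupLayoutEntry (not copied from this file):

// before: { binding, visibility, externalTexture: {} } // GPUExternalTextureBindingLayout
// after: a plain GPUTextureBindingLayout, like any filterable 2D texture
const entry = {
	binding: 1,
	visibility: GPUShaderStage.FRAGMENT,
	texture: { sampleType: 'float', viewDimension: '2d' }
};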

+ 11 - 64
src/renderers/webgpu/utils/WebGPUTextureUtils.js

@@ -159,10 +159,6 @@ class WebGPUTextureUtils {
 
 			textureGPU = this._getDefaultCubeTextureGPU( format );
 
-		} else if ( texture.isVideoTexture ) {
-
-			this.backend.get( texture ).externalTexture = this._getDefaultVideoFrame();
-
 		} else {
 
 			textureGPU = this._getDefaultTextureGPU( format );
@@ -247,39 +243,23 @@ class WebGPUTextureUtils {
 
 		// texture creation
 
-		if ( texture.isVideoTexture ) {
-
-			const video = texture.source.data;
-			const videoFrame = new VideoFrame( video );
-
-			textureDescriptorGPU.size.width = videoFrame.displayWidth;
-			textureDescriptorGPU.size.height = videoFrame.displayHeight;
-
-			videoFrame.close();
-
-			textureData.externalTexture = video;
-
-		} else {
-
-			if ( format === undefined ) {
+		if ( format === undefined ) {
 
-				console.warn( 'WebGPURenderer: Texture format not supported.' );
+			console.warn( 'WebGPURenderer: Texture format not supported.' );
 
-				this.createDefaultTexture( texture );
-				return;
-
-			}
-
-			if ( texture.isCubeTexture ) {
+			this.createDefaultTexture( texture );
+			return;
 
-				textureDescriptorGPU.textureBindingViewDimension = GPUTextureViewDimension.Cube;
+		}
 
-			}
+		if ( texture.isCubeTexture ) {
 
-			textureData.texture = backend.device.createTexture( textureDescriptorGPU );
+			textureDescriptorGPU.textureBindingViewDimension = GPUTextureViewDimension.Cube;
 
 		}
 
+		textureData.texture = backend.device.createTexture( textureDescriptorGPU );
+
 		if ( isMSAA ) {
 
 			const msaaTextureDescriptorGPU = Object.assign( {}, textureDescriptorGPU );
@@ -480,12 +460,6 @@ class WebGPUTextureUtils {
 
 			this._copyCubeMapToTexture( options.images, textureData.texture, textureDescriptorGPU, texture.flipY, texture.premultiplyAlpha );
 
-		} else if ( texture.isVideoTexture ) {
-
-			const video = texture.source.data;
-
-			textureData.externalTexture = video;
-
 		} else {
 
 			this._copyImageToTexture( options.image, textureData.texture, textureDescriptorGPU, 0, texture.flipY, texture.premultiplyAlpha );
@@ -615,33 +589,6 @@ class WebGPUTextureUtils {
 
 	}
 
-	/**
-	 * Returns the default video frame used as default data in context of video textures.
-	 *
-	 * @private
-	 * @return {VideoFrame} The video frame.
-	 */
-	_getDefaultVideoFrame() {
-
-		let defaultVideoFrame = this.defaultVideoFrame;
-
-		if ( defaultVideoFrame === null ) {
-
-			const init = {
-				timestamp: 0,
-				codedWidth: 1,
-				codedHeight: 1,
-				format: 'RGBA',
-			};
-
-			this.defaultVideoFrame = defaultVideoFrame = new VideoFrame( new Uint8Array( [ 0, 0, 0, 0xff ] ), init );
-
-		}
-
-		return defaultVideoFrame;
-
-	}
-
 	/**
 	 * Uploads cube texture image data to the GPU memory.
 	 *
@@ -699,8 +646,8 @@ class WebGPUTextureUtils {
 				origin: { x: 0, y: 0, z: originDepth },
 				premultipliedAlpha: premultiplyAlpha
 			}, {
-				width: image.width,
-				height: image.height,
+				width: textureDescriptorGPU.size.width,
+				height: textureDescriptorGPU.size.height,
 				depthOrArrayLayers: 1
 			}
 		);
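
The copy size now comes from the texture descriptor instead of image.width/image.height, so video frames (sized from videoWidth/videoHeight in Textures.js) upload into the persistent GPUTexture like any other image. A minimal upload sketch, assuming a device, a matching gpuTexture, and a playing HTMLVideoElement video:

device.queue.copyExternalImageToTexture(
	{ source: video, flipY: true },
	{ texture: gpuTexture, premultipliedAlpha: false },
	{ width: video.videoWidth, height: video.videoHeight, depthOrArrayLayers: 1 }
);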
