<!DOCTYPE html>
  1. <html lang="en">
  2. <head>
  3. <title>three.js webgpu - audio processing</title>
  4. <meta charset="utf-8">
  5. <meta name="viewport" content="width=device-width, user-scalable=no, minimum-scale=1.0, maximum-scale=1.0">
  6. <link type="text/css" rel="stylesheet" href="example.css">
  7. </head>
  8. <body>
  9. <div id="overlay">
  10. <button id="startButton">Play</button>
  11. </div>
  12. <div id="info">
  13. <a href="https://threejs.org/" target="_blank" rel="noopener" class="logo-link"></a>
  14. <div class="title-wrapper">
  15. <a href="https://threejs.org/" target="_blank" rel="noopener">three.js</a><span>Audio Processing</span>
  16. </div>
  17. <small>Click on screen to process the audio using WebGPU.</small>
  18. </div>
  19. <script type="importmap">
  20. {
  21. "imports": {
  22. "three": "../build/three.webgpu.js",
  23. "three/webgpu": "../build/three.webgpu.js",
  24. "three/tsl": "../build/three.tsl.js",
  25. "three/addons/": "./jsm/"
  26. }
  27. }
  28. </script>
  29. <script type="module">
  30. import * as THREE from 'three/webgpu';
  31. import { Fn, uniform, instanceIndex, instancedArray, float, texture, screenUV, color } from 'three/tsl';
  32. import { Inspector } from 'three/addons/inspector/Inspector.js';
  33. let camera, scene, renderer;
  34. let computeNode;
  35. let waveBuffer, sampleRate;
  36. let waveArray;
  37. let currentAudio, currentAnalyser;
  38. const analyserBuffer = new Uint8Array( 1024 );
  39. let analyserTexture;
  40. const startButton = document.getElementById( 'startButton' );
  41. startButton.addEventListener( 'click', init );
  42. async function playAudioBuffer() {
  43. if ( currentAudio ) currentAudio.stop();
  44. // compute audio
  45. renderer.compute( computeNode );
  46. const wave = new Float32Array( await renderer.getArrayBufferAsync( waveArray.value ) );
  47. // play result
  48. const audioOutputContext = new AudioContext( { sampleRate } );
  49. const audioOutputBuffer = audioOutputContext.createBuffer( 1, wave.length, sampleRate );
  50. audioOutputBuffer.copyToChannel( wave, 0 );
  51. const source = audioOutputContext.createBufferSource();
  52. source.connect( audioOutputContext.destination );
  53. source.buffer = audioOutputBuffer;
  54. source.start();
  55. currentAudio = source;
  56. // visual feedback
  57. currentAnalyser = audioOutputContext.createAnalyser();
  58. currentAnalyser.fftSize = 2048;
  59. source.connect( currentAnalyser );
  60. }
  61. async function init() {
  62. const overlay = document.getElementById( 'overlay' );
  63. overlay.remove();
  64. // audio buffer
  65. const soundBuffer = await fetch( 'sounds/webgpu-audio-processing.mp3' ).then( res => res.arrayBuffer() );
  66. const audioContext = new AudioContext();
  67. const audioBuffer = await audioContext.decodeAudioData( soundBuffer );
  68. waveBuffer = audioBuffer.getChannelData( 0 );
  69. // adding extra silence to delay and pitch
  70. waveBuffer = new Float32Array( [ ...waveBuffer, ...new Float32Array( 200000 ) ] );
  71. sampleRate = audioBuffer.sampleRate / audioBuffer.numberOfChannels;
  72. // create webgpu buffers
  73. waveArray = instancedArray( waveBuffer );
  74. // read-only buffer
  75. const originalWave = instancedArray( waveBuffer ).toReadOnly();
  76. // The Pixel Buffer Object (PBO) is required to get the GPU computed data to the CPU in the WebGL2 fallback.
  77. // As used in `renderer.getArrayBufferAsync( waveArray.value )`.
  78. originalWave.setPBO( true );
  79. waveArray.setPBO( true );
  80. // params
  81. const pitch = uniform( 1.5 );
  82. const delayVolume = uniform( .2 );
  83. const delayOffset = uniform( .55 );
  84. // compute (shader-node)
  85. const computeShaderFn = Fn( () => {
  86. const index = float( instanceIndex );
  87. // pitch
  88. const time = index.mul( pitch );
  89. let wave = originalWave.element( time );
  90. // delay
  91. for ( let i = 1; i < 7; i ++ ) {
  92. const waveOffset = originalWave.element( index.sub( delayOffset.mul( sampleRate ).mul( i ) ).mul( pitch ) );
  93. const waveOffsetVolume = waveOffset.mul( delayVolume.div( i * i ) );
  94. wave = wave.add( waveOffsetVolume );
  95. }
  96. // store
  97. const waveStorageElementNode = waveArray.element( instanceIndex );
  98. waveStorageElementNode.assign( wave );
  99. } );
  100. // compute
  101. computeNode = computeShaderFn().compute( waveBuffer.length );
  102. // renderer
  103. const container = document.createElement( 'div' );
  104. document.body.appendChild( container );
  105. camera = new THREE.PerspectiveCamera( 45, window.innerWidth / window.innerHeight, 0.01, 30 );
  106. // nodes
  107. analyserTexture = new THREE.DataTexture( analyserBuffer, analyserBuffer.length, 1, THREE.RedFormat );
  108. const spectrum = texture( analyserTexture, screenUV.x ).x.mul( screenUV.y );
  109. const backgroundNode = color( 0x0000FF ).mul( spectrum );
  110. // scene
  111. scene = new THREE.Scene();
  112. scene.backgroundNode = backgroundNode;
  113. // renderer
  114. renderer = new THREE.WebGPURenderer( { antialias: true } );
  115. renderer.setPixelRatio( window.devicePixelRatio );
  116. renderer.setSize( window.innerWidth, window.innerHeight );
  117. renderer.setAnimationLoop( render );
  118. renderer.inspector = new Inspector();
  119. container.appendChild( renderer.domElement );
  120. await renderer.init();
  121. window.addEventListener( 'resize', onWindowResize );
  122. document.addEventListener( 'click', playAudioBuffer );
  123. // gui
  124. const gui = renderer.inspector.createParameters( 'Audio' );
  125. gui.add( pitch, 'value', .5, 2, 0.01 ).name( 'pitch' );
  126. gui.add( delayVolume, 'value', 0, 1, .01 ).name( 'delayVolume' );
  127. gui.add( delayOffset, 'value', .1, 1, .01 ).name( 'delayOffset' );
  128. //
  129. playAudioBuffer();
  130. }
  131. function onWindowResize() {
  132. camera.aspect = window.innerWidth / window.innerHeight;
  133. camera.updateProjectionMatrix();
  134. renderer.setSize( window.innerWidth, window.innerHeight );
  135. }
  136. function render() {
  137. if ( currentAnalyser ) {
  138. currentAnalyser.getByteFrequencyData( analyserBuffer );
  139. analyserTexture.needsUpdate = true;
  140. }
  141. renderer.render( scene, camera );
  142. }
  143. </script>
  144. </body>
  145. </html>