diff --git a/package-lock.json b/package-lock.json
index 01821195..45a81b86 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -9,6 +9,7 @@
       "version": "0.1.0",
       "license": "BSD-3-Clause",
       "dependencies": {
+        "@types/dom-mediacapture-transform": "^0.1.5",
         "codemirror": "^5.58.2",
         "dat.gui": "^0.7.6",
         "file-loader": "^6.2.0",
@@ -469,6 +470,19 @@
       "integrity": "sha512-Dx9f9CkXJkDAxt9M05vc7DItSqsiEhWN7Rx3vgO/maltv/nX9TaMX2sd/iAMENnL1D5FivetktJEyCBLFu50CQ==",
       "dev": true
     },
+    "node_modules/@types/dom-mediacapture-transform": {
+      "version": "0.1.5",
+      "resolved": "https://registry.npmjs.org/@types/dom-mediacapture-transform/-/dom-mediacapture-transform-0.1.5.tgz",
+      "integrity": "sha512-Mgu6H5LVJPgJuAumx0xFEdZvn9whHy+J3gEJbJz5xdWrUJ8ZwZ/JTvWwYOQPkCGzGWykPN7ufQn94iil+VCWGw==",
+      "dependencies": {
+        "@types/dom-webcodecs": "*"
+      }
+    },
+    "node_modules/@types/dom-webcodecs": {
+      "version": "0.1.7",
+      "resolved": "https://registry.npmjs.org/@types/dom-webcodecs/-/dom-webcodecs-0.1.7.tgz",
+      "integrity": "sha512-1euwRyJ7rQrddOa24d5ZcoDRMp68xEIwyb7PKb3Wpc2OullUxHQ4a63WPa5V8R+KtCWISKjgS+f83HCUVhXS/w=="
+    },
     "node_modules/@types/eslint": {
       "version": "8.21.1",
       "resolved": "https://registry.npmjs.org/@types/eslint/-/eslint-8.21.1.tgz",
diff --git a/package.json b/package.json
index f60e6423..7d5d9c27 100644
--- a/package.json
+++ b/package.json
@@ -16,6 +16,7 @@
     "export": "next export"
   },
   "dependencies": {
+    "@types/dom-mediacapture-transform": "^0.1.5",
     "codemirror": "^5.58.2",
     "dat.gui": "^0.7.6",
     "file-loader": "^6.2.0",
diff --git a/src/components/SampleLayout.tsx b/src/components/SampleLayout.tsx
index 419ad998..5b5d457e 100644
--- a/src/components/SampleLayout.tsx
+++ b/src/components/SampleLayout.tsx
@@ -66,6 +66,7 @@ const SampleLayout: React.FunctionComponent<
   React.PropsWithChildren<{
     name: string;
     description: string;
+    originTrial?: string;
     filename: string;
     gui?: boolean;
     init: SampleInit;
@@ -155,6 +156,7 @@ const SampleLayout: React.FunctionComponent<
         />
         <title>{`${props.name} - WebGPU Samples`}</title>
+        <meta httpEquiv="origin-trial" content={props.originTrial} />
       </Head>
       <div>
         <h1>{props.name}</h1>
diff --git a/src/pages/samples/[slug].tsx b/src/pages/samples/[slug].tsx
index 158fdcc6..52487ee9 100644
--- a/src/pages/samples/[slug].tsx
+++ b/src/pages/samples/[slug].tsx
@@ -24,6 +24,9 @@
   computeBoids: dynamic(() => import('../../sample/computeBoids/main')),
   animometer: dynamic(() => import('../../sample/animometer/main')),
   videoUploading: dynamic(() => import('../../sample/videoUploading/main')),
+  videoUploadingWebCodecs: dynamic(
+    () => import('../../sample/videoUploadingWebCodecs/main')
+  ),
   imageBlur: dynamic(() => import('../../sample/imageBlur/main')),
   shadowMapping: dynamic(() => import('../../sample/shadowMapping/main')),
   reversedZ: dynamic(() => import('../../sample/reversedZ/main')),
diff --git a/src/sample/videoUploadingWebCodecs/main.ts b/src/sample/videoUploadingWebCodecs/main.ts
new file mode 100644
index 00000000..82746f78
--- /dev/null
+++ b/src/sample/videoUploadingWebCodecs/main.ts
@@ -0,0 +1,176 @@
+import { makeSample, SampleInit } from '../../components/SampleLayout';
+
+import fullscreenTexturedQuadWGSL from '../../shaders/fullscreenTexturedQuad.wgsl';
+import sampleExternalTextureWGSL from '../../shaders/sampleExternalTexture.frag.wgsl';
+
+const init: SampleInit = async ({ canvas, pageState }) => {
+  // Set video element
+  const video = document.createElement('video');
+  video.loop = true;
+  video.autoplay = true;
+  video.muted = true;
+  video.src = new URL(
+    '../../../assets/video/pano.webm',
+    import.meta.url
+  ).toString();
+  await video.play();
+
+  const adapter = await navigator.gpu.requestAdapter();
+  const device = await adapter.requestDevice();
+
+  if (!pageState.active) return;
+
+  const context = canvas.getContext('webgpu') as GPUCanvasContext;
+  const devicePixelRatio = window.devicePixelRatio || 1;
+  canvas.width = canvas.clientWidth * devicePixelRatio;
+  canvas.height = canvas.clientHeight * devicePixelRatio;
+  const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
+
+  context.configure({
+    device,
+    format: presentationFormat,
+    alphaMode: 'premultiplied',
+  });
+
+  const pipeline = device.createRenderPipeline({
+    layout: 'auto',
+    vertex: {
+      module: device.createShaderModule({
+        code: fullscreenTexturedQuadWGSL,
+      }),
+      entryPoint: 'vert_main',
+    },
+    fragment: {
+      module: device.createShaderModule({
+        code: sampleExternalTextureWGSL,
+      }),
+      entryPoint: 'main',
+      targets: [
+        {
+          format: presentationFormat,
+        },
+      ],
+    },
+    primitive: {
+      topology: 'triangle-list',
+    },
+  });
+
+  const sampler = device.createSampler({
+    magFilter: 'linear',
+    minFilter: 'linear',
+  });
+
+  function getVideoFrameFromVideoElement(video) {
+    return new Promise((resolve) => {
+      const videoTrack = video.captureStream().getVideoTracks()[0];
+      const trackProcessor = new MediaStreamTrackProcessor({
+        track: videoTrack,
+      });
+      const transformer = new TransformStream({
+        transform(videoFrame) {
+          videoTrack.stop();
+          resolve(videoFrame);
+        },
+        flush(controller) {
+          controller.terminate();
+        },
+      });
+      const trackGenerator = new MediaStreamTrackGenerator({
+        kind: 'video',
+      });
+      trackProcessor.readable
+        .pipeThrough(transformer)
+        .pipeTo(trackGenerator.writable);
+    });
+  }
+
+  async function frame() {
+    // Sample is no longer the active page.
+    if (!pageState.active) return;
+
+    const videoFrame = await getVideoFrameFromVideoElement(video);
+
+    const uniformBindGroup = device.createBindGroup({
+      layout: pipeline.getBindGroupLayout(0),
+      entries: [
+        {
+          binding: 1,
+          resource: sampler,
+        },
+        {
+          binding: 2,
+          resource: device.importExternalTexture({
+            source: videoFrame as any, // eslint-disable-line @typescript-eslint/no-explicit-any
+          }),
+        },
+      ],
+    });
+
+    const commandEncoder = device.createCommandEncoder();
+    const textureView = context.getCurrentTexture().createView();
+
+    const renderPassDescriptor: GPURenderPassDescriptor = {
+      colorAttachments: [
+        {
+          view: textureView,
+          clearValue: { r: 0.0, g: 0.0, b: 0.0, a: 1.0 },
+          loadOp: 'clear',
+          storeOp: 'store',
+        },
+      ],
+    };
+
+    const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
+    passEncoder.setPipeline(pipeline);
+    passEncoder.setBindGroup(0, uniformBindGroup);
+    passEncoder.draw(6, 1, 0, 0);
+    passEncoder.end();
+    device.queue.submit([commandEncoder.finish()]);
+
+    if ('requestVideoFrameCallback' in video) {
+      video.requestVideoFrameCallback(frame);
+    } else {
+      requestAnimationFrame(frame);
+    }
+  }
+
+  if ('requestVideoFrameCallback' in video) {
+    video.requestVideoFrameCallback(frame);
+  } else {
+    requestAnimationFrame(frame);
+  }
+};
+
+const VideoUploadingWebCodecs: () => JSX.Element = () =>
+  makeSample({
+    name: 'Video Uploading with WebCodecs (Experimental)',
+    description: `This example shows how to upload a WebCodecs VideoFrame to WebGPU.
+    Support for using a VideoFrame as the source for a GPUExternalTexture requires
+    running Chrome with the "WebGPU Developer Features" flag or the WebGPU WebCodecs
+    integration origin trial.
+    See https://developer.chrome.com/origintrials/#/view_trial/1705738358866575361
+    `,
+    originTrial:
+      'Auo9JMDbdn/Jg1pd8liB9Ofp1OLzi9mecxjBBfjv/3f8O8775CXgcTobX4t6KYxMC1wnO4Z7MWArPSptGtkD2woAAABZeyJvcmlnaW4iOiJodHRwczovL3dlYmdwdS5naXRodWIuaW86NDQzIiwiZmVhdHVyZSI6IldlYkdQVVdlYkNvZGVjcyIsImV4cGlyeSI6MTcwMTk5MzU5OX0=',
+    init,
+    sources: [
+      {
+        name: __filename.substring(__dirname.length + 1),
+        contents: __SOURCE__,
+      },
+      {
+        name: '../../shaders/fullscreenTexturedQuad.wgsl',
+        contents: fullscreenTexturedQuadWGSL,
+        editable: true,
+      },
+      {
+        name: '../../shaders/sampleExternalTexture.wgsl',
+        contents: sampleExternalTextureWGSL,
+        editable: true,
+      },
+    ],
+    filename: __filename,
+  });
+
+export default VideoUploadingWebCodecs;
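
Note: MediaStreamTrackProcessor and MediaStreamTrackGenerator are Chromium-only at the time of this patch, and passing the resulting VideoFrame to importExternalTexture additionally depends on the flag or origin trial named in the sample description. A minimal runtime guard a caller might add is sketched below; the helper name supportsWebCodecsCapture is hypothetical and not part of this patch.

// Hypothetical guard (not part of this patch): check that the insertable-streams
// APIs used by getVideoFrameFromVideoElement() exist before taking the WebCodecs
// path. Note this does not detect the WebGPU/WebCodecs origin trial itself; pages
// without it would fall back to the plain videoUploading sample's approach.
function supportsWebCodecsCapture(): boolean {
  return (
    typeof MediaStreamTrackProcessor !== 'undefined' &&
    typeof MediaStreamTrackGenerator !== 'undefined' &&
    typeof VideoFrame !== 'undefined'
  );
}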