<!DOCTYPE html>
<html>
  <head>
    <meta charset="utf-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=yes">
    <title>WebGPU Post Processing - Step 1 - No-op</title>
    <style>
      @import url(resources/webgpu-lesson.css);
      html, body {
        margin: 0;           /* remove the default margin          */
        height: 100%;        /* make the html,body fill the page   */
      }
      canvas {
        display: block;      /* make the canvas act like a block   */
        width: 100%;         /* make the canvas fill its container */
        height: 100%;
      }
    </style>
  </head>
  <body>
    <canvas></canvas>
  </body>
  <script type="module">
import GUI from '../3rdparty/muigui-0.x.module.js';
// see https://webgpufundamentals.org/webgpu/lessons/webgpu-utils.html#webgpu-utils
import {createTextureFromImage} from '../3rdparty/webgpu-utils-1.x.module.js';
// see https://webgpufundamentals.org/webgpu/lessons/webgpu-matrix-math.html
import {mat4} from '../3rdparty/wgpu-matrix.module.js';

import * as dragAndDrop from './resources/js/drag-and-drop.js';
import onPasteImage from './resources/js/on-paste-image.js';

async function main() {
  const adapter = await navigator.gpu?.requestAdapter();
  const device = await adapter?.requestDevice();
  if (!device) {
    fail('need a browser that supports WebGPU');
    return;
  }

  // Get a WebGPU context from the canvas and configure it
  const canvas = document.querySelector('canvas');
  const context = canvas.getContext('webgpu');
  const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
  context.configure({
    device,
    format: presentationFormat,
  });
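
  // Instead of drawing to the canvas directly, the image is first rendered
  // into an intermediate texture (created in setupPostProcess below) which
  // the post-processing pass then samples from.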
  const module = device.createShaderModule({
    code: `
      struct VSOutput {
        @builtin(position) position: vec4f,
        @location(0) texcoord: vec2f,
      };

      struct Uniforms {
        matrix: mat4x4f,
      };

      @group(0) @binding(0) var<uniform> uni: Uniforms;
      @group(0) @binding(1) var tex: texture_2d<f32>;
      @group(0) @binding(2) var smp: sampler;
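
      // two triangles that make a unit quad (0,0 to 1,1). The quad position is
      // reused as the texture coordinate and the matrix places it on screen.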
      @vertex fn vs(@builtin(vertex_index) vNdx: u32) -> VSOutput {
        let positions = array(
          vec2f( 0, 0),
          vec2f( 1, 0),
          vec2f( 0, 1),
          vec2f( 0, 1),
          vec2f( 1, 0),
          vec2f( 1, 1),
        );
        let pos = positions[vNdx];
        return VSOutput(
          uni.matrix * vec4f(pos, 0, 1),
          pos,
        );
      }

      @fragment fn fs(fsInput: VSOutput) -> @location(0) vec4f {
        return textureSample(tex, smp, fsInput.texcoord);
      }
    `,
  });
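
  // note: the target format is 'rgba8unorm' to match the intermediate render
  // target created in setupPostProcess, not the canvas's preferred format.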
  const pipeline = device.createRenderPipeline({
    label: 'textured unit quad',
    layout: 'auto',
    vertex: {
      module,
    },
    fragment: {
      module,
      targets: [{ format: 'rgba8unorm' }],
    },
  });

  const renderPassDescriptor = {
    label: 'our basic canvas renderPass',
    colorAttachments: [
      {
        // view: <- to be filled out when we render
        clearValue: [0.3, 0.3, 0.3, 1],
        loadOp: 'clear',
        storeOp: 'store',
      },
    ],
  };
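
  // one mat4x4f = 16 floats * 4 bytes = 64 bytes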
  const imageUniformBuffer = device.createBuffer({
    size: 4 * 16,
    usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
  });

  let imageTexture = await createTextureFromImage(
      device,
      'resources/images/david-clode-clown-fish.jpg', /* webgpufundamentals: url */
  );

  const imageSampler = device.createSampler({
    minFilter: 'linear',
    magFilter: 'linear',
  });

  let imageBindGroup;
  function updateBindGroup() {
    imageBindGroup = device.createBindGroup({
      layout: pipeline.getBindGroupLayout(0),
      entries: [
        { binding: 0, resource: imageUniformBuffer },
        { binding: 1, resource: imageTexture },
        { binding: 2, resource: imageSampler },
      ],
    });
  }
  updateBindGroup();

  const postProcessModule = device.createShaderModule({
    code: `
      struct VSOutput {
        @builtin(position) position: vec4f,
        @location(0) texcoord: vec2f,
      };

      struct HSL {
        h: f32,
        s: f32,
        l: f32,
      };
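
      // standard RGB <-> HSL conversion helpers. h, s, and l are all in the
      // 0 to 1 range (hue is expressed in turns rather than degrees).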
      fn rgbToHsl(rgb: vec3f) -> HSL {
        let cMin = min(min(rgb.r, rgb.b), rgb.g);
        let cMax = max(max(rgb.r, rgb.b), rgb.g);
        let delta = cMax - cMin;

        let l = (cMax + cMin) / 2.0;
        if (delta == 0.0) {
          return HSL(0, 0, l);
        }

        var h = 0.0;
        if (rgb.r == cMax) {
          h = (rgb.g - rgb.b) / delta;
        } else if (rgb.g == cMax) {
          h = 2.0 + (rgb.b - rgb.r) / delta;
        } else {
          h = 4.0 + (rgb.r - rgb.g) / delta;
        }
        h = h / 6.0;
        let s = delta / (1.0 - abs(2.0 * l - 1.0));
        return HSL(h, s, l);
      }

      fn hslToRgb(hsl: HSL) -> vec3f {
        let c = vec3f(fract(hsl.h), clamp(vec2f(hsl.s, hsl.l), vec2f(0), vec2f(1)));
        let rgb = clamp(abs((c.x * 6.0 + vec3f(0.0, 4.0, 2.0)) % 6.0 - 3.0) - 1.0, vec3f(0), vec3f(1));
        return c.z + c.y * (rgb - 0.5) * (1.0 - abs(2.0 * c.z - 1.0));
      }

      fn adjustBrightness(color: vec3f, brightness: f32) -> vec3f {
        return color + brightness;
      }

      fn adjustContrast(color: vec3f, contrast: f32) -> vec3f {
        let c = contrast + 1.0;
        return clamp(0.5 + c * (color - 0.5), vec3f(0), vec3f(1));
      }

      fn adjustHSL(color: vec3f, adjust: HSL) -> vec3f {
        let hsl = rgbToHsl(color);
        let newHSL = HSL(hsl.h + adjust.h, hsl.s + adjust.s, hsl.l + adjust.l);
        return hslToRgb(newHSL);
      }

      fn luminance(color: vec3f) -> f32 {
        return dot(color, vec3f(0.2126, 0.7152, 0.0722));
      }

      fn applyDuotone(color: vec3f, color1: vec3f, color2: vec3f) -> vec3f {
        let l = luminance(color);
        return mix(color1, color2, l);
      }
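
      // a single triangle that covers clip space (-1,-1 to 3,3). Everything
      // outside -1..+1 gets clipped so this acts like a full-screen quad
      // without needing a second triangle.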
      @vertex fn vs(
        @builtin(vertex_index) vertexIndex : u32,
      ) -> VSOutput {
        var pos = array(
          vec2f(-1.0, -1.0),
          vec2f(-1.0,  3.0),
          vec2f( 3.0, -1.0),
        );

        var vsOutput: VSOutput;
        let xy = pos[vertexIndex];
        vsOutput.position = vec4f(xy, 0.0, 1.0);
        vsOutput.texcoord = xy * vec2f(0.5) + vec2f(0.5);
        return vsOutput;
      }
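
      // @align(16) padding makes this layout line up with the 20 f32 values
      // (80 bytes) written from JavaScript in postProcess below.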
      struct Uniforms {
        brightness: f32,
        contrast: f32,
        @align(16) hsl: HSL,
        @align(16) duotone: f32,
        @align(16) duotoneColor1: vec3f,
        @align(16) duotoneColor2: vec3f,
      };

      @group(0) @binding(0) var postTexture2d: texture_2d<f32>;
      @group(0) @binding(1) var postSampler: sampler;
      @group(0) @binding(2) var<uniform> uni: Uniforms;

      @fragment fn fs2d(fsInput: VSOutput) -> @location(0) vec4f {
        let color = textureSample(postTexture2d, postSampler, fsInput.texcoord);
        var rgb = color.rgb;
        rgb = adjustHSL(rgb, uni.hsl);
        rgb = adjustBrightness(rgb, uni.brightness);
        rgb = adjustContrast(rgb, uni.contrast);
        rgb = mix(rgb, applyDuotone(rgb, uni.duotoneColor1, uni.duotoneColor2), uni.duotone);
        return vec4f(rgb, color.a);
      }
    `,
  });

  const postProcessPipeline = device.createRenderPipeline({
    layout: 'auto',
    vertex: { module: postProcessModule },
    fragment: {
      module: postProcessModule,
      targets: [{ format: presentationFormat }],
    },
  });

  const postProcessSampler = device.createSampler({
    minFilter: 'linear',
    magFilter: 'linear',
  });

  const postProcessRenderPassDescriptor = {
    label: 'post process render pass',
    colorAttachments: [
      { loadOp: 'clear', storeOp: 'store' },
    ],
  };
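
  // 80 bytes = 20 f32s: brightness, contrast, pad, pad,
  // hue, saturation, lightness, pad, duotone, pad, pad, pad,
  // duotoneColor1, pad, duotoneColor2, pad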
  const postProcessUniformBuffer = device.createBuffer({
    size: 80,
    usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
  });

  let renderTarget;
  let postProcessBindGroup;
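
  // (re)create the intermediate render target and post-process bind group
  // whenever the canvas texture changes size so they always match the canvas.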
  function setupPostProcess(canvasTexture) {
    if (renderTarget?.width === canvasTexture.width &&
        renderTarget?.height === canvasTexture.height) {
      return;
    }

    renderTarget?.destroy();
    renderTarget = device.createTexture({
      size: canvasTexture,
      format: 'rgba8unorm',
      usage: GPUTextureUsage.RENDER_ATTACHMENT | GPUTextureUsage.TEXTURE_BINDING,
    });
    const renderTargetView = renderTarget.createView();
    renderPassDescriptor.colorAttachments[0].view = renderTargetView;

    postProcessBindGroup = device.createBindGroup({
      layout: postProcessPipeline.getBindGroupLayout(0),
      entries: [
        { binding: 0, resource: renderTargetView },
        { binding: 1, resource: postProcessSampler },
        { binding: 2, resource: postProcessUniformBuffer },
      ],
    });
  }

  function postProcess(encoder, srcTexture, dstTexture) {
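    // srcTexture is what postProcessBindGroup already references so it isn't
    // used directly here. The zeros below are padding to match the @align(16)
    // fields of the WGSL Uniforms struct.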
    device.queue.writeBuffer(
        postProcessUniformBuffer,
        0,
        new Float32Array([
          settings.brightness,
          settings.contrast,
          0,
          0,
          settings.hue,
          settings.saturation,
          settings.lightness,
          0,
          settings.duotone,
          0,
          0,
          0,
          ...settings.duotoneColor1, 0,
          ...settings.duotoneColor2, 0,
        ]),
    );

    postProcessRenderPassDescriptor.colorAttachments[0].view = dstTexture.createView();
    const pass = encoder.beginRenderPass(postProcessRenderPassDescriptor);
    pass.setPipeline(postProcessPipeline);
    pass.setBindGroup(0, postProcessBindGroup);
    pass.draw(3);
    pass.end();
  }

  const settings = {
    brightness: 0,
    contrast: 0,
    hue: 0,
    saturation: 0,
    lightness: 0,
    duotone: 0,
    duotoneColor1: new Float32Array([0.1, 0, 0.5]),
    duotoneColor2: new Float32Array([1, 0.69, 0.4]),
  };

  const gui = new GUI();
  gui.onChange(render);
  gui.add(settings, 'brightness', -1, 1);
  gui.add(settings, 'contrast', -1, 10);
  gui.add(settings, 'hue', -0.5, 0.5);
  gui.add(settings, 'saturation', -1, 1);
  gui.add(settings, 'lightness', -1, 1);
  gui.add(settings, 'duotone', 0, 1);
  gui.addColor(settings, 'duotoneColor1');
  gui.addColor(settings, 'duotoneColor2');

  function render() {
    const canvasTexture = context.getCurrentTexture();
    setupPostProcess(canvasTexture);

    // css 'cover'
    const canvasAspect = canvas.clientWidth / canvas.clientHeight;
    const imageAspect = imageTexture.width / imageTexture.height;
    const aspect = canvasAspect / imageAspect;
    const aspectScale = aspect > 1 ? [1, aspect, 1] : [1 / aspect, 1, 1];
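
    // offset the 0..1 unit quad by -0.5 and scale by 2 so it covers -1 to +1
    // clip space, then apply the cover scale computed above.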
    const matrix = mat4.identity();
    mat4.scale(matrix, aspectScale, matrix);
    mat4.scale(matrix, [2, 2, 1], matrix);
    mat4.translate(matrix, [-0.5, -0.5, 1], matrix);

    // upload the matrix to the image uniform buffer
    device.queue.writeBuffer(imageUniformBuffer, 0, matrix);
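
    // draw the image into the intermediate render target, then post process
    // that texture into the canvas texture.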
    const encoder = device.createCommandEncoder();
    const pass = encoder.beginRenderPass(renderPassDescriptor);
    pass.setPipeline(pipeline);
    pass.setBindGroup(0, imageBindGroup);
    pass.draw(6);
    pass.end();

    postProcess(encoder, renderTarget, canvasTexture);

    const commandBuffer = encoder.finish();
    device.queue.submit([commandBuffer]);
  }
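
  // keep the canvas's drawing buffer size in sync with its displayed size
  // (clamped to the device limit) and re-render whenever it changes.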
  const observer = new ResizeObserver(entries => {
    for (const entry of entries) {
      const canvas = entry.target;
      const width = entry.contentBoxSize[0].inlineSize;
      const height = entry.contentBoxSize[0].blockSize;
      canvas.width = Math.max(1, Math.min(width, device.limits.maxTextureDimension2D));
      canvas.height = Math.max(1, Math.min(height, device.limits.maxTextureDimension2D));
    }
    render();
  });
  observer.observe(canvas);

  async function readImageFile(file) {
    const newImageTexture = await createTextureFromImage(device, URL.createObjectURL(file));
    imageTexture.destroy();
    imageTexture = newImageTexture;
    updateBindGroup();
    render();
  }

  dragAndDrop.setup({msg: 'Drop Image File here'});
  dragAndDrop.onDropFile(readImageFile);

  onPasteImage(readImageFile);
}

function fail(msg) {
  alert(msg);
}

main();
  </script>
</html>