Add WebGPU renderer (#648)

This commit is contained in:
redphx
2025-02-02 21:37:21 +07:00
parent 39ecef976c
commit fd665b6fcd
31 changed files with 1428 additions and 841 deletions

View File

@@ -0,0 +1,119 @@
import { BxLogger } from "@/utils/bx-logger";
import { BaseStreamPlayer, StreamPlayerElement, StreamPlayerFilter } from "./base-stream-player";
import { StreamVideoProcessing, type StreamPlayerType } from "@/enums/pref-values";
export abstract class BaseCanvasPlayer extends BaseStreamPlayer {
protected $canvas: HTMLCanvasElement;
protected targetFps = 60;
protected frameInterval = 0;
protected lastFrameTime = 0;
protected animFrameId: number | null = null;
protected frameCallback: any;
private boundDrawFrame: () => void;
constructor(playerType: StreamPlayerType, $video: HTMLVideoElement, logTag: string) {
super(playerType, StreamPlayerElement.CANVAS, $video, logTag);
const $canvas = document.createElement('canvas');
$canvas.width = $video.videoWidth;
$canvas.height = $video.videoHeight;
this.$canvas = $canvas;
$video.insertAdjacentElement('afterend', this.$canvas);
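// Prefer requestVideoFrameCallback() so frames are drawn once per decoded
// video frame; fall back to requestAnimationFrame() where it isn't supported.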
let frameCallback: any;
if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
const $video = this.$video;
frameCallback = $video.requestVideoFrameCallback.bind($video);
} else {
frameCallback = requestAnimationFrame;
}
this.frameCallback = frameCallback;
this.boundDrawFrame = this.drawFrame.bind(this);
}
async init(): Promise<void> {
super.init();
await this.setupShaders();
this.setupRendering();
}
setTargetFps(target: number) {
this.targetFps = target;
this.lastFrameTime = 0;
this.frameInterval = target ? Math.floor(1000 / target) : 0;
}
getCanvas() {
return this.$canvas;
}
destroy() {
BxLogger.info(this.logTag, 'Destroy');
this.isStopped = true;
if (this.animFrameId) {
if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
this.$video.cancelVideoFrameCallback(this.animFrameId);
} else {
cancelAnimationFrame(this.animFrameId);
}
this.animFrameId = null;
}
if (this.$canvas.isConnected) {
this.$canvas.remove();
}
this.$canvas.width = 1;
this.$canvas.height = 1;
}
toFilterId(processing: StreamVideoProcessing) {
return processing === StreamVideoProcessing.CAS ? StreamPlayerFilter.CAS : StreamPlayerFilter.USM;
}
protected shouldDraw() {
if (this.targetFps >= 60) {
// Always draw
return true;
} else if (this.targetFps === 0) {
// Don't draw when FPS is 0
return false;
}
const currentTime = performance.now();
const timeSinceLastFrame = currentTime - this.lastFrameTime;
if (timeSinceLastFrame < this.frameInterval) {
// Skip frame to limit FPS
return false;
}
this.lastFrameTime = currentTime;
return true;
}
private drawFrame() {
if (this.isStopped) {
return;
}
this.animFrameId = this.frameCallback(this.boundDrawFrame);
if (!this.shouldDraw()) {
return;
}
this.updateFrame();
}
protected setupRendering(): void {
this.animFrameId = this.frameCallback(this.boundDrawFrame);
}
protected abstract setupShaders(): void;
abstract updateFrame(): void;
}
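
For reference, the FPS throttling in setTargetFps()/shouldDraw() boils down to skipping frames that arrive sooner than 1000 / targetFps milliseconds after the last drawn one. A minimal standalone TypeScript sketch of the same pattern (names are illustrative, not part of this codebase):

// Same throttle math as shouldDraw(): draw everything at >= 60 FPS, draw
// nothing at 0, otherwise skip frames closer together than the interval.
function createFrameLimiter(targetFps: number) {
    const frameInterval = targetFps ? Math.floor(1000 / targetFps) : 0;
    let lastFrameTime = 0;

    return (now: number): boolean => {
        if (targetFps >= 60) return true;
        if (targetFps === 0) return false;
        if (now - lastFrameTime < frameInterval) return false;
        lastFrameTime = now;
        return true;
    };
}

// Example: at 30 FPS the interval is Math.floor(1000 / 30) = 33 ms,
// so frames arriving sooner than that are skipped.
const shouldDraw30 = createFrameLimiter(30);
shouldDraw30(performance.now());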

View File

@@ -0,0 +1,48 @@
import { StreamVideoProcessing, type StreamPlayerType } from "@/enums/pref-values";
import type { StreamPlayerOptions } from "@/types/stream";
import { BxLogger } from "@/utils/bx-logger";
export const enum StreamPlayerElement {
VIDEO = 'video',
CANVAS = 'canvas',
}
export const enum StreamPlayerFilter {
USM = 1,
CAS = 2,
}
export abstract class BaseStreamPlayer {
protected logTag: string;
protected playerType: StreamPlayerType;
protected elementType: StreamPlayerElement;
protected $video: HTMLVideoElement;
protected options: StreamPlayerOptions = {
processing: StreamVideoProcessing.USM,
sharpness: 0,
brightness: 1.0,
contrast: 1.0,
saturation: 1.0,
};
protected isStopped = false;
constructor(playerType: StreamPlayerType, elementType: StreamPlayerElement, $video: HTMLVideoElement, logTag: string) {
this.playerType = playerType;
this.elementType = elementType;
this.$video = $video;
this.logTag = logTag;
}
init() {
BxLogger.info(this.logTag, 'Initialize');
}
updateOptions(newOptions: Partial<StreamPlayerOptions>, refresh=false) {
this.options = Object.assign(this.options, newOptions);
refresh && this.refreshPlayer();
}
abstract refreshPlayer(): void;
}
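
A hedged usage sketch of updateOptions() (values are illustrative): only the keys passed in are overwritten by the Object.assign() merge, and passing refresh = true triggers refreshPlayer() so the new values are applied immediately.

// 'player' stands for any BaseStreamPlayer subclass instance.
declare const player: {
    updateOptions(newOptions: object, refresh?: boolean): void;
};

// Unlisted keys (processing, brightness, contrast, ...) keep their current
// values; refresh = true makes the player re-apply its filters right away.
player.updateOptions({ sharpness: 4, saturation: 110 }, true);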

View File

@@ -0,0 +1,102 @@
import { CE } from "@/utils/html";
import { BaseStreamPlayer, StreamPlayerElement } from "../base-stream-player";
import { StreamPlayerType, StreamVideoProcessing } from "@/enums/pref-values";
import { GlobalPref } from "@/enums/pref-keys";
import { getGlobalPref } from "@/utils/pref-utils";
import { ScreenshotManager } from "@/utils/screenshot-manager";
export class VideoPlayer extends BaseStreamPlayer {
private $videoCss!: HTMLStyleElement;
private $usmMatrix!: SVGFEConvolveMatrixElement;
constructor($video: HTMLVideoElement, logTag: string) {
super(StreamPlayerType.VIDEO, StreamPlayerElement.VIDEO, $video, logTag);
}
init(): void {
super.init();
// Setup SVG filters
const xmlns = 'http://www.w3.org/2000/svg';
const $svg = CE('svg', {
id: 'bx-video-filters',
class: 'bx-gone',
xmlns,
},
CE('defs', { xmlns: 'http://www.w3.org/2000/svg' },
CE('filter', {
id: 'bx-filter-usm',
xmlns,
}, this.$usmMatrix = CE('feConvolveMatrix', {
id: 'bx-filter-usm-matrix',
order: '3',
xmlns,
}) as unknown as SVGFEConvolveMatrixElement),
),
);
this.$videoCss = CE('style', { id: 'bx-video-css' });
const $fragment = document.createDocumentFragment();
$fragment.append(this.$videoCss, $svg);
document.documentElement.appendChild($fragment);
}
protected setupRendering(): void {}
forceDrawFrame(): void {}
updateCanvas(): void {}
refreshPlayer() {
let filters = this.getVideoPlayerFilterStyle();
let videoCss = '';
if (filters) {
videoCss += `filter: ${filters} !important;`;
}
// Apply video filters to screenshots
if (getGlobalPref(GlobalPref.SCREENSHOT_APPLY_FILTERS)) {
ScreenshotManager.getInstance().updateCanvasFilters(filters);
}
let css = '';
if (videoCss) {
css = `#game-stream video { ${videoCss} }`;
}
this.$videoCss.textContent = css;
}
clearFilters() {
this.$videoCss.textContent = '';
}
private getVideoPlayerFilterStyle() {
const filters = [];
const sharpness = this.options.sharpness || 0;
if (this.options.processing === StreamVideoProcessing.USM && sharpness != 0) {
const level = (7 - ((sharpness / 2) - 1) * 0.5).toFixed(1); // 5, 5.5, 6, 6.5, 7
const matrix = `0 -1 0 -1 ${level} -1 0 -1 0`;
this.$usmMatrix?.setAttributeNS(null, 'kernelMatrix', matrix);
filters.push(`url(#bx-filter-usm)`);
}
const saturation = this.options.saturation || 100;
if (saturation != 100) {
filters.push(`saturate(${saturation}%)`);
}
const contrast = this.options.contrast || 100;
if (contrast != 100) {
filters.push(`contrast(${contrast}%)`);
}
const brightness = this.options.brightness || 100;
if (brightness != 100) {
filters.push(`brightness(${brightness}%)`);
}
return filters.join(' ');
}
}
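
Worked example of the feConvolveMatrix kernel built in getVideoPlayerFilterStyle(): the 3x3 unsharp-mask kernel keeps its four -1 cross neighbours and four 0 corners fixed and only varies the center weight, which drops from 7 to 5 as sharpness goes from 2 to 10. A standalone sketch of the same arithmetic (not project code):

// Same formula as getVideoPlayerFilterStyle(); 'level' is the center weight.
function usmKernel(sharpness: number): string {
    const level = (7 - ((sharpness / 2) - 1) * 0.5).toFixed(1);
    return `0 -1 0 -1 ${level} -1 0 -1 0`;
}

usmKernel(2);  // "0 -1 0 -1 7.0 -1 0 -1 0"  (mildest sharpening)
usmKernel(10); // "0 -1 0 -1 5.0 -1 0 -1 0"  (strongest sharpening)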

View File

@@ -1,268 +0,0 @@
import vertClarityBoost from "./shaders/clarity_boost.vert" with { type: "text" };
import fsClarityBoost from "./shaders/clarity_boost.fs" with { type: "text" };
import { BxLogger } from "@/utils/bx-logger";
import { StreamPref } from "@/enums/pref-keys";
import { getStreamPref } from "@/utils/pref-utils";
export class WebGL2Player {
private readonly LOG_TAG = 'WebGL2Player';
private $video: HTMLVideoElement;
private $canvas: HTMLCanvasElement;
private gl: WebGL2RenderingContext | null = null;
private resources: Array<any> = [];
private program: WebGLProgram | null = null;
private stopped: boolean = false;
private options = {
filterId: 1,
sharpenFactor: 0,
brightness: 0.0,
contrast: 0.0,
saturation: 0.0,
};
private targetFps = 60;
private frameInterval = 0;
private lastFrameTime = 0;
private animFrameId: number | null = null;
constructor($video: HTMLVideoElement) {
BxLogger.info(this.LOG_TAG, 'Initialize');
this.$video = $video;
const $canvas = document.createElement('canvas');
$canvas.width = $video.videoWidth;
$canvas.height = $video.videoHeight;
this.$canvas = $canvas;
this.setupShaders();
this.setupRendering();
$video.insertAdjacentElement('afterend', $canvas);
}
setFilter(filterId: number, update = true) {
this.options.filterId = filterId;
update && this.updateCanvas();
}
setSharpness(sharpness: number, update = true) {
this.options.sharpenFactor = sharpness;
update && this.updateCanvas();
}
setBrightness(brightness: number, update = true) {
this.options.brightness = 1 + (brightness - 100) / 100;
update && this.updateCanvas();
}
setContrast(contrast: number, update = true) {
this.options.contrast = 1 + (contrast - 100) / 100;
update && this.updateCanvas();
}
setSaturation(saturation: number, update = true) {
this.options.saturation = 1 + (saturation - 100) / 100;
update && this.updateCanvas();
}
setTargetFps(target: number) {
this.targetFps = target;
this.lastFrameTime = 0;
this.frameInterval = target ? Math.floor(1000 / target) : 0;
}
getCanvas() {
return this.$canvas;
}
updateCanvas() {
const gl = this.gl!;
const program = this.program!;
gl.uniform2f(gl.getUniformLocation(program, 'iResolution'), this.$canvas.width, this.$canvas.height);
gl.uniform1i(gl.getUniformLocation(program, 'filterId'), this.options.filterId);
gl.uniform1f(gl.getUniformLocation(program, 'sharpenFactor'), this.options.sharpenFactor);
gl.uniform1f(gl.getUniformLocation(program, 'brightness'), this.options.brightness);
gl.uniform1f(gl.getUniformLocation(program, 'contrast'), this.options.contrast);
gl.uniform1f(gl.getUniformLocation(program, 'saturation'), this.options.saturation);
}
forceDrawFrame() {
const gl = this.gl!;
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, this.$video);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
private setupRendering() {
let frameCallback: any;
if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
const $video = this.$video;
frameCallback = $video.requestVideoFrameCallback.bind($video);
} else {
frameCallback = requestAnimationFrame;
}
let animate = () => {
if (this.stopped) {
return;
}
this.animFrameId = frameCallback(animate);
let draw = true;
// Don't draw when FPS is 0
if (this.targetFps === 0) {
draw = false;
} else if (this.targetFps < 60) {
// Limit FPS
const currentTime = performance.now();
const timeSinceLastFrame = currentTime - this.lastFrameTime;
if (timeSinceLastFrame < this.frameInterval) {
draw = false;
} else {
this.lastFrameTime = currentTime;
}
}
if (draw) {
const gl = this.gl!;
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, this.$video);
gl.drawArrays(gl.TRIANGLES, 0, 6);
}
}
this.animFrameId = frameCallback(animate);
}
private setupShaders() {
BxLogger.info(this.LOG_TAG, 'Setting up', getStreamPref(StreamPref.VIDEO_POWER_PREFERENCE));
const gl = this.$canvas.getContext('webgl2', {
isBx: true,
antialias: true,
alpha: false,
powerPreference: getStreamPref(StreamPref.VIDEO_POWER_PREFERENCE),
}) as WebGL2RenderingContext;
this.gl = gl;
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferWidth);
// Vertex shader: Identity map
const vShader = gl.createShader(gl.VERTEX_SHADER)!;
gl.shaderSource(vShader, vertClarityBoost);
gl.compileShader(vShader);
const fShader = gl.createShader(gl.FRAGMENT_SHADER)!;
gl.shaderSource(fShader, fsClarityBoost);
gl.compileShader(fShader);
// Create and link program
const program = gl.createProgram()!;
this.program = program;
gl.attachShader(program, vShader);
gl.attachShader(program, fShader);
gl.linkProgram(program);
gl.useProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
console.error(`Link failed: ${gl.getProgramInfoLog(program)}`);
console.error(`vs info-log: ${gl.getShaderInfoLog(vShader)}`);
console.error(`fs info-log: ${gl.getShaderInfoLog(fShader)}`);
}
this.updateCanvas();
// Vertices: A screen-filling quad made from two triangles
const buffer = gl.createBuffer();
this.resources.push(buffer);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, 1, -1, -1, 1, -1, 1, 1, -1, 1, 1]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
// Texture to contain the video data
const texture = gl.createTexture();
this.resources.push(texture);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
// Bind texture to the "data" argument to the fragment shader
gl.uniform1i(gl.getUniformLocation(program, 'data'), 0);
gl.activeTexture(gl.TEXTURE0);
// gl.bindTexture(gl.TEXTURE_2D, texture);
}
resume() {
this.stop();
this.stopped = false;
BxLogger.info(this.LOG_TAG, 'Resume');
this.$canvas.classList.remove('bx-gone');
this.setupRendering();
}
stop() {
BxLogger.info(this.LOG_TAG, 'Stop');
this.$canvas.classList.add('bx-gone');
this.stopped = true;
if (this.animFrameId) {
if ('requestVideoFrameCallback' in HTMLVideoElement.prototype) {
this.$video.cancelVideoFrameCallback(this.animFrameId);
} else {
cancelAnimationFrame(this.animFrameId);
}
this.animFrameId = null;
}
}
destroy() {
BxLogger.info(this.LOG_TAG, 'Destroy');
this.stop();
const gl = this.gl;
if (gl) {
gl.getExtension('WEBGL_lose_context')?.loseContext();
gl.useProgram(null);
for (const resource of this.resources) {
if (resource instanceof WebGLProgram) {
gl.deleteProgram(resource);
} else if (resource instanceof WebGLShader) {
gl.deleteShader(resource);
} else if (resource instanceof WebGLTexture) {
gl.deleteTexture(resource);
} else if (resource instanceof WebGLBuffer) {
gl.deleteBuffer(resource);
}
}
this.gl = null;
}
if (this.$canvas.isConnected) {
this.$canvas.parentElement?.removeChild(this.$canvas);
}
this.$canvas.width = 1;
this.$canvas.height = 1;
}
}

View File

@@ -10,8 +10,8 @@ const int FILTER_UNSHARP_MASKING = 1;
// contrast = 0.8
const float CAS_CONTRAST_PEAK = 0.8 * -3.0 + 8.0;
// Luminosity factor
const vec3 LUMINOSITY_FACTOR = vec3(0.2126, 0.7152, 0.0722);
// Luminosity factor: https://www.w3.org/TR/AERT/#color-contrast
const vec3 LUMINOSITY_FACTOR = vec3(0.299, 0.587, 0.114);
uniform int filterId;
uniform float sharpenFactor;
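
The replaced vector (0.2126, 0.7152, 0.0722) is the Rec. 709 luma weighting; the new value (0.299, 0.587, 0.114) is the Rec. 601 weighting used by the linked W3C formula. A standalone sketch of the grayscale value it feeds into the saturation mix (illustrative, not project code):

// dot(color, LUMINOSITY_FACTOR) with the new Rec. 601 weights.
const LUMA = [0.299, 0.587, 0.114] as const;
const luminance = (r: number, g: number, b: number) =>
    r * LUMA[0] + g * LUMA[1] + b * LUMA[2];

luminance(1, 0, 0); // 0.299 - pure red reads darker than pure green (0.587)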

View File

@@ -0,0 +1,141 @@
import vertClarityBoost from "./shaders/clarity-boost.vert" with { type: "text" };
import fsClarityBoost from "./shaders/clarity-boost.fs" with { type: "text" };
import { StreamPref } from "@/enums/pref-keys";
import { getStreamPref } from "@/utils/pref-utils";
import { BaseCanvasPlayer } from "../base-canvas-player";
import { StreamPlayerType } from "@/enums/pref-values";
export class WebGL2Player extends BaseCanvasPlayer {
private gl: WebGL2RenderingContext | null = null;
private resources: Array<WebGLBuffer | WebGLTexture | WebGLProgram | WebGLShader> = [];
private program: WebGLProgram | null = null;
constructor($video: HTMLVideoElement) {
super(StreamPlayerType.WEBGL2, $video, 'WebGL2Player');
}
private updateCanvas() {
console.log('updateCanvas', this.options);
const gl = this.gl!;
const program = this.program!;
const filterId = this.toFilterId(this.options.processing);
gl.uniform2f(gl.getUniformLocation(program, 'iResolution'), this.$canvas.width, this.$canvas.height);
gl.uniform1i(gl.getUniformLocation(program, 'filterId'), filterId);
gl.uniform1f(gl.getUniformLocation(program, 'sharpenFactor'), this.options.sharpness);
gl.uniform1f(gl.getUniformLocation(program, 'brightness'), this.options.brightness / 100);
gl.uniform1f(gl.getUniformLocation(program, 'contrast'), this.options.contrast / 100);
gl.uniform1f(gl.getUniformLocation(program, 'saturation'), this.options.saturation / 100);
}
updateFrame() {
const gl = this.gl!;
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGB, gl.RGB, gl.UNSIGNED_BYTE, this.$video);
gl.drawArrays(gl.TRIANGLES, 0, 3);
}
protected async setupShaders(): Promise<void> {
const gl = this.$canvas.getContext('webgl2', {
isBx: true,
antialias: true,
alpha: false,
depth: false,
preserveDrawingBuffer: false,
stencil: false,
powerPreference: getStreamPref(StreamPref.VIDEO_POWER_PREFERENCE),
} as WebGLContextAttributes) as WebGL2RenderingContext;
this.gl = gl;
gl.viewport(0, 0, gl.drawingBufferWidth, gl.drawingBufferHeight);
// Vertex shader: Identity map
const vShader = gl.createShader(gl.VERTEX_SHADER)!;
gl.shaderSource(vShader, vertClarityBoost);
gl.compileShader(vShader);
const fShader = gl.createShader(gl.FRAGMENT_SHADER)!;
gl.shaderSource(fShader, fsClarityBoost);
gl.compileShader(fShader);
// Create and link program
const program = gl.createProgram()!;
this.program = program;
gl.attachShader(program, vShader);
gl.attachShader(program, fShader);
gl.linkProgram(program);
gl.useProgram(program);
if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
console.error(`Link failed: ${gl.getProgramInfoLog(program)}`);
console.error(`vs info-log: ${gl.getShaderInfoLog(vShader)}`);
console.error(`fs info-log: ${gl.getShaderInfoLog(fShader)}`);
}
this.updateCanvas();
// Vertices: A screen-filling quad made from two triangles
const buffer = gl.createBuffer();
this.resources.push(buffer);
gl.bindBuffer(gl.ARRAY_BUFFER, buffer);
gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([
-1.0, -1.0, // Bottom-left
3.0, -1.0, // Bottom-right
-1.0, 3.0, // Top-left
]), gl.STATIC_DRAW);
gl.enableVertexAttribArray(0);
gl.vertexAttribPointer(0, 2, gl.FLOAT, false, 0, 0);
// Texture to contain the video data
const texture = gl.createTexture();
this.resources.push(texture);
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.pixelStorei(gl.UNPACK_FLIP_Y_WEBGL, true);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
// Bind texture to the "data" argument to the fragment shader
gl.uniform1i(gl.getUniformLocation(program, 'data'), 0);
gl.activeTexture(gl.TEXTURE0);
// gl.bindTexture(gl.TEXTURE_2D, texture);
}
destroy() {
super.destroy();
const gl = this.gl;
if (!gl) {
return;
}
gl.getExtension('WEBGL_lose_context')?.loseContext();
gl.useProgram(null);
for (const resource of this.resources) {
if (resource instanceof WebGLProgram) {
gl.deleteProgram(resource);
} else if (resource instanceof WebGLShader) {
gl.deleteShader(resource);
} else if (resource instanceof WebGLTexture) {
gl.deleteTexture(resource);
} else if (resource instanceof WebGLBuffer) {
gl.deleteBuffer(resource);
}
}
this.gl = null;
}
refreshPlayer(): void {
this.updateCanvas();
}
}
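
Compared with the removed implementation, the screen-filling quad of two triangles (six vertices) is replaced by a single oversized triangle, (-1,-1), (3,-1), (-1,3), which covers the whole clip-space square, so drawArrays() only needs three vertices. A standalone check of that coverage (illustrative, not project code):

// Barycentric point-in-triangle test against (-1,-1), (3,-1), (-1,3).
function insideBigTriangle(px: number, py: number): boolean {
    const [ax, ay, bx, by, cx, cy] = [-1, -1, 3, -1, -1, 3];
    const d = (by - cy) * (ax - cx) + (cx - bx) * (ay - cy);
    const u = ((by - cy) * (px - cx) + (cx - bx) * (py - cy)) / d;
    const v = ((cy - ay) * (px - cx) + (ax - cx) * (py - cy)) / d;
    return u >= 0 && v >= 0 && u + v <= 1;
}

// All four corners of clip space [-1, 1]^2 are covered, so one triangle
// fills the screen just like the old two-triangle quad.
[[-1, -1], [1, -1], [-1, 1], [1, 1]].every(([x, y]) => insideBigTriangle(x, y)); // true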

View File

@@ -0,0 +1,93 @@
struct Params {
filterId: f32,
sharpness: f32,
brightness: f32,
contrast: f32,
saturation: f32,
};
struct VertexOutput {
@builtin(position) position: vec4<f32>,
@location(0) uv: vec2<f32>,
};
@group(0) @binding(0) var ourSampler: sampler;
@group(0) @binding(1) var ourTexture: texture_external;
@group(0) @binding(2) var<uniform> ourParams: Params;
const FILTER_UNSHARP_MASKING: f32 = 1.0;
const CAS_CONTRAST_PEAK: f32 = 0.8 * -3.0 + 8.0;
// Luminosity factor: https://www.w3.org/TR/AERT/#color-contrast
const LUMINOSITY_FACTOR = vec3(0.299, 0.587, 0.114);
@vertex
fn vsMain(@location(0) pos: vec2<f32>) -> VertexOutput {
var out: VertexOutput;
out.position = vec4(pos, 0.0, 1.0);
// Flip the Y-coordinate of UVs
out.uv = (vec2(pos.x, 1.0 - (pos.y + 1.0)) + vec2(1.0, 1.0)) * 0.5;
return out;
}
fn clarityBoost(coord: vec2<f32>, texSize: vec2<f32>, e: vec3<f32>) -> vec3<f32> {
let texelSize = 1.0 / texSize;
// Load 3x3 neighborhood samples
let a = textureSampleBaseClampToEdge(ourTexture, ourSampler, coord + texelSize * vec2(-1.0, 1.0)).rgb;
let b = textureSampleBaseClampToEdge(ourTexture, ourSampler, coord + texelSize * vec2( 0.0, 1.0)).rgb;
let c = textureSampleBaseClampToEdge(ourTexture, ourSampler, coord + texelSize * vec2( 1.0, 1.0)).rgb;
let d = textureSampleBaseClampToEdge(ourTexture, ourSampler, coord + texelSize * vec2(-1.0, 0.0)).rgb;
let f = textureSampleBaseClampToEdge(ourTexture, ourSampler, coord + texelSize * vec2( 1.0, 0.0)).rgb;
let g = textureSampleBaseClampToEdge(ourTexture, ourSampler, coord + texelSize * vec2(-1.0, -1.0)).rgb;
let h = textureSampleBaseClampToEdge(ourTexture, ourSampler, coord + texelSize * vec2( 0.0, -1.0)).rgb;
let i = textureSampleBaseClampToEdge(ourTexture, ourSampler, coord + texelSize * vec2( 1.0, -1.0)).rgb;
// Unsharp Masking (USM)
if ourParams.filterId == FILTER_UNSHARP_MASKING {
let gaussianBlur = (a + c + g + i) * 1.0 + (b + d + f + h) * 2.0 + e * 4.0;
let blurred = gaussianBlur / 16.0;
return e + (e - blurred) * (ourParams.sharpness / 3.0);
}
// Contrast Adaptive Sharpening (CAS)
let minRgb = min(min(min(d, e), min(f, b)), h) + min(min(a, c), min(g, i));
let maxRgb = max(max(max(d, e), max(f, b)), h) + max(max(a, c), max(g, i));
let reciprocalMaxRgb = 1.0 / maxRgb;
var amplifyRgb = clamp(min(minRgb, 2.0 - maxRgb) * reciprocalMaxRgb, vec3(0.0), vec3(1.0));
amplifyRgb = 1.0 / sqrt(amplifyRgb);
let weightRgb = -(1.0 / (amplifyRgb * CAS_CONTRAST_PEAK));
let reciprocalWeightRgb = 1.0 / (4.0 * weightRgb + 1.0);
let window = b + d + f + h;
let outColor = clamp((window * weightRgb + e) * reciprocalWeightRgb, vec3(0.0), vec3(1.0));
return mix(e, outColor, ourParams.sharpness / 2.0);
}
@fragment
fn fsMain(input: VertexOutput) -> @location(0) vec4<f32> {
let texSize = vec2<f32>(textureDimensions(ourTexture));
let center = textureSampleBaseClampToEdge(ourTexture, ourSampler, input.uv);
var adjustedRgb = clarityBoost(input.uv, texSize, center.rgb);
// Compute grayscale intensity
let gray = dot(adjustedRgb, LUMINOSITY_FACTOR);
// Interpolate between grayscale and color
adjustedRgb = mix(vec3(gray), adjustedRgb, ourParams.saturation);
// Adjust contrast
adjustedRgb = (adjustedRgb - 0.5) * ourParams.contrast + 0.5;
// Adjust brightness
adjustedRgb *= ourParams.brightness;
return vec4(adjustedRgb, 1.0);
}
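
For reference, CAS_CONTRAST_PEAK evaluates to 0.8 * -3.0 + 8.0 = 5.6, and vsMain() converts clip-space positions into texture UVs with the Y axis flipped so the video is sampled right side up. A standalone sketch of that UV mapping (illustrative, not project code):

// Same arithmetic as vsMain(): uv = (vec2(x, 1 - (y + 1)) + vec2(1, 1)) * 0.5,
// which simplifies to uv.x = (x + 1) / 2 and uv.y = (1 - y) / 2.
const clipToUv = (x: number, y: number): [number, number] =>
    [(x + 1) * 0.5, (1 - y) * 0.5];

clipToUv(-1,  1); // [0, 0] - clip-space top-left samples the top of the frame
clipToUv( 1, -1); // [1, 1] - clip-space bottom-right samples the bottom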

View File

@@ -0,0 +1,186 @@
import wgslClarityBoost from "./shaders/clarity-boost.wgsl" with { type: "text" };
import { BaseCanvasPlayer } from "../base-canvas-player";
import { StreamPlayerType } from "@/enums/pref-values";
import { BxEventBus } from "@/utils/bx-event-bus";
export class WebGPUPlayer extends BaseCanvasPlayer {
static device: GPUDevice;
context!: GPUCanvasContext | null;
pipeline!: GPURenderPipeline | null;
sampler!: GPUSampler | null;
bindGroup!: GPUBindGroup | null;
optionsUpdated: boolean = false;
paramsBuffer!: GPUBuffer | null;
vertexBuffer!: GPUBuffer | null;
static async prepare(): Promise<void> {
if (!navigator.gpu) {
BxEventBus.Script.emit('webgpu.ready', {});
return;
}
try {
const adapter = await navigator.gpu.requestAdapter();
if (adapter) {
WebGPUPlayer.device = await adapter.requestDevice();
WebGPUPlayer.device?.addEventListener('uncapturederror', e => {
console.error((e as GPUUncapturedErrorEvent).error.message);
});
}
} catch (ex) {
alert(ex);
}
BxEventBus.Script.emit('webgpu.ready', {});
}
constructor($video: HTMLVideoElement) {
super(StreamPlayerType.WEBGPU, $video, 'WebGPUPlayer');
}
protected setupShaders(): void {
this.context = this.$canvas.getContext('webgpu')!;
if (!this.context) {
alert('Can\'t initiate context');
return;
}
const format = navigator.gpu.getPreferredCanvasFormat();
this.context.configure({
device: WebGPUPlayer.device,
format,
alphaMode: 'opaque',
});
this.vertexBuffer = WebGPUPlayer.device.createBuffer({
label: 'vertex buffer',
size: 6 * 4, // 6 floats (2 per vertex)
usage: GPUBufferUsage.VERTEX,
mappedAtCreation: true,
});
const mappedRange = this.vertexBuffer.getMappedRange();
new Float32Array(mappedRange).set([
-1, 3, // Vertex 1
-1, -1, // Vertex 2
3, -1, // Vertex 3
]);
this.vertexBuffer.unmap();
const shaderModule = WebGPUPlayer.device.createShaderModule({ code: wgslClarityBoost });
this.pipeline = WebGPUPlayer.device.createRenderPipeline({
layout: 'auto',
vertex: {
module: shaderModule,
entryPoint: 'vsMain',
buffers: [{
arrayStride: 8,
attributes: [{
format: 'float32x2',
offset: 0,
shaderLocation: 0,
}],
}],
},
fragment: {
module: shaderModule,
entryPoint: 'fsMain',
targets: [{ format }],
},
primitive: { topology: 'triangle-list' },
});
this.sampler = WebGPUPlayer.device.createSampler({ magFilter: 'linear', minFilter: 'linear' });
this.updateCanvas();
}
private prepareUniformBuffer(value: any, classType: any) {
const uniform = new classType(value);
const uniformBuffer = WebGPUPlayer.device.createBuffer({
size: uniform.byteLength,
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST
});
WebGPUPlayer.device.queue.writeBuffer(uniformBuffer, 0, uniform);
return uniformBuffer;
}
private updateCanvas() {
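// External textures expire after the current frame, so the video must be
// re-imported on every draw.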
const externalTexture = WebGPUPlayer.device.importExternalTexture({ source: this.$video });
if (!this.optionsUpdated) {
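// Pack the five shader parameters as f32 values, matching the Params
// struct in clarity-boost.wgsl.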
this.paramsBuffer = this.prepareUniformBuffer([
this.toFilterId(this.options.processing),
this.options.sharpness,
this.options.brightness / 100,
this.options.contrast / 100,
this.options.saturation / 100,
], Float32Array);
this.optionsUpdated = true;
}
this.bindGroup = WebGPUPlayer.device.createBindGroup({
layout: this.pipeline!.getBindGroupLayout(0),
entries: [
{ binding: 0, resource: this.sampler },
{ binding: 1, resource: externalTexture as any },
{ binding: 2, resource: { buffer: this.paramsBuffer } },
],
});
}
updateFrame(): void {
this.updateCanvas();
const commandEncoder = WebGPUPlayer.device.createCommandEncoder();
const passEncoder = commandEncoder.beginRenderPass({
colorAttachments: [{
view: this.context!.getCurrentTexture().createView(),
loadOp: 'clear',
storeOp: 'store',
clearValue: [0.0, 0.0, 0.0, 1.0],
}]
});
passEncoder.setPipeline(this.pipeline!);
passEncoder.setBindGroup(0, this.bindGroup);
passEncoder.setVertexBuffer(0, this.vertexBuffer);
passEncoder.draw(3);
passEncoder.end();
WebGPUPlayer.device.queue.submit([commandEncoder.finish()]);
}
refreshPlayer(): void {
this.optionsUpdated = false;
this.updateCanvas();
}
destroy(): void {
super.destroy();
this.isStopped = true;
// Unset GPU resources
this.pipeline = null;
this.bindGroup = null;
this.sampler = null;
this.paramsBuffer?.destroy();
this.paramsBuffer = null;
this.vertexBuffer?.destroy();
this.vertexBuffer = null;
// Reset the WebGPU context (force garbage collection)
if (this.context) {
this.context.unconfigure();
this.context = null;
}
console.log('WebGPU context successfully freed.');
}
}
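
WebGPUPlayer.prepare() probes for WebGPU support up front and always emits webgpu.ready, so the rest of the script can continue either way. A hedged standalone sketch of the same detection order (names are illustrative, not part of this codebase):

// navigator.gpu is absent without WebGPU support; requestAdapter() can still
// resolve to null (e.g. no suitable GPU); only then is a device requested.
async function detectWebGpu(): Promise<GPUDevice | null> {
    if (!('gpu' in navigator)) {
        return null;
    }

    const adapter = await navigator.gpu.requestAdapter();
    if (!adapter) {
        return null;
    }

    return await adapter.requestDevice();
}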