split stream player worker and

provide a main-thread wrapper for it
This commit is contained in:
Lily Tsuru 2024-09-09 06:11:56 -04:00
parent 085a985c13
commit b44a55f10d
8 changed files with 323 additions and 290 deletions

View file

@ -1,12 +1,7 @@
import { Config } from "./config";
import { GetKeySym } from "./key";
import { MouseState } from "./mouse";
import {
PlayerConfiguredMessage,
PlayerInputMessage,
PlayerOutputMessage,
} from "./player_worker_messages";
import { TypedWorker } from "./typed_worker";
import { VideoStreamPlayer } from "./streamplayer/player";
class Client {
private uri: string;
@ -23,8 +18,7 @@ class Client {
) as HTMLDivElement;
private webSocket: WebSocket;
private player: TypedWorker<PlayerInputMessage, PlayerOutputMessage> | null =
null;
private player: VideoStreamPlayer;
private mouse = new MouseState();
@ -32,7 +26,25 @@ class Client {
this.canvas = canvas;
this.add_handlers();
this.InitVideoPlayer();
this.player = new VideoStreamPlayer(this.canvas);
this.player.onConfigured = (usingHw: boolean) => {
let hwLabelElement = document.getElementById(
"hw-label"
) as HTMLSpanElement;
if (usingHw) {
if (!hwLabelElement.classList.contains("hw-good")) {
hwLabelElement.classList.add("hw-good");
hwLabelElement.innerText = "IS :)";
}
} else {
if (!hwLabelElement.classList.contains("hw-bad")) {
hwLabelElement.classList.add("hw-bad");
hwLabelElement.innerText = "is NOT :(";
}
}
};
// Remove / if the URL already has it.
if (uri.endsWith("/")) this.uri = uri.slice(0, uri.lastIndexOf("/"));
@ -126,53 +138,7 @@ class Client {
});
}
InitVideoPlayer() {
if (this.player == null) {
let offscreen = this.canvas.transferControlToOffscreen();
this.player = new TypedWorker<PlayerInputMessage, PlayerOutputMessage>(
new URL("./player_worker.ts", import.meta.url),
{
type: "module",
}
);
this.player.onMessage = (message: PlayerOutputMessage) => {
switch (message.type) {
case "configured": {
// set the message
let configMessage = message as PlayerConfiguredMessage;
let hwLabelElement = document.getElementById(
"hw-label"
) as HTMLSpanElement;
if (configMessage.usingHwDecode) {
if (!hwLabelElement.classList.contains("hw-good")) {
hwLabelElement.classList.add("hw-good");
hwLabelElement.innerText = "IS :)";
}
} else {
if (!hwLabelElement.classList.contains("hw-bad")) {
hwLabelElement.classList.add("hw-bad");
hwLabelElement.innerText = "is NOT :(";
}
}
}
}
};
// Send the init message to the worker to give it
// access to the canvas
this.player.post(
{
type: "init",
canvas: offscreen,
},
[offscreen]
);
}
}
ConnectToWS() {
// connect to the WebSocket server
this.webSocket = new WebSocket(`${this.uri}/`);
@ -183,20 +149,12 @@ class Client {
this.webSocket.addEventListener("message", this.OnWSMessage.bind(this));
}
Init() {
this.ConnectToWS();
}
OnWSOpen() {
this.player?.post({
type: "init-decoder",
});
this.player?.initDecoder();
}
OnWSClosed() {
this.player?.post({
type: "shutdown-decoder",
});
this.player?.shutdownDecoder();
setTimeout(() => {
this.ConnectToWS();
@ -206,13 +164,7 @@ class Client {
OnWSMessage(ev: MessageEvent<string | ArrayBuffer>) {
// Video data is binary
if (typeof ev.data !== "string") {
this.player?.post(
{
type: "data",
data: ev.data as ArrayBuffer,
},
[ev.data as ArrayBuffer]
);
this.player?.pushData(ev.data);
return;
}
@ -273,8 +225,9 @@ class Client {
}
}
let hostURL = `ws://${Config.host}`;
if (window.location.protocol === "https:") hostURL = `wss://${Config.host}`;
// WebCodecs can only be used on a secure context, so the
// https:// check to use wss:// is effectively moot
let hostURL = `wss://${Config.host}`;
let client = new Client(hostURL, document.getElementById("vm-display-canvas"));
client.Init();
client.ConnectToWS();

View file

@ -1,214 +0,0 @@
import { NALUStream, SPS, Slice } from "h264-interp-utils";
import {
PlayerInputMessage,
PlayerConfiguredMessage,
PlayerInitMessage,
PlayerVideoDataMessage,
} from "./player_worker_messages";
// shared iface later :)
class CanvasRenderer {
	private ctx: OffscreenCanvasRenderingContext2D;

	constructor(private canvas: OffscreenCanvas) {
		this.ctx = canvas.getContext("2d")!;
	}

	// Resize the backing canvas to the frame's display size, then paint the frame.
	draw(frame: VideoFrame) {
		const { displayWidth, displayHeight } = frame;
		this.canvas.width = displayWidth;
		this.canvas.height = displayHeight;
		this.ctx.drawImage(frame, 0, 0, displayWidth, displayHeight);
	}
}
// player logic
// Worker-side H.264 stream player. Parses incoming Annex B NALU buffers,
// configures a WebCodecs VideoDecoder from the first SPS seen on the stream,
// and draws decoded frames through the CanvasRenderer. Runs inside the
// worker: configureDecoder() posts its result to the main thread via the
// worker-global `self` (this file registers self.addEventListener below).
class VideoStreamPlayer {
	// Draw target; attached via setRenderer() once the canvas is transferred in.
	private renderer: CanvasRenderer | null = null;
	// Most recent decoded frame not yet drawn (older ones are dropped, see renderFrame).
	private pendingFrame: VideoFrame | null = null;
	// WebCodecs decoder; null until initDecoder(), and again after shutdownDecoder().
	private decoder: VideoDecoder | null = null;
	// First SPS observed on the stream; required to build the decoder config.
	private streamInitSPS: SPS | null = null;

	// Feed one encoded Annex B buffer into the player: lazily configure the
	// decoder from the first SPS, classify the access unit as key/delta,
	// then hand it to the decoder.
	// only async for VideoStreamPlayer#configureDecoder
	async onVideoData(buffer: ArrayBuffer) {
		let u8ar = new Uint8Array(buffer);
		let stream = new NALUStream(u8ar, {
			type: "annexB",
			strict: true,
		});

		let key = false;
		for (const nalu of stream) {
			// Try and obtain the base SPS required to initialize the video decoder
			// (if we didn't get one yet). Once we have one we try configuring the decoder
			// (non-SPS NALUs throw in the SPS constructor and are simply skipped).
			if (this.streamInitSPS == null) {
				try {
					let sps = new SPS(nalu);
					console.log(
						`Got stream SPS (avc codec string: ${sps.MIME}), pic dims ${sps.picWidth}x${sps.picHeight}`
					);
					this.streamInitSPS = sps;
					await this.configureDecoder();
				} catch (e) {}
			}

			// Determine if this frame is a keyframe (I frame, because we don't send B frames) or not
			try {
				let slice = new Slice(nalu);
				if (slice.slice_type == 2 || slice.slice_type == 7) key = true;
				else key = false;
			} catch (e) {}
		}

		if (this.decoder && this.decoder.state == "configured") {
			// Convert the buffer in place from Annex B to length-prefixed packets.
			stream.convertToPacket();

			let frame = new EncodedVideoChunk({
				type: key ? "key" : "delta",
				data: buffer,
				// munge the PTS so that frames are always
				// played as soon as possible
				timestamp: performance.now(),
				duration: performance.now(),
				// do the webcodecs typings seriously still not have this
				transfer: [buffer],
			} as any);

			this.decoder?.decode(frame);
		}
	}

	// Queue a decoded frame to be drawn on the next animation frame. If a
	// frame is already queued, it is closed and replaced, so the newest frame
	// always wins instead of latency accumulating.
	renderFrame(frame: VideoFrame) {
		if (!this.pendingFrame) {
			// No draw scheduled yet; schedule one. It draws whatever frame is
			// pending by the time the callback fires.
			requestAnimationFrame(() => {
				this.renderer?.draw(this.pendingFrame!);
				this.pendingFrame?.close();
				this.pendingFrame = null;
			});
		} else {
			// A draw is already scheduled; drop the stale frame and free it.
			this.pendingFrame.close();
		}
		this.pendingFrame = frame;
	}

	// Create the VideoDecoder (idempotent). It is not usable until
	// configureDecoder() has put it into the "configured" state.
	initDecoder() {
		if (!this.decoder) {
			// Alias the instance so the plain-function callbacks below can reach it.
			let self = this;
			this.decoder = new VideoDecoder({
				output(frame) {
					self.renderFrame(frame);
				},
				// TODO handle errors properly
				error(e) {},
			});
		}
	}

	// Configure the decoder from the stream SPS, preferring hardware decode
	// and falling back to no preference; reports the outcome to the main
	// thread. Throws (after tearing the decoder down) if nothing is supported.
	async configureDecoder() {
		if (this.streamInitSPS) {
			let config: VideoDecoderConfig = {
				codec: this.streamInitSPS.MIME,
				// set some parameters that make sense
				optimizeForLatency: true,
				hardwareAcceleration: "prefer-hardware",
			};

			// Result message for the main thread; usingHwDecode is filled in below.
			let configMessage: PlayerConfiguredMessage = {
				type: "configured",
				usingHwDecode: false,
			};

			// Probe for hardware acceleration support.
			let supportedConfig = await VideoDecoder.isConfigSupported(config);
			if (supportedConfig.supported) {
				console.log("Browser supports hardware preference");
				configMessage.usingHwDecode = true;
				this.decoder?.configure(supportedConfig.config!);
			} else {
				console.log(
					"Browser doesn't like hardware preference, removing it and trying again"
				);
				// Remove the property for hardware preference and try again.
				delete config.hardwareAcceleration;
				supportedConfig = await VideoDecoder.isConfigSupported(config);
				if (!supportedConfig.supported) {
					await this.shutdownDecoder();
					throw new Error("I give up, the browser doesn't like no preference either.");
				}
				configMessage.usingHwDecode = false;
				this.decoder?.configure(supportedConfig.config!);
			}

			// `self` here is the worker global scope (not this class), so this
			// posts the configuration result to the main thread.
			self.postMessage(configMessage);
		}
	}

	// Flush and tear down the decoder, releasing the pending frame and the
	// cached SPS so a later stream can reconfigure from scratch (used on
	// WebSocket close/reconnect).
	async shutdownDecoder() {
		await this.decoder?.flush();
		this.decoder?.close();
		this.decoder = null;

		// clear resources
		if (this.pendingFrame) {
			this.pendingFrame.close();
			this.pendingFrame = null;
		}

		if (this.streamInitSPS) {
			this.streamInitSPS = null;
		}
	}

	// True once a renderer has been attached.
	hasRenderer() {
		return this.renderer !== null;
	}

	// Attach the renderer used to draw decoded frames.
	setRenderer(r: CanvasRenderer) {
		this.renderer = r;
	}
}
// Single worker-global player instance; all decoding state lives here.
let player = new VideoStreamPlayer();

// Route one message from the main thread to the matching player action.
async function onMessage(msg: PlayerInputMessage) {
	switch (msg.type) {
		case "init": {
			// Attach the renderer only once.
			if (player.hasRenderer()) break;
			player.setRenderer(new CanvasRenderer((msg as PlayerInitMessage).canvas));
			break;
		}
		case "init-decoder": {
			player.initDecoder();
			break;
		}
		case "data": {
			await player.onVideoData((msg as PlayerVideoDataMessage).data);
			break;
		}
		case "shutdown-decoder": {
			await player.shutdownDecoder();
			break;
		}
	}
}

self.addEventListener(
	"message",
	async (ev: MessageEvent<PlayerInputMessage>) => onMessage(ev.data)
);

View file

@ -0,0 +1,69 @@
import { TypedWorker } from "../shared/typed_worker";
import {
PlayerInputMessage,
PlayerOutputMessage,
PlayerConfiguredMessage,
} from "./worker/stream_worker_messages";
// Main-thread wrapper around the stream player worker. It transfers the
// canvas to the worker and forwards control/data messages to it.
export class VideoStreamPlayer {
	// Worker that does the actual decode + render work off the main thread.
	// (Explicit types: bare `private player;` is an implicit-any field and
	// fails under `strict`.)
	private player: TypedWorker<PlayerInputMessage, PlayerOutputMessage>;
	// Canvas whose rendering control has been transferred to the worker.
	private canvas: HTMLCanvasElement;

	// Invoked when the worker reports its decoder was configured; the flag
	// says whether hardware decoding is in use.
	public onConfigured: null | ((usingHw: boolean) => void) = null;

	constructor(canvas: HTMLCanvasElement) {
		this.canvas = canvas;
		let offscreen = this.canvas.transferControlToOffscreen();

		this.player = new TypedWorker<PlayerInputMessage, PlayerOutputMessage>(
			new URL("./worker/stream_worker.ts", import.meta.url),
			{
				type: "module",
			}
		);

		// Arrow function captures `this`, so no `self` alias is needed.
		this.player.onMessage = (message: PlayerOutputMessage) => {
			switch (message.type) {
				case "configured": {
					let configMessage = message as PlayerConfiguredMessage;
					if (this.onConfigured) this.onConfigured(configMessage.usingHwDecode);
					break;
				}
			}
		};

		// Send the init message to the worker to give it
		// access to the canvas. The OffscreenCanvas is transferred, not copied.
		this.player.post(
			{
				type: "init",
				canvas: offscreen,
			},
			[offscreen]
		);
	}

	// Ask the worker to create its VideoDecoder.
	initDecoder() {
		this.player.post({
			type: "init-decoder",
		});
	}

	// Ask the worker to flush and tear down its decoder.
	shutdownDecoder() {
		this.player.post({
			type: "shutdown-decoder",
		});
	}

	// Forward one encoded video buffer to the worker. The buffer is
	// transferred (zero-copy) and must not be touched by the caller afterwards.
	pushData(chunk: ArrayBuffer) {
		this.player.post(
			{
				type: "data",
				data: chunk,
			},
			[chunk]
		);
	}
}

View file

@ -0,0 +1,16 @@
// shared interface to allow gl later :)
// Draws decoded VideoFrames onto the OffscreenCanvas transferred in from the
// main thread. Kept behind a tiny interface so a GL renderer can slot in later.
export class Canvas2DRenderer {
	private canvas: OffscreenCanvas;
	private ctx: OffscreenCanvasRenderingContext2D;

	constructor(c: OffscreenCanvas) {
		this.canvas = c;
		this.ctx = this.canvas.getContext("2d")!;
	}

	// Paint one frame at the frame's display size.
	draw(frame: VideoFrame) {
		// Assigning width/height resets the canvas bitmap and its 2D state
		// even when the value is unchanged, so only resize when the frame
		// size actually differs. The subsequent full-size drawImage covers
		// the whole canvas either way, so output is identical.
		if (
			this.canvas.width !== frame.displayWidth ||
			this.canvas.height !== frame.displayHeight
		) {
			this.canvas.width = frame.displayWidth;
			this.canvas.height = frame.displayHeight;
		}
		this.ctx.drawImage(frame, 0, 0, frame.displayWidth, frame.displayHeight);
	}
}

View file

@ -0,0 +1,168 @@
import { NALUStream, SPS, Slice } from "h264-interp-utils";
import {
PlayerInputMessage,
PlayerConfiguredMessage,
PlayerInitMessage,
PlayerVideoDataMessage,
} from "./stream_worker_messages";
import { Canvas2DRenderer } from "./canvas_2d_renderer";
// player logic
// Worker-side H.264 stream player. Parses incoming Annex B NALU buffers,
// configures a WebCodecs VideoDecoder from the first SPS seen on the stream,
// and draws decoded frames through a Canvas2DRenderer.
// NOTE(review): only usable inside a worker -- configureDecoder() relies on
// the worker-global `self.postMessage` to reach the main thread.
export class VideoStreamPlayer {
	// Draw target; attached via setRenderer() once the canvas is transferred in.
	private renderer: Canvas2DRenderer | null = null;
	// Most recent decoded frame not yet drawn (older ones are dropped, see renderFrame).
	private pendingFrame: VideoFrame | null = null;
	// WebCodecs decoder; null until initDecoder(), and again after shutdownDecoder().
	private decoder: VideoDecoder | null = null;
	// First SPS observed on the stream; required to build the decoder config.
	private streamInitSPS: SPS | null = null;

	// Feed one encoded Annex B buffer into the player: lazily configure the
	// decoder from the first SPS, classify the access unit as key/delta,
	// then hand it to the decoder.
	// only async for VideoStreamPlayer#configureDecoder
	async onVideoData(buffer: ArrayBuffer) {
		let u8ar = new Uint8Array(buffer);
		let stream = new NALUStream(u8ar, {
			type: "annexB",
			strict: true,
		});

		let key = false;
		for (const nalu of stream) {
			// Try and obtain the base SPS required to initialize the video decoder
			// (if we didn't get one yet). Once we have one we try configuring the decoder
			// (non-SPS NALUs throw in the SPS constructor and are simply skipped).
			if (this.streamInitSPS == null) {
				try {
					let sps = new SPS(nalu);
					console.log(
						`Got stream SPS (avc codec string: ${sps.MIME}), pic dims ${sps.picWidth}x${sps.picHeight}`
					);
					this.streamInitSPS = sps;
					await this.configureDecoder();
				} catch (e) {}
			}

			// Determine if this frame is a keyframe (I frame, because we don't send B frames) or not
			try {
				let slice = new Slice(nalu);
				if (slice.slice_type == 2 || slice.slice_type == 7) key = true;
				else key = false;
			} catch (e) {}
		}

		if (this.decoder && this.decoder.state == "configured") {
			// Convert the buffer in place from Annex B to length-prefixed packets.
			stream.convertToPacket();

			let frame = new EncodedVideoChunk({
				type: key ? "key" : "delta",
				data: buffer,
				// munge the PTS so that frames are always
				// played as soon as possible
				timestamp: performance.now(),
				duration: performance.now(),
				// do the webcodecs typings seriously still not have this
				transfer: [buffer],
			} as any);

			this.decoder?.decode(frame);
		}
	}

	// Queue a decoded frame to be drawn on the next animation frame. If a
	// frame is already queued, it is closed and replaced, so the newest frame
	// always wins instead of latency accumulating.
	renderFrame(frame: VideoFrame) {
		if (!this.pendingFrame) {
			// No draw scheduled yet; schedule one. It draws whatever frame is
			// pending by the time the callback fires.
			requestAnimationFrame(() => {
				this.renderer?.draw(this.pendingFrame!);
				this.pendingFrame?.close();
				this.pendingFrame = null;
			});
		} else {
			// A draw is already scheduled; drop the stale frame and free it.
			this.pendingFrame.close();
		}
		this.pendingFrame = frame;
	}

	// Create the VideoDecoder (idempotent). It is not usable until
	// configureDecoder() has put it into the "configured" state.
	initDecoder() {
		if (!this.decoder) {
			// Alias the instance so the plain-function callbacks below can reach it.
			let self = this;
			this.decoder = new VideoDecoder({
				output(frame) {
					self.renderFrame(frame);
				},
				// TODO handle errors properly
				error(e) {},
			});
		}
	}

	// Configure the decoder from the stream SPS, preferring hardware decode
	// and falling back to no preference; reports the outcome to the main
	// thread. Throws (after tearing the decoder down) if nothing is supported.
	async configureDecoder() {
		if (this.streamInitSPS) {
			let config: VideoDecoderConfig = {
				codec: this.streamInitSPS.MIME,
				// set some parameters that make sense
				optimizeForLatency: true,
				hardwareAcceleration: "prefer-hardware",
			};

			// Result message for the main thread; usingHwDecode is filled in below.
			let configMessage: PlayerConfiguredMessage = {
				type: "configured",
				usingHwDecode: false,
			};

			// Probe for hardware acceleration support.
			let supportedConfig = await VideoDecoder.isConfigSupported(config);
			if (supportedConfig.supported) {
				console.log("Browser supports hardware preference");
				configMessage.usingHwDecode = true;
				this.decoder?.configure(supportedConfig.config!);
			} else {
				console.log(
					"Browser doesn't like hardware preference, removing it and trying again"
				);
				// Remove the property for hardware preference and try again.
				delete config.hardwareAcceleration;
				supportedConfig = await VideoDecoder.isConfigSupported(config);
				if (!supportedConfig.supported) {
					await this.shutdownDecoder();
					throw new Error("I give up, the browser doesn't like no preference either.");
				}
				configMessage.usingHwDecode = false;
				this.decoder?.configure(supportedConfig.config!);
			}

			// `self` here is the worker global scope (not this class), so this
			// posts the configuration result to the main thread.
			self.postMessage(configMessage);
		}
	}

	// Flush and tear down the decoder, releasing the pending frame and the
	// cached SPS so a later stream can reconfigure from scratch (used on
	// WebSocket close/reconnect).
	async shutdownDecoder() {
		await this.decoder?.flush();
		this.decoder?.close();
		this.decoder = null;

		// clear resources
		if (this.pendingFrame) {
			this.pendingFrame.close();
			this.pendingFrame = null;
		}

		if (this.streamInitSPS) {
			this.streamInitSPS = null;
		}
	}

	// True once a renderer has been attached.
	hasRenderer() {
		return this.renderer !== null;
	}

	// Attach the renderer used to draw decoded frames.
	setRenderer(r: Canvas2DRenderer) {
		this.renderer = r;
	}
}

View file

@ -0,0 +1,41 @@
import {
PlayerInputMessage,
PlayerConfiguredMessage,
PlayerInitMessage,
PlayerVideoDataMessage,
} from "./stream_worker_messages";
import { Canvas2DRenderer } from "./canvas_2d_renderer";
import { VideoStreamPlayer } from "./player";
// Single worker-global player instance, driven entirely by messages
// posted from the main-thread wrapper.
let player = new VideoStreamPlayer();

// Handle one control/data message from the main-thread wrapper.
async function onMessage(msg: PlayerInputMessage) {
	if (msg.type === "init") {
		// Attach a renderer around the transferred canvas, only once.
		if (!player.hasRenderer())
			player.setRenderer(
				new Canvas2DRenderer((msg as PlayerInitMessage).canvas)
			);
	} else if (msg.type === "init-decoder") {
		player.initDecoder();
	} else if (msg.type === "data") {
		await player.onVideoData((msg as PlayerVideoDataMessage).data);
	} else if (msg.type === "shutdown-decoder") {
		await player.shutdownDecoder();
	}
}

self.addEventListener(
	"message",
	async (ev: MessageEvent<PlayerInputMessage>) => onMessage(ev.data)
);