make rendering abstract (so that a webgl renderer will actually be possible)
parent b44a55f10d
commit a6734667de
3 changed files with 167 additions and 156 deletions

@@ -1,10 +1,11 @@
-// shared interface to allow gl later :)
-export class Canvas2DRenderer {
-	private canvas: OffscreenCanvas;
-
+import { CanvasRenderer } from "./canvas_renderer";
+
+// renderer for the streamplayer that uses the canvas2d apis
+export class Canvas2DRenderer extends CanvasRenderer {
 	private ctx: OffscreenCanvasRenderingContext2D;
 
 	constructor(c: OffscreenCanvas) {
-		this.canvas = c;
+		super(c);
 		this.ctx = this.canvas.getContext("2d")!;
 	}
+

client/src/streamplayer/worker/canvas_renderer.ts (new file, +10)
@@ -0,0 +1,10 @@
+// mixin thing
+export abstract class CanvasRenderer {
+	protected canvas: OffscreenCanvas;
+
+	constructor(c: OffscreenCanvas) {
+		this.canvas = c;
+	}
+
+	abstract draw(frame: VideoFrame): void;
+}
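
The commit message points at the reason for this base class: a WebGL renderer only becomes possible once the player depends on CanvasRenderer rather than on the 2D implementation. As a rough sketch of what such a backend could look like (nothing below is part of this commit; the class name WebGLCanvasRenderer, the "webgl2" context choice, and the stubbed draw body are assumptions), another renderer would extend the same base and implement draw():

// hypothetical sketch only, not part of this commit
import { CanvasRenderer } from "./canvas_renderer";

export class WebGLCanvasRenderer extends CanvasRenderer {
	private gl: WebGL2RenderingContext;

	constructor(c: OffscreenCanvas) {
		super(c);
		// OffscreenCanvas can hand out WebGL contexts inside a worker too
		this.gl = this.canvas.getContext("webgl2")!;
	}

	draw(frame: VideoFrame): void {
		// Sketch only: a real implementation would upload `frame` to a texture
		// and draw a textured quad (shader/program setup omitted here). The
		// caller closes the frame afterwards, mirroring how
		// VideoStreamPlayer#renderFrame treats the 2D renderer.
		const gl = this.gl;
		gl.viewport(0, 0, this.canvas.width, this.canvas.height);
		gl.clearColor(0, 0, 0, 1);
		gl.clear(gl.COLOR_BUFFER_BIT);
	}
}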

@@ -1,4 +1,3 @@
-
 import { NALUStream, SPS, Slice } from "h264-interp-utils";
 import {
 	PlayerInputMessage,
@@ -6,163 +5,164 @@ import {
 	PlayerInitMessage,
 	PlayerVideoDataMessage,
 } from "./stream_worker_messages";
-import { Canvas2DRenderer } from "./canvas_2d_renderer";
+import { CanvasRenderer } from "./canvas_renderer";
 
 // player logic
 export class VideoStreamPlayer {
-	private renderer: Canvas2DRenderer | null = null;
+	private renderer: CanvasRenderer | null = null;
 	private pendingFrame: VideoFrame | null = null;
 	private decoder: VideoDecoder | null = null;
 	private streamInitSPS: SPS | null = null;
 
 	// only async for VideoStreamPlayer#configureDecoder
 	async onVideoData(buffer: ArrayBuffer) {
 		let u8ar = new Uint8Array(buffer);
 
 		let stream = new NALUStream(u8ar, {
 			type: "annexB",
 			strict: true,
 		});
 
 		let key = false;
 
 		for (const nalu of stream) {
 			// Try and obtain the base SPS required to initalize the video decoder
 			// (if we didn't get one yet). Once we have one we try configuring the decoder
 			if (this.streamInitSPS == null) {
 				try {
 					let sps = new SPS(nalu);
 					console.log(
 						`Got stream SPS (avc codec string: ${sps.MIME}), pic dims ${sps.picWidth}x${sps.picHeight}`
 					);
 
 					this.streamInitSPS = sps;
 					await this.configureDecoder();
 				} catch (e) {}
 			}
 
 			// Determine if this frame is a keyframe (I frame, because we don't send B frames) or not
 			try {
 				let slice = new Slice(nalu);
 				if (slice.slice_type == 2 || slice.slice_type == 7) key = true;
 				else key = false;
 			} catch (e) {}
 		}
 
 		if (this.decoder && this.decoder.state == "configured") {
 			stream.convertToPacket();
 
 			let frame = new EncodedVideoChunk({
 				type: key ? "key" : "delta",
 				data: buffer,
 
 				// munge the PTS so that frames are always
 				// played as soon as possible
 				timestamp: performance.now(),
 				duration: performance.now(),
 
 				// do the webcodecs typings seriously still not have this
 				transfer: [buffer],
 			} as any);
 
 			this.decoder?.decode(frame);
 		}
 	}
 
 	renderFrame(frame: VideoFrame) {
 		if (!this.pendingFrame) {
 			requestAnimationFrame(() => {
 				this.renderer?.draw(this.pendingFrame!);
 				this.pendingFrame?.close();
 				this.pendingFrame = null;
 			});
 		} else {
 			this.pendingFrame.close();
 		}
 
 		this.pendingFrame = frame;
 	}
 
 	initDecoder() {
 		if (!this.decoder) {
 			let self = this;
 			this.decoder = new VideoDecoder({
 				output(frame) {
 					self.renderFrame(frame);
 				},
 
 				// TODO handle errors properly
 				error(e) {},
 			});
 		}
 	}
 
 	async configureDecoder() {
 		if (this.streamInitSPS) {
 			let config: VideoDecoderConfig = {
 				codec: this.streamInitSPS.MIME,
 
 				// set some parameters that make sense
 				optimizeForLatency: true,
 				hardwareAcceleration: "prefer-hardware",
 			};
 
 			let configMessage: PlayerConfiguredMessage = {
 				type: "configured",
 				usingHwDecode: false,
 			};
 
 			// Probe for hardware accleration support.
 			let supportedConfig = await VideoDecoder.isConfigSupported(config);
 
 			if (supportedConfig.supported) {
 				console.log("Browser supports hardware preference");
 				configMessage.usingHwDecode = true;
 				this.decoder?.configure(supportedConfig.config!);
 			} else {
 				console.log(
 					"Browser doesn't like hardware preference, removing it and trying again"
 				);
 
 				// Remove the property for hardware preference and try again.
 				delete config.hardwareAcceleration;
 
 				supportedConfig = await VideoDecoder.isConfigSupported(config);
 
 				if (!supportedConfig.supported) {
 					await this.shutdownDecoder();
-					throw new Error("I give up, the browser doesn't like no preference either.");
+					throw new Error(
+						"I give up, the browser doesn't like no preference either."
+					);
 				}
 
 				configMessage.usingHwDecode = false;
 				this.decoder?.configure(supportedConfig.config!);
 			}
 
 			self.postMessage(configMessage);
 		}
 	}
 
 	async shutdownDecoder() {
 		await this.decoder?.flush();
 		this.decoder?.close();
 		this.decoder = null;
 
 		// clear resources
 		if (this.pendingFrame) {
 			this.pendingFrame.close();
 			this.pendingFrame = null;
 		}
 
 		if (this.streamInitSPS) {
 			this.streamInitSPS = null;
 		}
 	}
 
 	hasRenderer() {
 		return this.renderer !== null;
 	}
 
-	setRenderer(r: Canvas2DRenderer) {
+	setRenderer(r: CanvasRenderer) {
 		this.renderer = r;
 	}
 }
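
For a sense of how the abstraction is consumed, a hedged wiring sketch follows. Only Canvas2DRenderer, hasRenderer, setRenderer, and initDecoder come from the diff above; the setUpPlayer helper and the idea that the OffscreenCanvas arrives from the main thread via a PlayerInitMessage are assumptions:

// hypothetical glue inside the worker module that defines VideoStreamPlayer;
// the OffscreenCanvas would come over from the main thread (e.g. in a PlayerInitMessage)
import { Canvas2DRenderer } from "./canvas_2d_renderer";

function setUpPlayer(player: VideoStreamPlayer, canvas: OffscreenCanvas) {
	if (!player.hasRenderer()) {
		// any CanvasRenderer subclass works here now; a future WebGL-backed
		// renderer slots in without touching VideoStreamPlayer at all
		player.setRenderer(new Canvas2DRenderer(canvas));
	}
	player.initDecoder();
}

Because setRenderer now takes the abstract type, swapping the concrete renderer is the only change a different backend would need at the call site.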