initial public release

commit e58a765cfa

25 changed files with 6602 additions and 0 deletions
5  .gitignore  vendored  Normal file
@@ -0,0 +1,5 @@
client/.parcel-cache
client/dist
client/node_modules

/server/target
29  README.md  Normal file
@@ -0,0 +1,29 @@
# vncstream

Prototype of collabvm 3.0 (2.0. yep, I get the irony)'s new h.264 video streaming on the client and server.

Some changes will ultimately be made before this is ever thought about being integrated. They are listed below.

## Server side changes (probably)

- HW encode support (with software as a fallback)
	- for now nvenc is fine enough (it's also what we will have access to)

- Code cleanup
	- also maybe NAL SPS rewriting (stolen from webrtc) to force 1:1 decoding, although it seems that it's fine enough most of the time...
	- maybe pull it out into its own crate instead of it being fairly tightly packed

- output a [LOC](https://datatracker.ietf.org/doc/draft-mzanaty-moq-loc/)-like container that can hold h.264 NAL packets, opus packets, or both "interleaved" into a single container entry
	- the client will parse this as well

## Client

- Warn for WebCodecs not being supported (see the sketch below)

- Code cleanup
	- Maybe the video playing code could even be pulled out into its own thing?

- WebSockets probably will not be used because they blow
	- WebSocketStream "helps" by getting rid of the even bigger elephant in the room (backpressure, which is a "fun" feature of the originally standardized DOM API), but the reality is that TCP head-of-line blocking and many other issues just mean that anything TCP will be meh at best and very paltry at worst.
	- MoQ over WebTransport is probably the way to go anyways, although if we diverge we should standardize a WebTransport subprotocol to communicate that we're different (and agree on it everywhere)
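Related to the "Warn for WebCodecs not being supported" item above, a minimal sketch of what that probe could look like on the client. The hard-coded Main-profile codec string is only an assumption for the check (the real client derives the codec string from the stream SPS via h264-interp-utils), and the `checkWebCodecsSupport` helper plus the warning wiring are hypothetical, not part of this commit:

```ts
// Rough sketch: probe for WebCodecs H.264 decode support before starting the player.
export async function checkWebCodecsSupport(): Promise<boolean> {
	// WebCodecs is missing entirely on older browsers / insecure contexts.
	if (typeof VideoDecoder === "undefined") return false;

	// Ask the browser whether it can decode a plausible H.264 config at all.
	const { supported } = await VideoDecoder.isConfigSupported({
		codec: "avc1.4d0028", // assumption: Main profile, level 4.0
		optimizeForLatency: true,
	});
	return supported === true;
}

// Possible usage during client bootstrap (hypothetical warning element):
// if (!(await checkWebCodecsSupport())) {
//     document.getElementById("hw-label")!.innerText = "cannot decode h.264 at all :(";
// }
```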
10  client/.editorconfig  Normal file
@@ -0,0 +1,10 @@
root = true

[*]
end_of_line = lf
insert_final_newline = true

[*.{js,json,yml}]
charset = utf-8
indent_style = space
indent_size = 2
4  client/.gitattributes  vendored  Normal file
@@ -0,0 +1,4 @@
/.yarn/** linguist-vendored
/.yarn/releases/* binary
/.yarn/plugins/**/* binary
/.pnp.* binary linguist-generated
13  client/.gitignore  vendored  Normal file
@@ -0,0 +1,13 @@
.yarn/*
!.yarn/patches
!.yarn/plugins
!.yarn/releases
!.yarn/sdks
!.yarn/versions

# Swap the comments on the following lines if you wish to use zero-installs
# In that case, don't forget to run `yarn config set enableGlobalCache false`!
# Documentation here: https://yarnpkg.com/features/caching#zero-installs

#!.yarn/cache
.pnp.*
2  client/.yarnrc.yml  Normal file
@@ -0,0 +1,2 @@
# just to play ball
nodeLinker: node-modules
1  client/README.md  Normal file
@@ -0,0 +1 @@
# client
13  client/package.json  Normal file
@@ -0,0 +1,13 @@
{
  "name": "client",
  "packageManager": "yarn@4.1.1",
  "type": "module",
  "dependencies": {
    "h264-interp-utils": "^1.1.1",
    "parcel": "^2.12.0"
  },
  "scripts": {
    "serve": "parcel src/index.html",
    "build": "parcel build --public-url . src/index.html"
  }
}
5  client/src/config.ts  Normal file
@@ -0,0 +1,5 @@
export let Config = {
	// Set this to your server address.
	//host: `${window.location.host}${window.location.pathname}`
	host: `computernewb.com/~lily/fgvm1/`
}
33  client/src/index.html  Normal file
@@ -0,0 +1,33 @@
<!DOCTYPE html>
<html>
<head>
	<title>fgvm test</title>
	<link rel="stylesheet" href="./style.css">
	<script type="module" src="./index.ts"></script>
</head>
<body>
	<h3>test of some new h264-powered screen streaming awesomemess</h3>
	<h3>(for the record, the protocol sucks on purpose.)</h3>
	<div>Your browser <span id="hw-label"></span> using hardware decoding functionality.</div>
	<center>
		<div id="vm-display">
			<canvas id="vm-display-canvas" tabindex="-1" width="640" height="480">
				Please use a browser made after the Gregorian calendar year 2010. Thanks.
			</canvas>
		</div>


		<div id="vm-chat">

			<input id="chat-text" type="text" maxlength="150" placeholder="Chat Message" />
			<button id="chat-btn">Chat</button>


			<div id="chat-history">
				Chat history (it sucks, but this is a prototype so /shrug):
			</div>

		</div>
	</center>
</body>
</html>
277  client/src/index.ts  Normal file
@@ -0,0 +1,277 @@
import { Config } from "./config";
import { GetKeySym } from "./key";
import { MouseState } from "./mouse";
import {
	PlayerConfiguredMessage,
	PlayerOutputMessage,
} from "./player_worker_messages";

class Client {
	private uri: string;
	private canvas: HTMLCanvasElement;
	private chatText: HTMLInputElement = document.getElementById(
		"chat-text"
	) as HTMLInputElement;
	private chatButton: HTMLButtonElement = document.getElementById(
		"chat-btn"
	) as HTMLButtonElement;

	private chatHistory: HTMLDivElement = document.getElementById(
		"chat-history"
	) as HTMLDivElement;

	private webSocket: WebSocket;
	private player: Worker | null = null;

	private mouse = new MouseState();

	constructor(uri, canvas) {
		this.canvas = canvas;
		this.add_handlers();

		this.InitVideoPlayer();

		// Remove / if the URL already has it.
		if (uri.endsWith("/")) this.uri = uri.slice(0, uri.lastIndexOf("/"));
		else this.uri = uri;
	}

	add_handlers() {
		let self = this;

		this.canvas.addEventListener(
			"mousedown",
			(e) => {
				self.mouse.InitFromMouseEvent(e);
				self.send_mouse(self.mouse.x, self.mouse.y, self.mouse.Mask());
			},
			{
				capture: true,
			}
		);

		this.canvas.addEventListener(
			"mouseup",
			(e: MouseEvent) => {
				self.mouse.InitFromMouseEvent(e);
				self.send_mouse(self.mouse.x, self.mouse.y, self.mouse.Mask());
			},
			{
				capture: true,
			}
		);

		this.canvas.addEventListener(
			"mousemove",
			(e: MouseEvent) => {
				self.mouse.InitFromMouseEvent(e);
				self.send_mouse(self.mouse.x, self.mouse.y, self.mouse.Mask());
			},
			{
				capture: true,
			}
		);

		this.canvas.addEventListener(
			"keydown",
			(e: KeyboardEvent) => {
				e.preventDefault();
				let keysym = GetKeySym(e.keyCode, e.key, e.location);
				if (keysym === null) return;
				self.send_key(keysym, 1);
			},
			{
				capture: true,
			}
		);

		this.canvas.addEventListener(
			"keyup",
			(e: KeyboardEvent) => {
				e.preventDefault();
				let keysym = GetKeySym(e.keyCode, e.key, e.location);
				if (keysym === null) return;
				self.send_key(keysym, 0);
			},
			{
				capture: true,
			}
		);

		this.canvas.addEventListener(
			"wheel",
			(ev: WheelEvent) => {
				ev.preventDefault();
				self.mouse.InitFromWheelEvent(ev);
				self.send_mouse(self.mouse.x, self.mouse.y, self.mouse.Mask());

				if (self.mouse.scrollUp) self.mouse.scrollUp = false;
				else if (self.mouse.scrollDown) self.mouse.scrollDown = false;

				self.send_mouse(self.mouse.x, self.mouse.y, self.mouse.Mask());
			},
			{
				capture: true,
			}
		);

		this.canvas.addEventListener("contextmenu", (e) => e.preventDefault());

		this.chatButton.addEventListener("click", (e) => {
			self.send_chat(self.chatText.value);
			self.chatText.value = "";
		});
	}

	InitVideoPlayer() {
		if (this.player == null) {
			let offscreen = this.canvas.transferControlToOffscreen();

			this.player = new Worker(new URL("./player_worker.ts", import.meta.url), {
				type: "module",
			});

			this.player.addEventListener(
				"message",
				(message: MessageEvent<PlayerOutputMessage>) => {
					switch (message.data.type) {
						case "configured": {
							// set the message
							let configMessage = message.data as PlayerConfiguredMessage;
							let hwLabelElement = document.getElementById(
								"hw-label"
							) as HTMLSpanElement;

							if (configMessage.usingHwDecode) {
								if (!hwLabelElement.classList.contains("hw-good")) {
									hwLabelElement.classList.add("hw-good");
									hwLabelElement.innerText = "IS :)";
								}
							} else {
								if (!hwLabelElement.classList.contains("hw-bad")) {
									hwLabelElement.classList.add("hw-bad");
									hwLabelElement.innerText = "is NOT :(";
								}
							}
						}
					}
				}
			);

			// Send the init message to the worker to give it
			// access to the canvas
			this.player.postMessage(
				{
					type: "init",
					canvas: offscreen,
				},
				[offscreen]
			);
		}
	}

	ConnectToWS() {
		// connect to the WebSocket server
		this.webSocket = new WebSocket(`${this.uri}/`);
		this.webSocket.binaryType = "arraybuffer";

		this.webSocket.addEventListener("open", this.OnWSOpen.bind(this));
		this.webSocket.addEventListener("close", this.OnWSClosed.bind(this));
		this.webSocket.addEventListener("message", this.OnWSMessage.bind(this));
	}

	Init() {
		this.ConnectToWS();
	}

	OnWSOpen() {
		this.player?.postMessage({
			type: "init-decoder",
		});
	}

	OnWSClosed() {
		this.player?.postMessage({
			type: "shutdown-decoder",
		});

		setTimeout(() => {
			this.ConnectToWS();
		}, 5000);
	}

	OnWSMessage(ev: MessageEvent<string | ArrayBuffer>) {
		// Video data is binary
		if (typeof ev.data !== "string") {
			this.player?.postMessage(
				{
					type: "data",
					data: ev.data as ArrayBuffer,
				},
				[ev.data as ArrayBuffer]
			);
			return;
		}

		try {
			let msg = JSON.parse(ev.data);
			switch (msg.type) {
				case "chat":
					{
						this.AddChatMessage(msg.username, msg.msg);
					}
					break;
				default:
					break;
			}
		} catch (err) {
			return;
		}
	}

	private AddChatMessage(username: string, msg: string) {
		let div = document.createElement("div");
		let b = document.createElement("b");
		b.innerText = username;
		div.innerText = `: ${msg}`;

		div.prepend(b);
		this.chatHistory.appendChild(div);
	}

	private send_chat(msg: string) {
		this.webSocket.send(
			JSON.stringify({
				type: "chat",
				msg: msg,
			})
		);
	}

	private send_mouse(x: number, y: number, mask: number) {
		this.webSocket.send(
			JSON.stringify({
				type: "mouse",
				x: x,
				y: y,
				mask: mask,
			})
		);
	}

	private send_key(keysym, pressed) {
		this.webSocket.send(
			JSON.stringify({
				type: "key",
				keysym: keysym,
				pressed: pressed,
			})
		);
	}
}

let hostURL = `ws://${Config.host}`;
if (window.location.protocol === "https:") hostURL = `wss://${Config.host}`;
let client = new Client(hostURL, document.getElementById("vm-display-canvas"));

client.Init();
403  client/src/key.ts  Normal file
|
@ -0,0 +1,403 @@
|
|||
export function GetKeySym(keyCode: number, key: string, location: number): number | null {
|
||||
let keysym = keysym_from_key_identifier(key, location) || keysym_from_keycode(keyCode, location);
|
||||
return keysym;
|
||||
}
|
||||
|
||||
function keysym_from_key_identifier(identifier: string, location: number): number | null {
|
||||
if (!identifier) return null;
|
||||
|
||||
let typedCharacter: string | undefined;
|
||||
|
||||
// If identifier is U+xxxx, decode Unicode character
|
||||
const unicodePrefixLocation = identifier.indexOf('U+');
|
||||
if (unicodePrefixLocation >= 0) {
|
||||
const hex = identifier.substring(unicodePrefixLocation + 2);
|
||||
typedCharacter = String.fromCharCode(parseInt(hex, 16));
|
||||
} else if (identifier.length === 1) typedCharacter = identifier;
|
||||
else return get_keysym(keyidentifier_keysym[identifier], location);
|
||||
|
||||
if (!typedCharacter) return null;
|
||||
|
||||
const codepoint = typedCharacter.charCodeAt(0);
|
||||
return keysym_from_charcode(codepoint);
|
||||
}
|
||||
|
||||
function get_keysym(keysyms: number[] | null, location: number): number | null {
|
||||
if (!keysyms) return null;
|
||||
return keysyms[location] || keysyms[0];
|
||||
}
|
||||
|
||||
function keysym_from_charcode(codepoint: number): number | null {
|
||||
if (isControlCharacter(codepoint)) return 0xff00 | codepoint;
|
||||
if (codepoint >= 0x0000 && codepoint <= 0x00ff) return codepoint;
|
||||
if (codepoint >= 0x0100 && codepoint <= 0x10ffff) return 0x01000000 | codepoint;
|
||||
return null;
|
||||
}
|
||||
|
||||
function isControlCharacter(codepoint: number): boolean {
|
||||
return codepoint <= 0x1f || (codepoint >= 0x7f && codepoint <= 0x9f);
|
||||
}
|
||||
|
||||
function keysym_from_keycode(keyCode: number, location: number): number | null {
|
||||
return get_keysym(keycodeKeysyms[keyCode], location);
|
||||
}
|
||||
|
||||
function key_identifier_sane(keyCode: number, keyIdentifier: string): boolean {
|
||||
if (!keyIdentifier) return false;
|
||||
const unicodePrefixLocation = keyIdentifier.indexOf('U+');
|
||||
if (unicodePrefixLocation === -1) return true;
|
||||
|
||||
const codepoint = parseInt(keyIdentifier.substring(unicodePrefixLocation + 2), 16);
|
||||
if (keyCode !== codepoint) return true;
|
||||
if ((keyCode >= 65 && keyCode <= 90) || (keyCode >= 48 && keyCode <= 57)) return true;
|
||||
return false;
|
||||
}
|
||||
|
||||
export function OSK_buttonToKeysym(button: string): number | null {
|
||||
const keyMapping = OSK_keyMappings.find((mapping) => mapping.includes(button));
|
||||
if (keyMapping) {
|
||||
const [, keyCode, keyIdentifier, key, location] = keyMapping;
|
||||
return GetKeySym(keyCode, key, location);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
interface KeyIdentifierKeysym {
|
||||
[key: string]: number[] | null;
|
||||
}
|
||||
|
||||
interface KeyCodeKeysyms {
|
||||
[key: number]: number[] | null;
|
||||
}
|
||||
|
||||
const keycodeKeysyms: KeyCodeKeysyms = {
|
||||
8: [0xff08], // backspace
|
||||
9: [0xff09], // tab
|
||||
12: [0xff0b, 0xff0b, 0xff0b, 0xffb5], // clear / KP 5
|
||||
13: [0xff0d], // enter
|
||||
16: [0xffe1, 0xffe1, 0xffe2], // shift
|
||||
17: [0xffe3, 0xffe3, 0xffe4], // ctrl
|
||||
18: [0xffe9, 0xffe9, 0xfe03], // alt
|
||||
19: [0xff13], // pause/break
|
||||
20: [0xffe5], // caps lock
|
||||
27: [0xff1b], // escape
|
||||
32: [0x0020], // space
|
||||
33: [0xff55, 0xff55, 0xff55, 0xffb9], // page up / KP 9
|
||||
34: [0xff56, 0xff56, 0xff56, 0xffb3], // page down / KP 3
|
||||
35: [0xff57, 0xff57, 0xff57, 0xffb1], // end / KP 1
|
||||
36: [0xff50, 0xff50, 0xff50, 0xffb7], // home / KP 7
|
||||
37: [0xff51, 0xff51, 0xff51, 0xffb4], // left arrow / KP 4
|
||||
38: [0xff52, 0xff52, 0xff52, 0xffb8], // up arrow / KP 8
|
||||
39: [0xff53, 0xff53, 0xff53, 0xffb6], // right arrow / KP 6
|
||||
40: [0xff54, 0xff54, 0xff54, 0xffb2], // down arrow / KP 2
|
||||
45: [0xff63, 0xff63, 0xff63, 0xffb0], // insert / KP 0
|
||||
46: [0xffff, 0xffff, 0xffff, 0xffae], // delete / KP decimal
|
||||
91: [0xffeb], // left window key (hyper_l)
|
||||
92: [0xff67], // right window key (menu key?)
|
||||
93: null, // select key
|
||||
96: [0xffb0], // KP 0
|
||||
97: [0xffb1], // KP 1
|
||||
98: [0xffb2], // KP 2
|
||||
99: [0xffb3], // KP 3
|
||||
100: [0xffb4], // KP 4
|
||||
101: [0xffb5], // KP 5
|
||||
102: [0xffb6], // KP 6
|
||||
103: [0xffb7], // KP 7
|
||||
104: [0xffb8], // KP 8
|
||||
105: [0xffb9], // KP 9
|
||||
106: [0xffaa], // KP multiply
|
||||
107: [0xffab], // KP add
|
||||
109: [0xffad], // KP subtract
|
||||
110: [0xffae], // KP decimal
|
||||
111: [0xffaf], // KP divide
|
||||
112: [0xffbe], // f1
|
||||
113: [0xffbf], // f2
|
||||
114: [0xffc0], // f3
|
||||
115: [0xffc1], // f4
|
||||
116: [0xffc2], // f5
|
||||
117: [0xffc3], // f6
|
||||
118: [0xffc4], // f7
|
||||
119: [0xffc5], // f8
|
||||
120: [0xffc6], // f9
|
||||
121: [0xffc7], // f10
|
||||
122: [0xffc8], // f11
|
||||
123: [0xffc9], // f12
|
||||
144: [0xff7f], // num lock
|
||||
145: [0xff14], // scroll lock
|
||||
225: [0xfe03] // altgraph (iso_level3_shift)
|
||||
};
|
||||
|
||||
const keyidentifier_keysym: KeyIdentifierKeysym = {
|
||||
Again: [0xff66],
|
||||
AllCandidates: [0xff3d],
|
||||
Alphanumeric: [0xff30],
|
||||
Alt: [0xffe9, 0xffe9, 0xfe03],
|
||||
Attn: [0xfd0e],
|
||||
AltGraph: [0xfe03],
|
||||
ArrowDown: [0xff54],
|
||||
ArrowLeft: [0xff51],
|
||||
ArrowRight: [0xff53],
|
||||
ArrowUp: [0xff52],
|
||||
Backspace: [0xff08],
|
||||
CapsLock: [0xffe5],
|
||||
Cancel: [0xff69],
|
||||
Clear: [0xff0b],
|
||||
Convert: [0xff21],
|
||||
Copy: [0xfd15],
|
||||
Crsel: [0xfd1c],
|
||||
CrSel: [0xfd1c],
|
||||
CodeInput: [0xff37],
|
||||
Compose: [0xff20],
|
||||
Control: [0xffe3, 0xffe3, 0xffe4],
|
||||
ContextMenu: [0xff67],
|
||||
DeadGrave: [0xfe50],
|
||||
DeadAcute: [0xfe51],
|
||||
DeadCircumflex: [0xfe52],
|
||||
DeadTilde: [0xfe53],
|
||||
DeadMacron: [0xfe54],
|
||||
DeadBreve: [0xfe55],
|
||||
DeadAboveDot: [0xfe56],
|
||||
DeadUmlaut: [0xfe57],
|
||||
DeadAboveRing: [0xfe58],
|
||||
DeadDoubleacute: [0xfe59],
|
||||
DeadCaron: [0xfe5a],
|
||||
DeadCedilla: [0xfe5b],
|
||||
DeadOgonek: [0xfe5c],
|
||||
DeadIota: [0xfe5d],
|
||||
DeadVoicedSound: [0xfe5e],
|
||||
DeadSemivoicedSound: [0xfe5f],
|
||||
Delete: [0xffff],
|
||||
Down: [0xff54],
|
||||
End: [0xff57],
|
||||
Enter: [0xff0d],
|
||||
EraseEof: [0xfd06],
|
||||
Escape: [0xff1b],
|
||||
Execute: [0xff62],
|
||||
Exsel: [0xfd1d],
|
||||
ExSel: [0xfd1d],
|
||||
F1: [0xffbe],
|
||||
F2: [0xffbf],
|
||||
F3: [0xffc0],
|
||||
F4: [0xffc1],
|
||||
F5: [0xffc2],
|
||||
F6: [0xffc3],
|
||||
F7: [0xffc4],
|
||||
F8: [0xffc5],
|
||||
F9: [0xffc6],
|
||||
F10: [0xffc7],
|
||||
F11: [0xffc8],
|
||||
F12: [0xffc9],
|
||||
F13: [0xffca],
|
||||
F14: [0xffcb],
|
||||
F15: [0xffcc],
|
||||
F16: [0xffcd],
|
||||
F17: [0xffce],
|
||||
F18: [0xffcf],
|
||||
F19: [0xffd0],
|
||||
F20: [0xffd1],
|
||||
F21: [0xffd2],
|
||||
F22: [0xffd3],
|
||||
F23: [0xffd4],
|
||||
F24: [0xffd5],
|
||||
Find: [0xff68],
|
||||
GroupFirst: [0xfe0c],
|
||||
GroupLast: [0xfe0e],
|
||||
GroupNext: [0xfe08],
|
||||
GroupPrevious: [0xfe0a],
|
||||
FullWidth: null,
|
||||
HalfWidth: null,
|
||||
HangulMode: [0xff31],
|
||||
Hankaku: [0xff29],
|
||||
HanjaMode: [0xff34],
|
||||
Help: [0xff6a],
|
||||
Hiragana: [0xff25],
|
||||
HiraganaKatakana: [0xff27],
|
||||
Home: [0xff50],
|
||||
Hyper: [0xffed, 0xffed, 0xffee],
|
||||
Insert: [0xff63],
|
||||
JapaneseHiragana: [0xff25],
|
||||
JapaneseKatakana: [0xff26],
|
||||
JapaneseRomaji: [0xff24],
|
||||
JunjaMode: [0xff38],
|
||||
KanaMode: [0xff2d],
|
||||
KanjiMode: [0xff21],
|
||||
Katakana: [0xff26],
|
||||
Left: [0xff51],
|
||||
Meta: [0xffe7, 0xffe7, 0xffe8],
|
||||
ModeChange: [0xff7e],
|
||||
NumLock: [0xff7f],
|
||||
PageDown: [0xff56],
|
||||
PageUp: [0xff55],
|
||||
Pause: [0xff13],
|
||||
Play: [0xfd16],
|
||||
PreviousCandidate: [0xff3e],
|
||||
PrintScreen: [0xfd1d],
|
||||
Redo: [0xff66],
|
||||
Right: [0xff53],
|
||||
RomanCharacters: null,
|
||||
Scroll: [0xff14],
|
||||
Select: [0xff60],
|
||||
Separator: [0xffac],
|
||||
Shift: [0xffe1, 0xffe1, 0xffe2],
|
||||
SingleCandidate: [0xff3c],
|
||||
Super: [0xffeb, 0xffeb, 0xffec],
|
||||
Tab: [0xff09],
|
||||
Up: [0xff52],
|
||||
Undo: [0xff65],
|
||||
Win: [0xffeb],
|
||||
Zenkaku: [0xff28],
|
||||
ZenkakuHankaku: [0xff2a]
|
||||
};
|
||||
|
||||
const OSK_keyMappings: [string, number, string, string, number][] = [
|
||||
['!', 49, 'Digit1', '!', 0],
|
||||
['#', 51, 'Digit3', '#', 0],
|
||||
['$', 52, 'Digit4', '$', 0],
|
||||
['%', 53, 'Digit5', '%', 0],
|
||||
['&', 55, 'Digit7', '&', 0],
|
||||
["'", 222, 'Quote', "'", 0],
|
||||
['(', 57, 'Digit9', '(', 0],
|
||||
[')', 48, 'Digit0', ')', 0],
|
||||
['*', 56, 'Digit8', '*', 0],
|
||||
['+', 187, 'Equal', '+', 0],
|
||||
[',', 188, 'Comma', ',', 0],
|
||||
['-', 189, 'Minus', '-', 0],
|
||||
['.', 190, 'Period', '.', 0],
|
||||
['/', 191, 'Slash', '/', 0],
|
||||
['0', 48, 'Digit0', '0', 0],
|
||||
['1', 49, 'Digit1', '1', 0],
|
||||
['2', 50, 'Digit2', '2', 0],
|
||||
['3', 51, 'Digit3', '3', 0],
|
||||
['4', 52, 'Digit4', '4', 0],
|
||||
['5', 53, 'Digit5', '5', 0],
|
||||
['6', 54, 'Digit6', '6', 0],
|
||||
['7', 55, 'Digit7', '7', 0],
|
||||
['8', 56, 'Digit8', '8', 0],
|
||||
['9', 57, 'Digit9', '9', 0],
|
||||
[':', 186, 'Semicolon', ':', 0],
|
||||
[';', 186, 'Semicolon', ';', 0],
|
||||
['<', 188, 'Comma', '<', 0],
|
||||
['=', 187, 'Equal', '=', 0],
|
||||
['>', 190, 'Period', '>', 0],
|
||||
['?', 191, 'Slash', '?', 0],
|
||||
['@', 50, 'Digit2', '@', 0],
|
||||
['A', 65, 'KeyA', 'A', 0],
|
||||
['B', 66, 'KeyB', 'B', 0],
|
||||
['C', 67, 'KeyC', 'C', 0],
|
||||
['D', 68, 'KeyD', 'D', 0],
|
||||
['E', 69, 'KeyE', 'E', 0],
|
||||
['F', 70, 'KeyF', 'F', 0],
|
||||
['G', 71, 'KeyG', 'G', 0],
|
||||
['H', 72, 'KeyH', 'H', 0],
|
||||
['I', 73, 'KeyI', 'I', 0],
|
||||
['J', 74, 'KeyJ', 'J', 0],
|
||||
['K', 75, 'KeyK', 'K', 0],
|
||||
['L', 76, 'KeyL', 'L', 0],
|
||||
['M', 77, 'KeyM', 'M', 0],
|
||||
['N', 78, 'KeyN', 'N', 0],
|
||||
['O', 79, 'KeyO', 'O', 0],
|
||||
['P', 80, 'KeyP', 'P', 0],
|
||||
['Q', 81, 'KeyQ', 'Q', 0],
|
||||
['R', 82, 'KeyR', 'R', 0],
|
||||
['S', 83, 'KeyS', 'S', 0],
|
||||
['T', 84, 'KeyT', 'T', 0],
|
||||
['U', 85, 'KeyU', 'U', 0],
|
||||
['V', 86, 'KeyV', 'V', 0],
|
||||
['W', 87, 'KeyW', 'W', 0],
|
||||
['X', 88, 'KeyX', 'X', 0],
|
||||
['Y', 89, 'KeyY', 'Y', 0],
|
||||
['Z', 90, 'KeyZ', 'Z', 0],
|
||||
['[', 219, 'BracketLeft', '[', 0],
|
||||
['\\', 220, 'Backslash', '\\', 0],
|
||||
[']', 221, 'BracketRight', ']', 0],
|
||||
['^', 54, 'Digit6', '^', 0],
|
||||
['_', 189, 'Minus', '_', 0],
|
||||
['`', 192, 'Backquote', '`', 0],
|
||||
['a', 65, 'KeyA', 'a', 0],
|
||||
['b', 66, 'KeyB', 'b', 0],
|
||||
['c', 67, 'KeyC', 'c', 0],
|
||||
['d', 68, 'KeyD', 'd', 0],
|
||||
['e', 69, 'KeyE', 'e', 0],
|
||||
['f', 70, 'KeyF', 'f', 0],
|
||||
['g', 71, 'KeyG', 'g', 0],
|
||||
['h', 72, 'KeyH', 'h', 0],
|
||||
['i', 73, 'KeyI', 'i', 0],
|
||||
['j', 74, 'KeyJ', 'j', 0],
|
||||
['k', 75, 'KeyK', 'k', 0],
|
||||
['l', 76, 'KeyL', 'l', 0],
|
||||
['m', 77, 'KeyM', 'm', 0],
|
||||
['n', 78, 'KeyN', 'n', 0],
|
||||
['o', 79, 'KeyO', 'o', 0],
|
||||
['p', 80, 'KeyP', 'p', 0],
|
||||
['q', 81, 'KeyQ', 'q', 0],
|
||||
['r', 82, 'KeyR', 'r', 0],
|
||||
['s', 83, 'KeyS', 's', 0],
|
||||
['t', 84, 'KeyT', 't', 0],
|
||||
['u', 85, 'KeyU', 'u', 0],
|
||||
['v', 86, 'KeyV', 'v', 0],
|
||||
['w', 87, 'KeyW', 'w', 0],
|
||||
['x', 88, 'KeyX', 'x', 0],
|
||||
['y', 89, 'KeyY', 'y', 0],
|
||||
['z', 90, 'KeyZ', 'z', 0],
|
||||
['{', 219, 'BracketLeft', '{', 0],
|
||||
['{altleft}', 18, 'AltLeft', 'AltLeft', 1],
|
||||
['{altright}', 18, 'AltRight', 'AltRight', 2],
|
||||
['{arrowdown}', 40, 'ArrowDown', 'ArrowDown', 0],
|
||||
['{arrowleft}', 37, 'ArrowLeft', 'ArrowLeft', 0],
|
||||
['{arrowright}', 39, 'ArrowRight', 'ArrowRight', 0],
|
||||
['{arrowup}', 38, 'ArrowUp', 'ArrowUp', 0],
|
||||
['{backspace}', 8, 'Backspace', 'Backspace', 0],
|
||||
['{capslock}', 20, 'CapsLock', 'CapsLock', 0],
|
||||
['{controlleft}', 17, 'ControlLeft', 'ControlLeft', 1],
|
||||
['{controlright}', 17, 'ControlRight', 'ControlRight', 2],
|
||||
['{delete}', 46, 'Delete', 'Delete', 0],
|
||||
['{end}', 35, 'End', 'End', 0],
|
||||
['{enter}', 13, 'Enter', 'Enter', 0],
|
||||
['{escape}', 27, 'Escape', 'Escape', 0],
|
||||
['{f10}', 121, 'F10', 'F10', 0],
|
||||
['{f11}', 122, 'F11', 'F11', 0],
|
||||
['{f12}', 123, 'F12', 'F12', 0],
|
||||
['{f1}', 112, 'F1', 'F1', 0],
|
||||
['{f2}', 113, 'F2', 'F2', 0],
|
||||
['{f3}', 114, 'F3', 'F3', 0],
|
||||
['{f4}', 115, 'F4', 'F4', 0],
|
||||
['{f5}', 116, 'F5', 'F5', 0],
|
||||
['{f6}', 117, 'F6', 'F6', 0],
|
||||
['{f7}', 118, 'F7', 'F7', 0],
|
||||
['{f8}', 119, 'F8', 'F8', 0],
|
||||
['{f9}', 120, 'F9', 'F9', 0],
|
||||
['{home}', 36, 'Home', 'Home', 0],
|
||||
['{insert}', 45, 'Insert', 'Insert', 0],
|
||||
['{metaleft}', 91, 'OSLeft', 'OSLeft', 1],
|
||||
['{metaright}', 92, 'OSRight', 'OSRight', 2],
|
||||
['{numlock}', 144, 'NumLock', 'NumLock', 0],
|
||||
['{numpad0}', 96, 'Numpad0', 'Numpad0', 3],
|
||||
['{numpad1}', 97, 'Numpad1', 'Numpad1', 3],
|
||||
['{numpad2}', 98, 'Numpad2', 'Numpad2', 3],
|
||||
['{numpad3}', 99, 'Numpad3', 'Numpad3', 3],
|
||||
['{numpad4}', 100, 'Numpad4', 'Numpad4', 3],
|
||||
['{numpad5}', 101, 'Numpad5', 'Numpad5', 3],
|
||||
['{numpad6}', 102, 'Numpad6', 'Numpad6', 3],
|
||||
['{numpad7}', 103, 'Numpad7', 'Numpad7', 3],
|
||||
['{numpad8}', 104, 'Numpad8', 'Numpad8', 3],
|
||||
['{numpad9}', 105, 'Numpad9', 'Numpad9', 3],
|
||||
['{numpadadd}', 107, 'NumpadAdd', 'NumpadAdd', 3],
|
||||
['{numpaddecimal}', 110, 'NumpadDecimal', 'NumpadDecimal', 3],
|
||||
['{numpaddivide}', 111, 'NumpadDivide', 'NumpadDivide', 3],
|
||||
['{numpadenter}', 13, 'NumpadEnter', 'NumpadEnter', 3],
|
||||
['{numpadmultiply}', 106, 'NumpadMultiply', 'NumpadMultiply', 3],
|
||||
['{numpadsubtract}', 109, 'NumpadSubtract', 'NumpadSubtract', 3],
|
||||
['{pagedown}', 34, 'PageDown', 'PageDown', 0],
|
||||
['{pageup}', 33, 'PageUp', 'PageUp', 0],
|
||||
['{pause}', 19, 'Pause', 'Pause', 0],
|
||||
['{prtscr}', 44, 'PrintScreen', 'PrintScreen', 0],
|
||||
['{scrolllock}', 145, 'ScrollLock', 'ScrollLock', 0],
|
||||
['{shiftleft}', 16, 'ShiftLeft', 'ShiftLeft', 1],
|
||||
['{shiftright}', 16, 'ShiftRight', 'ShiftRight', 2],
|
||||
['{space}', 32, 'Space', 'Space', 0],
|
||||
['{tab}', 9, 'Tab', 'Tab', 0],
|
||||
['|', 220, 'Backslash', '|', 0],
|
||||
['}', 221, 'BracketRight', '}', 0],
|
||||
['~', 192, 'Backquote', '~', 0],
|
||||
['"', 222, 'Quote', '"', 0]
|
||||
];
|
39  client/src/mouse.ts  Normal file
@@ -0,0 +1,39 @@
function bitmaskContains(mask: number, bit: number): boolean {
	return ((mask >>> 0) & bit) == bit;
}

export class MouseState {
	left: boolean = false;
	middle: boolean = false;
	right: boolean = false;
	scrollDown: boolean = false;
	scrollUp: boolean = false;
	x: number = 0;
	y: number = 0;
	constructor() {}

	public Mask() {
		let mask = 0;
		if (this.left) mask |= 1;
		if (this.middle) mask |= 2;
		if (this.right) mask |= 4;
		if (this.scrollUp) mask |= 8;
		if (this.scrollDown) mask |= 16;
		return mask;
	}

	public InitFromMouseEvent(e: MouseEvent) {
		this.left = bitmaskContains(e.buttons, 1);
		this.right = bitmaskContains(e.buttons, 2);
		this.middle = bitmaskContains(e.buttons, 4);

		this.x = e.offsetX;
		this.y = e.offsetY;
	}

	public InitFromWheelEvent(ev: WheelEvent) {
		this.InitFromMouseEvent(ev as MouseEvent);
		if (ev.deltaY < 0) this.scrollUp = true;
		else if (ev.deltaY > 0) this.scrollDown = true;
	}
}
212  client/src/player_worker.ts  Normal file
@@ -0,0 +1,212 @@
import { NALUStream, SPS, Slice } from "h264-interp-utils";
import {
	PlayerInputMessage,
	PlayerConfiguredMessage,
	PlayerInitMessage,
	PlayerVideoDataMessage,
} from "./player_worker_messages";

// shared iface later :)
class CanvasRenderer {
	private canvas: OffscreenCanvas;
	private ctx: OffscreenCanvasRenderingContext2D;

	constructor(c: OffscreenCanvas) {
		this.canvas = c;
		this.ctx = this.canvas.getContext("2d")!;
	}

	draw(frame: VideoFrame) {
		this.canvas.width = frame.displayWidth;
		this.canvas.height = frame.displayHeight;
		this.ctx.drawImage(frame, 0, 0, frame.displayWidth, frame.displayHeight);
	}
}

// player logic
class VideoPlayer {
	private renderer: CanvasRenderer | null = null;
	private pendingFrame: VideoFrame | null = null;
	private decoder: VideoDecoder | null = null;
	private streamInitSPS: SPS | null = null;

	// only async for isConfigSupported
	async onData(value: ArrayBuffer) {
		let u8ar = new Uint8Array(value);

		let stream = new NALUStream(u8ar, {
			type: "annexB",
			strict: true,
		});

		let key = false;

		for (const nalu of stream) {
			// Try and obtain the base SPS required to initialize the video decoder
			// (if we didn't get one yet)
			if (this.streamInitSPS == null) {
				try {
					let sps = new SPS(nalu);
					console.log(
						`Got stream SPS (avc codec string: ${sps.MIME}), pic dims ${sps.picWidth}x${sps.picHeight}`
					);

					this.streamInitSPS = sps;
					await this.configureDecoder();
				} catch (e) {}
			}

			// Determine if this frame is a keyframe (I frame, because we don't send B frames) or not
			try {
				let slice = new Slice(nalu);
				if (slice.slice_type == 2 || slice.slice_type == 7) key = true;
				else key = false;
			} catch (e) {}
		}

		if (this.decoder && this.decoder.state == "configured") {
			stream.convertToPacket();

			let frame = new EncodedVideoChunk({
				type: key ? "key" : "delta",
				data: value,

				// munge the PTS so that frames are always
				// played as soon as possible
				timestamp: performance.now(),
				duration: performance.now(),

				// do the webcodecs typings seriously still not have this
				transfer: [value],
			} as any);

			this.decoder?.decode(frame);
		}
	}

	renderFrame(frame: VideoFrame) {
		if (!this.pendingFrame) {
			requestAnimationFrame(() => {
				this.renderer?.draw(this.pendingFrame!);
				this.pendingFrame?.close();
				this.pendingFrame = null;
			});
		} else {
			this.pendingFrame.close();
		}

		this.pendingFrame = frame;
	}

	initDecoder() {
		if (!this.decoder) {
			let self = this;
			this.decoder = new VideoDecoder({
				output(frame) {
					self.renderFrame(frame);
				},

				// TODO handle errors properly
				error(e) {},
			});
		}
	}

	async configureDecoder() {
		if (this.streamInitSPS) {
			let config: VideoDecoderConfig = {
				codec: this.streamInitSPS.MIME,
				// set some parameters that make sense
				optimizeForLatency: true,
				hardwareAcceleration: "prefer-hardware",
			};

			let configMessage: PlayerConfiguredMessage = {
				type: "configured",
				usingHwDecode: false,
			};

			// Probe for hardware acceleration support.
			let supportedConfig = await VideoDecoder.isConfigSupported(config);

			if (supportedConfig.supported) {
				console.log("Browser supports hardware preference");
				configMessage.usingHwDecode = true;
				this.decoder?.configure(supportedConfig.config!);
			} else {
				console.log(
					"Browser doesn't like hardware preference, removing it and trying again"
				);

				// Remove the property and try again.
				// If the browser STILL doesn't like it we give up.
				delete config.hardwareAcceleration;

				supportedConfig = await VideoDecoder.isConfigSupported(config);

				if (!supportedConfig.supported) return;

				configMessage.usingHwDecode = false;
				this.decoder?.configure(supportedConfig.config!);
			}

			self.postMessage(configMessage);
		}
	}

	async shutdownDecoder() {
		await this.decoder?.flush();
		this.decoder?.close();
		this.decoder = null;

		// clear resources
		if (this.pendingFrame) {
			this.pendingFrame.close();
			this.pendingFrame = null;
		}

		if (this.streamInitSPS) {
			this.streamInitSPS = null;
		}
	}

	hasRenderer() {
		return this.renderer !== null;
	}

	setRenderer(r: CanvasRenderer) {
		this.renderer = r;
	}
}

let player = new VideoPlayer();

async function onMessage(msg: PlayerInputMessage) {
	switch (msg.type) {
		case "init":
			if (!player.hasRenderer())
				player.setRenderer(
					new CanvasRenderer((msg as PlayerInitMessage).canvas)
				);
			break;

		case "init-decoder":
			player.initDecoder();
			break;

		case "data":
			await player.onData((msg as PlayerVideoDataMessage).data);
			break;

		case "shutdown-decoder":
			await player.shutdownDecoder();
			break;
	}
}

self.addEventListener(
	"message",
	async (msg: MessageEvent<PlayerInputMessage>) => {
		return onMessage(msg.data);
	}
);
42  client/src/player_worker_messages.ts  Normal file
@@ -0,0 +1,42 @@
// Shared between the main thread and the player worker
export type PlayerInputMessageKind =
	| "init"
	| "init-decoder"
	| "data"
	| "shutdown-decoder";
export type PlayerOutputMessageKind = "configured";

export interface PlayerMessage<K> {
	type: K;
}

export type PlayerInputMessage = PlayerMessage<PlayerInputMessageKind>;
export type PlayerOutputMessage = PlayerMessage<PlayerOutputMessageKind>;

// input to the worker

export interface PlayerInitMessage extends PlayerInputMessage {
	type: "init";
	canvas: OffscreenCanvas;
}

export interface PlayerInitDecoderMessage extends PlayerInputMessage {
	type: "init-decoder";
}

export interface PlayerVideoDataMessage extends PlayerInputMessage {
	type: "data";
	data: ArrayBuffer;
}

export interface PlayerShutdownDecoderMessage extends PlayerInputMessage {
	type: "shutdown-decoder";
}

// output from the worker

export interface PlayerConfiguredMessage extends PlayerOutputMessage {
	type: "configured";
	// true if the player was able to handshake hardware decoding; false if it could not
	usingHwDecode: boolean;
}
13  client/src/style.css  Normal file
@@ -0,0 +1,13 @@
html {
	font-family: 'Lucida Sans', 'Lucida Sans Regular', 'Lucida Grande', 'Lucida Sans Unicode', Geneva, Verdana, sans-serif;
	background-color: rgb(55, 55, 55);
	color: rgb(158, 150, 149);
}

.hw-good {
	color: rgb(32, 170, 32) !important;
}

.hw-bad {
	color: rgb(170, 32, 32) !important;
}
2984  client/yarn.lock  Normal file

File diff suppressed because it is too large
1496  server/Cargo.lock  generated  Normal file

File diff suppressed because it is too large
23  server/Cargo.toml  Normal file
@@ -0,0 +1,23 @@
[package]
name = "vncstream_server"
version = "0.1.0"
edition = "2021"

[dependencies]

anyhow = "1.0.86"

# vnc
vnc-rs = { git = "https://github.com/computernewb/vnc-rs.git" }
tokio = { version = "1.39.3", features = ["full"] }

# ws
axum = { version = "0.7.5", features = ["ws", "macros"] }
futures = "0.3"
futures-util = { version = "0.3", default-features = false, features = ["sink", "std"] }

# ffmpeg
ffmpeg-the-third = "2.0.1"
rand = "0.8.5"
serde = "1.0.209"
serde_json = "1.0.128"
176  server/src/encoder_thread.rs  Normal file
@@ -0,0 +1,176 @@
use std::{
	sync::{Arc, Mutex},
	time::Duration,
};
use tokio::sync::mpsc::{self, error::TryRecvError};

pub enum EncodeThreadInput {
	Init { size: crate::types::Size },
	ForceKeyframe,
	Shutdown,
	SendFrame,
}

#[derive(Clone)]
pub enum EncodeThreadOutput {
	Frame { packet: ffmpeg_the_third::Packet },
}

#[inline]
fn set_frame_flags(frame: &mut ffmpeg_the_third::Frame, force_keyframe: bool) {
	unsafe {
		if force_keyframe {
			//println!("frame {frame_number} will be a keyframe");
			(*frame.as_mut_ptr()).pict_type =
				ffmpeg_the_third::sys::AVPictureType::AV_PICTURE_TYPE_I;
			(*frame.as_mut_ptr()).flags = ffmpeg_the_third::sys::AV_FRAME_FLAG_KEY;
			(*frame.as_mut_ptr()).key_frame = 1;
		} else {
			(*frame.as_mut_ptr()).pict_type =
				ffmpeg_the_third::sys::AVPictureType::AV_PICTURE_TYPE_NONE;
			(*frame.as_mut_ptr()).flags = 0i32;
			(*frame.as_mut_ptr()).key_frame = 0;
		}
	}
}

fn encoder_thread_main(
	mut rx: mpsc::Receiver<EncodeThreadInput>,
	tx: mpsc::Sender<EncodeThreadOutput>,
	frame: &Arc<Mutex<Option<ffmpeg_the_third::frame::Video>>>,
) {
	let mut packet = ffmpeg_the_third::Packet::empty();

	let mut encoder: Option<crate::ffmpeg::H264Encoder> = None;
	let mut sws = None;

	let mut yuv_frame = None;

	let mut frame_number = 0usize;
	let mut force_keyframe = false;

	println!("encoder thread spawned");

	loop {
		match rx.try_recv() {
			Ok(msg) => match msg {
				EncodeThreadInput::Init { size } => {
					frame_number = 0;

					if force_keyframe {
						force_keyframe = false;
					}

					yuv_frame = Some(ffmpeg_the_third::frame::Video::new(
						ffmpeg_the_third::format::Pixel::YUV420P,
						size.clone().width,
						size.clone().height,
					));

					sws = Some(
						ffmpeg_the_third::software::converter(
							size.clone().into(),
							ffmpeg_the_third::format::Pixel::BGRA,
							ffmpeg_the_third::format::Pixel::YUV420P,
						)
						.expect("Failed to create SWS conversion context"),
					);

					encoder = Some(
						crate::ffmpeg::H264Encoder::new(size, 60, 3 * (1000 * 1000))
							.expect("Failed to create encoder"),
					);
				}

				EncodeThreadInput::Shutdown => {
					if encoder.is_some() {
						let enc = encoder.as_mut().unwrap();

						enc.send_eof();
						enc.receive_packet(&mut packet)
							.expect("failed to recv eof packet");

						unsafe {
							if !packet.is_empty() {
								let _ = tx.blocking_send(EncodeThreadOutput::Frame {
									packet: packet.clone(),
								});
							}
						}
					}

					break;
				}

				EncodeThreadInput::ForceKeyframe => {
					force_keyframe = true;
				}

				EncodeThreadInput::SendFrame => {
					let enc = encoder.as_mut().unwrap();

					// let's encode a frame
					let producer_frame_locked = frame.lock().expect("Couldn't lock producer frame");

					let producer_frame = producer_frame_locked.as_ref().expect("NOOOO");
					let mut_yuv_frame = yuv_frame.as_mut().unwrap();

					let sws_mut = sws.as_mut().unwrap();

					// scale
					sws_mut
						.run(producer_frame, mut_yuv_frame)
						.expect("Failed to convert producer frame to YUV");

					// set the right flags!!
					set_frame_flags(mut_yuv_frame, force_keyframe);

					unsafe {
						(*mut_yuv_frame.as_mut_ptr()).pts = frame_number as i64;
					}

					enc.send_frame(mut_yuv_frame);

					enc.receive_packet(&mut packet)
						.expect("failed to recv packet");

					// If a packet was received dump it
					unsafe {
						if !packet.is_empty() {
							let _ = tx.blocking_send(EncodeThreadOutput::Frame {
								packet: packet.clone(),
							});
						}
					}

					frame_number += 1;

					if force_keyframe {
						force_keyframe = false;
					}
				}
			},

			Err(TryRecvError::Disconnected) => break,
			Err(TryRecvError::Empty) => {
				std::thread::sleep(Duration::from_millis(1));
			}
		}
	}
}

pub fn encoder_thread_spawn(
	frame: &Arc<Mutex<Option<ffmpeg_the_third::frame::Video>>>,
) -> (
	mpsc::Receiver<EncodeThreadOutput>,
	mpsc::Sender<EncodeThreadInput>,
) {
	let (in_tx, in_rx) = mpsc::channel(32);
	let (out_tx, out_rx) = mpsc::channel(32);

	let clone = Arc::clone(frame);

	std::thread::spawn(move || encoder_thread_main(in_rx, out_tx, &clone));

	(out_rx, in_tx)
}
116  server/src/ffmpeg.rs  Normal file
@@ -0,0 +1,116 @@
use anyhow::Context;
use ffmpeg::error::EAGAIN;
use ffmpeg_the_third as ffmpeg;

use ffmpeg::codec as lavc; // lavc

use crate::types::Size;

/// this is required for libx264 to like. Work
pub fn create_context_from_codec(codec: ffmpeg::Codec) -> Result<lavc::Context, ffmpeg::Error> {
	unsafe {
		let context = ffmpeg::sys::avcodec_alloc_context3(codec.as_ptr());
		if context.is_null() {
			return Err(ffmpeg::Error::Unknown);
		}

		let context = lavc::Context::wrap(context, None);
		Ok(context)
	}
}

/// A simple H.264 encoder.
pub struct H264Encoder {
	encoder: ffmpeg::encoder::video::Encoder,
}

impl H264Encoder {
	pub fn new(size: Size, max_framerate: u32, bitrate: usize) -> anyhow::Result<Self> {
		let encoder = ffmpeg::encoder::find(lavc::Id::H264).expect("could not find libx264");

		let mut video_encoder_context = create_context_from_codec(encoder)?.encoder().video()?;

		let gop = /*if max_framerate / 2 != 0 {
			max_framerate / 2
		} else {
			max_framerate
		} */
		i32::MAX as u32;

		video_encoder_context.set_width(size.width);
		video_encoder_context.set_height(size.height);
		video_encoder_context.set_frame_rate(Some(ffmpeg::Rational(1, max_framerate as i32)));

		video_encoder_context.set_bit_rate(bitrate);
		//video_encoder_context.set_max_bit_rate(bitrate);

		// qp TODO:
		//video_encoder_context.set_qmax(30);
		//video_encoder_context.set_qmin(35);

		video_encoder_context.set_time_base(ffmpeg::Rational(1, max_framerate as i32).invert());
		video_encoder_context.set_format(ffmpeg::format::Pixel::YUV420P);

		video_encoder_context.set_gop(gop);
		video_encoder_context.set_max_b_frames(0);

		unsafe {
			(*video_encoder_context.as_mut_ptr()).delay = 0;
			(*video_encoder_context.as_mut_ptr()).refs = 0;
		}

		let threads = 4;
		println!("using {threads} threads to encode");

		// frame-level threading causes [N] frames of latency
		// so we use slice-level threading to reduce the latency
		// as much as possible while still using it
		video_encoder_context.set_threading(ffmpeg::threading::Config {
			kind: ffmpeg::threading::Type::Slice,
			count: threads,
		});

		let mut dict = ffmpeg::Dictionary::new();
		dict.set("tune", "zerolatency");
		dict.set("preset", "veryfast");
		dict.set("profile", "main");
		// TODO:
		dict.set("crf", "43");
		dict.set("crf_max", "48");

		let encoder = video_encoder_context
			.open_as_with(encoder, dict)
			.with_context(|| "While opening x264 video codec")?;

		Ok(Self { encoder: encoder })
	}

	pub fn send_frame(&mut self, frame: &ffmpeg::Frame) {
		self.encoder.send_frame(frame).unwrap();
	}

	pub fn send_eof(&mut self) {
		self.encoder.send_eof().unwrap();
	}

	// Should this return a Result<ControlFlow> so we can make it easier to know when to continue?
	pub fn receive_packet(&mut self, packet: &mut ffmpeg::Packet) -> anyhow::Result<()> {
		loop {
			match self.encoder.receive_packet(packet) {
				Ok(_) => break,
				Err(ffmpeg::Error::Other { errno }) => {
					if errno != EAGAIN {
						return Err(ffmpeg::Error::Other { errno: errno }.into());
					} else {
						// EAGAIN is not fatal, and simply means
						// we should just try again
						break;
					}
				}
				Err(e) => return Err(e.into()),
			}
		}

		Ok(())
	}
}
267  server/src/main.rs  Normal file
@@ -0,0 +1,267 @@
mod encoder_thread;
mod ffmpeg;
mod surface;
mod types;
mod vnc_engine;

use std::sync::{Arc, Mutex};

use rand::distributions::DistString;
use tokio::sync::{broadcast, mpsc};
use vnc_engine::VncMessageOutput;

use std::net::SocketAddr;

use axum::{
	extract::{
		connect_info::ConnectInfo,
		ws::{Message, WebSocket, WebSocketUpgrade},
		State,
	},
	response::IntoResponse,
	routing::get,
	Router,
};

use futures::{sink::SinkExt, stream::StreamExt};

#[derive(Clone, Debug)]
enum WsMessage {
	Text(String),
	Buffer(Vec<u8>),
}

struct AppState {
	/// Channel for sending things to the VNC engine
	/// should later be used for control
	engine_tx: mpsc::Sender<vnc_engine::VncMessageInput>,

	websocket_broadcast_tx: broadcast::Sender<WsMessage>,
}

impl AppState {
	fn new(engine_tx: mpsc::Sender<vnc_engine::VncMessageInput>) -> Self {
		let (chat_tx, _chat_rx) = broadcast::channel(10);
		Self {
			engine_tx: engine_tx,
			websocket_broadcast_tx: chat_tx,
		}
	}
}

#[tokio::main(flavor = "multi_thread", worker_threads = 4)]
async fn main() -> anyhow::Result<()> {
	let surface = Arc::new(Mutex::new(surface::Surface::new()));

	let (engine_output_tx, mut engine_output_rx) = mpsc::channel(32);
	let (engine_input_tx, engine_input_rx) = mpsc::channel(16);

	let state = Arc::new(AppState::new(engine_input_tx));

	let app: Router<()> = Router::new()
		.route(
			"/",
			get(
				|ws: WebSocketUpgrade,
				 info: ConnectInfo<SocketAddr>,
				 state: State<Arc<AppState>>| async move {
					ws_handler(ws, info, state).await
				},
			),
		)
		.with_state(state.clone());

	let mut engine = vnc_engine::Client::new(engine_output_tx, engine_input_rx, surface);

	let tcp_listener = tokio::net::TcpListener::bind("0.0.0.0:4940")
		.await
		.expect("failed to listen");

	let vnc_engine_handle = tokio::spawn(async move {
		let addr = vnc_engine::Address::Tcp("10.16.0.1:5930".parse().expect("its over"));
		//let addr = vnc_engine::Address::Tcp("127.0.0.1:6930".parse().expect("its over"));
		engine.connect_and_run(addr).await
	});

	let state_clone = Arc::clone(&state);
	tokio::spawn(async move {
		while let Some(msg) = engine_output_rx.recv().await {
			match msg {
				VncMessageOutput::Connect => {
					println!("connected")
				}

				VncMessageOutput::Disconnect => {
					println!("disconnect")
				}

				VncMessageOutput::FramebufferUpdate(vec) => {
					let _ = state_clone
						.websocket_broadcast_tx
						.send(WsMessage::Buffer(vec));
				}
				_ => {}
			}
		}
	});

	let (res1, res2) = tokio::join!(
		axum::serve(
			tcp_listener,
			app.into_make_service_with_connect_info::<SocketAddr>(),
		),
		vnc_engine_handle
	);

	Ok(())
}

async fn ws_handler(
	ws: WebSocketUpgrade,
	ConnectInfo(addr): ConnectInfo<SocketAddr>,

	State(state): State<Arc<AppState>>,
) -> impl IntoResponse {
	// finalize the upgrade process by returning upgrade callback.
	// we can customize the callback by sending additional info such as address.
	ws.on_upgrade(move |socket| handle_socket(socket, addr, state))
}

/// Actual websocket statemachine (one will be spawned per connection)
async fn handle_socket(socket: WebSocket, who: SocketAddr, state: Arc<AppState>) {
	let (mut sender, mut receiver) = socket.split();

	// Force a ws connection to mean a keyframe
	let _ = state
		.engine_tx
		.send(vnc_engine::VncMessageInput::ForceKeyframe)
		.await;

	// random username
	let username: Arc<String> =
		Arc::new(rand::distributions::Alphanumeric.sample_string(&mut rand::thread_rng(), 16));

	println!("{username} ({who}) connected.");

	let send_clone = Arc::clone(&state);
	let mut send_task = tokio::spawn(async move {
		let mut sub = send_clone.websocket_broadcast_tx.subscribe();

		while let Ok(msg) = sub.recv().await {
			match msg {
				WsMessage::Text(c) => {
					let _ = sender.send(Message::Text(c)).await;
				}

				WsMessage::Buffer(buffer) => {
					let _ = sender.send(Message::Binary(buffer)).await;
				}
			}
		}
	});

	let username_clone = Arc::clone(&username);

	let mut recv_task = tokio::spawn(async move {
		while let Some(Ok(msg)) = receiver.next().await {
			match msg {
				Message::Text(msg) => {
					// println!("{}", msg);
					if let Ok(json) = serde_json::from_str::<serde_json::Value>(&msg) {
						if !json["type"].is_string() {
							break;
						}

						match json["type"].as_str().unwrap() {
							"chat" => {
								if !json["msg"].is_string() {
									break;
								}

								let send = serde_json::json!({
									"type": "chat",
									"username": *username_clone,
									"msg": json["msg"].as_str().unwrap()
								});

								state
									.websocket_broadcast_tx
									.send(WsMessage::Text(
										serde_json::to_string(&send).expect("penis"),
									))
									.expect("boom");

								continue;
							}

							"key" => {
								if !json["keysym"].is_number() {
									break;
								}

								if !json["pressed"].is_number() {
									break;
								}

								let keysym = json["keysym"].as_u64().unwrap() as u32;
								let pressed = json["pressed"].as_u64().unwrap() == 1;

								let _ = state
									.engine_tx
									.send(vnc_engine::VncMessageInput::KeyEvent {
										keysym: keysym,
										pressed: pressed,
									})
									.await;
							}

							"mouse" => {
								if json["x"].as_u64().is_none() {
									break;
								}

								if json["y"].as_u64().is_none() {
									break;
								}

								if json["mask"].as_u64().is_none() {
									break;
								}

								let x = json["x"].as_u64().unwrap() as u32;
								let y = json["y"].as_u64().unwrap() as u32;
								let mask = json["mask"].as_u64().unwrap() as u8;

								let _ = state
									.engine_tx
									.send(vnc_engine::VncMessageInput::MouseEvent {
										pt: types::Point { x: x, y: y },
										buttons: mask,
									})
									.await;
							}
							_ => {}
						}
					} else {
						break;
					}
				}
				Message::Close(_) => break,
				_ => {}
			}
		}
	});

	tokio::select! {
		_ = (&mut send_task) => {

			recv_task.abort();
		},

		_ = (&mut recv_task) => {
			send_task.abort();
		}
	}

	println!("{username} ({who}) left.");
}
74  server/src/surface.rs  Normal file
@@ -0,0 +1,74 @@
//! A rewrite of CollabVM 3.0's Surface primitive in Rust.
//! Note that thread safety has been removed from this implementation,
//! since Rust chooses a different type-based model for thread safety
//! that the implicitly thread-safe surfaces used previously just wouldn't
//! mesh very well with.

use super::types::*;
use std::alloc;

/// Allocates a boxed slice.
/// Unlike a [Vec<_>], this can't grow,
/// but is just as safe to use, and slightly more predictable.
pub fn alloc_boxed_slice<T: Sized>(len: usize) -> Box<[T]> {
	assert_ne!(len, 0, "length cannot be 0");
	let layout = alloc::Layout::array::<T>(len).expect("?");

	let ptr = unsafe { alloc::alloc_zeroed(layout) as *mut T };

	let slice = core::ptr::slice_from_raw_parts_mut(ptr, len);

	unsafe { Box::from_raw(slice) }
}

/// A BGRA-format surface.
pub struct Surface {
	buffer: Option<Box<[u32]>>,
	pub size: Size,
}

impl Surface {
	pub fn new() -> Self {
		Self {
			buffer: None,
			size: Size {
				width: 0,
				height: 0,
			},
		}
	}

	pub fn resize(&mut self, size: Size) {
		self.size = size;

		self.buffer = Some(alloc_boxed_slice(self.size.linear()));
	}

	pub fn get_buffer(&mut self) -> &mut [u32] {
		let buf = self.buffer.as_mut().unwrap();
		&mut *buf
	}

	/// Blits a buffer to this surface.
	pub fn blit_buffer(&mut self, src_at: Rect, data: &[u32]) {
		let mut off = 0;

		let buf = self.buffer.as_mut().unwrap();
		let buf_slice = &mut *buf;

		for y in src_at.y..src_at.y + src_at.height {
			let src = &data[off..off + src_at.width as usize];
			let dest_start_offset = (y as usize * self.size.width as usize) + src_at.x as usize;

			let dest =
				&mut buf_slice[dest_start_offset..dest_start_offset + src_at.width as usize];

			// This forces alpha to always be 0xff. I *could* probably do this in a clearer way though :(
			for (dest, src_item) in dest.iter_mut().zip(src.iter()) {
				*dest = ((*src_item) & 0x00ffffff) | 0xff000000;
			}

			off += src_at.width as usize;
		}
	}
}
54  server/src/types.rs  Normal file
|
@ -0,0 +1,54 @@
|
|||
//! Shared types.

#[derive(Clone, Debug)]
pub struct Rect {
    pub x: u32,
    pub y: u32,
    pub width: u32,
    pub height: u32,
}

impl From<vnc::Rect> for Rect {
    fn from(value: vnc::Rect) -> Self {
        Self {
            x: value.x as u32,
            y: value.y as u32,
            width: value.width as u32,
            height: value.height as u32,
        }
    }
}

#[derive(Debug)]
pub struct Point {
    pub x: u32,
    pub y: u32,
}

#[derive(Clone, Debug)]
pub struct Size {
    pub width: u32,
    pub height: u32,
}

impl Size {
    /// Returns the linear size (width * height), e.g. for sizing a pixel buffer.
    pub fn linear(&self) -> usize {
        (self.width * self.height) as usize
    }
}

impl From<(u32, u32)> for Size {
    fn from(value: (u32, u32)) -> Self {
        Size {
            width: value.0,
            height: value.1,
        }
    }
}

// Implementing `From` on the tuple gives us `Into<(u32, u32)>` for free and is
// the more idiomatic direction to implement.
impl From<Size> for (u32, u32) {
    fn from(value: Size) -> (u32, u32) {
        (value.width, value.height)
    }
}
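
// Conversion sketch (illustrative): Size round-trips through a (u32, u32) tuple.
//
//     let size = Size::from((1280, 720));
//     let (w, h): (u32, u32) = size.into();
//     assert_eq!((w, h), (1280, 720));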
311
server/src/vnc_engine.rs
Normal file
@@ -0,0 +1,311 @@
//! Native-side VNC client. This is usually run in another OS thread.

use crate::encoder_thread::{encoder_thread_spawn, EncodeThreadOutput};

use super::surface::Surface;
use super::types::*;

use std::{
    sync::{Arc, Mutex},
    time::Duration,
};

use tokio::{
    io::{AsyncRead, AsyncWrite},
    net::{TcpStream, UnixStream},
    sync::mpsc::{error::TryRecvError, Receiver, Sender},
};

use vnc::{ClientKeyEvent, ClientMouseEvent, PixelFormat, VncConnector, VncEvent, X11Event};

pub enum Address {
    Tcp(std::net::SocketAddr),
    Unix(std::path::PathBuf),
}

/// Output message
#[derive(Debug)]
pub enum VncMessageOutput {
    Connect,
    Disconnect,
    // this will contain a single Annex B packet
    FramebufferUpdate(Vec<u8>),
    FramebufferResized(Size),
}

#[derive(Debug)]
pub enum VncMessageInput {
    KeyEvent {
        keysym: u32,
        pressed: bool,
    },
    MouseEvent {
        pt: Point,
        buttons: u8,
    },
    Disconnect,

    /// Forces a keyframe to occur in the video stream.
    ForceKeyframe,
}
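
// Consumption sketch (illustrative; the receiving side lives elsewhere and may
// differ, `out_rx` is a hypothetical receiver name):
//
//     while let Some(msg) = out_rx.recv().await {
//         match msg {
//             VncMessageOutput::FramebufferUpdate(nal) => { /* forward to clients */ }
//             VncMessageOutput::FramebufferResized(size) => { /* announce new size */ }
//             VncMessageOutput::Connect | VncMessageOutput::Disconnect => {}
//         }
//     }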

pub struct Client {
    surf: Arc<Mutex<Surface>>,

    out_tx: Sender<VncMessageOutput>,
    in_rx: Receiver<VncMessageInput>,
    rects_in_frame: Vec<Rect>,
}

impl Client {
    /// Creates a new VNC client.
    pub fn new(
        out_tx: Sender<VncMessageOutput>,
        in_rx: Receiver<VncMessageInput>,
        surface: Arc<Mutex<Surface>>,
    ) -> Box<Self> {
        Box::new(Self {
            surf: surface,
            out_tx,
            in_rx,
            rects_in_frame: Vec::new(),
        })
    }
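
    // Wiring sketch (illustrative; the channel sizes, address, and spawn site
    // are assumptions, not taken from this commit):
    //
    //     let surface = Arc::new(Mutex::new(Surface::new()));
    //     let (out_tx, out_rx) = tokio::sync::mpsc::channel(64);
    //     let (in_tx, in_rx) = tokio::sync::mpsc::channel(64);
    //     let mut client = Client::new(out_tx, in_rx, surface.clone());
    //     tokio::spawn(async move {
    //         let _ = client
    //             .connect_and_run(Address::Tcp("127.0.0.1:5900".parse().unwrap()))
    //             .await;
    //     });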

    pub async fn connect_and_run(&mut self, address: Address) -> anyhow::Result<()> {
        match address {
            Address::Tcp(addr) => {
                let stream = TcpStream::connect(addr).await?;
                self.connect_and_run_impl(stream).await?
            }
            Address::Unix(uds) => {
                let stream = UnixStream::connect(uds).await?;
                self.connect_and_run_impl(stream).await?
            }
        }

        Ok(())
    }

    async fn connect_and_run_impl<S>(&mut self, stream: S) -> anyhow::Result<()>
    where
        S: AsyncRead + AsyncWrite + Unpin + Send + Sync + 'static,
    {
        // the builder pattern should have stayed in java
        let vnc = VncConnector::new(stream)
            .set_auth_method(async move { Ok("".into()) })
            //.add_encoding(vnc::VncEncoding::Tight)
            //.add_encoding(vnc::VncEncoding::Zrle)
            //.add_encoding(vnc::VncEncoding::CopyRect)
            .add_encoding(vnc::VncEncoding::DesktopSizePseudo)
            .add_encoding(vnc::VncEncoding::Raw)
            .allow_shared(true)
            .set_pixel_format(PixelFormat::bgra())
            .build()?
            .try_start()
            .await?
            .finish()?;

        self.out_tx.send(VncMessageOutput::Connect).await?;

        // h.264 encoder related
        let frame: Arc<Mutex<Option<ffmpeg_the_third::frame::Video>>> = Arc::new(Mutex::new(None));
        let (mut encoder_rx, encoder_tx) = encoder_thread_spawn(&frame);
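
        // Each loop iteration: drain pending input events, poll the VNC
        // connection once, forward any encoded packet from the encoder
        // thread, and then sleep briefly to yield the CPU.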
        loop {
            // Pull an event and act on it. If none is pending, that's fine and we can just
            // move on to advancing the VNC client, but if the channel is closed, that means
            // we should disconnect.
            //
            // Note that we do not time out here because we will end up waiting for an event
            // later either way.
            match self.in_rx.try_recv() {
                Ok(val) => match val {
                    VncMessageInput::KeyEvent { keysym, pressed } => {
                        vnc.input(X11Event::KeyEvent(ClientKeyEvent {
                            keycode: keysym,
                            down: pressed,
                        }))
                        .await?;
                    }
                    VncMessageInput::MouseEvent { pt, buttons } => {
                        vnc.input(X11Event::PointerEvent(ClientMouseEvent {
                            position_x: pt.x as u16,
                            position_y: pt.y as u16,
                            // (sic) the field really is spelled "bottons" in the vnc crate
                            bottons: buttons,
                        }))
                        .await?;
                    }
                    VncMessageInput::Disconnect => break,

                    VncMessageInput::ForceKeyframe => {
                        encoder_tx
                            .send(crate::encoder_thread::EncodeThreadInput::ForceKeyframe)
                            .await?;
                        encoder_tx
                            .send(crate::encoder_thread::EncodeThreadInput::SendFrame)
                            .await?;
                    }
                },

                Err(TryRecvError::Empty) => {}

                // On disconnection from the client input channel
                // we just give up and disconnect early.
                Err(TryRecvError::Disconnected) => {
                    break;
                }
            }

            // Pull any event from the VNC connection and act on it.
            match vnc.poll_event().await {
                Ok(Some(e)) => {
                    match e {
                        VncEvent::SetResolution(res) => {
                            {
                                let mut lk = self.surf.lock().expect("couldn't lock Surface");
                                lk.resize(Size {
                                    width: res.width as u32,
                                    height: res.height as u32,
                                });
                            }

                            let cvm_size = Size {
                                width: res.width as u32,
                                height: res.height as u32,
                            };

                            // make a new frame for the encoder
                            {
                                let mut lk_frame = frame.lock().expect("couldn't lock frame");
                                *lk_frame = Some(ffmpeg_the_third::frame::Video::new(
                                    ffmpeg_the_third::format::Pixel::BGRA,
                                    cvm_size.width,
                                    cvm_size.height,
                                ));
                            }

                            encoder_tx
                                .send(crate::encoder_thread::EncodeThreadInput::Init {
                                    size: cvm_size.clone(),
                                })
                                .await?;

                            self.out_tx
                                .send(VncMessageOutput::FramebufferResized(cvm_size))
                                .await?;
                        }

                        // TODO: implement copyrect support in Surface
                        //VncEvent::Copy(dest_rect, src_rect) => {
                        //    TODO copy rect
                        //}
                        VncEvent::RawImage(rects) => {
                            let mut lk = self.surf.lock().expect("couldn't lock Surface");

                            for rect in rects.iter() {
                                let cvm_rect = Rect::from(rect.rect);

                                // Blit onto the surface. The rect data arrives as bytes,
                                // so reinterpret it as BGRA u32 pixels.
                                lk.blit_buffer(cvm_rect.clone(), unsafe {
                                    std::slice::from_raw_parts(
                                        rect.data.as_ptr() as *const u32,
                                        rect.data.len() / core::mem::size_of::<u32>(),
                                    )
                                });

                                self.rects_in_frame.push(cvm_rect);
                            }
                        }

                        _ => {}
                    }
                }

                // No events pending, so request a refresh and push what we got in the meantime.
                Ok(None) => {
                    vnc.input(X11Event::Refresh).await?;

                    if !self.rects_in_frame.is_empty() {
                        // We don't care which rects are there, but if none were present
                        // then we need not bother encoding anything.
                        //println!("vnc engine frame");

                        // Let's encode a frame.
                        // First we copy the current VNC framebuffer to the frame shared
                        // between us and the encoder thread.
                        {
                            let mut frame_locked =
                                frame.lock().expect("couldn't lock frame");

                            let mut_frame =
                                frame_locked.as_mut().expect("frame should be initialized");

                            let width = mut_frame.width();
                            let height = mut_frame.height();

                            let mut surf = self.surf.lock().expect("couldn't lock Surface");
                            let surf_buf = surf.get_buffer();

                            // NOTE: this assumes the ffmpeg frame is tightly packed,
                            // i.e. its stride equals width * 4 bytes.
                            let buf_ptr =
                                unsafe { (*(*mut_frame.as_mut_ptr()).buf[0]).data as *mut u32 };

                            for y in 0..height {
                                for x in 0..width {
                                    unsafe {
                                        let ofs = (y * width + x) as usize;
                                        *buf_ptr.add(ofs) = surf_buf[ofs];
                                    }
                                }
                            }
                        }

                        encoder_tx
                            .send(crate::encoder_thread::EncodeThreadInput::SendFrame)
                            .await?;

                        self.rects_in_frame.clear();
                    }

                    // send frame to encoder thread, pull encoded data back
                }

                // TODO: we might want to pass this error to js at some point
                Err(_e) => {
                    break;
                }
            }

            // Pull an encoded packet back from the encoder thread, if one is ready.
            match encoder_rx.try_recv() {
                Ok(msg) => {
                    match msg {
                        EncodeThreadOutput::Frame { mut packet } => {
                            let vec = {
                                let data = packet.data_mut().expect("packet should not be empty");
                                data.to_vec()
                            };

                            self.out_tx
                                .send(VncMessageOutput::FramebufferUpdate(vec))
                                .await?;
                        }
                    }
                }
                Err(TryRecvError::Empty) => {}
                _ => break,
            }

            // Sleep a little to give other tasks CPU time.
            tokio::time::sleep(Duration::from_millis(2)).await;
        }

        // Disconnect if we exit. We don't care about errors in this path.
        let _ = vnc.close().await;
        self.out_tx.send(VncMessageOutput::Disconnect).await?;

        Ok(())
    }
}