backend: Port to superqemu

Lily Tsuru 2024-07-24 04:22:31 -04:00
parent 7d5b7bb9d4
commit 203e8c258c
30 changed files with 4465 additions and 5297 deletions

BIN
.yarn/install-state.gz Normal file

Binary file not shown.

1
.yarnrc.yml Normal file

@@ -0,0 +1 @@
nodeLinker: node-modules


@@ -3,12 +3,9 @@
socket.computer, except not powered by socket.io anymore, and with many fewer bugs. This monorepo builds:
- The backend
- A QEMU VM runner package (feel free to steal it)
- Shared components
- The webapp (TODO)
## Building
```bash
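# (Hunk truncated here — a minimal build sketch, assuming the scripts in the
# root package.json later in this commit: `yarn build` runs build:service and build:frontend.)
yarn install
yarn build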


@@ -1,35 +1,29 @@
{
"name": "@socketcomputer/backend",
"version": "1.0.0",
"private": "true",
"description": "socket 2.0 backend",
"type": "module",
"scripts": {
"build": "parcel build src/index.ts --target node"
},
"author": "modeco80",
"license": "MIT",
"targets": {
"node":{
"context": "node",
"outputFormat": "esmodule"
}
"node": {
"context": "node",
"outputFormat": "esmodule"
}
},
"dependencies": {
"@computernewb/superqemu": "^0.1.0",
"@fastify/websocket": "^10.0.1",
"@socketcomputer/qemu": "*",
"@socketcomputer/shared": "*",
"canvas": "^2.11.2",
"fastify": "^4.26.2",
"mnemonist": "^0.39.8",
"canvas": "^2.11.2"
"mnemonist": "^0.39.8"
},
"devDependencies": {
"parcel": "^2.12.0",
"@types/ws": "^8.5.10"
"@types/ws": "^8.5.10",
"parcel": "^2.12.0"
}
}


@@ -2,7 +2,7 @@
// which are standardized across all crusttest slots.
// (This file has been bastardized for socket2)
import { QemuVmDefinition } from '@socketcomputer/qemu';
import { QemuVmDefinition } from '@computernewb/superqemu';
const kQemuPath = '/srv/collabvm/qemu/bin/qemu-system-x86_64';
@@ -63,6 +63,7 @@ export function Slot_PCDef(
return {
id: 'socketvm1',
command: qCommand
command: qCommand.join(' '),
snapshot: true
};
}
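For reference, a minimal sketch of the definition shape this hunk now produces for superqemu: `command` becomes a single joined string and `snapshot` is set on the definition itself. Only the fields visible in this diff are used; the QEMU arguments are placeholders.

```typescript
import { QemuVmDefinition } from '@computernewb/superqemu';

// Sketch only: mirrors what Slot_PCDef returns after this change.
const qCommand = ['/srv/collabvm/qemu/bin/qemu-system-x86_64', '-M', 'q35', '-m', '512'];

const def: QemuVmDefinition = {
	id: 'socketvm1',
	command: qCommand.join(' '),
	snapshot: true
};
```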


@@ -1,4 +1,4 @@
import { QemuVmDefinition, QemuVM, setSnapshot } from '@socketcomputer/qemu';
import { QemuVmDefinition, QemuVM } from '@computernewb/superqemu';
import { Slot_PCDef } from './SlotQemuDefs.js';
import { FastifyInstance, fastify, FastifyRequest } from 'fastify';
@@ -6,6 +6,7 @@ import * as fastifyWebsocket from '@fastify/websocket';
import { SocketVM } from './SocketVM.js';
import { VMUser } from './VMUser.js';
import pino from 'pino';
// CONFIG types (not used yet)
export type SocketComputerConfig_VM = {
@@ -26,6 +27,10 @@ export class SocketComputerServer {
exposeHeadRoutes: false
});
private logger = pino({
name: "Sc2Server"
});
Init() {
this.fastify.register(fastifyWebsocket.default);
this.fastify.register(async (app, _) => this.Routes(app), {});
@@ -33,7 +38,7 @@ export class SocketComputerServer {
async Listen() {
try {
console.log('Backend starting...');
this.logger.info('Backend starting...');
// create and start teh VMxorz!!!!
await this.InitVM();
@@ -43,6 +48,7 @@
port: 4050
});
} catch (err) {
this.logger.error(err, 'Error listening');
return;
}
}
@@ -56,7 +62,6 @@
this.vm = new SocketVM(new QemuVM(slotDef));
// Boot it up
setSnapshot(true);
await this.vm.Start();
}
@@ -75,6 +80,7 @@
return;
}
self.vm?.AddUser(new VMUser(connection, address));
});
}
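A minimal sketch of the pino pattern this diff adopts: a named logger, plain messages for lifecycle lines, and an error or context object bound as structured fields. The example values are placeholders; the call shapes match the lines above and the SocketVM diff below.

```typescript
import pino from 'pino';

// Named logger, as in SocketComputerServer above.
const logger = pino({ name: 'Sc2Server' });

logger.info('Backend starting...');

// Context object first, message second (same shape as the SocketVM diff below).
logger.info({ user: 'guest123', ip: '203.0.113.7' }, 'User joined');

try {
	throw new Error('EADDRINUSE');
} catch (err) {
	logger.error(err, 'Error listening');
}
```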


@@ -1,13 +1,14 @@
import { EventEmitter } from 'node:events';
import { TurnQueue, UserTimeTuple, kTurnTimeSeconds } from './TurnQueue.js';
import { VMUser } from './VMUser.js';
import { QemuDisplay, QemuVM, VMState } from '@socketcomputer/qemu';
import { QemuDisplay, QemuVM, VMState } from '@computernewb/superqemu';
import { ExtendableTimer } from './ExtendableTimer.js';
import { kMaxUserNameLength } from '@socketcomputer/shared';
import * as Shared from '@socketcomputer/shared';
import { Canvas } from 'canvas';
import pino from 'pino';
// for the maximum socket.io experience
const kCanvasJpegQuality = 0.25;
@@ -24,18 +25,22 @@ export class SocketVM extends EventEmitter {
private users: Array<VMUser> = [];
private queue: TurnQueue = new TurnQueue();
private logger = pino({
name: 'Sc2VM'
});
constructor(vm: QemuVM) {
super();
this.vm = vm;
this.timer.on('expired', async () => {
// bye bye!
console.log(`[SocketVM] VM timer expired, resetting..`);
this.logger.info(`[SocketVM] VM timer expired, resetting..`);
await this.vm.Stop();
});
this.timer.on('expiry-near', async () => {
console.log(`[SocketVM] VM timer expires in 1 minute.`);
this.logger.info(`[SocketVM] VM timer expires in 1 minute.`);
});
this.queue.on('turnQueue', (arr: Array<UserTimeTuple>) => {
@@ -97,7 +102,7 @@ export class SocketVM extends EventEmitter {
user.username = VMUser.GenerateName();
user.vm = this;
console.log(`[SocketVM] ${user.username} (IP ${user.address}) joined`);
this.logger.info({user: user.username, ip: user.address}, 'User joined');
await this.SendInitialScreen(user);
@@ -129,7 +134,7 @@ export class SocketVM extends EventEmitter {
}
async RemUser(user: VMUser) {
console.log(`[SocketVM] ${user.username} (IP ${user.address}) left`);
this.logger.info({user: user.username, ip: user.address}, 'User left');
this.users.splice(this.users.indexOf(user), 1);
this.queue.TryRemove(user);
@@ -148,7 +153,7 @@ export class SocketVM extends EventEmitter {
this.OnMessage(user, await Shared.MessageDecoder.ReadMessage(messageBuffer, false));
} catch (err) {
// Log the error and close the connection
console.log(`Error decoding message, closing connection: (user ${user.username}, ip ${user.address})`, err);
this.logger.error({ err: err, user: user.username, ip: user.address }, `Error decoding message, closing connection`);
user.connection.close();
return;
}
@@ -203,8 +208,7 @@ export class SocketVM extends EventEmitter {
let buffers = this.MakeFullScreenData();
for (let buffer of buffers)
await self.BroadcastBuffer(buffer);
for (let buffer of buffers) await self.BroadcastBuffer(buffer);
});
this.display?.on('rect', async (x: number, y: number, rect: ImageData) => {


@@ -1,28 +1,28 @@
{
"name": "socketcomputer-repo",
"private": "true",
"workspaces": [
"shared",
"backend",
"qemu",
"webapp"
],
"scripts": {
"build": "yarn build:service && yarn build:frontend",
"build:service": "npm -w shared run build && npm -w qemu run build && npm -w backend run build",
"build": "yarn build:service && yarn build:frontend",
"build:service": "npm -w shared run build && npm -w backend run build",
"build:frontend": "npm -w shared run build && npm -w webapp run build"
},
"dependencies": {
"canvas": "^2.11.2"
"canvas": "^2.11.2",
"pino": "^9.3.1"
},
"devDependencies": {
"parcel": "^2.12.0",
"@parcel/packager-ts": "2.12.0",
"@parcel/transformer-sass": "^2.12.0",
"@parcel/transformer-sass": "^2.12.0",
"@parcel/transformer-typescript-types": "2.12.0",
"@types/node": "^20.12.2",
"parcel": "^2.12.0",
"prettier": "^3.2.5",
"stream-http": "^3.1.0",
"typescript": "^5.4.3"
}
},
"packageManager": "yarn@4.3.1"
}


@@ -1,32 +0,0 @@
{
"name": "@socketcomputer/qemu",
"version": "1.0.0",
"private": "true",
"description": "QEMU runtime for socketcomputer backend",
"exports": "./dist/index.js",
"types": "./dist/index.d.ts",
"type": "module",
"scripts": {
"build": "parcel build src/index.ts --target node --target types"
},
"author": "",
"license": "MIT",
"targets": {
"types": {},
"node": {
"context": "node",
"isLibrary": true,
"outputFormat": "esmodule"
}
},
"dependencies": {
"canvas": "^2.11.2",
"execa": "^8.0.1",
"split": "^1.0.1"
},
"devDependencies": {
"parcel": "^2.12.0",
"@types/node": "^20.12.2",
"@types/split": "^1.0.5"
}
}


@@ -1,148 +0,0 @@
import { VncClient } from './rfb/client.js';
import { EventEmitter } from 'node:events';
import { Canvas, CanvasRenderingContext2D, createImageData } from 'canvas';
import { BatchRects, Size, Rect } from './QemuUtil.js';
const kQemuFps = 30;
export type VncRect = {
x: number;
y: number;
width: number;
height: number;
};
// events:
//
// 'resize' -> (w, h) -> done when resize occurs
// 'rect' -> (x, y, ImageData) -> framebuffer
// 'frame' -> () -> done at end of frame
export class QemuDisplay extends EventEmitter {
private displayVnc = new VncClient({
debug: false,
fps: kQemuFps,
encodings: [
VncClient.consts.encodings.raw,
//VncClient.consts.encodings.pseudoQemuAudio,
VncClient.consts.encodings.pseudoDesktopSize
// For now?
//VncClient.consts.encodings.pseudoCursor
]
});
private displayCanvas: Canvas = new Canvas(640, 480);
private displayCtx: CanvasRenderingContext2D = this.displayCanvas.getContext('2d');
private vncShouldReconnect: boolean = false;
private vncSocketPath: string;
constructor(socketPath: string) {
super();
this.vncSocketPath = socketPath;
this.displayVnc.on('connectTimeout', () => {
this.Reconnect();
});
this.displayVnc.on('authError', () => {
this.Reconnect();
});
this.displayVnc.on('disconnect', () => {
this.Reconnect();
});
this.displayVnc.on('closed', () => {
this.Reconnect();
});
this.displayVnc.on('firstFrameUpdate', () => {
// apparently this library is this good.
// at least it's better than the two others which exist.
this.displayVnc.changeFps(kQemuFps);
this.emit('connected');
this.displayCanvas.width = this.displayVnc.clientWidth;
this.displayCanvas.height = this.displayVnc.clientHeight;
this.emit('resize', this.displayVnc.clientWidth, this.displayVnc.clientHeight);
this.emit('rect', 0, 0, this.displayCtx.getImageData(0, 0, this.displayVnc.clientWidth, this.displayVnc.clientHeight));
this.emit('frame');
});
this.displayVnc.on('desktopSizeChanged', ({ width, height }) => {
this.emit('resize', width, height);
this.displayCanvas.width = width;
this.displayCanvas.height = height;
});
let rects: Rect[] = [];
this.displayVnc.on('rectUpdateProcessed', (rect) => {
rects.push(rect);
});
this.displayVnc.on('frameUpdated', (fb) => {
this.displayCtx.putImageData(createImageData(new Uint8ClampedArray(fb.buffer), this.displayVnc.clientWidth, this.displayVnc.clientHeight), 0, 0);
// TODO: optimize the rects a bit. using guacamole's cheap method
// of just flushing the whole screen if the area of all the updated rects gets too big
// might just work.
//for (const rect of rects) {
// this.emit('rect', rect.x, rect.y, this.displayCtx.getImageData(rect.x, rect.y, rect.width, rect.height));
//}
// cvmts batcher
let batched = BatchRects(this.Size(), rects);
this.emit('rect', batched.x, batched.y, this.displayCtx.getImageData(batched.x, batched.y, batched.width, batched.height));
rects = [];
this.emit('frame');
});
}
private Reconnect() {
if (this.displayVnc.connected) return;
if (!this.vncShouldReconnect) return;
// TODO: this should also give up after a max tries count
// if we fail after max tries, emit a event
this.displayVnc.connect({
path: this.vncSocketPath
});
}
Connect() {
this.vncShouldReconnect = true;
this.Reconnect();
}
Disconnect() {
this.vncShouldReconnect = false;
this.displayVnc.disconnect();
}
GetCanvas() {
return this.displayCanvas;
}
Size(): Size {
return {
width: this.displayVnc.clientWidth,
height: this.displayVnc.clientHeight
};
}
MouseEvent(x: number, y: number, buttons: number) {
this.displayVnc.sendPointerEvent(x, y, buttons);
}
KeyboardEvent(keysym: number, pressed: boolean) {
this.displayVnc.sendKeyEvent(keysym, pressed);
}
}
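A short sketch of consuming the event API documented at the top of this file, using this (now-removed) QemuDisplay directly; the VNC socket path is a placeholder.

```typescript
import { QemuDisplay } from './QemuDisplay.js';
import { ImageData } from 'canvas';

// Placeholder socket path; QemuVM derives the real one from the VM id.
const display = new QemuDisplay('/tmp/socket2-socketvm1-vnc');

display.on('resize', (w: number, h: number) => {
	console.log(`display resized to ${w}x${h}`);
});
display.on('rect', (x: number, y: number, data: ImageData) => {
	// blit `data` into your own canvas at (x, y)
});
display.on('frame', () => {
	// end of frame: flush whatever was batched above
});

display.Connect();
```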


@@ -1,73 +0,0 @@
// QEMU utility functions
// most of these are just for randomly generated/temporary files
import { execa } from 'execa';
import * as crypto from 'node:crypto';
export type Size = { width: number; height: number };
export type Rect = { height: number; width: number; x: number; y: number };
export function BatchRects(size: Size, rects: Array<Rect>): Rect {
var mergedX = size.width;
var mergedY = size.height;
var mergedHeight = 0;
var mergedWidth = 0;
// can't batch these
if(rects.length == 0) {
return {
x: 0,
y: 0,
width: size.width,
height: size.height
};
}
if(rects.length == 1) {
if(rects[0].width == size.width && rects[0].height == size.height) {
return rects[0];
}
}
rects.forEach((r) => {
if (r.x < mergedX) mergedX = r.x;
if (r.y < mergedY) mergedY = r.y;
});
rects.forEach(r => {
if (((r.height + r.y) - mergedY) > mergedHeight) mergedHeight = (r.height + r.y) - mergedY;
if (((r.width + r.x) - mergedX) > mergedWidth) mergedWidth = (r.width + r.x) - mergedX;
});
return {
x: mergedX,
y: mergedY,
width: mergedWidth,
height: mergedHeight
};
}
// Generates a random unicast/local MAC address.
export async function GenMacAddress(): Promise<string> {
return new Promise((res, rej) => {
crypto.randomBytes(6, (err, buf) => {
if (err) rej(err);
// Modify byte 0 to make this MAC address proper
let rawByte0 = buf.readUInt8(0);
rawByte0 &= ~0b00000011; // clear the multicast (I/G) and locally-administered (U/L) bits
rawByte0 |= 1 << 1; // always set the locally-administered bit; leave the multicast bit clear so the address stays unicast
buf.writeUInt8(rawByte0);
// this makes me wanna cry but it seems to be working
res(
buf
.toString('hex')
.split(/(.{2})/)
.filter((o) => o)
.join(':')
);
});
});
}
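A quick worked check of BatchRects above: it returns the bounding box of the dirty rects, or the whole screen when the list is empty.

```typescript
import { BatchRects, Rect, Size } from './QemuUtil.js';

const screen: Size = { width: 640, height: 480 };
const dirty: Rect[] = [
	{ x: 10, y: 10, width: 20, height: 20 },
	{ x: 50, y: 40, width: 10, height: 10 }
];

// Bounding box of both rects: { x: 10, y: 10, width: 50, height: 40 }
console.log(BatchRects(screen, dirty));

// An empty list falls back to the whole screen: { x: 0, y: 0, width: 640, height: 480 }
console.log(BatchRects(screen, []));
```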


@@ -1,287 +0,0 @@
import { execa, ExecaChildProcess } from 'execa';
import { EventEmitter } from 'events';
import QmpClient from './QmpClient.js';
import { QemuDisplay } from './QemuDisplay.js';
import { unlink } from 'node:fs/promises';
export enum VMState {
Stopped,
Starting,
Started,
Stopping
}
export type QemuVmDefinition = {
id: string;
command: string[];
};
/// Temporary path base (for UNIX sockets/etc.)
const kVmTmpPathBase = `/tmp`;
/// The max amount of times QMP connection is allowed to fail before
/// the VM is forcefully stopped.
const kMaxFailCount = 5;
let gVMShouldSnapshot = false;
async function Sleep(ms: number) {
return new Promise((resolve) => setTimeout(resolve, ms));
}
export function setSnapshot(val: boolean) {
gVMShouldSnapshot = val;
}
export class QemuVM extends EventEmitter {
private state = VMState.Stopped;
private qmpInstance: QmpClient | null = null;
private qmpConnected = false;
private qmpFailCount = 0;
private qemuProcess: ExecaChildProcess | null = null;
private qemuRunning = false;
private display: QemuDisplay | null = null;
private definition: QemuVmDefinition;
private addedCommandShit = false;
constructor(def: QemuVmDefinition) {
super();
this.definition = def;
}
async Start() {
// Don't start while either trying to start or starting.
if (this.state == VMState.Started || this.state == VMState.Starting) return;
var cmd = this.definition.command;
// build additional command line statements to enable qmp/vnc over unix sockets
if(!this.addedCommandShit) {
cmd.push('-no-shutdown');
if(gVMShouldSnapshot)
cmd.push('-snapshot');
cmd.push(`-qmp`);
cmd.push(`unix:${this.GetQmpPath()},server,nowait`);
cmd.push(`-vnc`);
cmd.push(`unix:${this.GetVncPath()}`);
this.addedCommandShit = true;
}
this.VMLog(`Starting QEMU with command \"${cmd.join(' ')}\"`);
await this.StartQemu(cmd);
}
async Stop() {
// This is called in certain lifecycle places where we can't safely assert state yet
//this.AssertState(VMState.Started, 'cannot use QemuVM#Stop on a non-started VM');
// Start indicating we're stopping, so we don't
// erroneously start trying to restart everything
// we're going to tear down in this function call.
this.SetState(VMState.Stopping);
// Kill the QEMU process and QMP/display connections if they are running.
await this.DisconnectQmp();
this.DisconnectDisplay();
await this.StopQemu();
}
async Reset() {
this.AssertState(VMState.Started, 'cannot use QemuVM#Reset on a non-started VM');
// let code know the VM is going to reset
// N.B: In the crusttest world, a reset simply amounts to a
// mean cold reboot of the qemu process basically
this.emit('reset');
await this.Stop();
await Sleep(500);
await this.Start();
}
async QmpCommand(command: string, args: any | null): Promise<any> {
return await this.qmpInstance?.Execute(command, args);
}
async MonitorCommand(command: string) {
this.AssertState(VMState.Started, 'cannot use QemuVM#MonitorCommand on a non-started VM');
return await this.QmpCommand('human-monitor-command', {
'command-line': command
});
}
async ChangeRemovableMedia(deviceName: string, imagePath: string): Promise<void> {
this.AssertState(VMState.Started, 'cannot use QemuVM#ChangeRemovableMedia on a non-started VM');
// N.B: if this throws, the code which called this should handle the error accordingly
await this.QmpCommand('blockdev-change-medium', {
device: deviceName, // technically deprecated, but I don't feel like figuring out the QOM path just for a simple function
filename: imagePath
});
}
async EjectRemovableMedia(deviceName: string) {
this.AssertState(VMState.Started, 'cannot use QemuVM#EjectRemovableMedia on a non-started VM');
await this.QmpCommand('eject', {
device: deviceName
});
}
GetDisplay() {
return this.display;
}
/// Private fun bits :)
private VMLog(...args: any[]) {
// TODO: hook this into a logger of some sort
console.log(`[QemuVM] [${this.definition.id}] ${args.join('')}`);
}
private AssertState(stateShouldBe: VMState, message: string) {
if (this.state !== stateShouldBe) throw new Error(message);
}
private SetState(state: VMState) {
this.state = state;
this.emit('statechange', this.state);
}
private GetQmpPath() {
return `${kVmTmpPathBase}/socket2-${this.definition.id}-ctrl`;
}
private GetVncPath() {
return `${kVmTmpPathBase}/socket2-${this.definition.id}-vnc`;
}
private async StartQemu(split: Array<string>) {
let self = this;
this.SetState(VMState.Starting);
// Start QEMU
this.qemuProcess = execa(split[0], split.slice(1));
this.qemuProcess.on('spawn', async () => {
self.qemuRunning = true;
await Sleep(500);
await self.ConnectQmp();
});
this.qemuProcess.on('exit', async (code) => {
self.qemuRunning = false;
console.log("qemu process go boom")
// ?
if (self.qmpConnected) {
await self.DisconnectQmp();
}
self.DisconnectDisplay();
if (self.state != VMState.Stopping) {
if (code == 0) {
await Sleep(500);
await self.StartQemu(split);
} else {
self.VMLog('QEMU exited with a non-zero exit code. This usually means an error in the command line. Stopping VM.');
await self.Stop();
}
} else {
this.SetState(VMState.Stopped);
}
});
}
private async StopQemu() {
if (this.qemuRunning == true) this.qemuProcess?.kill('SIGKILL');
}
private async ConnectQmp() {
let self = this;
if (!this.qmpConnected) {
self.qmpInstance = new QmpClient();
self.qmpInstance.on('close', async () => {
self.qmpConnected = false;
// If we aren't stopping, then we do actually need to care that QMP disconnected
if (self.state != VMState.Stopping) {
if (self.qmpFailCount++ < kMaxFailCount) {
this.VMLog(`Failed to connect to QMP ${self.qmpFailCount} times`);
await Sleep(500);
await self.ConnectQmp();
} else {
this.VMLog(`Failed to connect to QMP ${self.qmpFailCount} times, giving up`);
await self.Stop();
}
}
});
self.qmpInstance.on('event', async (ev) => {
switch (ev.event) {
// Handle the STOP event sent when using -no-shutdown
case 'STOP':
await self.qmpInstance?.Execute('system_reset');
break;
case 'RESET':
self.VMLog('got a reset event!');
await self.qmpInstance?.Execute('cont');
break;
}
});
self.qmpInstance.on('qmp-ready', async (hadError) => {
self.VMLog('QMP ready');
self.display = new QemuDisplay(self.GetVncPath());
self.display.Connect();
// QMP has been connected so the VM is ready to be considered started
self.qmpFailCount = 0;
self.qmpConnected = true;
self.SetState(VMState.Started);
});
try {
await Sleep(500);
this.qmpInstance?.ConnectUNIX(this.GetQmpPath());
} catch (err) {
// just try again
await Sleep(500);
await this.ConnectQmp();
}
}
}
private async DisconnectDisplay() {
try {
this.display?.Disconnect();
this.display = null; // disassociate with that display object.
await unlink(this.GetVncPath());
} catch (err) {
// oh well lol
}
}
private async DisconnectQmp() {
if (this.qmpConnected) return;
if(this.qmpInstance == null)
return;
this.qmpConnected = false;
this.qmpInstance.end();
this.qmpInstance = null;
try {
await unlink(this.GetQmpPath());
} catch(err) {
}
}
}
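For context, a sketch of the lifecycle this class exposes (construct from a definition, Start, watch state changes, Stop). The backend diffs earlier in this commit drive superqemu's QemuVM the same way; the definition and event name below come from this removed implementation, so treat them as illustrative.

```typescript
import { QemuVM, QemuVmDefinition, VMState } from './QemuVM.js';

async function run() {
	// Placeholder definition; Slot_PCDef builds the real one.
	const def: QemuVmDefinition = {
		id: 'socketvm1',
		command: ['qemu-system-x86_64', '-M', 'q35', '-m', '512']
	};

	const vm = new QemuVM(def);
	vm.on('statechange', (state: VMState) => {
		if (state === VMState.Started) console.log('VM is up');
	});

	await vm.Start();
	// ... serve users ...
	await vm.Stop();
}

run().catch(console.error);
```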


@@ -1,135 +0,0 @@
// This was originally based off the contents of the node-qemu-qmp package,
// but I've modified it possibly to the point where it could be treated as my own creation.
import split from 'split';
import { Socket } from 'net';
export type QmpCallback = (err: Error | null, res: any | null) => void;
type QmpCommandEntry = {
callback: QmpCallback | null;
id: number;
};
// TODO: Instead of the client "Is-A"ing a Socket, this should instead contain/store a Socket,
// (preferably) passed by the user, to use for QMP communications.
// The client shouldn't have to know or care about the protocol, and it effectively hackily uses the fact
// Socket extends EventEmitter.
export default class QmpClient extends Socket {
public qmpHandshakeData: any;
private commandEntries: QmpCommandEntry[] = [];
private lastID = 0;
private ExecuteSync(command: string, args: any | null, callback: QmpCallback | null) {
let cmd: QmpCommandEntry = {
callback: callback,
id: ++this.lastID
};
let qmpOut: any = {
execute: command,
id: cmd.id
};
if (args) qmpOut['arguments'] = args;
// Add stuff
this.commandEntries.push(cmd);
this.write(JSON.stringify(qmpOut));
}
// TODO: Make this function a bit more ergonomic?
async Execute(command: string, args: any | null = null): Promise<any> {
return new Promise((res, rej) => {
this.ExecuteSync(command, args, (err, result) => {
if (err) rej(err);
res(result);
});
});
}
private Handshake(callback: () => void) {
this.write(
JSON.stringify({
execute: 'qmp_capabilities'
})
);
this.once('data', (data) => {
// Once QEMU replies to us, the handshake is done.
// We do not negotiate anything special.
callback();
});
}
// this can probably be made async
private ConnectImpl() {
let self = this;
this.once('connect', () => {
this.removeAllListeners('error');
});
this.once('error', (err) => {
// just rethrow lol
//throw err;
console.log("you have pants: rules,", err);
});
this.once('data', (data) => {
// Handshake QMP with the server.
self.qmpHandshakeData = JSON.parse(data.toString('utf8')).QMP;
self.Handshake(() => {
// Now ready to parse QMP responses/events.
self.pipe(split(JSON.parse))
.on('data', (json: any) => {
if (json == null) return self.end();
if (json.return || json.error) {
// Our handshake has a spurious return because we never assign it an ID,
// and it is gathered by this pipe for some reason I'm not quite sure about.
// So, just for safety's sake, don't process any return objects which don't have an ID attached to them.
if (json.id == null) return;
let callbackEntry = this.commandEntries.find((entry) => entry.id === json.id);
let error: Error | null = json.error ? new Error(json.error.desc) : null;
// we somehow didn't find a callback entry for this response.
// I don't know how. Technically not an error..., but I guess you're not getting a response to whatever causes this to happen
if (callbackEntry == null) return;
if (callbackEntry?.callback) callbackEntry.callback(error, json.return);
// Remove the completed callback entry.
this.commandEntries.slice(this.commandEntries.indexOf(callbackEntry));
} else if (json.event) {
this.emit('event', json);
}
})
.on('error', () => {
// Give up.
return self.end();
});
this.emit('qmp-ready');
});
});
this.once('close', () => {
this.end();
this.removeAllListeners('data'); // wow. good job bud. cool memory leak
});
}
Connect(host: string, port: number) {
super.connect(port, host);
this.ConnectImpl();
}
ConnectUNIX(path: string) {
super.connect(path);
this.ConnectImpl();
}
}
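A minimal sketch of driving the QmpClient above over the QMP unix socket; `query-status` is a standard QMP command, and the socket path is a placeholder.

```typescript
import QmpClient from './QmpClient.js';

const qmp = new QmpClient();

qmp.on('event', (ev) => {
	console.log('QMP event:', ev.event);
});

qmp.on('qmp-ready', async () => {
	// Execute() wraps { execute, id, arguments } and resolves with the matching "return" member.
	const status = await qmp.Execute('query-status');
	console.log('VM status:', status);
});

qmp.ConnectUNIX('/tmp/socket2-socketvm1-ctrl');
```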


@@ -1,3 +0,0 @@
export * from './QemuDisplay.js';
export * from './QemuUtil.js';
export * from './QemuVM.js';


@@ -1,21 +0,0 @@
Copyright 2021 Filipe Calaça Barbosa
Copyright 2022 dither
Copyright 2023-2024 Lily Tsuru/modeco80 <lily.modeco80@protonmail.ch>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.


@@ -1,11 +0,0 @@
# Notice
The source here was originally taken from a fork of [vnc-rfb-client](https://github.com/ayunami2000/vnc-rfb-client) made for the LucidVM project, available [here](https://github.com/lucidvm/rfb).
It has been heavily modified for the use cases of the socket2 backend, but is more than likely usable by other clients.
- converted to (strict) TypeScript; my initial port beforehand was not strict and it Sucked
- Also, actually use interfaces, and significantly reduce the amount of duplicated code in the rect decoders.
- all modules rewritten to use ESM
- some noisy debug prints removed
- (some, very tiny) code cleanup


@@ -1,919 +0,0 @@
import { IRectDecoder } from './decoders/decoder.js';
import { HextileDecoder } from './decoders/hextile.js';
import { RawDecoder } from './decoders/raw.js';
import { ZrleDecoder } from './decoders/zrle.js';
// import { TightDecoder } from "./decoders/tight.js";
import { CopyRectDecoder } from './decoders/copyrect.js';
import { EventEmitter } from 'node:events';
import { consts } from './constants.js';
import * as net from 'node:net';
import * as crypto from 'node:crypto';
import { SocketBuffer } from './socketbuffer.js';
import { VncRectangle, Color3, PixelFormat, Cursor } from './types.js';
export class VncClient extends EventEmitter {
// These are in no particular order.
public debug: boolean = false;
private _connected: boolean = false;
private _authenticated: boolean = false;
private _version: string = "";
private _password: string = "";
private _audioChannels: number = 2;
private _audioFrequency: number = 22050;
private _rects: number = 0;
private _decoders: Array<IRectDecoder> = [];
private _fps: number;
private _timerInterval: number;
private _timerPointer : NodeJS.Timeout|null = null;
public fb: Buffer = Buffer.from([]);
private _handshaked: boolean = false;
private _waitingServerInit: boolean = false;
private _expectingChallenge: boolean = false;
private _challengeResponseSent: boolean = false;
private _set8BitColor: boolean = false;
private _frameBufferReady = false;
private _firstFrameReceived = false;
private _processingFrame = false;
private _relativePointer: boolean = false;
public bigEndianFlag: boolean = false;
public clientWidth: number = 0;
public clientHeight: number = 0;
public clientName: string = "";
public pixelFormat: PixelFormat = {
bitsPerPixel: 0,
depth: 0,
bigEndianFlag: 0,
trueColorFlag: 0,
redMax: 0,
greenMax: 0,
blueMax: 0,
redShift: 0,
greenShift: 0,
blueShift: 0
};
private _colorMap: Color3[] = [];
private _audioData: Buffer = Buffer.from([]);
private _cursor: Cursor = {
width: 0,
height: 0,
x: 0,
y: 0,
cursorPixels: null,
bitmask: null,
posX: 0,
posY: 0
};
public encodings: number[];
private _connection: net.Socket|null = null;
private _socketBuffer: SocketBuffer;
static get consts() {
return {
encodings: consts.encodings
};
}
/**
* Return if client is connected
* @returns {boolean}
*/
get connected() {
return this._connected;
}
/**
* Return if client is authenticated
* @returns {boolean}
*/
get authenticated() {
return this._authenticated;
}
/**
* Return negotiated protocol version
* @returns {string}
*/
get protocolVersion() {
return this._version;
}
/**
* Return the local port used by the client
* @returns {number}
*/
get localPort() {
return this._connection ? this._connection?.localPort : 0;
}
constructor(options: any = { debug: false, fps: 0, encodings: [] }) {
super();
this._socketBuffer = new SocketBuffer();
this.resetState();
this.debug = options.debug || false;
this._fps = Number(options.fps) || 0;
// Calculate interval to meet configured FPS
this._timerInterval = this._fps > 0 ? 1000 / this._fps : 0;
// Default encodings
this.encodings =
options.encodings && options.encodings.length
? options.encodings
: [consts.encodings.copyRect, consts.encodings.zrle, consts.encodings.hextile, consts.encodings.raw, consts.encodings.pseudoDesktopSize];
this._audioChannels = options.audioChannels || 2;
this._audioFrequency = options.audioFrequency || 22050;
this._rects = 0;
this._decoders[consts.encodings.raw] = new RawDecoder();
// TODO: Implement tight encoding
// this._decoders[encodings.tight] = new tightDecoder();
this._decoders[consts.encodings.zrle] = new ZrleDecoder();
this._decoders[consts.encodings.copyRect] = new CopyRectDecoder();
this._decoders[consts.encodings.hextile] = new HextileDecoder();
if (this._timerInterval) {
this._fbTimer();
}
}
/**
* Timer used to limit the rate of frame update requests according to configured FPS
* @private
*/
_fbTimer() {
this._timerPointer = setTimeout(() => {
this._fbTimer();
if (this._firstFrameReceived && !this._processingFrame && this._fps > 0) {
this.requestFrameUpdate();
}
}, this._timerInterval);
}
/**
* Adjust the configured FPS
* @param fps {number} - Number of update requests sent per second
*/
changeFps(fps: number) {
if (!Number.isNaN(fps)) {
this._fps = Number(fps);
this._timerInterval = this._fps > 0 ? 1000 / this._fps : 0;
if (this._timerPointer && !this._fps) {
// If FPS was zeroed stop the timer
clearTimeout(this._timerPointer);
this._timerPointer = null;
} else if (this._fps && !this._timerPointer) {
// If FPS was zero and is now set, start the timer
this._fbTimer();
}
} else {
throw new Error('Invalid FPS. Must be a number.');
}
}
/**
* Starts the connection with the VNC server
* @param options
*/
connect(
options: any /* = {
host: '',
password: '',
path: '',
set8BitColor: false,
port: 5900
} */
) {
if (options.password) {
this._password = options.password;
}
this._set8BitColor = options.set8BitColor || false;
if (options.path === null) {
if (!options.host) {
throw new Error('Host missing.');
}
this._connection = net.connect(options.port || 5900, options.host);
// disable nagle's algorithm for TCP
this._connection?.setNoDelay();
} else {
// unix socket. bodged in but oh well
this._connection = net.connect(options.path);
}
this._connection?.on('connect', () => {
this._connected = true;
this.emit('connected');
});
this._connection?.on('close', () => {
this.resetState();
this.emit('closed');
});
this._connection?.on('timeout', () => {
this.emit('connectTimeout');
});
this._connection?.on('error', (err) => {
this.emit('connectError', err);
});
this._connection?.on('data', async (data) => {
this._socketBuffer.pushData(data);
if (!this._handshaked) {
this._handleHandshake();
} else if (this._expectingChallenge) {
this._handleAuthChallenge();
} else if (this._waitingServerInit) {
await this._handleServerInit();
} else {
await this._handleData();
}
});
}
/**
* Disconnect the client
*/
disconnect() {
if (this._connection) {
this._connection?.end();
this.resetState();
this.emit('disconnected');
}
}
/**
* Request the server a frame update
* @param full - If the server should send all the frame buffer or just the last changes
* @param incremental - Incremental number for not full requests
* @param x - X position of the update area desired, usually 0
* @param y - Y position of the update area desired, usually 0
* @param width - Width of the update area desired, usually client width
* @param height - Height of the update area desired, usually client height
*/
requestFrameUpdate(full = false, incremental = 1, x = 0, y = 0, width = this.clientWidth, height = this.clientHeight) {
if ((this._frameBufferReady || full) && this._connection && !this._rects) {
// Request data
const message = Buffer.alloc(10);
message.writeUInt8(3); // Message type
message.writeUInt8(full ? 0 : incremental, 1); // Incremental
message.writeUInt16BE(x, 2); // X-Position
message.writeUInt16BE(y, 4); // Y-Position
message.writeUInt16BE(width, 6); // Width
message.writeUInt16BE(height, 8); // Height
this._connection?.write(message);
this._frameBufferReady = true;
}
}
/**
* Handle handshake msg
* @private
*/
_handleHandshake() {
// Handshake, negotiating protocol version
if (this._socketBuffer.toString() === consts.versionString.V3_003) {
this._log('Sending 3.3', true);
this._connection?.write(consts.versionString.V3_003);
this._version = '3.3';
} else if (this._socketBuffer.toString() === consts.versionString.V3_007) {
this._log('Sending 3.7', true);
this._connection?.write(consts.versionString.V3_007);
this._version = '3.7';
} else if (this._socketBuffer.toString() === consts.versionString.V3_008) {
this._log('Sending 3.8', true);
this._connection?.write(consts.versionString.V3_008);
this._version = '3.8';
} else {
// Negotiating auth mechanism
this._handshaked = true;
if (this._socketBuffer.includes(0x02) && this._password) {
this._log('Password provided and server supports VNC auth. Choosing VNC auth.', true);
this._expectingChallenge = true;
this._connection?.write(Buffer.from([0x02]));
} else if (this._socketBuffer.includes(1)) {
this._log('Password not provided or server does not support VNC auth. Trying none.', true);
this._connection?.write(Buffer.from([0x01]));
if (this._version === '3.7') {
this._waitingServerInit = true;
} else {
this._expectingChallenge = true;
this._challengeResponseSent = true;
}
} else {
this._log('Connection error. Msg: ' + this._socketBuffer.toString());
this.disconnect();
}
}
this._socketBuffer?.flush(false);
}
/**
* Handle VNC auth challenge
* @private
*/
_handleAuthChallenge() {
if (this._challengeResponseSent) {
// Challenge response already sent. Checking result.
if (this._socketBuffer.buffer[3] === 0) {
// Auth success
this._authenticated = true;
this.emit('authenticated');
this._expectingChallenge = false;
this._sendClientInit();
} else {
// Auth fail
this.emit('authError');
this.resetState();
}
} else {
const key = Buffer.alloc(8);
key.fill(0);
key.write(this._password.slice(0, 8));
this.reverseBits(key);
const des1 = crypto.createCipheriv('des', key, Buffer.alloc(8));
const des2 = crypto.createCipheriv('des', key, Buffer.alloc(8));
const response = Buffer.alloc(16);
response.fill(des1.update(this._socketBuffer.buffer.slice(0, 8)), 0, 8);
response.fill(des2.update(this._socketBuffer.buffer.slice(8, 16)), 8, 16);
this._connection?.write(response);
this._challengeResponseSent = true;
}
this._socketBuffer.flush(false);
}
/**
* Reverse bits order of a byte
* @param buf - Buffer to be flipped
*/
reverseBits(buf: Buffer) {
for (let x = 0; x < buf.length; x++) {
let newByte = 0;
newByte += buf[x] & 128 ? 1 : 0;
newByte += buf[x] & 64 ? 2 : 0;
newByte += buf[x] & 32 ? 4 : 0;
newByte += buf[x] & 16 ? 8 : 0;
newByte += buf[x] & 8 ? 16 : 0;
newByte += buf[x] & 4 ? 32 : 0;
newByte += buf[x] & 2 ? 64 : 0;
newByte += buf[x] & 1 ? 128 : 0;
buf[x] = newByte;
}
}
/**
* Handle server init msg
* @returns {Promise<void>}
* @private
*/
async _handleServerInit() {
this._waitingServerInit = false;
await this._socketBuffer.waitBytes(18);
this.clientWidth = this._socketBuffer.readUInt16BE();
this.clientHeight = this._socketBuffer.readUInt16BE();
this.pixelFormat.bitsPerPixel = this._socketBuffer.readUInt8();
this.pixelFormat.depth = this._socketBuffer.readUInt8();
this.pixelFormat.bigEndianFlag = this._socketBuffer.readUInt8();
this.pixelFormat.trueColorFlag = this._socketBuffer.readUInt8();
this.pixelFormat.redMax = this.bigEndianFlag ? this._socketBuffer.readUInt16BE() : this._socketBuffer.readUInt16LE();
this.pixelFormat.greenMax = this.bigEndianFlag ? this._socketBuffer.readUInt16BE() : this._socketBuffer.readUInt16LE();
this.pixelFormat.blueMax = this.bigEndianFlag ? this._socketBuffer.readUInt16BE() : this._socketBuffer.readUInt16LE();
this.pixelFormat.redShift = this._socketBuffer.readInt8();
this.pixelFormat.greenShift = this._socketBuffer.readInt8();
this.pixelFormat.blueShift = this._socketBuffer.readInt8();
this.updateFbSize();
this.clientName = this._socketBuffer.buffer.slice(24).toString();
this._socketBuffer.flush(false);
// FIXME: Removed because these are noise
//this._log(`Screen size: ${this.clientWidth}x${this.clientHeight}`);
//this._log(`Client name: ${this.clientName}`);
//this._log(`pixelFormat: ${JSON.stringify(this.pixelFormat)}`);
if (this._set8BitColor) {
//this._log(`8 bit color format requested, only raw encoding is supported.`);
this._setPixelFormatToColorMap();
}
this._sendEncodings();
setTimeout(() => {
this.requestFrameUpdate(true);
}, 1000);
}
/**
* Update the frame buffer size according to client width and height (RGBA)
*/
updateFbSize() {
this.fb = Buffer.alloc(this.clientWidth * this.clientHeight * 4);
}
/**
* Request the server to change to 8bit color format (Color palette). Only works with Raw encoding.
* @private
*/
_setPixelFormatToColorMap() {
this._log(`Requesting PixelFormat change to ColorMap (8 bits).`);
const message = Buffer.alloc(20);
message.writeUInt8(0); // Message type
message.writeUInt8(0, 1); // Padding
message.writeUInt8(0, 2); // Padding
message.writeUInt8(0, 3); // Padding
message.writeUInt8(8, 4); // PixelFormat - BitsPerPixel
message.writeUInt8(8, 5); // PixelFormat - Depth
message.writeUInt8(0, 6); // PixelFormat - BigEndianFlag
message.writeUInt8(0, 7); // PixelFormat - TrueColorFlag
message.writeUInt16BE(255, 8); // PixelFormat - RedMax
message.writeUInt16BE(255, 10); // PixelFormat - GreenMax
message.writeUInt16BE(255, 12); // PixelFormat - BlueMax
message.writeUInt8(0, 14); // PixelFormat - RedShift
message.writeUInt8(8, 15); // PixelFormat - GreenShift
message.writeUInt8(16, 16); // PixelFormat - BlueShift
message.writeUInt8(0, 17); // PixelFormat - Padding
message.writeUInt8(0, 18); // PixelFormat - Padding
message.writeUInt8(0, 19); // PixelFormat - Padding
// Send a setPixelFormat message switching to the color map
this._connection?.write(message);
this.pixelFormat.bitsPerPixel = 8;
this.pixelFormat.depth = 8;
}
/**
* Send supported encodings
* @private
*/
_sendEncodings() {
//this._log('Sending encodings.');
// If this._set8BitColor is set, only copyrect and raw encodings are supported
const message = Buffer.alloc(4 + (!this._set8BitColor ? this.encodings.length : 2) * 4);
message.writeUInt8(2); // Message type
message.writeUInt8(0, 1); // Padding
message.writeUInt16BE(!this._set8BitColor ? this.encodings.length : 2, 2); // Padding
let offset = 4;
// If 8bits is not set, send all encodings configured
if (!this._set8BitColor) {
for (const e of this.encodings) {
message.writeInt32BE(e, offset);
offset += 4;
}
} else {
message.writeInt32BE(consts.encodings.copyRect, offset);
message.writeInt32BE(consts.encodings.raw, offset + 4);
}
this._connection?.write(message);
}
/**
* Send client init msg
* @private
*/
_sendClientInit() {
//this._log(`Sending clientInit`);
this._waitingServerInit = true;
// Shared bit set
this._connection?.write('1');
}
/**
* Handle data msg
* @returns {Promise<void>}
* @private
*/
async _handleData() {
if (!this._rects) {
switch (this._socketBuffer.buffer[0]) {
case consts.serverMsgTypes.fbUpdate:
await this._handleFbUpdate();
break;
case consts.serverMsgTypes.setColorMap:
await this._handleSetColorMap();
break;
case consts.serverMsgTypes.bell:
this.emit('bell');
this._socketBuffer.flush();
break;
case consts.serverMsgTypes.cutText:
await this._handleCutText();
break;
case consts.serverMsgTypes.qemuAudio:
await this._handleQemuAudio();
break;
}
}
}
/**
* Cut message (text was copied to clipboard on server)
* @returns {Promise<void>}
* @private
*/
async _handleCutText() {
this._socketBuffer.setOffset(4);
await this._socketBuffer.waitBytes(1);
const length = this._socketBuffer.readUInt32BE();
await this._socketBuffer.waitBytes(length);
this.emit('cutText', this._socketBuffer.readNBytesOffset(length).toString());
this._socketBuffer.flush();
}
/**
* Gets the pseudocursor framebuffer
*/
_getPseudoCursor() {
if (!this._cursor.width)
return {
width: 1,
height: 1,
data: Buffer.alloc(4)
};
const { width, height, bitmask, cursorPixels } = this._cursor;
if(bitmask == null || cursorPixels == null)
throw new Error('No cursor data to get!');
const data = Buffer.alloc(height * width * 4);
for (var y = 0; y < height; y++) {
for (var x = 0; x < width; x++) {
const offset = (y * width + x) * 4;
const active = (bitmask[Math.floor((width + 7) / 8) * y + Math.floor(x / 8)] >> (7 - (x % 8))) & 1;
if (active) {
switch (this.pixelFormat.bitsPerPixel) {
case 8:
console.log(8);
const index = cursorPixels.readUInt8(offset);
// @ts-ignore (This line is extremely suspect anyways. I bet this is horribly broken!!)
const color = this._colorMap[index] | 0xff;
data.writeIntBE(color, offset, 4);
break;
case 32:
// TODO: compatibility with VMware actually using the alpha channel
const b = cursorPixels.readUInt8(offset);
const g = cursorPixels.readUInt8(offset + 1);
const r = cursorPixels.readUInt8(offset + 2);
data.writeUInt8(r, offset);
data.writeUInt8(g, offset + 1);
data.writeUInt8(b, offset + 2);
data.writeUInt8(0xff, offset + 3);
break;
default:
data.writeIntBE(cursorPixels.readIntBE(offset, this.pixelFormat.bitsPerPixel / 8), offset, this.pixelFormat.bitsPerPixel / 8);
break;
}
}
}
}
return {
x: this._cursor.x,
y: this._cursor.y,
width,
height,
data
};
}
/**
* Handle a rects of update message
*/
async _handleRect() {
this._processingFrame = true;
const sendFbUpdate = this._rects;
while (this._rects) {
await this._socketBuffer.waitBytes(12);
const rect: VncRectangle = {
x: this._socketBuffer.readUInt16BE(),
y: this._socketBuffer.readUInt16BE(),
width: this._socketBuffer.readUInt16BE(),
height: this._socketBuffer.readUInt16BE(),
encoding: this._socketBuffer.readInt32BE(),
data: null // for now
};
if (rect.encoding === consts.encodings.pseudoQemuAudio) {
this.sendAudio(true);
this.sendAudioConfig(this._audioChannels, this._audioFrequency); //todo: future: setFrequency(...) to update mid thing
} else if (rect.encoding === consts.encodings.pseudoQemuPointerMotionChange) {
this._relativePointer = rect.x == 0;
} else if (rect.encoding === consts.encodings.pseudoCursor) {
const dataSize = rect.width * rect.height * (this.pixelFormat.bitsPerPixel / 8);
const bitmaskSize = Math.floor((rect.width + 7) / 8) * rect.height;
this._cursor.width = rect.width;
this._cursor.height = rect.height;
this._cursor.x = rect.x;
this._cursor.y = rect.y;
this._cursor.cursorPixels = this._socketBuffer.readNBytesOffset(dataSize);
this._cursor.bitmask = this._socketBuffer.readNBytesOffset(bitmaskSize);
rect.data = Buffer.concat([this._cursor.cursorPixels, this._cursor.bitmask]);
this.emit('cursorChanged', this._getPseudoCursor());
} else if (rect.encoding === consts.encodings.pseudoDesktopSize) {
this._log('Frame Buffer size change requested by the server', true);
this.clientHeight = rect.height;
this.clientWidth = rect.width;
this.updateFbSize();
this.emit('desktopSizeChanged', { width: this.clientWidth, height: this.clientHeight });
} else if (this._decoders[rect.encoding]) {
await this._decoders[rect.encoding].decode(
rect,
this.fb,
this.pixelFormat.bitsPerPixel,
this._colorMap,
this.clientWidth,
this.clientHeight,
this._socketBuffer,
this.pixelFormat.depth
);
this.emit('rectUpdateProcessed', rect);
} else {
this._log('Non supported update received. Encoding: ' + rect.encoding);
}
this._rects--;
this.emit('rectProcessed', rect);
if (!this._rects) {
this._socketBuffer.flush(true);
}
}
if (sendFbUpdate) {
if (!this._firstFrameReceived) {
this._firstFrameReceived = true;
this.emit('firstFrameUpdate', this.fb);
}
this._log('Frame buffer updated.', true);
this.emit('frameUpdated', this.fb);
}
this._processingFrame = false;
if (this._fps === 0) {
// If FPS is not set, request a new update as soon as the last received has been processed
this.requestFrameUpdate();
}
}
async _handleFbUpdate() {
this._socketBuffer.setOffset(2);
this._rects = this._socketBuffer.readUInt16BE();
this._log('Frame update received. Rects: ' + this._rects, true);
await this._handleRect();
}
/**
* Handle setColorMap msg
* @returns {Promise<void>}
* @private
*/
async _handleSetColorMap() {
this._socketBuffer.setOffset(2);
let firstColor = this._socketBuffer.readUInt16BE();
const numColors = this._socketBuffer.readUInt16BE();
this._log(`ColorMap received. Colors: ${numColors}.`);
await this._socketBuffer.waitBytes(numColors * 6);
for (let x = 0; x < numColors; x++) {
this._colorMap[firstColor] = {
r: Math.floor((this._socketBuffer.readUInt16BE() / 65535) * 255),
g: Math.floor((this._socketBuffer.readUInt16BE() / 65535) * 255),
b: Math.floor((this._socketBuffer.readUInt16BE() / 65535) * 255),
};
firstColor++;
}
this.emit('colorMapUpdated', this._colorMap);
this._socketBuffer.flush();
}
async _handleQemuAudio() {
this._socketBuffer.setOffset(2);
let operation = this._socketBuffer.readUInt16BE();
if (operation == 2) {
const length = this._socketBuffer.readUInt32BE();
//this._log(`Audio received. Length: ${length}.`);
await this._socketBuffer.waitBytes(length);
let audioBuffer = this._socketBuffer.readNBytes(length);
this._audioData = audioBuffer;
}
this.emit('audioStream', this._audioData);
this._socketBuffer.flush();
}
/**
* Reset the class state
*/
resetState() {
if (this._connection) {
this._connection?.end();
}
if (this._timerPointer) {
clearInterval(this._timerPointer);
}
this._timerPointer = null;
//this._connection = null;
this._connected = false;
this._authenticated = false;
this._version = '';
this._password = '';
this._audioChannels = 2;
this._audioFrequency = 22050;
this._handshaked = false;
this._expectingChallenge = false;
this._challengeResponseSent = false;
this._frameBufferReady = false;
this._firstFrameReceived = false;
this._processingFrame = false;
this.clientWidth = 0;
this.clientHeight = 0;
this.clientName = '';
this.pixelFormat = {
bitsPerPixel: 0,
depth: 0,
bigEndianFlag: 0,
trueColorFlag: 0,
redMax: 0,
greenMax: 0,
blueMax: 0,
redShift: 0,
blueShift: 0,
greenShift: 0
};
this._rects = 0;
this._colorMap = [];
this.fb = Buffer.from([]);
this._socketBuffer?.flush(false);
this._cursor = {
width: 0,
height: 0,
x: 0,
y: 0,
cursorPixels: null,
bitmask: null,
posX: 0,
posY: 0
};
}
// NIT(lily) instead of allocating it'd be way better if these just re-used a 16/32 byte buffer used just for these.
/**
* Send a key event
* @param key - Key code (keysym) defined by X Window System, check https://wiki.linuxquestions.org/wiki/List_of_keysyms
* @param down - True if the key is pressed, false if it is not
*/
sendKeyEvent(key: number, down: boolean = false) {
const message = Buffer.alloc(8);
message.writeUInt8(4); // Message type
message.writeUInt8(down ? 1 : 0, 1); // Down flag
message.writeUInt8(0, 2); // Padding
message.writeUInt8(0, 3); // Padding
message.writeUInt32BE(key, 4); // Key code
this._connection?.write(message);
}
/**
* Send a raw pointer event
* @param xPosition - X Position
* @param yPosition - Y Position
* @param mask - Raw RFB button mask
*/
sendPointerEvent(xPosition: number, yPosition: number, buttonMask: number) {
const message = Buffer.alloc(6);
message.writeUInt8(consts.clientMsgTypes.pointerEvent); // Message type
message.writeUInt8(buttonMask, 1); // Button Mask
const reladd = this._relativePointer ? 0x7fff : 0;
message.writeUInt16BE(xPosition + reladd, 2); // X Position
message.writeUInt16BE(yPosition + reladd, 4); // Y Position
this._cursor.posX = xPosition;
this._cursor.posY = yPosition;
this._connection?.write(message);
}
/**
* Send client cut message to server
* @param text - latin1 encoded
*/
clientCutText(text: string) {
const textBuffer = Buffer.from(text, 'latin1');
const message = Buffer.alloc(8 + textBuffer.length);
message.writeUInt8(6); // Message type
message.writeUInt8(0, 1); // Padding
message.writeUInt8(0, 2); // Padding
message.writeUInt8(0, 3); // Padding
message.writeUInt32BE(textBuffer.length, 4); // Padding
textBuffer.copy(message, 8);
this._connection?.write(message);
}
sendAudio(enable: boolean) {
const message = Buffer.alloc(4);
message.writeUInt8(consts.clientMsgTypes.qemuAudio); // Message type
message.writeUInt8(1, 1); // Submessage Type
message.writeUInt16BE(enable ? 0 : 1, 2); // Operation
this._connection?.write(message);
}
sendAudioConfig(channels: number, frequency: number) {
const message = Buffer.alloc(10);
message.writeUInt8(consts.clientMsgTypes.qemuAudio); // Message type
message.writeUInt8(1, 1); // Submessage Type
message.writeUInt16BE(2, 2); // Operation
message.writeUInt8(0 /*U8*/, 4); // Sample Format
message.writeUInt8(channels, 5); // Number of Channels
message.writeUInt32BE(frequency, 6); // Frequency
this._connection?.write(message);
}
/**
* Print log info
* @param text
* @param debug
* @private
*/
_log(text: string, debug = false) {
if (!debug || (debug && this.debug)) {
console.log(text);
}
}
}
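A condensed sketch of using this client on its own (QemuDisplay above is the real consumer). The options and socket path are placeholders; only members defined in this file are used.

```typescript
import { VncClient } from './rfb/client.js';

const client = new VncClient({
	debug: false,
	fps: 30,
	encodings: [VncClient.consts.encodings.raw, VncClient.consts.encodings.pseudoDesktopSize]
});

client.on('firstFrameUpdate', () => {
	console.log(`connected: ${client.clientWidth}x${client.clientHeight}`);
	client.sendKeyEvent(0xff0d /* XK_Return */, true);
	client.sendKeyEvent(0xff0d, false);
});

client.on('frameUpdated', (fb: Buffer) => {
	// `fb` is the full RGBA framebuffer; hand it to a canvas or encoder.
});

client.connect({ path: '/tmp/socket2-socketvm1-vnc' });
```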


@@ -1,46 +0,0 @@
// based on https://github.com/sidorares/node-rfb2
export const consts = {
clientMsgTypes: {
setPixelFormat: 0,
setEncodings: 2,
fbUpdate: 3,
keyEvent: 4,
pointerEvent: 5,
cutText: 6,
qemuAudio: 255
},
serverMsgTypes: {
fbUpdate: 0,
setColorMap: 1,
bell: 2,
cutText: 3,
qemuAudio: 255
},
versionString: {
V3_003: 'RFB 003.003\n',
V3_007: 'RFB 003.007\n',
V3_008: 'RFB 003.008\n'
},
encodings: {
raw: 0,
copyRect: 1,
rre: 2,
corre: 4,
hextile: 5,
zlib: 6,
tight: 7,
zlibhex: 8,
trle: 15,
zrle: 16,
h264: 50,
pseudoCursor: -239,
pseudoDesktopSize: -223,
pseudoQemuPointerMotionChange: -257,
pseudoQemuAudio: -259
},
security: {
None: 1,
VNC: 2
}
};


@@ -1,31 +0,0 @@
import { SocketBuffer } from "../socketbuffer";
import { IRectDecoder } from "./decoder";
import { getPixelBytePos } from "./util";
import { VncRectangle, Color3 } from "../types";
export class CopyRectDecoder implements IRectDecoder {
async decode(rect: VncRectangle, fb: Buffer, bitsPerPixel: number, colorMap: Array<Color3>, screenW: number, screenH: number, socket: SocketBuffer, depth: number): Promise<void> {
return new Promise(async (resolve, reject) => {
await socket.waitBytes(4);
rect.data = socket.readNBytesOffset(4);
const x = rect.data.readUInt16BE();
const y = rect.data.readUInt16BE(2);
for (let h = 0; h < rect.height; h++) {
for (let w = 0; w < rect.width; w++) {
const fbOrigBytePosOffset = getPixelBytePos(x + w, y + h, screenW, screenH);
const fbBytePosOffset = getPixelBytePos(rect.x + w, rect.y + h, screenW, screenH);
fb.writeUInt8(fb.readUInt8(fbOrigBytePosOffset), fbBytePosOffset);
fb.writeUInt8(fb.readUInt8(fbOrigBytePosOffset + 1), fbBytePosOffset + 1);
fb.writeUInt8(fb.readUInt8(fbOrigBytePosOffset + 2), fbBytePosOffset + 2);
fb.writeUInt8(fb.readUInt8(fbOrigBytePosOffset + 3), fbBytePosOffset + 3);
}
}
resolve();
});
}
}


@@ -1,7 +0,0 @@
import { SocketBuffer } from "../socketbuffer";
import { Color3, VncRectangle } from "../types";
export interface IRectDecoder {
decode(rect: VncRectangle, fb: Buffer, bitsPerPixel: number, colorMap: Array<Color3>, screenW: number, screenH: number, socket: SocketBuffer, depth: number): Promise<void>;
}


@@ -1,228 +0,0 @@
import { SocketBuffer } from "../socketbuffer";
import { IRectDecoder } from "./decoder";
import { applyColor, getPixelBytePos } from "./util";
import { VncRectangle, Color3 } from "../types";
export class HextileDecoder implements IRectDecoder {
async decode(rect: VncRectangle, fb: Buffer, bitsPerPixel: number, colorMap: Array<Color3>, screenW: number, screenH: number, socket: SocketBuffer, depth: number): Promise<void> {
return new Promise(async (resolve, reject) => {
const initialOffset = socket.offset;
let dataSize = 0;
let lastSubEncoding = 0;
const backgroundColor = { r: 0, g: 0, b: 0, a: 255 };
const foregroundColor = { r: 0, g: 0, b: 0, a: 255 };
let tilesX = Math.ceil(rect.width / 16);
let tilesY = Math.ceil(rect.height / 16);
let tiles = tilesX * tilesY;
let totalTiles = tiles;
while (tiles) {
await socket.waitBytes(1);
const subEncoding = socket.readUInt8();
dataSize++;
const currTile = totalTiles - tiles;
// Calculate tile position and size
const tileX = currTile % tilesX;
const tileY = Math.floor(currTile / tilesX);
const tx = rect.x + tileX * 16;
const ty = rect.y + tileY * 16;
const tw = Math.min(16, rect.x + rect.width - tx);
const th = Math.min(16, rect.y + rect.height - ty);
if (subEncoding === 0) {
if (lastSubEncoding & 0x01) {
// We need to ignore zeroed tile after a raw tile
} else {
// If zeroed tile and last tile was not raw, use the last backgroundColor
applyColor(tw, th, tx, ty, screenW, screenH, backgroundColor, fb);
}
} else if (subEncoding & 0x01) {
// If Raw, ignore all other bits
await socket.waitBytes(th * tw * (bitsPerPixel / 8));
dataSize += th * tw * (bitsPerPixel / 8);
for (let h = 0; h < th; h++) {
for (let w = 0; w < tw; w++) {
const fbBytePosOffset = getPixelBytePos(tx + w, ty + h, screenW, screenH);
if (bitsPerPixel === 8) {
const index = socket.readUInt8();
const color = colorMap[index];
// RGB
// fb.writeUInt8(color?.r || 255, fbBytePosOffset);
// fb.writeUInt8(color?.g || 255, fbBytePosOffset + 1);
// fb.writeUInt8(color?.b || 255, fbBytePosOffset + 2);
// BGR
fb.writeUInt8(color?.r || 255, fbBytePosOffset + 2);
fb.writeUInt8(color?.g || 255, fbBytePosOffset + 1);
fb.writeUInt8(color?.b || 255, fbBytePosOffset);
} else if (bitsPerPixel === 24) {
fb.writeUInt8(socket.readUInt8(), fbBytePosOffset);
fb.writeUInt8(socket.readUInt8(), fbBytePosOffset + 1);
fb.writeUInt8(socket.readUInt8(), fbBytePosOffset + 2);
} else if (bitsPerPixel === 32) {
// RGB
// fb.writeUInt8(rect.data.readUInt8(bytePosOffset), fbBytePosOffset);
// fb.writeUInt8(rect.data.readUInt8(bytePosOffset + 1), fbBytePosOffset + 1);
// fb.writeUInt8(rect.data.readUInt8(bytePosOffset + 2), fbBytePosOffset + 2);
// BGR
fb.writeUInt8(socket.readUInt8(), fbBytePosOffset + 2);
fb.writeUInt8(socket.readUInt8(), fbBytePosOffset + 1);
fb.writeUInt8(socket.readUInt8(), fbBytePosOffset);
socket.readUInt8();
}
// Alpha, always 255
fb.writeUInt8(255, fbBytePosOffset + 3);
}
}
lastSubEncoding = subEncoding;
} else {
// Background bit
if (subEncoding & 0x02) {
switch (bitsPerPixel) {
case 8:
await socket.waitBytes(1);
const index = socket.readUInt8();
dataSize++;
backgroundColor.r = colorMap[index].r || 255;
backgroundColor.g = colorMap[index].g || 255;
backgroundColor.b = colorMap[index].b || 255;
break;
case 24:
await socket.waitBytes(3);
dataSize += 3;
backgroundColor.r = socket.readUInt8();
backgroundColor.g = socket.readUInt8();
backgroundColor.b = socket.readUInt8();
break;
case 32:
await socket.waitBytes(4);
dataSize += 4;
backgroundColor.r = socket.readUInt8();
backgroundColor.g = socket.readUInt8();
backgroundColor.b = socket.readUInt8();
backgroundColor.a = socket.readUInt8();
break;
}
}
// Foreground bit
if (subEncoding & 0x04) {
switch (bitsPerPixel) {
case 8:
await socket.waitBytes(1);
const index = socket.readUInt8();
dataSize++;
foregroundColor.r = colorMap[index].r || 255;
foregroundColor.g = colorMap[index].g || 255;
foregroundColor.b = colorMap[index].b || 255;
break;
case 24:
await socket.waitBytes(3);
dataSize += 3;
foregroundColor.r = socket.readUInt8();
foregroundColor.g = socket.readUInt8();
foregroundColor.b = socket.readUInt8();
break;
case 32:
await socket.waitBytes(4);
dataSize += 4;
foregroundColor.r = socket.readUInt8();
foregroundColor.g = socket.readUInt8();
foregroundColor.b = socket.readUInt8();
foregroundColor.a = socket.readUInt8();
break;
}
}
// Initialize tile with the background color
applyColor(tw, th, tx, ty, screenW, screenH, backgroundColor, fb);
// AnySubrects bit
if (subEncoding & 0x08) {
await socket.waitBytes(1);
let subRects = socket.readUInt8();
if (subRects) {
while (subRects) {
subRects--;
const color = { r: 0, g: 0, b: 0, a: 255 };
// SubrectsColoured
if (subEncoding & 0x10) {
switch (bitsPerPixel) {
case 8:
await socket.waitBytes(1);
const index = socket.readUInt8();
dataSize++;
color.r = colorMap[index].r || 255;
color.g = colorMap[index].g || 255;
color.b = colorMap[index].b || 255;
break;
case 24:
await socket.waitBytes(3);
dataSize += 3;
color.r = socket.readUInt8();
color.g = socket.readUInt8();
color.b = socket.readUInt8();
break;
case 32:
await socket.waitBytes(4);
dataSize += 4;
color.r = socket.readUInt8();
color.g = socket.readUInt8();
color.b = socket.readUInt8();
color.a = socket.readUInt8();
break;
}
} else {
color.r = foregroundColor.r;
color.g = foregroundColor.g;
color.b = foregroundColor.b;
color.a = foregroundColor.a;
}
await socket.waitBytes(2);
const xy = socket.readUInt8();
const wh = socket.readUInt8();
dataSize += 2;
const sx = xy >> 4;
const sy = xy & 0x0f;
const sw = (wh >> 4) + 1;
const sh = (wh & 0x0f) + 1;
applyColor(sw, sh, tx + sx, ty + sy, screenW, screenH, color, fb);
}
} else {
applyColor(tw, th, tx, ty, screenW, screenH, backgroundColor, fb);
}
} else {
applyColor(tw, th, tx, ty, screenW, screenH, backgroundColor, fb);
}
lastSubEncoding = subEncoding;
}
tiles--;
}
rect.data = socket.readNBytes(dataSize, initialOffset);
resolve();
});
}
}
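
The `AnySubrects` branch above unpacks each subrectangle's geometry from two bytes: the x and y offsets within the 16x16 tile live in the nibbles of the first byte, and width minus one and height minus one in the nibbles of the second. A minimal standalone sketch of that unpacking; the `unpackSubrect` helper name is illustrative and not part of the original code:

```typescript
// Sketch of hextile subrectangle unpacking, mirroring the xy/wh nibble
// layout used above. `unpackSubrect` is an illustrative name only.
type Subrect = { x: number; y: number; w: number; h: number };

function unpackSubrect(xy: number, wh: number): Subrect {
    return {
        x: (xy >> 4) & 0x0f,       // high nibble of byte 1: x offset in the tile
        y: xy & 0x0f,              // low nibble of byte 1: y offset
        w: ((wh >> 4) & 0x0f) + 1, // width is stored as width - 1
        h: (wh & 0x0f) + 1         // height is stored as height - 1
    };
}

// Example: xy = 0x23, wh = 0x41 describes a 5x2 subrect at (2, 3) in the tile.
console.log(unpackSubrect(0x23, 0x41)); // { x: 2, y: 3, w: 5, h: 2 }
```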

View file

@ -1,57 +0,0 @@
import { SocketBuffer } from "../socketbuffer";
import { IRectDecoder } from "./decoder";
import { getPixelBytePos } from "./util";
import { VncRectangle, Color3 } from "../types";
export class RawDecoder implements IRectDecoder {
async decode(rect: VncRectangle, fb: Buffer, bitsPerPixel: number, colorMap: Array<Color3>, screenW: number, screenH: number, socket: SocketBuffer, depth: number): Promise<void> {
return new Promise(async (resolve, reject) => {
await socket.waitBytes(rect.width * rect.height * (bitsPerPixel / 8));
rect.data = socket.readNBytesOffset(rect.width * rect.height * (bitsPerPixel / 8));
for (let h = 0; h < rect.height; h++) {
for (let w = 0; w < rect.width; w++) {
const fbBytePosOffset = getPixelBytePos(rect.x + w, rect.y + h, screenW, screenH);
if (bitsPerPixel === 8) {
const bytePosOffset = h * rect.width + w;
const index = rect.data.readUInt8(bytePosOffset);
const color = colorMap[index];
// RGB
// fb.writeUInt8(color?.r || 255, fbBytePosOffset);
// fb.writeUInt8(color?.g || 255, fbBytePosOffset + 1);
// fb.writeUInt8(color?.b || 255, fbBytePosOffset + 2);
// BGR
fb.writeUInt8(color?.r || 255, fbBytePosOffset + 2);
fb.writeUInt8(color?.g || 255, fbBytePosOffset + 1);
fb.writeUInt8(color?.b || 255, fbBytePosOffset);
fb.writeUInt8(255, fbBytePosOffset + 3);
} else if (bitsPerPixel === 24) {
const bytePosOffset = (h * rect.width + w) * 3;
fb.writeUInt8(rect.data.readUInt8(bytePosOffset), fbBytePosOffset);
fb.writeUInt8(rect.data.readUInt8(bytePosOffset + 1), fbBytePosOffset + 1);
fb.writeUInt8(rect.data.readUInt8(bytePosOffset + 2), fbBytePosOffset + 2);
fb.writeUInt8(255, fbBytePosOffset + 3);
} else if (bitsPerPixel === 32) {
const bytePosOffset = (h * rect.width + w) * 4;
// RGB
// fb.writeUInt8(rect.data.readUInt8(bytePosOffset), fbBytePosOffset);
// fb.writeUInt8(rect.data.readUInt8(bytePosOffset + 1), fbBytePosOffset + 1);
// fb.writeUInt8(rect.data.readUInt8(bytePosOffset + 2), fbBytePosOffset + 2);
// BGR
fb.writeUInt8(rect.data.readUInt8(bytePosOffset), fbBytePosOffset + 2);
fb.writeUInt8(rect.data.readUInt8(bytePosOffset + 1), fbBytePosOffset + 1);
fb.writeUInt8(rect.data.readUInt8(bytePosOffset + 2), fbBytePosOffset);
// fb.writeUInt8(rect.data.readUInt8(bytePosOffset + 3), fbBytePosOffset + 3);
fb.writeUInt8(255, fbBytePosOffset + 3);
}
}
}
resolve();
});
}
}
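
The raw decoder above writes each decoded pixel into the framebuffer in BGRA byte order: blue first, red at offset 2, and alpha forced to 255. A hedged sketch of that per-pixel store in isolation, assuming the same 4-bytes-per-pixel layout as `getPixelBytePos`; the `writeBgra` helper is illustrative and not part of this codebase:

```typescript
// Sketch of the BGRA store performed per pixel by the raw decoder above.
function writeBgra(fb: Buffer, byteOffset: number, r: number, g: number, b: number): void {
    fb.writeUInt8(b, byteOffset);       // blue
    fb.writeUInt8(g, byteOffset + 1);   // green
    fb.writeUInt8(r, byteOffset + 2);   // red
    fb.writeUInt8(255, byteOffset + 3); // alpha is always fully opaque
}

// Example: paint pixel (10, 4) of a 640x480 framebuffer solid red.
const fb = Buffer.alloc(640 * 480 * 4);
writeBgra(fb, (4 * 640 + 10) * 4, 255, 0, 0);
```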

View file

@ -1,19 +0,0 @@
import { Color3 } from "../types";
export function getPixelBytePos(x: number, y: number, width: number, height: number, stride: number = 4): number {
return (y * width + x) * stride;
}
// Apply color to a rect on buffer
export function applyColor(tw: number, th: number, tx: number, ty: number, screenW: number, screenH: number, color: Color3, fb: Buffer) {
for (let h = 0; h < th; h++) {
for (let w = 0; w < tw; w++) {
const fbBytePosOffset = getPixelBytePos(tx + w, ty + h, screenW, screenH);
fb.writeUInt8(color.r || 255, fbBytePosOffset + 2);
fb.writeUInt8(color.g || 255, fbBytePosOffset + 1);
fb.writeUInt8(color.b || 255, fbBytePosOffset);
fb.writeUInt8(255, fbBytePosOffset + 3);
}
}
}
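
`applyColor` floods a tile-sized region of the framebuffer with one colour in the same BGRA order; note that it substitutes 255 for falsy channel values, so a zero channel is written as 255. A usage sketch under the assumption that the module is importable as `./util` (buffer size and colour are illustrative):

```typescript
// Usage sketch for applyColor: flood a 64x64 tile at (128, 64) of an
// 800x600 BGRA framebuffer with a solid colour. Values are illustrative.
import { applyColor } from './util';

const screenW = 800;
const screenH = 600;
const fb = Buffer.alloc(screenW * screenH * 4);

// Channels are kept non-zero here because applyColor maps falsy values to 255.
applyColor(64, 64, 128, 64, screenW, screenH, { r: 32, g: 64, b: 200 }, fb);
```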

View file

@ -1,340 +0,0 @@
import * as zlib from 'node:zlib';
import { SocketBuffer } from "../socketbuffer";
import { IRectDecoder } from "./decoder";
import { applyColor, getPixelBytePos } from "./util";
import { VncRectangle, Color3, Color4 } from "../types";
export class ZrleDecoder implements IRectDecoder {
private zlib: zlib.Inflate;
private unBuffer: SocketBuffer;
constructor() {
this.zlib = zlib.createInflate({ chunkSize: 16 * 1024 * 1024, flush: zlib.constants.Z_FULL_FLUSH });
this.unBuffer = new SocketBuffer();
this.zlib.on('data', async (chunk) => {
this.unBuffer.pushData(chunk);
});
}
async decode(rect: VncRectangle, fb: Buffer, bitsPerPixel: number, colorMap: Array<Color3>, screenW: number, screenH: number, socket: SocketBuffer, depth: number): Promise<void> {
return new Promise(async (resolve, reject) => {
await socket.waitBytes(4);
const initialOffset = socket.offset;
const dataSize = socket.readUInt32BE();
await socket.waitBytes(dataSize);
const compressedData = socket.readNBytesOffset(dataSize);
rect.data = socket.readNBytes(dataSize + 4, initialOffset);
this.unBuffer.flush();
this.zlib.write(compressedData, async () => {
this.zlib.flush();
let tilesX = Math.ceil(rect.width / 64);
let tilesY = Math.ceil(rect.height / 64);
let tiles = tilesX * tilesY;
let totalTiles = tiles;
while (tiles) {
await this.unBuffer.waitBytes(1, 'tile begin.');
const subEncoding = this.unBuffer.readUInt8();
const currTile = totalTiles - tiles;
const tileX = currTile % tilesX;
const tileY = Math.floor(currTile / tilesX);
const tx = rect.x + tileX * 64;
const ty = rect.y + tileY * 64;
const tw = Math.min(64, rect.x + rect.width - tx);
const th = Math.min(64, rect.y + rect.height - ty);
const now = process.hrtime.bigint();
let totalRun = 0;
let runs = 0;
if (subEncoding === 127 || subEncoding === 129) {
console.log('Invalid subencoding. ' + subEncoding);
} else if (subEncoding === 0) {
// Raw
for (let h = 0; h < th; h++) {
for (let w = 0; w < tw; w++) {
const fbBytePosOffset = getPixelBytePos(tx + w, ty + h, screenW, screenH);
if (bitsPerPixel === 8) {
await this.unBuffer.waitBytes(1, 'raw 8bits');
const index = this.unBuffer.readUInt8();
const color = colorMap[index];
// RGB
// fb.writeUInt8(color?.r || 255, fbBytePosOffset);
// fb.writeUInt8(color?.g || 255, fbBytePosOffset + 1);
// fb.writeUInt8(color?.b || 255, fbBytePosOffset + 2);
// BGR
fb.writeUInt8(color?.r || 255, fbBytePosOffset + 2);
fb.writeUInt8(color?.g || 255, fbBytePosOffset + 1);
fb.writeUInt8(color?.b || 255, fbBytePosOffset);
} else if (bitsPerPixel === 24 || (bitsPerPixel === 32 && depth === 24)) {
await this.unBuffer.waitBytes(3, 'raw 24bits');
fb.writeUInt8(this.unBuffer.readUInt8(), fbBytePosOffset + 2);
fb.writeUInt8(this.unBuffer.readUInt8(), fbBytePosOffset + 1);
fb.writeUInt8(this.unBuffer.readUInt8(), fbBytePosOffset);
} else if (bitsPerPixel === 32) {
// RGB
// fb.writeUInt8(rect.data.readUInt8(bytePosOffset), fbBytePosOffset);
// fb.writeUInt8(rect.data.readUInt8(bytePosOffset + 1), fbBytePosOffset + 1);
// fb.writeUInt8(rect.data.readUInt8(bytePosOffset + 2), fbBytePosOffset + 2);
// BGR
await this.unBuffer.waitBytes(4, 'raw 32bits');
fb.writeUInt8(this.unBuffer.readUInt8(), fbBytePosOffset + 2);
fb.writeUInt8(this.unBuffer.readUInt8(), fbBytePosOffset + 1);
fb.writeUInt8(this.unBuffer.readUInt8(), fbBytePosOffset);
this.unBuffer.readUInt8();
}
// Alpha
fb.writeUInt8(255, fbBytePosOffset + 3);
}
}
} else if (subEncoding === 1) {
// Single Color
let color: Color4 = { r: 0, g: 0, b: 0, a: 255 };
if (bitsPerPixel === 8) {
await this.unBuffer.waitBytes(1, 'single color 8bits');
const index = this.unBuffer.readUInt8();
color = (colorMap[index] as Color4);
} else if (bitsPerPixel === 24 || (bitsPerPixel === 32 && depth === 24)) {
await this.unBuffer.waitBytes(3, 'single color 24bits');
color.r = this.unBuffer.readUInt8();
color.g = this.unBuffer.readUInt8();
color.b = this.unBuffer.readUInt8();
} else if (bitsPerPixel === 32) {
await this.unBuffer.waitBytes(4, 'single color 32bits');
color.r = this.unBuffer.readUInt8();
color.g = this.unBuffer.readUInt8();
color.b = this.unBuffer.readUInt8();
color.a = this.unBuffer.readUInt8();
}
applyColor(tw, th, tx, ty, screenW, screenH, color, fb);
} else if (subEncoding >= 2 && subEncoding <= 16) {
// Palette
const palette = [];
for (let x = 0; x < subEncoding; x++) {
let color;
if (bitsPerPixel === 24 || (bitsPerPixel === 32 && depth === 24)) {
await this.unBuffer.waitBytes(3, 'palette 24 bits');
color = {
r: this.unBuffer.readUInt8(),
g: this.unBuffer.readUInt8(),
b: this.unBuffer.readUInt8(),
a: 255
};
} else if (bitsPerPixel === 32) {
await this.unBuffer.waitBytes(4, 'palette 32 bits');
color = {
r: this.unBuffer.readUInt8(),
g: this.unBuffer.readUInt8(),
b: this.unBuffer.readUInt8(),
a: this.unBuffer.readUInt8()
};
}
palette.push(color);
}
const bitsPerIndex = subEncoding === 2 ? 1 : subEncoding < 5 ? 2 : 4;
// const i = (tw * th) / (8 / bitsPerIndex);
// const pixels = [];
let byte = 0;
let bitPos = 0;
for (let h = 0; h < th; h++) {
for (let w = 0; w < tw; w++) {
if (bitPos === 0 || w === 0) {
await this.unBuffer.waitBytes(1, 'palette index data');
byte = this.unBuffer.readUInt8();
bitPos = 0;
}
let color : Color4 = { r: 0, g: 0, b: 0, a: 255 };
switch (bitsPerIndex) {
case 1:
if (bitPos === 0) {
color = palette[(byte & 128) >> 7] || { r: 255, g: 255, b: 255, a: 255 };
} else if (bitPos === 1) {
color = palette[(byte & 64) >> 6] || { r: 255, g: 255, b: 255, a: 255 };
} else if (bitPos === 2) {
color = palette[(byte & 32) >> 5] || { r: 255, g: 255, b: 255, a: 255 };
} else if (bitPos === 3) {
color = palette[(byte & 16) >> 4] || { r: 255, g: 255, b: 255, a: 255 };
} else if (bitPos === 4) {
color = palette[(byte & 8) >> 3] || { r: 255, g: 255, b: 255, a: 255 };
} else if (bitPos === 5) {
color = palette[(byte & 4) >> 2] || { r: 255, g: 255, b: 255, a: 255 };
} else if (bitPos === 6) {
color = palette[(byte & 2) >> 1] || { r: 255, g: 255, b: 255, a: 255 };
} else if (bitPos === 7) {
color = palette[byte & 1] || { r: 255, g: 255, b: 255, a: 255 };
}
bitPos++;
if (bitPos === 8) {
bitPos = 0;
}
break;
case 2:
if (bitPos === 0) {
color = palette[(byte & 196) >> 6] || { r: 255, g: 255, b: 255, a: 255 };
} else if (bitPos === 1) {
color = palette[(byte & 48) >> 4] || { r: 255, g: 255, b: 255, a: 255 };
} else if (bitPos === 2) {
color = palette[(byte & 12) >> 2] || { r: 255, g: 255, b: 255, a: 255 };
} else if (bitPos === 3) {
color = palette[byte & 3] || { r: 255, g: 255, b: 255, a: 255 };
}
bitPos++;
if (bitPos === 4) {
bitPos = 0;
}
break;
case 4:
if (bitPos === 0) {
color = palette[(byte & 240) >> 4] || { r: 255, g: 255, b: 255, a: 255 };
} else if (bitPos === 1) {
color = palette[byte & 15] || { r: 255, g: 255, b: 255, a: 255 };
}
bitPos++;
if (bitPos === 2) {
bitPos = 0;
}
break;
}
const fbBytePosOffset = getPixelBytePos(tx + w, ty + h, screenW, screenH);
fb.writeUInt8(color.b ?? 0, fbBytePosOffset);
fb.writeUInt8(color.g ?? 0, fbBytePosOffset + 1);
fb.writeUInt8(color.r ?? 0, fbBytePosOffset + 2);
fb.writeUInt8(color.a ?? 255, fbBytePosOffset + 3);
}
}
} else if (subEncoding === 128) {
// Plain RLE
let runLength = 0;
let color = { r: 0, g: 0, b: 0, a: 0 };
for (let h = 0; h < th; h++) {
for (let w = 0; w < tw; w++) {
if (!runLength) {
if (bitsPerPixel === 24 || (bitsPerPixel === 32 && depth === 24)) {
await this.unBuffer.waitBytes(3, 'rle 24bits');
color = {
r: this.unBuffer.readUInt8(),
g: this.unBuffer.readUInt8(),
b: this.unBuffer.readUInt8(),
a: 255
};
} else if (bitsPerPixel === 32) {
await this.unBuffer.waitBytes(4, 'rle 32bits');
color = {
r: this.unBuffer.readUInt8(),
g: this.unBuffer.readUInt8(),
b: this.unBuffer.readUInt8(),
a: this.unBuffer.readUInt8()
};
}
await this.unBuffer.waitBytes(1, 'rle runsize');
let runSize = this.unBuffer.readUInt8();
while (runSize === 255) {
runLength += runSize;
await this.unBuffer.waitBytes(1, 'rle runsize');
runSize = this.unBuffer.readUInt8();
}
runLength += runSize + 1;
totalRun += runLength;
runs++;
}
const fbBytePosOffset = getPixelBytePos(tx + w, ty + h, screenW, screenH);
fb.writeUInt8(color.b ?? 0, fbBytePosOffset);
fb.writeUInt8(color.g ?? 0, fbBytePosOffset + 1);
fb.writeUInt8(color.r ?? 0, fbBytePosOffset + 2);
fb.writeUInt8(color.a ?? 255, fbBytePosOffset + 3);
runLength--;
}
}
} else if (subEncoding >= 130) {
// Palette RLE
const paletteSize = subEncoding - 128;
const palette = [];
for (let x = 0; x < paletteSize; x++) {
let color;
if (bitsPerPixel === 24 || (bitsPerPixel === 32 && depth === 24)) {
await this.unBuffer.waitBytes(3, 'paletterle 24bits');
color = {
r: this.unBuffer.readUInt8(),
g: this.unBuffer.readUInt8(),
b: this.unBuffer.readUInt8(),
a: 255
};
} else if (bitsPerPixel === 32) {
await this.unBuffer.waitBytes(4, 'paletterle 32bits');
color = {
r: this.unBuffer.readUInt8(),
g: this.unBuffer.readUInt8(),
b: this.unBuffer.readUInt8(),
a: this.unBuffer.readUInt8()
};
}
palette.push(color);
}
let runLength = 0;
let color = { r: 0, g: 0, b: 0, a: 255 };
for (let h = 0; h < th; h++) {
for (let w = 0; w < tw; w++) {
if (!runLength) {
await this.unBuffer.waitBytes(1, 'paletterle indexdata');
const colorIndex = this.unBuffer.readUInt8();
if (!(colorIndex & 128)) {
// Run of length 1
color = palette[colorIndex] ?? { r: 0, g: 0, b: 0, a: 255 };
runLength = 1;
} else {
color = palette[colorIndex - 128] ?? { r: 0, g: 0, b: 0, a: 255 };
await this.unBuffer.waitBytes(1, 'paletterle runlength');
let runSize = this.unBuffer.readUInt8();
while (runSize === 255) {
runLength += runSize;
await this.unBuffer.waitBytes(1, 'paletterle runlength');
runSize = this.unBuffer.readUInt8();
}
runLength += runSize + 1;
}
totalRun += runLength;
runs++;
}
const fbBytePosOffset = getPixelBytePos(tx + w, ty + h, screenW, screenH);
fb.writeUInt8(color.b ?? 0, fbBytePosOffset);
fb.writeUInt8(color.g ?? 0, fbBytePosOffset + 1);
fb.writeUInt8(color.r ?? 0, fbBytePosOffset + 2);
fb.writeUInt8(color.a ?? 255, fbBytePosOffset + 3);
runLength--;
}
}
}
// 127 and 129 are not valid
// 17 to 126 are not used
tiles--;
}
this.unBuffer.flush();
resolve();
});
});
}
}
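
Both the plain-RLE and palette-RLE branches above decode run lengths the same way: every byte equal to 255 extends the run by 255, and the final byte contributes its value plus one. A self-contained sketch of that scheme against the `waitBytes`/`readUInt8` interface of the `SocketBuffer` helper shown later in this diff; the `readRunLength` name is illustrative:

```typescript
// Sketch of the ZRLE run-length scheme used above: 255 bytes extend the run,
// and the final byte adds (value + 1). `readRunLength` is an illustrative name.
interface ByteSource {
    waitBytes(bytes: number, name?: string): Promise<void>;
    readUInt8(): number;
}

async function readRunLength(buf: ByteSource): Promise<number> {
    let runLength = 0;
    await buf.waitBytes(1, 'run length');
    let runSize = buf.readUInt8();
    while (runSize === 255) {
        runLength += runSize; // each 255 adds a full 255 to the run
        await buf.waitBytes(1, 'run length');
        runSize = buf.readUInt8();
    }
    return runLength + runSize + 1; // final byte encodes (remaining - 1)
}
```

For example, the byte sequence 255, 3 decodes to a run of 259 pixels, and a single byte 0 decodes to a run of 1.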

View file

@ -1,134 +0,0 @@
/// this is a pretty poor name.
export class SocketBuffer {
public buffer: Buffer;
public offset: number; // :(
constructor() {
this.buffer = Buffer.from([]);
this.offset = 0;
this.flush();
}
flush(keep = true) {
if (keep && this.buffer?.length) {
this.buffer = this.buffer.subarray(this.offset);
this.offset = 0;
} else {
this.buffer = Buffer.from([]);
this.offset = 0;
}
}
toString() {
return this.buffer.toString();
}
includes(check: Buffer|number|string) {
return this.buffer.includes(check);
}
pushData(data: Buffer) {
this.buffer = Buffer.concat([this.buffer, data]);
}
readInt32BE() {
const data = this.buffer.readInt32BE(this.offset);
this.offset += 4;
return data;
}
readInt32LE() {
const data = this.buffer.readInt32LE(this.offset);
this.offset += 4;
return data;
}
readUInt32BE() {
const data = this.buffer.readUInt32BE(this.offset);
this.offset += 4;
return data;
}
readUInt32LE() {
const data = this.buffer.readUInt32LE(this.offset);
this.offset += 4;
return data;
}
readUInt16BE() {
const data = this.buffer.readUInt16BE(this.offset);
this.offset += 2;
return data;
}
readUInt16LE() {
const data = this.buffer.readUInt16LE(this.offset);
this.offset += 2;
return data;
}
readUInt8() {
const data = this.buffer.readUInt8(this.offset);
this.offset += 1;
return data;
}
readInt8() {
const data = this.buffer.readInt8(this.offset);
this.offset += 1;
return data;
}
readNBytes(bytes: number, offset = this.offset) {
return this.buffer.slice(offset, offset + bytes);
}
readNBytesOffset(bytes: number) {
const data = this.buffer.slice(this.offset, this.offset + bytes);
this.offset += bytes;
return data;
}
setOffset(n: number) {
this.offset = n;
}
bytesLeft() {
return this.buffer.length - this.offset;
}
// name is nullable because callers often just don't pass it
async waitBytes(bytes: number, name: any | null = null): Promise<void> {
if (this.bytesLeft() >= bytes) {
return;
}
let counter = 0;
return new Promise(async (resolve, reject) => {
while (this.bytesLeft() < bytes) {
counter++;
// console.log('Waiting. BytesLeft: ' + this.bytesLeft() + ' Wanted: ' + bytes);
await this.sleep(4);
if (counter === 50) {
console.log('Stuck on ' + name + ' - Buffer Size: ' + this.buffer.length + ' BytesLeft: ' + this.bytesLeft() + ' BytesNeeded: ' + bytes);
}
}
resolve();
});
}
fill(data: Buffer) {
this.buffer.fill(data, this.offset, this.offset + data.length);
this.offset += data.length;
}
fillMultiple(data: Buffer, repeats: number) {
this.buffer.fill(data, this.offset, this.offset + data.length * repeats);
this.offset += data.length * repeats;
}
sleep(n: number): Promise<void> {
return new Promise((resolve, reject) => {
setTimeout(resolve, n);
});
}
}
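
`SocketBuffer` accumulates bytes pushed from the socket and lets a decoder await a required byte count before reading; `waitBytes` polls every 4 ms and logs a warning once a read has been starved for 50 polls. A hedged usage sketch, assuming the module is importable as `./socketbuffer` (the sample payload is made up):

```typescript
// Usage sketch for SocketBuffer: push data as it arrives, await the bytes a
// decoder needs, then read them. The sample payload is illustrative.
import { SocketBuffer } from './socketbuffer';

async function demo(): Promise<void> {
    const buf = new SocketBuffer();

    // Simulate a socket delivering a 2-byte big-endian length plus a flag byte.
    setTimeout(() => buf.pushData(Buffer.from([0x01, 0x02, 0xaa])), 10);

    await buf.waitBytes(3, 'demo header');
    const length = buf.readUInt16BE(); // 0x0102 = 258
    const flag = buf.readUInt8();      // 0xaa
    console.log(length, flag);

    buf.flush(); // discard consumed bytes, keep anything still unread
}

demo();
```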

View file

@ -1,44 +0,0 @@
export type Rect = {
x: number,
y: number,
width: number,
height: number,
}
export type VncRectangle = Rect & {
encoding: number,
data: Buffer | null // ?
};
export type PixelFormat = {
bitsPerPixel: number,
depth: number,
bigEndianFlag: number,
trueColorFlag: number,
redMax: number,
greenMax: number,
blueMax: number,
redShift: number,
blueShift: number,
greenShift: number
}
export type Cursor = Rect & {
cursorPixels: Buffer|null,
bitmask: Buffer|null,
posX: number,
posY: number
}
export type Color3 = {
r: number,
g: number,
b: number
}
export type Color4 = Color3 & {
a: number,
};
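
`PixelFormat` mirrors the fields of the RFB SetPixelFormat message. For orientation, a value describing the common 32 bits-per-pixel, depth-24, little-endian true-colour format that the decoders above special-case; the concrete shift and max values are typical, not taken from this codebase:

```typescript
import { PixelFormat } from './types';

// A typical 32bpp / depth-24 true-colour pixel format (illustrative values).
const bgrx32: PixelFormat = {
    bitsPerPixel: 32,
    depth: 24,
    bigEndianFlag: 0,
    trueColorFlag: 1,
    redMax: 255,
    greenMax: 255,
    blueMax: 255,
    redShift: 16,
    greenShift: 8,
    blueShift: 0
};
```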

View file

@ -1 +0,0 @@
../tsconfig.json

View file

@ -1,7 +1,6 @@
{
"name": "@socketcomputer/shared",
"version": "1.0.0",
"private": "true",
"description": "crusttest shared bits",
"type": "module",
"main": "dist/index.js",
@ -9,21 +8,16 @@
"files": [
"dist"
],
"targets": {
"types": {},
"shared":{
"context": "browser",
"isLibrary": true,
"outputFormat": "esmodule"
}
"types": {},
"shared": {
"context": "browser",
"isLibrary": true,
"outputFormat": "esmodule"
}
},
"dependencies": {
},
"devDependencies": {
"parcel": "^2.12.0"
"parcel": "^2.12.0"
},
"scripts": {
"build": "parcel build src/index.ts --target shared --target types"

View file

@ -1,7 +1,6 @@
{
"name": "@socketcomputer/webapp",
"version": "1.0.0",
"private": "true",
"description": "",
"scripts": {
"serve": "parcel src/index.html",
@ -11,7 +10,5 @@
"license": "ISC",
"devDependencies": {
"parcel": "^2.12.0"
},
"dependencies": {
}
}

7122
yarn.lock

File diff suppressed because it is too large Load diff