feat: Host a relay on Hetzner (#114)

We are hosting a [MoQ](https://quic.video) relay on a remote (bare-metal) server on Hetzner, with a lot of help from @victorpahuus.
Wanjohi authored on 2024-09-26 21:34:42 +03:00, committed by GitHub
parent c4a6895726 · commit bae089e223
74 changed files with 7107 additions and 96 deletions


@@ -0,0 +1,120 @@
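// A promise whose resolve/reject are exposed to the caller, plus a flag tracking whether it is still pending.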
export class Deferred<T> {
promise: Promise<T>
resolve!: (value: T | PromiseLike<T>) => void
reject!: (reason: any) => void
pending = true
constructor() {
this.promise = new Promise((resolve, reject) => {
this.resolve = (value) => {
this.pending = false
resolve(value)
}
this.reject = (reason) => {
this.pending = false
reject(reason)
}
})
}
}
export type WatchNext<T> = [T, Promise<WatchNext<T>> | undefined]
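// Holds a value and resolves a chained promise on each update, so consumers can await the next value.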
export class Watch<T> {
#current: WatchNext<T>
#next = new Deferred<WatchNext<T>>()
constructor(init: T) {
this.#next = new Deferred<WatchNext<T>>()
this.#current = [init, this.#next.promise]
}
value(): WatchNext<T> {
return this.#current
}
update(v: T | ((v: T) => T)) {
if (!this.#next.pending) {
throw new Error("already closed")
}
// If we're given a function, call it with the current value
if (v instanceof Function) {
v = v(this.#current[0])
}
const next = new Deferred<WatchNext<T>>()
this.#current = [v, next.promise]
this.#next.resolve(this.#current)
this.#next = next
}
close() {
this.#current[1] = undefined
this.#next.resolve(this.#current)
}
}
// Wakes up multiple consumers.
export class Notify {
#next = new Deferred<void>()
async wait() {
return this.#next.promise
}
wake() {
if (!this.#next.pending) {
throw new Error("closed")
}
this.#next.resolve()
this.#next = new Deferred<void>()
}
close() {
this.#next.resolve()
}
}
// Allows queuing N values, like a Channel.
export class Queue<T> {
#stream: TransformStream<T, T>
#closed = false
constructor(capacity = 1) {
const queue = new CountQueuingStrategy({ highWaterMark: capacity })
this.#stream = new TransformStream({}, undefined, queue)
}
async push(v: T) {
const w = this.#stream.writable.getWriter()
await w.write(v)
w.releaseLock()
}
async next(): Promise<T | undefined> {
const r = this.#stream.readable.getReader()
const { value, done } = await r.read()
r.releaseLock()
if (done) return
return value
}
async abort(err: Error) {
if (this.#closed) return
await this.#stream.writable.abort(err)
this.#closed = true
}
async close() {
if (this.#closed) return
await this.#stream.writable.close()
this.#closed = true
}
closed() {
return this.#closed
}
}
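For reference, here is a minimal usage sketch of the `Watch` and `Queue` helpers above. The `follow` and `example` functions are hypothetical illustrations, not part of this commit: a consumer walks the promise chain returned by `value()` while a producer calls `update()` and finally `close()`.

```ts
// Watch and Queue are the classes defined in the file above (its path is not shown in this diff).
async function follow(watch: Watch<number>) {
	let current = watch.value()
	for (;;) {
		const [value, next] = current
		console.log("value:", value)
		if (next === undefined) break // close() ends the chain
		current = await next // resolves on the next update() or close()
	}
}

async function example() {
	const watch = new Watch(0)
	const done = follow(watch)

	watch.update((v) => v + 1) // functional update based on the current value
	watch.update(2)
	watch.close()
	await done // logs 0, 1, 2, and finally 2 again when the chain closes

	const queue = new Queue<string>(2)
	await queue.push("hello")
	await queue.close()
	console.log(await queue.next()) // "hello"
	console.log(await queue.next()) // undefined: the queue is drained and closed
}
```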


@@ -0,0 +1,18 @@
// Utility function to download a Uint8Array for debugging.
export function download(data: Uint8Array, name: string) {
const blob = new Blob([data], {
type: "application/octet-stream",
})
const url = window.URL.createObjectURL(blob)
const a = document.createElement("a")
a.href = url
a.download = name
document.body.appendChild(a)
a.style.display = "none"
a.click()
a.remove()
setTimeout(() => window.URL.revokeObjectURL(url), 1000)
}
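A call site might look like this (hypothetical example, not part of the commit):

```ts
// Dump a captured payload to a file for inspection.
download(new Uint8Array([0xde, 0xad, 0xbe, 0xef]), "debug.bin")
```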


@@ -0,0 +1,14 @@
// I hate javascript
export function asError(e: any): Error {
if (e instanceof Error) {
return e
} else if (typeof e === "string") {
return new Error(e)
} else {
return new Error(String(e))
}
}
export function isError(e: any): e is Error {
return e instanceof Error
}


@@ -0,0 +1 @@
export { asError } from "./error"

packages/moq/common/ring.ts (new file, 176 lines)

@@ -0,0 +1,176 @@
// Ring buffer with audio samples.
enum STATE {
READ_POS = 0, // The current read position
WRITE_POS, // The current write position
	LENGTH, // Clever way of tracking the total number of enum values.
}
interface FrameCopyToOptions {
frameCount?: number
frameOffset?: number
planeIndex: number
}
// This is implemented by AudioData in WebCodecs, but we don't import it because it's a DOM type.
interface Frame {
numberOfFrames: number
numberOfChannels: number
copyTo(dst: Float32Array, options: FrameCopyToOptions): void
}
// Holds only SharedArrayBuffers so it can be sent via postMessage (structured clone drops the prototype, but there are no methods to lose).
export class RingShared {
state: SharedArrayBuffer
channels: SharedArrayBuffer[]
capacity: number
constructor(channels: number, capacity: number) {
		// Store the read/write positions in a separate SharedArrayBuffer.
this.state = new SharedArrayBuffer(STATE.LENGTH * Int32Array.BYTES_PER_ELEMENT)
// Create a buffer for each audio channel
this.channels = []
for (let i = 0; i < channels; i += 1) {
const buffer = new SharedArrayBuffer(capacity * Float32Array.BYTES_PER_ELEMENT)
this.channels.push(buffer)
}
this.capacity = capacity
}
}
export class Ring {
state: Int32Array
channels: Float32Array[]
capacity: number
constructor(shared: RingShared) {
this.state = new Int32Array(shared.state)
this.channels = []
for (const channel of shared.channels) {
this.channels.push(new Float32Array(channel))
}
this.capacity = shared.capacity
}
	// Write samples for a single audio frame, returning the total number written.
write(frame: Frame): number {
const readPos = Atomics.load(this.state, STATE.READ_POS)
const writePos = Atomics.load(this.state, STATE.WRITE_POS)
const startPos = writePos
let endPos = writePos + frame.numberOfFrames
if (endPos > readPos + this.capacity) {
endPos = readPos + this.capacity
if (endPos <= startPos) {
// No space to write
return 0
}
}
const startIndex = startPos % this.capacity
const endIndex = endPos % this.capacity
// Loop over each channel
for (let i = 0; i < this.channels.length; i += 1) {
const channel = this.channels[i]
// If the AudioData doesn't have enough channels, duplicate it.
const planeIndex = Math.min(i, frame.numberOfChannels - 1)
if (startIndex < endIndex) {
// One continuous range to copy.
const full = channel.subarray(startIndex, endIndex)
frame.copyTo(full, {
planeIndex,
frameCount: endIndex - startIndex,
})
} else {
const first = channel.subarray(startIndex)
const second = channel.subarray(0, endIndex)
frame.copyTo(first, {
planeIndex,
frameCount: first.length,
})
// We need this conditional when startIndex == 0 and endIndex == 0
// When capacity=4410 and frameCount=1024, this was happening 52s into the audio.
if (second.length) {
frame.copyTo(second, {
planeIndex,
frameOffset: first.length,
frameCount: second.length,
})
}
}
}
Atomics.store(this.state, STATE.WRITE_POS, endPos)
return endPos - startPos
}
read(dst: Float32Array[]): number {
const readPos = Atomics.load(this.state, STATE.READ_POS)
const writePos = Atomics.load(this.state, STATE.WRITE_POS)
const startPos = readPos
let endPos = startPos + dst[0].length
if (endPos > writePos) {
endPos = writePos
if (endPos <= startPos) {
// Nothing to read
return 0
}
}
const startIndex = startPos % this.capacity
const endIndex = endPos % this.capacity
// Loop over each channel
for (let i = 0; i < dst.length; i += 1) {
			if (i >= this.channels.length) {
				// Ignore excess output channels; there is nothing to copy into them.
				continue
			}
const input = this.channels[i]
const output = dst[i]
if (startIndex < endIndex) {
const full = input.subarray(startIndex, endIndex)
output.set(full)
} else {
const first = input.subarray(startIndex)
const second = input.subarray(0, endIndex)
output.set(first)
output.set(second, first.length)
}
}
Atomics.store(this.state, STATE.READ_POS, endPos)
return endPos - startPos
}
clear() {
const pos = Atomics.load(this.state, STATE.WRITE_POS)
Atomics.store(this.state, STATE.READ_POS, pos)
}
size() {
// TODO is this thread safe?
const readPos = Atomics.load(this.state, STATE.READ_POS)
const writePos = Atomics.load(this.state, STATE.WRITE_POS)
return writePos - readPos
}
}
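To illustrate how these pieces fit together, here is a hypothetical sketch (not part of this commit): the producer allocates `RingShared`, wraps it in `Ring`, and writes AudioData-like frames; a consumer (for example an AudioWorklet that received `shared` via postMessage) wraps the same buffers and reads fixed-size blocks. `makeMonoFrame` is an assumed stand-in for WebCodecs `AudioData`, and the import path assumes a module sitting next to ring.ts.

```ts
import { Ring, RingShared } from "./ring" // assumed relative path to the file above

// A mono stand-in for WebCodecs AudioData; Ring.write duplicates it across ring channels.
function makeMonoFrame(samples: Float32Array) {
	return {
		numberOfFrames: samples.length,
		numberOfChannels: 1,
		copyTo(dst: Float32Array, opts: { planeIndex: number; frameOffset?: number; frameCount?: number }) {
			const offset = opts.frameOffset ?? 0
			const count = opts.frameCount ?? samples.length
			dst.set(samples.subarray(offset, offset + count))
		},
	}
}

// Producer side: 2 channels, one second of capacity at 48 kHz.
const shared = new RingShared(2, 48000)
const producer = new Ring(shared)
const written = producer.write(makeMonoFrame(new Float32Array(1024).fill(0.5)))
console.log("samples written:", written) // 1024, unless the ring was full

// Consumer side: typically a worker/AudioWorklet that received `shared` via postMessage.
const consumer = new Ring(shared)
const out = [new Float32Array(128), new Float32Array(128)]
const read = consumer.read(out) // copies up to 128 samples per channel
console.log("samples read:", read)
```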


@@ -0,0 +1,33 @@
// MediaTrackSettings can represent both audio and video, which means a LOT of possibly undefined properties.
// This is a fork of the MediaTrackSettings interface with the properties required for audio or video.
export interface AudioTrackSettings {
deviceId: string
groupId: string
autoGainControl: boolean
channelCount: number
echoCancellation: boolean
noiseSuppression: boolean
sampleRate: number
sampleSize: number
}
export interface VideoTrackSettings {
deviceId: string
groupId: string
aspectRatio: number
facingMode: "user" | "environment" | "left" | "right"
frameRate: number
height: number
resizeMode: "none" | "crop-and-scale"
width: number
}
export function isAudioTrackSettings(settings: MediaTrackSettings): settings is AudioTrackSettings {
return "sampleRate" in settings
}
export function isVideoTrackSettings(settings: MediaTrackSettings): settings is VideoTrackSettings {
return "width" in settings
}
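A brief sketch of how these type guards might be used (hypothetical, not part of the commit), narrowing the settings of tracks obtained from `getUserMedia`:

```ts
// isAudioTrackSettings and isVideoTrackSettings are the guards defined in the file above.
async function describeTracks() {
	const media = await navigator.mediaDevices.getUserMedia({ audio: true, video: true })
	for (const track of media.getTracks()) {
		const settings = track.getSettings()
		if (isAudioTrackSettings(settings)) {
			console.log(`audio: ${settings.sampleRate} Hz, ${settings.channelCount} channel(s)`)
		} else if (isVideoTrackSettings(settings)) {
			console.log(`video: ${settings.width}x${settings.height} @ ${settings.frameRate} fps`)
		}
	}
}
```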


@@ -0,0 +1,4 @@
{
"extends": "../tsconfig.json",
"include": ["."]
}