✨ feat: Host a relay on Hetzner (#114)
We are hosting a [MoQ](https://quic.video) relay on a remote (bare-metal) server on Hetzner, with a lot of help from @victorpahuus.
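For context, a playback client reaches a relay like this over WebTransport. A minimal sketch, assuming a hypothetical relay URL and using the browser WebTransport API directly rather than this repo's transport wrapper:

```ts
// Hedged sketch: connect to a MoQ relay over WebTransport (standard browser API).
// The URL is a placeholder; the real endpoint depends on the Hetzner deployment.
async function connectRelay(url = "https://relay.example.com:4443/moq") {
	const transport = new WebTransport(url)
	await transport.ready // resolves once the QUIC/WebTransport session is established

	// MoQ session setup (announce/subscribe) happens on top of this transport;
	// that part lives in packages/moq/transport and is omitted here.
	return transport
}
```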
73 packages/moq/playback/worker/audio.ts Normal file
@@ -0,0 +1,73 @@
import * as Message from "./message"
import { Ring } from "../../common/ring"
import { Component, Frame } from "./timeline"
import * as MP4 from "../../media/mp4"

// This is run in a worker.
export class Renderer {
	#ring: Ring
	#timeline: Component

	#decoder!: AudioDecoder
	#stream: TransformStream<Frame, AudioData>

	constructor(config: Message.ConfigAudio, timeline: Component) {
		this.#timeline = timeline
		this.#ring = new Ring(config.ring)

		this.#stream = new TransformStream({
			start: this.#start.bind(this),
			transform: this.#transform.bind(this),
		})

		this.#run().catch(console.error)
	}

	#start(controller: TransformStreamDefaultController) {
		this.#decoder = new AudioDecoder({
			output: (frame: AudioData) => {
				controller.enqueue(frame)
			},
			error: console.warn,
		})
	}

	#transform(frame: Frame) {
		if (this.#decoder.state !== "configured") {
			const track = frame.track
			if (!MP4.isAudioTrack(track)) throw new Error("expected audio track")

			// We only support OPUS right now, which doesn't need a description.
			this.#decoder.configure({
				codec: track.codec,
				sampleRate: track.audio.sample_rate,
				numberOfChannels: track.audio.channel_count,
			})
		}

		const chunk = new EncodedAudioChunk({
			type: frame.sample.is_sync ? "key" : "delta",
			timestamp: frame.sample.dts / frame.track.timescale,
			duration: frame.sample.duration,
			data: frame.sample.data,
		})

		this.#decoder.decode(chunk)
	}

	async #run() {
		const reader = this.#timeline.frames.pipeThrough(this.#stream).getReader()

		for (;;) {
			const { value: frame, done } = await reader.read()
			if (done) break

			// Write audio samples to the ring buffer, dropping when there's no space.
			const written = this.#ring.write(frame)

			if (written < frame.numberOfFrames) {
				console.warn(`dropped ${frame.numberOfFrames - written} audio samples`)
			}
		}
	}
}
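The Ring class itself is not part of this diff. As a rough illustration of what `#ring.write(frame)` has to do, here is a hedged sketch (names and layout invented) of copying decoded samples into shared memory for an AudioWorklet to consume:

```ts
// Illustration only: the real Ring lives in packages/moq/common/ring and also manages
// read/write indices and wrap-around. This just shows the WebCodecs copy involved.
function copySamples(frame: AudioData, shared: SharedArrayBuffer, byteOffset: number): number {
	// View over the shared memory, sized for one channel's worth of samples.
	const dst = new Float32Array(shared, byteOffset, frame.numberOfFrames)
	frame.copyTo(dst, { planeIndex: 0, format: "f32-planar" }) // copy plane 0 (first channel)
	return frame.numberOfFrames // the real Ring reports how many samples actually fit
}
```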
119 packages/moq/playback/worker/index.ts Normal file
@@ -0,0 +1,119 @@
import { Timeline } from "./timeline"

import * as Audio from "./audio"
import * as Video from "./video"

import * as MP4 from "../../media/mp4"
import * as Message from "./message"
import { asError } from "../../common/error"
import { Deferred } from "../../common/async"
import { GroupReader, Reader } from "../../transport/objects"

class Worker {
	// Timeline receives samples, buffering them and choosing the timestamp to render.
	#timeline = new Timeline()

	// A map of init tracks.
	#inits = new Map<string, Deferred<Uint8Array>>()

	// Renderer requests samples, rendering video frames and emitting audio frames.
	#audio?: Audio.Renderer
	#video?: Video.Renderer

	on(e: MessageEvent) {
		const msg = e.data as Message.ToWorker

		if (msg.config) {
			this.#onConfig(msg.config)
		} else if (msg.init) {
			// TODO buffer the init segment so we don't hold the stream open.
			this.#onInit(msg.init)
		} else if (msg.segment) {
			this.#onSegment(msg.segment).catch(console.warn)
		} else {
			throw new Error(`unknown message: ${JSON.stringify(msg)}`)
		}
	}

	#onConfig(msg: Message.Config) {
		if (msg.audio) {
			this.#audio = new Audio.Renderer(msg.audio, this.#timeline.audio)
		}

		if (msg.video) {
			this.#video = new Video.Renderer(msg.video, this.#timeline.video)
		}
	}

	#onInit(msg: Message.Init) {
		let init = this.#inits.get(msg.name)
		if (!init) {
			init = new Deferred()
			this.#inits.set(msg.name, init)
		}

		init.resolve(msg.data)
	}

	async #onSegment(msg: Message.Segment) {
		let init = this.#inits.get(msg.init)
		if (!init) {
			init = new Deferred()
			this.#inits.set(msg.init, init)
		}

		// Create a new stream that we will use to decode.
		const container = new MP4.Parser(await init.promise)

		const timeline = msg.kind === "audio" ? this.#timeline.audio : this.#timeline.video
		const reader = new GroupReader(msg.header, new Reader(msg.buffer, msg.stream))

		// Create a queue that will contain each MP4 frame.
		const queue = new TransformStream<MP4.Frame>({})
		const segment = queue.writable.getWriter()

		// Add the segment to the timeline.
		const segments = timeline.segments.getWriter()
		await segments.write({
			sequence: msg.header.group,
			frames: queue.readable,
		})
		segments.releaseLock()

		// Read each chunk, decoding the MP4 frames and adding them to the queue.
		for (;;) {
			const chunk = await reader.read()
			if (!chunk) {
				break
			}

			if (!(chunk.payload instanceof Uint8Array)) {
				throw new Error(`invalid payload: ${chunk.payload}`)
			}

			const frames = container.decode(chunk.payload)
			for (const frame of frames) {
				await segment.write(frame)
			}
		}

		// We're done.
		await segment.close()
	}
}

// Pass all events to the worker.
const worker = new Worker()
self.addEventListener("message", (msg) => {
	try {
		worker.on(msg)
	} catch (e) {
		const err = asError(e)
		console.warn("worker error:", err)
	}
})

// Validates this is an expected message
function _send(msg: Message.FromWorker) {
	postMessage(msg)
}
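The sending side is not part of this diff. A minimal sketch of what the main thread might look like, where the worker URL and import paths are assumptions and the real wiring lives elsewhere in packages/moq/playback:

```ts
// Hedged sketch of the main-thread side: spawn the worker module and hand it a segment.
import * as Message from "./worker/message"

const worker = new Worker(new URL("./worker", import.meta.url), { type: "module" })

function sendSegment(segment: Message.Segment) {
	const msg: Message.ToWorker = { segment }
	// Transfer the stream and the backing buffer so they move to the worker without copying.
	worker.postMessage(msg, [segment.stream, segment.buffer.buffer])
}
```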
98 packages/moq/playback/worker/message.ts Normal file
@@ -0,0 +1,98 @@
import { GroupHeader } from "../../transport/objects"
import { RingShared } from "../../common/ring"

export interface Config {
	audio?: ConfigAudio
	video?: ConfigVideo
}

export interface ConfigAudio {
	channels: number
	sampleRate: number

	ring: RingShared
}

export interface ConfigVideo {
	canvas: OffscreenCanvas
}

export interface Init {
	name: string // name of the init object
	data: Uint8Array
}

export interface Segment {
	init: string // name of the init object
	kind: "audio" | "video"
	header: GroupHeader
	buffer: Uint8Array
	stream: ReadableStream<Uint8Array>
}

/*
export interface Play {
	// Start playback once the minimum buffer size has been reached.
	minBuffer: number
}

export interface Seek {
	timestamp: number
}
*/

// Sent periodically with the current timeline info.
export interface Timeline {
	// The current playback position
	timestamp?: number

	// Audio specific information
	audio: TimelineAudio

	// Video specific information
	video: TimelineVideo
}

export interface TimelineAudio {
	buffer: Range[]
}

export interface TimelineVideo {
	buffer: Range[]
}

export interface Range {
	start: number
	end: number
}

// Used to validate that only the correct messages can be sent.

// Any top-level messages that can be sent to the worker.
export interface ToWorker {
	// Sent to configure on startup.
	config?: Config

	// Sent on each init/data stream
	init?: Init
	segment?: Segment

	/*
	// Sent to control playback
	play?: Play
	seek?: Seek
	*/
}

// Any top-level messages that can be sent from the worker.
export interface FromWorker {
	// Sent back to the main thread regularly to update the UI
	timeline?: Timeline
}

/*
interface ToWorklet {
	config?: Audio.Config
}

*/
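On the receiving end, the worker currently only defines an unused `_send`; once timeline updates are emitted, the main-thread side could look like this hedged sketch (import path assumed):

```ts
// Hedged sketch: consuming FromWorker messages on the main thread to drive a UI.
import * as Message from "./worker/message"

function watchTimeline(worker: Worker) {
	worker.addEventListener("message", (e: MessageEvent) => {
		const msg = e.data as Message.FromWorker
		if (msg.timeline?.timestamp !== undefined) {
			// Update a progress indicator; buffered ranges are in msg.timeline.audio/video.buffer.
			console.log("playhead:", msg.timeline.timestamp)
		}
	})
}
```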
118 packages/moq/playback/worker/timeline.ts Normal file
@@ -0,0 +1,118 @@
import type { Frame } from "../../media/mp4"
export type { Frame }

export interface Range {
	start: number
	end: number
}

export class Timeline {
	// Maintain audio and video separately
	audio: Component
	video: Component

	// Construct a timeline
	constructor() {
		this.audio = new Component()
		this.video = new Component()
	}
}

interface Segment {
	sequence: number
	frames: ReadableStream<Frame>
}

export class Component {
	#current?: Segment

	frames: ReadableStream<Frame>
	#segments: TransformStream<Segment, Segment>

	constructor() {
		this.frames = new ReadableStream({
			pull: this.#pull.bind(this),
			cancel: this.#cancel.bind(this),
		})

		// This is a hack to have an async channel with 100 items.
		this.#segments = new TransformStream({}, { highWaterMark: 100 })
	}

	get segments() {
		return this.#segments.writable
	}

	async #pull(controller: ReadableStreamDefaultController<Frame>) {
		for (;;) {
			// Get the next segment to render.
			const segments = this.#segments.readable.getReader()

			let res
			if (this.#current) {
				// Get the next frame to render.
				const frames = this.#current.frames.getReader()

				// Wait for either the frames or segments to be ready.
				// NOTE: This assumes that the first promise gets priority.
				res = await Promise.race([frames.read(), segments.read()])

				frames.releaseLock()
			} else {
				res = await segments.read()
			}

			segments.releaseLock()

			const { value, done } = res

			if (done) {
				// We assume the current segment has been closed.
				// TODO support the segments stream closing
				this.#current = undefined
				continue
			}

			if (!isSegment(value)) {
				// Return so the reader can decide when to get the next frame.
				controller.enqueue(value)
				return
			}

			// We didn't get any frames, and instead got a new segment.
			if (this.#current) {
				if (value.sequence < this.#current.sequence) {
					// Our segment is older than the current one; abandon it.
					await value.frames.cancel("skipping segment; too old")
					continue
				} else {
					// Our segment is newer than the current one; cancel the old one.
					await this.#current.frames.cancel("skipping segment; too slow")
				}
			}

			this.#current = value
		}
	}

	async #cancel(reason: any) {
		if (this.#current) {
			await this.#current.frames.cancel(reason)
		}

		const segments = this.#segments.readable.getReader()
		for (;;) {
			const { value: segment, done } = await segments.read()
			if (done) break

			await segment.frames.cancel(reason)
		}
	}
}

// Return if a type is a segment or frame
// eslint-disable-next-line @typescript-eslint/no-redundant-type-constituents
function isSegment(value: Segment | Frame): value is Segment {
	// eslint-disable-next-line @typescript-eslint/no-unnecessary-condition
	return (value as Segment).frames !== undefined
}
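A hedged sketch of the plumbing around Component, mirroring what index.ts and the renderers do; Frame values come from the MP4 parser, so only the wiring is shown:

```ts
// Sketch only: feed a Component one segment and pull the first frame back out.
import { Timeline, Frame } from "./timeline"

async function wire(timeline: Timeline, frames: ReadableStream<Frame>, sequence: number) {
	// Producer: register a segment whose frames arrive on the given stream.
	const segments = timeline.video.segments.getWriter()
	await segments.write({ sequence, frames })
	segments.releaseLock()

	// Consumer: the renderer pulls from timeline.video.frames; Component.#pull drops
	// older segments when a newer one shows up.
	const reader = timeline.video.frames.getReader()
	const { value, done } = await reader.read()
	reader.releaseLock() // the real renderer keeps reading in a loop
	return done ? undefined : value
}
```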
84 packages/moq/playback/worker/video.ts Normal file
@@ -0,0 +1,84 @@
import { Frame, Component } from "./timeline"
import * as MP4 from "../../media/mp4"
import * as Message from "./message"

export class Renderer {
	#canvas: OffscreenCanvas
	#timeline: Component

	#decoder!: VideoDecoder
	#queue: TransformStream<Frame, VideoFrame>

	constructor(config: Message.ConfigVideo, timeline: Component) {
		this.#canvas = config.canvas
		this.#timeline = timeline

		this.#queue = new TransformStream({
			start: this.#start.bind(this),
			transform: this.#transform.bind(this),
		})

		this.#run().catch(console.error)
	}

	async #run() {
		const reader = this.#timeline.frames.pipeThrough(this.#queue).getReader()
		for (;;) {
			const { value: frame, done } = await reader.read()
			if (done) break

			self.requestAnimationFrame(() => {
				this.#canvas.width = frame.displayWidth
				this.#canvas.height = frame.displayHeight

				const ctx = this.#canvas.getContext("2d")
				if (!ctx) throw new Error("failed to get canvas context")

				ctx.drawImage(frame, 0, 0, frame.displayWidth, frame.displayHeight) // TODO respect aspect ratio
				frame.close()
			})
		}
	}

	#start(controller: TransformStreamDefaultController<VideoFrame>) {
		this.#decoder = new VideoDecoder({
			output: (frame: VideoFrame) => {
				controller.enqueue(frame)
			},
			error: console.error,
		})
	}

	#transform(frame: Frame) {
		// Configure the decoder with the first frame
		if (this.#decoder.state !== "configured") {
			const { sample, track } = frame

			const desc = sample.description
			const box = desc.avcC ?? desc.hvcC ?? desc.vpcC ?? desc.av1C
			if (!box) throw new Error(`unsupported codec: ${track.codec}`)

			const buffer = new MP4.Stream(undefined, 0, MP4.Stream.BIG_ENDIAN)
			box.write(buffer)
			const description = new Uint8Array(buffer.buffer, 8) // Remove the box header.

			if (!MP4.isVideoTrack(track)) throw new Error("expected video track")

			this.#decoder.configure({
				codec: track.codec,
				codedHeight: track.video.height,
				codedWidth: track.video.width,
				description,
				// optimizeForLatency: true
			})
		}

		const chunk = new EncodedVideoChunk({
			type: frame.sample.is_sync ? "key" : "delta",
			data: frame.sample.data,
			timestamp: frame.sample.dts / frame.track.timescale,
		})

		this.#decoder.decode(chunk)
	}
}
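For the "respect aspect ratio" TODO above, a hedged sketch of one way to scale-to-fit and centre instead of stretching; it only matters if the canvas is not resized to the frame, as the code above currently does:

```ts
// Sketch only: letterbox/pillarbox a decoded frame onto a fixed-size canvas.
function drawFitted(ctx: OffscreenCanvasRenderingContext2D, frame: VideoFrame) {
	const canvas = ctx.canvas
	const scale = Math.min(canvas.width / frame.displayWidth, canvas.height / frame.displayHeight)
	const w = frame.displayWidth * scale
	const h = frame.displayHeight * scale

	ctx.clearRect(0, 0, canvas.width, canvas.height)
	ctx.drawImage(frame, (canvas.width - w) / 2, (canvas.height - h) / 2, w, h)
}
```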