Merge pull request #4 from TUM-Master-Thesis-MoQ/testbed
Ender-Wang authored Oct 8, 2024
2 parents 0699b86 + 3955682 commit d885b7e
Showing 47 changed files with 373,077 additions and 10 deletions.
3 changes: 2 additions & 1 deletion .gitignore
@@ -1 +1,2 @@
utilities/*.pem
**/__pycache__/
32 changes: 32 additions & 0 deletions README.md
@@ -102,6 +102,38 @@ This thesis aims to implement a prototype live-streaming system based on the MoQ

</table>

## Testbed Network Setup

<table>
<tr>
<td>
<img width="500" src="https://github.com/user-attachments/assets/e4711289-3148-4d85-aba5-c423f5b7714c">
</td>
</tr>

<tr>
<td>
<details>
<summary>Version History</summary>
<table>
<tr>
<td>
<img width="500" src="https://github.com/user-attachments/assets/1445e9ee-2120-4c88-a4d6-8a45ed9d27c4"/>
</td>
</tr>
<tr>
<td>
<img width="500" src="https://github.com/user-attachments/assets/525925fa-0576-4592-910b-50d26c9f3f4d"/>
</td>
</tr>
</table>
</details>
</td>
</tr>

</table>

## Roadmap

- [x] Build a client-server app using quic-go
11 changes: 8 additions & 3 deletions client/audience-app/src/App.tsx
@@ -18,11 +18,12 @@ let context: CanvasRenderingContext2D | null = null;
let sessionInternal: Session | null = null;
let selectedChannel = "";
let videoTracks: string[] = [];
let currentTrack = "";

let mediaType = new Map<string, number>(); // tracks the media type (video, audio etc.) for each subscription, possible keys: "hd", "md", "audio"

function App() {
let latencyLogging = false; //! testbed: latency test_0

const [session, setSession] = useState<Session | null>(null); // UI: session

const [channelListObj, setChannelList] = useState<string[]>([]); // UI: channel list
@@ -245,12 +246,13 @@ function App() {

const videoDecoderWorker = new VideoDecoderWorker();
videoDecoderWorker.onmessage = (e) => {
const { action, frame } = e.data;
const { action, frame }: { action: string; frame: VideoFrame } = e.data;
// console.log("got frame from worker", frame);
if (action == "renderFrame") {
try {
requestAnimationFrame(() => {
context!.drawImage(frame, 0, 0, canvas!.width, canvas!.height);
latencyLogging && console.log(`🧪 🎬 obj latency ${frame.timestamp} #6: ${Date.now()}`);
frame.close();
});
} catch (err) {
@@ -261,7 +263,7 @@

const audioDecoderWorker = new AudioDecoderWorker();
audioDecoderWorker.onmessage = (e) => {
const { action, audio } = e.data;
const { action, audio }: { action: string; audio: AudioData } = e.data;
if (action == "playAudio") {
// console.log("🔊 Decoded audio data:", audio);
if (audioContextRef.current) {
@@ -281,6 +283,7 @@
source.buffer = audioBuffer;
source.connect(audioContextRef.current.destination);
source.start(0, 0, audio.duration / 1000000);
latencyLogging && console.log(`🧪 🔊 obj latency ${audio.timestamp} #6: ${Date.now()}`);
}
}
};
@@ -303,6 +306,7 @@
timestamp: timestamp,
data: videoData,
});
latencyLogging && console.log(`🧪 🎬 obj latency ${timestamp} #3: ${Date.now()}`);
// console.log(`🎥 Got video frame: ${evc.type}, timestamp: ${timestamp}, ${videoData.byteLength} bytes`);
try {
videoDecoderWorker.postMessage({ action: "insertFrame", frame: evc });
@@ -320,6 +324,7 @@
duration: duration,
data: audioData,
});
latencyLogging && console.log(`🧪 🔊 obj latency ${timestamp} #3: ${Date.now()}`);
// console.log(
// `🔊 Got audio chunk: ${eac.type}, timestamp: ${timestamp},duration: ${duration}, ${audioData.byteLength} bytes`,
// );
4 changes: 4 additions & 0 deletions client/audience-app/src/worker/AudioDecoderWorker.ts
@@ -11,16 +11,20 @@ let lastSyncTime = 0;
let timeDriftThreshold = audioInterval * 2;
let syncInterval = 10000;

let latencyLogging = false; //! testbed: latency test_0

function initDecoder() {
audioDecoder = new AudioDecoder({
output: (decodedAudio) => {
latencyLogging && console.log(`🧪 🔊 obj latency ${decodedAudio.timestamp} #4: ${Date.now()}`);
decodedAudioHeap.insert(decodedAudio);

const currentTime = performance.now();

// buffer for bufferingTime second(s) before sending to main thread for rendering
if (currentTime - audioCollectionStartTime >= bufferingTime) {
// console.log("audio heap size: ", decodedAudioHeap.size());
latencyLogging && console.log(`🧪 🔊 obj latency ${decodedAudio.timestamp} #5: ${Date.now()}`);
const audio = decodedAudioHeap.extractMin();
postMessage({ action: "playAudio", audio });
audioSent++;
4 changes: 4 additions & 0 deletions client/audience-app/src/worker/VideoDecoderWorker.ts
@@ -11,16 +11,20 @@ let lastSyncTime = 0;
let timeDriftThreshold = frameInterval * 2;
let syncInterval = 10000;

let latencyLogging = false; //! testbed: latency test_0

function initDecoder() {
videoDecoder = new VideoDecoder({
output: (decodedFrame) => {
latencyLogging && console.log(`🧪 🎬 obj latency ${decodedFrame.timestamp} #4: ${Date.now()}`);
decodedFrameHeap.insert(decodedFrame);

const currentTime = performance.now();

// buffer for bufferingTime second(s) before sending to main thread for rendering
if (currentTime - frameCollectionStartTime >= bufferingTime) {
// console.log("frame heap size: ", decodedFrameHeap.size());
latencyLogging && console.log(`🧪 🎬 obj latency ${decodedFrame.timestamp} #5: ${Date.now()}`);
const frame = decodedFrameHeap.extractMin();
postMessage({ action: "renderFrame", frame });
frameSent++;
41 changes: 38 additions & 3 deletions client/streamer-app/src/App.tsx
@@ -14,6 +14,8 @@ import { VideoEncoderConfig } from "./interface/VideoEncoderConfig";
import { AudioEncoderConfig } from "./interface/AudioEncoderConfig";

function App() {
let latencyLogging = false; //! testbed: latency test_0

let mediaType = new Map<string, number>(); // tracks the media type (video, audio etc.) for each subscription, possible keys: "hd", "md", "audio"

const [session, setSession] = useState<Session | null>();
@@ -140,6 +142,7 @@ function App() {
console.log("🔔 Capturing media...");
// only start capturing when all media tracks are subscribed => easier for synchronization tracks
if (mediaType.size === newCatalogJSON.tracks.length) {
// if (mediaType.size === 2) { //! testbed latency test_0
startCapturing();
}
break;
@@ -171,6 +174,21 @@

newCatalogJSON = catalog;

// add two fallback tracks for the two video tracks
const hdRateAdaptation = { ...catalog.tracks[1] };
hdRateAdaptation.name += "-ra";
hdRateAdaptation.selectionParams.bitrate *= 0.5;
// console.log("🔔 hd-ra track:", hdRateAdaptation);

const mdRateAdaptation = { ...catalog.tracks[2] };
mdRateAdaptation.name += "-ra";
mdRateAdaptation.selectionParams.bitrate *= 0.5;
// console.log("🔔 md-ra track:", mdRateAdaptation);

catalog.tracks.push(hdRateAdaptation);
catalog.tracks.push(mdRateAdaptation);
// console.log("🔔 Updated catalogJSON:", catalog);

const encoder = new TextEncoder();
const jsonString = JSON.stringify(catalog);
// console.log("📤 Serialized catalogJSON string:", jsonString);
@@ -184,9 +202,10 @@
let height = 1080;
let frameRate = 30;
// audio encoder config: highest quality the hardware supports
let audioBitrate = 128_000;
let audioBitrate = 32_000;
let sampleRate = 48_000;
let numberOfChannels = 1;
let frameDuration = 10_000;
async function startCapturing() {
try {
const mediaStream = await navigator.mediaDevices.getUserMedia({
@@ -210,6 +229,7 @@

// worker for each track
for (let i = 0; i < newCatalogJSON.tracks.length; i++) {
// for (let i = 0; i < 2; i++) { //! testbed latency test_0
initWorker(
newCatalogJSON.tracks[i].name,
i,
@@ -235,6 +255,9 @@
sampleRate: sampleRate, // newCatalogJSON.tracks[trackIndex].selectionParams.samplerate!, // hardware dependent
bitrate: audioBitrate, // newCatalogJSON.tracks[trackIndex].selectionParams.bitrate, // hardware dependent
numberOfChannels: numberOfChannels, // Number(newCatalogJSON.tracks[trackIndex].selectionParams.channelConfig), // hardware dependent
opus: {
frameDuration: frameDuration, // In us. Lower latency than default 20000
},
};
} else {
config = {
@@ -243,6 +266,7 @@
height: newCatalogJSON.tracks[trackIndex].selectionParams.height!,
bitrate: newCatalogJSON.tracks[trackIndex].selectionParams.bitrate,
framerate: frameRate, // newCatalogJSON.tracks[trackIndex].selectionParams.framerate!, // hardware dependent
latencyMode: "realtime", // send 1 chunk per frame
};
}
const readableStream = new MediaStreamTrackProcessor(track).readable;
@@ -303,21 +327,28 @@
new Uint8Array(serializeBuffer, 18, dataBytes.byteLength).set(dataBytes);
}

sendEncodedChunk(serializeBuffer, trackName, chunk.type, chunk.duration!);
sendEncodedChunk(serializeBuffer, trackName, chunk.type, chunk.duration!, chunk.timestamp);
}

let keyFrameSet = false;
let audioGroupId = 0;
let audioObjId = 0;
let videoGroupId = 0;
let videoObjectId = 0;
async function sendEncodedChunk(buffer: ArrayBuffer, trackName: string, key: string, duration: number) {
async function sendEncodedChunk(
buffer: ArrayBuffer,
trackName: string,
key: string,
duration: number,
timestamp: number, //! testbed latency test_0
) {
if (trackName === "audio") {
// audio chunk
let id = mediaType.get(trackName);
// 1 sec of audio chunks in a group
if (audioObjId < 1000000 / duration) {
await writeMediaStream(id!, id!, audioGroupId, audioObjId, 0, 0, new Uint8Array(buffer));
latencyLogging && console.log(`🧪 🔊 obj latency ${timestamp} #2: ${Date.now()}`);
// console.log(
// `🔊 Audio Chunk: groupId ${audioGroupId}, objId ${audioObjId}, chunk size: ${buffer.byteLength} bytes`,
// );
@@ -326,6 +357,7 @@
audioObjId = 0;
audioGroupId++;
await writeMediaStream(id!, id!, audioGroupId, audioObjId, 0, 0, new Uint8Array(buffer));
latencyLogging && console.log(`🧪 🔊 obj latency ${timestamp} #2: ${Date.now()}`);
// console.log(
// `🔊 Audio Chunk: groupId ${audioGroupId}, objId ${audioObjId}, chunk size: ${buffer.byteLength} bytes`,
// );
@@ -338,19 +370,22 @@
if (key === "key" && !keyFrameSet) {
keyFrameSet = true;
await writeMediaStream(subId!, subId!, videoGroupId, videoObjectId, 0, 0, new Uint8Array(buffer));
latencyLogging && console.log(`🧪 🎬 obj latency ${timestamp} #2: ${Date.now()}`);
// console.log(`🔑 Key Frame: groupId ${videoGroupId}, objId ${videoObjectId}, frame size: ${buffer.byteLength} bytes`);
videoObjectId++;
}
if (keyFrameSet) {
if (key === "delta") {
await writeMediaStream(subId!, subId!, videoGroupId, videoObjectId, 0, 0, new Uint8Array(buffer));
latencyLogging && console.log(`🧪 🎬 obj latency ${timestamp} #2: ${Date.now()}`);
// console.log(`🔲 Delta Frame: groupId ${videoGroupId}, objId ${videoObjectId}, frame size: ${buffer.byteLength} bytes`);
videoObjectId++;
} else {
// key frame
videoGroupId++;
videoObjectId = 0;
await writeMediaStream(subId!, subId!, videoGroupId, videoObjectId, 0, 0, new Uint8Array(buffer));
latencyLogging && console.log(`🧪 🎬 obj latency ${timestamp} #2: ${Date.now()}`);
// console.log(`🔑 Key Frame: groupId ${videoGroupId}, objId ${videoObjectId}, frame size: ${buffer.byteLength} bytes`);
videoObjectId++;
}
3 changes: 3 additions & 0 deletions client/streamer-app/src/interface/AudioEncoderConfig.ts
@@ -3,4 +3,7 @@ export interface AudioEncoderConfig {
sampleRate: number;
bitrate: number;
numberOfChannels: number;
opus: {
frameDuration: number;
};
}
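
The `opus.frameDuration` field added above is forwarded to WebCodecs, where it is specified in microseconds (default 20,000 µs; this PR uses 10,000 µs for lower packetization delay). As a rough sketch of how the extended config reaches the encoder — the helper name and the `codec` string are illustrative assumptions, not code from this commit:

```ts
import { AudioEncoderConfig } from "./AudioEncoderConfig";

// Illustrative helper (not part of this commit): configures a WebCodecs
// AudioEncoder from the app-level config. `opus.frameDuration` is in
// microseconds; 10_000 halves the per-chunk delay vs. the 20_000 default.
function createOpusEncoder(
  cfg: AudioEncoderConfig,
  onChunk: (chunk: EncodedAudioChunk) => void,
): AudioEncoder {
  const encoder = new AudioEncoder({
    output: onChunk,
    error: (e) => console.error("🔊 encoder error:", e),
  });
  encoder.configure({
    codec: "opus", // assumed; the interface above omits the codec string
    sampleRate: cfg.sampleRate,
    numberOfChannels: cfg.numberOfChannels,
    bitrate: cfg.bitrate,
    opus: { frameDuration: cfg.opus.frameDuration },
  });
  return encoder;
}
```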
1 change: 1 addition & 0 deletions client/streamer-app/src/interface/VideoEncoderConfig.ts
@@ -4,4 +4,5 @@ export interface VideoEncoderConfig {
height: number;
bitrate: number;
framerate: number;
latencyMode: string;
}
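
Similarly, the new `latencyMode` field maps onto the WebCodecs `VideoEncoder` configuration, where `"realtime"` trades compression efficiency for lower encode latency (one output chunk per frame, as the comment in `App.tsx` notes). A minimal sketch under the assumption that the interface also declares `codec` and `width` (not shown in this hunk); the helper name is illustrative:

```ts
import { VideoEncoderConfig } from "./VideoEncoderConfig";

// Illustrative helper (not part of this commit).
function createRealtimeVideoEncoder(
  cfg: VideoEncoderConfig,
  onChunk: (chunk: EncodedVideoChunk) => void,
): VideoEncoder {
  const encoder = new VideoEncoder({
    output: onChunk,
    error: (e) => console.error("🎬 encoder error:", e),
  });
  encoder.configure({
    codec: cfg.codec,
    width: cfg.width,
    height: cfg.height,
    bitrate: cfg.bitrate,
    framerate: cfg.framerate,
    latencyMode: cfg.latencyMode as LatencyMode, // "realtime" in this PR
  });
  return encoder;
}
```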
4 changes: 4 additions & 0 deletions client/streamer-app/src/worker/AudioEncoderWorker.ts
@@ -1,4 +1,7 @@
let latencyLogging = false; //! testbed: latency test_0

function send(chunk: EncodedAudioChunk) {
latencyLogging && console.log(`🧪 🔊 obj latency ${chunk.timestamp} #1: ${Date.now()}`);
postMessage(chunk);
}

@@ -24,6 +27,7 @@ async function encodeAudio(reader: ReadableStreamDefaultReader<AudioData>, audio
const { done, value } = await reader.read();
if (done) break;
if (audioEncoder) {
latencyLogging && console.log(`🧪 🔊 obj latency ${value.timestamp} #0: ${Date.now()}`);
audioEncoder.encode(value);
// console.log(`🔊 Encoded audio: ${value.timestamp}`);
}
4 changes: 4 additions & 0 deletions client/streamer-app/src/worker/VideoEncoderWorker.ts
@@ -1,6 +1,9 @@
import { VideoEncoderConfig } from "../interface/VideoEncoderConfig";

let latencyLogging = false; //! testbed: latency test_0

function send(chunk: EncodedVideoChunk) {
latencyLogging && console.log(`🧪 🎬 obj latency ${chunk.timestamp} #1: ${Date.now()}`);
postMessage(chunk);
}

@@ -38,6 +41,7 @@
} else {
isKeyFrame = false;
}
latencyLogging && console.log(`🧪 🎬 obj latency ${value.timestamp} #0: ${Date.now()}`);
videoEncoder.encode(value, { keyFrame: isKeyFrame });
// console.log(`🎥 Encoded video: ${isKeyFrame ? "key" : "delta"} frame ${frameIndex}`);
frameIndex++;
Empty file removed data/data
Empty file.
13 changes: 13 additions & 0 deletions log/README.md
@@ -0,0 +1,13 @@
# Testbed Log

## Latency Test

### Test 0

- Description: Tests the time cost of the different stages of the same object across its lifecycle; details of the individual test stages are [here on the Notion page](https://www.notion.so/enderwang/Test-current-implementation-latency-1125ced186b580efbf5afeb4890d7375?pvs=4#1175ced186b5807497b8e83c48494c3e).

- Test methods:
1. Use `performance.now()`.
2. Use `Date.now()`.

- Test results: in `/date log/d_1/output`.
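
Each instrumented stage logs a line of the form `🧪 🎬 obj latency <timestamp> #<stage>: <Date.now() ms>` (🎬 for video, 🔊 for audio), with stages #0/#1 around encoding, #2 on send, #3 on receipt, #4/#5 around decoding and buffering, and #6 at render/playback. Per-stage latency can then be recovered offline by grouping lines per object timestamp and diffing consecutive stages. A minimal parsing sketch (hypothetical helper, not part of this commit):

```ts
// Sketch: group latency-log lines by media timestamp, then diff stages.
// If audio and video timestamps can collide, filter lines by 🎬/🔊 first.
type StageTimes = Map<number, number>; // stage number -> wall-clock ms

function parseLatencyLog(log: string): Map<number, StageTimes> {
  const byObject = new Map<number, StageTimes>(); // media timestamp -> stages
  const re = /obj latency (\d+) #(\d+): (\d+)/;
  for (const line of log.split("\n")) {
    const m = re.exec(line);
    if (!m) continue;
    const [, ts, stage, wallMs] = m;
    const stages = byObject.get(Number(ts)) ?? new Map<number, number>();
    stages.set(Number(stage), Number(wallMs));
    byObject.set(Number(ts), stages);
  }
  return byObject;
}

// Deltas between consecutive stages for one object. Note that #2 -> #3 spans
// two machines, so it is only meaningful if their clocks are synchronized.
function stageDeltas(stages: StageTimes): Map<string, number> {
  const deltas = new Map<string, number>();
  for (let s = 1; s <= 6; s++) {
    const prev = stages.get(s - 1);
    const curr = stages.get(s);
    if (prev !== undefined && curr !== undefined) {
      deltas.set(`#${s - 1}->#${s}`, curr - prev);
    }
  }
  return deltas;
}
```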
