Skip to content

Commit

Permalink
Add rtc score to check quality
Browse files Browse the repository at this point in the history
  • Loading branch information
Karolk99 committed Jul 2, 2024
1 parent 590c60d commit 0ca3bad
Show file tree
Hide file tree
Showing 17 changed files with 2,653 additions and 2,976 deletions.
61 changes: 61 additions & 0 deletions report_plots.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
import csv

import matplotlib.pyplot as plt

# Estimated bandwidth cost of each simulcast layer, in Mbps.
LOW = 0.15
MID = 0.5
HIGH = 1.5


# Load the grinder report; each CSV row becomes one dict keyed by column name.
report = []
with open("../report.csv", mode="r") as file:
    for row in csv.DictReader(file):
        report.append(row)

# The CSV stores everything as strings — convert each column explicitly.
timestamps = [int(row["timestamp"]) for row in report]
min_curve = [float(row["min"]) for row in report]
max_curve = [float(row["max"]) for row in report]
q1_curve = [float(row["q1"]) for row in report]
q2_curve = [float(row["q2"]) for row in report]
q3_curve = [float(row["q3"]) for row in report]

# Number of peers on each encoding layer at each timestamp.
encoding_low = [int(row["low"]) for row in report]
encoding_medium = [int(row["mid"]) for row in report]
encoding_high = [int(row["high"]) for row in report]

# Total estimated bandwidth: peers per layer weighted by that layer's Mbps cost.
bandwidth = [
    int(row["low"]) * LOW + int(row["mid"]) * MID + int(row["high"]) * HIGH
    for row in report
]

# Figure 1: RTC score summary statistics over time.
plt.plot(timestamps, min_curve, label="minimum")
plt.plot(timestamps, max_curve, label="maximum")
plt.plot(timestamps, q1_curve, label="Q1")
plt.plot(timestamps, q2_curve, label="median")
plt.plot(timestamps, q3_curve, label="Q3")

plt.xlabel("Timestamp")
plt.ylabel("RTC Score")
plt.legend()

plt.show()

# Figure 2: peers per encoding layer over time.
plt.figure()
plt.plot(timestamps, encoding_low, label="low")
plt.plot(timestamps, encoding_medium, label="medium")
plt.plot(timestamps, encoding_high, label="high")

plt.xlabel("Timestamp")
plt.ylabel("Encodings")
plt.legend()

plt.show()

# Figure 3: total estimated bandwidth over time.
plt.figure()
# BUG FIX: this curve was labelled "low" (copy-paste from figure 2).
plt.plot(timestamps, bandwidth, label="bandwidth")

plt.xlabel("Timestamp")
plt.ylabel("Bandwidth")
plt.legend()

plt.show()
37 changes: 26 additions & 11 deletions webrtc/frontend/src/main.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ import "./style.css";
import "./mediaDevices.ts";
import { FishjamClient, TrackEncoding } from "@fishjam-dev/ts-client";
import { startDevices } from "./mediaDevices";
import { rtc_score_callback } from "./rtcScore.ts";

const startClient = () => {
const params: QueryParams = parseQueryParams();
Expand Down Expand Up @@ -42,6 +43,9 @@ const startClient = () => {
},
});

// every second sends rtc score report to fishjam grinder using `console.log`
rtc_score_callback(client);

return client;
};

Expand All @@ -50,17 +54,28 @@ const addMediaTracks = (client: FishjamClient<PeerMetadata, TrackMetadata>) => {

const activeEncodings: TrackEncoding[] = process.env.ACTIVE_ENCODINGS?.split("") as TrackEncoding[];

client.addTrack(
videoTrack,
videoMediaStream,
undefined,
{ enabled: true, activeEncodings: activeEncodings, disabledEncodings: [] },
new Map<TrackEncoding, number>([
["l", 150],
["m", 500],
["h", 1500],
]),
);
if (process.env.USE_SIMULCAST) {
client.addTrack(
videoTrack,
videoMediaStream,
undefined,
{ enabled: true, activeEncodings: activeEncodings, disabledEncodings: [] },
new Map<TrackEncoding, number>([
["l", 150],
["m", 500],
["h", 1500],
]),
);
} else {
client.addTrack(
videoTrack,
videoMediaStream,
undefined,
undefined,
500
);
}

console.log("Added video");

const audioTrack = audioMediaStream.getAudioTracks()?.[0];
Expand Down
93 changes: 93 additions & 0 deletions webrtc/frontend/src/rtcMOS1.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,93 @@
// Original implementation: https://github.com/ggarber/rtcscore
// I just adjusted the code to our needs.
// This implementation reflects the score for the last second rather than for the entire session.

import { z } from "zod";

// One-second snapshot of inbound video stats. Numeric fields default to 0 so a
// report with missing counters still parses instead of failing validation.
export const VideoStatsSchema = z.object({
  bitrate: z.number().default(0),       // bits per second over the last interval
  roundTripTime: z.number().default(0), // taken from the selected ICE candidate pair
  bufferDelay: z.number().default(0),   // avg jitter-buffer delay per emitted frame
  codec: z.string().optional(),         // mime subtype, e.g. "vp8" / "vp9"
  frameRate: z.number().default(0),
  packetLoss: z.number().default(0) // %
});

// One-second snapshot of inbound audio stats (consumed by the E-Model score).
export const AudioStatsSchema = z.object({
  bitrate: z.number().default(0),
  roundTripTime: z.number().default(0),
  bufferDelay: z.number().default(0),
  packetLoss: z.number().default(0), // %
  fec: z.boolean().default(false), // forward error correction enabled
  dtx: z.boolean().default(false)  // discontinuous transmission enabled
});

/** Bound `value` to the closed interval [min, max]. */
const clamp = (value: number, min: number, max: number) => {
  const capped = Math.min(value, max);
  return Math.max(min, capped);
};

// Inferred TS types for the parsed (defaulted) stats objects.
export type VideoStats = z.infer<typeof VideoStatsSchema>
export type AudioStats = z.infer<typeof AudioStatsSchema>

// Input for calculateVideoScore: measured stats plus the expected (target)
// resolution and frame rate used as the reference quality. Packet loss is not
// part of the video model, hence the Omit.
export type ScoreVideoParams = Omit<VideoStats, "packetLoss"> & {
  expectedWidth: number;
  expectedHeight: number;
  expectedFrameRate: number;
}

/**
 * Estimate a MOS-style score (1–5) for the last second of received video.
 *
 * The base quality is a logarithmic regression on bits-per-pixel-per-frame
 * (fitted on very limited test data — see upstream rtcscore), then penalized
 * for dropped frame rate and for end-to-end delay.
 */
export const calculateVideoScore = ({
  bitrate,
  roundTripTime,
  bufferDelay,
  codec,
  expectedWidth,
  expectedHeight,
  frameRate,
  expectedFrameRate,
}: ScoreVideoParams) => {
  // A stream rendering no frames gets the minimum score outright.
  if (frameRate === 0) return 1;

  // One-way delay estimate: jitter-buffer delay plus half the round trip.
  const delay = bufferDelay + roundTripTime / 2;
  // VP9 compresses better, so the same bitrate is worth ~20% more.
  const codecFactor = codec === "vp9" ? 1.2 : 1.0;
  const pixels = expectedWidth * expectedHeight;

  // Bits per pixel per frame (bPPPF) drives the regression below.
  const bitsPerPixelPerFrame = (codecFactor * bitrate) / pixels / frameRate;
  const base = clamp(0.56 * Math.log(bitsPerPixelPerFrame) + 5.36, 1, 5);

  const frameRatePenalty = 1.9 * Math.log(expectedFrameRate / frameRate);
  const delayPenalty = delay * 0.002;

  return clamp(Math.round((base - frameRatePenalty - delayPenalty) * 100) / 100, 1, 5);
};

export type ScoreInputAudio = {
  bufferDelay: number;
  packetLoss: number;
  bitrate: number;
  roundTripTime: number;
  fec: boolean;
  dtx: boolean;
}

/**
 * Estimate a MOS-style score (1–5) for the last second of received audio,
 * based on the E-Model algorithm (ITU-T G.107): compute an R-factor from
 * delay and loss impairments, then map it onto the 1–5 MOS scale.
 */
export const calculateAudioScore = ({ bitrate, roundTripTime, bufferDelay, fec, dtx, packetLoss }: ScoreInputAudio) => {
  // Local helper so this block is self-contained (same semantics as the
  // module-level clamp).
  const clampValue = (value: number, min: number, max: number) => Math.max(min, Math.min(value, max));

  // Audio MOS calculation is based on E-Model algorithm
  // Assume 20 packetization delay
  const delay = 20 + bufferDelay + roundTripTime / 2;
  const pl = packetLoss;
  const R0 = 100;
  // Equipment impairment factor; ignore audio bitrate in dtx mode.
  const Ie = dtx
    ? 8
    : bitrate
      ? clampValue(55 - 4.6 * Math.log(bitrate), 0, 30)
      : 6;
  // Packet-loss robustness: FEC makes the codec more resilient to loss.
  const Bpl = fec ? 20 : 10;
  const Ipl = Ie + (100 - Ie) * (pl / (pl + Bpl));

  // Delay impairment: linear term plus an extra penalty above 150 ms.
  // BUG FIX: was `0.1 * delay - 150` (missing parentheses), which made the
  // impairment strongly negative just above 150 ms of delay — i.e. more delay
  // *improved* the score. Upstream rtcscore uses 0.1 * (delay - 150).
  const Id = delay * 0.03 + (delay > 150 ? 0.1 * (delay - 150) : 0);
  const R = clampValue(R0 - Ipl - Id, 0, 100);
  const MOS = 1 + 0.035 * R + (R * (R - 60) * (100 - R) * 7) / 1000000;

  return clampValue(Math.round(MOS * 100) / 100, 1, 5);
};
116 changes: 116 additions & 0 deletions webrtc/frontend/src/rtcScore.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,116 @@
import { VideoStatsSchema } from "./rtcMOS1.ts";
import { AudioStats, VideoStats } from "./rtcMOS1";
import { calculateVideoScore } from "./rtcMOS1";
import { FishjamClient } from "@fishjam-dev/ts-client";

// Discriminated per-track stats: `type` tags which schema the entry matches.
export type VideoStatistics = VideoStats & { type: "video" };
export type AudioStatistics = AudioStats & { type: "audio" };
export type Statistics = VideoStatistics | AudioStatistics;
// WebRTC `trackIdentifier` taken from inbound-rtp stats reports.
export type TrackIdentifier = string;

// Raw inbound-rtp reports keyed by their RTCStats id.
type InboundRtpId = string;
type Inbound = Record<InboundRtpId, any>;

// Handle of the 1-second reporting loop started by `rtc_score_callback`.
let intervalId: NodeJS.Timer | null = null;
// Latest parsed stats per track; merged (not replaced) on every tick.
let data: Record<TrackIdentifier, Statistics> = {};

/**
 * Starts a 1 Hz loop that derives per-track video stats from the client's
 * WebRTC inbound-rtp reports and logs the resulting scores as
 * `scores: [...]` on the console (scraped by the grinder).
 *
 * Each tick diffs cumulative counters against the previous tick to get
 * per-second rates; the first tick only records a baseline. Safe to call
 * more than once: any previously started loop is stopped first.
 */
export const rtc_score_callback = (client: FishjamClient<PeerMetadata, TrackMetadata>) => {
  let prevTime: number = 0;
  let lastInbound: Inbound | null = null;

  // BUG FIX: intervalId was stored but never cleared, so a second invocation
  // leaked an extra running interval (duplicated score logs).
  if (intervalId) clearInterval(intervalId);

  intervalId = setInterval(async () => {
    if (!client) return;

    const currTime = Date.now();
    const dx = currTime - prevTime; // ms elapsed since the previous tick

    if (!dx) return;

    const stats: RTCStatsReport = await client.getStatistics();
    const result: Record<string, any> = {};

    // RTCStatsReport is map-like; flatten it for id-based lookups below.
    stats.forEach((report, id) => {
      result[id] = report;
    });

    const inbound: Inbound = getGroupedStats(result, "inbound-rtp");
    Object.entries(inbound).forEach(([id, report]) => {
      // First tick: no baseline to diff against yet.
      if (!lastInbound) return;
      if (report?.kind !== "video") return;

      const lastReport = lastInbound[id];

      const currentBytesReceived: number = report?.bytesReceived ?? 0;

      if (!currentBytesReceived) return;

      const prevBytesReceived: number = lastReport?.bytesReceived ?? 0;

      const bitrate = (8 * (currentBytesReceived - prevBytesReceived) * 1000) / dx; // bits per seconds

      // Loss ratio over this interval only (NaN when nothing was received,
      // which makes the schema parse fail and the sample get skipped).
      const dxPacketsLost = (report?.packetsLost ?? 0) - (lastReport?.packetsLost ?? 0);
      const dxPacketsReceived = (report?.packetsReceived ?? 0) - (lastReport?.packetsReceived ?? 0);
      const packetLoss = dxPacketsReceived ? (dxPacketsLost / dxPacketsReceived) * 100 : NaN; // in %

      // RTT lives on the transport's currently selected ICE candidate pair.
      const selectedCandidatePairId = result[report?.transportId || ""]?.selectedCandidatePairId;
      const roundTripTime = result[selectedCandidatePairId]?.currentRoundTripTime;

      // Average jitter-buffer delay per emitted frame over this interval.
      const dxJitterBufferEmittedCount =
        (report?.jitterBufferEmittedCount ?? 0) - (lastReport?.jitterBufferEmittedCount ?? 0);
      const dxJitterBufferDelay = (report?.jitterBufferDelay ?? 0) - (lastReport?.jitterBufferDelay ?? 0);
      const bufferDelay = dxJitterBufferEmittedCount > 0 ? dxJitterBufferDelay / dxJitterBufferEmittedCount : 0;

      const codecId = report?.codecId || "";

      // mimeType is e.g. "video/VP8"; keep only the subtype.
      const codec = result[codecId]?.mimeType?.split("/")?.[1];

      const videoStats = VideoStatsSchema.safeParse({
        bitrate,
        packetLoss,
        codec,
        bufferDelay,
        roundTripTime,
        frameRate: report?.framesPerSecond ?? NaN,
      });

      if (videoStats.success && report?.trackIdentifier) {
        // Renamed from `stats` to avoid shadowing the outer RTCStatsReport.
        const trackStats = { ...videoStats.data, type: "video" as const };
        data[report.trackIdentifier] = {
          ...data[report.trackIdentifier],
          ...trackStats,
        };
      }
    });

    const videoScores = generate_video_scores(data);
    console.log(`scores: ${JSON.stringify(videoScores)}`);

    lastInbound = inbound;
    prevTime = currTime;
  }, 1000);
};

// Compute a score for every tracked video entry, using fixed 720p / 24 fps
// expectations as the reference quality.
const generate_video_scores = (data: Record<TrackIdentifier, Statistics>) =>
  Object.entries(data)
    .filter((entry): entry is [string, VideoStatistics] => entry[1].type == "video")
    .map(([, stats]) =>
      calculateVideoScore({
        codec: stats.codec,
        bitrate: stats.bitrate,
        bufferDelay: stats.bufferDelay,
        roundTripTime: stats.roundTripTime,
        frameRate: stats.frameRate,
        expectedWidth: 1280,
        expectedFrameRate: 24,
        expectedHeight: 720,
      }),
    );

// Select only the stats entries of the given `type` (e.g. "inbound-rtp"),
// keeping their RTCStats ids as keys.
const getGroupedStats = (result: Record<string, any>, type: string) =>
  Object.fromEntries(Object.entries(result).filter(([, value]) => value.type === type));
13 changes: 12 additions & 1 deletion webrtc/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -53,6 +53,16 @@ const args = yargs(hideBin(process.argv))
description: "Number of peers spawned per browser",
default: 1,
})
.option("csv-report-path", {
type: "string",
description: "Path used to save csv report",
default: "./report.csv"
})
.option("use-simulcast", {
type: "boolean",
description: "If set to true simulcast will be enabled",
default: false
})
.demandOption([
"fishjam-address",
"fishjam-token",
Expand All @@ -62,7 +72,7 @@ const args = yargs(hideBin(process.argv))
]).argv;

(async () => {
args.targetEncoding = "h";
args.targetEncoding = args.useSimulcast ? "h" : "m";
args.availableEncodings = ["l", "m", "h"];

// Start the frontend server
Expand All @@ -71,6 +81,7 @@ const args = yargs(hideBin(process.argv))
secure: args.secure,
targetEncoding: args.targetEncoding,
activeEncodings: args.availableEncodings,
useSimulcast: args.useSimulcast,
});

args.peersPerRoom = Math.min(args.peersPerRoom, args.peers);
Expand Down
11 changes: 11 additions & 0 deletions webrtc/package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

3 changes: 3 additions & 0 deletions webrtc/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -17,5 +17,8 @@
"grind": "npx ts-node index.ts",
"install-chrome": "npx playwright install chrome",
"setup": "npm i && cd frontend && npm i && cd .."
},
"dependencies": {
"zod": "^3.23.8"
}
}
Loading

0 comments on commit 0ca3bad

Please sign in to comment.