diff --git a/.github/workflows/healthcheck-libraries.yml b/.github/workflows/healthcheck-libraries.yml index c454c1c2..d22f4256 100644 --- a/.github/workflows/healthcheck-libraries.yml +++ b/.github/workflows/healthcheck-libraries.yml @@ -57,4 +57,8 @@ jobs: - name: Clean build of frontend implementation working-directory: Frontend/implementations/typescript - run: npm install && npm run build \ No newline at end of file + run: npm install && npm run build + + - name: Clean build of frontend implementation as ES6 + working-directory: Frontend/implementations/typescript + run: npm install && npm run build:esm \ No newline at end of file diff --git a/.github/workflows/healthcheck-streaming.yml b/.github/workflows/healthcheck-streaming.yml index 679e22bb..d51cbeed 100644 --- a/.github/workflows/healthcheck-streaming.yml +++ b/.github/workflows/healthcheck-streaming.yml @@ -60,11 +60,11 @@ jobs: uses: robinraju/release-downloader@v1 with: repository: 'EpicGamesExt/PixelStreamingInfrastructure' - tag: 'minimal-streamer' - fileName: 'Minimal-PixelStreamer-5.5.7z' + tag: 'minimal-streamer-5.5' + fileName: 'Minimal-PixelStreamer-5.5-Win64-Development.7z' - name: Extract streamer - run: 7z x -oStreamer Minimal-PixelStreamer-5.5.7z + run: 7z x -oStreamer Minimal-PixelStreamer-5.5-Win64-Development.7z - name: Build Common working-directory: Common @@ -109,7 +109,7 @@ jobs: - name: Run Streamer working-directory: Streamer - run: Start-Process ".\Minimal\Binaries\Win64\Minimal-Win64-Shipping-Cmd.exe" -ArgumentList "-warp","-dx12","-windowed","-res=1920","-resy=720","-PixelStreamingURL=ws://localhost:8888","-RenderOffScreen","-AllowSoftwaRerendering","-PixelStreamingEncoderCodec=vp8" + run: Start-Process ".\Minimal\Binaries\Win64\Minimal-Cmd.exe" -ArgumentList "-warp","-dx12","-windowed","-resx=1920","-resy=720","-PixelStreamingURL=ws://localhost:8888","-RenderOffScreen","-AllowSoftwareRendering","-PixelStreamingEncoderCodec=vp8", "-Log=Minimal.log" - name: Prepare test working-directory: Extras\MinimalStreamTester @@ -121,9 +121,14 @@ jobs: - name: Wait for signalling to come up run: curl --retry 10 --retry-delay 20 --retry-connrefused http://localhost:999/api/status + - name: Wait for streamer to come up run: curl --retry 10 --retry-delay 20 --retry-connrefused http://localhost:999/api/streamers/DefaultStreamer + - name: Output streamer logs + working-directory: Streamer + run: ls ".\Minimal\" && Test-Path ".\Minimal\Saved\Logs\Minimal.log" && cat ".\Minimal\Saved\Logs\Minimal.log" + - name: Test if we can stream working-directory: Extras\MinimalStreamTester run: | diff --git a/Common/src/Util/SdpUtils.ts b/Common/src/Util/SdpUtils.ts new file mode 100644 index 00000000..1a2a4ffe --- /dev/null +++ b/Common/src/Util/SdpUtils.ts @@ -0,0 +1,31 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +export class SDPUtils { + static addVideoHeaderExtensionToSdp(sdp: string, uri: string): string { + // Find the highest used header extension id by sorting the extension ids used, + // eliminating duplicates and adding one. + // Todo: Update this when WebRTC in Chrome supports the header extension API. 
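+        // Example: if the SDP already contains "a=extmap:1 <uri-a>" and "a=extmap:4 <uri-b>",
+        // the ids in use are [1, 4], so "a=extmap:5 <uri>" is appended to the m=video section.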
+ const usedIds = sdp + .split('\n') + .filter((line) => line.startsWith('a=extmap:')) + .map((line) => parseInt(line.split(' ')[0].substring(9), 10)) + .sort((a, b) => a - b) + .filter((item, index, array) => array.indexOf(item) === index); + const nextId = usedIds[usedIds.length - 1] + 1; + const extmapLine = 'a=extmap:' + nextId + ' ' + uri + '\r\n'; + + const sections = sdp.split('\nm=').map((part, index) => { + return (index > 0 ? 'm=' + part : part).trim() + '\r\n'; + }); + const sessionPart = sections.shift(); + // Only add extension to m=video media section + return ( + sessionPart + + sections + .map((mediaSection) => + mediaSection.startsWith('m=video') ? mediaSection + extmapLine : mediaSection + ) + .join('') + ); + } +} diff --git a/Common/src/pixelstreamingcommon.ts b/Common/src/pixelstreamingcommon.ts index 512beb56..59b5b300 100644 --- a/Common/src/pixelstreamingcommon.ts +++ b/Common/src/pixelstreamingcommon.ts @@ -10,3 +10,4 @@ export { MessageRegistry } from './Messages/message_registry'; export * as Messages from './Messages/signalling_messages'; export * as MessageHelpers from './Messages/message_helpers'; export { KeepaliveMonitor } from './Protocol/KeepaliveMonitor'; +export * from './Util/SdpUtils'; diff --git a/Extras/FrontendTests/README.md b/Extras/FrontendTests/README.md index 74d2c925..d2d5a0ba 100755 --- a/Extras/FrontendTests/README.md +++ b/Extras/FrontendTests/README.md @@ -4,13 +4,9 @@ ### Setup ``` npm install -npx playwright install --with-deps -``` - -The above command should install the required browsers but for some reason I find I have to install chrome manually using the following command. - -``` -npx playwright install chrome +npx playwright install-deps +npx playwright install firefox +npx playwright install chromium ``` ### Prepare diff --git a/Extras/FrontendTests/dockerfiles/linux/Dockerfile b/Extras/FrontendTests/dockerfiles/linux/Dockerfile index be60bb78..c2d08b4d 100644 --- a/Extras/FrontendTests/dockerfiles/linux/Dockerfile +++ b/Extras/FrontendTests/dockerfiles/linux/Dockerfile @@ -4,8 +4,9 @@ WORKDIR /tester COPY /Extras/FrontendTests . RUN npm install -RUN npx playwright install --with-deps -RUN npx playwright install chrome +RUN npx playwright install firefox +RUN npx playwright install chromium +RUN npx playwright install-deps VOLUME /tester/playwright-report diff --git a/Extras/FrontendTests/package.json b/Extras/FrontendTests/package.json index bc1e022c..07e1184e 100755 --- a/Extras/FrontendTests/package.json +++ b/Extras/FrontendTests/package.json @@ -4,21 +4,20 @@ "description": "", "main": "index.js", "scripts": { - "test": "playwright test", - "build": "", - "clean": "" + "test": "npx playwright test", + "build": "" }, "keywords": [], "author": "Epic Games", "license": "MIT", "devDependencies": { - "@playwright/test": "^1.49.0", + "@playwright/test": "^1.49.1", "@types/node": "^20.12.7", "@types/uuid": "^9.0.8" }, "dependencies": { - "@epicgames-ps/lib-pixelstreamingfrontend-ue5.5": "*", "@epicgames-ps/js-streamer": "^0.0.4", + "@epicgames-ps/lib-pixelstreamingfrontend-ue5.5": "*", "dotenv": "^16.4.5", "node-fetch": "^2.7.0", "uuid": "^9.0.0" diff --git a/Extras/FrontendTests/playwright.config.ts b/Extras/FrontendTests/playwright.config.ts index 05c5d484..2c0248af 100755 --- a/Extras/FrontendTests/playwright.config.ts +++ b/Extras/FrontendTests/playwright.config.ts @@ -14,8 +14,8 @@ export default defineConfig({ forbidOnly: !!process.env.CI, /* Retry on CI only */ retries: process.env.CI ? 
2 : 0, - /* Opt out of parallel tests on CI. */ - workers: process.env.CI ? 1 : 5, + /* Opt out of parallel tests in general as multiple streamers mean they can connect to the wrong test */ + workers: 1, /* Reporter to use. See https://playwright.dev/docs/test-reporters */ reporter: [['html', { open: 'never' }]], /* Shared settings for all the projects below. See https://playwright.dev/docs/api/class-testoptions. */ @@ -31,7 +31,7 @@ export default defineConfig({ projects: [ { name: 'chrome', - use: { ...devices['Desktop Chrome'], channel: 'chrome' }, + use: { ...devices['Desktop Chrome'], channel: 'chromium' }, }, // { // name: 'chromium', diff --git a/Extras/FrontendTests/tests/basic_stream.spec.ts b/Extras/FrontendTests/tests/basic_stream.spec.ts index d1fd212d..c715a7b3 100644 --- a/Extras/FrontendTests/tests/basic_stream.spec.ts +++ b/Extras/FrontendTests/tests/basic_stream.spec.ts @@ -1,6 +1,7 @@ import { test } from './fixtures'; import { expect } from './matchers'; import * as helpers from './helpers'; +import { StatsReceivedEvent } from '@epicgames-ps/lib-pixelstreamingfrontend-ue5.5'; // NOTE: add a new test to check qp values @@ -10,20 +11,22 @@ test('Test default stream.', { }, async ({ page, streamerId }) => { await page.goto(`/?StreamerId=${streamerId}`); - await page.getByText('Click to start').click(); // let the stream run for a short duration - await helpers.waitForVideo(page); - await helpers.delay(1000); - - // query the frontend for its calculated stats - const frame_count:number = await page.evaluate(()=> { - let videoStats = pixelStreaming._webRtcController.peerConnectionController.aggregatedStats.inboundVideoStats; - return videoStats.framesReceived; + await helpers.startAndWaitForVideo(page); + + let frameCount: number = await page.evaluate(()=> { + return new Promise((resolve) => { + window.pixelStreaming.addEventListener("statsReceived", (e: StatsReceivedEvent) => { + if(e.data.aggregatedStats && e.data.aggregatedStats.inboundVideoStats && e.data.aggregatedStats.inboundVideoStats.framesReceived) { + resolve(e.data.aggregatedStats.inboundVideoStats.framesReceived); + } + }); + }); }); // pass the test if we recorded any frames - expect(frame_count).toBeGreaterThan(0); + expect(frameCount).toBeGreaterThan(0); }); diff --git a/Extras/FrontendTests/tests/extras.ts b/Extras/FrontendTests/tests/extras.ts index d3e53f48..f7fcc143 100644 --- a/Extras/FrontendTests/tests/extras.ts +++ b/Extras/FrontendTests/tests/extras.ts @@ -1,98 +1,109 @@ -import { Page } from 'playwright'; -import { Streamer, DataProtocol } from '@epicgames-ps/js-streamer'; -import { PixelStreaming } from '@epicgames-ps/lib-pixelstreamingfrontend-ue5.5'; -import { delay } from './helpers'; - -declare global { - interface Window { - pixelStreaming: PixelStreaming; - streamer: Streamer; - signallingURL?: string; - dataMessages: Record; - dataMessageListener(playerId: number, message: any): void; - } -} - -export enum PSEventTypes { - MouseDown = DataProtocol.ToStreamer.MouseDown.id, - MouseUp = DataProtocol.ToStreamer.MouseUp.id, - MouseMove = DataProtocol.ToStreamer.MouseMove.id, - MouseWheel = DataProtocol.ToStreamer.MouseWheel.id, - MouseDouble = DataProtocol.ToStreamer.MouseDouble.id, - MouseEnter = DataProtocol.ToStreamer.MouseEnter.id, - MouseLeave = DataProtocol.ToStreamer.MouseLeave.id, - KeyDown = DataProtocol.ToStreamer.KeyDown.id, - KeyUp = DataProtocol.ToStreamer.KeyUp.id, - KeyPress = DataProtocol.ToStreamer.KeyPress.id, - Command = DataProtocol.ToStreamer.Command.id, -}; - -type 
NumberValidator = (n: number) => boolean; - -// mouse input events captured by the streamer -export interface DataChannelMouseInput { - type: number; - button?: number; - x?: number | NumberValidator; - y?: number | NumberValidator; - deltaX?: number | NumberValidator; - deltaY?: number | NumberValidator; - delta?: number | NumberValidator; -}; - -// keyboard input events captured by the streamer -export interface DataChannelKeyboardInput { - type: number; - keyCode: number; -}; - -export interface DataChannelCommandInput { - type: number; - command: string; -}; - -// a generic type for inputs captured by the streamer -export type DataChannelEvent = DataChannelMouseInput | DataChannelKeyboardInput | DataChannelCommandInput; - -// sets up the streamer page to capture data channel messages -// will capture events in a map on the window keyed by player id -export function setupEventCapture(streamerPage: Page) { - return streamerPage.evaluate(() => { - window.dataMessages = {}; - window.dataMessageListener = (playerId, message) => { - if (window.dataMessages[playerId] == undefined) { - window.dataMessages[playerId] = []; - } - window.dataMessages[playerId].push({ type: message.type, ...message.message }); - }; - window.streamer.on('data_channel_message', window.dataMessageListener); - }); -} - -// turns off the data channel capturing on the streamer page -export function teardownEventCapture(streamerPage: Page) { - return streamerPage.evaluate(() => { - window.streamer.off('data_channel_message', window.dataMessageListener); - }); -} - -// gets all the captured data channel messages between setup/teardownEventCapture -export function getCapturedEvents(streamerPage: Page): Promise> { - return streamerPage.evaluate(() => { - return window.dataMessages; - }); -} - -export async function getEventSetFrom(streamerPage: Page, performAction: () => Promise): Promise> { - await setupEventCapture(streamerPage); - await performAction(); - await delay(5); // just give a little time for the events to come through - await teardownEventCapture(streamerPage); - return await getCapturedEvents(streamerPage); -} - -export function getEvents(eventSet: Record, playerId?: string): DataChannelEvent[] { - playerId = playerId || Object.keys(eventSet)[0]; - return eventSet[playerId]; -} - +import { Page } from 'playwright'; +import { Streamer, DataProtocol } from '@epicgames-ps/js-streamer'; +import { delay } from './helpers'; +import { PixelStreaming, WebRtcSdpAnswerEvent } from '@epicgames-ps/lib-pixelstreamingfrontend-ue5.5'; + +declare global { + interface Window { + pixelStreaming: PixelStreaming; + streamer: Streamer; + signallingURL?: string; + dataMessages: Record; + dataMessageListener(playerId: number, message: any): void; + } +} + +export enum PSEventTypes { + MouseDown = DataProtocol.ToStreamer.MouseDown.id, + MouseUp = DataProtocol.ToStreamer.MouseUp.id, + MouseMove = DataProtocol.ToStreamer.MouseMove.id, + MouseWheel = DataProtocol.ToStreamer.MouseWheel.id, + MouseDouble = DataProtocol.ToStreamer.MouseDouble.id, + MouseEnter = DataProtocol.ToStreamer.MouseEnter.id, + MouseLeave = DataProtocol.ToStreamer.MouseLeave.id, + KeyDown = DataProtocol.ToStreamer.KeyDown.id, + KeyUp = DataProtocol.ToStreamer.KeyUp.id, + KeyPress = DataProtocol.ToStreamer.KeyPress.id, + Command = DataProtocol.ToStreamer.Command.id, +}; + +type NumberValidator = (n: number) => boolean; + +// mouse input events captured by the streamer +export interface DataChannelMouseInput { + type: number; + button?: number; + x?: number | 
NumberValidator; + y?: number | NumberValidator; + deltaX?: number | NumberValidator; + deltaY?: number | NumberValidator; + delta?: number | NumberValidator; +}; + +// keyboard input events captured by the streamer +export interface DataChannelKeyboardInput { + type: number; + keyCode: number; +}; + +export interface DataChannelCommandInput { + type: number; + command: string; +}; + +// a generic type for inputs captured by the streamer +export type DataChannelEvent = DataChannelMouseInput | DataChannelKeyboardInput | DataChannelCommandInput; + +// sets up the streamer page to capture data channel messages +// will capture events in a map on the window keyed by player id +export function setupEventCapture(streamerPage: Page) { + return streamerPage.evaluate(() => { + window.dataMessages = {}; + window.dataMessageListener = (playerId, message) => { + if (window.dataMessages[playerId] == undefined) { + window.dataMessages[playerId] = []; + } + window.dataMessages[playerId].push({ type: message.type, ...message.message }); + }; + window.streamer.on('data_channel_message', window.dataMessageListener); + }); +} + +// turns off the data channel capturing on the streamer page +export function teardownEventCapture(streamerPage: Page) { + return streamerPage.evaluate(() => { + window.streamer.off('data_channel_message', window.dataMessageListener); + }); +} + +// gets all the captured data channel messages between setup/teardownEventCapture +export function getCapturedEvents(streamerPage: Page): Promise> { + return streamerPage.evaluate(() => { + return window.dataMessages; + }); +} + +// gets the SDP answer from the player page +export function getSdpAnswer(playerPage: Page): Promise { + return playerPage.evaluate(() => { + return new Promise((resolve) => { + window.pixelStreaming.addEventListener("webRtcSdpAnswer", (evt : WebRtcSdpAnswerEvent) => { + resolve(evt.data.sdp); + }); + }); + }); +} + +export async function getEventSetFrom(streamerPage: Page, performAction: () => Promise): Promise> { + await setupEventCapture(streamerPage); + await performAction(); + await delay(5); // just give a little time for the events to come through + await teardownEventCapture(streamerPage); + return await getCapturedEvents(streamerPage); +} + +export function getEvents(eventSet: Record, playerId?: string): DataChannelEvent[] { + playerId = playerId || Object.keys(eventSet)[0]; + return eventSet[playerId]; +} + diff --git a/Extras/FrontendTests/tests/fixtures.ts b/Extras/FrontendTests/tests/fixtures.ts index fa1e4127..7a5ef55b 100644 --- a/Extras/FrontendTests/tests/fixtures.ts +++ b/Extras/FrontendTests/tests/fixtures.ts @@ -3,25 +3,39 @@ import { test as base, Page } from '@playwright/test'; type PSTestFixtures = { streamerPage: Page; streamerId: string; + localDescription: RTCSessionDescriptionInit; }; export const test = base.extend({ streamerPage: async ({ context }, use) => { const streamerPage = await context.newPage(); - await streamerPage.goto(`${process.env.PIXELSTREAMER_URL || 'http://localhost:4000'}?SignallingURL=${process.env.STREAMER_SIGNALLING_URL}`); + await streamerPage.goto(`${process.env.PIXELSTREAMER_URL || 'http://localhost:4000'}` + `${process.env.STREAMER_SIGNALLING_URL !== undefined ? 
'?SignallingURL=' + process.env.STREAMER_SIGNALLING_URL : ""}`); await use(streamerPage); }, streamerId: async ({ streamerPage }, use) => { - const idPromise: Promise = streamerPage.evaluate(()=> { - return new Promise((resolve) => { + + const idPromise: Promise = new Promise(async (resolve)=> { + + // Expose the resolve function to the browser context + await streamerPage.exposeFunction('resolveFromIdPromise', resolve); + + // Note: If page.evaluate is passed a promise it will try to await it immediately + // to avoid this hanging here waiting for endpoint_id_confirmed we instead + // wrap the page.evaluate in a promise and expose the resolve argument/function into the streamer page + // to be called when the endpoint_id_confirmed is actually called. + streamerPage.evaluate(()=> { window.streamer.on('endpoint_id_confirmed', () => { - resolve(window.streamer.id); + window.resolveFromIdPromise(window.streamer.id); }); - }) + }); }); + await streamerPage.getByText('Start Streaming').click(); const streamerId: string = await idPromise; await use(streamerId); - }, + } }); + +// Ensure tests run in serial as we don't want a clash with multiple peers putting things in different states +test.describe.configure({ mode: 'serial' }); \ No newline at end of file diff --git a/Extras/FrontendTests/tests/helpers.ts b/Extras/FrontendTests/tests/helpers.ts index bc39a490..78190f4a 100644 --- a/Extras/FrontendTests/tests/helpers.ts +++ b/Extras/FrontendTests/tests/helpers.ts @@ -31,12 +31,16 @@ export function delay(time: number) { }); } -export async function waitForVideo(page: Page) { + +export async function startAndWaitForVideo(page: Page) { await page.evaluate(()=> { return new Promise((resolve) => { + // Note: Assign listener before we start the connection window.pixelStreaming.addEventListener('playStream', (event) => { return resolve(event); }); + // Make the actual connection initiation + window.pixelStreaming.connect(); }); }); } diff --git a/Extras/FrontendTests/tests/keyboard.spec.ts b/Extras/FrontendTests/tests/keyboard.spec.ts index 0c410ea9..2792be67 100644 --- a/Extras/FrontendTests/tests/keyboard.spec.ts +++ b/Extras/FrontendTests/tests/keyboard.spec.ts @@ -12,8 +12,8 @@ test('Test keyboard events', { }, async ({ page, streamerPage, streamerId }) => { await page.goto(`/?StreamerId=${streamerId}&MatchViewportRes=true`); - await page.getByText('Click to start').click(); - await helpers.waitForVideo(page); + + await helpers.startAndWaitForVideo(page); const playerBox = await page.locator('#videoElementParent').boundingBox(); expect(playerBox).not.toBeNull(); diff --git a/Extras/FrontendTests/tests/mouse.spec.ts b/Extras/FrontendTests/tests/mouse.spec.ts index fa9fc780..7fbdb20a 100644 --- a/Extras/FrontendTests/tests/mouse.spec.ts +++ b/Extras/FrontendTests/tests/mouse.spec.ts @@ -22,9 +22,8 @@ test('Test mouse enter/leave', { // }); await page.goto(`/?StreamerId=${streamerId}&MatchViewportRes=true&HoveringMouse=true`); - await page.getByText('Click to start').click(); - await helpers.waitForVideo(page); + await helpers.startAndWaitForVideo(page); // reduce the size of the window so we can leave await page.setViewportSize({ width: 100, height: 100 }); @@ -66,9 +65,8 @@ test('Test mouse wheel', { // }); await page.goto(`/?StreamerId=${streamerId}&MatchViewportRes=true&HoveringMouse=false`); - await page.getByText('Click to start').click(); - await helpers.waitForVideo(page); + await helpers.startAndWaitForVideo(page); const playerBox = await page.locator('#videoElementParent').boundingBox(); 
expect(playerBox).not.toBeNull(); @@ -115,9 +113,8 @@ test('Test locked mouse movement', { // }); await page.goto(`/?StreamerId=${streamerId}&MatchViewportRes=true&HoveringMouse=false`); - await page.getByText('Click to start').click(); - await helpers.waitForVideo(page); + await helpers.startAndWaitForVideo(page); const playerBox = await page.locator('#videoElementParent').boundingBox(); expect(playerBox).not.toBeNull(); @@ -193,9 +190,8 @@ test('Test hovering mouse movement', { // }); await page.goto(`/?StreamerId=${streamerId}&MatchViewportRes=true&HoveringMouse=true`); - await page.getByText('Click to start').click(); - await helpers.waitForVideo(page); + await helpers.startAndWaitForVideo(page); const playerBox = await page.locator('#videoElementParent').boundingBox(); expect(playerBox).not.toBeNull(); @@ -245,9 +241,8 @@ test('Test mouse input after resizing. Hover mouse.', { // }); await page.goto(`/?StreamerId=${streamerId}&MatchViewportRes=true&HoveringMouse=true`); - await page.getByText('Click to start').click(); - await helpers.waitForVideo(page); + await helpers.startAndWaitForVideo(page); // resize the window to be smaller const oldSize = page.viewportSize(); @@ -309,9 +304,8 @@ test('Test mouse input after resizing. locked mouse.', { // }); await page.goto(`/?StreamerId=${streamerId}&MatchViewportRes=true&HoveringMouse=false`); - await page.getByText('Click to start').click(); - await helpers.waitForVideo(page); + await helpers.startAndWaitForVideo(page); // resize the window to be smaller const oldSize = page.viewportSize(); diff --git a/Extras/FrontendTests/tests/peerconnection.spec.ts b/Extras/FrontendTests/tests/peerconnection.spec.ts new file mode 100644 index 00000000..bb62d0bb --- /dev/null +++ b/Extras/FrontendTests/tests/peerconnection.spec.ts @@ -0,0 +1,189 @@ +import { test } from './fixtures'; +import { expect } from './matchers'; +import * as helpers from './helpers'; +import { Flags, PixelStreaming, WebRtcSdpAnswerEvent, WebRtcSdpOfferEvent, LatencyCalculator, LatencyInfo, LatencyCalculatedEvent } from '@epicgames-ps/lib-pixelstreamingfrontend-ue5.5'; + +test('Test abs-capture-time header extension found for streamer', { + tag: ['@capture-time'], +}, async ({ page, streamerPage, streamerId, browserName }) => { + + if(browserName !== 'chromium') { + // Chrome based browsers are the only ones that support. 
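+        // the abs-capture-time header extension this test relies on, so the test is skipped on other engines.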
+ test.skip(); + } + + const localDescription: Promise = new Promise(async (resolve) => { + + // Expose the resolve function to the browser context + await streamerPage.exposeFunction('resolveFromLocalDescriptionPromise', resolve); + + streamerPage.evaluate(() => { + window.streamer.on('local_description_set', (localDescription: RTCSessionDescriptionInit) => { + resolveFromLocalDescriptionPromise(localDescription); + }); + }); + }); + + await page.goto(`/?StreamerId=${streamerId}`); + await page.waitForLoadState("load"); + + // Wait for the sdp offer + let getSdpOffer = new Promise(async (resolve) => { + + // Expose the resolve function to the browser context + await page.exposeFunction('resolveFromSdpOfferPromise', resolve); + + page.evaluate(() => { + window.pixelStreaming.addEventListener("webRtcSdpOffer", (e: WebRtcSdpOfferEvent) => { + resolveFromSdpOfferPromise(e.data.sdp); + }); + }); + + }); + + await helpers.startAndWaitForVideo(page); + + let localDescSdp: RTCSessionDescriptionInit = await localDescription; + let remoteDescSdp: RTCSessionDescriptionInit = await getSdpOffer; + + expect(localDescSdp.sdp).toBeDefined(); + expect(remoteDescSdp.sdp).toBeDefined(); + + // If this string is found in the sdp we can say we have turned on the capture time header extension on the streamer + expect(localDescSdp.sdp).toContain("abs-capture-time"); + expect(remoteDescSdp.sdp).toContain("abs-capture-time"); +}); + +test('Test abs-capture-time header extension found in PSInfra frontend', { + tag: ['@capture-time'], +}, async ({ page, streamerPage, streamerId, browserName }) => { + + if(browserName !== 'chromium') { + // Chrome based browsers are the only ones that support. + test.skip(); + } + + await page.goto(`/?StreamerId=${streamerId}`); + + await page.waitForLoadState("load"); + + // Enable the flag for the capture extension + await page.evaluate(() => { + window.pixelStreaming.config.setFlagEnabled("EnableCaptureTimeExt", true); + }); + + // Wait for the sdp answer + let getSdpAnswer = new Promise(async (resolve) => { + + // Expose the resolve function to the browser context + await page.exposeFunction('resolveFromSdpAnswerPromise', resolve); + + page.evaluate(() => { + window.pixelStreaming.addEventListener("webRtcSdpAnswer", (e: WebRtcSdpAnswerEvent) => { + resolveFromSdpAnswerPromise(e.data.sdp); + }); + }); + + }); + + await helpers.startAndWaitForVideo(page); + const answer: RTCSessionDescriptionInit = await getSdpAnswer; + + expect(answer).toBeDefined(); + expect(answer.sdp).toBeDefined(); + + // If this string is found in the sdp we can say we have turned on the capture time header extension on the streamer + expect(answer.sdp).toContain("abs-capture-time"); +}); + +test('Test video-timing header extension found in PSInfra frontend', { + tag: ['@capture-time'], +}, async ({ page, streamerPage, streamerId, browserName }) => { + + if(browserName !== 'chromium') { + // Chrome based browsers are the only ones that support. 
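+        // the video-timing header extension this test relies on, so the test is skipped on other engines.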
+ test.skip(); + } + + await page.goto(`/?StreamerId=${streamerId}`); + + await page.waitForLoadState("load"); + + // Wait for the sdp answer + let getSdpAnswer = new Promise(async (resolve) => { + + // Expose the resolve function to the browser context + await page.exposeFunction('resolveFromSdpAnswerPromise', resolve); + + page.evaluate(() => { + window.pixelStreaming.addEventListener("webRtcSdpAnswer", (e: WebRtcSdpAnswerEvent) => { + resolveFromSdpAnswerPromise(e.data.sdp); + }); + }); + + }); + + await helpers.startAndWaitForVideo(page); + const answer: RTCSessionDescriptionInit = await getSdpAnswer; + + expect(answer).toBeDefined(); + expect(answer.sdp).toBeDefined(); + + // If this string is found in the sdp we can say we have turned on the capture time header extension on the streamer + expect(answer.sdp).toContain("video-timing"); +}); + +test('Test latency calculation with video timing', { + tag: ['@video-timing'], +}, async ({ page, streamerPage, streamerId, browserName }) => { + + if(browserName !== 'chromium') { + // Chrome based browsers are the only ones that support. + test.skip(); + } + + await page.goto(`/?StreamerId=${streamerId}`); + + await page.waitForLoadState("load"); + + await helpers.startAndWaitForVideo(page); + + // Wait for the latency info event to be fired + let latencyInfo: LatencyInfo = await page.evaluate(() => { + return new Promise((resolve) => { + window.pixelStreaming.addEventListener("latencyCalculated", (e: LatencyCalculatedEvent) => { + if(e.data.latencyInfo && e.data.latencyInfo.frameTiming && + e.data.latencyInfo.frameTiming.captureToSendLatencyMs && + e.data.latencyInfo.rttMs) { + resolve(e.data.latencyInfo); + } + }); + }); + }); + + expect(latencyInfo).toBeDefined(); + expect(latencyInfo.frameTiming).toBeDefined(); + expect(latencyInfo.frameTiming?.captureToSendLatencyMs).toBeDefined(); + expect(latencyInfo.averageJitterBufferDelayMs).toBeDefined(); + expect(latencyInfo.averageProcessingDelayMs).toBeDefined(); + expect(latencyInfo.rttMs).toBeDefined(); + expect(latencyInfo.averageAssemblyDelayMs).toBeDefined(); + expect(latencyInfo.averageDecodeLatencyMs).toBeDefined(); + + // Sender side latency should be less than 500ms in pure CPU test + expect(latencyInfo.frameTiming?.captureToSendLatencyMs).toBeLessThanOrEqual(500) + + // Expect jitter buffer/processing delay to be no greater than 500ms on local link + expect(latencyInfo.averageJitterBufferDelayMs).toBeLessThanOrEqual(500); + expect(latencyInfo.averageProcessingDelayMs).toBeLessThanOrEqual(500); + + // Expect RTT to be less than 10ms on loopback + expect(latencyInfo.rttMs).toBeLessThanOrEqual(10); + + // Expect time to assemble frame from packets to be less than the frame rate itself at 30 fps + expect(latencyInfo.averageAssemblyDelayMs).toBeLessThanOrEqual(33); + + // Expect CPU decoder to at least be able to do 30 fps decode + expect(latencyInfo.averageDecodeLatencyMs).toBeLessThanOrEqual(33); + +}); diff --git a/Extras/FrontendTests/tests/resolution_changes.spec.ts b/Extras/FrontendTests/tests/resolution_changes.spec.ts index 49da761c..f16261e7 100644 --- a/Extras/FrontendTests/tests/resolution_changes.spec.ts +++ b/Extras/FrontendTests/tests/resolution_changes.spec.ts @@ -1,61 +1,60 @@ -import { test } from './fixtures'; -import { expect } from './matchers'; -import { - PSEventTypes, - DataChannelEvent, - getEventSetFrom, - getEvents, -} from './extras'; -import * as helpers from './helpers'; - -test('Test resolution changes with match viewport on.', { - tag: ['@resolution'], -}, async 
({ page, streamerPage, streamerId }) => { - - // first with match viewport enabled - await page.goto(`/?StreamerId=${streamerId}&MatchViewportRes=true`); - await page.getByText('Click to start').click(); - await helpers.waitForVideo(page); - - const events = await getEventSetFrom(streamerPage, async () => { - await page.setViewportSize({ width: 100, height: 100 }); - await helpers.delay(1000); - await page.setViewportSize({ width: 800, height: 600 }); - await helpers.delay(1000); - }); - - const singlePlayerEvents = getEvents(events); - const expectedActions: DataChannelEvent[] = [ - { type: PSEventTypes.Command, command: '{\"Resolution.Width\":100,\"Resolution.Height\":100}' }, - { type: PSEventTypes.Command, command: '{\"Resolution.Width\":800,\"Resolution.Height\":600}' }, - ]; - expect(singlePlayerEvents).toContainActions(expectedActions); -}); - - -test('Test resolution changes with match viewport off.', { - tag: ['@resolution'], -}, async ({ page, streamerPage, streamerId }) => { - - // first with match viewport enabled - await page.goto(`/?StreamerId=${streamerId}&MatchViewportRes=false`); - await page.getByText('Click to start').click(); - await helpers.waitForVideo(page); - await page.click("#streamingVideo"); - - const events = await getEventSetFrom(streamerPage, async () => { - await page.setViewportSize({ width: 100, height: 100 }); - await helpers.delay(1000); - await page.setViewportSize({ width: 800, height: 600 }); - await helpers.delay(1000); - }); - - const singlePlayerEvents = getEvents(events); - const expectedActions: DataChannelEvent[] = [ - { type: PSEventTypes.Command, command: '{\"Resolution.Width\":100,\"Resolution.Height\":100}' }, - { type: PSEventTypes.Command, command: '{\"Resolution.Width\":800,\"Resolution.Height\":600}' }, - ]; - expect(singlePlayerEvents).not.toContainActions(expectedActions); -}); - - +import { test } from './fixtures'; +import { expect } from './matchers'; +import { + PSEventTypes, + DataChannelEvent, + getEventSetFrom, + getEvents, +} from './extras'; +import * as helpers from './helpers'; + +test('Test resolution changes with match viewport on.', { + tag: ['@resolution'], +}, async ({ page, streamerPage, streamerId }) => { + + // first with match viewport enabled + await page.goto(`/?StreamerId=${streamerId}&MatchViewportRes=true`); + + await helpers.startAndWaitForVideo(page); + + const events = await getEventSetFrom(streamerPage, async () => { + await page.setViewportSize({ width: 100, height: 100 }); + await helpers.delay(1000); + await page.setViewportSize({ width: 800, height: 600 }); + await helpers.delay(1000); + }); + + const singlePlayerEvents = getEvents(events); + const expectedActions: DataChannelEvent[] = [ + { type: PSEventTypes.Command, command: '{\"Resolution.Width\":100,\"Resolution.Height\":100}' }, + { type: PSEventTypes.Command, command: '{\"Resolution.Width\":800,\"Resolution.Height\":600}' }, + ]; + expect(singlePlayerEvents).toContainActions(expectedActions); +}); + + +test('Test resolution changes with match viewport off.', { + tag: ['@resolution'], +}, async ({ page, streamerPage, streamerId }) => { + + // first with match viewport enabled + await page.goto(`/?StreamerId=${streamerId}&MatchViewportRes=false`); + + await helpers.startAndWaitForVideo(page); + + const events = await getEventSetFrom(streamerPage, async () => { + await page.setViewportSize({ width: 100, height: 100 }); + await helpers.delay(1000); + await page.setViewportSize({ width: 800, height: 600 }); + await helpers.delay(1000); + }); + + const 
singlePlayerEvents = getEvents(events); + const expectedActions: DataChannelEvent[] = [ + { type: PSEventTypes.Command, command: '{\"Resolution.Width\":100,\"Resolution.Height\":100}' }, + { type: PSEventTypes.Command, command: '{\"Resolution.Width\":800,\"Resolution.Height\":600}' }, + ]; + expect(singlePlayerEvents).not.toContainActions(expectedActions); +}); + + diff --git a/Extras/JSStreamer/Dockerfile b/Extras/JSStreamer/Dockerfile index 04283420..776852cd 100644 --- a/Extras/JSStreamer/Dockerfile +++ b/Extras/JSStreamer/Dockerfile @@ -1,9 +1,22 @@ FROM node:20-bookworm +## Note: This dockerfile is expected to be called from the root of this repo +## Maybe something like: docker build -t epicgames/jsstreamer:latest -f ./Extras/JSStreamer/Dockerfile . + WORKDIR /streamer -COPY /Extras/JSStreamer . +COPY /Common ./Common +COPY /Extras/JSStreamer ./Extras/JSStreamer +COPY ./package.json ./package.json +# Initiate NPM workspaces so we can install deps like our common lib using local built packages as opposed to remove published packages RUN npm install -CMD npm run develop +# Install and build common +RUN cd ./Common && npm install && npm run build + +# Install and build JSStream using the common lib we just build +RUN cd ./Extras/JSStreamer && npm install && npm run build + +# Run JSStreamer +CMD cd ./Extras/JSStreamer && npm run develop diff --git a/Extras/JSStreamer/src/streamer.ts b/Extras/JSStreamer/src/streamer.ts index c8488f85..08ef1fd0 100644 --- a/Extras/JSStreamer/src/streamer.ts +++ b/Extras/JSStreamer/src/streamer.ts @@ -5,7 +5,8 @@ import { Messages, MessageHelpers, BaseMessage, - EventEmitter + EventEmitter, + SDPUtils } from '@epicgames-ps/lib-pixelstreamingcommon-ue5.5'; import { DataProtocol } from './protocol'; @@ -31,6 +32,7 @@ interface WebRTCSettings { MaxBitrate: number; LowQP: number; HighQP: number; + AbsCaptureTimeHeaderExt: boolean } interface Settings { @@ -89,7 +91,8 @@ export class Streamer extends EventEmitter { MinBitrate: 100000, MaxBitrate: 100000000, LowQP: 25, - HighQP: 37 + HighQP: 37, + AbsCaptureTimeHeaderExt: true }, ConfigOptions: {} }; @@ -130,8 +133,17 @@ export class Streamer extends EventEmitter { this.transport.connect(signallingURL); } + stopStreaming() { + this.transport.disconnect(1000, "Normal shutdown by calling stopStreaming"); + for(let peer of this.playerMap.values()) { + peer.peerConnection.close(); + } + } + handleConfigMessage(msg: Messages.config) { - this.peerConnectionOptions = msg.peerConnectionOptions; + if(msg.peerConnectionOptions !== undefined) { + this.peerConnectionOptions = msg.peerConnectionOptions; + } } handleIdentifyMessage(_msg: Messages.identify) { @@ -172,7 +184,7 @@ export class Streamer extends EventEmitter { } const tranceiverOptions: RTCRtpTransceiverInit = { streams: [this.localStream], - direction: 'sendonly', + direction: 'sendrecv', sendEncodings: [ { maxBitrate: this.settings.WebRTC.MaxBitrate, @@ -212,9 +224,19 @@ export class Streamer extends EventEmitter { dataChannel: dataChannel }; + const offerOptions: RTCOfferOptions = { offerToReceiveAudio: true, offerToReceiveVideo: true } + peerConnection - .createOffer() + .createOffer(offerOptions) .then((offer) => { + + if(offer.sdp == undefined) { + return; + } + + // Munge offer + offer.sdp = this.mungeOffer(offer.sdp); + peerConnection .setLocalDescription(offer) .then(() => { @@ -224,6 +246,7 @@ export class Streamer extends EventEmitter { sdp: offer.sdp }) ); + this.emit('local_description_set', offer); }) .catch(() => {}); }) @@ -234,8 +257,8 @@ export 
class Streamer extends EventEmitter { peerConnection .getStats() .then((stats: RTCStatsReport) => { - let qpSum: number; - let fps: number; + let qpSum: number | undefined = undefined; + let fps: number | undefined = undefined; stats.forEach((report) => { /* eslint-disable @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-assignment */ if (report.type == 'outbound-rtp' && report.mediaType == 'video') { @@ -245,14 +268,12 @@ export class Streamer extends EventEmitter { /* eslint-enable @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-unsafe-assignment */ }); const nowTime = Date.now(); - if (newPlayer.lastStatsTime) { + if (newPlayer.lastStatsTime != undefined && newPlayer.lastQpSum !== undefined && qpSum !== undefined && fps !== undefined) { const deltaMillis = nowTime - newPlayer.lastStatsTime; const qpDelta = (qpSum - newPlayer.lastQpSum) * (deltaMillis / 1000); const qpAvg = qpDelta / fps; - newPlayer.dataChannel.send( - this.constructMessage(DataProtocol.FromStreamer.VideoEncoderAvgQP, qpAvg) - ); + newPlayer.dataChannel.send(this.constructMessage(DataProtocol.FromStreamer.VideoEncoderAvgQP, qpAvg)); } newPlayer.lastQpSum = qpSum; newPlayer.lastStatsTime = nowTime; @@ -293,6 +314,15 @@ export class Streamer extends EventEmitter { } } + mungeOffer(offerSDP: string) : string { + if(this.settings.WebRTC.AbsCaptureTimeHeaderExt) { + // Add the abs-capture-time header extension to the sdp extmap + const kAbsCaptureTime = 'http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time'; + return SDPUtils.addVideoHeaderExtensionToSdp(offerSDP, kAbsCaptureTime); + } + return offerSDP; + } + sendDataProtocol(playerId: string) { const playerPeer = this.playerMap[playerId]; if (playerPeer) { @@ -344,10 +374,7 @@ export class Streamer extends EventEmitter { let argIndex = 0; if (messageDef.structure.length != args.length) { - console.log( - `Incorrect number of parameters given to constructMessage. Got ${args.length}, expected ${messageDef.structure.length}` - ); - return null; + throw new Error(`Incorrect number of parameters given to constructMessage. 
Got ${args.length}, expected ${messageDef.structure.length}`); } dataSize += 1; // message type diff --git a/Extras/JSStreamer/tsconfig.base.json b/Extras/JSStreamer/tsconfig.base.json index 8df53774..3c178ad7 100644 --- a/Extras/JSStreamer/tsconfig.base.json +++ b/Extras/JSStreamer/tsconfig.base.json @@ -19,4 +19,3 @@ "readme": "none" } } - diff --git a/Extras/MinimalStreamTester/docker-compose.yml b/Extras/MinimalStreamTester/docker-compose.yml index 426674e4..71586c38 100644 --- a/Extras/MinimalStreamTester/docker-compose.yml +++ b/Extras/MinimalStreamTester/docker-compose.yml @@ -19,7 +19,7 @@ services: streamer: image: pixelstreamingunofficial/ps-minimal-streamer-linux - command: -PixelStreamingURL=ws://signalling:8888 -nothreadtimeout + command: -PixelStreamingURL=ws://signalling:8888 -nothreadtimeout -PixelStreamingEncoderCodec=vp8 -RenderOffScreen -AllowSoftwareRendering networks: - testing healthcheck: diff --git a/Extras/MinimalStreamTester/package.json b/Extras/MinimalStreamTester/package.json index a338240a..a7694bf9 100644 --- a/Extras/MinimalStreamTester/package.json +++ b/Extras/MinimalStreamTester/package.json @@ -1,5 +1,5 @@ { - "name": "prtest", + "name": "MinimalStreamTester", "version": "1.0.0", "description": "", "main": "index.js", @@ -11,7 +11,7 @@ "author": "", "license": "ISC", "devDependencies": { - "@playwright/test": "^1.49.0", + "@playwright/test": "^1.49.1", "@types/node": "^20.12.7", "@types/uuid": "^9.0.8" }, diff --git a/Extras/MinimalStreamTester/tests/stream_test.spec.ts b/Extras/MinimalStreamTester/tests/stream_test.spec.ts index 1e8cbe7f..09db3e35 100644 --- a/Extras/MinimalStreamTester/tests/stream_test.spec.ts +++ b/Extras/MinimalStreamTester/tests/stream_test.spec.ts @@ -7,12 +7,14 @@ function delay(time: number) { }); } -async function waitForVideo(page: Page) { +async function startAndWaitForVideo(page: Page) { await page.evaluate(()=> { return new Promise((resolve) => { - pixelStreaming.addEventListener('playStream', (event) => { + window.pixelStreaming.addEventListener('playStream', (event) => { return resolve(event); }); + // Start the stream now we have listener attached + window.pixelStreaming.connect(); }); }); } @@ -22,20 +24,24 @@ test('Test default stream.', async ({ page }, testinfo) => { // set a long timeout to allow for slow software rendering test.setTimeout(2 * 60 * 1000); - + await page.goto("/?StreamerId=DefaultStreamer"); - await page.getByText('Click to start').click(); // wait until we get a stream - await waitForVideo(page); + await startAndWaitForVideo(page); // let the stream run for a small duration await delay(15000); // query the frontend for its calculated stats - const frame_count:number = await page.evaluate(()=> { - let videoStats = pixelStreaming._webRtcController.peerConnectionController.aggregatedStats.inboundVideoStats; - return videoStats.framesReceived; + let frameCount: number = await page.evaluate(()=> { + return new Promise((resolve) => { + window.pixelStreaming.addEventListener("statsReceived", (e) => { + if(e.data.aggregatedStats && e.data.aggregatedStats.inboundVideoStats && e.data.aggregatedStats.inboundVideoStats.framesReceived) { + resolve(e.data.aggregatedStats.inboundVideoStats.framesReceived); + } + }); + }); }); // take a screenshot for posterity @@ -47,6 +53,6 @@ test('Test default stream.', async ({ page }, testinfo) => { testinfo.attach('screenshot', { body: screenshot, contentType: 'image/png' }); // pass the test if we recorded any frames - expect(frame_count).toBeGreaterThan(0); + 
expect(frameCount).toBeGreaterThan(0); }); diff --git a/Frontend/implementations/typescript/tsconfig.esm.json b/Frontend/implementations/typescript/tsconfig.esm.json index 4edbe6cf..2671e8eb 100644 --- a/Frontend/implementations/typescript/tsconfig.esm.json +++ b/Frontend/implementations/typescript/tsconfig.esm.json @@ -3,6 +3,6 @@ "compilerOptions": { "outDir": "./dist/esm", "module": "es6", - "moduleResolution": "nodenext" + "moduleResolution": "bundler" } } diff --git a/Frontend/implementations/typescript/webpack.esmodule.js b/Frontend/implementations/typescript/webpack.esmodule.js index 9242a023..f7b0cedb 100644 --- a/Frontend/implementations/typescript/webpack.esmodule.js +++ b/Frontend/implementations/typescript/webpack.esmodule.js @@ -21,7 +21,7 @@ module.exports = { module: true, path: process.env.WEBPACK_OUTPUT_PATH ? path.resolve(process.env.WEBPACK_OUTPUT_PATH) : path.resolve(__dirname, '../../../SignallingWebServer/www'), globalObject: 'this', - hashFunction: 'xxhash64', + hashFunction: 'xxhash64' }, experiments: { outputModule: true, diff --git a/Frontend/library/src/Config/Config.ts b/Frontend/library/src/Config/Config.ts index 7428fc8c..93f700de 100644 --- a/Frontend/library/src/Config/Config.ts +++ b/Frontend/library/src/Config/Config.ts @@ -33,6 +33,7 @@ export class Flags { static XRControllerInput = 'XRControllerInput' as const; static WaitForStreamer = 'WaitForStreamer' as const; static HideUI = 'HideUI' as const; + static EnableCaptureTimeExt = 'EnableCaptureTimeExt' as const; } export type FlagsKeys = Exclude; @@ -564,6 +565,19 @@ export class Config { ) ); + this.flags.set( + Flags.EnableCaptureTimeExt, + new SettingFlag( + Flags.EnableCaptureTimeExt, + 'Enable abs-capture-time', + 'Enables the abs-capture-time RTP header extension', + settings && Object.prototype.hasOwnProperty.call(settings, Flags.EnableCaptureTimeExt) + ? 
settings[Flags.EnableCaptureTimeExt] + : true, + useUrlParams + ) + ); + /** * Numeric parameters */ diff --git a/Frontend/library/src/PeerConnectionController/AggregatedStats.ts b/Frontend/library/src/PeerConnectionController/AggregatedStats.ts index a2123684..a4a0c5eb 100644 --- a/Frontend/library/src/PeerConnectionController/AggregatedStats.ts +++ b/Frontend/library/src/PeerConnectionController/AggregatedStats.ts @@ -5,7 +5,7 @@ import { InboundTrackStats } from './InboundTrackStats'; import { DataChannelStats } from './DataChannelStats'; import { CandidateStat } from './CandidateStat'; import { CandidatePairStats } from './CandidatePairStats'; -import { OutBoundRTPStats, OutBoundVideoStats } from './OutBoundRTPStats'; +import { RemoteOutboundRTPStats, OutboundRTPStats } from './OutBoundRTPStats'; import { SessionStats } from './SessionStats'; import { StreamStats } from './StreamStats'; import { CodecStats } from './CodecStats'; @@ -15,15 +15,17 @@ import { Logger } from '@epicgames-ps/lib-pixelstreamingcommon-ue5.5'; * The Aggregated Stats that is generated from the RTC Stats Report */ -type RTCStatsTypePS = RTCStatsType | 'stream' | 'media-playout' | 'track'; export class AggregatedStats { inboundVideoStats: InboundVideoStats; inboundAudioStats: InboundAudioStats; candidatePairs: Array; - DataChannelStats: DataChannelStats; + datachannelStats: DataChannelStats; localCandidates: Array; remoteCandidates: Array; - outBoundVideoStats: OutBoundVideoStats; + outboundVideoStats: OutboundRTPStats; + outboundAudioStats: OutboundRTPStats; + remoteOutboundVideoStats: RemoteOutboundRTPStats; + remoteOutboundAudioStats: RemoteOutboundRTPStats; sessionStats: SessionStats; streamStats: StreamStats; codecs: Map; @@ -32,8 +34,11 @@ export class AggregatedStats { constructor() { this.inboundVideoStats = new InboundVideoStats(); this.inboundAudioStats = new InboundAudioStats(); - this.DataChannelStats = new DataChannelStats(); - this.outBoundVideoStats = new OutBoundVideoStats(); + this.datachannelStats = new DataChannelStats(); + this.outboundVideoStats = new OutboundRTPStats(); + this.outboundAudioStats = new OutboundRTPStats(); + this.remoteOutboundAudioStats = new RemoteOutboundRTPStats(); + this.remoteOutboundVideoStats = new RemoteOutboundRTPStats(); this.sessionStats = new SessionStats(); this.streamStats = new StreamStats(); this.codecs = new Map(); @@ -49,7 +54,7 @@ export class AggregatedStats { this.candidatePairs = new Array(); rtcStatsReport.forEach((stat) => { - const type: RTCStatsTypePS = stat.type; + const type: string = stat.type; switch (type) { case 'candidate-pair': @@ -64,7 +69,7 @@ export class AggregatedStats { this.handleDataChannel(stat); break; case 'inbound-rtp': - this.handleInBoundRTP(stat); + this.handleInboundRTP(stat); break; case 'local-candidate': this.handleLocalCandidate(stat); @@ -74,6 +79,7 @@ export class AggregatedStats { case 'media-playout': break; case 'outbound-rtp': + this.handleLocalOutbound(stat); break; case 'peer-connection': break; @@ -83,7 +89,7 @@ export class AggregatedStats { case 'remote-inbound-rtp': break; case 'remote-outbound-rtp': - this.handleRemoteOutBound(stat); + this.handleRemoteOutbound(stat); break; case 'track': this.handleTrack(stat); @@ -125,16 +131,16 @@ export class AggregatedStats { * @param stat - the stats coming in from the data channel */ handleDataChannel(stat: DataChannelStats) { - this.DataChannelStats.bytesReceived = stat.bytesReceived; - this.DataChannelStats.bytesSent = stat.bytesSent; - 
this.DataChannelStats.dataChannelIdentifier = stat.dataChannelIdentifier; - this.DataChannelStats.id = stat.id; - this.DataChannelStats.label = stat.label; - this.DataChannelStats.messagesReceived = stat.messagesReceived; - this.DataChannelStats.messagesSent = stat.messagesSent; - this.DataChannelStats.protocol = stat.protocol; - this.DataChannelStats.state = stat.state; - this.DataChannelStats.timestamp = stat.timestamp; + this.datachannelStats.bytesReceived = stat.bytesReceived; + this.datachannelStats.bytesSent = stat.bytesSent; + this.datachannelStats.dataChannelIdentifier = stat.dataChannelIdentifier; + this.datachannelStats.id = stat.id; + this.datachannelStats.label = stat.label; + this.datachannelStats.messagesReceived = stat.messagesReceived; + this.datachannelStats.messagesSent = stat.messagesSent; + this.datachannelStats.protocol = stat.protocol; + this.datachannelStats.state = stat.state; + this.datachannelStats.timestamp = stat.timestamp; } /** @@ -159,23 +165,23 @@ export class AggregatedStats { * @param stat - ice candidate stats */ handleRemoteCandidate(stat: CandidateStat) { - const RemoteCandidate = new CandidateStat(); - RemoteCandidate.label = 'remote-candidate'; - RemoteCandidate.address = stat.address; - RemoteCandidate.port = stat.port; - RemoteCandidate.protocol = stat.protocol; - RemoteCandidate.id = stat.id; - RemoteCandidate.candidateType = stat.candidateType; - RemoteCandidate.relayProtocol = stat.relayProtocol; - RemoteCandidate.transportId = stat.transportId; - this.remoteCandidates.push(RemoteCandidate); + const remoteCandidate = new CandidateStat(); + remoteCandidate.label = 'remote-candidate'; + remoteCandidate.address = stat.address; + remoteCandidate.port = stat.port; + remoteCandidate.protocol = stat.protocol; + remoteCandidate.id = stat.id; + remoteCandidate.candidateType = stat.candidateType; + remoteCandidate.relayProtocol = stat.relayProtocol; + remoteCandidate.transportId = stat.transportId; + this.remoteCandidates.push(remoteCandidate); } /** * Process the Inbound RTP Audio and Video Data * @param stat - inbound rtp stats */ - handleInBoundRTP(stat: InboundRTPStats) { + handleInboundRTP(stat: InboundRTPStats) { switch (stat.kind) { case 'video': // Calculate bitrate between stat updates @@ -216,25 +222,63 @@ export class AggregatedStats { } /** - * Process the outbound RTP Audio and Video Data - * @param stat - remote outbound stats + * Process the "local" outbound RTP Audio and Video stats. + * @param stat - local outbound rtp stats */ - handleRemoteOutBound(stat: OutBoundRTPStats) { - switch (stat.kind) { - case 'video': - this.outBoundVideoStats.bytesSent = stat.bytesSent; - this.outBoundVideoStats.id = stat.id; - this.outBoundVideoStats.localId = stat.localId; - this.outBoundVideoStats.packetsSent = stat.packetsSent; - this.outBoundVideoStats.remoteTimestamp = stat.remoteTimestamp; - this.outBoundVideoStats.timestamp = stat.timestamp; - break; - case 'audio': - break; + handleLocalOutbound(stat: OutboundRTPStats) { + const localOutboundStats: OutboundRTPStats = + stat.kind === 'audio' ? 
this.outboundAudioStats : this.outboundVideoStats; + localOutboundStats.active = stat.active; + localOutboundStats.codecId = stat.codecId; + localOutboundStats.bytesSent = stat.bytesSent; + localOutboundStats.frameHeight = stat.frameHeight; + localOutboundStats.frameWidth = stat.frameWidth; + localOutboundStats.framesEncoded = stat.framesEncoded; + localOutboundStats.framesPerSecond = stat.framesPerSecond; + localOutboundStats.headerBytesSent = stat.headerBytesSent; + localOutboundStats.id = stat.id; + localOutboundStats.keyFramesEncoded = stat.keyFramesEncoded; + localOutboundStats.kind = stat.kind; + localOutboundStats.mediaSourceId = stat.mediaSourceId; + localOutboundStats.mid = stat.mid; + localOutboundStats.nackCount = stat.nackCount; + localOutboundStats.packetsSent = stat.packetsSent; + localOutboundStats.qpSum = stat.qpSum; + localOutboundStats.qualityLimitationDurations = stat.qualityLimitationDurations; + localOutboundStats.qualityLimitationReason = stat.qualityLimitationReason; + localOutboundStats.remoteId = stat.remoteId; + localOutboundStats.retransmittedBytesSent = stat.retransmittedBytesSent; + localOutboundStats.rid = stat.rid; + localOutboundStats.scalabilityMode = stat.scalabilityMode; + localOutboundStats.ssrc = stat.ssrc; + localOutboundStats.targetBitrate = stat.targetBitrate; + localOutboundStats.timestamp = stat.timestamp; + localOutboundStats.totalEncodeTime = stat.totalEncodeTime; + localOutboundStats.totalEncodeBytesTarget = stat.totalEncodeBytesTarget; + localOutboundStats.totalPacketSendDelay = stat.totalPacketSendDelay; + localOutboundStats.transportId = stat.transportId; + } - default: - break; - } + /** + * Process the "remote" outbound RTP Audio and Video stats. + * @param stat - remote outbound rtp stats + */ + handleRemoteOutbound(stat: RemoteOutboundRTPStats) { + const remoteOutboundStats: RemoteOutboundRTPStats = + stat.kind === 'audio' ? 
this.remoteOutboundAudioStats : this.remoteOutboundVideoStats; + remoteOutboundStats.bytesSent = stat.bytesSent; + remoteOutboundStats.codecId = stat.codecId; + remoteOutboundStats.id = stat.id; + remoteOutboundStats.kind = stat.kind; + remoteOutboundStats.localId = stat.localId; + remoteOutboundStats.packetsSent = stat.packetsSent; + remoteOutboundStats.remoteTimestamp = stat.remoteTimestamp; + remoteOutboundStats.reportsSent = stat.reportsSent; + remoteOutboundStats.roundTripTimeMeasurements = stat.roundTripTimeMeasurements; + remoteOutboundStats.ssrc = stat.ssrc; + remoteOutboundStats.timestamp = stat.timestamp; + remoteOutboundStats.totalRoundTripTime = stat.totalRoundTripTime; + remoteOutboundStats.transportId = stat.transportId; } /** @@ -288,16 +332,31 @@ export class AggregatedStats { * @returns The candidate pair that is currently receiving data */ public getActiveCandidatePair(): CandidatePairStats | null { + if (this.candidatePairs === undefined) { + return null; + } + // Check if the RTCTransport stat is not undefined if (this.transportStats) { // Return the candidate pair that matches the transport candidate pair id - return this.candidatePairs.find( - (candidatePair) => candidatePair.id === this.transportStats.selectedCandidatePairId, - null + const selectedPair: CandidatePairStats | undefined = this.candidatePairs.find( + (candidatePair) => candidatePair.id === this.transportStats.selectedCandidatePairId ); + if (selectedPair === undefined) { + return null; + } else { + return selectedPair; + } } - // Fall back to the selected candidate pair - return this.candidatePairs.find((candidatePair) => candidatePair.selected, null); + // Fall back to the `.selected` member of the candidate pair + const selectedPair: CandidatePairStats | undefined = this.candidatePairs.find( + (candidatePair) => candidatePair.selected + ); + if (selectedPair === undefined) { + return null; + } else { + return selectedPair; + } } } diff --git a/Frontend/library/src/PeerConnectionController/InboundRTPStats.ts b/Frontend/library/src/PeerConnectionController/InboundRTPStats.ts index 030b9d43..663c6c5e 100644 --- a/Frontend/library/src/PeerConnectionController/InboundRTPStats.ts +++ b/Frontend/library/src/PeerConnectionController/InboundRTPStats.ts @@ -4,41 +4,41 @@ * Inbound Audio Stats collected from the RTC Stats Report */ export class InboundAudioStats { - audioLevel: number; + audioLevel: number | undefined; bytesReceived: number; codecId: string; - concealedSamples: number; - concealmentEvents: number; - fecPacketsDiscarded: number; - fecPacketsReceived: number; + concealedSamples: number | undefined; + concealmentEvents: number | undefined; + fecPacketsDiscarded: number | undefined; + fecPacketsReceived: number | undefined; headerBytesReceived: number; id: string; - insertedSamplesForDeceleration: number; + insertedSamplesForDeceleration: number | undefined; jitter: number; jitterBufferDelay: number; jitterBufferEmittedCount: number; - jitterBufferMinimumDelay: number; - jitterBufferTargetDelay: number; + jitterBufferMinimumDelay: number | undefined; + jitterBufferTargetDelay: number | undefined; kind: string; lastPacketReceivedTimestamp: number; - mediaType: string; + mediaType: string | undefined; mid: string; - packetsDiscarded: number; + packetsDiscarded: number | undefined; packetsLost: number; packetsReceived: number; - removedSamplesForAcceleration: number; - silentConcealedSamples: number; + removedSamplesForAcceleration: number | undefined; + silentConcealedSamples: number | undefined; 
ssrc: number; timestamp: number; - totalAudioEnergy: number; - totalSamplesDuration: number; - totalSamplesReceived: number; - trackIdentifier: string; - transportId: string; + totalAudioEnergy: number | undefined; + totalSamplesDuration: number | undefined; + totalSamplesReceived: number | undefined; + trackIdentifier: string | undefined; + transportId: string | undefined; type: string; /* additional, custom stats */ - bitrate: number; + bitrate: number | undefined; } /** @@ -46,47 +46,47 @@ export class InboundAudioStats { */ export class InboundVideoStats { bytesReceived: number; - codecId: string; - firCount: number; - frameHeight: number; - frameWidth: number; - framesAssembledFromMultiplePackets: number; - framesDecoded: number; - framesDropped: number; - framesPerSecond: number; - framesReceived: number; - freezeCount: number; - googTimingFrameInfo: string; + codecId: string | undefined; + firCount: number | undefined; + frameHeight: number | undefined; + frameWidth: number | undefined; + framesAssembledFromMultiplePackets: number | undefined; + framesDecoded: number | undefined; + framesDropped: number | undefined; + framesPerSecond: number | undefined; + framesReceived: number | undefined; + freezeCount: number | undefined; + googTimingFrameInfo: string | undefined; headerBytesReceived: number; id: string; jitter: number; jitterBufferDelay: number; jitterBufferEmittedCount: number; - keyFramesDecoded: number; + keyFramesDecoded: number | undefined; kind: string; - lastPacketReceivedTimestamp: number; - mediaType: string; + lastPacketReceivedTimestamp: number | undefined; + mediaType: string | undefined; mid: string; - nackCount: number; + nackCount: number | undefined; packetsLost: number; packetsReceived: number; - pauseCount: number; - pliCount: number; + pauseCount: number | undefined; + pliCount: number | undefined; ssrc: number; timestamp: number; - totalAssemblyTime: number; - totalDecodeTime: number; - totalFreezesDuration: number; - totalInterFrameDelay: number; - totalPausesDuration: number; - totalProcessingDelay: number; - totalSquaredInterFrameDelay: number; - trackIdentifier: string; - transportId: string; + totalAssemblyTime: number | undefined; + totalDecodeTime: number | undefined; + totalFreezesDuration: number | undefined; + totalInterFrameDelay: number | undefined; + totalPausesDuration: number | undefined; + totalProcessingDelay: number | undefined; + totalSquaredInterFrameDelay: number | undefined; + trackIdentifier: string | undefined; + transportId: string | undefined; type: string; /* additional, custom stats */ - bitrate: number; + bitrate: number | undefined; } /** @@ -95,60 +95,63 @@ export class InboundVideoStats { export class InboundRTPStats { /* common stats */ bytesReceived: number; - codecId: string; + codecId: string | undefined; headerBytesReceived: number; id: string; jitter: number; jitterBufferDelay: number; jitterBufferEmittedCount: number; kind: string; - lastPacketReceivedTimestamp: number; - mediaType: string; + lastPacketReceivedTimestamp: number | undefined; + mediaType: string | undefined; mid: string; packetsLost: number; packetsReceived: number; + playoutId: string | undefined; + qpsum: number | undefined; + remoteId: string | undefined; ssrc: number; timestamp: number; - trackIdentifier: string; - transportId: string; + trackIdentifier: string | undefined; + transportId: string | undefined; type: string; /* audio specific stats */ - audioLevel: number; - concealedSamples: number; - concealmentEvents: number; - fecPacketsDiscarded: 
number; - fecPacketsReceived: number; - insertedSamplesForDeceleration: number; - jitterBufferMinimumDelay: number; - jitterBufferTargetDelay: number; - packetsDiscarded: number; - removedSamplesForAcceleration: number; - silentConcealedSamples: number; - totalAudioEnergy: number; - totalSamplesDuration: number; - totalSamplesReceived: number; + audioLevel: number | undefined; + concealedSamples: number | undefined; + concealmentEvents: number | undefined; + fecPacketsDiscarded: number | undefined; + fecPacketsReceived: number | undefined; + insertedSamplesForDeceleration: number | undefined; + jitterBufferMinimumDelay: number | undefined; + jitterBufferTargetDelay: number | undefined; + packetsDiscarded: number | undefined; + removedSamplesForAcceleration: number | undefined; + silentConcealedSamples: number | undefined; + totalAudioEnergy: number | undefined; + totalSamplesDuration: number | undefined; + totalSamplesReceived: number | undefined; /* video specific stats */ - firCount: number; - frameHeight: number; - frameWidth: number; - framesAssembledFromMultiplePackets: number; - framesDecoded: number; - framesDropped: number; - framesPerSecond: number; - framesReceived: number; - freezeCount: number; - googTimingFrameInfo: string; - keyFramesDecoded: number; - nackCount: number; - pauseCount: number; - pliCount: number; - totalAssemblyTime: number; - totalDecodeTime: number; - totalFreezesDuration: number; - totalInterFrameDelay: number; - totalPausesDuration: number; - totalProcessingDelay: number; - totalSquaredInterFrameDelay: number; + firCount: number | undefined; + frameHeight: number | undefined; + frameWidth: number | undefined; + framesAssembledFromMultiplePackets: number | undefined; + framesDecoded: number | undefined; + framesDropped: number | undefined; + framesPerSecond: number | undefined; + framesReceived: number | undefined; + freezeCount: number | undefined; + googTimingFrameInfo: string | undefined; + keyFramesDecoded: number | undefined; + nackCount: number | undefined; + pauseCount: number | undefined; + pliCount: number | undefined; + totalAssemblyTime: number | undefined; + totalDecodeTime: number | undefined; + totalFreezesDuration: number | undefined; + totalInterFrameDelay: number | undefined; + totalPausesDuration: number | undefined; + totalProcessingDelay: number | undefined; + totalSquaredInterFrameDelay: number | undefined; } diff --git a/Frontend/library/src/PeerConnectionController/LatencyCalculator.ts b/Frontend/library/src/PeerConnectionController/LatencyCalculator.ts new file mode 100644 index 00000000..c06f4be8 --- /dev/null +++ b/Frontend/library/src/PeerConnectionController/LatencyCalculator.ts @@ -0,0 +1,392 @@ +// Copyright Epic Games, Inc. All Rights Reserved. + +import { AggregatedStats } from './AggregatedStats'; +import { CandidatePairStats } from './CandidatePairStats'; + +/** + * Represents either a: + * - synchronization source: https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpReceiver/getSynchronizationSources + * - contributing source: https://developer.mozilla.org/en-US/docs/Web/API/RTCRtpReceiver/getContributingSources + * Which also (if browser supports it) may optionall contain fields for captureTimestamp + senderCaptureTimeOffset + * if the abs-capture-time RTP header extension is enabled (currently this only works in Chromium based browsers). 
 + */
+class RTCRtpCaptureSource {
+ timestamp: number;
+ captureTimestamp: number;
+ senderCaptureTimeOffset: number;
+}
+
+/**
+ * FrameTimingInfo is a Chromium-specific set of WebRTC stats useful for latency calculation. It is stored in WebRTC stats as `googTimingFrameInfo`.
+ * It is defined as an RTP header extension here: https://webrtc.googlesource.com/src/+/refs/heads/main/docs/native-code/rtp-hdrext/video-timing/README.md
+ * It is defined in source code here: https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/video/video_timing.cc;l=82;drc=8d399817282e3c12ed54eb23ec42a5e418298ec6
+ * It is discussed by its author here: https://github.com/w3c/webrtc-provisional-stats/issues/40#issuecomment-1272916692
+ * In summary, it is a comma-delimited string that contains the following (in this order):
+ * 1) RTP timestamp: the RTP timestamp of the frame
+ * 2) Capture time: timestamp when this frame was captured
+ * 3) Encode start: timestamp when this frame started to be encoded
+ * 4) Encode finish: timestamp when this frame finished encoding
+ * 5) Packetization finish: timestamp when this frame was split into packets and was ready to be sent over the network
+ * 6) Pacer exit: timestamp when the last packet of this frame was sent over the network by the sender
+ * 7) Network timestamp1: place for the SFU to mark when the frame started being forwarded. Application specific.
+ * 8) Network timestamp2: place for the SFU to mark when the frame finished being forwarded. Application specific.
+ * 9) Receive start: timestamp when the first packet of this frame was received
+ * 10) Receive finish: timestamp when the last packet of this frame was received
+ * 11) Decode start: timestamp when the frame was passed to the decoder
+ * 12) Decode finish: timestamp when the frame was decoded
+ * 13) Render time: timestamp of the projected render time for this frame
+ * 14) "is outlier": a flag for whether this frame is bigger in encoded size than the average frame by at least 5x.
+ * 15) "triggered by timer": a flag for whether this report was triggered by the timer (the report is sent every 200ms)
+ */
+export class FrameTimingInfo {
+ rtpTimestamp: number;
+ captureTimestamp: number;
+ encodeStartTimestamp: number;
+ encodeFinishTimestamp: number;
+ packetizerFinishTimestamp: number;
+ pacerExitTimestamp: number;
+ networkTimestamp1: number;
+ networkTimestamp2: number;
+ receiveStart: number;
+ receiveFinish: number;
+ decodeStart: number;
+ decodeFinish: number;
+ renderTime: number;
+ isOutlier: boolean;
+ isTriggeredByTimer: boolean;
+
+ /* Milliseconds between encoder start and finish */
+ encoderLatencyMs: number;
+
+ /* Milliseconds between encode end and packetizer finish time */
+ packetizeLatencyMs: number;
+
+ /* Milliseconds between packetize finish time and pacer sending the frame */
+ pacerLatencyMs: number;
+
+ /* Milliseconds between capture time and pacer exit */
+ captureToSendLatencyMs: number;
+}
+
+/**
+ * Calculates a combination of latency statistics using purely the WebRTC API.
+ */
+export class LatencyCalculator {
+ /* Clock offset between peer clocks cannot always be calculated as it relies on the latest sender reports,
+ * so we store the last time we had a valid clock offset on the assumption that clocks haven't drifted too much since then.
+ */ + private latestSenderRecvClockOffset: number | null = null; + + public calculate(stats: AggregatedStats, receivers: RTCRtpReceiver[]): LatencyInfo { + const latencyInfo = new LatencyInfo(); + + const rttMS: number | null = this.getRTTMs(stats); + + if (rttMS != null) { + latencyInfo.rttMs = rttMS; + + // Calculate sender latency using the first valid video ssrc/csrc + const captureSource: RTCRtpCaptureSource | null = this.getCaptureSource(receivers); + if (captureSource != null) { + const senderLatencyMs = this.calculateSenderLatency(stats, captureSource); + if (senderLatencyMs !== null) { + latencyInfo.senderLatencyMs = senderLatencyMs; + } + } + } + + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totalprocessingdelay + if ( + stats.inboundVideoStats.totalProcessingDelay !== undefined && + stats.inboundVideoStats.framesDecoded !== undefined + ) { + latencyInfo.averageProcessingDelayMs = + (stats.inboundVideoStats.totalProcessingDelay / stats.inboundVideoStats.framesDecoded) * 1000; + } + + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-jitterbufferminimumdelay + if ( + stats.inboundVideoStats.jitterBufferDelay !== undefined && + stats.inboundVideoStats.jitterBufferEmittedCount !== undefined + ) { + latencyInfo.averageJitterBufferDelayMs = + (stats.inboundVideoStats.jitterBufferDelay / + stats.inboundVideoStats.jitterBufferEmittedCount) * + 1000; + } + + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-totaldecodetime + if ( + stats.inboundVideoStats.framesDecoded !== undefined && + stats.inboundVideoStats.totalDecodeTime !== undefined + ) { + latencyInfo.averageDecodeLatencyMs = + (stats.inboundVideoStats.totalDecodeTime / stats.inboundVideoStats.framesDecoded) * 1000; + } + + // https://w3c.github.io/webrtc-stats/#dom-rtcinboundrtpstreamstats-framesassembledfrommultiplepackets + if ( + stats.inboundVideoStats.totalAssemblyTime !== undefined && + stats.inboundVideoStats.framesAssembledFromMultiplePackets !== undefined + ) { + latencyInfo.averageAssemblyDelayMs = + (stats.inboundVideoStats.totalAssemblyTime / + stats.inboundVideoStats.framesAssembledFromMultiplePackets) * + 1000; + } + + // Extract extra Chrome-specific stats like encoding latency + if ( + stats.inboundVideoStats.googTimingFrameInfo !== undefined && + stats.inboundVideoStats.googTimingFrameInfo.length > 0 + ) { + latencyInfo.frameTiming = this.extractFrameTimingInfo( + stats.inboundVideoStats.googTimingFrameInfo + ); + } + + // Calculate E2E latency using video-timing capture to send time + one way network latency + receiver-side latency + if ( + latencyInfo.frameTiming !== undefined && + latencyInfo.frameTiming.captureToSendLatencyMs !== undefined && + latencyInfo.averageProcessingDelayMs !== undefined && + latencyInfo.rttMs !== undefined + ) { + latencyInfo.averageE2ELatency = + latencyInfo.frameTiming.captureToSendLatencyMs + + latencyInfo.rttMs * 0.5 + + latencyInfo.averageProcessingDelayMs; + } + + // Calculate E2E latency as abs-capture-time capture to send latency + one way network latency + receiver-side latency + if ( + latencyInfo.senderLatencyMs != undefined && + latencyInfo.averageProcessingDelayMs !== undefined && + latencyInfo.rttMs !== undefined + ) { + latencyInfo.averageE2ELatency = + latencyInfo.senderLatencyMs + latencyInfo.rttMs * 0.5 + latencyInfo.averageProcessingDelayMs; + } + + return latencyInfo; + } + + private extractFrameTimingInfo(googTimingFrameInfo: string): FrameTimingInfo { + const timingInfo: FrameTimingInfo = new 
FrameTimingInfo(); + + const timingInfoArr: string[] = googTimingFrameInfo.split(','); + + // Should have exactly 15 elements according to: + // https://source.chromium.org/chromium/chromium/src/+/main:third_party/webrtc/api/video/video_timing.cc;l=82;drc=8d399817282e3c12ed54eb23ec42a5e418298ec6 + if (timingInfoArr.length === 15) { + timingInfo.rtpTimestamp = Number.parseInt(timingInfoArr[0]); + timingInfo.captureTimestamp = Number.parseInt(timingInfoArr[1]); + timingInfo.encodeStartTimestamp = Number.parseInt(timingInfoArr[2]); + timingInfo.encodeFinishTimestamp = Number.parseInt(timingInfoArr[3]); + timingInfo.packetizerFinishTimestamp = Number.parseInt(timingInfoArr[4]); + timingInfo.pacerExitTimestamp = Number.parseInt(timingInfoArr[5]); + timingInfo.networkTimestamp1 = Number.parseInt(timingInfoArr[6]); + timingInfo.networkTimestamp2 = Number.parseInt(timingInfoArr[7]); + timingInfo.receiveStart = Number.parseInt(timingInfoArr[8]); + timingInfo.receiveFinish = Number.parseInt(timingInfoArr[9]); + timingInfo.decodeStart = Number.parseInt(timingInfoArr[10]); + timingInfo.decodeFinish = Number.parseInt(timingInfoArr[11]); + timingInfo.renderTime = Number.parseInt(timingInfoArr[12]); + timingInfo.isOutlier = Number.parseInt(timingInfoArr[13]) > 0; + timingInfo.isTriggeredByTimer = Number.parseInt(timingInfoArr[14]) > 0; + + // Calculate some latency stats + timingInfo.encoderLatencyMs = timingInfo.encodeFinishTimestamp - timingInfo.encodeStartTimestamp; + timingInfo.packetizeLatencyMs = + timingInfo.packetizerFinishTimestamp - timingInfo.encodeFinishTimestamp; + timingInfo.pacerLatencyMs = timingInfo.pacerExitTimestamp - timingInfo.packetizerFinishTimestamp; + timingInfo.captureToSendLatencyMs = timingInfo.pacerExitTimestamp - timingInfo.captureTimestamp; + } + + return timingInfo; + } + + private calculateSenderLatency( + stats: AggregatedStats, + captureSource: RTCRtpCaptureSource + ): number | null { + // The calculation performed in this function is as per the procedure defined here: + // https://w3c.github.io/webrtc-extensions/#dom-rtcrtpcontributingsource-sendercapturetimeoffset + + // Get the sender capture in the sender's clock + const senderCaptureTimestamp = captureSource.captureTimestamp + captureSource.senderCaptureTimeOffset; + + let sendRecvClockOffset: number | null = this.calculateSenderReceiverClockOffset(stats); + + // Use latest clock offset if we couldn't calculate one now + if (sendRecvClockOffset == null) { + if (this.latestSenderRecvClockOffset != null) { + sendRecvClockOffset = this.latestSenderRecvClockOffset; + } else { + return null; + } + } else { + this.latestSenderRecvClockOffset = sendRecvClockOffset; + } + + // This brings sender clock roughly inline with recv clock + const recvCaptureTimestampNTP = senderCaptureTimestamp + sendRecvClockOffset; + + // As defined in Chrome source: https://chromium.googlesource.com/external/webrtc/+/master/system_wrappers/include/clock.h#26 + const ntp1970 = 2208988800000; + + const recvCaptureTimestamp = recvCaptureTimestampNTP - ntp1970; + + const senderLatency = captureSource.timestamp - recvCaptureTimestamp; + + return senderLatency; + } + + /** + * Find the first valid ssrc or csrc that has capture time fields present from abs-capture-time header extension. + * @param receivers The RTP receviers this peer connection has. + * @returns A single valid ssrc or csrc that has capture time fields or null if there is none (e.g. in non-chromium browsers it will be null). 
+ */ + private getCaptureSource(receivers: RTCRtpReceiver[]): RTCRtpCaptureSource | null { + // We only want video receivers + receivers = receivers.filter((receiver) => receiver.track.kind === 'video'); + + for (const receiver of receivers) { + // Go through all ssrc and csrc to check for capture timestamp + // Note: Conversion to `any` here is because TS does not have captureTimestamp etc defined in the types + // these fields only exist in Chromium currently. + const sources: any[] = receiver + .getSynchronizationSources() + .concat(receiver.getContributingSources()); + + for (const src of sources) { + if ( + src.captureTimestamp !== undefined && + src.senderCaptureTimeOffset !== undefined && + src.timestamp !== undefined + ) { + const captureSrc = new RTCRtpCaptureSource(); + captureSrc.timestamp = src.timestamp; + captureSrc.captureTimestamp = src.captureTimestamp; + captureSrc.senderCaptureTimeOffset = src.senderCaptureTimeOffset; + return captureSrc; + } + } + } + + return null; + } + + private calculateSenderReceiverClockOffset(stats: AggregatedStats): number | null { + // The calculation performed in this function is as per the procedure defined here: + // https://w3c.github.io/webrtc-extensions/#dom-rtcrtpcontributingsource-sendercapturetimeoffset + + const hasRemoteOutboundVideoStats = + stats.remoteOutboundVideoStats !== undefined && + stats.remoteOutboundVideoStats.timestamp !== undefined && + stats.remoteOutboundVideoStats.remoteTimestamp !== undefined; + + // Note: As of Chrome 132, remote-outbound-rtp stats for video are not yet implemented (audio works). + // This codepath should activate once they do begin to work. + if (!hasRemoteOutboundVideoStats) { + return null; + } + + const remoteStatsArrivedTimestamp = stats.remoteOutboundVideoStats.timestamp; + const remoteStatsSentTimestamp = stats.remoteOutboundVideoStats.remoteTimestamp; + + const rttMs: number | null = this.getRTTMs(stats); + + if ( + remoteStatsArrivedTimestamp !== undefined && + remoteStatsSentTimestamp !== undefined && + rttMs !== null + ) { + const onewayDelay = rttMs * 0.5; + return remoteStatsArrivedTimestamp - (remoteStatsSentTimestamp + onewayDelay); + } + // Could not get stats to calculate sender/receiver clock offset + else { + return null; + } + } + + private getRTTMs(stats: AggregatedStats): number | null { + // Try to get it from the active candidate pair + const activeCandidatePair: CandidatePairStats | null = stats.getActiveCandidatePair(); + if (!!activeCandidatePair && activeCandidatePair.currentRoundTripTime !== undefined) { + const curRTTSeconds = activeCandidatePair.currentRoundTripTime; + return curRTTSeconds * 1000; + } + + // Next try to get it from remote-outbound-rtp video stats + if ( + !!stats.remoteOutboundVideoStats && + stats.remoteOutboundVideoStats.totalRoundTripTime !== undefined && + stats.remoteOutboundVideoStats.roundTripTimeMeasurements !== undefined && + stats.remoteOutboundVideoStats.roundTripTimeMeasurements > 0 + ) { + const avgRttSeconds = + stats.remoteOutboundVideoStats.totalRoundTripTime / + stats.remoteOutboundVideoStats.roundTripTimeMeasurements; + return avgRttSeconds * 1000; + } + + // Next try to get it from remote-outbound-rtp audio stats + if ( + !!stats.remoteOutboundAudioStats && + stats.remoteOutboundAudioStats.totalRoundTripTime !== undefined && + stats.remoteOutboundAudioStats.roundTripTimeMeasurements !== undefined && + stats.remoteOutboundAudioStats.roundTripTimeMeasurements > 0 + ) { + const avgRttSeconds = + 
stats.remoteOutboundAudioStats.totalRoundTripTime /
+ stats.remoteOutboundAudioStats.roundTripTimeMeasurements;
+ return avgRttSeconds * 1000;
+ }
+
+ return null;
+ }
+}
+
+/**
+ * A collection of latency information calculated using the WebRTC API.
+ * Most stats are calculated following the spec:
+ * https://w3c.github.io/webrtc-stats/#dictionary-rtcinboundrtpstreamstats-members
+ */
+export class LatencyInfo {
+ /**
+ * The time taken from the moment a frame is done capturing to the moment it is sent over the network.
+ * Note: This can only be calculated if both offer and answer contain
+ * the RTP header extension for `video-timing` (Chrome only for now)
+ */
+ public senderLatencyMs: number | undefined = undefined;
+
+ /**
+ * The time taken from the moment a frame is done capturing to the moment it is sent over the network.
+ * Note: This can only be calculated if both offer and answer contain
+ * the RTP header extension for `abs-capture-time` (Chrome only for now)
+ */
+ public senderLatencyAbsCaptureTimeMs: number | undefined = undefined;
+
+ /* The round trip time (milliseconds) between each sender->receiver->sender */
+ public rttMs: number | undefined = undefined;
+
+ /* Average time taken (milliseconds) from video packet receipt to post-decode. */
+ public averageProcessingDelayMs: number | undefined = undefined;
+
+ /* Average time taken (milliseconds) inside the jitter buffer (which is post-receipt but pre-decode). */
+ public averageJitterBufferDelayMs: number | undefined = undefined;
+
+ /* Average time taken (milliseconds) to decode a video frame. */
+ public averageDecodeLatencyMs: number | undefined = undefined;
+
+ /* Average time taken (milliseconds) between receipt of the first and last video packet of a frame. */
+ public averageAssemblyDelayMs: number | undefined = undefined;
+
+ /* The sender latency + RTT/2 + processing delay */
+ public averageE2ELatency: number | undefined = undefined;
+
+ /* Timing information about the worst performing frame since the last getStats call (only works on Chrome) */
+ public frameTiming: FrameTimingInfo | undefined = undefined;
+}
diff --git a/Frontend/library/src/PeerConnectionController/OutBoundRTPStats.ts b/Frontend/library/src/PeerConnectionController/OutBoundRTPStats.ts
index 723e9fcc..f6df02c0 100644
--- a/Frontend/library/src/PeerConnectionController/OutBoundRTPStats.ts
+++ b/Frontend/library/src/PeerConnectionController/OutBoundRTPStats.ts
@@ -1,26 +1,60 @@
 // Copyright Epic Games, Inc. All Rights Reserved.
 /**
- * Outbound Video Stats collected from the RTC Stats Report
+ * Outbound RTP stats collected from the RTC Stats Report under `outbound-rtp`.
+ * Wrapper around: https://developer.mozilla.org/en-US/docs/Web/API/RTCOutboundRtpStreamStats
+ * These are stats for video we are sending to a remote peer.
*/ -export class OutBoundVideoStats { +export class OutboundRTPStats { + active: boolean | undefined; + codecId: string | undefined; bytesSent: number; + frameHeight: number | undefined; + frameWidth: number | undefined; + framesEncoded: number | undefined; + framesPerSecond: number | undefined; + framesSent: number | undefined; + headerBytesSent: number; id: string; - localId: string; + keyFramesEncoded: number | undefined; + kind: string; + mediaSourceId: string | undefined; + mid: string | undefined; + nackCount: number | undefined; packetsSent: number; - remoteTimestamp: number; + qpSum: number | undefined; + qualityLimitationDurations: number | undefined; + qualityLimitationReason: string | undefined; + remoteId: string | undefined; + retransmittedBytesSent: number; + rid: string | undefined; + scalabilityMode: string | undefined; + ssrc: string; + targetBitrate: number | undefined; timestamp: number; + totalEncodeTime: number | undefined; + totalEncodeBytesTarget: number | undefined; + totalPacketSendDelay: number | undefined; + transportId: string | undefined; } /** - * Outbound Stats collected from the RTC Stats Report + * Remote outbound stats collected from the RTC Stats Report under `remote-outbound-rtp`. + * Wrapper around: https://developer.mozilla.org/en-US/docs/Web/API/RTCRemoteOutboundRtpStreamStats + * These are stats for media we are receiving from a remote peer. */ -export class OutBoundRTPStats { +export class RemoteOutboundRTPStats { + bytesSent: number | undefined; + codecId: string; + id: string | undefined; kind: string; - bytesSent: number; - id: string; - localId: string; - packetsSent: number; - remoteTimestamp: number; - timestamp: number; + localId: string | undefined; + packetsSent: number | undefined; + remoteTimestamp: number | undefined; + reportsSent: number | undefined; + roundTripTimeMeasurements: number | undefined; + ssrc: string; + timestamp: number | undefined; + totalRoundTripTime: number | undefined; + transportId: string | undefined; } diff --git a/Frontend/library/src/PeerConnectionController/PeerConnectionController.ts b/Frontend/library/src/PeerConnectionController/PeerConnectionController.ts index 007b8445..0942f86c 100644 --- a/Frontend/library/src/PeerConnectionController/PeerConnectionController.ts +++ b/Frontend/library/src/PeerConnectionController/PeerConnectionController.ts @@ -6,6 +6,10 @@ import { AggregatedStats } from './AggregatedStats'; import { parseRtpParameters, splitSections } from 'sdp'; import { RTCUtils } from '../Util/RTCUtils'; import { CodecStats } from './CodecStats'; +import { SDPUtils } from '@epicgames-ps/lib-pixelstreamingcommon-ue5.5'; +import { LatencyCalculator, LatencyInfo } from './LatencyCalculator'; + +export const kAbsCaptureTime = 'http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time'; /** * Handles the Peer Connection @@ -18,6 +22,7 @@ export class PeerConnectionController { updateCodecSelection: boolean; videoTrack: MediaStreamTrack; audioTrack: MediaStreamTrack; + latencyCalculator: LatencyCalculator; /** * Create a new RTC Peer Connection client @@ -27,6 +32,7 @@ export class PeerConnectionController { constructor(options: RTCConfiguration, config: Config, preferredCodec: string) { this.config = config; this.createPeerConnection(options, preferredCodec); + this.latencyCalculator = new LatencyCalculator(); } createPeerConnection(options: RTCConfiguration, preferredCodec: string) { @@ -88,12 +94,26 @@ export class PeerConnectionController { } /** - * + * Receive offer from UE side and process it 
as the remote description of this peer connection */ async receiveOffer(offer: RTCSessionDescriptionInit, config: Config) { Logger.Info('Receive Offer'); + // If UE or JSStreamer did send abs-capture-time RTP header extension to a non-Chrome browser + // then remove it from the SDP because if Firefox detects it in offer or answer it will fail to connect + // due having 15 or more header extensions: https://mailarchive.ietf.org/arch/msg/rtcweb/QRnWNuWzGuLRovWdHkodNP6VOgg/ + if (this.isFirefox()) { + // example: a=extmap:15 http://www.webrtc.org/experiments/rtp-hdrext/abs-capture-time + offer.sdp = offer.sdp.replace( + /^a=extmap:\d+ http:\/\/www\.webrtc\.org\/experiments\/rtp-hdrext\/abs-capture-time\r\n/gm, + '' + ); + } + this.peerConnection?.setRemoteDescription(offer).then(() => { + // Fire event for when remote offer description is set + this.onSetRemoteDescription(offer); + const isLocalhostConnection = location.hostname === 'localhost' || location.hostname === '127.0.0.1'; const isHttpsConnection = location.protocol === 'https:'; @@ -124,10 +144,10 @@ export class PeerConnectionController { return this.peerConnection?.setLocalDescription(Answer); }) .then(() => { - this.onSendWebRTCAnswer(this.peerConnection?.currentLocalDescription); + this.onSetLocalDescription(this.peerConnection?.currentLocalDescription); }) - .catch(() => { - Logger.Error('createAnswer() failed'); + .catch((err) => { + Logger.Error(`createAnswer() failed - ${err}`); }); }); }); @@ -151,25 +171,29 @@ export class PeerConnectionController { * Generate Aggregated Stats and then fire a onVideo Stats event */ generateStats() { - const audioPromise = this.audioTrack - ? this.peerConnection?.getStats(this.audioTrack).then((statsData: RTCStatsReport) => { - this.aggregatedStats.processStats(statsData); - }) - : Promise.resolve(); - const videoPromise = this.videoTrack - ? this.peerConnection?.getStats(this.videoTrack).then((statsData: RTCStatsReport) => { - this.aggregatedStats.processStats(statsData); - }) - : Promise.resolve(); - - Promise.allSettled([audioPromise, videoPromise]).then(() => { + this.peerConnection.getStats().then((statsData: RTCStatsReport) => { + this.aggregatedStats.processStats(statsData); + this.onVideoStats(this.aggregatedStats); + + // Calculate latency using stats and video receivers and then call the handling function + const latencyInfo: LatencyInfo = this.latencyCalculator.calculate( + this.aggregatedStats, + this.peerConnection.getReceivers() + ); + this.onLatencyCalculated(latencyInfo); + // Update the preferred codec selection based on what was actually negotiated if (this.updateCodecSelection && !!this.aggregatedStats.inboundVideoStats.codecId) { // Construct the qualified codec name from the mimetype and fmtp - const codecStats: CodecStats = this.aggregatedStats.codecs.get( + const codecStats: CodecStats | undefined = this.aggregatedStats.codecs.get( this.aggregatedStats.inboundVideoStats.codecId ); + + if (codecStats === undefined) { + return; + } + const codecShortname = codecStats.mimeType.replace('video/', ''); let fullCodecName = codecShortname; if (codecStats.sdpFmtpLine && codecStats.sdpFmtpLine.trim() !== '') { @@ -237,9 +261,20 @@ export class PeerConnectionController { // We use the line 'useinbandfec=1' (which Opus uses) to set our Opus specific audio parameters. mungedSDP = mungedSDP.replace('useinbandfec=1', audioSDP); + // Add abs-capture-time RTP header extension if we have enabled the setting. 
+ // Note: As at Feb 2025, Chromium based browsers are the only ones that support this and + // munging it into the answer in Firefox will cause the connection to fail. + if (this.config.isFlagEnabled(Flags.EnableCaptureTimeExt) && !this.isFirefox()) { + mungedSDP = SDPUtils.addVideoHeaderExtensionToSdp(mungedSDP, kAbsCaptureTime); + } + return mungedSDP; } + isFirefox(): boolean { + return navigator.userAgent.indexOf('Firefox') > 0; + } + /** * When a Ice Candidate is received add to the RTC Peer Connection * @param iceCandidate - RTC Ice Candidate from the Signaling Server @@ -586,6 +621,15 @@ export class PeerConnectionController { // Default Functionality: Do Nothing } + /** + * And override event for when latency info is calculated + * @param latencyInfo - Calculated latency information. + */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + onLatencyCalculated(latencyInfo: LatencyInfo) { + // Default Functionality: Do Nothing + } + /** * Event to send the RTC offer to the Signaling server * @param offer - RTC Offer @@ -596,11 +640,20 @@ export class PeerConnectionController { } /** - * Event to send the RTC Answer to the Signaling server + * Event fired when remote offer description is set. + * @param offer - RTC Offer + */ + // eslint-disable-next-line @typescript-eslint/no-unused-vars + onSetRemoteDescription(offer: RTCSessionDescriptionInit) { + // Default Functionality: Do Nothing + } + + /** + * Event fire when local description answer is set. * @param answer - RTC Answer */ // eslint-disable-next-line @typescript-eslint/no-unused-vars - onSendWebRTCAnswer(answer: RTCSessionDescriptionInit) { + onSetLocalDescription(answer: RTCSessionDescriptionInit) { // Default Functionality: Do Nothing } diff --git a/Frontend/library/src/PixelStreaming/PixelStreaming.ts b/Frontend/library/src/PixelStreaming/PixelStreaming.ts index b13a7555..1fb93a3a 100644 --- a/Frontend/library/src/PixelStreaming/PixelStreaming.ts +++ b/Frontend/library/src/PixelStreaming/PixelStreaming.ts @@ -11,6 +11,7 @@ import { OnScreenKeyboard } from '../UI/OnScreenKeyboard'; import { PixelStreamingEventEmitter, InitialSettingsEvent, + LatencyCalculatedEvent, LatencyTestResultEvent, PixelStreamingEvent, StatsReceivedEvent, @@ -30,7 +31,9 @@ import { DataChannelLatencyTestResultEvent, PlayerCountEvent, WebRtcTCPRelayDetectedEvent, - SubscribeFailedEvent + SubscribeFailedEvent, + WebRtcSdpOfferEvent, + WebRtcSdpAnswerEvent } from '../Util/EventEmitter'; import { WebXRController } from '../WebXR/WebXRController'; import { MessageDirection } from '../UeInstanceMessage/StreamMessageController'; @@ -44,6 +47,7 @@ import { } from '../DataChannel/DataChannelLatencyTestResults'; import { RTCUtils } from '../Util/RTCUtils'; import { IURLSearchParams } from '../Util/IURLSearchParams'; +import { LatencyInfo } from '../PeerConnectionController/LatencyCalculator'; export interface PixelStreamingOverrides { /** The DOM element where Pixel Streaming video and user input event handlers are attached to. @@ -450,21 +454,42 @@ export class PixelStreaming { } /** - * Emit an event on auto connecting + * Internal function to emit an event when auto connecting occurs */ _onWebRtcAutoConnect() { this._eventEmitter.dispatchEvent(new WebRtcAutoConnectEvent()); } /** - * Set up functionality to happen when receiving a webRTC answer + * Internal function to emit an event for when SDP negotiation is fully finished. 
*/
 _onWebRtcSdp() {
 this._eventEmitter.dispatchEvent(new WebRtcSdpEvent());
 }
 /**
- * Emits a StreamLoading event
+ * Internal function to emit an SDP offer after it has been set.
+ */
+ _onWebRtcSdpOffer(offer: RTCSessionDescriptionInit) {
+ this._eventEmitter.dispatchEvent(new WebRtcSdpOfferEvent({ sdp: offer }));
+ }
+
+ /**
+ * Internal function to emit an SDP answer after it has been set.
+ */
+ _onWebRtcSdpAnswer(answer: RTCSessionDescriptionInit) {
+ this._eventEmitter.dispatchEvent(new WebRtcSdpAnswerEvent({ sdp: answer }));
+ }
+
+ /**
+ * Internal function call to emit a `latencyCalculated` event.
+ */
+ _onLatencyCalculated(latencyInfo: LatencyInfo) {
+ this._eventEmitter.dispatchEvent(new LatencyCalculatedEvent({ latencyInfo }));
+ }
+
+ /**
+ * Internal function to emit a StreamLoading event
 */
 _onStreamLoading() {
 this._eventEmitter.dispatchEvent(new StreamLoadingEvent());
diff --git a/Frontend/library/src/Util/EventEmitter.ts b/Frontend/library/src/Util/EventEmitter.ts
index 92d49e60..41de11a6 100644
--- a/Frontend/library/src/Util/EventEmitter.ts
+++ b/Frontend/library/src/Util/EventEmitter.ts
@@ -2,6 +2,7 @@ import { FlagsIds, NumericParametersIds, OptionParametersIds, TextParametersIds
 import { LatencyTestResults } from '../DataChannel/LatencyTestResults';
 import { AggregatedStats } from '../PeerConnectionController/AggregatedStats';
 import { InitialSettings } from '../DataChannel/InitialSettings';
+import { LatencyInfo } from '../pixelstreamingfrontend';
 import { Messages } from '@epicgames-ps/lib-pixelstreamingcommon-ue5.5';
 import { SettingFlag } from '../Config/SettingFlag';
 import { SettingNumber } from '../Config/SettingNumber';
@@ -90,6 +91,36 @@ export class WebRtcSdpEvent extends Event {
 }
 }
+/**
+ * An event that is emitted after the SDP answer is set.
+ */
+export class WebRtcSdpAnswerEvent extends Event {
+ override readonly type: 'webRtcSdpAnswer';
+ readonly data: {
+ /** The sdp answer */
+ sdp: RTCSessionDescriptionInit;
+ };
+ constructor(data: WebRtcSdpAnswerEvent['data']) {
+ super('webRtcSdpAnswer');
+ this.data = data;
+ }
+}
+
+/**
+ * An event that is emitted after the SDP offer is set.
+ */
+export class WebRtcSdpOfferEvent extends Event {
+ override readonly type: 'webRtcSdpOffer';
+ readonly data: {
+ /** The sdp offer */
+ sdp: RTCSessionDescriptionInit;
+ };
+ constructor(data: WebRtcSdpOfferEvent['data']) {
+ super('webRtcSdpOffer');
+ this.data = data;
+ }
+}
+
 /**
 * An event that is emitted when auto connecting.
 */
@@ -382,6 +413,20 @@ export class LatencyTestResultEvent extends Event {
 }
 }
+/**
+ * An event that is emitted every time latency is calculated using the WebRTC stats API.
+ */
+export class LatencyCalculatedEvent extends Event {
+ override readonly type: 'latencyCalculated';
+ readonly data: {
+ latencyInfo: LatencyInfo;
+ };
+ constructor(data: LatencyCalculatedEvent['data']) {
+ super('latencyCalculated');
+ this.data = data;
+ }
+}
+
 /**
 * An event that is emitted when receiving data channel latency test response from server.
* This event is handled by DataChannelLatencyTestController @@ -560,6 +605,8 @@ export type PixelStreamingEvent = | AfkTimedOutEvent | VideoEncoderAvgQPEvent | WebRtcSdpEvent + | WebRtcSdpOfferEvent + | WebRtcSdpAnswerEvent | WebRtcAutoConnectEvent | WebRtcConnectingEvent | WebRtcConnectedEvent @@ -581,6 +628,7 @@ export type PixelStreamingEvent = | StatsReceivedEvent | StreamerListMessageEvent | StreamerIDChangedMessageEvent + | LatencyCalculatedEvent | LatencyTestResultEvent | DataChannelLatencyTestResponseEvent | DataChannelLatencyTestResultEvent diff --git a/Frontend/library/src/WebRtcPlayer/WebRtcPlayerController.ts b/Frontend/library/src/WebRtcPlayer/WebRtcPlayerController.ts index 916532da..42d1d442 100644 --- a/Frontend/library/src/WebRtcPlayer/WebRtcPlayerController.ts +++ b/Frontend/library/src/WebRtcPlayer/WebRtcPlayerController.ts @@ -55,6 +55,7 @@ import { import { IURLSearchParams } from '../Util/IURLSearchParams'; import { IInputController } from '../Inputs/IInputController'; import { GamepadController } from '../Inputs/GamepadController'; +import { LatencyInfo } from '../PeerConnectionController/LatencyCalculator'; /** * Entry point for the WebRTC Player @@ -1075,15 +1076,29 @@ export class WebRtcPlayerController { ); // set up peer connection controller video stats - this.peerConnectionController.onVideoStats = (event: AggregatedStats) => this.handleVideoStats(event); + this.peerConnectionController.onVideoStats = (event: AggregatedStats) => { + this.handleVideoStats(event); + }; + + /* Set event handler for latency information is calculated, handle the event by propogating to the PixelStreaming API */ + this.peerConnectionController.onLatencyCalculated = (latencyInfo: LatencyInfo) => { + this.pixelStreaming._onLatencyCalculated(latencyInfo); + }; /* When the Peer Connection wants to send an offer have it handled */ - this.peerConnectionController.onSendWebRTCOffer = (offer: RTCSessionDescriptionInit) => + this.peerConnectionController.onSendWebRTCOffer = (offer: RTCSessionDescriptionInit) => { this.handleSendWebRTCOffer(offer); + }; - /* When the Peer Connection wants to send an answer have it handled */ - this.peerConnectionController.onSendWebRTCAnswer = (offer: RTCSessionDescriptionInit) => - this.handleSendWebRTCAnswer(offer); + /* Set event handler for when local answer description is set */ + this.peerConnectionController.onSetLocalDescription = (answer: RTCSessionDescriptionInit) => { + this.handleSendWebRTCAnswer(answer); + }; + + /* Set event handler for when remote offer description is set */ + this.peerConnectionController.onSetRemoteDescription = (offer: RTCSessionDescriptionInit) => { + this.pixelStreaming._onWebRtcSdpOffer(offer); + }; /* When the Peer Connection ice candidate is added have it handled */ this.peerConnectionController.onPeerIceCandidate = ( @@ -1484,6 +1499,9 @@ export class WebRtcPlayerController { if (this.isUsingSFU) { this.protocol.sendMessage(MessageHelpers.createMessage(Messages.dataChannelRequest)); } + + // Send answer back to Pixel Streaming main class for event dispatch + this.pixelStreaming._onWebRtcSdpAnswer(answer); } /** diff --git a/Frontend/library/src/__test__/mockRTCPeerConnection.ts b/Frontend/library/src/__test__/mockRTCPeerConnection.ts index a7935e03..1e581c04 100644 --- a/Frontend/library/src/__test__/mockRTCPeerConnection.ts +++ b/Frontend/library/src/__test__/mockRTCPeerConnection.ts @@ -111,7 +111,7 @@ export class MockRTCPeerConnectionImpl implements RTCPeerConnection { throw new Error("Method not implemented."); } 
getReceivers(): RTCRtpReceiver[] { - throw new Error("Method not implemented."); + return []; } getSenders(): RTCRtpSender[] { throw new Error("Method not implemented."); diff --git a/Frontend/library/src/pixelstreamingfrontend.ts b/Frontend/library/src/pixelstreamingfrontend.ts index 221a9dc8..cc5e1a1b 100644 --- a/Frontend/library/src/pixelstreamingfrontend.ts +++ b/Frontend/library/src/pixelstreamingfrontend.ts @@ -19,7 +19,8 @@ export { CandidatePairStats } from './PeerConnectionController/CandidatePairStat export { CandidateStat } from './PeerConnectionController/CandidateStat'; export { DataChannelStats } from './PeerConnectionController/DataChannelStats'; export { InboundAudioStats, InboundVideoStats } from './PeerConnectionController/InboundRTPStats'; -export { OutBoundVideoStats } from './PeerConnectionController/OutBoundRTPStats'; +export { OutboundRTPStats, RemoteOutboundRTPStats } from './PeerConnectionController/OutBoundRTPStats'; +export * from './PeerConnectionController/LatencyCalculator'; export * from './DataChannel/DataChannelLatencyTestResults'; export * from './Util/EventEmitter'; export * from '@epicgames-ps/lib-pixelstreamingcommon-ue5.5'; diff --git a/Frontend/ui-library/src/Application/Application.ts b/Frontend/ui-library/src/Application/Application.ts index e01bd80a..d369bfe0 100644 --- a/Frontend/ui-library/src/Application/Application.ts +++ b/Frontend/ui-library/src/Application/Application.ts @@ -10,7 +10,8 @@ import { Messages, DataChannelLatencyTestResult, OptionParameters, - SettingsChangedEvent + SettingsChangedEvent, + LatencyInfo } from '@epicgames-ps/lib-pixelstreamingfrontend-ue5.5'; import { OverlayBase } from '../Overlay/BaseOverlay'; import { ActionOverlay } from '../Overlay/ActionOverlay'; @@ -350,6 +351,9 @@ export class Application { this.stream.addEventListener('statsReceived', ({ data: { aggregatedStats } }) => this.onStatsReceived(aggregatedStats) ); + this.stream.addEventListener('latencyCalculated', ({ data: { latencyInfo } }) => + this.onLatencyUpdate(latencyInfo) + ); this.stream.addEventListener('latencyTestResult', ({ data: { latencyTimings } }) => this.onLatencyTestResults(latencyTimings) ); @@ -383,7 +387,7 @@ export class Application { /** * Creates the root element for the Pixel Streaming UI. - * Note: This should be called before the Pixel Streaming object or UI features object are created. + * Note: This should be called before the Pixel Streaming object is created. * @param pixelstreaming - The Pixel Streaming object. * @param uiFeaturesElem - The element holding all the custom UI features. * @returns A div with the id #playerUI populated with videoElementParent and uiFeatureElement. @@ -681,6 +685,10 @@ export class Application { this.statsPanel?.handleStats(aggregatedStats); } + onLatencyUpdate(latencyInfo: LatencyInfo) { + this.statsPanel?.handleLatencyInfo(latencyInfo); + } + onLatencyTestResults(latencyTimings: LatencyTestResults) { this.statsPanel?.latencyTest.handleTestResult(latencyTimings); } diff --git a/Frontend/ui-library/src/UI/LatencyTest.ts b/Frontend/ui-library/src/UI/LatencyTest.ts index 67cd1e11..38d8b5b9 100644 --- a/Frontend/ui-library/src/UI/LatencyTest.ts +++ b/Frontend/ui-library/src/UI/LatencyTest.ts @@ -71,22 +71,49 @@ export class LatencyTest { public handleTestResult(latencyTimings: LatencyTestResults) { Logger.Info(JSON.stringify(latencyTimings)); let latencyStatsInnerHTML = ''; - latencyStatsInnerHTML += '
<div>Net latency RTT (ms): ' + latencyTimings.networkLatency + '</div>
'; - latencyStatsInnerHTML += '
<div>UE Encode (ms): ' + latencyTimings.EncodeMs + '</div>
'; - latencyStatsInnerHTML += '
<div>UE Capture (ms): ' + latencyTimings.CaptureToSendMs + '</div>
'; - latencyStatsInnerHTML += - '
<div>Browser send latency (ms): ' + latencyTimings.browserSendLatency + '</div>
'; - latencyStatsInnerHTML += - latencyTimings.frameDisplayDeltaTimeMs && latencyTimings.browserReceiptTimeMs - ? '
<div>Browser receive latency (ms): ' + latencyTimings.frameDisplayDeltaTimeMs + '</div>
' + + if (latencyTimings.networkLatency !== undefined && latencyTimings.networkLatency > 0) { + latencyStatsInnerHTML += '
<div>Net latency RTT (ms): ' + latencyTimings.networkLatency + '</div>
'; + } + + if (latencyTimings.EncodeMs !== undefined && latencyTimings.EncodeMs > 0) { + latencyStatsInnerHTML += '
<div>UE Encode (ms): ' + latencyTimings.EncodeMs + '</div>
'; + } + + if (latencyTimings.CaptureToSendMs !== undefined && latencyTimings.CaptureToSendMs > 0) { + latencyStatsInnerHTML += '
<div>UE Capture (ms): ' + latencyTimings.CaptureToSendMs + '</div>
'; + } + + if (latencyTimings.browserSendLatency !== undefined && latencyTimings.browserSendLatency > 0) { + latencyStatsInnerHTML += + '
<div>Browser send latency (ms): ' + latencyTimings.browserSendLatency + '</div>
'; + } + + if ( + latencyTimings.frameDisplayDeltaTimeMs !== undefined && + latencyTimings.browserReceiptTimeMs !== undefined + ) { + latencyStatsInnerHTML += + latencyTimings.frameDisplayDeltaTimeMs && latencyTimings.browserReceiptTimeMs + ? '
<div>Browser receive latency (ms): ' + + latencyTimings.frameDisplayDeltaTimeMs + + '</div>
' + : ''; + } + + if (latencyTimings.latencyExcludingDecode !== undefined) { + latencyStatsInnerHTML += + '
<div>Total latency (excluding browser) (ms): ' + + latencyTimings.latencyExcludingDecode + + '</div>
'; + } + + if (latencyTimings.endToEndLatency !== undefined) { + latencyStatsInnerHTML += latencyTimings.endToEndLatency + ? '
<div>Total latency (ms): ' + latencyTimings.endToEndLatency + '</div>
' : ''; - latencyStatsInnerHTML += - '
<div>Total latency (excluding browser) (ms): ' + - latencyTimings.latencyExcludingDecode + - '</div>
'; - latencyStatsInnerHTML += latencyTimings.endToEndLatency - ? '
<div>Total latency (ms): ' + latencyTimings.endToEndLatency + '</div>
' - : ''; + } + this.latencyTestResultsElement.innerHTML = latencyStatsInnerHTML; } } diff --git a/Frontend/ui-library/src/UI/StatsPanel.ts b/Frontend/ui-library/src/UI/StatsPanel.ts index b559c715..e1f34c43 100644 --- a/Frontend/ui-library/src/UI/StatsPanel.ts +++ b/Frontend/ui-library/src/UI/StatsPanel.ts @@ -3,6 +3,7 @@ import { LatencyTest } from './LatencyTest'; import { CandidatePairStats, + LatencyInfo, Logger, PixelStreaming, PixelStreamingSettings @@ -10,7 +11,12 @@ import { import { AggregatedStats } from '@epicgames-ps/lib-pixelstreamingfrontend-ue5.5'; import { MathUtils } from '../Util/MathUtils'; import { DataChannelLatencyTest } from './DataChannelLatencyTest'; -import { isSectionEnabled, StatsSections, StatsPanelConfiguration } from './UIConfigurationTypes'; +import { + isSectionEnabled, + StatsSections, + StatsSectionsIds, + StatsPanelConfiguration +} from './UIConfigurationTypes'; /** * A stat structure, an id, the stat string, and the element where it is rendered. @@ -30,7 +36,9 @@ export class StatsPanel { _statsCloseButton: HTMLElement; _statsContentElement: HTMLElement; _statisticsContainer: HTMLElement; + _latencyStatsContainer: HTMLElement; _statsResult: HTMLElement; + _latencyResult: HTMLElement; _config: StatsPanelConfiguration; latencyTest: LatencyTest; @@ -87,22 +95,42 @@ export class StatsPanel { statistics.id = 'statistics'; statistics.classList.add('settingsContainer'); + const latencyStats = document.createElement('section'); + latencyStats.id = 'latencyStats'; + latencyStats.classList.add('settingsContainer'); + const statisticsHeader = document.createElement('div'); statisticsHeader.id = 'statisticsHeader'; statisticsHeader.classList.add('settings-text'); statisticsHeader.classList.add('settingsHeader'); + const latencyStatsHeader = document.createElement('div'); + latencyStatsHeader.id = 'latencyStatsHeader'; + latencyStatsHeader.classList.add('settings-text'); + latencyStatsHeader.classList.add('settingsHeader'); + this._statsContentElement.appendChild(streamToolStats); streamToolStats.appendChild(controlStats); controlStats.appendChild(statistics); + controlStats.appendChild(latencyStats); + statistics.appendChild(statisticsHeader); + latencyStats.appendChild(latencyStatsHeader); + if (isSectionEnabled(this._config, StatsSections.SessionStats)) { - const sessionStats = document.createElement('div'); - sessionStats.innerHTML = StatsSections.SessionStats; - statisticsHeader.appendChild(sessionStats); + const sessionStatsText = document.createElement('div'); + sessionStatsText.innerHTML = StatsSections.SessionStats; + statisticsHeader.appendChild(sessionStatsText); } statistics.appendChild(this.statisticsContainer); + if (isSectionEnabled(this._config, StatsSections.LatencyStats)) { + const latencyStatsText = document.createElement('div'); + latencyStatsText.innerHTML = StatsSections.LatencyStats; + latencyStatsHeader.appendChild(latencyStatsText); + } + latencyStats.appendChild(this.latencyStatsContainer); + if (isSectionEnabled(this._config, StatsSections.LatencyTest)) { controlStats.appendChild(this.latencyTest.rootElement); } @@ -124,6 +152,16 @@ export class StatsPanel { return this._statisticsContainer; } + public get latencyStatsContainer(): HTMLElement { + if (!this._latencyStatsContainer) { + this._latencyStatsContainer = document.createElement('div'); + this._latencyStatsContainer.id = 'latencyStatsContainer'; + this._latencyStatsContainer.classList.add('d-none'); + this._latencyStatsContainer.appendChild(this.latencyResult); + } + return 
this._latencyStatsContainer; + } + public get statsResult(): HTMLElement { if (!this._statsResult) { this._statsResult = document.createElement('div'); @@ -133,6 +171,15 @@ export class StatsPanel { return this._statsResult; } + public get latencyResult(): HTMLElement { + if (!this._latencyResult) { + this._latencyResult = document.createElement('div'); + this._latencyResult.id = 'latencyResult'; + this._latencyResult.classList.add('StatsResult'); + } + return this._latencyResult; + } + public get statsCloseButton(): HTMLElement { if (!this._statsCloseButton) { this._statsCloseButton = document.createElement('div'); @@ -207,7 +254,7 @@ export class StatsPanel { } public handlePlayerCount(playerCount: number) { - this.addOrUpdateStat('PlayerCountStat', 'Players', playerCount.toString()); + this.addOrUpdateSessionStat('PlayerCountStat', 'Players', playerCount.toString()); } /** @@ -222,17 +269,17 @@ export class StatsPanel { // Inbound data const inboundData = MathUtils.formatBytes(stats.inboundVideoStats.bytesReceived, 2); - this.addOrUpdateStat('InboundDataStat', 'Received', inboundData); + this.addOrUpdateSessionStat('InboundDataStat', 'Received', inboundData); // Packets lost const packetsLostStat = Object.prototype.hasOwnProperty.call(stats.inboundVideoStats, 'packetsLost') ? numberFormat.format(stats.inboundVideoStats.packetsLost) : 'Chrome only'; - this.addOrUpdateStat('PacketsLostStat', 'Packets Lost', packetsLostStat); + this.addOrUpdateSessionStat('PacketsLostStat', 'Packets Lost', packetsLostStat); // Bitrate if (stats.inboundVideoStats.bitrate) { - this.addOrUpdateStat( + this.addOrUpdateSessionStat( 'VideoBitrateStat', 'Video Bitrate (kbps)', stats.inboundVideoStats.bitrate.toString() @@ -240,7 +287,7 @@ export class StatsPanel { } if (stats.inboundAudioStats.bitrate) { - this.addOrUpdateStat( + this.addOrUpdateSessionStat( 'AudioBitrateStat', 'Audio Bitrate (kbps)', stats.inboundAudioStats.bitrate.toString() @@ -249,23 +296,23 @@ export class StatsPanel { // Video resolution const resStat = - Object.prototype.hasOwnProperty.call(stats.inboundVideoStats, 'frameWidth') && - stats.inboundVideoStats.frameWidth && - Object.prototype.hasOwnProperty.call(stats.inboundVideoStats, 'frameHeight') && - stats.inboundVideoStats.frameHeight + stats.inboundVideoStats.frameWidth !== undefined && + stats.inboundVideoStats.frameWidth > 0 && + stats.inboundVideoStats.frameHeight !== undefined && + stats.inboundVideoStats.frameHeight > 0 ? stats.inboundVideoStats.frameWidth + 'x' + stats.inboundVideoStats.frameHeight : 'Chrome only'; - this.addOrUpdateStat('VideoResStat', 'Video resolution', resStat); + this.addOrUpdateSessionStat('VideoResStat', 'Video resolution', resStat); // Frames decoded - const framesDecoded = Object.prototype.hasOwnProperty.call(stats.inboundVideoStats, 'framesDecoded') - ? 
numberFormat.format(stats.inboundVideoStats.framesDecoded) - : 'Chrome only'; - this.addOrUpdateStat('FramesDecodedStat', 'Frames Decoded', framesDecoded); + if (stats.inboundVideoStats.framesDecoded !== undefined) { + const framesDecoded = numberFormat.format(stats.inboundVideoStats.framesDecoded); + this.addOrUpdateSessionStat('FramesDecodedStat', 'Frames Decoded', framesDecoded); + } // Framerate if (stats.inboundVideoStats.framesPerSecond) { - this.addOrUpdateStat( + this.addOrUpdateSessionStat( 'FramerateStat', 'Framerate', stats.inboundVideoStats.framesPerSecond.toString() @@ -273,14 +320,16 @@ export class StatsPanel { } // Frames dropped - this.addOrUpdateStat( - 'FramesDroppedStat', - 'Frames dropped', - stats.inboundVideoStats.framesDropped?.toString() - ); + if (stats.inboundVideoStats.framesDropped !== undefined) { + this.addOrUpdateSessionStat( + 'FramesDroppedStat', + 'Frames dropped', + stats.inboundVideoStats.framesDropped.toString() + ); + } if (stats.inboundVideoStats.codecId) { - this.addOrUpdateStat( + this.addOrUpdateSessionStat( 'VideoCodecStat', 'Video codec', // Split the codec to remove the Fmtp line @@ -289,7 +338,7 @@ export class StatsPanel { } if (stats.inboundAudioStats.codecId) { - this.addOrUpdateStat( + this.addOrUpdateSessionStat( 'AudioCodecStat', 'Audio codec', // Split the codec to remove the Fmtp line @@ -298,50 +347,175 @@ export class StatsPanel { } // Store the active candidate pair return a new Candidate pair stat if getActiveCandidate is null - const activeCandidatePair = - stats.getActiveCandidatePair() != null - ? stats.getActiveCandidatePair() - : new CandidatePairStats(); - - // RTT - const netRTT = - Object.prototype.hasOwnProperty.call(activeCandidatePair, 'currentRoundTripTime') && - stats.isNumber(activeCandidatePair.currentRoundTripTime) - ? numberFormat.format(activeCandidatePair.currentRoundTripTime * 1000) - : "Can't calculate"; - this.addOrUpdateStat('RTTStat', 'Net RTT (ms)', netRTT); - - this.addOrUpdateStat('DurationStat', 'Duration', stats.sessionStats.runTime); - - this.addOrUpdateStat( + const activeCandidatePair: CandidatePairStats | null = stats.getActiveCandidatePair(); + + if (activeCandidatePair) { + // RTT + const netRTT = + Object.prototype.hasOwnProperty.call(activeCandidatePair, 'currentRoundTripTime') && + stats.isNumber(activeCandidatePair.currentRoundTripTime) + ? Math.ceil(activeCandidatePair.currentRoundTripTime * 1000).toString() + : "Can't calculate"; + this.addOrUpdateSessionStat('RTTStat', 'Net RTT (ms)', netRTT); + } + + this.addOrUpdateSessionStat('DurationStat', 'Duration', stats.sessionStats.runTime); + + this.addOrUpdateSessionStat( 'ControlsInputStat', 'Controls stream input', stats.sessionStats.controlsStreamInput ); // QP - this.addOrUpdateStat( - 'QPStat', - 'Video quantization parameter', - stats.sessionStats.videoEncoderAvgQP.toString() - ); - - // todo: - //statsText += `
<div>Browser receive to composite (ms): ${stats.inboundVideoStats.receiveToCompositeMs}</div>
`; + if ( + stats.sessionStats.videoEncoderAvgQP !== undefined && + !Number.isNaN(stats.sessionStats.videoEncoderAvgQP) + ) { + this.addOrUpdateSessionStat( + 'QPStat', + 'Video quantization parameter', + stats.sessionStats.videoEncoderAvgQP.toString() + ); + } Logger.Info(`--------- Stats ---------\n ${JSON.stringify(stats)}\n------------------------`); } + public handleLatencyInfo(latencyInfo: LatencyInfo) { + if (latencyInfo.frameTiming !== undefined) { + // Encoder latency + if (latencyInfo.frameTiming.encoderLatencyMs !== undefined) { + this.addOrUpdateLatencyStat( + 'EncodeLatency', + 'Encode latency (ms)', + Math.ceil(latencyInfo.frameTiming.encoderLatencyMs).toString() + ); + } + + // Packetizer latency + if (latencyInfo.frameTiming.packetizeLatencyMs !== undefined) { + this.addOrUpdateLatencyStat( + 'PacketizerLatency', + 'Packetizer latency (ms)', + Math.ceil(latencyInfo.frameTiming.packetizeLatencyMs).toString() + ); + } + + // Pacer latency + if (latencyInfo.frameTiming.pacerLatencyMs !== undefined) { + this.addOrUpdateLatencyStat( + 'PacerLatency', + 'Pacer latency (ms)', + Math.ceil(latencyInfo.frameTiming.pacerLatencyMs).toString() + ); + } + + // Sender latency calculated using timing stats + if (latencyInfo.frameTiming.captureToSendLatencyMs !== undefined) { + this.addOrUpdateLatencyStat( + 'VideoTimingCaptureToSend', + 'Post-capture to send latency (ms)', + Math.ceil(latencyInfo.frameTiming.captureToSendLatencyMs).toString() + ); + } + } + + if (latencyInfo.senderLatencyMs !== undefined) { + this.addOrUpdateLatencyStat( + 'AbsCaptureTimeToSendLatency', + 'Post-capture (abs-ct) to send latency (ms)', + Math.ceil(latencyInfo.senderLatencyMs).toString() + ); + } + + if (latencyInfo.averageAssemblyDelayMs !== undefined) { + this.addOrUpdateLatencyStat( + 'AvgAssemblyDelay', + 'Assembly delay (ms)', + Math.ceil(latencyInfo.averageAssemblyDelayMs).toString() + ); + } + + if (latencyInfo.averageDecodeLatencyMs !== undefined) { + this.addOrUpdateLatencyStat( + 'AvgDecodeDelay', + 'Decode time (ms)', + Math.ceil(latencyInfo.averageDecodeLatencyMs).toString() + ); + } + + if (latencyInfo.averageJitterBufferDelayMs !== undefined) { + this.addOrUpdateLatencyStat( + 'AvgJitterBufferDelay', + 'Jitter buffer (ms)', + Math.ceil(latencyInfo.averageJitterBufferDelayMs).toString() + ); + } + + if (latencyInfo.averageProcessingDelayMs !== undefined) { + this.addOrUpdateLatencyStat( + 'AvgProcessingDelay', + 'Processing delay (ms)', + Math.ceil(latencyInfo.averageProcessingDelayMs).toString() + ); + } + + if (latencyInfo.averageE2ELatency !== undefined) { + this.addOrUpdateLatencyStat( + 'AvgE2ELatency', + 'Total latency (ms)', + Math.ceil(latencyInfo.averageE2ELatency).toString() + ); + } + } + + /** + * Adds a new stat to the stats results in the DOM or updates an exiting stat. + * @param id - The id of the stat to add/update. + * @param stat - The contents of the stat. + */ + public addOrUpdateSessionStat(id: string, statLabel: string, stat: string) { + this.addOrUpdateStat(StatsSections.SessionStats, id, statLabel, stat); + } + + /** + * Adds a new stat to the latency results in the DOM or updates an exiting stat. + * @param id - The id of the stat to add/update. + * @param stat - The contents of the stat. + */ + public addOrUpdateLatencyStat(id: string, statLabel: string, stat: string) { + this.addOrUpdateStat(StatsSections.LatencyStats, id, statLabel, stat); + } + /** * Adds a new stat to the stats results in the DOM or updates an exiting stat. 
+ * @param sectionId - The section to add this stat too. * @param id - The id of the stat to add/update. * @param stat - The contents of the stat. */ - public addOrUpdateStat(id: string, statLabel: string, stat: string) { - if (!isSectionEnabled(this._config, StatsSections.SessionStats)) { + private addOrUpdateStat(sectionId: StatsSectionsIds, id: string, statLabel: string, stat: string) { + if ( + sectionId === StatsSections.SessionStats && + !isSectionEnabled(this._config, StatsSections.SessionStats) + ) { + return; + } + + if ( + sectionId === StatsSections.LatencyStats && + !isSectionEnabled(this._config, StatsSections.LatencyStats) + ) { + return; + } + + // Only support session or latency stats being updated in this function currently + if (sectionId !== StatsSections.SessionStats && sectionId !== StatsSections.LatencyStats) { return; } + const parentElem: HTMLElement = + sectionId === StatsSections.SessionStats ? this.statsResult : this.latencyResult; const statHTML = `${statLabel}: ${stat}`; if (!this.statsMap.has(id)) { @@ -353,7 +527,7 @@ export class StatsPanel { newStat.element = document.createElement('div'); newStat.element.innerHTML = statHTML; // add the stat to the dom - this.statsResult.appendChild(newStat.element); + parentElem.appendChild(newStat.element); this.statsMap.set(id, newStat); } // update the existing stat diff --git a/Frontend/ui-library/src/UI/UIConfigurationTypes.ts b/Frontend/ui-library/src/UI/UIConfigurationTypes.ts index 89b39b33..69f051a4 100644 --- a/Frontend/ui-library/src/UI/UIConfigurationTypes.ts +++ b/Frontend/ui-library/src/UI/UIConfigurationTypes.ts @@ -30,6 +30,7 @@ export type SettingsSectionsIds = (typeof SettingsSections)[SettingsSectionsKeys export class StatsSections { static SessionStats = 'Session Stats' as const; + static LatencyStats = 'Latency Stats' as const; static LatencyTest = 'Latency Test' as const; static DataChannelLatencyTest = 'Data Channel Latency Test' as const; } diff --git a/package-lock.json b/package-lock.json index a0d94a07..55134d1c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -84,7 +84,7 @@ "uuid": "^9.0.0" }, "devDependencies": { - "@playwright/test": "^1.49.0", + "@playwright/test": "^1.49.1", "@types/node": "^20.12.7", "@types/uuid": "^9.0.8" } @@ -222,7 +222,6 @@ } }, "Extras/MinimalStreamTester": { - "name": "prtest", "version": "1.0.0", "license": "ISC", "dependencies": { @@ -231,7 +230,7 @@ "uuid": "^9.0.0" }, "devDependencies": { - "@playwright/test": "^1.49.0", + "@playwright/test": "^1.49.1", "@types/node": "^20.12.7", "@types/uuid": "^9.0.8" } @@ -10723,6 +10722,10 @@ "integrity": "sha512-UtJcAD4yEaGtjPezWuO9wC4nwUnVH/8/Im3yEHQP4b67cXlD/Qr9hdITCU1xDbSEXg2XKNaP8jsReV7vQd00/A==", "dev": true }, + "node_modules/MinimalStreamTester": { + "resolved": "Extras/MinimalStreamTester", + "link": true + }, "node_modules/minimatch": { "version": "9.0.3", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.3.tgz", @@ -12043,10 +12046,6 @@ "node": ">= 0.10" } }, - "node_modules/prtest": { - "resolved": "Extras/MinimalStreamTester", - "link": true - }, "node_modules/pseudomap": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/pseudomap/-/pseudomap-1.0.2.tgz",
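For consumers of the frontend library, a minimal sketch of how the new `latencyCalculated` event introduced by this change could be used (assuming an application that already constructs a `PixelStreaming` instance; the `stream` constant and the `logLatency` helper below are illustrative, and many of the per-frame timing fields are only populated on Chromium-based browsers, as noted in LatencyCalculator.ts):

import { LatencyInfo, PixelStreaming } from '@epicgames-ps/lib-pixelstreamingfrontend-ue5.5';

// An existing PixelStreaming instance created elsewhere by the application.
declare const stream: PixelStreaming;

// Fired each time the LatencyCalculator runs against a fresh getStats() report.
stream.addEventListener('latencyCalculated', ({ data: { latencyInfo } }) => {
    logLatency(latencyInfo);
});

function logLatency(info: LatencyInfo): void {
    // Every field on LatencyInfo is optional, so guard before use.
    if (info.rttMs !== undefined) {
        console.log(`RTT (ms): ${Math.ceil(info.rttMs)}`);
    }
    if (info.averageProcessingDelayMs !== undefined) {
        console.log(`Processing delay (ms): ${Math.ceil(info.averageProcessingDelayMs)}`);
    }
    if (info.averageE2ELatency !== undefined) {
        console.log(`Total latency (ms): ${Math.ceil(info.averageE2ELatency)}`);
    }
    // Chromium-only frame timing, derived from googTimingFrameInfo.
    if (info.frameTiming !== undefined && info.frameTiming.encoderLatencyMs !== undefined) {
        console.log(`Encoder latency (ms): ${Math.ceil(info.frameTiming.encoderLatencyMs)}`);
    }
}

This mirrors what the ui-library does above: Application.ts forwards the event to StatsPanel.handleLatencyInfo, which renders the same fields in the new "Latency Stats" section.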