diff --git a/examples/wgpu_room/src/app.rs b/examples/wgpu_room/src/app.rs index 99001f27..aaeb59db 100644 --- a/examples/wgpu_room/src/app.rs +++ b/examples/wgpu_room/src/app.rs @@ -5,7 +5,9 @@ use crate::{ }; use egui::{Rounding, Stroke}; use livekit::{e2ee::EncryptionType, prelude::*, SimulateScenario}; +use livekit::webrtc::audio_stream::native::NativeAudioStream; use std::collections::HashMap; +use futures::StreamExt; /// The state of the application are saved on app exit and restored on app start. #[derive(serde::Deserialize, serde::Serialize)] @@ -88,8 +90,15 @@ impl LkApp { ); self.video_renderers .insert((participant.identity(), track.sid()), video_renderer); - } else if let RemoteTrack::Audio(_) = track { - // TODO(theomonnom): Once we support media devices, we can play audio tracks here + } else if let RemoteTrack::Audio(ref audio_track) = track { + let rtc_track = audio_track.rtc_track(); + let mut audio_stream = NativeAudioStream::new(rtc_track, 48000, 2); + // Receive the audio frames in a new task + tokio::spawn(async move { + while let Some(frame) = audio_stream.next().await { + println!("Received audio frame {:?}", frame); + } + }); } } RoomEvent::TrackUnsubscribed { diff --git a/webrtc-sys/src/audio_device.cpp b/webrtc-sys/src/audio_device.cpp index 9ae77c18..f3877498 100644 --- a/webrtc-sys/src/audio_device.cpp +++ b/webrtc-sys/src/audio_device.cpp @@ -167,12 +167,14 @@ bool AudioDevice::RecordingIsInitialized() const { int32_t AudioDevice::StartPlayout() { webrtc::MutexLock lock(&mutex_); playing_ = true; + audio_device_buffer_.StartPlayout(); return 0; } int32_t AudioDevice::StopPlayout() { webrtc::MutexLock lock(&mutex_); playing_ = false; + audio_device_buffer_.StopPlayout(); return 0; } @@ -182,10 +184,12 @@ bool AudioDevice::Playing() const { int32_t AudioDevice::StartRecording() { + audio_device_buffer_.StartRecording(); return 0; } int32_t AudioDevice::StopRecording() { + audio_device_buffer_.StopRecording(); return 0; }