diff --git a/.gitignore b/.gitignore index 8202b82014..96d8d1396a 100644 --- a/.gitignore +++ b/.gitignore @@ -72,3 +72,10 @@ /xcodebuild /.vscode !webrtc/* +/tmp.patch +/out-release +/out-debug +/node_modules +/libwebrtc +/args.txt +libwebrtc \ No newline at end of file diff --git a/BUILD.gn b/BUILD.gn index d5289b85d7..e1da40c650 100644 --- a/BUILD.gn +++ b/BUILD.gn @@ -37,7 +37,8 @@ if (!build_with_chromium) { # 'ninja default' and then 'ninja all', the second build should do no work. group("default") { testonly = true - deps = [ ":webrtc" ] + deps = [ ":webrtc","//libwebrtc", ] + if (rtc_build_examples) { deps += [ "examples" ] } @@ -344,7 +345,14 @@ config("common_config") { # See https://reviews.llvm.org/D56731 for details about this # warning. "-Wctad-maybe-unsupported", + # "-I./opencv4" , + "-fexceptions", ] + cflags_cc += [ + "-fexceptions" + ] + # include_dirs += [ "opencv4" ] + # cflags += [ "-I./opencv4" ] } if (build_with_chromium) { @@ -372,7 +380,7 @@ config("common_config") { # "-Wnested-externs", (C/Obj-C only) ] cflags_objc += [ "-Wstrict-prototypes" ] - cflags_cc = [ + cflags_cc += [ "-Wnon-virtual-dtor", # This is enabled for clang; enable for gcc as well. 
diff --git a/DEPS b/DEPS index 68bf5180d9..ed8a0b7053 100644 --- a/DEPS +++ b/DEPS @@ -289,7 +289,7 @@ deps = { 'src/third_party/perfetto': 'https://android.googlesource.com/platform/external/perfetto.git@20b114cd063623e63ef1b0a31167d60081567e51', 'src/third_party/libvpx/source/libvpx': - 'https://chromium.googlesource.com/webm/libvpx.git@27171320f5e36f7b18071bfa1d9616863ca1b4e8', + 'https://chromium.googlesource.com/webm/libvpx.git@7aaffe2df4c9426ab204a272ca5ca52286ca86d4', 'src/third_party/libyuv': 'https://chromium.googlesource.com/libyuv/libyuv.git@77c2121f7e6b8e694d6e908bbbe9be24214097da', 'src/third_party/lss': { diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000000..3972578ec4 --- /dev/null +++ b/NOTICE @@ -0,0 +1,26 @@ +################################################################################### + +The following modifications follow Apache License 2.0 from shiguredo. + +https://github.com/webrtc-sdk/webrtc/commit/dfec53e93a0a1cb93f444caf50f844ec0068c7b7 +https://github.com/webrtc-sdk/webrtc/commit/403b4678543c5d4ac77bd1ea5753c02637b3bb89 +https://github.com/webrtc-sdk/webrtc/commit/77d5d685a90fb4bded17835ae72ec6671b26d696 + +Apache License 2.0 + +Copyright 2019-2021, Wandbox LLC (Original Author) +Copyright 2019-2021, Shiguredo Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
+ +##################################################################################### \ No newline at end of file diff --git a/README.md b/README.md index 332efcc58b..1c862c4ca4 100644 --- a/README.md +++ b/README.md @@ -1,32 +1,47 @@ -**WebRTC is a free, open software project** that provides browsers and mobile -applications with Real-Time Communications (RTC) capabilities via simple APIs. -The WebRTC components have been optimized to best serve this purpose. +# WebRTC-SDK -**Our mission:** To enable rich, high-quality RTC applications to be -developed for the browser, mobile platforms, and IoT devices, and allow them -all to communicate via a common set of protocols. +This repository contains a fork of WebRTC from Google with various improvements. -The WebRTC initiative is a project supported by Google, Mozilla and Opera, -amongst others. +## Main changes -### Development +### All -See [here][native-dev] for instructions on how to get started -developing with the native code. +- Dynamically acquire decoder to mitigate decoder limitations [#25](https://github.com/webrtc-sdk/webrtc/pull/25) +- Support for video simulcast with hardware & software encoders [patch](https://github.com/webrtc-sdk/webrtc/commit/ee030264e2274a2c90548a99b448782049e48fb4) +- Frame cryptor support (for end-to-end encryption) [patch](https://github.com/webrtc-sdk/webrtc/commit/3a2c008529a15fecde5f979a6ebb75c05463d45e) -[Authoritative list](native-api.md) of directories that contain the -native API header files. 
+### Android -### More info +- WrappedVideoDecoderFactory [#74](https://github.com/webrtc-sdk/webrtc/pull/74) - * Official web site: http://www.webrtc.org - * Master source code repo: https://webrtc.googlesource.com/src - * Samples and reference apps: https://github.com/webrtc - * Mailing list: http://groups.google.com/group/discuss-webrtc - * Continuous build: https://ci.chromium.org/p/webrtc/g/ci/console - * [Coding style guide](g3doc/style-guide.md) - * [Code of conduct](CODE_OF_CONDUCT.md) - * [Reporting bugs](docs/bug-reporting.md) - * [Documentation](g3doc/sitemap.md) +### iOS / Mac -[native-dev]: https://webrtc.googlesource.com/src/+/main/docs/native-code/index.md +- Sane audio handling [patch](https://github.com/webrtc-sdk/webrtc/commit/272127d457ab48e36241e82549870405864851f6) + - Do not acquire microphone/permissions unless actively publishing audio + - Ability to bypass voice processing on iOS + - Remove hardcoded limitation of outputting only to right speaker on MacBook Pro +- Desktop capture for Mac [patch](https://github.com/webrtc-sdk/webrtc/commit/8e832d1163644ab504412c9b8f3ba8510d9890d6) + +### Windows + +- Fixed unable to acquire Mic when built-in AEC is enabled [#29](https://github.com/webrtc-sdk/webrtc/pull/29) + +## LICENSE + +- [Google WebRTC](https://chromium.googlesource.com/external/webrtc.git) is licensed under [BSD license](/LICENSE). + +- Contains patches from [shiguredo-webrtc-build](https://github.com/shiguredo-webrtc-build), licensed under [Apache 2.0](/NOTICE). + +- Contains changes from LiveKit, licensed under Apache 2.0. + +## Who is using this project + +- [flutter-webrtc](https://github.com/flutter-webrtc/flutter-webrtc) + +- [LiveKit](https://github.com/livekit) + +- [Membrane Framework](https://github.com/membraneframework/membrane_rtc_engine) + +- [Louper](https://louper.io) + +Are you using WebRTC SDK in your framework or app? Feel free to open a PR and add yourself! 
diff --git a/api/BUILD.gn b/api/BUILD.gn index a0a9563ce0..7afad0bf29 100644 --- a/api/BUILD.gn +++ b/api/BUILD.gn @@ -330,6 +330,7 @@ rtc_library("libjingle_peerconnection_api") { "video:encoded_image", "video:video_bitrate_allocator_factory", "video:video_frame", + "video:yuv_helper", "video:video_rtp_headers", "video_codecs:video_codecs_api", diff --git a/api/crypto/BUILD.gn b/api/crypto/BUILD.gn index 8d041ea059..9249b7716a 100644 --- a/api/crypto/BUILD.gn +++ b/api/crypto/BUILD.gn @@ -16,6 +16,24 @@ group("crypto") { ] } +rtc_library("frame_crypto_transformer") { + visibility = [ "*" ] + sources = [ + "frame_crypto_transformer.cc", + "frame_crypto_transformer.h", + ] + + deps = [ + "//api:frame_transformer_interface", + ] + + if (rtc_build_ssl) { + deps += [ "//third_party/boringssl" ] + } else { + configs += [ ":external_ssl_library" ] + } +} + rtc_library("options") { visibility = [ "*" ] sources = [ diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc new file mode 100644 index 0000000000..06ec888170 --- /dev/null +++ b/api/crypto/frame_crypto_transformer.cc @@ -0,0 +1,727 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "frame_crypto_transformer.h" + +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" +#include "absl/types/variant.h" +#include "api/array_view.h" +#include "common_video/h264/h264_common.h" +#include "modules/rtp_rtcp/source/rtp_format_h264.h" +#include "rtc_base/byte_buffer.h" +#include "rtc_base/logging.h" + +enum class EncryptOrDecrypt { kEncrypt = 0, kDecrypt }; + +#define Success 0 +#define ErrorUnexpected -1 +#define OperationError -2 +#define ErrorDataTooSmall -3 +#define ErrorInvalidAesGcmTagLength -4 + +webrtc::VideoCodecType get_video_codec_type( + webrtc::TransformableFrameInterface* frame) { + auto videoFrame = + static_cast(frame); + return videoFrame->header().codec; +} + +webrtc::H264PacketizationMode get_h264_packetization_mode( + webrtc::TransformableFrameInterface* frame) { + auto video_frame = + static_cast(frame); + const auto& h264_header = absl::get( + video_frame->header().video_type_header); + return h264_header.packetization_mode; +} + +const EVP_AEAD* GetAesGcmAlgorithmFromKeySize(size_t key_size_bytes) { + switch (key_size_bytes) { + case 16: + return EVP_aead_aes_128_gcm(); + case 32: + return EVP_aead_aes_256_gcm(); + default: + return nullptr; + } +} + +const EVP_CIPHER* GetAesCbcAlgorithmFromKeySize(size_t key_size_bytes) { + switch (key_size_bytes) { + case 16: + return EVP_aes_128_cbc(); + case 32: + return EVP_aes_256_cbc(); + default: + return nullptr; + } +} + +inline bool FrameIsH264(webrtc::TransformableFrameInterface* frame, + webrtc::FrameCryptorTransformer::MediaType type) { + switch (type) { + case webrtc::FrameCryptorTransformer::MediaType::kVideoFrame: { + auto videoFrame = + static_cast(frame); + return videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecH264; + } + default: + return false; + } +} + +inline bool NeedsRbspUnescaping(const uint8_t* frameData, size_t 
frameSize) { + for (size_t i = 0; i < frameSize - 3; ++i) { + if (frameData[i] == 0 && frameData[i + 1] == 0 && frameData[i + 2] == 3) + return true; + } + return false; +} + +std::string to_uint8_list(const uint8_t* data, int len) { + std::stringstream ss; + ss << "["; + for (int i = 0; i < len; i++) { + ss << static_cast(data[i]) << ","; + } + ss << "]"; + return ss.str(); +} + +std::string to_hex(const uint8_t* data, int len) { + std::stringstream ss; + ss << std::uppercase << std::hex << std::setfill('0'); + for (int i = 0; i < len; i++) { + ss << std::setw(2) << static_cast(data[i]); + } + return ss.str(); +} + +uint8_t get_unencrypted_bytes(webrtc::TransformableFrameInterface* frame, + webrtc::FrameCryptorTransformer::MediaType type) { + uint8_t unencrypted_bytes = 0; + switch (type) { + case webrtc::FrameCryptorTransformer::MediaType::kAudioFrame: + unencrypted_bytes = 1; + break; + case webrtc::FrameCryptorTransformer::MediaType::kVideoFrame: { + auto videoFrame = + static_cast(frame); + if (videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecAV1) { + unencrypted_bytes = 0; + } else if (videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecVP8) { + unencrypted_bytes = videoFrame->IsKeyFrame() ? 
10 : 3; + } else if (videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecH264) { + rtc::ArrayView date_in = frame->GetData(); + std::vector nalu_indices = + webrtc::H264::FindNaluIndices(date_in.data(), date_in.size()); + + int idx = 0; + for (const auto& index : nalu_indices) { + const uint8_t* slice = date_in.data() + index.payload_start_offset; + webrtc::H264::NaluType nalu_type = + webrtc::H264::ParseNaluType(slice[0]); + switch (nalu_type) { + case webrtc::H264::NaluType::kIdr: + case webrtc::H264::NaluType::kSlice: + unencrypted_bytes = index.payload_start_offset + 2; + RTC_LOG(LS_INFO) + << "NonParameterSetNalu::payload_size: " << index.payload_size + << ", nalu_type " << nalu_type << ", NaluIndex [" << idx++ + << "] offset: " << index.payload_start_offset; + return unencrypted_bytes; + default: + break; + } + } + } + break; + } + default: + break; + } + return unencrypted_bytes; +} + +int DerivePBKDF2KeyFromRawKey(const std::vector raw_key, + const std::vector& salt, + unsigned int optional_length_bits, + std::vector* derived_key) { + size_t key_size_bytes = optional_length_bits / 8; + derived_key->resize(key_size_bytes); + + if (PKCS5_PBKDF2_HMAC((const char*)raw_key.data(), raw_key.size(), + salt.data(), salt.size(), 100000, EVP_sha256(), + key_size_bytes, derived_key->data()) != 1) { + RTC_LOG(LS_ERROR) << "Failed to derive AES key from password."; + return ErrorUnexpected; + } + + RTC_LOG(LS_INFO) << "raw_key " + << to_uint8_list(raw_key.data(), raw_key.size()) << " len " + << raw_key.size() << " slat << " + << to_uint8_list(salt.data(), salt.size()) << " len " + << salt.size() << "\n derived_key " + << to_uint8_list(derived_key->data(), derived_key->size()) + << " len " << derived_key->size(); + + return Success; +} + +int AesGcmEncryptDecrypt(EncryptOrDecrypt mode, + const std::vector raw_key, + const rtc::ArrayView data, + unsigned int tag_length_bytes, + rtc::ArrayView iv, + rtc::ArrayView additional_data, + const EVP_AEAD* aead_alg, 
+ std::vector* buffer) { + bssl::ScopedEVP_AEAD_CTX ctx; + + if (!aead_alg) { + RTC_LOG(LS_ERROR) << "Invalid AES-GCM key size."; + return ErrorUnexpected; + } + + if (!EVP_AEAD_CTX_init(ctx.get(), aead_alg, raw_key.data(), raw_key.size(), + tag_length_bytes, nullptr)) { + RTC_LOG(LS_ERROR) << "Failed to initialize AES-GCM context."; + return OperationError; + } + + size_t len; + int ok; + + if (mode == EncryptOrDecrypt::kDecrypt) { + if (data.size() < tag_length_bytes) { + RTC_LOG(LS_ERROR) << "Data too small for AES-GCM tag."; + return ErrorDataTooSmall; + } + + buffer->resize(data.size() - tag_length_bytes); + + ok = EVP_AEAD_CTX_open(ctx.get(), buffer->data(), &len, buffer->size(), + iv.data(), iv.size(), data.data(), data.size(), + additional_data.data(), additional_data.size()); + } else { + buffer->resize(data.size() + EVP_AEAD_max_overhead(aead_alg)); + + ok = EVP_AEAD_CTX_seal(ctx.get(), buffer->data(), &len, buffer->size(), + iv.data(), iv.size(), data.data(), data.size(), + additional_data.data(), additional_data.size()); + } + + if (!ok) { + RTC_LOG(LS_WARNING) << "Failed to perform AES-GCM operation."; + return OperationError; + } + + buffer->resize(len); + + return Success; +} + +int AesCbcEncryptDecrypt(EncryptOrDecrypt mode, + const std::vector& raw_key, + rtc::ArrayView iv, + const rtc::ArrayView input, + std::vector* output) { + const EVP_CIPHER* cipher = GetAesCbcAlgorithmFromKeySize(raw_key.size()); + RTC_DCHECK(cipher); // Already handled in Init(); + RTC_DCHECK_EQ(EVP_CIPHER_iv_length(cipher), iv.size()); + RTC_DCHECK_EQ(EVP_CIPHER_key_length(cipher), raw_key.size()); + + bssl::ScopedEVP_CIPHER_CTX ctx; + if (!EVP_CipherInit_ex(ctx.get(), cipher, nullptr, + reinterpret_cast(raw_key.data()), + iv.data(), + mode == EncryptOrDecrypt::kEncrypt ? 1 : 0)) { + return OperationError; + } + + // Encrypting needs a block size of space to allow for any padding. + output->resize(input.size() + + (mode == EncryptOrDecrypt::kEncrypt ? 
iv.size() : 0)); + int out_len; + if (!EVP_CipherUpdate(ctx.get(), output->data(), &out_len, input.data(), + input.size())) + return OperationError; + + // Write out the final block plus padding (if any) to the end of the data + // just written. + int tail_len; + if (!EVP_CipherFinal_ex(ctx.get(), output->data() + out_len, &tail_len)) + return OperationError; + + out_len += tail_len; + RTC_CHECK_LE(out_len, static_cast(output->size())); + return Success; +} + +int AesEncryptDecrypt(EncryptOrDecrypt mode, + webrtc::FrameCryptorTransformer::Algorithm algorithm, + const std::vector& raw_key, + rtc::ArrayView iv, + rtc::ArrayView additional_data, + const rtc::ArrayView data, + std::vector* buffer) { + switch (algorithm) { + case webrtc::FrameCryptorTransformer::Algorithm::kAesGcm: { + unsigned int tag_length_bits = 128; + return AesGcmEncryptDecrypt( + mode, raw_key, data, tag_length_bits / 8, iv, additional_data, + GetAesGcmAlgorithmFromKeySize(raw_key.size()), buffer); + } + case webrtc::FrameCryptorTransformer::Algorithm::kAesCbc: + return AesCbcEncryptDecrypt(mode, raw_key, iv, data, buffer); + } +} +namespace webrtc { + +FrameCryptorTransformer::FrameCryptorTransformer( + rtc::Thread* signaling_thread, + const std::string participant_id, + MediaType type, + Algorithm algorithm, + rtc::scoped_refptr key_provider) + : signaling_thread_(signaling_thread), + thread_(rtc::Thread::Create()), + participant_id_(participant_id), + type_(type), + algorithm_(algorithm), + key_provider_(key_provider) { + RTC_DCHECK(key_provider_ != nullptr); + thread_->SetName("FrameCryptorTransformer", this); + thread_->Start(); +} + +FrameCryptorTransformer::~FrameCryptorTransformer() { + thread_->Stop(); +} + +void FrameCryptorTransformer::Transform( + std::unique_ptr frame) { + webrtc::MutexLock lock(&sink_mutex_); + if (sink_callback_ == nullptr && sink_callbacks_.size() == 0) { + RTC_LOG(LS_WARNING) + << "FrameCryptorTransformer::Transform sink_callback_ is NULL"; + return; + } + + // 
do encrypt or decrypt here... + switch (frame->GetDirection()) { + case webrtc::TransformableFrameInterface::Direction::kSender: + RTC_DCHECK(thread_ != nullptr); + thread_->PostTask([frame = std::move(frame), this]() mutable { + encryptFrame(std::move(frame)); + }); + break; + case webrtc::TransformableFrameInterface::Direction::kReceiver: + RTC_DCHECK(thread_ != nullptr); + thread_->PostTask([frame = std::move(frame), this]() mutable { + decryptFrame(std::move(frame)); + }); + break; + case webrtc::TransformableFrameInterface::Direction::kUnknown: + // do nothing + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::Transform() kUnknown"; + break; + } +} + +void FrameCryptorTransformer::encryptFrame( + std::unique_ptr frame) { + bool enabled_cryption = false; + rtc::scoped_refptr sink_callback = nullptr; + { + webrtc::MutexLock lock(&mutex_); + enabled_cryption = enabled_cryption_; + if (type_ == webrtc::FrameCryptorTransformer::MediaType::kAudioFrame) { + sink_callback = sink_callback_; + } else { + sink_callback = sink_callbacks_[frame->GetSsrc()]; + } + } + + if (sink_callback == nullptr) { + RTC_LOG(LS_WARNING) + << "FrameCryptorTransformer::encryptFrame() sink_callback is NULL"; + if (last_enc_error_ != FrameCryptionState::kInternalError) { + last_enc_error_ = FrameCryptionState::kInternalError; + onFrameCryptionStateChanged(last_enc_error_); + } + return; + } + + rtc::ArrayView date_in = frame->GetData(); + if (date_in.size() == 0 || !enabled_cryption) { + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::encryptFrame() " + "date_in.size() == 0 || enabled_cryption == false"; + if(key_provider_->options().discard_frame_when_cryptor_not_ready) { + return; + } + sink_callback->OnTransformedFrame(std::move(frame)); + return; + } + + auto key_handler = key_provider_->options().shared_key + ? 
key_provider_->GetSharedKey(participant_id_) + : key_provider_->GetKey(participant_id_); + + if (key_handler == nullptr || key_handler->GetKeySet(key_index_) == nullptr) { + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::encryptFrame() no keys, or " + "key_index[" + << key_index_ << "] out of range for participant " + << participant_id_; + if (last_enc_error_ != FrameCryptionState::kMissingKey) { + last_enc_error_ = FrameCryptionState::kMissingKey; + onFrameCryptionStateChanged(last_enc_error_); + } + return; + } + + auto key_set = key_handler->GetKeySet(key_index_); + uint8_t unencrypted_bytes = get_unencrypted_bytes(frame.get(), type_); + + rtc::Buffer frame_header(unencrypted_bytes); + for (size_t i = 0; i < unencrypted_bytes; i++) { + frame_header[i] = date_in[i]; + } + + rtc::Buffer frame_trailer(2); + frame_trailer[0] = getIvSize(); + frame_trailer[1] = key_index_; + rtc::Buffer iv = makeIv(frame->GetSsrc(), frame->GetTimestamp()); + + rtc::Buffer payload(date_in.size() - unencrypted_bytes); + for (size_t i = unencrypted_bytes; i < date_in.size(); i++) { + payload[i - unencrypted_bytes] = date_in[i]; + } + + std::vector buffer; + if (AesEncryptDecrypt(EncryptOrDecrypt::kEncrypt, algorithm_, + key_set->encryption_key, iv, frame_header, payload, + &buffer) == Success) { + rtc::Buffer encrypted_payload(buffer.data(), buffer.size()); + rtc::Buffer tag(encrypted_payload.data() + encrypted_payload.size() - 16, + 16); + rtc::Buffer data_without_header; + data_without_header.AppendData(encrypted_payload); + data_without_header.AppendData(iv); + data_without_header.AppendData(frame_trailer); + + rtc::Buffer data_out; + data_out.AppendData(frame_header); + + if (FrameIsH264(frame.get(), type_)) { + H264::WriteRbsp(data_without_header.data(), data_without_header.size(), + &data_out); + } else { + data_out.AppendData(data_without_header); + RTC_CHECK_EQ(data_out.size(), frame_header.size() + + encrypted_payload.size() + iv.size() + + frame_trailer.size()); + } + + 
frame->SetData(data_out); + + if (last_enc_error_ != FrameCryptionState::kOk) { + last_enc_error_ = FrameCryptionState::kOk; + onFrameCryptionStateChanged(last_enc_error_); + } + sink_callback->OnTransformedFrame(std::move(frame)); + } else { + if (last_enc_error_ != FrameCryptionState::kEncryptionFailed) { + last_enc_error_ = FrameCryptionState::kEncryptionFailed; + onFrameCryptionStateChanged(last_enc_error_); + } + RTC_LOG(LS_ERROR) << "FrameCryptorTransformer::encryptFrame() failed"; + } +} + +void FrameCryptorTransformer::decryptFrame( + std::unique_ptr frame) { + bool enabled_cryption = false; + rtc::scoped_refptr sink_callback = nullptr; + { + webrtc::MutexLock lock(&mutex_); + enabled_cryption = enabled_cryption_; + if (type_ == webrtc::FrameCryptorTransformer::MediaType::kAudioFrame) { + sink_callback = sink_callback_; + } else { + sink_callback = sink_callbacks_[frame->GetSsrc()]; + } + } + + if (sink_callback == nullptr) { + RTC_LOG(LS_WARNING) + << "FrameCryptorTransformer::decryptFrame() sink_callback is NULL"; + if (last_dec_error_ != FrameCryptionState::kInternalError) { + last_dec_error_ = FrameCryptionState::kInternalError; + onFrameCryptionStateChanged(last_dec_error_); + } + return; + } + + rtc::ArrayView date_in = frame->GetData(); + + if (date_in.size() == 0 || !enabled_cryption) { + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() " + "date_in.size() == 0 || enabled_cryption == false"; + if(key_provider_->options().discard_frame_when_cryptor_not_ready) { + return; + } + + sink_callback->OnTransformedFrame(std::move(frame)); + return; + } + + auto uncrypted_magic_bytes = key_provider_->options().uncrypted_magic_bytes; + if (uncrypted_magic_bytes.size() > 0 && + date_in.size() >= uncrypted_magic_bytes.size()) { + auto tmp = date_in.subview(date_in.size() - (uncrypted_magic_bytes.size()), + uncrypted_magic_bytes.size()); + auto data = std::vector(tmp.begin(), tmp.end()); + if (uncrypted_magic_bytes == data) { + 
RTC_CHECK_EQ(tmp.size(), uncrypted_magic_bytes.size()); + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::uncrypted_magic_bytes( tmp " + << to_hex(tmp.data(), tmp.size()) << ", magic bytes " + << to_hex(uncrypted_magic_bytes.data(), + uncrypted_magic_bytes.size()) + << ")"; + + // magic bytes detected, this is a non-encrypted frame, skip frame + // decryption. + rtc::Buffer data_out; + data_out.AppendData( + date_in.subview(0, date_in.size() - uncrypted_magic_bytes.size())); + frame->SetData(data_out); + sink_callback->OnTransformedFrame(std::move(frame)); + return; + } + } + + uint8_t unencrypted_bytes = get_unencrypted_bytes(frame.get(), type_); + + rtc::Buffer frame_header(unencrypted_bytes); + for (size_t i = 0; i < unencrypted_bytes; i++) { + frame_header[i] = date_in[i]; + } + + rtc::Buffer frame_trailer(2); + frame_trailer[0] = date_in[date_in.size() - 2]; + frame_trailer[1] = date_in[date_in.size() - 1]; + uint8_t ivLength = frame_trailer[0]; + uint8_t key_index = frame_trailer[1]; + + if (ivLength != getIvSize()) { + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() ivLength[" + << static_cast(ivLength) << "] != getIvSize()[" + << static_cast(getIvSize()) << "]"; + if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { + last_dec_error_ = FrameCryptionState::kDecryptionFailed; + onFrameCryptionStateChanged(last_dec_error_); + } + return; + } + + auto key_handler = key_provider_->options().shared_key + ? 
key_provider_->GetSharedKey(participant_id_) + : key_provider_->GetKey(participant_id_); + + if (0 > key_index || key_index >= key_provider_->options().key_ring_size || key_handler == nullptr || + key_handler->GetKeySet(key_index) == nullptr) { + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() no keys, or " + "key_index[" + << key_index << "] out of range for participant " + << participant_id_; + if (last_dec_error_ != FrameCryptionState::kMissingKey) { + last_dec_error_ = FrameCryptionState::kMissingKey; + onFrameCryptionStateChanged(last_dec_error_); + } + return; + } + + if (last_dec_error_ == kDecryptionFailed && !key_handler->HasValidKey()) { + // if decryption failed and we have an invalid key, + // please try to decrypt with the next new key + return; + } + + auto key_set = key_handler->GetKeySet(key_index); + + rtc::Buffer iv = rtc::Buffer(ivLength); + for (size_t i = 0; i < ivLength; i++) { + iv[i] = date_in[date_in.size() - 2 - ivLength + i]; + } + + rtc::Buffer encrypted_buffer(date_in.size() - unencrypted_bytes); + for (size_t i = unencrypted_bytes; i < date_in.size(); i++) { + encrypted_buffer[i - unencrypted_bytes] = date_in[i]; + } + + if (FrameIsH264(frame.get(), type_) && + NeedsRbspUnescaping(encrypted_buffer.data(), encrypted_buffer.size())) { + encrypted_buffer.SetData( + H264::ParseRbsp(encrypted_buffer.data(), encrypted_buffer.size())); + } + + rtc::Buffer encrypted_payload(encrypted_buffer.size() - ivLength - 2); + for (size_t i = 0; i < encrypted_payload.size(); i++) { + encrypted_payload[i] = encrypted_buffer[i]; + } + + rtc::Buffer tag(encrypted_payload.data() + encrypted_payload.size() - 16, 16); + std::vector buffer; + + int ratchet_count = 0; + auto initialKeyMaterial = key_set->material; + bool decryption_success = false; + if (AesEncryptDecrypt(EncryptOrDecrypt::kDecrypt, algorithm_, + key_set->encryption_key, iv, frame_header, + encrypted_payload, &buffer) == Success) { + decryption_success = true; + } else { + 
RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() failed"; + rtc::scoped_refptr ratcheted_key_set; + auto currentKeyMaterial = key_set->material; + if (key_provider_->options().ratchet_window_size > 0) { + while (ratchet_count < key_provider_->options().ratchet_window_size) { + ratchet_count++; + + RTC_LOG(LS_INFO) << "ratcheting key attempt " << ratchet_count << " of " + << key_provider_->options().ratchet_window_size; + + auto new_material = key_handler->RatchetKeyMaterial(currentKeyMaterial); + ratcheted_key_set = key_handler->DeriveKeys( + new_material, key_provider_->options().ratchet_salt, 128); + + if (AesEncryptDecrypt(EncryptOrDecrypt::kDecrypt, algorithm_, + ratcheted_key_set->encryption_key, iv, + frame_header, encrypted_payload, + &buffer) == Success) { + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() " + "ratcheted to key_index=" + << static_cast(key_index); + decryption_success = true; + // success, so we set the new key + key_handler->SetKeyFromMaterial(new_material, key_index); + key_handler->SetHasValidKey(); + if (last_dec_error_ != FrameCryptionState::kKeyRatcheted) { + last_dec_error_ = FrameCryptionState::kKeyRatcheted; + onFrameCryptionStateChanged(last_dec_error_); + } + break; + } + // for the next ratchet attempt + currentKeyMaterial = new_material; + } + + /* Since the key it is first send and only afterwards actually used for + encrypting, there were situations when the decrypting failed due to the + fact that the received frame was not encrypted yet and ratcheting, of + course, did not solve the problem. So if we fail RATCHET_WINDOW_SIZE + times, we come back to the initial key. 
+ */ + if (!decryption_success || + ratchet_count >= key_provider_->options().ratchet_window_size) { + key_handler->SetKeyFromMaterial(initialKeyMaterial, key_index); + } + } + } + + if (!decryption_success) { + if (key_handler->DecryptionFailure()) { + if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { + last_dec_error_ = FrameCryptionState::kDecryptionFailed; + onFrameCryptionStateChanged(last_dec_error_); + } + } + return; + } + + rtc::Buffer payload(buffer.data(), buffer.size()); + rtc::Buffer data_out; + data_out.AppendData(frame_header); + data_out.AppendData(payload); + frame->SetData(data_out); + + if (last_dec_error_ != FrameCryptionState::kOk) { + last_dec_error_ = FrameCryptionState::kOk; + onFrameCryptionStateChanged(last_dec_error_); + } + sink_callback->OnTransformedFrame(std::move(frame)); +} + +void FrameCryptorTransformer::onFrameCryptionStateChanged( + FrameCryptionState state) { + webrtc::MutexLock lock(&mutex_); + if (observer_) { + RTC_DCHECK(signaling_thread_ != nullptr); + signaling_thread_->PostTask([observer = observer_, state = state, + participant_id = participant_id_]() mutable { + observer->OnFrameCryptionStateChanged(participant_id, state); + }); + } +} + +rtc::Buffer FrameCryptorTransformer::makeIv(uint32_t ssrc, uint32_t timestamp) { + uint32_t send_count = 0; + if (send_counts_.find(ssrc) == send_counts_.end()) { + srand((unsigned)time(NULL)); + send_counts_[ssrc] = floor(rand() * 0xFFFF); + } else { + send_count = send_counts_[ssrc]; + } + rtc::ByteBufferWriter buf; + buf.WriteUInt32(ssrc); + buf.WriteUInt32(timestamp); + buf.WriteUInt32(timestamp - (send_count % 0xFFFF)); + send_counts_[ssrc] = send_count + 1; + + RTC_CHECK_EQ(buf.Length(), getIvSize()); + + return rtc::Buffer(buf.Data(), buf.Length()); +} + +uint8_t FrameCryptorTransformer::getIvSize() { + switch (algorithm_) { + case Algorithm::kAesGcm: + return 12; + case Algorithm::kAesCbc: + return 16; + default: + return 0; + } +} + +} // namespace webrtc diff 
--git a/api/crypto/frame_crypto_transformer.h b/api/crypto/frame_crypto_transformer.h new file mode 100644 index 0000000000..9689ec1593 --- /dev/null +++ b/api/crypto/frame_crypto_transformer.h @@ -0,0 +1,482 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef WEBRTC_FRAME_CRYPTOR_TRANSFORMER_H_ +#define WEBRTC_FRAME_CRYPTOR_TRANSFORMER_H_ + +#include + +#include "api/frame_transformer_interface.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "rtc_base/buffer.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread.h" + +int DerivePBKDF2KeyFromRawKey(const std::vector raw_key, + const std::vector& salt, + unsigned int optional_length_bits, + std::vector* derived_key); + +namespace webrtc { + +const size_t DEFAULT_KEYRING_SIZE = 16; +const size_t MAX_KEYRING_SIZE = 255; + +class ParticipantKeyHandler; + +struct KeyProviderOptions { + bool shared_key; + std::vector ratchet_salt; + std::vector uncrypted_magic_bytes; + int ratchet_window_size; + int failure_tolerance; + // key ring size should be between 1 and 255 + int key_ring_size; + bool discard_frame_when_cryptor_not_ready; + KeyProviderOptions() + : shared_key(false), + ratchet_window_size(0), + failure_tolerance(-1), + key_ring_size(DEFAULT_KEYRING_SIZE), + discard_frame_when_cryptor_not_ready(false) {} + 
KeyProviderOptions(KeyProviderOptions& copy) + : shared_key(copy.shared_key), + ratchet_salt(copy.ratchet_salt), + uncrypted_magic_bytes(copy.uncrypted_magic_bytes), + ratchet_window_size(copy.ratchet_window_size), + failure_tolerance(copy.failure_tolerance), + key_ring_size(copy.key_ring_size) {} +}; + +class KeyProvider : public rtc::RefCountInterface { + public: + virtual bool SetSharedKey(int key_index, std::vector key) = 0; + + virtual const rtc::scoped_refptr GetSharedKey( + const std::string participant_id) = 0; + + virtual const std::vector RatchetSharedKey(int key_index) = 0; + + virtual const std::vector ExportSharedKey(int key_index) const = 0; + + virtual bool SetKey(const std::string participant_id, + int key_index, + std::vector key) = 0; + + virtual const rtc::scoped_refptr GetKey( + const std::string participant_id) const = 0; + + virtual const std::vector RatchetKey( + const std::string participant_id, + int key_index) = 0; + + virtual const std::vector ExportKey(const std::string participant_id, + int key_index) const = 0; + + virtual void SetSifTrailer(const std::vector trailer) = 0; + + virtual KeyProviderOptions& options() = 0; + + protected: + virtual ~KeyProvider() {} +}; + +class ParticipantKeyHandler : public rtc::RefCountInterface { + public: + struct KeySet : public rtc::RefCountInterface { + std::vector material; + std::vector encryption_key; + KeySet(std::vector material, std::vector encryptionKey) + : material(material), encryption_key(encryptionKey) {} + }; + + public: + ParticipantKeyHandler(KeyProvider* key_provider) + : key_provider_(key_provider) { + int key_ring_size = key_provider_->options().key_ring_size; + if(key_ring_size <= 0) { + key_ring_size = DEFAULT_KEYRING_SIZE; + } else if (key_ring_size > (int)MAX_KEYRING_SIZE) { + // Keyring size needs to be between 1 and 256 + key_ring_size = MAX_KEYRING_SIZE; + } + crypto_key_ring_.resize(key_ring_size); + } + + virtual ~ParticipantKeyHandler() = default; + + rtc::scoped_refptr 
Clone() { + auto clone = rtc::make_ref_counted(key_provider_); + clone->crypto_key_ring_ = crypto_key_ring_; + clone->current_key_index_ = current_key_index_; + clone->has_valid_key_ = has_valid_key_; + return clone; + } + + virtual std::vector RatchetKey(int key_index) { + auto key_set = GetKeySet(key_index); + if (!key_set) { + return std::vector(); + } + auto current_material = key_set->material; + std::vector new_material; + if (DerivePBKDF2KeyFromRawKey(current_material, + key_provider_->options().ratchet_salt, 256, + &new_material) != 0) { + return std::vector(); + } + SetKeyFromMaterial(new_material, + key_index != -1 ? key_index : current_key_index_); + SetHasValidKey(); + return new_material; + } + + virtual rtc::scoped_refptr GetKeySet(int key_index) { + webrtc::MutexLock lock(&mutex_); + return crypto_key_ring_[key_index != -1 ? key_index : current_key_index_]; + } + + virtual void SetKey(std::vector password, int key_index) { + SetKeyFromMaterial(password, key_index); + SetHasValidKey(); + } + + std::vector RatchetKeyMaterial( + std::vector current_material) { + std::vector new_material; + if (DerivePBKDF2KeyFromRawKey(current_material, + key_provider_->options().ratchet_salt, 256, + &new_material) != 0) { + return std::vector(); + } + return new_material; + } + + rtc::scoped_refptr DeriveKeys(std::vector password, + std::vector ratchet_salt, + unsigned int optional_length_bits) { + std::vector derived_key; + if (DerivePBKDF2KeyFromRawKey(password, ratchet_salt, optional_length_bits, + &derived_key) == 0) { + return rtc::make_ref_counted(password, derived_key); + } + return nullptr; + } + + bool HasValidKey() { + webrtc::MutexLock lock(&mutex_); + return has_valid_key_; + } + + void SetHasValidKey() { + webrtc::MutexLock lock(&mutex_); + decryption_failure_count_ = 0; + has_valid_key_ = true; + } + + void SetKeyFromMaterial(std::vector password, int key_index) { + webrtc::MutexLock lock(&mutex_); + if (key_index >= 0) { + current_key_index_ = key_index 
% crypto_key_ring_.size(); + } + crypto_key_ring_[current_key_index_] = + DeriveKeys(password, key_provider_->options().ratchet_salt, 128); + } + + bool DecryptionFailure() { + webrtc::MutexLock lock(&mutex_); + if (key_provider_->options().failure_tolerance < 0) { + return false; + } + decryption_failure_count_ += 1; + + if (decryption_failure_count_ > + key_provider_->options().failure_tolerance) { + has_valid_key_ = false; + return true; + } + return false; + } + + private: + bool has_valid_key_ = false; + int decryption_failure_count_ = 0; + mutable webrtc::Mutex mutex_; + int current_key_index_ = 0; + KeyProvider* key_provider_; + std::vector> crypto_key_ring_; +}; + +class DefaultKeyProviderImpl : public KeyProvider { + public: + DefaultKeyProviderImpl(KeyProviderOptions options) : options_(options) {} + ~DefaultKeyProviderImpl() override = default; + + /// Set the shared key. + bool SetSharedKey(int key_index, std::vector key) override { + webrtc::MutexLock lock(&mutex_); + if (options_.shared_key) { + if (keys_.find("shared") == keys_.end()) { + keys_["shared"] = rtc::make_ref_counted(this); + } + + auto key_handler = keys_["shared"]; + key_handler->SetKey(key, key_index); + + for (auto& key_pair : keys_) { + if (key_pair.first != "shared") { + key_pair.second->SetKey(key, key_index); + } + } + return true; + } + return false; + } + + const std::vector RatchetSharedKey(int key_index) override { + webrtc::MutexLock lock(&mutex_); + auto it = keys_.find("shared"); + if (it == keys_.end()) { + return std::vector(); + } + auto new_key = it->second->RatchetKey(key_index); + if (options_.shared_key) { + for (auto& key_pair : keys_) { + if (key_pair.first != "shared") { + key_pair.second->SetKey(new_key, key_index); + } + } + } + return new_key; + } + + const std::vector ExportSharedKey(int key_index) const override { + webrtc::MutexLock lock(&mutex_); + auto it = keys_.find("shared"); + if (it == keys_.end()) { + return std::vector(); + } + auto key_set = 
it->second->GetKeySet(key_index); + if (key_set) { + return key_set->material; + } + return std::vector(); + } + + const rtc::scoped_refptr GetSharedKey( + const std::string participant_id) override { + webrtc::MutexLock lock(&mutex_); + if (options_.shared_key && keys_.find("shared") != keys_.end()) { + auto shared_key_handler = keys_["shared"]; + if (keys_.find(participant_id) != keys_.end()) { + return keys_[participant_id]; + } else { + auto key_handler_clone = shared_key_handler->Clone(); + keys_[participant_id] = key_handler_clone; + return key_handler_clone; + } + } + return nullptr; + } + + /// Set the key at the given index. + bool SetKey(const std::string participant_id, + int index, + std::vector key) override { + webrtc::MutexLock lock(&mutex_); + + if (keys_.find(participant_id) == keys_.end()) { + keys_[participant_id] = + rtc::make_ref_counted(this); + } + + auto key_handler = keys_[participant_id]; + key_handler->SetKey(key, index); + return true; + } + + const rtc::scoped_refptr GetKey( + const std::string participant_id) const override { + webrtc::MutexLock lock(&mutex_); + + if (keys_.find(participant_id) == keys_.end()) { + return nullptr; + } + + return keys_.find(participant_id)->second; + } + + const std::vector RatchetKey(const std::string participant_id, + int key_index) override { + auto key_handler = GetKey(participant_id); + if (key_handler) { + return key_handler->RatchetKey(key_index); + } + return std::vector(); + } + + const std::vector ExportKey(const std::string participant_id, + int key_index) const override { + auto key_handler = GetKey(participant_id); + if (key_handler) { + auto key_set = key_handler->GetKeySet(key_index); + if (key_set) { + return key_set->material; + } + } + return std::vector(); + } + + void SetSifTrailer(const std::vector trailer) override { + webrtc::MutexLock lock(&mutex_); + options_.uncrypted_magic_bytes = trailer; + } + + KeyProviderOptions& options() override { return options_; } + + private: + 
mutable webrtc::Mutex mutex_; + KeyProviderOptions options_; + std::unordered_map> + keys_; +}; + +enum FrameCryptionState { + kNew = 0, + kOk, + kEncryptionFailed, + kDecryptionFailed, + kMissingKey, + kKeyRatcheted, + kInternalError, +}; + +class FrameCryptorTransformerObserver : public rtc::RefCountInterface { + public: + virtual void OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState error) = 0; + + protected: + virtual ~FrameCryptorTransformerObserver() {} +}; + +class RTC_EXPORT FrameCryptorTransformer + : public rtc::RefCountedObject { + public: + enum class MediaType { + kAudioFrame = 0, + kVideoFrame, + }; + + enum class Algorithm { + kAesGcm = 0, + kAesCbc, + }; + + explicit FrameCryptorTransformer( + rtc::Thread* signaling_thread, + const std::string participant_id, + MediaType type, + Algorithm algorithm, + rtc::scoped_refptr key_provider); + ~FrameCryptorTransformer(); + virtual void RegisterFrameCryptorTransformerObserver( + rtc::scoped_refptr observer) { + webrtc::MutexLock lock(&mutex_); + observer_ = observer; + } + + virtual void UnRegisterFrameCryptorTransformerObserver() { + webrtc::MutexLock lock(&mutex_); + observer_ = nullptr; + } + + virtual void SetKeyIndex(int index) { + webrtc::MutexLock lock(&mutex_); + key_index_ = index; + } + + virtual int key_index() const { return key_index_; } + + virtual void SetEnabled(bool enabled) { + webrtc::MutexLock lock(&mutex_); + enabled_cryption_ = enabled; + } + virtual bool enabled() const { + webrtc::MutexLock lock(&mutex_); + return enabled_cryption_; + } + virtual const std::string participant_id() const { return participant_id_; } + + protected: + virtual void RegisterTransformedFrameCallback( + rtc::scoped_refptr callback) override { + webrtc::MutexLock lock(&sink_mutex_); + sink_callback_ = callback; + } + virtual void UnregisterTransformedFrameCallback() override { + webrtc::MutexLock lock(&sink_mutex_); + sink_callback_ = nullptr; + } + virtual void 
RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr callback, + uint32_t ssrc) override { + webrtc::MutexLock lock(&sink_mutex_); + sink_callbacks_[ssrc] = callback; + } + virtual void UnregisterTransformedFrameSinkCallback(uint32_t ssrc) override { + webrtc::MutexLock lock(&sink_mutex_); + auto it = sink_callbacks_.find(ssrc); + if (it != sink_callbacks_.end()) { + sink_callbacks_.erase(it); + } + } + + virtual void Transform( + std::unique_ptr frame) override; + + private: + void encryptFrame(std::unique_ptr frame); + void decryptFrame(std::unique_ptr frame); + void onFrameCryptionStateChanged(FrameCryptionState error); + rtc::Buffer makeIv(uint32_t ssrc, uint32_t timestamp); + uint8_t getIvSize(); + + private: + TaskQueueBase* const signaling_thread_; + std::unique_ptr thread_; + std::string participant_id_; + mutable webrtc::Mutex mutex_; + mutable webrtc::Mutex sink_mutex_; + bool enabled_cryption_ RTC_GUARDED_BY(mutex_) = false; + MediaType type_; + Algorithm algorithm_; + rtc::scoped_refptr sink_callback_; + std::map> + sink_callbacks_; + int key_index_ = 0; + std::map send_counts_; + rtc::scoped_refptr key_provider_; + rtc::scoped_refptr observer_; + FrameCryptionState last_enc_error_ = FrameCryptionState::kNew; + FrameCryptionState last_dec_error_ = FrameCryptionState::kNew; +}; + +} // namespace webrtc + +#endif // WEBRTC_FRAME_CRYPTOR_TRANSFORMER_H_ diff --git a/api/frame_transformer_interface.h b/api/frame_transformer_interface.h index 2f7b139f9f..b7c03da468 100644 --- a/api/frame_transformer_interface.h +++ b/api/frame_transformer_interface.h @@ -61,6 +61,8 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface { virtual VideoFrameMetadata Metadata() const = 0; virtual void SetMetadata(const VideoFrameMetadata&) = 0; + + virtual const RTPVideoHeader& header () const = 0; }; // Extends the TransformableFrameInterface to expose audio-specific information. 
diff --git a/api/media_stream_interface.cc b/api/media_stream_interface.cc index e07907917b..5362522262 100644 --- a/api/media_stream_interface.cc +++ b/api/media_stream_interface.cc @@ -18,6 +18,10 @@ const char* const MediaStreamTrackInterface::kVideoKind = const char* const MediaStreamTrackInterface::kAudioKind = cricket::kMediaTypeAudio; +bool VideoTrackInterface::should_receive() const { + return true; +} + VideoTrackInterface::ContentHint VideoTrackInterface::content_hint() const { return ContentHint::kNone; } diff --git a/api/media_stream_interface.h b/api/media_stream_interface.h index 9d336739e4..6d9b4aa6ca 100644 --- a/api/media_stream_interface.h +++ b/api/media_stream_interface.h @@ -188,6 +188,8 @@ class RTC_EXPORT VideoTrackInterface virtual VideoTrackSourceInterface* GetSource() const = 0; + virtual void set_should_receive(bool should_receive) {} + virtual bool should_receive() const; virtual ContentHint content_hint() const; virtual void set_content_hint(ContentHint hint) {} diff --git a/api/peer_connection_interface.h b/api/peer_connection_interface.h index 6ce4650e5f..06476ef86b 100644 --- a/api/peer_connection_interface.h +++ b/api/peer_connection_interface.h @@ -691,6 +691,15 @@ class RTC_EXPORT PeerConnectionInterface : public rtc::RefCountInterface { // The burst interval of the pacer, see TaskQueuePacedSender constructor. absl::optional pacer_burst_interval; + // When this flag is set, ports not bound to any specific network interface + // will be used, in addition to normal ports bound to the enumerated + // interfaces. Without this flag, these "any address" ports would only be + // used when network enumeration fails or is disabled. But under certain + // conditions, these ports may succeed where others fail, so they may allow + // the application to work in a wider variety of environments, at the expense + // of having to allocate additional candidates. 
+ bool enable_any_address_ports = false; + // // Don't forget to update operator== if adding something. // diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn index c8deac37e9..3e53f041c2 100644 --- a/api/video/BUILD.gn +++ b/api/video/BUILD.gn @@ -79,6 +79,19 @@ rtc_library("video_frame") { absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } +rtc_library("yuv_helper") { + visibility = [ "*" ] + sources = [ + "yuv_helper.cc", + "yuv_helper.h", + ] + + deps = [ + "../../rtc_base/system:rtc_export", + "//third_party/libyuv", + ] +} + if (is_android) { java_cpp_enum("video_frame_enums") { sources = [ "video_frame_buffer.h" ] diff --git a/api/video/yuv_helper.cc b/api/video/yuv_helper.cc new file mode 100644 index 0000000000..eab9126183 --- /dev/null +++ b/api/video/yuv_helper.cc @@ -0,0 +1,416 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "yuv_helper.h" + +#include "libyuv/convert.h" +#include "libyuv/convert_argb.h" +#include "libyuv/convert_from_argb.h" +#include "libyuv/row.h" +#include "third_party/libyuv/include/libyuv.h" +#include "video_rotation.h" + +namespace webrtc { + +int I420Rotate(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + VideoRotation mode) { + return libyuv::I420Rotate(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height, + static_cast(mode)); +} + +int I420ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_uv, + dst_stride_uv, width, height); +} + +int I420ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::I420ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, width, + height); +} + +int I420ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height) { + return libyuv::I420ToBGRA(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_bgra, dst_stride_bgra, width, + height); +} + +int I420ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int 
src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I420ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I420ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height) { + return libyuv::I420ToRGBA(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgba, dst_stride_rgba, width, + height); +} + +int I420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return libyuv::I420ToRGB24(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, width, + height); +} + +int I420Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + libyuv::FilterMode filtering) { + return libyuv::I420Scale(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, src_width, src_height, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, + dst_stride_v, dst_width, dst_height, filtering); +} + +int ARGBToI420(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::ARGBToI420(src_argb, src_stride_argb, dst_y, dst_stride_y, + dst_u, dst_stride_u, dst_v, dst_stride_v, width, + height); +} + +int ABGRToI420(const uint8_t* src_abgr, + int 
src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::ABGRToI420(src_abgr, src_stride_abgr, dst_y, dst_stride_y, + dst_u, dst_stride_u, dst_v, dst_stride_v, width, + height); +} + +int ARGBToRGB24(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return libyuv::ARGBToRGB24(src_argb, src_stride_argb, dst_rgb24, + dst_stride_rgb24, width, height); +} + +int NV12ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::NV12ToI420(src_y, src_stride_y, src_uv, src_stride_uv, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, + dst_stride_v, width, height); +} + +int I444ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::I444ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height); +} + +int I422ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::I422ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height); +} + +int I010ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + 
const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::I010ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height); +} + +int NV12ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::NV12ToARGB(src_y, src_stride_y, src_uv, src_stride_uv, + dst_argb, dst_stride_argb, width, height); +} + +int NV12ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::NV12ToABGR(src_y, src_stride_y, src_uv, src_stride_uv, + dst_abgr, dst_stride_abgr, width, height); +} + +int I444ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I444ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I444ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I444ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I422ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::I422ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, 
dst_argb, dst_stride_argb, width, + height); +} + +int I422ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I422ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I010ToARGB(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::I010ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, width, + height); +} + +int I010ToABGR(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I010ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int ABGRToNV12(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return libyuv::ABGRToNV12(src_abgr, src_stride_abgr, dst_y, dst_stride_y, + dst_uv, dst_stride_uv, width, height); +} + +int ARGBToNV12(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return libyuv::ARGBToNV12(src_argb, src_stride_argb, dst_y, dst_stride_y, + dst_uv, dst_stride_uv, width, height); +} + +} // namespace webrtc diff --git a/api/video/yuv_helper.h b/api/video/yuv_helper.h new file mode 100644 index 0000000000..5e86fb378b --- /dev/null +++ b/api/video/yuv_helper.h @@ -0,0 +1,316 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "libyuv/convert.h" +#include "rtc_base/system/rtc_export.h" +#include "stdint.h" +#include "third_party/libyuv/include/libyuv.h" +#include "video_rotation.h" + +namespace webrtc { + +RTC_EXPORT int I420Rotate(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + VideoRotation mode); + +RTC_EXPORT int I420ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +RTC_EXPORT int I420ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int I420ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height); + +RTC_EXPORT int I420ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int 
height); + +RTC_EXPORT int I420ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height); + +RTC_EXPORT int I420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +RTC_EXPORT int I420Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + libyuv::FilterMode filtering); + +RTC_EXPORT int ARGBToI420(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int ABGRToI420(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int ARGBToRGB24(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +RTC_EXPORT int NV12ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int I444ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); 
+ +RTC_EXPORT int I422ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int I010ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int NV12ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int NV12ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I444ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I444ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I422ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int I422ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I010ToARGB(const uint16_t* src_y, + int src_stride_y, + const uint16_t* 
src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int I010ToABGR(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int ABGRToNV12(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +RTC_EXPORT int ARGBToNV12(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +} // namespace webrtc diff --git a/api/video_codecs/video_encoder_factory.h b/api/video_codecs/video_encoder_factory.h index d28a2a4035..bf19540914 100644 --- a/api/video_codecs/video_encoder_factory.h +++ b/api/video_codecs/video_encoder_factory.h @@ -85,13 +85,8 @@ class VideoEncoderFactory { virtual CodecSupport QueryCodecSupport( const SdpVideoFormat& format, absl::optional scalability_mode) const { - // Default implementation, query for supported formats and check if the - // specified format is supported. Returns false if scalability_mode is - // specified. 
CodecSupport codec_support; - if (!scalability_mode) { - codec_support.is_supported = format.IsCodecInList(GetSupportedFormats()); - } + codec_support.is_supported = format.IsCodecInList(GetSupportedFormats()); return codec_support; } diff --git a/audio/audio_send_stream.cc b/audio/audio_send_stream.cc index e0ad1a8bd6..7e033849ff 100644 --- a/audio/audio_send_stream.cc +++ b/audio/audio_send_stream.cc @@ -424,6 +424,11 @@ void AudioSendStream::SetMuted(bool muted) { channel_send_->SetInputMute(muted); } +bool AudioSendStream::GetMuted() { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + return channel_send_->InputMute(); +} + webrtc::AudioSendStream::Stats AudioSendStream::GetStats() const { return GetStats(true); } diff --git a/audio/audio_send_stream.h b/audio/audio_send_stream.h index 6cda9c3d52..422c815e38 100644 --- a/audio/audio_send_stream.h +++ b/audio/audio_send_stream.h @@ -96,6 +96,7 @@ class AudioSendStream final : public webrtc::AudioSendStream, int payload_frequency, int event, int duration_ms) override; + bool GetMuted() override; void SetMuted(bool muted) override; webrtc::AudioSendStream::Stats GetStats() const override; webrtc::AudioSendStream::Stats GetStats( diff --git a/audio/audio_state.cc b/audio/audio_state.cc index 6f20e7b128..3a21d9b3a9 100644 --- a/audio/audio_state.cc +++ b/audio/audio_state.cc @@ -98,14 +98,26 @@ void AudioState::AddSendingStream(webrtc::AudioSendStream* stream, UpdateAudioTransportWithSendingStreams(); // Make sure recording is initialized; start recording if enabled. - auto* adm = config_.audio_device_module.get(); - if (!adm->Recording()) { - if (adm->InitRecording() == 0) { - if (recording_enabled_) { - adm->StartRecording(); + if (ShouldRecord()) { + auto* adm = config_.audio_device_module.get(); + if (!adm->Recording()) { + if (adm->InitRecording() == 0) { + if (recording_enabled_) { + + // TODO: Verify if the following windows only logic is still required. 
+#if defined(WEBRTC_WIN) + if (adm->BuiltInAECIsAvailable() && !adm->Playing()) { + if (!adm->PlayoutIsInitialized()) { + adm->InitPlayout(); + } + adm->StartPlayout(); + } +#endif + adm->StartRecording(); + } + } else { + RTC_DLOG_F(LS_ERROR) << "Failed to initialize recording."; } - } else { - RTC_DLOG_F(LS_ERROR) << "Failed to initialize recording."; } } } @@ -115,7 +127,8 @@ void AudioState::RemoveSendingStream(webrtc::AudioSendStream* stream) { auto count = sending_streams_.erase(stream); RTC_DCHECK_EQ(1, count); UpdateAudioTransportWithSendingStreams(); - if (sending_streams_.empty()) { + + if (!ShouldRecord()) { config_.audio_device_module->StopRecording(); } } @@ -143,7 +156,7 @@ void AudioState::SetRecording(bool enabled) { if (recording_enabled_ != enabled) { recording_enabled_ = enabled; if (enabled) { - if (!sending_streams_.empty()) { + if (ShouldRecord()) { config_.audio_device_module->StartRecording(); } } else { @@ -203,6 +216,39 @@ void AudioState::UpdateNullAudioPollerState() { null_audio_poller_.Stop(); } } + +void AudioState::OnMuteStreamChanged() { + + auto* adm = config_.audio_device_module.get(); + bool should_record = ShouldRecord(); + + if (should_record && !adm->Recording()) { + if (adm->InitRecording() == 0) { + adm->StartRecording(); + } + } else if (!should_record && adm->Recording()) { + adm->StopRecording(); + } +} + +bool AudioState::ShouldRecord() { + // no streams to send + if (sending_streams_.empty()) { + return false; + } + + int stream_count = sending_streams_.size(); + + int muted_count = 0; + for (const auto& kv : sending_streams_) { + if (kv.first->GetMuted()) { + muted_count++; + } + } + + return muted_count != stream_count; +} + } // namespace internal rtc::scoped_refptr AudioState::Create( diff --git a/audio/audio_state.h b/audio/audio_state.h index 88aaaa3697..f21cca771e 100644 --- a/audio/audio_state.h +++ b/audio/audio_state.h @@ -47,6 +47,8 @@ class AudioState : public webrtc::AudioState { void 
SetStereoChannelSwapping(bool enable) override; + void OnMuteStreamChanged() override; + AudioDeviceModule* audio_device_module() { RTC_DCHECK(config_.audio_device_module); return config_.audio_device_module.get(); @@ -64,6 +66,9 @@ class AudioState : public webrtc::AudioState { void UpdateAudioTransportWithSendingStreams(); void UpdateNullAudioPollerState() RTC_RUN_ON(&thread_checker_); + // Returns true when at least 1 stream exists and all streams are not muted. + bool ShouldRecord(); + SequenceChecker thread_checker_; SequenceChecker process_thread_checker_{SequenceChecker::kDetached}; const webrtc::AudioState::Config config_; diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc index e6435ad1b1..99405b1c78 100644 --- a/audio/channel_receive.cc +++ b/audio/channel_receive.cc @@ -357,7 +357,6 @@ void ChannelReceive::OnReceivedPayloadData( void ChannelReceive::InitFrameTransformerDelegate( rtc::scoped_refptr frame_transformer) { RTC_DCHECK(frame_transformer); - RTC_DCHECK(!frame_transformer_delegate_); RTC_DCHECK(worker_thread_->IsCurrent()); // Pass a callback to ChannelReceive::OnReceivedPayloadData, to be called by @@ -920,11 +919,9 @@ void ChannelReceive::SetAssociatedSendChannel( void ChannelReceive::SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - // Depending on when the channel is created, the transformer might be set - // twice. Don't replace the delegate if it was already initialized. 
- if (!frame_transformer || frame_transformer_delegate_) { - RTC_DCHECK_NOTREACHED() << "Not setting the transformer?"; - return; + + if(frame_transformer_delegate_ && frame_transformer) { + frame_transformer_delegate_->Reset(); } InitFrameTransformerDelegate(std::move(frame_transformer)); diff --git a/audio/channel_send.cc b/audio/channel_send.cc index 91a17d4d72..8d33fa7107 100644 --- a/audio/channel_send.cc +++ b/audio/channel_send.cc @@ -98,6 +98,8 @@ class ChannelSend : public ChannelSendInterface, // Muting, Volume and Level. void SetInputMute(bool enable) override; + bool InputMute() const override; + // Stats. ANAStats GetANAStatistics() const override; @@ -161,8 +163,6 @@ class ChannelSend : public ChannelSendInterface, size_t payloadSize, int64_t absolute_capture_timestamp_ms) override; - bool InputMute() const; - int32_t SendRtpAudio(AudioFrameType frameType, uint8_t payloadType, uint32_t rtp_timestamp, diff --git a/audio/channel_send.h b/audio/channel_send.h index 08d3f92f1d..d969452185 100644 --- a/audio/channel_send.h +++ b/audio/channel_send.h @@ -95,6 +95,8 @@ class ChannelSendInterface { virtual bool SendTelephoneEventOutband(int event, int duration_ms) = 0; virtual void OnBitrateAllocation(BitrateAllocationUpdate update) = 0; virtual int GetTargetBitrate() const = 0; + + virtual bool InputMute() const = 0; virtual void SetInputMute(bool muted) = 0; virtual void ProcessAndEncodeAudio( diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h index 9c2fad652f..7e73ab2ce6 100644 --- a/call/audio_send_stream.h +++ b/call/audio_send_stream.h @@ -190,6 +190,7 @@ class AudioSendStream : public AudioSender { int event, int duration_ms) = 0; + virtual bool GetMuted() = 0; virtual void SetMuted(bool muted) = 0; virtual Stats GetStats() const = 0; diff --git a/call/audio_state.h b/call/audio_state.h index 79fb5cf981..85f04758dd 100644 --- a/call/audio_state.h +++ b/call/audio_state.h @@ -59,6 +59,9 @@ class AudioState : public rtc::RefCountInterface { 
virtual void SetStereoChannelSwapping(bool enable) = 0; + // Notify the AudioState that a stream updated its mute state. + virtual void OnMuteStreamChanged() = 0; + static rtc::scoped_refptr Create( const AudioState::Config& config); diff --git a/media/base/media_channel.h b/media/base/media_channel.h index 378042f1b2..f66ee7769a 100644 --- a/media/base/media_channel.h +++ b/media/base/media_channel.h @@ -997,6 +997,8 @@ class VideoMediaReceiveChannelInterface : public MediaReceiveChannelInterface { bool nack_enabled, webrtc::RtcpMode rtcp_mode, absl::optional rtx_time) = 0; + virtual void StartReceive(uint32_t ssrc) {} + virtual void StopReceive(uint32_t ssrc) {} }; } // namespace cricket diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc index 83581bf9fd..14deb20eac 100644 --- a/media/engine/webrtc_video_engine.cc +++ b/media/engine/webrtc_video_engine.cc @@ -642,18 +642,15 @@ WebRtcVideoEngine::GetRtpHeaderExtensions() const { webrtc::RtpExtension::kVideoContentTypeUri, webrtc::RtpExtension::kVideoTimingUri, webrtc::RtpExtension::kColorSpaceUri, webrtc::RtpExtension::kMidUri, - webrtc::RtpExtension::kRidUri, webrtc::RtpExtension::kRepairedRidUri}) { + webrtc::RtpExtension::kRidUri, webrtc::RtpExtension::kRepairedRidUri, + // "WebRTC-DependencyDescriptorAdvertised" + webrtc::RtpExtension::kDependencyDescriptorUri}) { result.emplace_back(uri, id++, webrtc::RtpTransceiverDirection::kSendRecv); } result.emplace_back(webrtc::RtpExtension::kGenericFrameDescriptorUri00, id++, IsEnabled(trials_, "WebRTC-GenericDescriptorAdvertised") ? webrtc::RtpTransceiverDirection::kSendRecv : webrtc::RtpTransceiverDirection::kStopped); - result.emplace_back( - webrtc::RtpExtension::kDependencyDescriptorUri, id++, - IsEnabled(trials_, "WebRTC-DependencyDescriptorAdvertised") - ? 
webrtc::RtpTransceiverDirection::kSendRecv - : webrtc::RtpTransceiverDirection::kStopped); result.emplace_back( webrtc::RtpExtension::kVideoLayersAllocationUri, id++, @@ -928,6 +925,24 @@ void WebRtcVideoChannel::RequestEncoderSwitch( } } +void WebRtcVideoChannel::StartReceive(uint32_t ssrc) { + RTC_DCHECK_RUN_ON(&thread_checker_); + WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); + if (!stream) { + return; + } + stream->StartStream(); +} + +void WebRtcVideoChannel::StopReceive(uint32_t ssrc) { + RTC_DCHECK_RUN_ON(&thread_checker_); + WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); + if (!stream) { + return; + } + stream->StopStream(); +} + bool WebRtcVideoChannel::ApplyChangedParams( const ChangedSendParameters& changed_params) { RTC_DCHECK_RUN_ON(&thread_checker_); @@ -3182,6 +3197,17 @@ void WebRtcVideoChannel::WebRtcVideoReceiveStream::SetRecvParameters( } } +void WebRtcVideoChannel::WebRtcVideoReceiveStream::StartStream() { + if (stream_) { + stream_->Start(); + } +} +void WebRtcVideoChannel::WebRtcVideoReceiveStream::StopStream() { + if (stream_) { + stream_->Stop(); + } +} + void WebRtcVideoChannel::WebRtcVideoReceiveStream::RecreateReceiveStream() { RTC_DCHECK(stream_); absl::optional base_minimum_playout_delay_ms; diff --git a/media/engine/webrtc_video_engine.h b/media/engine/webrtc_video_engine.h index 2cdd05ffd6..a3e433f002 100644 --- a/media/engine/webrtc_video_engine.h +++ b/media/engine/webrtc_video_engine.h @@ -270,6 +270,8 @@ class WebRtcVideoChannel : public VideoMediaChannel, webrtc::RtcpMode rtcp_mode, absl::optional rtx_time) override; + void StartReceive(uint32_t ssrc) override; + void StopReceive(uint32_t ssrc) override; private: class WebRtcVideoReceiveStream; @@ -537,6 +539,9 @@ class WebRtcVideoChannel : public VideoMediaChannel, void SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer); + + void StartStream(); + void StopStream(); void SetLocalSsrc(uint32_t local_ssrc); void 
UpdateRtxSsrc(uint32_t ssrc); diff --git a/media/engine/webrtc_voice_engine.cc b/media/engine/webrtc_voice_engine.cc index 55cebbaa54..35e2657876 100644 --- a/media/engine/webrtc_voice_engine.cc +++ b/media/engine/webrtc_voice_engine.cc @@ -2285,6 +2285,9 @@ bool WebRtcVoiceMediaChannel::MuteStream(uint32_t ssrc, bool muted) { ap->set_output_will_be_muted(all_muted); } + // Notify the AudioState that the mute state has updated. + engine_->audio_state()->OnMuteStreamChanged(); + return true; } diff --git a/media/engine/webrtc_voice_engine.h b/media/engine/webrtc_voice_engine.h index f5d8080723..a75a707788 100644 --- a/media/engine/webrtc_voice_engine.h +++ b/media/engine/webrtc_voice_engine.h @@ -93,6 +93,8 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { absl::optional GetAudioDeviceStats() override; + // Moved to public so WebRtcVoiceMediaChannel can access it. + webrtc::AudioState* audio_state(); private: // Every option that is "set" will be applied. Every option not "set" will be @@ -105,7 +107,6 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { webrtc::AudioDeviceModule* adm(); webrtc::AudioProcessing* apm() const; - webrtc::AudioState* audio_state(); std::vector CollectCodecs( const std::vector& specs) const; diff --git a/modules/audio_device/audio_device_data_observer.cc b/modules/audio_device/audio_device_data_observer.cc index 0524830327..88a8301c4f 100644 --- a/modules/audio_device/audio_device_data_observer.cc +++ b/modules/audio_device/audio_device_data_observer.cc @@ -307,6 +307,10 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { } #endif // WEBRTC_IOS + int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const override { + return impl_->SetAudioDeviceSink(sink); + } + protected: rtc::scoped_refptr impl_; AudioDeviceDataObserver* legacy_observer_ = nullptr; diff --git a/modules/audio_device/audio_device_generic.h b/modules/audio_device/audio_device_generic.h index 41e24eb3b0..0585129de4 100644 --- 
a/modules/audio_device/audio_device_generic.h +++ b/modules/audio_device/audio_device_generic.h @@ -135,6 +135,10 @@ class AudioDeviceGeneric { virtual int GetRecordAudioParameters(AudioParameters* params) const; #endif // WEBRTC_IOS + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink) { return -1; } + virtual int32_t GetPlayoutDevice() const { return -1; } + virtual int32_t GetRecordingDevice() const { return -1; } + virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0; virtual ~AudioDeviceGeneric() {} diff --git a/modules/audio_device/audio_device_impl.cc b/modules/audio_device/audio_device_impl.cc index 092b98f2bf..80e208ceec 100644 --- a/modules/audio_device/audio_device_impl.cc +++ b/modules/audio_device/audio_device_impl.cc @@ -72,15 +72,17 @@ namespace webrtc { rtc::scoped_refptr AudioDeviceModule::Create( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory) { + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing) { RTC_DLOG(LS_INFO) << __FUNCTION__; - return AudioDeviceModule::CreateForTest(audio_layer, task_queue_factory); + return AudioDeviceModule::CreateForTest(audio_layer, task_queue_factory, bypass_voice_processing); } // static rtc::scoped_refptr AudioDeviceModule::CreateForTest( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory) { + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing) { RTC_DLOG(LS_INFO) << __FUNCTION__; // The "AudioDeviceModule::kWindowsCoreAudio2" audio layer has its own @@ -93,7 +95,7 @@ rtc::scoped_refptr AudioDeviceModule::CreateForTest( // Create the generic reference counted (platform independent) implementation. auto audio_device = rtc::make_ref_counted( - audio_layer, task_queue_factory); + audio_layer, task_queue_factory, bypass_voice_processing); // Ensure that the current platform is supported. 
if (audio_device->CheckPlatform() == -1) { @@ -116,8 +118,13 @@ rtc::scoped_refptr AudioDeviceModule::CreateForTest( AudioDeviceModuleImpl::AudioDeviceModuleImpl( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory) - : audio_layer_(audio_layer), audio_device_buffer_(task_queue_factory) { + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing) + : audio_layer_(audio_layer), + #if defined(WEBRTC_IOS) + bypass_voice_processing_(bypass_voice_processing), + #endif + audio_device_buffer_(task_queue_factory) { RTC_DLOG(LS_INFO) << __FUNCTION__; } @@ -280,7 +287,7 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() { #if defined(WEBRTC_IOS) if (audio_layer == kPlatformDefaultAudio) { audio_device_.reset( - new ios_adm::AudioDeviceIOS(/*bypass_voice_processing=*/false)); + new ios_adm::AudioDeviceIOS(/*bypass_voice_processing=*/bypass_voice_processing_)); RTC_LOG(LS_INFO) << "iPhone Audio APIs will be utilized."; } // END #if defined(WEBRTC_IOS) @@ -937,6 +944,27 @@ int AudioDeviceModuleImpl::GetRecordAudioParameters( } #endif // WEBRTC_IOS +int32_t AudioDeviceModuleImpl::SetAudioDeviceSink(AudioDeviceSink* sink) const { + RTC_LOG(LS_INFO) << __FUNCTION__ << "(" << sink << ")"; + int32_t ok = audio_device_->SetAudioDeviceSink(sink); + RTC_LOG(LS_INFO) << "output: " << ok; + return ok; +} + +int32_t AudioDeviceModuleImpl::GetPlayoutDevice() const { + RTC_LOG(LS_INFO) << __FUNCTION__; + int32_t r = audio_device_->GetPlayoutDevice(); + RTC_LOG(LS_INFO) << "output: " << r; + return r; +} + +int32_t AudioDeviceModuleImpl::GetRecordingDevice() const { + RTC_LOG(LS_INFO) << __FUNCTION__; + int32_t r = audio_device_->GetRecordingDevice(); + RTC_LOG(LS_INFO) << "output: " << r; + return r; +} + AudioDeviceModuleImpl::PlatformType AudioDeviceModuleImpl::Platform() const { RTC_LOG(LS_INFO) << __FUNCTION__; return platform_type_; diff --git a/modules/audio_device/audio_device_impl.h b/modules/audio_device/audio_device_impl.h index 
45f73dcd65..40ffa4d303 100644 --- a/modules/audio_device/audio_device_impl.h +++ b/modules/audio_device/audio_device_impl.h @@ -43,7 +43,8 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { int32_t AttachAudioBuffer(); AudioDeviceModuleImpl(AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory); + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing = false); ~AudioDeviceModuleImpl() override; // Retrieve the currently utilized audio layer @@ -145,6 +146,10 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { int GetRecordAudioParameters(AudioParameters* params) const override; #endif // WEBRTC_IOS + int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const override; + int32_t GetPlayoutDevice() const override; + int32_t GetRecordingDevice() const override; + #if defined(WEBRTC_ANDROID) // Only use this acccessor for test purposes on Android. AudioManager* GetAndroidAudioManagerForTest() { @@ -165,7 +170,9 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { AudioLayer audio_layer_; PlatformType platform_type_ = kPlatformNotSupported; bool initialized_ = false; -#if defined(WEBRTC_ANDROID) +#if defined(WEBRTC_IOS) + bool bypass_voice_processing_; +#elif defined(WEBRTC_ANDROID) // Should be declared first to ensure that it outlives other resources. std::unique_ptr audio_manager_android_; #endif diff --git a/modules/audio_device/include/audio_device.h b/modules/audio_device/include/audio_device.h index 936ee6cb04..58019cc24f 100644 --- a/modules/audio_device/include/audio_device.h +++ b/modules/audio_device/include/audio_device.h @@ -21,6 +21,15 @@ namespace webrtc { class AudioDeviceModuleForTest; +// Sink for callbacks related to an audio device. 
+class AudioDeviceSink { + public: + virtual ~AudioDeviceSink() = default; + + // input/output devices updated or default device changed + virtual void OnDevicesUpdated() = 0; +}; + class AudioDeviceModule : public rtc::RefCountInterface { public: enum AudioLayer { @@ -56,12 +65,14 @@ class AudioDeviceModule : public rtc::RefCountInterface { // Creates a default ADM for usage in production code. static rtc::scoped_refptr Create( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory); + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing = false); // Creates an ADM with support for extra test methods. Don't use this factory // in production code. static rtc::scoped_refptr CreateForTest( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory); + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing = false); // Retrieve the currently utilized audio layer virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const = 0; @@ -171,6 +182,10 @@ class AudioDeviceModule : public rtc::RefCountInterface { virtual int GetRecordAudioParameters(AudioParameters* params) const = 0; #endif // WEBRTC_IOS + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const { return -1; } + virtual int32_t GetPlayoutDevice() const { return -1; } + virtual int32_t GetRecordingDevice() const { return -1; } + protected: ~AudioDeviceModule() override {} }; diff --git a/modules/audio_device/mac/audio_device_mac.cc b/modules/audio_device/mac/audio_device_mac.cc index ed7b0e4669..0a371737b3 100644 --- a/modules/audio_device/mac/audio_device_mac.cc +++ b/modules/audio_device/mac/audio_device_mac.cc @@ -119,8 +119,6 @@ AudioDeviceMac::AudioDeviceMac() _twoDevices(true), _doStop(false), _doStopRec(false), - _macBookPro(false), - _macBookProPanRight(false), _captureLatencyUs(0), _renderLatencyUs(0), _captureDelayUs(0), @@ -277,8 +275,11 @@ AudioDeviceGeneric::InitStatus AudioDeviceMac::Init() { // but now must be explicitly specified. 
HAL would otherwise try to use the // main thread to issue notifications. AudioObjectPropertyAddress propertyAddress = { - kAudioHardwarePropertyRunLoop, kAudioObjectPropertyScopeGlobal, - kAudioObjectPropertyElementMaster}; + kAudioHardwarePropertyRunLoop, + kAudioObjectPropertyScopeGlobal, + kAudioObjectPropertyElementMaster + }; + CFRunLoopRef runLoop = NULL; UInt32 size = sizeof(CFRunLoopRef); int aoerr = AudioObjectSetPropertyData( @@ -294,22 +295,15 @@ AudioDeviceGeneric::InitStatus AudioDeviceMac::Init() { WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener( kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); - // Determine if this is a MacBook Pro - _macBookPro = false; - _macBookProPanRight = false; - char buf[128]; - size_t length = sizeof(buf); - memset(buf, 0, length); + // Listen for default output device change. + propertyAddress.mSelector = kAudioHardwarePropertyDefaultOutputDevice; + WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); - int intErr = sysctlbyname("hw.model", buf, &length, NULL, 0); - if (intErr != 0) { - RTC_LOG(LS_ERROR) << "Error in sysctlbyname(): " << err; - } else { - RTC_LOG(LS_VERBOSE) << "Hardware model: " << buf; - if (strncmp(buf, "MacBookPro", 10) == 0) { - _macBookPro = true; - } - } + // Listen for default input device change. + propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice; + WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); _initialized = true; @@ -337,9 +331,21 @@ int32_t AudioDeviceMac::Terminate() { OSStatus err = noErr; int retVal = 0; + // Remove listeners for global scope. 
AudioObjectPropertyAddress propertyAddress = { - kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, - kAudioObjectPropertyElementMaster}; + kAudioHardwarePropertyDevices, // selector + kAudioObjectPropertyScopeGlobal, // scope + kAudioObjectPropertyElementMaster // element + }; + + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); + + propertyAddress.mSelector = kAudioHardwarePropertyDefaultOutputDevice; + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); + + propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice; WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); @@ -789,6 +795,14 @@ int16_t AudioDeviceMac::PlayoutDevices() { MaxNumberDevices); } +int32_t AudioDeviceMac::GetPlayoutDevice() const { + if (_outputDeviceIsSpecified) { + return _outputDeviceIndex; + } + + return 0; +} + int32_t AudioDeviceMac::SetPlayoutDevice(uint16_t index) { MutexLock lock(&mutex_); @@ -830,13 +844,11 @@ int32_t AudioDeviceMac::PlayoutDeviceName(uint16_t index, } memset(name, 0, kAdmMaxDeviceNameSize); - - if (guid != NULL) { - memset(guid, 0, kAdmMaxGuidSize); - } + memset(guid, 0, kAdmMaxGuidSize); return GetDeviceName(kAudioDevicePropertyScopeOutput, index, - rtc::ArrayView(name, kAdmMaxDeviceNameSize)); + rtc::ArrayView(name, kAdmMaxDeviceNameSize), + rtc::ArrayView(guid, kAdmMaxGuidSize)); } int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index, @@ -855,7 +867,8 @@ int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index, } return GetDeviceName(kAudioDevicePropertyScopeInput, index, - rtc::ArrayView(name, kAdmMaxDeviceNameSize)); + rtc::ArrayView(name, kAdmMaxDeviceNameSize), + rtc::ArrayView(guid, kAdmMaxGuidSize)); } int16_t AudioDeviceMac::RecordingDevices() { @@ -864,6 +877,14 @@ int16_t 
AudioDeviceMac::RecordingDevices() { MaxNumberDevices); } +int32_t AudioDeviceMac::GetRecordingDevice() const { + if (_inputDeviceIsSpecified) { + return _inputDeviceIndex; + } + + return 0; +} + int32_t AudioDeviceMac::SetRecordingDevice(uint16_t index) { if (_recIsInitialized) { return -1; @@ -979,34 +1000,8 @@ int32_t AudioDeviceMac::InitPlayout() { _renderDeviceIsAlive = 1; _doStop = false; - // The internal microphone of a MacBook Pro is located under the left speaker - // grille. When the internal speakers are in use, we want to fully stereo - // pan to the right. AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyDataSource, kAudioDevicePropertyScopeOutput, 0}; - if (_macBookPro) { - _macBookProPanRight = false; - Boolean hasProperty = - AudioObjectHasProperty(_outputDeviceID, &propertyAddress); - if (hasProperty) { - UInt32 dataSource = 0; - size = sizeof(dataSource); - WEBRTC_CA_LOG_WARN(AudioObjectGetPropertyData( - _outputDeviceID, &propertyAddress, 0, NULL, &size, &dataSource)); - - if (dataSource == 'ispk') { - _macBookProPanRight = true; - RTC_LOG(LS_VERBOSE) - << "MacBook Pro using internal speakers; stereo panning right"; - } else { - RTC_LOG(LS_VERBOSE) << "MacBook Pro not using internal speakers"; - } - - // Add a listener to determine if the status changes. - WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener( - _outputDeviceID, &propertyAddress, &objectListenerProc, this)); - } - } // Get current stream description propertyAddress.mSelector = kAudioDevicePropertyStreamFormat; @@ -1297,7 +1292,7 @@ int32_t AudioDeviceMac::StartRecording() { while (CaptureWorkerThread()) { } }, - "CaptureWorkerThread", + "Audio_CaptureWorkerThread", rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); OSStatus err = noErr; @@ -1390,7 +1385,11 @@ int32_t AudioDeviceMac::StopRecording() { // Remove listeners. 
AudioObjectPropertyAddress propertyAddress = { - kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeInput, 0}; + kAudioDevicePropertyStreamFormat, // selector + kAudioDevicePropertyScopeInput, // scope + 0, // element + }; + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( _inputDeviceID, &propertyAddress, &objectListenerProc, this)); @@ -1434,7 +1433,7 @@ int32_t AudioDeviceMac::StartPlayout() { while (RenderWorkerThread()) { } }, - "RenderWorkerThread", + "Audio_RenderWorkerThread", rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); if (_twoDevices || !_recording) { @@ -1503,7 +1502,11 @@ int32_t AudioDeviceMac::StopPlayout() { // Remove listeners. AudioObjectPropertyAddress propertyAddress = { - kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeOutput, 0}; + kAudioDevicePropertyStreamFormat, // selector + kAudioDevicePropertyScopeOutput, // scope + 0, // element + }; + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( _outputDeviceID, &propertyAddress, &objectListenerProc, this)); @@ -1511,16 +1514,6 @@ int32_t AudioDeviceMac::StopPlayout() { WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( _outputDeviceID, &propertyAddress, &objectListenerProc, this)); - if (_macBookPro) { - Boolean hasProperty = - AudioObjectHasProperty(_outputDeviceID, &propertyAddress); - if (hasProperty) { - propertyAddress.mSelector = kAudioDevicePropertyDataSource; - WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( - _outputDeviceID, &propertyAddress, &objectListenerProc, this)); - } - } - _playIsInitialized = false; _playing = false; @@ -1548,8 +1541,11 @@ int32_t AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope, OSStatus err = noErr; AudioObjectPropertyAddress propertyAddress = { - kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, - kAudioObjectPropertyElementMaster}; + kAudioHardwarePropertyDevices, + kAudioObjectPropertyScopeGlobal, + kAudioObjectPropertyElementMaster, + }; + UInt32 size = 0; 
WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyDataSize( kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size)); @@ -1648,7 +1644,8 @@ int32_t AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope, int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, const uint16_t index, - rtc::ArrayView name) { + rtc::ArrayView name, + rtc::ArrayView guid) { OSStatus err = noErr; AudioDeviceID deviceIds[MaxNumberDevices]; @@ -1685,10 +1682,9 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, isDefaultDevice = true; } } - AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceName, scope, 0}; - + rtc::SimpleStringBuilder guid_ss(guid); if (isDefaultDevice) { std::array devName; UInt32 len = devName.size(); @@ -1698,6 +1694,7 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, rtc::SimpleStringBuilder ss(name); ss.AppendFormat("default (%s)", devName.data()); + guid_ss << "default"; } else { if (index < numberDevices) { usedID = deviceIds[index]; @@ -1705,7 +1702,7 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, usedID = index; } UInt32 len = name.size(); - + guid_ss << std::to_string(deviceIds[index]); WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData( usedID, &propertyAddress, 0, NULL, &len, name.data())); } @@ -1916,6 +1913,66 @@ OSStatus AudioDeviceMac::implObjectListenerProc( HandleDataSourceChange(objectId, addresses[i]); } else if (addresses[i].mSelector == kAudioDeviceProcessorOverload) { HandleProcessorOverload(addresses[i]); + } else if (addresses[i].mSelector == kAudioHardwarePropertyDefaultOutputDevice) { + RTC_LOG(LS_VERBOSE) << "kAudioHardwarePropertyDefaultOutputDevice"; + // default audio output device changed + HandleDefaultOutputDeviceChange(); + } else if (addresses[i].mSelector == kAudioHardwarePropertyDefaultInputDevice) { + RTC_LOG(LS_VERBOSE) << "kAudioHardwarePropertyDefaultInputDevice"; + // default audio input 
device changed + HandleDefaultInputDeviceChange(); + } + } + + return 0; +} + +int32_t AudioDeviceMac::HandleDefaultOutputDeviceChange() { + + if (SpeakerIsInitialized()) { + RTC_LOG(LS_WARNING) << "Default audio output device has changed"; + int32_t renderDeviceIsAlive = _renderDeviceIsAlive; + bool wasPlaying = _playing && renderDeviceIsAlive == 1; + + if (wasPlaying && _outputDeviceIsSpecified && _outputDeviceIndex == 0) { + + StopPlayout(); + + // default is already selected _outputDeviceIndex(0) + // re-init and start playout + InitPlayout(); + StartPlayout(); + } + + // Notify default output device updated + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); + } + } + + return 0; +} + +int32_t AudioDeviceMac::HandleDefaultInputDeviceChange() { + + if (MicrophoneIsInitialized()) { + RTC_LOG(LS_WARNING) << "Default audio input device has changed"; + int32_t captureDeviceIsAlive = _captureDeviceIsAlive; + bool wasRecording = _recording && captureDeviceIsAlive == 1; + + if (wasRecording && _inputDeviceIsSpecified && _inputDeviceIndex == 0) { + + StopRecording(); + + // default is already selected _inputDeviceIndex(0) + // re-init and start recording + InitRecording(); + StartRecording(); + } + + // Notify default input device updated + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); } } @@ -1938,9 +1995,29 @@ int32_t AudioDeviceMac::HandleDeviceChange() { &size, &deviceIsAlive); if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) { - RTC_LOG(LS_WARNING) << "Capture device is not alive (probably removed)"; - _captureDeviceIsAlive = 0; - _mixerManager.CloseMicrophone(); + RTC_LOG(LS_WARNING) << "Audio input device is not alive (probably removed) deviceID: " << _inputDeviceID; + //AtomicSet32(&_captureDeviceIsAlive, 0); + + // Logic to switch to default device (if exists) + // when the current device is not alive anymore + int32_t captureDeviceIsAlive = _captureDeviceIsAlive; + bool 
wasRecording = _recording && captureDeviceIsAlive == 1; + + StopRecording(); + + // was playing & default device exists + if (wasRecording && SetRecordingDevice(0) == 0) { + InitRecording(); + StartRecording(); + } else { + _mixerManager.CloseMicrophone(); + } + + // Notify input device removed + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); + } + } else if (err != noErr) { logCAMsg(rtc::LS_ERROR, "Error in AudioDeviceGetProperty()", (const char*)&err); @@ -1957,9 +2034,29 @@ int32_t AudioDeviceMac::HandleDeviceChange() { &size, &deviceIsAlive); if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) { - RTC_LOG(LS_WARNING) << "Render device is not alive (probably removed)"; - _renderDeviceIsAlive = 0; - _mixerManager.CloseSpeaker(); + RTC_LOG(LS_WARNING) << "Audio output device is not alive (probably removed) deviceID: " << _outputDeviceID; + // AtomicSet32(&_renderDeviceIsAlive, 0); // StopPlayout() does this + + // Logic to switch to default device (if exists) + // when the current device is not alive anymore + int32_t renderDeviceIsAlive = _renderDeviceIsAlive; + bool wasPlaying = _playing && renderDeviceIsAlive == 1; + + StopPlayout(); + + // was playing & default device exists + if (wasPlaying && SetPlayoutDevice(0) == 0) { + InitPlayout(); + StartPlayout(); + } else { + _mixerManager.CloseSpeaker(); + } + + // Notify output device removed + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); + } + } else if (err != noErr) { logCAMsg(rtc::LS_ERROR, "Error in AudioDeviceGetProperty()", (const char*)&err); @@ -2061,28 +2158,10 @@ int32_t AudioDeviceMac::HandleStreamFormatChange( int32_t AudioDeviceMac::HandleDataSourceChange( const AudioObjectID objectId, const AudioObjectPropertyAddress propertyAddress) { - OSStatus err = noErr; - - if (_macBookPro && - propertyAddress.mScope == kAudioDevicePropertyScopeOutput) { - RTC_LOG(LS_VERBOSE) << "Data source changed"; - - _macBookProPanRight = 
false; - UInt32 dataSource = 0; - UInt32 size = sizeof(UInt32); - WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData( - objectId, &propertyAddress, 0, NULL, &size, &dataSource)); - if (dataSource == 'ispk') { - _macBookProPanRight = true; - RTC_LOG(LS_VERBOSE) - << "MacBook Pro using internal speakers; stereo panning right"; - } else { - RTC_LOG(LS_VERBOSE) << "MacBook Pro not using internal speakers"; - } - } return 0; } + int32_t AudioDeviceMac::HandleProcessorOverload( const AudioObjectPropertyAddress propertyAddress) { // TODO(xians): we probably want to notify the user in some way of the @@ -2400,24 +2479,6 @@ bool AudioDeviceMac::RenderWorkerThread() { uint32_t nOutSamples = nSamples * _outDesiredFormat.mChannelsPerFrame; SInt16* pPlayBuffer = (SInt16*)&playBuffer; - if (_macBookProPanRight && (_playChannels == 2)) { - // Mix entirely into the right channel and zero the left channel. - SInt32 sampleInt32 = 0; - for (uint32_t sampleIdx = 0; sampleIdx < nOutSamples; sampleIdx += 2) { - sampleInt32 = pPlayBuffer[sampleIdx]; - sampleInt32 += pPlayBuffer[sampleIdx + 1]; - sampleInt32 /= 2; - - if (sampleInt32 > 32767) { - sampleInt32 = 32767; - } else if (sampleInt32 < -32768) { - sampleInt32 = -32768; - } - - pPlayBuffer[sampleIdx] = 0; - pPlayBuffer[sampleIdx + 1] = static_cast(sampleInt32); - } - } PaUtil_WriteRingBuffer(_paRenderBuffer, pPlayBuffer, nOutSamples); diff --git a/modules/audio_device/mac/audio_device_mac.h b/modules/audio_device/mac/audio_device_mac.h index bb06395d03..6cb5482a84 100644 --- a/modules/audio_device/mac/audio_device_mac.h +++ b/modules/audio_device/mac/audio_device_mac.h @@ -154,6 +154,13 @@ class AudioDeviceMac : public AudioDeviceGeneric { virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) RTC_LOCKS_EXCLUDED(mutex_); + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink) RTC_LOCKS_EXCLUDED(mutex_) { + audio_device_module_sink_ = sink; + return 0; + } + virtual int32_t GetPlayoutDevice() const; + virtual int32_t 
GetRecordingDevice() const; + private: int32_t InitSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); int32_t InitMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); @@ -180,7 +187,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { int32_t GetDeviceName(AudioObjectPropertyScope scope, uint16_t index, - rtc::ArrayView name); + rtc::ArrayView name, + rtc::ArrayView guid); int32_t InitDevice(uint16_t userDeviceIndex, AudioDeviceID& deviceId, @@ -201,6 +209,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { const AudioObjectPropertyAddress addresses[]); int32_t HandleDeviceChange(); + int32_t HandleDefaultOutputDeviceChange(); + int32_t HandleDefaultInputDeviceChange(); int32_t HandleStreamFormatChange(AudioObjectID objectId, AudioObjectPropertyAddress propertyAddress); @@ -343,6 +353,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { // Typing detection // 0x5c is key "9", after that comes function keys. bool prev_key_state_[0x5d]; + + AudioDeviceSink *audio_device_module_sink_ = nullptr; }; } // namespace webrtc diff --git a/modules/audio_device/win/audio_device_core_win.cc b/modules/audio_device/win/audio_device_core_win.cc index aa8b6a9ebe..c1c2b32a9b 100644 --- a/modules/audio_device/win/audio_device_core_win.cc +++ b/modules/audio_device/win/audio_device_core_win.cc @@ -461,6 +461,8 @@ AudioDeviceWindowsCore::AudioDeviceWindowsCore() _playChannelsPrioList[0] = 2; // stereo is prio 1 _playChannelsPrioList[1] = 1; // mono is prio 2 + _deviceStateListener = new DeviceStateListener(); + HRESULT hr; // We know that this API will work since it has already been verified in @@ -474,6 +476,8 @@ AudioDeviceWindowsCore::AudioDeviceWindowsCore() reinterpret_cast(&_ptrEnumerator)); RTC_DCHECK(_ptrEnumerator); + _ptrEnumerator->RegisterEndpointNotificationCallback(_deviceStateListener); + // DMO initialization for built-in WASAPI AEC. 
{ IMediaObject* ptrDMO = NULL; @@ -499,6 +503,8 @@ AudioDeviceWindowsCore::~AudioDeviceWindowsCore() { Terminate(); + _ptrEnumerator->UnregisterEndpointNotificationCallback(_deviceStateListener); + // The IMMDeviceEnumerator is created during construction. Must release // it here and not in Terminate() since we don't recreate it in Init(). SAFE_RELEASE(_ptrEnumerator); @@ -535,6 +541,11 @@ AudioDeviceWindowsCore::~AudioDeviceWindowsCore() { _hShutdownCaptureEvent = NULL; } + if(NULL != _deviceStateListener) { + delete _deviceStateListener; + _deviceStateListener = NULL; + } + if (_avrtLibrary) { BOOL freeOK = FreeLibrary(_avrtLibrary); if (!freeOK) { @@ -3894,6 +3905,65 @@ int32_t AudioDeviceWindowsCore::_GetDeviceID(IMMDevice* pDevice, return 0; } +int32_t AudioDeviceWindowsCore::SetAudioDeviceSink(AudioDeviceSink* sink) { + _deviceStateListener->SetAudioDeviceSink(sink); + return 0; +} + +void AudioDeviceWindowsCore::DeviceStateListener::SetAudioDeviceSink(AudioDeviceSink *sink) { + callback_ = sink; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDeviceStateChanged(LPCWSTR pwstrDeviceId, DWORD dwNewState) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDeviceStateChanged => " << pwstrDeviceId << ", NewState => " << dwNewState; + if(callback_) callback_->OnDevicesUpdated(); + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDeviceAdded(LPCWSTR pwstrDeviceId) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDeviceAdded => " << pwstrDeviceId; + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDeviceRemoved(LPCWSTR pwstrDeviceId) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDeviceRemoved => " << pwstrDeviceId; + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDefaultDeviceChanged(EDataFlow flow, ERole role, LPCWSTR pwstrDefaultDeviceId) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDefaultDeviceChanged => " << pwstrDefaultDeviceId; + return S_OK; +} + 
+HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnPropertyValueChanged(LPCWSTR pwstrDeviceId, const PROPERTYKEY key) { + //RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnPropertyValueChanged => " << pwstrDeviceId; + return S_OK; +} + +ULONG AudioDeviceWindowsCore::DeviceStateListener::AddRef() { + ULONG new_ref = InterlockedIncrement(&ref_count_); + // RTC_DLOG(LS_INFO) << "__AddRef => " << new_ref; + return new_ref; +} + +ULONG AudioDeviceWindowsCore::DeviceStateListener::Release() { + ULONG new_ref = InterlockedDecrement(&ref_count_); + // RTC_DLOG(LS_INFO) << "__Release => " << new_ref; + return new_ref; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::QueryInterface(REFIID iid, void** object) { + if (object == nullptr) { + return E_POINTER; + } + if (iid == IID_IUnknown || iid == __uuidof(IMMNotificationClient)) { + *object = static_cast(this); + return S_OK; + } + *object = nullptr; + return E_NOINTERFACE; +} + // ---------------------------------------------------------------------------- // _GetDefaultDevice // ---------------------------------------------------------------------------- diff --git a/modules/audio_device/win/audio_device_core_win.h b/modules/audio_device/win/audio_device_core_win.h index 380effb449..10b6a92b7f 100644 --- a/modules/audio_device/win/audio_device_core_win.h +++ b/modules/audio_device/win/audio_device_core_win.h @@ -22,6 +22,8 @@ #include #include // IMediaObject #include // MMDevice +#include +#include #include "api/scoped_refptr.h" #include "modules/audio_device/audio_device_generic.h" @@ -50,6 +52,34 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { AudioDeviceWindowsCore(); ~AudioDeviceWindowsCore(); + class DeviceStateListener : public IMMNotificationClient { + public: + virtual ~DeviceStateListener() = default; + HRESULT __stdcall OnDeviceStateChanged(LPCWSTR pwstrDeviceId, + DWORD dwNewState) override; + HRESULT __stdcall OnDeviceAdded(LPCWSTR pwstrDeviceId) override; + + HRESULT __stdcall 
OnDeviceRemoved(LPCWSTR pwstrDeviceId) override; + + HRESULT + __stdcall OnDefaultDeviceChanged(EDataFlow flow, + ERole role, + LPCWSTR pwstrDefaultDeviceId) override; + + HRESULT __stdcall OnPropertyValueChanged(LPCWSTR pwstrDeviceId, + const PROPERTYKEY key) override; + // IUnknown (required by IMMNotificationClient). + ULONG __stdcall AddRef() override; + ULONG __stdcall Release() override; + HRESULT __stdcall QueryInterface(REFIID iid, void** object) override; + + void SetAudioDeviceSink(AudioDeviceSink *sink); + + private: + LONG ref_count_ = 1; + AudioDeviceSink *callback_ = nullptr; + }; + static bool CoreAudioIsSupported(); // Retrieve the currently utilized audio layer @@ -150,6 +180,8 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { virtual int32_t EnableBuiltInAEC(bool enable); + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink); + public: virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); @@ -237,6 +269,7 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { IAudioEndpointVolume* _ptrCaptureVolume; ISimpleAudioVolume* _ptrRenderSimpleVolume; + DeviceStateListener *_deviceStateListener = nullptr; // DirectX Media Object (DMO) for the built-in AEC. rtc::scoped_refptr _dmo; rtc::scoped_refptr _mediaBuffer; diff --git a/modules/desktop_capture/mac/screen_capturer_mac.mm b/modules/desktop_capture/mac/screen_capturer_mac.mm index 8f0c68d48b..1f3889f250 100644 --- a/modules/desktop_capture/mac/screen_capturer_mac.mm +++ b/modules/desktop_capture/mac/screen_capturer_mac.mm @@ -297,16 +297,7 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s ScreenConfigurationChanged(); } - // When screen is zoomed in/out, OSX only updates the part of Rects currently - // displayed on screen, with relative location to current top-left on screen. - // This will cause problems when we copy the dirty regions to the captured - // image. So we invalidate the whole screen to copy all the screen contents. 
- // With CGI method, the zooming will be ignored and the whole screen contents - // will be captured as before. - // With IOSurface method, the zoomed screen contents will be captured. - if (UAZoomEnabled()) { - helper_.InvalidateScreen(screen_pixel_bounds_.size()); - } + helper_.InvalidateScreen(screen_pixel_bounds_.size()); DesktopRegion region; helper_.TakeInvalidRegion(®ion); diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc index 34b4af0ec9..6203ff57ff 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc @@ -78,6 +78,8 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { csrcs_ = metadata.GetCsrcs(); } + const RTPVideoHeader& header() const override { return header_; } + const RTPVideoHeader& GetHeader() const { return header_; } uint8_t GetPayloadType() const override { return payload_type_; } absl::optional GetCodecType() const { return codec_type_; } diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc index 6e915bf758..394ff5f7a9 100644 --- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc +++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc @@ -58,6 +58,10 @@ class TransformableVideoReceiverFrame << "TransformableVideoReceiverFrame::SetMetadata is not implemented"; } + const RTPVideoHeader& header () const override { + return frame_->GetRtpVideoHeader(); + } + std::unique_ptr ExtractFrame() && { return std::move(frame_); } diff --git a/modules/video_coding/include/video_codec_interface.h b/modules/video_coding/include/video_codec_interface.h index 46ae0d29e1..4faa309b13 100644 --- 
a/modules/video_coding/include/video_codec_interface.h +++ b/modules/video_coding/include/video_codec_interface.h @@ -51,7 +51,7 @@ struct CodecSpecificInfoVP8 { size_t updatedBuffers[kBuffersCount]; size_t updatedBuffersCount; }; -static_assert(std::is_pod::value, ""); +static_assert(std::is_standard_layout::value, ""); // Hack alert - the code assumes that thisstruct is memset when constructed. struct CodecSpecificInfoVP9 { @@ -82,7 +82,7 @@ struct CodecSpecificInfoVP9 { ABSL_DEPRECATED("") bool end_of_picture; }; -static_assert(std::is_pod::value, ""); +static_assert(std::is_standard_layout::value, ""); // Hack alert - the code assumes that thisstruct is memset when constructed. struct CodecSpecificInfoH264 { @@ -91,14 +91,14 @@ struct CodecSpecificInfoH264 { bool base_layer_sync; bool idr_frame; }; -static_assert(std::is_pod::value, ""); +static_assert(std::is_standard_layout::value, ""); union CodecSpecificInfoUnion { CodecSpecificInfoVP8 VP8; CodecSpecificInfoVP9 VP9; CodecSpecificInfoH264 H264; }; -static_assert(std::is_pod::value, ""); +static_assert(std::is_standard_layout::value, ""); // Note: if any pointers are added to this struct or its sub-structs, it // must be fitted with a copy-constructor. 
This is because it is copied diff --git a/pc/audio_rtp_receiver.cc b/pc/audio_rtp_receiver.cc index 804d31352d..1f78574b9d 100644 --- a/pc/audio_rtp_receiver.cc +++ b/pc/audio_rtp_receiver.cc @@ -298,7 +298,7 @@ void AudioRtpReceiver::Reconfigure(bool track_enabled) { media_channel_->SetFrameDecryptor(*signaled_ssrc_, frame_decryptor_); } - if (frame_transformer_) { + if (frame_transformer_ && track_enabled) { media_channel_->SetDepacketizerToDecoderFrameTransformer( signaled_ssrc_.value_or(0), frame_transformer_); } diff --git a/pc/data_channel_controller.cc b/pc/data_channel_controller.cc index 0a0d93e236..166e181cfe 100644 --- a/pc/data_channel_controller.cc +++ b/pc/data_channel_controller.cc @@ -28,7 +28,7 @@ DataChannelController::~DataChannelController() { << "Missing call to PrepareForShutdown?"; } -bool DataChannelController::HasDataChannelsForTest() const { +bool DataChannelController::HasDataChannels() const { auto has_channels = [&] { RTC_DCHECK_RUN_ON(network_thread()); return !sctp_data_channels_n_.empty(); diff --git a/pc/data_channel_controller.h b/pc/data_channel_controller.h index ac47c8aa28..1b5c8beadb 100644 --- a/pc/data_channel_controller.h +++ b/pc/data_channel_controller.h @@ -87,8 +87,9 @@ class DataChannelController : public SctpDataChannelControllerInterface, const InternalDataChannelInit& config); void AllocateSctpSids(rtc::SSLRole role); - // Used by tests to check if data channels are currently tracked. - bool HasDataChannelsForTest() const; + // Check if data channels are currently tracked. Used to decide whether a + // rejected m=application section should be reoffered. + bool HasDataChannels() const; // At some point in time, a data channel has existed. 
bool HasUsedDataChannels() const; diff --git a/pc/data_channel_controller_unittest.cc b/pc/data_channel_controller_unittest.cc index ea72e85218..3b8adb6819 100644 --- a/pc/data_channel_controller_unittest.cc +++ b/pc/data_channel_controller_unittest.cc @@ -105,17 +105,17 @@ TEST_F(DataChannelControllerTest, CreateDataChannelEarlyRelease) { TEST_F(DataChannelControllerTest, CreateDataChannelEarlyClose) { DataChannelControllerForTest dcc(pc_.get()); - EXPECT_FALSE(dcc.HasDataChannelsForTest()); + EXPECT_FALSE(dcc.HasDataChannels()); EXPECT_FALSE(dcc.HasUsedDataChannels()); auto ret = dcc.InternalCreateDataChannelWithProxy( "label", InternalDataChannelInit(DataChannelInit())); ASSERT_TRUE(ret.ok()); auto channel = ret.MoveValue(); - EXPECT_TRUE(dcc.HasDataChannelsForTest()); + EXPECT_TRUE(dcc.HasDataChannels()); EXPECT_TRUE(dcc.HasUsedDataChannels()); channel->Close(); run_loop_.Flush(); - EXPECT_FALSE(dcc.HasDataChannelsForTest()); + EXPECT_FALSE(dcc.HasDataChannels()); EXPECT_TRUE(dcc.HasUsedDataChannels()); } diff --git a/pc/media_stream_track_proxy.h b/pc/media_stream_track_proxy.h index 2af3aedb22..fab23d17ec 100644 --- a/pc/media_stream_track_proxy.h +++ b/pc/media_stream_track_proxy.h @@ -55,6 +55,8 @@ PROXY_SECONDARY_METHOD2(void, PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) PROXY_SECONDARY_METHOD0(void, RequestRefreshFrame) BYPASS_PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource) +PROXY_CONSTMETHOD0(bool, should_receive) +PROXY_METHOD1(void, set_should_receive, bool) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) diff --git a/pc/peer_connection.cc b/pc/peer_connection.cc index 4839fcf0a1..562101be5b 100644 --- a/pc/peer_connection.cc +++ b/pc/peer_connection.cc @@ -345,6 +345,7 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( std::vector vpn_list; PortAllocatorConfig port_allocator_config; absl::optional pacer_burst_interval; + bool 
enable_any_address_ports; }; static_assert(sizeof(stuff_being_tested_for_equality) == sizeof(*this), "Did you add something to RTCConfiguration and forget to " @@ -411,7 +412,8 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( port_allocator_config.min_port == o.port_allocator_config.min_port && port_allocator_config.max_port == o.port_allocator_config.max_port && port_allocator_config.flags == o.port_allocator_config.flags && - pacer_burst_interval == o.pacer_burst_interval; + pacer_burst_interval == o.pacer_burst_interval && + enable_any_address_ports == o.enable_any_address_ports; } bool PeerConnectionInterface::RTCConfiguration::operator!=( @@ -735,9 +737,6 @@ JsepTransportController* PeerConnection::InitializeTransportController_n( transport_controller_->SubscribeIceConnectionState( [this](cricket::IceConnectionState s) { RTC_DCHECK_RUN_ON(network_thread()); - if (s == cricket::kIceConnectionConnected) { - ReportTransportStats(); - } signaling_thread()->PostTask( SafeTask(signaling_thread_safety_.flag(), [this, s]() { RTC_DCHECK_RUN_ON(signaling_thread()); @@ -1422,9 +1421,10 @@ PeerConnection::CreateDataChannelOrError(const std::string& label, rtc::scoped_refptr channel = ret.MoveValue(); - // Trigger the onRenegotiationNeeded event for - // the first SCTP DataChannel. 
- if (first_datachannel) { + // Check the onRenegotiationNeeded event (with plan-b backward compat) + if (configuration_.sdp_semantics == SdpSemantics::kUnifiedPlan || + (configuration_.sdp_semantics == SdpSemantics::kPlanB_DEPRECATED && + first_datachannel)) { sdp_handler_->UpdateNegotiationNeeded(); } NoteUsageEvent(UsageEvent::DATA_ADDED); @@ -2176,6 +2176,11 @@ PeerConnection::InitializePortAllocator_n( RTC_LOG(LS_INFO) << "Disable candidates on link-local network interfaces."; } + if (configuration.enable_any_address_ports) { + port_allocator_flags |= cricket::PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS; + RTC_LOG(LS_INFO) << "Enable gathering on any address ports."; + } + port_allocator_->set_flags(port_allocator_flags); // No step delay is used while allocating ports. port_allocator_->set_step_delay(cricket::kMinimumStepDelay); @@ -2400,6 +2405,20 @@ void PeerConnection::OnTransportControllerConnectionState( case cricket::kIceConnectionConnected: RTC_LOG(LS_INFO) << "Changing to ICE connected state because " "all transports are writable."; + { + std::vector transceivers; + if (ConfiguredForMedia()) { + transceivers = rtp_manager()->transceivers()->List(); + } + + network_thread()->PostTask( + SafeTask(network_thread_safety_, + [this, transceivers = std::move(transceivers)] { + RTC_DCHECK_RUN_ON(network_thread()); + ReportTransportStats(std::move(transceivers)); + })); + } + SetIceConnectionState(PeerConnectionInterface::kIceConnectionConnected); NoteUsageEvent(UsageEvent::ICE_STATE_CONNECTED); break; @@ -2742,20 +2761,18 @@ void PeerConnection::OnTransportControllerGatheringState( } // Runs on network_thread(). 
-void PeerConnection::ReportTransportStats() { +void PeerConnection::ReportTransportStats( + std::vector transceivers) { TRACE_EVENT0("webrtc", "PeerConnection::ReportTransportStats"); rtc::Thread::ScopedDisallowBlockingCalls no_blocking_calls; std::map> media_types_by_transport_name; - if (ConfiguredForMedia()) { - for (const auto& transceiver : - rtp_manager()->transceivers()->UnsafeList()) { - if (transceiver->internal()->channel()) { - std::string transport_name( - transceiver->internal()->channel()->transport_name()); - media_types_by_transport_name[transport_name].insert( - transceiver->media_type()); - } + for (const auto& transceiver : transceivers) { + if (transceiver->internal()->channel()) { + std::string transport_name( + transceiver->internal()->channel()->transport_name()); + media_types_by_transport_name[transport_name].insert( + transceiver->media_type()); } } diff --git a/pc/peer_connection.h b/pc/peer_connection.h index 32d304e6e7..21b9a5a430 100644 --- a/pc/peer_connection.h +++ b/pc/peer_connection.h @@ -569,7 +569,8 @@ class PeerConnection : public PeerConnectionInternal, // Invoked when TransportController connection completion is signaled. // Reports stats for all transports in use. - void ReportTransportStats() RTC_RUN_ON(network_thread()); + void ReportTransportStats(std::vector transceivers) + RTC_RUN_ON(network_thread()); // Gather the usage of IPv4/IPv6 as best connection. 
static void ReportBestConnectionState(const cricket::TransportStats& stats); diff --git a/pc/peer_connection_integrationtest.cc b/pc/peer_connection_integrationtest.cc index 812833000b..07c5e0c0d3 100644 --- a/pc/peer_connection_integrationtest.cc +++ b/pc/peer_connection_integrationtest.cc @@ -1831,6 +1831,10 @@ TEST_P(PeerConnectionIntegrationTest, EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, callee()->ice_connection_state(), kDefaultTimeout); + // Part of reporting the stats will occur on the network thread, so flush it + // before checking NumEvents. + SendTask(network_thread(), [] {}); + EXPECT_METRIC_EQ(1, webrtc::metrics::NumEvents( "WebRTC.PeerConnection.CandidatePairType_UDP", webrtc::kIceCandidatePairHostNameHostName)); @@ -1959,6 +1963,10 @@ TEST_P(PeerConnectionIntegrationIceStatesTest, MAYBE_VerifyBestConnection) { EXPECT_EQ_WAIT(webrtc::PeerConnectionInterface::kIceConnectionConnected, callee()->ice_connection_state(), kDefaultTimeout); + // Part of reporting the stats will occur on the network thread, so flush it + // before checking NumEvents. + SendTask(network_thread(), [] {}); + // TODO(bugs.webrtc.org/9456): Fix it. const int num_best_ipv4 = webrtc::metrics::NumEvents( "WebRTC.PeerConnection.IPMetrics", webrtc::kBestConnections_IPv4); diff --git a/pc/sctp_data_channel.cc b/pc/sctp_data_channel.cc index 3fed03ca11..dda9110bfc 100644 --- a/pc/sctp_data_channel.cc +++ b/pc/sctp_data_channel.cc @@ -843,7 +843,7 @@ void SctpDataChannel::SetState(DataState state) { // RTC_RUN_ON(network_thread_). void SctpDataChannel::DeliverQueuedReceivedData() { - if (!observer_) { + if (!observer_ || state_ != kOpen) { return; } diff --git a/pc/sdp_offer_answer.cc b/pc/sdp_offer_answer.cc index 59f0b01dfd..0cb10b7d07 100644 --- a/pc/sdp_offer_answer.cc +++ b/pc/sdp_offer_answer.cc @@ -3342,8 +3342,20 @@ bool SdpOfferAnswerHandler::CheckIfNegotiationIsNeeded() { // 4. 
If connection has created any RTCDataChannels, and no m= section in // description has been negotiated yet for data, return true. if (data_channel_controller()->HasUsedDataChannels()) { - if (!cricket::GetFirstDataContent(description->description()->contents())) + const cricket::ContentInfo* data_content = + cricket::GetFirstDataContent(description->description()->contents()); + if (!data_content) { return true; + } + // The remote end might have rejected the data content. + const cricket::ContentInfo* remote_data_content = + current_remote_description() + ? current_remote_description()->description()->GetContentByName( + data_content->name) + : nullptr; + if (remote_data_content && remote_data_content->rejected) { + return true; + } } if (!ConfiguredForMedia()) { return false; @@ -4263,10 +4275,28 @@ void SdpOfferAnswerHandler::GetOptionsForUnifiedPlanOffer( } // Lastly, add a m-section if we have requested local data channels and an // m section does not already exist. - if (!pc_->GetDataMid() && data_channel_controller()->HasUsedDataChannels()) { - session_options->media_description_options.push_back( - GetMediaDescriptionOptionsForActiveData( - mid_generator_.GenerateString())); + if (!pc_->GetDataMid() && data_channel_controller()->HasUsedDataChannels() && + data_channel_controller()->HasDataChannels()) { + // Attempt to recycle a stopped m-line. + // TODO(crbug.com/1442604): GetDataMid() should return the mid if one was + // ever created but rejected. 
+ bool recycled = false; + for (size_t i = 0; i < session_options->media_description_options.size(); + i++) { + auto media_description = session_options->media_description_options[i]; + if (media_description.type == cricket::MEDIA_TYPE_DATA && + media_description.stopped) { + session_options->media_description_options[i] = + GetMediaDescriptionOptionsForActiveData(media_description.mid); + recycled = true; + break; + } + } + if (!recycled) { + session_options->media_description_options.push_back( + GetMediaDescriptionOptionsForActiveData( + mid_generator_.GenerateString())); + } } } diff --git a/pc/sdp_offer_answer_unittest.cc b/pc/sdp_offer_answer_unittest.cc index 33b1e746d9..4c4554f12c 100644 --- a/pc/sdp_offer_answer_unittest.cc +++ b/pc/sdp_offer_answer_unittest.cc @@ -34,6 +34,7 @@ #include "modules/audio_processing/include/audio_processing.h" #include "p2p/base/port_allocator.h" #include "pc/peer_connection_wrapper.h" +#include "pc/session_description.h" #include "pc/test/fake_audio_capture_module.h" #include "pc/test/mock_peer_connection_observers.h" #include "rtc_base/rtc_certificate_generator.h" @@ -467,4 +468,83 @@ TEST_F(SdpOfferAnswerTest, RollbackPreservesAddTrackMid) { EXPECT_EQ(saved_mid, first_transceiver->mid()); } +#ifdef WEBRTC_HAVE_SCTP + +TEST_F(SdpOfferAnswerTest, RejectedDataChannelsDoNotGetReoffered) { + auto pc = CreatePeerConnection(); + EXPECT_TRUE(pc->pc()->CreateDataChannelOrError("dc", nullptr).ok()); + EXPECT_TRUE(pc->CreateOfferAndSetAsLocal()); + auto mid = pc->pc()->local_description()->description()->contents()[0].mid(); + + // An answer that rejects the datachannel content. 
+ std::string sdp = + "v=0\r\n" + "o=- 4131505339648218884 3 IN IP4 **-----**\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=ice-ufrag:zGWFZ+fVXDeN6UoI/136\r\n" + "a=ice-pwd:9AUNgUqRNI5LSIrC1qFD2iTR\r\n" + "a=fingerprint:sha-256 " + "AD:52:52:E0:B1:37:34:21:0E:15:8E:B7:56:56:7B:B4:39:0E:6D:1C:F5:84:A7:EE:" + "B5:27:3E:30:B1:7D:69:42\r\n" + "a=setup:passive\r\n" + "m=application 0 UDP/DTLS/SCTP webrtc-datachannel\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=sctp-port:5000\r\n" + "a=max-message-size:262144\r\n" + "a=mid:" + + mid + "\r\n"; + auto answer = CreateSessionDescription(SdpType::kAnswer, sdp); + ASSERT_TRUE(pc->SetRemoteDescription(std::move(answer))); + // The subsequent offer should not recycle the m-line since the existing data + // channel is closed. + auto offer = pc->CreateOffer(); + const auto& offer_contents = offer->description()->contents(); + ASSERT_EQ(offer_contents.size(), 1u); + EXPECT_EQ(offer_contents[0].mid(), mid); + EXPECT_EQ(offer_contents[0].rejected, true); +} + +TEST_F(SdpOfferAnswerTest, RejectedDataChannelsDoGetReofferedWhenActive) { + auto pc = CreatePeerConnection(); + EXPECT_TRUE(pc->pc()->CreateDataChannelOrError("dc", nullptr).ok()); + EXPECT_TRUE(pc->CreateOfferAndSetAsLocal()); + auto mid = pc->pc()->local_description()->description()->contents()[0].mid(); + + // An answer that rejects the datachannel content. 
+ std::string sdp = + "v=0\r\n" + "o=- 4131505339648218884 3 IN IP4 **-----**\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=ice-ufrag:zGWFZ+fVXDeN6UoI/136\r\n" + "a=ice-pwd:9AUNgUqRNI5LSIrC1qFD2iTR\r\n" + "a=fingerprint:sha-256 " + "AD:52:52:E0:B1:37:34:21:0E:15:8E:B7:56:56:7B:B4:39:0E:6D:1C:F5:84:A7:EE:" + "B5:27:3E:30:B1:7D:69:42\r\n" + "a=setup:passive\r\n" + "m=application 0 UDP/DTLS/SCTP webrtc-datachannel\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=sctp-port:5000\r\n" + "a=max-message-size:262144\r\n" + "a=mid:" + + mid + "\r\n"; + auto answer = CreateSessionDescription(SdpType::kAnswer, sdp); + ASSERT_TRUE(pc->SetRemoteDescription(std::move(answer))); + + // The subsequent offer should recycle the m-line when there is a new data + // channel. + EXPECT_TRUE(pc->pc()->CreateDataChannelOrError("dc2", nullptr).ok()); + EXPECT_TRUE(pc->pc()->ShouldFireNegotiationNeededEvent( + pc->observer()->latest_negotiation_needed_event())); + + auto offer = pc->CreateOffer(); + const auto& offer_contents = offer->description()->contents(); + ASSERT_EQ(offer_contents.size(), 1u); + EXPECT_EQ(offer_contents[0].mid(), mid); + EXPECT_EQ(offer_contents[0].rejected, false); +} + +#endif // WEBRTC_HAVE_SCTP + } // namespace webrtc diff --git a/pc/video_rtp_receiver.cc b/pc/video_rtp_receiver.cc index 8a2e65c162..63a2d72879 100644 --- a/pc/video_rtp_receiver.cc +++ b/pc/video_rtp_receiver.cc @@ -41,15 +41,20 @@ VideoRtpReceiver::VideoRtpReceiver( rtc::Thread::Current(), worker_thread, VideoTrack::Create(receiver_id, source_, worker_thread))), - attachment_id_(GenerateUniqueId()) { + cached_track_should_receive_(track_->should_receive()), + attachment_id_(GenerateUniqueId()), + worker_thread_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()) { RTC_DCHECK(worker_thread_); SetStreams(streams); + track_->RegisterObserver(this); RTC_DCHECK_EQ(source_->state(), MediaSourceInterface::kInitializing); } VideoRtpReceiver::~VideoRtpReceiver() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); 
RTC_DCHECK(!media_channel_); + + track_->UnregisterObserver(this); } std::vector VideoRtpReceiver::stream_ids() const { @@ -114,6 +119,39 @@ void VideoRtpReceiver::Stop() { track_->internal()->set_ended(); } +void VideoRtpReceiver::OnChanged() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + if (cached_track_should_receive_ != track_->should_receive()) { + cached_track_should_receive_ = track_->should_receive(); + worker_thread_->PostTask( + [this, receive = cached_track_should_receive_]() { + RTC_DCHECK_RUN_ON(worker_thread_); + if(receive) { + StartMediaChannel(); + } else { + StopMediaChannel(); + } + }); + } +} + +void VideoRtpReceiver::StartMediaChannel() { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_) { + return; + } + media_channel_->StartReceive(signaled_ssrc_.value_or(0)); + OnGenerateKeyFrame(); +} + +void VideoRtpReceiver::StopMediaChannel() { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_) { + return; + } + media_channel_->StopReceive(signaled_ssrc_.value_or(0)); +} + void VideoRtpReceiver::RestartMediaChannel(absl::optional ssrc) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); MediaSourceInterface::SourceState state = source_->state(); @@ -209,6 +247,7 @@ void VideoRtpReceiver::set_transport( void VideoRtpReceiver::SetStreams( const std::vector>& streams) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + // Remove remote track from any streams that are going away. for (const auto& existing_stream : streams_) { bool removed = true; diff --git a/pc/video_rtp_receiver.h b/pc/video_rtp_receiver.h index ef88016052..491efe2931 100644 --- a/pc/video_rtp_receiver.h +++ b/pc/video_rtp_receiver.h @@ -42,7 +42,8 @@ namespace webrtc { -class VideoRtpReceiver : public RtpReceiverInternal { +class VideoRtpReceiver : public RtpReceiverInternal, + public ObserverInterface { public: // An SSRC of 0 will create a receiver that will match the first SSRC it // sees. Must be called on signaling thread. 
@@ -60,6 +61,9 @@ class VideoRtpReceiver : public RtpReceiverInternal { rtc::scoped_refptr video_track() const { return track_; } + // ObserverInterface implementation + void OnChanged() override; + // RtpReceiverInterface implementation rtc::scoped_refptr track() const override { return track_; @@ -115,6 +119,8 @@ class VideoRtpReceiver : public RtpReceiverInternal { cricket::MediaReceiveChannelInterface* media_channel); private: + void StartMediaChannel(); + void StopMediaChannel(); void RestartMediaChannel(absl::optional ssrc) RTC_RUN_ON(&signaling_thread_checker_); void RestartMediaChannel_w(absl::optional ssrc, @@ -162,6 +168,8 @@ class VideoRtpReceiver : public RtpReceiverInternal { RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr; bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) = false; + + bool cached_track_should_receive_ RTC_GUARDED_BY(&signaling_thread_checker_); const int attachment_id_; rtc::scoped_refptr frame_decryptor_ RTC_GUARDED_BY(worker_thread_); @@ -177,6 +185,7 @@ class VideoRtpReceiver : public RtpReceiverInternal { // or switched. 
bool saved_generate_keyframe_ RTC_GUARDED_BY(worker_thread_) = false; bool saved_encoded_sink_enabled_ RTC_GUARDED_BY(worker_thread_) = false; + const rtc::scoped_refptr worker_thread_safety_; }; } // namespace webrtc diff --git a/pc/video_track.cc b/pc/video_track.cc index 0bf8687af3..8922cdaf1f 100644 --- a/pc/video_track.cc +++ b/pc/video_track.cc @@ -76,6 +76,19 @@ VideoTrackSourceInterface* VideoTrack::GetSourceInternal() const { return video_source_->internal(); } +void VideoTrack::set_should_receive(bool receive) { + RTC_DCHECK_RUN_ON(&signaling_thread_); + if (should_receive_ == receive) + return; + should_receive_ = receive; + Notifier::FireOnChanged(); +} + +bool VideoTrack::should_receive() const { + RTC_DCHECK_RUN_ON(&signaling_thread_); + return should_receive_; +} + VideoTrackInterface::ContentHint VideoTrack::content_hint() const { RTC_DCHECK_RUN_ON(&signaling_thread_); return content_hint_; diff --git a/pc/video_track.h b/pc/video_track.h index 13a51c454b..b56c64ef20 100644 --- a/pc/video_track.h +++ b/pc/video_track.h @@ -48,6 +48,9 @@ class VideoTrack : public MediaStreamTrack, void RequestRefreshFrame() override; VideoTrackSourceInterface* GetSource() const override; + void set_should_receive(bool should_receive) override; + bool should_receive() const override; + ContentHint content_hint() const override; void set_content_hint(ContentHint hint) override; bool set_enabled(bool enable) override; @@ -81,6 +84,7 @@ class VideoTrack : public MediaStreamTrack, // be queried without blocking on the worker thread by callers that don't // use an api proxy to call the `enabled()` method. 
bool enabled_w_ RTC_GUARDED_BY(worker_thread_) = true; + bool should_receive_ RTC_GUARDED_BY(signaling_thread_) = true; }; } // namespace webrtc diff --git a/pc/webrtc_sdp.cc b/pc/webrtc_sdp.cc index 6d041ff0a1..4beb6b0faa 100644 --- a/pc/webrtc_sdp.cc +++ b/pc/webrtc_sdp.cc @@ -3542,6 +3542,11 @@ bool ParseSsrcGroupAttribute(absl::string_view line, if (!GetValueFromString(line, fields[i], &ssrc, error)) { return false; } + // Reject duplicates. While not forbidden by RFC 5576, + // they don't make sense. + if (absl::c_linear_search(ssrcs, ssrc)) { + return ParseFailed(line, "Duplicate SSRC in ssrc-group", error); + } ssrcs.push_back(ssrc); } ssrc_groups->push_back(SsrcGroup(semantics, ssrcs)); diff --git a/pc/webrtc_sdp_unittest.cc b/pc/webrtc_sdp_unittest.cc index 0e1e62e53f..b2c751717e 100644 --- a/pc/webrtc_sdp_unittest.cc +++ b/pc/webrtc_sdp_unittest.cc @@ -5056,3 +5056,29 @@ TEST_F(WebRtcSdpTest, RejectSessionLevelMediaLevelExtmapMixedUsage) { JsepSessionDescription jdesc(kDummyType); EXPECT_FALSE(SdpDeserialize(sdp, &jdesc)); } + +TEST_F(WebRtcSdpTest, RejectDuplicateSsrcInSsrcGroup) { + std::string sdp = + "v=0\r\n" + "o=- 0 3 IN IP4 127.0.0.1\r\n" + "s=-\r\n" + "t=0 0\r\n" + "a=group:BUNDLE 0\r\n" + "a=fingerprint:sha-1 " + "4A:AD:B9:B1:3F:82:18:3B:54:02:12:DF:3E:5D:49:6B:19:E5:7C:AB\r\n" + "a=setup:actpass\r\n" + "a=ice-ufrag:ETEn\r\n" + "a=ice-pwd:OtSK0WpNtpUjkY4+86js7Z/l\r\n" + "m=video 9 UDP/TLS/RTP/SAVPF 96 97\r\n" + "c=IN IP4 0.0.0.0\r\n" + "a=rtcp-mux\r\n" + "a=sendonly\r\n" + "a=mid:0\r\n" + "a=rtpmap:96 VP8/90000\r\n" + "a=rtpmap:97 rtx/90000\r\n" + "a=fmtp:97 apt=96\r\n" + "a=ssrc-group:FID 1234 1234\r\n" + "a=ssrc:1234 cname:test\r\n"; + JsepSessionDescription jdesc(kDummyType); + EXPECT_FALSE(SdpDeserialize(sdp, &jdesc)); +} diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index ff89b21721..877481a642 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -140,15 +140,18 @@ if (is_ios || is_mac) { "objc/helpers/AVCaptureSession+DevicePosition.mm", 
"objc/helpers/NSString+StdString.h", "objc/helpers/NSString+StdString.mm", - "objc/helpers/RTCDispatcher+Private.h", "objc/helpers/RTCDispatcher.h", "objc/helpers/RTCDispatcher.m", + "objc/helpers/RTCDispatcher+Private.h", + "objc/helpers/RTCYUVHelper.h", + "objc/helpers/RTCYUVHelper.mm", "objc/helpers/scoped_cftyperef.h", ] deps = [ ":base_objc", "../rtc_base:checks", + "//third_party/libyuv", ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] @@ -382,7 +385,12 @@ if (is_ios || is_mac) { "objc/components/network/RTCNetworkMonitor.mm", ] - configs += [ ":used_from_extension" ] + configs += [ + "..:common_objc", + ":used_from_extension", + ] + + public_configs = [ ":common_config_objc" ] frameworks = [ "Network.framework" ] @@ -635,17 +643,13 @@ if (is_ios || is_mac) { "Metal.framework", "MetalKit.framework", ] - if (is_ios) { + if (is_ios || is_mac) { sources += [ "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLVideoView.m", ] } if (is_mac) { - sources += [ - "objc/components/renderer/metal/RTCMTLNSVideoView.h", - "objc/components/renderer/metal/RTCMTLNSVideoView.m", - ] frameworks += [ "AppKit.framework" ] } deps = [ @@ -701,14 +705,51 @@ if (is_ios || is_mac) { "../rtc_base/system:gcd_helpers", ] } + + rtc_library("desktopcapture_objc") { + visibility = [ "*" ] + sources = [ + "objc/components/capturer/RTCDesktopCapturer.h", + "objc/components/capturer/RTCDesktopCapturer.mm", + "objc/components/capturer/RTCDesktopCapturer+Private.h", + "objc/components/capturer/RTCDesktopMediaList.h", + "objc/components/capturer/RTCDesktopMediaList.mm", + "objc/components/capturer/RTCDesktopMediaList+Private.h", + "objc/components/capturer/RTCDesktopSource.h", + "objc/components/capturer/RTCDesktopSource.mm", + "objc/components/capturer/RTCDesktopSource+Private.h", + "objc/native/src/objc_desktop_capture.h", + "objc/native/src/objc_desktop_capture.mm", + "objc/native/src/objc_desktop_media_list.h", + 
"objc/native/src/objc_desktop_media_list.mm", + ] + frameworks = [ + "AppKit.framework", + ] + + configs += [ "..:common_objc" ] + + public_configs = [ ":common_config_objc" ] + + deps = [ + ":base_objc", + ":helpers_objc", + ":videoframebuffer_objc", + "../rtc_base/system:gcd_helpers", + "../modules/desktop_capture", + ] + if(is_mac) { + deps += [ "//third_party:jpeg", ] + } + } rtc_library("videocodec_objc") { visibility = [ "*" ] configs += [ "..:no_global_constructors" ] sources = [ - "objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h", "objc/components/video_codec/RTCCodecSpecificInfoH264.h", "objc/components/video_codec/RTCCodecSpecificInfoH264.mm", + "objc/components/video_codec/RTCCodecSpecificInfoH264+Private.h", "objc/components/video_codec/RTCH264ProfileLevelId.h", "objc/components/video_codec/RTCH264ProfileLevelId.mm", ] @@ -741,6 +782,7 @@ if (is_ios || is_mac) { ] deps = [ + ":simulcast", ":base_objc", ":native_video", ":videocodec_objc", @@ -842,12 +884,28 @@ if (is_ios || is_mac) { ] } + rtc_library("simulcast") { + sources = [ + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.mm", + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm", + ] + + deps = [ + ":base_objc", + ":wrapped_native_codec_objc", + "../media:rtc_media_base", + "../media:rtc_simulcast_encoder_adapter", + ] + } + rtc_library("mediaconstraints_objc") { configs += [ "..:no_global_constructors" ] sources = [ - "objc/api/peerconnection/RTCMediaConstraints+Private.h", "objc/api/peerconnection/RTCMediaConstraints.h", "objc/api/peerconnection/RTCMediaConstraints.mm", + "objc/api/peerconnection/RTCMediaConstraints+Private.h", ] public_configs = [ ":common_config_objc" ] @@ -873,9 +931,9 @@ if (is_ios || is_mac) { visibility = [ "*" ] allow_poison = [ "audio_codecs" ] # TODO(bugs.webrtc.org/8396): Remove. 
sources = [ - "objc/api/RTCVideoRendererAdapter+Private.h", "objc/api/RTCVideoRendererAdapter.h", "objc/api/RTCVideoRendererAdapter.mm", + "objc/api/RTCVideoRendererAdapter+Private.h", ] configs += [ "..:common_objc" ] @@ -892,9 +950,9 @@ if (is_ios || is_mac) { rtc_library("mediasource_objc") { sources = [ - "objc/api/peerconnection/RTCMediaSource+Private.h", "objc/api/peerconnection/RTCMediaSource.h", "objc/api/peerconnection/RTCMediaSource.mm", + "objc/api/peerconnection/RTCMediaSource+Private.h", ] configs += [ @@ -943,109 +1001,142 @@ if (is_ios || is_mac) { ] configs += [ "..:no_global_constructors" ] sources = [ - "objc/api/peerconnection/RTCAudioSource+Private.h", + "objc/api/peerconnection/RTCAudioDeviceModule.h", + "objc/api/peerconnection/RTCAudioDeviceModule.mm", + "objc/api/peerconnection/RTCAudioDeviceModule+Private.h", + "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioSource.mm", - "objc/api/peerconnection/RTCAudioTrack+Private.h", + "objc/api/peerconnection/RTCAudioSource+Private.h", + "objc/api/peerconnection/RTCAudioSource+Private.h", "objc/api/peerconnection/RTCAudioTrack.h", "objc/api/peerconnection/RTCAudioTrack.mm", + "objc/api/peerconnection/RTCAudioTrack+Private.h", "objc/api/peerconnection/RTCCertificate.h", "objc/api/peerconnection/RTCCertificate.mm", - "objc/api/peerconnection/RTCConfiguration+Native.h", - "objc/api/peerconnection/RTCConfiguration+Private.h", "objc/api/peerconnection/RTCConfiguration.h", "objc/api/peerconnection/RTCConfiguration.mm", + "objc/api/peerconnection/RTCConfiguration+Native.h", + "objc/api/peerconnection/RTCConfiguration+Private.h", "objc/api/peerconnection/RTCCryptoOptions.h", "objc/api/peerconnection/RTCCryptoOptions.mm", - "objc/api/peerconnection/RTCDataChannel+Private.h", "objc/api/peerconnection/RTCDataChannel.h", "objc/api/peerconnection/RTCDataChannel.mm", - "objc/api/peerconnection/RTCDataChannelConfiguration+Private.h", + 
"objc/api/peerconnection/RTCDataChannel+Private.h", "objc/api/peerconnection/RTCDataChannelConfiguration.h", "objc/api/peerconnection/RTCDataChannelConfiguration.mm", - "objc/api/peerconnection/RTCDtmfSender+Private.h", + "objc/api/peerconnection/RTCDataChannelConfiguration+Private.h", "objc/api/peerconnection/RTCDtmfSender.h", "objc/api/peerconnection/RTCDtmfSender.mm", + "objc/api/peerconnection/RTCDtmfSender+Private.h", "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCFieldTrials.mm", - "objc/api/peerconnection/RTCIceCandidate+Private.h", + "objc/api/peerconnection/RTCFrameCryptor.h", + "objc/api/peerconnection/RTCFrameCryptor.mm", + "objc/api/peerconnection/RTCFrameCryptor+Private.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidate.mm", - "objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h", + "objc/api/peerconnection/RTCIceCandidate+Private.h", "objc/api/peerconnection/RTCIceCandidateErrorEvent.h", "objc/api/peerconnection/RTCIceCandidateErrorEvent.mm", - "objc/api/peerconnection/RTCIceServer+Private.h", + "objc/api/peerconnection/RTCIceCandidateErrorEvent+Private.h", "objc/api/peerconnection/RTCIceServer.h", "objc/api/peerconnection/RTCIceServer.mm", - "objc/api/peerconnection/RTCLegacyStatsReport+Private.h", + "objc/api/peerconnection/RTCIceServer+Private.h", + "objc/api/peerconnection/RTCIODevice.h", + "objc/api/peerconnection/RTCIODevice.mm", "objc/api/peerconnection/RTCLegacyStatsReport.h", "objc/api/peerconnection/RTCLegacyStatsReport.mm", - "objc/api/peerconnection/RTCMediaStream+Private.h", + "objc/api/peerconnection/RTCLegacyStatsReport+Private.h", "objc/api/peerconnection/RTCMediaStream.h", "objc/api/peerconnection/RTCMediaStream.mm", - "objc/api/peerconnection/RTCMediaStreamTrack+Private.h", + 
"objc/api/peerconnection/RTCMediaStream+Private.h", "objc/api/peerconnection/RTCMediaStreamTrack.h", "objc/api/peerconnection/RTCMediaStreamTrack.mm", + "objc/api/peerconnection/RTCMediaStreamTrack+Private.h", "objc/api/peerconnection/RTCMetrics.h", "objc/api/peerconnection/RTCMetrics.mm", - "objc/api/peerconnection/RTCMetricsSampleInfo+Private.h", "objc/api/peerconnection/RTCMetricsSampleInfo.h", "objc/api/peerconnection/RTCMetricsSampleInfo.mm", + "objc/api/peerconnection/RTCMetricsSampleInfo+Private.h", + "objc/api/peerconnection/RTCPeerConnection.h", + "objc/api/peerconnection/RTCPeerConnection.mm", "objc/api/peerconnection/RTCPeerConnection+DataChannel.mm", "objc/api/peerconnection/RTCPeerConnection+Private.h", "objc/api/peerconnection/RTCPeerConnection+Stats.mm", - "objc/api/peerconnection/RTCPeerConnection.h", - "objc/api/peerconnection/RTCPeerConnection.mm", - "objc/api/peerconnection/RTCPeerConnectionFactory+Native.h", - "objc/api/peerconnection/RTCPeerConnectionFactory+Private.h", "objc/api/peerconnection/RTCPeerConnectionFactory.h", "objc/api/peerconnection/RTCPeerConnectionFactory.mm", - "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h", - "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm", + "objc/api/peerconnection/RTCPeerConnectionFactory+Native.h", + "objc/api/peerconnection/RTCPeerConnectionFactory+Private.h", "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h", "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm", - "objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h", + "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h", + "objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm", "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h", "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.mm", - "objc/api/peerconnection/RTCRtcpParameters+Private.h", + 
"objc/api/peerconnection/RTCPeerConnectionFactoryOptions+Private.h", "objc/api/peerconnection/RTCRtcpParameters.h", "objc/api/peerconnection/RTCRtcpParameters.mm", - "objc/api/peerconnection/RTCRtpCodecParameters+Private.h", + "objc/api/peerconnection/RTCRtcpParameters+Private.h", + "objc/api/peerconnection/RTCRtpCapabilities.h", + "objc/api/peerconnection/RTCRtpCapabilities.mm", + "objc/api/peerconnection/RTCRtpCapabilities+Private.h", + "objc/api/peerconnection/RTCRtpCodecCapability.h", + "objc/api/peerconnection/RTCRtpCodecCapability.mm", + "objc/api/peerconnection/RTCRtpCodecCapability+Private.h", "objc/api/peerconnection/RTCRtpCodecParameters.h", "objc/api/peerconnection/RTCRtpCodecParameters.mm", - "objc/api/peerconnection/RTCRtpEncodingParameters+Private.h", + "objc/api/peerconnection/RTCRtpCodecParameters+Private.h", "objc/api/peerconnection/RTCRtpEncodingParameters.h", "objc/api/peerconnection/RTCRtpEncodingParameters.mm", - "objc/api/peerconnection/RTCRtpHeaderExtension+Private.h", + "objc/api/peerconnection/RTCRtpEncodingParameters+Private.h", "objc/api/peerconnection/RTCRtpHeaderExtension.h", "objc/api/peerconnection/RTCRtpHeaderExtension.mm", - "objc/api/peerconnection/RTCRtpParameters+Private.h", + "objc/api/peerconnection/RTCRtpHeaderExtension+Private.h", "objc/api/peerconnection/RTCRtpParameters.h", "objc/api/peerconnection/RTCRtpParameters.mm", - "objc/api/peerconnection/RTCRtpReceiver+Native.h", - "objc/api/peerconnection/RTCRtpReceiver+Private.h", + "objc/api/peerconnection/RTCRtpParameters+Private.h", "objc/api/peerconnection/RTCRtpReceiver.h", "objc/api/peerconnection/RTCRtpReceiver.mm", - "objc/api/peerconnection/RTCRtpSender+Native.h", - "objc/api/peerconnection/RTCRtpSender+Private.h", + "objc/api/peerconnection/RTCRtpReceiver+Native.h", + "objc/api/peerconnection/RTCRtpReceiver+Private.h", "objc/api/peerconnection/RTCRtpSender.h", "objc/api/peerconnection/RTCRtpSender.mm", - "objc/api/peerconnection/RTCRtpTransceiver+Private.h", + 
"objc/api/peerconnection/RTCRtpSender+Native.h", + "objc/api/peerconnection/RTCRtpSender+Private.h", "objc/api/peerconnection/RTCRtpTransceiver.h", "objc/api/peerconnection/RTCRtpTransceiver.mm", - "objc/api/peerconnection/RTCSSLAdapter.h", - "objc/api/peerconnection/RTCSSLAdapter.mm", - "objc/api/peerconnection/RTCSessionDescription+Private.h", + "objc/api/peerconnection/RTCRtpTransceiver+Private.h", "objc/api/peerconnection/RTCSessionDescription.h", "objc/api/peerconnection/RTCSessionDescription.mm", - "objc/api/peerconnection/RTCStatisticsReport+Private.h", + "objc/api/peerconnection/RTCSessionDescription+Private.h", + "objc/api/peerconnection/RTCSSLAdapter.h", + "objc/api/peerconnection/RTCSSLAdapter.mm", "objc/api/peerconnection/RTCStatisticsReport.h", "objc/api/peerconnection/RTCStatisticsReport.mm", + "objc/api/peerconnection/RTCStatisticsReport+Private.h", "objc/api/peerconnection/RTCTracing.h", "objc/api/peerconnection/RTCTracing.mm", - "objc/api/peerconnection/RTCVideoTrack+Private.h", "objc/api/peerconnection/RTCVideoTrack.h", "objc/api/peerconnection/RTCVideoTrack.mm", + "objc/api/peerconnection/RTCVideoTrack+Private.h", + "objc/components/audio/RTCAudioBuffer.h", + "objc/components/audio/RTCAudioBuffer.mm", + "objc/components/audio/RTCAudioBuffer+Private.h", + "objc/components/audio/RTCAudioCustomProcessingAdapter.h", + "objc/components/audio/RTCAudioCustomProcessingAdapter.mm", + "objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h", + "objc/components/audio/RTCAudioCustomProcessingDelegate.h", + "objc/components/audio/RTCAudioProcessingConfig.h", + "objc/components/audio/RTCAudioProcessingConfig.mm", + "objc/components/audio/RTCAudioProcessingConfig+Private.h", + "objc/components/audio/RTCAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.mm", + "objc/components/audio/RTCDefaultAudioProcessingModule+Private.h", ] configs += [ @@ -1069,6 +1160,7 @@ 
if (is_ios || is_mac) { ":videorendereradapter_objc", ":videosource_objc", ":videotoolbox_objc", + "../api/crypto:frame_crypto_transformer", "../api:dtmf_sender_interface", "../api:libjingle_peerconnection_api", "../api:media_stream_interface", @@ -1125,34 +1217,34 @@ if (is_ios || is_mac) { include_dirs = [ "objc/" ] sources = [ + "objc/unittests/frame_buffer_helpers.h", + "objc/unittests/frame_buffer_helpers.mm", + "objc/unittests/nalu_rewriter_xctest.mm", + "objc/unittests/objc_video_decoder_factory_tests.mm", + "objc/unittests/objc_video_encoder_factory_tests.mm", "objc/unittests/ObjCVideoTrackSource_xctest.mm", - "objc/unittests/RTCAudioDeviceModule_xctest.mm", "objc/unittests/RTCAudioDevice_xctest.mm", + "objc/unittests/RTCAudioDeviceModule_xctest.mm", "objc/unittests/RTCAudioSessionTest.mm", - "objc/unittests/RTCCVPixelBuffer_xctest.mm", "objc/unittests/RTCCallbackLogger_xctest.m", "objc/unittests/RTCCameraVideoCapturerTests.mm", "objc/unittests/RTCCertificateTest.mm", "objc/unittests/RTCConfigurationTest.mm", + "objc/unittests/RTCCVPixelBuffer_xctest.mm", "objc/unittests/RTCDataChannelConfigurationTest.mm", "objc/unittests/RTCEncodedImage_xctest.mm", "objc/unittests/RTCFileVideoCapturer_xctest.mm", "objc/unittests/RTCH264ProfileLevelId_xctest.m", "objc/unittests/RTCIceCandidateTest.mm", "objc/unittests/RTCIceServerTest.mm", - "objc/unittests/RTCMTLVideoView_xctest.m", "objc/unittests/RTCMediaConstraintsTest.mm", + "objc/unittests/RTCMTLVideoView_xctest.m", "objc/unittests/RTCNV12TextureCache_xctest.m", - "objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm", "objc/unittests/RTCPeerConnectionFactory_xctest.m", + "objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm", "objc/unittests/RTCPeerConnectionTest.mm", "objc/unittests/RTCSessionDescriptionTest.mm", "objc/unittests/RTCTracingTest.mm", - "objc/unittests/frame_buffer_helpers.h", - "objc/unittests/frame_buffer_helpers.mm", - "objc/unittests/nalu_rewriter_xctest.mm", - 
"objc/unittests/objc_video_decoder_factory_tests.mm", - "objc/unittests/objc_video_encoder_factory_tests.mm", "objc/unittests/scoped_cftyperef_tests.mm", ] @@ -1246,8 +1338,8 @@ if (is_ios || is_mac) { is_xctest = true info_plist = "//test/ios/Info.plist" sources = [ - "objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m", "objc/unittests/main.mm", + "objc/unittests/RTCDoNotPutCPlusPlusInFrameworkHeaders_xctest.m", ] extra_substitutions = [ "GTEST_BUNDLE_ID_SUFFIX=generic-unit-test" ] @@ -1261,12 +1353,72 @@ if (is_ios || is_mac) { } } + bundle_data("darwin_privacy_info") { + sources = [ + "objc/PrivacyInfo.xcprivacy", + ] + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } + if (is_ios) { apple_framework_bundle_with_umbrella_header("framework_objc") { info_plist = "objc/Info.plist" output_name = "WebRTC" common_objc_headers = [ + "objc/api/peerconnection/RTCAudioDeviceModule.h", + "objc/api/peerconnection/RTCAudioSource.h", + "objc/api/peerconnection/RTCAudioTrack.h", + "objc/api/peerconnection/RTCCertificate.h", + "objc/api/peerconnection/RTCConfiguration.h", + "objc/api/peerconnection/RTCCryptoOptions.h", + "objc/api/peerconnection/RTCDataChannel.h", + "objc/api/peerconnection/RTCDataChannelConfiguration.h", + "objc/api/peerconnection/RTCDtmfSender.h", + "objc/api/peerconnection/RTCFieldTrials.h", + "objc/api/peerconnection/RTCFrameCryptor.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", + "objc/api/peerconnection/RTCIceCandidate.h", + "objc/api/peerconnection/RTCIceCandidateErrorEvent.h", + "objc/api/peerconnection/RTCIceServer.h", + "objc/api/peerconnection/RTCIODevice.h", + "objc/api/peerconnection/RTCLegacyStatsReport.h", + "objc/api/peerconnection/RTCMediaConstraints.h", + "objc/api/peerconnection/RTCMediaSource.h", + "objc/api/peerconnection/RTCMediaStream.h", + "objc/api/peerconnection/RTCMediaStreamTrack.h", + "objc/api/peerconnection/RTCMetrics.h", + "objc/api/peerconnection/RTCMetricsSampleInfo.h", + 
"objc/api/peerconnection/RTCPeerConnection.h", + "objc/api/peerconnection/RTCPeerConnectionFactory.h", + "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h", + "objc/api/peerconnection/RTCRtcpParameters.h", + "objc/api/peerconnection/RTCRtpCapabilities.h", + "objc/api/peerconnection/RTCRtpCodecCapability.h", + "objc/api/peerconnection/RTCRtpCodecParameters.h", + "objc/api/peerconnection/RTCRtpEncodingParameters.h", + "objc/api/peerconnection/RTCRtpHeaderExtension.h", + "objc/api/peerconnection/RTCRtpParameters.h", + "objc/api/peerconnection/RTCRtpReceiver.h", + "objc/api/peerconnection/RTCRtpSender.h", + "objc/api/peerconnection/RTCRtpTransceiver.h", + "objc/api/peerconnection/RTCSessionDescription.h", + "objc/api/peerconnection/RTCSSLAdapter.h", + "objc/api/peerconnection/RTCStatisticsReport.h", + "objc/api/peerconnection/RTCTracing.h", + "objc/api/peerconnection/RTCVideoSource.h", + "objc/api/peerconnection/RTCVideoTrack.h", + "objc/api/video_codec/RTCVideoCodecConstants.h", + "objc/api/video_codec/RTCVideoDecoderAV1.h", + "objc/api/video_codec/RTCVideoDecoderVP8.h", + "objc/api/video_codec/RTCVideoDecoderVP9.h", + "objc/api/video_codec/RTCVideoEncoderAV1.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", + "objc/api/video_codec/RTCVideoEncoderVP8.h", + "objc/api/video_codec/RTCVideoEncoderVP9.h", + "objc/api/video_frame_buffer/RTCNativeI420Buffer.h", + "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h", + "objc/base/RTCAudioRenderer.h", "objc/base/RTCCodecSpecificInfo.h", "objc/base/RTCEncodedImage.h", "objc/base/RTCI420Buffer.h", @@ -1287,9 +1439,14 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", + "objc/components/audio/RTCAudioBuffer.h", + "objc/components/audio/RTCAudioCustomProcessingDelegate.h", "objc/components/audio/RTCAudioDevice.h", + "objc/components/audio/RTCAudioProcessingConfig.h", + "objc/components/audio/RTCAudioProcessingModule.h", 
"objc/components/audio/RTCAudioSession.h", "objc/components/audio/RTCAudioSessionConfiguration.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.h", "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCFileVideoCapturer.h", "objc/components/network/RTCNetworkMonitor.h", @@ -1303,56 +1460,13 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoDecoderFactoryH264.h", "objc/components/video_codec/RTCVideoDecoderH264.h", "objc/components/video_codec/RTCVideoEncoderFactoryH264.h", + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", "objc/components/video_codec/RTCVideoEncoderH264.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCCameraPreviewView.h", "objc/helpers/RTCDispatcher.h", + "objc/helpers/RTCYUVHelper.h", "objc/helpers/UIDevice+RTCDevice.h", - "objc/api/peerconnection/RTCAudioSource.h", - "objc/api/peerconnection/RTCAudioTrack.h", - "objc/api/peerconnection/RTCConfiguration.h", - "objc/api/peerconnection/RTCDataChannel.h", - "objc/api/peerconnection/RTCDataChannelConfiguration.h", - "objc/api/peerconnection/RTCFieldTrials.h", - "objc/api/peerconnection/RTCIceCandidate.h", - "objc/api/peerconnection/RTCIceCandidateErrorEvent.h", - "objc/api/peerconnection/RTCIceServer.h", - "objc/api/peerconnection/RTCLegacyStatsReport.h", - "objc/api/peerconnection/RTCMediaConstraints.h", - "objc/api/peerconnection/RTCMediaSource.h", - "objc/api/peerconnection/RTCMediaStream.h", - "objc/api/peerconnection/RTCMediaStreamTrack.h", - "objc/api/peerconnection/RTCMetrics.h", - "objc/api/peerconnection/RTCMetricsSampleInfo.h", - "objc/api/peerconnection/RTCPeerConnection.h", - "objc/api/peerconnection/RTCPeerConnectionFactory.h", - "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h", - "objc/api/peerconnection/RTCRtcpParameters.h", - "objc/api/peerconnection/RTCRtpCodecParameters.h", - "objc/api/peerconnection/RTCRtpEncodingParameters.h", - 
"objc/api/peerconnection/RTCRtpHeaderExtension.h", - "objc/api/peerconnection/RTCRtpParameters.h", - "objc/api/peerconnection/RTCRtpReceiver.h", - "objc/api/peerconnection/RTCRtpSender.h", - "objc/api/peerconnection/RTCRtpTransceiver.h", - "objc/api/peerconnection/RTCDtmfSender.h", - "objc/api/peerconnection/RTCSSLAdapter.h", - "objc/api/peerconnection/RTCSessionDescription.h", - "objc/api/peerconnection/RTCStatisticsReport.h", - "objc/api/peerconnection/RTCTracing.h", - "objc/api/peerconnection/RTCCertificate.h", - "objc/api/peerconnection/RTCCryptoOptions.h", - "objc/api/peerconnection/RTCVideoSource.h", - "objc/api/peerconnection/RTCVideoTrack.h", - "objc/api/video_codec/RTCVideoCodecConstants.h", - "objc/api/video_codec/RTCVideoDecoderVP8.h", - "objc/api/video_codec/RTCVideoDecoderVP9.h", - "objc/api/video_codec/RTCVideoDecoderAV1.h", - "objc/api/video_codec/RTCVideoEncoderVP8.h", - "objc/api/video_codec/RTCVideoEncoderVP9.h", - "objc/api/video_codec/RTCVideoEncoderAV1.h", - "objc/api/video_frame_buffer/RTCNativeI420Buffer.h", - "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h", ] if (!build_with_chromium) { @@ -1382,6 +1496,7 @@ if (is_ios || is_mac) { ":videocapture_objc", ":videocodec_objc", ":videotoolbox_objc", + ":darwin_privacy_info", ] if (rtc_ios_macos_use_opengl_rendering) { deps += [ ":opengl_ui_objc" ] @@ -1422,6 +1537,7 @@ if (is_ios || is_mac) { output_name = "WebRTC" sources = [ + "objc/api/peerconnection/RTCAudioDeviceModule.h", "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioTrack.h", "objc/api/peerconnection/RTCCertificate.h", @@ -1431,9 +1547,12 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCDataChannelConfiguration.h", "objc/api/peerconnection/RTCDtmfSender.h", "objc/api/peerconnection/RTCFieldTrials.h", + "objc/api/peerconnection/RTCFrameCryptor.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", "objc/api/peerconnection/RTCIceCandidate.h", 
"objc/api/peerconnection/RTCIceCandidateErrorEvent.h", "objc/api/peerconnection/RTCIceServer.h", + "objc/api/peerconnection/RTCIODevice.h", "objc/api/peerconnection/RTCLegacyStatsReport.h", "objc/api/peerconnection/RTCMediaConstraints.h", "objc/api/peerconnection/RTCMediaSource.h", @@ -1445,6 +1564,8 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCPeerConnectionFactory.h", "objc/api/peerconnection/RTCPeerConnectionFactoryOptions.h", "objc/api/peerconnection/RTCRtcpParameters.h", + "objc/api/peerconnection/RTCRtpCapabilities.h", + "objc/api/peerconnection/RTCRtpCodecCapability.h", "objc/api/peerconnection/RTCRtpCodecParameters.h", "objc/api/peerconnection/RTCRtpEncodingParameters.h", "objc/api/peerconnection/RTCRtpHeaderExtension.h", @@ -1452,8 +1573,8 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCRtpReceiver.h", "objc/api/peerconnection/RTCRtpSender.h", "objc/api/peerconnection/RTCRtpTransceiver.h", - "objc/api/peerconnection/RTCSSLAdapter.h", "objc/api/peerconnection/RTCSessionDescription.h", + "objc/api/peerconnection/RTCSSLAdapter.h", "objc/api/peerconnection/RTCStatisticsReport.h", "objc/api/peerconnection/RTCTracing.h", "objc/api/peerconnection/RTCVideoSource.h", @@ -1462,10 +1583,12 @@ if (is_ios || is_mac) { "objc/api/video_codec/RTCVideoDecoderVP8.h", "objc/api/video_codec/RTCVideoDecoderVP9.h", "objc/api/video_codec/RTCVideoEncoderAV1.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", "objc/api/video_codec/RTCVideoEncoderVP8.h", "objc/api/video_codec/RTCVideoEncoderVP9.h", "objc/api/video_frame_buffer/RTCNativeI420Buffer.h", "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h", + "objc/base/RTCAudioRenderer.h", "objc/base/RTCCodecSpecificInfo.h", "objc/base/RTCEncodedImage.h", "objc/base/RTCI420Buffer.h", @@ -1486,9 +1609,18 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", + "objc/components/audio/RTCAudioBuffer.h", + 
"objc/components/audio/RTCAudioCustomProcessingDelegate.h", + "objc/components/audio/RTCAudioProcessingConfig.h", + "objc/components/audio/RTCAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.h", "objc/components/capturer/RTCCameraVideoCapturer.h", + "objc/components/capturer/RTCDesktopCapturer.h", + "objc/components/capturer/RTCDesktopMediaList.h", + "objc/components/capturer/RTCDesktopSource.h", "objc/components/capturer/RTCFileVideoCapturer.h", "objc/components/renderer/metal/RTCMTLNSVideoView.h", + "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/opengl/RTCNSGLVideoView.h", "objc/components/renderer/opengl/RTCVideoViewShading.h", "objc/components/video_codec/RTCCodecSpecificInfoH264.h", @@ -1498,9 +1630,11 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoDecoderFactoryH264.h", "objc/components/video_codec/RTCVideoDecoderH264.h", "objc/components/video_codec/RTCVideoEncoderFactoryH264.h", + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", "objc/components/video_codec/RTCVideoEncoderH264.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCDispatcher.h", + "objc/helpers/RTCYUVHelper.h", ] if (!build_with_chromium) { sources += [ @@ -1517,8 +1651,10 @@ if (is_ios || is_mac) { ":opengl_ui_objc", ":peerconnectionfactory_base_objc", ":videocapture_objc", + ":desktopcapture_objc", ":videocodec_objc", ":videotoolbox_objc", + ":darwin_privacy_info", ] if (!build_with_chromium) { deps += [ @@ -1581,10 +1717,10 @@ if (is_ios || is_mac) { "objc/native/api/video_decoder_factory.mm", "objc/native/api/video_encoder_factory.h", "objc/native/api/video_encoder_factory.mm", - "objc/native/api/video_frame.h", - "objc/native/api/video_frame.mm", "objc/native/api/video_frame_buffer.h", "objc/native/api/video_frame_buffer.mm", + "objc/native/api/video_frame.h", + "objc/native/api/video_frame.mm", "objc/native/api/video_renderer.h", "objc/native/api/video_renderer.mm", 
] diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index 5bf25ffc6d..63a3f477d3 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -255,12 +255,18 @@ if (is_android) { "api/org/webrtc/AudioProcessingFactory.java", "api/org/webrtc/AudioSource.java", "api/org/webrtc/AudioTrack.java", + "api/org/webrtc/AudioTrackSink.java", "api/org/webrtc/CallSessionFileRotatingLogSink.java", "api/org/webrtc/CandidatePairChangeEvent.java", "api/org/webrtc/CryptoOptions.java", "api/org/webrtc/DataChannel.java", "api/org/webrtc/DtmfSender.java", + "api/org/webrtc/ExternalAudioProcessingFactory.java", "api/org/webrtc/FecControllerFactoryFactoryInterface.java", + "api/org/webrtc/FrameCryptor.java", + "api/org/webrtc/FrameCryptorAlgorithm.java", + "api/org/webrtc/FrameCryptorFactory.java", + "api/org/webrtc/FrameCryptorKeyProvider.java", "api/org/webrtc/FrameDecryptor.java", "api/org/webrtc/FrameEncryptor.java", "api/org/webrtc/IceCandidate.java", @@ -287,6 +293,7 @@ if (is_android) { "api/org/webrtc/RTCStatsCollectorCallback.java", "api/org/webrtc/RTCStatsReport.java", "api/org/webrtc/RtcCertificatePem.java", + "api/org/webrtc/RtpCapabilities.java", "api/org/webrtc/RtpParameters.java", "api/org/webrtc/RtpReceiver.java", "api/org/webrtc/RtpSender.java", @@ -359,6 +366,7 @@ if (is_android) { sources = [ "api/org/webrtc/DefaultVideoDecoderFactory.java", "api/org/webrtc/DefaultVideoEncoderFactory.java", + "api/org/webrtc/WrappedVideoDecoderFactory.java", ] deps = [ @@ -522,6 +530,8 @@ if (is_android) { sources = [ "api/org/webrtc/SoftwareVideoDecoderFactory.java", "api/org/webrtc/SoftwareVideoEncoderFactory.java", + "api/org/webrtc/SimulcastVideoEncoder.java", + "api/org/webrtc/SimulcastVideoEncoderFactory.java", ] deps = [ @@ -708,6 +718,8 @@ if (current_os == "linux" || is_android) { "src/jni/pc/add_ice_candidate_observer.cc", "src/jni/pc/add_ice_candidate_observer.h", "src/jni/pc/android_network_monitor.h", + "src/jni/pc/audio_sink.cc", + "src/jni/pc/audio_sink.h", 
"src/jni/pc/audio_track.cc", "src/jni/pc/call_session_file_rotating_log_sink.cc", "src/jni/pc/crypto_options.cc", @@ -715,6 +727,15 @@ if (current_os == "linux" || is_android) { "src/jni/pc/data_channel.cc", "src/jni/pc/data_channel.h", "src/jni/pc/dtmf_sender.cc", + "src/jni/pc/external_audio_processing_factory.cc", + "src/jni/pc/external_audio_processing_factory.h", + "src/jni/pc/external_audio_processing_interface.h", + "src/jni/pc/external_audio_processor.cc", + "src/jni/pc/external_audio_processor.h", + "src/jni/pc/frame_cryptor.cc", + "src/jni/pc/frame_cryptor.h", + "src/jni/pc/frame_cryptor_key_provider.cc", + "src/jni/pc/frame_cryptor_key_provider.h", "src/jni/pc/ice_candidate.cc", "src/jni/pc/ice_candidate.h", "src/jni/pc/media_constraints.cc", @@ -734,6 +755,8 @@ if (current_os == "linux" || is_android) { "src/jni/pc/rtc_certificate.h", "src/jni/pc/rtc_stats_collector_callback_wrapper.cc", "src/jni/pc/rtc_stats_collector_callback_wrapper.h", + "src/jni/pc/rtp_capabilities.cc", + "src/jni/pc/rtp_capabilities.h", "src/jni/pc/rtp_parameters.cc", "src/jni/pc/rtp_parameters.h", "src/jni/pc/rtp_receiver.cc", @@ -770,6 +793,7 @@ if (current_os == "linux" || is_android) { "../../api:rtp_parameters", "../../api:rtp_sender_interface", "../../api:turn_customizer", + "../../api/crypto:frame_crypto_transformer", "../../api/crypto:options", "../../api/rtc_event_log:rtc_event_log_factory", "../../api/task_queue:default_task_queue_factory", @@ -884,6 +908,21 @@ if (current_os == "linux" || is_android) { ] } + rtc_library("simulcast_jni") { + visibility = [ "*" ] + allow_poison = [ "software_video_codecs" ] + sources = [ + "src/jni/simulcast_video_encoder.cc", + "src/jni/simulcast_video_encoder.h" + ] + deps = [ + ":base_jni", + ":video_jni", + ":native_api_codecs", + "../../media:rtc_simulcast_encoder_adapter" + ] + } + rtc_library("swcodecs_jni") { visibility = [ "*" ] allow_poison = [ "software_video_codecs" ] @@ -897,6 +936,7 @@ if (current_os == "linux" || 
is_android) { ":libvpx_vp8_jni", ":libvpx_vp9_jni", ":native_api_jni", + ":simulcast_jni", ":video_jni", "../../api/video_codecs:builtin_video_decoder_factory", "../../api/video_codecs:builtin_video_encoder_factory", @@ -1374,11 +1414,16 @@ if (current_os == "linux" || is_android) { sources = [ "api/org/webrtc/AddIceObserver.java", "api/org/webrtc/AudioTrack.java", + "api/org/webrtc/AudioTrackSink.java", "api/org/webrtc/CallSessionFileRotatingLogSink.java", "api/org/webrtc/CandidatePairChangeEvent.java", "api/org/webrtc/CryptoOptions.java", "api/org/webrtc/DataChannel.java", "api/org/webrtc/DtmfSender.java", + "api/org/webrtc/ExternalAudioProcessingFactory.java", + "api/org/webrtc/FrameCryptor.java", + "api/org/webrtc/FrameCryptorFactory.java", + "api/org/webrtc/FrameCryptorKeyProvider.java", "api/org/webrtc/IceCandidate.java", "api/org/webrtc/IceCandidateErrorEvent.java", "api/org/webrtc/MediaConstraints.java", @@ -1391,6 +1436,7 @@ if (current_os == "linux" || is_android) { "api/org/webrtc/RTCStatsCollectorCallback.java", "api/org/webrtc/RTCStatsReport.java", "api/org/webrtc/RtcCertificatePem.java", + "api/org/webrtc/RtpCapabilities.java", "api/org/webrtc/RtpParameters.java", "api/org/webrtc/RtpReceiver.java", "api/org/webrtc/RtpSender.java", diff --git a/sdk/android/api/org/webrtc/AudioTrack.java b/sdk/android/api/org/webrtc/AudioTrack.java index ca745db634..b30e46cebc 100644 --- a/sdk/android/api/org/webrtc/AudioTrack.java +++ b/sdk/android/api/org/webrtc/AudioTrack.java @@ -10,8 +10,12 @@ package org.webrtc; +import java.util.IdentityHashMap; + /** Java wrapper for a C++ AudioTrackInterface */ public class AudioTrack extends MediaStreamTrack { + private final IdentityHashMap sinks = new IdentityHashMap(); + public AudioTrack(long nativeTrack) { super(nativeTrack); } @@ -23,10 +27,54 @@ public void setVolume(double volume) { nativeSetVolume(getNativeAudioTrack(), volume); } + /** + * Adds an AudioTrackSink to the track. 
This callback is only + * called for remote audio tracks. + * + * Repeated addSink calls will not add the sink multiple times. + */ + public void addSink(AudioTrackSink sink) { + if (sink == null) { + throw new IllegalArgumentException("The AudioTrackSink is not allowed to be null"); + } + if (!sinks.containsKey(sink)) { + final long nativeSink = nativeWrapSink(sink); + sinks.put(sink, nativeSink); + nativeAddSink(getNativeMediaStreamTrack(), nativeSink); + } + } + + /** + * Removes an AudioTrackSink from the track. + * + * If the AudioTrackSink was not attached to the track, this is a no-op. + */ + public void removeSink(AudioTrackSink sink) { + final Long nativeSink = sinks.remove(sink); + if (nativeSink != null) { + nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink); + nativeFreeSink(nativeSink); + } + } + + @Override + public void dispose() { + for (long nativeSink : sinks.values()) { + nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink); + nativeFreeSink(nativeSink); + } + sinks.clear(); + super.dispose(); + } + /** Returns a pointer to webrtc::AudioTrackInterface. */ long getNativeAudioTrack() { return getNativeMediaStreamTrack(); } private static native void nativeSetVolume(long track, double volume); + private static native void nativeAddSink(long track, long nativeSink); + private static native void nativeRemoveSink(long track, long nativeSink); + private static native long nativeWrapSink(AudioTrackSink sink); + private static native void nativeFreeSink(long sink); } diff --git a/sdk/android/api/org/webrtc/AudioTrackSink.java b/sdk/android/api/org/webrtc/AudioTrackSink.java new file mode 100644 index 0000000000..eca390f82c --- /dev/null +++ b/sdk/android/api/org/webrtc/AudioTrackSink.java @@ -0,0 +1,27 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. 
An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +/** + * Java version of rtc::AudioTrackSinkInterface. + */ +public interface AudioTrackSink { + /** + * Implementations should copy the audio data into a local copy if they wish + * to use the data after this function returns. + */ + @CalledByNative + void onData(ByteBuffer audioData, int bitsPerSample, int sampleRate, + int numberOfChannels, int numberOfFrames, + long absoluteCaptureTimestampMs); +} diff --git a/sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java b/sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java new file mode 100644 index 0000000000..7425d2af57 --- /dev/null +++ b/sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java @@ -0,0 +1,144 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +import androidx.annotation.Nullable; +import org.webrtc.AudioProcessingFactory; + + +public class ExternalAudioProcessingFactory implements AudioProcessingFactory { + + /** + * Interface for external audio processing. + */ + public static interface AudioProcessing { + /** + * Called when the processor should be initialized with a new sample rate and + * number of channels. 
+ */ + @CalledByNative("AudioProcessing") + void initialize(int sampleRateHz, int numChannels); + /** Called when the processor should be reset with a new sample rate. */ + @CalledByNative("AudioProcessing") + void reset(int newRate); + /** + * Processes the given capture or render signal. NOTE: `buffer.data` will be + * freed once this function returns so callers who want to use the data + * asynchronously must make sure to copy it first. + */ + @CalledByNative("AudioProcessing") + void process(int numBands, int numFrames, ByteBuffer buffer); + } + + private long apmPtr; + private long capturePostProcessingPtr; + private long renderPreProcessingPtr; + + public ExternalAudioProcessingFactory() { + apmPtr = nativeGetDefaultApm(); + capturePostProcessingPtr = 0; + renderPreProcessingPtr = 0; + } + + @Override + public long createNative() { + if(apmPtr == 0) { + apmPtr = nativeGetDefaultApm(); + } + return apmPtr; + } + + /** + * Sets the capture post processing module. + * This module is applied to the audio signal after capture and before sending + * to the audio encoder. + */ + public void setCapturePostProcessing(@Nullable AudioProcessing processing) { + checkExternalAudioProcessorExists(); + long newPtr = nativeSetCapturePostProcessing(processing); + if (capturePostProcessingPtr != 0) { + JniCommon.nativeReleaseRef(capturePostProcessingPtr); + capturePostProcessingPtr = 0; + } + capturePostProcessingPtr = newPtr; + } + + /** + * Sets the render pre processing module. + * This module is applied to the audio signal after receiving from the audio + * decoder and before rendering. 
+ */ + public void setRenderPreProcessing(@Nullable AudioProcessing processing) { + checkExternalAudioProcessorExists(); + long newPtr = nativeSetRenderPreProcessing(processing); + if (renderPreProcessingPtr != 0) { + JniCommon.nativeReleaseRef(renderPreProcessingPtr); + renderPreProcessingPtr = 0; + } + renderPreProcessingPtr = newPtr; + } + + /** + * Sets the bypass flag for the capture post processing module. + * If true, the registered audio processing will be bypassed. + */ + public void setBypassFlagForCapturePost( boolean bypass) { + checkExternalAudioProcessorExists(); + nativeSetBypassFlagForCapturePost(bypass); + } + + /** + * Sets the bypass flag for the render pre processing module. + * If true, the registered audio processing will be bypassed. + */ + public void setBypassFlagForRenderPre( boolean bypass) { + checkExternalAudioProcessorExists(); + nativeSetBypassFlagForRenderPre(bypass); + } + + /** + * Destroys the ExternalAudioProcessor. + */ + public void destroy() { + checkExternalAudioProcessorExists(); + if (renderPreProcessingPtr != 0) { + JniCommon.nativeReleaseRef(renderPreProcessingPtr); + renderPreProcessingPtr = 0; + } + if (capturePostProcessingPtr != 0) { + JniCommon.nativeReleaseRef(capturePostProcessingPtr); + capturePostProcessingPtr = 0; + } + nativeDestroy(); + apmPtr = 0; + } + + private void checkExternalAudioProcessorExists() { + if (apmPtr == 0) { + throw new IllegalStateException("ExternalAudioProcessor has been disposed."); + } + } + + private static native long nativeGetDefaultApm(); + private static native long nativeSetCapturePostProcessing(AudioProcessing processing); + private static native long nativeSetRenderPreProcessing(AudioProcessing processing); + private static native void nativeSetBypassFlagForCapturePost(boolean bypass); + private static native void nativeSetBypassFlagForRenderPre(boolean bypass); + private static native void nativeDestroy(); +} diff --git a/sdk/android/api/org/webrtc/FrameCryptor.java 
b/sdk/android/api/org/webrtc/FrameCryptor.java
new file mode 100644
index 0000000000..d633e05005
--- /dev/null
+++ b/sdk/android/api/org/webrtc/FrameCryptor.java
@@ -0,0 +1,108 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+public class FrameCryptor {
+  public enum FrameCryptionState {
+    NEW,
+    OK,
+    ENCRYPTIONFAILED,
+    DECRYPTIONFAILED,
+    MISSINGKEY,
+    KEYRATCHETED,
+    INTERNALERROR;
+
+    @CalledByNative("FrameCryptionState")
+    static FrameCryptionState fromNativeIndex(int nativeIndex) {
+      return values()[nativeIndex];
+    }
+  }
+
+  public static interface Observer {
+    @CalledByNative("Observer")
+    void onFrameCryptionStateChanged(String participantId, FrameCryptionState newState);
+  }
+
+  private long nativeFrameCryptor;
+  private long observerPtr;
+
+  public long getNativeFrameCryptor() {
+    return nativeFrameCryptor;
+  }
+
+  @CalledByNative
+  public FrameCryptor(long nativeFrameCryptor) {
+    this.nativeFrameCryptor = nativeFrameCryptor;
+    this.observerPtr = 0;
+  }
+
+  public void setEnabled(boolean enabled) {
+    checkFrameCryptorExists();
+    nativeSetEnabled(nativeFrameCryptor, enabled);
+  }
+
+  public boolean isEnabled() {
+    checkFrameCryptorExists();
+    return nativeIsEnabled(nativeFrameCryptor);
+  }
+
+  public int getKeyIndex() {
+    checkFrameCryptorExists();
+    return nativeGetKeyIndex(nativeFrameCryptor);
+  }
+
+  public void setKeyIndex(int index) {
+    checkFrameCryptorExists();
+    nativeSetKeyIndex(nativeFrameCryptor, index);
+  }
+
+  public void dispose() {
+    checkFrameCryptorExists();
+    nativeUnSetObserver(nativeFrameCryptor);
+    JniCommon.nativeReleaseRef(nativeFrameCryptor);
+    nativeFrameCryptor = 0;
+    if (observerPtr != 0) {
+      JniCommon.nativeReleaseRef(observerPtr);
+      observerPtr = 0;
+    }
+  }
+
+  public void setObserver(@Nullable Observer observer) {
+    checkFrameCryptorExists();
+    long newPtr = nativeSetObserver(nativeFrameCryptor, observer);
+    if (observerPtr != 0) {
+      JniCommon.nativeReleaseRef(observerPtr);
+      observerPtr = 0;
+    }
+    observerPtr = newPtr; // FIX: was reversed (newPtr = observerPtr), leaking the native observer
+  }
+
+  private void checkFrameCryptorExists() {
+    if (nativeFrameCryptor == 0) {
+      throw new IllegalStateException("FrameCryptor has been disposed.");
+    }
+  }
+
+  private static native void nativeSetEnabled(long frameCryptorPointer, boolean enabled);
+  private static native boolean nativeIsEnabled(long frameCryptorPointer);
+  private static native void nativeSetKeyIndex(long frameCryptorPointer, int index);
+  private static native int nativeGetKeyIndex(long frameCryptorPointer);
+  private static native long nativeSetObserver(long frameCryptorPointer, Observer observer);
+  private static native void nativeUnSetObserver(long frameCryptorPointer);
+}
diff --git a/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java b/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java
new file mode 100644
index 0000000000..d0d4dc8374
--- /dev/null
+++ b/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +public enum FrameCryptorAlgorithm { + AES_GCM, + AES_CBC, +} \ No newline at end of file diff --git a/sdk/android/api/org/webrtc/FrameCryptorFactory.java b/sdk/android/api/org/webrtc/FrameCryptorFactory.java new file mode 100644 index 0000000000..865a4b78bb --- /dev/null +++ b/sdk/android/api/org/webrtc/FrameCryptorFactory.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.webrtc; + +public class FrameCryptorFactory { + public static FrameCryptorKeyProvider createFrameCryptorKeyProvider( + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance, int keyRingSize, boolean discardFrameWhenCryptorNotReady) { + return nativeCreateFrameCryptorKeyProvider(sharedKey, ratchetSalt, ratchetWindowSize, uncryptedMagicBytes, failureTolerance, keyRingSize, discardFrameWhenCryptorNotReady); + } + + public static FrameCryptor createFrameCryptorForRtpSender(PeerConnectionFactory factory, RtpSender rtpSender, + String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { + return nativeCreateFrameCryptorForRtpSender(factory.getNativeOwnedFactoryAndThreads(),rtpSender.getNativeRtpSender(), participantId, + algorithm.ordinal(), keyProvider.getNativeKeyProvider()); + } + + public static FrameCryptor createFrameCryptorForRtpReceiver(PeerConnectionFactory factory, RtpReceiver rtpReceiver, + String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { + return nativeCreateFrameCryptorForRtpReceiver(factory.getNativeOwnedFactoryAndThreads(), rtpReceiver.getNativeRtpReceiver(), participantId, + algorithm.ordinal(), keyProvider.getNativeKeyProvider()); + } + + private static native FrameCryptor nativeCreateFrameCryptorForRtpSender(long factory, + long rtpSender, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); + private static native FrameCryptor nativeCreateFrameCryptorForRtpReceiver(long factory, + long rtpReceiver, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); + + private static native FrameCryptorKeyProvider nativeCreateFrameCryptorKeyProvider( + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance, int keyRingSize, boolean discardFrameWhenCryptorNotReady); +} diff --git 
a/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java b/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java new file mode 100644 index 0000000000..6ab0cdddf5 --- /dev/null +++ b/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java @@ -0,0 +1,93 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import java.util.ArrayList; + +public class FrameCryptorKeyProvider { + private long nativeKeyProvider; + + @CalledByNative + public FrameCryptorKeyProvider(long nativeKeyProvider) { + this.nativeKeyProvider = nativeKeyProvider; + } + + public long getNativeKeyProvider() { + return nativeKeyProvider; + } + + public boolean setSharedKey(int index, byte[] key) { + checkKeyProviderExists(); + return nativeSetSharedKey(nativeKeyProvider,index, key); + } + + public byte[] ratchetSharedKey(int index) { + checkKeyProviderExists(); + return nativeRatchetSharedKey(nativeKeyProvider, index); + } + + public byte[] exportSharedKey(int index) { + checkKeyProviderExists(); + return nativeExportSharedKey(nativeKeyProvider, index); + } + + public boolean setKey(String participantId, int index, byte[] key) { + checkKeyProviderExists(); + return nativeSetKey(nativeKeyProvider, participantId, index, key); + } + + public byte[] ratchetKey(String participantId, int index) { + checkKeyProviderExists(); + return nativeRatchetKey(nativeKeyProvider, participantId, index); + } + + public byte[] exportKey(String 
participantId, int index) { + checkKeyProviderExists(); + return nativeExportKey(nativeKeyProvider, participantId, index); + } + + public void setSifTrailer(byte[] sifTrailer) { + checkKeyProviderExists(); + nativeSetSifTrailer(nativeKeyProvider, sifTrailer); + } + + public void dispose() { + checkKeyProviderExists(); + JniCommon.nativeReleaseRef(nativeKeyProvider); + nativeKeyProvider = 0; + } + + private void checkKeyProviderExists() { + if (nativeKeyProvider == 0) { + throw new IllegalStateException("FrameCryptorKeyProvider has been disposed."); + } + } + private static native boolean nativeSetSharedKey( + long keyProviderPointer, int index, byte[] key); + private static native byte[] nativeRatchetSharedKey( + long keyProviderPointer, int index); + private static native byte[] nativeExportSharedKey( + long keyProviderPointer, int index); + private static native boolean nativeSetKey( + long keyProviderPointer, String participantId, int index, byte[] key); + private static native byte[] nativeRatchetKey( + long keyProviderPointer, String participantId, int index); + private static native byte[] nativeExportKey( + long keyProviderPointer, String participantId, int index); + private static native void nativeSetSifTrailer( + long keyProviderPointer, byte[] sifTrailer); +} \ No newline at end of file diff --git a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java index d43fc27fa0..05c39a4b38 100644 --- a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java +++ b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java @@ -143,11 +143,11 @@ public VideoCodecInfo[] getSupportedCodecs() { // supported by the decoder. 
if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true), new ArrayList<>())); } supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false), new ArrayList<>())); } } diff --git a/sdk/android/api/org/webrtc/LibaomAv1Encoder.java b/sdk/android/api/org/webrtc/LibaomAv1Encoder.java index 569a719f44..1bb5ff9c5e 100644 --- a/sdk/android/api/org/webrtc/LibaomAv1Encoder.java +++ b/sdk/android/api/org/webrtc/LibaomAv1Encoder.java @@ -9,6 +9,7 @@ */ package org.webrtc; +import java.util.List; public class LibaomAv1Encoder extends WrappedNativeVideoEncoder { @Override @@ -22,4 +23,10 @@ public long createNativeVideoEncoder() { public boolean isHardwareEncoder() { return false; } + + static List scalabilityModes() { + return nativeGetSupportedScalabilityModes(); + } + + static native List nativeGetSupportedScalabilityModes(); } diff --git a/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java b/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java index 1211ae93fb..c1cc86a2f2 100644 --- a/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java +++ b/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java @@ -9,6 +9,7 @@ */ package org.webrtc; +import java.util.List; public class LibvpxVp9Encoder extends WrappedNativeVideoEncoder { @Override @@ -24,4 +25,10 @@ public boolean isHardwareEncoder() { } static native boolean nativeIsSupported(); + + static List scalabilityModes() { + return nativeGetSupportedScalabilityModes(); + } + + static native List nativeGetSupportedScalabilityModes(); } diff --git a/sdk/android/api/org/webrtc/PeerConnection.java b/sdk/android/api/org/webrtc/PeerConnection.java index e334a3fd9d..e87c9a2e27 100644 --- 
a/sdk/android/api/org/webrtc/PeerConnection.java +++ b/sdk/android/api/org/webrtc/PeerConnection.java @@ -572,6 +572,17 @@ public static class RTCConfiguration { * See: https://www.chromestatus.com/feature/6269234631933952 */ public boolean offerExtmapAllowMixed; + + /** + * When this flag is set, ports not bound to any specific network interface + * will be used, in addition to normal ports bound to the enumerated + * interfaces. Without this flag, these "any address" ports would only be + * used when network enumeration fails or is disabled. But under certain + * conditions, these ports may succeed where others fail, so they may allow + * the application to work in a wider variety of environments, at the expense + * of having to allocate additional candidates. + */ + public boolean enableIceGatheringOnAnyAddressPorts; // TODO(deadbeef): Instead of duplicating the defaults here, we should do // something to pick up the defaults from C++. The Objective-C equivalent @@ -616,6 +627,7 @@ public RTCConfiguration(List iceServers) { allowCodecSwitching = null; enableImplicitRollback = false; offerExtmapAllowMixed = true; + enableIceGatheringOnAnyAddressPorts = false; } @CalledByNative("RTCConfiguration") @@ -836,6 +848,11 @@ boolean getEnableImplicitRollback() { boolean getOfferExtmapAllowMixed() { return offerExtmapAllowMixed; } + + @CalledByNative("RTCConfiguration") + boolean getEnableIceGatheringOnAnyAddressPorts() { + return enableIceGatheringOnAnyAddressPorts; + } }; private final List localStreams = new ArrayList<>(); diff --git a/sdk/android/api/org/webrtc/PeerConnectionFactory.java b/sdk/android/api/org/webrtc/PeerConnectionFactory.java index ca67b3afc1..1817d41beb 100644 --- a/sdk/android/api/org/webrtc/PeerConnectionFactory.java +++ b/sdk/android/api/org/webrtc/PeerConnectionFactory.java @@ -18,6 +18,7 @@ import org.webrtc.PeerConnection; import org.webrtc.audio.AudioDeviceModule; import org.webrtc.audio.JavaAudioDeviceModule; +import 
org.webrtc.RtpCapabilities; /** * Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to @@ -134,13 +135,13 @@ public static class Options { // Keep in sync with webrtc/rtc_base/network.h! // // These bit fields are defined for `networkIgnoreMask` below. - static final int ADAPTER_TYPE_UNKNOWN = 0; - static final int ADAPTER_TYPE_ETHERNET = 1 << 0; - static final int ADAPTER_TYPE_WIFI = 1 << 1; - static final int ADAPTER_TYPE_CELLULAR = 1 << 2; - static final int ADAPTER_TYPE_VPN = 1 << 3; - static final int ADAPTER_TYPE_LOOPBACK = 1 << 4; - static final int ADAPTER_TYPE_ANY = 1 << 5; + public static final int ADAPTER_TYPE_UNKNOWN = 0; + public static final int ADAPTER_TYPE_ETHERNET = 1 << 0; + public static final int ADAPTER_TYPE_WIFI = 1 << 1; + public static final int ADAPTER_TYPE_CELLULAR = 1 << 2; + public static final int ADAPTER_TYPE_VPN = 1 << 3; + public static final int ADAPTER_TYPE_LOOPBACK = 1 << 4; + public static final int ADAPTER_TYPE_ANY = 1 << 5; public int networkIgnoreMask; public boolean disableEncryption; @@ -471,6 +472,16 @@ public AudioTrack createAudioTrack(String id, AudioSource source) { return new AudioTrack(nativeCreateAudioTrack(nativeFactory, id, source.getNativeAudioSource())); } + public RtpCapabilities getRtpReceiverCapabilities(MediaStreamTrack.MediaType mediaType) { + checkPeerConnectionFactoryExists(); + return nativeGetRtpReceiverCapabilities(nativeFactory, mediaType); + } + + public RtpCapabilities getRtpSenderCapabilities(MediaStreamTrack.MediaType mediaType) { + checkPeerConnectionFactoryExists(); + return nativeGetRtpSenderCapabilities(nativeFactory, mediaType); + } + // Starts recording an AEC dump. Ownership of the file is transfered to the // native code. If an AEC dump is already in progress, it will be stopped and // a new one will start using the provided file. 
@@ -615,4 +626,6 @@ private static native boolean nativeStartAecDump( private static native void nativeInjectLoggable(JNILogging jniLogging, int severity); private static native void nativeDeleteLoggable(); private static native void nativePrintStackTrace(int tid); + private static native RtpCapabilities nativeGetRtpSenderCapabilities(long factory, MediaStreamTrack.MediaType mediaType); + private static native RtpCapabilities nativeGetRtpReceiverCapabilities(long factory, MediaStreamTrack.MediaType mediaType); } diff --git a/sdk/android/api/org/webrtc/RtpCapabilities.java b/sdk/android/api/org/webrtc/RtpCapabilities.java new file mode 100644 index 0000000000..612acb8cf6 --- /dev/null +++ b/sdk/android/api/org/webrtc/RtpCapabilities.java @@ -0,0 +1,131 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.List; +import java.util.Map; +import org.webrtc.MediaStreamTrack; + +public class RtpCapabilities { + public static class CodecCapability { + public int preferredPayloadType; + // Name used to identify the codec. Equivalent to MIME subtype. + public String name; + // The media type of this codec. Equivalent to MIME top-level type. + public MediaStreamTrack.MediaType kind; + // Clock rate in Hertz. + public Integer clockRate; + // The number of audio channels used. Set to null for video codecs. 
+ public Integer numChannels; + // The "format specific parameters" field from the "a=fmtp" line in the SDP + public Map parameters; + // The MIME type of the codec. This is a convenience field. + public String mimeType; + + public CodecCapability() {} + + @CalledByNative("CodecCapability") + CodecCapability(int preferredPayloadType, String name, MediaStreamTrack.MediaType kind, + Integer clockRate, Integer numChannels, String mimeType, Map parameters) { + this.preferredPayloadType = preferredPayloadType; + this.name = name; + this.kind = kind; + this.clockRate = clockRate; + this.numChannels = numChannels; + this.parameters = parameters; + this.mimeType = mimeType; + } + + @CalledByNative("CodecCapability") + int getPreferredPayloadType() { + return preferredPayloadType; + } + + @CalledByNative("CodecCapability") + String getName() { + return name; + } + + @CalledByNative("CodecCapability") + MediaStreamTrack.MediaType getKind() { + return kind; + } + + @CalledByNative("CodecCapability") + Integer getClockRate() { + return clockRate; + } + + @CalledByNative("CodecCapability") + Integer getNumChannels() { + return numChannels; + } + + @CalledByNative("CodecCapability") + Map getParameters() { + return parameters; + } + } + + public static class HeaderExtensionCapability { + private final String uri; + private final int preferredId; + private final boolean preferredEncrypted; + + @CalledByNative("HeaderExtensionCapability") + HeaderExtensionCapability(String uri, int preferredId, boolean preferredEncrypted) { + this.uri = uri; + this.preferredId = preferredId; + this.preferredEncrypted = preferredEncrypted; + } + + @CalledByNative("HeaderExtensionCapability") + public String getUri() { + return uri; + } + + @CalledByNative("HeaderExtensionCapability") + public int getPreferredId() { + return preferredId; + } + + @CalledByNative("HeaderExtensionCapability") + public boolean getPreferredEncrypted() { + return preferredEncrypted; + } + } + + public List codecs; + 
public List headerExtensions; + + @CalledByNative + RtpCapabilities(List codecs, List headerExtensions) { + this.headerExtensions = headerExtensions; + this.codecs = codecs; + } + + @CalledByNative + public List getHeaderExtensions() { + return headerExtensions; + } + + @CalledByNative + List getCodecs() { + return codecs; + } +} \ No newline at end of file diff --git a/sdk/android/api/org/webrtc/RtpParameters.java b/sdk/android/api/org/webrtc/RtpParameters.java index 9ca8311610..4e3f106785 100644 --- a/sdk/android/api/org/webrtc/RtpParameters.java +++ b/sdk/android/api/org/webrtc/RtpParameters.java @@ -76,6 +76,8 @@ public static class Encoding { // If non-null, scale the width and height down by this factor for video. If null, // implementation default scaling factor will be used. @Nullable public Double scaleResolutionDownBy; + // Scalability modes are used to represent simulcast and SVC layers. + @Nullable public String scalabilityMode; // SSRC to be used by this encoding. // Can't be changed between getParameters/setParameters. 
public Long ssrc; @@ -93,8 +95,8 @@ public Encoding(String rid, boolean active, Double scaleResolutionDownBy) { @CalledByNative("Encoding") Encoding(String rid, boolean active, double bitratePriority, @Priority int networkPriority, Integer maxBitrateBps, Integer minBitrateBps, Integer maxFramerate, - Integer numTemporalLayers, Double scaleResolutionDownBy, Long ssrc, - boolean adaptiveAudioPacketTime) { + Integer numTemporalLayers, Double scaleResolutionDownBy, String scalabilityMode, + Long ssrc, boolean adaptiveAudioPacketTime) { this.rid = rid; this.active = active; this.bitratePriority = bitratePriority; @@ -104,6 +106,7 @@ public Encoding(String rid, boolean active, Double scaleResolutionDownBy) { this.maxFramerate = maxFramerate; this.numTemporalLayers = numTemporalLayers; this.scaleResolutionDownBy = scaleResolutionDownBy; + this.scalabilityMode = scalabilityMode; this.ssrc = ssrc; this.adaptiveAudioPacketTime = adaptiveAudioPacketTime; } @@ -160,6 +163,12 @@ Double getScaleResolutionDownBy() { return scaleResolutionDownBy; } + @Nullable + @CalledByNative("Encoding") + String getScalabilityMode() { + return scalabilityMode; + } + @CalledByNative("Encoding") Long getSsrc() { return ssrc; diff --git a/sdk/android/api/org/webrtc/RtpTransceiver.java b/sdk/android/api/org/webrtc/RtpTransceiver.java index 1102bd7eb1..b60ac6450a 100644 --- a/sdk/android/api/org/webrtc/RtpTransceiver.java +++ b/sdk/android/api/org/webrtc/RtpTransceiver.java @@ -215,6 +215,11 @@ public void stop() { nativeStopInternal(nativeRtpTransceiver); } + public void setCodecPreferences(List codecs) { + checkRtpTransceiverExists(); + nativeSetCodecPreferences(nativeRtpTransceiver, codecs); + } + /** * The StopInternal method stops the RtpTransceiver, like Stop, but goes * immediately to Stopped state. 
@@ -263,4 +268,5 @@ private void checkRtpTransceiverExists() { private static native void nativeStopStandard(long rtpTransceiver); private static native boolean nativeSetDirection( long rtpTransceiver, RtpTransceiverDirection rtpTransceiverDirection); + private static native void nativeSetCodecPreferences(long rtpTransceiver, List codecs); } diff --git a/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java b/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java new file mode 100644 index 0000000000..af6c8f61c7 --- /dev/null +++ b/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java @@ -0,0 +1,28 @@ +package org.webrtc; + +public class SimulcastVideoEncoder extends WrappedNativeVideoEncoder { + + static native long nativeCreateEncoder(VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info); + + VideoEncoderFactory primary; + VideoEncoderFactory fallback; + VideoCodecInfo info; + + public SimulcastVideoEncoder(VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info) { + this.primary = primary; + this.fallback = fallback; + this.info = info; + } + + @Override + public long createNativeVideoEncoder() { + return nativeCreateEncoder(primary, fallback, info); + } + + @Override + public boolean isHardwareEncoder() { + return false; + } + +} + diff --git a/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java b/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java new file mode 100644 index 0000000000..97b4f32087 --- /dev/null +++ b/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java @@ -0,0 +1,43 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Arrays; + +public class SimulcastVideoEncoderFactory implements VideoEncoderFactory { + + VideoEncoderFactory primary; + VideoEncoderFactory fallback; + + public SimulcastVideoEncoderFactory(VideoEncoderFactory primary, VideoEncoderFactory fallback) { + this.primary = primary; + this.fallback = fallback; + } + + @Nullable + @Override + public VideoEncoder createEncoder(VideoCodecInfo info) { + return new SimulcastVideoEncoder(primary, fallback, info); + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + List codecs = new ArrayList(); + codecs.addAll(Arrays.asList(primary.getSupportedCodecs())); + codecs.addAll(Arrays.asList(fallback.getSupportedCodecs())); + return codecs.toArray(new VideoCodecInfo[codecs.size()]); + } + +} diff --git a/sdk/android/api/org/webrtc/VideoCodecInfo.java b/sdk/android/api/org/webrtc/VideoCodecInfo.java index 4f97cf74cf..e0f5153d47 100644 --- a/sdk/android/api/org/webrtc/VideoCodecInfo.java +++ b/sdk/android/api/org/webrtc/VideoCodecInfo.java @@ -14,6 +14,8 @@ import java.util.Arrays; import java.util.Locale; import java.util.Map; +import java.util.List; +import java.util.ArrayList; /** * Represent a video codec as encoded in SDP. 
@@ -34,13 +36,16 @@ public class VideoCodecInfo { public final String name; public final Map params; + public final List scalabilityModes; + @Deprecated public final int payload; @CalledByNative - public VideoCodecInfo(String name, Map params) { + public VideoCodecInfo(String name, Map params, List scalabilityModes) { this.payload = 0; this.name = name; this.params = params; + this.scalabilityModes = scalabilityModes; } @Deprecated @@ -48,6 +53,7 @@ public VideoCodecInfo(int payload, String name, Map params) { this.payload = payload; this.name = name; this.params = params; + this.scalabilityModes = new ArrayList<>(); } @Override @@ -83,4 +89,9 @@ String getName() { Map getParams() { return params; } + + @CalledByNative + List getScalabilityModes() { + return scalabilityModes; + } } diff --git a/sdk/android/api/org/webrtc/VideoTrack.java b/sdk/android/api/org/webrtc/VideoTrack.java index 5593d424f3..bd5022f39a 100644 --- a/sdk/android/api/org/webrtc/VideoTrack.java +++ b/sdk/android/api/org/webrtc/VideoTrack.java @@ -54,6 +54,24 @@ public void removeSink(VideoSink sink) { } } + /** + * For a remote video track, starts/stops receiving the video stream. + * + * If this is a local video track, this is a no-op. + */ + public void setShouldReceive(boolean shouldReceive){ + nativeSetShouldReceive(getNativeMediaStreamTrack(), shouldReceive); + } + + /** + * The current receive status for a remote video track. + * + * This has no meaning for a local video track. 
+ */ + public boolean shouldReceive(){ + return nativeGetShouldReceive(getNativeMediaStreamTrack()); + } + @Override public void dispose() { for (long nativeSink : sinks.values()) { @@ -73,4 +91,6 @@ long getNativeVideoTrack() { private static native void nativeRemoveSink(long track, long nativeSink); private static native long nativeWrapSink(VideoSink sink); private static native void nativeFreeSink(long sink); + private static native void nativeSetShouldReceive(long track, boolean shouldReceive); + private static native boolean nativeGetShouldReceive(long track); } diff --git a/sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java b/sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java new file mode 100644 index 0000000000..a7acd37289 --- /dev/null +++ b/sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java @@ -0,0 +1,75 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.webrtc; + +import android.media.MediaCodecInfo; +import androidx.annotation.Nullable; + +import java.util.Arrays; +import java.util.LinkedHashSet; + +public class WrappedVideoDecoderFactory implements VideoDecoderFactory { + public WrappedVideoDecoderFactory(@Nullable EglBase.Context eglContext) { + this.hardwareVideoDecoderFactory = new HardwareVideoDecoderFactory(eglContext); + this.platformSoftwareVideoDecoderFactory = new PlatformSoftwareVideoDecoderFactory(eglContext); + } + + private final VideoDecoderFactory hardwareVideoDecoderFactory; + private final VideoDecoderFactory hardwareVideoDecoderFactoryWithoutEglContext = new HardwareVideoDecoderFactory(null) ; + private final VideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory(); + @Nullable + private final VideoDecoderFactory platformSoftwareVideoDecoderFactory; + + @Override + public VideoDecoder createDecoder(VideoCodecInfo codecType) { + VideoDecoder softwareDecoder = this.softwareVideoDecoderFactory.createDecoder(codecType); + VideoDecoder hardwareDecoder = this.hardwareVideoDecoderFactory.createDecoder(codecType); + if (softwareDecoder == null && this.platformSoftwareVideoDecoderFactory != null) { + softwareDecoder = this.platformSoftwareVideoDecoderFactory.createDecoder(codecType); + } + + if(hardwareDecoder != null && disableSurfaceTextureFrame(hardwareDecoder.getImplementationName())) { + hardwareDecoder.release(); + hardwareDecoder = this.hardwareVideoDecoderFactoryWithoutEglContext.createDecoder(codecType); + } + + if (hardwareDecoder != null && softwareDecoder != null) { + return new VideoDecoderFallback(softwareDecoder, hardwareDecoder); + } else { + return hardwareDecoder != null ? 
hardwareDecoder : softwareDecoder; + } + } + + private boolean disableSurfaceTextureFrame(String name) { + if (name.startsWith("OMX.qcom.") || name.startsWith("OMX.hisi.")) { + return true; + } + return false; + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + LinkedHashSet supportedCodecInfos = new LinkedHashSet(); + supportedCodecInfos.addAll(Arrays.asList(this.softwareVideoDecoderFactory.getSupportedCodecs())); + supportedCodecInfos.addAll(Arrays.asList(this.hardwareVideoDecoderFactory.getSupportedCodecs())); + if (this.platformSoftwareVideoDecoderFactory != null) { + supportedCodecInfos.addAll(Arrays.asList(this.platformSoftwareVideoDecoderFactory.getSupportedCodecs())); + } + + return (VideoCodecInfo[])supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]); + } +} diff --git a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java index d3d57602a8..f01baa5197 100644 --- a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java @@ -42,6 +42,7 @@ public static class Builder { private AudioTrackErrorCallback audioTrackErrorCallback; private AudioRecordErrorCallback audioRecordErrorCallback; private SamplesReadyCallback samplesReadyCallback; + private PlaybackSamplesReadyCallback playbackSamplesReadyCallback; private AudioTrackStateCallback audioTrackStateCallback; private AudioRecordStateCallback audioRecordStateCallback; private boolean useHardwareAcousticEchoCanceler = isBuiltInAcousticEchoCancelerSupported(); @@ -140,6 +141,14 @@ public Builder setSamplesReadyCallback(SamplesReadyCallback samplesReadyCallback return this; } + /** + * Set a callback to listen to the audio output passed to the AudioTrack. 
+ */ + public Builder setPlaybackSamplesReadyCallback(PlaybackSamplesReadyCallback playbackSamplesReadyCallback) { + this.playbackSamplesReadyCallback = playbackSamplesReadyCallback; + return this; + } + /** * Set a callback to retrieve information from the AudioTrack on when audio starts and stop. */ @@ -258,7 +267,7 @@ public JavaAudioDeviceModule createAudioDeviceModule() { samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback, - audioTrackStateCallback, useLowLatency, enableVolumeLogger); + audioTrackStateCallback, playbackSamplesReadyCallback, useLowLatency, enableVolumeLogger); return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput); } @@ -325,6 +334,11 @@ public static interface SamplesReadyCallback { void onWebRtcAudioRecordSamplesReady(AudioSamples samples); } + /** Called when new audio samples are ready. This should only be set for debug purposes */ + public static interface PlaybackSamplesReadyCallback { + void onWebRtcAudioTrackSamplesReady(AudioSamples samples); + } + /* AudioTrack */ // Audio playout/track error handler functions. 
public enum AudioTrackStartErrorCode { @@ -362,8 +376,8 @@ public static boolean isBuiltInNoiseSuppressorSupported() { private final Context context; private final AudioManager audioManager; - private final WebRtcAudioRecord audioInput; - private final WebRtcAudioTrack audioOutput; + public final WebRtcAudioRecord audioInput; + public final WebRtcAudioTrack audioOutput; private final int inputSampleRate; private final int outputSampleRate; private final boolean useStereoInput; diff --git a/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java b/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java index 6f448124e8..d9fadabfd9 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java @@ -48,7 +48,7 @@ public AndroidVideoDecoderInstrumentationTest(String codecName, boolean useEglCo if (codecName.equals("H264")) { this.codecType = H264Utils.DEFAULT_H264_BASELINE_PROFILE_CODEC; } else { - this.codecType = new VideoCodecInfo(codecName, new HashMap<>()); + this.codecType = new VideoCodecInfo(codecName, new HashMap<>(), new ArrayList<>()); } this.useEglContext = useEglContext; } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java index fe608c794e..1a9dd5fc38 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java @@ -47,7 +47,7 @@ public void setUp() { @SmallTest @Test public void getSupportedCodecs_hwVp8SameParamsAsSwVp8_oneVp8() { - VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap<>()); + VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap<>(), new 
ArrayList<>()); VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(hwVp8Encoder); DefaultVideoEncoderFactory defFactory = new DefaultVideoEncoderFactory(hwFactory); VideoCodecInfo[] supportedCodecs = defFactory.getSupportedCodecs(); @@ -62,7 +62,7 @@ public void getSupportedCodecs_hwVp8SameParamsAsSwVp8_oneVp8() { public void getSupportedCodecs_hwVp8WithDifferentParams_twoVp8() { VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap() { { put("param", "value"); } - }); + }, new ArrayList<>()); VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(hwVp8Encoder); DefaultVideoEncoderFactory defFactory = new DefaultVideoEncoderFactory(hwFactory); VideoCodecInfo[] supportedCodecs = defFactory.getSupportedCodecs(); diff --git a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java index 8a5d9788ee..8be15624da 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java @@ -14,6 +14,7 @@ import androidx.annotation.Nullable; import androidx.test.filters.SmallTest; +import java.util.ArrayList; import java.util.HashMap; import org.junit.Before; import org.junit.Test; @@ -55,7 +56,7 @@ public void createDecoder_supportedCodec_returnsNotNull() { @Test public void createDecoder_unsupportedCodec_returnsNull() { VideoDecoderFactory factory = new SoftwareVideoDecoderFactory(); - VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap()); + VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap(), new ArrayList<>()); VideoDecoder decoder = factory.createDecoder(codec); assertThat(decoder).isNull(); } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java 
b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java index 696b423cde..0fa4c4cc17 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java @@ -14,6 +14,7 @@ import androidx.annotation.Nullable; import androidx.test.filters.SmallTest; +import java.util.ArrayList; import java.util.HashMap; import org.junit.Before; import org.junit.Test; @@ -52,7 +53,7 @@ public void createEncoder_supportedCodec_returnsNotNull() { @Test public void createEncoder_unsupportedCodec_returnsNull() { VideoEncoderFactory factory = new SoftwareVideoEncoderFactory(); - VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap()); + VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap(), new ArrayList<>()); VideoEncoder encoder = factory.createEncoder(codec); assertThat(encoder).isNull(); } diff --git a/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java b/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java index 70151d3b78..72c5c64191 100644 --- a/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java +++ b/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java @@ -12,6 +12,7 @@ import java.util.HashMap; import java.util.Map; +import java.util.ArrayList; public class CodecsWrapperTestHelper { @CalledByNative @@ -20,7 +21,7 @@ public static VideoCodecInfo createTestVideoCodecInfo() { params.put( VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID, VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1); - VideoCodecInfo codec_info = new VideoCodecInfo("H264", params); + VideoCodecInfo codec_info = new VideoCodecInfo("H264", params, new ArrayList<>()); return codec_info; } diff --git a/sdk/android/src/java/org/webrtc/H264Utils.java b/sdk/android/src/java/org/webrtc/H264Utils.java index abb79c6582..4bf292ee12 100644 --- 
a/sdk/android/src/java/org/webrtc/H264Utils.java +++ b/sdk/android/src/java/org/webrtc/H264Utils.java @@ -12,6 +12,7 @@ import java.util.Map; import java.util.HashMap; +import java.util.ArrayList; /** Container for static helper functions related to dealing with H264 codecs. */ class H264Utils { @@ -38,9 +39,9 @@ public static Map getDefaultH264Params(boolean isHighProfile) { } public static VideoCodecInfo DEFAULT_H264_BASELINE_PROFILE_CODEC = - new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false)); + new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false), new ArrayList<>()); public static VideoCodecInfo DEFAULT_H264_HIGH_PROFILE_CODEC = - new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true)); + new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true), new ArrayList<>()); public static boolean isSameH264Profile( Map params1, Map params2) { diff --git a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java index 94dfdf0728..6d0edcf972 100644 --- a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java +++ b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java @@ -45,8 +45,8 @@ class HardwareVideoEncoder implements VideoEncoder { private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000; - // Size of the input frames should be multiple of 16 for the H/W encoder. - private static final int REQUIRED_RESOLUTION_ALIGNMENT = 16; + // Size of the input frames should be multiple of 2 for the H/W encoder. 
+ private static final int REQUIRED_RESOLUTION_ALIGNMENT = 2; /** * Keeps track of the number of output buffers that have been passed down the pipeline and not yet @@ -210,6 +210,11 @@ public VideoCodecStatus initEncode(Settings settings, Callback callback) { this.callback = callback; automaticResizeOn = settings.automaticResizeOn; + if (settings.width % REQUIRED_RESOLUTION_ALIGNMENT != 0 + || settings.height % REQUIRED_RESOLUTION_ALIGNMENT != 0) { + Logging.e(TAG, "MediaCodec requires 2x2 alignment."); + return VideoCodecStatus.ERR_SIZE; + } this.width = settings.width; this.height = settings.height; useSurfaceMode = canUseSurface(); @@ -533,6 +538,12 @@ private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseS if (status != VideoCodecStatus.OK) { return status; } + + if (newWidth % REQUIRED_RESOLUTION_ALIGNMENT != 0 + || newHeight % REQUIRED_RESOLUTION_ALIGNMENT != 0) { + Logging.e(TAG, "MediaCodec requires 2x2 alignment."); + return VideoCodecStatus.ERR_SIZE; + } width = newWidth; height = newHeight; useSurfaceMode = newUseSurfaceMode; diff --git a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java index 9a73bc49ff..d5b892279c 100644 --- a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java +++ b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java @@ -72,11 +72,11 @@ public VideoCodecInfo[] getSupportedCodecs() { String name = type.name(); if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true), new ArrayList<>())); } supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false), new 
ArrayList<>())); } } diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java index 2b34e34013..25d10e4f61 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -20,12 +20,14 @@ import android.os.Process; import androidx.annotation.Nullable; import java.nio.ByteBuffer; +import java.util.Arrays; import org.webrtc.CalledByNative; import org.webrtc.Logging; import org.webrtc.ThreadUtils; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback; +import org.webrtc.audio.JavaAudioDeviceModule.PlaybackSamplesReadyCallback; import org.webrtc.audio.LowLatencyAudioBufferManager; class WebRtcAudioTrack { @@ -62,7 +64,7 @@ class WebRtcAudioTrack { private ByteBuffer byteBuffer; - private @Nullable final AudioAttributes audioAttributes; + public @Nullable AudioAttributes audioAttributes; private @Nullable AudioTrack audioTrack; private @Nullable AudioTrackThread audioThread; private final VolumeLogger volumeLogger; @@ -76,6 +78,7 @@ class WebRtcAudioTrack { private final @Nullable AudioTrackErrorCallback errorCallback; private final @Nullable AudioTrackStateCallback stateCallback; + private final @Nullable PlaybackSamplesReadyCallback audioSamplesReadyCallback; /** * Audio thread which keeps calling AudioTrack.write() to stream audio. @@ -129,6 +132,17 @@ public void run() { reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten); } } + + if (audioSamplesReadyCallback != null && keepAlive) { + // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily + // at index 0. 
+ byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), + sizeInBytes + byteBuffer.arrayOffset()); + audioSamplesReadyCallback.onWebRtcAudioTrackSamplesReady( + new JavaAudioDeviceModule.AudioSamples(audioTrack.getAudioFormat(), + audioTrack.getChannelCount(), audioTrack.getSampleRate(), data)); + } + if (useLowLatency) { bufferManager.maybeAdjustBufferSize(audioTrack); } @@ -154,13 +168,13 @@ public void stopThread() { @CalledByNative WebRtcAudioTrack(Context context, AudioManager audioManager) { this(context, audioManager, null /* audioAttributes */, null /* errorCallback */, - null /* stateCallback */, false /* useLowLatency */, true /* enableVolumeLogger */); + null /* stateCallback */, null /* audioSamplesReadyCallback */, false /* useLowLatency */, true /* enableVolumeLogger */); } WebRtcAudioTrack(Context context, AudioManager audioManager, @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback, - @Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency, - boolean enableVolumeLogger) { + @Nullable AudioTrackStateCallback stateCallback, @Nullable PlaybackSamplesReadyCallback audioSamplesReadyCallback, + boolean useLowLatency, boolean enableVolumeLogger) { threadChecker.detachThread(); this.context = context; this.audioManager = audioManager; @@ -168,6 +182,7 @@ public void stopThread() { this.errorCallback = errorCallback; this.stateCallback = stateCallback; this.volumeLogger = enableVolumeLogger ? 
new VolumeLogger(audioManager) : null; + this.audioSamplesReadyCallback = audioSamplesReadyCallback; this.useLowLatency = useLowLatency; Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); } diff --git a/sdk/android/src/jni/libaom_av1_encoder.cc b/sdk/android/src/jni/libaom_av1_encoder.cc index 96e0dda169..32c1fc7993 100644 --- a/sdk/android/src/jni/libaom_av1_encoder.cc +++ b/sdk/android/src/jni/libaom_av1_encoder.cc @@ -15,6 +15,9 @@ #include "sdk/android/generated_libaom_av1_encoder_jni/LibaomAv1Encoder_jni.h" #include "sdk/android/src/jni/jni_helpers.h" +#include +#include + namespace webrtc { namespace jni { @@ -22,5 +25,14 @@ static jlong JNI_LibaomAv1Encoder_CreateEncoder(JNIEnv* jni) { return jlongFromPointer(webrtc::CreateLibaomAv1Encoder().release()); } +static webrtc::ScopedJavaLocalRef JNI_LibaomAv1Encoder_GetSupportedScalabilityModes(JNIEnv* jni) { + std::vector modes; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + modes.push_back(std::string(webrtc::ScalabilityModeToString(scalability_mode))); + } + } + return NativeToJavaStringArray(jni, modes); +} } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/pc/audio_sink.cc b/sdk/android/src/jni/pc/audio_sink.cc new file mode 100644 index 0000000000..5bd88c75f6 --- /dev/null +++ b/sdk/android/src/jni/pc/audio_sink.cc @@ -0,0 +1,39 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "sdk/android/src/jni/pc/audio_sink.h" + +#include "sdk/android/generated_peerconnection_jni/AudioTrackSink_jni.h" + +namespace webrtc { +namespace jni { + +AudioTrackSinkWrapper::AudioTrackSinkWrapper(JNIEnv* jni, const JavaRef& j_sink) + : j_sink_(jni, j_sink) {} + +AudioTrackSinkWrapper::~AudioTrackSinkWrapper() {} + +void AudioTrackSinkWrapper::OnData( + const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + int length = (bits_per_sample / 8) * number_of_channels * number_of_frames; + ScopedJavaLocalRef audio_buffer = + NewDirectByteBuffer(jni, (void *) audio_data, length); + Java_AudioTrackSink_onData(jni, j_sink_, + audio_buffer, bits_per_sample, sample_rate, (int) number_of_channels, (int) number_of_frames, (absolute_capture_timestamp_ms ? absolute_capture_timestamp_ms.value() : 0)); +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/audio_sink.h b/sdk/android/src/jni/pc/audio_sink.h new file mode 100644 index 0000000000..809f460e0c --- /dev/null +++ b/sdk/android/src/jni/pc/audio_sink.h @@ -0,0 +1,41 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ +#define SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ + +#include + +#include "api/media_stream_interface.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +class AudioTrackSinkWrapper : public webrtc::AudioTrackSinkInterface { + public: + AudioTrackSinkWrapper(JNIEnv* jni, const JavaRef& j_sink); + ~AudioTrackSinkWrapper() override; + + private: + void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) override; + + const ScopedJavaGlobalRef j_sink_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ diff --git a/sdk/android/src/jni/pc/audio_track.cc b/sdk/android/src/jni/pc/audio_track.cc index 36ed43f1d4..df2d605893 100644 --- a/sdk/android/src/jni/pc/audio_track.cc +++ b/sdk/android/src/jni/pc/audio_track.cc @@ -9,6 +9,8 @@ */ #include "api/media_stream_interface.h" +#include "sdk/android/src/jni/pc/audio_sink.h" + #include "sdk/android/generated_peerconnection_jni/AudioTrack_jni.h" namespace webrtc { @@ -20,5 +22,29 @@ static void JNI_AudioTrack_SetVolume(JNIEnv*, jlong j_p, jdouble volume) { source->SetVolume(volume); } +static void JNI_AudioTrack_AddSink(JNIEnv* jni, + jlong j_native_track, + jlong j_native_sink) { + reinterpret_cast(j_native_track) + ->AddSink(reinterpret_cast(j_native_sink)); +} + +static void JNI_AudioTrack_RemoveSink(JNIEnv* jni, + jlong j_native_track, + jlong j_native_sink) { + reinterpret_cast(j_native_track) + ->RemoveSink(reinterpret_cast(j_native_sink)); +} + +static jlong JNI_AudioTrack_WrapSink(JNIEnv* jni, + const JavaParamRef& sink) { + return jlongFromPointer(new AudioTrackSinkWrapper(jni, sink)); +} + +static void JNI_AudioTrack_FreeSink(JNIEnv* jni, jlong j_native_sink) { + delete reinterpret_cast(j_native_sink); +} + + } // namespace jni } // namespace webrtc 
diff --git a/sdk/android/src/jni/pc/external_audio_processing_factory.cc b/sdk/android/src/jni/pc/external_audio_processing_factory.cc new file mode 100644 index 0000000000..3d7ee7a4d9 --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processing_factory.cc @@ -0,0 +1,143 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "sdk/android/src/jni/pc/external_audio_processing_factory.h" + +#include +#include + +#include "api/make_ref_counted.h" +#include "rtc_base/ref_counted_object.h" +#include "sdk/android/generated_peerconnection_jni/ExternalAudioProcessingFactory_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/external_audio_processor.h" + +namespace webrtc { +namespace jni { + +ExternalAudioProcessingJni::ExternalAudioProcessingJni( + JNIEnv* jni, + const JavaRef& j_processing) + : j_processing_global_(jni, j_processing) {} +ExternalAudioProcessingJni::~ExternalAudioProcessingJni() {} +void ExternalAudioProcessingJni::Initialize(int sample_rate_hz, + int num_channels) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_AudioProcessing_initialize(env, j_processing_global_, sample_rate_hz, + num_channels); +} + +void ExternalAudioProcessingJni::Reset(int new_rate) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_AudioProcessing_reset(env, 
j_processing_global_, new_rate); +} + +void ExternalAudioProcessingJni::Process(int num_bands, int num_frames, int buffer_size, float* buffer) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef audio_buffer = + NewDirectByteBuffer(env, (void*)buffer, buffer_size * sizeof(float)); + Java_AudioProcessing_process(env, j_processing_global_, num_bands, num_frames, audio_buffer); +} + +ExternalAudioProcessingFactory::ExternalAudioProcessingFactory() { + capture_post_processor_ = new ExternalAudioProcessor(); + std::unique_ptr capture_post_processor( + capture_post_processor_); + + render_pre_processor_ = new ExternalAudioProcessor(); + std::unique_ptr render_pre_processor( + render_pre_processor_); + + apm_ = webrtc::AudioProcessingBuilder() + .SetCapturePostProcessing(std::move(capture_post_processor)) + .SetRenderPreProcessing(std::move(render_pre_processor)) + .Create(); + + webrtc::AudioProcessing::Config config; + apm_->ApplyConfig(config); +} + +static ExternalAudioProcessingFactory* default_processor_ptr; + +static jlong JNI_ExternalAudioProcessingFactory_GetDefaultApm(JNIEnv* env) { + if (!default_processor_ptr) { + auto default_processor = rtc::make_ref_counted(); + default_processor_ptr = default_processor.release(); + } + return webrtc::jni::jlongFromPointer(default_processor_ptr->apm().get()); +} + +static jlong JNI_ExternalAudioProcessingFactory_SetCapturePostProcessing( + JNIEnv* env, + const JavaParamRef& j_processing) { + if (!default_processor_ptr) { + return 0; + } + auto processing = + rtc::make_ref_counted(env, j_processing); + processing->AddRef(); + default_processor_ptr->capture_post_processor()->SetExternalAudioProcessing( + processing.get()); + return jlongFromPointer(processing.get()); +} + +static jlong JNI_ExternalAudioProcessingFactory_SetRenderPreProcessing( + JNIEnv* env, + const JavaParamRef& j_processing) { + if (!default_processor_ptr) { + return 0; + } + auto processing = + rtc::make_ref_counted(env, j_processing); + 
processing->AddRef(); + default_processor_ptr->render_pre_processor()->SetExternalAudioProcessing( + processing.get()); + return jlongFromPointer(processing.get()); +} + +static void JNI_ExternalAudioProcessingFactory_SetBypassFlagForCapturePost( + JNIEnv* env, + jboolean bypass) { + if (!default_processor_ptr) { + return; + } + default_processor_ptr->capture_post_processor()->SetBypassFlag(bypass); +} + +static void JNI_ExternalAudioProcessingFactory_SetBypassFlagForRenderPre( + JNIEnv* env, + jboolean bypass) { + if (!default_processor_ptr) { + return; + } + default_processor_ptr->render_pre_processor()->SetBypassFlag(bypass); +} + +static void JNI_ExternalAudioProcessingFactory_Destroy(JNIEnv* env) { + if (!default_processor_ptr) { + return; + } + default_processor_ptr->render_pre_processor()->SetExternalAudioProcessing( + nullptr); + default_processor_ptr->capture_post_processor()->SetExternalAudioProcessing( + nullptr); + delete default_processor_ptr; +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/external_audio_processing_factory.h b/sdk/android/src/jni/pc/external_audio_processing_factory.h new file mode 100644 index 0000000000..5dfebe81fc --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processing_factory.h @@ -0,0 +1,68 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define WEBRTC_APM_DEBUG_DUMP 0 + +#include "rtc_base/ref_counted_object.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" +#include "sdk/android/src/jni/pc/external_audio_processor.h" +#include "sdk/android/src/jni/pc/external_audio_processing_interface.h" + +namespace webrtc { +namespace jni { + +class ExternalAudioProcessingJni + : public webrtc::ExternalAudioProcessingInterface, + public rtc::RefCountInterface { + public: + ExternalAudioProcessingJni(JNIEnv* jni, const JavaRef& j_processing); + ~ExternalAudioProcessingJni(); + + protected: + virtual void Initialize(int sample_rate_hz, int num_channels) override; + virtual void Reset(int new_rate) override; + virtual void Process(int num_bans, int num_frames, int buffer_size, float* buffer) override; + + private: + const ScopedJavaGlobalRef j_processing_global_; + const ScopedJavaGlobalRef j_processing_; +}; + +class ExternalAudioProcessingFactory : public rtc::RefCountInterface { + public: + ExternalAudioProcessingFactory(); + virtual ~ExternalAudioProcessingFactory() = default; + + ExternalAudioProcessor* capture_post_processor() { + return capture_post_processor_; + } + + ExternalAudioProcessor* render_pre_processor() { + return render_pre_processor_; + } + + rtc::scoped_refptr apm() { return apm_; } + + private: + rtc::scoped_refptr apm_; + ExternalAudioProcessor* capture_post_processor_; + ExternalAudioProcessor* render_pre_processor_; +}; + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/external_audio_processing_interface.h b/sdk/android/src/jni/pc/external_audio_processing_interface.h new file mode 100644 index 0000000000..1202be106b --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processing_interface.h @@ -0,0 +1,33 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef API_ANDROID_JNI_EXTERNALAUDIOPROCESSORINTERFACE_H_ +#define API_ANDROID_JNI_EXTERNALAUDIOPROCESSORINTERFACE_H_ + +namespace webrtc { + +class ExternalAudioProcessingInterface { + public: + virtual void Initialize(int sample_rate_hz, int num_channels) = 0; + virtual void Reset(int new_rate) = 0; + virtual void Process(int num_bands, int num_frames, int buffer_size, float* buffer) = 0; + + protected: + virtual ~ExternalAudioProcessingInterface() = default; +}; + +} // namespace webrtc + +#endif // API_ANDROID_JNI_EXTERNALAUDIOPROCESSORINTERFACE_H_ diff --git a/sdk/android/src/jni/pc/external_audio_processor.cc b/sdk/android/src/jni/pc/external_audio_processor.cc new file mode 100644 index 0000000000..274982d6d4 --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processor.cc @@ -0,0 +1,72 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "sdk/android/src/jni/pc/external_audio_processor.h" + +namespace webrtc { + +void ExternalAudioProcessor::SetExternalAudioProcessing( + ExternalAudioProcessingInterface* processor) { + webrtc::MutexLock lock(&mutex_); + external_processor_ = processor; + if (initialized_) { + external_processor_->Initialize(sample_rate_hz_, num_channels_); + } +} + +void ExternalAudioProcessor::SetBypassFlag(bool bypass) { + webrtc::MutexLock lock(&mutex_); + bypass_flag_ = bypass; +} + +void ExternalAudioProcessor::Initialize(int sample_rate_hz, int num_channels) { + webrtc::MutexLock lock(&mutex_); + sample_rate_hz_ = sample_rate_hz; + num_channels_ = num_channels; + if (external_processor_) { + external_processor_->Initialize(sample_rate_hz, num_channels); + } + initialized_ = true; +} + +void ExternalAudioProcessor::Process(webrtc::AudioBuffer* audio) { + webrtc::MutexLock lock(&mutex_); + if (!external_processor_ || bypass_flag_ || !initialized_) { + return; + } + + size_t num_frames = audio->num_frames(); + size_t num_bands =audio->num_bands(); + + // 1 buffer = 10ms of frames + int rate = num_frames * 100; + + if (rate != sample_rate_hz_) { + external_processor_->Reset(rate); + sample_rate_hz_ = rate; + } + + external_processor_->Process(num_bands, num_frames, kNsFrameSize * num_bands, audio->channels()[0]); +} + +std::string ExternalAudioProcessor::ToString() const { + return "ExternalAudioProcessor"; +} + +void ExternalAudioProcessor::SetRuntimeSetting( + webrtc::AudioProcessing::RuntimeSetting setting) {} + +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/external_audio_processor.h b/sdk/android/src/jni/pc/external_audio_processor.h new file mode 100644 index 0000000000..1dc31809fc --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processor.h @@ -0,0 +1,57 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_EXTERNAL_AUDIO_PROCESSOR_H_ +#define SDK_ANDROID_SRC_JNI_PC_EXTERNAL_AUDIO_PROCESSOR_H_ + +#define WEBRTC_APM_DEBUG_DUMP 0 + +#include "modules/audio_processing/audio_buffer.h" +#include "modules/audio_processing/audio_processing_impl.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "sdk/android/src/jni/pc/external_audio_processing_interface.h" + +namespace webrtc { + +class ExternalAudioProcessor : public webrtc::CustomProcessing { + public: + ExternalAudioProcessor() = default; + ~ExternalAudioProcessor() override = default; + + void SetExternalAudioProcessing( + ExternalAudioProcessingInterface* processor); + + void SetBypassFlag(bool bypass); + + private: + void Initialize(int sample_rate_hz, int num_channels) override; + void Process(webrtc::AudioBuffer* audio) override; + std::string ToString() const override; + void SetRuntimeSetting( + webrtc::AudioProcessing::RuntimeSetting setting) override; + + private: + mutable webrtc::Mutex mutex_; + ExternalAudioProcessingInterface* external_processor_; + bool bypass_flag_ = false; + bool initialized_ = false; + int sample_rate_hz_ = 0; + int num_channels_ = 0; +}; + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_EXTERNAL_AUDIO_PROCESSOR_H_ diff --git a/sdk/android/src/jni/pc/frame_cryptor.cc b/sdk/android/src/jni/pc/frame_cryptor.cc new file mode 100644 index 0000000000..af2fd8f2b0 --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor.cc @@ -0,0 +1,202 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache 
License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include "sdk/android/src/jni/pc/frame_cryptor.h" + +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "rtc_base/ref_counted_object.h" +#include "sdk/android/generated_peerconnection_jni/FrameCryptorFactory_jni.h" +#include "sdk/android/generated_peerconnection_jni/FrameCryptor_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/frame_cryptor_key_provider.h" +#include "sdk/android/src/jni/pc/owned_factory_and_threads.h" + +namespace webrtc { +namespace jni { + +FrameCryptorObserverJni::FrameCryptorObserverJni( + JNIEnv* jni, + const JavaRef& j_observer) + : j_observer_global_(jni, j_observer) {} + +FrameCryptorObserverJni::~FrameCryptorObserverJni() {} + +void FrameCryptorObserverJni::OnFrameCryptionStateChanged( + const std::string participant_id, + FrameCryptionState new_state) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onFrameCryptionStateChanged( + env, j_observer_global_, NativeToJavaString(env, participant_id), + Java_FrameCryptionState_fromNativeIndex(env, new_state)); +} + +ScopedJavaLocalRef NativeToJavaFrameCryptor( + JNIEnv* env, + rtc::scoped_refptr cryptor) { + if (!cryptor) + return nullptr; + // Sender is now owned by the Java object, and will be freed from + // FrameCryptor.dispose(). 
+ return Java_FrameCryptor_Constructor(env, + jlongFromPointer(cryptor.release())); +} + +static void JNI_FrameCryptor_SetEnabled(JNIEnv* jni, + jlong j_frame_cryptor_pointer, + jboolean j_enabled) { + reinterpret_cast(j_frame_cryptor_pointer) + ->SetEnabled(j_enabled); +} + +static jboolean JNI_FrameCryptor_IsEnabled(JNIEnv* jni, + jlong j_frame_cryptor_pointer) { + return reinterpret_cast(j_frame_cryptor_pointer) + ->enabled(); +} + +static void JNI_FrameCryptor_SetKeyIndex(JNIEnv* jni, + jlong j_frame_cryptor_pointer, + jint j_index) { + reinterpret_cast(j_frame_cryptor_pointer) + ->SetKeyIndex(j_index); +} + +static jint JNI_FrameCryptor_GetKeyIndex(JNIEnv* jni, + jlong j_frame_cryptor_pointer) { + return reinterpret_cast(j_frame_cryptor_pointer) + ->key_index(); +} + +static jlong JNI_FrameCryptor_SetObserver( + JNIEnv* jni, + jlong j_frame_cryptor_pointer, + const JavaParamRef& j_observer) { + auto observer = + rtc::make_ref_counted(jni, j_observer); + observer->AddRef(); + reinterpret_cast(j_frame_cryptor_pointer) + ->RegisterFrameCryptorTransformerObserver(observer); + return jlongFromPointer(observer.get()); +} + +static void JNI_FrameCryptor_UnSetObserver(JNIEnv* jni, + jlong j_frame_cryptor_pointer) { + reinterpret_cast(j_frame_cryptor_pointer) + ->UnRegisterFrameCryptorTransformerObserver(); +} + +webrtc::FrameCryptorTransformer::Algorithm AlgorithmFromIndex(int index) { + switch (index) { + case 0: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + case 1: + return webrtc::FrameCryptorTransformer::Algorithm::kAesCbc; + default: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + } +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorFactory_CreateFrameCryptorForRtpReceiver( + JNIEnv* env, + jlong native_factory, + jlong j_rtp_receiver_pointer, + const base::android::JavaParamRef& participantId, + jint j_algorithm_index, + jlong j_key_provider) { + OwnedFactoryAndThreads* factory = + reinterpret_cast(native_factory); 
+ auto keyProvider = + reinterpret_cast(j_key_provider); + auto participant_id = JavaToStdString(env, participantId); + auto rtpReceiver = + reinterpret_cast(j_rtp_receiver_pointer); + auto mediaType = + rtpReceiver->track()->kind() == "audio" + ? webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + auto frame_crypto_transformer = + rtc::scoped_refptr( + new webrtc::FrameCryptorTransformer(factory->signaling_thread(), + participant_id, mediaType, AlgorithmFromIndex(j_algorithm_index), + rtc::scoped_refptr(keyProvider))); + + rtpReceiver->SetDepacketizerToDecoderFrameTransformer( + frame_crypto_transformer); + frame_crypto_transformer->SetEnabled(false); + + return NativeToJavaFrameCryptor(env, frame_crypto_transformer); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorFactory_CreateFrameCryptorForRtpSender( + JNIEnv* env, + jlong native_factory, + jlong j_rtp_sender_pointer, + const base::android::JavaParamRef& participantId, + jint j_algorithm_index, + jlong j_key_provider) { + OwnedFactoryAndThreads* factory = + reinterpret_cast(native_factory); + auto keyProvider = + reinterpret_cast(j_key_provider); + auto rtpSender = reinterpret_cast(j_rtp_sender_pointer); + auto participant_id = JavaToStdString(env, participantId); + auto mediaType = + rtpSender->track()->kind() == "audio" + ? 
webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + auto frame_crypto_transformer = + rtc::scoped_refptr( + new webrtc::FrameCryptorTransformer(factory->signaling_thread(), + participant_id, mediaType, AlgorithmFromIndex(j_algorithm_index), + rtc::scoped_refptr(keyProvider))); + + rtpSender->SetEncoderToPacketizerFrameTransformer(frame_crypto_transformer); + frame_crypto_transformer->SetEnabled(false); + + return NativeToJavaFrameCryptor(env, frame_crypto_transformer); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorFactory_CreateFrameCryptorKeyProvider( + JNIEnv* env, + jboolean j_shared, + const base::android::JavaParamRef& j_ratchetSalt, + jint j_ratchetWindowSize, + const base::android::JavaParamRef& j_uncryptedMagicBytes, + jint j_failureTolerance, + jint j_keyRingSize, + jboolean j_discardFrameWhenCryptorNotReady) { + auto ratchetSalt = JavaToNativeByteArray(env, j_ratchetSalt); + KeyProviderOptions options; + options.ratchet_salt = + std::vector(ratchetSalt.begin(), ratchetSalt.end()); + options.ratchet_window_size = j_ratchetWindowSize; + auto uncryptedMagicBytes = JavaToNativeByteArray(env, j_uncryptedMagicBytes); + options.uncrypted_magic_bytes = + std::vector(uncryptedMagicBytes.begin(), uncryptedMagicBytes.end()); + options.shared_key = j_shared; + options.failure_tolerance = j_failureTolerance; + options.key_ring_size = j_keyRingSize; + options.discard_frame_when_cryptor_not_ready = j_discardFrameWhenCryptorNotReady; + return NativeToJavaFrameCryptorKeyProvider( + env, rtc::make_ref_counted(options)); +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/frame_cryptor.h b/sdk/android/src/jni/pc/frame_cryptor.h new file mode 100644 index 0000000000..dd0788d212 --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor.h @@ -0,0 +1,49 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * 
you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_H_ +#define SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_H_ + +#include + +#include "api/crypto/frame_crypto_transformer.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef NativeToJavaFrameCryptor( + JNIEnv* env, + rtc::scoped_refptr cryptor); + +class FrameCryptorObserverJni : public FrameCryptorTransformerObserver { + public: + FrameCryptorObserverJni(JNIEnv* jni, const JavaRef& j_observer); + ~FrameCryptorObserverJni() override; + + protected: + void OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState state) override; + + private: + const ScopedJavaGlobalRef j_observer_global_; + const ScopedJavaGlobalRef j_observer_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_H_ diff --git a/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc new file mode 100644 index 0000000000..e41d16ed91 --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc @@ -0,0 +1,123 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include "sdk/android/src/jni/pc/frame_cryptor_key_provider.h" + +#include "sdk/android/generated_peerconnection_jni/FrameCryptorKeyProvider_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef NativeToJavaFrameCryptorKeyProvider( + JNIEnv* env, + rtc::scoped_refptr key_provider) { + if (!key_provider) + return nullptr; + // Sender is now owned by the Java object, and will be freed from + // FrameCryptorKeyProvider.dispose(). + return Java_FrameCryptorKeyProvider_Constructor( + env, jlongFromPointer(key_provider.release())); +} + +static jboolean JNI_FrameCryptorKeyProvider_SetSharedKey( + JNIEnv* jni, + jlong j_key_provider, + jint j_index, + const base::android::JavaParamRef& j_key) { + auto key = JavaToNativeByteArray(jni, j_key); + return reinterpret_cast(j_key_provider) + ->SetSharedKey(j_index,std::vector(key.begin(), key.end())); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorKeyProvider_RatchetSharedKey( + JNIEnv* env, + jlong keyProviderPointer, + jint j_index) { + auto key_provider = + reinterpret_cast(keyProviderPointer); + auto newKey = key_provider->RatchetSharedKey(j_index); + std::vector int8tKey = + std::vector(newKey.begin(), newKey.end()); + return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorKeyProvider_ExportSharedKey( + JNIEnv* env, + jlong keyProviderPointer, + jint j_index) { + auto key_provider = + 
reinterpret_cast(keyProviderPointer); + auto key = key_provider->ExportSharedKey(j_index); + std::vector int8tKey = std::vector(key.begin(), key.end()); + return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); +} + +static jboolean JNI_FrameCryptorKeyProvider_SetKey( + JNIEnv* jni, + jlong j_key_provider, + const base::android::JavaParamRef& participantId, + jint j_index, + const base::android::JavaParamRef& j_key) { + auto key = JavaToNativeByteArray(jni, j_key); + auto participant_id = JavaToStdString(jni, participantId); + return reinterpret_cast(j_key_provider) + ->SetKey(participant_id, j_index, + std::vector(key.begin(), key.end())); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorKeyProvider_RatchetKey( + JNIEnv* env, + jlong keyProviderPointer, + const base::android::JavaParamRef& participantId, + jint j_index) { + auto participant_id = JavaToStdString(env, participantId); + auto key_provider = + reinterpret_cast(keyProviderPointer); + auto newKey = key_provider->RatchetKey(participant_id, j_index); + std::vector int8tKey = + std::vector(newKey.begin(), newKey.end()); + return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorKeyProvider_ExportKey( + JNIEnv* env, + jlong keyProviderPointer, + const base::android::JavaParamRef& participantId, + jint j_index) { + auto participant_id = JavaToStdString(env, participantId); + auto key_provider = + reinterpret_cast(keyProviderPointer); + auto key = key_provider->ExportKey(participant_id, j_index); + std::vector int8tKey = std::vector(key.begin(), key.end()); + return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); +} + +static void JNI_FrameCryptorKeyProvider_SetSifTrailer( + JNIEnv* jni, + jlong j_key_provider, + const base::android::JavaParamRef& j_trailer) { + auto trailer = JavaToNativeByteArray(jni, j_trailer); + reinterpret_cast(j_key_provider) + ->SetSifTrailer(std::vector(trailer.begin(), trailer.end())); +} + 
+} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/frame_cryptor_key_provider.h b/sdk/android/src/jni/pc/frame_cryptor_key_provider.h new file mode 100644 index 0000000000..8832a83035 --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor_key_provider.h @@ -0,0 +1,35 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_KEY_PROVIDER_H_ +#define SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_KEY_PROVIDER_H_ + +#include + +#include "api/crypto/frame_crypto_transformer.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef NativeToJavaFrameCryptorKeyProvider( + JNIEnv* env, + rtc::scoped_refptr cryptor); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_KEY_PROVIDER_H_ diff --git a/sdk/android/src/jni/pc/peer_connection.cc b/sdk/android/src/jni/pc/peer_connection.cc index 9983ae7df2..b9fea85c4b 100644 --- a/sdk/android/src/jni/pc/peer_connection.cc +++ b/sdk/android/src/jni/pc/peer_connection.cc @@ -278,6 +278,9 @@ void JavaToNativeRTCConfiguration( rtc_config->enable_implicit_rollback = Java_RTCConfiguration_getEnableImplicitRollback(jni, j_rtc_config); + rtc_config->enable_any_address_ports = + Java_RTCConfiguration_getEnableIceGatheringOnAnyAddressPorts(jni, j_rtc_config); + ScopedJavaLocalRef j_turn_logging_id = 
Java_RTCConfiguration_getTurnLoggingId(jni, j_rtc_config); if (!IsNull(jni, j_turn_logging_id)) { diff --git a/sdk/android/src/jni/pc/peer_connection_factory.cc b/sdk/android/src/jni/pc/peer_connection_factory.cc index 4c682089db..f4a1b16e89 100644 --- a/sdk/android/src/jni/pc/peer_connection_factory.cc +++ b/sdk/android/src/jni/pc/peer_connection_factory.cc @@ -39,7 +39,9 @@ #include "sdk/android/src/jni/logging/log_sink.h" #include "sdk/android/src/jni/pc/android_network_monitor.h" #include "sdk/android/src/jni/pc/audio.h" +#include "sdk/android/src/jni/pc/rtp_capabilities.h" #include "sdk/android/src/jni/pc/ice_candidate.h" +#include "sdk/android/src/jni/pc/media_stream_track.h" #include "sdk/android/src/jni/pc/owned_factory_and_threads.h" #include "sdk/android/src/jni/pc/peer_connection.h" #include "sdk/android/src/jni/pc/ssl_certificate_verifier_wrapper.h" @@ -393,6 +395,22 @@ jlong JNI_PeerConnectionFactory_CreateAudioTrack( return jlongFromPointer(track.release()); } +ScopedJavaLocalRef JNI_PeerConnectionFactory_GetRtpSenderCapabilities( + JNIEnv* jni, + jlong native_factory, + const JavaParamRef& media_type) { + auto factory = PeerConnectionFactoryFromJava(native_factory); + return NativeToJavaRtpCapabilities(jni, factory->GetRtpSenderCapabilities(JavaToNativeMediaType(jni, media_type))); +} + +ScopedJavaLocalRef JNI_PeerConnectionFactory_GetRtpReceiverCapabilities( + JNIEnv* jni, + jlong native_factory, + const JavaParamRef& media_type) { + auto factory = PeerConnectionFactoryFromJava(native_factory); + return NativeToJavaRtpCapabilities(jni, factory->GetRtpReceiverCapabilities(JavaToNativeMediaType(jni, media_type))); +} + static jboolean JNI_PeerConnectionFactory_StartAecDump( JNIEnv* jni, jlong native_factory, diff --git a/sdk/android/src/jni/pc/rtp_capabilities.cc b/sdk/android/src/jni/pc/rtp_capabilities.cc new file mode 100644 index 0000000000..fb71491ee1 --- /dev/null +++ b/sdk/android/src/jni/pc/rtp_capabilities.cc @@ -0,0 +1,114 @@ +/* + * 
Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "sdk/android/src/jni/pc/rtp_capabilities.h" + +#include "sdk/android/generated_peerconnection_jni/RtpCapabilities_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/media_stream_track.h" + +namespace webrtc { +namespace jni { + +namespace { + +ScopedJavaLocalRef NativeToJavaRtpCodecParameter( + JNIEnv* env, + const RtpCodecCapability& codec) { + return Java_CodecCapability_Constructor(env, codec.preferred_payload_type.value(), + NativeToJavaString(env, codec.name), + NativeToJavaMediaType(env, codec.kind), + NativeToJavaInteger(env, codec.clock_rate), + NativeToJavaInteger(env, codec.num_channels), + NativeToJavaString(env, codec.mime_type()), + NativeToJavaStringMap(env, codec.parameters)); +} + +ScopedJavaLocalRef NativeToJavaRtpHeaderExtensionParameter( + JNIEnv* env, + const RtpHeaderExtensionCapability& extension) { + return Java_HeaderExtensionCapability_Constructor( + env, NativeToJavaString(env, extension.uri), extension.preferred_id.value(), + extension.preferred_encrypt); +} +} // namespace + +RtpCapabilities JavaToNativeRtpCapabilities(JNIEnv* jni, + const JavaRef& j_capabilities) { + RtpCapabilities capabilities; + + ScopedJavaLocalRef j_header_extensions = + Java_RtpCapabilities_getHeaderExtensions(jni, j_capabilities); + for (const JavaRef& j_header_extension : + 
Iterable(jni, j_header_extensions)) { + RtpHeaderExtensionCapability header_extension; + header_extension.uri = JavaToStdString( + jni, Java_HeaderExtensionCapability_getUri(jni, j_header_extension)); + header_extension.preferred_id = Java_HeaderExtensionCapability_getPreferredId(jni, j_header_extension); + header_extension.preferred_encrypt = + Java_HeaderExtensionCapability_getPreferredEncrypted(jni, j_header_extension); + capabilities.header_extensions.push_back(header_extension); + } + + // Convert codecs. + ScopedJavaLocalRef j_codecs = + Java_RtpCapabilities_getCodecs(jni, j_capabilities); + for (const JavaRef& j_codec : Iterable(jni, j_codecs)) { + RtpCodecCapability codec; + codec.preferred_payload_type = Java_CodecCapability_getPreferredPayloadType(jni, j_codec); + codec.name = JavaToStdString(jni, Java_CodecCapability_getName(jni, j_codec)); + codec.kind = JavaToNativeMediaType(jni, Java_CodecCapability_getKind(jni, j_codec)); + codec.clock_rate = + JavaToNativeOptionalInt(jni, Java_CodecCapability_getClockRate(jni, j_codec)); + codec.num_channels = + JavaToNativeOptionalInt(jni, Java_CodecCapability_getNumChannels(jni, j_codec)); + auto parameters_map = + JavaToNativeStringMap(jni, Java_CodecCapability_getParameters(jni, j_codec)); + codec.parameters.insert(parameters_map.begin(), parameters_map.end()); + capabilities.codecs.push_back(codec); + } + return capabilities; +} + +ScopedJavaLocalRef NativeToJavaRtpCapabilities( + JNIEnv* env, + const RtpCapabilities& capabilities) { + return Java_RtpCapabilities_Constructor( + env, NativeToJavaList(env, capabilities.codecs, &NativeToJavaRtpCodecParameter), + NativeToJavaList(env, capabilities.header_extensions, + &NativeToJavaRtpHeaderExtensionParameter) + ); +} + +RtpCodecCapability JavaToNativeRtpCodecCapability(JNIEnv* jni, + const JavaRef& j_codec) { + RtpCodecCapability codec; + codec.preferred_payload_type = Java_CodecCapability_getPreferredPayloadType(jni, j_codec); + codec.name = JavaToStdString(jni, 
Java_CodecCapability_getName(jni, j_codec)); + codec.kind = JavaToNativeMediaType(jni, Java_CodecCapability_getKind(jni, j_codec)); + codec.clock_rate = + JavaToNativeOptionalInt(jni, Java_CodecCapability_getClockRate(jni, j_codec)); + codec.num_channels = + JavaToNativeOptionalInt(jni, Java_CodecCapability_getNumChannels(jni, j_codec)); + auto parameters_map = + JavaToNativeStringMap(jni, Java_CodecCapability_getParameters(jni, j_codec)); + codec.parameters.insert(parameters_map.begin(), parameters_map.end()); + return codec; +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/rtp_capabilities.h b/sdk/android/src/jni/pc/rtp_capabilities.h new file mode 100644 index 0000000000..4acf611c82 --- /dev/null +++ b/sdk/android/src/jni/pc/rtp_capabilities.h @@ -0,0 +1,41 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_RTP_CAPABLILITES_H_ +#define SDK_ANDROID_SRC_JNI_PC_RTP_CAPABLILITES_H_ + +#include + +#include "api/rtp_parameters.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +RtpCapabilities JavaToNativeRtpCapabilities(JNIEnv* jni, + const JavaRef& j_capabilities); + +ScopedJavaLocalRef NativeToJavaRtpCapabilities( + JNIEnv* jni, + const RtpCapabilities& capabilities); + +RtpCodecCapability JavaToNativeRtpCodecCapability(JNIEnv* jni, + const JavaRef& j_codec_capability); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_RTP_CAPABLILITES_H_ diff --git a/sdk/android/src/jni/pc/rtp_parameters.cc b/sdk/android/src/jni/pc/rtp_parameters.cc index 4bd9ee0e1d..6feb6a631b 100644 --- a/sdk/android/src/jni/pc/rtp_parameters.cc +++ b/sdk/android/src/jni/pc/rtp_parameters.cc @@ -53,6 +53,7 @@ ScopedJavaLocalRef NativeToJavaRtpEncodingParameter( NativeToJavaInteger(env, encoding.max_framerate), NativeToJavaInteger(env, encoding.num_temporal_layers), NativeToJavaDouble(env, encoding.scale_resolution_down_by), + NativeToJavaString(env, encoding.scalability_mode), encoding.ssrc ? 
NativeToJavaLong(env, *encoding.ssrc) : nullptr, encoding.adaptive_ptime); } @@ -116,6 +117,11 @@ RtpEncodingParameters JavaToNativeRtpEncodingParameters( Java_Encoding_getScaleResolutionDownBy(jni, j_encoding_parameters); encoding.scale_resolution_down_by = JavaToNativeOptionalDouble(jni, j_scale_resolution_down_by); + ScopedJavaLocalRef j_scalability_mode = + Java_Encoding_getScalabilityMode(jni, j_encoding_parameters); + if (!IsNull(jni, j_scalability_mode)) { + encoding.scalability_mode = JavaToNativeString(jni,j_scalability_mode); + } encoding.adaptive_ptime = Java_Encoding_getAdaptivePTime(jni, j_encoding_parameters); ScopedJavaLocalRef j_ssrc = diff --git a/sdk/android/src/jni/pc/rtp_transceiver.cc b/sdk/android/src/jni/pc/rtp_transceiver.cc index 1d468461f1..34cba6ab5f 100644 --- a/sdk/android/src/jni/pc/rtp_transceiver.cc +++ b/sdk/android/src/jni/pc/rtp_transceiver.cc @@ -16,6 +16,7 @@ #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" #include "sdk/android/src/jni/pc/media_stream_track.h" +#include "sdk/android/src/jni/pc/rtp_capabilities.h" #include "sdk/android/src/jni/pc/rtp_parameters.h" #include "sdk/android/src/jni/pc/rtp_receiver.h" #include "sdk/android/src/jni/pc/rtp_sender.h" @@ -139,6 +140,16 @@ ScopedJavaLocalRef JNI_RtpTransceiver_CurrentDirection( : nullptr; } + +void JNI_RtpTransceiver_SetCodecPreferences(JNIEnv* jni, + jlong j_rtp_transceiver_pointer, + const JavaParamRef& j_codecs) { + std::vector codecs = + JavaListToNativeVector( + jni, j_codecs, &JavaToNativeRtpCodecCapability); + reinterpret_cast(j_rtp_transceiver_pointer)->SetCodecPreferences(codecs); +} + void JNI_RtpTransceiver_StopInternal(JNIEnv* jni, jlong j_rtp_transceiver_pointer) { reinterpret_cast(j_rtp_transceiver_pointer) diff --git a/sdk/android/src/jni/simulcast_video_encoder.cc b/sdk/android/src/jni/simulcast_video_encoder.cc new file mode 100644 index 0000000000..da31fbbfa5 --- /dev/null +++ 
b/sdk/android/src/jni/simulcast_video_encoder.cc @@ -0,0 +1,34 @@ +#include + +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/video_encoder_factory_wrapper.h" +#include "sdk/android/src/jni/video_codec_info.h" +#include "sdk/android/native_api/codecs/wrapper.h" +#include "media/engine/simulcast_encoder_adapter.h" +#include "rtc_base/logging.h" + +using namespace webrtc; +using namespace webrtc::jni; + +#ifdef __cplusplus +extern "C" { +#endif + +// (VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info) +JNIEXPORT jlong JNICALL Java_org_webrtc_SimulcastVideoEncoder_nativeCreateEncoder(JNIEnv *env, jclass klass, jobject primary, jobject fallback, jobject info) { + RTC_LOG(LS_INFO) << "Create simulcast video encoder"; + JavaParamRef info_ref(info); + SdpVideoFormat format = VideoCodecInfoToSdpVideoFormat(env, info_ref); + + // TODO: 影響は軽微だが、リークする可能性があるので将来的に修正したい + // https://github.com/shiguredo-webrtc-build/webrtc-build/pull/16#pullrequestreview-600675795 + return NativeToJavaPointer(std::make_unique( + JavaToNativeVideoEncoderFactory(env, primary).release(), + JavaToNativeVideoEncoderFactory(env, fallback).release(), + format).release()); +} + + +#ifdef __cplusplus +} +#endif diff --git a/sdk/android/src/jni/simulcast_video_encoder.h b/sdk/android/src/jni/simulcast_video_encoder.h new file mode 100644 index 0000000000..519be778e8 --- /dev/null +++ b/sdk/android/src/jni/simulcast_video_encoder.h @@ -0,0 +1,22 @@ +/* DO NOT EDIT THIS FILE - it is machine generated */ +#include +/* Header for class org_webrtc_SimulcastVideoEncoder */ + +#ifndef _Included_org_webrtc_SimulcastVideoEncoder +#define _Included_org_webrtc_SimulcastVideoEncoder +#ifdef __cplusplus +extern "C" { +#endif +/* + * Class: org_webrtc_SimulcastVideoEncoder + * Method: nativeCreateEncoder + * Signature: (Lorg/webrtc/VideoEncoderFactory;Lorg/webrtc/VideoEncoderFactory;Lorg/webrtc/VideoCodecInfo;)J + */ + +JNIEXPORT jlong JNICALL 
Java_org_webrtc_SimulcastVideoEncoder_nativeCreateEncoder + (JNIEnv *, jclass, jobject, jobject, jobject); + +#ifdef __cplusplus +} +#endif +#endif diff --git a/sdk/android/src/jni/video_codec_info.cc b/sdk/android/src/jni/video_codec_info.cc index a218a1d23f..a85dde67dc 100644 --- a/sdk/android/src/jni/video_codec_info.cc +++ b/sdk/android/src/jni/video_codec_info.cc @@ -13,15 +13,28 @@ #include "sdk/android/generated_video_jni/VideoCodecInfo_jni.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" +#include "api/video_codecs/scalability_mode.h" +#include "modules/video_coding/svc/scalability_mode_util.h" namespace webrtc { namespace jni { SdpVideoFormat VideoCodecInfoToSdpVideoFormat(JNIEnv* jni, const JavaRef& j_info) { + std::vector params = + JavaToStdVectorStrings(jni, Java_VideoCodecInfo_getScalabilityModes(jni, j_info)); + absl::InlinedVector + scalability_modes; + for (auto mode : params) { + auto scalability_mode = ScalabilityModeFromString(mode); + if (scalability_mode != absl::nullopt) { + scalability_modes.push_back(*scalability_mode); + } + } return SdpVideoFormat( JavaToNativeString(jni, Java_VideoCodecInfo_getName(jni, j_info)), - JavaToNativeStringMap(jni, Java_VideoCodecInfo_getParams(jni, j_info))); + JavaToNativeStringMap(jni, Java_VideoCodecInfo_getParams(jni, j_info)), + scalability_modes); } ScopedJavaLocalRef SdpVideoFormatToVideoCodecInfo( @@ -29,8 +42,17 @@ ScopedJavaLocalRef SdpVideoFormatToVideoCodecInfo( const SdpVideoFormat& format) { ScopedJavaLocalRef j_params = NativeToJavaStringMap(jni, format.parameters); + webrtc::ScopedJavaLocalRef j_scalability_modes; + if (!format.scalability_modes.empty()) { + JavaListBuilder builder(jni); + for (auto mode : format.scalability_modes) { + std::string scalability_mode(ScalabilityModeToString(mode)); + builder.add(NativeToJavaString(jni, scalability_mode)); + } + j_scalability_modes = builder.java_list(); + } return Java_VideoCodecInfo_Constructor( - 
jni, NativeToJavaString(jni, format.name), j_params); + jni, NativeToJavaString(jni, format.name), j_params, j_scalability_modes); } } // namespace jni diff --git a/sdk/android/src/jni/video_track.cc b/sdk/android/src/jni/video_track.cc index eb343ebdb3..2078359cbc 100644 --- a/sdk/android/src/jni/video_track.cc +++ b/sdk/android/src/jni/video_track.cc @@ -44,5 +44,16 @@ static void JNI_VideoTrack_FreeSink(JNIEnv* jni, jlong j_native_sink) { delete reinterpret_cast*>(j_native_sink); } +static void JNI_VideoTrack_SetShouldReceive(JNIEnv* jni, + jlong j_native_track, + jboolean should_receive) { + reinterpret_cast(j_native_track)->set_should_receive(should_receive); +} + +static jboolean JNI_VideoTrack_GetShouldReceive(JNIEnv* jni, + jlong j_native_track) { + return reinterpret_cast(j_native_track)->should_receive(); +} + } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/vp9_codec.cc b/sdk/android/src/jni/vp9_codec.cc index ad9ca793ce..e5929ad2f4 100644 --- a/sdk/android/src/jni/vp9_codec.cc +++ b/sdk/android/src/jni/vp9_codec.cc @@ -10,11 +10,18 @@ #include +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/svc/create_scalability_structure.h" + #include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Decoder_jni.h" #include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Encoder_jni.h" #include "sdk/android/src/jni/jni_helpers.h" +#include +#include + namespace webrtc { namespace jni { @@ -34,5 +41,14 @@ static jboolean JNI_LibvpxVp9Decoder_IsSupported(JNIEnv* jni) { return !SupportedVP9Codecs().empty(); } +static webrtc::ScopedJavaLocalRef JNI_LibvpxVp9Encoder_GetSupportedScalabilityModes(JNIEnv* jni) { + std::vector modes; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + 
modes.push_back(std::string(webrtc::ScalabilityModeToString(scalability_mode))); + } + } + return NativeToJavaStringArray(jni, modes); +} } // namespace jni } // namespace webrtc diff --git a/sdk/objc/PrivacyInfo.xcprivacy b/sdk/objc/PrivacyInfo.xcprivacy new file mode 100644 index 0000000000..7204a67c33 --- /dev/null +++ b/sdk/objc/PrivacyInfo.xcprivacy @@ -0,0 +1,28 @@ + + + + + NSPrivacyCollectedDataTypes + + NSPrivacyAccessedAPITypes + + + NSPrivacyAccessedAPIType + NSPrivacyAccessedAPICategoryFileTimestamp + NSPrivacyAccessedAPITypeReasons + + C617.1 + + + + NSPrivacyAccessedAPIType + NSPrivacyAccessedAPICategorySystemBootTime + NSPrivacyAccessedAPITypeReasons + + 35F9.1 + 8FFB.1 + + + + + \ No newline at end of file diff --git a/sdk/objc/api/RTCVideoRendererAdapter+Private.h b/sdk/objc/api/RTCVideoRendererAdapter+Private.h index 9b123d2d05..cac9ab665c 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter+Private.h +++ b/sdk/objc/api/RTCVideoRendererAdapter+Private.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCVideoRendererAdapter () +@interface RTC_OBJC_TYPE(RTCVideoRendererAdapter) () /** * The Objective-C video renderer passed to this adapter during construction. diff --git a/sdk/objc/api/RTCVideoRendererAdapter.h b/sdk/objc/api/RTCVideoRendererAdapter.h index b0b6f04488..bbb8c6e71c 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter.h +++ b/sdk/objc/api/RTCVideoRendererAdapter.h @@ -10,6 +10,8 @@ #import +#import "RTCMacros.h" + NS_ASSUME_NONNULL_BEGIN /* @@ -18,7 +20,7 @@ NS_ASSUME_NONNULL_BEGIN * adapter adapts calls made to that interface to the RTCVideoRenderer supplied * during construction. 
*/ -@interface RTCVideoRendererAdapter : NSObject +@interface RTC_OBJC_TYPE (RTCVideoRendererAdapter): NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/RTCVideoRendererAdapter.mm b/sdk/objc/api/RTCVideoRendererAdapter.mm index ef02f72f60..d992c64108 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter.mm +++ b/sdk/objc/api/RTCVideoRendererAdapter.mm @@ -17,10 +17,9 @@ namespace webrtc { -class VideoRendererAdapter - : public rtc::VideoSinkInterface { +class VideoRendererAdapter : public rtc::VideoSinkInterface { public: - VideoRendererAdapter(RTCVideoRendererAdapter* adapter) { + VideoRendererAdapter(RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter) { adapter_ = adapter; size_ = CGSizeZero; } @@ -28,9 +27,9 @@ void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override { RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame); - CGSize current_size = (videoFrame.rotation % 180 == 0) - ? CGSizeMake(videoFrame.width, videoFrame.height) - : CGSizeMake(videoFrame.height, videoFrame.width); + CGSize current_size = (videoFrame.rotation % 180 == 0) ? 
+ CGSizeMake(videoFrame.width, videoFrame.height) : + CGSizeMake(videoFrame.height, videoFrame.width); if (!CGSizeEqualToSize(size_, current_size)) { size_ = current_size; @@ -40,12 +39,12 @@ void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override { } private: - __weak RTCVideoRendererAdapter *adapter_; + __weak RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter_; CGSize size_; }; -} +} // namespace webrtc -@implementation RTCVideoRendererAdapter { +@implementation RTC_OBJC_TYPE (RTCVideoRendererAdapter) { std::unique_ptr _adapter; } @@ -60,7 +59,7 @@ - (instancetype)initWithNativeRenderer:(id)vide return self; } -- (rtc::VideoSinkInterface *)nativeVideoRenderer { +- (rtc::VideoSinkInterface*)nativeVideoRenderer { return _adapter.get(); } diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h b/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h new file mode 100644 index 0000000000..73c1a4e26a --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h @@ -0,0 +1,31 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCAudioDeviceModule.h" +#import "sdk/objc/native/api/audio_device_module.h" + +#include "rtc_base/thread.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCAudioDeviceModule) () + +- (instancetype)initWithNativeModule:(rtc::scoped_refptr )module + workerThread:(rtc::Thread *)workerThread; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h new file mode 100644 index 0000000000..b02cecfd0b --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h @@ -0,0 +1,56 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import + +#import "RTCMacros.h" +#import "RTCIODevice.h" + +NS_ASSUME_NONNULL_BEGIN + +typedef void (^RTCOnAudioDevicesDidUpdate)(); + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCAudioDeviceModule) : NSObject + +@property(nonatomic, readonly) NSArray *outputDevices; +@property(nonatomic, readonly) NSArray *inputDevices; + +@property(nonatomic, readonly) BOOL playing; +@property(nonatomic, readonly) BOOL recording; + +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCIODevice) *outputDevice; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCIODevice) *inputDevice; + +// Executes low-level API's in sequence to switch the device +// Use outputDevice / inputDevice property unless you need to know if setting the device is +// successful. 
+- (BOOL)trySetOutputDevice:(nullable RTC_OBJC_TYPE(RTCIODevice) *)device; +- (BOOL)trySetInputDevice:(nullable RTC_OBJC_TYPE(RTCIODevice) *)device; + +- (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler; + +- (BOOL)startPlayout; +- (BOOL)stopPlayout; +- (BOOL)initPlayout; +- (BOOL)startRecording; +- (BOOL)stopRecording; +- (BOOL)initRecording; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm new file mode 100644 index 0000000000..c88de392d7 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm @@ -0,0 +1,294 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#import "RTCAudioDeviceModule.h" +#import "RTCAudioDeviceModule+Private.h" +#import "RTCIODevice+Private.h" +#import "base/RTCLogging.h" + +#import "sdk/objc/native/api/audio_device_module.h" + +class AudioDeviceSink : public webrtc::AudioDeviceSink { + public: + AudioDeviceSink() {} + + void OnDevicesUpdated() override { + + RTCLogInfo(@"AudioDeviceSink OnDevicesUpdated"); + + if (callback_handler_) { + callback_handler_(); + } + } + + // private: + RTCOnAudioDevicesDidUpdate callback_handler_; +}; + +@implementation RTC_OBJC_TYPE (RTCAudioDeviceModule) { + rtc::Thread *_workerThread; + rtc::scoped_refptr _native; + AudioDeviceSink *_sink; +} + +- (instancetype)initWithNativeModule:(rtc::scoped_refptr )module + workerThread:(rtc::Thread * )workerThread { + + RTCLogInfo(@"RTCAudioDeviceModule initWithNativeModule:workerThread:"); + + self = [super init]; + _native = module; + _workerThread = workerThread; + + _sink = new AudioDeviceSink(); + + _workerThread->BlockingCall([self] { + _native->SetAudioDeviceSink(_sink); + }); + + return self; +} + +- (NSArray *)outputDevices { + + return _workerThread->BlockingCall([self] { + return [self _outputDevices]; + }); +} + +- (NSArray *)inputDevices { + return _workerThread->BlockingCall([self] { + return [self _inputDevices]; + }); +} + +- (RTC_OBJC_TYPE(RTCIODevice) *)outputDevice { + return _workerThread->BlockingCall([self] { + + NSArray *devices = [self _outputDevices]; + int16_t devicesCount = (int16_t)([devices count]); + int16_t index = _native->GetPlayoutDevice(); + + if (devicesCount == 0 || index <= -1 || index > (devicesCount - 1)) { + return (RTC_OBJC_TYPE(RTCIODevice) *)nil; + } + + return (RTC_OBJC_TYPE(RTCIODevice) *)[devices objectAtIndex:index]; + }); +} + +- (void)setOutputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { + [self trySetOutputDevice: device]; +} + +- (BOOL)trySetOutputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { + + return _workerThread->BlockingCall([self, device] { + + 
NSUInteger index = 0; + NSArray *devices = [self _outputDevices]; + + if ([devices count] == 0) { + return NO; + } + + if (device != nil) { + index = [devices indexOfObjectPassingTest:^BOOL(RTC_OBJC_TYPE(RTCIODevice) *e, NSUInteger i, BOOL *stop) { + return (*stop = [e.deviceId isEqualToString:device.deviceId]); + }]; + if (index == NSNotFound) { + return NO; + } + } + + _native->StopPlayout(); + + if (_native->SetPlayoutDevice(index) == 0 + && _native->InitPlayout() == 0 + && _native->StartPlayout() == 0) { + + return YES; + } + + return NO; + }); +} + +- (RTC_OBJC_TYPE(RTCIODevice) *)inputDevice { + + return _workerThread->BlockingCall([self] { + + NSArray *devices = [self _inputDevices]; + int16_t devicesCount = (int16_t)([devices count]); + int16_t index = _native->GetRecordingDevice(); + + if (devicesCount == 0 || index <= -1 || index > (devicesCount - 1)) { + return (RTC_OBJC_TYPE(RTCIODevice) *)nil; + } + + return (RTC_OBJC_TYPE(RTCIODevice) *)[devices objectAtIndex:index]; + }); +} + +- (void)setInputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { + [self trySetInputDevice: device]; +} + +- (BOOL)trySetInputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { + + return _workerThread->BlockingCall([self, device] { + + NSUInteger index = 0; + NSArray *devices = [self _inputDevices]; + + if ([devices count] == 0) { + return NO; + } + + if (device != nil) { + index = [devices indexOfObjectPassingTest:^BOOL(RTC_OBJC_TYPE(RTCIODevice) *e, NSUInteger i, BOOL *stop) { + return (*stop = [e.deviceId isEqualToString:device.deviceId]); + }]; + if (index == NSNotFound) { + return NO; + } + } + + _native->StopRecording(); + + if (_native->SetRecordingDevice(index) == 0 + && _native->InitRecording() == 0 + && _native->StartRecording() == 0) { + + return YES; + } + + return NO; + }); +} + +- (BOOL)playing { + + return _workerThread->BlockingCall([self] { + return _native->Playing(); + }); +} + +- (BOOL)recording { + + return _workerThread->BlockingCall([self] { + return 
_native->Recording(); + }); +} + +#pragma mark - Low-level access + +- (BOOL)startPlayout { + + return _workerThread->BlockingCall([self] { + return _native->StartPlayout() == 0; + }); +} + +- (BOOL)stopPlayout { + + return _workerThread->BlockingCall([self] { + return _native->StopPlayout() == 0; + }); +} + +- (BOOL)initPlayout { + + return _workerThread->BlockingCall([self] { + return _native->InitPlayout() == 0; + }); +} + +- (BOOL)startRecording { + + return _workerThread->BlockingCall([self] { + return _native->StartRecording() == 0; + }); +} + +- (BOOL)stopRecording { + + return _workerThread->BlockingCall([self] { + return _native->StopRecording() == 0; + }); +} + +- (BOOL)initRecording { + + return _workerThread->BlockingCall([self] { + return _native->InitRecording() == 0; + }); +} + +- (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler { + _sink->callback_handler_ = handler; + return YES; +} + +#pragma mark - Private + +- (NSArray *)_outputDevices { + + char guid[webrtc::kAdmMaxGuidSize + 1] = {0}; + char name[webrtc::kAdmMaxDeviceNameSize + 1] = {0}; + + NSMutableArray *result = [NSMutableArray array]; + + int16_t count = _native->PlayoutDevices(); + + if (count > 0) { + for (int i = 0; i < count; i++) { + _native->PlayoutDeviceName(i, name, guid); + NSString *strGUID = [[NSString alloc] initWithCString:guid encoding:NSUTF8StringEncoding]; + NSString *strName = [[NSString alloc] initWithCString:name encoding:NSUTF8StringEncoding]; + RTC_OBJC_TYPE(RTCIODevice) *device = [[RTC_OBJC_TYPE(RTCIODevice) alloc] initWithType:RTCIODeviceTypeOutput deviceId:strGUID name:strName]; + [result addObject: device]; + } + } + + return result; +} + +- (NSArray *)_inputDevices { + + char guid[webrtc::kAdmMaxGuidSize + 1] = {0}; + char name[webrtc::kAdmMaxDeviceNameSize + 1] = {0}; + + NSMutableArray *result = [NSMutableArray array]; + + int16_t count = _native->RecordingDevices(); + + if (count > 0) { + for (int i = 0; i < count; i++) { + 
_native->RecordingDeviceName(i, name, guid); + NSString *strGUID = [[NSString alloc] initWithCString:guid encoding:NSUTF8StringEncoding]; + NSString *strName = [[NSString alloc] initWithCString:name encoding:NSUTF8StringEncoding]; + RTC_OBJC_TYPE(RTCIODevice) *device = [[RTC_OBJC_TYPE(RTCIODevice) alloc] initWithType:RTCIODeviceTypeInput deviceId:strGUID name:strName]; + [result addObject: device]; + } + } + + return result; +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h index 6495500484..38c0bd3b1b 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h @@ -8,6 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ +#import #import "RTCAudioTrack.h" #include "api/media_stream_interface.h" @@ -15,17 +16,18 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTC_OBJC_TYPE (RTCAudioTrack) -() +@interface RTC_OBJC_TYPE (RTCAudioTrack) () - /** AudioTrackInterface created or passed in at construction. */ - @property(nonatomic, readonly) rtc::scoped_refptr nativeAudioTrack; +/** AudioTrackInterface created or passed in at construction. */ +@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioTrack; /** Initialize an RTCAudioTrack with an id. 
*/ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory source:(RTC_OBJC_TYPE(RTCAudioSource) *)source trackId:(NSString *)trackId; +- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.h b/sdk/objc/api/peerconnection/RTCAudioTrack.h index 95eb5d3d48..c8218ad926 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.h @@ -13,6 +13,7 @@ NS_ASSUME_NONNULL_BEGIN +@protocol RTC_OBJC_TYPE (RTCAudioRenderer); @class RTC_OBJC_TYPE(RTCAudioSource); RTC_OBJC_EXPORT @@ -23,6 +24,13 @@ RTC_OBJC_EXPORT /** The audio source for this audio track. */ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioSource) * source; +/** Register a renderer that will receive all audio CMSampleBuffers on this track. + * Does not retain. */ +- (void)addRenderer:(id)renderer; + +/** Deregister a renderer */ +- (void)removeRenderer:(id)renderer; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index 5c1736f436..40a6f5362c 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -8,8 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#import +#import + #import "RTCAudioTrack+Private.h" +#import "RTCAudioRenderer.h" #import "RTCAudioSource+Private.h" #import "RTCMediaStreamTrack+Private.h" #import "RTCPeerConnectionFactory+Private.h" @@ -17,7 +21,170 @@ #include "rtc_base/checks.h" -@implementation RTC_OBJC_TYPE (RTCAudioTrack) +namespace webrtc { +/** + * Captures audio data and converts to CMSampleBuffers + */ +class AudioSinkConverter : public rtc::RefCountInterface, public webrtc::AudioTrackSinkInterface { + private: + os_unfair_lock *lock_; + __weak RTC_OBJC_TYPE(RTCAudioTrack) *audio_track_; + int64_t total_frames_ = 0; + bool attached_ = false; + + public: + AudioSinkConverter(RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack, os_unfair_lock *lock) { + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter init"; + audio_track_ = audioTrack; + lock_ = lock; + } + + ~AudioSinkConverter() { + // + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter dealloc"; + } + + // Must be called while locked + void TryAttach() { + if (attached_) { + // Already attached + return; + } + RTC_LOG(LS_INFO) << "RTCAudioTrack attaching sink..."; + // Reset for creating CMSampleTimingInfo correctly + audio_track_.nativeAudioTrack->AddSink(this); + total_frames_ = 0; + attached_ = true; + } + + // Must be called while locked + void TryDetach() { + if (!attached_) { + // Already detached + return; + } + RTC_LOG(LS_INFO) << "RTCAudioTrack detaching sink..."; + audio_track_.nativeAudioTrack->RemoveSink(this); + attached_ = false; + } + + void OnData(const void *audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) override { + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter OnData bits_per_sample: " + << bits_per_sample << " sample_rate: " << sample_rate + << " number_of_channels: " << number_of_channels + << " number_of_frames: " << number_of_frames + << " absolute_capture_timestamp_ms: " + << 
(absolute_capture_timestamp_ms ? absolute_capture_timestamp_ms.value() : 0); + + bool is_locked = os_unfair_lock_trylock(lock_); + if (!is_locked) { + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter OnData already locked, skipping..."; + return; + } + bool is_attached = attached_; + os_unfair_lock_unlock(lock_); + + if (!is_attached) { + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter OnData already detached, skipping..."; + return; + } + + /* + * Convert to CMSampleBuffer + */ + + if (!(number_of_channels == 1 || number_of_channels == 2)) { + NSLog(@"RTCAudioTrack: Only mono or stereo is supported currently. numberOfChannels: %zu", + number_of_channels); + return; + } + + OSStatus status; + + AudioChannelLayout acl; + bzero(&acl, sizeof(acl)); + acl.mChannelLayoutTag = + number_of_channels == 2 ? kAudioChannelLayoutTag_Stereo : kAudioChannelLayoutTag_Mono; + + AudioStreamBasicDescription sd; + sd.mSampleRate = sample_rate; + sd.mFormatID = kAudioFormatLinearPCM; + sd.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked; + sd.mFramesPerPacket = 1; + sd.mChannelsPerFrame = number_of_channels; + sd.mBitsPerChannel = bits_per_sample; /* 16 */ + sd.mBytesPerFrame = sd.mChannelsPerFrame * (sd.mBitsPerChannel / 8); + sd.mBytesPerPacket = sd.mBytesPerFrame; + + CMSampleTimingInfo timing = { + CMTimeMake(1, sample_rate), + CMTimeMake(total_frames_, sample_rate), + kCMTimeInvalid, + }; + + total_frames_ += number_of_frames; // update the total + + CMFormatDescriptionRef format = NULL; + status = CMAudioFormatDescriptionCreate( + kCFAllocatorDefault, &sd, sizeof(acl), &acl, 0, NULL, NULL, &format); + + if (status != 0) { + NSLog(@"RTCAudioTrack: Failed to create audio format description"); + return; + } + + CMSampleBufferRef buffer; + status = CMSampleBufferCreate(kCFAllocatorDefault, + NULL, + false, + NULL, + NULL, + format, + (CMItemCount)number_of_frames, + 1, + &timing, + 0, + NULL, + &buffer); + // format is no longer required + 
CFRelease(format); + + if (status != 0) { + NSLog(@"RTCAudioTrack: Failed to allocate sample buffer"); + return; + } + + AudioBufferList bufferList; + bufferList.mNumberBuffers = 1; + bufferList.mBuffers[0].mNumberChannels = sd.mChannelsPerFrame; + bufferList.mBuffers[0].mDataByteSize = (UInt32)(number_of_frames * sd.mBytesPerFrame); + bufferList.mBuffers[0].mData = (void *)audio_data; + status = CMSampleBufferSetDataBufferFromAudioBufferList( + buffer, kCFAllocatorDefault, kCFAllocatorDefault, 0, &bufferList); + if (status != 0) { + NSLog(@"RTCAudioTrack: Failed to convert audio buffer list into sample buffer"); + return; + } + + // Report back to RTCAudioTrack + [audio_track_ didCaptureSampleBuffer:buffer]; + + CFRelease(buffer); + } +}; +} // namespace webrtc + +@implementation RTC_OBJC_TYPE (RTCAudioTrack) { + rtc::scoped_refptr _audioConverter; + // Stores weak references to renderers + NSHashTable *_renderers; + os_unfair_lock _lock; +} @synthesize source = _source; @@ -43,7 +210,22 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto NSParameterAssert(factory); NSParameterAssert(nativeTrack); NSParameterAssert(type == RTCMediaStreamTrackTypeAudio); - return [super initWithFactory:factory nativeTrack:nativeTrack type:type]; + if (self = [super initWithFactory:factory nativeTrack:nativeTrack type:type]) { + RTC_LOG(LS_INFO) << "RTCAudioTrack init"; + _lock = OS_UNFAIR_LOCK_INIT; + _renderers = [NSHashTable weakObjectsHashTable]; + _audioConverter = new rtc::RefCountedObject(self, &_lock); + } + + return self; +} + +- (void)dealloc { + os_unfair_lock_lock(&_lock); + _audioConverter->TryDetach(); + os_unfair_lock_unlock(&_lock); + + RTC_LOG(LS_INFO) << "RTCAudioTrack dealloc"; } - (RTC_OBJC_TYPE(RTCAudioSource) *)source { @@ -57,6 +239,25 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto return _source; } +- (void)addRenderer:(id)renderer { + os_unfair_lock_lock(&_lock); + [_renderers 
addObject:renderer]; + _audioConverter->TryAttach(); + os_unfair_lock_unlock(&_lock); +} + +- (void)removeRenderer:(id)renderer { + os_unfair_lock_lock(&_lock); + [_renderers removeObject:renderer]; + NSUInteger renderersCount = _renderers.allObjects.count; + + if (renderersCount == 0) { + // Detach if no more renderers... + _audioConverter->TryDetach(); + } + os_unfair_lock_unlock(&_lock); +} + #pragma mark - Private - (rtc::scoped_refptr)nativeAudioTrack { @@ -64,4 +265,18 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto static_cast(self.nativeTrack.get())); } +- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer { + bool is_locked = os_unfair_lock_trylock(&_lock); + if (!is_locked) { + RTC_LOG(LS_INFO) << "RTCAudioTrack didCaptureSampleBuffer already locked, skipping..."; + return; + } + NSArray *renderers = [_renderers allObjects]; + os_unfair_lock_unlock(&_lock); + + for (id renderer in renderers) { + [renderer renderSampleBuffer:sampleBuffer]; + } +} + @end diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.h b/sdk/objc/api/peerconnection/RTCConfiguration.h index 1b0d14baf1..dc7fd4b990 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration.h @@ -261,6 +261,17 @@ RTC_OBJC_EXPORT */ @property(nonatomic, copy, nullable) NSNumber *iceInactiveTimeout; +/** + * When this flag is set, ports not bound to any specific network interface + * will be used, in addition to normal ports bound to the enumerated + * interfaces. Without this flag, these "any address" ports would only be + * used when network enumeration fails or is disabled. But under certain + * conditions, these ports may succeed where others fail, so they may allow + * the application to work in a wider variety of environments, at the expense + * of having to allocate additional candidates. 
+ */ +@property(nonatomic, assign) BOOL enableIceGatheringOnAnyAddressPorts; + - (instancetype)init; @end diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.mm b/sdk/objc/api/peerconnection/RTCConfiguration.mm index 8e42cb2a82..994bb93f0b 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.mm +++ b/sdk/objc/api/peerconnection/RTCConfiguration.mm @@ -63,6 +63,7 @@ @implementation RTC_OBJC_TYPE (RTCConfiguration) @synthesize iceUnwritableTimeout = _iceUnwritableTimeout; @synthesize iceUnwritableMinChecks = _iceUnwritableMinChecks; @synthesize iceInactiveTimeout = _iceInactiveTimeout; +@synthesize enableIceGatheringOnAnyAddressPorts = _enableIceGatheringOnAnyAddressPorts; - (instancetype)init { // Copy defaults. @@ -158,6 +159,7 @@ - (instancetype)initWithNativeConfiguration: _iceInactiveTimeout = config.ice_inactive_timeout.has_value() ? [NSNumber numberWithInt:*config.ice_inactive_timeout] : nil; + _enableIceGatheringOnAnyAddressPorts = config.enable_any_address_ports; } return self; } @@ -306,6 +308,7 @@ - (NSString *)description { if (_iceInactiveTimeout != nil) { nativeConfig->ice_inactive_timeout = absl::optional(_iceInactiveTimeout.intValue); } + nativeConfig->enable_any_address_ports = _enableIceGatheringOnAnyAddressPorts; return nativeConfig.release(); } diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm index 7f8ae739e0..3e9c768a6f 100644 --- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm +++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm @@ -34,16 +34,16 @@ explicit ObjCEncodedImageBuffer(NSData *data) : data_(data) {} NSData *data_; }; -} +} // namespace // A simple wrapper around webrtc::EncodedImageBufferInterface to make it usable with associated // objects. 
-@interface RTCWrappedEncodedImageBuffer : NSObject +@interface RTC_OBJC_TYPE (RTCWrappedEncodedImageBuffer): NSObject @property(nonatomic) rtc::scoped_refptr buffer; - (instancetype)initWithEncodedImageBuffer: (rtc::scoped_refptr)buffer; @end -@implementation RTCWrappedEncodedImageBuffer +@implementation RTC_OBJC_TYPE (RTCWrappedEncodedImageBuffer) @synthesize buffer = _buffer; - (instancetype)initWithEncodedImageBuffer: (rtc::scoped_refptr)buffer { @@ -59,7 +59,7 @@ @implementation RTC_OBJC_TYPE (RTCEncodedImage) (Private) - (rtc::scoped_refptr)encodedData { - RTCWrappedEncodedImageBuffer *wrappedBuffer = + RTC_OBJC_TYPE(RTCWrappedEncodedImageBuffer) *wrappedBuffer = objc_getAssociatedObject(self, @selector(encodedData)); return wrappedBuffer.buffer; } @@ -68,7 +68,7 @@ - (void)setEncodedData:(rtc::scoped_refptr) return objc_setAssociatedObject( self, @selector(encodedData), - [[RTCWrappedEncodedImageBuffer alloc] initWithEncodedImageBuffer:buffer], + [[RTC_OBJC_TYPE(RTCWrappedEncodedImageBuffer) alloc] initWithEncodedImageBuffer:buffer], OBJC_ASSOCIATION_RETAIN_NONATOMIC); } diff --git a/sdk/objc/api/peerconnection/RTCFieldTrials.mm b/sdk/objc/api/peerconnection/RTCFieldTrials.mm index 193da9e4f7..b5a2eca8f0 100644 --- a/sdk/objc/api/peerconnection/RTCFieldTrials.mm +++ b/sdk/objc/api/peerconnection/RTCFieldTrials.mm @@ -10,47 +10,58 @@ #import "RTCFieldTrials.h" +#import #include - #import "base/RTCLogging.h" #include "system_wrappers/include/field_trial.h" NSString *const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC"; -NSString * const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised"; -NSString * const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03"; -NSString * const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; -NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey = +NSString *const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised"; +NSString *const 
kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03"; +NSString *const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; +NSString *const kRTCFieldTrialMinimizeResamplingOnMobileKey = @"WebRTC-Audio-MinimizeResamplingOnMobile"; NSString *const kRTCFieldTrialUseNWPathMonitor = @"WebRTC-Network-UseNWPathMonitor"; -NSString * const kRTCFieldTrialEnabledValue = @"Enabled"; +NSString *const kRTCFieldTrialEnabledValue = @"Enabled"; // InitFieldTrialsFromString stores the char*, so the char array must outlive // the application. static char *gFieldTrialInitString = nullptr; +static os_unfair_lock fieldTrialLock = OS_UNFAIR_LOCK_INIT; void RTCInitFieldTrialDictionary(NSDictionary *fieldTrials) { if (!fieldTrials) { RTCLogWarning(@"No fieldTrials provided."); return; } + // Assemble the keys and values into the field trial string. - // We don't perform any extra format checking. That should be done by the underlying WebRTC calls. NSMutableString *fieldTrialInitString = [NSMutableString string]; for (NSString *key in fieldTrials) { NSString *fieldTrialEntry = [NSString stringWithFormat:@"%@/%@/", key, fieldTrials[key]]; [fieldTrialInitString appendString:fieldTrialEntry]; } + size_t len = fieldTrialInitString.length + 1; + + // Locking before modifying global variable + os_unfair_lock_lock(&fieldTrialLock); if (gFieldTrialInitString != nullptr) { delete[] gFieldTrialInitString; + gFieldTrialInitString = nullptr; } + gFieldTrialInitString = new char[len]; - if (![fieldTrialInitString getCString:gFieldTrialInitString - maxLength:len - encoding:NSUTF8StringEncoding]) { + bool success = [fieldTrialInitString getCString:gFieldTrialInitString + maxLength:len + encoding:NSUTF8StringEncoding]; + if (!success) { RTCLogError(@"Failed to convert field trial string."); + os_unfair_lock_unlock(&fieldTrialLock); return; } + webrtc::field_trial::InitFieldTrialsFromString(gFieldTrialInitString); + os_unfair_lock_unlock(&fieldTrialLock); } diff --git 
a/sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h b/sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h new file mode 100644 index 0000000000..86e6fdff8c --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h @@ -0,0 +1,45 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCFrameCryptor.h" + +#include +#include "api/crypto/frame_crypto_transformer.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCFrameCryptor) +() + + @end + +namespace webrtc { + +class RTCFrameCryptorDelegateAdapter : public FrameCryptorTransformerObserver { + public: + RTCFrameCryptorDelegateAdapter(RTC_OBJC_TYPE(RTCFrameCryptor) * frameCryptor); + ~RTCFrameCryptorDelegateAdapter() override; + + void OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState state) override; + + private: + __weak RTC_OBJC_TYPE(RTCFrameCryptor) * frame_cryptor_; +}; + +} // namespace webrtc + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.h b/sdk/objc/api/peerconnection/RTCFrameCryptor.h new file mode 100644 index 0000000000..864e55be95 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.h @@ -0,0 +1,78 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCRtpSender); +@class RTC_OBJC_TYPE(RTCRtpReceiver); +@class RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider); +@class RTC_OBJC_TYPE(RTCFrameCryptor); +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); + +typedef NS_ENUM(NSUInteger, RTCCryptorAlgorithm) { + RTCCryptorAlgorithmAesGcm = 0, + RTCCryptorAlgorithmAesCbc, +}; + +typedef NS_ENUM(NSInteger, FrameCryptionState) { + FrameCryptionStateNew = 0, + FrameCryptionStateOk, + FrameCryptionStateEncryptionFailed, + FrameCryptionStateDecryptionFailed, + FrameCryptionStateMissingKey, + FrameCryptionStateKeyRatcheted, + FrameCryptionStateInternalError, +}; + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(RTCFrameCryptorDelegate) + /** Called when the RTCFrameCryptor got errors. 
*/ + - (void)frameCryptor + : (RTC_OBJC_TYPE(RTCFrameCryptor) *)frameCryptor didStateChangeWithParticipantId + : (NSString *)participantId withState : (FrameCryptionState)stateChanged; +@end + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCFrameCryptor) : NSObject + +@property(nonatomic, assign) BOOL enabled; + +@property(nonatomic, assign) int keyIndex; + +@property(nonatomic, readonly) NSString *participantId; + +@property(nonatomic, weak, nullable) id delegate; + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender + participantId:(NSString *)participantId + algorithm:(RTCCryptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver + participantId:(NSString *)participantId + algorithm:(RTCCryptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm new file mode 100644 index 0000000000..c51b77e9c5 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm @@ -0,0 +1,224 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCFrameCryptor+Private.h" +#import "RTCFrameCryptorKeyProvider+Private.h" +#import "RTCPeerConnectionFactory+Private.h" +#import "RTCRtpReceiver+Private.h" +#import "RTCRtpSender+Private.h" + +#import +#include + +#import "base/RTCLogging.h" +#import "helpers/NSString+StdString.h" + +#include "api/crypto/frame_crypto_transformer.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" + +namespace webrtc { + +RTCFrameCryptorDelegateAdapter::RTCFrameCryptorDelegateAdapter(RTC_OBJC_TYPE(RTCFrameCryptor) * + frameCryptor) + : frame_cryptor_(frameCryptor) {} + +RTCFrameCryptorDelegateAdapter::~RTCFrameCryptorDelegateAdapter() {} + +/* + kNew = 0, + kOk, + kEncryptionFailed, + kDecryptionFailed, + kMissingKey, + kInternalError, +*/ +void RTCFrameCryptorDelegateAdapter::OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState state) { + RTC_OBJC_TYPE(RTCFrameCryptor) *frameCryptor = frame_cryptor_; + if (frameCryptor.delegate) { + switch (state) { + case FrameCryptionState::kNew: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateNew]; + break; + case FrameCryptionState::kOk: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateOk]; + break; + case FrameCryptionState::kEncryptionFailed: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateEncryptionFailed]; + break; + case FrameCryptionState::kDecryptionFailed: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateDecryptionFailed]; + break; + case FrameCryptionState::kMissingKey: + [frameCryptor.delegate frameCryptor:frameCryptor 
+ didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateMissingKey]; + break; + case FrameCryptionState::kKeyRatcheted: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateKeyRatcheted]; + break; + case FrameCryptionState::kInternalError: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateInternalError]; + break; + } + } +} +} // namespace webrtc + +@implementation RTC_OBJC_TYPE (RTCFrameCryptor) { + const webrtc::RtpSenderInterface *_sender; + const webrtc::RtpReceiverInterface *_receiver; + rtc::scoped_refptr _frame_crypto_transformer; + rtc::scoped_refptr _observer; + os_unfair_lock _lock; +} + +@synthesize participantId = _participantId; +@synthesize delegate = _delegate; + +- (webrtc::FrameCryptorTransformer::Algorithm)algorithmFromEnum:(RTCCryptorAlgorithm)algorithm { + switch (algorithm) { + case RTCCryptorAlgorithmAesGcm: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + case RTCCryptorAlgorithmAesCbc: + return webrtc::FrameCryptorTransformer::Algorithm::kAesCbc; + default: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + } +} + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender + participantId:(NSString *)participantId + algorithm:(RTCCryptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { + if (self = [super init]) { + _lock = OS_UNFAIR_LOCK_INIT; + + rtc::scoped_refptr nativeRtpSender = sender.nativeRtpSender; + if (nativeRtpSender == nullptr) return nil; + + rtc::scoped_refptr nativeTrack = nativeRtpSender->track(); + if (nativeTrack == nullptr) return nil; + + _observer = rtc::make_ref_counted(self); + _participantId = 
participantId; + + webrtc::FrameCryptorTransformer::MediaType mediaType = + nativeTrack->kind() == "audio" ? webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + + _frame_crypto_transformer = + rtc::scoped_refptr(new webrtc::FrameCryptorTransformer( + factory.signalingThread, [participantId stdString], mediaType, + [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); + + nativeRtpSender->SetEncoderToPacketizerFrameTransformer(_frame_crypto_transformer); + _frame_crypto_transformer->SetEnabled(false); + _frame_crypto_transformer->RegisterFrameCryptorTransformerObserver(_observer); + } + + return self; +} + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver + participantId:(NSString *)participantId + algorithm:(RTCCryptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { + if (self = [super init]) { + _lock = OS_UNFAIR_LOCK_INIT; + + rtc::scoped_refptr nativeRtpReceiver = receiver.nativeRtpReceiver; + if (nativeRtpReceiver == nullptr) return nil; + + rtc::scoped_refptr nativeTrack = nativeRtpReceiver->track(); + if (nativeTrack == nullptr) return nil; + + _observer = rtc::make_ref_counted(self); + _participantId = participantId; + + webrtc::FrameCryptorTransformer::MediaType mediaType = + nativeTrack->kind() == "audio" ? 
webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + + _frame_crypto_transformer = + rtc::scoped_refptr(new webrtc::FrameCryptorTransformer( + factory.signalingThread, [participantId stdString], mediaType, + [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); + + nativeRtpReceiver->SetDepacketizerToDecoderFrameTransformer(_frame_crypto_transformer); + _frame_crypto_transformer->SetEnabled(false); + _frame_crypto_transformer->RegisterFrameCryptorTransformerObserver(_observer); + } + + return self; +} + +- (void)dealloc { + os_unfair_lock_lock(&_lock); + if (_frame_crypto_transformer != nullptr) { + _frame_crypto_transformer->UnRegisterFrameCryptorTransformerObserver(); + _frame_crypto_transformer = nullptr; + } + _observer = nullptr; + os_unfair_lock_unlock(&_lock); +} + +- (BOOL)enabled { + os_unfair_lock_lock(&_lock); + BOOL result = _frame_crypto_transformer != nullptr ? _frame_crypto_transformer->enabled() : NO; + os_unfair_lock_unlock(&_lock); + return result; +} + +- (void)setEnabled:(BOOL)enabled { + os_unfair_lock_lock(&_lock); + if (_frame_crypto_transformer != nullptr) { + _frame_crypto_transformer->SetEnabled(enabled); + } + os_unfair_lock_unlock(&_lock); +} + +- (int)keyIndex { + os_unfair_lock_lock(&_lock); + int result = _frame_crypto_transformer != nullptr ? 
_frame_crypto_transformer->key_index() : 0; + os_unfair_lock_unlock(&_lock); + return result; +} + +- (void)setKeyIndex:(int)keyIndex { + os_unfair_lock_lock(&_lock); + if (_frame_crypto_transformer != nullptr) { + _frame_crypto_transformer->SetKeyIndex(keyIndex); + } + os_unfair_lock_unlock(&_lock); +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h new file mode 100644 index 0000000000..eb7c83e2e7 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h @@ -0,0 +1,31 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCFrameCryptorKeyProvider.h" + +#include "api/crypto/frame_crypto_transformer.h" +#include "rtc_base/ref_count.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) +() + + @property(nonatomic, readonly) rtc::scoped_refptr nativeKeyProvider; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h new file mode 100644 index 0000000000..6443b23349 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h @@ -0,0 +1,62 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) : NSObject + +- (void)setSharedKey:(NSData *)key withIndex:(int)index; + +- (NSData *)ratchetSharedKey:(int)index; + +- (NSData *)exportSharedKey:(int)index; + +- (void)setKey:(NSData *)key withIndex:(int)index forParticipant:(NSString *)participantId; + +- (NSData *)ratchetKey:(NSString *)participantId withIndex:(int)index; + +- (NSData *)exportKey:(NSString *)participantId withIndex:(int)index; + +- (void)setSifTrailer:(NSData *)trailer; + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes; + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes + failureTolerance:(int)failureTolerance + keyRingSize:(int)keyRingSize; + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes + failureTolerance:(int)failureTolerance + keyRingSize:(int)keyRingSize + discardFrameWhenCryptorNotReady:(BOOL)discardFrameWhenCryptorNotReady; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm new file mode 100644 index 0000000000..88bebfcd9d --- 
// ==== sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm (new file in patch) ====
/*
 * Copyright 2022 LiveKit
 * Licensed under the Apache License, Version 2.0 (the "License");
 * full license header text as in the original patch.
 */

#import "RTCFrameCryptorKeyProvider+Private.h"

// NOTE(review): the include target was lost in extraction; <vector> is the
// header required by the std::vector usage below — confirm against upstream.
#include <vector>

#include "api/crypto/frame_crypto_transformer.h"

#import "base/RTCLogging.h"
#import "helpers/NSString+StdString.h"

@implementation RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) {
  // NOTE(review): template arguments throughout this file were stripped during
  // extraction and have been restored to match the native key-provider API.
  rtc::scoped_refptr<webrtc::KeyProvider> _nativeKeyProvider;
}

/// Exposes the wrapped native key provider to other SDK internals.
- (rtc::scoped_refptr<webrtc::KeyProvider>)nativeKeyProvider {
  return _nativeKeyProvider;
}

/// Convenience initializer: no failure tolerance (-1 = unlimited) and the
/// default key-ring size.
- (instancetype)initWithRatchetSalt:(NSData *)salt
                  ratchetWindowSize:(int)windowSize
                      sharedKeyMode:(BOOL)sharedKey
                uncryptedMagicBytes:(NSData *)uncryptedMagicBytes {
  return [self initWithRatchetSalt:salt
                 ratchetWindowSize:windowSize
                     sharedKeyMode:sharedKey
               uncryptedMagicBytes:uncryptedMagicBytes
                  failureTolerance:-1
                       keyRingSize:webrtc::DEFAULT_KEYRING_SIZE];
}

/// Convenience initializer that defaults discardFrameWhenCryptorNotReady to NO.
- (instancetype)initWithRatchetSalt:(NSData *)salt
                  ratchetWindowSize:(int)windowSize
                      sharedKeyMode:(BOOL)sharedKey
                uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes
                   failureTolerance:(int)failureTolerance
                        keyRingSize:(int)keyRingSize {
  // BUG FIX: the original forwarded a hard-coded -1 here, silently discarding
  // the caller-supplied failureTolerance argument. Pass the parameter through.
  return [self initWithRatchetSalt:salt
                  ratchetWindowSize:windowSize
                      sharedKeyMode:sharedKey
                uncryptedMagicBytes:uncryptedMagicBytes
                   failureTolerance:failureTolerance
                        keyRingSize:keyRingSize
    discardFrameWhenCryptorNotReady:NO];
}

/// Designated initializer: builds the native KeyProviderOptions and creates
/// the ref-counted native key provider.
- (instancetype)initWithRatchetSalt:(NSData *)salt
                  ratchetWindowSize:(int)windowSize
                      sharedKeyMode:(BOOL)sharedKey
                uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes
                   failureTolerance:(int)failureTolerance
                        keyRingSize:(int)keyRingSize
    discardFrameWhenCryptorNotReady:(BOOL)discardFrameWhenCryptorNotReady {
  if (self = [super init]) {
    webrtc::KeyProviderOptions options;
    options.ratchet_salt = std::vector<uint8_t>(
        (const uint8_t *)salt.bytes, ((const uint8_t *)salt.bytes) + salt.length);
    options.ratchet_window_size = windowSize;
    options.shared_key = sharedKey;
    options.failure_tolerance = failureTolerance;
    options.key_ring_size = keyRingSize;
    options.discard_frame_when_cryptor_not_ready = discardFrameWhenCryptorNotReady;
    if (uncryptedMagicBytes != nil) {
      options.uncrypted_magic_bytes = std::vector<uint8_t>(
          (const uint8_t *)uncryptedMagicBytes.bytes,
          ((const uint8_t *)uncryptedMagicBytes.bytes) + uncryptedMagicBytes.length);
    }
    // NOTE(review): the template argument was stripped in extraction; restored
    // as the default native implementation — confirm against
    // api/crypto/frame_crypto_transformer.h.
    _nativeKeyProvider = rtc::make_ref_counted<webrtc::DefaultKeyProviderImpl>(options);
  }
  return self;
}

/// Installs `key` at `index` for a specific participant.
- (void)setKey:(NSData *)key withIndex:(int)index forParticipant:(NSString *)participantId {
  _nativeKeyProvider->SetKey(
      [participantId stdString],
      index,
      std::vector<uint8_t>((const uint8_t *)key.bytes, ((const uint8_t *)key.bytes) + key.length));
}

/// Installs `key` at `index` for shared-key mode.
- (void)setSharedKey:(NSData *)key withIndex:(int)index {
  _nativeKeyProvider->SetSharedKey(
      index,
      std::vector<uint8_t>((const uint8_t *)key.bytes, ((const uint8_t *)key.bytes) + key.length));
}

/// Ratchets the shared key at `index` forward and returns the new key material.
- (NSData *)ratchetSharedKey:(int)index {
  std::vector<uint8_t> nativeKey = _nativeKeyProvider->RatchetSharedKey(index);
  return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()];
}

/// Returns the current shared key material at `index`.
- (NSData *)exportSharedKey:(int)index {
  std::vector<uint8_t> nativeKey = _nativeKeyProvider->ExportSharedKey(index);
  return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()];
}

/// Ratchets the participant's key at `index` forward and returns the new key.
- (NSData *)ratchetKey:(NSString *)participantId withIndex:(int)index {
  std::vector<uint8_t> nativeKey =
      _nativeKeyProvider->RatchetKey([participantId stdString], index);
  return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()];
}

/// Returns the participant's current key material at `index`.
- (NSData *)exportKey:(NSString *)participantId withIndex:(int)index {
  std::vector<uint8_t> nativeKey =
      _nativeKeyProvider->ExportKey([participantId stdString], index);
  return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()];
}

/// Sets the trailer bytes that mark server-injected (SIF) frames.
- (void)setSifTrailer:(NSData *)trailer {
  _nativeKeyProvider->SetSifTrailer(
      std::vector<uint8_t>((const uint8_t *)trailer.bytes,
                           ((const uint8_t *)trailer.bytes) + trailer.length));
}

@end

// ==== sdk/objc/api/peerconnection/RTCIODevice+Private.h (new file in patch) ====
/*
 * Copyright 2022 LiveKit — Apache License 2.0 (full header as in original patch).
 */
#import "RTCIODevice.h"

NS_ASSUME_NONNULL_BEGIN

@interface RTC_OBJC_TYPE (RTCIODevice) ()

/// SDK-internal initializer; public clients use +defaultDeviceWithType:.
- (instancetype)initWithType:(RTCIODeviceType)type
                    deviceId:(NSString *)deviceId
                        name:(NSString *)name;

@end

NS_ASSUME_NONNULL_END

// ==== sdk/objc/api/peerconnection/RTCIODevice.h (new file in patch; begins here,
//      continues in the next chunk) ====
/*
 * Copyright 2022 LiveKit — Apache License 2.0 (full header as in original patch).
 */
// ==== sdk/objc/api/peerconnection/RTCIODevice.h (continued) ====

#import <Foundation/Foundation.h>

#import "RTCMacros.h"

NS_ASSUME_NONNULL_BEGIN

/// Whether the device produces audio (input) or consumes it (output).
typedef NS_ENUM(NSInteger, RTCIODeviceType) {
  RTCIODeviceTypeOutput,
  RTCIODeviceTypeInput,
};

RTC_OBJC_EXPORT
@interface RTC_OBJC_TYPE (RTCIODevice) : NSObject

/// Returns the placeholder "default" device of the given type.
+ (instancetype)defaultDeviceWithType:(RTCIODeviceType)type;
- (instancetype)init NS_UNAVAILABLE;

/// YES when this instance represents the system-default device.
@property(nonatomic, readonly) BOOL isDefault;
@property(nonatomic, readonly) RTCIODeviceType type;
@property(nonatomic, copy, readonly) NSString *deviceId;
@property(nonatomic, copy, readonly) NSString *name;

@end

NS_ASSUME_NONNULL_END

// ==== sdk/objc/api/peerconnection/RTCIODevice.mm (new file in patch) ====
/*
 * Copyright 2022 LiveKit — Apache License 2.0 (full header as in original patch).
 */

#import "RTCIODevice.h"
#import "RTCIODevice+Private.h"

// FIX: declared `static` — the original defined a non-static global NSString
// symbol, which risks duplicate-symbol collisions at link time.
static NSString *const kDefaultDeviceId = @"default";

@implementation RTC_OBJC_TYPE (RTCIODevice)

@synthesize type = _type;
@synthesize deviceId = _deviceId;
@synthesize name = _name;

+ (instancetype)defaultDeviceWithType:(RTCIODeviceType)type {
  return [[self alloc] initWithType:type deviceId:kDefaultDeviceId name:@""];
}

- (instancetype)initWithType:(RTCIODeviceType)type
                    deviceId:(NSString *)deviceId
                        name:(NSString *)name {
  if (self = [super init]) {
    _type = type;
    // FIX: the properties are declared `copy`, but the original assigned the
    // ivars directly — a caller-held NSMutableString could mutate them later.
    _deviceId = [deviceId copy];
    _name = [name copy];
  }
  return self;
}

#pragma mark - IODevice

// A device is "default" iff it carries the sentinel default device id.
- (BOOL)isDefault {
  return [_deviceId isEqualToString:kDefaultDeviceId];
}

#pragma mark - Equatable

// Equality is identity of deviceId within the exact same class; -hash below
// is kept consistent with this definition.
- (BOOL)isEqual:(id)object {
  if (self == object) {
    return YES;
  }
  if (object == nil) {
    return NO;
  }
  if (![object isMemberOfClass:[self class]]) {
    return NO;
  }
  return [_deviceId isEqualToString:((RTC_OBJC_TYPE(RTCIODevice) *)object).deviceId];
}

- (NSUInteger)hash {
  return [_deviceId hash];
}

@end

// ==== sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h (diff hunks) ====
// The patch appends a trailing parameter to both native initializers; only the
// tail of each declaration is visible in this chunk:
//
//   ... audioProcessingModule:
//           (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
//       bypassVoiceProcessing:(BOOL)bypassVoiceProcessing;
//
//   - (instancetype)initWithNativeAudioEncoderFactory: ...
//       networkControllerFactory:(std::unique_ptr<...>)networkControllerFactory
//       (declaration continues in the next chunk)
bypassVoiceProcessing:(BOOL)bypassVoiceProcessing; - (instancetype) initWithEncoderFactory:(nullable id)encoderFactory diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index 5575af98c9..5c82750d20 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -23,6 +23,11 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCVideoSource); @class RTC_OBJC_TYPE(RTCVideoTrack); @class RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions); +@class RTC_OBJC_TYPE(RTCAudioDeviceModule); +@class RTC_OBJC_TYPE(RTCRtpCapabilities); + +typedef NS_ENUM(NSInteger, RTCRtpMediaType); + @protocol RTC_OBJC_TYPE (RTCPeerConnectionDelegate); @protocol RTC_OBJC_TYPE @@ -33,6 +38,8 @@ NS_ASSUME_NONNULL_BEGIN (RTCSSLCertificateVerifier); @protocol RTC_OBJC_TYPE (RTCAudioDevice); +@protocol RTC_OBJC_TYPE +(RTCAudioProcessingModule); RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) : NSObject @@ -51,6 +58,20 @@ RTC_OBJC_EXPORT decoderFactory:(nullable id)decoderFactory audioDevice:(nullable id)audioDevice; +/* Initialize object with bypass voice processing */ +- (instancetype) + initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing + encoderFactory:(nullable id)encoderFactory + decoderFactory:(nullable id)decoderFactory + audioProcessingModule: + (nullable id)audioProcessingModule; + +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioDeviceModule) *audioDeviceModule; + +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTCRtpMediaType)mediaType; + +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesFor:(RTCRtpMediaType)mediaType; + /** Initialize an RTCAudioSource with constraints. 
*/ - (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints: (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 62b55543d4..7b0557ba25 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -14,6 +14,9 @@ #import "RTCPeerConnectionFactory+Private.h" #import "RTCPeerConnectionFactoryOptions+Private.h" +#import "RTCAudioDeviceModule.h" +#import "RTCAudioDeviceModule+Private.h" + #import "RTCAudioSource+Private.h" #import "RTCAudioTrack+Private.h" #import "RTCMediaConstraints+Private.h" @@ -21,6 +24,9 @@ #import "RTCPeerConnection+Private.h" #import "RTCVideoSource+Private.h" #import "RTCVideoTrack+Private.h" +#import "RTCRtpReceiver+Private.h" +#import "RTCRtpCapabilities+Private.h" +#import "RTCRtpCodecCapability+Private.h" #import "base/RTCLogging.h" #import "base/RTCVideoDecoderFactory.h" #import "base/RTCVideoEncoderFactory.h" @@ -47,6 +53,9 @@ #include "sdk/objc/native/src/objc_video_decoder_factory.h" #include "sdk/objc/native/src/objc_video_encoder_factory.h" +#import "components/audio/RTCAudioProcessingModule.h" +#import "components/audio/RTCDefaultAudioProcessingModule+Private.h" + #if defined(WEBRTC_IOS) #import "sdk/objc/native/api/audio_device_module.h" #endif @@ -55,14 +64,18 @@ @implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) { std::unique_ptr _networkThread; std::unique_ptr _workerThread; std::unique_ptr _signalingThread; + rtc::scoped_refptr _nativeAudioDeviceModule; + RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) *_defaultAudioProcessingModule; + BOOL _hasStartedAecDump; } @synthesize nativeFactory = _nativeFactory; +@synthesize audioDeviceModule = _audioDeviceModule; -- (rtc::scoped_refptr)audioDeviceModule { +- (rtc::scoped_refptr)createAudioDeviceModule:(BOOL)bypassVoiceProcessing { #if defined(WEBRTC_IOS) - return 
// ==== sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm ====
// Methods added or rewritten by this patch. Hunks whose enclosing method is
// only partially visible in this chunk are reproduced as comments rather than
// guessed at.
//
// Tail of -createAudioDeviceModule: (signature opens in the previous chunk):
//   #if defined(WEBRTC_IOS)
//     return webrtc::CreateAudioDeviceModule(bypassVoiceProcessing);
//   #else
//     return nullptr;   // non-iOS platforms: no ADM created here
//   #endif
//   }
//
// -init and -initWithEncoderFactory:decoderFactory:audioDevice: are updated to
// call [self createAudioDeviceModule:NO] and to pass bypassVoiceProcessing:NO
// to the designated native initializer, i.e. the legacy entry points keep
// Apple voice processing enabled.

/// Queries the native factory for the codecs this client can send for the
/// given media kind.
- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTCRtpMediaType)mediaType {
  webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpSenderCapabilities(
      [RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType:mediaType]);
  return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeCapabilities:capabilities];
}

/// Queries the native factory for the codecs this client can receive for the
/// given media kind.
- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesFor:(RTCRtpMediaType)mediaType {
  webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpReceiverCapabilities(
      [RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType:mediaType]);
  return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeCapabilities:capabilities];
}

/// Creates a factory with optional bypass of Apple voice processing and an
/// optional custom audio processing module. When the supplied module is not
/// the SDK's default implementation, a fresh default module is used instead.
- (instancetype)
    initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing
                   encoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory
                   decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory
            audioProcessingModule:
                (nullable id<RTC_OBJC_TYPE(RTCAudioProcessingModule)>)audioProcessingModule {
#ifdef HAVE_NO_MEDIA
  return [self initWithNoMedia];
#else
  std::unique_ptr<webrtc::VideoEncoderFactory> native_encoder_factory;
  std::unique_ptr<webrtc::VideoDecoderFactory> native_decoder_factory;
  if (encoderFactory) {
    native_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(encoderFactory);
  }
  if (decoderFactory) {
    native_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(decoderFactory);
  }
  rtc::scoped_refptr<webrtc::AudioDeviceModule> audio_device_module =
      [self createAudioDeviceModule:bypassVoiceProcessing];

  if ([audioProcessingModule
          isKindOfClass:[RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) class]]) {
    _defaultAudioProcessingModule =
        (RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) *)audioProcessingModule;
  } else {
    _defaultAudioProcessingModule = [[RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) alloc] init];
  }

  // FIX: use the SDK logger instead of a bare NSLog so the message honours the
  // configured WebRTC logging severity.
  RTCLogInfo(@"AudioProcessingModule: %@", _defaultAudioProcessingModule);

  return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory()
                       nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory()
                       nativeVideoEncoderFactory:std::move(native_encoder_factory)
                       nativeVideoDecoderFactory:std::move(native_decoder_factory)
                               audioDeviceModule:audio_device_module.get()
                           audioProcessingModule:_defaultAudioProcessingModule
                                                     .nativeAudioProcessingModule
                           bypassVoiceProcessing:bypassVoiceProcessing];
#endif
}

/// Forwards to the designated initializer with no network controller factory.
/// (Leading parameters reconstructed to match the designated initializer —
/// template arguments were stripped in extraction; confirm against upstream.)
- (instancetype)
    initWithNativeAudioEncoderFactory:
        (rtc::scoped_refptr<webrtc::AudioEncoderFactory>)audioEncoderFactory
            nativeAudioDecoderFactory:
                (rtc::scoped_refptr<webrtc::AudioDecoderFactory>)audioDecoderFactory
            nativeVideoEncoderFactory:
                (std::unique_ptr<webrtc::VideoEncoderFactory>)videoEncoderFactory
            nativeVideoDecoderFactory:
                (std::unique_ptr<webrtc::VideoDecoderFactory>)videoDecoderFactory
                    audioDeviceModule:(webrtc::AudioDeviceModule *)audioDeviceModule
                audioProcessingModule:
                    (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule
                bypassVoiceProcessing:(BOOL)bypassVoiceProcessing {
  // BUG FIX: the original passed bypassVoiceProcessing:NO here, silently
  // discarding the caller's flag. Forward the received parameter instead.
  return [self initWithNativeAudioEncoderFactory:audioEncoderFactory
                       nativeAudioDecoderFactory:audioDecoderFactory
                       nativeVideoEncoderFactory:std::move(videoEncoderFactory)
                       nativeVideoDecoderFactory:std::move(videoDecoderFactory)
                               audioDeviceModule:audioDeviceModule
                           audioProcessingModule:audioProcessingModule
                        networkControllerFactory:nullptr
                           bypassVoiceProcessing:bypassVoiceProcessing];
}

// Designated initializer ...networkControllerFactory:bypassVoiceProcessing: —
// only the patched region is visible in this chunk (the diff elides lines
// between its hunks), so it is not reproduced here. The visible insertion
// replaces
//   media_deps.adm = std::move(audioDeviceModule);
// with ADM creation pinned to the worker thread:
//
//   _nativeAudioDeviceModule =
//       _workerThread->BlockingCall([&dependencies, &bypassVoiceProcessing]() {
//         return webrtc::AudioDeviceModule::Create(
//             webrtc::AudioDeviceModule::AudioLayer::kPlatformDefaultAudio,
//             dependencies.task_queue_factory.get(),
//             bypassVoiceProcessing == YES);
//       });
//   _audioDeviceModule = [[RTC_OBJC_TYPE(RTCAudioDeviceModule) alloc]
//       initWithNativeModule:_nativeAudioDeviceModule
//               workerThread:_workerThread.get()];
//   media_deps.adm = _nativeAudioDeviceModule;
//
// NOTE(review): on this path the audioDeviceModule parameter passed into the
// designated initializer is ignored — TODO confirm that is intended.
//
// (next file in patch: RTCPeerConnectionFactoryBuilder+DefaultComponents.h)
b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h index 070a0e74a5..4d7025bf93 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h @@ -12,9 +12,9 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactoryBuilder (DefaultComponents) +@interface RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) (DefaultComponents) -+ (RTCPeerConnectionFactoryBuilder *)defaultBuilder; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)defaultBuilder; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm index 522e520e12..a2f633e1a4 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm @@ -22,10 +22,10 @@ #import "sdk/objc/native/api/audio_device_module.h" #endif -@implementation RTCPeerConnectionFactoryBuilder (DefaultComponents) +@implementation RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) (DefaultComponents) -+ (RTCPeerConnectionFactoryBuilder *)defaultBuilder { - RTCPeerConnectionFactoryBuilder *builder = [[RTCPeerConnectionFactoryBuilder alloc] init]; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)defaultBuilder { + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *builder = [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; auto audioEncoderFactory = webrtc::CreateBuiltinAudioEncoderFactory(); [builder setAudioEncoderFactory:audioEncoderFactory]; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h index f0b0de156a..a46839b6b3 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h 
@@ -25,9 +25,9 @@ class AudioProcessing; NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactoryBuilder : NSObject +@interface RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) : NSObject -+ (RTCPeerConnectionFactoryBuilder *)builder; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)builder; - (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm index 627909a0e3..4cb12b0a59 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm @@ -18,7 +18,7 @@ #include "modules/audio_device/include/audio_device.h" #include "modules/audio_processing/include/audio_processing.h" -@implementation RTCPeerConnectionFactoryBuilder { +@implementation RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) { std::unique_ptr _videoEncoderFactory; std::unique_ptr _videoDecoderFactory; rtc::scoped_refptr _audioEncoderFactory; @@ -27,8 +27,8 @@ @implementation RTCPeerConnectionFactoryBuilder { rtc::scoped_refptr _audioProcessingModule; } -+ (RTCPeerConnectionFactoryBuilder *)builder { - return [[RTCPeerConnectionFactoryBuilder alloc] init]; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)builder { + return [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; } - (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory { @@ -39,7 +39,8 @@ + (RTCPeerConnectionFactoryBuilder *)builder { nativeVideoEncoderFactory:std::move(_videoEncoderFactory) nativeVideoDecoderFactory:std::move(_videoDecoderFactory) audioDeviceModule:_audioDeviceModule.get() - audioProcessingModule:_audioProcessingModule]; + audioProcessingModule:_audioProcessingModule + bypassVoiceProcessing:NO]; } - (void)setVideoEncoderFactory:(std::unique_ptr)videoEncoderFactory { diff --git a/sdk/objc/api/peerconnection/RTCRtpCapabilities+Private.h 
// ==== sdk/objc/api/peerconnection/RTCRtpCapabilities+Private.h (new file in patch) ====
/*
 * Copyright 2023 LiveKit — Apache License 2.0 (full header as in original patch).
 */

#import "RTCRtpCapabilities.h"

#include "api/rtp_parameters.h"

NS_ASSUME_NONNULL_BEGIN

@interface RTC_OBJC_TYPE (RTCRtpCapabilities) ()

/// The wrapped native capabilities struct (held by value).
@property(nonatomic, readonly) webrtc::RtpCapabilities nativeCapabilities;

/// Wraps a native webrtc::RtpCapabilities; the struct is copied in.
- (instancetype)initWithNativeCapabilities:(const webrtc::RtpCapabilities &)nativeCapabilities
    NS_DESIGNATED_INITIALIZER;

@end

NS_ASSUME_NONNULL_END

// ==== sdk/objc/api/peerconnection/RTCRtpCapabilities.h (new file in patch) ====
/*
 * Copyright 2023 LiveKit — Apache License 2.0 (full header as in original patch).
 */

#import <Foundation/Foundation.h>

#import "RTCMacros.h"

@class RTC_OBJC_TYPE(RTCRtpCodecCapability);

NS_ASSUME_NONNULL_BEGIN

RTC_OBJC_EXPORT
@interface RTC_OBJC_TYPE (RTCRtpCapabilities) : NSObject

- (instancetype)init NS_UNAVAILABLE;

// NOTE(review): the array's lightweight generics were stripped during
// extraction; restored to the codec wrapper type the getter populates.
@property(nonatomic, readonly) NSArray<RTC_OBJC_TYPE(RTCRtpCodecCapability) *> *codecs;

// Not implemented:
//   std::vector<RtpHeaderExtensionCapability> header_extensions;
//   std::vector<FecMechanism> fec;

@end

NS_ASSUME_NONNULL_END

// ==== sdk/objc/api/peerconnection/RTCRtpCapabilities.mm (new file in patch) ====
/*
 * Copyright 2023 LiveKit — Apache License 2.0 (full header as in original patch).
 */

#import "RTCRtpCapabilities+Private.h"
#import "RTCRtpCodecCapability+Private.h"

#import "RTCMediaStreamTrack.h"
#import "helpers/NSString+StdString.h"

#include "media/base/media_constants.h"
#include "rtc_base/checks.h"

@implementation RTC_OBJC_TYPE (RTCRtpCapabilities)

@synthesize nativeCapabilities = _nativeCapabilities;

- (instancetype)initWithNativeCapabilities:(const webrtc::RtpCapabilities &)nativeCapabilities {
  if (self = [super init]) {
    _nativeCapabilities = nativeCapabilities;
  }
  return self;
}

/// Converts the native codec list into ObjC wrappers on each access.
- (NSArray<RTC_OBJC_TYPE(RTCRtpCodecCapability) *> *)codecs {
  NSMutableArray<RTC_OBJC_TYPE(RTCRtpCodecCapability) *> *result = [NSMutableArray array];
  for (const auto &element : _nativeCapabilities.codecs) {
    [result addObject:[[RTC_OBJC_TYPE(RTCRtpCodecCapability) alloc]
                          initWithNativeCodecCapability:element]];
  }
  // FIX: return an immutable snapshot instead of the mutable working array.
  return [result copy];
}

@end

// ==== sdk/objc/api/peerconnection/RTCRtpCodecCapability+Private.h (new file in
//      patch; begins here, continues in the next chunk) ====
/*
 * Copyright 2023 LiveKit — Apache License 2.0 (full header as in original patch).
 */
// ==== sdk/objc/api/peerconnection/RTCRtpCodecCapability+Private.h (continued) ====

#import "RTCRtpCodecCapability.h"

#include "api/rtp_parameters.h"

NS_ASSUME_NONNULL_BEGIN

@interface RTC_OBJC_TYPE (RTCRtpCodecCapability) ()

/// The wrapped native codec capability (value type; mutated by the setters below).
@property(nonatomic, readonly) webrtc::RtpCodecCapability nativeCodecCapability;

- (instancetype)initWithNativeCodecCapability:
    (const webrtc::RtpCodecCapability &)nativeCodecCapability NS_DESIGNATED_INITIALIZER;

@end

NS_ASSUME_NONNULL_END

// ==== sdk/objc/api/peerconnection/RTCRtpCodecCapability.h (new file in patch) ====
/*
 * Copyright 2023 LiveKit — Apache License 2.0 (full header as in original patch).
 */

#import <Foundation/Foundation.h>

#import "RTCMacros.h"

typedef NS_ENUM(NSInteger, RTCRtpMediaType);

NS_ASSUME_NONNULL_BEGIN

RTC_OBJC_EXPORT
@interface RTC_OBJC_TYPE (RTCRtpCodecCapability) : NSObject

// Built MIME "type/subtype" string from `name` and `kind`.
@property(nonatomic, readonly) NSString *mimeType;

// Used to identify the codec. Equivalent to MIME subtype.
@property(nonatomic, copy) NSString *name;

// The media type of this codec. Equivalent to MIME top-level type.
@property(nonatomic, assign) RTCRtpMediaType kind;

// Clock rate in Hertz. If unset, the codec is applicable to any clock rate.
@property(nonatomic, copy, nullable) NSNumber *clockRate;

// Default payload type for this codec. Mainly needed for codecs that have
// statically assigned payload types.
@property(nonatomic, copy, nullable) NSNumber *preferredPayloadType;

// The number of audio channels supported. Unused for video codecs.
@property(nonatomic, copy, nullable) NSNumber *numChannels;

// Codec-specific parameters that must be signaled to the remote party.
// Corresponds to "a=fmtp" parameters in SDP. Contrary to ORTC, these are
// named with all-lowercase strings; boolean values are the string "1".
@property(nonatomic, copy) NSDictionary<NSString *, NSString *> *parameters;

// Feedback mechanisms supported for this codec — not implemented.
// std::vector<RtcpFeedback> rtcp_feedback;

@end

NS_ASSUME_NONNULL_END

// ==== sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm (new file in patch) ====
/*
 * Copyright 2023 LiveKit — Apache License 2.0 (full header as in original patch).
 */

#import "RTCRtpCodecCapability+Private.h"

#import "RTCRtpReceiver+Private.h"

#import "RTCMediaStreamTrack.h"
#import "helpers/NSString+StdString.h"

#include "media/base/media_constants.h"
#include "rtc_base/checks.h"

// NOTE(review): absl::optional / std::vector / std::map template arguments in
// this file were stripped during extraction and have been restored to the
// types used by webrtc::RtpCodecCapability.
@implementation RTC_OBJC_TYPE (RTCRtpCodecCapability)

@synthesize nativeCodecCapability = _nativeCodecCapability;

- (instancetype)init {
  return [self initWithNativeCodecCapability:webrtc::RtpCodecCapability()];
}

- (instancetype)initWithNativeCodecCapability:
    (const webrtc::RtpCodecCapability &)nativeCodecCapability {
  if (self = [super init]) {
    _nativeCodecCapability = nativeCodecCapability;
  }
  return self;
}

- (NSString *)mimeType {
  return [NSString stringWithUTF8String:_nativeCodecCapability.mime_type().c_str()];
}

- (NSString *)name {
  return [NSString stringWithUTF8String:_nativeCodecCapability.name.c_str()];
}

- (void)setName:(NSString *)name {
  _nativeCodecCapability.name = std::string([name UTF8String]);
}

- (RTCRtpMediaType)kind {
  return [RTC_OBJC_TYPE(RTCRtpReceiver) mediaTypeForNativeMediaType:_nativeCodecCapability.kind];
}

- (void)setKind:(RTCRtpMediaType)kind {
  _nativeCodecCapability.kind = [RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType:kind];
}

// nil <-> unset optional on the native struct.
- (NSNumber *)clockRate {
  if (!_nativeCodecCapability.clock_rate) {
    return nil;
  }
  return [NSNumber numberWithInt:*_nativeCodecCapability.clock_rate];
}

- (void)setClockRate:(NSNumber *)clockRate {
  if (clockRate == nil) {
    _nativeCodecCapability.clock_rate = absl::optional<int>();
    return;
  }
  _nativeCodecCapability.clock_rate = absl::optional<int>(clockRate.intValue);
}

- (NSNumber *)preferredPayloadType {
  if (!_nativeCodecCapability.preferred_payload_type) {
    return nil;
  }
  return [NSNumber numberWithInt:*_nativeCodecCapability.preferred_payload_type];
}

- (void)setPreferredPayloadType:(NSNumber *)preferredPayloadType {
  if (preferredPayloadType == nil) {
    _nativeCodecCapability.preferred_payload_type = absl::optional<int>();
    return;
  }
  _nativeCodecCapability.preferred_payload_type =
      absl::optional<int>(preferredPayloadType.intValue);
}

- (NSNumber *)numChannels {
  if (!_nativeCodecCapability.num_channels) {
    return nil;
  }
  return [NSNumber numberWithInt:*_nativeCodecCapability.num_channels];
}

- (void)setNumChannels:(NSNumber *)numChannels {
  if (numChannels == nil) {
    _nativeCodecCapability.num_channels = absl::optional<int>();
    return;
  }
  _nativeCodecCapability.num_channels = absl::optional<int>(numChannels.intValue);
}

/// Copies the native fmtp map into an NSDictionary.
- (NSDictionary<NSString *, NSString *> *)parameters {
  NSMutableDictionary<NSString *, NSString *> *result = [NSMutableDictionary dictionary];
  // FIX: local renamed from `_parameters` — the underscore prefix made it read
  // like an ivar.
  const auto &nativeParams = _nativeCodecCapability.parameters;
  for (auto it = nativeParams.begin(); it != nativeParams.end(); ++it) {
    [result setObject:[NSString stringForStdString:it->second]
               forKey:[NSString stringForStdString:it->first]];
  }
  return result;
}

/// Replaces the native fmtp map with the contents of `parameters`.
- (void)setParameters:(NSDictionary<NSString *, NSString *> *)parameters {
  std::map<std::string, std::string> nativeParams;
  for (NSString *paramKey in parameters.allKeys) {
    std::string key = [NSString stdStringForString:paramKey];
    std::string value = [NSString stdStringForString:parameters[paramKey]];
    nativeParams[key] = value;
  }
  _nativeCodecCapability.parameters = nativeParams;
}

@end

// ==== sdk/objc/api/peerconnection/RTCRtpCodecParameters.h (diff hunk) ====
// The patch adds one exported constant after kRTCH264CodecName:
//
//   RTC_EXTERN const NSString *const kRTCAv1CodecName;
//
// (its definition in RTCRtpCodecParameters.mm appears in the next chunk)
a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm index 6201e57b93..42a310cb79 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm @@ -32,6 +32,7 @@ const NSString * const kRTCVp8CodecName = @(cricket::kVp8CodecName); const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName); const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName); +const NSString * const kRTCAv1CodecName = @(cricket::kAv1CodecName); @implementation RTC_OBJC_TYPE (RTCRtpCodecParameters) diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h index 07f6b7a39c..af0c6993bc 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h @@ -69,6 +69,10 @@ RTC_OBJC_EXPORT https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime */ @property(nonatomic, assign) BOOL adaptiveAudioPacketTime; +/** A case-sensitive identifier of the scalability mode to be used for this stream. 
+ https://w3c.github.io/webrtc-svc/#rtcrtpencodingparameters */ +@property(nonatomic, copy, nullable) NSString *scalabilityMode; + - (instancetype)init; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm index d6087dafb0..aecb88b6f6 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm @@ -25,6 +25,7 @@ @implementation RTC_OBJC_TYPE (RTCRtpEncodingParameters) @synthesize bitratePriority = _bitratePriority; @synthesize networkPriority = _networkPriority; @synthesize adaptiveAudioPacketTime = _adaptiveAudioPacketTime; +@synthesize scalabilityMode = _scalabilityMode; - (instancetype)init { webrtc::RtpEncodingParameters nativeParameters; @@ -59,6 +60,9 @@ - (instancetype)initWithNativeParameters: if (nativeParameters.ssrc) { _ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc]; } + if (nativeParameters.scalability_mode) { + _scalabilityMode = [NSString stringWithUTF8String:nativeParameters.scalability_mode->c_str()]; + } _bitratePriority = nativeParameters.bitrate_priority; _networkPriority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters) priorityFromNativePriority:nativeParameters.network_priority]; @@ -92,6 +96,9 @@ - (instancetype)initWithNativeParameters: if (_ssrc != nil) { parameters.ssrc = absl::optional(_ssrc.unsignedLongValue); } + if (_scalabilityMode != nil) { + parameters.scalability_mode = absl::optional(std::string([_scalabilityMode UTF8String])); + } parameters.bitrate_priority = _bitratePriority; parameters.network_priority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters) nativePriorityFromPriority:_networkPriority]; diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h index fd59013639..3ffea8efd7 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h @@ -14,6 +14,8 @@ #import 
"RTCRtpReceiver.h" #import "RTCRtpSender.h" +@class RTC_OBJC_TYPE(RTCRtpCodecCapability); + NS_ASSUME_NONNULL_BEGIN extern NSString *const kRTCRtpTransceiverErrorDomain; @@ -104,6 +106,8 @@ RTC_OBJC_EXPORT */ @property(nonatomic, readonly) RTCRtpTransceiverDirection direction; +@property(nonatomic, copy) NSArray *codecPreferences; + /** The currentDirection attribute indicates the current direction negotiated * for this transceiver. If this transceiver has never been represented in an * offer/answer exchange, or if the transceiver is stopped, the value is not diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm index ae1cf79864..b7cc37c2f8 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm @@ -14,6 +14,8 @@ #import "RTCRtpParameters+Private.h" #import "RTCRtpReceiver+Private.h" #import "RTCRtpSender+Private.h" +#import "RTCRtpCodecCapability.h" +#import "RTCRtpCodecCapability+Private.h" #import "base/RTCLogging.h" #import "helpers/NSString+StdString.h" @@ -65,6 +67,36 @@ - (NSString *)mid { } } +- (void)setCodecPreferences:(NSArray *)codecPreferences { + + std::vector objects; + + for (RTC_OBJC_TYPE(RTCRtpCodecCapability) *object in codecPreferences) { + objects.push_back(object.nativeCodecCapability); + } + + //webrtc::RTCError error = + _nativeRtpTransceiver->SetCodecPreferences(rtc::ArrayView(objects.data(), objects.size())); + + // if (!error.ok()) { + // [NSException raise:@"setCodecPreferences" format:@"SDK returned error: %@", [NSString stringWithUTF8String: error.message()]]; + // } +} + +- (NSArray *)codecPreferences { + + NSMutableArray *result = [NSMutableArray array]; + + std::vector capabilities = _nativeRtpTransceiver->codec_preferences(); + + for (auto & element : capabilities) { + RTC_OBJC_TYPE(RTCRtpCodecCapability) *object = [[RTC_OBJC_TYPE(RTCRtpCodecCapability) alloc] initWithNativeCodecCapability: element]; + [result 
addObject: object]; + } + + return result; +} + @synthesize sender = _sender; @synthesize receiver = _receiver; diff --git a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm index 2eb8d366d2..88c2add1df 100644 --- a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm +++ b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm @@ -12,6 +12,11 @@ #import "helpers/NSString+StdString.h" +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" +#include "modules/video_coding/svc/scalability_mode_util.h" +#include "modules/video_coding/svc/create_scalability_structure.h" + @implementation RTC_OBJC_TYPE (RTCVideoCodecInfo) (Private) @@ -31,8 +36,16 @@ - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format { std::string value = [NSString stdStringForString:self.parameters[paramKey]]; parameters[key] = value; } - - return webrtc::SdpVideoFormat([NSString stdStringForString:self.name], parameters); + + absl::InlinedVector + scalability_modes; + for (NSString *scalabilityMode in self.scalabilityModes) { + auto scalability_mode = webrtc::ScalabilityModeFromString([NSString stdStringForString:scalabilityMode]); + if (scalability_mode != absl::nullopt) { + scalability_modes.push_back(*scalability_mode); + } + } + return webrtc::SdpVideoFormat([NSString stdStringForString:self.name], parameters, scalability_modes); } @end diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.h b/sdk/objc/api/peerconnection/RTCVideoTrack.h index 5382b7169f..56d25c1568 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.h +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.h @@ -25,6 +25,9 @@ RTC_OBJC_EXPORT /** The video source for this video track. */ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCVideoSource) *source; +/** The receive state, if this is a remote video track. 
*/ +@property(nonatomic, assign) BOOL shouldReceive; + - (instancetype)init NS_UNAVAILABLE; /** Register a renderer that will render all frames received on this track. */ diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm index d3296f6279..546ec80a61 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm @@ -53,7 +53,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto } - (void)dealloc { - for (RTCVideoRendererAdapter *adapter in _adapters) { + for (RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter in _adapters) { self.nativeVideoTrack->RemoveSink(adapter.nativeVideoRenderer); } } @@ -70,6 +70,14 @@ - (void)dealloc { return _source; } +- (BOOL)shouldReceive { + return self.nativeVideoTrack->should_receive(); +} + +- (void)setShouldReceive:(BOOL)shouldReceive { + self.nativeVideoTrack->set_should_receive(shouldReceive); +} + - (void)addRenderer:(id)renderer { if (!_workerThread->IsCurrent()) { _workerThread->BlockingCall([renderer, self] { [self addRenderer:renderer]; }); @@ -77,18 +85,17 @@ - (void)addRenderer:(id)renderer { } // Make sure we don't have this renderer yet. - for (RTCVideoRendererAdapter *adapter in _adapters) { + for (RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter in _adapters) { if (adapter.videoRenderer == renderer) { RTC_LOG(LS_INFO) << "|renderer| is already attached to this track"; return; } } // Create a wrapper that provides a native pointer for us. 
- RTCVideoRendererAdapter* adapter = - [[RTCVideoRendererAdapter alloc] initWithNativeRenderer:renderer]; + RTC_OBJC_TYPE(RTCVideoRendererAdapter) *adapter = + [[RTC_OBJC_TYPE(RTCVideoRendererAdapter) alloc] initWithNativeRenderer:renderer]; [_adapters addObject:adapter]; - self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer, - rtc::VideoSinkWants()); + self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer, rtc::VideoSinkWants()); } - (void)removeRenderer:(id)renderer { @@ -97,9 +104,8 @@ - (void)removeRenderer:(id)renderer { return; } __block NSUInteger indexToRemove = NSNotFound; - [_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter, - NSUInteger idx, - BOOL *stop) { + [_adapters enumerateObjectsUsingBlock:^( + RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter, NSUInteger idx, BOOL * stop) { if (adapter.videoRenderer == renderer) { indexToRemove = idx; *stop = YES; @@ -109,8 +115,7 @@ - (void)removeRenderer:(id)renderer { RTC_LOG(LS_INFO) << "removeRenderer called with a renderer that has not been previously added"; return; } - RTCVideoRendererAdapter *adapterToRemove = - [_adapters objectAtIndex:indexToRemove]; + RTC_OBJC_TYPE(RTCVideoRendererAdapter) *adapterToRemove = [_adapters objectAtIndex:indexToRemove]; self.nativeVideoTrack->RemoveSink(adapterToRemove.nativeVideoRenderer); [_adapters removeObjectAtIndex:indexToRemove]; } diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h index 8aa55e4bfa..a900728049 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h +++ b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h @@ -24,4 +24,6 @@ RTC_OBJC_EXPORT + (bool)isSupported; ++ (NSArray *)scalabilityModes; + @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm index d2fe65293b..f9256387dc 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm @@ 
-14,7 +14,13 @@ #import "RTCMacros.h" #import "RTCVideoEncoderAV1.h" #import "RTCWrappedNativeVideoEncoder.h" + +#import "helpers/NSString+StdString.h" + +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" #include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" +#include "modules/video_coding/svc/create_scalability_structure.h" @implementation RTC_OBJC_TYPE (RTCVideoEncoderAV1) @@ -28,4 +34,14 @@ + (bool)isSupported { return true; } ++ (NSArray *)scalabilityModes { + NSMutableArray *scalabilityModes = [NSMutableArray array]; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + [scalabilityModes addObject:[NSString stringForAbslStringView:webrtc::ScalabilityModeToString(scalability_mode)]]; + } + } + return scalabilityModes; +} + @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h new file mode 100644 index 0000000000..4f1b55c713 --- /dev/null +++ b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h @@ -0,0 +1,13 @@ +#import "RTCMacros.h" +#import "RTCVideoEncoder.h" +#import "RTCVideoEncoderFactory.h" +#import "RTCVideoCodecInfo.h" + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCVideoEncoderSimulcast) : NSObject + ++ (id)simulcastEncoderWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo; + +@end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm new file mode 100644 index 0000000000..c7d5e2939c --- /dev/null +++ b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm @@ -0,0 +1,26 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoEncoderSimulcast.h" +#import "RTCWrappedNativeVideoEncoder.h" +#import "api/peerconnection/RTCVideoCodecInfo+Private.h" + +#include "native/api/video_encoder_factory.h" +#include 
"media/engine/simulcast_encoder_adapter.h" + +@implementation RTC_OBJC_TYPE (RTCVideoEncoderSimulcast) + ++ (id)simulcastEncoderWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo { + auto nativePrimary = webrtc::ObjCToNativeVideoEncoderFactory(primary); + auto nativeFallback = webrtc::ObjCToNativeVideoEncoderFactory(fallback); + auto nativeFormat = [videoCodecInfo nativeSdpVideoFormat]; + return [[RTC_OBJC_TYPE(RTCWrappedNativeVideoEncoder) alloc] + initWithNativeEncoder: std::make_unique( + nativePrimary.release(), + nativeFallback.release(), + std::move(nativeFormat))]; +} + +@end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h index f7dac6117d..adfca0f9a4 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h @@ -24,4 +24,6 @@ RTC_OBJC_EXPORT + (bool)isSupported; ++ (NSArray *)scalabilityModes; + @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm index 18a9353f7e..3db0629074 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm @@ -15,7 +15,12 @@ #import "RTCVideoEncoderVP9.h" #import "RTCWrappedNativeVideoEncoder.h" +#import "helpers/NSString+StdString.h" + +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/svc/create_scalability_structure.h" @implementation RTC_OBJC_TYPE (RTCVideoEncoderVP9) @@ -36,4 +41,14 @@ + (bool)isSupported { #endif } ++ (NSArray *)scalabilityModes { + NSMutableArray *scalabilityModes = [NSMutableArray array]; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + [scalabilityModes addObject:[NSString 
stringForAbslStringView:webrtc::ScalabilityModeToString(scalability_mode)]]; + } + } + return scalabilityModes; +} + @end diff --git a/sdk/objc/base/RTCAudioRenderer.h b/sdk/objc/base/RTCAudioRenderer.h new file mode 100644 index 0000000000..3669831fca --- /dev/null +++ b/sdk/objc/base/RTCAudioRenderer.h @@ -0,0 +1,32 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#if TARGET_OS_IPHONE +#import +#endif + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RTCAudioRenderer) + +- (void)renderSampleBuffer: (CMSampleBufferRef)sampleBuffer NS_SWIFT_NAME(render(sampleBuffer:)); + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/base/RTCVideoCodecInfo.h b/sdk/objc/base/RTCVideoCodecInfo.h index fa28958f25..9da0c7aa81 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.h +++ b/sdk/objc/base/RTCVideoCodecInfo.h @@ -26,10 +26,16 @@ RTC_OBJC_EXPORT parameters:(nullable NSDictionary *)parameters NS_DESIGNATED_INITIALIZER; +- (instancetype)initWithName:(NSString *)name + parameters:(nullable NSDictionary *)parameters + scalabilityModes:(nullable NSArray *)scalabilityModes + NS_DESIGNATED_INITIALIZER; + - (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; @property(nonatomic, readonly) NSString *name; @property(nonatomic, readonly) NSDictionary *parameters; +@property(nonatomic, readonly) NSArray *scalabilityModes; @end diff --git a/sdk/objc/base/RTCVideoCodecInfo.m 
b/sdk/objc/base/RTCVideoCodecInfo.m index ce26ae1de3..441ecde7c5 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.m +++ b/sdk/objc/base/RTCVideoCodecInfo.m @@ -14,6 +14,7 @@ @implementation RTC_OBJC_TYPE (RTCVideoCodecInfo) @synthesize name = _name; @synthesize parameters = _parameters; +@synthesize scalabilityModes = _scalabilityModes; - (instancetype)initWithName:(NSString *)name { return [self initWithName:name parameters:nil]; @@ -24,15 +25,29 @@ - (instancetype)initWithName:(NSString *)name if (self = [super init]) { _name = name; _parameters = (parameters ? parameters : @{}); + _scalabilityModes = @[]; } return self; } +- (instancetype)initWithName:(NSString *)name + parameters:(nullable NSDictionary *)parameters + scalabilityModes:(nullable NSArray *)scalabilityModes { + if (self = [super init]) { + _name = name; + _parameters = (parameters ? parameters : @{}); + _scalabilityModes = (scalabilityModes ? scalabilityModes : @[]); + } + + return self; + } + - (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { if (!info || ![self.name isEqualToString:info.name] || - ![self.parameters isEqualToDictionary:info.parameters]) { + ![self.parameters isEqualToDictionary:info.parameters] || + ![self.scalabilityModes isEqualToArray:info.scalabilityModes]) { return NO; } return YES; diff --git a/sdk/objc/components/audio/RTCAudioBuffer+Private.h b/sdk/objc/components/audio/RTCAudioBuffer+Private.h new file mode 100644 index 0000000000..effd8bb429 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioBuffer+Private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioBuffer.h" + +#include "modules/audio_processing/audio_buffer.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCAudioBuffer)() + +- (instancetype)initWithNativeType: (webrtc::AudioBuffer *) audioBuffer; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioBuffer.h b/sdk/objc/components/audio/RTCAudioBuffer.h new file mode 100644 index 0000000000..8bbd068657 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioBuffer.h @@ -0,0 +1,38 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCAudioBuffer) : NSObject + +@property(nonatomic, readonly) size_t channels; +@property(nonatomic, readonly) size_t frames; +@property(nonatomic, readonly) size_t framesPerBand; +@property(nonatomic, readonly) size_t bands; + +// Returns pointer arrays. Index range from 0 to `frames`. 
+- (float* _Nonnull)rawBufferForChannel:(size_t)channel; + +// TODO: More convenience methods... + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioBuffer.mm b/sdk/objc/components/audio/RTCAudioBuffer.mm new file mode 100644 index 0000000000..e37ea344dd --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioBuffer.mm @@ -0,0 +1,55 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioBuffer.h" + +#include "modules/audio_processing/audio_buffer.h" + +@implementation RTC_OBJC_TYPE (RTCAudioBuffer) { + // Raw + webrtc::AudioBuffer *_audioBuffer; +} + +- (size_t)channels { + return _audioBuffer->num_channels(); +} + +- (size_t)frames { + return _audioBuffer->num_frames(); +} + +- (size_t)framesPerBand { + return _audioBuffer->num_frames_per_band(); +} + +- (size_t)bands { + return _audioBuffer->num_bands(); +} + +- (float *)rawBufferForChannel:(size_t)channel { + return _audioBuffer->channels()[channel]; +} + +#pragma mark - Private + +- (instancetype)initWithNativeType:(webrtc::AudioBuffer *)audioBuffer { + if (self = [super init]) { + _audioBuffer = audioBuffer; + } + return self; +} + +@end diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h new file mode 100644 index 0000000000..9995b58abb --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h @@ 
-0,0 +1,43 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioCustomProcessingAdapter.h" +#import "RTCAudioCustomProcessingDelegate.h" +#import "RTCMacros.h" + +#include "modules/audio_processing/include/audio_processing.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) () + +// Thread safe set/get with os_unfair_lock. +@property(nonatomic, weak, nullable) id + audioCustomProcessingDelegate; + +// Direct read access without lock. +@property(nonatomic, readonly, weak, nullable) id + rawAudioCustomProcessingDelegate; + +@property(nonatomic, readonly) std::unique_ptr + nativeAudioCustomProcessingModule; + +- (instancetype)initWithDelegate: + (nullable id)audioCustomProcessingDelegate; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h new file mode 100644 index 0000000000..3230c19323 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h @@ -0,0 +1,28 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) : NSObject + +- (instancetype)init NS_UNAVAILABLE; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm new file mode 100644 index 0000000000..c0f297c786 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm @@ -0,0 +1,139 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import +#import + +#import "RTCAudioBuffer+Private.h" +#import "RTCAudioCustomProcessingAdapter+Private.h" +#import "RTCAudioCustomProcessingAdapter.h" + +#include "rtc_base/logging.h" + +namespace webrtc { + +class AudioCustomProcessingAdapter : public webrtc::CustomProcessing { + public: + bool is_initialized_; + int sample_rate_hz_; + int num_channels_; + + AudioCustomProcessingAdapter(RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *adapter, os_unfair_lock *lock) { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter init"; + + adapter_ = adapter; + lock_ = lock; + is_initialized_ = false; + sample_rate_hz_ = 0; + num_channels_ = 0; + } + + ~AudioCustomProcessingAdapter() { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter dealloc"; + + os_unfair_lock_lock(lock_); + id delegate = adapter_.rawAudioCustomProcessingDelegate; + [delegate audioProcessingRelease]; + os_unfair_lock_unlock(lock_); + } + + void Initialize(int sample_rate_hz, int num_channels) override { + os_unfair_lock_lock(lock_); + id delegate = adapter_.rawAudioCustomProcessingDelegate; + [delegate audioProcessingInitializeWithSampleRate:sample_rate_hz channels:num_channels]; + is_initialized_ = true; + sample_rate_hz_ = sample_rate_hz; + num_channels_ = num_channels; + os_unfair_lock_unlock(lock_); + } + + void Process(AudioBuffer *audio_buffer) override { + bool is_locked = os_unfair_lock_trylock(lock_); + if (!is_locked) { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter Process " + "already locked, skipping..."; + + return; + } + id delegate = adapter_.rawAudioCustomProcessingDelegate; + if (delegate != nil) { + RTC_OBJC_TYPE(RTCAudioBuffer) *audioBuffer = [[RTC_OBJC_TYPE(RTCAudioBuffer) alloc] initWithNativeType:audio_buffer]; + [delegate audioProcessingProcess:audioBuffer]; + } + os_unfair_lock_unlock(lock_); + } + + std::string ToString() const override { return 
"AudioCustomProcessingAdapter"; } + + private: + __weak RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *adapter_; + os_unfair_lock *lock_; +}; +} // namespace webrtc + +@implementation RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) { + webrtc::AudioCustomProcessingAdapter *_adapter; + os_unfair_lock _lock; +} + +@synthesize rawAudioCustomProcessingDelegate = _rawAudioCustomProcessingDelegate; + +- (instancetype)initWithDelegate: + (nullable id)audioCustomProcessingDelegate { + if (self = [super init]) { + _lock = OS_UNFAIR_LOCK_INIT; + _rawAudioCustomProcessingDelegate = audioCustomProcessingDelegate; + _adapter = new webrtc::AudioCustomProcessingAdapter(self, &_lock); + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter init"; + } + + return self; +} + +- (void)dealloc { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter dealloc"; +} + +#pragma mark - Getter & Setter for audioCustomProcessingDelegate + +- (nullable id)audioCustomProcessingDelegate { + os_unfair_lock_lock(&_lock); + id delegate = _rawAudioCustomProcessingDelegate; + os_unfair_lock_unlock(&_lock); + return delegate; +} + +- (void)setAudioCustomProcessingDelegate:(nullable id)delegate { + os_unfair_lock_lock(&_lock); + if (_rawAudioCustomProcessingDelegate != nil && _adapter->is_initialized_) { + [_rawAudioCustomProcessingDelegate audioProcessingRelease]; + } + _rawAudioCustomProcessingDelegate = delegate; + if (_adapter->is_initialized_) { + [_rawAudioCustomProcessingDelegate + audioProcessingInitializeWithSampleRate:_adapter->sample_rate_hz_ + channels:_adapter->num_channels_]; + } + os_unfair_lock_unlock(&_lock); +} + +#pragma mark - Private + +- (std::unique_ptr)nativeAudioCustomProcessingModule { + return std::unique_ptr(_adapter); +} + +@end diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h b/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h new file mode 100644 index 0000000000..a8e4981fbc --- /dev/null +++ 
b/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h @@ -0,0 +1,52 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCAudioBuffer); + +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCAudioCustomProcessingDelegate) + +/** +* (Re-)initialize the audio processor. +* This method can be invoked multiple times. +*/ +- (void)audioProcessingInitializeWithSampleRate : (size_t)sampleRateHz channels +: (size_t)channels NS_SWIFT_NAME(audioProcessingInitialize(sampleRate:channels:)); + +/** + * Process (read or write) the audio buffer. + * RTCAudioBuffer is a simple wrapper for webrtc::AudioBuffer and the valid scope is only inside + * this method. Do not retain it. + */ +- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer + NS_SWIFT_NAME(audioProcessingProcess(audioBuffer:)); + +// TOOD: +// virtual void SetRuntimeSetting(AudioProcessing::RuntimeSetting setting); + +/** + * Suggests releasing resources allocated by the audio processor. 
+ */ +- (void)audioProcessingRelease; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h b/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h new file mode 100644 index 0000000000..ed565ee0aa --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioProcessingConfig.h" + +#include "modules/audio_processing/include/audio_processing.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCAudioProcessingConfig)() + +@property(nonatomic, readonly) webrtc::AudioProcessing::Config nativeAudioProcessingConfig; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig.h b/sdk/objc/components/audio/RTCAudioProcessingConfig.h new file mode 100644 index 0000000000..3c7dce45f1 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig.h @@ -0,0 +1,31 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCAudioProcessingConfig) : NSObject + +@property(nonatomic, assign) BOOL echoCancellerEnabled; +@property(nonatomic, assign) BOOL echoCancellerMobileMode; + + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig.mm b/sdk/objc/components/audio/RTCAudioProcessingConfig.mm new file mode 100644 index 0000000000..ca40f16e17 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig.mm @@ -0,0 +1,51 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCAudioProcessingConfig.h" + +#include "modules/audio_processing/include/audio_processing.h" + +@implementation RTC_OBJC_TYPE (RTCAudioProcessingConfig) { + webrtc::AudioProcessing::Config _config; +} + +// config.echo_canceller.enabled + +- (BOOL)echoCancellerEnabled { + return _config.echo_canceller.enabled; +} + +- (void)setEchoCancellerEnabled:(BOOL)value { + _config.echo_canceller.enabled = value; +} + +// config.echo_canceller.mobile_mode + +- (BOOL)echoCancellerMobileMode { + return _config.echo_canceller.mobile_mode; +} + +- (void)setEchoCancellerMobileMode:(BOOL)value { + _config.echo_canceller.mobile_mode = value; +} + +#pragma mark - Private + +- (webrtc::AudioProcessing::Config)nativeAudioProcessingConfig { + return _config; +} + +@end diff --git a/sdk/objc/components/audio/RTCAudioProcessingModule.h b/sdk/objc/components/audio/RTCAudioProcessingModule.h new file mode 100644 index 0000000000..9b05c4800e --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingModule.h @@ -0,0 +1,33 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCAudioProcessingConfig); + +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCAudioProcessingModule) + +- (void)applyConfig: (RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config; + +// TODO: Implement... 
+ +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm index 449f31e9dd..b123e2002e 100644 --- a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm +++ b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm @@ -55,7 +55,8 @@ - (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configur if (![self setCategory:configuration.category withOptions:configuration.categoryOptions error:&categoryError]) { - RTCLogError(@"Failed to set category: %@", + RTCLogError(@"Failed to set category to %@: %@", + self.category, categoryError.localizedDescription); error = categoryError; } else { @@ -66,7 +67,8 @@ - (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configur if (self.mode != configuration.mode) { NSError *modeError = nil; if (![self setMode:configuration.mode error:&modeError]) { - RTCLogError(@"Failed to set mode: %@", + RTCLogError(@"Failed to set mode to %@: %@", + self.mode, modeError.localizedDescription); error = modeError; } else { diff --git a/sdk/objc/components/audio/RTCAudioSession.mm b/sdk/objc/components/audio/RTCAudioSession.mm index 550a426d36..e6da689946 100644 --- a/sdk/objc/components/audio/RTCAudioSession.mm +++ b/sdk/objc/components/audio/RTCAudioSession.mm @@ -540,8 +540,7 @@ - (void)handleRouteChangeNotification:(NSNotification *)notification { RTCLog(@"Audio route changed: OldDeviceUnavailable"); break; case AVAudioSessionRouteChangeReasonCategoryChange: - RTCLog(@"Audio route changed: CategoryChange to :%@", - self.session.category); + RTCLog(@"Audio route changed: CategoryChange to :%@", self.session.category); break; case AVAudioSessionRouteChangeReasonOverride: RTCLog(@"Audio route changed: Override"); diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m index 39e9ac13ec..7591d86f7d 100644 --- 
a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m @@ -65,15 +65,17 @@ @implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration) - (instancetype)init { if (self = [super init]) { + // Use AVAudioSession values for default + AVAudioSession *session = [AVAudioSession sharedInstance]; // Use a category which supports simultaneous recording and playback. // By default, using this category implies that our app’s audio is // nonmixable, hence activating the session will interrupt any other // audio sessions which are also nonmixable. - _category = AVAudioSessionCategoryPlayAndRecord; - _categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth; + _category = session.category; + _categoryOptions = session.categoryOptions; // Specify mode for two-way voice communication (e.g. VoIP). - _mode = AVAudioSessionModeVoiceChat; + _mode = session.mode; // Set the session's sample rate or the hardware sample rate. // It is essential that we use the same sample rate as stream format diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h new file mode 100644 index 0000000000..4f8551e372 --- /dev/null +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCDefaultAudioProcessingModule.h" + +#include "modules/audio_processing/include/audio_processing.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule)() + +@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioProcessingModule; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h new file mode 100644 index 0000000000..2047b3f797 --- /dev/null +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h @@ -0,0 +1,46 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import + +#import "RTCAudioProcessingModule.h" +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCAudioProcessingConfig); +@protocol RTC_OBJC_TYPE (RTCAudioCustomProcessingDelegate); + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule) : NSObject + +- (instancetype)initWithConfig: (nullable RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config + capturePostProcessingDelegate: (nullable id)capturePostProcessingDelegate + renderPreProcessingDelegate: (nullable id)renderPreProcessingDelegate + NS_SWIFT_NAME(init(config:capturePostProcessingDelegate:renderPreProcessingDelegate:)) NS_DESIGNATED_INITIALIZER; + +- (void)applyConfig:(RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config; + +// Dynamically update delegates at runtime + +@property(nonatomic, weak, nullable) id + capturePostProcessingDelegate; +@property(nonatomic, weak, nullable) id + renderPreProcessingDelegate; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm new file mode 100644 index 0000000000..2f592cefa4 --- /dev/null +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm @@ -0,0 +1,96 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCDefaultAudioProcessingModule.h" +#import "RTCAudioCustomProcessingAdapter+Private.h" +#import "RTCAudioProcessingConfig+Private.h" + +#include "modules/audio_processing/include/audio_processing.h" + +@implementation RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule) { + rtc::scoped_refptr _nativeAudioProcessingModule; + // Custom processing adapters... + RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *_capturePostProcessingAdapter; + RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *_renderPreProcessingAdapter; +} + +- (instancetype)init { + return [self initWithConfig:nil + capturePostProcessingDelegate:nil + renderPreProcessingDelegate:nil]; +} + +- (instancetype)initWithConfig:(nullable RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config + capturePostProcessingDelegate: + (nullable id)capturePostProcessingDelegate + renderPreProcessingDelegate:(nullable id) + renderPreProcessingDelegate { + if (self = [super init]) { + webrtc::AudioProcessingBuilder builder = webrtc::AudioProcessingBuilder(); + + // TODO: Custom Config... 
+ + if (config != nil) { + builder.SetConfig(config.nativeAudioProcessingConfig); + } + + _capturePostProcessingAdapter = + [[RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) alloc] initWithDelegate:capturePostProcessingDelegate]; + builder.SetCapturePostProcessing( + _capturePostProcessingAdapter.nativeAudioCustomProcessingModule); + + _renderPreProcessingAdapter = + [[RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) alloc] initWithDelegate:renderPreProcessingDelegate]; + builder.SetRenderPreProcessing(_renderPreProcessingAdapter.nativeAudioCustomProcessingModule); + + _nativeAudioProcessingModule = builder.Create(); + } + return self; +} + +#pragma mark - Getter & Setters for delegates + +- (nullable id)capturePostProcessingDelegate { + return _capturePostProcessingAdapter.audioCustomProcessingDelegate; +} + +- (void)setCapturePostProcessingDelegate: + (nullable id)delegate { + _capturePostProcessingAdapter.audioCustomProcessingDelegate = delegate; +} + +- (nullable id)renderPreProcessingDelegate { + return _renderPreProcessingAdapter.audioCustomProcessingDelegate; +} + +- (void)setRenderPreProcessingDelegate: + (nullable id)delegate { + _renderPreProcessingAdapter.audioCustomProcessingDelegate = delegate; +} + +#pragma mark - RTCAudioProcessingModule protocol + +- (void)applyConfig:(RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config { + _nativeAudioProcessingModule->ApplyConfig(config.nativeAudioProcessingConfig); +} + +#pragma mark - Private + +- (rtc::scoped_refptr)nativeAudioProcessingModule { + return _nativeAudioProcessingModule; +} + +@end diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h index 6a75f01479..1799c11415 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h @@ -19,7 +19,7 @@ class AudioSessionObserver; /** Adapter that forwards RTCAudioSessionDelegate calls to 
the appropriate * methods on the AudioSessionObserver. */ -@interface RTCNativeAudioSessionDelegateAdapter : NSObject +@interface RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm index daddf314a4..f652ad1e5f 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm @@ -14,7 +14,7 @@ #import "base/RTCLogging.h" -@implementation RTCNativeAudioSessionDelegateAdapter { +@implementation RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) { webrtc::AudioSessionObserver *_observer; } diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h index 370bfa70f0..74f0b39925 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h @@ -30,6 +30,8 @@ NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.") // Returns list of formats that are supported by this class for this device. + (NSArray *)supportedFormatsForDevice:(AVCaptureDevice *)device; ++ (CGFloat)defaultZoomFactorForDeviceType:(AVCaptureDeviceType)deviceType; + // Returns the most efficient supported output pixel format for this capturer. 
- (FourCharCode)preferredOutputPixelFormat; diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index 98d3cf9f45..626c83f855 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -42,7 +42,7 @@ @implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) { FourCharCode _outputPixelFormat; RTCVideoRotation _rotation; #if TARGET_OS_IPHONE - UIDeviceOrientation _orientation; + UIInterfaceOrientation _orientation; BOOL _generatingOrientationNotifications; #endif } @@ -75,7 +75,7 @@ - (instancetype)initWithDelegate:(__weak id_generatingOrientationNotifications = YES; } + // Must be called on main + [self updateOrientation]; }); #endif @@ -173,8 +195,7 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device NSError *error = nil; if (![self.currentDevice lockForConfiguration:&error]) { - RTCLogError(@"Failed to lock device %@. Error: %@", - self.currentDevice, + RTCLogError(@"Failed to lock device %@. 
Error: %@", self.currentDevice, error.userInfo); if (completionHandler) { completionHandler(error); @@ -183,11 +204,12 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device return; } [self reconfigureCaptureSessionInput]; - [self updateOrientation]; [self updateDeviceCaptureFormat:format fps:fps]; [self updateVideoDataOutputPixelFormat:format]; - [self.captureSession startRunning]; + [self updateZoomFactor]; [self.currentDevice unlockForConfiguration]; + + [self.captureSession startRunning]; self.isRunning = YES; if (completionHandler) { completionHandler(nil); @@ -226,10 +248,7 @@ - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHand #if TARGET_OS_IPHONE - (void)deviceOrientationDidChange:(NSNotification *)notification { - [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - [self updateOrientation]; - }]; + [self updateOrientation]; } #endif @@ -265,22 +284,20 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position; } switch (_orientation) { - case UIDeviceOrientationPortrait: + case UIInterfaceOrientationPortrait: _rotation = RTCVideoRotation_90; break; - case UIDeviceOrientationPortraitUpsideDown: + case UIInterfaceOrientationPortraitUpsideDown: _rotation = RTCVideoRotation_270; break; - case UIDeviceOrientationLandscapeLeft: - _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; - break; - case UIDeviceOrientationLandscapeRight: + case UIInterfaceOrientationLandscapeLeft: _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180; break; - case UIDeviceOrientationFaceUp: - case UIDeviceOrientationFaceDown: - case UIDeviceOrientationUnknown: - // Ignore. + case UIInterfaceOrientationLandscapeRight: + _rotation = usingFrontCamera ? 
RTCVideoRotation_180 : RTCVideoRotation_0; + break; + case UIInterfaceOrientationUnknown: + _rotation = RTCVideoRotation_0; break; } #else @@ -291,7 +308,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * - kNanosecondsPerSecond; + kNanosecondsPerSecond; RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer rotation:_rotation @@ -421,8 +438,7 @@ - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { - (dispatch_queue_t)frameQueue { if (!_frameQueue) { _frameQueue = RTCDispatchQueueCreateWithTarget( - "org.webrtc.cameravideocapturer.video", - DISPATCH_QUEUE_SERIAL, + "org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); } return _frameQueue; @@ -495,19 +511,31 @@ - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger @"updateDeviceCaptureFormat must be called on the capture queue."); @try { _currentDevice.activeFormat = format; - _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); + if (![NSStringFromClass([_currentDevice class]) isEqualToString:@"AVCaptureDALDevice"]) { + _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); + } } @catch (NSException *exception) { RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo); return; } } +- (void)updateZoomFactor { + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], + @"updateZoomFactor must be called on the capture queue."); + +#if TARGET_OS_IOS || TARGET_OS_TV + CGFloat videoZoomFactor = [[self class] defaultZoomFactorForDeviceType:_currentDevice.deviceType]; + [_currentDevice setVideoZoomFactor:videoZoomFactor]; +#endif +} + - 
(void)reconfigureCaptureSessionInput { NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], @"reconfigureCaptureSessionInput must be called on the capture queue."); NSError *error = nil; - AVCaptureDeviceInput *input = - [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error]; + AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice + error:&error]; if (!input) { RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription); return; @@ -524,12 +552,19 @@ - (void)reconfigureCaptureSessionInput { [_captureSession commitConfiguration]; } -- (void)updateOrientation { - NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], - @"updateOrientation must be called on the capture queue."); #if TARGET_OS_IPHONE - _orientation = [UIDevice currentDevice].orientation; -#endif +- (void)updateOrientation { + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeMain], + @"statusBarOrientation must be called on the main queue."); + // statusBarOrientation must be called on the main queue + UIInterfaceOrientation newOrientation = [UIApplication sharedApplication].statusBarOrientation; + + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ + // Must be called on the capture queue + self->_orientation = newOrientation; + }]; } +#endif @end diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h new file mode 100644 index 0000000000..30aed69d29 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h @@ -0,0 +1,49 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCDesktopCapturer.h" + +#include "sdk/objc/native/src/objc_desktop_capture.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(DesktopCapturerDelegate) +-(void)didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *) frame; +-(void)didSourceCaptureStart; +-(void)didSourceCapturePaused; +-(void)didSourceCaptureStop; +-(void)didSourceCaptureError; +@end + +@interface RTC_OBJC_TYPE(RTCDesktopCapturer) () + +@property(nonatomic, readonly)std::shared_ptr nativeCapturer; + +- (void)didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; + +-(void)didSourceCaptureStart; + +-(void)didSourceCapturePaused; + +-(void)didSourceCaptureStop; + +-(void)didSourceCaptureError; + +@end + +NS_ASSUME_NONNULL_END \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.h b/sdk/objc/components/capturer/RTCDesktopCapturer.h new file mode 100644 index 0000000000..b63912acf0 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopCapturer.h @@ -0,0 +1,61 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import + +#import "RTCMacros.h" +#import "RTCVideoCapturer.h" +#import "RTCDesktopSource.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCDesktopCapturer); + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(RTCDesktopCapturerDelegate) +-(void)didSourceCaptureStart:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; + +-(void)didSourceCapturePaused:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; + +-(void)didSourceCaptureStop:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; + +-(void)didSourceCaptureError:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; +@end + +RTC_OBJC_EXPORT +// Screen capture that implements RTCVideoCapturer. Delivers frames to a +// RTCVideoCapturerDelegate (usually RTCVideoSource). +@interface RTC_OBJC_TYPE (RTCDesktopCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer) + +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCDesktopSource) *source; + +- (instancetype)initWithSource:(RTC_OBJC_TYPE(RTCDesktopSource) *)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate; + +- (instancetype)initWithDefaultScreen:(__weak id)delegate captureDelegate:(__weak id)captureDelegate; + +- (void)startCapture; + +- (void)startCaptureWithFPS:(NSInteger)fps; + +- (void)stopCapture; + +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.mm b/sdk/objc/components/capturer/RTCDesktopCapturer.mm new file mode 100644 index 0000000000..a1948684d3 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopCapturer.mm @@ -0,0 +1,104 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "base/RTCLogging.h" +#import "base/RTCVideoFrameBuffer.h" + +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +#import "RTCDesktopCapturer.h" +#import "RTCDesktopCapturer+Private.h" +#import "RTCDesktopSource+Private.h" + +@implementation RTC_OBJC_TYPE (RTCDesktopCapturer) { + __weak id _delegate; +} + +@synthesize nativeCapturer = _nativeCapturer; +@synthesize source = _source; + +- (instancetype)initWithSource:(RTC_OBJC_TYPE(RTCDesktopSource) *)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate { + if (self = [super initWithDelegate:captureDelegate]) { + webrtc::DesktopType captureType = webrtc::kScreen; + if(source.sourceType == RTCDesktopSourceTypeWindow) { + captureType = webrtc::kWindow; + } + _nativeCapturer = std::make_shared(captureType, source.nativeMediaSource->id(), self); + _source = source; + _delegate = delegate; + } + return self; +} + +- (instancetype)initWithDefaultScreen:(__weak id)delegate captureDelegate:(__weak id)captureDelegate { + if (self = [super initWithDelegate:captureDelegate]) { + _nativeCapturer = std::make_unique(webrtc::kScreen, -1, self); + _source = nil; + _delegate = delegate; + } + return self; +} + + +-(void)dealloc { + _nativeCapturer->Stop(); + _nativeCapturer = nullptr; +} + +- (void)startCapture { + [self didSourceCaptureStart]; + _nativeCapturer->Start(30); +} + +- (void)startCaptureWithFPS:(NSInteger)fps { + _nativeCapturer->Start(fps); +} + +- (void)didCaptureVideoFrame + : (RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + [self.delegate 
capturer:self didCaptureVideoFrame:frame]; +} + +- (void)stopCapture { + _nativeCapturer->Stop(); +} + +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler { + [self stopCapture]; + if(completionHandler != nil) { + completionHandler(); + } +} + +-(void)didSourceCaptureStart { + [_delegate didSourceCaptureStart:self]; +} + +-(void)didSourceCapturePaused { + [_delegate didSourceCapturePaused:self]; +} + +-(void)didSourceCaptureStop { + [_delegate didSourceCaptureStop:self]; +} + +-(void)didSourceCaptureError { + [_delegate didSourceCaptureError:self]; +} + +@end diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h b/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h new file mode 100644 index 0000000000..eb1e76ddbb --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h @@ -0,0 +1,40 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCDesktopMediaList.h" + +namespace webrtc { + class ObjCDesktopMediaList; + class MediaSource; +} + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCDesktopMediaList) () + +@property(nonatomic, readonly)std::shared_ptr nativeMediaList; + +-(void)mediaSourceAdded:(webrtc::MediaSource *) source; + +-(void)mediaSourceRemoved:(webrtc::MediaSource *) source; + +-(void)mediaSourceNameChanged:(webrtc::MediaSource *) source; + +-(void)mediaSourceThumbnailChanged:(webrtc::MediaSource *) source; + +@end + +NS_ASSUME_NONNULL_END \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList.h b/sdk/objc/components/capturer/RTCDesktopMediaList.h new file mode 100644 index 0000000000..fafeaf5e0d --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopMediaList.h @@ -0,0 +1,51 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import +#import + +#import "RTCMacros.h" +#import "RTCDesktopSource.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(RTCDesktopMediaListDelegate) + +- (void)didDesktopSourceAdded:(RTC_OBJC_TYPE(RTCDesktopSource) *) source; + +- (void)didDesktopSourceRemoved:(RTC_OBJC_TYPE(RTCDesktopSource) *) source; + +- (void)didDesktopSourceNameChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *) source; + +- (void)didDesktopSourceThumbnailChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *) source; +@end + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCDesktopMediaList) : NSObject + +-(instancetype)initWithType:(RTCDesktopSourceType)type delegate:(__weak id)delegate; + +@property(nonatomic, readonly) RTCDesktopSourceType sourceType; + +- (int32_t)UpdateSourceList:(BOOL)forceReload updateAllThumbnails:(BOOL)updateThumbnail; + +- (NSArray*) getSources; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList.mm b/sdk/objc/components/capturer/RTCDesktopMediaList.mm new file mode 100644 index 0000000000..2bd6c1da0e --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopMediaList.mm @@ -0,0 +1,99 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCDesktopMediaList.h" + +#import "RTCDesktopSource+Private.h" +#import "RTCDesktopMediaList+Private.h" + +@implementation RTC_OBJC_TYPE(RTCDesktopMediaList) { + RTCDesktopSourceType _sourceType; + NSMutableArray* _sources; + __weak id _delegate; +} + +@synthesize sourceType = _sourceType; +@synthesize nativeMediaList = _nativeMediaList; + +- (instancetype)initWithType:(RTCDesktopSourceType)type delegate:(__weak id)delegate{ + if (self = [super init]) { + webrtc::DesktopType captureType = webrtc::kScreen; + if(type == RTCDesktopSourceTypeWindow) { + captureType = webrtc::kWindow; + } + _nativeMediaList = std::make_shared(captureType, self); + _sourceType = type; + _delegate = delegate; + } + return self; +} + +- (int32_t)UpdateSourceList:(BOOL)forceReload updateAllThumbnails:(BOOL)updateThumbnail { + return _nativeMediaList->UpdateSourceList(forceReload, updateThumbnail); +} + +-(NSArray*) getSources { + _sources = [NSMutableArray array]; + int sourceCount = _nativeMediaList->GetSourceCount(); + for (int i = 0; i < sourceCount; i++) { + webrtc::MediaSource *mediaSource = _nativeMediaList->GetSource(i); + [_sources addObject:[[RTC_OBJC_TYPE(RTCDesktopSource) alloc] initWithNativeSource:mediaSource sourceType:_sourceType]]; + } + return _sources; +} + +-(void)mediaSourceAdded:(webrtc::MediaSource *) source { + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [[RTC_OBJC_TYPE(RTCDesktopSource) alloc] initWithNativeSource:source sourceType:_sourceType]; + [_sources addObject:desktopSource]; + [_delegate didDesktopSourceAdded:desktopSource]; +} + +-(void)mediaSourceRemoved:(webrtc::MediaSource *) source { + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source]; + if(desktopSource != nil) { + [_sources removeObject:desktopSource]; + [_delegate didDesktopSourceRemoved:desktopSource]; + } +} + +-(void)mediaSourceNameChanged:(webrtc::MediaSource *) source { + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source]; 
+ if(desktopSource != nil) { + [desktopSource setName:source->name().c_str()]; + [_delegate didDesktopSourceNameChanged:desktopSource]; + } +} + +-(void)mediaSourceThumbnailChanged:(webrtc::MediaSource *) source { + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source]; + if(desktopSource != nil) { + [desktopSource setThumbnail:source->thumbnail()]; + [_delegate didDesktopSourceThumbnailChanged:desktopSource]; + } +} + +-(RTC_OBJC_TYPE(RTCDesktopSource) *)getSourceById:(webrtc::MediaSource *) source { + NSEnumerator *enumerator = [_sources objectEnumerator]; + RTC_OBJC_TYPE(RTCDesktopSource) *object; + while ((object = enumerator.nextObject) != nil) { + if(object.nativeMediaSource == source) { + return object; + } + } + return nil; +} + +@end \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopSource+Private.h b/sdk/objc/components/capturer/RTCDesktopSource+Private.h new file mode 100644 index 0000000000..3f4c4ef25f --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopSource+Private.h @@ -0,0 +1,37 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#import + +#import "RTCDesktopSource.h" + +#include "sdk/objc/native/src/objc_desktop_media_list.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCDesktopSource) () + +- (instancetype)initWithNativeSource:(webrtc::MediaSource*) nativeSource + sourceType:(RTCDesktopSourceType) sourceType; + +@property(nonatomic, readonly)webrtc::MediaSource* nativeMediaSource; + +-(void) setName:(const char *) name; + +-(void) setThumbnail:(std::vector) thumbnail; + +@end + +NS_ASSUME_NONNULL_END \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopSource.h b/sdk/objc/components/capturer/RTCDesktopSource.h new file mode 100644 index 0000000000..82da458ce6 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopSource.h @@ -0,0 +1,40 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#import +#import +#import + +#import "RTCMacros.h" + +typedef NS_ENUM(NSInteger, RTCDesktopSourceType) { + RTCDesktopSourceTypeScreen, + RTCDesktopSourceTypeWindow, +}; + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCDesktopSource) : NSObject + +@property(nonatomic, readonly) NSString *sourceId; + +@property(nonatomic, readonly) NSString *name; + +@property(nonatomic, readonly) NSImage *thumbnail; + +@property(nonatomic, readonly) RTCDesktopSourceType sourceType; + +-( NSImage *)UpdateThumbnail; + +@end \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopSource.mm b/sdk/objc/components/capturer/RTCDesktopSource.mm new file mode 100644 index 0000000000..e1bdc6893a --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopSource.mm @@ -0,0 +1,68 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import + +#import "RTCDesktopSource.h" +#import "RTCDesktopSource+Private.h" + +@implementation RTC_OBJC_TYPE(RTCDesktopSource) { + NSString *_sourceId; + NSString *_name; + NSImage *_thumbnail; + RTCDesktopSourceType _sourceType; +} + +@synthesize sourceId = _sourceId; +@synthesize name = _name; +@synthesize thumbnail = _thumbnail; +@synthesize sourceType = _sourceType; +@synthesize nativeMediaSource = _nativeMediaSource; + +- (instancetype)initWithNativeSource:(webrtc::MediaSource*)nativeSource + sourceType:(RTCDesktopSourceType) sourceType { + if (self = [super init]) { + _nativeMediaSource = nativeSource; + _sourceId = [NSString stringWithUTF8String:std::to_string(nativeSource->id()).c_str()]; + _name = [NSString stringWithUTF8String:nativeSource->name().c_str()]; + _thumbnail = [self createThumbnailFromNativeSource:nativeSource->thumbnail()]; + _sourceType = sourceType; + } + return self; +} + +-(NSImage*)createThumbnailFromNativeSource:(std::vector)thumbnail { + NSData* data = [[NSData alloc] initWithBytes:thumbnail.data() length:thumbnail.size()]; + NSImage *image = [[NSImage alloc] initWithData:data]; + return image; +} + +-( NSImage *)UpdateThumbnail { + if(_nativeMediaSource->UpdateThumbnail()) { + _thumbnail = [self createThumbnailFromNativeSource:_nativeMediaSource->thumbnail()]; + } + return _thumbnail; +} + +-(void)setName:(const char *) name { + _name = [NSString stringWithUTF8String:name]; +} + +-(void)setThumbnail:(std::vector) thumbnail { + _thumbnail = [self createThumbnailFromNativeSource:thumbnail]; +} + +@end diff --git a/sdk/objc/components/network/RTCNetworkMonitor+Private.h b/sdk/objc/components/network/RTCNetworkMonitor+Private.h index b5c786be18..f3761f7ba3 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor+Private.h +++ b/sdk/objc/components/network/RTCNetworkMonitor+Private.h @@ -9,16 +9,18 @@ */ #import "RTCNetworkMonitor.h" +#import "RTCMacros.h" #include "sdk/objc/native/src/network_monitor_observer.h" -@interface 
RTCNetworkMonitor () +@interface RTC_OBJC_TYPE (RTCNetworkMonitor) +() -/** `observer` is a raw pointer and should be kept alive - * for this object's lifetime. - */ -- (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer - NS_DESIGNATED_INITIALIZER; + /** `observer` is a raw pointer and should be kept alive + * for this object's lifetime. + */ + - (instancetype)initWithObserver + : (webrtc::NetworkMonitorObserver *)observer NS_DESIGNATED_INITIALIZER; /** Stops the receiver from posting updates to `observer`. */ - (void)stop; diff --git a/sdk/objc/components/network/RTCNetworkMonitor.h b/sdk/objc/components/network/RTCNetworkMonitor.h index 21d22f5463..4b0cb4baf0 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.h +++ b/sdk/objc/components/network/RTCNetworkMonitor.h @@ -10,12 +10,14 @@ #import +#import "RTCMacros.h" + NS_ASSUME_NONNULL_BEGIN /** Listens for NWPathMonitor updates and forwards the results to a C++ * observer. */ -@interface RTCNetworkMonitor : NSObject +@interface RTC_OBJC_TYPE (RTCNetworkMonitor): NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/network/RTCNetworkMonitor.mm b/sdk/objc/components/network/RTCNetworkMonitor.mm index 7e75b2b4c0..2e42ab5290 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.mm +++ b/sdk/objc/components/network/RTCNetworkMonitor.mm @@ -46,7 +46,7 @@ } // namespace -@implementation RTCNetworkMonitor { +@implementation RTC_OBJC_TYPE (RTCNetworkMonitor) { webrtc::NetworkMonitorObserver *_observer; nw_path_monitor_t _pathMonitor; dispatch_queue_t _monitorQueue; @@ -63,12 +63,12 @@ - (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer { return nil; } RTCLog(@"NW path monitor created."); - __weak RTCNetworkMonitor *weakSelf = self; + __weak RTC_OBJC_TYPE(RTCNetworkMonitor) *weakSelf = self; nw_path_monitor_set_update_handler(_pathMonitor, ^(nw_path_t path) { if (weakSelf == nil) { return; } - RTCNetworkMonitor *strongSelf = weakSelf; + 
RTC_OBJC_TYPE(RTCNetworkMonitor) *strongSelf = weakSelf; RTCLog(@"NW path monitor: updated."); nw_path_status_t status = nw_path_get_status(path); if (status == nw_path_status_invalid) { diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h index e5987fe22a..c4e2724042 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h @@ -13,5 +13,5 @@ #import "RTCMTLRenderer.h" NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLI420Renderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE(RTCMTLI420Renderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm index f4c76fa313..963f36c62a 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm @@ -70,7 +70,7 @@ fragment half4 fragmentColorConversion( return half4(out); }); -@implementation RTCMTLI420Renderer { +@implementation RTC_OBJC_TYPE(RTCMTLI420Renderer) { // Textures. id _yTexture; id _uTexture; diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h index f70e2ad5ee..330533bef0 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.h @@ -8,17 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ -#import +#import "RTCMacros.h" -#import "RTCVideoRenderer.h" - -NS_AVAILABLE_MAC(10.11) - -RTC_OBJC_EXPORT -@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) : NSView - -@property(nonatomic, weak) id delegate; - -+ (BOOL)isMetalAvailable; - -@end +// Deprecated: Use RTCMTLVideoView instead +@compatibility_alias RTC_OBJC_TYPE(RTCMTLNSVideoView) RTC_OBJC_TYPE(RTCMTLVideoView); diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m deleted file mode 100644 index 625fb1caa7..0000000000 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "RTCMTLNSVideoView.h" - -#import -#import - -#import "base/RTCVideoFrame.h" - -#import "RTCMTLI420Renderer.h" - -@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) -() @property(nonatomic) id renderer; -@property(nonatomic, strong) MTKView *metalView; -@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; -@end - -@implementation RTC_OBJC_TYPE (RTCMTLNSVideoView) { - id _renderer; -} - -@synthesize delegate = _delegate; -@synthesize renderer = _renderer; -@synthesize metalView = _metalView; -@synthesize videoFrame = _videoFrame; - -- (instancetype)initWithFrame:(CGRect)frameRect { - self = [super initWithFrame:frameRect]; - if (self) { - [self configure]; - } - return self; -} - -- (instancetype)initWithCoder:(NSCoder *)aCoder { - self = [super initWithCoder:aCoder]; - if (self) { - [self configure]; - } - return self; -} - -#pragma mark - Private - -+ (BOOL)isMetalAvailable { - return [MTLCopyAllDevices() count] > 0; -} - -- (void)configure { - if ([[self class] isMetalAvailable]) { - _metalView = [[MTKView alloc] initWithFrame:self.bounds]; - [self addSubview:_metalView]; - _metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit; - _metalView.translatesAutoresizingMaskIntoConstraints = NO; - _metalView.framebufferOnly = YES; - _metalView.delegate = self; - - _renderer = [[RTCMTLI420Renderer alloc] init]; - if (![(RTCMTLI420Renderer *)_renderer addRenderingDestination:_metalView]) { - _renderer = nil; - }; - } -} - -- (void)updateConstraints { - NSDictionary *views = NSDictionaryOfVariableBindings(_metalView); - - NSArray *constraintsHorizontal = - [NSLayoutConstraint constraintsWithVisualFormat:@"H:|-0-[_metalView]-0-|" - options:0 - metrics:nil - views:views]; - [self addConstraints:constraintsHorizontal]; - - NSArray *constraintsVertical = - [NSLayoutConstraint constraintsWithVisualFormat:@"V:|-0-[_metalView]-0-|" - options:0 - metrics:nil - views:views]; - [self addConstraints:constraintsVertical]; - 
[super updateConstraints]; -} - -#pragma mark - MTKViewDelegate methods -- (void)drawInMTKView:(nonnull MTKView *)view { - if (self.videoFrame == nil) { - return; - } - if (view == self.metalView) { - [_renderer drawFrame:self.videoFrame]; - } -} - -- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size { -} - -#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) - -- (void)setSize:(CGSize)size { - _metalView.drawableSize = size; - dispatch_async(dispatch_get_main_queue(), ^{ - [self.delegate videoView:self didChangeVideoSize:size]; - }); - [_metalView draw]; -} - -- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { - if (frame == nil) { - return; - } - self.videoFrame = [frame newI420VideoFrame]; -} - -@end diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h index 866b7ea17e..125612a269 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h @@ -13,6 +13,6 @@ #import "RTCMTLRenderer.h" NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLNV12Renderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE(RTCMTLNV12Renderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm index 7b037c6dbc..c4000b1b1d 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm @@ -60,7 +60,7 @@ fragment half4 fragmentColorConversion( return half4(out); }); -@implementation RTCMTLNV12Renderer { +@implementation RTC_OBJC_TYPE(RTCMTLNV12Renderer) { // Textures. 
CVMetalTextureCacheRef _textureCache; id _yTexture; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h index 9db422cd22..5e355a8504 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h @@ -11,12 +11,13 @@ #import #import "RTCMTLRenderer.h" +#import "RTCMacros.h" /** @abstract RGB/BGR renderer. * @discussion This renderer handles both kCVPixelFormatType_32BGRA and * kCVPixelFormatType_32ARGB. */ NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLRGBRenderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE (RTCMTLRGBRenderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm index e5dc4ef80a..6ca4a4000d 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm @@ -30,12 +30,12 @@ } Vertex; typedef struct { - float4 position[[position]]; + float4 position [[position]]; float2 texcoord; } VertexIO; - vertex VertexIO vertexPassthrough(constant Vertex *verticies[[buffer(0)]], - uint vid[[vertex_id]]) { + vertex VertexIO vertexPassthrough(constant Vertex * verticies [[buffer(0)]], + uint vid [[vertex_id]]) { VertexIO out; constant Vertex &v = verticies[vid]; out.position = float4(float2(v.position), 0.0, 1.0); @@ -43,9 +43,9 @@ vertex VertexIO vertexPassthrough(constant Vertex *verticies[[buffer(0)]], return out; } - fragment half4 fragmentColorConversion(VertexIO in[[stage_in]], - texture2d texture[[texture(0)]], - constant bool &isARGB[[buffer(0)]]) { + fragment half4 fragmentColorConversion(VertexIO in [[stage_in]], + texture2d texture [[texture(0)]], + constant bool &isARGB [[buffer(0)]]) { constexpr sampler s(address::clamp_to_edge, filter::linear); half4 out = texture.sample(s, in.texcoord); @@ -56,7 +56,7 @@ fragment half4 
fragmentColorConversion(VertexIO in[[stage_in]], return out; }); -@implementation RTCMTLRGBRenderer { +@implementation RTC_OBJC_TYPE (RTCMTLRGBRenderer) { // Textures. CVMetalTextureCacheRef _textureCache; id _texture; @@ -73,8 +73,8 @@ - (BOOL)addRenderingDestination:(__kindof MTKView *)view { } - (BOOL)initializeTextureCache { - CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice], - nil, &_textureCache); + CVReturn status = CVMetalTextureCacheCreate( + kCFAllocatorDefault, nil, [self currentMetalDevice], nil, &_textureCache); if (status != kCVReturnSuccess) { RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status); return NO; @@ -130,9 +130,15 @@ - (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { return NO; } - CVReturn result = CVMetalTextureCacheCreateTextureFromImage( - kCFAllocatorDefault, _textureCache, pixelBuffer, nil, mtlPixelFormat, - width, height, 0, &textureOut); + CVReturn result = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + _textureCache, + pixelBuffer, + nil, + mtlPixelFormat, + width, + height, + 0, + &textureOut); if (result == kCVReturnSuccess) { gpuTexture = CVMetalTextureGetTexture(textureOut); } diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h index 916d4d4430..f6a82db56a 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCMTLRenderer (Private) +@interface RTC_OBJC_TYPE(RTCMTLRenderer) (Private) - (nullable id)currentMetalDevice; - (NSString *)shaderSource; - (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h index aa31545973..6bbca3d985 100644 --- 
a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h @@ -21,7 +21,7 @@ NS_ASSUME_NONNULL_BEGIN /** * Protocol defining ability to render RTCVideoFrame in Metal enabled views. */ -@protocol RTCMTLRenderer +@protocol RTC_OBJC_TYPE(RTCMTLRenderer) /** * Method to be implemented to perform actual rendering of the provided frame. @@ -49,7 +49,7 @@ NS_ASSUME_NONNULL_BEGIN * Implementation of RTCMTLRenderer protocol. */ NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLRenderer : NSObject +@interface RTC_OBJC_TYPE(RTCMTLRenderer) : NSObject /** @abstract A wrapped RTCVideoRotation, or nil. @discussion When not nil, the rotation of the actual frame is ignored when rendering. diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm index 410590a7b1..ca3fcc3e51 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm @@ -87,7 +87,7 @@ static inline void getCubeVertexData(int cropX, // In future we might use triple buffering method if it improves performance. static const NSInteger kMaxInflightBuffers = 1; -@implementation RTCMTLRenderer { +@implementation RTC_OBJC_TYPE(RTCMTLRenderer) { __kindof MTKView *_view; // Controller. diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h index 3320d12076..bed02ffa92 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h @@ -10,6 +10,10 @@ #import +#if TARGET_OS_OSX +#import +#endif + #import "RTCMacros.h" #import "RTCVideoFrame.h" #import "RTCVideoRenderer.h" @@ -22,14 +26,26 @@ NS_ASSUME_NONNULL_BEGIN * It has id property that renders video frames in the view's * bounds using Metal. 
*/ +#if TARGET_OS_IPHONE NS_CLASS_AVAILABLE_IOS(9) +#elif TARGET_OS_OSX +NS_AVAILABLE_MAC(10.11) +#endif RTC_OBJC_EXPORT -@interface RTC_OBJC_TYPE (RTCMTLVideoView) : UIView +@interface RTC_OBJC_TYPE (RTCMTLVideoView) : + +#if TARGET_OS_IPHONE + UIView +#elif TARGET_OS_OSX + NSView +#endif @property(nonatomic, weak) id delegate; +#if TARGET_OS_IPHONE @property(nonatomic) UIViewContentMode videoContentMode; +#endif /** @abstract Enables/disables rendering. */ @@ -39,6 +55,8 @@ RTC_OBJC_EXPORT */ @property(nonatomic, nullable) NSValue* rotationOverride; ++ (BOOL)isMetalAvailable; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m index c5d9e4385f..d4d98a0bf4 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m @@ -22,17 +22,12 @@ #import "RTCMTLNV12Renderer.h" #import "RTCMTLRGBRenderer.h" -// To avoid unreconized symbol linker errors, we're taking advantage of the objc runtime. -// Linking errors occur when compiling for architectures that don't support Metal. 
-#define MTKViewClass NSClassFromString(@"MTKView") -#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer") -#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer") -#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer") - -@interface RTC_OBJC_TYPE (RTCMTLVideoView) -() @property(nonatomic) RTCMTLI420Renderer *rendererI420; -@property(nonatomic) RTCMTLNV12Renderer *rendererNV12; -@property(nonatomic) RTCMTLRGBRenderer *rendererRGB; +#import "RTCMTLRenderer+Private.h" + +@interface RTC_OBJC_TYPE (RTCMTLVideoView) () +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLI420Renderer) *rendererI420; +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLNV12Renderer) * rendererNV12; +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLRGBRenderer) * rendererRGB; @property(nonatomic) MTKView *metalView; @property(atomic) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; @property(nonatomic) CGSize videoFrameSize; @@ -51,6 +46,14 @@ @implementation RTC_OBJC_TYPE (RTCMTLVideoView) @synthesize lastFrameTimeNs = _lastFrameTimeNs; @synthesize rotationOverride = _rotationOverride; ++ (BOOL)isMetalAvailable { +#if TARGET_OS_IPHONE + return MTLCreateSystemDefaultDevice() != nil; +#elif TARGET_OS_OSX + return [MTLCopyAllDevices() count] > 0; +#endif +} + - (instancetype)initWithFrame:(CGRect)frameRect { self = [super initWithFrame:frameRect]; if (self) { @@ -75,6 +78,7 @@ - (void)setEnabled:(BOOL)enabled { self.metalView.paused = !enabled; } +#if TARGET_OS_IPHONE - (UIViewContentMode)videoContentMode { return self.metalView.contentMode; } @@ -82,27 +86,24 @@ - (UIViewContentMode)videoContentMode { - (void)setVideoContentMode:(UIViewContentMode)mode { self.metalView.contentMode = mode; } +#endif #pragma mark - Private -+ (BOOL)isMetalAvailable { - return MTLCreateSystemDefaultDevice() != nil; -} - + (MTKView *)createMetalView:(CGRect)frame { - return [[MTKViewClass alloc] initWithFrame:frame]; + return [[MTKView alloc] initWithFrame:frame]; } -+ (RTCMTLNV12Renderer 
*)createNV12Renderer { - return [[RTCMTLNV12RendererClass alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLNV12Renderer) *)createNV12Renderer { + return [[RTC_OBJC_TYPE(RTCMTLNV12Renderer) alloc] init]; } -+ (RTCMTLI420Renderer *)createI420Renderer { - return [[RTCMTLI420RendererClass alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLI420Renderer) *)createI420Renderer { + return [[RTC_OBJC_TYPE(RTCMTLI420Renderer) alloc] init]; } -+ (RTCMTLRGBRenderer *)createRGBRenderer { - return [[RTCMTLRGBRenderer alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLRGBRenderer) *)createRGBRenderer { + return [[RTC_OBJC_TYPE(RTCMTLRGBRenderer) alloc] init]; } - (void)configure { @@ -111,19 +112,24 @@ - (void)configure { self.metalView = [RTC_OBJC_TYPE(RTCMTLVideoView) createMetalView:self.bounds]; self.metalView.delegate = self; +#if TARGET_OS_IPHONE self.metalView.contentMode = UIViewContentModeScaleAspectFill; +#elif TARGET_OS_OSX + self.metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit; +#endif + [self addSubview:self.metalView]; self.videoFrameSize = CGSizeZero; } +#if TARGET_OS_IPHONE - (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { - [super setMultipleTouchEnabled:multipleTouchEnabled]; - self.metalView.multipleTouchEnabled = multipleTouchEnabled; + [super setMultipleTouchEnabled:multipleTouchEnabled]; + self.metalView.multipleTouchEnabled = multipleTouchEnabled; } +#endif -- (void)layoutSubviews { - [super layoutSubviews]; - +- (void)performLayout { CGRect bounds = self.bounds; self.metalView.frame = bounds; if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) { @@ -148,7 +154,7 @@ - (void)drawInMTKView:(nonnull MTKView *)view { return; } - RTCMTLRenderer *renderer; + RTC_OBJC_TYPE(RTCMTLRenderer) * renderer; if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer; const OSType pixelFormat = 
CVPixelBufferGetPixelFormatType(buffer.pixelBuffer); @@ -203,10 +209,10 @@ - (void)setRotationOverride:(NSValue *)rotationOverride { [self setNeedsLayout]; } -- (RTCVideoRotation)frameRotation { +- (RTCVideoRotation)videoRotation { if (self.rotationOverride) { RTCVideoRotation rotation; - if (@available(iOS 11, *)) { + if (@available(iOS 11, macos 10.13, *)) { [self.rotationOverride getValue:&rotation size:sizeof(rotation)]; } else { [self.rotationOverride getValue:&rotation]; @@ -220,10 +226,10 @@ - (RTCVideoRotation)frameRotation { - (CGSize)drawableSize { // Flip width/height if the rotations are not the same. CGSize videoFrameSize = self.videoFrameSize; - RTCVideoRotation frameRotation = [self frameRotation]; + RTCVideoRotation videoRotation = [self videoRotation]; BOOL useLandscape = - (frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180); + (videoRotation == RTCVideoRotation_0) || (videoRotation == RTCVideoRotation_180); BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) || (self.videoFrame.rotation == RTCVideoRotation_180); @@ -259,7 +265,34 @@ - (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { RTCLogInfo(@"Incoming frame is nil. Exiting render callback."); return; } - self.videoFrame = frame; + + // Workaround to support RTCCVPixelBuffer rendering. + // RTCMTLRGBRenderer seems to be broken at the moment. + BOOL useI420 = NO; + if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer); + useI420 = pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB; + } + self.videoFrame = useI420 ? 
[frame newI420VideoFrame] : frame; +} + +#pragma mark - Cross platform + +#if TARGET_OS_IPHONE +- (void)layoutSubviews { + [super layoutSubviews]; + [self performLayout]; +} +#elif TARGET_OS_OSX +- (void)layout { + [super layout]; + [self performLayout]; +} + +- (void)setNeedsLayout { + self.needsLayout = YES; } +#endif @end diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h index 71a073ab21..b00cf8047d 100644 --- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h +++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN * and RTCEAGLVideoView if no external shader is specified. This shader will render * the video in a rectangle without any color or geometric transformations. */ -@interface RTCDefaultShader : NSObject +@interface RTC_OBJC_TYPE(RTCDefaultShader) : NSObject @end diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm b/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm index 51dca3223d..b0c111293f 100644 --- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm +++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm @@ -69,7 +69,7 @@ " 1.0);\n" " }\n"; -@implementation RTCDefaultShader { +@implementation RTC_OBJC_TYPE(RTCDefaultShader) { GLuint _vertexBuffer; GLuint _vertexArray; // Store current rotation and only upload new vertex data when rotation changes. diff --git a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h index b78501e9e6..1c5b64fdfc 100644 --- a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h +++ b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h @@ -10,11 +10,13 @@ #import +#import "RTCMacros.h" + // RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen // refreshes, which should be 30fps. 
We wrap the display link in order to avoid // a retain cycle since CADisplayLink takes a strong reference onto its target. // The timer is paused by default. -@interface RTCDisplayLinkTimer : NSObject +@interface RTC_OBJC_TYPE (RTCDisplayLinkTimer): NSObject @property(nonatomic) BOOL isPaused; diff --git a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m index 906bb898d6..f4cf03304d 100644 --- a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m +++ b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m @@ -12,7 +12,7 @@ #import -@implementation RTCDisplayLinkTimer { +@implementation RTC_OBJC_TYPE (RTCDisplayLinkTimer) { CADisplayLink *_displayLink; void (^_timerHandler)(void); } @@ -21,17 +21,15 @@ - (instancetype)initWithTimerHandler:(void (^)(void))timerHandler { NSParameterAssert(timerHandler); if (self = [super init]) { _timerHandler = timerHandler; - _displayLink = - [CADisplayLink displayLinkWithTarget:self - selector:@selector(displayLinkDidFire:)]; + _displayLink = [CADisplayLink displayLinkWithTarget:self + selector:@selector(displayLinkDidFire:)]; _displayLink.paused = YES; #if __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0 _displayLink.preferredFramesPerSecond = 30; #else [_displayLink setFrameInterval:2]; #endif - [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] - forMode:NSRunLoopCommonModes]; + [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes]; } return self; } diff --git a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m index 89e62d2ce7..0a00494d2d 100644 --- a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m +++ b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m @@ -42,14 +42,14 @@ @interface RTC_OBJC_TYPE (RTCEAGLVideoView) @end @implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { - RTCDisplayLinkTimer *_timer; + RTC_OBJC_TYPE(RTCDisplayLinkTimer) * 
_timer; EAGLContext *_glContext; // This flag should only be set and read on the main thread (e.g. by // setNeedsDisplay) BOOL _isDirty; id _shader; - RTCNV12TextureCache *_nv12TextureCache; - RTCI420TextureCache *_i420TextureCache; + RTC_OBJC_TYPE(RTCNV12TextureCache) *_nv12TextureCache; + RTC_OBJC_TYPE(RTCI420TextureCache) *_i420TextureCache; // As timestamps should be unique between frames, will store last // drawn frame timestamp instead of the whole frame to reduce memory usage. int64_t _lastDrawnFrameTimeStampNs; @@ -61,11 +61,11 @@ @implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { @synthesize rotationOverride = _rotationOverride; - (instancetype)initWithFrame:(CGRect)frame { - return [self initWithFrame:frame shader:[[RTCDefaultShader alloc] init]]; + return [self initWithFrame:frame shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithCoder:(NSCoder *)aDecoder { - return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]]; + return [self initWithCoder:aDecoder shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithFrame:(CGRect)frame shader:(id)shader { @@ -90,8 +90,7 @@ - (instancetype)initWithCoder:(NSCoder *)aDecoder } - (BOOL)configure { - EAGLContext *glContext = - [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3]; + EAGLContext *glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3]; if (!glContext) { glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; } @@ -102,8 +101,7 @@ - (BOOL)configure { _glContext = glContext; // GLKView manages a framebuffer for us. 
- _glkView = [[GLKView alloc] initWithFrame:CGRectZero - context:_glContext]; + _glkView = [[GLKView alloc] initWithFrame:CGRectZero context:_glContext]; _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888; _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone; _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone; @@ -115,8 +113,7 @@ - (BOOL)configure { // Listen to application state in order to clean up OpenGL before app goes // away. - NSNotificationCenter *notificationCenter = - [NSNotificationCenter defaultCenter]; + NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter]; [notificationCenter addObserver:self selector:@selector(willResignActive) name:UIApplicationWillResignActiveNotification @@ -130,7 +127,7 @@ - (BOOL)configure { // using a refresh rate proportional to screen refresh frequency. This // occurs on the main thread. __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self; - _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{ + _timer = [[RTC_OBJC_TYPE(RTCDisplayLinkTimer) alloc] initWithTimerHandler:^{ RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf; [strongSelf displayLinkTimerDidFire]; }]; @@ -141,14 +138,13 @@ - (BOOL)configure { } - (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { - [super setMultipleTouchEnabled:multipleTouchEnabled]; - _glkView.multipleTouchEnabled = multipleTouchEnabled; + [super setMultipleTouchEnabled:multipleTouchEnabled]; + _glkView.multipleTouchEnabled = multipleTouchEnabled; } - (void)dealloc { [[NSNotificationCenter defaultCenter] removeObserver:self]; - UIApplicationState appState = - [UIApplication sharedApplication].applicationState; + UIApplicationState appState = [UIApplication sharedApplication].applicationState; if (appState == UIApplicationStateActive) { [self teardownGL]; } @@ -189,14 +185,14 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { return; } RTCVideoRotation rotation = frame.rotation; - 
if(_rotationOverride != nil) { - [_rotationOverride getValue: &rotation]; + if (_rotationOverride != nil) { + [_rotationOverride getValue:&rotation]; } [self ensureGLContext]; glClear(GL_COLOR_BUFFER_BIT); if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { if (!_nv12TextureCache) { - _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext]; + _nv12TextureCache = [[RTC_OBJC_TYPE(RTCNV12TextureCache) alloc] initWithContext:_glContext]; } if (_nv12TextureCache) { [_nv12TextureCache uploadFrameToTextures:frame]; @@ -211,7 +207,7 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { } } else { if (!_i420TextureCache) { - _i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:_glContext]; + _i420TextureCache = [[RTC_OBJC_TYPE(RTCI420TextureCache) alloc] initWithContext:_glContext]; } [_i420TextureCache uploadFrameToTextures:frame]; [_shader applyShadingForFrameWithWidth:frame.width diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h index 9fdcc5a695..2c2319d043 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h @@ -11,7 +11,7 @@ #import "RTCOpenGLDefines.h" #import "base/RTCVideoFrame.h" -@interface RTCI420TextureCache : NSObject +@interface RTC_OBJC_TYPE(RTCI420TextureCache) : NSObject @property(nonatomic, readonly) GLuint yTexture; @property(nonatomic, readonly) GLuint uTexture; diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm index 5dccd4bf6a..0ed19a842c 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm @@ -28,7 +28,7 @@ static const GLsizei kNumTexturesPerSet = 3; static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets; -@implementation RTCI420TextureCache { 
+@implementation RTC_OBJC_TYPE(RTCI420TextureCache) { BOOL _hasUnpackRowLength; GLint _currentTextureSet; // Handles for OpenGL constructs. diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m index 168c73126f..97957faf24 100644 --- a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m +++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m @@ -29,7 +29,7 @@ @interface RTC_OBJC_TYPE (RTCNSGLVideoView) // from the display link callback so atomicity is required. @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; -@property(atomic, strong) RTCI420TextureCache *i420TextureCache; +@property(atomic, strong) RTC_OBJC_TYPE(RTCI420TextureCache) *i420TextureCache; - (void)drawFrame; @end @@ -57,7 +57,7 @@ @implementation RTC_OBJC_TYPE (RTCNSGLVideoView) { @synthesize i420TextureCache = _i420TextureCache; - (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format { - return [self initWithFrame:frame pixelFormat:format shader:[[RTCDefaultShader alloc] init]]; + return [self initWithFrame:frame pixelFormat:format shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithFrame:(NSRect)frame @@ -140,9 +140,9 @@ - (void)drawFrame { // TODO(magjed): Add support for NV12 texture cache on OS X. 
frame = [frame newI420VideoFrame]; if (!self.i420TextureCache) { - self.i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:context]; + self.i420TextureCache = [[RTC_OBJC_TYPE(RTCI420TextureCache) alloc] initWithContext:context]; } - RTCI420TextureCache *i420TextureCache = self.i420TextureCache; + RTC_OBJC_TYPE(RTCI420TextureCache) *i420TextureCache = self.i420TextureCache; if (i420TextureCache) { [i420TextureCache uploadFrameToTextures:frame]; [_shader applyShadingForFrameWithWidth:frame.width diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h index f202b836b5..420490b1ab 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCNV12TextureCache : NSObject +@interface RTC_OBJC_TYPE(RTCNV12TextureCache) : NSObject @property(nonatomic, readonly) GLuint yTexture; @property(nonatomic, readonly) GLuint uvTexture; diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m index a520ac45b4..096767be55 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m @@ -14,7 +14,7 @@ #import "base/RTCVideoFrameBuffer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" -@implementation RTCNV12TextureCache { +@implementation RTC_OBJC_TYPE(RTCNV12TextureCache) { CVOpenGLESTextureCacheRef _textureCache; CVOpenGLESTextureRef _yTextureRef; CVOpenGLESTextureRef _uvTextureRef; diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m index 8de55bde4a..3cc92382e6 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m @@ -55,11 +55,13 @@ 
@implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) if ([RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) { [result - addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]]; + addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderVP9) scalabilityModes]]]; } #if defined(RTC_USE_LIBAOM_AV1_ENCODER) - [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name]]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *av1Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderAV1) scalabilityModes]]; + [result addObject:av1Info]; #endif return result; diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h new file mode 100644 index 0000000000..4070af22e4 --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h @@ -0,0 +1,16 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoEncoderFactory.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) : NSObject + +- (instancetype)initWithPrimary:(id)primary + fallback:(id)fallback; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm new file mode 100644 index 0000000000..55916435a1 --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm @@ -0,0 +1,39 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoCodecInfo.h" +#import "RTCVideoEncoderFactorySimulcast.h" +#import "api/video_codec/RTCVideoEncoderSimulcast.h" + +@interface RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) () + +@property id primary; +@property id fallback; + +@end + + 
+@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) + +@synthesize primary = _primary; +@synthesize fallback = _fallback; + +- (instancetype)initWithPrimary:(id)primary + fallback:(id)fallback { + if (self = [super init]) { + _primary = primary; + _fallback = fallback; + } + return self; +} + +- (nullable id)createEncoder: (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { + return [RTC_OBJC_TYPE(RTCVideoEncoderSimulcast) simulcastEncoderWithPrimary: _primary fallback: _fallback videoCodecInfo: info]; +} + +- (NSArray *)supportedCodecs { + return [[_primary supportedCodecs] arrayByAddingObjectsFromArray: [_fallback supportedCodecs]]; +} + + +@end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm index 2160d79ae5..d3dd33aef6 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm @@ -54,14 +54,42 @@ - (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags // The ratio between kVTCompressionPropertyKey_DataRateLimits and // kVTCompressionPropertyKey_AverageBitRate. The data rate limit is set higher // than the average bit rate to avoid undershooting the target. -const float kLimitToAverageBitRateFactor = 1.5f; +const float kLimitToAverageBitRateFactor = 10.0f; // These thresholds deviate from the default h264 QP thresholds, as they // have been found to work better on devices that support VideoToolbox const int kLowH264QpThreshold = 28; const int kHighH264QpThreshold = 39; +const int kBitsPerByte = 8; const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; +typedef NS_ENUM(NSInteger, RTCVideoEncodeMode) { + Variable = 0, + Constant = 1, +}; + +NSArray *CreateRateLimitArray(uint32_t computedBitrateBps, RTCVideoEncodeMode mode) { + switch (mode) { + case Variable: { + // 5 seconds should be an okay interval for VBR to enforce the long-term + // limit. 
+ float avgInterval = 5.0; + uint32_t avgBytesPerSecond = computedBitrateBps / kBitsPerByte * avgInterval; + // And the peak bitrate is measured per-second in a way similar to CBR. + float peakInterval = 1.0; + uint32_t peakBytesPerSecond = + computedBitrateBps * kLimitToAverageBitRateFactor / kBitsPerByte; + return @[ @(peakBytesPerSecond), @(peakInterval), @(avgBytesPerSecond), @(avgInterval) ]; + } + case Constant: { + // CBR should be enforces with granularity of a second. + float targetInterval = 1.0; + int32_t targetBitrate = computedBitrateBps / kBitsPerByte; + return @[ @(targetBitrate), @(targetInterval) ]; + } + } +} + // Struct that we pass to the encoder per frame to encode. We receive it again // in the encoder callback. struct RTCFrameEncodeParams { @@ -180,10 +208,13 @@ void compressionOutputCallback(void *encoder, // no specific VideoToolbox profile for the specified level, AutoLevel will be // returned. The user must initialize the encoder with a resolution and // framerate conforming to the selected H264 level regardless. 
-CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id) { +CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id, bool screenSharing) { switch (profile_level_id.profile) { case webrtc::H264Profile::kProfileConstrainedBaseline: case webrtc::H264Profile::kProfileBaseline: + if (screenSharing) { + return kVTProfileLevel_H264_Baseline_AutoLevel; + } switch (profile_level_id.level) { case webrtc::H264Level::kLevel3: return kVTProfileLevel_H264_Baseline_3_0; @@ -319,8 +350,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id) @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { RTC_OBJC_TYPE(RTCVideoCodecInfo) * _codecInfo; - std::unique_ptr _bitrateAdjuster; uint32_t _targetBitrateBps; + uint32_t _targetFrameRate; uint32_t _encoderBitrateBps; uint32_t _encoderFrameRate; uint32_t _maxAllowedFrameRate; @@ -330,10 +361,17 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { int32_t _width; int32_t _height; VTCompressionSessionRef _compressionSession; - RTCVideoCodecMode _mode; + CVPixelBufferPoolRef _pixelBufferPool; + RTCVideoCodecMode _codecMode; + unsigned int _maxQP; + unsigned int _minBitrate; + unsigned int _maxBitrate; + RTCVideoEncodeMode _encodeMode; webrtc::H264BitstreamParser _h264BitstreamParser; std::vector _frameScaleBuffer; + + CMTime _previousPresentationTimeStamp; } // .5 is set as a mininum to prevent overcompensating for large temporary @@ -346,12 +384,14 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { - (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo { if (self = [super init]) { _codecInfo = codecInfo; - _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95)); _packetizationMode = RTCH264PacketizationModeNonInterleaved; _profile_level_id = webrtc::ParseSdpForH264ProfileLevelId([codecInfo nativeSdpVideoFormat].parameters); + _previousPresentationTimeStamp = kCMTimeZero; RTC_DCHECK(_profile_level_id); - RTC_LOG(LS_INFO) << "Using 
profile " << CFStringToString(ExtractProfile(*_profile_level_id)); + RTC_LOG(LS_INFO) << "Using profile " + << CFStringToString(ExtractProfile( + *_profile_level_id, _codecMode == RTCVideoCodecModeScreensharing)); RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]); } return self; @@ -368,7 +408,12 @@ - (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)s _width = settings.width; _height = settings.height; - _mode = settings.mode; + _codecMode = settings.mode; + _maxQP = settings.qpMax; + + _encodeMode = Variable; // Always variable mode for now + _minBitrate = settings.minBitrate * 1000; // minBitrate is in kbps. + _maxBitrate = settings.maxBitrate * 1000; // maxBitrate is in kbps. uint32_t aligned_width = (((_width + 15) >> 4) << 4); uint32_t aligned_height = (((_height + 15) >> 4) << 4); @@ -376,9 +421,15 @@ - (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)s (aligned_width * aligned_height)); // We can only set average bitrate on the HW encoder. - _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps. - _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps); - _encoderFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate); + if (_encodeMode == Constant) { + _targetBitrateBps = _maxBitrate; + } else { + _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps. 
+ } + + _targetFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate); + _encoderBitrateBps = 0; + _encoderFrameRate = 0; if (settings.maxFramerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) { RTC_LOG(LS_WARNING) << "Initial encoder frame rate setting " << settings.maxFramerate << " is larger than the " @@ -397,8 +448,15 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame if (!_callback || !_compressionSession) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } - BOOL isKeyframeRequired = NO; + CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000); + if (CMTimeCompare(presentationTimeStamp, _previousPresentationTimeStamp) == 0) { + // Same PTS + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + _previousPresentationTimeStamp = presentationTimeStamp; + + BOOL isKeyframeRequired = NO; // Get a pixel buffer from the pool and copy frame data over. if ([self resetCompressionSessionIfNeededWithFrame:frame]) { isKeyframeRequired = YES; @@ -425,8 +483,8 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame int dstWidth = CVPixelBufferGetWidth(pixelBuffer); int dstHeight = CVPixelBufferGetHeight(pixelBuffer); if ([rtcPixelBuffer requiresScalingToWidth:dstWidth height:dstHeight]) { - int size = - [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth height:dstHeight]; + int size = [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth + height:dstHeight]; _frameScaleBuffer.resize(size); } else { _frameScaleBuffer.clear(); @@ -466,7 +524,6 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame } } - CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000); CFDictionaryRef frameProperties = nullptr; if (isKeyframeRequired) { CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame}; @@ -484,8 +541,8 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame frame.rotation)); encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode; - 
// Update the bitrate if needed. - [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:_encoderFrameRate]; + // Update encoder bitrate or frameRate if needed. + [self updateEncoderBitrateAndFrameRate]; OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession, pixelBuffer, @@ -526,14 +583,19 @@ - (void)setCallback:(RTCVideoEncoderCallback)callback { } - (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate { - _targetBitrateBps = 1000 * bitrateKbit; - _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps); + // set target bitrate bps + _targetBitrateBps = bitrateKbit * 1000; + + RTC_LOG(LS_INFO) << "setBitrateKBit: " << bitrateKbit << " targetBps: " << _targetBitrateBps + << " frameRate: " << framerate; + if (framerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) { RTC_LOG(LS_WARNING) << "Encoder frame rate setting " << framerate << " is larger than the " << "maximal allowed frame rate " << _maxAllowedFrameRate << "."; } - framerate = MIN(framerate, _maxAllowedFrameRate); - [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:framerate]; + + _targetFrameRate = MIN(framerate, _maxAllowedFrameRate); + return WEBRTC_VIDEO_CODEC_OK; } @@ -585,7 +647,8 @@ - (BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) * CVPixelBufferPoolRef pixelBufferPool = VTCompressionSessionGetPixelBufferPool(_compressionSession); if (!pixelBufferPool) { - return NO; + [self resetCompressionSessionWithPixelFormat:framePixelFormat]; + return YES; } NSDictionary *poolAttributes = @@ -631,14 +694,19 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { (NSString *)kCVPixelBufferPixelFormatTypeKey : @(framePixelFormat), }; - NSDictionary *encoder_specs; + NSMutableDictionary *encoder_specs; #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) // Currently hw accl is supported above 360p on mac, below 360p // the compression session will be created with hw accl disabled. 
- encoder_specs = @{ + encoder_specs = [@{ (NSString *)kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder : @(YES), - }; - + } mutableCopy]; + // Enable low-latency video encoding + if (@available(iOS 14.5, macOS 11.3, *)) { + [encoder_specs addEntriesFromDictionary:@{ + (NSString *)kVTVideoEncoderSpecification_EnableLowLatencyRateControl : @(YES), + }]; + } #endif OSStatus status = VTCompressionSessionCreate( nullptr, // use default allocator @@ -675,11 +743,30 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { - (void)configureCompressionSession { RTC_DCHECK(_compressionSession); SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true); - SetVTSessionProperty(_compressionSession, - kVTCompressionPropertyKey_ProfileLevel, - ExtractProfile(*_profile_level_id)); + // Sacrifice encoding speed over quality when necessary + if (@available(iOS 14.0, macOS 11.0, *)) { + SetVTSessionProperty( + _compressionSession, kVTCompressionPropertyKey_PrioritizeEncodingSpeedOverQuality, true); + } + // Set maximum QP for screen sharing mode, range must be within 1 to 51 + // https://developer.apple.com/documentation/videotoolbox/kvtcompressionpropertykey_maxallowedframeqp + if (@available(iOS 15.0, macOS 12.0, *)) { + // Only enable for screen sharing and let VideoToolbox do the optimizing as much as possible. 
+ if (_codecMode == RTCVideoCodecModeScreensharing) { + RTC_LOG(LS_INFO) << "Configuring VideoToolbox to use maxQP: " << kHighH264QpThreshold + << " mode: " << _codecMode; + SetVTSessionProperty( + _compressionSession, kVTCompressionPropertyKey_MaxAllowedFrameQP, kHighH264QpThreshold); + } + } + SetVTSessionProperty( + _compressionSession, + kVTCompressionPropertyKey_ProfileLevel, + ExtractProfile(*_profile_level_id, _codecMode == RTCVideoCodecModeScreensharing)); SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false); - [self setEncoderBitrateBps:_targetBitrateBps frameRate:_encoderFrameRate]; + + // [self updateEncoderBitrateAndFrameRate]; + // TODO(tkchin): Look at entropy mode and colorspace matrices. // TODO(tkchin): Investigate to see if there's any way to make this work. // May need it to interop with Android. Currently this call just fails. @@ -706,49 +793,59 @@ - (NSString *)implementationName { return @"VideoToolbox"; } -- (void)setBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate { - if (_encoderBitrateBps != bitrateBps || _encoderFrameRate != frameRate) { - [self setEncoderBitrateBps:bitrateBps frameRate:frameRate]; +- (void)updateEncoderBitrateAndFrameRate { + // If no compression session simply return + if (!_compressionSession) { + return; } -} + // Initial status + OSStatus status = noErr; -- (void)setEncoderBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate { - if (_compressionSession) { - SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, bitrateBps); + uint32_t computedBitrateBps = _targetBitrateBps; - // With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate detection. - if (_maxAllowedFrameRate > 0) { - SetVTSessionProperty( - _compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, frameRate); - } + // With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate detection. 
+ uint32_t computedFrameRate = _maxAllowedFrameRate > 0 ? _targetFrameRate : 0; - // TODO(tkchin): Add a helper method to set array value. - int64_t dataLimitBytesPerSecondValue = - static_cast(bitrateBps * kLimitToAverageBitRateFactor / 8); - CFNumberRef bytesPerSecond = - CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &dataLimitBytesPerSecondValue); - int64_t oneSecondValue = 1; - CFNumberRef oneSecond = - CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue); - const void *nums[2] = {bytesPerSecond, oneSecond}; - CFArrayRef dataRateLimits = CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks); - OSStatus status = VTSessionSetProperty( - _compressionSession, kVTCompressionPropertyKey_DataRateLimits, dataRateLimits); - if (bytesPerSecond) { - CFRelease(bytesPerSecond); - } - if (oneSecond) { - CFRelease(oneSecond); + // Set frame rate + if (computedFrameRate != _encoderFrameRate) { + status = VTSessionSetProperty(_compressionSession, + kVTCompressionPropertyKey_ExpectedFrameRate, + (__bridge CFTypeRef) @(computedFrameRate)); + // Ensure the bitrate was set successfully + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to set frame rate: " << computedFrameRate + << " error: " << status; + } else { + RTC_LOG(LS_INFO) << "Did update encoder frame rate: " << computedFrameRate; } - if (dataRateLimits) { - CFRelease(dataRateLimits); + _encoderFrameRate = computedFrameRate; + } + + // Set bitrate + if (computedBitrateBps != _encoderBitrateBps) { + status = VTSessionSetProperty(_compressionSession, + kVTCompressionPropertyKey_AverageBitRate, + (__bridge CFTypeRef) @(computedBitrateBps)); + + // Ensure the bitrate was set successfully + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to update encoder bitrate: " << computedBitrateBps + << "error: " << status; + } else { + RTC_LOG(LS_INFO) << "Did update encoder bitrate: " << computedBitrateBps; } + + status = VTSessionSetProperty( + _compressionSession, + 
kVTCompressionPropertyKey_DataRateLimits, + (__bridge CFArrayRef)CreateRateLimitArray(computedBitrateBps, _encodeMode)); if (status != noErr) { - RTC_LOG(LS_ERROR) << "Failed to set data rate limit with code: " << status; + RTC_LOG(LS_ERROR) << "Failed to update encoder data rate limits"; + } else { + RTC_LOG(LS_INFO) << "Did update encoder data rate limits"; } - _encoderBitrateBps = bitrateBps; - _encoderFrameRate = frameRate; + _encoderBitrateBps = computedBitrateBps; } } @@ -804,8 +901,9 @@ - (void)frameWasEncoded:(OSStatus)status frame.captureTimeMs = renderTimeMs; frame.timeStamp = timestamp; frame.rotation = rotation; - frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare : - RTCVideoContentTypeUnspecified; + frame.contentType = (_codecMode == RTCVideoCodecModeScreensharing) ? + RTCVideoContentTypeScreenshare : + RTCVideoContentTypeUnspecified; frame.flags = webrtc::VideoSendTiming::kInvalid; _h264BitstreamParser.ParseBitstream(*buffer); @@ -816,7 +914,6 @@ - (void)frameWasEncoded:(OSStatus)status RTC_LOG(LS_ERROR) << "Encode callback failed"; return; } - _bitrateAdjuster->Update(frame.buffer.length); } - (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings { @@ -826,3 +923,4 @@ - (void)frameWasEncoded:(OSStatus)status } @end + diff --git a/sdk/objc/helpers/RTCYUVHelper.h b/sdk/objc/helpers/RTCYUVHelper.h new file mode 100644 index 0000000000..ec8ce48355 --- /dev/null +++ b/sdk/objc/helpers/RTCYUVHelper.h @@ -0,0 +1,118 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import + +#import "RTCMacros.h" +#import "RTCVideoFrame.h" + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCYUVHelper) : NSObject + +- (instancetype)init NS_UNAVAILABLE; + ++ (void)I420Rotate:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstU:(uint8_t*)dstU + dstStrideU:(int)dstStrideU + dstV:(uint8_t*)dstV + dstStrideV:(int)dstStrideV + width:(int)width + height:(int)height + mode:(RTCVideoRotation)mode; + ++ (int)I420ToNV12:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + height:(int)height; + ++ (int)I420ToNV21:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + height:(int)height; + ++ (int)I420ToARGB:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstARGB:(uint8_t*)dstARGB + dstStrideARGB:(int)dstStrideARGB + width:(int)width + height:(int)height; + ++ (int)I420ToBGRA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstBGRA:(uint8_t*)dstBGRA + dstStrideBGRA:(int)dstStrideBGRA + width:(int)width + height:(int)height; + ++ (int)I420ToABGR:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + 
srcStrideV:(int)srcStrideV + dstABGR:(uint8_t*)dstABGR + dstStrideABGR:(int)dstStrideABGR + width:(int)width + height:(int)height; + ++ (int)I420ToRGBA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGBA:(uint8_t*)dstRGBA + dstStrideRGBA:(int)dstStrideRGBA + width:(int)width + height:(int)height; + ++ (int)I420ToRGB24:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGB24:(uint8_t*)dstRGB24 + dstStrideRGB24:(int)dstStrideRGB24 + width:(int)width + height:(int)height; + +@end diff --git a/sdk/objc/helpers/RTCYUVHelper.mm b/sdk/objc/helpers/RTCYUVHelper.mm new file mode 100644 index 0000000000..4a39d469da --- /dev/null +++ b/sdk/objc/helpers/RTCYUVHelper.mm @@ -0,0 +1,179 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import "RTCYUVHelper.h" + +#include "third_party/libyuv/include/libyuv.h" + +@implementation RTC_OBJC_TYPE (RTCYUVHelper) + ++ (void)I420Rotate:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstU:(uint8_t*)dstU + dstStrideU:(int)dstStrideU + dstV:(uint8_t*)dstV + dstStrideV:(int)dstStrideV + width:(int)width + height:(int)height + mode:(RTCVideoRotation)mode { + libyuv::I420Rotate(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstU, + dstStrideU, + dstV, + dstStrideV, + width, + height, + (libyuv::RotationMode)mode); +} + ++ (int)I420ToNV12:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + height:(int)height { + return libyuv::I420ToNV12(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstUV, + dstStrideUV, + width, + height); +} + ++ (int)I420ToNV21:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + height:(int)height { + return libyuv::I420ToNV21(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstUV, + dstStrideUV, + width, + height); +} + ++ (int)I420ToARGB:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstARGB:(uint8_t*)dstARGB + dstStrideARGB:(int)dstStrideARGB + 
width:(int)width + height:(int)height { + return libyuv::I420ToARGB( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstARGB, dstStrideARGB, width, height); +} + ++ (int)I420ToBGRA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstBGRA:(uint8_t*)dstBGRA + dstStrideBGRA:(int)dstStrideBGRA + width:(int)width + height:(int)height { + return libyuv::I420ToBGRA( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstBGRA, dstStrideBGRA, width, height); +} + ++ (int)I420ToABGR:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstABGR:(uint8_t*)dstABGR + dstStrideABGR:(int)dstStrideABGR + width:(int)width + height:(int)height { + return libyuv::I420ToABGR( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstABGR, dstStrideABGR, width, height); +} + ++ (int)I420ToRGBA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGBA:(uint8_t*)dstRGBA + dstStrideRGBA:(int)dstStrideRGBA + width:(int)width + height:(int)height { + return libyuv::I420ToRGBA( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstRGBA, dstStrideRGBA, width, height); +} + ++ (int)I420ToRGB24:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGB24:(uint8_t*)dstRGB24 + dstStrideRGB24:(int)dstStrideRGB24 + width:(int)width + height:(int)height { + return libyuv::I420ToRGB24(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstRGB24, + dstStrideRGB24, + width, + height); +} + +@end diff --git a/sdk/objc/native/api/audio_device_module.mm b/sdk/objc/native/api/audio_device_module.mm index 
4e7b681e69..ada25bd9ee 100644 --- a/sdk/objc/native/api/audio_device_module.mm +++ b/sdk/objc/native/api/audio_device_module.mm @@ -13,7 +13,11 @@ #include "api/make_ref_counted.h" #include "rtc_base/logging.h" +#if defined(WEBRTC_IOS) #include "sdk/objc/native/src/audio/audio_device_module_ios.h" +#endif + +#include "modules/audio_device/include/audio_device.h" namespace webrtc { diff --git a/sdk/objc/native/api/video_capturer.mm b/sdk/objc/native/api/video_capturer.mm index a7260ab802..d5cf6fd563 100644 --- a/sdk/objc/native/api/video_capturer.mm +++ b/sdk/objc/native/api/video_capturer.mm @@ -20,7 +20,7 @@ RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer, rtc::Thread *signaling_thread, rtc::Thread *worker_thread) { - RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init]; + RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) *adapter = [[RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) alloc] init]; rtc::scoped_refptr objc_video_track_source = rtc::make_ref_counted(adapter); rtc::scoped_refptr video_source = diff --git a/sdk/objc/native/src/audio/audio_device_ios.h b/sdk/objc/native/src/audio/audio_device_ios.h index a86acb56fe..69f04164e7 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.h +++ b/sdk/objc/native/src/audio/audio_device_ios.h @@ -25,7 +25,7 @@ #include "sdk/objc/base/RTCMacros.h" #include "voice_processing_audio_unit.h" -RTC_FWD_DECL_OBJC_CLASS(RTCNativeAudioSessionDelegateAdapter); +RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter)); namespace webrtc { @@ -172,6 +172,8 @@ class AudioDeviceIOS : public AudioDeviceGeneric, void HandlePlayoutGlitchDetected(); void HandleOutputVolumeChange(); + bool RestartAudioUnit(bool enable_input); + // Uses current `playout_parameters_` and `record_parameters_` to inform the // audio device buffer (ADB) about our internal audio parameters. 
void UpdateAudioDeviceBuffer(); @@ -200,7 +202,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric, // Activates our audio session, creates and initializes the voice-processing // audio unit and verifies that we got the preferred native audio parameters. - bool InitPlayOrRecord(); + bool InitPlayOrRecord(bool enable_input); // Closes and deletes the voice-processing I/O unit. void ShutdownPlayOrRecord(); @@ -260,24 +262,24 @@ class AudioDeviceIOS : public AudioDeviceGeneric, // will be changed dynamically to account for this behavior. rtc::BufferT record_audio_buffer_; + bool recording_is_initialized_; + // Set to 1 when recording is active and 0 otherwise. std::atomic recording_; + bool playout_is_initialized_; + // Set to 1 when playout is active and 0 otherwise. std::atomic playing_; // Set to true after successful call to Init(), false otherwise. bool initialized_ RTC_GUARDED_BY(thread_); - // Set to true after successful call to InitRecording() or InitPlayout(), - // false otherwise. - bool audio_is_initialized_; - // Set to true if audio session is interrupted, false otherwise. bool is_interrupted_; // Audio interruption observer instance. - RTCNativeAudioSessionDelegateAdapter* audio_session_observer_ + RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter)* audio_session_observer_ RTC_GUARDED_BY(thread_); // Set to true if we've activated the audio session. 
diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index dd2c11bdd2..3211ce69d5 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -94,10 +94,11 @@ static void LogDeviceInfo() { : bypass_voice_processing_(bypass_voice_processing), audio_device_buffer_(nullptr), audio_unit_(nullptr), + recording_is_initialized_(false), recording_(0), + playout_is_initialized_(false), playing_(0), initialized_(false), - audio_is_initialized_(false), is_interrupted_(false), has_configured_session_(false), num_detected_playout_glitches_(0), @@ -109,7 +110,7 @@ static void LogDeviceInfo() { io_thread_checker_.Detach(); thread_ = rtc::Thread::Current(); - audio_session_observer_ = [[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:this]; + audio_session_observer_ = [[RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) alloc] initWithObserver:this]; } AudioDeviceIOS::~AudioDeviceIOS() { @@ -176,48 +177,57 @@ static void LogDeviceInfo() { LOGI() << "InitPlayout"; RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(initialized_); - RTC_DCHECK(!audio_is_initialized_); + RTC_DCHECK(!playout_is_initialized_); RTC_DCHECK(!playing_.load()); - if (!audio_is_initialized_) { - if (!InitPlayOrRecord()) { + if (!recording_is_initialized_) { + // recording not initialized yet, init with no input + if (!InitPlayOrRecord(false)) { RTC_LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitPlayout!"; return -1; } } - audio_is_initialized_ = true; + + playout_is_initialized_ = true; + return 0; } bool AudioDeviceIOS::PlayoutIsInitialized() const { RTC_DCHECK_RUN_ON(thread_); - return audio_is_initialized_; + return playout_is_initialized_; } bool AudioDeviceIOS::RecordingIsInitialized() const { RTC_DCHECK_RUN_ON(thread_); - return audio_is_initialized_; + return recording_is_initialized_; } int32_t AudioDeviceIOS::InitRecording() { LOGI() << "InitRecording"; RTC_DCHECK_RUN_ON(thread_); 
RTC_DCHECK(initialized_); - RTC_DCHECK(!audio_is_initialized_); + RTC_DCHECK(!recording_is_initialized_); RTC_DCHECK(!recording_.load()); - if (!audio_is_initialized_) { - if (!InitPlayOrRecord()) { + if (!playout_is_initialized_) { + // playout not initialized yet, init with input + if (!InitPlayOrRecord(true)) { RTC_LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitRecording!"; return -1; } + } else { + // playout already initialized, restart audio unit with input + RestartAudioUnit(true); } - audio_is_initialized_ = true; + + recording_is_initialized_ = true; + return 0; } int32_t AudioDeviceIOS::StartPlayout() { LOGI() << "StartPlayout"; RTC_DCHECK_RUN_ON(thread_); - RTC_DCHECK(audio_is_initialized_); + RTC_DCHECK(playout_is_initialized_); RTC_DCHECK(!playing_.load()); RTC_DCHECK(audio_unit_); if (fine_audio_buffer_) { @@ -242,14 +252,16 @@ static void LogDeviceInfo() { int32_t AudioDeviceIOS::StopPlayout() { LOGI() << "StopPlayout"; RTC_DCHECK_RUN_ON(thread_); - if (!audio_is_initialized_ || !playing_.load()) { + if (!playout_is_initialized_ || !playing_.load()) { return 0; } if (!recording_.load()) { ShutdownPlayOrRecord(); - audio_is_initialized_ = false; + + recording_is_initialized_ = false; } playing_.store(0, std::memory_order_release); + playout_is_initialized_ = false; // Derive average number of calls to OnGetPlayoutData() between detected // audio glitches and add the result to a histogram. 
@@ -273,7 +285,7 @@ static void LogDeviceInfo() { int32_t AudioDeviceIOS::StartRecording() { LOGI() << "StartRecording"; RTC_DCHECK_RUN_ON(thread_); - RTC_DCHECK(audio_is_initialized_); + RTC_DCHECK(recording_is_initialized_); RTC_DCHECK(!recording_.load()); RTC_DCHECK(audio_unit_); if (fine_audio_buffer_) { @@ -296,14 +308,19 @@ static void LogDeviceInfo() { int32_t AudioDeviceIOS::StopRecording() { LOGI() << "StopRecording"; RTC_DCHECK_RUN_ON(thread_); - if (!audio_is_initialized_ || !recording_.load()) { + if (!recording_is_initialized_ || !recording_.load()) { return 0; } if (!playing_.load()) { ShutdownPlayOrRecord(); - audio_is_initialized_ = false; + + playout_is_initialized_ = false; + } else if (playout_is_initialized_) { + // restart audio unit with no input + RestartAudioUnit(false); } recording_.store(0, std::memory_order_release); + recording_is_initialized_ = false; return 0; } @@ -445,7 +462,7 @@ static void LogDeviceInfo() { // Exclude extreme delta values since they do most likely not correspond // to a real glitch. Instead, the most probable cause is that a headset // has been plugged in or out. There are more direct ways to detect - // audio device changes (see HandleValidRouteChange()) but experiments + // audio device changes (see ValidRouteChange()) but experiments // show that using it leads to more complex implementations. // TODO(henrika): more tests might be needed to come up with an even // better upper limit. @@ -579,7 +596,7 @@ static void LogDeviceInfo() { SetupAudioBuffersForActiveAudioSession(); // Initialize the audio unit again with the new sample rate. 
- if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) { + if (!audio_unit_->Initialize(playout_parameters_.sample_rate(), recording_is_initialized_)) { RTCLogError(@"Failed to initialize the audio unit with sample rate: %d", playout_parameters_.sample_rate()); return; @@ -633,6 +650,46 @@ static void LogDeviceInfo() { last_output_volume_change_time_ = rtc::TimeMillis(); } +bool AudioDeviceIOS::RestartAudioUnit(bool enable_input) { + RTC_DCHECK_RUN_ON(&io_thread_checker_); + + LOGI() << "RestartAudioUnit"; + + // If we don't have an audio unit yet, or the audio unit is uninitialized, + // there is no work to do. + if (!audio_unit_ || audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) { + return false; + } + + bool restart_audio_unit = false; + if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) { + audio_unit_->Stop(); + PrepareForNewStart(); + restart_audio_unit = true; + } + + if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) { + audio_unit_->Uninitialize(); + } + + // Initialize the audio unit again with the same sample rate. + const double sample_rate = playout_parameters_.sample_rate(); + + if (!audio_unit_->Initialize(sample_rate, enable_input)) { + RTCLogError(@"Failed to initialize the audio unit with sample rate: %f", sample_rate); + return false; + } + + // Restart the audio unit if it was already running. + if (restart_audio_unit && !audio_unit_->Start()) { + RTCLogError(@"Failed to start audio unit with sample rate: %f", sample_rate); + return false; + } + + LOGI() << "Successfully enabled audio unit for recording."; + return true; +} + void AudioDeviceIOS::UpdateAudioDeviceBuffer() { LOGI() << "UpdateAudioDevicebuffer"; // AttachAudioBuffer() is called at construction by the main class but check @@ -726,7 +783,7 @@ static void LogDeviceInfo() { // If we're not initialized we don't need to do anything. Audio unit will // be initialized on initialization. 
- if (!audio_is_initialized_) return; + if (!playout_is_initialized_ && !recording_is_initialized_) return; // If we're initialized, we must have an audio unit. RTC_DCHECK(audio_unit_); @@ -764,7 +821,7 @@ static void LogDeviceInfo() { RTCLog(@"Initializing audio unit for UpdateAudioUnit"); ConfigureAudioSession(); SetupAudioBuffersForActiveAudioSession(); - if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) { + if (!audio_unit_->Initialize(playout_parameters_.sample_rate(), recording_is_initialized_)) { RTCLogError(@"Failed to initialize audio unit."); return; } @@ -854,7 +911,7 @@ static void LogDeviceInfo() { RTCLog(@"Unconfigured audio session."); } -bool AudioDeviceIOS::InitPlayOrRecord() { +bool AudioDeviceIOS::InitPlayOrRecord(bool enable_input) { LOGI() << "InitPlayOrRecord"; RTC_DCHECK_RUN_ON(thread_); @@ -890,7 +947,7 @@ static void LogDeviceInfo() { return false; } SetupAudioBuffersForActiveAudioSession(); - audio_unit_->Initialize(playout_parameters_.sample_rate()); + audio_unit_->Initialize(playout_parameters_.sample_rate(), enable_input); } // Release the lock. 
diff --git a/sdk/objc/native/src/audio/audio_device_module_ios.h b/sdk/objc/native/src/audio/audio_device_module_ios.h index 189d7e6c9c..2f9b95a0a8 100644 --- a/sdk/objc/native/src/audio/audio_device_module_ios.h +++ b/sdk/objc/native/src/audio/audio_device_module_ios.h @@ -129,6 +129,9 @@ class AudioDeviceModuleIOS : public AudioDeviceModule { int GetPlayoutAudioParameters(AudioParameters* params) const override; int GetRecordAudioParameters(AudioParameters* params) const override; #endif // WEBRTC_IOS + + int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const override; + private: const bool bypass_voice_processing_; bool initialized_ = false; diff --git a/sdk/objc/native/src/audio/audio_device_module_ios.mm b/sdk/objc/native/src/audio/audio_device_module_ios.mm index 5effef3abd..5f93a06ee8 100644 --- a/sdk/objc/native/src/audio/audio_device_module_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_module_ios.mm @@ -665,5 +665,11 @@ return r; } #endif // WEBRTC_IOS + + int32_t AudioDeviceModuleIOS::SetAudioDeviceSink(AudioDeviceSink* sink) const { + // not implemented + RTC_LOG(LS_WARNING) << __FUNCTION__ << "(" << sink << ") Not implemented"; + return -1; + } } } diff --git a/sdk/objc/native/src/audio/voice_processing_audio_unit.h b/sdk/objc/native/src/audio/voice_processing_audio_unit.h index ed9dd98568..b474cda104 100644 --- a/sdk/objc/native/src/audio/voice_processing_audio_unit.h +++ b/sdk/objc/native/src/audio/voice_processing_audio_unit.h @@ -75,7 +75,7 @@ class VoiceProcessingAudioUnit { VoiceProcessingAudioUnit::State GetState() const; // Initializes the underlying audio unit with the given sample rate. - bool Initialize(Float64 sample_rate); + bool Initialize(Float64 sample_rate, bool enable_input); // Starts the underlying audio unit. 
OSStatus Start(); diff --git a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm index 3905b6857a..b3daacb334 100644 --- a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm +++ b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm @@ -111,19 +111,6 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { return false; } - // Enable input on the input scope of the input element. - UInt32 enable_input = 1; - result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, - kAudioUnitScope_Input, kInputBus, &enable_input, - sizeof(enable_input)); - if (result != noErr) { - DisposeAudioUnit(); - RTCLogError(@"Failed to enable input on input scope of input element. " - "Error=%ld.", - (long)result); - return false; - } - // Enable output on the output scope of the output element. UInt32 enable_output = 1; result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, @@ -193,7 +180,7 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { return state_; } -bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) { +bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate, bool enable_input) { RTC_DCHECK_GE(state_, kUninitialized); RTCLog(@"Initializing audio unit with sample rate: %f", sample_rate); @@ -204,6 +191,19 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { LogStreamDescription(format); #endif + UInt32 _enable_input = enable_input ? 1 : 0; + RTCLog(@"Initializing AudioUnit, _enable_input=%d", (int) _enable_input); + result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Input, kInputBus, &_enable_input, + sizeof(_enable_input)); + if (result != noErr) { + DisposeAudioUnit(); + RTCLogError(@"Failed to enable input on input scope of input element. 
" + "Error=%ld.", + (long)result); + return false; + } + // Set the format on the output scope of the input element/bus. result = AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat, diff --git a/sdk/objc/native/src/objc_audio_device.h b/sdk/objc/native/src/objc_audio_device.h index fcfe7a6e8b..88f6f19f99 100644 --- a/sdk/objc/native/src/objc_audio_device.h +++ b/sdk/objc/native/src/objc_audio_device.h @@ -19,7 +19,7 @@ #include "modules/audio_device/include/audio_device.h" #include "rtc_base/thread.h" -@class ObjCAudioDeviceDelegate; +@class RTC_OBJC_TYPE(ObjCAudioDeviceDelegate); namespace webrtc { @@ -267,7 +267,7 @@ class ObjCAudioDeviceModule : public AudioDeviceModule { rtc::BufferT record_audio_buffer_; // Delegate object provided to RTCAudioDevice during initialization - ObjCAudioDeviceDelegate* audio_device_delegate_; + RTC_OBJC_TYPE(ObjCAudioDeviceDelegate)* audio_device_delegate_; }; } // namespace objc_adm diff --git a/sdk/objc/native/src/objc_audio_device.mm b/sdk/objc/native/src/objc_audio_device.mm index d629fae20f..5fb72d8a5c 100644 --- a/sdk/objc/native/src/objc_audio_device.mm +++ b/sdk/objc/native/src/objc_audio_device.mm @@ -77,7 +77,7 @@ if (![audio_device_ isInitialized]) { if (audio_device_delegate_ == nil) { - audio_device_delegate_ = [[ObjCAudioDeviceDelegate alloc] + audio_device_delegate_ = [[RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) alloc] initWithAudioDeviceModule:rtc::scoped_refptr(this) audioDeviceThread:thread_]; } diff --git a/sdk/objc/native/src/objc_audio_device_delegate.h b/sdk/objc/native/src/objc_audio_device_delegate.h index 3af079dad9..0b546f269c 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.h +++ b/sdk/objc/native/src/objc_audio_device_delegate.h @@ -22,7 +22,7 @@ class ObjCAudioDeviceModule; } // namespace objc_adm } // namespace webrtc -@interface ObjCAudioDeviceDelegate : NSObject +@interface RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) : NSObject - (instancetype)initWithAudioDeviceModule: 
(rtc::scoped_refptr)audioDeviceModule diff --git a/sdk/objc/native/src/objc_audio_device_delegate.mm b/sdk/objc/native/src/objc_audio_device_delegate.mm index 156d6326a4..f4c8cfb71a 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.mm +++ b/sdk/objc/native/src/objc_audio_device_delegate.mm @@ -55,7 +55,7 @@ } // namespace -@implementation ObjCAudioDeviceDelegate { +@implementation RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) { rtc::scoped_refptr impl_; } diff --git a/sdk/objc/native/src/objc_desktop_capture.h b/sdk/objc/native/src/objc_desktop_capture.h new file mode 100644 index 0000000000..a781457220 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_capture.h @@ -0,0 +1,70 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_CAPTURE_H_ +#define SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_CAPTURE_H_ + +#import "base/RTCMacros.h" + +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_and_cursor_composer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "rtc_base/thread.h" + +@protocol RTC_OBJC_TYPE +(DesktopCapturerDelegate); + +namespace webrtc { + +enum DesktopType { kScreen, kWindow }; + +class ObjCDesktopCapturer : public DesktopCapturer::Callback { + public: + enum CaptureState { CS_RUNNING, CS_STOPPED, CS_FAILED}; + + public: + ObjCDesktopCapturer(DesktopType type, + webrtc::DesktopCapturer::SourceId source_id, + id delegate); + virtual ~ObjCDesktopCapturer(); + + virtual CaptureState Start(uint32_t fps); + + virtual void Stop(); + + virtual bool IsRunning(); + + protected: + virtual void OnCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) override; + private: + void CaptureFrame(); + webrtc::DesktopCaptureOptions options_; + std::unique_ptr capturer_; + std::unique_ptr thread_; + CaptureState capture_state_ = CS_STOPPED; + DesktopType type_; + webrtc::DesktopCapturer::SourceId source_id_; + id delegate_; + uint32_t capture_delay_ = 1000; // 1s + webrtc::DesktopCapturer::Result result_ = webrtc::DesktopCapturer::Result::SUCCESS; +}; + +} // namespace webrtc + +#endif // SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_CAPTURE_H_ diff --git a/sdk/objc/native/src/objc_desktop_capture.mm b/sdk/objc/native/src/objc_desktop_capture.mm new file mode 100644 index 0000000000..7aba3e5612 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_capture.mm @@ -0,0 +1,205 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "api/sequence_checker.h" +#include "rtc_base/checks.h" +#include "sdk/objc/native/src/objc_desktop_capture.h" +#include "sdk/objc/native/src/objc_video_frame.h" +#include "third_party/libyuv/include/libyuv.h" + +#import "components/capturer/RTCDesktopCapturer+Private.h" +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +namespace webrtc { + +enum { kCaptureDelay = 33, kCaptureMessageId = 1000 }; + +ObjCDesktopCapturer::ObjCDesktopCapturer(DesktopType type, + webrtc::DesktopCapturer::SourceId source_id, + id delegate) + : thread_(rtc::Thread::Create()), source_id_(source_id), delegate_(delegate) { + RTC_DCHECK(thread_); + type_ = type; + thread_->Start(); + options_ = webrtc::DesktopCaptureOptions::CreateDefault(); + options_.set_detect_updated_region(true); + options_.set_allow_iosurface(true); + thread_->BlockingCall([this, type] { + if (type == kScreen) { + capturer_ = std::make_unique( + webrtc::DesktopCapturer::CreateScreenCapturer(options_), options_); + } else { + capturer_ = std::make_unique( + webrtc::DesktopCapturer::CreateWindowCapturer(options_), options_); + } + }); +} + +ObjCDesktopCapturer::~ObjCDesktopCapturer() { + thread_->BlockingCall([this] { + capturer_.reset(); + }); +} + +ObjCDesktopCapturer::CaptureState ObjCDesktopCapturer::Start(uint32_t fps) { + if(capture_state_ == CS_RUNNING) { + return capture_state_; + } + + if(fps == 0) { + capture_state_ = CS_FAILED; + return capture_state_; + } + + if (fps >= 60) { + capture_delay_ = uint32_t(1000.0 / 60.0); + } else { + capture_delay_ = uint32_t(1000.0 
/ fps); + } + + if (source_id_ != -1) { + if (!capturer_->SelectSource(source_id_)) { + capture_state_ = CS_FAILED; + return capture_state_; + } + if (type_ == kWindow) { + if (!capturer_->FocusOnSelectedSource()) { + capture_state_ = CS_FAILED; + return capture_state_; + } + } + } + + thread_->BlockingCall([this] { + capturer_->Start(this); + }); + capture_state_ = CS_RUNNING; + + thread_->PostTask([this] { + CaptureFrame(); + }); + + [delegate_ didSourceCaptureStart]; + return capture_state_; +} + +void ObjCDesktopCapturer::Stop() { + [delegate_ didSourceCaptureStop]; + capture_state_ = CS_STOPPED; +} + +bool ObjCDesktopCapturer::IsRunning() { + return capture_state_ == CS_RUNNING; +} + +void ObjCDesktopCapturer::OnCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) { + if (result != result_) { + if (result == webrtc::DesktopCapturer::Result::ERROR_PERMANENT) { + [delegate_ didSourceCaptureError]; + capture_state_ = CS_FAILED; + return; + } + + if (result == webrtc::DesktopCapturer::Result::ERROR_TEMPORARY) { + result_ = result; + [delegate_ didSourceCapturePaused]; + return; + } + + if (result == webrtc::DesktopCapturer::Result::SUCCESS) { + result_ = result; + [delegate_ didSourceCaptureStart]; + } + } + + if (result == webrtc::DesktopCapturer::Result::ERROR_TEMPORARY) { + return; + } + + int width = frame->size().width(); + int height = frame->size().height(); + int real_width = width; + + if (type_ == kWindow) { + int multiple = 0; +#if defined(WEBRTC_ARCH_X86_FAMILY) + multiple = 16; +#elif defined(WEBRTC_ARCH_ARM64) + multiple = 32; +#endif + // A multiple of $multiple must be used as the width of the src frame, + // and the right black border needs to be cropped during conversion. 
+ if (multiple != 0 && (width % multiple) != 0) { + width = (width / multiple + 1) * multiple; + } + } + + CVPixelBufferRef pixelBuffer = NULL; + + NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVReturn res = CVPixelBufferCreate(kCFAllocatorDefault, + width, + height, + kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), + &pixelBuffer); + CVPixelBufferLockBaseAddress(pixelBuffer, 0); + uint8_t *pxdata = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer); + libyuv::ConvertToARGB(reinterpret_cast(frame->data()), + real_width * height * 4, + reinterpret_cast(pxdata), + width * 4, + 0, + 0, + width, + height, + real_width, + height, + libyuv::kRotate0, + libyuv::FOURCC_ARGB); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + if (res != kCVReturnSuccess) { + NSLog(@"Unable to create cvpixelbuffer %d", res); + return; + } + + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; + NSTimeInterval timeStampSeconds = CACurrentMediaTime(); + int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC); + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer + rotation:RTCVideoRotation_0 + timeStampNs:timeStampNs]; + CVPixelBufferRelease(pixelBuffer); + [delegate_ didCaptureVideoFrame:videoFrame]; +} + +void ObjCDesktopCapturer::CaptureFrame() { + RTC_DCHECK_RUN_ON(thread_.get()); + if (capture_state_ == CS_RUNNING) { + capturer_->CaptureFrame(); + thread_->PostDelayedHighPrecisionTask( + [this]() { + CaptureFrame(); + }, + TimeDelta::Millis(capture_delay_)); + } +} + +} // namespace webrtc diff --git a/sdk/objc/native/src/objc_desktop_media_list.h b/sdk/objc/native/src/objc_desktop_media_list.h new file mode 100644 index 0000000000..ecb2d27221 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_media_list.h @@ -0,0 +1,111 @@ +/* + * Copyright 2022 LiveKit + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_MEDIA_LIST_H_ +#define SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_MEDIA_LIST_H_ + +#import "base/RTCMacros.h" + +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "rtc_base/thread.h" + +#include "objc_desktop_capture.h" + +#import "components/capturer/RTCDesktopMediaList+Private.h" + +namespace webrtc { + +class MediaSource { + public: + MediaSource( ObjCDesktopMediaList *mediaList, DesktopCapturer::Source src, DesktopType type) + : source(src), mediaList_(mediaList), type_(type) {} + virtual ~MediaSource() {} + + DesktopCapturer::Source source; + + // source id + DesktopCapturer::SourceId id() const { return source.id; } + + // source name + std::string name() const { return source.title; } + + // Returns the thumbnail of the source, jpeg format. 
+ std::vector thumbnail() const { return thumbnail_; } + + + + DesktopType type() const { return type_; } + + bool UpdateThumbnail(); + + void SaveCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame); + + private: + std::vector thumbnail_; + ObjCDesktopMediaList *mediaList_; + DesktopType type_; +}; + +class ObjCDesktopMediaList { + public: + enum CaptureState { CS_RUNNING, CS_STOPPED, CS_FAILED}; + public: + ObjCDesktopMediaList(DesktopType type, RTC_OBJC_TYPE(RTCDesktopMediaList)* objcMediaList); + + virtual ~ObjCDesktopMediaList(); + + virtual int32_t UpdateSourceList(bool force_reload = false, bool get_thumbnail = true); + + virtual int GetSourceCount() const; + + virtual MediaSource* GetSource(int index); + + virtual bool GetThumbnail(MediaSource *source, bool notify); + + private: + class CallbackProxy : public DesktopCapturer::Callback { + public: + CallbackProxy(){} + void SetCallback(std::function frame)> on_capture_result) { + on_capture_result_ = on_capture_result; + } + private: + void OnCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) override { + if(on_capture_result_) on_capture_result_(result, std::move(frame)); + } + std::function frame)> on_capture_result_ = nullptr; + }; + private: + std::unique_ptr callback_; + webrtc::DesktopCaptureOptions options_; + std::unique_ptr capturer_; + std::unique_ptr thread_; + std::vector> sources_; + RTC_OBJC_TYPE(RTCDesktopMediaList)* objcMediaList_; + DesktopType type_; +}; + +} // namespace webrtc + +#endif // SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_MEDIA_LIST_H_ diff --git a/sdk/objc/native/src/objc_desktop_media_list.mm b/sdk/objc/native/src/objc_desktop_media_list.mm new file mode 100644 index 0000000000..574e47b6f3 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_media_list.mm @@ -0,0 +1,252 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "sdk/objc/native/src/objc_desktop_media_list.h" +#include "rtc_base/checks.h" +#include "sdk/objc/native/src/objc_video_frame.h" +#include "third_party/libyuv/include/libyuv.h" + +extern "C" { +#if defined(USE_SYSTEM_LIBJPEG) +#include +#else +// Include directory supplied by gn +#include "jpeglib.h" // NOLINT +#endif +} + +#include +#include + +#import + +namespace webrtc { + +ObjCDesktopMediaList::ObjCDesktopMediaList(DesktopType type, + RTC_OBJC_TYPE(RTCDesktopMediaList) * objcMediaList) + : thread_(rtc::Thread::Create()), objcMediaList_(objcMediaList), type_(type) { + RTC_DCHECK(thread_); + thread_->Start(); + options_ = webrtc::DesktopCaptureOptions::CreateDefault(); + options_.set_detect_updated_region(true); + options_.set_allow_iosurface(true); + + callback_ = std::make_unique(); + + thread_->BlockingCall([this, type] { + if (type == kScreen) { + capturer_ = webrtc::DesktopCapturer::CreateScreenCapturer(options_); + } else { + capturer_ = webrtc::DesktopCapturer::CreateWindowCapturer(options_); + } + capturer_->Start(callback_.get()); + }); +} + +ObjCDesktopMediaList::~ObjCDesktopMediaList() { + thread_->BlockingCall([this] { + capturer_.reset(); + }); +} + +int32_t ObjCDesktopMediaList::UpdateSourceList(bool force_reload, bool get_thumbnail) { + if (force_reload) { + for (auto source : sources_) { + [objcMediaList_ mediaSourceRemoved:source.get()]; + } + sources_.clear(); + } + + webrtc::DesktopCapturer::SourceList new_sources; + + thread_->BlockingCall([this, &new_sources] { + 
capturer_->GetSourceList(&new_sources); + }); + + typedef std::set SourceSet; + SourceSet new_source_set; + for (size_t i = 0; i < new_sources.size(); ++i) { + if (type_ == kScreen && new_sources[i].title.length() == 0) { + new_sources[i].title = std::string("Screen " + std::to_string(i + 1)); + } + new_source_set.insert(new_sources[i].id); + } + // Iterate through the old sources to find the removed sources. + for (size_t i = 0; i < sources_.size(); ++i) { + if (new_source_set.find(sources_[i]->id()) == new_source_set.end()) { + [objcMediaList_ mediaSourceRemoved:(*(sources_.begin() + i)).get()]; + sources_.erase(sources_.begin() + i); + --i; + } + } + // Iterate through the new sources to find the added sources. + if (new_sources.size() > sources_.size()) { + SourceSet old_source_set; + for (size_t i = 0; i < sources_.size(); ++i) { + old_source_set.insert(sources_[i]->id()); + } + for (size_t i = 0; i < new_sources.size(); ++i) { + if (old_source_set.find(new_sources[i].id) == old_source_set.end()) { + MediaSource *source = new MediaSource(this, new_sources[i], type_); + sources_.insert(sources_.begin() + i, std::shared_ptr(source)); + [objcMediaList_ mediaSourceAdded:source]; + GetThumbnail(source, true); + } + } + } + + RTC_DCHECK_EQ(new_sources.size(), sources_.size()); + + // Find the moved/changed sources. + size_t pos = 0; + while (pos < sources_.size()) { + if (!(sources_[pos]->id() == new_sources[pos].id)) { + // Find the source that should be moved to |pos|, starting from |pos + 1| + // of |sources_|, because entries before |pos| should have been sorted. + size_t old_pos = pos + 1; + for (; old_pos < sources_.size(); ++old_pos) { + if (sources_[old_pos]->id() == new_sources[pos].id) break; + } + RTC_DCHECK(sources_[old_pos]->id() == new_sources[pos].id); + + // Move the source from |old_pos| to |pos|. 
+ auto temp = sources_[old_pos]; + sources_.erase(sources_.begin() + old_pos); + sources_.insert(sources_.begin() + pos, temp); + //[objcMediaList_ mediaSourceMoved:old_pos newIndex:pos]; + } + + if (sources_[pos]->source.title != new_sources[pos].title) { + sources_[pos]->source.title = new_sources[pos].title; + [objcMediaList_ mediaSourceNameChanged:sources_[pos].get()]; + } + ++pos; + } + + if (get_thumbnail) { + for (auto source : sources_) { + GetThumbnail(source.get(), true); + } + } + return sources_.size(); +} + +bool ObjCDesktopMediaList::GetThumbnail(MediaSource *source, bool notify) { + thread_->PostTask([this, source, notify] { + if(capturer_->SelectSource(source->id())){ + callback_->SetCallback([&](webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) { + auto old_thumbnail = source->thumbnail(); + source->SaveCaptureResult(result, std::move(frame)); + if(old_thumbnail.size() != source->thumbnail().size() && notify) { + [objcMediaList_ mediaSourceThumbnailChanged:source]; + } + }); + capturer_->CaptureFrame(); + } + }); + + return true; +} + +int ObjCDesktopMediaList::GetSourceCount() const { + return sources_.size(); +} + +MediaSource *ObjCDesktopMediaList::GetSource(int index) { + return sources_[index].get(); +} + +bool MediaSource::UpdateThumbnail() { + return mediaList_->GetThumbnail(this, true); +} + +void MediaSource::SaveCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) { + if (result != webrtc::DesktopCapturer::Result::SUCCESS) { + return; + } + int width = frame->size().width(); + int height = frame->size().height(); + int real_width = width; + + if (type_ == kWindow) { + int multiple = 0; +#if defined(WEBRTC_ARCH_X86_FAMILY) + multiple = 16; +#elif defined(WEBRTC_ARCH_ARM64) + multiple = 32; +#endif + // A multiple of $multiple must be used as the width of the src frame, + // and the right black border needs to be cropped during conversion. 
+ if (multiple != 0 && (width % multiple) != 0) { + width = (width / multiple + 1) * multiple; + } + } + + CVPixelBufferRef pixelBuffer = NULL; + + NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVReturn res = CVPixelBufferCreate(kCFAllocatorDefault, + width, + height, + kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), + &pixelBuffer); + CVPixelBufferLockBaseAddress(pixelBuffer, 0); + uint8_t *pxdata = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer); + libyuv::ConvertToARGB(reinterpret_cast(frame->data()), + real_width * height * 4, + reinterpret_cast(pxdata), + width * 4, + 0, + 0, + width, + height, + real_width, + height, + libyuv::kRotate0, + libyuv::FOURCC_ARGB); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + if (res != kCVReturnSuccess) { + NSLog(@"Unable to create cvpixelbuffer %d", res); + return; + } + + CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer]; + CGRect outputSize = CGRectMake(0, 0, width, height); + + CIContext *tempContext = [CIContext contextWithOptions:nil]; + CGImageRef cgImage = [tempContext createCGImage:ciImage fromRect:outputSize]; + NSData *imageData; + NSBitmapImageRep *newRep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage]; + [newRep setSize:NSSizeToCGSize(outputSize.size)]; + imageData = [newRep representationUsingType:NSJPEGFileType + properties:@{ + NSImageCompressionFactor : @1.0f + }]; + + thumbnail_.resize(imageData.length); + const void *_Nullable rawData = [imageData bytes]; + char *src = (char *)rawData; + std::copy(src, src + imageData.length, thumbnail_.begin()); + + CGImageRelease(cgImage); + CVPixelBufferRelease(pixelBuffer); +} + +} // namespace webrtc diff --git a/sdk/objc/native/src/objc_network_monitor.h b/sdk/objc/native/src/objc_network_monitor.h index 709e9dfbe5..c5440d587b 100644 --- a/sdk/objc/native/src/objc_network_monitor.h +++ b/sdk/objc/native/src/objc_network_monitor.h @@ -59,7 +59,7 @@ class 
ObjCNetworkMonitor : public rtc::NetworkMonitorInterface, std::map adapter_type_by_name_ RTC_GUARDED_BY(thread_); rtc::scoped_refptr safety_flag_; - RTCNetworkMonitor* network_monitor_ = nil; + RTC_OBJC_TYPE(RTCNetworkMonitor) * network_monitor_ = nil; }; } // namespace webrtc diff --git a/sdk/objc/native/src/objc_network_monitor.mm b/sdk/objc/native/src/objc_network_monitor.mm index 535548c64c..e0785e6d0b 100644 --- a/sdk/objc/native/src/objc_network_monitor.mm +++ b/sdk/objc/native/src/objc_network_monitor.mm @@ -39,7 +39,7 @@ thread_ = rtc::Thread::Current(); RTC_DCHECK_RUN_ON(thread_); safety_flag_->SetAlive(); - network_monitor_ = [[RTCNetworkMonitor alloc] initWithObserver:this]; + network_monitor_ = [[RTC_OBJC_TYPE(RTCNetworkMonitor) alloc] initWithObserver:this]; if (network_monitor_ == nil) { RTC_LOG(LS_WARNING) << "Failed to create RTCNetworkMonitor; not available on this OS?"; } diff --git a/sdk/objc/native/src/objc_video_track_source.h b/sdk/objc/native/src/objc_video_track_source.h index 19a3d6db43..5fe39baade 100644 --- a/sdk/objc/native/src/objc_video_track_source.h +++ b/sdk/objc/native/src/objc_video_track_source.h @@ -19,7 +19,7 @@ RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame)); -@interface RTCObjCVideoSourceAdapter : NSObject +@interface RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) : NSObject @end namespace webrtc { @@ -28,7 +28,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource { public: ObjCVideoTrackSource(); explicit ObjCVideoTrackSource(bool is_screencast); - explicit ObjCVideoTrackSource(RTCObjCVideoSourceAdapter* adapter); + explicit ObjCVideoTrackSource(RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter)* adapter); bool is_screencast() const override; @@ -50,7 +50,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource { rtc::VideoBroadcaster broadcaster_; rtc::TimestampAligner timestamp_aligner_; - RTCObjCVideoSourceAdapter* adapter_; + RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter)* adapter_; bool is_screencast_; }; 
diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm index 7937e90505..401db1d111 100644 --- a/sdk/objc/native/src/objc_video_track_source.mm +++ b/sdk/objc/native/src/objc_video_track_source.mm @@ -17,11 +17,11 @@ #include "api/video/i420_buffer.h" #include "sdk/objc/native/src/objc_frame_buffer.h" -@interface RTCObjCVideoSourceAdapter () +@interface RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) () @property(nonatomic) webrtc::ObjCVideoTrackSource *objCVideoTrackSource; @end -@implementation RTCObjCVideoSourceAdapter +@implementation RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) @synthesize objCVideoTrackSource = _objCVideoTrackSource; @@ -40,7 +40,7 @@ - (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer : AdaptedVideoTrackSource(/* required resolution alignment */ 2), is_screencast_(is_screencast) {} -ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) : adapter_(adapter) { +ObjCVideoTrackSource::ObjCVideoTrackSource(RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) *adapter) : adapter_(adapter) { adapter_.objCVideoTrackSource = this; } diff --git a/sdk/objc/unittests/RTCMTLVideoView_xctest.m b/sdk/objc/unittests/RTCMTLVideoView_xctest.m index f152eeec91..159025803e 100644 --- a/sdk/objc/unittests/RTCMTLVideoView_xctest.m +++ b/sdk/objc/unittests/RTCMTLVideoView_xctest.m @@ -32,8 +32,8 @@ @interface RTC_OBJC_TYPE (RTCMTLVideoView) + (BOOL)isMetalAvailable; + (UIView *)createMetalView:(CGRect)frame; -+ (id)createNV12Renderer; -+ (id)createI420Renderer; ++ (id)createNV12Renderer; ++ (id)createI420Renderer; - (void)drawInMTKView:(id)view; @end @@ -91,7 +91,7 @@ - (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer { } - (id)rendererMockWithSuccessfulSetup:(BOOL)success { - id rendererMock = OCMClassMock([RTCMTLRenderer class]); + id rendererMock = OCMClassMock([RTC_OBJC_TYPE(RTCMTLRenderer) class]); OCMStub([rendererMock addRenderingDestination:[OCMArg any]]).andReturn(success); return 
rendererMock; } diff --git a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm index 5ba5a52a53..c4dda5aef1 100644 --- a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm +++ b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm @@ -46,7 +46,7 @@ - (void)testBuilder { nativeVideoDecoderFactory:nullptr audioDeviceModule:nullptr audioProcessingModule:nullptr]); - RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init]; + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder)* builder = [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); @@ -63,7 +63,7 @@ - (void)testDefaultComponentsBuilder { nativeVideoDecoderFactory:nullptr audioDeviceModule:nullptr audioProcessingModule:nullptr]); - RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder]; + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder)* builder = [RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) defaultBuilder]; RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); diff --git a/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc b/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc index b1cabc31ac..f3e0d1ad6c 100644 --- a/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc +++ b/test/fuzzers/rtp_frame_reference_finder_fuzzer.cc @@ -23,7 +23,7 @@ class DataReader { template void CopyTo(T* object) { - static_assert(std::is_pod(), ""); + static_assert(std::is_trivial(), ""); uint8_t* destination = reinterpret_cast(object); size_t object_size = sizeof(T); size_t num_bytes = std::min(size_ - offset_, object_size); diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc index e556ff9b8c..5fb3dd1b78 100644 --- 
a/video/video_stream_encoder.cc +++ b/video/video_stream_encoder.cc @@ -2042,9 +2042,15 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, was_encode_called_since_last_initialization_ = true; if (encode_status < 0) { - RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: " - << encoder_config_.video_format.ToString(); - RequestEncoderSwitch(); + if (encode_status == WEBRTC_VIDEO_CODEC_ENCODER_FAILURE) { + RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: " + << encoder_config_.video_format.ToString(); + RequestEncoderSwitch(); + } else { + RTC_LOG(LS_ERROR) << "Failed to encode frame. Error code: " + << encode_status; + } + return; }