diff --git a/.gitignore b/.gitignore index 9f8caa9a88..3ff72b28b1 100644 --- a/.gitignore +++ b/.gitignore @@ -73,3 +73,9 @@ /xcodebuild /.vscode !webrtc/* +/tmp.patch +/out-release +/out-debug +/node_modules +/libwebrtc +/args.txt diff --git a/DEPS b/DEPS index 61027df618..e79b1c27aa 100644 --- a/DEPS +++ b/DEPS @@ -75,7 +75,7 @@ deps = { 'src/base': 'https://chromium.googlesource.com/chromium/src/base@86c814633cf284bc8057a539bc722e2a672afe2f', 'src/build': - 'https://chromium.googlesource.com/chromium/src/build@88030b320338e0706b6b93336c4b35e6bbaf467e', + 'https://github.com/webrtc-sdk/build@9af2ddd8e5ad6278165cadfa554bea6f25081dd2', 'src/buildtools': 'https://chromium.googlesource.com/chromium/src/buildtools@0f32cb9025766951122d4ed19aba87a94ded3f43', # Gradle 6.6.1. Used for testing Android Studio project generation for WebRTC. diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000000..3972578ec4 --- /dev/null +++ b/NOTICE @@ -0,0 +1,26 @@ +################################################################################### + +The following modifications follow Apache License 2.0 from shiguredo. + +https://github.com/webrtc-sdk/webrtc/commit/dfec53e93a0a1cb93f444caf50f844ec0068c7b7 +https://github.com/webrtc-sdk/webrtc/commit/403b4678543c5d4ac77bd1ea5753c02637b3bb89 +https://github.com/webrtc-sdk/webrtc/commit/77d5d685a90fb4bded17835ae72ec6671b26d696 + +Apache License 2.0 + +Copyright 2019-2021, Wandbox LLC (Original Author) +Copyright 2019-2021, Shiguredo Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +##################################################################################### \ No newline at end of file diff --git a/README.md b/README.md index 32b9f16bad..1c862c4ca4 100644 --- a/README.md +++ b/README.md @@ -1,32 +1,47 @@ -**WebRTC is a free, open software project** that provides browsers and mobile -applications with Real-Time Communications (RTC) capabilities via simple APIs. -The WebRTC components have been optimized to best serve this purpose. +# WebRTC-SDK -**Our mission:** To enable rich, high-quality RTC applications to be -developed for the browser, mobile platforms, and IoT devices, and allow them -all to communicate via a common set of protocols. +This repository contains a fork of WebRTC from Google with various improvements. -The WebRTC initiative is a project supported by Google, Mozilla and Opera, -amongst others. +## Main changes -### Development +### All -See [here][native-dev] for instructions on how to get started -developing with the native code. +- Dynamically acquire decoder to mitigate decoder limitations [#25](https://github.com/webrtc-sdk/webrtc/pull/25) +- Support for video simulcast with hardware & software encoders [patch](https://github.com/webrtc-sdk/webrtc/commit/ee030264e2274a2c90548a99b448782049e48fb4) +- Frame cryptor support (for end-to-end encryption) [patch](https://github.com/webrtc-sdk/webrtc/commit/3a2c008529a15fecde5f979a6ebb75c05463d45e) -[Authoritative list](native-api.md) of directories that contain the -native API header files. 
+### Android
 
-### More info
 
+- WrappedVideoDecoderFactory [#74](https://github.com/webrtc-sdk/webrtc/pull/74)
 
- * Official web site: http://www.webrtc.org
- * Master source code repo: https://webrtc.googlesource.com/src
- * Samples and reference apps: https://github.com/webrtc
- * Mailing list: http://groups.google.com/group/discuss-webrtc
- * Continuous build: https://ci.chromium.org/p/webrtc/g/ci/console
- * [Coding style guide](g3doc/style-guide.md)
- * [Code of conduct](CODE_OF_CONDUCT.md)
- * [Reporting bugs](docs/bug-reporting.md)
- * [Documentation](g3doc/sitemap.md)
+### iOS / Mac
 
-[native-dev]: https://webrtc.googlesource.com/src/+/main/docs/native-code/
+- Sane audio handling [patch](https://github.com/webrtc-sdk/webrtc/commit/272127d457ab48e36241e82549870405864851f6)
+  - Do not acquire microphone/permissions unless actively publishing audio
+  - Ability to bypass voice processing on iOS
+  - Remove hardcoded limitation of outputting only to the right speaker on MacBook Pro
+- Desktop capture for Mac [patch](https://github.com/webrtc-sdk/webrtc/commit/8e832d1163644ab504412c9b8f3ba8510d9890d6)
+
+### Windows
+
+- Fixed being unable to acquire the mic when built-in AEC is enabled [#29](https://github.com/webrtc-sdk/webrtc/pull/29)
+
+## LICENSE
+
+- [Google WebRTC](https://chromium.googlesource.com/external/webrtc.git) is licensed under the [BSD license](/LICENSE).
+
+- Contains patches from [shiguredo-webrtc-build](https://github.com/shiguredo-webrtc-build), licensed under [Apache 2.0](/NOTICE).
+
+- Contains changes from LiveKit, licensed under Apache 2.0.
+
+## Who is using this project
+
+- [flutter-webrtc](https://github.com/flutter-webrtc/flutter-webrtc)
+
+- [LiveKit](https://github.com/livekit)
+
+- [Membrane Framework](https://github.com/membraneframework/membrane_rtc_engine)
+
+- [Louper](https://louper.io)
+
+Are you using WebRTC SDK in your framework or app? Feel free to open a PR and add yourself!
diff --git a/api/BUILD.gn b/api/BUILD.gn
index 7a3591881f..0d870e099f 100644
--- a/api/BUILD.gn
+++ b/api/BUILD.gn
@@ -382,6 +382,7 @@ rtc_library("libjingle_peerconnection_api") {
     "video:encoded_image",
     "video:video_bitrate_allocator_factory",
     "video:video_frame",
+    "video:yuv_helper",
     "video:video_rtp_headers",
     "video_codecs:video_codecs_api",
     "//third_party/abseil-cpp/absl/algorithm:container",
diff --git a/api/audio/audio_device.h b/api/audio/audio_device.h
index b22b021ec4..043ad0c023 100644
--- a/api/audio/audio_device.h
+++ b/api/audio/audio_device.h
@@ -22,6 +22,15 @@ namespace webrtc {
 
 class AudioDeviceModuleForTest;
 
+// Sink for callbacks related to an audio device.
+class AudioDeviceSink {
+ public:
+  virtual ~AudioDeviceSink() = default;
+
+  // Input/output devices were updated, or the default device changed.
+  virtual void OnDevicesUpdated() = 0;
+};
+
 class AudioDeviceModule : public webrtc::RefCountInterface {
  public:
   enum AudioLayer {
@@ -62,11 +71,11 @@ class AudioDeviceModule : public webrtc::RefCountInterface {
  public:
   // Creates a default ADM for usage in production code.
   static scoped_refptr<AudioDeviceModule> Create(
-      AudioLayer audio_layer, TaskQueueFactory* task_queue_factory);
+      AudioLayer audio_layer, TaskQueueFactory* task_queue_factory, bool bypass_voice_processing = false);
 
   // Creates an ADM with support for extra test methods. Don't use this factory
   // in production code.
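+  // Note: bypass_voice_processing (new in this fork) bypasses the platform
+  // voice-processing audio unit on iOS (see "Sane audio handling" above).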
  static scoped_refptr<AudioDeviceModuleForTest> CreateForTest(
-      AudioLayer audio_layer, TaskQueueFactory* task_queue_factory);
+      AudioLayer audio_layer, TaskQueueFactory* task_queue_factory, bool bypass_voice_processing = false);
 
   // Retrieve the currently utilized audio layer
   virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const = 0;
@@ -176,6 +185,10 @@ class AudioDeviceModule : public webrtc::RefCountInterface {
   virtual int GetRecordAudioParameters(AudioParameters* params) const = 0;
 #endif  // WEBRTC_IOS
 
+  virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const { return -1; }
+  virtual int32_t GetPlayoutDevice() const { return -1; }
+  virtual int32_t GetRecordingDevice() const { return -1; }
+
  protected:
   ~AudioDeviceModule() override {}
 };
diff --git a/api/crypto/BUILD.gn b/api/crypto/BUILD.gn
index 2970f341b1..94c2977f17 100644
--- a/api/crypto/BUILD.gn
+++ b/api/crypto/BUILD.gn
@@ -16,6 +16,24 @@ group("crypto") {
   ]
 }
 
+rtc_library("frame_crypto_transformer") {
+  visibility = [ "*" ]
+  sources = [
+    "frame_crypto_transformer.cc",
+    "frame_crypto_transformer.h",
+  ]
+
+  deps = [
+    "//api:frame_transformer_interface",
+  ]
+
+  if (rtc_build_ssl) {
+    deps += [ "//third_party/boringssl" ]
+  } else {
+    configs += [ ":external_ssl_library" ]
+  }
+}
+
 rtc_library("options") {
   visibility = [ "*" ]
   sources = [
diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc
new file mode 100644
index 0000000000..88c394ef10
--- /dev/null
+++ b/api/crypto/frame_crypto_transformer.cc
@@ -0,0 +1,693 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#include "frame_crypto_transformer.h" + +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" +#include "absl/types/variant.h" +#include "api/array_view.h" +#include "common_video/h264/h264_common.h" +#include "modules/rtp_rtcp/source/rtp_format_h264.h" +#include "rtc_base/byte_buffer.h" +#include "rtc_base/logging.h" + +enum class EncryptOrDecrypt { kEncrypt = 0, kDecrypt }; + +#define Success 0 +#define ErrorUnexpected -1 +#define OperationError -2 +#define ErrorDataTooSmall -3 +#define ErrorInvalidAesGcmTagLength -4 + +webrtc::VideoCodecType get_video_codec_type( + webrtc::TransformableFrameInterface* frame) { + auto videoFrame = + static_cast(frame); + return videoFrame->header().codec; +} + +webrtc::H264PacketizationMode get_h264_packetization_mode( + webrtc::TransformableFrameInterface* frame) { + auto video_frame = + static_cast(frame); + const auto& h264_header = absl::get( + video_frame->header().video_type_header); + return h264_header.packetization_mode; +} + +const EVP_AEAD* GetAesGcmAlgorithmFromKeySize(size_t key_size_bytes) { + switch (key_size_bytes) { + case 16: + return EVP_aead_aes_128_gcm(); + case 32: + return EVP_aead_aes_256_gcm(); + default: + return nullptr; + } +} + +const EVP_CIPHER* GetAesCbcAlgorithmFromKeySize(size_t key_size_bytes) { + switch (key_size_bytes) { + case 16: + return EVP_aes_128_cbc(); + case 32: + return EVP_aes_256_cbc(); + default: + return nullptr; + } +} + +inline bool FrameIsH264(webrtc::TransformableFrameInterface* frame, + webrtc::FrameCryptorTransformer::MediaType type) { + switch (type) { + case webrtc::FrameCryptorTransformer::MediaType::kVideoFrame: { + auto videoFrame = + static_cast(frame); + return videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecH264; + } + default: + return false; + } +} + +inline bool NeedsRbspUnescaping(const uint8_t* frameData, size_t frameSize) { + for (size_t i = 0; i < frameSize - 3; ++i) { + if (frameData[i] == 0 && frameData[i + 1] == 0 && frameData[i + 2] == 3) + return true; + } + return false; +} + +std::string to_uint8_list(const uint8_t* data, int len) { + std::stringstream ss; + ss << "["; + for (int i = 0; i < len; i++) { + ss << static_cast(data[i]) << ","; + } + ss << "]"; + return ss.str(); +} + +std::string to_hex(const uint8_t* data, int len) { + std::stringstream ss; + ss << std::uppercase << std::hex << std::setfill('0'); + for (int i = 0; i < len; i++) { + ss << std::setw(2) << static_cast(data[i]); + } + return ss.str(); +} + +uint8_t get_unencrypted_bytes(webrtc::TransformableFrameInterface* frame, + webrtc::FrameCryptorTransformer::MediaType type) { + uint8_t unencrypted_bytes = 0; + switch (type) { + case webrtc::FrameCryptorTransformer::MediaType::kAudioFrame: + unencrypted_bytes = 1; + break; + case webrtc::FrameCryptorTransformer::MediaType::kVideoFrame: { + auto videoFrame = + static_cast(frame); + if (videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecAV1) { + unencrypted_bytes = 0; + } else if (videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecVP8) { + unencrypted_bytes = videoFrame->IsKeyFrame() ? 
10 : 3;
+    } else if (videoFrame->header().codec ==
+               webrtc::VideoCodecType::kVideoCodecH264) {
+      rtc::ArrayView<const uint8_t> data_in = frame->GetData();
+      std::vector<webrtc::H264::NaluIndex> nalu_indices =
+          webrtc::H264::FindNaluIndices(data_in);
+
+      int idx = 0;
+      for (const auto& index : nalu_indices) {
+        const uint8_t* slice = data_in.data() + index.payload_start_offset;
+        webrtc::H264::NaluType nalu_type =
+            webrtc::H264::ParseNaluType(slice[0]);
+        switch (nalu_type) {
+          case webrtc::H264::NaluType::kIdr:
+          case webrtc::H264::NaluType::kSlice:
+            unencrypted_bytes = index.payload_start_offset + 2;
+            RTC_LOG(LS_INFO)
+                << "NonParameterSetNalu::payload_size: " << index.payload_size
+                << ", nalu_type " << nalu_type << ", NaluIndex [" << idx++
+                << "] offset: " << index.payload_start_offset;
+            return unencrypted_bytes;
+          default:
+            break;
+        }
+      }
+    }
+    break;
+  }
+  default:
+    break;
+  }
+  return unencrypted_bytes;
+}
+
+int DerivePBKDF2KeyFromRawKey(const std::vector<uint8_t> raw_key,
+                              const std::vector<uint8_t>& salt,
+                              unsigned int optional_length_bits,
+                              std::vector<uint8_t>* derived_key) {
+  size_t key_size_bytes = optional_length_bits / 8;
+  derived_key->resize(key_size_bytes);
+
+  if (PKCS5_PBKDF2_HMAC((const char*)raw_key.data(), raw_key.size(),
+                        salt.data(), salt.size(), 100000, EVP_sha256(),
+                        key_size_bytes, derived_key->data()) != 1) {
+    RTC_LOG(LS_ERROR) << "Failed to derive AES key from password.";
+    return ErrorUnexpected;
+  }
+
+  RTC_LOG(LS_INFO) << "raw_key "
+                   << to_uint8_list(raw_key.data(), raw_key.size()) << " len "
+                   << raw_key.size() << " salt "
+                   << to_uint8_list(salt.data(), salt.size()) << " len "
+                   << salt.size() << "\n derived_key "
+                   << to_uint8_list(derived_key->data(), derived_key->size())
+                   << " len " << derived_key->size();
+
+  return Success;
+}
+
+int AesGcmEncryptDecrypt(EncryptOrDecrypt mode,
+                         const std::vector<uint8_t> raw_key,
+                         const rtc::ArrayView<uint8_t> data,
+                         unsigned int tag_length_bytes,
+                         rtc::ArrayView<uint8_t> iv,
+                         rtc::ArrayView<uint8_t> additional_data,
+                         const EVP_AEAD* aead_alg,
+                         std::vector<uint8_t>* buffer) {
+  bssl::ScopedEVP_AEAD_CTX ctx;
+
+  if (!aead_alg) {
+    RTC_LOG(LS_ERROR) << "Invalid AES-GCM key size.";
+    return ErrorUnexpected;
+  }
+
+  if (!EVP_AEAD_CTX_init(ctx.get(), aead_alg, raw_key.data(), raw_key.size(),
+                         tag_length_bytes, nullptr)) {
+    RTC_LOG(LS_ERROR) << "Failed to initialize AES-GCM context.";
+    return OperationError;
+  }
+
+  size_t len;
+  int ok;
+
+  if (mode == EncryptOrDecrypt::kDecrypt) {
+    if (data.size() < tag_length_bytes) {
+      RTC_LOG(LS_ERROR) << "Data too small for AES-GCM tag.";
+      return ErrorDataTooSmall;
+    }
+
+    buffer->resize(data.size() - tag_length_bytes);
+
+    ok = EVP_AEAD_CTX_open(ctx.get(), buffer->data(), &len, buffer->size(),
+                           iv.data(), iv.size(), data.data(), data.size(),
+                           additional_data.data(), additional_data.size());
+  } else {
+    buffer->resize(data.size() + EVP_AEAD_max_overhead(aead_alg));
+
+    ok = EVP_AEAD_CTX_seal(ctx.get(), buffer->data(), &len, buffer->size(),
+                           iv.data(), iv.size(), data.data(), data.size(),
+                           additional_data.data(), additional_data.size());
+  }
+
+  if (!ok) {
+    RTC_LOG(LS_WARNING) << "Failed to perform AES-GCM operation.";
+    return OperationError;
+  }
+
+  buffer->resize(len);
+
+  return Success;
+}
+
+int AesEncryptDecrypt(EncryptOrDecrypt mode,
+                      webrtc::FrameCryptorTransformer::Algorithm algorithm,
+                      const std::vector<uint8_t>& raw_key,
+                      rtc::ArrayView<uint8_t> iv,
+                      rtc::ArrayView<uint8_t> additional_data,
+                      const rtc::ArrayView<uint8_t> data,
+                      std::vector<uint8_t>* buffer) {
+  switch (algorithm) {
+    case webrtc::FrameCryptorTransformer::Algorithm::kAesGcm: {
+      unsigned int tag_length_bits = 128;
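+      // A 128-bit (16-byte) tag is the AES-GCM default; it is passed to
+      // AesGcmEncryptDecrypt() as tag_length_bits / 8 and appended to the
+      // ciphertext, while the 12-byte IV comes from makeIv().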
const EVP_AEAD* cipher = GetAesGcmAlgorithmFromKeySize(raw_key.size()); + if (!cipher) { + RTC_LOG(LS_ERROR) << "Invalid AES-GCM key size."; + return ErrorUnexpected; + } + return AesGcmEncryptDecrypt( + mode, raw_key, data, tag_length_bits / 8, iv, additional_data, cipher, buffer); + } + default: + RTC_LOG(LS_ERROR) << "Unsupported algorithm."; + return ErrorUnexpected; + } +} +namespace webrtc { + +FrameCryptorTransformer::FrameCryptorTransformer( + rtc::Thread* signaling_thread, + const std::string participant_id, + MediaType type, + Algorithm algorithm, + webrtc::scoped_refptr key_provider) + : signaling_thread_(signaling_thread), + thread_(rtc::Thread::Create()), + participant_id_(participant_id), + type_(type), + algorithm_(algorithm), + key_provider_(key_provider) { + RTC_DCHECK(key_provider_ != nullptr); + thread_->SetName("FrameCryptorTransformer", this); + thread_->Start(); +} + +FrameCryptorTransformer::~FrameCryptorTransformer() { + thread_->Stop(); +} + +void FrameCryptorTransformer::Transform( + std::unique_ptr frame) { + webrtc::MutexLock lock(&sink_mutex_); + if (sink_callback_ == nullptr && sink_callbacks_.size() == 0) { + RTC_LOG(LS_WARNING) + << "FrameCryptorTransformer::Transform sink_callback_ is NULL"; + return; + } + + // do encrypt or decrypt here... + switch (frame->GetDirection()) { + case webrtc::TransformableFrameInterface::Direction::kSender: + RTC_DCHECK(thread_ != nullptr); + thread_->PostTask([frame = std::move(frame), this]() mutable { + encryptFrame(std::move(frame)); + }); + break; + case webrtc::TransformableFrameInterface::Direction::kReceiver: + RTC_DCHECK(thread_ != nullptr); + thread_->PostTask([frame = std::move(frame), this]() mutable { + decryptFrame(std::move(frame)); + }); + break; + case webrtc::TransformableFrameInterface::Direction::kUnknown: + // do nothing + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::Transform() kUnknown"; + break; + } +} + +void FrameCryptorTransformer::encryptFrame( + std::unique_ptr frame) { + bool enabled_cryption = false; + webrtc::scoped_refptr sink_callback = nullptr; + { + webrtc::MutexLock lock(&mutex_); + enabled_cryption = enabled_cryption_; + if (type_ == webrtc::FrameCryptorTransformer::MediaType::kAudioFrame) { + sink_callback = sink_callback_; + } else { + sink_callback = sink_callbacks_[frame->GetSsrc()]; + } + } + + if (sink_callback == nullptr) { + RTC_LOG(LS_WARNING) + << "FrameCryptorTransformer::encryptFrame() sink_callback is NULL"; + if (last_enc_error_ != FrameCryptionState::kInternalError) { + last_enc_error_ = FrameCryptionState::kInternalError; + onFrameCryptionStateChanged(last_enc_error_); + } + return; + } + + rtc::ArrayView data_in = frame->GetData(); + if (data_in.size() == 0 || !enabled_cryption) { + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::encryptFrame() " + "data_in.size() == 0 || enabled_cryption == false"; + if(key_provider_->options().discard_frame_when_cryptor_not_ready) { + return; + } + sink_callback->OnTransformedFrame(std::move(frame)); + return; + } + + auto key_handler = key_provider_->options().shared_key + ? 
key_provider_->GetSharedKey(participant_id_) + : key_provider_->GetKey(participant_id_); + + if (key_handler == nullptr || key_handler->GetKeySet(key_index_) == nullptr) { + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::encryptFrame() no keys, or " + "key_index[" + << key_index_ << "] out of range for participant " + << participant_id_; + if (last_enc_error_ != FrameCryptionState::kMissingKey) { + last_enc_error_ = FrameCryptionState::kMissingKey; + onFrameCryptionStateChanged(last_enc_error_); + } + return; + } + + auto key_set = key_handler->GetKeySet(key_index_); + uint8_t unencrypted_bytes = get_unencrypted_bytes(frame.get(), type_); + + rtc::Buffer frame_header(unencrypted_bytes); + for (size_t i = 0; i < unencrypted_bytes; i++) { + frame_header[i] = data_in[i]; + } + + rtc::Buffer frame_trailer(2); + frame_trailer[0] = getIvSize(); + frame_trailer[1] = key_index_; + rtc::Buffer iv = makeIv(frame->GetSsrc(), frame->GetTimestamp()); + + rtc::Buffer payload(data_in.size() - unencrypted_bytes); + for (size_t i = unencrypted_bytes; i < data_in.size(); i++) { + payload[i - unencrypted_bytes] = data_in[i]; + } + + std::vector buffer; + if (AesEncryptDecrypt(EncryptOrDecrypt::kEncrypt, algorithm_, + key_set->encryption_key, iv, frame_header, payload, + &buffer) == Success) { + rtc::Buffer encrypted_payload(buffer.data(), buffer.size()); + rtc::Buffer tag(encrypted_payload.data() + encrypted_payload.size() - 16, + 16); + rtc::Buffer data_without_header; + data_without_header.AppendData(encrypted_payload); + data_without_header.AppendData(iv); + data_without_header.AppendData(frame_trailer); + + rtc::Buffer data_out; + data_out.AppendData(frame_header); + + if (FrameIsH264(frame.get(), type_)) { + H264::WriteRbsp(data_without_header.data(), data_without_header.size(), + &data_out); + } else { + data_out.AppendData(data_without_header); + RTC_CHECK_EQ(data_out.size(), frame_header.size() + + encrypted_payload.size() + iv.size() + + frame_trailer.size()); + } + + frame->SetData(data_out); + + if (last_enc_error_ != FrameCryptionState::kOk) { + last_enc_error_ = FrameCryptionState::kOk; + onFrameCryptionStateChanged(last_enc_error_); + } + sink_callback->OnTransformedFrame(std::move(frame)); + } else { + if (last_enc_error_ != FrameCryptionState::kEncryptionFailed) { + last_enc_error_ = FrameCryptionState::kEncryptionFailed; + onFrameCryptionStateChanged(last_enc_error_); + } + RTC_LOG(LS_ERROR) << "FrameCryptorTransformer::encryptFrame() failed"; + } +} + +void FrameCryptorTransformer::decryptFrame( + std::unique_ptr frame) { + bool enabled_cryption = false; + webrtc::scoped_refptr sink_callback = nullptr; + { + webrtc::MutexLock lock(&mutex_); + enabled_cryption = enabled_cryption_; + if (type_ == webrtc::FrameCryptorTransformer::MediaType::kAudioFrame) { + sink_callback = sink_callback_; + } else { + sink_callback = sink_callbacks_[frame->GetSsrc()]; + } + } + + if (sink_callback == nullptr) { + RTC_LOG(LS_WARNING) + << "FrameCryptorTransformer::decryptFrame() sink_callback is NULL"; + if (last_dec_error_ != FrameCryptionState::kInternalError) { + last_dec_error_ = FrameCryptionState::kInternalError; + onFrameCryptionStateChanged(last_dec_error_); + } + return; + } + + rtc::ArrayView data_in = frame->GetData(); + + if (data_in.size() == 0 || !enabled_cryption) { + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() " + "data_in.size() == 0 || enabled_cryption == false"; + if(key_provider_->options().discard_frame_when_cryptor_not_ready) { + return; + } + + 
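+  // Cryptor disabled or empty payload, and frames are not being discarded:
+  // hand the frame to the sink unmodified.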
sink_callback->OnTransformedFrame(std::move(frame)); + return; + } + + auto uncrypted_magic_bytes = key_provider_->options().uncrypted_magic_bytes; + if (uncrypted_magic_bytes.size() > 0 && + data_in.size() >= uncrypted_magic_bytes.size()) { + auto tmp = data_in.subview(data_in.size() - (uncrypted_magic_bytes.size()), + uncrypted_magic_bytes.size()); + auto data = std::vector(tmp.begin(), tmp.end()); + if (uncrypted_magic_bytes == data) { + RTC_CHECK_EQ(tmp.size(), uncrypted_magic_bytes.size()); + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::uncrypted_magic_bytes( tmp " + << to_hex(tmp.data(), tmp.size()) << ", magic bytes " + << to_hex(uncrypted_magic_bytes.data(), + uncrypted_magic_bytes.size()) + << ")"; + + // magic bytes detected, this is a non-encrypted frame, skip frame + // decryption. + rtc::Buffer data_out; + data_out.AppendData( + data_in.subview(0, data_in.size() - uncrypted_magic_bytes.size())); + frame->SetData(data_out); + sink_callback->OnTransformedFrame(std::move(frame)); + return; + } + } + + uint8_t unencrypted_bytes = get_unencrypted_bytes(frame.get(), type_); + + rtc::Buffer frame_header(unencrypted_bytes); + for (size_t i = 0; i < unencrypted_bytes; i++) { + frame_header[i] = data_in[i]; + } + + rtc::Buffer frame_trailer(2); + frame_trailer[0] = data_in[data_in.size() - 2]; + frame_trailer[1] = data_in[data_in.size() - 1]; + uint8_t ivLength = frame_trailer[0]; + uint8_t key_index = frame_trailer[1]; + + if (ivLength != getIvSize()) { + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() ivLength[" + << static_cast(ivLength) << "] != getIvSize()[" + << static_cast(getIvSize()) << "]"; + if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { + last_dec_error_ = FrameCryptionState::kDecryptionFailed; + onFrameCryptionStateChanged(last_dec_error_); + } + return; + } + + auto key_handler = key_provider_->options().shared_key + ? 
key_provider_->GetSharedKey(participant_id_)
+                         : key_provider_->GetKey(participant_id_);
+
+  if (0 > key_index || key_index >= key_provider_->options().key_ring_size ||
+      key_handler == nullptr || key_handler->GetKeySet(key_index) == nullptr) {
+    RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() no keys, or "
+                        "key_index["
+                     << key_index << "] out of range for participant "
+                     << participant_id_;
+    if (last_dec_error_ != FrameCryptionState::kMissingKey) {
+      last_dec_error_ = FrameCryptionState::kMissingKey;
+      onFrameCryptionStateChanged(last_dec_error_);
+    }
+    return;
+  }
+
+  if (last_dec_error_ == kDecryptionFailed && !key_handler->HasValidKey()) {
+    // If decryption failed and the current key is invalid, wait for the next
+    // new key before trying to decrypt again.
+    return;
+  }
+
+  auto key_set = key_handler->GetKeySet(key_index);
+
+  rtc::Buffer iv = rtc::Buffer(ivLength);
+  for (size_t i = 0; i < ivLength; i++) {
+    iv[i] = data_in[data_in.size() - 2 - ivLength + i];
+  }
+
+  rtc::Buffer encrypted_buffer(data_in.size() - unencrypted_bytes);
+  for (size_t i = unencrypted_bytes; i < data_in.size(); i++) {
+    encrypted_buffer[i - unencrypted_bytes] = data_in[i];
+  }
+
+  if (FrameIsH264(frame.get(), type_) &&
+      NeedsRbspUnescaping(encrypted_buffer.data(), encrypted_buffer.size())) {
+    encrypted_buffer.SetData(
+        H264::ParseRbsp(encrypted_buffer.data(), encrypted_buffer.size()));
+  }
+
+  rtc::Buffer encrypted_payload(encrypted_buffer.size() - ivLength - 2);
+  for (size_t i = 0; i < encrypted_payload.size(); i++) {
+    encrypted_payload[i] = encrypted_buffer[i];
+  }
+
+  rtc::Buffer tag(encrypted_payload.data() + encrypted_payload.size() - 16, 16);
+  std::vector<uint8_t> buffer;
+
+  int ratchet_count = 0;
+  auto initialKeyMaterial = key_set->material;
+  bool decryption_success = false;
+  if (AesEncryptDecrypt(EncryptOrDecrypt::kDecrypt, algorithm_,
+                        key_set->encryption_key, iv, frame_header,
+                        encrypted_payload, &buffer) == Success) {
+    decryption_success = true;
+  } else {
+    RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() failed";
+    webrtc::scoped_refptr<ParticipantKeyHandler::KeySet> ratcheted_key_set;
+    auto currentKeyMaterial = key_set->material;
+    if (key_provider_->options().ratchet_window_size > 0) {
+      while (ratchet_count < key_provider_->options().ratchet_window_size) {
+        ratchet_count++;
+
+        RTC_LOG(LS_INFO) << "ratcheting key attempt " << ratchet_count << " of "
+                         << key_provider_->options().ratchet_window_size;
+
+        auto new_material = key_handler->RatchetKeyMaterial(currentKeyMaterial);
+        ratcheted_key_set = key_handler->DeriveKeys(
+            new_material, key_provider_->options().ratchet_salt, 128);
+
+        if (AesEncryptDecrypt(EncryptOrDecrypt::kDecrypt, algorithm_,
+                              ratcheted_key_set->encryption_key, iv,
+                              frame_header, encrypted_payload,
+                              &buffer) == Success) {
+          RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() "
+                              "ratcheted to key_index="
+                           << static_cast<int>(key_index);
+          decryption_success = true;
+          // Success, so we set the new key.
+          key_handler->SetKeyFromMaterial(new_material, key_index);
+          key_handler->SetHasValidKey();
+          if (last_dec_error_ != FrameCryptionState::kKeyRatcheted) {
+            last_dec_error_ = FrameCryptionState::kKeyRatcheted;
+            onFrameCryptionStateChanged(last_dec_error_);
+          }
+          break;
+        }
+        // For the next ratchet attempt.
+        currentKeyMaterial = new_material;
+      }
+
+      /* Since the key is sent first and only used for encrypting afterwards,
+         there were situations where decryption failed because the received
+         frame was not yet encrypted, and ratcheting, of course, did not solve
+         the problem. So if we fail RATCHET_WINDOW_SIZE times, we fall back to
+         the initial key.
+       */
+      if (!decryption_success ||
+          ratchet_count >= key_provider_->options().ratchet_window_size) {
+        key_handler->SetKeyFromMaterial(initialKeyMaterial, key_index);
+      }
+    }
+  }
+
+  if (!decryption_success) {
+    if (key_handler->DecryptionFailure()) {
+      if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) {
+        last_dec_error_ = FrameCryptionState::kDecryptionFailed;
+        onFrameCryptionStateChanged(last_dec_error_);
+      }
+    }
+    return;
+  }
+
+  rtc::Buffer payload(buffer.data(), buffer.size());
+  rtc::Buffer data_out;
+  data_out.AppendData(frame_header);
+  data_out.AppendData(payload);
+  frame->SetData(data_out);
+
+  if (last_dec_error_ != FrameCryptionState::kOk) {
+    last_dec_error_ = FrameCryptionState::kOk;
+    onFrameCryptionStateChanged(last_dec_error_);
+  }
+  sink_callback->OnTransformedFrame(std::move(frame));
+}
+
+void FrameCryptorTransformer::onFrameCryptionStateChanged(
+    FrameCryptionState state) {
+  webrtc::MutexLock lock(&mutex_);
+  if (observer_) {
+    RTC_DCHECK(signaling_thread_ != nullptr);
+    signaling_thread_->PostTask([observer = observer_, state = state,
+                                 participant_id = participant_id_]() mutable {
+      observer->OnFrameCryptionStateChanged(participant_id, state);
+    });
+  }
+}
+
+rtc::Buffer FrameCryptorTransformer::makeIv(uint32_t ssrc, uint32_t timestamp) {
+  uint32_t send_count = 0;
+  if (send_counts_.find(ssrc) == send_counts_.end()) {
+    // Start each SSRC at a random counter value.
+    srand((unsigned)time(NULL));
+    send_counts_[ssrc] = rand() % 0xFFFF;
+  } else {
+    send_count = send_counts_[ssrc];
+  }
+  rtc::ByteBufferWriter buf;
+  buf.WriteUInt32(ssrc);
+  buf.WriteUInt32(timestamp);
+  buf.WriteUInt32(timestamp - (send_count % 0xFFFF));
+  send_counts_[ssrc] = send_count + 1;
+
+  RTC_CHECK_EQ(buf.Length(), getIvSize());
+
+  return rtc::Buffer(buf.Data(), buf.Length());
+}
+
+uint8_t FrameCryptorTransformer::getIvSize() {
+  switch (algorithm_) {
+    case Algorithm::kAesGcm:
+      return 12;
+    default:
+      return 0;
+  }
+}
+
+}  // namespace webrtc
diff --git a/api/crypto/frame_crypto_transformer.h b/api/crypto/frame_crypto_transformer.h
new file mode 100644
index 0000000000..af530e9c5c
--- /dev/null
+++ b/api/crypto/frame_crypto_transformer.h
@@ -0,0 +1,483 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef WEBRTC_FRAME_CRYPTOR_TRANSFORMER_H_
+#define WEBRTC_FRAME_CRYPTOR_TRANSFORMER_H_
+
+#include <unordered_map>
+
+#include "api/frame_transformer_interface.h"
+#include "api/task_queue/pending_task_safety_flag.h"
+#include "api/task_queue/task_queue_base.h"
+#include "api/make_ref_counted.h"
+#include "rtc_base/buffer.h"
+#include "rtc_base/synchronization/mutex.h"
+#include "rtc_base/system/rtc_export.h"
+#include "rtc_base/thread.h"
+
+int DerivePBKDF2KeyFromRawKey(const std::vector<uint8_t> raw_key,
+                              const std::vector<uint8_t>& salt,
+                              unsigned int optional_length_bits,
+                              std::vector<uint8_t>* derived_key);
+
+namespace webrtc {
+
+const size_t DEFAULT_KEYRING_SIZE = 16;
+const size_t MAX_KEYRING_SIZE = 255;
+
+class ParticipantKeyHandler;
+
+struct KeyProviderOptions {
+  bool shared_key;
+  std::vector<uint8_t> ratchet_salt;
+  std::vector<uint8_t> uncrypted_magic_bytes;
+  int ratchet_window_size;
+  int failure_tolerance;
+  // Key ring size should be between 1 and 255.
+  int key_ring_size;
+  bool discard_frame_when_cryptor_not_ready;
+  KeyProviderOptions()
+      : shared_key(false),
+        ratchet_window_size(0),
+        failure_tolerance(-1),
+        key_ring_size(DEFAULT_KEYRING_SIZE),
+        discard_frame_when_cryptor_not_ready(false) {}
+  KeyProviderOptions(const KeyProviderOptions& copy)
+      : shared_key(copy.shared_key),
+        ratchet_salt(copy.ratchet_salt),
+        uncrypted_magic_bytes(copy.uncrypted_magic_bytes),
+        ratchet_window_size(copy.ratchet_window_size),
+        failure_tolerance(copy.failure_tolerance),
+        key_ring_size(copy.key_ring_size),
+        discard_frame_when_cryptor_not_ready(
+            copy.discard_frame_when_cryptor_not_ready) {}
+};
+
+class KeyProvider : public webrtc::RefCountInterface {
+ public:
+  virtual bool SetSharedKey(int key_index, std::vector<uint8_t> key) = 0;
+
+  virtual const webrtc::scoped_refptr<ParticipantKeyHandler> GetSharedKey(
+      const std::string participant_id) = 0;
+
+  virtual const std::vector<uint8_t> RatchetSharedKey(int key_index) = 0;
+
+  virtual const std::vector<uint8_t> ExportSharedKey(int key_index) const = 0;
+
+  virtual bool SetKey(const std::string participant_id,
+                      int key_index,
+                      std::vector<uint8_t> key) = 0;
+
+  virtual const webrtc::scoped_refptr<ParticipantKeyHandler> GetKey(
+      const std::string participant_id) const = 0;
+
+  virtual const std::vector<uint8_t> RatchetKey(
+      const std::string participant_id,
+      int key_index) = 0;
+
+  virtual const std::vector<uint8_t> ExportKey(const std::string participant_id,
+                                               int key_index) const = 0;
+
+  virtual void SetSifTrailer(const std::vector<uint8_t> trailer) = 0;
+
+  virtual KeyProviderOptions& options() = 0;
+
+ protected:
+  virtual ~KeyProvider() {}
+};
+
+class ParticipantKeyHandler : public webrtc::RefCountInterface {
+ public:
+  struct KeySet : public webrtc::RefCountInterface {
+    std::vector<uint8_t> material;
+    std::vector<uint8_t> encryption_key;
+    KeySet(std::vector<uint8_t> material, std::vector<uint8_t> encryptionKey)
+        : material(material), encryption_key(encryptionKey) {}
+  };
+
+ public:
+  ParticipantKeyHandler(KeyProvider* key_provider)
+      : key_provider_(key_provider) {
+    int key_ring_size = key_provider_->options().key_ring_size;
+    if (key_ring_size <= 0) {
+      key_ring_size = DEFAULT_KEYRING_SIZE;
+    } else if (key_ring_size > (int)MAX_KEYRING_SIZE) {
+      // Keyring size needs to be between 1 and 255.
+      key_ring_size = MAX_KEYRING_SIZE;
+    }
+    crypto_key_ring_.resize(key_ring_size);
+  }
+
+  virtual ~ParticipantKeyHandler() = default;
+
+  webrtc::scoped_refptr<ParticipantKeyHandler> Clone() {
+    auto clone = webrtc::make_ref_counted<ParticipantKeyHandler>(key_provider_);
+    clone->crypto_key_ring_ = crypto_key_ring_;
+    clone->current_key_index_ = current_key_index_;
+    clone->has_valid_key_ = has_valid_key_;
+    return clone;
+  }
+
+  virtual std::vector<uint8_t> RatchetKey(int key_index) {
+    auto key_set = GetKeySet(key_index);
+    if (!key_set) {
+      return std::vector<uint8_t>();
+    }
+    auto current_material = key_set->material;
+    std::vector<uint8_t> new_material;
+    if (DerivePBKDF2KeyFromRawKey(current_material,
+                                  key_provider_->options().ratchet_salt, 256,
+                                  &new_material) != 0) {
+      return std::vector<uint8_t>();
+    }
+    SetKeyFromMaterial(new_material,
+                       key_index != -1 ? key_index : current_key_index_);
+    SetHasValidKey();
+    return new_material;
+  }
+
+  virtual webrtc::scoped_refptr<KeySet> GetKeySet(int key_index) {
+    webrtc::MutexLock lock(&mutex_);
+    return crypto_key_ring_[key_index != -1 ? key_index : current_key_index_];
+  }
+
+  virtual void SetKey(std::vector<uint8_t> password, int key_index) {
+    SetKeyFromMaterial(password, key_index);
+    SetHasValidKey();
+  }
+
+  std::vector<uint8_t> RatchetKeyMaterial(
+      std::vector<uint8_t> current_material) {
+    std::vector<uint8_t> new_material;
+    if (DerivePBKDF2KeyFromRawKey(current_material,
+                                  key_provider_->options().ratchet_salt, 256,
+                                  &new_material) != 0) {
+      return std::vector<uint8_t>();
+    }
+    return new_material;
+  }
+
+  webrtc::scoped_refptr<KeySet> DeriveKeys(std::vector<uint8_t> password,
+                                           std::vector<uint8_t> ratchet_salt,
+                                           unsigned int optional_length_bits) {
+    std::vector<uint8_t> derived_key;
+    if (DerivePBKDF2KeyFromRawKey(password, ratchet_salt, optional_length_bits,
+                                  &derived_key) == 0) {
+      return webrtc::make_ref_counted<KeySet>(password, derived_key);
+    }
+    return nullptr;
+  }
+
+  bool HasValidKey() {
+    webrtc::MutexLock lock(&mutex_);
+    return has_valid_key_;
+  }
+
+  void SetHasValidKey() {
+    webrtc::MutexLock lock(&mutex_);
+    decryption_failure_count_ = 0;
+    has_valid_key_ = true;
+  }
+
+  void SetKeyFromMaterial(std::vector<uint8_t> password, int key_index) {
+    webrtc::MutexLock lock(&mutex_);
+    if (key_index >= 0) {
+      current_key_index_ = key_index % crypto_key_ring_.size();
+    }
+    crypto_key_ring_[current_key_index_] =
+        DeriveKeys(password, key_provider_->options().ratchet_salt, 128);
+  }
+
+  bool DecryptionFailure() {
+    webrtc::MutexLock lock(&mutex_);
+    if (key_provider_->options().failure_tolerance < 0) {
+      return false;
+    }
+    decryption_failure_count_ += 1;
+
+    if (decryption_failure_count_ >
+        key_provider_->options().failure_tolerance) {
+      has_valid_key_ = false;
+      return true;
+    }
+    return false;
+  }
+
+ private:
+  bool has_valid_key_ = false;
+  int decryption_failure_count_ = 0;
+  mutable webrtc::Mutex mutex_;
+  int current_key_index_ = 0;
+  KeyProvider* key_provider_;
+  std::vector<webrtc::scoped_refptr<KeySet>> crypto_key_ring_;
+};
+
+class DefaultKeyProviderImpl : public KeyProvider {
+ public:
+  DefaultKeyProviderImpl(KeyProviderOptions options) : options_(options) {}
+  ~DefaultKeyProviderImpl() override = default;
+
+  /// Set the shared key.
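+  /// In shared-key mode the key is also mirrored into every existing
+  /// per-participant handler, and late joiners receive a clone of the
+  /// shared handler via GetSharedKey().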
+ bool SetSharedKey(int key_index, std::vector key) override { + webrtc::MutexLock lock(&mutex_); + if (options_.shared_key) { + if (keys_.find("shared") == keys_.end()) { + keys_["shared"] = webrtc::make_ref_counted(this); + } + + auto key_handler = keys_["shared"]; + key_handler->SetKey(key, key_index); + + for (auto& key_pair : keys_) { + if (key_pair.first != "shared") { + key_pair.second->SetKey(key, key_index); + } + } + return true; + } + return false; + } + + const std::vector RatchetSharedKey(int key_index) override { + webrtc::MutexLock lock(&mutex_); + auto it = keys_.find("shared"); + if (it == keys_.end()) { + return std::vector(); + } + auto new_key = it->second->RatchetKey(key_index); + if (options_.shared_key) { + for (auto& key_pair : keys_) { + if (key_pair.first != "shared") { + key_pair.second->SetKey(new_key, key_index); + } + } + } + return new_key; + } + + const std::vector ExportSharedKey(int key_index) const override { + webrtc::MutexLock lock(&mutex_); + auto it = keys_.find("shared"); + if (it == keys_.end()) { + return std::vector(); + } + auto key_set = it->second->GetKeySet(key_index); + if (key_set) { + return key_set->material; + } + return std::vector(); + } + + const webrtc::scoped_refptr GetSharedKey( + const std::string participant_id) override { + webrtc::MutexLock lock(&mutex_); + if (options_.shared_key && keys_.find("shared") != keys_.end()) { + auto shared_key_handler = keys_["shared"]; + if (keys_.find(participant_id) != keys_.end()) { + return keys_[participant_id]; + } else { + auto key_handler_clone = shared_key_handler->Clone(); + keys_[participant_id] = key_handler_clone; + return key_handler_clone; + } + } + return nullptr; + } + + /// Set the key at the given index. + bool SetKey(const std::string participant_id, + int index, + std::vector key) override { + webrtc::MutexLock lock(&mutex_); + + if (keys_.find(participant_id) == keys_.end()) { + keys_[participant_id] = + webrtc::make_ref_counted(this); + } + + auto key_handler = keys_[participant_id]; + key_handler->SetKey(key, index); + return true; + } + + const webrtc::scoped_refptr GetKey( + const std::string participant_id) const override { + webrtc::MutexLock lock(&mutex_); + + if (keys_.find(participant_id) == keys_.end()) { + return nullptr; + } + + return keys_.find(participant_id)->second; + } + + const std::vector RatchetKey(const std::string participant_id, + int key_index) override { + auto key_handler = GetKey(participant_id); + if (key_handler) { + return key_handler->RatchetKey(key_index); + } + return std::vector(); + } + + const std::vector ExportKey(const std::string participant_id, + int key_index) const override { + auto key_handler = GetKey(participant_id); + if (key_handler) { + auto key_set = key_handler->GetKeySet(key_index); + if (key_set) { + return key_set->material; + } + } + return std::vector(); + } + + void SetSifTrailer(const std::vector trailer) override { + webrtc::MutexLock lock(&mutex_); + options_.uncrypted_magic_bytes = trailer; + } + + KeyProviderOptions& options() override { return options_; } + + private: + mutable webrtc::Mutex mutex_; + KeyProviderOptions options_; + std::unordered_map> + keys_; +}; + +enum FrameCryptionState { + kNew = 0, + kOk, + kEncryptionFailed, + kDecryptionFailed, + kMissingKey, + kKeyRatcheted, + kInternalError, +}; + +class FrameCryptorTransformerObserver : public webrtc::RefCountInterface { + public: + virtual void OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState error) = 0; + + protected: 
+ virtual ~FrameCryptorTransformerObserver() {} +}; + +class RTC_EXPORT FrameCryptorTransformer + : public webrtc::RefCountedObject { + public: + enum class MediaType { + kAudioFrame = 0, + kVideoFrame, + }; + + enum class Algorithm { + kAesGcm = 0, + kAesCbc, + }; + + explicit FrameCryptorTransformer( + rtc::Thread* signaling_thread, + const std::string participant_id, + MediaType type, + Algorithm algorithm, + webrtc::scoped_refptr key_provider); + ~FrameCryptorTransformer(); + virtual void RegisterFrameCryptorTransformerObserver( + webrtc::scoped_refptr observer) { + webrtc::MutexLock lock(&mutex_); + observer_ = observer; + } + + virtual void UnRegisterFrameCryptorTransformerObserver() { + webrtc::MutexLock lock(&mutex_); + observer_ = nullptr; + } + + virtual void SetKeyIndex(int index) { + webrtc::MutexLock lock(&mutex_); + key_index_ = index; + } + + virtual int key_index() const { return key_index_; } + + virtual void SetEnabled(bool enabled) { + webrtc::MutexLock lock(&mutex_); + enabled_cryption_ = enabled; + } + virtual bool enabled() const { + webrtc::MutexLock lock(&mutex_); + return enabled_cryption_; + } + virtual const std::string participant_id() const { return participant_id_; } + + protected: + virtual void RegisterTransformedFrameCallback( + webrtc::scoped_refptr callback) override { + webrtc::MutexLock lock(&sink_mutex_); + sink_callback_ = callback; + } + virtual void UnregisterTransformedFrameCallback() override { + webrtc::MutexLock lock(&sink_mutex_); + sink_callback_ = nullptr; + } + virtual void RegisterTransformedFrameSinkCallback( + webrtc::scoped_refptr callback, + uint32_t ssrc) override { + webrtc::MutexLock lock(&sink_mutex_); + sink_callbacks_[ssrc] = callback; + } + virtual void UnregisterTransformedFrameSinkCallback(uint32_t ssrc) override { + webrtc::MutexLock lock(&sink_mutex_); + auto it = sink_callbacks_.find(ssrc); + if (it != sink_callbacks_.end()) { + sink_callbacks_.erase(it); + } + } + + virtual void Transform( + std::unique_ptr frame) override; + + private: + void encryptFrame(std::unique_ptr frame); + void decryptFrame(std::unique_ptr frame); + void onFrameCryptionStateChanged(FrameCryptionState error); + rtc::Buffer makeIv(uint32_t ssrc, uint32_t timestamp); + uint8_t getIvSize(); + + private: + TaskQueueBase* const signaling_thread_; + std::unique_ptr thread_; + std::string participant_id_; + mutable webrtc::Mutex mutex_; + mutable webrtc::Mutex sink_mutex_; + bool enabled_cryption_ RTC_GUARDED_BY(mutex_) = false; + MediaType type_; + Algorithm algorithm_; + webrtc::scoped_refptr sink_callback_; + std::map> + sink_callbacks_; + int key_index_ = 0; + std::map send_counts_; + webrtc::scoped_refptr key_provider_; + webrtc::scoped_refptr observer_; + FrameCryptionState last_enc_error_ = FrameCryptionState::kNew; + FrameCryptionState last_dec_error_ = FrameCryptionState::kNew; +}; + +} // namespace webrtc + +#endif // WEBRTC_FRAME_CRYPTOR_TRANSFORMER_H_ diff --git a/api/frame_transformer_interface.h b/api/frame_transformer_interface.h index 873bf6dce3..c7375e381b 100644 --- a/api/frame_transformer_interface.h +++ b/api/frame_transformer_interface.h @@ -22,6 +22,7 @@ #include "api/units/time_delta.h" #include "api/units/timestamp.h" #include "api/video/video_frame_metadata.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" #include "rtc_base/system/rtc_export.h" namespace webrtc { @@ -109,6 +110,8 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface { virtual VideoFrameMetadata Metadata() const = 0; virtual void 
SetMetadata(const VideoFrameMetadata&) = 0;
+
+  virtual const RTPVideoHeader& header() const = 0;
 };
 
 // Extends the TransformableFrameInterface to expose audio-specific information.
diff --git a/api/media_stream_interface.cc b/api/media_stream_interface.cc
index a092855bb5..604f02472b 100644
--- a/api/media_stream_interface.cc
+++ b/api/media_stream_interface.cc
@@ -19,6 +19,10 @@ namespace webrtc {
 const char* const MediaStreamTrackInterface::kVideoKind = kMediaTypeVideo;
 const char* const MediaStreamTrackInterface::kAudioKind = kMediaTypeAudio;
 
+bool VideoTrackInterface::should_receive() const {
+  return true;
+}
+
 VideoTrackInterface::ContentHint VideoTrackInterface::content_hint() const {
   return ContentHint::kNone;
 }
diff --git a/api/media_stream_interface.h b/api/media_stream_interface.h
index c3b60f7034..adc0b6825c 100644
--- a/api/media_stream_interface.h
+++ b/api/media_stream_interface.h
@@ -189,6 +189,8 @@ class RTC_EXPORT VideoTrackInterface : public MediaStreamTrackInterface,
 
   virtual VideoTrackSourceInterface* GetSource() const = 0;
 
+  virtual void set_should_receive(bool should_receive) {}
+  virtual bool should_receive() const;
   virtual ContentHint content_hint() const;
   virtual void set_content_hint(ContentHint /* hint */) {}
 
diff --git a/api/peer_connection_interface.h b/api/peer_connection_interface.h
index de071d5b04..e5b3853e11 100644
--- a/api/peer_connection_interface.h
+++ b/api/peer_connection_interface.h
@@ -691,6 +691,15 @@ class RTC_EXPORT PeerConnectionInterface : public webrtc::RefCountInterface {
     // The burst interval of the pacer, see TaskQueuePacedSender constructor.
     std::optional<TimeDelta> pacer_burst_interval;
 
+    // When this flag is set, ports not bound to any specific network interface
+    // will be used, in addition to normal ports bound to the enumerated
+    // interfaces. Without this flag, these "any address" ports would only be
+    // used when network enumeration fails or is disabled. But under certain
+    // conditions, these ports may succeed where others fail, so they may allow
+    // the application to work in a wider variety of environments, at the
+    // expense of having to allocate additional candidates.
+    bool enable_any_address_ports = false;
+
     //
     // Don't forget to update operator== if adding something.
     //
diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn
index 98591e6050..10383a520c 100644
--- a/api/video/BUILD.gn
+++ b/api/video/BUILD.gn
@@ -79,6 +79,19 @@ rtc_library("video_frame") {
   ]
 }
 
+rtc_library("yuv_helper") {
+  visibility = [ "*" ]
+  sources = [
+    "yuv_helper.cc",
+    "yuv_helper.h",
+  ]
+
+  deps = [
+    "../../rtc_base/system:rtc_export",
+    "//third_party/libyuv",
+  ]
+}
+
 if (is_android) {
   java_cpp_enum("video_frame_enums") {
     sources = [ "video_frame_buffer.h" ]
diff --git a/api/video/yuv_helper.cc b/api/video/yuv_helper.cc
new file mode 100644
index 0000000000..eab9126183
--- /dev/null
+++ b/api/video/yuv_helper.cc
@@ -0,0 +1,416 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#include "yuv_helper.h" + +#include "libyuv/convert.h" +#include "libyuv/convert_argb.h" +#include "libyuv/convert_from_argb.h" +#include "libyuv/row.h" +#include "third_party/libyuv/include/libyuv.h" +#include "video_rotation.h" + +namespace webrtc { + +int I420Rotate(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + VideoRotation mode) { + return libyuv::I420Rotate(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height, + static_cast(mode)); +} + +int I420ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_uv, + dst_stride_uv, width, height); +} + +int I420ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::I420ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, width, + height); +} + +int I420ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height) { + return libyuv::I420ToBGRA(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_bgra, dst_stride_bgra, width, + height); +} + +int I420ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I420ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I420ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height) { + return libyuv::I420ToRGBA(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgba, dst_stride_rgba, width, + height); +} + +int I420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return libyuv::I420ToRGB24(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, width, + height); +} + +int I420Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + libyuv::FilterMode filtering) { + return libyuv::I420Scale(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, src_width, src_height, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, + dst_stride_v, 
dst_width, dst_height, filtering); +} + +int ARGBToI420(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::ARGBToI420(src_argb, src_stride_argb, dst_y, dst_stride_y, + dst_u, dst_stride_u, dst_v, dst_stride_v, width, + height); +} + +int ABGRToI420(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::ABGRToI420(src_abgr, src_stride_abgr, dst_y, dst_stride_y, + dst_u, dst_stride_u, dst_v, dst_stride_v, width, + height); +} + +int ARGBToRGB24(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return libyuv::ARGBToRGB24(src_argb, src_stride_argb, dst_rgb24, + dst_stride_rgb24, width, height); +} + +int NV12ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::NV12ToI420(src_y, src_stride_y, src_uv, src_stride_uv, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, + dst_stride_v, width, height); +} + +int I444ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::I444ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height); +} + +int I422ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::I422ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height); +} + +int I010ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::I010ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height); +} + +int NV12ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::NV12ToARGB(src_y, src_stride_y, src_uv, src_stride_uv, + dst_argb, dst_stride_argb, width, height); +} + +int NV12ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::NV12ToABGR(src_y, src_stride_y, src_uv, src_stride_uv, + dst_abgr, dst_stride_abgr, width, height); +} + +int I444ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + 
uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I444ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I444ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I444ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I422ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::I422ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, width, + height); +} + +int I422ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I422ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I010ToARGB(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::I010ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, width, + height); +} + +int I010ToABGR(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I010ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int ABGRToNV12(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return libyuv::ABGRToNV12(src_abgr, src_stride_abgr, dst_y, dst_stride_y, + dst_uv, dst_stride_uv, width, height); +} + +int ARGBToNV12(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return libyuv::ARGBToNV12(src_argb, src_stride_argb, dst_y, dst_stride_y, + dst_uv, dst_stride_uv, width, height); +} + +} // namespace webrtc diff --git a/api/video/yuv_helper.h b/api/video/yuv_helper.h new file mode 100644 index 0000000000..5e86fb378b --- /dev/null +++ b/api/video/yuv_helper.h @@ -0,0 +1,316 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "libyuv/convert.h" +#include "rtc_base/system/rtc_export.h" +#include "stdint.h" +#include "third_party/libyuv/include/libyuv.h" +#include "video_rotation.h" + +namespace webrtc { + +RTC_EXPORT int I420Rotate(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + VideoRotation mode); + +RTC_EXPORT int I420ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +RTC_EXPORT int I420ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int I420ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height); + +RTC_EXPORT int I420ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I420ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height); + +RTC_EXPORT int I420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +RTC_EXPORT int I420Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + libyuv::FilterMode filtering); + +RTC_EXPORT int ARGBToI420(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int ABGRToI420(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int ARGBToRGB24(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +RTC_EXPORT int NV12ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int I444ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int I422ToI420(const uint8_t* src_y, + int src_stride_y, + const 
uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int I010ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int NV12ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int NV12ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I444ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I444ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I422ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int I422ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I010ToARGB(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int I010ToABGR(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int ABGRToNV12(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +RTC_EXPORT int ARGBToNV12(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +} // namespace webrtc
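For orientation, here is a minimal usage sketch of the yuv_helper wrappers declared above. The frame dimensions, buffer allocation, and the even width/height assumption are illustrative, not part of the patch; the wrappers simply forward to libyuv and return its status code (0 on success):

```cpp
#include <cstdint>
#include <vector>

#include "api/video/yuv_helper.h"

// Convert one packed ARGB frame (4 bytes per pixel) to planar I420.
// Chroma planes are subsampled 2x2, hence the half-size strides.
int ConvertArgbFrameToI420(const std::vector<uint8_t>& argb,
                           int width, int height) {
  const int cw = width / 2;   // chroma width, also the stride of a tight buffer
  const int ch = height / 2;  // chroma height
  std::vector<uint8_t> y(width * height), u(cw * ch), v(cw * ch);
  return webrtc::ARGBToI420(argb.data(), /*src_stride_argb=*/width * 4,
                            y.data(), /*dst_stride_y=*/width,
                            u.data(), /*dst_stride_u=*/cw,
                            v.data(), /*dst_stride_v=*/cw,
                            width, height);
}
```

The scaling and rotation entry points follow the same shape; note that I420Scale takes a libyuv::FilterMode while I420Rotate takes the WebRTC VideoRotation enum, as the declarations above show.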
diff --git a/api/video_codecs/video_encoder_factory.h b/api/video_codecs/video_encoder_factory.h index 8986e920d9..0e0509c0b6 100644 --- a/api/video_codecs/video_encoder_factory.h +++ b/api/video_codecs/video_encoder_factory.h @@ -85,13 +85,8 @@ class VideoEncoderFactory { virtual CodecSupport QueryCodecSupport( const SdpVideoFormat& format, std::optional<std::string> scalability_mode) const { - // Default implementation, query for supported formats and check if the - // specified format is supported. Returns false if scalability_mode is - // specified. CodecSupport codec_support; - if (!scalability_mode) { - codec_support.is_supported = format.IsCodecInList(GetSupportedFormats()); - } + codec_support.is_supported = format.IsCodecInList(GetSupportedFormats()); return codec_support; } diff --git a/audio/audio_send_stream.cc b/audio/audio_send_stream.cc index 94991f76e4..275967e9ba 100644 --- a/audio/audio_send_stream.cc +++ b/audio/audio_send_stream.cc @@ -400,6 +400,11 @@ void AudioSendStream::SetMuted(bool muted) { channel_send_->SetInputMute(muted); } +bool AudioSendStream::GetMuted() { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + return channel_send_->InputMute(); +} + webrtc::AudioSendStream::Stats AudioSendStream::GetStats() const { return GetStats(true); } diff --git a/audio/audio_send_stream.h b/audio/audio_send_stream.h index c38456d5df..98914de28c 100644 --- a/audio/audio_send_stream.h +++ b/audio/audio_send_stream.h @@ -88,6 +88,7 @@ class AudioSendStream final : public webrtc::AudioSendStream, int payload_frequency, int event, int duration_ms) override; + bool GetMuted() override; void SetMuted(bool muted) override; webrtc::AudioSendStream::Stats GetStats() const override; webrtc::AudioSendStream::Stats GetStats(
diff --git a/audio/audio_state.cc b/audio/audio_state.cc index 57ae1a91b2..49f669cf07 100644 --- a/audio/audio_state.cc +++ b/audio/audio_state.cc @@ -135,11 +135,23 @@ void AudioState::AddSendingStream(webrtc::AudioSendStream* stream, UpdateAudioTransportWithSendingStreams(); // Make sure recording is initialized; start recording if enabled. - auto* adm = config_.audio_device_module.get(); - if (recording_enabled_) { + if (ShouldRecord()) { + auto* adm = config_.audio_device_module.get(); if (!adm->Recording()) { if (adm->InitRecording() == 0) { - adm->StartRecording(); + if (recording_enabled_) { + + // TODO: Verify if the following Windows-only logic is still required. +#if defined(WEBRTC_WIN) + if (adm->BuiltInAECIsAvailable() && !adm->Playing()) { + if (!adm->PlayoutIsInitialized()) { + adm->InitPlayout(); + } + adm->StartPlayout(); + } +#endif + adm->StartRecording(); + } } else { RTC_DLOG_F(LS_ERROR) << "Failed to initialize recording."; } @@ -152,7 +164,10 @@ void AudioState::RemoveSendingStream(webrtc::AudioSendStream* stream) { auto count = sending_streams_.erase(stream); RTC_DCHECK_EQ(1, count); UpdateAudioTransportWithSendingStreams(); - if (sending_streams_.empty()) { + + bool should_record = ShouldRecord(); + RTC_LOG(LS_INFO) << "RemoveSendingStream: should_record = " << should_record; + if (!should_record) { config_.audio_device_module->StopRecording(); } } @@ -208,6 +223,43 @@ void AudioState::UpdateNullAudioPollerState() { null_audio_poller_.Stop(); } } + +void AudioState::OnMuteStreamChanged() { + + auto* adm = config_.audio_device_module.get(); + bool should_record = ShouldRecord(); + + RTC_LOG(LS_INFO) << "OnMuteStreamChanged: should_record = " << should_record; + if (should_record && !adm->Recording()) { + if (adm->InitRecording() == 0) { + adm->StartRecording(); + } + } else if (!should_record && adm->Recording()) { + adm->StopRecording(); + } +} + +bool AudioState::ShouldRecord() { + RTC_LOG(LS_INFO) << "ShouldRecord"; + // no streams to send + if (sending_streams_.empty()) { + RTC_LOG(LS_INFO) << "ShouldRecord: send stream = empty"; + return false; + } + + int stream_count = sending_streams_.size(); + + int muted_count = 0; + for (const auto& kv : sending_streams_) { + if (kv.first->GetMuted()) { + muted_count++; + } + } + + RTC_LOG(LS_INFO) << "ShouldRecord: " << muted_count << " muted, " << stream_count << " sending"; + return muted_count != stream_count; +} + } // namespace internal scoped_refptr<AudioState> AudioState::Create(const AudioState::Config& config) { diff --git a/audio/audio_state.h b/audio/audio_state.h index 88aaaa3697..f21cca771e 100644 --- a/audio/audio_state.h +++ b/audio/audio_state.h @@ -47,6 +47,8 @@ class AudioState : public webrtc::AudioState { void SetStereoChannelSwapping(bool enable) override; + void OnMuteStreamChanged() override; + AudioDeviceModule* audio_device_module() { RTC_DCHECK(config_.audio_device_module); return config_.audio_device_module.get(); @@ -64,6 +66,9 @@ class AudioState : public webrtc::AudioState { void UpdateAudioTransportWithSendingStreams(); void UpdateNullAudioPollerState() RTC_RUN_ON(&thread_checker_); + // Returns true when at least one sending stream exists and not all of them + // are muted. + bool ShouldRecord(); + SequenceChecker thread_checker_; SequenceChecker process_thread_checker_{SequenceChecker::kDetached}; const webrtc::AudioState::Config config_;
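The capture gating added above reduces to one predicate: keep the microphone open only while at least one unmuted sending stream exists. A standalone restatement, in plain C++ for illustration only (the real code iterates sending_streams_ and calls GetMuted() on each stream, as the hunk shows):

```cpp
#include <vector>

// Illustrative model: one flag per sending stream, true == muted.
bool ShouldRecordSketch(const std::vector<bool>& stream_muted) {
  if (stream_muted.empty()) {
    return false;  // nothing is being sent, release the microphone
  }
  for (bool muted : stream_muted) {
    if (!muted) {
      return true;  // any unmuted sender keeps capture running
    }
  }
  return false;  // every sender is muted: stop recording
}
```

OnMuteStreamChanged() re-evaluates this whenever a send stream's mute state flips (see the WebRtcVoiceSendChannel::MuteStream hook later in this diff), which is what lets the fork avoid holding the OS microphone while all audio tracks are muted.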
diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc index cfbf843032..6fff0b324b 100644 --- a/audio/channel_receive.cc +++ b/audio/channel_receive.cc @@ -383,7 +383,6 @@ void ChannelReceive::OnReceivedPayloadData(ArrayView<const uint8_t> payload, void ChannelReceive::InitFrameTransformerDelegate( scoped_refptr<FrameTransformerInterface> frame_transformer) { RTC_DCHECK(frame_transformer); - RTC_DCHECK(!frame_transformer_delegate_); RTC_DCHECK(worker_thread_->IsCurrent()); // Pass a callback to ChannelReceive::OnReceivedPayloadData, to be called by @@ -935,21 +934,29 @@ void ChannelReceive::RtcpPacketTypesCounterUpdated( void ChannelReceive::SetDepacketizerToDecoderFrameTransformer( scoped_refptr<FrameTransformerInterface> frame_transformer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - if (!frame_transformer) { - RTC_DCHECK_NOTREACHED() << "Not setting the transformer?"; - return; - } - if (frame_transformer_delegate_) { - // Depending on when the channel is created, the transformer might be set - // twice. Don't replace the delegate if it was already initialized. - // TODO(crbug.com/webrtc/15674): Prevent multiple calls during - // reconfiguration. - RTC_CHECK_EQ(frame_transformer_delegate_->FrameTransformer(), - frame_transformer); - return; + + // Check if a reset is needed + if (frame_transformer_delegate_ && + frame_transformer_delegate_->FrameTransformer() != frame_transformer) { + frame_transformer_delegate_->Reset(); + frame_transformer_delegate_ = nullptr; + RTC_DLOG(LS_INFO) << "Frame transformer delegate has been reset."; } - InitFrameTransformerDelegate(std::move(frame_transformer)); + // Initialize the delegate if needed + if (frame_transformer_delegate_ && + frame_transformer_delegate_->FrameTransformer() == frame_transformer) { + RTC_DLOG(LS_INFO) + << "Frame transformer is already set to the provided transformer."; + } else { + if (!frame_transformer) { + RTC_DCHECK_NOTREACHED() << "Attempted to set a null frame transformer."; + } else { + RTC_DLOG(LS_INFO) << "Initializing frame transformer delegate with the " "new frame transformer."; + InitFrameTransformerDelegate(std::move(frame_transformer)); + } + } } void ChannelReceive::SetFrameDecryptor(
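The channel_receive.cc change above makes the depacketizer-to-decoder transformer setter re-entrant: installing a different transformer resets the existing delegate instead of tripping a DCHECK. A sketch of the calling pattern this unlocks; the set_transformer callback is a hypothetical stand-in for whatever path an application uses to reach this setter (for example through its RtpReceiver):

```cpp
#include <functional>

#include "api/frame_transformer_interface.h"
#include "api/scoped_refptr.h"

using Transformer = webrtc::scoped_refptr<webrtc::FrameTransformerInterface>;

// Demonstrates the three cases the new logic distinguishes.
void TransformerSwapSketch(
    const std::function<void(Transformer)>& set_transformer,
    Transformer first, Transformer second) {
  set_transformer(first);   // no delegate yet: a delegate is created
  set_transformer(second);  // different transformer: the old delegate is
                            // Reset() and replaced (new in this patch)
  set_transformer(second);  // same transformer again: logged, no-op
}
```

This matters for end-to-end encryption, where a frame cryptor may be attached or swapped after the channel already exists.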
diff --git a/audio/channel_send.cc b/audio/channel_send.cc index 52817f31ff..a71322ab30 100644 --- a/audio/channel_send.cc +++ b/audio/channel_send.cc @@ -163,6 +163,8 @@ class ChannelSend : public ChannelSendInterface, // Muting, Volume and Level. void SetInputMute(bool enable) override; + bool InputMute() const override; + // Stats. ANAStats GetANAStatistics() const override; @@ -235,8 +237,6 @@ class ChannelSend : public ChannelSendInterface, size_t payloadSize, int64_t absolute_capture_timestamp_ms) override; - bool InputMute() const; - int32_t SendRtpAudio(AudioFrameType frameType, uint8_t payloadType, uint32_t rtp_timestamp_without_offset, diff --git a/audio/channel_send.h b/audio/channel_send.h index 8991fc0f44..5748222f8c 100644 --- a/audio/channel_send.h +++ b/audio/channel_send.h @@ -89,6 +89,8 @@ class ChannelSendInterface { virtual bool SendTelephoneEventOutband(int event, int duration_ms) = 0; virtual void OnBitrateAllocation(BitrateAllocationUpdate update) = 0; virtual int GetTargetBitrate() const = 0; + + virtual bool InputMute() const = 0; virtual void SetInputMute(bool muted) = 0; virtual void ProcessAndEncodeAudio( diff --git a/build_xcframework_dynamic_livekit.sh b/build_xcframework_dynamic_livekit.sh new file mode 100755 index 0000000000..891b4ae2c0 --- /dev/null +++ b/build_xcframework_dynamic_livekit.sh @@ -0,0 +1,119 @@ +#!/bin/bash +if [ -z "$1" ]; then + echo "Usage: $0 'debug' | 'release'" + exit 1 +fi + +MODE=$1 +OUT_DIR=./out-$MODE +DEBUG="false" +if [ "$MODE" = "debug" ]; then + DEBUG="true" +fi + +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +RED='\033[0;31m' +NC='\033[0m' + +echo "build_xcframework_dynamic_livekit.sh: MODE=$MODE, DEBUG=$DEBUG" + +COMMON_ARGS=" + rtc_objc_prefix = \"LK\" + treat_warnings_as_errors = false + ios_enable_code_signing = false + is_component_build = false + rtc_enable_symbol_export = true + rtc_libvpx_build_vp9 = true + rtc_include_tests = false + rtc_build_examples = false + rtc_use_h264 = false + rtc_enable_protobuf = false + enable_libaom = true + rtc_include_dav1d_in_internal_decoder_factory = true + use_rtti = true + is_debug = $DEBUG + enable_dsyms = $DEBUG + enable_stripping = true" + +PLATFORMS=( + "tvOS-arm64-device:target_os=\"ios\" target_environment=\"appletv\" target_cpu=\"arm64\" ios_deployment_target=\"17.0\"" + "tvOS-arm64-simulator:target_os=\"ios\" target_environment=\"appletvsimulator\" target_cpu=\"arm64\" ios_deployment_target=\"17.0\"" + "xrOS-arm64-device:target_os=\"ios\" target_environment=\"xrdevice\" target_cpu=\"arm64\" ios_deployment_target=\"2.2.0\"" + "xrOS-arm64-simulator:target_os=\"ios\" target_environment=\"xrsimulator\" target_cpu=\"arm64\" ios_deployment_target=\"2.2.0\"" + "catalyst-arm64:target_os=\"ios\" target_environment=\"catalyst\" target_cpu=\"arm64\" ios_deployment_target=\"14.0\"" + "catalyst-x64:target_os=\"ios\" target_environment=\"catalyst\" target_cpu=\"x64\" ios_deployment_target=\"14.0\"" + "iOS-arm64-device:target_os=\"ios\" target_environment=\"device\" target_cpu=\"arm64\" ios_deployment_target=\"13.0\"" + "iOS-x64-simulator:target_os=\"ios\" target_environment=\"simulator\" target_cpu=\"x64\" ios_deployment_target=\"13.0\"" + "iOS-arm64-simulator:target_os=\"ios\" target_environment=\"simulator\" target_cpu=\"arm64\" ios_deployment_target=\"13.0\"" + "macOS-x64:target_os=\"mac\" target_cpu=\"x64\" mac_deployment_target=\"10.15\"" + "macOS-arm64:target_os=\"mac\" target_cpu=\"arm64\" mac_deployment_target=\"10.15\"" +) + +for platform_config in "${PLATFORMS[@]}"; do + platform="${platform_config%%:*}" + config="${platform_config#*:}" + + echo "Generating configuration for $platform..."
+ gn gen $OUT_DIR/$platform --args="$COMMON_ARGS $config" --ide=xcode + + if [[ $platform == *"macOS"* ]]; then + build_target="mac_framework_bundle" + else + build_target="ios_framework_bundle" + fi + + echo "${YELLOW}Building $platform...${NC}" + ninja -C $OUT_DIR/$platform $build_target -j 10 --quiet + if [ $? -ne 0 ]; then + echo "${RED}Build $platform failed${NC}" + exit 1 + fi + echo "${GREEN}Build $platform completed${NC}" +done + +rm -rf $OUT_DIR/*-lib $OUT_DIR/LiveKitWebRTC.* + +mkdir -p $OUT_DIR/macOS-lib +cp -R $OUT_DIR/macOS-x64/LiveKitWebRTC.framework $OUT_DIR/macOS-lib/LiveKitWebRTC.framework +lipo -create -output $OUT_DIR/macOS-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/macOS-arm64/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/macOS-x64/LiveKitWebRTC.framework/LiveKitWebRTC + +mkdir -p $OUT_DIR/catalyst-lib +cp -R $OUT_DIR/catalyst-arm64/LiveKitWebRTC.framework $OUT_DIR/catalyst-lib/LiveKitWebRTC.framework +lipo -create -output $OUT_DIR/catalyst-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/catalyst-arm64/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/catalyst-x64/LiveKitWebRTC.framework/LiveKitWebRTC + +mkdir -p $OUT_DIR/iOS-device-lib +cp -R $OUT_DIR/iOS-arm64-device/LiveKitWebRTC.framework $OUT_DIR/iOS-device-lib/LiveKitWebRTC.framework +lipo -create -output $OUT_DIR/iOS-device-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/iOS-arm64-device/LiveKitWebRTC.framework/LiveKitWebRTC + +mkdir -p $OUT_DIR/iOS-simulator-lib +cp -R $OUT_DIR/iOS-arm64-simulator/LiveKitWebRTC.framework $OUT_DIR/iOS-simulator-lib/LiveKitWebRTC.framework +lipo -create -output $OUT_DIR/iOS-simulator-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/iOS-arm64-simulator/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/iOS-x64-simulator/LiveKitWebRTC.framework/LiveKitWebRTC + +xcodebuild -create-xcframework \ + -framework $OUT_DIR/iOS-device-lib/LiveKitWebRTC.framework \ + -framework $OUT_DIR/iOS-simulator-lib/LiveKitWebRTC.framework \ + -framework $OUT_DIR/xrOS-arm64-device/LiveKitWebRTC.framework \ + -framework $OUT_DIR/xrOS-arm64-simulator/LiveKitWebRTC.framework \ + -framework $OUT_DIR/tvOS-arm64-device/LiveKitWebRTC.framework \ + -framework $OUT_DIR/tvOS-arm64-simulator/LiveKitWebRTC.framework \ + -framework $OUT_DIR/catalyst-lib/LiveKitWebRTC.framework \ + -framework $OUT_DIR/macOS-lib/LiveKitWebRTC.framework \ + -output $OUT_DIR/LiveKitWebRTC.xcframework + +cp ./src/LICENSE $OUT_DIR/LiveKitWebRTC.xcframework/ + +cd $OUT_DIR/LiveKitWebRTC.xcframework/macos-arm64_x86_64/LiveKitWebRTC.framework/ +mv LiveKitWebRTC Versions/A/LiveKitWebRTC +ln -s Versions/Current/LiveKitWebRTC LiveKitWebRTC +cd ../../../../ + +cd $OUT_DIR/LiveKitWebRTC.xcframework/ios-arm64_x86_64-maccatalyst/LiveKitWebRTC.framework/ +mv LiveKitWebRTC Versions/A/LiveKitWebRTC +ln -s Versions/Current/LiveKitWebRTC LiveKitWebRTC +cd ../../../ +zip --symlinks -9 -r LiveKitWebRTC.xcframework.zip LiveKitWebRTC.xcframework + +# hash +shasum -a 256 LiveKitWebRTC.xcframework.zip >LiveKitWebRTC.xcframework.zip.shasum +cat LiveKitWebRTC.xcframework.zip.shasum diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h index d1b3e64ba3..11f5ede921 100644 --- a/call/audio_send_stream.h +++ b/call/audio_send_stream.h @@ -193,6 +193,7 @@ class AudioSendStream : public AudioSender { int event, int duration_ms) = 0; + virtual bool GetMuted() = 0; virtual void SetMuted(bool muted) = 0; virtual Stats GetStats() const = 0; diff --git a/call/audio_state.h b/call/audio_state.h index d58b7ff97e..1e5b17c74e 100644 --- 
a/call/audio_state.h +++ b/call/audio_state.h @@ -58,6 +58,9 @@ class AudioState : public RefCountInterface { virtual void SetStereoChannelSwapping(bool enable) = 0; + // Notify the AudioState that a stream updated its mute state. + virtual void OnMuteStreamChanged() = 0; + static scoped_refptr<AudioState> Create(const AudioState::Config& config); ~AudioState() override {} diff --git a/call/rtp_transport_controller_send.cc b/call/rtp_transport_controller_send.cc index 992e3a436c..fd6a4580e8 100644 --- a/call/rtp_transport_controller_send.cc +++ b/call/rtp_transport_controller_send.cc @@ -308,6 +308,9 @@ void RtpTransportControllerSend::ReconfigureBandwidthEstimation( UpdateNetworkAvailability(); } } + pacer_.SetAllowProbeWithoutMediaPacket( + bwe_settings_.allow_probe_without_media && + packet_router_.SupportsRtxPayloadPadding()); } void RtpTransportControllerSend::RegisterTargetTransferRateObserver( diff --git a/media/base/media_channel.h b/media/base/media_channel.h index b1f026372a..6b7164fdf9 100644 --- a/media/base/media_channel.h +++ b/media/base/media_channel.h @@ -1015,6 +1015,8 @@ class VideoMediaReceiveChannelInterface : public MediaReceiveChannelInterface { std::optional<int> rtx_time) = 0; virtual bool AddDefaultRecvStreamForTesting( const webrtc::StreamParams& sp) = 0; + virtual void StartReceive(uint32_t ssrc) {} + virtual void StopReceive(uint32_t ssrc) {} }; } // namespace webrtc diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc index 3645bd054f..c3a6317783 100644 --- a/media/engine/webrtc_video_engine.cc +++ b/media/engine/webrtc_video_engine.cc @@ -3676,6 +3676,24 @@ void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::SetReceiverParameters( } } +void WebRtcVideoReceiveChannel::StartReceive(uint32_t ssrc) { + RTC_DCHECK_RUN_ON(&thread_checker_); + WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); + if (!stream) { + return; + } + stream->StartReceiveStream(); +} + +void WebRtcVideoReceiveChannel::StopReceive(uint32_t ssrc) { + RTC_DCHECK_RUN_ON(&thread_checker_); + WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); + if (!stream) { + return; + } + stream->StopReceiveStream(); +} + void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream:: RecreateReceiveStream() { RTC_DCHECK_RUN_ON(&thread_checker_); diff --git a/media/engine/webrtc_video_engine.h b/media/engine/webrtc_video_engine.h index 5d6bd6a847..8618bc2797 100644 --- a/media/engine/webrtc_video_engine.h +++ b/media/engine/webrtc_video_engine.h @@ -620,7 +620,8 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, bool nack_enabled, RtcpMode rtcp_mode, std::optional<int> rtx_time) override; - + void StartReceive(uint32_t ssrc) override; + void StopReceive(uint32_t ssrc) override; private: class WebRtcVideoReceiveStream; struct ChangedReceiverParameters { diff --git a/media/engine/webrtc_voice_engine.cc b/media/engine/webrtc_voice_engine.cc index cb48226931..9c7f1e8a98 100644 --- a/media/engine/webrtc_voice_engine.cc +++ b/media/engine/webrtc_voice_engine.cc @@ -604,7 +604,7 @@ void WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { // Use desktop AEC by default, when not using hardware AEC.
bool use_mobile_software_aec = false; -#if defined(WEBRTC_IOS) +#if defined(WEBRTC_IOS) && !TARGET_OS_SIMULATOR if (options.ios_force_software_aec_HACK && *options.ios_force_software_aec_HACK) { // EC may be forced on for a device known to have non-functioning platform @@ -622,7 +622,7 @@ void WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { #endif // Set and adjust gain control options. -#if defined(WEBRTC_IOS) +#if defined(WEBRTC_IOS) && !TARGET_OS_SIMULATOR // On iOS, VPIO provides built-in AGC. options.auto_gain_control = false; RTC_LOG(LS_INFO) << "Always disable AGC on iOS. Use built-in instead."; @@ -1751,6 +1751,9 @@ bool WebRtcVoiceSendChannel::MuteStream(uint32_t ssrc, bool muted) { ap->set_output_will_be_muted(all_muted); } + // Notify the AudioState that the mute state has updated. + engine_->audio_state()->OnMuteStreamChanged(); + return true; } diff --git a/modules/audio_device/audio_device_data_observer.cc b/modules/audio_device/audio_device_data_observer.cc index 0548f6cffc..9f4f5a343f 100644 --- a/modules/audio_device/audio_device_data_observer.cc +++ b/modules/audio_device/audio_device_data_observer.cc @@ -298,6 +298,10 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { } #endif // WEBRTC_IOS + int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const override { + return impl_->SetAudioDeviceSink(sink); + } + protected: scoped_refptr<AudioDeviceModule> impl_; std::unique_ptr<AudioDeviceDataObserver> observer_; diff --git a/modules/audio_device/audio_device_generic.h b/modules/audio_device/audio_device_generic.h index 3fa3ed9d8a..fdb318e3c7 100644 --- a/modules/audio_device/audio_device_generic.h +++ b/modules/audio_device/audio_device_generic.h @@ -135,6 +135,10 @@ class AudioDeviceGeneric { virtual int GetRecordAudioParameters(AudioParameters* params) const; #endif // WEBRTC_IOS + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink) { return -1; } + virtual int32_t GetPlayoutDevice() const { return -1; } + virtual int32_t GetRecordingDevice() const { return -1; } + virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0; virtual ~AudioDeviceGeneric() {} diff --git a/modules/audio_device/audio_device_impl.cc b/modules/audio_device/audio_device_impl.cc index 44cfabeddd..3b3b103dac 100644 --- a/modules/audio_device/audio_device_impl.cc +++ b/modules/audio_device/audio_device_impl.cc @@ -64,15 +64,17 @@ namespace webrtc { scoped_refptr<AudioDeviceModule> AudioDeviceModule::Create( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory) { + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing) { RTC_DLOG(LS_INFO) << __FUNCTION__; - return AudioDeviceModule::CreateForTest(audio_layer, task_queue_factory); + return AudioDeviceModule::CreateForTest(audio_layer, task_queue_factory, bypass_voice_processing); } // static scoped_refptr<AudioDeviceModuleForTest> AudioDeviceModule::CreateForTest( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory) { + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing) { RTC_DLOG(LS_INFO) << __FUNCTION__; // The "AudioDeviceModule::kWindowsCoreAudio2" audio layer has its own @@ -93,8 +95,8 @@ scoped_refptr<AudioDeviceModuleForTest> AudioDeviceModule::CreateForTest( } // Create the generic reference counted (platform independent) implementation. - auto audio_device = - make_ref_counted<AudioDeviceModuleImpl>(audio_layer, task_queue_factory); + auto audio_device = webrtc::make_ref_counted<AudioDeviceModuleImpl>( + audio_layer, task_queue_factory, bypass_voice_processing); // Ensure that the current platform is supported.
if (audio_device->CheckPlatform() == -1) { @@ -117,8 +119,13 @@ scoped_refptr<AudioDeviceModuleForTest> AudioDeviceModule::CreateForTest( AudioDeviceModuleImpl::AudioDeviceModuleImpl( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory) - : audio_layer_(audio_layer), audio_device_buffer_(task_queue_factory) { + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing) + : audio_layer_(audio_layer), +#if defined(WEBRTC_IOS) + bypass_voice_processing_(bypass_voice_processing), +#endif + audio_device_buffer_(task_queue_factory) { RTC_DLOG(LS_INFO) << __FUNCTION__; } @@ -242,7 +249,7 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() { #if defined(WEBRTC_IOS) if (audio_layer == kPlatformDefaultAudio) { audio_device_.reset(new ios_adm::AudioDeviceIOS( - /*bypass_voice_processing=*/false, + /*bypass_voice_processing=*/bypass_voice_processing_, /*muted_speech_event_handler=*/nullptr, /*render_error_handler=*/nullptr)); RTC_LOG(LS_INFO) << "iPhone Audio APIs will be utilized."; @@ -899,6 +906,27 @@ int AudioDeviceModuleImpl::GetRecordAudioParameters( } #endif // WEBRTC_IOS +int32_t AudioDeviceModuleImpl::SetAudioDeviceSink(AudioDeviceSink* sink) const { + RTC_LOG(LS_INFO) << __FUNCTION__ << "(" << sink << ")"; + int32_t ok = audio_device_->SetAudioDeviceSink(sink); + RTC_LOG(LS_INFO) << "output: " << ok; + return ok; +} + +int32_t AudioDeviceModuleImpl::GetPlayoutDevice() const { + RTC_LOG(LS_INFO) << __FUNCTION__; + int32_t r = audio_device_->GetPlayoutDevice(); + RTC_LOG(LS_INFO) << "output: " << r; + return r; +} + +int32_t AudioDeviceModuleImpl::GetRecordingDevice() const { + RTC_LOG(LS_INFO) << __FUNCTION__; + int32_t r = audio_device_->GetRecordingDevice(); + RTC_LOG(LS_INFO) << "output: " << r; + return r; +} + AudioDeviceModuleImpl::PlatformType AudioDeviceModuleImpl::Platform() const { RTC_LOG(LS_INFO) << __FUNCTION__; return platform_type_; }
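Combined with the bypass_voice_processing flag threaded through Create() above, applications can construct an ADM that skips iOS voice processing and receive device-change callbacks without polling. A minimal sketch, assuming the AudioDeviceModule interface exposes SetAudioDeviceSink() as the overrides in this patch indicate; the sink class and its lifetime handling are illustrative:

```cpp
#include "api/audio/audio_device.h"
#include "api/task_queue/task_queue_factory.h"

// Re-enumerate devices whenever the OS reports a change (default-device
// switch, hot plug/unplug). OnDevicesUpdated() is the only callback on
// the new AudioDeviceSink interface.
class DeviceChangeLogger : public webrtc::AudioDeviceSink {
 public:
  void OnDevicesUpdated() override {
    // e.g. re-run PlayoutDevices()/RecordingDevices() and refresh UI lists.
  }
};

void CreateAdmWithSink(webrtc::TaskQueueFactory* task_queue_factory) {
  auto adm = webrtc::AudioDeviceModule::Create(
      webrtc::AudioDeviceModule::kPlatformDefaultAudio, task_queue_factory,
      /*bypass_voice_processing=*/true);  // only has an effect on iOS
  static DeviceChangeLogger sink;  // must outlive the registration
  adm->SetAudioDeviceSink(&sink);
}
```

On macOS and Windows the sink is driven by the listener plumbing added later in this diff: CoreAudio property listeners and an IMMNotificationClient, respectively.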
diff --git a/modules/audio_device/audio_device_impl.h b/modules/audio_device/audio_device_impl.h index 41d68209e8..826b48f328 100644 --- a/modules/audio_device/audio_device_impl.h +++ b/modules/audio_device/audio_device_impl.h @@ -47,7 +47,8 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { int32_t AttachAudioBuffer(); AudioDeviceModuleImpl(AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory); + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing = false); // If `create_detached` is true, created ADM can be used on another thread // compared to the one on which it was created. It's useful for testing. AudioDeviceModuleImpl(AudioLayer audio_layer, @@ -155,6 +156,10 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { int GetRecordAudioParameters(AudioParameters* params) const override; #endif // WEBRTC_IOS + int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const override; + int32_t GetPlayoutDevice() const override; + int32_t GetRecordingDevice() const override; + AudioDeviceBuffer* GetAudioDeviceBuffer() { return &audio_device_buffer_; } int RestartPlayoutInternally() override { return -1; } @@ -169,6 +174,9 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { AudioLayer audio_layer_; PlatformType platform_type_ = kPlatformNotSupported; bool initialized_ = false; +#if defined(WEBRTC_IOS) + bool bypass_voice_processing_; +#endif AudioDeviceBuffer audio_device_buffer_; std::unique_ptr<AudioDeviceGeneric> audio_device_; }; diff --git a/modules/audio_device/mac/audio_device_mac.cc b/modules/audio_device/mac/audio_device_mac.cc index 0d21a986f0..399f546181 100644 --- a/modules/audio_device/mac/audio_device_mac.cc +++ b/modules/audio_device/mac/audio_device_mac.cc @@ -120,8 +120,6 @@ AudioDeviceMac::AudioDeviceMac() _twoDevices(true), _doStop(false), _doStopRec(false), - _macBookPro(false), - _macBookProPanRight(false), _captureLatencyUs(0), _renderLatencyUs(0), _captureDelayUs(0), @@ -278,8 +276,11 @@ AudioDeviceGeneric::InitStatus AudioDeviceMac::Init() { // but now must be explicitly specified. HAL would otherwise try to use the // main thread to issue notifications. AudioObjectPropertyAddress propertyAddress = { - kAudioHardwarePropertyRunLoop, kAudioObjectPropertyScopeGlobal, - kAudioObjectPropertyElementMaster}; + kAudioHardwarePropertyRunLoop, + kAudioObjectPropertyScopeGlobal, + kAudioObjectPropertyElementMaster + }; + CFRunLoopRef runLoop = NULL; UInt32 size = sizeof(CFRunLoopRef); int aoerr = AudioObjectSetPropertyData( @@ -295,22 +296,15 @@ AudioDeviceGeneric::InitStatus AudioDeviceMac::Init() { WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener( kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); - // Determine if this is a MacBook Pro - _macBookPro = false; - _macBookProPanRight = false; - char buf[128]; - size_t length = sizeof(buf); - memset(buf, 0, length); + // Listen for default output device change. + propertyAddress.mSelector = kAudioHardwarePropertyDefaultOutputDevice; + WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); - int intErr = sysctlbyname("hw.model", buf, &length, NULL, 0); - if (intErr != 0) { - RTC_LOG(LS_ERROR) << "Error in sysctlbyname(): " << err; - } else { - RTC_LOG(LS_VERBOSE) << "Hardware model: " << buf; - if (strncmp(buf, "MacBookPro", 10) == 0) { - _macBookPro = true; - } - } + // Listen for default input device change. + propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice; + WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); _initialized = true; @@ -338,9 +332,21 @@ int32_t AudioDeviceMac::Terminate() { OSStatus err = noErr; int retVal = 0; + // Remove listeners for global scope.
AudioObjectPropertyAddress propertyAddress = { - kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, - kAudioObjectPropertyElementMaster}; + kAudioHardwarePropertyDevices, // selector + kAudioObjectPropertyScopeGlobal, // scope + kAudioObjectPropertyElementMaster // element + }; + + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); + + propertyAddress.mSelector = kAudioHardwarePropertyDefaultOutputDevice; + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); + + propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice; WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); @@ -790,6 +796,14 @@ int16_t AudioDeviceMac::PlayoutDevices() { MaxNumberDevices); } +int32_t AudioDeviceMac::GetPlayoutDevice() const { + if (_outputDeviceIsSpecified) { + return _outputDeviceIndex; + } + + return 0; +} + int32_t AudioDeviceMac::SetPlayoutDevice(uint16_t index) { MutexLock lock(&mutex_); @@ -831,13 +845,11 @@ int32_t AudioDeviceMac::PlayoutDeviceName(uint16_t index, } memset(name, 0, kAdmMaxDeviceNameSize); - - if (guid != NULL) { - memset(guid, 0, kAdmMaxGuidSize); - } + memset(guid, 0, kAdmMaxGuidSize); return GetDeviceName(kAudioDevicePropertyScopeOutput, index, - webrtc::ArrayView(name, kAdmMaxDeviceNameSize)); + rtc::ArrayView(name, kAdmMaxDeviceNameSize), + rtc::ArrayView(guid, kAdmMaxGuidSize)); } int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index, @@ -856,7 +868,8 @@ int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index, } return GetDeviceName(kAudioDevicePropertyScopeInput, index, - webrtc::ArrayView(name, kAdmMaxDeviceNameSize)); + rtc::ArrayView(name, kAdmMaxDeviceNameSize), + rtc::ArrayView(guid, kAdmMaxGuidSize)); } int16_t AudioDeviceMac::RecordingDevices() { @@ -865,6 +878,14 @@ int16_t AudioDeviceMac::RecordingDevices() { MaxNumberDevices); } +int32_t AudioDeviceMac::GetRecordingDevice() const { + if (_inputDeviceIsSpecified) { + return _inputDeviceIndex; + } + + return 0; +} + int32_t AudioDeviceMac::SetRecordingDevice(uint16_t index) { if (_recIsInitialized) { return -1; @@ -980,34 +1001,8 @@ int32_t AudioDeviceMac::InitPlayout() { _renderDeviceIsAlive = 1; _doStop = false; - // The internal microphone of a MacBook Pro is located under the left speaker - // grille. When the internal speakers are in use, we want to fully stereo - // pan to the right. AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyDataSource, kAudioDevicePropertyScopeOutput, 0}; - if (_macBookPro) { - _macBookProPanRight = false; - Boolean hasProperty = - AudioObjectHasProperty(_outputDeviceID, &propertyAddress); - if (hasProperty) { - UInt32 dataSource = 0; - size = sizeof(dataSource); - WEBRTC_CA_LOG_WARN(AudioObjectGetPropertyData( - _outputDeviceID, &propertyAddress, 0, NULL, &size, &dataSource)); - - if (dataSource == 'ispk') { - _macBookProPanRight = true; - RTC_LOG(LS_VERBOSE) - << "MacBook Pro using internal speakers; stereo panning right"; - } else { - RTC_LOG(LS_VERBOSE) << "MacBook Pro not using internal speakers"; - } - - // Add a listener to determine if the status changes. 
- WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener( - _outputDeviceID, &propertyAddress, &objectListenerProc, this)); - } - } // Get current stream description propertyAddress.mSelector = kAudioDevicePropertyStreamFormat; @@ -1392,7 +1387,11 @@ int32_t AudioDeviceMac::StopRecording() { // Remove listeners. AudioObjectPropertyAddress propertyAddress = { - kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeInput, 0}; + kAudioDevicePropertyStreamFormat, // selector + kAudioDevicePropertyScopeInput, // scope + 0, // element + }; + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( _inputDeviceID, &propertyAddress, &objectListenerProc, this)); @@ -1506,7 +1505,11 @@ int32_t AudioDeviceMac::StopPlayout() { // Remove listeners. AudioObjectPropertyAddress propertyAddress = { - kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeOutput, 0}; + kAudioDevicePropertyStreamFormat, // selector + kAudioDevicePropertyScopeOutput, // scope + 0, // element + }; + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( _outputDeviceID, &propertyAddress, &objectListenerProc, this)); @@ -1514,16 +1517,6 @@ int32_t AudioDeviceMac::StopPlayout() { WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( _outputDeviceID, &propertyAddress, &objectListenerProc, this)); - if (_macBookPro) { - Boolean hasProperty = - AudioObjectHasProperty(_outputDeviceID, &propertyAddress); - if (hasProperty) { - propertyAddress.mSelector = kAudioDevicePropertyDataSource; - WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( - _outputDeviceID, &propertyAddress, &objectListenerProc, this)); - } - } - _playIsInitialized = false; _playing = false; @@ -1551,8 +1544,11 @@ int32_t AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope, OSStatus err = noErr; AudioObjectPropertyAddress propertyAddress = { - kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, - kAudioObjectPropertyElementMaster}; + kAudioHardwarePropertyDevices, + kAudioObjectPropertyScopeGlobal, + kAudioObjectPropertyElementMaster, + }; + UInt32 size = 0; WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyDataSize( kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size)); @@ -1651,7 +1647,8 @@ int32_t AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope, int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, const uint16_t index, - webrtc::ArrayView name) { + rtc::ArrayView name, + rtc::ArrayView guid) { OSStatus err = noErr; AudioDeviceID deviceIds[MaxNumberDevices]; @@ -1688,10 +1685,9 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, isDefaultDevice = true; } } - AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceName, scope, 0}; - + rtc::SimpleStringBuilder guid_ss(guid); if (isDefaultDevice) { std::array devName; UInt32 len = devName.size(); @@ -1701,6 +1697,7 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, webrtc::SimpleStringBuilder ss(name); ss.AppendFormat("default (%s)", devName.data()); + guid_ss << "default"; } else { if (index < numberDevices) { usedID = deviceIds[index]; @@ -1708,7 +1705,7 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, usedID = index; } UInt32 len = name.size(); - + guid_ss << std::to_string(deviceIds[index]); WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData( usedID, &propertyAddress, 0, NULL, &len, name.data())); } @@ -1919,6 +1916,66 @@ OSStatus AudioDeviceMac::implObjectListenerProc( HandleDataSourceChange(objectId, addresses[i]); } else if 
(addresses[i].mSelector == kAudioDeviceProcessorOverload) { HandleProcessorOverload(addresses[i]); + } else if (addresses[i].mSelector == kAudioHardwarePropertyDefaultOutputDevice) { + RTC_LOG(LS_VERBOSE) << "kAudioHardwarePropertyDefaultOutputDevice"; + // default audio output device changed + HandleDefaultOutputDeviceChange(); + } else if (addresses[i].mSelector == kAudioHardwarePropertyDefaultInputDevice) { + RTC_LOG(LS_VERBOSE) << "kAudioHardwarePropertyDefaultInputDevice"; + // default audio input device changed + HandleDefaultInputDeviceChange(); + } + } + + return 0; +} + +int32_t AudioDeviceMac::HandleDefaultOutputDeviceChange() { + + if (SpeakerIsInitialized()) { + RTC_LOG(LS_WARNING) << "Default audio output device has changed"; + int32_t renderDeviceIsAlive = _renderDeviceIsAlive; + bool wasPlaying = _playing && renderDeviceIsAlive == 1; + + if (wasPlaying && _outputDeviceIsSpecified && _outputDeviceIndex == 0) { + + StopPlayout(); + + // default is already selected _outputDeviceIndex(0) + // re-init and start playout + InitPlayout(); + StartPlayout(); + } + + // Notify default output device updated + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); + } + } + + return 0; +} + +int32_t AudioDeviceMac::HandleDefaultInputDeviceChange() { + + if (MicrophoneIsInitialized()) { + RTC_LOG(LS_WARNING) << "Default audio input device has changed"; + int32_t captureDeviceIsAlive = _captureDeviceIsAlive; + bool wasRecording = _recording && captureDeviceIsAlive == 1; + + if (wasRecording && _inputDeviceIsSpecified && _inputDeviceIndex == 0) { + + StopRecording(); + + // default is already selected _inputDeviceIndex(0) + // re-init and start recording + InitRecording(); + StartRecording(); + } + + // Notify default input device updated + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); } } @@ -1941,9 +1998,29 @@ int32_t AudioDeviceMac::HandleDeviceChange() { &size, &deviceIsAlive); if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) { - RTC_LOG(LS_WARNING) << "Capture device is not alive (probably removed)"; - _captureDeviceIsAlive = 0; - _mixerManager.CloseMicrophone(); + RTC_LOG(LS_WARNING) << "Audio input device is not alive (probably removed) deviceID: " << _inputDeviceID; + //AtomicSet32(&_captureDeviceIsAlive, 0); + + // Logic to switch to default device (if exists) + // when the current device is not alive anymore + int32_t captureDeviceIsAlive = _captureDeviceIsAlive; + bool wasRecording = _recording && captureDeviceIsAlive == 1; + + StopRecording(); + + // was recording & default device exists + if (wasRecording && SetRecordingDevice(0) == 0) { + InitRecording(); + StartRecording(); + } else { + _mixerManager.CloseMicrophone(); + } + + // Notify input device removed + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); + } + } else if (err != noErr) { logCAMsg(webrtc::LS_ERROR, "Error in AudioDeviceGetProperty()", (const char*)&err); @@ -1960,9 +2037,29 @@ int32_t AudioDeviceMac::HandleDeviceChange() { &size, &deviceIsAlive); if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) { - RTC_LOG(LS_WARNING) << "Render device is not alive (probably removed)"; - _renderDeviceIsAlive = 0; - _mixerManager.CloseSpeaker(); + RTC_LOG(LS_WARNING) << "Audio output device is not alive (probably removed) deviceID: " << _outputDeviceID; + // AtomicSet32(&_renderDeviceIsAlive, 0); // StopPlayout() does this + + // Logic to switch to default device (if exists) + // when the current device is
not alive anymore + int32_t renderDeviceIsAlive = _renderDeviceIsAlive; + bool wasPlaying = _playing && renderDeviceIsAlive == 1; + + StopPlayout(); + + // was playing & default device exists + if (wasPlaying && SetPlayoutDevice(0) == 0) { + InitPlayout(); + StartPlayout(); + } else { + _mixerManager.CloseSpeaker(); + } + + // Notify output device removed + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); + } + } else if (err != noErr) { logCAMsg(webrtc::LS_ERROR, "Error in AudioDeviceGetProperty()", (const char*)&err); @@ -2065,28 +2162,10 @@ int32_t AudioDeviceMac::HandleStreamFormatChange( int32_t AudioDeviceMac::HandleDataSourceChange( const AudioObjectID objectId, const AudioObjectPropertyAddress propertyAddress) { - OSStatus err = noErr; - - if (_macBookPro && - propertyAddress.mScope == kAudioDevicePropertyScopeOutput) { - RTC_LOG(LS_VERBOSE) << "Data source changed"; - - _macBookProPanRight = false; - UInt32 dataSource = 0; - UInt32 size = sizeof(UInt32); - WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData( - objectId, &propertyAddress, 0, NULL, &size, &dataSource)); - if (dataSource == 'ispk') { - _macBookProPanRight = true; - RTC_LOG(LS_VERBOSE) - << "MacBook Pro using internal speakers; stereo panning right"; - } else { - RTC_LOG(LS_VERBOSE) << "MacBook Pro not using internal speakers"; - } - } return 0; } + int32_t AudioDeviceMac::HandleProcessorOverload( const AudioObjectPropertyAddress propertyAddress) { // TODO(xians): we probably want to notify the user in some way of the @@ -2404,24 +2483,6 @@ bool AudioDeviceMac::RenderWorkerThread() { uint32_t nOutSamples = nSamples * _outDesiredFormat.mChannelsPerFrame; SInt16* pPlayBuffer = (SInt16*)&playBuffer; - if (_macBookProPanRight && (_playChannels == 2)) { - // Mix entirely into the right channel and zero the left channel. 
- SInt32 sampleInt32 = 0; - for (uint32_t sampleIdx = 0; sampleIdx < nOutSamples; sampleIdx += 2) { - sampleInt32 = pPlayBuffer[sampleIdx]; - sampleInt32 += pPlayBuffer[sampleIdx + 1]; - sampleInt32 /= 2; - - if (sampleInt32 > 32767) { - sampleInt32 = 32767; - } else if (sampleInt32 < -32768) { - sampleInt32 = -32768; - } - - pPlayBuffer[sampleIdx] = 0; - pPlayBuffer[sampleIdx + 1] = static_cast(sampleInt32); - } - } PaUtil_WriteRingBuffer(_paRenderBuffer, pPlayBuffer, nOutSamples); diff --git a/modules/audio_device/mac/audio_device_mac.h b/modules/audio_device/mac/audio_device_mac.h index 6c07d62f01..c8c3fb5d0a 100644 --- a/modules/audio_device/mac/audio_device_mac.h +++ b/modules/audio_device/mac/audio_device_mac.h @@ -154,6 +154,13 @@ class AudioDeviceMac : public AudioDeviceGeneric { virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) RTC_LOCKS_EXCLUDED(mutex_); + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink) RTC_LOCKS_EXCLUDED(mutex_) { + audio_device_module_sink_ = sink; + return 0; + } + virtual int32_t GetPlayoutDevice() const; + virtual int32_t GetRecordingDevice() const; + private: int32_t InitSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); int32_t InitMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); @@ -180,7 +187,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { int32_t GetDeviceName(AudioObjectPropertyScope scope, uint16_t index, - webrtc::ArrayView name); + rtc::ArrayView name, + rtc::ArrayView guid); int32_t InitDevice(uint16_t userDeviceIndex, AudioDeviceID& deviceId, @@ -201,6 +209,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { const AudioObjectPropertyAddress addresses[]); int32_t HandleDeviceChange(); + int32_t HandleDefaultOutputDeviceChange(); + int32_t HandleDefaultInputDeviceChange(); int32_t HandleStreamFormatChange(AudioObjectID objectId, AudioObjectPropertyAddress propertyAddress); @@ -341,6 +351,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { // Typing detection // 0x5c is key "9", after that comes function keys. bool prev_key_state_[0x5d]; + + AudioDeviceSink *audio_device_module_sink_ = nullptr; }; } // namespace webrtc diff --git a/modules/audio_device/win/audio_device_core_win.cc b/modules/audio_device/win/audio_device_core_win.cc index 015a0a394f..b07f287c4a 100644 --- a/modules/audio_device/win/audio_device_core_win.cc +++ b/modules/audio_device/win/audio_device_core_win.cc @@ -461,6 +461,8 @@ AudioDeviceWindowsCore::AudioDeviceWindowsCore() _playChannelsPrioList[0] = 2; // stereo is prio 1 _playChannelsPrioList[1] = 1; // mono is prio 2 + _deviceStateListener = new DeviceStateListener(); + HRESULT hr; // We know that this API will work since it has already been verified in @@ -474,6 +476,8 @@ AudioDeviceWindowsCore::AudioDeviceWindowsCore() reinterpret_cast(&_ptrEnumerator)); RTC_DCHECK(_ptrEnumerator); + _ptrEnumerator->RegisterEndpointNotificationCallback(_deviceStateListener); + // DMO initialization for built-in WASAPI AEC. { IMediaObject* ptrDMO = NULL; @@ -499,6 +503,8 @@ AudioDeviceWindowsCore::~AudioDeviceWindowsCore() { Terminate(); + _ptrEnumerator->UnregisterEndpointNotificationCallback(_deviceStateListener); + // The IMMDeviceEnumerator is created during construction. Must release // it here and not in Terminate() since we don't recreate it in Init(). 
SAFE_RELEASE(_ptrEnumerator); @@ -535,6 +541,11 @@ AudioDeviceWindowsCore::~AudioDeviceWindowsCore() { _hShutdownCaptureEvent = NULL; } + if(NULL != _deviceStateListener) { + delete _deviceStateListener; + _deviceStateListener = NULL; + } + if (_avrtLibrary) { BOOL freeOK = FreeLibrary(_avrtLibrary); if (!freeOK) { @@ -3894,6 +3905,65 @@ int32_t AudioDeviceWindowsCore::_GetDeviceID(IMMDevice* pDevice, return 0; } +int32_t AudioDeviceWindowsCore::SetAudioDeviceSink(AudioDeviceSink* sink) { + _deviceStateListener->SetAudioDeviceSink(sink); + return 0; +} + +void AudioDeviceWindowsCore::DeviceStateListener::SetAudioDeviceSink(AudioDeviceSink *sink) { + callback_ = sink; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDeviceStateChanged(LPCWSTR pwstrDeviceId, DWORD dwNewState) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDeviceStateChanged => " << pwstrDeviceId << ", NewState => " << dwNewState; + if(callback_) callback_->OnDevicesUpdated(); + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDeviceAdded(LPCWSTR pwstrDeviceId) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDeviceAdded => " << pwstrDeviceId; + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDeviceRemoved(LPCWSTR pwstrDeviceId) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDeviceRemoved => " << pwstrDeviceId; + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDefaultDeviceChanged(EDataFlow flow, ERole role, LPCWSTR pwstrDefaultDeviceId) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDefaultDeviceChanged => " << pwstrDefaultDeviceId; + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnPropertyValueChanged(LPCWSTR pwstrDeviceId, const PROPERTYKEY key) { + //RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnPropertyValueChanged => " << pwstrDeviceId; + return S_OK; +} + +ULONG AudioDeviceWindowsCore::DeviceStateListener::AddRef() { + ULONG new_ref = InterlockedIncrement(&ref_count_); + // RTC_DLOG(LS_INFO) << "__AddRef => " << new_ref; + return new_ref; +} + +ULONG AudioDeviceWindowsCore::DeviceStateListener::Release() { + ULONG new_ref = InterlockedDecrement(&ref_count_); + // RTC_DLOG(LS_INFO) << "__Release => " << new_ref; + return new_ref; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::QueryInterface(REFIID iid, void** object) { + if (object == nullptr) { + return E_POINTER; + } + if (iid == IID_IUnknown || iid == __uuidof(IMMNotificationClient)) { + *object = static_cast(this); + return S_OK; + } + *object = nullptr; + return E_NOINTERFACE; +} + // ---------------------------------------------------------------------------- // _GetDefaultDevice // ---------------------------------------------------------------------------- diff --git a/modules/audio_device/win/audio_device_core_win.h b/modules/audio_device/win/audio_device_core_win.h index d09bed9939..31cf3d200d 100644 --- a/modules/audio_device/win/audio_device_core_win.h +++ b/modules/audio_device/win/audio_device_core_win.h @@ -22,6 +22,8 @@ #include #include // IMediaObject #include // MMDevice +#include +#include #include "api/scoped_refptr.h" #include "modules/audio_device/audio_device_generic.h" @@ -50,6 +52,34 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { AudioDeviceWindowsCore(); ~AudioDeviceWindowsCore(); + class DeviceStateListener : public IMMNotificationClient { + public: + virtual ~DeviceStateListener() = default; + HRESULT __stdcall OnDeviceStateChanged(LPCWSTR pwstrDeviceId, + DWORD dwNewState) 
override; + HRESULT __stdcall OnDeviceAdded(LPCWSTR pwstrDeviceId) override; + + HRESULT __stdcall OnDeviceRemoved(LPCWSTR pwstrDeviceId) override; + + HRESULT + __stdcall OnDefaultDeviceChanged(EDataFlow flow, + ERole role, + LPCWSTR pwstrDefaultDeviceId) override; + + HRESULT __stdcall OnPropertyValueChanged(LPCWSTR pwstrDeviceId, + const PROPERTYKEY key) override; + // IUnknown (required by IMMNotificationClient). + ULONG __stdcall AddRef() override; + ULONG __stdcall Release() override; + HRESULT __stdcall QueryInterface(REFIID iid, void** object) override; + + void SetAudioDeviceSink(AudioDeviceSink *sink); + + private: + LONG ref_count_ = 1; + AudioDeviceSink *callback_ = nullptr; + }; + static bool CoreAudioIsSupported(); // Retrieve the currently utilized audio layer @@ -150,6 +180,8 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { virtual int32_t EnableBuiltInAEC(bool enable); + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink); + public: virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); @@ -237,6 +269,7 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { IAudioEndpointVolume* _ptrCaptureVolume; ISimpleAudioVolume* _ptrRenderSimpleVolume; + DeviceStateListener *_deviceStateListener = nullptr; // DirectX Media Object (DMO) for the built-in AEC. webrtc::scoped_refptr _dmo; webrtc::scoped_refptr _mediaBuffer; diff --git a/modules/congestion_controller/goog_cc/probe_controller.cc b/modules/congestion_controller/goog_cc/probe_controller.cc index bf8253ded5..f585b63ad1 100644 --- a/modules/congestion_controller/goog_cc/probe_controller.cc +++ b/modules/congestion_controller/goog_cc/probe_controller.cc @@ -30,6 +30,7 @@ #include "rtc_base/experiments/field_trial_parser.h" #include "rtc_base/logging.h" #include "system_wrappers/include/metrics.h" +#include "modules/rtp_rtcp/source/rtp_video_header.h" namespace webrtc { @@ -286,21 +287,6 @@ std::vector ProbeController::OnNetworkAvailability( return std::vector(); } -void ProbeController::UpdateState(State new_state) { - switch (new_state) { - case State::kInit: - state_ = State::kInit; - break; - case State::kWaitingForProbingResult: - state_ = State::kWaitingForProbingResult; - break; - case State::kProbingComplete: - state_ = State::kProbingComplete; - min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); - break; - } -} - std::vector ProbeController::InitiateExponentialProbing( Timestamp at_time) { RTC_DCHECK(network_available_); @@ -506,7 +492,8 @@ std::vector ProbeController::Process(Timestamp at_time) { kMaxWaitingTimeForProbingResult) { if (state_ == State::kWaitingForProbingResult) { RTC_LOG(LS_INFO) << "kWaitingForProbingResult: timeout"; - UpdateState(State::kProbingComplete); + state_ = State::kProbingComplete; + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); } } if (estimated_bitrate_.IsZero() || state_ != State::kProbingComplete) { @@ -565,7 +552,8 @@ std::vector ProbeController::InitiateProbing( max_bitrate_); if (std::min(network_estimate, estimated_bitrate_) > config_.skip_if_estimate_larger_than_fraction_of_max * max_probe_rate) { - UpdateState(State::kProbingComplete); + state_ = State::kProbingComplete; + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); return {}; } } @@ -623,13 +611,14 @@ std::vector ProbeController::InitiateProbing( } time_last_probing_initiated_ = now; if (probe_further) { - UpdateState(State::kWaitingForProbingResult); + state_ = State::kWaitingForProbingResult; // Dont expect probe results to be larger than a fraction of the actual 
// probe rate. min_bitrate_to_probe_further_ = pending_probes.back().target_data_rate * config_.further_probe_threshold; } else { - UpdateState(State::kProbingComplete); + state_ = State::kProbingComplete; + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); } return pending_probes; } diff --git a/modules/congestion_controller/goog_cc/probe_controller.h b/modules/congestion_controller/goog_cc/probe_controller.h index 9c9c39ecad..de8d7088ed 100644 --- a/modules/congestion_controller/goog_cc/probe_controller.h +++ b/modules/congestion_controller/goog_cc/probe_controller.h @@ -173,7 +173,6 @@ class ProbeController { kProbingComplete, }; - void UpdateState(State new_state); ABSL_MUST_USE_RESULT std::vector InitiateExponentialProbing(Timestamp at_time); ABSL_MUST_USE_RESULT std::vector InitiateProbing( diff --git a/modules/congestion_controller/goog_cc/probe_controller_unittest.cc b/modules/congestion_controller/goog_cc/probe_controller_unittest.cc index 39712ad4f6..08cc22927f 100644 --- a/modules/congestion_controller/goog_cc/probe_controller_unittest.cc +++ b/modules/congestion_controller/goog_cc/probe_controller_unittest.cc @@ -328,32 +328,6 @@ TEST(ProbeControllerTest, TestExponentialProbing) { EXPECT_EQ(probes[0].target_data_rate.bps(), 2 * 1800); } -TEST(ProbeControllerTest, ExponentialProbingStopIfMaxBitrateLow) { - ProbeControllerFixture fixture( - "WebRTC-Bwe-ProbingConfiguration/abort_further:true/"); - std::unique_ptr probe_controller = - fixture.CreateController(); - ASSERT_THAT( - probe_controller->OnNetworkAvailability({.network_available = true}), - IsEmpty()); - auto probes = probe_controller->SetBitrates( - kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); - ASSERT_THAT(probes, SizeIs(Gt(0))); - - // Repeated probe normally is sent when estimated bitrate climbs above - // 0.7 * 6 * kStartBitrate = 1260. But since max bitrate is low, expect - // exponential probing to stop. - probes = probe_controller->SetBitrates(kMinBitrate, kStartBitrate, - /*max_bitrate=*/kStartBitrate, - fixture.CurrentTime()); - EXPECT_THAT(probes, IsEmpty()); - - probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1800), BandwidthLimitedCause::kDelayBasedLimited, - fixture.CurrentTime()); - EXPECT_THAT(probes, IsEmpty()); -} - TEST(ProbeControllerTest, ExponentialProbingStopIfMaxAllocatedBitrateLow) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/abort_further:true/"); diff --git a/modules/desktop_capture/mac/screen_capturer_mac.mm b/modules/desktop_capture/mac/screen_capturer_mac.mm index 5f36971ea9..8257239d81 100644 --- a/modules/desktop_capture/mac/screen_capturer_mac.mm +++ b/modules/desktop_capture/mac/screen_capturer_mac.mm @@ -224,16 +224,7 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, ScreenConfigurationChanged(); } - // When screen is zoomed in/out, OSX only updates the part of Rects currently - // displayed on screen, with relative location to current top-left on screen. - // This will cause problems when we copy the dirty regions to the captured - // image. So we invalidate the whole screen to copy all the screen contents. - // With CGI method, the zooming will be ignored and the whole screen contents - // will be captured as before. - // With IOSurface method, the zoomed screen contents will be captured. 
-  if (UAZoomEnabled()) {
-    helper_.InvalidateScreen(screen_pixel_bounds_.size());
-  }
+  helper_.InvalidateScreen(screen_pixel_bounds_.size());

   DesktopRegion region;
   helper_.TakeInvalidRegion(&region);
diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
index 2d08bd6e8f..99cd3f0ec8 100644
--- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
+++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc
@@ -108,6 +108,8 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface {
     csrcs_ = metadata.GetCsrcs();
   }

+  const RTPVideoHeader& header() const override { return header_; }
+
   const RTPVideoHeader& GetHeader() const { return header_; }
   uint8_t GetPayloadType() const override { return payload_type_; }
   std::optional<VideoCodecType> GetCodecType() const { return codec_type_; }
diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc
index 7b5f480aa1..cc2ced9185 100644
--- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc
+++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc
@@ -89,6 +89,10 @@ class TransformableVideoReceiverFrame
     frame_->SetHeaderFromMetadata(new_metadata);
   }

+  const RTPVideoHeader& header() const override {
+    return frame_->GetRtpVideoHeader();
+  }
+
   std::unique_ptr<RtpFrameObject> ExtractFrame() && {
     return std::move(frame_);
   }
diff --git a/pc/audio_rtp_receiver.cc b/pc/audio_rtp_receiver.cc
index 10bd129390..9694274320 100644
--- a/pc/audio_rtp_receiver.cc
+++ b/pc/audio_rtp_receiver.cc
@@ -312,7 +312,7 @@ void AudioRtpReceiver::Reconfigure(bool track_enabled) {
     media_channel_->SetFrameDecryptor(*signaled_ssrc_, frame_decryptor_);
   }

-  if (frame_transformer_) {
+  if (frame_transformer_ && track_enabled) {
     media_channel_->SetDepacketizerToDecoderFrameTransformer(
         signaled_ssrc_.value_or(0), frame_transformer_);
   }
diff --git a/pc/media_stream_track_proxy.h b/pc/media_stream_track_proxy.h
index 8775ecd344..f409caa78a 100644
--- a/pc/media_stream_track_proxy.h
+++ b/pc/media_stream_track_proxy.h
@@ -55,6 +55,8 @@ PROXY_SECONDARY_METHOD2(void,
 PROXY_SECONDARY_METHOD1(void, RemoveSink, VideoSinkInterface*)
 PROXY_SECONDARY_METHOD0(void, RequestRefreshFrame)
 BYPASS_PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource)
+PROXY_CONSTMETHOD0(bool, should_receive)
+PROXY_METHOD1(void, set_should_receive, bool)
 PROXY_METHOD1(void, RegisterObserver, ObserverInterface*)
 PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*)

diff --git a/pc/peer_connection.cc b/pc/peer_connection.cc
index 74c4c4531e..485a4ed2ef 100644
--- a/pc/peer_connection.cc
+++ b/pc/peer_connection.cc
@@ -414,6 +414,7 @@ bool PeerConnectionInterface::RTCConfiguration::operator==(
     std::vector<SocketAddress> vpn_list;
     PortAllocatorConfig port_allocator_config;
     std::optional<TimeDelta> pacer_burst_interval;
+    bool enable_any_address_ports;
   };
   static_assert(sizeof(stuff_being_tested_for_equality) == sizeof(*this),
                 "Did you add something to RTCConfiguration and forget to "
@@ -475,7 +476,8 @@ bool PeerConnectionInterface::RTCConfiguration::operator==(
          port_allocator_config.min_port == o.port_allocator_config.min_port &&
          port_allocator_config.max_port == o.port_allocator_config.max_port &&
          port_allocator_config.flags == o.port_allocator_config.flags &&
-         pacer_burst_interval == o.pacer_burst_interval;
+         pacer_burst_interval == o.pacer_burst_interval &&
+         enable_any_address_ports == o.enable_any_address_ports;
 }

 bool PeerConnectionInterface::RTCConfiguration::operator!=(
@@ -2150,6 +2152,11 @@ PeerConnection::InitializePortAllocator_n(
     RTC_LOG(LS_INFO) << "Disable candidates on link-local network interfaces.";
   }

+  if (configuration.enable_any_address_ports) {
+    port_allocator_flags |= cricket::PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS;
+    RTC_LOG(LS_INFO) << "Enable gathering on any address ports.";
+  }
+
   port_allocator_->set_flags(port_allocator_flags);
   // No step delay is used while allocating ports.
   port_allocator_->set_step_delay(kMinimumStepDelay);
diff --git a/pc/video_rtp_receiver.cc b/pc/video_rtp_receiver.cc
index 0e18ade560..2f8fefb57e 100644
--- a/pc/video_rtp_receiver.cc
+++ b/pc/video_rtp_receiver.cc
@@ -59,15 +59,20 @@ VideoRtpReceiver::VideoRtpReceiver(
           Thread::Current(),
           worker_thread,
           VideoTrack::Create(receiver_id, source_, worker_thread))),
-      attachment_id_(GenerateUniqueId()) {
+      cached_track_should_receive_(track_->should_receive()),
+      attachment_id_(GenerateUniqueId()),
+      worker_thread_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()) {
   RTC_DCHECK(worker_thread_);
   SetStreams(streams);
+  track_->RegisterObserver(this);
   RTC_DCHECK_EQ(source_->state(), MediaSourceInterface::kInitializing);
 }

 VideoRtpReceiver::~VideoRtpReceiver() {
   RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
   RTC_DCHECK(!media_channel_);
+
+  track_->UnregisterObserver(this);
 }

 std::vector<std::string> VideoRtpReceiver::stream_ids() const {
@@ -131,6 +136,39 @@ void VideoRtpReceiver::Stop() {
   track_->internal()->set_ended();
 }

+void VideoRtpReceiver::OnChanged() {
+  RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+  if (cached_track_should_receive_ != track_->should_receive()) {
+    cached_track_should_receive_ = track_->should_receive();
+    worker_thread_->PostTask(
+        [this, receive = cached_track_should_receive_]() {
+          RTC_DCHECK_RUN_ON(worker_thread_);
+          if (receive) {
+            StartMediaChannel();
+          } else {
+            StopMediaChannel();
+          }
+        });
+  }
+}
+
+void VideoRtpReceiver::StartMediaChannel() {
+  RTC_DCHECK_RUN_ON(worker_thread_);
+  if (!media_channel_) {
+    return;
+  }
+  media_channel_->StartReceive(signaled_ssrc_.value_or(0));
+  OnGenerateKeyFrame();
+}
+
+void VideoRtpReceiver::StopMediaChannel() {
+  RTC_DCHECK_RUN_ON(worker_thread_);
+  if (!media_channel_) {
+    return;
+  }
+  media_channel_->StopReceive(signaled_ssrc_.value_or(0));
+}
+
 void VideoRtpReceiver::RestartMediaChannel(std::optional<uint32_t> ssrc) {
   RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
   MediaSourceInterface::SourceState state = source_->state();
@@ -226,6 +264,7 @@ void VideoRtpReceiver::set_transport(
 void VideoRtpReceiver::SetStreams(
     const std::vector<scoped_refptr<MediaStreamInterface>>& streams) {
   RTC_DCHECK_RUN_ON(&signaling_thread_checker_);
+  // Remove remote track from any streams that are going away.
   for (const auto& existing_stream : streams_) {
     bool removed = true;
diff --git a/pc/video_rtp_receiver.h b/pc/video_rtp_receiver.h
index 5bd2e6a766..7579e65e2a 100644
--- a/pc/video_rtp_receiver.h
+++ b/pc/video_rtp_receiver.h
@@ -26,6 +26,7 @@
 #include "api/rtp_receiver_interface.h"
 #include "api/scoped_refptr.h"
 #include "api/sequence_checker.h"
+#include "api/task_queue/pending_task_safety_flag.h"
 #include "api/transport/rtp/rtp_source.h"
 #include "api/video/video_frame.h"
 #include "api/video/video_sink_interface.h"
@@ -42,7 +43,8 @@

 namespace webrtc {

-class VideoRtpReceiver : public RtpReceiverInternal {
+class VideoRtpReceiver : public RtpReceiverInternal,
+                         public ObserverInterface {
  public:
   // An SSRC of 0 will create a receiver that will match the first SSRC it
   // sees. Must be called on signaling thread.
@@ -60,6 +62,9 @@ class VideoRtpReceiver : public RtpReceiverInternal {

   scoped_refptr<VideoTrackInterface> video_track() const { return track_; }

+  // ObserverInterface implementation
+  void OnChanged() override;
+
   // RtpReceiverInterface implementation
   scoped_refptr<MediaStreamTrackInterface> track() const override {
     return track_;
@@ -112,6 +117,8 @@ class VideoRtpReceiver : public RtpReceiverInternal {
       MediaReceiveChannelInterface* media_channel);

  private:
+  void StartMediaChannel();
+  void StopMediaChannel();
   void RestartMediaChannel(std::optional<uint32_t> ssrc)
       RTC_RUN_ON(&signaling_thread_checker_);
   void RestartMediaChannel_w(std::optional<uint32_t> ssrc,
@@ -158,6 +165,8 @@ class VideoRtpReceiver : public RtpReceiverInternal {
       RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr;
   bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) =
       false;
+
+  bool cached_track_should_receive_ RTC_GUARDED_BY(&signaling_thread_checker_);
   const int attachment_id_;
   scoped_refptr<FrameDecryptorInterface> frame_decryptor_
       RTC_GUARDED_BY(worker_thread_);
@@ -173,6 +182,7 @@ class VideoRtpReceiver : public RtpReceiverInternal {
   // or switched.
   bool saved_generate_keyframe_ RTC_GUARDED_BY(worker_thread_) = false;
   bool saved_encoded_sink_enabled_ RTC_GUARDED_BY(worker_thread_) = false;
+  const webrtc::scoped_refptr<PendingTaskSafetyFlag> worker_thread_safety_;
 };

 }  // namespace webrtc
diff --git a/pc/video_track.cc b/pc/video_track.cc
index ad2ce051cc..b0fe2453a1 100644
--- a/pc/video_track.cc
+++ b/pc/video_track.cc
@@ -86,6 +86,19 @@ VideoTrackSourceInterface* VideoTrack::GetSourceInternal() const {
   return video_source_->internal();
 }

+void VideoTrack::set_should_receive(bool receive) {
+  RTC_DCHECK_RUN_ON(&signaling_thread_);
+  if (should_receive_ == receive)
+    return;
+  should_receive_ = receive;
+  Notifier<VideoTrackInterface>::FireOnChanged();
+}
+
+bool VideoTrack::should_receive() const {
+  RTC_DCHECK_RUN_ON(&signaling_thread_);
+  return should_receive_;
+}
+
 VideoTrackInterface::ContentHint VideoTrack::content_hint() const {
   RTC_DCHECK_RUN_ON(&signaling_thread_);
   return content_hint_;
diff --git a/pc/video_track.h b/pc/video_track.h
index 4c415505e4..bd9377b900 100644
--- a/pc/video_track.h
+++ b/pc/video_track.h
@@ -48,6 +48,9 @@ class VideoTrack : public MediaStreamTrack<VideoTrackInterface>,
   void RequestRefreshFrame() override;
   VideoTrackSourceInterface* GetSource() const override;

+  void set_should_receive(bool should_receive) override;
+  bool should_receive() const override;
+
   ContentHint content_hint() const override;
   void set_content_hint(ContentHint hint) override;
   bool set_enabled(bool enable) override;
@@ -81,6 +84,7 @@ class VideoTrack : public MediaStreamTrack<VideoTrackInterface>,
   // be queried without blocking on the worker thread by callers that don't
   // use an api proxy to call the `enabled()` method.
bool enabled_w_ RTC_GUARDED_BY(worker_thread_) = true; + bool should_receive_ RTC_GUARDED_BY(signaling_thread_) = true; }; } // namespace webrtc diff --git a/rtc_base/socket_address.cc b/rtc_base/socket_address.cc index 1e7d9418b8..6511024c11 100644 --- a/rtc_base/socket_address.cc +++ b/rtc_base/socket_address.cc @@ -155,7 +155,10 @@ std::string SocketAddress::HostAsSensitiveURIString() const { } std::string SocketAddress::PortAsString() const { - return std::to_string(port_); +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + return rtc::ToString(port_); +#pragma clang diagnostic pop } std::string SocketAddress::ToString() const { diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index e76b68c92e..a710d600d6 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -119,6 +119,7 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrame.mm", "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", + "objc/base/RTCAudioRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", ] @@ -143,6 +144,8 @@ if (is_ios || is_mac) { "objc/helpers/RTCDispatcher+Private.h", "objc/helpers/RTCDispatcher.h", "objc/helpers/RTCDispatcher.m", + "objc/helpers/RTCYUVHelper.h", + "objc/helpers/RTCYUVHelper.mm", "objc/helpers/scoped_cftyperef.h", ] @@ -150,6 +153,7 @@ if (is_ios || is_mac) { ":base_objc", "../rtc_base:checks", "//third_party/abseil-cpp/absl/strings:string_view", + "//third_party/libyuv", ] frameworks = [ @@ -167,11 +171,15 @@ if (is_ios || is_mac) { if (is_ios) { sources += [ - "objc/helpers/RTCCameraPreviewView.h", - "objc/helpers/RTCCameraPreviewView.m", "objc/helpers/UIDevice+RTCDevice.h", "objc/helpers/UIDevice+RTCDevice.mm", ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + sources += [ + "objc/helpers/RTCCameraPreviewView.h", + "objc/helpers/RTCCameraPreviewView.m", + ] + } frameworks += [ "UIKit.framework" ] } } @@ -269,11 +277,14 @@ if (is_ios || is_mac) { deps = [ ":base_objc", ":helpers_objc", - ":metal_objc", ":opengl_objc", ":videocapture_objc", ":videoframebuffer_objc", ] + + if (is_ios && (target_environment != "xrdevice" && target_environment != "xrsimulator")) { + deps += [ ":metal_objc" ] + } } rtc_library("audio_device") { @@ -404,7 +415,12 @@ if (is_ios || is_mac) { "objc/components/network/RTCNetworkMonitor.mm", ] - configs += [ ":used_from_extension" ] + configs += [ + "..:common_objc", + ":used_from_extension", + ] + + public_configs = [ ":common_config_objc" ] frameworks = [ "Network.framework" ] @@ -622,17 +638,13 @@ if (is_ios || is_mac) { "Metal.framework", "MetalKit.framework", ] - if (is_ios) { + if (is_mac || (is_ios && !(target_environment == "xrsimulator" || target_environment == "xrdevice"))) { sources += [ "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLVideoView.m", ] } if (is_mac) { - sources += [ - "objc/components/renderer/metal/RTCMTLNSVideoView.h", - "objc/components/renderer/metal/RTCMTLNSVideoView.m", - ] frameworks += [ "AppKit.framework" ] } deps = [ @@ -668,9 +680,13 @@ if (is_ios || is_mac) { sources = [ "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCCameraVideoCapturer.m", - "objc/components/capturer/RTCFileVideoCapturer.h", - "objc/components/capturer/RTCFileVideoCapturer.m", ] + if (is_ios && (target_environment != "xrdevice" && target_environment != "xrsimulator")) { + sources += [ + "objc/components/capturer/RTCFileVideoCapturer.h", + "objc/components/capturer/RTCFileVideoCapturer.m", + ] + } frameworks = [ 
"AVFoundation.framework", "CoreVideo.framework", @@ -689,6 +705,43 @@ if (is_ios || is_mac) { ] } + rtc_library("desktopcapture_objc") { + visibility = [ "*" ] + sources = [ + "objc/components/capturer/RTCDesktopCapturer+Private.h", + "objc/components/capturer/RTCDesktopCapturer.h", + "objc/components/capturer/RTCDesktopCapturer.mm", + "objc/components/capturer/RTCDesktopSource+Private.h", + "objc/components/capturer/RTCDesktopSource.h", + "objc/components/capturer/RTCDesktopSource.mm", + "objc/components/capturer/RTCDesktopMediaList+Private.h", + "objc/components/capturer/RTCDesktopMediaList.h", + "objc/components/capturer/RTCDesktopMediaList.mm", + "objc/native/src/objc_desktop_capture.h", + "objc/native/src/objc_desktop_capture.mm", + "objc/native/src/objc_desktop_media_list.h", + "objc/native/src/objc_desktop_media_list.mm", + ] + frameworks = [ + "AppKit.framework", + ] + + configs += [ "..:common_objc" ] + + public_configs = [ ":common_config_objc" ] + + deps = [ + ":base_objc", + ":helpers_objc", + ":videoframebuffer_objc", + "../rtc_base/system:gcd_helpers", + "../modules/desktop_capture", + ] + if (is_mac) { + deps += [ "//third_party:jpeg", ] + } + } + rtc_library("videocodec_objc") { visibility = [ "*" ] configs += [ "..:no_global_constructors" ] @@ -728,6 +781,7 @@ if (is_ios || is_mac) { ] deps = [ + ":simulcast", ":base_objc", ":native_video", ":videocodec_objc", @@ -835,6 +889,22 @@ if (is_ios || is_mac) { ] } + rtc_library("simulcast") { + sources = [ + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.mm", + ] + + deps = [ + ":base_objc", + ":wrapped_native_codec_objc", + "../media:rtc_media_base", + "../media:rtc_simulcast_encoder_adapter", + ] + } + rtc_library("mediaconstraints_objc") { configs += [ "..:no_global_constructors" ] sources = [ @@ -883,6 +953,25 @@ if (is_ios || is_mac) { ] } + rtc_library("audiorendereradapter_objc") { + visibility = [ "*" ] + sources = [ + "objc/api/RTCAudioRendererAdapter+Private.h", + "objc/api/RTCAudioRendererAdapter.h", + "objc/api/RTCAudioRendererAdapter.mm", + ] + + configs += [ "..:common_objc" ] + public_configs = [ ":common_config_objc" ] + + deps = [ + ":base_objc", + ":native_api", + "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + ] + } + rtc_library("mediasource_objc") { sources = [ "objc/api/peerconnection/RTCMediaSource+Private.h", @@ -938,6 +1027,11 @@ if (is_ios || is_mac) { ] configs += [ "..:no_global_constructors" ] sources = [ + "objc/api/peerconnection/RTCAudioDeviceModule.h", + "objc/api/peerconnection/RTCAudioDeviceModule+Private.h", + "objc/api/peerconnection/RTCAudioDeviceModule.mm", + "objc/api/peerconnection/RTCIODevice.h", + "objc/api/peerconnection/RTCIODevice.mm", "objc/api/peerconnection/RTCAudioSource+Private.h", "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioSource.mm", @@ -963,6 +1057,12 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCDtmfSender.mm", "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCFieldTrials.mm", + "objc/api/peerconnection/RTCFrameCryptor+Private.h", + "objc/api/peerconnection/RTCFrameCryptor.h", + "objc/api/peerconnection/RTCFrameCryptor.mm", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", + 
"objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm", "objc/api/peerconnection/RTCIceCandidate+Private.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidate.mm", @@ -1053,6 +1153,20 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCVideoTrack+Private.h", "objc/api/peerconnection/RTCVideoTrack.h", "objc/api/peerconnection/RTCVideoTrack.mm", + "objc/components/audio/RTCAudioBuffer.h", + "objc/components/audio/RTCAudioBuffer.mm", + "objc/components/audio/RTCAudioBuffer+Private.h", + "objc/components/audio/RTCAudioCustomProcessingAdapter.h", + "objc/components/audio/RTCAudioCustomProcessingAdapter.mm", + "objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h", + "objc/components/audio/RTCAudioCustomProcessingDelegate.h", + "objc/components/audio/RTCAudioProcessingConfig.h", + "objc/components/audio/RTCAudioProcessingConfig.mm", + "objc/components/audio/RTCAudioProcessingConfig+Private.h", + "objc/components/audio/RTCAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.mm", + "objc/components/audio/RTCDefaultAudioProcessingModule+Private.h", ] configs += [ @@ -1074,8 +1188,10 @@ if (is_ios || is_mac) { ":objc_audio_device_module", ":videoframebuffer_objc", ":videorendereradapter_objc", + ":audiorendereradapter_objc", ":videosource_objc", ":videotoolbox_objc", + "../api/crypto:frame_crypto_transformer", "../api:dtmf_sender_interface", "../api:enable_media", "../api:field_trials_view", @@ -1183,7 +1299,6 @@ if (is_ios || is_mac) { ":framework_objc", ":helpers_objc", ":mediaconstraints_objc", - ":metal_objc", ":native_api", ":native_api_audio_device_module", ":native_video", @@ -1220,7 +1335,11 @@ if (is_ios || is_mac) { "../test:wait_until", "//third_party/libyuv", ] - + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + deps += [ + ":metal_objc", + ] + } if (rtc_ios_use_opengl_rendering) { deps += [ ":opengl_objc" ] } @@ -1280,6 +1399,13 @@ if (is_ios || is_mac) { } } + bundle_data("darwin_privacy_info") { + sources = [ + "objc/PrivacyInfo.xcprivacy", + ] + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } + if (is_ios) { apple_framework_bundle_with_umbrella_header("framework_objc") { info_plist = "objc/Info.plist" @@ -1305,6 +1431,7 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrame.h", "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", + "objc/base/RTCAudioRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", "objc/components/audio/RTCAudioDevice.h", "objc/components/audio/RTCAudioSession.h", @@ -1312,7 +1439,6 @@ if (is_ios || is_mac) { "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCFileVideoCapturer.h", "objc/components/network/RTCNetworkMonitor.h", - "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/opengl/RTCEAGLVideoView.h", "objc/components/renderer/opengl/RTCVideoViewShading.h", "objc/components/video_codec/RTCCodecSpecificInfoH264.h", @@ -1324,14 +1450,18 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoEncoderFactoryH264.h", "objc/components/video_codec/RTCVideoEncoderH264.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", - "objc/helpers/RTCCameraPreviewView.h", "objc/helpers/RTCDispatcher.h", + "objc/helpers/RTCYUVHelper.h", "objc/helpers/UIDevice+RTCDevice.h", + "objc/api/peerconnection/RTCAudioDeviceModule.h", + "objc/api/peerconnection/RTCIODevice.h", "objc/api/peerconnection/RTCAudioSource.h", 
"objc/api/peerconnection/RTCAudioTrack.h", "objc/api/peerconnection/RTCConfiguration.h", "objc/api/peerconnection/RTCDataChannel.h", "objc/api/peerconnection/RTCDataChannelConfiguration.h", + "objc/api/peerconnection/RTCFrameCryptor.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidateErrorEvent.h", @@ -1376,8 +1506,23 @@ if (is_ios || is_mac) { "objc/api/video_codec/RTCVideoEncoderAV1.h", "objc/api/video_frame_buffer/RTCNativeI420Buffer.h", "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h", + # Added for Simulcast support + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", + "objc/components/audio/RTCAudioBuffer.h", + "objc/components/audio/RTCAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.h", + "objc/components/audio/RTCAudioCustomProcessingDelegate.h", + "objc/components/audio/RTCAudioProcessingConfig.h", ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + common_objc_headers += [ + "objc/helpers/RTCCameraPreviewView.h", + "objc/components/renderer/metal/RTCMTLVideoView.h", + ] + } + if (!build_with_chromium) { common_objc_headers += [ "objc/api/logging/RTCCallbackLogger.h", @@ -1398,14 +1543,17 @@ if (is_ios || is_mac) { ":audio_objc", ":base_objc", ":default_codec_factory_objc", - ":metal_objc", ":native_api", ":native_video", ":peerconnectionfactory_base_objc", ":videocapture_objc", ":videocodec_objc", ":videotoolbox_objc", + ":darwin_privacy_info", ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + deps += [ ":metal_objc" ] + } if (!build_with_chromium) { deps += [ ":callback_logger_objc", @@ -1442,6 +1590,8 @@ if (is_ios || is_mac) { output_name = "WebRTC" sources = [ + "objc/api/peerconnection/RTCAudioDeviceModule.h", + "objc/api/peerconnection/RTCIODevice.h", "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioTrack.h", "objc/api/peerconnection/RTCCertificate.h", @@ -1450,6 +1600,8 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCDataChannel.h", "objc/api/peerconnection/RTCDataChannelConfiguration.h", "objc/api/peerconnection/RTCDtmfSender.h", + "objc/api/peerconnection/RTCFrameCryptor.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidateErrorEvent.h", @@ -1509,9 +1661,14 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrame.h", "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", + "objc/base/RTCAudioRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCFileVideoCapturer.h", + "objc/components/capturer/RTCDesktopCapturer.h", + "objc/components/capturer/RTCDesktopSource.h", + "objc/components/capturer/RTCDesktopMediaList.h", + "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLNSVideoView.h", "objc/components/renderer/opengl/RTCVideoViewShading.h", "objc/components/video_codec/RTCCodecSpecificInfoH264.h", @@ -1524,6 +1681,15 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoEncoderH264.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCDispatcher.h", + "objc/helpers/RTCYUVHelper.h", + # Added for Simulcast support + 
"objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", + "objc/components/audio/RTCAudioBuffer.h", + "objc/components/audio/RTCAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.h", + "objc/components/audio/RTCAudioCustomProcessingDelegate.h", + "objc/components/audio/RTCAudioProcessingConfig.h", ] if (!build_with_chromium) { sources += [ @@ -1540,9 +1706,14 @@ if (is_ios || is_mac) { ":native_video", ":peerconnectionfactory_base_objc", ":videocapture_objc", + ":desktopcapture_objc", ":videocodec_objc", ":videotoolbox_objc", + ":darwin_privacy_info", ] + if (is_ios && (target_environment != "xrdevice" && target_environment != "xrsimulator")) { + deps += [ ":metal_objc" ] + } if (!build_with_chromium) { deps += [ ":callback_logger_objc", @@ -1779,6 +1950,7 @@ if (is_ios || is_mac) { "CoreMedia.framework", "CoreVideo.framework", "VideoToolbox.framework", + "Accelerate.framework", ] } } diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index 6d8992f8b4..ece05a2df3 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -274,12 +274,18 @@ if (is_android) { "api/org/webrtc/AudioProcessingFactory.java", "api/org/webrtc/AudioSource.java", "api/org/webrtc/AudioTrack.java", + "api/org/webrtc/AudioTrackSink.java", "api/org/webrtc/CallSessionFileRotatingLogSink.java", "api/org/webrtc/CandidatePairChangeEvent.java", "api/org/webrtc/CryptoOptions.java", "api/org/webrtc/DataChannel.java", "api/org/webrtc/DtmfSender.java", + "api/org/webrtc/ExternalAudioProcessingFactory.java", "api/org/webrtc/FecControllerFactoryFactoryInterface.java", + "api/org/webrtc/FrameCryptor.java", + "api/org/webrtc/FrameCryptorAlgorithm.java", + "api/org/webrtc/FrameCryptorFactory.java", + "api/org/webrtc/FrameCryptorKeyProvider.java", "api/org/webrtc/FrameDecryptor.java", "api/org/webrtc/FrameEncryptor.java", "api/org/webrtc/IceCandidate.java", @@ -380,6 +386,7 @@ if (is_android) { sources = [ "api/org/webrtc/DefaultVideoDecoderFactory.java", "api/org/webrtc/DefaultVideoEncoderFactory.java", + "api/org/webrtc/WrappedVideoDecoderFactory.java", ] deps = [ @@ -555,6 +562,8 @@ if (is_android) { sources = [ "api/org/webrtc/SoftwareVideoDecoderFactory.java", "api/org/webrtc/SoftwareVideoEncoderFactory.java", + "api/org/webrtc/SimulcastVideoEncoder.java", + "api/org/webrtc/SimulcastVideoEncoderFactory.java", ] deps = [ @@ -728,6 +737,8 @@ if (current_os == "linux" || is_android) { "src/jni/pc/add_ice_candidate_observer.cc", "src/jni/pc/add_ice_candidate_observer.h", "src/jni/pc/android_network_monitor.h", + "src/jni/pc/audio_sink.cc", + "src/jni/pc/audio_sink.h", "src/jni/pc/audio_track.cc", "src/jni/pc/call_session_file_rotating_log_sink.cc", "src/jni/pc/crypto_options.cc", @@ -735,6 +746,15 @@ if (current_os == "linux" || is_android) { "src/jni/pc/data_channel.cc", "src/jni/pc/data_channel.h", "src/jni/pc/dtmf_sender.cc", + "src/jni/pc/external_audio_processing_factory.cc", + "src/jni/pc/external_audio_processing_factory.h", + "src/jni/pc/external_audio_processing_interface.h", + "src/jni/pc/external_audio_processor.cc", + "src/jni/pc/external_audio_processor.h", + "src/jni/pc/frame_cryptor.cc", + "src/jni/pc/frame_cryptor.h", + "src/jni/pc/frame_cryptor_key_provider.cc", + "src/jni/pc/frame_cryptor_key_provider.h", "src/jni/pc/ice_candidate.cc", "src/jni/pc/ice_candidate.h", "src/jni/pc/media_constraints.cc", @@ -798,6 +818,7 @@ if (current_os == "linux" || is_android) { "../../api/audio:audio_device", 
"../../api/audio:audio_processing", "../../api/audio:builtin_audio_processing_builder", + "../../api/crypto:frame_crypto_transformer", "../../api/crypto:options", "../../api/rtc_event_log:rtc_event_log_factory", "../../api/task_queue:default_task_queue_factory", @@ -913,6 +934,21 @@ if (current_os == "linux" || is_android) { ] } + rtc_library("simulcast_jni") { + visibility = [ "*" ] + allow_poison = [ "software_video_codecs" ] + sources = [ + "src/jni/simulcast_video_encoder.cc", + "src/jni/simulcast_video_encoder.h" + ] + deps = [ + ":base_jni", + ":video_jni", + ":native_api_codecs", + "../../media:rtc_simulcast_encoder_adapter" + ] + } + rtc_library("swcodecs_jni") { visibility = [ "*" ] allow_poison = [ "software_video_codecs" ] @@ -926,6 +962,7 @@ if (current_os == "linux" || is_android) { ":libvpx_vp8_jni", ":libvpx_vp9_jni", ":native_api_jni", + ":simulcast_jni", ":video_jni", "../../api/environment", "../../api/video_codecs:builtin_video_decoder_factory", @@ -1420,11 +1457,16 @@ if (current_os == "linux" || is_android) { sources = [ "api/org/webrtc/AddIceObserver.java", "api/org/webrtc/AudioTrack.java", + "api/org/webrtc/AudioTrackSink.java", "api/org/webrtc/CallSessionFileRotatingLogSink.java", "api/org/webrtc/CandidatePairChangeEvent.java", "api/org/webrtc/CryptoOptions.java", "api/org/webrtc/DataChannel.java", "api/org/webrtc/DtmfSender.java", + "api/org/webrtc/ExternalAudioProcessingFactory.java", + "api/org/webrtc/FrameCryptor.java", + "api/org/webrtc/FrameCryptorFactory.java", + "api/org/webrtc/FrameCryptorKeyProvider.java", "api/org/webrtc/IceCandidate.java", "api/org/webrtc/IceCandidateErrorEvent.java", "api/org/webrtc/MediaConstraints.java", diff --git a/sdk/android/api/org/webrtc/AudioTrack.java b/sdk/android/api/org/webrtc/AudioTrack.java index ca745db634..b30e46cebc 100644 --- a/sdk/android/api/org/webrtc/AudioTrack.java +++ b/sdk/android/api/org/webrtc/AudioTrack.java @@ -10,8 +10,12 @@ package org.webrtc; +import java.util.IdentityHashMap; + /** Java wrapper for a C++ AudioTrackInterface */ public class AudioTrack extends MediaStreamTrack { + private final IdentityHashMap sinks = new IdentityHashMap(); + public AudioTrack(long nativeTrack) { super(nativeTrack); } @@ -23,10 +27,54 @@ public void setVolume(double volume) { nativeSetVolume(getNativeAudioTrack(), volume); } + /** + * Adds an AudioTrackSink to the track. This callback is only + * called for remote audio tracks. + * + * Repeated addSink calls will not add the sink multiple times. + */ + public void addSink(AudioTrackSink sink) { + if (sink == null) { + throw new IllegalArgumentException("The AudioTrackSink is not allowed to be null"); + } + if (!sinks.containsKey(sink)) { + final long nativeSink = nativeWrapSink(sink); + sinks.put(sink, nativeSink); + nativeAddSink(getNativeMediaStreamTrack(), nativeSink); + } + } + + /** + * Removes an AudioTrackSink from the track. + * + * If the AudioTrackSink was not attached to the track, this is a no-op. + */ + public void removeSink(AudioTrackSink sink) { + final Long nativeSink = sinks.remove(sink); + if (nativeSink != null) { + nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink); + nativeFreeSink(nativeSink); + } + } + + @Override + public void dispose() { + for (long nativeSink : sinks.values()) { + nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink); + nativeFreeSink(nativeSink); + } + sinks.clear(); + super.dispose(); + } + /** Returns a pointer to webrtc::AudioTrackInterface. 
*/ long getNativeAudioTrack() { return getNativeMediaStreamTrack(); } private static native void nativeSetVolume(long track, double volume); + private static native void nativeAddSink(long track, long nativeSink); + private static native void nativeRemoveSink(long track, long nativeSink); + private static native long nativeWrapSink(AudioTrackSink sink); + private static native void nativeFreeSink(long sink); } diff --git a/sdk/android/api/org/webrtc/AudioTrackSink.java b/sdk/android/api/org/webrtc/AudioTrackSink.java new file mode 100644 index 0000000000..eca390f82c --- /dev/null +++ b/sdk/android/api/org/webrtc/AudioTrackSink.java @@ -0,0 +1,27 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +/** + * Java version of rtc::AudioTrackSinkInterface. + */ +public interface AudioTrackSink { + /** + * Implementations should copy the audio data into a local copy if they wish + * to use the data after this function returns. + */ + @CalledByNative + void onData(ByteBuffer audioData, int bitsPerSample, int sampleRate, + int numberOfChannels, int numberOfFrames, + long absoluteCaptureTimestampMs); +} diff --git a/sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java b/sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java new file mode 100644 index 0000000000..7425d2af57 --- /dev/null +++ b/sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java @@ -0,0 +1,144 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +import androidx.annotation.Nullable; +import org.webrtc.AudioProcessingFactory; + + +public class ExternalAudioProcessingFactory implements AudioProcessingFactory { + + /** + * Interface for external audio processing. + */ + public static interface AudioProcessing { + /** + * Called when the processor should be initialized with a new sample rate and + * number of channels. + */ + @CalledByNative("AudioProcessing") + void initialize(int sampleRateHz, int numChannels); + /** Called when the processor should be reset with a new sample rate. */ + @CalledByNative("AudioProcessing") + void reset(int newRate); + /** + * Processes the given capture or render signal. NOTE: `buffer.data` will be + * freed once this function returns so callers who want to use the data + * asynchronously must make sure to copy it first. 
+ */ + @CalledByNative("AudioProcessing") + void process(int numBands, int numFrames, ByteBuffer buffer); + } + + private long apmPtr; + private long capturePostProcessingPtr; + private long renderPreProcessingPtr; + + public ExternalAudioProcessingFactory() { + apmPtr = nativeGetDefaultApm(); + capturePostProcessingPtr = 0; + renderPreProcessingPtr = 0; + } + + @Override + public long createNative() { + if(apmPtr == 0) { + apmPtr = nativeGetDefaultApm(); + } + return apmPtr; + } + + /** + * Sets the capture post processing module. + * This module is applied to the audio signal after capture and before sending + * to the audio encoder. + */ + public void setCapturePostProcessing(@Nullable AudioProcessing processing) { + checkExternalAudioProcessorExists(); + long newPtr = nativeSetCapturePostProcessing(processing); + if (capturePostProcessingPtr != 0) { + JniCommon.nativeReleaseRef(capturePostProcessingPtr); + capturePostProcessingPtr = 0; + } + capturePostProcessingPtr = newPtr; + } + + /** + * Sets the render pre processing module. + * This module is applied to the audio signal after receiving from the audio + * decoder and before rendering. + */ + public void setRenderPreProcessing(@Nullable AudioProcessing processing) { + checkExternalAudioProcessorExists(); + long newPtr = nativeSetRenderPreProcessing(processing); + if (renderPreProcessingPtr != 0) { + JniCommon.nativeReleaseRef(renderPreProcessingPtr); + renderPreProcessingPtr = 0; + } + renderPreProcessingPtr = newPtr; + } + + /** + * Sets the bypass flag for the capture post processing module. + * If true, the registered audio processing will be bypassed. + */ + public void setBypassFlagForCapturePost( boolean bypass) { + checkExternalAudioProcessorExists(); + nativeSetBypassFlagForCapturePost(bypass); + } + + /** + * Sets the bypass flag for the render pre processing module. + * If true, the registered audio processing will be bypassed. + */ + public void setBypassFlagForRenderPre( boolean bypass) { + checkExternalAudioProcessorExists(); + nativeSetBypassFlagForRenderPre(bypass); + } + + /** + * Destroys the ExternalAudioProcessor. + */ + public void destroy() { + checkExternalAudioProcessorExists(); + if (renderPreProcessingPtr != 0) { + JniCommon.nativeReleaseRef(renderPreProcessingPtr); + renderPreProcessingPtr = 0; + } + if (capturePostProcessingPtr != 0) { + JniCommon.nativeReleaseRef(capturePostProcessingPtr); + capturePostProcessingPtr = 0; + } + nativeDestroy(); + apmPtr = 0; + } + + private void checkExternalAudioProcessorExists() { + if (apmPtr == 0) { + throw new IllegalStateException("ExternalAudioProcessor has been disposed."); + } + } + + private static native long nativeGetDefaultApm(); + private static native long nativeSetCapturePostProcessing(AudioProcessing processing); + private static native long nativeSetRenderPreProcessing(AudioProcessing processing); + private static native void nativeSetBypassFlagForCapturePost(boolean bypass); + private static native void nativeSetBypassFlagForRenderPre(boolean bypass); + private static native void nativeDestroy(); +} diff --git a/sdk/android/api/org/webrtc/FrameCryptor.java b/sdk/android/api/org/webrtc/FrameCryptor.java new file mode 100644 index 0000000000..d633e05005 --- /dev/null +++ b/sdk/android/api/org/webrtc/FrameCryptor.java @@ -0,0 +1,108 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
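A minimal sketch of how the factory above can be wired in, assuming PeerConnectionFactory.initialize(...) has already been called; the processor shown is a no-op placeholder and all names are illustrative:

    ExternalAudioProcessingFactory apmFactory = new ExternalAudioProcessingFactory();
    PeerConnectionFactory pcFactory = PeerConnectionFactory.builder()
        .setAudioProcessingFactory(apmFactory)
        .createPeerConnectionFactory();
    apmFactory.setCapturePostProcessing(new ExternalAudioProcessingFactory.AudioProcessing() {
      @Override public void initialize(int sampleRateHz, int numChannels) {}
      @Override public void reset(int newRate) {}
      @Override public void process(int numBands, int numFrames, java.nio.ByteBuffer buffer) {
        // buffer is freed after this call returns; copy it for asynchronous use.
      }
    });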
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+
+public class FrameCryptor {
+  public enum FrameCryptionState {
+    NEW,
+    OK,
+    ENCRYPTIONFAILED,
+    DECRYPTIONFAILED,
+    MISSINGKEY,
+    KEYRATCHETED,
+    INTERNALERROR;
+
+    @CalledByNative("FrameCryptionState")
+    static FrameCryptionState fromNativeIndex(int nativeIndex) {
+      return values()[nativeIndex];
+    }
+  }
+
+  public static interface Observer {
+    @CalledByNative("Observer")
+    void onFrameCryptionStateChanged(String participantId, FrameCryptionState newState);
+  }
+
+  private long nativeFrameCryptor;
+  private long observerPtr;
+
+  public long getNativeFrameCryptor() {
+    return nativeFrameCryptor;
+  }
+
+  @CalledByNative
+  public FrameCryptor(long nativeFrameCryptor) {
+    this.nativeFrameCryptor = nativeFrameCryptor;
+    this.observerPtr = 0;
+  }
+
+  public void setEnabled(boolean enabled) {
+    checkFrameCryptorExists();
+    nativeSetEnabled(nativeFrameCryptor, enabled);
+  }
+
+  public boolean isEnabled() {
+    checkFrameCryptorExists();
+    return nativeIsEnabled(nativeFrameCryptor);
+  }
+
+  public int getKeyIndex() {
+    checkFrameCryptorExists();
+    return nativeGetKeyIndex(nativeFrameCryptor);
+  }
+
+  public void setKeyIndex(int index) {
+    checkFrameCryptorExists();
+    nativeSetKeyIndex(nativeFrameCryptor, index);
+  }
+
+  public void dispose() {
+    checkFrameCryptorExists();
+    nativeUnSetObserver(nativeFrameCryptor);
+    JniCommon.nativeReleaseRef(nativeFrameCryptor);
+    nativeFrameCryptor = 0;
+    if (observerPtr != 0) {
+      JniCommon.nativeReleaseRef(observerPtr);
+      observerPtr = 0;
+    }
+  }
+
+  public void setObserver(@Nullable Observer observer) {
+    checkFrameCryptorExists();
+    long newPtr = nativeSetObserver(nativeFrameCryptor, observer);
+    if (observerPtr != 0) {
+      JniCommon.nativeReleaseRef(observerPtr);
+      observerPtr = 0;
+    }
+    observerPtr = newPtr;
+  }
+
+  private void checkFrameCryptorExists() {
+    if (nativeFrameCryptor == 0) {
+      throw new IllegalStateException("FrameCryptor has been disposed.");
+    }
+  }
+
+  private static native void nativeSetEnabled(long frameCryptorPointer, boolean enabled);
+  private static native boolean nativeIsEnabled(long frameCryptorPointer);
+  private static native void nativeSetKeyIndex(long frameCryptorPointer, int index);
+  private static native int nativeGetKeyIndex(long frameCryptorPointer);
+  private static native long nativeSetObserver(long frameCryptorPointer, Observer observer);
+  private static native void nativeUnSetObserver(long frameCryptorPointer);
+}
diff --git a/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java b/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java
new file mode 100644
index 0000000000..20b783e9ab
--- /dev/null
+++ b/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +public enum FrameCryptorAlgorithm { + AES_GCM(0); + + private final int value; + + FrameCryptorAlgorithm(int value) { + this.value = value; + } + + public int getValue() { + return value; + } +} \ No newline at end of file diff --git a/sdk/android/api/org/webrtc/FrameCryptorFactory.java b/sdk/android/api/org/webrtc/FrameCryptorFactory.java new file mode 100644 index 0000000000..a2a165c711 --- /dev/null +++ b/sdk/android/api/org/webrtc/FrameCryptorFactory.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +public class FrameCryptorFactory { + public static FrameCryptorKeyProvider createFrameCryptorKeyProvider( + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance, int keyRingSize, boolean discardFrameWhenCryptorNotReady) { + return nativeCreateFrameCryptorKeyProvider(sharedKey, ratchetSalt, ratchetWindowSize, uncryptedMagicBytes, failureTolerance, keyRingSize, discardFrameWhenCryptorNotReady); + } + + public static FrameCryptor createFrameCryptorForRtpSender(PeerConnectionFactory factory, RtpSender rtpSender, + String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { + return nativeCreateFrameCryptorForRtpSender(factory.getNativeOwnedFactoryAndThreads(),rtpSender.getNativeRtpSender(), participantId, + algorithm.getValue(), keyProvider.getNativeKeyProvider()); + } + + public static FrameCryptor createFrameCryptorForRtpReceiver(PeerConnectionFactory factory, RtpReceiver rtpReceiver, + String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { + return nativeCreateFrameCryptorForRtpReceiver(factory.getNativeOwnedFactoryAndThreads(), rtpReceiver.getNativeRtpReceiver(), participantId, + algorithm.getValue(), keyProvider.getNativeKeyProvider()); + } + + private static native FrameCryptor nativeCreateFrameCryptorForRtpSender(long factory, + long rtpSender, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); + private static native FrameCryptor nativeCreateFrameCryptorForRtpReceiver(long factory, + long rtpReceiver, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); + + private static native FrameCryptorKeyProvider nativeCreateFrameCryptorKeyProvider( + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance, int keyRingSize, boolean discardFrameWhenCryptorNotReady); +} diff --git a/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java 
b/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java new file mode 100644 index 0000000000..6ab0cdddf5 --- /dev/null +++ b/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java @@ -0,0 +1,93 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import java.util.ArrayList; + +public class FrameCryptorKeyProvider { + private long nativeKeyProvider; + + @CalledByNative + public FrameCryptorKeyProvider(long nativeKeyProvider) { + this.nativeKeyProvider = nativeKeyProvider; + } + + public long getNativeKeyProvider() { + return nativeKeyProvider; + } + + public boolean setSharedKey(int index, byte[] key) { + checkKeyProviderExists(); + return nativeSetSharedKey(nativeKeyProvider,index, key); + } + + public byte[] ratchetSharedKey(int index) { + checkKeyProviderExists(); + return nativeRatchetSharedKey(nativeKeyProvider, index); + } + + public byte[] exportSharedKey(int index) { + checkKeyProviderExists(); + return nativeExportSharedKey(nativeKeyProvider, index); + } + + public boolean setKey(String participantId, int index, byte[] key) { + checkKeyProviderExists(); + return nativeSetKey(nativeKeyProvider, participantId, index, key); + } + + public byte[] ratchetKey(String participantId, int index) { + checkKeyProviderExists(); + return nativeRatchetKey(nativeKeyProvider, participantId, index); + } + + public byte[] exportKey(String participantId, int index) { + checkKeyProviderExists(); + return nativeExportKey(nativeKeyProvider, participantId, index); + } + + public void setSifTrailer(byte[] sifTrailer) { + checkKeyProviderExists(); + nativeSetSifTrailer(nativeKeyProvider, sifTrailer); + } + + public void dispose() { + checkKeyProviderExists(); + JniCommon.nativeReleaseRef(nativeKeyProvider); + nativeKeyProvider = 0; + } + + private void checkKeyProviderExists() { + if (nativeKeyProvider == 0) { + throw new IllegalStateException("FrameCryptorKeyProvider has been disposed."); + } + } + private static native boolean nativeSetSharedKey( + long keyProviderPointer, int index, byte[] key); + private static native byte[] nativeRatchetSharedKey( + long keyProviderPointer, int index); + private static native byte[] nativeExportSharedKey( + long keyProviderPointer, int index); + private static native boolean nativeSetKey( + long keyProviderPointer, String participantId, int index, byte[] key); + private static native byte[] nativeRatchetKey( + long keyProviderPointer, String participantId, int index); + private static native byte[] nativeExportKey( + long keyProviderPointer, String participantId, int index); + private static native void nativeSetSifTrailer( + long keyProviderPointer, byte[] sifTrailer); +} \ No newline at end of file diff --git a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java index bdb10e9698..125e1d01c4 100644 --- a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java +++ b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java 
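Putting the three classes together, a sender-side cryptor can be set up roughly as below. This is an illustrative sketch, not part of the patch: `factory`, `sender`, the key material, and the numeric tolerances are all placeholders.

    byte[] salt = "demo-salt".getBytes(java.nio.charset.StandardCharsets.UTF_8);
    FrameCryptorKeyProvider keyProvider = FrameCryptorFactory.createFrameCryptorKeyProvider(
        /* sharedKey= */ true, salt, /* ratchetWindowSize= */ 16,
        /* uncryptedMagicBytes= */ new byte[0], /* failureTolerance= */ -1,
        /* keyRingSize= */ 16, /* discardFrameWhenCryptorNotReady= */ false);
    keyProvider.setSharedKey(/* index= */ 0,
        "demo-key".getBytes(java.nio.charset.StandardCharsets.UTF_8));

    FrameCryptor cryptor = FrameCryptorFactory.createFrameCryptorForRtpSender(
        factory, sender, /* participantId= */ "alice",
        FrameCryptorAlgorithm.AES_GCM, keyProvider);
    cryptor.setEnabled(true);
    cryptor.setObserver((participantId, newState) ->
        Logging.d("E2EE", participantId + " -> " + newState));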
@@ -143,11 +143,11 @@ public VideoCodecInfo[] getSupportedCodecs() {
         // supported by the decoder.
         if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) {
           supportedCodecInfos.add(new VideoCodecInfo(
-              name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true)));
+              name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true), new ArrayList<>()));
         }

         supportedCodecInfos.add(new VideoCodecInfo(
-            name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false)));
+            name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false), new ArrayList<>()));
       }
     }
diff --git a/sdk/android/api/org/webrtc/LibaomAv1Encoder.java b/sdk/android/api/org/webrtc/LibaomAv1Encoder.java
index 340586ef81..9e80541969 100644
--- a/sdk/android/api/org/webrtc/LibaomAv1Encoder.java
+++ b/sdk/android/api/org/webrtc/LibaomAv1Encoder.java
@@ -9,6 +9,7 @@
  */
 package org.webrtc;

+import java.util.List;

 public class LibaomAv1Encoder extends WrappedNativeVideoEncoder {
   @Override
@@ -22,4 +23,10 @@ public long createNative(long webrtcEnvRef) {
   public boolean isHardwareEncoder() {
     return false;
   }
+
+  static List<String> scalabilityModes() {
+    return nativeGetSupportedScalabilityModes();
+  }
+
+  static native List<String> nativeGetSupportedScalabilityModes();
 }
diff --git a/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java b/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java
index 71bcd1374b..c864ac2794 100644
--- a/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java
+++ b/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java
@@ -9,6 +9,7 @@
  */
 package org.webrtc;

+import java.util.List;

 public class LibvpxVp9Encoder extends WrappedNativeVideoEncoder {
   @Override
@@ -24,4 +25,10 @@ public boolean isHardwareEncoder() {
   }

   static native boolean nativeIsSupported();
+
+  static List<String> scalabilityModes() {
+    return nativeGetSupportedScalabilityModes();
+  }
+
+  static native List<String> nativeGetSupportedScalabilityModes();
 }
diff --git a/sdk/android/api/org/webrtc/MediaStreamTrack.java b/sdk/android/api/org/webrtc/MediaStreamTrack.java
index 8e39d3c73a..59b1d65cba 100644
--- a/sdk/android/api/org/webrtc/MediaStreamTrack.java
+++ b/sdk/android/api/org/webrtc/MediaStreamTrack.java
@@ -121,6 +121,10 @@ private void checkMediaStreamTrackExists() {
     }
   }

+  public boolean isDisposed() {
+    return nativeTrack == 0;
+  }
+
   private static native String nativeGetId(long track);
   private static native String nativeGetKind(long track);
   private static native boolean nativeGetEnabled(long track);
diff --git a/sdk/android/api/org/webrtc/PeerConnection.java b/sdk/android/api/org/webrtc/PeerConnection.java
index c70f8e4188..b1d85f0d2d 100644
--- a/sdk/android/api/org/webrtc/PeerConnection.java
+++ b/sdk/android/api/org/webrtc/PeerConnection.java
@@ -578,6 +578,17 @@ public static class RTCConfiguration {
      * See: https://www.chromestatus.com/feature/6269234631933952
      */
     public boolean offerExtmapAllowMixed;
+
+    /**
+     * When this flag is set, ports not bound to any specific network interface
+     * will be used, in addition to normal ports bound to the enumerated
+     * interfaces. Without this flag, these "any address" ports would only be
+     * used when network enumeration fails or is disabled. But under certain
+     * conditions, these ports may succeed where others fail, so they may allow
+     * the application to work in a wider variety of environments, at the expense
+     * of having to allocate additional candidates.
+     */
+    public boolean enableIceGatheringOnAnyAddressPorts;

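For reference, the flag documented above is consumed like any other RTCConfiguration field. A sketch, with `iceServers`, `factory`, and `observer` assumed to exist:

    PeerConnection.RTCConfiguration config =
        new PeerConnection.RTCConfiguration(iceServers);
    config.enableIceGatheringOnAnyAddressPorts = true;  // gather extra "any address" candidates
    PeerConnection pc = factory.createPeerConnection(config, observer);

    /** Limit ports used for connections.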
*/ public int portAllocatorMinPort; @@ -630,6 +641,7 @@ public RTCConfiguration(List iceServers) { portAllocatorMinPort = 0; portAllocatorMaxPort = 0; portAllocatorFlags = 0; + enableIceGatheringOnAnyAddressPorts = false; } @CalledByNative("RTCConfiguration") @@ -854,6 +866,11 @@ int getPortAllocatorMaxPort() { int getPortAllocatorFlags() { return portAllocatorFlags; } + + @CalledByNative("RTCConfiguration") + boolean getEnableIceGatheringOnAnyAddressPorts() { + return enableIceGatheringOnAnyAddressPorts; + } }; private final List localStreams = new ArrayList<>(); diff --git a/sdk/android/api/org/webrtc/PeerConnectionFactory.java b/sdk/android/api/org/webrtc/PeerConnectionFactory.java index 2b2ea155c1..2a3eb12491 100644 --- a/sdk/android/api/org/webrtc/PeerConnectionFactory.java +++ b/sdk/android/api/org/webrtc/PeerConnectionFactory.java @@ -20,6 +20,7 @@ import org.webrtc.RtpCapabilities; import org.webrtc.audio.AudioDeviceModule; import org.webrtc.audio.JavaAudioDeviceModule; +import org.webrtc.RtpCapabilities; /** * Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to diff --git a/sdk/android/api/org/webrtc/RtpParameters.java b/sdk/android/api/org/webrtc/RtpParameters.java index 9ca8311610..4e3f106785 100644 --- a/sdk/android/api/org/webrtc/RtpParameters.java +++ b/sdk/android/api/org/webrtc/RtpParameters.java @@ -76,6 +76,8 @@ public static class Encoding { // If non-null, scale the width and height down by this factor for video. If null, // implementation default scaling factor will be used. @Nullable public Double scaleResolutionDownBy; + // Scalability modes are used to represent simulcast and SVC layers. + @Nullable public String scalabilityMode; // SSRC to be used by this encoding. // Can't be changed between getParameters/setParameters. 
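The new scalabilityMode field is applied through the usual getParameters/setParameters round trip. An illustrative sketch, where `videoSender` is an existing RtpSender and "L1T3" is one of the standard WebRTC scalability mode names:

    RtpParameters parameters = videoSender.getParameters();
    if (!parameters.encodings.isEmpty()) {
      parameters.encodings.get(0).scalabilityMode = "L1T3";  // 1 spatial, 3 temporal layers
      videoSender.setParameters(parameters);
    }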
public Long ssrc; @@ -93,8 +95,8 @@ public Encoding(String rid, boolean active, Double scaleResolutionDownBy) { @CalledByNative("Encoding") Encoding(String rid, boolean active, double bitratePriority, @Priority int networkPriority, Integer maxBitrateBps, Integer minBitrateBps, Integer maxFramerate, - Integer numTemporalLayers, Double scaleResolutionDownBy, Long ssrc, - boolean adaptiveAudioPacketTime) { + Integer numTemporalLayers, Double scaleResolutionDownBy, String scalabilityMode, + Long ssrc, boolean adaptiveAudioPacketTime) { this.rid = rid; this.active = active; this.bitratePriority = bitratePriority; @@ -104,6 +106,7 @@ public Encoding(String rid, boolean active, Double scaleResolutionDownBy) { this.maxFramerate = maxFramerate; this.numTemporalLayers = numTemporalLayers; this.scaleResolutionDownBy = scaleResolutionDownBy; + this.scalabilityMode = scalabilityMode; this.ssrc = ssrc; this.adaptiveAudioPacketTime = adaptiveAudioPacketTime; } @@ -160,6 +163,12 @@ Double getScaleResolutionDownBy() { return scaleResolutionDownBy; } + @Nullable + @CalledByNative("Encoding") + String getScalabilityMode() { + return scalabilityMode; + } + @CalledByNative("Encoding") Long getSsrc() { return ssrc; diff --git a/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java b/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java new file mode 100644 index 0000000000..306cbe57d8 --- /dev/null +++ b/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java @@ -0,0 +1,28 @@ +package org.webrtc; + +public class SimulcastVideoEncoder extends WrappedNativeVideoEncoder { + + static native long nativeCreateEncoder(long webrtcEnvRef, VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info); + + VideoEncoderFactory primary; + VideoEncoderFactory fallback; + VideoCodecInfo info; + + public SimulcastVideoEncoder(VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info) { + this.primary = primary; + this.fallback = fallback; + this.info = info; + } + + @Override + public long createNative(long webrtcEnvRef) { + return nativeCreateEncoder(webrtcEnvRef, primary, fallback, info); + } + + @Override + public boolean isHardwareEncoder() { + return false; + } + +} + diff --git a/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java b/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java new file mode 100644 index 0000000000..97b4f32087 --- /dev/null +++ b/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java @@ -0,0 +1,43 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
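A sketch of how the encoder above is typically combined with the SimulcastVideoEncoderFactory defined just below; `eglContext` is assumed to come from an existing EglBase, and the boolean flags are illustrative:

    VideoEncoderFactory hardware = new HardwareVideoEncoderFactory(
        eglContext, /* enableIntelVp8Encoder= */ true, /* enableH264HighProfile= */ true);
    VideoEncoderFactory software = new SoftwareVideoEncoderFactory();
    PeerConnectionFactory pcFactory = PeerConnectionFactory.builder()
        .setVideoEncoderFactory(new SimulcastVideoEncoderFactory(hardware, software))
        .createPeerConnectionFactory();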
+ */
+
+package org.webrtc;
+
+import androidx.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Arrays;
+
+public class SimulcastVideoEncoderFactory implements VideoEncoderFactory {
+
+  VideoEncoderFactory primary;
+  VideoEncoderFactory fallback;
+
+  public SimulcastVideoEncoderFactory(VideoEncoderFactory primary, VideoEncoderFactory fallback) {
+    this.primary = primary;
+    this.fallback = fallback;
+  }
+
+  @Nullable
+  @Override
+  public VideoEncoder createEncoder(VideoCodecInfo info) {
+    return new SimulcastVideoEncoder(primary, fallback, info);
+  }
+
+  @Override
+  public VideoCodecInfo[] getSupportedCodecs() {
+    List<VideoCodecInfo> codecs = new ArrayList<VideoCodecInfo>();
+    codecs.addAll(Arrays.asList(primary.getSupportedCodecs()));
+    codecs.addAll(Arrays.asList(fallback.getSupportedCodecs()));
+    return codecs.toArray(new VideoCodecInfo[codecs.size()]);
+  }
+
+}
diff --git a/sdk/android/api/org/webrtc/VideoCodecInfo.java b/sdk/android/api/org/webrtc/VideoCodecInfo.java
index 4f97cf74cf..e0f5153d47 100644
--- a/sdk/android/api/org/webrtc/VideoCodecInfo.java
+++ b/sdk/android/api/org/webrtc/VideoCodecInfo.java
@@ -14,6 +14,8 @@
 import java.util.Arrays;
 import java.util.Locale;
 import java.util.Map;
+import java.util.List;
+import java.util.ArrayList;

 /**
  * Represent a video codec as encoded in SDP.
@@ -34,13 +36,16 @@ public class VideoCodecInfo {
   public final String name;
   public final Map<String, String> params;

+  public final List<String> scalabilityModes;
+
   @Deprecated public final int payload;

   @CalledByNative
-  public VideoCodecInfo(String name, Map<String, String> params) {
+  public VideoCodecInfo(String name, Map<String, String> params, List<String> scalabilityModes) {
     this.payload = 0;
     this.name = name;
     this.params = params;
+    this.scalabilityModes = scalabilityModes;
   }

   @Deprecated
@@ -48,6 +53,7 @@ public VideoCodecInfo(int payload, String name, Map<String, String> params) {
     this.payload = payload;
     this.name = name;
     this.params = params;
+    this.scalabilityModes = new ArrayList<>();
   }

   @Override
@@ -83,4 +89,9 @@ String getName() {
   Map<String, String> getParams() {
     return params;
   }
+
+  @CalledByNative
+  List<String> getScalabilityModes() {
+    return scalabilityModes;
+  }
 }
diff --git a/sdk/android/api/org/webrtc/VideoTrack.java b/sdk/android/api/org/webrtc/VideoTrack.java
index 512e46c26e..1791592b56 100644
--- a/sdk/android/api/org/webrtc/VideoTrack.java
+++ b/sdk/android/api/org/webrtc/VideoTrack.java
@@ -54,6 +54,24 @@ public void removeSink(VideoSink sink) {
     }
   }

+  /**
+   * For a remote video track, starts/stops receiving the video stream.
+   *
+   * If this is a local video track, this is a no-op.
+   */
+  public void setShouldReceive(boolean shouldReceive) {
+    nativeSetShouldReceive(getNativeMediaStreamTrack(), shouldReceive);
+  }
+
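As a usage sketch (not part of the patch), a remote track can be paused and resumed without renegotiation; `rtpReceiver` is assumed to be the receiver carrying the track:

    VideoTrack remoteTrack = (VideoTrack) rtpReceiver.track();
    remoteTrack.setShouldReceive(false);  // stop decoding while off-screen
    remoteTrack.setShouldReceive(true);   // resume; the receiver requests a keyframe

+  /**
+   * The current receive status for a remote video track.
+   *
+   * This has no meaning for a local video track.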
+ */ + public boolean shouldReceive(){ + return nativeGetShouldReceive(getNativeMediaStreamTrack()); + } + @Override public void dispose() { for (long nativeSink : sinks.values()) { @@ -73,4 +91,6 @@ public long getNativeVideoTrack() { private static native void nativeRemoveSink(long track, long nativeSink); private static native long nativeWrapSink(VideoSink sink); private static native void nativeFreeSink(long sink); + private static native void nativeSetShouldReceive(long track, boolean shouldReceive); + private static native boolean nativeGetShouldReceive(long track); } diff --git a/sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java b/sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java new file mode 100644 index 0000000000..a7acd37289 --- /dev/null +++ b/sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java @@ -0,0 +1,75 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import android.media.MediaCodecInfo; +import androidx.annotation.Nullable; + +import java.util.Arrays; +import java.util.LinkedHashSet; + +public class WrappedVideoDecoderFactory implements VideoDecoderFactory { + public WrappedVideoDecoderFactory(@Nullable EglBase.Context eglContext) { + this.hardwareVideoDecoderFactory = new HardwareVideoDecoderFactory(eglContext); + this.platformSoftwareVideoDecoderFactory = new PlatformSoftwareVideoDecoderFactory(eglContext); + } + + private final VideoDecoderFactory hardwareVideoDecoderFactory; + private final VideoDecoderFactory hardwareVideoDecoderFactoryWithoutEglContext = new HardwareVideoDecoderFactory(null) ; + private final VideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory(); + @Nullable + private final VideoDecoderFactory platformSoftwareVideoDecoderFactory; + + @Override + public VideoDecoder createDecoder(VideoCodecInfo codecType) { + VideoDecoder softwareDecoder = this.softwareVideoDecoderFactory.createDecoder(codecType); + VideoDecoder hardwareDecoder = this.hardwareVideoDecoderFactory.createDecoder(codecType); + if (softwareDecoder == null && this.platformSoftwareVideoDecoderFactory != null) { + softwareDecoder = this.platformSoftwareVideoDecoderFactory.createDecoder(codecType); + } + + if(hardwareDecoder != null && disableSurfaceTextureFrame(hardwareDecoder.getImplementationName())) { + hardwareDecoder.release(); + hardwareDecoder = this.hardwareVideoDecoderFactoryWithoutEglContext.createDecoder(codecType); + } + + if (hardwareDecoder != null && softwareDecoder != null) { + return new VideoDecoderFallback(softwareDecoder, hardwareDecoder); + } else { + return hardwareDecoder != null ? 
hardwareDecoder : softwareDecoder; + } + } + + private boolean disableSurfaceTextureFrame(String name) { + if (name.startsWith("OMX.qcom.") || name.startsWith("OMX.hisi.")) { + return true; + } + return false; + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + LinkedHashSet supportedCodecInfos = new LinkedHashSet(); + supportedCodecInfos.addAll(Arrays.asList(this.softwareVideoDecoderFactory.getSupportedCodecs())); + supportedCodecInfos.addAll(Arrays.asList(this.hardwareVideoDecoderFactory.getSupportedCodecs())); + if (this.platformSoftwareVideoDecoderFactory != null) { + supportedCodecInfos.addAll(Arrays.asList(this.platformSoftwareVideoDecoderFactory.getSupportedCodecs())); + } + + return (VideoCodecInfo[])supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]); + } +} diff --git a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java index 8e38c1f34f..825656e0dc 100644 --- a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java @@ -16,6 +16,7 @@ import android.media.AudioManager; import android.os.Build; import androidx.annotation.RequiresApi; +import java.nio.ByteBuffer; import java.util.concurrent.ScheduledExecutorService; import org.webrtc.JniCommon; import org.webrtc.Logging; @@ -42,6 +43,8 @@ public static class Builder { private AudioTrackErrorCallback audioTrackErrorCallback; private AudioRecordErrorCallback audioRecordErrorCallback; private SamplesReadyCallback samplesReadyCallback; + private PlaybackSamplesReadyCallback playbackSamplesReadyCallback; + private AudioBufferCallback audioBufferCallback; private AudioTrackStateCallback audioTrackStateCallback; private AudioRecordStateCallback audioRecordStateCallback; private boolean useHardwareAcousticEchoCanceler = isBuiltInAcousticEchoCancelerSupported(); @@ -140,6 +143,22 @@ public Builder setSamplesReadyCallback(SamplesReadyCallback samplesReadyCallback return this; } + /** + * Set a callback to listen for buffer requests from the AudioRecord. + */ + public Builder setAudioBufferCallback(AudioBufferCallback audioBufferCallback) { + this.audioBufferCallback = audioBufferCallback; + return this; + } + + /** + * Set a callback to listen to the audio output passed to the AudioTrack. + */ + public Builder setPlaybackSamplesReadyCallback(PlaybackSamplesReadyCallback playbackSamplesReadyCallback) { + this.playbackSamplesReadyCallback = playbackSamplesReadyCallback; + return this; + } + /** * Set a callback to retrieve information from the AudioTrack on when audio starts and stop. */ @@ -255,10 +274,11 @@ public JavaAudioDeviceModule createAudioDeviceModule() { } final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager, audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback, - samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); + samplesReadyCallback, audioBufferCallback, useHardwareAcousticEchoCanceler, + useHardwareNoiseSuppressor, inputSampleRate, useStereoInput ? 
2 : 1); final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback, - audioTrackStateCallback, useLowLatency, enableVolumeLogger); + audioTrackStateCallback, playbackSamplesReadyCallback, useLowLatency, enableVolumeLogger); return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput); } @@ -325,6 +345,11 @@ public static interface SamplesReadyCallback { void onWebRtcAudioRecordSamplesReady(AudioSamples samples); } + /** Called when new audio samples are ready. This should only be set for debug purposes */ + public static interface PlaybackSamplesReadyCallback { + void onWebRtcAudioTrackSamplesReady(AudioSamples samples); + } + /* AudioTrack */ // Audio playout/track error handler functions. public enum AudioTrackStartErrorCode { @@ -344,6 +369,16 @@ public static interface AudioTrackStateCallback { void onWebRtcAudioTrackStop(); } + public static interface AudioBufferCallback { + /** + * Called when new audio samples are ready. + * @param buffer the buffer of audio bytes. Changes to this buffer will be published on the audio track. + * @param captureTimeNs the capture timestamp of the original audio data. + * @return the capture timestamp in nanoseconds. Return 0 if not available. + */ + long onBuffer(ByteBuffer buffer, int audioFormat, int channelCount, int sampleRate, int bytesRead, long captureTimeNs); + } + /** * Returns true if the device supports built-in HW AEC, and the UUID is approved (some UUIDs can * be excluded). @@ -362,8 +397,8 @@ public static boolean isBuiltInNoiseSuppressorSupported() { private final Context context; private final AudioManager audioManager; - private final WebRtcAudioRecord audioInput; - private final WebRtcAudioTrack audioOutput; + public final WebRtcAudioRecord audioInput; + public final WebRtcAudioTrack audioOutput; private final int inputSampleRate; private final int outputSampleRate; private final boolean useStereoInput; @@ -418,6 +453,25 @@ public void setMicrophoneMute(boolean mute) { audioInput.setMicrophoneMute(mute); } + public void setAudioRecordEnabled(boolean enable) { + audioInput.setUseAudioRecord(enable); + } + + public void prewarmRecording(){ + audioInput.initRecordingIfNeeded(); + audioInput.prewarmRecordingIfNeeded(); + } + + public void requestStartRecording() { + audioInput.initRecordingIfNeeded(); + audioInput.startRecordingIfNeeded(); + } + + public void requestStopRecording() { + audioInput.initRecordingIfNeeded(); + audioInput.stopRecordingIfNeeded(); + } + @Override public boolean setNoiseSuppressorEnabled(boolean enabled) { Logging.d(TAG, "setNoiseSuppressorEnabled: " + enabled); diff --git a/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java b/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java index 6f448124e8..d9fadabfd9 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java @@ -48,7 +48,7 @@ public AndroidVideoDecoderInstrumentationTest(String codecName, boolean useEglCo if (codecName.equals("H264")) { this.codecType = H264Utils.DEFAULT_H264_BASELINE_PROFILE_CODEC; } else { - this.codecType = new VideoCodecInfo(codecName, new HashMap<>()); + this.codecType = new VideoCodecInfo(codecName, new HashMap<>(), new ArrayList<>()); } this.useEglContext = 
useEglContext; } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java index 86978c993b..66fe9845c1 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java @@ -47,7 +47,7 @@ public void setUp() { @SmallTest @Test public void getSupportedCodecs_hwVp8SameParamsAsSwVp8_oneVp8() { - VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap<>()); + VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap<>(), new ArrayList<>()); VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(hwVp8Encoder); DefaultVideoEncoderFactory defFactory = new DefaultVideoEncoderFactory(hwFactory); VideoCodecInfo[] supportedCodecs = defFactory.getSupportedCodecs(); diff --git a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java index 8a5d9788ee..8be15624da 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java @@ -14,6 +14,7 @@ import androidx.annotation.Nullable; import androidx.test.filters.SmallTest; +import java.util.ArrayList; import java.util.HashMap; import org.junit.Before; import org.junit.Test; @@ -55,7 +56,7 @@ public void createDecoder_supportedCodec_returnsNotNull() { @Test public void createDecoder_unsupportedCodec_returnsNull() { VideoDecoderFactory factory = new SoftwareVideoDecoderFactory(); - VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap()); + VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap(), new ArrayList<>()); VideoDecoder decoder = factory.createDecoder(codec); assertThat(decoder).isNull(); } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java index 696b423cde..0fa4c4cc17 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java @@ -14,6 +14,7 @@ import androidx.annotation.Nullable; import androidx.test.filters.SmallTest; +import java.util.ArrayList; import java.util.HashMap; import org.junit.Before; import org.junit.Test; @@ -52,7 +53,7 @@ public void createEncoder_supportedCodec_returnsNotNull() { @Test public void createEncoder_unsupportedCodec_returnsNull() { VideoEncoderFactory factory = new SoftwareVideoEncoderFactory(); - VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap()); + VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap(), new ArrayList<>()); VideoEncoder encoder = factory.createEncoder(codec); assertThat(encoder).isNull(); } diff --git a/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java b/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java index 70151d3b78..72c5c64191 100644 --- a/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java +++ b/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java @@ -12,6 +12,7 @@ import java.util.HashMap; import java.util.Map; +import java.util.ArrayList; public class CodecsWrapperTestHelper { 
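The test updates in this stretch are all the same mechanical change: `VideoCodecInfo` now takes the codec's supported scalability modes as a third constructor argument, with an empty list reproducing the old behavior. A sketch (the element type is assumed to be `String`, consistent with the string-array JNI conversion in the libaom encoder change later in this patch):

```java
// Sketch: the three-argument VideoCodecInfo constructor.
Map<String, String> params = new HashMap<>();
// Empty list: no scalability modes advertised (what the tests above pass).
VideoCodecInfo plainVp8 = new VideoCodecInfo("VP8", params, new ArrayList<>());
// Illustrative mode names; real values come from the native layer.
VideoCodecInfo svcAv1 =
    new VideoCodecInfo("AV1", params, Arrays.asList("L1T1", "L1T2", "L1T3"));
```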
 @CalledByNative
@@ -20,7 +21,7 @@ public static VideoCodecInfo createTestVideoCodecInfo() {
     params.put(
         VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID, VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1);
-    VideoCodecInfo codec_info = new VideoCodecInfo("H264", params);
+    VideoCodecInfo codec_info = new VideoCodecInfo("H264", params, new ArrayList<>());
     return codec_info;
   }
diff --git a/sdk/android/src/java/org/webrtc/H264Utils.java b/sdk/android/src/java/org/webrtc/H264Utils.java
index abb79c6582..4bf292ee12 100644
--- a/sdk/android/src/java/org/webrtc/H264Utils.java
+++ b/sdk/android/src/java/org/webrtc/H264Utils.java
@@ -12,6 +12,7 @@
 import java.util.Map;
 import java.util.HashMap;
+import java.util.ArrayList;
 
 /** Container for static helper functions related to dealing with H264 codecs. */
 class H264Utils {
@@ -38,9 +39,9 @@ public static Map<String, String> getDefaultH264Params(boolean isHighProfile) {
   }
 
   public static VideoCodecInfo DEFAULT_H264_BASELINE_PROFILE_CODEC =
-      new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false));
+      new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false), new ArrayList<>());
   public static VideoCodecInfo DEFAULT_H264_HIGH_PROFILE_CODEC =
-      new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true));
+      new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true), new ArrayList<>());
 
   public static boolean isSameH264Profile(
       Map<String, String> params1, Map<String, String> params2) {
diff --git a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
index 7c6b1a7a90..ae7137a8f9 100644
--- a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
+++ b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java
@@ -45,8 +45,8 @@ class HardwareVideoEncoder implements VideoEncoder {
   private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000;
   private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000;
 
-  // Size of the input frames should be multiple of 16 for the H/W encoder.
-  private static final int REQUIRED_RESOLUTION_ALIGNMENT = 16;
+  // Size of the input frames should be a multiple of 2 for the H/W encoder.
+ private static final int REQUIRED_RESOLUTION_ALIGNMENT = 2; /** * Keeps track of the number of output buffers that have been passed down the pipeline and not yet @@ -210,6 +210,11 @@ public VideoCodecStatus initEncode(Settings settings, Callback callback) { this.callback = callback; automaticResizeOn = settings.automaticResizeOn; + if (settings.width % REQUIRED_RESOLUTION_ALIGNMENT != 0 + || settings.height % REQUIRED_RESOLUTION_ALIGNMENT != 0) { + Logging.e(TAG, "MediaCodec requires 2x2 alignment."); + return VideoCodecStatus.ERR_SIZE; + } this.width = settings.width; this.height = settings.height; useSurfaceMode = canUseSurface(); @@ -533,6 +538,12 @@ private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseS if (status != VideoCodecStatus.OK) { return status; } + + if (newWidth % REQUIRED_RESOLUTION_ALIGNMENT != 0 + || newHeight % REQUIRED_RESOLUTION_ALIGNMENT != 0) { + Logging.e(TAG, "MediaCodec requires 2x2 alignment."); + return VideoCodecStatus.ERR_SIZE; + } width = newWidth; height = newHeight; useSurfaceMode = newUseSurfaceMode; diff --git a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java index 875d781abd..fdc14cb20e 100644 --- a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java +++ b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java @@ -72,11 +72,11 @@ public VideoCodecInfo[] getSupportedCodecs() { String name = type.name(); if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true), new ArrayList<>())); } supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false), new ArrayList<>())); } } diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java index ac62308aa3..27591e3ab7 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java @@ -34,11 +34,13 @@ import java.util.concurrent.ScheduledFuture; import java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import org.webrtc.CalledByNative; import org.webrtc.Logging; import org.webrtc.ThreadUtils; +import org.webrtc.audio.JavaAudioDeviceModule.AudioBufferCallback; import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback; import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStartErrorCode; import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback; @@ -83,6 +85,11 @@ class WebRtcAudioRecord { private final AudioManager audioManager; private final int audioSource; private final int audioFormat; + private int channelCount; + private int sampleRate; + + private int expectedChannelCount; + private int expectedSampleRate; private long nativeAudioRecord; @@ -90,7 +97,11 @@ class WebRtcAudioRecord { private @Nullable ByteBuffer byteBuffer; - private @Nullable AudioRecord audioRecord; + private final Object audioRecordStateLock = new Object(); + private boolean useAudioRecord = true; + private 
volatile @Nullable AudioRecord audioRecord; + + private final Object audioThreadStateLock = new Object(); private @Nullable AudioRecordThread audioThread; private @Nullable AudioDeviceInfo preferredDevice; @@ -105,6 +116,7 @@ class WebRtcAudioRecord { private final @Nullable AudioRecordErrorCallback errorCallback; private final @Nullable AudioRecordStateCallback stateCallback; private final @Nullable SamplesReadyCallback audioSamplesReadyCallback; + private final @Nullable AudioBufferCallback audioBufferCallback; private final boolean isAcousticEchoCancelerSupported; private final boolean isNoiseSuppressorSupported; @@ -125,7 +137,9 @@ public AudioRecordThread(String name) { public void run() { Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo()); - assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING); + if (audioRecord != null) { + assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING); + } // Audio recording has started and the client is informed about it. doAudioRecordStateCallback(AUDIO_RECORD_START); @@ -136,52 +150,112 @@ public void run() { audioTimestamp = new AudioTimestamp(); } while (keepAlive) { - int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity()); - if (bytesRead == byteBuffer.capacity()) { - if (microphoneMute) { - byteBuffer.clear(); - byteBuffer.put(emptyBytes); + long captureTimeNs = 0; + AudioRecord audioRecord; + boolean shouldReportData; + synchronized (audioRecordStateLock) { + audioRecord = WebRtcAudioRecord.this.audioRecord; + shouldReportData = nativeCalledInitRecording.get(); + } + + if (audioRecord == null && useAudioRecord) { + boolean result = initAudioRecord(); + + if (!result) { + // Failed audio record init, don't try again. + useAudioRecord = false; + } else { + synchronized (audioRecordStateLock) { + audioRecord = WebRtcAudioRecord.this.audioRecord; + } + + assertTrue(audioRecord != null); + try { + audioRecord.startRecording(); + } catch (IllegalStateException e) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, + "AudioRecord.startRecording failed: " + e.getMessage()); + audioRecord = null; + useAudioRecord = false; + } + if (useAudioRecord && audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, + "AudioRecord.startRecording failed - incorrect state: " + + audioRecord.getRecordingState()); + audioRecord = null; + useAudioRecord = false; + } } - // It's possible we've been shut down during the read, and stopRecording() tried and - // failed to join this thread. To be a bit safer, try to avoid calling any native methods - // in case they've been unregistered after stopRecording() returned. 
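For orientation in this rewritten loop: the `AudioBufferCallback` declared earlier in `JavaAudioDeviceModule` is invoked once per 10 ms buffer, after the microphone read (or with a zeroed buffer when no `AudioRecord` is active), and may rewrite the buffer in place before it is handed to native code. A minimal registration sketch, assuming an Android `Context` named `appContext`:

```java
// Sketch: observe (or rewrite) each capture buffer before it is published.
JavaAudioDeviceModule adm = JavaAudioDeviceModule.builder(appContext)
    .setAudioBufferCallback(new JavaAudioDeviceModule.AudioBufferCallback() {
      @Override
      public long onBuffer(ByteBuffer buffer, int audioFormat, int channelCount,
          int sampleRate, int bytesRead, long captureTimeNs) {
        // Whatever `buffer` contains after this call is what the track sends.
        return captureTimeNs; // or 0 if no capture timestamp is available
      }
    })
    .createAudioDeviceModule();
```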
- if (keepAlive) { - long captureTimeNs = 0; - if (Build.VERSION.SDK_INT >= 24) { - if (audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC) - == AudioRecord.SUCCESS) { - captureTimeNs = audioTimestamp.nanoTime; + } + + if (audioRecord != null && !useAudioRecord) { + audioRecord = null; + releaseAudioResources(); + } + + int bytesRead = 0; + if (audioRecord != null) { + bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity()); + if (bytesRead == byteBuffer.capacity()) { + if (microphoneMute) { + byteBuffer.clear(); + byteBuffer.put(emptyBytes); + } + + if (keepAlive) { + if (Build.VERSION.SDK_INT >= 24) { + if (audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC) + == AudioRecord.SUCCESS) { + captureTimeNs = audioTimestamp.nanoTime; + } } } - nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs); - } - if (audioSamplesReadyCallback != null) { - // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily - // at index 0. - byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), - byteBuffer.capacity() + byteBuffer.arrayOffset()); - audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady( - new JavaAudioDeviceModule.AudioSamples(audioRecord.getAudioFormat(), - audioRecord.getChannelCount(), audioRecord.getSampleRate(), data)); + } else { + String errorMessage = "AudioRecord.read failed: " + bytesRead; + Logging.e(TAG, errorMessage); + + if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) { + keepAlive = false; + reportWebRtcAudioRecordError(errorMessage); + } + // AudioRecord is primary driver in this path, so try again if possible. + continue; } } else { - String errorMessage = "AudioRecord.read failed: " + bytesRead; - Logging.e(TAG, errorMessage); - if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) { - keepAlive = false; - reportWebRtcAudioRecordError(errorMessage); - } + byteBuffer.clear(); + byteBuffer.put(emptyBytes); + } + + if (keepAlive && audioBufferCallback != null) { + captureTimeNs = audioBufferCallback.onBuffer(byteBuffer, audioFormat, + channelCount, sampleRate, bytesRead, captureTimeNs); + bytesRead = byteBuffer.capacity(); + } + + // It's possible we've been shut down during the read, and stopRecording() tried and + // failed to join this thread. To be a bit safer, try to avoid calling any native methods + // in case they've been unregistered after stopRecording() returned. + if (keepAlive && shouldReportData) { + nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs); + } + if (audioSamplesReadyCallback != null) { + // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily + // at index 0. + byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), + byteBuffer.capacity() + byteBuffer.arrayOffset()); + audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady( + new JavaAudioDeviceModule.AudioSamples(audioFormat, channelCount, sampleRate, data)); } } try { if (audioRecord != null) { audioRecord.stop(); - doAudioRecordStateCallback(AUDIO_RECORD_STOP); } } catch (IllegalStateException e) { Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage()); } + doAudioRecordStateCallback(AUDIO_RECORD_STOP); } // Stops the inner thread loop and also calls AudioRecord.stop(). 
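Connecting the thread changes above to the public surface added earlier on `JavaAudioDeviceModule`: recording can now be initialized and started by the client, independently of the native start call, with stop requests reconciled between the two. A lifecycle sketch, continuing the `adm` built above:

```java
// Sketch: client-driven microphone lifecycle.
adm.prewarmRecording();      // init + spin up the record thread early
// ... when actually publishing audio:
adm.requestStartRecording();
// ... when the local audio track is unpublished:
adm.requestStopRecording();  // stops only once neither client nor native needs it
```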
@@ -196,8 +270,12 @@ public void stopThread() { WebRtcAudioRecord(Context context, AudioManager audioManager) { this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE, DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */, - null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(), - WebRtcAudioEffects.isNoiseSuppressorSupported()); + null /* audioSamplesReadyCallback */, null /* audioBufferCallback */, + WebRtcAudioEffects.isAcousticEchoCancelerSupported(), + WebRtcAudioEffects.isNoiseSuppressorSupported(), + WebRtcAudioManager.getSampleRate(audioManager) /* expectedSampleRate */, + 1 /* expectedChannelCount */ + ); } public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, @@ -205,7 +283,10 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, @Nullable AudioRecordErrorCallback errorCallback, @Nullable AudioRecordStateCallback stateCallback, @Nullable SamplesReadyCallback audioSamplesReadyCallback, - boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) { + @Nullable AudioBufferCallback audioBufferCallback, + boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported, + int expectedSampleRate, int expectedChannelCount + ) { if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) { throw new IllegalArgumentException("HW AEC not supported"); } @@ -220,8 +301,11 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, this.errorCallback = errorCallback; this.stateCallback = stateCallback; this.audioSamplesReadyCallback = audioSamplesReadyCallback; + this.audioBufferCallback = audioBufferCallback; this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported; this.isNoiseSuppressorSupported = isNoiseSuppressorSupported; + this.expectedSampleRate = expectedSampleRate; + this.expectedChannelCount = expectedChannelCount; Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); } @@ -273,15 +357,60 @@ private boolean enableBuiltInNS(boolean enable) { return effects.setNS(enable); } + public void setUseAudioRecord(boolean enable) { + Logging.d(TAG, "setUseAudioRecord(" + enable + ")"); + this.useAudioRecord = enable; + } + + /** + * Allows clients to init recording manually. + * + * @return true if recording was initialized correctly. + */ + public boolean initRecordingIfNeeded() { + synchronized (audioRecordStateLock) { + if (audioRecord == null){ + return initRecordingImpl(expectedSampleRate, expectedChannelCount, false) >= 0; + } + } + return true; + } + @CalledByNative private int initRecording(int sampleRate, int channels) { Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")"); + + synchronized (audioRecordStateLock) { + if (!nativeCalledInitRecording.compareAndSet(false, true)) { + reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording."); + return -1; + } + + if (audioRecord == null){ + return initRecordingImpl(sampleRate, channels, true); + } + + // initRecording was already called previously by client. + // Handle required steps for native libwebrtc. 
+ final int framesPerBuffer = getFramesPerBuffer(sampleRate); + if (byteBuffer == null) { + throw new IllegalStateException("initRecording: byteBuffer is null!"); + } + nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer); + return framesPerBuffer; + } + } + + private int initRecordingImpl(int sampleRate, int channels, boolean nativeCall) { + Logging.d(TAG, "initRecordingImpl(sampleRate=" + sampleRate + ", channels=" + channels + ")"); if (audioRecord != null) { reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording."); return -1; } - final int bytesPerFrame = channels * getBytesPerSample(audioFormat); - final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND; + this.sampleRate = sampleRate; + this.channelCount = channels; + final int bytesPerFrame = getBytesPerFrame(channels, this.audioFormat); + final int framesPerBuffer = getFramesPerBuffer(sampleRate); byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer); if (!byteBuffer.hasArray()) { reportWebRtcAudioRecordInitError("ByteBuffer does not have backing array."); @@ -292,55 +421,18 @@ private int initRecording(int sampleRate, int channels) { // Rather than passing the ByteBuffer with every callback (requiring // the potentially expensive GetDirectBufferAddress) we simply have the // the native class cache the address to the memory once. - nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer); - - // Get the minimum buffer size required for the successful creation of - // an AudioRecord object, in byte units. - // Note that this size doesn't guarantee a smooth recording under load. - final int channelConfig = channelCountToConfiguration(channels); - int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat); - if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) { - reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize); - return -1; + // Caching can only be done on the native thread. + if (nativeCall) { + nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer); } - Logging.d(TAG, "AudioRecord.getMinBufferSize: " + minBufferSize); - - // Use a larger buffer size than the minimum required when creating the - // AudioRecord instance to ensure smooth recording under load. It has been - // verified that it does not increase the actual recording latency. - int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity()); - Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes); - try { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { - // Use the AudioRecord.Builder class on Android M (23) and above. - // Throws IllegalArgumentException. - audioRecord = createAudioRecordOnMOrHigher( - audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); - audioSourceMatchesRecordingSessionRef.set(null); - if (preferredDevice != null) { - setPreferredDevice(preferredDevice); - } - } else { - // Use the old AudioRecord constructor for API levels below 23. - // Throws UnsupportedOperationException. - audioRecord = createAudioRecordOnLowerThanM( - audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); - audioSourceMatchesRecordingSessionRef.set(null); - } - } catch (IllegalArgumentException | UnsupportedOperationException e) { - // Report of exception message is sufficient. Example: "Cannot create AudioRecord". 
- reportWebRtcAudioRecordInitError(e.getMessage()); - releaseAudioResources(); - return -1; - } - if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { - reportWebRtcAudioRecordInitError("Creation or initialization of audio recorder failed."); - releaseAudioResources(); - return -1; + + if(useAudioRecord) { + boolean result = initAudioRecord(); + if (!result) { + return -1; + } } - effects.enable(audioRecord.getAudioSessionId()); - logMainParameters(); - logMainParametersExtended(); + // Check number of active recording sessions. Should be zero but we have seen conflict cases // and adding a log for it can help us figure out details about conflicting sessions. final int numActiveRecordingSessions = @@ -354,6 +446,70 @@ private int initRecording(int sampleRate, int channels) { return framesPerBuffer; } + private boolean initAudioRecord() { + if (sampleRate == 0 || channelCount == 0) { + Logging.w(TAG, "initAudioRecord called before initRecord!"); + return false; + } + + synchronized (audioRecordStateLock) { + if (audioRecord != null) { + reportWebRtcAudioRecordInitError("InitAudioRecord called twice without StopRecording."); + return false; + } + // Get the minimum buffer size required for the successful creation of + // an AudioRecord object, in byte units. + // Note that this size doesn't guarantee a smooth recording under load. + final int channelConfig = channelCountToConfiguration(channelCount); + int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat); + if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) { + reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize); + return false; + } + Logging.d(TAG, "AudioRecord.getMinBufferSize: " + minBufferSize); + + // Use a larger buffer size than the minimum required when creating the + // AudioRecord instance to ensure smooth recording under load. It has been + // verified that it does not increase the actual recording latency. + int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity()); + Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes); + + try { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + // Use the AudioRecord.Builder class on Android M (23) and above. + // Throws IllegalArgumentException. + audioRecord = createAudioRecordOnMOrHigher( + audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); + if (preferredDevice != null) { + setPreferredDevice(preferredDevice); + } + } else { + // Use the old AudioRecord constructor for API levels below 23. + // Throws UnsupportedOperationException. + audioRecord = createAudioRecordOnLowerThanM( + audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); + } + } catch (IllegalArgumentException | UnsupportedOperationException e) { + // Report of exception message is sufficient. Example: "Cannot create AudioRecord". 
+ reportWebRtcAudioRecordInitError(e.getMessage()); + releaseAudioResources(); + return false; + } + if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { + reportWebRtcAudioRecordInitError("Creation or initialization of audio recorder failed."); + releaseAudioResources(); + return false; + } + + effects.enable(audioRecord.getAudioSessionId()); + + logMainParameters(); + logMainParametersExtended(); + } + return true; + } /** * Prefer a specific {@link AudioDeviceInfo} device for recording. Calling after recording starts * is valid but may cause a temporary interruption if the audio routing changes. @@ -371,50 +527,131 @@ void setPreferredDevice(@Nullable AudioDeviceInfo preferredDevice) { } } + public boolean prewarmRecordingIfNeeded() { + if(audioThread == null) { + synchronized(audioRecordStateLock) { + synchronized (audioThreadStateLock) { + if (audioThread == null) { + return startRecordingImpl(); + } + } + } + } + return true; + } + + public boolean startRecordingIfNeeded() { + clientCalledStartRecording.set(true); + if(audioThread == null) { + synchronized(audioRecordStateLock) { + synchronized (audioThreadStateLock) { + if (audioThread == null) { + return startRecordingImpl(); + } + } + } + } + return true; + } + @CalledByNative private boolean startRecording() { + if (!nativeCalledStartRecording.compareAndSet(false, true)) { + throw new IllegalStateException("startRecording called twice without stopRecording"); + } + if (audioThread == null) { + synchronized(audioRecordStateLock) { + synchronized (audioThreadStateLock) { + if (audioThread == null) { + return startRecordingImpl(); + } + } + } + } + return true; + } + + private boolean startRecordingImpl() { Logging.d(TAG, "startRecording"); - assertTrue(audioRecord != null); - assertTrue(audioThread == null); - try { - audioRecord.startRecording(); - } catch (IllegalStateException e) { - reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, - "AudioRecord.startRecording failed: " + e.getMessage()); - return false; + synchronized (audioRecordStateLock) { + synchronized (audioThreadStateLock) { + assertTrue(audioThread == null); + // Disabling useAudioRecord allows for "recordingless" recording, + // where we emit audio buffers to be mixed in by client. 
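A client-side sketch of the "recordingless" mode described in the comment above: with the `AudioRecord` disabled, the branch below is skipped, the thread keeps ticking with zeroed 10 ms buffers, and the registered `AudioBufferCallback` becomes the audio source.

```java
// Sketch: supply audio from the app instead of the microphone.
adm.setAudioRecordEnabled(false); // no AudioRecord; buffers arrive zeroed
adm.requestStartRecording();      // starts the thread without a microphone
// The AudioBufferCallback registered on the builder now writes the PCM
// payload into each buffer and returns 0 (no hardware capture timestamp).
```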
+ if (useAudioRecord) { + assertTrue(audioRecord != null); + try { + audioRecord.startRecording(); + } catch (IllegalStateException e) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, + "AudioRecord.startRecording failed: " + e.getMessage()); + return false; + } + if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, + "AudioRecord.startRecording failed - incorrect state: " + + audioRecord.getRecordingState()); + return false; + } + } + audioThread = new AudioRecordThread("AudioRecordJavaThread"); + audioThread.start(); + scheduleLogRecordingConfigurationsTask(audioRecord); + return true; + } } - if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { - reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, - "AudioRecord.startRecording failed - incorrect state: " - + audioRecord.getRecordingState()); - return false; + } + + private AtomicBoolean clientCalledStartRecording = new AtomicBoolean(false); + private AtomicBoolean nativeCalledInitRecording = new AtomicBoolean(false); + private AtomicBoolean nativeCalledStartRecording = new AtomicBoolean(false); + + public boolean stopRecordingIfNeeded() { + Logging.d(TAG, "stopRecordingIfNeeded"); + synchronized(audioRecordStateLock) { + clientCalledStartRecording.set(false); + if(audioThread != null) { + return stopRecordingIfNeededImpl(); + } } - audioThread = new AudioRecordThread("AudioRecordJavaThread"); - audioThread.start(); - scheduleLogRecordingConfigurationsTask(audioRecord); return true; } @CalledByNative private boolean stopRecording() { Logging.d(TAG, "stopRecording"); - assertTrue(audioThread != null); - if (future != null) { - if (!future.isDone()) { - // Might be needed if the client calls startRecording(), stopRecording() back-to-back. - future.cancel(true /* mayInterruptIfRunning */); - } - future = null; - } - audioThread.stopThread(); - if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { - Logging.e(TAG, "Join of AudioRecordJavaThread timed out"); - WebRtcAudioUtils.logAudioState(TAG, context, audioManager); - } - audioThread = null; - effects.release(); - releaseAudioResources(); - return true; + synchronized(audioRecordStateLock) { + nativeCalledStartRecording.set(false); + nativeCalledInitRecording.set(false); + return stopRecordingIfNeededImpl(); + } + } + + private boolean stopRecordingIfNeededImpl() { + synchronized(audioRecordStateLock) { + if(clientCalledStartRecording.get() || nativeCalledStartRecording.get()) { + // Someone has still requested recording, ignore stop request. + return true; + } + + Logging.d(TAG, "stopping recording"); + assertTrue(audioThread != null); + if (future != null) { + if (!future.isDone()) { + // Might be needed if the client calls startRecording(), stopRecording() back-to-back. 
+ future.cancel(true /* mayInterruptIfRunning */); + } + future = null; + } + audioThread.stopThread(); + if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { + Logging.e(TAG, "Join of AudioRecordJavaThread timed out"); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + } + audioThread = null; + releaseAudioResources(); + return true; + } } @TargetApi(Build.VERSION_CODES.M) @@ -439,20 +676,28 @@ private static AudioRecord createAudioRecordOnLowerThanM( } private void logMainParameters() { - Logging.d(TAG, - "AudioRecord: " - + "session ID: " + audioRecord.getAudioSessionId() + ", " - + "channels: " + audioRecord.getChannelCount() + ", " - + "sample rate: " + audioRecord.getSampleRate()); + synchronized(audioRecordStateLock) { + if(audioRecord != null) { + Logging.d(TAG, + "AudioRecord: " + + "session ID: " + audioRecord.getAudioSessionId() + ", " + + "channels: " + audioRecord.getChannelCount() + ", " + + "sample rate: " + audioRecord.getSampleRate()); + } + } } @TargetApi(Build.VERSION_CODES.M) private void logMainParametersExtended() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { - Logging.d(TAG, - "AudioRecord: " - // The frame count of the native AudioRecord buffer. - + "buffer size in frames: " + audioRecord.getBufferSizeInFrames()); + synchronized(audioRecordStateLock) { + if(audioRecord != null) { + Logging.d(TAG, + "AudioRecord: " + // The frame count of the native AudioRecord buffer. + + "buffer size in frames: " + audioRecord.getBufferSizeInFrames()); + } + } } } @@ -526,9 +771,12 @@ public boolean setNoiseSuppressorEnabled(boolean enabled) { // Releases the native AudioRecord resources. private void releaseAudioResources() { Logging.d(TAG, "releaseAudioResources"); - if (audioRecord != null) { - audioRecord.release(); - audioRecord = null; + synchronized (audioRecordStateLock) { + effects.release(); + if (audioRecord != null) { + audioRecord.release(); + audioRecord = null; + } } audioSourceMatchesRecordingSessionRef.set(null); } @@ -592,6 +840,14 @@ private static int getBytesPerSample(int audioFormat) { } } + private static int getBytesPerFrame(int channels, int audioFormat) { + return channels * getBytesPerSample(audioFormat); + } + + private static int getFramesPerBuffer(int sampleRate) { + return sampleRate / BUFFERS_PER_SECOND; + } + // Use an ExecutorService to schedule a task after a given delay where the task consists of // checking (by logging) the current status of active recording sessions. 
private void scheduleLogRecordingConfigurationsTask(AudioRecord audioRecord) { diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java index 2b34e34013..e4499e694a 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -20,12 +20,14 @@ import android.os.Process; import androidx.annotation.Nullable; import java.nio.ByteBuffer; +import java.util.Arrays; import org.webrtc.CalledByNative; import org.webrtc.Logging; import org.webrtc.ThreadUtils; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback; +import org.webrtc.audio.JavaAudioDeviceModule.PlaybackSamplesReadyCallback; import org.webrtc.audio.LowLatencyAudioBufferManager; class WebRtcAudioTrack { @@ -62,7 +64,7 @@ class WebRtcAudioTrack { private ByteBuffer byteBuffer; - private @Nullable final AudioAttributes audioAttributes; + public @Nullable AudioAttributes audioAttributes; private @Nullable AudioTrack audioTrack; private @Nullable AudioTrackThread audioThread; private final VolumeLogger volumeLogger; @@ -76,6 +78,9 @@ class WebRtcAudioTrack { private final @Nullable AudioTrackErrorCallback errorCallback; private final @Nullable AudioTrackStateCallback stateCallback; + private final @Nullable PlaybackSamplesReadyCallback audioSamplesReadyCallback; + + private boolean checkPlayState = true; /** * Audio thread which keeps calling AudioTrack.write() to stream audio. @@ -96,7 +101,10 @@ public AudioTrackThread(String name) { public void run() { Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo()); - assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING); + + if (checkPlayState) { + assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING); + } // Audio playout has started and the client is informed about it. doAudioTrackStateCallback(AUDIO_TRACK_START); @@ -129,6 +137,17 @@ public void run() { reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten); } } + + if (audioSamplesReadyCallback != null && keepAlive) { + // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily + // at index 0. 
+ byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), + sizeInBytes + byteBuffer.arrayOffset()); + audioSamplesReadyCallback.onWebRtcAudioTrackSamplesReady( + new JavaAudioDeviceModule.AudioSamples(audioTrack.getAudioFormat(), + audioTrack.getChannelCount(), audioTrack.getSampleRate(), data)); + } + if (useLowLatency) { bufferManager.maybeAdjustBufferSize(audioTrack); } @@ -154,13 +173,13 @@ public void stopThread() { @CalledByNative WebRtcAudioTrack(Context context, AudioManager audioManager) { this(context, audioManager, null /* audioAttributes */, null /* errorCallback */, - null /* stateCallback */, false /* useLowLatency */, true /* enableVolumeLogger */); + null /* stateCallback */, null /* audioSamplesReadyCallback */, false /* useLowLatency */, true /* enableVolumeLogger */); } WebRtcAudioTrack(Context context, AudioManager audioManager, @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback, - @Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency, - boolean enableVolumeLogger) { + @Nullable AudioTrackStateCallback stateCallback, @Nullable PlaybackSamplesReadyCallback audioSamplesReadyCallback, + boolean useLowLatency, boolean enableVolumeLogger) { threadChecker.detachThread(); this.context = context; this.audioManager = audioManager; @@ -168,6 +187,7 @@ public void stopThread() { this.errorCallback = errorCallback; this.stateCallback = stateCallback; this.volumeLogger = enableVolumeLogger ? new VolumeLogger(audioManager) : null; + this.audioSamplesReadyCallback = audioSamplesReadyCallback; this.useLowLatency = useLowLatency; Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); } diff --git a/sdk/android/src/jni/libaom_av1_encoder.cc b/sdk/android/src/jni/libaom_av1_encoder.cc index 9ed4abf5d6..17f7655c53 100644 --- a/sdk/android/src/jni/libaom_av1_encoder.cc +++ b/sdk/android/src/jni/libaom_av1_encoder.cc @@ -16,6 +16,9 @@ #include "sdk/android/generated_libaom_av1_encoder_jni/LibaomAv1Encoder_jni.h" #include "sdk/android/src/jni/jni_helpers.h" +#include +#include + namespace webrtc { namespace jni { @@ -26,5 +29,14 @@ jlong JNI_LibaomAv1Encoder_Create(JNIEnv* jni, jlong j_webrtc_env_ref) { .release()); } +static webrtc::ScopedJavaLocalRef JNI_LibaomAv1Encoder_GetSupportedScalabilityModes(JNIEnv* jni) { + std::vector modes; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + modes.push_back(std::string(webrtc::ScalabilityModeToString(scalability_mode))); + } + } + return NativeToJavaStringArray(jni, modes); +} } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/pc/audio_sink.cc b/sdk/android/src/jni/pc/audio_sink.cc new file mode 100644 index 0000000000..f889dbb552 --- /dev/null +++ b/sdk/android/src/jni/pc/audio_sink.cc @@ -0,0 +1,39 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
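The audio_sink files beginning here are the native half of a Java-side `AudioTrackSink`. The Java interface itself is not in this section, so the sketch below infers its shape from the `Java_AudioTrackSink_onData` call that follows, and assumes matching `addSink`/`removeSink` methods on the Java `AudioTrack`, mirroring the `JNI_AudioTrack_AddSink`/`RemoveSink` entry points further down:

```java
// Sketch (assumed interface shape): tap raw PCM from a remote AudioTrack.
remoteAudioTrack.addSink(new AudioTrackSink() {
  @Override
  public void onData(ByteBuffer audioData, int bitsPerSample, int sampleRate,
      int numberOfChannels, int numberOfFrames, long absoluteCaptureTimestampMs) {
    // `audioData` is a direct buffer over native memory; copy it out if it
    // must outlive this callback.
  }
});
```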
+ */ + +#include "sdk/android/src/jni/pc/audio_sink.h" + +#include "sdk/android/generated_peerconnection_jni/AudioTrackSink_jni.h" + +namespace webrtc { +namespace jni { + +AudioTrackSinkWrapper::AudioTrackSinkWrapper(JNIEnv* jni, const JavaRef& j_sink) + : j_sink_(jni, j_sink) {} + +AudioTrackSinkWrapper::~AudioTrackSinkWrapper() {} + +void AudioTrackSinkWrapper::OnData( + const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + std::optional absolute_capture_timestamp_ms) { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + int length = (bits_per_sample / 8) * number_of_channels * number_of_frames; + ScopedJavaLocalRef audio_buffer = + NewDirectByteBuffer(jni, (void *) audio_data, length); + Java_AudioTrackSink_onData(jni, j_sink_, + audio_buffer, bits_per_sample, sample_rate, (int) number_of_channels, (int) number_of_frames, (absolute_capture_timestamp_ms ? absolute_capture_timestamp_ms.value() : 0)); +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/audio_sink.h b/sdk/android/src/jni/pc/audio_sink.h new file mode 100644 index 0000000000..2493f93ee6 --- /dev/null +++ b/sdk/android/src/jni/pc/audio_sink.h @@ -0,0 +1,42 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#ifndef SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ +#define SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ + +#include +#include + +#include "api/media_stream_interface.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +class AudioTrackSinkWrapper : public webrtc::AudioTrackSinkInterface { + public: + AudioTrackSinkWrapper(JNIEnv* jni, const JavaRef& j_sink); + ~AudioTrackSinkWrapper() override; + + private: + void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + std::optional absolute_capture_timestamp_ms) override; + + const ScopedJavaGlobalRef j_sink_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ diff --git a/sdk/android/src/jni/pc/audio_track.cc b/sdk/android/src/jni/pc/audio_track.cc index a2048620b4..72cd231a16 100644 --- a/sdk/android/src/jni/pc/audio_track.cc +++ b/sdk/android/src/jni/pc/audio_track.cc @@ -9,6 +9,8 @@ */ #include "api/media_stream_interface.h" +#include "sdk/android/src/jni/pc/audio_sink.h" + #include "sdk/android/generated_peerconnection_jni/AudioTrack_jni.h" namespace webrtc { @@ -20,5 +22,29 @@ static void JNI_AudioTrack_SetVolume(JNIEnv*, jlong j_p, jdouble volume) { source->SetVolume(volume); } +static void JNI_AudioTrack_AddSink(JNIEnv* jni, + jlong j_native_track, + jlong j_native_sink) { + reinterpret_cast(j_native_track) + ->AddSink(reinterpret_cast(j_native_sink)); +} + +static void JNI_AudioTrack_RemoveSink(JNIEnv* jni, + jlong j_native_track, + jlong j_native_sink) { + reinterpret_cast(j_native_track) + ->RemoveSink(reinterpret_cast(j_native_sink)); +} + +static jlong JNI_AudioTrack_WrapSink(JNIEnv* jni, + const JavaParamRef& sink) { + return jlongFromPointer(new AudioTrackSinkWrapper(jni, sink)); +} + +static void JNI_AudioTrack_FreeSink(JNIEnv* jni, jlong 
j_native_sink) { + delete reinterpret_cast(j_native_sink); +} + + } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/pc/external_audio_processing_factory.cc b/sdk/android/src/jni/pc/external_audio_processing_factory.cc new file mode 100644 index 0000000000..c7ec8dded6 --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processing_factory.cc @@ -0,0 +1,145 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "sdk/android/src/jni/pc/external_audio_processing_factory.h" + +#include +#include + +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/environment/environment_factory.h" +#include "api/make_ref_counted.h" +#include "rtc_base/ref_counted_object.h" +#include "sdk/android/generated_peerconnection_jni/ExternalAudioProcessingFactory_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/external_audio_processor.h" + +namespace webrtc { +namespace jni { + +ExternalAudioProcessingJni::ExternalAudioProcessingJni( + JNIEnv* jni, + const JavaRef& j_processing) + : j_processing_global_(jni, j_processing) {} +ExternalAudioProcessingJni::~ExternalAudioProcessingJni() {} +void ExternalAudioProcessingJni::Initialize(int sample_rate_hz, + int num_channels) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_AudioProcessing_initialize(env, j_processing_global_, sample_rate_hz, + num_channels); +} + +void ExternalAudioProcessingJni::Reset(int new_rate) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_AudioProcessing_reset(env, j_processing_global_, new_rate); +} + +void ExternalAudioProcessingJni::Process(int num_bands, int num_frames, int buffer_size, float* buffer) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef audio_buffer = + NewDirectByteBuffer(env, (void*)buffer, buffer_size * sizeof(float)); + Java_AudioProcessing_process(env, j_processing_global_, num_bands, num_frames, audio_buffer); +} + +ExternalAudioProcessingFactory::ExternalAudioProcessingFactory() { + capture_post_processor_ = new ExternalAudioProcessor(); + std::unique_ptr capture_post_processor( + capture_post_processor_); + + render_pre_processor_ = new ExternalAudioProcessor(); + std::unique_ptr render_pre_processor( + render_pre_processor_); + + apm_ = webrtc::BuiltinAudioProcessingBuilder() + .SetCapturePostProcessing(std::move(capture_post_processor)) + .SetRenderPreProcessing(std::move(render_pre_processor)) + .Build(CreateEnvironment()); + + webrtc::AudioProcessing::Config config; + apm_->ApplyConfig(config); +} + +static ExternalAudioProcessingFactory* default_processor_ptr; + +static jlong JNI_ExternalAudioProcessingFactory_GetDefaultApm(JNIEnv* env) { + if (!default_processor_ptr) { + auto default_processor = rtc::make_ref_counted(); + default_processor_ptr = default_processor.release(); + } + return 
webrtc::jni::jlongFromPointer(default_processor_ptr->apm().get()); +} + +static jlong JNI_ExternalAudioProcessingFactory_SetCapturePostProcessing( + JNIEnv* env, + const JavaParamRef& j_processing) { + if (!default_processor_ptr) { + return 0; + } + auto processing = + rtc::make_ref_counted(env, j_processing); + processing->AddRef(); + default_processor_ptr->capture_post_processor()->SetExternalAudioProcessing( + processing.get()); + return jlongFromPointer(processing.get()); +} + +static jlong JNI_ExternalAudioProcessingFactory_SetRenderPreProcessing( + JNIEnv* env, + const JavaParamRef& j_processing) { + if (!default_processor_ptr) { + return 0; + } + auto processing = + rtc::make_ref_counted(env, j_processing); + processing->AddRef(); + default_processor_ptr->render_pre_processor()->SetExternalAudioProcessing( + processing.get()); + return jlongFromPointer(processing.get()); +} + +static void JNI_ExternalAudioProcessingFactory_SetBypassFlagForCapturePost( + JNIEnv* env, + jboolean bypass) { + if (!default_processor_ptr) { + return; + } + default_processor_ptr->capture_post_processor()->SetBypassFlag(bypass); +} + +static void JNI_ExternalAudioProcessingFactory_SetBypassFlagForRenderPre( + JNIEnv* env, + jboolean bypass) { + if (!default_processor_ptr) { + return; + } + default_processor_ptr->render_pre_processor()->SetBypassFlag(bypass); +} + +static void JNI_ExternalAudioProcessingFactory_Destroy(JNIEnv* env) { + if (!default_processor_ptr) { + return; + } + default_processor_ptr->render_pre_processor()->SetExternalAudioProcessing( + nullptr); + default_processor_ptr->capture_post_processor()->SetExternalAudioProcessing( + nullptr); + delete default_processor_ptr; +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/external_audio_processing_factory.h b/sdk/android/src/jni/pc/external_audio_processing_factory.h new file mode 100644 index 0000000000..bf0bc886b7 --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processing_factory.h @@ -0,0 +1,68 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
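Reading the factory as a whole: `GetDefaultApm()` lazily creates a process-wide APM with the two external hooks installed, the `Set*Processing` entry points attach a Java processor to the capture-post or render-pre stage, and the bypass flags gate them at runtime. Note that `Destroy()` deletes `default_processor_ptr` without resetting it to null, so a later `GetDefaultApm()` would touch freed memory; the factory is effectively create-once. A registration sketch, with the Java-side method names assumed since only the JNI half appears here:

```java
// Sketch (Java peer method names assumed): install a capture post-processor.
// `myProcessor` implements the audio-processing callback sketched at the
// end of this section.
ExternalAudioProcessingFactory.setCapturePostProcessing(myProcessor);
ExternalAudioProcessingFactory.setBypassFlagForCapturePost(false); // enabled
```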
+ */
+
+#include <jni.h>
+
+#define WEBRTC_APM_DEBUG_DUMP 0
+
+#include "rtc_base/ref_counted_object.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+#include "sdk/android/src/jni/pc/external_audio_processor.h"
+#include "sdk/android/src/jni/pc/external_audio_processing_interface.h"
+
+namespace webrtc {
+namespace jni {
+
+class ExternalAudioProcessingJni
+    : public webrtc::ExternalAudioProcessingInterface,
+      public webrtc::RefCountInterface {
+ public:
+  ExternalAudioProcessingJni(JNIEnv* jni, const JavaRef<jobject>& j_processing);
+  ~ExternalAudioProcessingJni();
+
+ protected:
+  virtual void Initialize(int sample_rate_hz, int num_channels) override;
+  virtual void Reset(int new_rate) override;
+  virtual void Process(int num_bands, int num_frames, int buffer_size, float* buffer) override;
+
+ private:
+  const ScopedJavaGlobalRef<jobject> j_processing_global_;
+  const ScopedJavaGlobalRef<jobject> j_processing_;
+};
+
+class ExternalAudioProcessingFactory : public webrtc::RefCountInterface {
+ public:
+  ExternalAudioProcessingFactory();
+  virtual ~ExternalAudioProcessingFactory() = default;
+
+  ExternalAudioProcessor* capture_post_processor() {
+    return capture_post_processor_;
+  }
+
+  ExternalAudioProcessor* render_pre_processor() {
+    return render_pre_processor_;
+  }
+
+  rtc::scoped_refptr<AudioProcessing> apm() { return apm_; }
+
+ private:
+  rtc::scoped_refptr<AudioProcessing> apm_;
+  ExternalAudioProcessor* capture_post_processor_;
+  ExternalAudioProcessor* render_pre_processor_;
+};
+
+}  // namespace jni
+}  // namespace webrtc
diff --git a/sdk/android/src/jni/pc/external_audio_processing_interface.h b/sdk/android/src/jni/pc/external_audio_processing_interface.h
new file mode 100644
index 0000000000..1202be106b
--- /dev/null
+++ b/sdk/android/src/jni/pc/external_audio_processing_interface.h
@@ -0,0 +1,33 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef API_ANDROID_JNI_EXTERNALAUDIOPROCESSORINTERFACE_H_
+#define API_ANDROID_JNI_EXTERNALAUDIOPROCESSORINTERFACE_H_
+
+namespace webrtc {
+
+class ExternalAudioProcessingInterface {
+ public:
+  virtual void Initialize(int sample_rate_hz, int num_channels) = 0;
+  virtual void Reset(int new_rate) = 0;
+  virtual void Process(int num_bands, int num_frames, int buffer_size, float* buffer) = 0;
+
+ protected:
+  virtual ~ExternalAudioProcessingInterface() = default;
+};
+
+}  // namespace webrtc
+
+#endif  // API_ANDROID_JNI_EXTERNALAUDIOPROCESSORINTERFACE_H_
diff --git a/sdk/android/src/jni/pc/external_audio_processor.cc b/sdk/android/src/jni/pc/external_audio_processor.cc
new file mode 100644
index 0000000000..274982d6d4
--- /dev/null
+++ b/sdk/android/src/jni/pc/external_audio_processor.cc
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include "sdk/android/src/jni/pc/external_audio_processor.h"
+
+namespace webrtc {
+
+void ExternalAudioProcessor::SetExternalAudioProcessing(
+    ExternalAudioProcessingInterface* processor) {
+  webrtc::MutexLock lock(&mutex_);
+  external_processor_ = processor;
+  if (initialized_) {
+    external_processor_->Initialize(sample_rate_hz_, num_channels_);
+  }
+}
+
+void ExternalAudioProcessor::SetBypassFlag(bool bypass) {
+  webrtc::MutexLock lock(&mutex_);
+  bypass_flag_ = bypass;
+}
+
+void ExternalAudioProcessor::Initialize(int sample_rate_hz, int num_channels) {
+  webrtc::MutexLock lock(&mutex_);
+  sample_rate_hz_ = sample_rate_hz;
+  num_channels_ = num_channels;
+  if (external_processor_) {
+    external_processor_->Initialize(sample_rate_hz, num_channels);
+  }
+  initialized_ = true;
+}
+
+void ExternalAudioProcessor::Process(webrtc::AudioBuffer* audio) {
+  webrtc::MutexLock lock(&mutex_);
+  if (!external_processor_ || bypass_flag_ || !initialized_) {
+    return;
+  }
+
+  size_t num_frames = audio->num_frames();
+  size_t num_bands = audio->num_bands();
+
+  // 1 buffer = 10ms of frames
+  int rate = num_frames * 100;
+
+  if (rate != sample_rate_hz_) {
+    external_processor_->Reset(rate);
+    sample_rate_hz_ = rate;
+  }
+
+  external_processor_->Process(num_bands, num_frames, kNsFrameSize * num_bands,
+                               audio->channels()[0]);
+}
+
+std::string ExternalAudioProcessor::ToString() const {
+  return "ExternalAudioProcessor";
+}
+
+void ExternalAudioProcessor::SetRuntimeSetting(
+    webrtc::AudioProcessing::RuntimeSetting setting) {}
+
+}  // namespace webrtc
diff --git a/sdk/android/src/jni/pc/external_audio_processor.h b/sdk/android/src/jni/pc/external_audio_processor.h
new file mode 100644
index 0000000000..1dc31809fc
--- /dev/null
+++ b/sdk/android/src/jni/pc/external_audio_processor.h
@@ -0,0 +1,57 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_EXTERNAL_AUDIO_PROCESSOR_H_ +#define SDK_ANDROID_SRC_JNI_PC_EXTERNAL_AUDIO_PROCESSOR_H_ + +#define WEBRTC_APM_DEBUG_DUMP 0 + +#include "modules/audio_processing/audio_buffer.h" +#include "modules/audio_processing/audio_processing_impl.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "sdk/android/src/jni/pc/external_audio_processing_interface.h" + +namespace webrtc { + +class ExternalAudioProcessor : public webrtc::CustomProcessing { + public: + ExternalAudioProcessor() = default; + ~ExternalAudioProcessor() override = default; + + void SetExternalAudioProcessing( + ExternalAudioProcessingInterface* processor); + + void SetBypassFlag(bool bypass); + + private: + void Initialize(int sample_rate_hz, int num_channels) override; + void Process(webrtc::AudioBuffer* audio) override; + std::string ToString() const override; + void SetRuntimeSetting( + webrtc::AudioProcessing::RuntimeSetting setting) override; + + private: + mutable webrtc::Mutex mutex_; + ExternalAudioProcessingInterface* external_processor_; + bool bypass_flag_ = false; + bool initialized_ = false; + int sample_rate_hz_ = 0; + int num_channels_ = 0; +}; + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_EXTERNAL_AUDIO_PROCESSOR_H_ diff --git a/sdk/android/src/jni/pc/frame_cryptor.cc b/sdk/android/src/jni/pc/frame_cryptor.cc new file mode 100644 index 0000000000..812814d84c --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor.cc @@ -0,0 +1,200 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include "sdk/android/src/jni/pc/frame_cryptor.h" + +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "rtc_base/ref_counted_object.h" +#include "sdk/android/generated_peerconnection_jni/FrameCryptorFactory_jni.h" +#include "sdk/android/generated_peerconnection_jni/FrameCryptor_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/frame_cryptor_key_provider.h" +#include "sdk/android/src/jni/pc/owned_factory_and_threads.h" + +namespace webrtc { +namespace jni { + +FrameCryptorObserverJni::FrameCryptorObserverJni( + JNIEnv* jni, + const JavaRef& j_observer) + : j_observer_global_(jni, j_observer) {} + +FrameCryptorObserverJni::~FrameCryptorObserverJni() {} + +void FrameCryptorObserverJni::OnFrameCryptionStateChanged( + const std::string participant_id, + FrameCryptionState new_state) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onFrameCryptionStateChanged( + env, j_observer_global_, NativeToJavaString(env, participant_id), + Java_FrameCryptionState_fromNativeIndex(env, new_state)); +} + +ScopedJavaLocalRef NativeToJavaFrameCryptor( + JNIEnv* env, + rtc::scoped_refptr cryptor) { + if (!cryptor) + return nullptr; + // Sender is now owned by the Java object, and will be freed from + // FrameCryptor.dispose(). 
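+  // The raw pointer travels to Java as a jlong; release() below keeps the
+  // native refcount alive until FrameCryptor.dispose() frees it.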
+  return Java_FrameCryptor_Constructor(env,
+                                       jlongFromPointer(cryptor.release()));
+}
+
+static void JNI_FrameCryptor_SetEnabled(JNIEnv* jni,
+                                        jlong j_frame_cryptor_pointer,
+                                        jboolean j_enabled) {
+  reinterpret_cast<FrameCryptorTransformer*>(j_frame_cryptor_pointer)
+      ->SetEnabled(j_enabled);
+}
+
+static jboolean JNI_FrameCryptor_IsEnabled(JNIEnv* jni,
+                                           jlong j_frame_cryptor_pointer) {
+  return reinterpret_cast<FrameCryptorTransformer*>(j_frame_cryptor_pointer)
+      ->enabled();
+}
+
+static void JNI_FrameCryptor_SetKeyIndex(JNIEnv* jni,
+                                         jlong j_frame_cryptor_pointer,
+                                         jint j_index) {
+  reinterpret_cast<FrameCryptorTransformer*>(j_frame_cryptor_pointer)
+      ->SetKeyIndex(j_index);
+}
+
+static jint JNI_FrameCryptor_GetKeyIndex(JNIEnv* jni,
+                                         jlong j_frame_cryptor_pointer) {
+  return reinterpret_cast<FrameCryptorTransformer*>(j_frame_cryptor_pointer)
+      ->key_index();
+}
+
+static jlong JNI_FrameCryptor_SetObserver(
+    JNIEnv* jni,
+    jlong j_frame_cryptor_pointer,
+    const JavaParamRef<jobject>& j_observer) {
+  auto observer =
+      rtc::make_ref_counted<FrameCryptorObserverJni>(jni, j_observer);
+  observer->AddRef();
+  reinterpret_cast<FrameCryptorTransformer*>(j_frame_cryptor_pointer)
+      ->RegisterFrameCryptorTransformerObserver(observer);
+  return jlongFromPointer(observer.get());
+}
+
+static void JNI_FrameCryptor_UnSetObserver(JNIEnv* jni,
+                                           jlong j_frame_cryptor_pointer) {
+  reinterpret_cast<FrameCryptorTransformer*>(j_frame_cryptor_pointer)
+      ->UnRegisterFrameCryptorTransformerObserver();
+}
+
+webrtc::FrameCryptorTransformer::Algorithm AlgorithmFromIndex(int index) {
+  switch (index) {
+    case 0:
+      return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm;
+    default:
+      return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm;
+  }
+}
+
+static ScopedJavaLocalRef<jobject>
+JNI_FrameCryptorFactory_CreateFrameCryptorForRtpReceiver(
+    JNIEnv* env,
+    jlong native_factory,
+    jlong j_rtp_receiver_pointer,
+    const JavaParamRef<jstring>& participantId,
+    jint j_algorithm_index,
+    jlong j_key_provider) {
+  OwnedFactoryAndThreads* factory =
+      reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+  auto keyProvider =
+      reinterpret_cast<webrtc::KeyProvider*>(j_key_provider);
+  auto participant_id = JavaToStdString(env, participantId);
+  auto rtpReceiver =
+      reinterpret_cast<RtpReceiverInterface*>(j_rtp_receiver_pointer);
+  auto mediaType =
+      rtpReceiver->track()->kind() == "audio"
+          ? webrtc::FrameCryptorTransformer::MediaType::kAudioFrame
+          : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame;
+  auto frame_crypto_transformer =
+      rtc::scoped_refptr<webrtc::FrameCryptorTransformer>(
+          new webrtc::FrameCryptorTransformer(
+              factory->signaling_thread(), participant_id, mediaType,
+              AlgorithmFromIndex(j_algorithm_index),
+              rtc::scoped_refptr<webrtc::KeyProvider>(keyProvider)));
+
+  rtpReceiver->SetDepacketizerToDecoderFrameTransformer(
+      frame_crypto_transformer);
+  frame_crypto_transformer->SetEnabled(false);
+
+  return NativeToJavaFrameCryptor(env, frame_crypto_transformer);
+}
+
+static ScopedJavaLocalRef<jobject>
+JNI_FrameCryptorFactory_CreateFrameCryptorForRtpSender(
+    JNIEnv* env,
+    jlong native_factory,
+    jlong j_rtp_sender_pointer,
+    const JavaParamRef<jstring>& participantId,
+    jint j_algorithm_index,
+    jlong j_key_provider) {
+  OwnedFactoryAndThreads* factory =
+      reinterpret_cast<OwnedFactoryAndThreads*>(native_factory);
+  auto keyProvider =
+      reinterpret_cast<webrtc::KeyProvider*>(j_key_provider);
+  auto rtpSender =
+      reinterpret_cast<RtpSenderInterface*>(j_rtp_sender_pointer);
+  auto participant_id = JavaToStdString(env, participantId);
+  auto mediaType =
+      rtpSender->track()->kind() == "audio"
+          ? webrtc::FrameCryptorTransformer::MediaType::kAudioFrame
+          : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame;
+  auto frame_crypto_transformer =
+      rtc::scoped_refptr<webrtc::FrameCryptorTransformer>(
+          new webrtc::FrameCryptorTransformer(
+              factory->signaling_thread(), participant_id, mediaType,
+              AlgorithmFromIndex(j_algorithm_index),
+              rtc::scoped_refptr<webrtc::KeyProvider>(keyProvider)));
+
+  rtpSender->SetEncoderToPacketizerFrameTransformer(frame_crypto_transformer);
+  frame_crypto_transformer->SetEnabled(false);
+
+  return NativeToJavaFrameCryptor(env, frame_crypto_transformer);
+}
+
+static ScopedJavaLocalRef<jobject>
+JNI_FrameCryptorFactory_CreateFrameCryptorKeyProvider(
+    JNIEnv* env,
+    jboolean j_shared,
+    const JavaParamRef<jbyteArray>& j_ratchetSalt,
+    jint j_ratchetWindowSize,
+    const JavaParamRef<jbyteArray>& j_uncryptedMagicBytes,
+    jint j_failureTolerance,
+    jint j_keyRingSize,
+    jboolean j_discardFrameWhenCryptorNotReady) {
+  auto ratchetSalt = JavaToNativeByteArray(env, j_ratchetSalt);
+  KeyProviderOptions options;
+  options.ratchet_salt =
+      std::vector<uint8_t>(ratchetSalt.begin(), ratchetSalt.end());
+  options.ratchet_window_size = j_ratchetWindowSize;
+  auto uncryptedMagicBytes = JavaToNativeByteArray(env, j_uncryptedMagicBytes);
+  options.uncrypted_magic_bytes =
+      std::vector<uint8_t>(uncryptedMagicBytes.begin(),
+                           uncryptedMagicBytes.end());
+  options.shared_key = j_shared;
+  options.failure_tolerance = j_failureTolerance;
+  options.key_ring_size = j_keyRingSize;
+  options.discard_frame_when_cryptor_not_ready =
+      j_discardFrameWhenCryptorNotReady;
+  return NativeToJavaFrameCryptorKeyProvider(
+      env, rtc::make_ref_counted<webrtc::DefaultKeyProviderImpl>(options));
+}
+
+}  // namespace jni
+}  // namespace webrtc
diff --git a/sdk/android/src/jni/pc/frame_cryptor.h b/sdk/android/src/jni/pc/frame_cryptor.h
new file mode 100644
index 0000000000..dd0788d212
--- /dev/null
+++ b/sdk/android/src/jni/pc/frame_cryptor.h
@@ -0,0 +1,49 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_H_
+#define SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_H_
+
+#include <jni.h>
+
+#include "api/crypto/frame_crypto_transformer.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaFrameCryptor(
+    JNIEnv* env,
+    rtc::scoped_refptr<webrtc::FrameCryptorTransformer> cryptor);
+
+class FrameCryptorObserverJni : public FrameCryptorTransformerObserver {
+ public:
+  FrameCryptorObserverJni(JNIEnv* jni, const JavaRef<jobject>& j_observer);
+  ~FrameCryptorObserverJni() override;
+
+ protected:
+  void OnFrameCryptionStateChanged(const std::string participant_id,
+                                   FrameCryptionState state) override;
+
+ private:
+  const ScopedJavaGlobalRef<jobject> j_observer_global_;
+  const ScopedJavaGlobalRef<jobject> j_observer_;
+};
+
+}  // namespace jni
+}  // namespace webrtc
+
+#endif  // SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_H_
diff --git a/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc
new file mode 100644
index 0000000000..3456ab24b3
--- /dev/null
+++ b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc
@@ -0,0 +1,123 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include "sdk/android/src/jni/pc/frame_cryptor_key_provider.h"
+
+#include "sdk/android/generated_peerconnection_jni/FrameCryptorKeyProvider_jni.h"
+#include "sdk/android/native_api/jni/java_types.h"
+#include "sdk/android/src/jni/jni_helpers.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaFrameCryptorKeyProvider(
+    JNIEnv* env,
+    rtc::scoped_refptr<webrtc::KeyProvider> key_provider) {
+  if (!key_provider)
+    return nullptr;
+  // Sender is now owned by the Java object, and will be freed from
+  // FrameCryptorKeyProvider.dispose().
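+  // As in NativeToJavaFrameCryptor(), release() transfers the sole strong
+  // reference to the Java wrapper through the jlong constructor argument.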
+  return Java_FrameCryptorKeyProvider_Constructor(
+      env, jlongFromPointer(key_provider.release()));
+}
+
+static jboolean JNI_FrameCryptorKeyProvider_SetSharedKey(
+    JNIEnv* jni,
+    jlong j_key_provider,
+    jint j_index,
+    const JavaParamRef<jbyteArray>& j_key) {
+  auto key = JavaToNativeByteArray(jni, j_key);
+  return reinterpret_cast<webrtc::KeyProvider*>(j_key_provider)
+      ->SetSharedKey(j_index, std::vector<uint8_t>(key.begin(), key.end()));
+}
+
+static ScopedJavaLocalRef<jbyteArray>
+JNI_FrameCryptorKeyProvider_RatchetSharedKey(
+    JNIEnv* env,
+    jlong keyProviderPointer,
+    jint j_index) {
+  auto key_provider =
+      reinterpret_cast<webrtc::KeyProvider*>(keyProviderPointer);
+  auto newKey = key_provider->RatchetSharedKey(j_index);
+  std::vector<int8_t> int8tKey =
+      std::vector<int8_t>(newKey.begin(), newKey.end());
+  return NativeToJavaByteArray(env, rtc::ArrayView<int8_t>(int8tKey));
+}
+
+static ScopedJavaLocalRef<jbyteArray>
+JNI_FrameCryptorKeyProvider_ExportSharedKey(
+    JNIEnv* env,
+    jlong keyProviderPointer,
+    jint j_index) {
+  auto key_provider =
+      reinterpret_cast<webrtc::KeyProvider*>(keyProviderPointer);
+  auto key = key_provider->ExportSharedKey(j_index);
+  std::vector<int8_t> int8tKey = std::vector<int8_t>(key.begin(), key.end());
+  return NativeToJavaByteArray(env, rtc::ArrayView<int8_t>(int8tKey));
+}
+
+static jboolean JNI_FrameCryptorKeyProvider_SetKey(
+    JNIEnv* jni,
+    jlong j_key_provider,
+    const JavaParamRef<jstring>& participantId,
+    jint j_index,
+    const JavaParamRef<jbyteArray>& j_key) {
+  auto key = JavaToNativeByteArray(jni, j_key);
+  auto participant_id = JavaToStdString(jni, participantId);
+  return reinterpret_cast<webrtc::KeyProvider*>(j_key_provider)
+      ->SetKey(participant_id, j_index,
+               std::vector<uint8_t>(key.begin(), key.end()));
+}
+
+static ScopedJavaLocalRef<jbyteArray>
+JNI_FrameCryptorKeyProvider_RatchetKey(
+    JNIEnv* env,
+    jlong keyProviderPointer,
+    const JavaParamRef<jstring>& participantId,
+    jint j_index) {
+  auto participant_id = JavaToStdString(env, participantId);
+  auto key_provider =
+      reinterpret_cast<webrtc::KeyProvider*>(keyProviderPointer);
+  auto newKey = key_provider->RatchetKey(participant_id, j_index);
+  std::vector<int8_t> int8tKey =
+      std::vector<int8_t>(newKey.begin(), newKey.end());
+  return NativeToJavaByteArray(env, rtc::ArrayView<int8_t>(int8tKey));
+}
+
+static ScopedJavaLocalRef<jbyteArray>
+JNI_FrameCryptorKeyProvider_ExportKey(
+    JNIEnv* env,
+    jlong keyProviderPointer,
+    const JavaParamRef<jstring>& participantId,
+    jint j_index) {
+  auto participant_id = JavaToStdString(env, participantId);
+  auto key_provider =
+      reinterpret_cast<webrtc::KeyProvider*>(keyProviderPointer);
+  auto key = key_provider->ExportKey(participant_id, j_index);
+  std::vector<int8_t> int8tKey = std::vector<int8_t>(key.begin(), key.end());
+  return NativeToJavaByteArray(env, rtc::ArrayView<int8_t>(int8tKey));
+}
+
+static void JNI_FrameCryptorKeyProvider_SetSifTrailer(
+    JNIEnv* jni,
+    jlong j_key_provider,
+    const JavaParamRef<jbyteArray>& j_trailer) {
+  auto trailer = JavaToNativeByteArray(jni, j_trailer);
+  reinterpret_cast<webrtc::KeyProvider*>(j_key_provider)
+      ->SetSifTrailer(std::vector<uint8_t>(trailer.begin(), trailer.end()));
+}
+
+}  // namespace jni
+}  // namespace webrtc
diff --git a/sdk/android/src/jni/pc/frame_cryptor_key_provider.h b/sdk/android/src/jni/pc/frame_cryptor_key_provider.h
new file mode 100644
index 0000000000..8832a83035
--- /dev/null
+++ b/sdk/android/src/jni/pc/frame_cryptor_key_provider.h
@@ -0,0 +1,35 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_KEY_PROVIDER_H_
+#define SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_KEY_PROVIDER_H_
+
+#include <jni.h>
+
+#include "api/crypto/frame_crypto_transformer.h"
+#include "sdk/android/native_api/jni/scoped_java_ref.h"
+
+namespace webrtc {
+namespace jni {
+
+ScopedJavaLocalRef<jobject> NativeToJavaFrameCryptorKeyProvider(
+    JNIEnv* env,
+    rtc::scoped_refptr<webrtc::KeyProvider> cryptor);
+
+}  // namespace jni
+}  // namespace webrtc
+
+#endif  // SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_KEY_PROVIDER_H_
diff --git a/sdk/android/src/jni/pc/peer_connection.cc b/sdk/android/src/jni/pc/peer_connection.cc
index 51dd8d7e1e..2953bd2b7c 100644
--- a/sdk/android/src/jni/pc/peer_connection.cc
+++ b/sdk/android/src/jni/pc/peer_connection.cc
@@ -277,6 +277,9 @@ void JavaToNativeRTCConfiguration(
   rtc_config->enable_implicit_rollback =
       Java_RTCConfiguration_getEnableImplicitRollback(jni, j_rtc_config);
 
+  rtc_config->enable_any_address_ports =
+      Java_RTCConfiguration_getEnableIceGatheringOnAnyAddressPorts(jni, j_rtc_config);
+
   jni_zero::ScopedJavaLocalRef<jstring> j_turn_logging_id =
       Java_RTCConfiguration_getTurnLoggingId(jni, j_rtc_config);
   if (!IsNull(jni, j_turn_logging_id)) {
diff --git a/sdk/android/src/jni/pc/rtp_parameters.cc b/sdk/android/src/jni/pc/rtp_parameters.cc
index 4bd9ee0e1d..6feb6a631b 100644
--- a/sdk/android/src/jni/pc/rtp_parameters.cc
+++ b/sdk/android/src/jni/pc/rtp_parameters.cc
@@ -53,6 +53,7 @@ ScopedJavaLocalRef<jobject> NativeToJavaRtpEncodingParameter(
       NativeToJavaInteger(env, encoding.max_framerate),
       NativeToJavaInteger(env, encoding.num_temporal_layers),
       NativeToJavaDouble(env, encoding.scale_resolution_down_by),
+      NativeToJavaString(env, encoding.scalability_mode),
       encoding.ssrc ? NativeToJavaLong(env, *encoding.ssrc) : nullptr,
       encoding.adaptive_ptime);
 }
@@ -116,6 +117,11 @@ RtpEncodingParameters JavaToNativeRtpEncodingParameters(
       Java_Encoding_getScaleResolutionDownBy(jni, j_encoding_parameters);
   encoding.scale_resolution_down_by =
       JavaToNativeOptionalDouble(jni, j_scale_resolution_down_by);
+  ScopedJavaLocalRef<jstring> j_scalability_mode =
+      Java_Encoding_getScalabilityMode(jni, j_encoding_parameters);
+  if (!IsNull(jni, j_scalability_mode)) {
+    encoding.scalability_mode = JavaToNativeString(jni, j_scalability_mode);
+  }
   encoding.adaptive_ptime =
       Java_Encoding_getAdaptivePTime(jni, j_encoding_parameters);
   ScopedJavaLocalRef<jobject> j_ssrc =
diff --git a/sdk/android/src/jni/simulcast_video_encoder.cc b/sdk/android/src/jni/simulcast_video_encoder.cc
new file mode 100644
index 0000000000..afb64271d5
--- /dev/null
+++ b/sdk/android/src/jni/simulcast_video_encoder.cc
@@ -0,0 +1,35 @@
+#include <jni.h>
+
+#include "sdk/android/src/jni/jni_helpers.h"
+#include "sdk/android/src/jni/video_encoder_factory_wrapper.h"
+#include "sdk/android/src/jni/video_codec_info.h"
+#include "sdk/android/native_api/codecs/wrapper.h"
+#include "media/engine/simulcast_encoder_adapter.h"
+#include "rtc_base/logging.h"
+
+using namespace webrtc;
+using namespace webrtc::jni;
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+// (VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info)
+JNIEXPORT jlong JNICALL Java_org_webrtc_SimulcastVideoEncoder_nativeCreateEncoder(JNIEnv *env, jclass klass, jlong webrtcEnvRef, jobject primary, jobject fallback, jobject info) {
+  RTC_LOG(LS_INFO) << "Create simulcast video encoder";
+  JavaParamRef<jobject> info_ref(env, info);
+  SdpVideoFormat format = VideoCodecInfoToSdpVideoFormat(env, info_ref);
+
+  // TODO: The impact is minor, but this can leak, so we would like to fix it eventually.
+  // https://github.com/shiguredo-webrtc-build/webrtc-build/pull/16#pullrequestreview-600675795
+  return NativeToJavaPointer(std::make_unique<SimulcastEncoderAdapter>(
+      *reinterpret_cast<const Environment*>(webrtcEnvRef),
+      JavaToNativeVideoEncoderFactory(env, primary).release(),
+      JavaToNativeVideoEncoderFactory(env, fallback).release(),
+      format).release());
+}
+
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/sdk/android/src/jni/simulcast_video_encoder.h b/sdk/android/src/jni/simulcast_video_encoder.h
new file mode 100644
index 0000000000..d80ccde014
--- /dev/null
+++ b/sdk/android/src/jni/simulcast_video_encoder.h
@@ -0,0 +1,22 @@
+/* DO NOT EDIT THIS FILE - it is machine generated */
+#include <jni.h>
+/* Header for class org_webrtc_SimulcastVideoEncoder */
+
+#ifndef _Included_org_webrtc_SimulcastVideoEncoder
+#define _Included_org_webrtc_SimulcastVideoEncoder
+#ifdef __cplusplus
+extern "C" {
+#endif
+/*
+ * Class:     org_webrtc_SimulcastVideoEncoder
+ * Method:    nativeCreateEncoder
+ * Signature: (Lorg/webrtc/VideoEncoderFactory;Lorg/webrtc/VideoEncoderFactory;Lorg/webrtc/VideoCodecInfo;)J
+ */
+
+JNIEXPORT jlong JNICALL Java_org_webrtc_SimulcastVideoEncoder_nativeCreateEncoder
+  (JNIEnv *, jclass, jlong, jobject, jobject, jobject);
+
+#ifdef __cplusplus
+}
+#endif
+#endif
diff --git a/sdk/android/src/jni/video_codec_info.cc b/sdk/android/src/jni/video_codec_info.cc
index a218a1d23f..9c032894ca 100644
--- a/sdk/android/src/jni/video_codec_info.cc
+++ b/sdk/android/src/jni/video_codec_info.cc
@@ -13,15 +13,28 @@
 #include "sdk/android/generated_video_jni/VideoCodecInfo_jni.h"
 #include "sdk/android/native_api/jni/java_types.h"
 #include "sdk/android/src/jni/jni_helpers.h"
+#include "api/video_codecs/scalability_mode.h"
+#include "modules/video_coding/svc/scalability_mode_util.h"
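+// The two headers above provide ScalabilityModeFromString() and
+// ScalabilityModeToString(), used below to map between Java codec-info
+// strings and native ScalabilityMode values.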
 namespace webrtc {
 namespace jni {
 
 SdpVideoFormat VideoCodecInfoToSdpVideoFormat(JNIEnv* jni,
                                               const JavaRef<jobject>& j_info) {
+  std::vector<std::string> params = JavaToStdVectorStrings(
+      jni, Java_VideoCodecInfo_getScalabilityModes(jni, j_info));
+  absl::InlinedVector<ScalabilityMode, kScalabilityModeCount>
+      scalability_modes;
+  for (auto mode : params) {
+    auto scalability_mode = ScalabilityModeFromString(mode);
+    if (scalability_mode) {
+      scalability_modes.push_back(*scalability_mode);
+    }
+  }
   return SdpVideoFormat(
       JavaToNativeString(jni, Java_VideoCodecInfo_getName(jni, j_info)),
-      JavaToNativeStringMap(jni, Java_VideoCodecInfo_getParams(jni, j_info)));
+      JavaToNativeStringMap(jni, Java_VideoCodecInfo_getParams(jni, j_info)),
+      scalability_modes);
 }
 
 ScopedJavaLocalRef<jobject> SdpVideoFormatToVideoCodecInfo(
@@ -29,8 +42,17 @@ ScopedJavaLocalRef<jobject> SdpVideoFormatToVideoCodecInfo(
     const SdpVideoFormat& format) {
   ScopedJavaLocalRef<jobject> j_params =
       NativeToJavaStringMap(jni, format.parameters);
+  webrtc::ScopedJavaLocalRef<jobject> j_scalability_modes;
+  if (!format.scalability_modes.empty()) {
+    JavaListBuilder builder(jni);
+    for (auto mode : format.scalability_modes) {
+      std::string scalability_mode(ScalabilityModeToString(mode));
+      builder.add(NativeToJavaString(jni, scalability_mode));
+    }
+    j_scalability_modes = builder.java_list();
+  }
   return Java_VideoCodecInfo_Constructor(
-      jni, NativeToJavaString(jni, format.name), j_params);
+      jni, NativeToJavaString(jni, format.name), j_params, j_scalability_modes);
 }
 
 }  // namespace jni
diff --git a/sdk/android/src/jni/video_track.cc b/sdk/android/src/jni/video_track.cc
index 0e11b27340..9c64910637 100644
--- a/sdk/android/src/jni/video_track.cc
+++ b/sdk/android/src/jni/video_track.cc
@@ -46,5 +46,16 @@ static void JNI_VideoTrack_FreeSink(JNIEnv* jni, jlong j_native_sink) {
   delete reinterpret_cast<rtc::VideoSinkInterface<VideoFrame>*>(j_native_sink);
 }
 
+static void JNI_VideoTrack_SetShouldReceive(JNIEnv* jni,
+                                            jlong j_native_track,
+                                            jboolean should_receive) {
+  reinterpret_cast<VideoTrackInterface*>(j_native_track)
+      ->set_should_receive(should_receive);
+}
+
+static jboolean JNI_VideoTrack_GetShouldReceive(JNIEnv* jni,
+                                                jlong j_native_track) {
+  return reinterpret_cast<VideoTrackInterface*>(j_native_track)
+      ->should_receive();
+}
+
 }  // namespace jni
 }  // namespace webrtc
diff --git a/sdk/android/src/jni/vp9_codec.cc b/sdk/android/src/jni/vp9_codec.cc
index dcce5bb5e8..9131b8306d 100644
--- a/sdk/android/src/jni/vp9_codec.cc
+++ b/sdk/android/src/jni/vp9_codec.cc
@@ -11,11 +11,18 @@
 #include <jni.h>
 
 #include "api/environment/environment.h"
+#include "absl/container/inlined_vector.h"
+#include "api/video_codecs/sdp_video_format.h"
 #include "modules/video_coding/codecs/vp9/include/vp9.h"
+#include "modules/video_coding/svc/create_scalability_structure.h"
+
 #include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Decoder_jni.h"
 #include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Encoder_jni.h"
 #include "sdk/android/src/jni/jni_helpers.h"
 
+#include <string>
+#include <vector>
+
 namespace webrtc {
 namespace jni {
 
@@ -37,5 +44,14 @@ static jboolean JNI_LibvpxVp9Decoder_IsSupported(JNIEnv* jni) {
   return !SupportedVP9Codecs().empty();
 }
 
+static webrtc::ScopedJavaLocalRef<jobjectArray> JNI_LibvpxVp9Encoder_GetSupportedScalabilityModes(JNIEnv* jni) {
+  std::vector<std::string> modes;
+  for (const auto scalability_mode : webrtc::kAllScalabilityModes) {
+    if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) {
+      modes.push_back(std::string(webrtc::ScalabilityModeToString(scalability_mode)));
+    }
+  }
+  return NativeToJavaStringArray(jni, modes);
+}
 }  // namespace jni
 }  // namespace webrtc
diff --git a/sdk/objc/PrivacyInfo.xcprivacy b/sdk/objc/PrivacyInfo.xcprivacy
new file mode 100644
index 0000000000..7204a67c33
--- /dev/null
+++ b/sdk/objc/PrivacyInfo.xcprivacy
@@ -0,0 +1,28 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+    <key>NSPrivacyCollectedDataTypes</key>
+    <array/>
+    <key>NSPrivacyAccessedAPITypes</key>
+    <array>
+        <dict>
+            <key>NSPrivacyAccessedAPIType</key>
+            <string>NSPrivacyAccessedAPICategoryFileTimestamp</string>
+            <key>NSPrivacyAccessedAPITypeReasons</key>
+            <array>
+                <string>C617.1</string>
+            </array>
+        </dict>
+        <dict>
+            <key>NSPrivacyAccessedAPIType</key>
+            <string>NSPrivacyAccessedAPICategorySystemBootTime</string>
+            <key>NSPrivacyAccessedAPITypeReasons</key>
+            <array>
+                <string>35F9.1</string>
+                <string>8FFB.1</string>
+            </array>
+        </dict>
+    </array>
+</dict>
+</plist>
\ No newline at end of file
diff --git a/sdk/objc/api/RTCAudioRendererAdapter+Private.h b/sdk/objc/api/RTCAudioRendererAdapter+Private.h
new file mode 100644
index 0000000000..8a914138de
--- /dev/null
+++ b/sdk/objc/api/RTCAudioRendererAdapter+Private.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2024 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "RTCAudioRendererAdapter.h"
+
+#import "base/RTCAudioRenderer.h"
+
+#include "api/media_stream_interface.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE(RTCAudioRendererAdapter) ()
+
+@property(nonatomic, readonly) id<RTC_OBJC_TYPE(RTCAudioRenderer)> audioRenderer;
+
+@property(nonatomic, readonly) webrtc::AudioTrackSinkInterface *nativeAudioRenderer;
+
+- (instancetype)initWithNativeRenderer:(id<RTC_OBJC_TYPE(RTCAudioRenderer)>)audioRenderer
+    NS_DESIGNATED_INITIALIZER;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/api/RTCAudioRendererAdapter.h b/sdk/objc/api/RTCAudioRendererAdapter.h
new file mode 100644
index 0000000000..5753257182
--- /dev/null
+++ b/sdk/objc/api/RTCAudioRendererAdapter.h
@@ -0,0 +1,29 @@
+/*
+ * Copyright 2024 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCAudioRendererAdapter): NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/api/RTCAudioRendererAdapter.mm b/sdk/objc/api/RTCAudioRendererAdapter.mm
new file mode 100644
index 0000000000..8f2a92f553
--- /dev/null
+++ b/sdk/objc/api/RTCAudioRendererAdapter.mm
@@ -0,0 +1,117 @@
+/*
+ * Copyright 2024 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <AVFoundation/AVFoundation.h>
+#import "RTCAudioRendererAdapter+Private.h"
+
+#include <memory>
+
+namespace webrtc {
+
+class AudioRendererAdapter : public webrtc::AudioTrackSinkInterface {
+ public:
+  AudioRendererAdapter(RTC_OBJC_TYPE(RTCAudioRendererAdapter) * adapter) { adapter_ = adapter; }
+
+ private:
+  __weak RTC_OBJC_TYPE(RTCAudioRendererAdapter) * adapter_;
+
+  void OnData(const void *audio_data, int bits_per_sample, int sample_rate,
+              size_t number_of_channels, size_t number_of_frames,
+              std::optional<int64_t> absolute_capture_timestamp_ms) override {
+    OSStatus status;
+    AudioChannelLayout acl = {};
+    acl.mChannelLayoutTag =
+        (number_of_channels == 2) ? kAudioChannelLayoutTag_Stereo : kAudioChannelLayoutTag_Mono;
+
+    AudioStreamBasicDescription sd = {
+        .mSampleRate = static_cast<double>(sample_rate),
+        .mFormatID = kAudioFormatLinearPCM,
+        .mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked,
+        .mBytesPerPacket = static_cast<UInt32>(number_of_channels * 2),
+        .mFramesPerPacket = 1,
+        .mBytesPerFrame = static_cast<UInt32>(number_of_channels * 2),
+        .mChannelsPerFrame = static_cast<UInt32>(number_of_channels),
+        .mBitsPerChannel = 16,
+        .mReserved = 0};
+
+    CMFormatDescriptionRef formatDescription = nullptr;
+    status = CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &sd, sizeof(acl), &acl, 0, NULL,
+                                            NULL, &formatDescription);
+    if (status != noErr) {
+      NSLog(@"RTCAudioTrack: Failed to create audio formatDescription description. Error: %d",
+            (int)status);
+      return;
+    }
+
+    AVAudioFormat *format =
+        [[AVAudioFormat alloc] initWithCMAudioFormatDescription:formatDescription];
+    CFRelease(formatDescription);
+
+    AVAudioFrameCount frameCount = static_cast<AVAudioFrameCount>(number_of_frames);
+    AVAudioPCMBuffer *pcmBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:format
+                                                                frameCapacity:frameCount];
+    if (!pcmBuffer) {
+      NSLog(@"Failed to create AVAudioPCMBuffer");
+      return;
+    }
+
+    pcmBuffer.frameLength = frameCount;
+
+    // Handle both mono and stereo
+    const int16_t *inputData = static_cast<const int16_t *>(audio_data);
+    if (number_of_channels == 1) {
+      // Mono: straight copy
+      memcpy(pcmBuffer.int16ChannelData[0], inputData, number_of_frames * sizeof(int16_t));
+    } else if (number_of_channels == 2) {
+      // Stereo: manual deinterleave
+      int16_t *leftChannel = pcmBuffer.int16ChannelData[0];
+      int16_t *rightChannel = pcmBuffer.int16ChannelData[1];
+
+      for (size_t i = 0; i < number_of_frames; i++) {
+        leftChannel[i] = inputData[i * 2];
+        rightChannel[i] = inputData[i * 2 + 1];
+      }
+    } else {
+      NSLog(@"Unsupported number of channels: %zu", number_of_channels);
+      return;
+    }
+
+    [adapter_.audioRenderer renderPCMBuffer:pcmBuffer];
+  }
+};
+}  // namespace webrtc
+
+@implementation RTC_OBJC_TYPE (RTCAudioRendererAdapter) {
+  std::unique_ptr<webrtc::AudioRendererAdapter> _adapter;
+}
+
+@synthesize audioRenderer = _audioRenderer;
+
+- (instancetype)initWithNativeRenderer:(id<RTC_OBJC_TYPE(RTCAudioRenderer)>)audioRenderer {
+  NSParameterAssert(audioRenderer);
+  self = [super init];
+  if (self) {
+    _audioRenderer = audioRenderer;
+    _adapter.reset(new webrtc::AudioRendererAdapter(self));
+  }
+  return self;
+}
+
+- (webrtc::AudioTrackSinkInterface *)nativeAudioRenderer {
+  return _adapter.get();
+}
+
+@end
diff --git a/sdk/objc/api/RTCVideoRendererAdapter+Private.h b/sdk/objc/api/RTCVideoRendererAdapter+Private.h
index 52553b62c7..2567bbf295 100644
--- a/sdk/objc/api/RTCVideoRendererAdapter+Private.h
+++ b/sdk/objc/api/RTCVideoRendererAdapter+Private.h
@@ -16,7 +16,7 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-@interface
RTCVideoRendererAdapter () +@interface RTC_OBJC_TYPE(RTCVideoRendererAdapter) () /** * The Objective-C video renderer passed to this adapter during construction. diff --git a/sdk/objc/api/RTCVideoRendererAdapter.h b/sdk/objc/api/RTCVideoRendererAdapter.h index 2aafb68dc6..cd2ded94e8 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter.h +++ b/sdk/objc/api/RTCVideoRendererAdapter.h @@ -10,6 +10,8 @@ #import +#import "RTCMacros.h" + NS_ASSUME_NONNULL_BEGIN /* @@ -18,7 +20,7 @@ NS_ASSUME_NONNULL_BEGIN * adapter adapts calls made to that interface to the RTCVideoRenderer supplied * during construction. */ -@interface RTCVideoRendererAdapter : NSObject +@interface RTC_OBJC_TYPE (RTCVideoRendererAdapter): NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/RTCVideoRendererAdapter.mm b/sdk/objc/api/RTCVideoRendererAdapter.mm index 269132c6a5..67a2649f8b 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter.mm +++ b/sdk/objc/api/RTCVideoRendererAdapter.mm @@ -19,7 +19,7 @@ class VideoRendererAdapter : public VideoSinkInterface { public: - VideoRendererAdapter(RTCVideoRendererAdapter* adapter) { + VideoRendererAdapter(RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter) { adapter_ = adapter; size_ = CGSizeZero; } @@ -40,12 +40,12 @@ void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override { } private: - __weak RTCVideoRendererAdapter* adapter_; + __weak RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter_; CGSize size_; }; } // namespace webrtc -@implementation RTCVideoRendererAdapter { +@implementation RTC_OBJC_TYPE (RTCVideoRendererAdapter) { std::unique_ptr _adapter; } diff --git a/sdk/objc/api/logging/RTCCallbackLogger.h b/sdk/objc/api/logging/RTCCallbackLogger.h index 08bfbf0f91..ef8dad750a 100644 --- a/sdk/objc/api/logging/RTCCallbackLogger.h +++ b/sdk/objc/api/logging/RTCCallbackLogger.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN typedef void (^RTCCallbackLoggerMessageHandler)(NSString *message); typedef void (^RTCCallbackLoggerMessageAndSeverityHandler)( - NSString *message, RTCLoggingSeverity severity); + NSString *message, RTC_OBJC_TYPE(RTCLoggingSeverity) severity); // This class intercepts WebRTC logs and forwards them to a registered block. // This class is not threadsafe. @@ -25,7 +25,7 @@ RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCCallbackLogger) : NSObject // The severity level to capture. The default is kRTCLoggingSeverityInfo. 
-@property(nonatomic, assign) RTCLoggingSeverity severity; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCLoggingSeverity) severity; // The callback handler will be called on the same thread that does the // logging, so if the logging callback can be slow it may be a good idea diff --git a/sdk/objc/api/logging/RTCCallbackLogger.mm b/sdk/objc/api/logging/RTCCallbackLogger.mm index b479798bdf..32554e8986 100644 --- a/sdk/objc/api/logging/RTCCallbackLogger.mm +++ b/sdk/objc/api/logging/RTCCallbackLogger.mm @@ -58,27 +58,24 @@ void OnLogMessage(const std::string& message, void OnLogMessage(absl::string_view message, webrtc::LoggingSeverity severity) override { if (callback_handler_) { - RTCLoggingSeverity loggingSeverity = - NativeSeverityToObjcSeverity(severity); - callback_handler_([NSString stringForAbslStringView:message], - loggingSeverity); + RTC_OBJC_TYPE(RTCLoggingSeverity) loggingSeverity = NativeSeverityToObjcSeverity(severity); + callback_handler_([NSString stringForAbslStringView:message], loggingSeverity); } } private: - static RTCLoggingSeverity NativeSeverityToObjcSeverity( - webrtc::LoggingSeverity severity) { + static RTC_OBJC_TYPE(RTCLoggingSeverity) NativeSeverityToObjcSeverity(webrtc::LoggingSeverity severity) { switch (severity) { case webrtc::LS_VERBOSE: - return RTCLoggingSeverityVerbose; + return RTC_OBJC_TYPE(RTCLoggingSeverityVerbose); case webrtc::LS_INFO: - return RTCLoggingSeverityInfo; + return RTC_OBJC_TYPE(RTCLoggingSeverityInfo); case webrtc::LS_WARNING: - return RTCLoggingSeverityWarning; + return RTC_OBJC_TYPE(RTCLoggingSeverityWarning); case webrtc::LS_ERROR: - return RTCLoggingSeverityError; + return RTC_OBJC_TYPE(RTCLoggingSeverityError); case webrtc::LS_NONE: - return RTCLoggingSeverityNone; + return RTC_OBJC_TYPE(RTCLoggingSeverityNone); } } @@ -97,7 +94,7 @@ @implementation RTC_OBJC_TYPE (RTCCallbackLogger) { - (instancetype)init { self = [super init]; if (self != nil) { - _severity = RTCLoggingSeverityInfo; + _severity = RTC_OBJC_TYPE(RTCLoggingSeverityInfo); } return self; } @@ -143,15 +140,15 @@ - (void)stop { - (webrtc::LoggingSeverity)rtcSeverity { switch (_severity) { - case RTCLoggingSeverityVerbose: + case RTC_OBJC_TYPE(RTCLoggingSeverityVerbose): return webrtc::LS_VERBOSE; - case RTCLoggingSeverityInfo: + case RTC_OBJC_TYPE(RTCLoggingSeverityInfo): return webrtc::LS_INFO; - case RTCLoggingSeverityWarning: + case RTC_OBJC_TYPE(RTCLoggingSeverityWarning): return webrtc::LS_WARNING; - case RTCLoggingSeverityError: + case RTC_OBJC_TYPE(RTCLoggingSeverityError): return webrtc::LS_ERROR; - case RTCLoggingSeverityNone: + case RTC_OBJC_TYPE(RTCLoggingSeverityNone): return webrtc::LS_NONE; } } diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h b/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h new file mode 100644 index 0000000000..73c1a4e26a --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h @@ -0,0 +1,31 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCAudioDeviceModule.h" +#import "sdk/objc/native/api/audio_device_module.h" + +#include "rtc_base/thread.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCAudioDeviceModule) () + +- (instancetype)initWithNativeModule:(rtc::scoped_refptr )module + workerThread:(rtc::Thread *)workerThread; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h new file mode 100644 index 0000000000..b02cecfd0b --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h @@ -0,0 +1,56 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import + +#import "RTCMacros.h" +#import "RTCIODevice.h" + +NS_ASSUME_NONNULL_BEGIN + +typedef void (^RTCOnAudioDevicesDidUpdate)(); + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCAudioDeviceModule) : NSObject + +@property(nonatomic, readonly) NSArray *outputDevices; +@property(nonatomic, readonly) NSArray *inputDevices; + +@property(nonatomic, readonly) BOOL playing; +@property(nonatomic, readonly) BOOL recording; + +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCIODevice) *outputDevice; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCIODevice) *inputDevice; + +// Executes low-level API's in sequence to switch the device +// Use outputDevice / inputDevice property unless you need to know if setting the device is +// successful. +- (BOOL)trySetOutputDevice:(nullable RTC_OBJC_TYPE(RTCIODevice) *)device; +- (BOOL)trySetInputDevice:(nullable RTC_OBJC_TYPE(RTCIODevice) *)device; + +- (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler; + +- (BOOL)startPlayout; +- (BOOL)stopPlayout; +- (BOOL)initPlayout; +- (BOOL)startRecording; +- (BOOL)stopRecording; +- (BOOL)initRecording; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm new file mode 100644 index 0000000000..0318f3d533 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm @@ -0,0 +1,294 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#import "RTCAudioDeviceModule.h" +#import "RTCAudioDeviceModule+Private.h" +#import "RTCIODevice+Private.h" +#import "base/RTCLogging.h" + +#import "sdk/objc/native/api/audio_device_module.h" + +class AudioDeviceSink : public webrtc::AudioDeviceSink { + public: + AudioDeviceSink() {} + + void OnDevicesUpdated() override { + + RTCLogInfo(@"AudioDeviceSink OnDevicesUpdated"); + + if (callback_handler_) { + callback_handler_(); + } + } + + // private: + RTCOnAudioDevicesDidUpdate callback_handler_; +}; + +@implementation RTC_OBJC_TYPE (RTCAudioDeviceModule) { + rtc::Thread *_workerThread; + webrtc::scoped_refptr _native; + AudioDeviceSink *_sink; +} + +- (instancetype)initWithNativeModule:(rtc::scoped_refptr )module + workerThread:(rtc::Thread * )workerThread { + + RTCLogInfo(@"RTCAudioDeviceModule initWithNativeModule:workerThread:"); + + self = [super init]; + _native = module; + _workerThread = workerThread; + + _sink = new AudioDeviceSink(); + + _workerThread->BlockingCall([self] { + _native->SetAudioDeviceSink(_sink); + }); + + return self; +} + +- (NSArray *)outputDevices { + + return _workerThread->BlockingCall([self] { + return [self _outputDevices]; + }); +} + +- (NSArray *)inputDevices { + return _workerThread->BlockingCall([self] { + return [self _inputDevices]; + }); +} + +- (RTC_OBJC_TYPE(RTCIODevice) *)outputDevice { + return _workerThread->BlockingCall([self] { + + NSArray *devices = [self _outputDevices]; + int16_t devicesCount = (int16_t)([devices count]); + int16_t index = _native->GetPlayoutDevice(); + + if (devicesCount == 0 || index <= -1 || index > (devicesCount - 1)) { + return (RTC_OBJC_TYPE(RTCIODevice) *)nil; + } + + return (RTC_OBJC_TYPE(RTCIODevice) *)[devices objectAtIndex:index]; + }); +} + +- (void)setOutputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { + [self trySetOutputDevice: device]; +} + +- (BOOL)trySetOutputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { + + return _workerThread->BlockingCall([self, device] { + + NSUInteger index = 0; + NSArray *devices = [self _outputDevices]; + + if ([devices count] == 0) { + return NO; + } + + if (device != nil) { + index = [devices indexOfObjectPassingTest:^BOOL(RTC_OBJC_TYPE(RTCIODevice) *e, NSUInteger i, BOOL *stop) { + return (*stop = [e.deviceId isEqualToString:device.deviceId]); + }]; + if (index == NSNotFound) { + return NO; + } + } + + _native->StopPlayout(); + + if (_native->SetPlayoutDevice(index) == 0 + && _native->InitPlayout() == 0 + && _native->StartPlayout() == 0) { + + return YES; + } + + return NO; + }); +} + +- (RTC_OBJC_TYPE(RTCIODevice) *)inputDevice { + + return _workerThread->BlockingCall([self] { + + NSArray *devices = [self _inputDevices]; + int16_t devicesCount = (int16_t)([devices count]); + int16_t index = _native->GetRecordingDevice(); + + if (devicesCount == 0 || index <= -1 || index > (devicesCount - 1)) { + return (RTC_OBJC_TYPE(RTCIODevice) *)nil; + } + + return (RTC_OBJC_TYPE(RTCIODevice) *)[devices objectAtIndex:index]; + }); +} + +- (void)setInputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { + [self trySetInputDevice: device]; +} + +- (BOOL)trySetInputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { + + return _workerThread->BlockingCall([self, device] { + + NSUInteger index = 0; + NSArray *devices = [self _inputDevices]; + + if ([devices count] == 0) { + return NO; + } + + if (device != nil) { + index = [devices indexOfObjectPassingTest:^BOOL(RTC_OBJC_TYPE(RTCIODevice) *e, NSUInteger i, BOOL *stop) { + return (*stop = [e.deviceId isEqualToString:device.deviceId]); 
+ }]; + if (index == NSNotFound) { + return NO; + } + } + + _native->StopRecording(); + + if (_native->SetRecordingDevice(index) == 0 + && _native->InitRecording() == 0 + && _native->StartRecording() == 0) { + + return YES; + } + + return NO; + }); +} + +- (BOOL)playing { + + return _workerThread->BlockingCall([self] { + return _native->Playing(); + }); +} + +- (BOOL)recording { + + return _workerThread->BlockingCall([self] { + return _native->Recording(); + }); +} + +#pragma mark - Low-level access + +- (BOOL)startPlayout { + + return _workerThread->BlockingCall([self] { + return _native->StartPlayout() == 0; + }); +} + +- (BOOL)stopPlayout { + + return _workerThread->BlockingCall([self] { + return _native->StopPlayout() == 0; + }); +} + +- (BOOL)initPlayout { + + return _workerThread->BlockingCall([self] { + return _native->InitPlayout() == 0; + }); +} + +- (BOOL)startRecording { + + return _workerThread->BlockingCall([self] { + return _native->StartRecording() == 0; + }); +} + +- (BOOL)stopRecording { + + return _workerThread->BlockingCall([self] { + return _native->StopRecording() == 0; + }); +} + +- (BOOL)initRecording { + + return _workerThread->BlockingCall([self] { + return _native->InitRecording() == 0; + }); +} + +- (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler { + _sink->callback_handler_ = handler; + return YES; +} + +#pragma mark - Private + +- (NSArray *)_outputDevices { + + char guid[webrtc::kAdmMaxGuidSize + 1] = {0}; + char name[webrtc::kAdmMaxDeviceNameSize + 1] = {0}; + + NSMutableArray *result = [NSMutableArray array]; + + int16_t count = _native->PlayoutDevices(); + + if (count > 0) { + for (int i = 0; i < count; i++) { + _native->PlayoutDeviceName(i, name, guid); + NSString *strGUID = [[NSString alloc] initWithCString:guid encoding:NSUTF8StringEncoding]; + NSString *strName = [[NSString alloc] initWithCString:name encoding:NSUTF8StringEncoding]; + RTC_OBJC_TYPE(RTCIODevice) *device = [[RTC_OBJC_TYPE(RTCIODevice) alloc] initWithType:RTC_OBJC_TYPE(RTCIODeviceTypeOutput) deviceId:strGUID name:strName]; + [result addObject: device]; + } + } + + return result; +} + +- (NSArray *)_inputDevices { + + char guid[webrtc::kAdmMaxGuidSize + 1] = {0}; + char name[webrtc::kAdmMaxDeviceNameSize + 1] = {0}; + + NSMutableArray *result = [NSMutableArray array]; + + int16_t count = _native->RecordingDevices(); + + if (count > 0) { + for (int i = 0; i < count; i++) { + _native->RecordingDeviceName(i, name, guid); + NSString *strGUID = [[NSString alloc] initWithCString:guid encoding:NSUTF8StringEncoding]; + NSString *strName = [[NSString alloc] initWithCString:name encoding:NSUTF8StringEncoding]; + RTC_OBJC_TYPE(RTCIODevice) *device = [[RTC_OBJC_TYPE(RTCIODevice) alloc] initWithType:RTC_OBJC_TYPE(RTCIODeviceTypeInput) deviceId:strGUID name:strName]; + [result addObject: device]; + } + } + + return result; +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCAudioSource+Private.h b/sdk/objc/api/peerconnection/RTCAudioSource+Private.h index fdf72ccf17..08832af4d2 100644 --- a/sdk/objc/api/peerconnection/RTCAudioSource+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioSource+Private.h @@ -34,6 +34,6 @@ initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource: (webrtc::scoped_refptr)nativeMediaSource - type:(RTCMediaSourceType)type NS_UNAVAILABLE; + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type NS_UNAVAILABLE; @end diff --git a/sdk/objc/api/peerconnection/RTCAudioSource.mm b/sdk/objc/api/peerconnection/RTCAudioSource.mm index 
b1ac2ad1b0..458d24435e 100644 --- a/sdk/objc/api/peerconnection/RTCAudioSource.mm +++ b/sdk/objc/api/peerconnection/RTCAudioSource.mm @@ -26,8 +26,8 @@ @implementation RTC_OBJC_TYPE (RTCAudioSource) { RTC_DCHECK(nativeAudioSource); self = [super initWithFactory:factory - nativeMediaSource:nativeAudioSource - type:RTCMediaSourceTypeAudio]; + nativeMediaSource:nativeAudioSource + type:RTC_OBJC_TYPE(RTCMediaSourceTypeAudio)]; if (self) { _nativeAudioSource = nativeAudioSource; } @@ -38,7 +38,7 @@ @implementation RTC_OBJC_TYPE (RTCAudioSource) { initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource: (webrtc::scoped_refptr)nativeMediaSource - type:(RTCMediaSourceType)type { + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type { RTC_DCHECK_NOTREACHED(); return nil; } diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h index 17ac1db40a..5908d1ede2 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h @@ -8,6 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ +#import #import "RTCAudioTrack.h" #include "api/media_stream_interface.h" @@ -15,8 +16,7 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTC_OBJC_TYPE (RTCAudioTrack) -() +@interface RTC_OBJC_TYPE (RTCAudioTrack) () /** AudioTrackInterface created or passed in at construction. */ @property(nonatomic, readonly) diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.h b/sdk/objc/api/peerconnection/RTCAudioTrack.h index db8afb50fc..de0919eb43 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.h @@ -13,6 +13,7 @@ NS_ASSUME_NONNULL_BEGIN +@protocol RTC_OBJC_TYPE (RTCAudioRenderer); @class RTC_OBJC_TYPE(RTCAudioSource); RTC_OBJC_EXPORT @@ -23,6 +24,12 @@ RTC_OBJC_EXPORT /** The audio source for this audio track. */ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioSource) * source; +- (void)addRenderer:(id)renderer; + +- (void)removeRenderer:(id)renderer; + +- (void)removeAllRenderers; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index 5ba53c84e9..b2d5915c09 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -8,16 +8,24 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#import +#import + #import "RTCAudioTrack+Private.h" +#import "RTCAudioRenderer.h" #import "RTCAudioSource+Private.h" #import "RTCMediaStreamTrack+Private.h" #import "RTCPeerConnectionFactory+Private.h" +#import "api/RTCAudioRendererAdapter+Private.h" #import "helpers/NSString+StdString.h" #include "rtc_base/checks.h" -@implementation RTC_OBJC_TYPE (RTCAudioTrack) +@implementation RTC_OBJC_TYPE (RTCAudioTrack) { + rtc::Thread *_signalingThread; + NSMutableArray *_adapters; +} @synthesize source = _source; @@ -31,14 +39,12 @@ - (instancetype)initWithFactory: std::string nativeId = [NSString stdStringForString:trackId]; webrtc::scoped_refptr track = - factory.nativeFactory->CreateAudioTrack(nativeId, - source.nativeAudioSource.get()); - self = [self initWithFactory:factory - nativeTrack:track - type:RTCMediaStreamTrackTypeAudio]; + factory.nativeFactory->CreateAudioTrack(nativeId, source.nativeAudioSource.get()); + self = [self initWithFactory:factory nativeTrack:track type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio)]; if (self) { _source = source; } + return self; } @@ -46,11 +52,21 @@ - (instancetype)initWithFactory: initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(webrtc::scoped_refptr) nativeTrack - type:(RTCMediaStreamTrackType)type { + type:(RTC_OBJC_TYPE(RTCMediaStreamTrackType))type { NSParameterAssert(factory); NSParameterAssert(nativeTrack); - NSParameterAssert(type == RTCMediaStreamTrackTypeAudio); - return [super initWithFactory:factory nativeTrack:nativeTrack type:type]; + NSParameterAssert(type == RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio)); + self = [super initWithFactory:factory nativeTrack:nativeTrack type:type]; + if (self) { + _adapters = [NSMutableArray array]; + _signalingThread = factory.signalingThread; + } + + return self; +} + +- (void)dealloc { + [self removeAllRenderers]; } - (RTC_OBJC_TYPE(RTCAudioSource) *)source { @@ -66,6 +82,64 @@ - (instancetype)initWithFactory: return _source; } +- (void)addRenderer:(id)renderer { + if (!_signalingThread->IsCurrent()) { + _signalingThread->BlockingCall([renderer, self] { [self addRenderer:renderer]; }); + return; + } + + // Make sure we don't have this renderer yet. + for (RTC_OBJC_TYPE(RTCAudioRendererAdapter) * adapter in _adapters) { + if (adapter.audioRenderer == renderer) { + RTC_LOG(LS_INFO) << "|renderer| is already attached to this track"; + return; + } + } + // Create a wrapper that provides a native pointer for us. 
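+  // Retaining the adapter in _adapters keeps the native sink alive while it
+  // is registered; AddSink() below stores only a raw pointer.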
+ RTC_OBJC_TYPE(RTCAudioRendererAdapter) *adapter = + [[RTC_OBJC_TYPE(RTCAudioRendererAdapter) alloc] initWithNativeRenderer:renderer]; + [_adapters addObject:adapter]; + self.nativeAudioTrack->AddSink(adapter.nativeAudioRenderer); +} + +- (void)removeRenderer:(id)renderer { + if (!_signalingThread->IsCurrent()) { + _signalingThread->BlockingCall([renderer, self] { [self removeRenderer:renderer]; }); + return; + } + __block NSUInteger indexToRemove = NSNotFound; + [_adapters enumerateObjectsUsingBlock:^(RTC_OBJC_TYPE(RTCAudioRendererAdapter) * adapter, + NSUInteger idx, BOOL * stop) { + if (adapter.audioRenderer == renderer) { + indexToRemove = idx; + *stop = YES; + } + }]; + if (indexToRemove == NSNotFound) { + RTC_LOG(LS_INFO) << "removeRenderer called with a renderer that has not been previously added"; + return; + } + RTC_OBJC_TYPE(RTCAudioRendererAdapter) *adapterToRemove = [_adapters objectAtIndex:indexToRemove]; + self.nativeAudioTrack->RemoveSink(adapterToRemove.nativeAudioRenderer); + [_adapters removeObjectAtIndex:indexToRemove]; +} + +- (void)removeAllRenderers { + // Ensure the method is executed on the signaling thread. + if (!_signalingThread->IsCurrent()) { + _signalingThread->BlockingCall([self] { [self removeAllRenderers]; }); + return; + } + + // Iterate over all adapters and remove each one from the native audio track. + for (RTC_OBJC_TYPE(RTCAudioRendererAdapter) * adapter in _adapters) { + self.nativeAudioTrack->RemoveSink(adapter.nativeAudioRenderer); + } + + // Clear the adapters array after all sinks have been removed. + [_adapters removeAllObjects]; +} + #pragma mark - Private - (webrtc::scoped_refptr)nativeAudioTrack { diff --git a/sdk/objc/api/peerconnection/RTCConfiguration+Private.h b/sdk/objc/api/peerconnection/RTCConfiguration+Private.h index 52bf3abc17..697ba64d12 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration+Private.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration+Private.h @@ -17,57 +17,52 @@ NS_ASSUME_NONNULL_BEGIN @interface RTC_OBJC_TYPE (RTCConfiguration) () - + (webrtc::PeerConnectionInterface::IceTransportsType) - nativeTransportsTypeForTransportPolicy - : (RTCIceTransportPolicy)policy; + + (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy + : (RTC_OBJC_TYPE(RTCIceTransportPolicy))policy; -+ (RTCIceTransportPolicy)transportPolicyForTransportsType: ++ (RTC_OBJC_TYPE(RTCIceTransportPolicy))transportPolicyForTransportsType: (webrtc::PeerConnectionInterface::IceTransportsType)nativeType; -+ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy; ++ (NSString *)stringForTransportPolicy:(RTC_OBJC_TYPE(RTCIceTransportPolicy))policy; + (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy: - (RTCBundlePolicy)policy; + (RTC_OBJC_TYPE(RTCBundlePolicy))policy; -+ (RTCBundlePolicy)bundlePolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCBundlePolicy))bundlePolicyForNativePolicy: (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy; -+ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy; ++ (NSString *)stringForBundlePolicy:(RTC_OBJC_TYPE(RTCBundlePolicy))policy; + (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy: - (RTCRtcpMuxPolicy)policy; + (RTC_OBJC_TYPE(RTCRtcpMuxPolicy))policy; -+ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCRtcpMuxPolicy))rtcpMuxPolicyForNativePolicy: (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy; -+ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy; ++ (NSString 
*)stringForRtcpMuxPolicy:(RTC_OBJC_TYPE(RTCRtcpMuxPolicy))policy; -+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy) - nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)policy; ++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeTcpCandidatePolicyForPolicy: + (RTC_OBJC_TYPE(RTCTcpCandidatePolicy))policy; -+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCTcpCandidatePolicy))tcpCandidatePolicyForNativePolicy: (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy; -+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy; ++ (NSString *)stringForTcpCandidatePolicy:(RTC_OBJC_TYPE(RTCTcpCandidatePolicy))policy; -+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy) - nativeCandidateNetworkPolicyForPolicy:(RTCCandidateNetworkPolicy)policy; ++ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativeCandidateNetworkPolicyForPolicy: + (RTC_OBJC_TYPE(RTCCandidateNetworkPolicy))policy; -+ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCCandidateNetworkPolicy))candidateNetworkPolicyForNativePolicy: (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy; -+ (NSString *)stringForCandidateNetworkPolicy:(RTCCandidateNetworkPolicy)policy; ++ (NSString *)stringForCandidateNetworkPolicy:(RTC_OBJC_TYPE(RTCCandidateNetworkPolicy))policy; -+ (webrtc::KeyType)nativeEncryptionKeyTypeForKeyType: - (RTCEncryptionKeyType)keyType; ++ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTC_OBJC_TYPE(RTCEncryptionKeyType))keyType; -+ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics: - (RTCSdpSemantics)sdpSemantics; ++ (NSString *)stringForSdpSemantics:(RTC_OBJC_TYPE(RTCSdpSemantics))sdpSemantics; ++ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTC_OBJC_TYPE(RTCSdpSemantics))sdpSemantics; -+ (RTCSdpSemantics)sdpSemanticsForNativeSdpSemantics: - (webrtc::SdpSemantics)sdpSemantics; - -+ (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics; ++ (RTC_OBJC_TYPE(RTCSdpSemantics))sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics; /** * RTCConfiguration struct representation of this RTCConfiguration. diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.h b/sdk/objc/api/peerconnection/RTCConfiguration.h index b75ac6cf9a..6503996959 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration.h @@ -20,55 +20,55 @@ * Represents the ice transport policy. This exposes the same states in C++, * which include one more state than what exists in the W3C spec. */ -typedef NS_ENUM(NSInteger, RTCIceTransportPolicy) { - RTCIceTransportPolicyNone, - RTCIceTransportPolicyRelay, - RTCIceTransportPolicyNoHost, - RTCIceTransportPolicyAll +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCIceTransportPolicy)) { + RTC_OBJC_TYPE(RTCIceTransportPolicyNone), + RTC_OBJC_TYPE(RTCIceTransportPolicyRelay), + RTC_OBJC_TYPE(RTCIceTransportPolicyNoHost), + RTC_OBJC_TYPE(RTCIceTransportPolicyAll) }; /** Represents the bundle policy. */ -typedef NS_ENUM(NSInteger, RTCBundlePolicy) { - RTCBundlePolicyBalanced, - RTCBundlePolicyMaxCompat, - RTCBundlePolicyMaxBundle +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCBundlePolicy)) { + RTC_OBJC_TYPE(RTCBundlePolicyBalanced), + RTC_OBJC_TYPE(RTCBundlePolicyMaxCompat), + RTC_OBJC_TYPE(RTCBundlePolicyMaxBundle) }; /** Represents the rtcp mux policy. 
*/ -typedef NS_ENUM(NSInteger, RTCRtcpMuxPolicy) { - RTCRtcpMuxPolicyNegotiate, - RTCRtcpMuxPolicyRequire +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCRtcpMuxPolicy)) { + RTC_OBJC_TYPE(RTCRtcpMuxPolicyNegotiate), + RTC_OBJC_TYPE(RTCRtcpMuxPolicyRequire) }; /** Represents the tcp candidate policy. */ -typedef NS_ENUM(NSInteger, RTCTcpCandidatePolicy) { - RTCTcpCandidatePolicyEnabled, - RTCTcpCandidatePolicyDisabled +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCTcpCandidatePolicy)) { + RTC_OBJC_TYPE(RTCTcpCandidatePolicyEnabled), + RTC_OBJC_TYPE(RTCTcpCandidatePolicyDisabled) }; /** Represents the candidate network policy. */ -typedef NS_ENUM(NSInteger, RTCCandidateNetworkPolicy) { - RTCCandidateNetworkPolicyAll, - RTCCandidateNetworkPolicyLowCost +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCCandidateNetworkPolicy)) { + RTC_OBJC_TYPE(RTCCandidateNetworkPolicyAll), + RTC_OBJC_TYPE(RTCCandidateNetworkPolicyLowCost) }; /** Represents the continual gathering policy. */ -typedef NS_ENUM(NSInteger, RTCContinualGatheringPolicy) { - RTCContinualGatheringPolicyGatherOnce, - RTCContinualGatheringPolicyGatherContinually +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCContinualGatheringPolicy)) { + RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherOnce), + RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherContinually) }; /** Represents the encryption key type. */ -typedef NS_ENUM(NSInteger, RTCEncryptionKeyType) { - RTCEncryptionKeyTypeRSA, - RTCEncryptionKeyTypeECDSA, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCEncryptionKeyType)) { + RTC_OBJC_TYPE(RTCEncryptionKeyTypeRSA), + RTC_OBJC_TYPE(RTCEncryptionKeyTypeECDSA), }; /** Represents the chosen SDP semantics for the RTCPeerConnection. */ -typedef NS_ENUM(NSInteger, RTCSdpSemantics) { +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCSdpSemantics)) { // TODO(https://crbug.com/webrtc/13528): Remove support for Plan B. - RTCSdpSemanticsPlanB, - RTCSdpSemanticsUnifiedPlan, + RTC_OBJC_TYPE(RTCSdpSemanticsPlanB), + RTC_OBJC_TYPE(RTCSdpSemanticsUnifiedPlan), }; NS_ASSUME_NONNULL_BEGIN @@ -89,17 +89,16 @@ RTC_OBJC_EXPORT /** Which candidates the ICE agent is allowed to use. The W3C calls it * `iceTransportPolicy`, while in C++ it is called `type`. */ -@property(nonatomic, assign) RTCIceTransportPolicy iceTransportPolicy; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCIceTransportPolicy) iceTransportPolicy; /** The media-bundling policy to use when gathering ICE candidates. */ -@property(nonatomic, assign) RTCBundlePolicy bundlePolicy; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCBundlePolicy) bundlePolicy; /** The rtcp-mux policy to use when gathering ICE candidates. */ -@property(nonatomic, assign) RTCRtcpMuxPolicy rtcpMuxPolicy; -@property(nonatomic, assign) RTCTcpCandidatePolicy tcpCandidatePolicy; -@property(nonatomic, assign) RTCCandidateNetworkPolicy candidateNetworkPolicy; -@property(nonatomic, assign) - RTCContinualGatheringPolicy continualGatheringPolicy; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCRtcpMuxPolicy) rtcpMuxPolicy; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCTcpCandidatePolicy) tcpCandidatePolicy; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCCandidateNetworkPolicy) candidateNetworkPolicy; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCContinualGatheringPolicy) continualGatheringPolicy; /** If set to YES, don't gather IPv6 ICE candidates on Wi-Fi. * Only intended to be used on specific devices. 
Certain phones disable IPv6 @@ -129,7 +128,7 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) int iceBackupCandidatePairPingInterval; /** Key type used to generate SSL identity. Default is ECDSA. */ -@property(nonatomic, assign) RTCEncryptionKeyType keyType; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCEncryptionKeyType) keyType; /** ICE candidate pool size as defined in JSEP. Default is 0. */ @property(nonatomic, assign) int iceCandidatePoolSize; @@ -181,7 +180,7 @@ RTC_OBJC_EXPORT * the section. This will also cause RTCPeerConnection to ignore all but the * first m= section of the same media type. */ -@property(nonatomic, assign) RTCSdpSemantics sdpSemantics; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCSdpSemantics) sdpSemantics; /** Actively reset the SRTP parameters when the DTLS transports underneath are * changed after offer/answer negotiation. This is only intended to be a @@ -261,6 +260,17 @@ RTC_OBJC_EXPORT */ @property(nonatomic, copy, nullable) NSNumber *iceInactiveTimeout; +/** + * When this flag is set, ports not bound to any specific network interface + * will be used, in addition to normal ports bound to the enumerated + * interfaces. Without this flag, these "any address" ports would only be + * used when network enumeration fails or is disabled. But under certain + * conditions, these ports may succeed where others fail, so they may allow + * the application to work in a wider variety of environments, at the expense + * of having to allocate additional candidates. + */ +@property(nonatomic, assign) BOOL enableIceGatheringOnAnyAddressPorts; + - (instancetype)init; @end diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.mm b/sdk/objc/api/peerconnection/RTCConfiguration.mm index b0040a36c9..30ab46bcc9 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.mm +++ b/sdk/objc/api/peerconnection/RTCConfiguration.mm @@ -64,6 +64,7 @@ @implementation RTC_OBJC_TYPE (RTCConfiguration) @synthesize iceUnwritableTimeout = _iceUnwritableTimeout; @synthesize iceUnwritableMinChecks = _iceUnwritableMinChecks; @synthesize iceInactiveTimeout = _iceInactiveTimeout; +@synthesize enableIceGatheringOnAnyAddressPorts = _enableIceGatheringOnAnyAddressPorts; - (instancetype)init { // Copy defaults. @@ -116,7 +117,7 @@ - (instancetype)initWithNativeConfiguration: _iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout; _iceBackupCandidatePairPingInterval = config.ice_backup_candidate_pair_ping_interval; - _keyType = RTCEncryptionKeyTypeECDSA; + _keyType = RTC_OBJC_TYPE(RTCEncryptionKeyTypeECDSA); _iceCandidatePoolSize = config.ice_candidate_pool_size; _shouldPruneTurnPorts = config.prune_turn_ports; _shouldPresumeWritableWhenFullyRelayed = @@ -166,6 +167,7 @@ - (instancetype)initWithNativeConfiguration: _iceInactiveTimeout = config.ice_inactive_timeout.has_value() ? 
[NSNumber numberWithInt:*config.ice_inactive_timeout] : nil; + _enableIceGatheringOnAnyAddressPorts = config.enable_any_address_ports; } return self; } @@ -326,239 +328,236 @@ - (NSString *)description { nativeConfig->ice_inactive_timeout = std::optional(_iceInactiveTimeout.intValue); } + nativeConfig->enable_any_address_ports = _enableIceGatheringOnAnyAddressPorts; return nativeConfig.release(); } + (webrtc::PeerConnectionInterface::IceTransportsType) - nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)policy { + nativeTransportsTypeForTransportPolicy:(RTC_OBJC_TYPE(RTCIceTransportPolicy))policy { switch (policy) { - case RTCIceTransportPolicyNone: + case RTC_OBJC_TYPE(RTCIceTransportPolicyNone): return webrtc::PeerConnectionInterface::kNone; - case RTCIceTransportPolicyRelay: + case RTC_OBJC_TYPE(RTCIceTransportPolicyRelay): return webrtc::PeerConnectionInterface::kRelay; - case RTCIceTransportPolicyNoHost: + case RTC_OBJC_TYPE(RTCIceTransportPolicyNoHost): return webrtc::PeerConnectionInterface::kNoHost; - case RTCIceTransportPolicyAll: + case RTC_OBJC_TYPE(RTCIceTransportPolicyAll): return webrtc::PeerConnectionInterface::kAll; } } -+ (RTCIceTransportPolicy)transportPolicyForTransportsType: ++ (RTC_OBJC_TYPE(RTCIceTransportPolicy))transportPolicyForTransportsType: (webrtc::PeerConnectionInterface::IceTransportsType)nativeType { switch (nativeType) { case webrtc::PeerConnectionInterface::kNone: - return RTCIceTransportPolicyNone; + return RTC_OBJC_TYPE(RTCIceTransportPolicyNone); case webrtc::PeerConnectionInterface::kRelay: - return RTCIceTransportPolicyRelay; + return RTC_OBJC_TYPE(RTCIceTransportPolicyRelay); case webrtc::PeerConnectionInterface::kNoHost: - return RTCIceTransportPolicyNoHost; + return RTC_OBJC_TYPE(RTCIceTransportPolicyNoHost); case webrtc::PeerConnectionInterface::kAll: - return RTCIceTransportPolicyAll; + return RTC_OBJC_TYPE(RTCIceTransportPolicyAll); } } -+ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy { ++ (NSString *)stringForTransportPolicy:(RTC_OBJC_TYPE(RTCIceTransportPolicy))policy { switch (policy) { - case RTCIceTransportPolicyNone: + case RTC_OBJC_TYPE(RTCIceTransportPolicyNone): return @"NONE"; - case RTCIceTransportPolicyRelay: + case RTC_OBJC_TYPE(RTCIceTransportPolicyRelay): return @"RELAY"; - case RTCIceTransportPolicyNoHost: + case RTC_OBJC_TYPE(RTCIceTransportPolicyNoHost): return @"NO_HOST"; - case RTCIceTransportPolicyAll: + case RTC_OBJC_TYPE(RTCIceTransportPolicyAll): return @"ALL"; } } + (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy: - (RTCBundlePolicy)policy { + (RTC_OBJC_TYPE(RTCBundlePolicy))policy { switch (policy) { - case RTCBundlePolicyBalanced: + case RTC_OBJC_TYPE(RTCBundlePolicyBalanced): return webrtc::PeerConnectionInterface::kBundlePolicyBalanced; - case RTCBundlePolicyMaxCompat: + case RTC_OBJC_TYPE(RTCBundlePolicyMaxCompat): return webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat; - case RTCBundlePolicyMaxBundle: + case RTC_OBJC_TYPE(RTCBundlePolicyMaxBundle): return webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle; } } -+ (RTCBundlePolicy)bundlePolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCBundlePolicy))bundlePolicyForNativePolicy: (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy { switch (nativePolicy) { case webrtc::PeerConnectionInterface::kBundlePolicyBalanced: - return RTCBundlePolicyBalanced; + return RTC_OBJC_TYPE(RTCBundlePolicyBalanced); case webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat: - return RTCBundlePolicyMaxCompat; + 
return RTC_OBJC_TYPE(RTCBundlePolicyMaxCompat); case webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle: - return RTCBundlePolicyMaxBundle; + return RTC_OBJC_TYPE(RTCBundlePolicyMaxBundle); } } -+ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy { ++ (NSString *)stringForBundlePolicy:(RTC_OBJC_TYPE(RTCBundlePolicy))policy { switch (policy) { - case RTCBundlePolicyBalanced: + case RTC_OBJC_TYPE(RTCBundlePolicyBalanced): return @"BALANCED"; - case RTCBundlePolicyMaxCompat: + case RTC_OBJC_TYPE(RTCBundlePolicyMaxCompat): return @"MAX_COMPAT"; - case RTCBundlePolicyMaxBundle: + case RTC_OBJC_TYPE(RTCBundlePolicyMaxBundle): return @"MAX_BUNDLE"; } } + (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy: - (RTCRtcpMuxPolicy)policy { + (RTC_OBJC_TYPE(RTCRtcpMuxPolicy))policy { switch (policy) { - case RTCRtcpMuxPolicyNegotiate: + case RTC_OBJC_TYPE(RTCRtcpMuxPolicyNegotiate): return webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate; - case RTCRtcpMuxPolicyRequire: + case RTC_OBJC_TYPE(RTCRtcpMuxPolicyRequire): return webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire; } } -+ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCRtcpMuxPolicy))rtcpMuxPolicyForNativePolicy: (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy { switch (nativePolicy) { case webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate: - return RTCRtcpMuxPolicyNegotiate; + return RTC_OBJC_TYPE(RTCRtcpMuxPolicyNegotiate); case webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire: - return RTCRtcpMuxPolicyRequire; + return RTC_OBJC_TYPE(RTCRtcpMuxPolicyRequire); } } -+ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy { ++ (NSString *)stringForRtcpMuxPolicy:(RTC_OBJC_TYPE(RTCRtcpMuxPolicy))policy { switch (policy) { - case RTCRtcpMuxPolicyNegotiate: + case RTC_OBJC_TYPE(RTCRtcpMuxPolicyNegotiate): return @"NEGOTIATE"; - case RTCRtcpMuxPolicyRequire: + case RTC_OBJC_TYPE(RTCRtcpMuxPolicyRequire): return @"REQUIRE"; } } -+ (webrtc::PeerConnectionInterface::TcpCandidatePolicy) - nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)policy { ++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeTcpCandidatePolicyForPolicy: + (RTC_OBJC_TYPE(RTCTcpCandidatePolicy))policy { switch (policy) { - case RTCTcpCandidatePolicyEnabled: + case RTC_OBJC_TYPE(RTCTcpCandidatePolicyEnabled): return webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled; - case RTCTcpCandidatePolicyDisabled: + case RTC_OBJC_TYPE(RTCTcpCandidatePolicyDisabled): return webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled; } } -+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy) - nativeCandidateNetworkPolicyForPolicy:(RTCCandidateNetworkPolicy)policy { ++ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativeCandidateNetworkPolicyForPolicy: + (RTC_OBJC_TYPE(RTCCandidateNetworkPolicy))policy { switch (policy) { - case RTCCandidateNetworkPolicyAll: + case RTC_OBJC_TYPE(RTCCandidateNetworkPolicyAll): return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll; - case RTCCandidateNetworkPolicyLowCost: + case RTC_OBJC_TYPE(RTCCandidateNetworkPolicyLowCost): return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost; } } -+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCTcpCandidatePolicy))tcpCandidatePolicyForNativePolicy: (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy { switch (nativePolicy) { case webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled: - return 
RTCTcpCandidatePolicyEnabled; + return RTC_OBJC_TYPE(RTCTcpCandidatePolicyEnabled); case webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled: - return RTCTcpCandidatePolicyDisabled; + return RTC_OBJC_TYPE(RTCTcpCandidatePolicyDisabled); } } -+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy { ++ (NSString *)stringForTcpCandidatePolicy:(RTC_OBJC_TYPE(RTCTcpCandidatePolicy))policy { switch (policy) { - case RTCTcpCandidatePolicyEnabled: + case RTC_OBJC_TYPE(RTCTcpCandidatePolicyEnabled): return @"TCP_ENABLED"; - case RTCTcpCandidatePolicyDisabled: + case RTC_OBJC_TYPE(RTCTcpCandidatePolicyDisabled): return @"TCP_DISABLED"; } } -+ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCCandidateNetworkPolicy))candidateNetworkPolicyForNativePolicy: (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy { switch (nativePolicy) { case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll: - return RTCCandidateNetworkPolicyAll; + return RTC_OBJC_TYPE(RTCCandidateNetworkPolicyAll); case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost: - return RTCCandidateNetworkPolicyLowCost; + return RTC_OBJC_TYPE(RTCCandidateNetworkPolicyLowCost); } } -+ (NSString *)stringForCandidateNetworkPolicy: - (RTCCandidateNetworkPolicy)policy { ++ (NSString *)stringForCandidateNetworkPolicy:(RTC_OBJC_TYPE(RTCCandidateNetworkPolicy))policy { switch (policy) { - case RTCCandidateNetworkPolicyAll: + case RTC_OBJC_TYPE(RTCCandidateNetworkPolicyAll): return @"CANDIDATE_ALL_NETWORKS"; - case RTCCandidateNetworkPolicyLowCost: + case RTC_OBJC_TYPE(RTCCandidateNetworkPolicyLowCost): return @"CANDIDATE_LOW_COST_NETWORKS"; } } + (webrtc::PeerConnectionInterface::ContinualGatheringPolicy) nativeContinualGatheringPolicyForPolicy: - (RTCContinualGatheringPolicy)policy { + (RTC_OBJC_TYPE(RTCContinualGatheringPolicy))policy { switch (policy) { - case RTCContinualGatheringPolicyGatherOnce: + case RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherOnce): return webrtc::PeerConnectionInterface::GATHER_ONCE; - case RTCContinualGatheringPolicyGatherContinually: + case RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherContinually): return webrtc::PeerConnectionInterface::GATHER_CONTINUALLY; } } -+ (RTCContinualGatheringPolicy)continualGatheringPolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCContinualGatheringPolicy))continualGatheringPolicyForNativePolicy: (webrtc::PeerConnectionInterface::ContinualGatheringPolicy)nativePolicy { switch (nativePolicy) { case webrtc::PeerConnectionInterface::GATHER_ONCE: - return RTCContinualGatheringPolicyGatherOnce; + return RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherOnce); case webrtc::PeerConnectionInterface::GATHER_CONTINUALLY: - return RTCContinualGatheringPolicyGatherContinually; + return RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherContinually); } } + (NSString *)stringForContinualGatheringPolicy: - (RTCContinualGatheringPolicy)policy { + (RTC_OBJC_TYPE(RTCContinualGatheringPolicy))policy { switch (policy) { - case RTCContinualGatheringPolicyGatherOnce: + case RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherOnce): return @"GATHER_ONCE"; - case RTCContinualGatheringPolicyGatherContinually: + case RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherContinually): return @"GATHER_CONTINUALLY"; } } -+ (webrtc::KeyType)nativeEncryptionKeyTypeForKeyType: - (RTCEncryptionKeyType)keyType { ++ (webrtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTC_OBJC_TYPE(RTCEncryptionKeyType))keyType { switch (keyType) { - case 
RTCEncryptionKeyTypeRSA: - return webrtc::KT_RSA; - case RTCEncryptionKeyTypeECDSA: - return webrtc::KT_ECDSA; + case RTC_OBJC_TYPE(RTCEncryptionKeyTypeRSA): + return rtc::KT_RSA; + case RTC_OBJC_TYPE(RTCEncryptionKeyTypeECDSA): + return rtc::KT_ECDSA; } } -+ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics: - (RTCSdpSemantics)sdpSemantics { ++ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTC_OBJC_TYPE(RTCSdpSemantics))sdpSemantics { switch (sdpSemantics) { - case RTCSdpSemanticsPlanB: + case RTC_OBJC_TYPE(RTCSdpSemanticsPlanB): return webrtc::SdpSemantics::kPlanB_DEPRECATED; - case RTCSdpSemanticsUnifiedPlan: + case RTC_OBJC_TYPE(RTCSdpSemanticsUnifiedPlan): return webrtc::SdpSemantics::kUnifiedPlan; } } -+ (RTCSdpSemantics)sdpSemanticsForNativeSdpSemantics: - (webrtc::SdpSemantics)sdpSemantics { ++ (RTC_OBJC_TYPE(RTCSdpSemantics))sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics { switch (sdpSemantics) { case webrtc::SdpSemantics::kPlanB_DEPRECATED: - return RTCSdpSemanticsPlanB; + return RTC_OBJC_TYPE(RTCSdpSemanticsPlanB); case webrtc::SdpSemantics::kUnifiedPlan: - return RTCSdpSemanticsUnifiedPlan; + return RTC_OBJC_TYPE(RTCSdpSemanticsUnifiedPlan); } } -+ (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics { ++ (NSString *)stringForSdpSemantics:(RTC_OBJC_TYPE(RTCSdpSemantics))sdpSemantics { switch (sdpSemantics) { - case RTCSdpSemanticsPlanB: + case RTC_OBJC_TYPE(RTCSdpSemanticsPlanB): return @"PLAN_B"; - case RTCSdpSemanticsUnifiedPlan: + case RTC_OBJC_TYPE(RTCSdpSemanticsUnifiedPlan): return @"UNIFIED_PLAN"; } } diff --git a/sdk/objc/api/peerconnection/RTCDataChannel+Private.h b/sdk/objc/api/peerconnection/RTCDataChannel+Private.h index 506e6a377a..0d2ab69c5b 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannel+Private.h +++ b/sdk/objc/api/peerconnection/RTCDataChannel+Private.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTC_OBJC_TYPE (RTCDataBuffer) +@interface RTC_OBJC_TYPE(RTCDataBuffer) () /** @@ -31,7 +31,7 @@ NS_ASSUME_NONNULL_BEGIN @end -@interface RTC_OBJC_TYPE (RTCDataChannel) +@interface RTC_OBJC_TYPE(RTCDataChannel) () /** Initialize an RTCDataChannel from a native DataChannelInterface. */ @@ -41,12 +41,12 @@ NS_ASSUME_NONNULL_BEGIN nativeDataChannel NS_DESIGNATED_INITIALIZER; + (webrtc::DataChannelInterface::DataState)nativeDataChannelStateForState: - (RTCDataChannelState)state; + (RTC_OBJC_TYPE(RTCDataChannelState))state; -+ (RTCDataChannelState)dataChannelStateForNativeState: ++ (RTC_OBJC_TYPE(RTCDataChannelState))dataChannelStateForNativeState: (webrtc::DataChannelInterface::DataState)nativeState; -+ (NSString *)stringForState:(RTCDataChannelState)state; ++ (NSString *)stringForState:(RTC_OBJC_TYPE(RTCDataChannelState))state; @end diff --git a/sdk/objc/api/peerconnection/RTCDataChannel.h b/sdk/objc/api/peerconnection/RTCDataChannel.h index d08bd4903b..83c54c212e 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannel.h +++ b/sdk/objc/api/peerconnection/RTCDataChannel.h @@ -55,11 +55,11 @@ RTC_OBJC_EXPORT @end /** Represents the state of the data channel. 
*/ -typedef NS_ENUM(NSInteger, RTCDataChannelState) { - RTCDataChannelStateConnecting, - RTCDataChannelStateOpen, - RTCDataChannelStateClosing, - RTCDataChannelStateClosed, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCDataChannelState)) { + RTC_OBJC_TYPE(RTCDataChannelStateConnecting), + RTC_OBJC_TYPE(RTCDataChannelStateOpen), + RTC_OBJC_TYPE(RTCDataChannelStateClosing), + RTC_OBJC_TYPE(RTCDataChannelStateClosed), }; RTC_OBJC_EXPORT @@ -110,7 +110,7 @@ RTC_OBJC_EXPORT @property(nonatomic, readonly) int channelId; /** The state of the data channel. */ -@property(nonatomic, readonly) RTCDataChannelState readyState; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCDataChannelState) readyState; /** * The number of bytes of application data that have been queued using diff --git a/sdk/objc/api/peerconnection/RTCDataChannel.mm b/sdk/objc/api/peerconnection/RTCDataChannel.mm index b52089a854..85f82c9c55 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannel.mm +++ b/sdk/objc/api/peerconnection/RTCDataChannel.mm @@ -149,9 +149,9 @@ - (int)channelId { return _nativeDataChannel->id(); } -- (RTCDataChannelState)readyState { - return - [[self class] dataChannelStateForNativeState:_nativeDataChannel->state()]; +- (RTC_OBJC_TYPE(RTCDataChannelState))readyState { + return [[self class] dataChannelStateForNativeState: + _nativeDataChannel->state()]; } - (uint64_t)bufferedAmount { @@ -191,43 +191,43 @@ - (NSString *)description { return self; } -+ (webrtc::DataChannelInterface::DataState)nativeDataChannelStateForState: - (RTCDataChannelState)state { ++ (webrtc::DataChannelInterface::DataState) + nativeDataChannelStateForState:(RTC_OBJC_TYPE(RTCDataChannelState))state { switch (state) { - case RTCDataChannelStateConnecting: + case RTC_OBJC_TYPE(RTCDataChannelStateConnecting): return webrtc::DataChannelInterface::DataState::kConnecting; - case RTCDataChannelStateOpen: + case RTC_OBJC_TYPE(RTCDataChannelStateOpen): return webrtc::DataChannelInterface::DataState::kOpen; - case RTCDataChannelStateClosing: + case RTC_OBJC_TYPE(RTCDataChannelStateClosing): return webrtc::DataChannelInterface::DataState::kClosing; - case RTCDataChannelStateClosed: + case RTC_OBJC_TYPE(RTCDataChannelStateClosed): return webrtc::DataChannelInterface::DataState::kClosed; } } -+ (RTCDataChannelState)dataChannelStateForNativeState: ++ (RTC_OBJC_TYPE(RTCDataChannelState))dataChannelStateForNativeState: (webrtc::DataChannelInterface::DataState)nativeState { switch (nativeState) { case webrtc::DataChannelInterface::DataState::kConnecting: - return RTCDataChannelStateConnecting; + return RTC_OBJC_TYPE(RTCDataChannelStateConnecting); case webrtc::DataChannelInterface::DataState::kOpen: - return RTCDataChannelStateOpen; + return RTC_OBJC_TYPE(RTCDataChannelStateOpen); case webrtc::DataChannelInterface::DataState::kClosing: - return RTCDataChannelStateClosing; + return RTC_OBJC_TYPE(RTCDataChannelStateClosing); case webrtc::DataChannelInterface::DataState::kClosed: - return RTCDataChannelStateClosed; + return RTC_OBJC_TYPE(RTCDataChannelStateClosed); } } -+ (NSString *)stringForState:(RTCDataChannelState)state { ++ (NSString *)stringForState:(RTC_OBJC_TYPE(RTCDataChannelState))state { switch (state) { - case RTCDataChannelStateConnecting: + case RTC_OBJC_TYPE(RTCDataChannelStateConnecting): return @"Connecting"; - case RTCDataChannelStateOpen: + case RTC_OBJC_TYPE(RTCDataChannelStateOpen): return @"Open"; - case RTCDataChannelStateClosing: + case RTC_OBJC_TYPE(RTCDataChannelStateClosing): return @"Closing"; - case RTCDataChannelStateClosed: 
+ case RTC_OBJC_TYPE(RTCDataChannelStateClosed): return @"Closed"; } } diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm index 6ff8dd8b25..9f69512517 100644 --- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm +++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm @@ -39,15 +39,13 @@ explicit ObjCEncodedImageBuffer(NSData *data) : data_(data) {} }; } // namespace -// A simple wrapper around webrtc::EncodedImageBufferInterface to make it usable -// with associated objects. -@interface RTCWrappedEncodedImageBuffer : NSObject +@interface RTC_OBJC_TYPE (RTCWrappedEncodedImageBuffer): NSObject @property(nonatomic) webrtc::scoped_refptr buffer; - (instancetype)initWithEncodedImageBuffer: (webrtc::scoped_refptr)buffer; @end -@implementation RTCWrappedEncodedImageBuffer +@implementation RTC_OBJC_TYPE (RTCWrappedEncodedImageBuffer) @synthesize buffer = _buffer; - (instancetype)initWithEncodedImageBuffer: (webrtc::scoped_refptr)buffer { @@ -63,7 +61,7 @@ @implementation RTC_OBJC_TYPE (RTCEncodedImage) (Private) - (webrtc::scoped_refptr)encodedData { - RTCWrappedEncodedImageBuffer *wrappedBuffer = + RTC_OBJC_TYPE(RTCWrappedEncodedImageBuffer) *wrappedBuffer = objc_getAssociatedObject(self, @selector(encodedData)); return wrappedBuffer.buffer; } @@ -73,7 +71,7 @@ - (void)setEncodedData: return objc_setAssociatedObject( self, @selector(encodedData), - [[RTCWrappedEncodedImageBuffer alloc] initWithEncodedImageBuffer:buffer], + [[RTC_OBJC_TYPE(RTCWrappedEncodedImageBuffer) alloc] initWithEncodedImageBuffer:buffer], OBJC_ASSOCIATION_RETAIN_NONATOMIC); } @@ -98,13 +96,12 @@ - (instancetype)initWithNativeEncodedImage: self.flags = encodedImage.timing_.flags; self.encodeStartMs = encodedImage.timing_.encode_start_ms; self.encodeFinishMs = encodedImage.timing_.encode_finish_ms; - self.frameType = static_cast(encodedImage._frameType); - self.rotation = static_cast(encodedImage.rotation_); + self.frameType = static_cast(encodedImage._frameType); + self.rotation = static_cast(encodedImage.rotation_); self.qp = @(encodedImage.qp_); - self.contentType = - (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ? - RTCVideoContentTypeScreenshare : - RTCVideoContentTypeUnspecified; + self.contentType = (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ? + RTC_OBJC_TYPE(RTCVideoContentTypeScreenshare) : + RTC_OBJC_TYPE(RTCVideoContentTypeUnspecified); } return self; @@ -132,8 +129,7 @@ - (instancetype)initWithNativeEncodedImage: encodedImage._frameType = webrtc::VideoFrameType(self.frameType); encodedImage.rotation_ = webrtc::VideoRotation(self.rotation); encodedImage.qp_ = self.qp ? self.qp.intValue : -1; - encodedImage.content_type_ = - (self.contentType == RTCVideoContentTypeScreenshare) ? + encodedImage.content_type_ = (self.contentType == RTC_OBJC_TYPE(RTCVideoContentTypeScreenshare)) ? webrtc::VideoContentType::SCREENSHARE : webrtc::VideoContentType::UNSPECIFIED; diff --git a/sdk/objc/api/peerconnection/RTCFieldTrials.h b/sdk/objc/api/peerconnection/RTCFieldTrials.h index a61a2b3f98..59c408bed8 100644 --- a/sdk/objc/api/peerconnection/RTCFieldTrials.h +++ b/sdk/objc/api/peerconnection/RTCFieldTrials.h @@ -12,21 +12,19 @@ #import "sdk/objc/base/RTCMacros.h" -/** The only valid value for the following if set is kRTCFieldTrialEnabledValue. 
- */ -RTC_EXTERN NSString *const kRTCFieldTrialAudioForceABWENoTWCCKey; -RTC_EXTERN NSString *const kRTCFieldTrialFlexFec03AdvertisedKey; -RTC_EXTERN NSString *const kRTCFieldTrialFlexFec03Key; -RTC_EXTERN NSString *const kRTCFieldTrialH264HighProfileKey; -RTC_EXTERN NSString *const kRTCFieldTrialMinimizeResamplingOnMobileKey; -RTC_EXTERN NSString *const kRTCFieldTrialUseNWPathMonitor; +/** The only valid value for the following if set is kRTCFieldTrialEnabledValue. */ +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialAudioForceABWENoTWCCKey); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialFlexFec03AdvertisedKey); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialFlexFec03Key); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialH264HighProfileKey); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialMinimizeResamplingOnMobileKey); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialUseNWPathMonitor); /** The valid value for field trials above. */ -RTC_EXTERN NSString *const kRTCFieldTrialEnabledValue; +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialEnabledValue); /** Initialize field trials using a dictionary mapping field trial keys to their * values. See above for valid keys and values. Must be called before any other * call into WebRTC. See: webrtc/system_wrappers/include/field_trial.h */ -RTC_EXTERN void RTCInitFieldTrialDictionary( - NSDictionary *fieldTrials); +RTC_EXTERN void RTC_OBJC_TYPE(RTCInitFieldTrialDictionary)(NSDictionary *fieldTrials); diff --git a/sdk/objc/api/peerconnection/RTCFieldTrials.mm b/sdk/objc/api/peerconnection/RTCFieldTrials.mm index 80619b3291..7f1f9993cb 100644 --- a/sdk/objc/api/peerconnection/RTCFieldTrials.mm +++ b/sdk/objc/api/peerconnection/RTCFieldTrials.mm @@ -10,34 +10,32 @@ #import "RTCFieldTrials.h" +#import #include - #import "base/RTCLogging.h" #include "system_wrappers/include/field_trial.h" -NSString *const kRTCFieldTrialAudioForceABWENoTWCCKey = - @"WebRTC-Audio-ABWENoTWCC"; -NSString *const kRTCFieldTrialFlexFec03AdvertisedKey = - @"WebRTC-FlexFEC-03-Advertised"; -NSString *const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03"; -NSString *const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; -NSString *const kRTCFieldTrialMinimizeResamplingOnMobileKey = +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialAudioForceABWENoTWCCKey) = @"WebRTC-Audio-ABWENoTWCC"; +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialFlexFec03AdvertisedKey) = @"WebRTC-FlexFEC-03-Advertised"; +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialFlexFec03Key) = @"WebRTC-FlexFEC-03"; +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialH264HighProfileKey) = @"WebRTC-H264HighProfile"; +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialMinimizeResamplingOnMobileKey) = @"WebRTC-Audio-MinimizeResamplingOnMobile"; -NSString *const kRTCFieldTrialUseNWPathMonitor = - @"WebRTC-Network-UseNWPathMonitor"; -NSString *const kRTCFieldTrialEnabledValue = @"Enabled"; +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialUseNWPathMonitor) = @"WebRTC-Network-UseNWPathMonitor"; +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialEnabledValue) = @"Enabled"; // InitFieldTrialsFromString stores the char*, so the char array must outlive // the application. 
static char *gFieldTrialInitString = nullptr; +static os_unfair_lock fieldTrialLock = OS_UNFAIR_LOCK_INIT; -void RTCInitFieldTrialDictionary( - NSDictionary *fieldTrials) { +void RTC_OBJC_TYPE(RTCInitFieldTrialDictionary)(NSDictionary *fieldTrials) { if (!fieldTrials) { RTCLogWarning(@"No fieldTrials provided."); return; } + // Assemble the keys and values into the field trial string. // We don't perform any extra format checking. That should be done by the // underlying WebRTC calls. @@ -47,16 +45,26 @@ void RTCInitFieldTrialDictionary( [NSString stringWithFormat:@"%@/%@/", key, fieldTrials[key]]; [fieldTrialInitString appendString:fieldTrialEntry]; } + size_t len = fieldTrialInitString.length + 1; + + // Locking before modifying global variable + os_unfair_lock_lock(&fieldTrialLock); if (gFieldTrialInitString != nullptr) { delete[] gFieldTrialInitString; + gFieldTrialInitString = nullptr; } + gFieldTrialInitString = new char[len]; - if (![fieldTrialInitString getCString:gFieldTrialInitString - maxLength:len - encoding:NSUTF8StringEncoding]) { + bool success = [fieldTrialInitString getCString:gFieldTrialInitString + maxLength:len + encoding:NSUTF8StringEncoding]; + if (!success) { RTCLogError(@"Failed to convert field trial string."); + os_unfair_lock_unlock(&fieldTrialLock); return; } + webrtc::field_trial::InitFieldTrialsFromString(gFieldTrialInitString); + os_unfair_lock_unlock(&fieldTrialLock); } diff --git a/sdk/objc/api/peerconnection/RTCFileLogger.h b/sdk/objc/api/peerconnection/RTCFileLogger.h index 37fe366667..851422b2f7 100644 --- a/sdk/objc/api/peerconnection/RTCFileLogger.h +++ b/sdk/objc/api/peerconnection/RTCFileLogger.h @@ -12,16 +12,16 @@ #import "sdk/objc/base/RTCMacros.h" -typedef NS_ENUM(NSUInteger, RTCFileLoggerSeverity) { - RTCFileLoggerSeverityVerbose, - RTCFileLoggerSeverityInfo, - RTCFileLoggerSeverityWarning, - RTCFileLoggerSeverityError +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCFileLoggerSeverity)) { + RTC_OBJC_TYPE(RTCFileLoggerSeverityVerbose), + RTC_OBJC_TYPE(RTCFileLoggerSeverityInfo), + RTC_OBJC_TYPE(RTCFileLoggerSeverityWarning), + RTC_OBJC_TYPE(RTCFileLoggerSeverityError) }; -typedef NS_ENUM(NSUInteger, RTCFileLoggerRotationType) { - RTCFileLoggerTypeCall, - RTCFileLoggerTypeApp, +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCFileLoggerRotationType)) { + RTC_OBJC_TYPE(RTCFileLoggerTypeCall), + RTC_OBJC_TYPE(RTCFileLoggerTypeApp), }; NS_ASSUME_NONNULL_BEGIN @@ -37,11 +37,11 @@ RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCFileLogger) : NSObject // The severity level to capture. The default is kRTCFileLoggerSeverityInfo. -@property(nonatomic, assign) RTCFileLoggerSeverity severity; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCFileLoggerSeverity) severity; // The rotation type for this file logger. The default is // kRTCFileLoggerTypeCall. -@property(nonatomic, readonly) RTCFileLoggerRotationType rotationType; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCFileLoggerRotationType) rotationType; // Disables buffering disk writes. Should be set before `start`. Buffering // is enabled by default for performance. @@ -57,8 +57,7 @@ RTC_OBJC_EXPORT - (instancetype)initWithDirPath:(NSString *)dirPath maxFileSize:(NSUInteger)maxFileSize - rotationType:(RTCFileLoggerRotationType)rotationType - NS_DESIGNATED_INITIALIZER; + rotationType:(RTC_OBJC_TYPE(RTCFileLoggerRotationType))rotationType NS_DESIGNATED_INITIALIZER; // Starts writing WebRTC logs to disk if not already started. Overwrites any // existing file(s). 
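(Editor's aside, not part of the patch.) The renamed field-trial and file-logger entry points above change every call site, so a minimal consumption sketch may help reviewers. It assumes the `RTC_OBJC_TYPE`/`RTC_CONSTANT_TYPE` macros expand as elsewhere in this fork, that the symbols are reachable through an umbrella header (`<WebRTC/WebRTC.h>` here is an assumption about packaging), and `logDir` is a placeholder path supplied by the app.

```objc
// Illustrative sketch only. Field trials must be installed before any other
// call into WebRTC; the logger uses the app-rotation mode shown above.
#import <WebRTC/WebRTC.h>  // assumed umbrella header

static RTC_OBJC_TYPE(RTCFileLogger) *StartWebRTCDiagnostics(NSString *logDir) {
  // Enable one of the field trials declared in RTCFieldTrials.h.
  RTC_OBJC_TYPE(RTCInitFieldTrialDictionary)(@{
    RTC_CONSTANT_TYPE(RTCFieldTrialH264HighProfileKey) :
        RTC_CONSTANT_TYPE(RTCFieldTrialEnabledValue)
  });

  // App-rotation logger capped at 5 MB, capturing warnings and above.
  RTC_OBJC_TYPE(RTCFileLogger) *logger = [[RTC_OBJC_TYPE(RTCFileLogger) alloc]
      initWithDirPath:logDir
          maxFileSize:5 * 1024 * 1024
         rotationType:RTC_OBJC_TYPE(RTCFileLoggerTypeApp)];
  logger.severity = RTC_OBJC_TYPE(RTCFileLoggerSeverityWarning);
  [logger start];
  return logger;
}
```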
diff --git a/sdk/objc/api/peerconnection/RTCFileLogger.mm b/sdk/objc/api/peerconnection/RTCFileLogger.mm index b3a6ba3764..1be4c04b13 100644 --- a/sdk/objc/api/peerconnection/RTCFileLogger.mm +++ b/sdk/objc/api/peerconnection/RTCFileLogger.mm @@ -18,8 +18,8 @@ #include "rtc_base/logging.h" NSString *const kDefaultLogDirName = @"webrtc_logs"; -NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB. -const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log"; +NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB. +const char * RTC_CONSTANT_TYPE(RTCFileLoggerRotatingLogPrefix) = "rotating_log"; @implementation RTC_OBJC_TYPE (RTCFileLogger) { BOOL _hasStarted; @@ -45,12 +45,12 @@ - (instancetype)initWithDirPath:(NSString *)dirPath maxFileSize:(NSUInteger)maxFileSize { return [self initWithDirPath:dirPath maxFileSize:maxFileSize - rotationType:RTCFileLoggerTypeCall]; + rotationType:RTC_OBJC_TYPE(RTCFileLoggerTypeCall)]; } - (instancetype)initWithDirPath:(NSString *)dirPath maxFileSize:(NSUInteger)maxFileSize - rotationType:(RTCFileLoggerRotationType)rotationType { + rotationType:(RTC_OBJC_TYPE(RTCFileLoggerRotationType))rotationType { NSParameterAssert(dirPath.length); NSParameterAssert(maxFileSize); self = [super init]; @@ -73,7 +73,7 @@ - (instancetype)initWithDirPath:(NSString *)dirPath } _dirPath = dirPath; _maxFileSize = maxFileSize; - _severity = RTCFileLoggerSeverityInfo; + _severity = RTC_OBJC_TYPE(RTCFileLoggerSeverityInfo); } return self; } @@ -87,16 +87,17 @@ - (void)start { return; } switch (_rotationType) { - case RTCFileLoggerTypeApp: + case RTC_OBJC_TYPE(RTCFileLoggerTypeApp): _logSink.reset( new webrtc::FileRotatingLogSink(_dirPath.UTF8String, - kRTCFileLoggerRotatingLogPrefix, - _maxFileSize, - _maxFileSize / 10)); + RTC_CONSTANT_TYPE(RTCFileLoggerRotatingLogPrefix), + _maxFileSize, + _maxFileSize / 10)); break; - case RTCFileLoggerTypeCall: - _logSink.reset(new webrtc::CallSessionFileRotatingLogSink( - _dirPath.UTF8String, _maxFileSize)); + case RTC_OBJC_TYPE(RTCFileLoggerTypeCall): + _logSink.reset( + new webrtc::CallSessionFileRotatingLogSink(_dirPath.UTF8String, + _maxFileSize)); break; } if (!_logSink->Init()) { @@ -128,16 +129,15 @@ - (nullable NSData *)logData { if (_hasStarted) { return nil; } - NSMutableData *logData = [NSMutableData data]; + NSMutableData* logData = [NSMutableData data]; std::unique_ptr<webrtc::FileRotatingStreamReader> stream; - switch (_rotationType) { - case RTCFileLoggerTypeApp: - stream = std::make_unique<webrtc::FileRotatingStreamReader>( - _dirPath.UTF8String, kRTCFileLoggerRotatingLogPrefix); + switch(_rotationType) { + case RTC_OBJC_TYPE(RTCFileLoggerTypeApp): + stream = std::make_unique<webrtc::FileRotatingStreamReader>(_dirPath.UTF8String, + RTC_CONSTANT_TYPE(RTCFileLoggerRotatingLogPrefix)); break; - case RTCFileLoggerTypeCall: - stream = std::make_unique<webrtc::CallSessionFileRotatingStreamReader>( - _dirPath.UTF8String); + case RTC_OBJC_TYPE(RTCFileLoggerTypeCall): + stream = std::make_unique<webrtc::CallSessionFileRotatingStreamReader>(_dirPath.UTF8String); break; } size_t bufferSize = stream->GetSize(); @@ -157,13 +157,13 @@ - (webrtc::LoggingSeverity)rtcSeverity { switch (_severity) { - case RTCFileLoggerSeverityVerbose: + case RTC_OBJC_TYPE(RTCFileLoggerSeverityVerbose): return webrtc::LS_VERBOSE; - case RTCFileLoggerSeverityInfo: + case RTC_OBJC_TYPE(RTCFileLoggerSeverityInfo): return webrtc::LS_INFO; - case RTCFileLoggerSeverityWarning: + case RTC_OBJC_TYPE(RTCFileLoggerSeverityWarning): return webrtc::LS_WARNING; - case RTCFileLoggerSeverityError: + case RTC_OBJC_TYPE(RTCFileLoggerSeverityError): return webrtc::LS_ERROR; } } diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h b/sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h
new file mode 100644 index 0000000000..86e6fdff8c --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h @@ -0,0 +1,45 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCFrameCryptor.h" + +#include +#include "api/crypto/frame_crypto_transformer.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCFrameCryptor) +() + + @end + +namespace webrtc { + +class RTCFrameCryptorDelegateAdapter : public FrameCryptorTransformerObserver { + public: + RTCFrameCryptorDelegateAdapter(RTC_OBJC_TYPE(RTCFrameCryptor) * frameCryptor); + ~RTCFrameCryptorDelegateAdapter() override; + + void OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState state) override; + + private: + __weak RTC_OBJC_TYPE(RTCFrameCryptor) * frame_cryptor_; +}; + +} // namespace webrtc + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.h b/sdk/objc/api/peerconnection/RTCFrameCryptor.h new file mode 100644 index 0000000000..b37ededa31 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.h @@ -0,0 +1,77 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import <Foundation/Foundation.h> + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCRtpSender); +@class RTC_OBJC_TYPE(RTCRtpReceiver); +@class RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider); +@class RTC_OBJC_TYPE(RTCFrameCryptor); +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); + +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCCryptorAlgorithm)) { + RTC_OBJC_TYPE(RTCCryptorAlgorithmAesGcm) = 0, +}; + +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCFrameCryptorState)) { + RTC_OBJC_TYPE(RTCFrameCryptorStateNew) = 0, + RTC_OBJC_TYPE(RTCFrameCryptorStateOk), + RTC_OBJC_TYPE(RTCFrameCryptorStateEncryptionFailed), + RTC_OBJC_TYPE(RTCFrameCryptorStateDecryptionFailed), + RTC_OBJC_TYPE(RTCFrameCryptorStateMissingKey), + RTC_OBJC_TYPE(RTCFrameCryptorStateKeyRatcheted), + RTC_OBJC_TYPE(RTCFrameCryptorStateInternalError), +}; + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(RTCFrameCryptorDelegate) + /** Called when the state of the RTCFrameCryptor changes, including on errors. 
*/ + - (void)frameCryptor + : (RTC_OBJC_TYPE(RTCFrameCryptor) *)frameCryptor didStateChangeWithParticipantId + : (NSString *)participantId withState : (RTC_OBJC_TYPE(RTCFrameCryptorState))stateChanged; +@end + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCFrameCryptor) : NSObject + +@property(nonatomic, assign) BOOL enabled; + +@property(nonatomic, assign) int keyIndex; + +@property(nonatomic, readonly) NSString *participantId; + +@property(nonatomic, weak, nullable) id delegate; + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender + participantId:(NSString *)participantId + algorithm:(RTC_OBJC_TYPE(RTCCryptorAlgorithm))algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver + participantId:(NSString *)participantId + algorithm:(RTC_OBJC_TYPE(RTCCryptorAlgorithm))algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm new file mode 100644 index 0000000000..4a7b02497a --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm @@ -0,0 +1,236 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCFrameCryptor+Private.h" +#import "RTCFrameCryptorKeyProvider+Private.h" +#import "RTCPeerConnectionFactory+Private.h" +#import "RTCRtpReceiver+Private.h" +#import "RTCRtpSender+Private.h" + +#import <os/lock.h> +#include + +#import "base/RTCLogging.h" +#import "helpers/NSString+StdString.h" + +#include "api/crypto/frame_crypto_transformer.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" + +namespace webrtc { + +RTCFrameCryptorDelegateAdapter::RTCFrameCryptorDelegateAdapter(RTC_OBJC_TYPE(RTCFrameCryptor) * + frameCryptor) + : frame_cryptor_(frameCryptor) {} + +RTCFrameCryptorDelegateAdapter::~RTCFrameCryptorDelegateAdapter() {} + +/* + kNew = 0, + kOk, + kEncryptionFailed, + kDecryptionFailed, + kMissingKey, + kKeyRatcheted, + kInternalError, +*/ +void RTCFrameCryptorDelegateAdapter::OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState state) { + RTC_OBJC_TYPE(RTCFrameCryptor) *frameCryptor = frame_cryptor_; + if (frameCryptor.delegate) { + switch (state) { + case FrameCryptionState::kNew: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateNew)]; + break; + case FrameCryptionState::kOk: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateOk)]; + break; + case FrameCryptionState::kEncryptionFailed: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateEncryptionFailed)]; + break; + case FrameCryptionState::kDecryptionFailed: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateDecryptionFailed)]; + break; + case FrameCryptionState::kMissingKey: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateMissingKey)]; + break; + case FrameCryptionState::kKeyRatcheted: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateKeyRatcheted)]; + break; + case FrameCryptionState::kInternalError: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateInternalError)]; + break; + } + } +} +} // namespace webrtc + +@implementation RTC_OBJC_TYPE (RTCFrameCryptor) { + const webrtc::RtpSenderInterface *_sender; + const webrtc::RtpReceiverInterface *_receiver; + webrtc::scoped_refptr<webrtc::FrameCryptorTransformer> _frame_crypto_transformer; + webrtc::scoped_refptr<webrtc::RTCFrameCryptorDelegateAdapter> _observer; + os_unfair_lock _lock; +} + +@synthesize participantId = _participantId; +@synthesize delegate = _delegate; + +- (webrtc::FrameCryptorTransformer::Algorithm)algorithmFromEnum:(RTC_OBJC_TYPE(RTCCryptorAlgorithm))algorithm { + switch (algorithm) { + case RTC_OBJC_TYPE(RTCCryptorAlgorithmAesGcm): + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + default: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + } +} + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender + 
participantId:(NSString *)participantId + algorithm:(RTC_OBJC_TYPE(RTCCryptorAlgorithm))algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + + webrtc::scoped_refptr<webrtc::RtpSenderInterface> nativeRtpSender = sender.nativeRtpSender; + if (nativeRtpSender == nullptr) return nil; + + webrtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack = nativeRtpSender->track(); + if (nativeTrack == nullptr) return nil; + + webrtc::FrameCryptorTransformer::MediaType mediaType = + nativeTrack->kind() == "audio" ? webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + + os_unfair_lock_lock(&_lock); + _observer = webrtc::make_ref_counted<webrtc::RTCFrameCryptorDelegateAdapter>(self); + _participantId = participantId; + + _frame_crypto_transformer = + webrtc::scoped_refptr<webrtc::FrameCryptorTransformer>(new webrtc::FrameCryptorTransformer( + factory.signalingThread, [participantId stdString], mediaType, + [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); + + factory.signalingThread->BlockingCall([self, nativeRtpSender] { + // Must be called on the signaling thread + nativeRtpSender->SetEncoderToPacketizerFrameTransformer(_frame_crypto_transformer); + }); + + _frame_crypto_transformer->SetEnabled(false); + _frame_crypto_transformer->RegisterFrameCryptorTransformerObserver(_observer); + os_unfair_lock_unlock(&_lock); + } + + return self; +} + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver + participantId:(NSString *)participantId + algorithm:(RTC_OBJC_TYPE(RTCCryptorAlgorithm))algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { + self = [super init]; + if (self) { + _lock = OS_UNFAIR_LOCK_INIT; + + webrtc::scoped_refptr<webrtc::RtpReceiverInterface> nativeRtpReceiver = receiver.nativeRtpReceiver; + if (nativeRtpReceiver == nullptr) return nil; + + webrtc::scoped_refptr<webrtc::MediaStreamTrackInterface> nativeTrack = nativeRtpReceiver->track(); + if (nativeTrack == nullptr) return nil; + + webrtc::FrameCryptorTransformer::MediaType mediaType = + nativeTrack->kind() == "audio" ? webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + + os_unfair_lock_lock(&_lock); + _observer = webrtc::make_ref_counted<webrtc::RTCFrameCryptorDelegateAdapter>(self); + _participantId = participantId; + + _frame_crypto_transformer = + webrtc::scoped_refptr<webrtc::FrameCryptorTransformer>(new webrtc::FrameCryptorTransformer( + factory.signalingThread, [participantId stdString], mediaType, + [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); + + factory.signalingThread->BlockingCall([self, nativeRtpReceiver] { + // Must be called on the signaling thread + nativeRtpReceiver->SetDepacketizerToDecoderFrameTransformer(_frame_crypto_transformer); + }); + + _frame_crypto_transformer->SetEnabled(false); + _frame_crypto_transformer->RegisterFrameCryptorTransformerObserver(_observer); + os_unfair_lock_unlock(&_lock); + } + + return self; +} + +- (void)dealloc { + os_unfair_lock_lock(&_lock); + if (_frame_crypto_transformer != nullptr) { + _frame_crypto_transformer->UnRegisterFrameCryptorTransformerObserver(); + _frame_crypto_transformer = nullptr; + } + _observer = nullptr; + os_unfair_lock_unlock(&_lock); +} + +- (BOOL)enabled { + os_unfair_lock_lock(&_lock); + BOOL result = _frame_crypto_transformer != nullptr ? 
_frame_crypto_transformer->enabled() : NO; + os_unfair_lock_unlock(&_lock); + return result; +} + +- (void)setEnabled:(BOOL)enabled { + os_unfair_lock_lock(&_lock); + if (_frame_crypto_transformer != nullptr) { + _frame_crypto_transformer->SetEnabled(enabled); + } + os_unfair_lock_unlock(&_lock); +} + +- (int)keyIndex { + os_unfair_lock_lock(&_lock); + int result = _frame_crypto_transformer != nullptr ? _frame_crypto_transformer->key_index() : 0; + os_unfair_lock_unlock(&_lock); + return result; +} + +- (void)setKeyIndex:(int)keyIndex { + os_unfair_lock_lock(&_lock); + if (_frame_crypto_transformer != nullptr) { + _frame_crypto_transformer->SetKeyIndex(keyIndex); + } + os_unfair_lock_unlock(&_lock); +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h new file mode 100644 index 0000000000..269219f170 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h @@ -0,0 +1,31 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCFrameCryptorKeyProvider.h" + +#include "api/crypto/frame_crypto_transformer.h" +#include "rtc_base/ref_count.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) +() + + @property(nonatomic, readonly) webrtc::scoped_refptr nativeKeyProvider; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h new file mode 100644 index 0000000000..6443b23349 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h @@ -0,0 +1,62 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h
new file mode 100644
index 0000000000..269219f170
--- /dev/null
+++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h
@@ -0,0 +1,31 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "RTCFrameCryptorKeyProvider.h"
+
+#include "api/crypto/frame_crypto_transformer.h"
+#include "rtc_base/ref_count.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) ()
+
+@property(nonatomic, readonly) webrtc::scoped_refptr<webrtc::KeyProvider> nativeKeyProvider;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h
new file mode 100644
index 0000000000..6443b23349
--- /dev/null
+++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h
@@ -0,0 +1,62 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) : NSObject
+
+- (void)setSharedKey:(NSData *)key withIndex:(int)index;
+
+- (NSData *)ratchetSharedKey:(int)index;
+
+- (NSData *)exportSharedKey:(int)index;
+
+- (void)setKey:(NSData *)key withIndex:(int)index forParticipant:(NSString *)participantId;
+
+- (NSData *)ratchetKey:(NSString *)participantId withIndex:(int)index;
+
+- (NSData *)exportKey:(NSString *)participantId withIndex:(int)index;
+
+- (void)setSifTrailer:(NSData *)trailer;
+
+- (instancetype)initWithRatchetSalt:(NSData *)salt
+                  ratchetWindowSize:(int)windowSize
+                      sharedKeyMode:(BOOL)sharedKey
+                uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes;
+
+- (instancetype)initWithRatchetSalt:(NSData *)salt
+                  ratchetWindowSize:(int)windowSize
+                      sharedKeyMode:(BOOL)sharedKey
+                uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes
+                   failureTolerance:(int)failureTolerance
+                        keyRingSize:(int)keyRingSize;
+
+- (instancetype)initWithRatchetSalt:(NSData *)salt
+                  ratchetWindowSize:(int)windowSize
+                      sharedKeyMode:(BOOL)sharedKey
+                uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes
+                   failureTolerance:(int)failureTolerance
+                        keyRingSize:(int)keyRingSize
+    discardFrameWhenCryptorNotReady:(BOOL)discardFrameWhenCryptorNotReady;
+
+@end
+
+NS_ASSUME_NONNULL_END
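The three initializers simply layer optional knobs (failure tolerance, key ring size, frame discarding) onto one designated initializer. A sketch of constructing a shared-key provider against this header (the salt and key bytes are placeholders, not recommended values):

    NSData *salt = [@"demo-ratchet-salt" dataUsingEncoding:NSUTF8StringEncoding];
    RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *provider =
        [[RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) alloc] initWithRatchetSalt:salt
                                                              ratchetWindowSize:16
                                                                  sharedKeyMode:YES
                                                            uncryptedMagicBytes:nil];
    NSData *key = [@"demo-key-material" dataUsingEncoding:NSUTF8StringEncoding];
    [provider setSharedKey:key withIndex:0];  // key slot 0, matching cryptor.keyIndex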
diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm
new file mode 100644
index 0000000000..c261a3efb7
--- /dev/null
+++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm
@@ -0,0 +1,125 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "RTCFrameCryptorKeyProvider+Private.h"
+
+#include <memory>
+
+#include "api/crypto/frame_crypto_transformer.h"
+
+#import "base/RTCLogging.h"
+#import "helpers/NSString+StdString.h"
+
+@implementation RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) {
+  webrtc::scoped_refptr<webrtc::KeyProvider> _nativeKeyProvider;
+}
+
+- (webrtc::scoped_refptr<webrtc::KeyProvider>)nativeKeyProvider {
+  return _nativeKeyProvider;
+}
+
+- (instancetype)initWithRatchetSalt:(NSData *)salt
+                  ratchetWindowSize:(int)windowSize
+                      sharedKeyMode:(BOOL)sharedKey
+                uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes {
+  return [self initWithRatchetSalt:salt
+                 ratchetWindowSize:windowSize
+                     sharedKeyMode:sharedKey
+               uncryptedMagicBytes:uncryptedMagicBytes
+                  failureTolerance:-1
+                       keyRingSize:webrtc::DEFAULT_KEYRING_SIZE];
+}
+
+- (instancetype)initWithRatchetSalt:(NSData *)salt
+                  ratchetWindowSize:(int)windowSize
+                      sharedKeyMode:(BOOL)sharedKey
+                uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes
+                   failureTolerance:(int)failureTolerance
+                        keyRingSize:(int)keyRingSize {
+  // Forward the caller-supplied failure tolerance instead of hardcoding -1.
+  return [self initWithRatchetSalt:salt
+                 ratchetWindowSize:windowSize
+                     sharedKeyMode:sharedKey
+               uncryptedMagicBytes:uncryptedMagicBytes
+                  failureTolerance:failureTolerance
+                       keyRingSize:keyRingSize
+   discardFrameWhenCryptorNotReady:NO];
+}
+
+- (instancetype)initWithRatchetSalt:(NSData *)salt
+                  ratchetWindowSize:(int)windowSize
+                      sharedKeyMode:(BOOL)sharedKey
+                uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes
+                   failureTolerance:(int)failureTolerance
+                        keyRingSize:(int)keyRingSize
+    discardFrameWhenCryptorNotReady:(BOOL)discardFrameWhenCryptorNotReady {
+  self = [super init];
+  if (self) {
+    webrtc::KeyProviderOptions options;
+    options.ratchet_salt = std::vector<uint8_t>((const uint8_t *)salt.bytes,
+                                                ((const uint8_t *)salt.bytes) + salt.length);
+    options.ratchet_window_size = windowSize;
+    options.shared_key = sharedKey;
+    options.failure_tolerance = failureTolerance;
+    options.key_ring_size = keyRingSize;
+    options.discard_frame_when_cryptor_not_ready = discardFrameWhenCryptorNotReady;
+    if (uncryptedMagicBytes != nil) {
+      options.uncrypted_magic_bytes =
+          std::vector<uint8_t>((const uint8_t *)uncryptedMagicBytes.bytes,
+                               ((const uint8_t *)uncryptedMagicBytes.bytes) + uncryptedMagicBytes.length);
+    }
+    _nativeKeyProvider = webrtc::make_ref_counted<webrtc::DefaultKeyProviderImpl>(options);
+  }
+  return self;
+}
+
+- (void)setKey:(NSData *)key withIndex:(int)index forParticipant:(NSString *)participantId {
+  _nativeKeyProvider->SetKey(
+      [participantId stdString],
+      index,
+      std::vector<uint8_t>((const uint8_t *)key.bytes, ((const uint8_t *)key.bytes) + key.length));
+}
+
+- (void)setSharedKey:(NSData *)key withIndex:(int)index {
+  _nativeKeyProvider->SetSharedKey(
+      index,
+      std::vector<uint8_t>((const uint8_t *)key.bytes, ((const uint8_t *)key.bytes) + key.length));
+}
+
+- (NSData *)ratchetSharedKey:(int)index {
+  std::vector<uint8_t> nativeKey = _nativeKeyProvider->RatchetSharedKey(index);
+  return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()];
+}
+
+- (NSData *)exportSharedKey:(int)index {
+  std::vector<uint8_t> nativeKey = _nativeKeyProvider->ExportSharedKey(index);
+  return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()];
+}
+
+- (NSData *)ratchetKey:(NSString *)participantId withIndex:(int)index {
+  std::vector<uint8_t> nativeKey = _nativeKeyProvider->RatchetKey([participantId stdString], index);
+  return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()];
+}
+
+- (NSData *)exportKey:(NSString *)participantId withIndex:(int)index {
+  std::vector<uint8_t> nativeKey = _nativeKeyProvider->ExportKey([participantId stdString], index);
+  return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()];
+}
+
+- (void)setSifTrailer:(NSData *)trailer {
+  _nativeKeyProvider->SetSifTrailer(
+      std::vector<uint8_t>((const uint8_t *)trailer.bytes,
+                           ((const uint8_t *)trailer.bytes) + trailer.length));
+}
+
+@end
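In per-participant mode (`sharedKeyMode:NO`) the same provider keeps one key ring per participant id, and ratcheting derives the next key from the current one so both ends can advance without exchanging fresh material. Continuing the sketch above (`provider` and `key` as before; the participant id is illustrative):

    [provider setKey:key withIndex:0 forParticipant:@"alice"];
    NSData *ratcheted = [provider ratchetKey:@"alice" withIndex:0];  // derive the successor key
    NSData *current = [provider exportKey:@"alice" withIndex:0];     // inspect the active key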
diff --git a/sdk/objc/api/peerconnection/RTCIODevice+Private.h b/sdk/objc/api/peerconnection/RTCIODevice+Private.h
new file mode 100644
index 0000000000..54d19a4626
--- /dev/null
+++ b/sdk/objc/api/peerconnection/RTCIODevice+Private.h
@@ -0,0 +1,28 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#import "RTCIODevice.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE(RTCIODevice) ()
+
+- (instancetype)initWithType:(RTC_OBJC_TYPE(RTCIODeviceType))type
+                    deviceId:(NSString *)deviceId
+                        name:(NSString *)name;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/api/peerconnection/RTCIODevice.h b/sdk/objc/api/peerconnection/RTCIODevice.h
new file mode 100644
index 0000000000..e84e55d95e
--- /dev/null
+++ b/sdk/objc/api/peerconnection/RTCIODevice.h
@@ -0,0 +1,41 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCIODeviceType)) {
+  RTC_OBJC_TYPE(RTCIODeviceTypeOutput),
+  RTC_OBJC_TYPE(RTCIODeviceTypeInput),
+};
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE(RTCIODevice) : NSObject
+
++ (instancetype)defaultDeviceWithType:(RTC_OBJC_TYPE(RTCIODeviceType))type;
+- (instancetype)init NS_UNAVAILABLE;
+
+@property(nonatomic, readonly) BOOL isDefault;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCIODeviceType) type;
+@property(nonatomic, copy, readonly) NSString *deviceId;
+@property(nonatomic, copy, readonly) NSString *name;
+
+@end
+
+NS_ASSUME_NONNULL_END
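This header gives device identity a small value-type surface; the sentinel "default" device is constructed rather than enumerated. For example, using only the API declared above:

    RTC_OBJC_TYPE(RTCIODevice) *output =
        [RTC_OBJC_TYPE(RTCIODevice) defaultDeviceWithType:RTC_OBJC_TYPE(RTCIODeviceTypeOutput)];
    NSLog(@"device %@ isDefault=%d", output.deviceId, output.isDefault);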
diff --git a/sdk/objc/api/peerconnection/RTCIODevice.mm b/sdk/objc/api/peerconnection/RTCIODevice.mm
new file mode 100644
index 0000000000..21e416317f
--- /dev/null
+++ b/sdk/objc/api/peerconnection/RTCIODevice.mm
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "RTCIODevice.h"
+#import "RTCIODevice+Private.h"
+
+NSString *const kDefaultDeviceId = @"default";
+
+@implementation RTC_OBJC_TYPE(RTCIODevice)
+
+@synthesize type = _type;
+@synthesize deviceId = _deviceId;
+@synthesize name = _name;
+
++ (instancetype)defaultDeviceWithType:(RTC_OBJC_TYPE(RTCIODeviceType))type {
+  return [[self alloc] initWithType:type
+                           deviceId:kDefaultDeviceId
+                               name:@""];
+}
+
+- (instancetype)initWithType:(RTC_OBJC_TYPE(RTCIODeviceType))type
+                    deviceId:(NSString *)deviceId
+                        name:(NSString *)name {
+  self = [super init];
+  if (self) {
+    _type = type;
+    _deviceId = deviceId;
+    _name = name;
+  }
+  return self;
+}
+
+#pragma mark - IODevice
+
+- (BOOL)isDefault {
+  return [_deviceId isEqualToString:kDefaultDeviceId];
+}
+
+#pragma mark - Equatable
+
+- (BOOL)isEqual:(id)object {
+  if (self == object) {
+    return YES;
+  }
+  if (object == nil) {
+    return NO;
+  }
+  if (![object isMemberOfClass:[self class]]) {
+    return NO;
+  }
+
+  return [_deviceId isEqualToString:((RTC_OBJC_TYPE(RTCIODevice) *)object).deviceId];
+}
+
+- (NSUInteger)hash {
+  return [_deviceId hash];
+}
+
+@end
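Equality and hashing above use `deviceId` alone, so two snapshots of a device list taken before and after a rename still compare equal entry-for-entry. A short sketch (the `devices` array and `previous` device are illustrative, not part of this patch):

    // Membership survives re-enumeration as long as the deviceId is stable,
    // even if the display name changed in between.
    BOOL stillPresent = [devices containsObject:previous];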
diff --git a/sdk/objc/api/peerconnection/RTCIceServer.h b/sdk/objc/api/peerconnection/RTCIceServer.h
index 9d66aed936..c77d080a02 100644
--- a/sdk/objc/api/peerconnection/RTCIceServer.h
+++ b/sdk/objc/api/peerconnection/RTCIceServer.h
@@ -12,9 +12,9 @@
 
 #import "sdk/objc/base/RTCMacros.h"
 
-typedef NS_ENUM(NSUInteger, RTCTlsCertPolicy) {
-  RTCTlsCertPolicySecure,
-  RTCTlsCertPolicyInsecureNoCheck
+typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCTlsCertPolicy)) {
+  RTC_OBJC_TYPE(RTCTlsCertPolicySecure),
+  RTC_OBJC_TYPE(RTCTlsCertPolicyInsecureNoCheck)
 };
 
 NS_ASSUME_NONNULL_BEGIN
@@ -34,7 +34,7 @@ RTC_OBJC_EXPORT
 /**
  * TLS certificate policy to use if this RTCIceServer object is a TURN server.
  */
-@property(nonatomic, readonly) RTCTlsCertPolicy tlsCertPolicy;
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCTlsCertPolicy) tlsCertPolicy;
 
 /** If the URIs in `urls` only contain IP addresses, this field can be used
@@ -72,7 +72,7 @@ RTC_OBJC_EXPORT
 - (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
                           username:(nullable NSString *)username
                         credential:(nullable NSString *)credential
-                     tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy;
+                     tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy;
 
 /**
  * Initialize an RTCIceServer with its associated URLs, optional username,
@@ -81,7 +81,7 @@ RTC_OBJC_EXPORT
 - (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
                           username:(nullable NSString *)username
                         credential:(nullable NSString *)credential
-                     tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+                     tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy
                           hostname:(nullable NSString *)hostname;
 
 /**
@@ -91,7 +91,7 @@ RTC_OBJC_EXPORT
 - (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
                           username:(nullable NSString *)username
                         credential:(nullable NSString *)credential
-                     tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+                     tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy
                           hostname:(nullable NSString *)hostname
                   tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols;
 
@@ -100,14 +100,13 @@ RTC_OBJC_EXPORT
  * optional credential, TLS cert policy, hostname, ALPN protocols and
  * elliptic curves.
  */
-- (instancetype)
-    initWithURLStrings:(NSArray<NSString *> *)urlStrings
-              username:(nullable NSString *)username
-            credential:(nullable NSString *)credential
-         tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
-              hostname:(nullable NSString *)hostname
-      tlsAlpnProtocols:(nullable NSArray<NSString *> *)tlsAlpnProtocols
-     tlsEllipticCurves:(nullable NSArray<NSString *> *)tlsEllipticCurves
+- (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
+                          username:(nullable NSString *)username
+                        credential:(nullable NSString *)credential
+                     tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy
+                          hostname:(nullable NSString *)hostname
+                  tlsAlpnProtocols:(nullable NSArray<NSString *> *)tlsAlpnProtocols
+                 tlsEllipticCurves:(nullable NSArray<NSString *> *)tlsEllipticCurves
     NS_DESIGNATED_INITIALIZER;
 
 @end
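With the enum renamed, call sites spell the policy through RTC_OBJC_TYPE as well. A sketch of building a TURN entry against this header (URL and credentials are placeholders):

    RTC_OBJC_TYPE(RTCIceServer) *turn = [[RTC_OBJC_TYPE(RTCIceServer) alloc]
        initWithURLStrings:@[ @"turn:turn.example.com:3478" ]
                  username:@"user"
                credential:@"pass"
             tlsCertPolicy:RTC_OBJC_TYPE(RTCTlsCertPolicySecure)];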
diff --git a/sdk/objc/api/peerconnection/RTCIceServer.mm b/sdk/objc/api/peerconnection/RTCIceServer.mm
index 96961e33d0..290a99bfed 100644
--- a/sdk/objc/api/peerconnection/RTCIceServer.mm
+++ b/sdk/objc/api/peerconnection/RTCIceServer.mm
@@ -12,7 +12,7 @@
 
 #import "helpers/NSString+StdString.h"
 
-@implementation RTC_OBJC_TYPE (RTCIceServer)
+@implementation RTC_OBJC_TYPE(RTCIceServer)
 
 @synthesize urlStrings = _urlStrings;
 @synthesize username = _username;
@@ -32,13 +32,13 @@ - (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
   return [self initWithURLStrings:urlStrings
                          username:username
                        credential:credential
-                    tlsCertPolicy:RTCTlsCertPolicySecure];
+                    tlsCertPolicy:RTC_OBJC_TYPE(RTCTlsCertPolicySecure)];
 }
 
 - (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
                           username:(NSString *)username
                         credential:(NSString *)credential
-                     tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
+                     tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy {
   return [self initWithURLStrings:urlStrings
                          username:username
                        credential:credential
@@ -49,7 +49,7 @@ - (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
 - (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
                           username:(NSString *)username
                         credential:(NSString *)credential
-                     tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+                     tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy
                           hostname:(NSString *)hostname {
   return [self initWithURLStrings:urlStrings
                          username:username
@@ -62,7 +62,7 @@ - (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
 - (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
                           username:(NSString *)username
                         credential:(NSString *)credential
-                     tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+                     tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy
                           hostname:(NSString *)hostname
                   tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols {
   return [self initWithURLStrings:urlStrings
@@ -77,7 +77,7 @@ - (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
 - (instancetype)initWithURLStrings:(NSArray<NSString *> *)urlStrings
                           username:(NSString *)username
                         credential:(NSString *)credential
-                     tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy
+                     tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy
                           hostname:(NSString *)hostname
                   tlsAlpnProtocols:(NSArray<NSString *> *)tlsAlpnProtocols
                  tlsEllipticCurves:(NSArray<NSString *> *)tlsEllipticCurves {
@@ -112,11 +112,11 @@ - (NSString *)description {
 
 #pragma mark - Private
 
-- (NSString *)stringForTlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
+- (NSString *)stringForTlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy {
   switch (tlsCertPolicy) {
-    case RTCTlsCertPolicySecure:
+    case RTC_OBJC_TYPE(RTCTlsCertPolicySecure):
       return @"RTCTlsCertPolicySecure";
-    case RTCTlsCertPolicyInsecureNoCheck:
+    case RTC_OBJC_TYPE(RTCTlsCertPolicyInsecureNoCheck):
       return @"RTCTlsCertPolicyInsecureNoCheck";
   }
 }
@@ -144,11 +144,11 @@ - (NSString *)stringForTlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy {
   }];
 
   switch (_tlsCertPolicy) {
-    case RTCTlsCertPolicySecure:
+    case RTC_OBJC_TYPE(RTCTlsCertPolicySecure):
       iceServer.tls_cert_policy = webrtc::PeerConnectionInterface::kTlsCertPolicySecure;
       break;
-    case RTCTlsCertPolicyInsecureNoCheck:
+    case RTC_OBJC_TYPE(RTCTlsCertPolicyInsecureNoCheck):
       iceServer.tls_cert_policy = webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck;
       break;
@@ -176,14 +176,14 @@ - (instancetype)initWithNativeServer:
   for (auto const &curve : nativeServer.tls_elliptic_curves) {
     [tlsEllipticCurves addObject:[NSString stringForStdString:curve]];
   }
-  RTCTlsCertPolicy tlsCertPolicy;
+  RTC_OBJC_TYPE(RTCTlsCertPolicy) tlsCertPolicy;
   switch (nativeServer.tls_cert_policy) {
     case webrtc::PeerConnectionInterface::kTlsCertPolicySecure:
-      tlsCertPolicy = RTCTlsCertPolicySecure;
+      tlsCertPolicy = RTC_OBJC_TYPE(RTCTlsCertPolicySecure);
      break;
     case webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck:
-      tlsCertPolicy = RTCTlsCertPolicyInsecureNoCheck;
+      tlsCertPolicy = RTC_OBJC_TYPE(RTCTlsCertPolicyInsecureNoCheck);
       break;
   }
diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints.h b/sdk/objc/api/peerconnection/RTCMediaConstraints.h
index 3a92d4d5b2..503f9cb2bf 100644
--- a/sdk/objc/api/peerconnection/RTCMediaConstraints.h
+++ b/sdk/objc/api/peerconnection/RTCMediaConstraints.h
@@ -18,17 +18,17 @@ NS_ASSUME_NONNULL_BEGIN
 
 /** The value for this key should be a base64 encoded string containing
  * the data from the serialized configuration proto. */
-RTC_EXTERN NSString *const kRTCMediaConstraintsAudioNetworkAdaptorConfig;
+RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsAudioNetworkAdaptorConfig);
 
 /** Constraint keys for generating offers and answers.
  */
-RTC_EXTERN NSString *const kRTCMediaConstraintsIceRestart;
-RTC_EXTERN NSString *const kRTCMediaConstraintsOfferToReceiveAudio;
-RTC_EXTERN NSString *const kRTCMediaConstraintsOfferToReceiveVideo;
-RTC_EXTERN NSString *const kRTCMediaConstraintsVoiceActivityDetection;
+RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsIceRestart);
+RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveAudio);
+RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveVideo);
+RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsVoiceActivityDetection);
 
 /** Constraint values for Boolean parameters.
  */
-RTC_EXTERN NSString *const kRTCMediaConstraintsValueTrue;
-RTC_EXTERN NSString *const kRTCMediaConstraintsValueFalse;
+RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsValueTrue);
+RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsValueFalse);
 
 RTC_OBJC_EXPORT
 @interface RTC_OBJC_TYPE (RTCMediaConstraints) : NSObject
diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints.mm b/sdk/objc/api/peerconnection/RTCMediaConstraints.mm
index 5158b5905b..76893abab1 100644
--- a/sdk/objc/api/peerconnection/RTCMediaConstraints.mm
+++ b/sdk/objc/api/peerconnection/RTCMediaConstraints.mm
@@ -14,22 +14,19 @@
 
 #include <memory>
 
-NSString *const kRTCMediaConstraintsAudioNetworkAdaptorConfig =
+NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsAudioNetworkAdaptorConfig) =
     @(webrtc::MediaConstraints::kAudioNetworkAdaptorConfig);
-NSString *const kRTCMediaConstraintsIceRestart =
-    @(webrtc::MediaConstraints::kIceRestart);
-NSString *const kRTCMediaConstraintsOfferToReceiveAudio =
+NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsIceRestart) = @(webrtc::MediaConstraints::kIceRestart);
+NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveAudio) =
     @(webrtc::MediaConstraints::kOfferToReceiveAudio);
-NSString *const kRTCMediaConstraintsOfferToReceiveVideo =
+NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveVideo) =
     @(webrtc::MediaConstraints::kOfferToReceiveVideo);
-NSString *const kRTCMediaConstraintsVoiceActivityDetection =
+NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsVoiceActivityDetection) =
    @(webrtc::MediaConstraints::kVoiceActivityDetection);
-NSString *const kRTCMediaConstraintsValueTrue =
-    @(webrtc::MediaConstraints::kValueTrue);
-NSString *const kRTCMediaConstraintsValueFalse =
-    @(webrtc::MediaConstraints::kValueFalse);
+NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsValueTrue) = @(webrtc::MediaConstraints::kValueTrue);
+NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsValueFalse) = @(webrtc::MediaConstraints::kValueFalse);
 
 @implementation RTC_OBJC_TYPE (RTCMediaConstraints) {
   NSDictionary<NSString *, NSString *> *_mandatory;
@@ -78,9 +75,8 @@ - (NSString *)description {
         [key isKindOfClass:[NSString class]], @"%@ is not an NSString.", key);
     NSString *value = [constraints objectForKey:key];
     NSAssert([value isKindOfClass:[NSString class]],
-             @"%@ is not an NSString.",
-             value);
-    if ([kRTCMediaConstraintsAudioNetworkAdaptorConfig isEqualToString:key]) {
+             @"%@ is not an NSString.", value);
+    if ([RTC_CONSTANT_TYPE(RTCMediaConstraintsAudioNetworkAdaptorConfig) isEqualToString:key]) {
       // This value is base64 encoded.
       NSData *charData = [[NSData alloc] initWithBase64EncodedString:value options:0];
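Downstream code uses the prefixed constants exactly where the k-prefixed ones used to go, for example when building offer options (a sketch; initWithMandatoryConstraints:optionalConstraints: is the class's existing public initializer, not introduced by this patch):

    RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc]
        initWithMandatoryConstraints:@{
          RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveAudio) :
              RTC_CONSTANT_TYPE(RTCMediaConstraintsValueTrue),
          RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveVideo) :
              RTC_CONSTANT_TYPE(RTCMediaConstraintsValueTrue)
        }
                 optionalConstraints:nil];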
diff --git a/sdk/objc/api/peerconnection/RTCMediaSource+Private.h b/sdk/objc/api/peerconnection/RTCMediaSource+Private.h
index 66d5b836d8..ecf37ec92b 100644
--- a/sdk/objc/api/peerconnection/RTCMediaSource+Private.h
+++ b/sdk/objc/api/peerconnection/RTCMediaSource+Private.h
@@ -16,9 +16,9 @@ NS_ASSUME_NONNULL_BEGIN
 
 @class RTC_OBJC_TYPE(RTCPeerConnectionFactory);
 
-typedef NS_ENUM(NSInteger, RTCMediaSourceType) {
-  RTCMediaSourceTypeAudio,
-  RTCMediaSourceTypeVideo,
+typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCMediaSourceType)) {
+  RTC_OBJC_TYPE(RTCMediaSourceTypeAudio),
+  RTC_OBJC_TYPE(RTCMediaSourceTypeVideo),
 };
 
 @interface RTC_OBJC_TYPE (RTCMediaSource)
@@ -28,19 +28,15 @@ typedef NS_ENUM(NSInteger, RTCMediaSourceType) {
 
 @property(nonatomic, readonly) webrtc::scoped_refptr<webrtc::MediaSourceInterface> nativeMediaSource;
 
-- (instancetype)
-    initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
-  nativeMediaSource:
-      (webrtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
-               type:(RTCMediaSourceType)type NS_DESIGNATED_INITIALIZER;
+- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory
+              nativeMediaSource:(webrtc::scoped_refptr<webrtc::MediaSourceInterface>)nativeMediaSource
+                           type:(RTC_OBJC_TYPE(RTCMediaSourceType))type NS_DESIGNATED_INITIALIZER;
 
-+ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:
-    (RTCSourceState)state;
++ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:(RTC_OBJC_TYPE(RTCSourceState))state;
 
-+ (RTCSourceState)sourceStateForNativeState:
-    (webrtc::MediaSourceInterface::SourceState)nativeState;
++ (RTC_OBJC_TYPE(RTCSourceState))sourceStateForNativeState:(webrtc::MediaSourceInterface::SourceState)nativeState;
 
-+ (NSString *)stringForState:(RTCSourceState)state;
++ (NSString *)stringForState:(RTC_OBJC_TYPE(RTCSourceState))state;
 
 @end
diff --git a/sdk/objc/api/peerconnection/RTCMediaSource.h b/sdk/objc/api/peerconnection/RTCMediaSource.h
index 6ff36e6b9a..b32c27134a 100644
--- a/sdk/objc/api/peerconnection/RTCMediaSource.h
+++ b/sdk/objc/api/peerconnection/RTCMediaSource.h
@@ -12,11 +12,11 @@
 
 #import "sdk/objc/base/RTCMacros.h"
 
-typedef NS_ENUM(NSInteger, RTCSourceState) {
-  RTCSourceStateInitializing,
-  RTCSourceStateLive,
-  RTCSourceStateEnded,
-  RTCSourceStateMuted,
+typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCSourceState)) {
+  RTC_OBJC_TYPE(RTCSourceStateInitializing),
+  RTC_OBJC_TYPE(RTCSourceStateLive),
+  RTC_OBJC_TYPE(RTCSourceStateEnded),
+  RTC_OBJC_TYPE(RTCSourceStateMuted),
 };
 
 NS_ASSUME_NONNULL_BEGIN
@@ -25,7 +25,7 @@ RTC_OBJC_EXPORT
 @interface RTC_OBJC_TYPE (RTCMediaSource) : NSObject
 
 /** The current state of the RTCMediaSource.
*/ -@property(nonatomic, readonly) RTCSourceState state; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSourceState) state; - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCMediaSource.mm b/sdk/objc/api/peerconnection/RTCMediaSource.mm index 3fd9c6cc5f..f87697c74c 100644 --- a/sdk/objc/api/peerconnection/RTCMediaSource.mm +++ b/sdk/objc/api/peerconnection/RTCMediaSource.mm @@ -14,16 +14,14 @@ @implementation RTC_OBJC_TYPE (RTCMediaSource) { RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; - RTCMediaSourceType _type; + RTC_OBJC_TYPE(RTCMediaSourceType) _type; } @synthesize nativeMediaSource = _nativeMediaSource; -- (instancetype) - initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - nativeMediaSource: - (webrtc::scoped_refptr)nativeMediaSource - type:(RTCMediaSourceType)type { +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + nativeMediaSource:(webrtc::scoped_refptr)nativeMediaSource + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type { RTC_DCHECK(factory); RTC_DCHECK(nativeMediaSource); self = [super init]; @@ -35,49 +33,49 @@ @implementation RTC_OBJC_TYPE (RTCMediaSource) { return self; } -- (RTCSourceState)state { +- (RTC_OBJC_TYPE(RTCSourceState))state { return [[self class] sourceStateForNativeState:_nativeMediaSource->state()]; } #pragma mark - Private + (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState: - (RTCSourceState)state { + (RTC_OBJC_TYPE(RTCSourceState))state { switch (state) { - case RTCSourceStateInitializing: + case RTC_OBJC_TYPE(RTCSourceStateInitializing): return webrtc::MediaSourceInterface::kInitializing; - case RTCSourceStateLive: + case RTC_OBJC_TYPE(RTCSourceStateLive): return webrtc::MediaSourceInterface::kLive; - case RTCSourceStateEnded: + case RTC_OBJC_TYPE(RTCSourceStateEnded): return webrtc::MediaSourceInterface::kEnded; - case RTCSourceStateMuted: + case RTC_OBJC_TYPE(RTCSourceStateMuted): return webrtc::MediaSourceInterface::kMuted; } } -+ (RTCSourceState)sourceStateForNativeState: ++ (RTC_OBJC_TYPE(RTCSourceState))sourceStateForNativeState: (webrtc::MediaSourceInterface::SourceState)nativeState { switch (nativeState) { case webrtc::MediaSourceInterface::kInitializing: - return RTCSourceStateInitializing; + return RTC_OBJC_TYPE(RTCSourceStateInitializing); case webrtc::MediaSourceInterface::kLive: - return RTCSourceStateLive; + return RTC_OBJC_TYPE(RTCSourceStateLive); case webrtc::MediaSourceInterface::kEnded: - return RTCSourceStateEnded; + return RTC_OBJC_TYPE(RTCSourceStateEnded); case webrtc::MediaSourceInterface::kMuted: - return RTCSourceStateMuted; + return RTC_OBJC_TYPE(RTCSourceStateMuted); } } -+ (NSString *)stringForState:(RTCSourceState)state { ++ (NSString *)stringForState:(RTC_OBJC_TYPE(RTCSourceState))state { switch (state) { - case RTCSourceStateInitializing: + case RTC_OBJC_TYPE(RTCSourceStateInitializing): return @"Initializing"; - case RTCSourceStateLive: + case RTC_OBJC_TYPE(RTCSourceStateLive): return @"Live"; - case RTCSourceStateEnded: + case RTC_OBJC_TYPE(RTCSourceStateEnded): return @"Ended"; - case RTCSourceStateMuted: + case RTC_OBJC_TYPE(RTCSourceStateMuted): return @"Muted"; } } diff --git a/sdk/objc/api/peerconnection/RTCMediaStream.mm b/sdk/objc/api/peerconnection/RTCMediaStream.mm index 60c256cc94..1df225d759 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStream.mm +++ b/sdk/objc/api/peerconnection/RTCMediaStream.mm @@ -140,7 +140,7 @@ - (NSString *)description { _nativeMediaStream = nativeMediaStream; for (auto &track : 
audioTracks) { - RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio; + RTC_OBJC_TYPE(RTCMediaStreamTrackType) type = RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio); RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack = [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:_factory nativeTrack:track @@ -149,7 +149,7 @@ - (NSString *)description { } for (auto &track : videoTracks) { - RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo; + RTC_OBJC_TYPE(RTCMediaStreamTrackType) type = RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo); RTC_OBJC_TYPE(RTCVideoTrack) *videoTrack = [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:_factory nativeTrack:track diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h b/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h index a4f2d5e1e4..94165d139b 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h @@ -12,9 +12,9 @@ #include "api/media_stream_interface.h" -typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) { - RTCMediaStreamTrackTypeAudio, - RTCMediaStreamTrackTypeVideo, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCMediaStreamTrackType)) { + RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio), + RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo), }; NS_ASSUME_NONNULL_BEGIN @@ -38,11 +38,9 @@ NS_ASSUME_NONNULL_BEGIN /** * Initialize an RTCMediaStreamTrack from a native MediaStreamTrackInterface. */ -- (instancetype) - initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - nativeTrack:(webrtc::scoped_refptr) - nativeTrack - type:(RTCMediaStreamTrackType)type NS_DESIGNATED_INITIALIZER; +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + nativeTrack:(webrtc::scoped_refptr)nativeTrack + type:(RTC_OBJC_TYPE(RTCMediaStreamTrackType))type NS_DESIGNATED_INITIALIZER; - (instancetype) initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory @@ -52,12 +50,12 @@ NS_ASSUME_NONNULL_BEGIN - (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track; + (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState: - (RTCMediaStreamTrackState)state; + (RTC_OBJC_TYPE(RTCMediaStreamTrackState))state; -+ (RTCMediaStreamTrackState)trackStateForNativeState: ++ (RTC_OBJC_TYPE(RTCMediaStreamTrackState))trackStateForNativeState: (webrtc::MediaStreamTrackInterface::TrackState)nativeState; -+ (NSString *)stringForState:(RTCMediaStreamTrackState)state; ++ (NSString *)stringForState:(RTC_OBJC_TYPE(RTCMediaStreamTrackState))state; + (RTC_OBJC_TYPE(RTCMediaStreamTrack) *) mediaTrackForNativeTrack: diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h index 1a8d062417..1aa5731c0f 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h +++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h @@ -15,15 +15,15 @@ /** * Represents the state of the track. This exposes the same states in C++. 
*/ -typedef NS_ENUM(NSInteger, RTCMediaStreamTrackState) { - RTCMediaStreamTrackStateLive, - RTCMediaStreamTrackStateEnded +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCMediaStreamTrackState)) { + RTC_OBJC_TYPE(RTCMediaStreamTrackStateLive), + RTC_OBJC_TYPE(RTCMediaStreamTrackStateEnded) }; NS_ASSUME_NONNULL_BEGIN -RTC_EXTERN NSString *const kRTCMediaStreamTrackKindAudio; -RTC_EXTERN NSString *const kRTCMediaStreamTrackKindVideo; +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo); RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCMediaStreamTrack) : NSObject @@ -41,7 +41,7 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) BOOL isEnabled; /** The state of the track. */ -@property(nonatomic, readonly) RTCMediaStreamTrackState readyState; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCMediaStreamTrackState) readyState; - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm index 244b5e7207..75873140d8 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm +++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm @@ -14,15 +14,15 @@ #import "helpers/NSString+StdString.h" -NSString *const kRTCMediaStreamTrackKindAudio = +NSString * const RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio) = @(webrtc::MediaStreamTrackInterface::kAudioKind); -NSString *const kRTCMediaStreamTrackKindVideo = +NSString * const RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo) = @(webrtc::MediaStreamTrackInterface::kVideoKind); -@implementation RTC_OBJC_TYPE (RTCMediaStreamTrack) { +@implementation RTC_OBJC_TYPE(RTCMediaStreamTrack) { RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; webrtc::scoped_refptr _nativeTrack; - RTCMediaStreamTrackType _type; + RTC_OBJC_TYPE(RTCMediaStreamTrackType) _type; } - (NSString *)kind { @@ -41,7 +41,7 @@ - (void)setIsEnabled:(BOOL)isEnabled { _nativeTrack->set_enabled(isEnabled); } -- (RTCMediaStreamTrackState)readyState { +- (RTC_OBJC_TYPE(RTCMediaStreamTrackState))readyState { return [[self class] trackStateForNativeState:_nativeTrack->state()]; } @@ -77,11 +77,9 @@ - (NSUInteger)hash { @synthesize factory = _factory; -- (instancetype) - initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - nativeTrack:(webrtc::scoped_refptr) - nativeTrack - type:(RTCMediaStreamTrackType)type { +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + nativeTrack:(webrtc::scoped_refptr)nativeTrack + type:(RTC_OBJC_TYPE(RTCMediaStreamTrackType))type { NSParameterAssert(nativeTrack); NSParameterAssert(factory); self = [super init]; @@ -100,15 +98,11 @@ - (NSUInteger)hash { NSParameterAssert(nativeTrack); if (nativeTrack->kind() == std::string(webrtc::MediaStreamTrackInterface::kAudioKind)) { - return [self initWithFactory:factory - nativeTrack:nativeTrack - type:RTCMediaStreamTrackTypeAudio]; + return [self initWithFactory:factory nativeTrack:nativeTrack type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio)]; } if (nativeTrack->kind() == std::string(webrtc::MediaStreamTrackInterface::kVideoKind)) { - return [self initWithFactory:factory - nativeTrack:nativeTrack - type:RTCMediaStreamTrackTypeVideo]; + return [self initWithFactory:factory nativeTrack:nativeTrack type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo)]; } return nil; } @@ -121,30 +115,30 @@ - (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track { } + 
(webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState: - (RTCMediaStreamTrackState)state { + (RTC_OBJC_TYPE(RTCMediaStreamTrackState))state { switch (state) { - case RTCMediaStreamTrackStateLive: + case RTC_OBJC_TYPE(RTCMediaStreamTrackStateLive): return webrtc::MediaStreamTrackInterface::kLive; - case RTCMediaStreamTrackStateEnded: + case RTC_OBJC_TYPE(RTCMediaStreamTrackStateEnded): return webrtc::MediaStreamTrackInterface::kEnded; } } -+ (RTCMediaStreamTrackState)trackStateForNativeState: ++ (RTC_OBJC_TYPE(RTCMediaStreamTrackState))trackStateForNativeState: (webrtc::MediaStreamTrackInterface::TrackState)nativeState { switch (nativeState) { case webrtc::MediaStreamTrackInterface::kLive: - return RTCMediaStreamTrackStateLive; + return RTC_OBJC_TYPE(RTCMediaStreamTrackStateLive); case webrtc::MediaStreamTrackInterface::kEnded: - return RTCMediaStreamTrackStateEnded; + return RTC_OBJC_TYPE(RTCMediaStreamTrackStateEnded); } } -+ (NSString *)stringForState:(RTCMediaStreamTrackState)state { ++ (NSString *)stringForState:(RTC_OBJC_TYPE(RTCMediaStreamTrackState))state { switch (state) { - case RTCMediaStreamTrackStateLive: + case RTC_OBJC_TYPE(RTCMediaStreamTrackStateLive): return @"Live"; - case RTCMediaStreamTrackStateEnded: + case RTC_OBJC_TYPE(RTCMediaStreamTrackStateEnded): return @"Ended"; } } @@ -157,16 +151,13 @@ + (NSString *)stringForState:(RTCMediaStreamTrackState)state { NSParameterAssert(nativeTrack); NSParameterAssert(factory); if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kAudioKind) { - return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] - initWithFactory:factory - nativeTrack:nativeTrack - type:RTCMediaStreamTrackTypeAudio]; - } else if (nativeTrack->kind() == - webrtc::MediaStreamTrackInterface::kVideoKind) { - return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] - initWithFactory:factory - nativeTrack:nativeTrack - type:RTCMediaStreamTrackTypeVideo]; + return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:factory + nativeTrack:nativeTrack + type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio)]; + } else if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) { + return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:factory + nativeTrack:nativeTrack + type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo)]; } else { return [[RTC_OBJC_TYPE(RTCMediaStreamTrack) alloc] initWithFactory:factory diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h b/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h index c9a955ec63..acaf9e8555 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h @@ -120,39 +120,39 @@ class PeerConnectionDelegateAdapter : public PeerConnectionObserver { delegate NS_DESIGNATED_INITIALIZER; + (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState: - (RTCSignalingState)state; + (RTC_OBJC_TYPE(RTCSignalingState))state; -+ (RTCSignalingState)signalingStateForNativeState: ++ (RTC_OBJC_TYPE(RTCSignalingState))signalingStateForNativeState: (webrtc::PeerConnectionInterface::SignalingState)nativeState; -+ (NSString *)stringForSignalingState:(RTCSignalingState)state; ++ (NSString *)stringForSignalingState:(RTC_OBJC_TYPE(RTCSignalingState))state; -+ (webrtc::PeerConnectionInterface::IceConnectionState) - nativeIceConnectionStateForState:(RTCIceConnectionState)state; ++ (webrtc::PeerConnectionInterface::IceConnectionState)nativeIceConnectionStateForState: + (RTC_OBJC_TYPE(RTCIceConnectionState))state; -+ 
(webrtc::PeerConnectionInterface::PeerConnectionState) - nativeConnectionStateForState:(RTCPeerConnectionState)state; ++ (webrtc::PeerConnectionInterface::PeerConnectionState)nativeConnectionStateForState: + (RTC_OBJC_TYPE(RTCPeerConnectionState))state; -+ (RTCIceConnectionState)iceConnectionStateForNativeState: ++ (RTC_OBJC_TYPE(RTCIceConnectionState))iceConnectionStateForNativeState: (webrtc::PeerConnectionInterface::IceConnectionState)nativeState; -+ (RTCPeerConnectionState)connectionStateForNativeState: ++ (RTC_OBJC_TYPE(RTCPeerConnectionState))connectionStateForNativeState: (webrtc::PeerConnectionInterface::PeerConnectionState)nativeState; -+ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state; ++ (NSString *)stringForIceConnectionState:(RTC_OBJC_TYPE(RTCIceConnectionState))state; -+ (NSString *)stringForConnectionState:(RTCPeerConnectionState)state; ++ (NSString *)stringForConnectionState:(RTC_OBJC_TYPE(RTCPeerConnectionState))state; -+ (webrtc::PeerConnectionInterface::IceGatheringState) - nativeIceGatheringStateForState:(RTCIceGatheringState)state; ++ (webrtc::PeerConnectionInterface::IceGatheringState)nativeIceGatheringStateForState: + (RTC_OBJC_TYPE(RTCIceGatheringState))state; -+ (RTCIceGatheringState)iceGatheringStateForNativeState: ++ (RTC_OBJC_TYPE(RTCIceGatheringState))iceGatheringStateForNativeState: (webrtc::PeerConnectionInterface::IceGatheringState)nativeState; -+ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state; ++ (NSString *)stringForIceGatheringState:(RTC_OBJC_TYPE(RTCIceGatheringState))state; -+ (webrtc::PeerConnectionInterface::StatsOutputLevel) - nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level; ++ (webrtc::PeerConnectionInterface::StatsOutputLevel)nativeStatsOutputLevelForLevel: + (RTC_OBJC_TYPE(RTCStatsOutputLevel))level; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm index 3f7cbbad6a..e7afcac3c2 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm @@ -98,7 +98,7 @@ - (void)statisticsWithCompletionHandler: } - (void)statsForTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack - statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel + statsOutputLevel:(RTC_OBJC_TYPE(RTCStatsOutputLevel))statsOutputLevel completionHandler: (void (^)(NSArray *stats)) completionHandler { diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.h b/sdk/objc/api/peerconnection/RTCPeerConnection.h index fb5c414815..044f3de2f0 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnection.h @@ -29,57 +29,57 @@ @class RTC_OBJC_TYPE(RTCStatisticsReport); @class RTC_OBJC_TYPE(RTCLegacyStatsReport); -typedef NS_ENUM(NSInteger, RTCRtpMediaType); +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCRtpMediaType)); NS_ASSUME_NONNULL_BEGIN -extern NSString *const kRTCPeerConnectionErrorDomain; -extern int const kRTCSessionDescriptionErrorCode; +extern NSString *const RTC_CONSTANT_TYPE(RTCPeerConnectionErrorDomain); +extern int const RTC_CONSTANT_TYPE(RTCSessionDescriptionErrorCode); /** Represents the signaling state of the peer connection. 
*/ -typedef NS_ENUM(NSInteger, RTCSignalingState) { - RTCSignalingStateStable, - RTCSignalingStateHaveLocalOffer, - RTCSignalingStateHaveLocalPrAnswer, - RTCSignalingStateHaveRemoteOffer, - RTCSignalingStateHaveRemotePrAnswer, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCSignalingState)) { + RTC_OBJC_TYPE(RTCSignalingStateStable), + RTC_OBJC_TYPE(RTCSignalingStateHaveLocalOffer), + RTC_OBJC_TYPE(RTCSignalingStateHaveLocalPrAnswer), + RTC_OBJC_TYPE(RTCSignalingStateHaveRemoteOffer), + RTC_OBJC_TYPE(RTCSignalingStateHaveRemotePrAnswer), // Not an actual state, represents the total number of states. - RTCSignalingStateClosed, + RTC_OBJC_TYPE(RTCSignalingStateClosed), }; /** Represents the ice connection state of the peer connection. */ -typedef NS_ENUM(NSInteger, RTCIceConnectionState) { - RTCIceConnectionStateNew, - RTCIceConnectionStateChecking, - RTCIceConnectionStateConnected, - RTCIceConnectionStateCompleted, - RTCIceConnectionStateFailed, - RTCIceConnectionStateDisconnected, - RTCIceConnectionStateClosed, - RTCIceConnectionStateCount, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCIceConnectionState)) { + RTC_OBJC_TYPE(RTCIceConnectionStateNew), + RTC_OBJC_TYPE(RTCIceConnectionStateChecking), + RTC_OBJC_TYPE(RTCIceConnectionStateConnected), + RTC_OBJC_TYPE(RTCIceConnectionStateCompleted), + RTC_OBJC_TYPE(RTCIceConnectionStateFailed), + RTC_OBJC_TYPE(RTCIceConnectionStateDisconnected), + RTC_OBJC_TYPE(RTCIceConnectionStateClosed), + RTC_OBJC_TYPE(RTCIceConnectionStateCount), }; /** Represents the combined ice+dtls connection state of the peer connection. */ -typedef NS_ENUM(NSInteger, RTCPeerConnectionState) { - RTCPeerConnectionStateNew, - RTCPeerConnectionStateConnecting, - RTCPeerConnectionStateConnected, - RTCPeerConnectionStateDisconnected, - RTCPeerConnectionStateFailed, - RTCPeerConnectionStateClosed, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCPeerConnectionState)) { + RTC_OBJC_TYPE(RTCPeerConnectionStateNew), + RTC_OBJC_TYPE(RTCPeerConnectionStateConnecting), + RTC_OBJC_TYPE(RTCPeerConnectionStateConnected), + RTC_OBJC_TYPE(RTCPeerConnectionStateDisconnected), + RTC_OBJC_TYPE(RTCPeerConnectionStateFailed), + RTC_OBJC_TYPE(RTCPeerConnectionStateClosed), }; /** Represents the ice gathering state of the peer connection. */ -typedef NS_ENUM(NSInteger, RTCIceGatheringState) { - RTCIceGatheringStateNew, - RTCIceGatheringStateGathering, - RTCIceGatheringStateComplete, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCIceGatheringState)) { + RTC_OBJC_TYPE(RTCIceGatheringStateNew), + RTC_OBJC_TYPE(RTCIceGatheringStateGathering), + RTC_OBJC_TYPE(RTCIceGatheringStateComplete), }; /** Represents the stats output level. */ -typedef NS_ENUM(NSInteger, RTCStatsOutputLevel) { - RTCStatsOutputLevelStandard, - RTCStatsOutputLevelDebug, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCStatsOutputLevel)) { + RTC_OBJC_TYPE(RTCStatsOutputLevelStandard), + RTC_OBJC_TYPE(RTCStatsOutputLevelDebug), }; typedef void (^RTCCreateSessionDescriptionCompletionHandler)( @@ -98,7 +98,7 @@ RTC_OBJC_EXPORT /** Called when the SignalingState changed. */ - (void)peerConnection : (RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeSignalingState - : (RTCSignalingState)stateChanged; + : (RTC_OBJC_TYPE(RTCSignalingState))stateChanged; /** Called when media is received on a new stream from remote peer. */ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection @@ -116,11 +116,11 @@ RTC_OBJC_EXPORT /** Called any time the IceConnectionState changes. 
*/ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection - didChangeIceConnectionState:(RTCIceConnectionState)newState; + didChangeIceConnectionState:(RTC_OBJC_TYPE(RTCIceConnectionState))newState; /** Called any time the IceGatheringState changes. */ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection - didChangeIceGatheringState:(RTCIceGatheringState)newState; + didChangeIceGatheringState:(RTC_OBJC_TYPE(RTCIceGatheringState))newState; /** New ice candidate has been found. */ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection @@ -143,11 +143,11 @@ RTC_OBJC_EXPORT /** Called any time the IceConnectionState changes following standardized * transition. */ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection - didChangeStandardizedIceConnectionState:(RTCIceConnectionState)newState; + didChangeStandardizedIceConnectionState:(RTC_OBJC_TYPE(RTCIceConnectionState))newState; /** Called any time the PeerConnectionState changes. */ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection - didChangeConnectionState:(RTCPeerConnectionState)newState; + didChangeConnectionState:(RTC_OBJC_TYPE(RTCPeerConnectionState))newState; - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didStartReceivingOnTransceiver: @@ -186,18 +186,14 @@ RTC_OBJC_EXPORT /** This property is not available with RTCSdpSemanticsUnifiedPlan. Please use * `senders` instead. */ -@property(nonatomic, readonly) - NSArray *localStreams; -@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * - localDescription; -@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * - remoteDescription; -@property(nonatomic, readonly) RTCSignalingState signalingState; -@property(nonatomic, readonly) RTCIceConnectionState iceConnectionState; -@property(nonatomic, readonly) RTCPeerConnectionState connectionState; -@property(nonatomic, readonly) RTCIceGatheringState iceGatheringState; -@property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCConfiguration) * - configuration; +@property(nonatomic, readonly) NSArray *localStreams; +@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * localDescription; +@property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * remoteDescription; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSignalingState) signalingState; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCIceConnectionState) iceConnectionState; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionState) connectionState; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCIceGatheringState) iceGatheringState; +@property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCConfiguration) * configuration; /** Gets all RTCRtpSenders associated with this peer connection. * Note: reading this property returns different instances of RTCRtpSender. @@ -310,10 +306,9 @@ RTC_OBJC_EXPORT /** Adds a transceiver with the given kind. Can either be RTCRtpMediaTypeAudio * or RTCRtpMediaTypeVideo. 
*/ -- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType: - (RTCRtpMediaType)mediaType; +- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType; - (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *) - addTransceiverOfType:(RTCRtpMediaType)mediaType + addTransceiverOfType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init; /** Tells the PeerConnection that ICE should be restarted. This triggers a need @@ -395,11 +390,10 @@ typedef void (^RTCStatisticsCompletionHandler)( /** Gather stats for the given RTCMediaStreamTrack. If `mediaStreamTrack` is * nil statistics are gathered for all tracks. */ - - (void)statsForTrack : (nullable RTC_OBJC_TYPE(RTCMediaStreamTrack) *) - mediaStreamTrack statsOutputLevel - : (RTCStatsOutputLevel)statsOutputLevel completionHandler - : (nullable void (^)(NSArray *stats)) - completionHandler; + - (void)statsForTrack + : (nullable RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack statsOutputLevel + : (RTC_OBJC_TYPE(RTCStatsOutputLevel))statsOutputLevel completionHandler + : (nullable void (^)(NSArray *stats))completionHandler; /** Gather statistic through the v2 statistics API. */ - (void)statisticsWithCompletionHandler: diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.mm b/sdk/objc/api/peerconnection/RTCPeerConnection.mm index cc92c6b51f..4b7eddefa4 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnection.mm @@ -36,9 +36,8 @@ #include "rtc_base/numerics/safe_conversions.h" #include "sdk/objc/native/api/ssl_certificate_verifier.h" -NSString *const kRTCPeerConnectionErrorDomain = - @"org.webrtc.RTC_OBJC_TYPE(RTCPeerConnection)"; -int const kRTCPeerConnnectionSessionDescriptionError = -1; +NSString *const RTC_CONSTANT_TYPE(RTCPeerConnectionErrorDomain) = @"org.webrtc.RTC_OBJC_TYPE(RTCPeerConnection)"; +int const RTC_CONSTANT_TYPE(RTCPeerConnnectionSessionDescriptionError) = -1; namespace { @@ -67,10 +66,9 @@ void OnCompelete(webrtc::RTCError error) { } else { // TODO(hta): Add handling of error.type() NSString *str = [NSString stringForStdString:error.message()]; - NSError *err = - [NSError errorWithDomain:kRTCPeerConnectionErrorDomain - code:kRTCPeerConnnectionSessionDescriptionError - userInfo:@{NSLocalizedDescriptionKey : str}]; + NSError *err = [NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCPeerConnectionErrorDomain) + code:RTC_CONSTANT_TYPE(RTCPeerConnnectionSessionDescriptionError) + userInfo:@{NSLocalizedDescriptionKey : str}]; completion_handler_(err); } completion_handler_ = nil; @@ -110,10 +108,10 @@ void OnFailure(RTCError error) override { RTC_DCHECK(completion_handler_); // TODO(hta): Add handling of error.type() NSString *str = [NSString stringForStdString:error.message()]; - NSError *err = - [NSError errorWithDomain:kRTCPeerConnectionErrorDomain - code:kRTCPeerConnnectionSessionDescriptionError - userInfo:@{NSLocalizedDescriptionKey : str}]; + NSError* err = + [NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCPeerConnectionErrorDomain) + code:RTC_CONSTANT_TYPE(RTCPeerConnnectionSessionDescriptionError) + userInfo:@{ NSLocalizedDescriptionKey : str }]; completion_handler_(nil, err); completion_handler_ = nil; } @@ -135,8 +133,8 @@ void OnFailure(RTCError error) override { void PeerConnectionDelegateAdapter::OnSignalingChange( PeerConnectionInterface::SignalingState new_state) { - RTCSignalingState state = [[RTC_OBJC_TYPE(RTCPeerConnection) class] - 
signalingStateForNativeState:new_state]; + RTC_OBJC_TYPE(RTCSignalingState) state = + [[RTC_OBJC_TYPE(RTCPeerConnection) class] signalingStateForNativeState:new_state]; RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; if (peer_connection == nil) { return; @@ -248,7 +246,7 @@ void OnFailure(RTCError error) override { if (delegate == nil) { return; } - RTCIceConnectionState state = [RTC_OBJC_TYPE(RTCPeerConnection) + RTC_OBJC_TYPE(RTCIceConnectionState) state = [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state]; [delegate peerConnection:peer_connection didChangeIceConnectionState:state]; } @@ -266,7 +264,7 @@ void OnFailure(RTCError error) override { } if ([delegate respondsToSelector:@selector (peerConnection:didChangeStandardizedIceConnectionState:)]) { - RTCIceConnectionState state = [RTC_OBJC_TYPE(RTCPeerConnection) + RTC_OBJC_TYPE(RTCIceConnectionState) state = [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state]; [delegate peerConnection:peer_connection didChangeStandardizedIceConnectionState:state]; @@ -286,7 +284,7 @@ void OnFailure(RTCError error) override { } if ([delegate respondsToSelector:@selector(peerConnection: didChangeConnectionState:)]) { - RTCPeerConnectionState state = [RTC_OBJC_TYPE(RTCPeerConnection) + RTC_OBJC_TYPE(RTCPeerConnectionState) state = [RTC_OBJC_TYPE(RTCPeerConnection) connectionStateForNativeState:new_state]; [delegate peerConnection:peer_connection didChangeConnectionState:state]; } @@ -303,7 +301,7 @@ void OnFailure(RTCError error) override { if (delegate == nil) { return; } - RTCIceGatheringState state = [[RTC_OBJC_TYPE(RTCPeerConnection) class] + RTC_OBJC_TYPE(RTCIceGatheringState) state = [[RTC_OBJC_TYPE(RTCPeerConnection) class] iceGatheringStateForNativeState:new_state]; [delegate peerConnection:peer_connection didChangeIceGatheringState:state]; } @@ -572,24 +570,20 @@ @implementation RTC_OBJC_TYPE (RTCPeerConnection) { }); } -- (RTCSignalingState)signalingState { - return [[self class] - signalingStateForNativeState:_peerConnection->signaling_state()]; +- (RTC_OBJC_TYPE(RTCSignalingState))signalingState { + return [[self class] signalingStateForNativeState:self.nativePeerConnection->signaling_state()]; } -- (RTCIceConnectionState)iceConnectionState { - return [[self class] - iceConnectionStateForNativeState:_peerConnection->ice_connection_state()]; +- (RTC_OBJC_TYPE(RTCIceConnectionState))iceConnectionState { + return [[self class] iceConnectionStateForNativeState:self.nativePeerConnection->ice_connection_state()]; } -- (RTCPeerConnectionState)connectionState { - return [[self class] - connectionStateForNativeState:_peerConnection->peer_connection_state()]; +- (RTC_OBJC_TYPE(RTCPeerConnectionState))connectionState { + return [[self class] connectionStateForNativeState:self.nativePeerConnection->peer_connection_state()]; } -- (RTCIceGatheringState)iceGatheringState { - return [[self class] - iceGatheringStateForNativeState:_peerConnection->ice_gathering_state()]; +- (RTC_OBJC_TYPE(RTCIceGatheringState))iceGatheringState { + return [[self class] iceGatheringStateForNativeState:self.nativePeerConnection->ice_gathering_state()]; } - (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration { @@ -627,10 +621,9 @@ - (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate completionHandler(nil); } else { NSString *str = [NSString stringForStdString:error.message()]; - NSError *err = - [NSError errorWithDomain:kRTCPeerConnectionErrorDomain - 
code:static_cast(error.type()) - userInfo:@{NSLocalizedDescriptionKey : str}]; + NSError *err = [NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCPeerConnectionErrorDomain) + code:static_cast(error.type()) + userInfo:@{NSLocalizedDescriptionKey : str}]; completionHandler(err); } }); @@ -719,15 +712,12 @@ - (BOOL)removeTrack:(RTC_OBJC_TYPE(RTCRtpSender) *)sender { nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()]; } -- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType: - (RTCRtpMediaType)mediaType { - return [self - addTransceiverOfType:mediaType - init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]]; +- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType { + return [self addTransceiverOfType:mediaType init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]]; } - (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *) - addTransceiverOfType:(RTCRtpMediaType)mediaType + addTransceiverOfType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init { webrtc::RTCErrorOr> nativeTransceiverOrError = _peerConnection->AddTransceiver( @@ -905,218 +895,217 @@ - (void)stopRtcEventLog { #pragma mark - Private + (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState: - (RTCSignalingState)state { + (RTC_OBJC_TYPE(RTCSignalingState))state { switch (state) { - case RTCSignalingStateStable: + case RTC_OBJC_TYPE(RTCSignalingStateStable): return webrtc::PeerConnectionInterface::kStable; - case RTCSignalingStateHaveLocalOffer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveLocalOffer): return webrtc::PeerConnectionInterface::kHaveLocalOffer; - case RTCSignalingStateHaveLocalPrAnswer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveLocalPrAnswer): return webrtc::PeerConnectionInterface::kHaveLocalPrAnswer; - case RTCSignalingStateHaveRemoteOffer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveRemoteOffer): return webrtc::PeerConnectionInterface::kHaveRemoteOffer; - case RTCSignalingStateHaveRemotePrAnswer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveRemotePrAnswer): return webrtc::PeerConnectionInterface::kHaveRemotePrAnswer; - case RTCSignalingStateClosed: + case RTC_OBJC_TYPE(RTCSignalingStateClosed): return webrtc::PeerConnectionInterface::kClosed; } } -+ (RTCSignalingState)signalingStateForNativeState: ++ (RTC_OBJC_TYPE(RTCSignalingState))signalingStateForNativeState: (webrtc::PeerConnectionInterface::SignalingState)nativeState { switch (nativeState) { case webrtc::PeerConnectionInterface::kStable: - return RTCSignalingStateStable; + return RTC_OBJC_TYPE(RTCSignalingStateStable); case webrtc::PeerConnectionInterface::kHaveLocalOffer: - return RTCSignalingStateHaveLocalOffer; + return RTC_OBJC_TYPE(RTCSignalingStateHaveLocalOffer); case webrtc::PeerConnectionInterface::kHaveLocalPrAnswer: - return RTCSignalingStateHaveLocalPrAnswer; + return RTC_OBJC_TYPE(RTCSignalingStateHaveLocalPrAnswer); case webrtc::PeerConnectionInterface::kHaveRemoteOffer: - return RTCSignalingStateHaveRemoteOffer; + return RTC_OBJC_TYPE(RTCSignalingStateHaveRemoteOffer); case webrtc::PeerConnectionInterface::kHaveRemotePrAnswer: - return RTCSignalingStateHaveRemotePrAnswer; + return RTC_OBJC_TYPE(RTCSignalingStateHaveRemotePrAnswer); case webrtc::PeerConnectionInterface::kClosed: - return RTCSignalingStateClosed; + return RTC_OBJC_TYPE(RTCSignalingStateClosed); } } -+ (NSString *)stringForSignalingState:(RTCSignalingState)state { ++ (NSString *)stringForSignalingState:(RTC_OBJC_TYPE(RTCSignalingState))state { switch 
(state) { - case RTCSignalingStateStable: + case RTC_OBJC_TYPE(RTCSignalingStateStable): return @"STABLE"; - case RTCSignalingStateHaveLocalOffer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveLocalOffer): return @"HAVE_LOCAL_OFFER"; - case RTCSignalingStateHaveLocalPrAnswer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveLocalPrAnswer): return @"HAVE_LOCAL_PRANSWER"; - case RTCSignalingStateHaveRemoteOffer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveRemoteOffer): return @"HAVE_REMOTE_OFFER"; - case RTCSignalingStateHaveRemotePrAnswer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveRemotePrAnswer): return @"HAVE_REMOTE_PRANSWER"; - case RTCSignalingStateClosed: + case RTC_OBJC_TYPE(RTCSignalingStateClosed): return @"CLOSED"; } } -+ (webrtc::PeerConnectionInterface::PeerConnectionState) - nativeConnectionStateForState:(RTCPeerConnectionState)state { ++ (webrtc::PeerConnectionInterface::PeerConnectionState)nativeConnectionStateForState: + (RTC_OBJC_TYPE(RTCPeerConnectionState))state { switch (state) { - case RTCPeerConnectionStateNew: + case RTC_OBJC_TYPE(RTCPeerConnectionStateNew): return webrtc::PeerConnectionInterface::PeerConnectionState::kNew; - case RTCPeerConnectionStateConnecting: + case RTC_OBJC_TYPE(RTCPeerConnectionStateConnecting): return webrtc::PeerConnectionInterface::PeerConnectionState::kConnecting; - case RTCPeerConnectionStateConnected: + case RTC_OBJC_TYPE(RTCPeerConnectionStateConnected): return webrtc::PeerConnectionInterface::PeerConnectionState::kConnected; - case RTCPeerConnectionStateFailed: + case RTC_OBJC_TYPE(RTCPeerConnectionStateFailed): return webrtc::PeerConnectionInterface::PeerConnectionState::kFailed; - case RTCPeerConnectionStateDisconnected: - return webrtc::PeerConnectionInterface::PeerConnectionState:: - kDisconnected; - case RTCPeerConnectionStateClosed: + case RTC_OBJC_TYPE(RTCPeerConnectionStateDisconnected): + return webrtc::PeerConnectionInterface::PeerConnectionState::kDisconnected; + case RTC_OBJC_TYPE(RTCPeerConnectionStateClosed): return webrtc::PeerConnectionInterface::PeerConnectionState::kClosed; } } -+ (RTCPeerConnectionState)connectionStateForNativeState: - (webrtc::PeerConnectionInterface::PeerConnectionState)nativeState { ++ (RTC_OBJC_TYPE(RTCPeerConnectionState))connectionStateForNativeState: + (webrtc::PeerConnectionInterface::PeerConnectionState)nativeState { switch (nativeState) { case webrtc::PeerConnectionInterface::PeerConnectionState::kNew: - return RTCPeerConnectionStateNew; + return RTC_OBJC_TYPE(RTCPeerConnectionStateNew); case webrtc::PeerConnectionInterface::PeerConnectionState::kConnecting: - return RTCPeerConnectionStateConnecting; + return RTC_OBJC_TYPE(RTCPeerConnectionStateConnecting); case webrtc::PeerConnectionInterface::PeerConnectionState::kConnected: - return RTCPeerConnectionStateConnected; + return RTC_OBJC_TYPE(RTCPeerConnectionStateConnected); case webrtc::PeerConnectionInterface::PeerConnectionState::kFailed: - return RTCPeerConnectionStateFailed; + return RTC_OBJC_TYPE(RTCPeerConnectionStateFailed); case webrtc::PeerConnectionInterface::PeerConnectionState::kDisconnected: - return RTCPeerConnectionStateDisconnected; + return RTC_OBJC_TYPE(RTCPeerConnectionStateDisconnected); case webrtc::PeerConnectionInterface::PeerConnectionState::kClosed: - return RTCPeerConnectionStateClosed; + return RTC_OBJC_TYPE(RTCPeerConnectionStateClosed); } } -+ (NSString *)stringForConnectionState:(RTCPeerConnectionState)state { ++ (NSString *)stringForConnectionState:(RTC_OBJC_TYPE(RTCPeerConnectionState))state { switch (state) { - case 
RTCPeerConnectionStateNew: + case RTC_OBJC_TYPE(RTCPeerConnectionStateNew): return @"NEW"; - case RTCPeerConnectionStateConnecting: + case RTC_OBJC_TYPE(RTCPeerConnectionStateConnecting): return @"CONNECTING"; - case RTCPeerConnectionStateConnected: + case RTC_OBJC_TYPE(RTCPeerConnectionStateConnected): return @"CONNECTED"; - case RTCPeerConnectionStateFailed: + case RTC_OBJC_TYPE(RTCPeerConnectionStateFailed): return @"FAILED"; - case RTCPeerConnectionStateDisconnected: + case RTC_OBJC_TYPE(RTCPeerConnectionStateDisconnected): return @"DISCONNECTED"; - case RTCPeerConnectionStateClosed: + case RTC_OBJC_TYPE(RTCPeerConnectionStateClosed): return @"CLOSED"; } } + (webrtc::PeerConnectionInterface::IceConnectionState) - nativeIceConnectionStateForState:(RTCIceConnectionState)state { + nativeIceConnectionStateForState:(RTC_OBJC_TYPE(RTCIceConnectionState))state { switch (state) { - case RTCIceConnectionStateNew: + case RTC_OBJC_TYPE(RTCIceConnectionStateNew): return webrtc::PeerConnectionInterface::kIceConnectionNew; - case RTCIceConnectionStateChecking: + case RTC_OBJC_TYPE(RTCIceConnectionStateChecking): return webrtc::PeerConnectionInterface::kIceConnectionChecking; - case RTCIceConnectionStateConnected: + case RTC_OBJC_TYPE(RTCIceConnectionStateConnected): return webrtc::PeerConnectionInterface::kIceConnectionConnected; - case RTCIceConnectionStateCompleted: + case RTC_OBJC_TYPE(RTCIceConnectionStateCompleted): return webrtc::PeerConnectionInterface::kIceConnectionCompleted; - case RTCIceConnectionStateFailed: + case RTC_OBJC_TYPE(RTCIceConnectionStateFailed): return webrtc::PeerConnectionInterface::kIceConnectionFailed; - case RTCIceConnectionStateDisconnected: + case RTC_OBJC_TYPE(RTCIceConnectionStateDisconnected): return webrtc::PeerConnectionInterface::kIceConnectionDisconnected; - case RTCIceConnectionStateClosed: + case RTC_OBJC_TYPE(RTCIceConnectionStateClosed): return webrtc::PeerConnectionInterface::kIceConnectionClosed; - case RTCIceConnectionStateCount: + case RTC_OBJC_TYPE(RTCIceConnectionStateCount): return webrtc::PeerConnectionInterface::kIceConnectionMax; } } -+ (RTCIceConnectionState)iceConnectionStateForNativeState: ++ (RTC_OBJC_TYPE(RTCIceConnectionState))iceConnectionStateForNativeState: (webrtc::PeerConnectionInterface::IceConnectionState)nativeState { switch (nativeState) { case webrtc::PeerConnectionInterface::kIceConnectionNew: - return RTCIceConnectionStateNew; + return RTC_OBJC_TYPE(RTCIceConnectionStateNew); case webrtc::PeerConnectionInterface::kIceConnectionChecking: - return RTCIceConnectionStateChecking; + return RTC_OBJC_TYPE(RTCIceConnectionStateChecking); case webrtc::PeerConnectionInterface::kIceConnectionConnected: - return RTCIceConnectionStateConnected; + return RTC_OBJC_TYPE(RTCIceConnectionStateConnected); case webrtc::PeerConnectionInterface::kIceConnectionCompleted: - return RTCIceConnectionStateCompleted; + return RTC_OBJC_TYPE(RTCIceConnectionStateCompleted); case webrtc::PeerConnectionInterface::kIceConnectionFailed: - return RTCIceConnectionStateFailed; + return RTC_OBJC_TYPE(RTCIceConnectionStateFailed); case webrtc::PeerConnectionInterface::kIceConnectionDisconnected: - return RTCIceConnectionStateDisconnected; + return RTC_OBJC_TYPE(RTCIceConnectionStateDisconnected); case webrtc::PeerConnectionInterface::kIceConnectionClosed: - return RTCIceConnectionStateClosed; + return RTC_OBJC_TYPE(RTCIceConnectionStateClosed); case webrtc::PeerConnectionInterface::kIceConnectionMax: - return RTCIceConnectionStateCount; + return 
RTC_OBJC_TYPE(RTCIceConnectionStateCount); } } -+ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state { ++ (NSString *)stringForIceConnectionState:(RTC_OBJC_TYPE(RTCIceConnectionState))state { switch (state) { - case RTCIceConnectionStateNew: + case RTC_OBJC_TYPE(RTCIceConnectionStateNew): return @"NEW"; - case RTCIceConnectionStateChecking: + case RTC_OBJC_TYPE(RTCIceConnectionStateChecking): return @"CHECKING"; - case RTCIceConnectionStateConnected: + case RTC_OBJC_TYPE(RTCIceConnectionStateConnected): return @"CONNECTED"; - case RTCIceConnectionStateCompleted: + case RTC_OBJC_TYPE(RTCIceConnectionStateCompleted): return @"COMPLETED"; - case RTCIceConnectionStateFailed: + case RTC_OBJC_TYPE(RTCIceConnectionStateFailed): return @"FAILED"; - case RTCIceConnectionStateDisconnected: + case RTC_OBJC_TYPE(RTCIceConnectionStateDisconnected): return @"DISCONNECTED"; - case RTCIceConnectionStateClosed: + case RTC_OBJC_TYPE(RTCIceConnectionStateClosed): return @"CLOSED"; - case RTCIceConnectionStateCount: + case RTC_OBJC_TYPE(RTCIceConnectionStateCount): return @"COUNT"; } } + (webrtc::PeerConnectionInterface::IceGatheringState) - nativeIceGatheringStateForState:(RTCIceGatheringState)state { + nativeIceGatheringStateForState:(RTC_OBJC_TYPE(RTCIceGatheringState))state { switch (state) { - case RTCIceGatheringStateNew: + case RTC_OBJC_TYPE(RTCIceGatheringStateNew): return webrtc::PeerConnectionInterface::kIceGatheringNew; - case RTCIceGatheringStateGathering: + case RTC_OBJC_TYPE(RTCIceGatheringStateGathering): return webrtc::PeerConnectionInterface::kIceGatheringGathering; - case RTCIceGatheringStateComplete: + case RTC_OBJC_TYPE(RTCIceGatheringStateComplete): return webrtc::PeerConnectionInterface::kIceGatheringComplete; } } -+ (RTCIceGatheringState)iceGatheringStateForNativeState: ++ (RTC_OBJC_TYPE(RTCIceGatheringState))iceGatheringStateForNativeState: (webrtc::PeerConnectionInterface::IceGatheringState)nativeState { switch (nativeState) { case webrtc::PeerConnectionInterface::kIceGatheringNew: - return RTCIceGatheringStateNew; + return RTC_OBJC_TYPE(RTCIceGatheringStateNew); case webrtc::PeerConnectionInterface::kIceGatheringGathering: - return RTCIceGatheringStateGathering; + return RTC_OBJC_TYPE(RTCIceGatheringStateGathering); case webrtc::PeerConnectionInterface::kIceGatheringComplete: - return RTCIceGatheringStateComplete; + return RTC_OBJC_TYPE(RTCIceGatheringStateComplete); } } -+ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state { ++ (NSString *)stringForIceGatheringState:(RTC_OBJC_TYPE(RTCIceGatheringState))state { switch (state) { - case RTCIceGatheringStateNew: + case RTC_OBJC_TYPE(RTCIceGatheringStateNew): return @"NEW"; - case RTCIceGatheringStateGathering: + case RTC_OBJC_TYPE(RTCIceGatheringStateGathering): return @"GATHERING"; - case RTCIceGatheringStateComplete: + case RTC_OBJC_TYPE(RTCIceGatheringStateComplete): return @"COMPLETE"; } } + (webrtc::PeerConnectionInterface::StatsOutputLevel) - nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level { + nativeStatsOutputLevelForLevel:(RTC_OBJC_TYPE(RTCStatsOutputLevel))level { switch (level) { - case RTCStatsOutputLevelStandard: + case RTC_OBJC_TYPE(RTCStatsOutputLevelStandard): return webrtc::PeerConnectionInterface::kStatsOutputLevelStandard; - case RTCStatsOutputLevelDebug: + case RTC_OBJC_TYPE(RTCStatsOutputLevelDebug): return webrtc::PeerConnectionInterface::kStatsOutputLevelDebug; } } diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h 
b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h index 08734083cf..a5df8c8201 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h @@ -54,7 +54,10 @@ NS_ASSUME_NONNULL_BEGIN (nullable webrtc::AudioDeviceModule *)audioDeviceModule audioProcessingModule: (webrtc::scoped_refptr<webrtc::AudioProcessing>) - audioProcessingModule; + audioProcessingModule + bypassVoiceProcessing: + (BOOL) + bypassVoiceProcessing; - (instancetype) initWithNativeAudioEncoderFactory: @@ -71,11 +74,10 @@ NS_ASSUME_NONNULL_BEGIN audioDeviceModule: (nullable webrtc::AudioDeviceModule *)audioDeviceModule audioProcessingModule: - (webrtc::scoped_refptr<webrtc::AudioProcessing>) - audioProcessingModule - networkControllerFactory: - (std::unique_ptr<webrtc::NetworkControllerFactoryInterface>) - networkControllerFactory; + (rtc::scoped_refptr<webrtc::AudioProcessing>)audioProcessingModule + networkControllerFactory:(std::unique_ptr<webrtc::NetworkControllerFactoryInterface>) + networkControllerFactory + bypassVoiceProcessing:(BOOL)bypassVoiceProcessing; - (instancetype) initWithEncoderFactory: diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index abfa679a1c..e8926ccfe5 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -10,7 +10,7 @@ #import <Foundation/Foundation.h> -#import "sdk/objc/base/RTCMacros.h" +#import "RTCMacros.h" NS_ASSUME_NONNULL_BEGIN @@ -24,6 +24,11 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCVideoSource); @class RTC_OBJC_TYPE(RTCVideoTrack); @class RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions); +@class RTC_OBJC_TYPE(RTCAudioDeviceModule); +@class RTC_OBJC_TYPE(RTCRtpCapabilities); + +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCRtpMediaType)); + @protocol RTC_OBJC_TYPE (RTCPeerConnectionDelegate); @protocol RTC_OBJC_TYPE @@ -34,6 +39,8 @@ NS_ASSUME_NONNULL_BEGIN (RTCSSLCertificateVerifier); @protocol RTC_OBJC_TYPE (RTCAudioDevice); +@protocol RTC_OBJC_TYPE +(RTCAudioProcessingModule); RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) : NSObject @@ -41,50 +48,50 @@ RTC_OBJC_EXPORT /* Initialize object with default H264 video encoder/decoder factories and default ADM */ - (instancetype)init; -/* Initialize object with injectable video encoder/decoder factories and default - * ADM */ +/* Initialize object with injectable video encoder/decoder factories and default ADM */ - (instancetype) - initWithEncoderFactory: - (nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory - decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>) - decoderFactory; + initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory + decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory; -/* Initialize object with injectable video encoder/decoder factories and - * injectable ADM */ +/* Initialize object with injectable video encoder/decoder factories and injectable ADM */ - (instancetype) - initWithEncoderFactory: - (nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory - decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>) - decoderFactory - audioDevice: - (nullable id<RTC_OBJC_TYPE(RTCAudioDevice)>)audioDevice; + initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory + decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory + audioDevice:(nullable id<RTC_OBJC_TYPE(RTCAudioDevice)>)audioDevice; + +/* Initialize object with bypass voice processing */ +- (instancetype) + initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing + encoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory + decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory + audioProcessingModule: + (nullable id<RTC_OBJC_TYPE(RTCAudioProcessingModule)>)audioProcessingModule; + +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioDeviceModule) *audioDeviceModule; /** * Valid kind values are kRTCMediaStreamTrackKindAudio and *
kRTCMediaStreamTrackKindVideo. */ -- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesForKind: - (NSString *)kind; +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesForKind:(NSString *)kind; /** * Valid kind values are kRTCMediaStreamTrackKindAudio and * kRTCMediaStreamTrackKindVideo. */ -- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesForKind: - (NSString *)kind; +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesForKind:(NSString *)kind; /** Initialize an RTCAudioSource with constraints. */ - (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints: (nullable RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints; -/** Initialize an RTCAudioTrack with an id. Convenience ctor to use an audio - * source with no constraints. +/** Initialize an RTCAudioTrack with an id. Convenience ctor to use an audio source + * with no constraints. */ - (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithTrackId:(NSString *)trackId; /** Initialize an RTCAudioTrack with a source and an id. */ -- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource: - (RTC_OBJC_TYPE(RTCAudioSource) *)source +- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource:(RTC_OBJC_TYPE(RTCAudioSource) *)source trackId:(NSString *)trackId; /** Initialize a generic RTCVideoSource. The RTCVideoSource should be @@ -101,8 +108,7 @@ RTC_OBJC_EXPORT - (RTC_OBJC_TYPE(RTCVideoSource) *)videoSourceForScreenCast:(BOOL)forScreenCast; /** Initialize an RTCVideoTrack with a source and an id. */ -- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource: - (RTC_OBJC_TYPE(RTCVideoSource) *)source +- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource:(RTC_OBJC_TYPE(RTCVideoSource) *)source trackId:(NSString *)trackId; /** Initialize an RTCMediaStream with an id. */ @@ -112,32 +118,22 @@ RTC_OBJC_EXPORT * delegate. */ - (nullable RTC_OBJC_TYPE(RTCPeerConnection) *) - peerConnectionWithConfiguration: - (RTC_OBJC_TYPE(RTCConfiguration) *)configuration - constraints: - (RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints - delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate; + peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate; - (nullable RTC_OBJC_TYPE(RTCPeerConnection) *) - peerConnectionWithConfiguration: - (RTC_OBJC_TYPE(RTCConfiguration) *)configuration - constraints: - (RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints certificateVerifier: - (id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)>) - certificateVerifier - delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate; + (id<RTC_OBJC_TYPE(RTCSSLCertificateVerifier)>)certificateVerifier + delegate:(nullable id<RTC_OBJC_TYPE(RTCPeerConnectionDelegate)>)delegate; /** Set the options to be used for subsequently created RTCPeerConnections */ -- (void)setOptions: - (nonnull RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *)options; +- (void)setOptions:(nonnull RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions) *)options; -/** Start an AecDump recording. This API call will likely change in the future. - */ -- (BOOL)startAecDumpWithFilePath:(NSString *)filePath - maxSizeInBytes:(int64_t)maxSizeInBytes; +/** Start an AecDump recording. This API call will likely change in the future. 
*/ +- (BOOL)startAecDumpWithFilePath:(NSString *)filePath maxSizeInBytes:(int64_t)maxSizeInBytes; /* Stop an active AecDump recording */ - (void)stopAecDump; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 79afe271da..cd9ed6ab5e 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -15,6 +15,9 @@ #import "RTCPeerConnectionFactoryOptions+Private.h" #import "RTCRtpCapabilities+Private.h" +#import "RTCAudioDeviceModule.h" +#import "RTCAudioDeviceModule+Private.h" + #import "RTCAudioSource+Private.h" #import "RTCAudioTrack+Private.h" #import "RTCMediaConstraints+Private.h" @@ -22,6 +25,9 @@ #import "RTCPeerConnection+Private.h" #import "RTCVideoSource+Private.h" #import "RTCVideoTrack+Private.h" +#import "RTCRtpReceiver+Private.h" +#import "RTCRtpCapabilities+Private.h" +#import "RTCRtpCodecCapability+Private.h" #import "base/RTCLogging.h" #import "base/RTCVideoDecoderFactory.h" #import "base/RTCVideoEncoderFactory.h" @@ -29,10 +35,10 @@ #include "rtc_base/checks.h" #include "sdk/objc/native/api/network_monitor_factory.h" #include "sdk/objc/native/api/ssl_certificate_verifier.h" +#include "system_wrappers/include/field_trial.h" -#include "api/audio/audio_device.h" -#include "api/audio/audio_processing.h" #include "api/audio/builtin_audio_processing_builder.h" +#include "api/environment/environment_factory.h" #include "api/audio_codecs/builtin_audio_decoder_factory.h" #include "api/audio_codecs/builtin_audio_encoder_factory.h" #include "api/enable_media.h" @@ -42,6 +48,8 @@ #import "components/video_codec/RTCVideoDecoderFactoryH264.h" #import "components/video_codec/RTCVideoEncoderFactoryH264.h" #include "media/base/media_constants.h" +#include "modules/audio_device/include/audio_device.h" +#include "modules/audio_processing/include/audio_processing.h" #include "sdk/objc/native/api/objc_audio_device_module.h" #include "sdk/objc/native/api/video_decoder_factory.h" @@ -49,6 +57,9 @@ #include "sdk/objc/native/src/objc_video_decoder_factory.h" #include "sdk/objc/native/src/objc_video_encoder_factory.h" +#import "components/audio/RTCAudioProcessingModule.h" +#import "components/audio/RTCDefaultAudioProcessingModule+Private.h" + #if defined(WEBRTC_IOS) #import "sdk/objc/native/api/audio_device_module.h" #endif @@ -57,73 +68,94 @@ @implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) { std::unique_ptr<webrtc::Thread> _networkThread; std::unique_ptr<webrtc::Thread> _workerThread; std::unique_ptr<webrtc::Thread> _signalingThread; + webrtc::scoped_refptr<webrtc::AudioDeviceModule> _nativeAudioDeviceModule; + RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) *_defaultAudioProcessingModule; + BOOL _hasStartedAecDump; } @synthesize nativeFactory = _nativeFactory; +@synthesize audioDeviceModule = _audioDeviceModule; -- (webrtc::scoped_refptr<webrtc::AudioDeviceModule>)audioDeviceModule { +- (webrtc::scoped_refptr<webrtc::AudioDeviceModule>)createAudioDeviceModule:(BOOL)bypassVoiceProcessing { #if defined(WEBRTC_IOS) - return webrtc::CreateAudioDeviceModule(); + return webrtc::CreateAudioDeviceModule(bypassVoiceProcessing); #else return nullptr; #endif } - (instancetype)init { - webrtc::PeerConnectionFactoryDependencies dependencies; - dependencies.audio_encoder_factory = - webrtc::CreateBuiltinAudioEncoderFactory(); - dependencies.audio_decoder_factory = - webrtc::CreateBuiltinAudioDecoderFactory(); - dependencies.video_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory( - [[RTC_OBJC_TYPE(RTCVideoEncoderFactoryH264) alloc] init]); - 
dependencies.video_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory( - [[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init]); - dependencies.adm = [self audioDeviceModule]; - return [self initWithMediaAndDependencies:std::move(dependencies)]; + return [self + initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() + nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() + nativeVideoEncoderFactory:webrtc::ObjCToNativeVideoEncoderFactory([[RTC_OBJC_TYPE( + RTCVideoEncoderFactoryH264) alloc] init]) + nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE( + RTCVideoDecoderFactoryH264) alloc] init]) + audioDeviceModule:[self createAudioDeviceModule:NO].get() + audioProcessingModule:nullptr + bypassVoiceProcessing:NO]; } - (instancetype) - initWithEncoderFactory: - (nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory - decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>) - decoderFactory { - return [self initWithEncoderFactory:encoderFactory - decoderFactory:decoderFactory - audioDevice:nil]; + initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory + decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory { + return [self initWithEncoderFactory:encoderFactory decoderFactory:decoderFactory audioDevice:nil]; } - (instancetype) - initWithEncoderFactory: - (nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory - decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>) - decoderFactory - audioDevice: - (nullable id<RTC_OBJC_TYPE(RTCAudioDevice)>)audioDevice { + initWithEncoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoEncoderFactory)>)encoderFactory + decoderFactory:(nullable id<RTC_OBJC_TYPE(RTCVideoDecoderFactory)>)decoderFactory + audioDevice:(nullable id<RTC_OBJC_TYPE(RTCAudioDevice)>)audioDevice { #ifdef HAVE_NO_MEDIA return [self initWithNoMedia]; #else - webrtc::PeerConnectionFactoryDependencies dependencies; - dependencies.audio_encoder_factory = - webrtc::CreateBuiltinAudioEncoderFactory(); - dependencies.audio_decoder_factory = - webrtc::CreateBuiltinAudioDecoderFactory(); + std::unique_ptr<webrtc::VideoEncoderFactory> native_encoder_factory; + std::unique_ptr<webrtc::VideoDecoderFactory> native_decoder_factory; if (encoderFactory) { - dependencies.video_encoder_factory = - webrtc::ObjCToNativeVideoEncoderFactory(encoderFactory); + native_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(encoderFactory); } if (decoderFactory) { - dependencies.video_decoder_factory = - webrtc::ObjCToNativeVideoDecoderFactory(decoderFactory); + native_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(decoderFactory); } + rtc::scoped_refptr<webrtc::AudioDeviceModule> audio_device_module; if (audioDevice) { - dependencies.adm = webrtc::CreateAudioDeviceModule(audioDevice); + audio_device_module = webrtc::CreateAudioDeviceModule(audioDevice); } else { - dependencies.adm = [self audioDeviceModule]; + audio_device_module = [self createAudioDeviceModule:NO]; + } + return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() + nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() + nativeVideoEncoderFactory:std::move(native_encoder_factory) + nativeVideoDecoderFactory:std::move(native_decoder_factory) + audioDeviceModule:audio_device_module.get() + audioProcessingModule:nullptr + bypassVoiceProcessing:NO]; +#endif +} + +- (instancetype)initWithMediaAndDependencies: + (webrtc::PeerConnectionFactoryDependencies)dependencies { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" + // audio_processing_builder should be used instead in new code. 
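// Annotation (not part of the patch): a minimal usage sketch of the
// injectable-factory initializers reflowed above, using the stock H264
// factories this file already imports. Passing audioDevice:nil falls back to
// [self createAudioDeviceModule:NO].
RTC_OBJC_TYPE(RTCPeerConnectionFactory) *factory =
    [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]
        initWithEncoderFactory:[[RTC_OBJC_TYPE(RTCVideoEncoderFactoryH264) alloc] init]
                decoderFactory:[[RTC_OBJC_TYPE(RTCVideoDecoderFactoryH264) alloc] init]
                   audioDevice:nil];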
+ RTC_CHECK(dependencies.audio_processing == nullptr); +#pragma clang diagnostic pop + +#ifndef WEBRTC_EXCLUDE_AUDIO_PROCESSING_MODULE + if (dependencies.audio_processing_builder == nullptr) { + dependencies.audio_processing_builder = + std::make_unique(); } - return [self initWithMediaAndDependencies:std::move(dependencies)]; #endif + if (dependencies.event_log_factory == nullptr) { + dependencies.event_log_factory = + std::make_unique(); + + } + webrtc::EnableMedia(dependencies); + return [self initWithNativeDependencies:std::move(dependencies)]; } - (instancetype)initWithNativeDependencies: @@ -147,6 +179,7 @@ - (instancetype)initWithNativeDependencies: // Set fields that are relevant both to 'no media' and 'with media' // scenarios. + dependencies.network_thread = _networkThread.get(); dependencies.worker_thread = _workerThread.get(); dependencies.signaling_thread = _signalingThread.get(); @@ -166,115 +199,188 @@ - (instancetype)initWithNativeDependencies: return self; } -- (instancetype)initWithNoMedia { - return [self - initWithNativeDependencies:webrtc::PeerConnectionFactoryDependencies()]; +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType { + + webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpSenderCapabilities([RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType: mediaType]); + + return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeRtpCapabilities:capabilities]; } -- (instancetype) - initWithNativeAudioEncoderFactory: - (webrtc::scoped_refptr)audioEncoderFactory - nativeAudioDecoderFactory: - (webrtc::scoped_refptr) - audioDecoderFactory - nativeVideoEncoderFactory: - (std::unique_ptr) - videoEncoderFactory - nativeVideoDecoderFactory: - (std::unique_ptr) - videoDecoderFactory - audioDeviceModule: - (webrtc::AudioDeviceModule *)audioDeviceModule - audioProcessingModule: - (webrtc::scoped_refptr) - audioProcessingModule { - webrtc::PeerConnectionFactoryDependencies dependencies; - dependencies.audio_encoder_factory = std::move(audioEncoderFactory); - dependencies.audio_decoder_factory = std::move(audioDecoderFactory); - dependencies.video_encoder_factory = std::move(videoEncoderFactory); - dependencies.video_decoder_factory = std::move(videoDecoderFactory); - dependencies.adm = std::move(audioDeviceModule); - if (audioProcessingModule != nullptr) { - dependencies.audio_processing_builder = - CustomAudioProcessing(std::move(audioProcessingModule)); - } - return [self initWithMediaAndDependencies:std::move(dependencies)]; +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesFor:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType { + + webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpReceiverCapabilities([RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType: mediaType]); + + return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeRtpCapabilities:capabilities]; } - (instancetype) - initWithNativeAudioEncoderFactory: - (webrtc::scoped_refptr)audioEncoderFactory - nativeAudioDecoderFactory: - (webrtc::scoped_refptr) - audioDecoderFactory - nativeVideoEncoderFactory: - (std::unique_ptr) - videoEncoderFactory - nativeVideoDecoderFactory: - (std::unique_ptr) - videoDecoderFactory - audioDeviceModule: - (webrtc::AudioDeviceModule *)audioDeviceModule - audioProcessingModule: - (webrtc::scoped_refptr) - audioProcessingModule - networkControllerFactory: - (std::unique_ptr) - networkControllerFactory { - webrtc::PeerConnectionFactoryDependencies dependencies; - dependencies.adm = 
std::move(audioDeviceModule); - dependencies.audio_encoder_factory = std::move(audioEncoderFactory); - dependencies.audio_decoder_factory = std::move(audioDecoderFactory); - dependencies.video_encoder_factory = std::move(videoEncoderFactory); - dependencies.video_decoder_factory = std::move(videoDecoderFactory); - if (audioProcessingModule != nullptr) { - dependencies.audio_processing_builder = - CustomAudioProcessing(std::move(audioProcessingModule)); + initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing + encoderFactory:(nullable id)encoderFactory + decoderFactory:(nullable id)decoderFactory + audioProcessingModule: + (nullable id)audioProcessingModule { +#ifdef HAVE_NO_MEDIA + return [self initWithNoMedia]; +#else + std::unique_ptr native_encoder_factory; + std::unique_ptr native_decoder_factory; + if (encoderFactory) { + native_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(encoderFactory); + } + if (decoderFactory) { + native_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(decoderFactory); } - dependencies.network_controller_factory = std::move(networkControllerFactory); - return [self initWithMediaAndDependencies:std::move(dependencies)]; + rtc::scoped_refptr audio_device_module = [self createAudioDeviceModule:bypassVoiceProcessing]; + + if ([audioProcessingModule isKindOfClass:[RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) class]]) { + _defaultAudioProcessingModule = (RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) *)audioProcessingModule; + } else { + _defaultAudioProcessingModule = [[RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) alloc] init]; + } + + NSLog(@"AudioProcessingModule: %@", _defaultAudioProcessingModule); + + return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() + nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() + nativeVideoEncoderFactory:std::move(native_encoder_factory) + nativeVideoDecoderFactory:std::move(native_decoder_factory) + audioDeviceModule:audio_device_module.get() + audioProcessingModule:_defaultAudioProcessingModule.nativeAudioProcessingModule + bypassVoiceProcessing:bypassVoiceProcessing]; +#endif } -- (instancetype)initWithMediaAndDependencies: - (webrtc::PeerConnectionFactoryDependencies)dependencies { -#pragma clang diagnostic push -#pragma clang diagnostic ignored "-Wdeprecated-declarations" - // audio_processing_builder should be used instead in new code. 
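// Annotation (hypothetical client code, not part of the patch): the bypass
// path implemented above takes an RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule);
// any other module is replaced by a freshly allocated default one, per the
// isKindOfClass: check.
RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) *apm =
    [[RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) alloc] init];
RTC_OBJC_TYPE(RTCPeerConnectionFactory) *bypassFactory =
    [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc]
        initWithBypassVoiceProcessing:YES
                       encoderFactory:nil
                       decoderFactory:nil
                audioProcessingModule:apm];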
- RTC_CHECK(dependencies.audio_processing == nullptr); -#pragma clang diagnostic pop +- (instancetype)initNative { + self = [super init]; + if (self) { + _networkThread = webrtc::Thread::CreateWithSocketServer(); + _networkThread->SetName("network_thread", _networkThread.get()); + BOOL result = _networkThread->Start(); + RTC_DCHECK(result) << "Failed to start network thread."; -#ifndef WEBRTC_EXCLUDE_AUDIO_PROCESSING_MODULE - if (dependencies.audio_processing_builder == nullptr) { - dependencies.audio_processing_builder = - std::make_unique(); + _workerThread = webrtc::Thread::Create(); + _workerThread->SetName("worker_thread", _workerThread.get()); + result = _workerThread->Start(); + RTC_DCHECK(result) << "Failed to start worker thread."; + + _signalingThread = webrtc::Thread::Create(); + _signalingThread->SetName("signaling_thread", _signalingThread.get()); + result = _signalingThread->Start(); + RTC_DCHECK(result) << "Failed to start signaling thread."; } -#endif - if (dependencies.event_log_factory == nullptr) { - dependencies.event_log_factory = - std::make_unique(); + return self; +} + +- (instancetype)initWithNoMedia { + self = [self initNative]; + if (self) { + webrtc::PeerConnectionFactoryDependencies dependencies; + dependencies.network_thread = _networkThread.get(); + dependencies.worker_thread = _workerThread.get(); + dependencies.signaling_thread = _signalingThread.get(); + if (webrtc::field_trial::IsEnabled("WebRTC-Network-UseNWPathMonitor")) { + dependencies.network_monitor_factory = webrtc::CreateNetworkMonitorFactory(); + } + _nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies)); + NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!"); } - webrtc::EnableMedia(dependencies); - return [self initWithNativeDependencies:std::move(dependencies)]; + return self; } -- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesForKind: - (NSString *)kind { - webrtc::MediaType mediaType = [[self class] mediaTypeForKind:kind]; +- (instancetype)initWithNativeAudioEncoderFactory: + (rtc::scoped_refptr)audioEncoderFactory + nativeAudioDecoderFactory: + (rtc::scoped_refptr)audioDecoderFactory + nativeVideoEncoderFactory: + (std::unique_ptr)videoEncoderFactory + nativeVideoDecoderFactory: + (std::unique_ptr)videoDecoderFactory + audioDeviceModule:(webrtc::AudioDeviceModule *)audioDeviceModule + audioProcessingModule: + (rtc::scoped_refptr)audioProcessingModule + bypassVoiceProcessing:(BOOL)bypassVoiceProcessing { + return [self initWithNativeAudioEncoderFactory:audioEncoderFactory + nativeAudioDecoderFactory:audioDecoderFactory + nativeVideoEncoderFactory:std::move(videoEncoderFactory) + nativeVideoDecoderFactory:std::move(videoDecoderFactory) + audioDeviceModule:audioDeviceModule + audioProcessingModule:audioProcessingModule + networkControllerFactory:nullptr + bypassVoiceProcessing:bypassVoiceProcessing]; +} +- (instancetype)initWithNativeAudioEncoderFactory: + (rtc::scoped_refptr)audioEncoderFactory + nativeAudioDecoderFactory: + (rtc::scoped_refptr)audioDecoderFactory + nativeVideoEncoderFactory: + (std::unique_ptr)videoEncoderFactory + nativeVideoDecoderFactory: + (std::unique_ptr)videoDecoderFactory + audioDeviceModule:(webrtc::AudioDeviceModule *)audioDeviceModule + audioProcessingModule: + (rtc::scoped_refptr)audioProcessingModule + networkControllerFactory: + (std::unique_ptr) + networkControllerFactory + bypassVoiceProcessing:(BOOL)bypassVoiceProcessing { + self = [self initNative]; + if (self) { + 
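// Annotation: the network-monitor branches below are gated on a field trial.
// A hypothetical client would opt in before constructing the factory, for
// example (the string must outlive the call, per field_trial.h):
//   webrtc::field_trial::InitFieldTrialsFromString(
//       "WebRTC-Network-UseNWPathMonitor/Enabled/");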
webrtc::PeerConnectionFactoryDependencies dependencies; + dependencies.network_thread = _networkThread.get(); + dependencies.worker_thread = _workerThread.get(); + dependencies.signaling_thread = _signalingThread.get(); + if (webrtc::field_trial::IsEnabled("WebRTC-Network-UseNWPathMonitor")) { + dependencies.network_monitor_factory = webrtc::CreateNetworkMonitorFactory(); + } + dependencies.trials = std::make_unique(); + dependencies.task_queue_factory = + webrtc::CreateDefaultTaskQueueFactory(dependencies.trials.get()); + + if(audioDeviceModule) { + _nativeAudioDeviceModule = std::move(audioDeviceModule); + } else { + // always create ADM on worker thread + _nativeAudioDeviceModule = _workerThread->BlockingCall([&dependencies, &bypassVoiceProcessing]() { + return webrtc::AudioDeviceModule::Create(webrtc::AudioDeviceModule::AudioLayer::kPlatformDefaultAudio, + dependencies.task_queue_factory.get(), + bypassVoiceProcessing == YES); + }); + } - webrtc::RtpCapabilities rtpCapabilities = - _nativeFactory->GetRtpSenderCapabilities(mediaType); - return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] - initWithNativeRtpCapabilities:rtpCapabilities]; + _audioDeviceModule = [[RTC_OBJC_TYPE(RTCAudioDeviceModule) alloc] initWithNativeModule: _nativeAudioDeviceModule + workerThread: _workerThread.get()]; + dependencies.adm = _nativeAudioDeviceModule; + dependencies.audio_encoder_factory = std::move(audioEncoderFactory); + dependencies.audio_decoder_factory = std::move(audioDecoderFactory); + dependencies.video_encoder_factory = std::move(videoEncoderFactory); + dependencies.video_decoder_factory = std::move(videoDecoderFactory); + + if (audioProcessingModule) { + dependencies.audio_processing = std::move(audioProcessingModule); + } else { + dependencies.audio_processing = webrtc::BuiltinAudioProcessingBuilder().Build(webrtc::CreateEnvironment()); + } + webrtc::EnableMedia(dependencies); + dependencies.event_log_factory = std::make_unique(); + dependencies.network_controller_factory = std::move(networkControllerFactory); + _nativeFactory = webrtc::CreateModularPeerConnectionFactory(std::move(dependencies)); + NSAssert(_nativeFactory, @"Failed to initialize PeerConnectionFactory!"); + } + return self; } -- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesForKind: - (NSString *)kind { +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesForKind:(NSString *)kind { webrtc::MediaType mediaType = [[self class] mediaTypeForKind:kind]; - webrtc::RtpCapabilities rtpCapabilities = - _nativeFactory->GetRtpReceiverCapabilities(mediaType); - return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] - initWithNativeRtpCapabilities:rtpCapabilities]; + webrtc::RtpCapabilities rtpCapabilities = _nativeFactory->GetRtpSenderCapabilities(mediaType); + return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeRtpCapabilities:rtpCapabilities]; +} + +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesForKind:(NSString *)kind { + webrtc::MediaType mediaType = [[self class] mediaTypeForKind:kind]; + + webrtc::RtpCapabilities rtpCapabilities = _nativeFactory->GetRtpReceiverCapabilities(mediaType); + return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeRtpCapabilities:rtpCapabilities]; } - (RTC_OBJC_TYPE(RTCAudioSource) *)audioSourceWithConstraints: @@ -288,61 +394,46 @@ - (instancetype)initWithMediaAndDependencies: webrtc::scoped_refptr source = _nativeFactory->CreateAudioSource(options); - return [[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self - nativeAudioSource:source]; + return 
[[RTC_OBJC_TYPE(RTCAudioSource) alloc] initWithFactory:self nativeAudioSource:source]; } - (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithTrackId:(NSString *)trackId { - RTC_OBJC_TYPE(RTCAudioSource) *audioSource = - [self audioSourceWithConstraints:nil]; + RTC_OBJC_TYPE(RTCAudioSource) *audioSource = [self audioSourceWithConstraints:nil]; return [self audioTrackWithSource:audioSource trackId:trackId]; } -- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource: - (RTC_OBJC_TYPE(RTCAudioSource) *)source +- (RTC_OBJC_TYPE(RTCAudioTrack) *)audioTrackWithSource:(RTC_OBJC_TYPE(RTCAudioSource) *)source trackId:(NSString *)trackId { - return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:self - source:source - trackId:trackId]; + return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:self source:source trackId:trackId]; } - (RTC_OBJC_TYPE(RTCVideoSource) *)videoSource { - return [[RTC_OBJC_TYPE(RTCVideoSource) alloc] - initWithFactory:self - signalingThread:_signalingThread.get() - workerThread:_workerThread.get()]; + return [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self + signalingThread:_signalingThread.get() + workerThread:_workerThread.get()]; } -- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSourceForScreenCast: - (BOOL)forScreenCast { - return [[RTC_OBJC_TYPE(RTCVideoSource) alloc] - initWithFactory:self - signalingThread:_signalingThread.get() - workerThread:_workerThread.get() - isScreenCast:forScreenCast]; +- (RTC_OBJC_TYPE(RTCVideoSource) *)videoSourceForScreenCast:(BOOL)forScreenCast { + return [[RTC_OBJC_TYPE(RTCVideoSource) alloc] initWithFactory:self + signalingThread:_signalingThread.get() + workerThread:_workerThread.get() + isScreenCast:forScreenCast]; } -- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource: - (RTC_OBJC_TYPE(RTCVideoSource) *)source +- (RTC_OBJC_TYPE(RTCVideoTrack) *)videoTrackWithSource:(RTC_OBJC_TYPE(RTCVideoSource) *)source trackId:(NSString *)trackId { - return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:self - source:source - trackId:trackId]; + return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:self source:source trackId:trackId]; } -- (RTC_OBJC_TYPE(RTCMediaStream) *)mediaStreamWithStreamId: - (NSString *)streamId { - return [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:self - streamId:streamId]; +- (RTC_OBJC_TYPE(RTCMediaStream) *)mediaStreamWithStreamId:(NSString *)streamId { + return [[RTC_OBJC_TYPE(RTCMediaStream) alloc] initWithFactory:self streamId:streamId]; } - (nullable RTC_OBJC_TYPE(RTCPeerConnection) *) - peerConnectionWithConfiguration: - (RTC_OBJC_TYPE(RTCConfiguration) *)configuration - constraints: - (RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints - delegate:(nullable id)delegate { + peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + delegate: + (nullable id)delegate { return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithFactory:self configuration:configuration constraints:constraints @@ -351,40 +442,29 @@ - (instancetype)initWithMediaAndDependencies: } - (nullable RTC_OBJC_TYPE(RTCPeerConnection) *) - peerConnectionWithConfiguration: - (RTC_OBJC_TYPE(RTCConfiguration) *)configuration - constraints: - (RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + peerConnectionWithConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints certificateVerifier: - (id) - certificateVerifier - delegate:(nullable id)delegate { - return 
[[RTC_OBJC_TYPE(RTCPeerConnection) alloc] - initWithFactory:self - configuration:configuration - constraints:constraints - certificateVerifier:certificateVerifier - delegate:delegate]; + (id)certificateVerifier + delegate: + (nullable id)delegate { + return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithFactory:self + configuration:configuration + constraints:constraints + certificateVerifier:certificateVerifier + delegate:delegate]; } - (nullable RTC_OBJC_TYPE(RTCPeerConnection) *) - peerConnectionWithDependencies: - (RTC_OBJC_TYPE(RTCConfiguration) *)configuration - constraints: - (RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints - dependencies: - (std::unique_ptr) - dependencies - delegate: - (id) - delegate { - return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] - initWithDependencies:self - configuration:configuration - constraints:constraints - dependencies:std::move(dependencies) - delegate:delegate]; + peerConnectionWithDependencies:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration + constraints:(RTC_OBJC_TYPE(RTCMediaConstraints) *)constraints + dependencies:(std::unique_ptr)dependencies + delegate:(id)delegate { + return [[RTC_OBJC_TYPE(RTCPeerConnection) alloc] initWithDependencies:self + configuration:configuration + constraints:constraints + dependencies:std::move(dependencies) + delegate:delegate]; } - (void)setOptions: @@ -404,8 +484,8 @@ - (BOOL)startAecDumpWithFilePath:(NSString *)filePath } FILE *f = fopen(filePath.UTF8String, "wb"); if (!f) { - RTCLogError( - @"Error opening file: %@. Error: %s", filePath, strerror(errno)); + RTCLogError( + @"Error opening file: %@. Error: %s", filePath, strerror(errno)); return NO; } _hasStartedAecDump = _nativeFactory->StartAecDump(f, maxSizeInBytes); @@ -432,9 +512,9 @@ - (void)stopAecDump { #pragma mark - Private + (webrtc::MediaType)mediaTypeForKind:(NSString *)kind { - if (kind == kRTCMediaStreamTrackKindAudio) { + if (kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio)) { return webrtc::MediaType::AUDIO; - } else if (kind == kRTCMediaStreamTrackKindVideo) { + } else if (kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)) { return webrtc::MediaType::VIDEO; } else { RTC_DCHECK_NOTREACHED(); diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h index 070a0e74a5..4d7025bf93 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h @@ -12,9 +12,9 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactoryBuilder (DefaultComponents) +@interface RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) (DefaultComponents) -+ (RTCPeerConnectionFactoryBuilder *)defaultBuilder; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)defaultBuilder; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm index e5eeba5992..a2f633e1a4 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm @@ -22,11 +22,10 @@ #import "sdk/objc/native/api/audio_device_module.h" #endif -@implementation RTCPeerConnectionFactoryBuilder (DefaultComponents) +@implementation RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) (DefaultComponents) -+ (RTCPeerConnectionFactoryBuilder 
*)defaultBuilder { - RTCPeerConnectionFactoryBuilder *builder = - [[RTCPeerConnectionFactoryBuilder alloc] init]; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)defaultBuilder { + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *builder = [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; auto audioEncoderFactory = webrtc::CreateBuiltinAudioEncoderFactory(); [builder setAudioEncoderFactory:audioEncoderFactory]; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h index 8d2c47365c..b7a687ec5a 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h @@ -21,9 +21,9 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactoryBuilder : NSObject +@interface RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) : NSObject -+ (RTCPeerConnectionFactoryBuilder *)builder; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)builder; - (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm index 8fe5565111..4e1f481e9e 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm @@ -18,12 +18,12 @@ #include "api/video_codecs/video_decoder_factory.h" #include "api/video_codecs/video_encoder_factory.h" -@implementation RTCPeerConnectionFactoryBuilder { +@implementation RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) { webrtc::PeerConnectionFactoryDependencies _dependencies; } -+ (RTCPeerConnectionFactoryBuilder *)builder { - return [[RTCPeerConnectionFactoryBuilder alloc] init]; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)builder { + return [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; } - (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory { diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm index 0cfe53a677..5da4821dfa 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm @@ -42,10 +42,10 @@ - (instancetype)initWithNativeRtpCodecCapability: _name = [NSString stringForStdString:nativeRtpCodecCapability.name]; switch (nativeRtpCodecCapability.kind) { case webrtc::MediaType::AUDIO: - _kind = kRTCMediaStreamTrackKindAudio; + _kind = RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio); break; case webrtc::MediaType::VIDEO: - _kind = kRTCMediaStreamTrackKindVideo; + _kind = RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo); break; default: RTC_DCHECK_NOTREACHED(); @@ -95,9 +95,9 @@ - (NSString *)description { rtpCodecCapability.name = [NSString stdStringForString:_name]; // NSString pointer comparison is safe here since "kind" is readonly and only // populated above. 
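// Annotation (hypothetical caller-side sketch): the renamed constants below
// still support the documented pointer comparison, with RTC_CONSTANT_TYPE
// resolving to the optionally prefixed symbol:
//   if (capability.kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio)) {
//     // audio codec capability
//   }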
- if (_kind == kRTCMediaStreamTrackKindAudio) { + if (_kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio)) { rtpCodecCapability.kind = webrtc::MediaType::AUDIO; - } else if (_kind == kRTCMediaStreamTrackKindVideo) { + } else if (_kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)) { rtpCodecCapability.kind = webrtc::MediaType::VIDEO; } else { RTC_DCHECK_NOTREACHED(); diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h index 9934f944c6..033ad700a1 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h @@ -14,21 +14,23 @@ NS_ASSUME_NONNULL_BEGIN -RTC_EXTERN const NSString *const kRTCRtxCodecName; -RTC_EXTERN const NSString *const kRTCRedCodecName; -RTC_EXTERN const NSString *const kRTCUlpfecCodecName; -RTC_EXTERN const NSString *const kRTCFlexfecCodecName; -RTC_EXTERN const NSString *const kRTCOpusCodecName; -RTC_EXTERN const NSString *const kRTCIsacCodecName; -RTC_EXTERN const NSString *const kRTCL16CodecName; -RTC_EXTERN const NSString *const kRTCG722CodecName; -RTC_EXTERN const NSString *const kRTCPcmuCodecName; -RTC_EXTERN const NSString *const kRTCPcmaCodecName; -RTC_EXTERN const NSString *const kRTCDtmfCodecName; -RTC_EXTERN const NSString *const kRTCComfortNoiseCodecName; -RTC_EXTERN const NSString *const kRTCVp8CodecName; -RTC_EXTERN const NSString *const kRTCVp9CodecName; -RTC_EXTERN const NSString *const kRTCH264CodecName; +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCRtxCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCRedCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCUlpfecCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCFlexfecCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCOpusCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCIsacCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCL16CodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCG722CodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCIlbcCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCPcmuCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCPcmaCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCDtmfCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCComfortNoiseCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCVp8CodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCVp9CodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCH264CodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCAv1CodecName); /** Defined in https://www.w3.org/TR/webrtc/#idl-def-rtcrtpcodecparameters */ RTC_OBJC_EXPORT diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm index deb4738f5b..a58705567c 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm @@ -16,21 +16,22 @@ #include "media/base/media_constants.h" #include "rtc_base/checks.h" -const NSString *const kRTCRtxCodecName = @(webrtc::kRtxCodecName); -const NSString *const kRTCRedCodecName = @(webrtc::kRedCodecName); -const NSString *const kRTCUlpfecCodecName = @(webrtc::kUlpfecCodecName); -const NSString *const kRTCFlexfecCodecName = @(webrtc::kFlexfecCodecName); -const NSString *const kRTCOpusCodecName = @(webrtc::kOpusCodecName); -const NSString *const 
kRTCL16CodecName = @(webrtc::kL16CodecName); -const NSString *const kRTCG722CodecName = @(webrtc::kG722CodecName); -const NSString *const kRTCPcmuCodecName = @(webrtc::kPcmuCodecName); -const NSString *const kRTCPcmaCodecName = @(webrtc::kPcmaCodecName); -const NSString *const kRTCDtmfCodecName = @(webrtc::kDtmfCodecName); -const NSString *const kRTCComfortNoiseCodecName = +const NSString * const RTC_CONSTANT_TYPE(RTCRtxCodecName) = @(webrtc::kRtxCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCRedCodecName) = @(webrtc::kRedCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCUlpfecCodecName) = @(webrtc::kUlpfecCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCFlexfecCodecName) = @(webrtc::kFlexfecCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCOpusCodecName) = @(webrtc::kOpusCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCL16CodecName) = @(webrtc::kL16CodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCG722CodecName) = @(webrtc::kG722CodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCPcmuCodecName) = @(webrtc::kPcmuCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCPcmaCodecName) = @(webrtc::kPcmaCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCDtmfCodecName) = @(webrtc::kDtmfCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCComfortNoiseCodecName) = @(webrtc::kComfortNoiseCodecName); -const NSString *const kRTCVp8CodecName = @(webrtc::kVp8CodecName); -const NSString *const kRTCVp9CodecName = @(webrtc::kVp9CodecName); -const NSString *const kRTCH264CodecName = @(webrtc::kH264CodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCVp8CodecName) = @(webrtc::kVp8CodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCVp9CodecName) = @(webrtc::kVp9CodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCH264CodecName) = @(webrtc::kH264CodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCAv1CodecName) = @(webrtc::kAv1CodecName); @implementation RTC_OBJC_TYPE (RTCRtpCodecParameters) @@ -54,10 +55,10 @@ - (instancetype)initWithNativeParameters: _name = [NSString stringForStdString:nativeParameters.name]; switch (nativeParameters.kind) { case webrtc::MediaType::AUDIO: - _kind = kRTCMediaStreamTrackKindAudio; + _kind = RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio); break; case webrtc::MediaType::VIDEO: - _kind = kRTCMediaStreamTrackKindVideo; + _kind = RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo); break; default: RTC_DCHECK_NOTREACHED(); @@ -85,9 +86,9 @@ - (instancetype)initWithNativeParameters: parameters.name = [NSString stdStringForString:_name]; // NSString pointer comparison is safe here since "kind" is readonly and only // populated above. - if (_kind == kRTCMediaStreamTrackKindAudio) { + if (_kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio)) { parameters.kind = webrtc::MediaType::AUDIO; - } else if (_kind == kRTCMediaStreamTrackKindVideo) { + } else if (_kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)) { parameters.kind = webrtc::MediaType::VIDEO; } else { RTC_DCHECK_NOTREACHED(); diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h index e1829ea164..45187e9808 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h @@ -15,11 +15,11 @@ NS_ASSUME_NONNULL_BEGIN /** Corresponds to webrtc::Priority. 
*/ -typedef NS_ENUM(NSInteger, RTCPriority) { - RTCPriorityVeryLow, - RTCPriorityLow, - RTCPriorityMedium, - RTCPriorityHigh +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCPriority)) { + RTC_OBJC_TYPE(RTCPriorityVeryLow), + RTC_OBJC_TYPE(RTCPriorityLow), + RTC_OBJC_TYPE(RTCPriorityMedium), + RTC_OBJC_TYPE(RTCPriorityHigh) }; RTC_OBJC_EXPORT @@ -63,13 +63,17 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) double bitratePriority; /** The relative DiffServ Code Point priority. */ -@property(nonatomic, assign) RTCPriority networkPriority; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCPriority) networkPriority; /** Allow dynamic frame length changes for audio: https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime */ @property(nonatomic, assign) BOOL adaptiveAudioPacketTime; +/** A case-sensitive identifier of the scalability mode to be used for this stream. + https://w3c.github.io/webrtc-svc/#rtcrtpencodingparameters */ +@property(nonatomic, copy, nullable) NSString *scalabilityMode; + - (instancetype)init; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm index 49ae8a192c..3bf94000fb 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm @@ -25,6 +25,7 @@ @implementation RTC_OBJC_TYPE (RTCRtpEncodingParameters) @synthesize bitratePriority = _bitratePriority; @synthesize networkPriority = _networkPriority; @synthesize adaptiveAudioPacketTime = _adaptiveAudioPacketTime; +@synthesize scalabilityMode = _scalabilityMode; - (instancetype)init { webrtc::RtpEncodingParameters nativeParameters; @@ -61,6 +62,9 @@ - (instancetype)initWithNativeParameters: if (nativeParameters.ssrc) { _ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc]; } + if (nativeParameters.scalability_mode) { + _scalabilityMode = [NSString stringWithUTF8String:nativeParameters.scalability_mode->c_str()]; + } _bitratePriority = nativeParameters.bitrate_priority; _networkPriority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters) priorityFromNativePriority:nativeParameters.network_priority]; @@ -95,6 +99,9 @@ - (instancetype)initWithNativeParameters: if (_ssrc != nil) { parameters.ssrc = std::optional(_ssrc.unsignedLongValue); } + if (_scalabilityMode != nil) { + parameters.scalability_mode = std::optional(std::string([_scalabilityMode UTF8String])); + } parameters.bitrate_priority = _bitratePriority; parameters.network_priority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters) nativePriorityFromPriority:_networkPriority]; @@ -102,29 +109,29 @@ - (instancetype)initWithNativeParameters: return parameters; } -+ (webrtc::Priority)nativePriorityFromPriority:(RTCPriority)networkPriority { ++ (webrtc::Priority)nativePriorityFromPriority:(RTC_OBJC_TYPE(RTCPriority))networkPriority { switch (networkPriority) { - case RTCPriorityVeryLow: + case RTC_OBJC_TYPE(RTCPriorityVeryLow): return webrtc::Priority::kVeryLow; - case RTCPriorityLow: + case RTC_OBJC_TYPE(RTCPriorityLow): return webrtc::Priority::kLow; - case RTCPriorityMedium: + case RTC_OBJC_TYPE(RTCPriorityMedium): return webrtc::Priority::kMedium; - case RTCPriorityHigh: + case RTC_OBJC_TYPE(RTCPriorityHigh): return webrtc::Priority::kHigh; } } -+ (RTCPriority)priorityFromNativePriority:(webrtc::Priority)nativePriority { ++ (RTC_OBJC_TYPE(RTCPriority))priorityFromNativePriority:(webrtc::Priority)nativePriority { switch (nativePriority) { case webrtc::Priority::kVeryLow: - return RTCPriorityVeryLow; + return 
RTC_OBJC_TYPE(RTCPriorityVeryLow); case webrtc::Priority::kLow: - return RTCPriorityLow; + return RTC_OBJC_TYPE(RTCPriorityLow); case webrtc::Priority::kMedium: - return RTCPriorityMedium; + return RTC_OBJC_TYPE(RTCPriorityMedium); case webrtc::Priority::kHigh: - return RTCPriorityHigh; + return RTC_OBJC_TYPE(RTCPriorityHigh); } } diff --git a/sdk/objc/api/peerconnection/RTCRtpHeaderExtensionCapability.h b/sdk/objc/api/peerconnection/RTCRtpHeaderExtensionCapability.h index 37e7788018..9f8aab04fe 100644 --- a/sdk/objc/api/peerconnection/RTCRtpHeaderExtensionCapability.h +++ b/sdk/objc/api/peerconnection/RTCRtpHeaderExtensionCapability.h @@ -12,7 +12,7 @@ #import "sdk/objc/base/RTCMacros.h" -typedef NS_ENUM(NSInteger, RTCRtpTransceiverDirection); +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCRtpTransceiverDirection)); NS_ASSUME_NONNULL_BEGIN @@ -30,7 +30,7 @@ RTC_OBJC_EXPORT BOOL preferredEncrypted; /** Direction of the header extension. */ -@property(nonatomic) RTCRtpTransceiverDirection direction; +@property(nonatomic) RTC_OBJC_TYPE(RTCRtpTransceiverDirection) direction; - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters.h b/sdk/objc/api/peerconnection/RTCRtpParameters.h index ee1c540829..6ad90753bd 100644 --- a/sdk/objc/api/peerconnection/RTCRtpParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpParameters.h @@ -19,11 +19,11 @@ NS_ASSUME_NONNULL_BEGIN /** Corresponds to webrtc::DegradationPreference. */ -typedef NS_ENUM(NSInteger, RTCDegradationPreference) { - RTCDegradationPreferenceDisabled, - RTCDegradationPreferenceMaintainFramerate, - RTCDegradationPreferenceMaintainResolution, - RTCDegradationPreferenceBalanced +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCDegradationPreference)) { + RTC_OBJC_TYPE(RTCDegradationPreferenceDisabled), + RTC_OBJC_TYPE(RTCDegradationPreferenceMaintainFramerate), + RTC_OBJC_TYPE(RTCDegradationPreferenceMaintainResolution), + RTC_OBJC_TYPE(RTCDegradationPreferenceBalanced) }; RTC_OBJC_EXPORT diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters.mm b/sdk/objc/api/peerconnection/RTCRtpParameters.mm index dd7b0deab5..3c631631ae 100644 --- a/sdk/objc/api/peerconnection/RTCRtpParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpParameters.mm @@ -84,23 +84,22 @@ - (instancetype)initWithNativeParameters: } if (_degradationPreference) { parameters.degradation_preference = [RTC_OBJC_TYPE(RTCRtpParameters) - nativeDegradationPreferenceFromDegradationPreference: - (RTCDegradationPreference)_degradationPreference.intValue]; + nativeDegradationPreferenceFromDegradationPreference:(RTC_OBJC_TYPE(RTCDegradationPreference)) + _degradationPreference.intValue]; } return parameters; } -+ (webrtc::DegradationPreference) - nativeDegradationPreferenceFromDegradationPreference: - (RTCDegradationPreference)degradationPreference { ++ (webrtc::DegradationPreference)nativeDegradationPreferenceFromDegradationPreference: + (RTC_OBJC_TYPE(RTCDegradationPreference))degradationPreference { switch (degradationPreference) { - case RTCDegradationPreferenceDisabled: + case RTC_OBJC_TYPE(RTCDegradationPreferenceDisabled): return webrtc::DegradationPreference::DISABLED; - case RTCDegradationPreferenceMaintainFramerate: + case RTC_OBJC_TYPE(RTCDegradationPreferenceMaintainFramerate): return webrtc::DegradationPreference::MAINTAIN_FRAMERATE; - case RTCDegradationPreferenceMaintainResolution: + case RTC_OBJC_TYPE(RTCDegradationPreferenceMaintainResolution): return webrtc::DegradationPreference::MAINTAIN_RESOLUTION; - case 
RTCDegradationPreferenceBalanced: + case RTC_OBJC_TYPE(RTCDegradationPreferenceBalanced): return webrtc::DegradationPreference::BALANCED; } } @@ -113,13 +112,13 @@ + (NSNumber *)degradationPreferenceFromNativeDegradationPreference: switch (*nativeDegradationPreference) { case webrtc::DegradationPreference::DISABLED: - return @(RTCDegradationPreferenceDisabled); + return @(RTC_OBJC_TYPE(RTCDegradationPreferenceDisabled)); case webrtc::DegradationPreference::MAINTAIN_FRAMERATE: - return @(RTCDegradationPreferenceMaintainFramerate); + return @(RTC_OBJC_TYPE(RTCDegradationPreferenceMaintainFramerate)); case webrtc::DegradationPreference::MAINTAIN_RESOLUTION: - return @(RTCDegradationPreferenceMaintainResolution); + return @(RTC_OBJC_TYPE(RTCDegradationPreferenceMaintainResolution)); case webrtc::DegradationPreference::BALANCED: - return @(RTCDegradationPreferenceBalanced); + return @(RTC_OBJC_TYPE(RTCDegradationPreferenceBalanced)); } } diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h b/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h index 0c051bc654..94807d33ec 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h @@ -44,12 +44,10 @@ class RtpReceiverDelegateAdapter : public RtpReceiverObserverInterface { (webrtc::scoped_refptr)nativeRtpReceiver NS_DESIGNATED_INITIALIZER; -+ (RTCRtpMediaType)mediaTypeForNativeMediaType: - (webrtc::MediaType)nativeMediaType; ++ (RTC_OBJC_TYPE(RTCRtpMediaType))mediaTypeForNativeMediaType:(webrtc::MediaType)nativeMediaType; -+ (webrtc::MediaType)nativeMediaTypeForMediaType:(RTCRtpMediaType)mediaType; - -+ (NSString *)stringForMediaType:(RTCRtpMediaType)mediaType; ++ (NSString *)stringForMediaType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType; ++ (webrtc::MediaType)nativeMediaTypeForMediaType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver.h b/sdk/objc/api/peerconnection/RTCRtpReceiver.h index 2877a4c12b..aa9fdf347b 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver.h +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver.h @@ -17,12 +17,12 @@ NS_ASSUME_NONNULL_BEGIN /** Represents the media type of the RtpReceiver. */ -typedef NS_ENUM(NSInteger, RTCRtpMediaType) { - RTCRtpMediaTypeAudio, - RTCRtpMediaTypeVideo, - RTCRtpMediaTypeData, - RTCRtpMediaTypeUnsupported, - RTCRtpMediaTypeAny, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCRtpMediaType)) { + RTC_OBJC_TYPE(RTCRtpMediaTypeAudio), + RTC_OBJC_TYPE(RTCRtpMediaTypeVideo), + RTC_OBJC_TYPE(RTCRtpMediaTypeData), + RTC_OBJC_TYPE(RTCRtpMediaTypeUnsupported), + RTC_OBJC_TYPE(RTCRtpMediaTypeAny), }; @class RTC_OBJC_TYPE(RTCRtpReceiver); @@ -45,9 +45,9 @@ RTC_OBJC_EXPORT * * The process is the same for video receivers. 
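// [Editor's sketch, not part of the patch: a minimal delegate implementation
// under the renamed enum; the placeholder logic is illustrative.]
//
//   - (void)rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver
//       didReceiveFirstPacketForMediaType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType {
//     if (mediaType == RTC_OBJC_TYPE(RTCRtpMediaTypeVideo)) {
//       // First video packet arrived; e.g. hide a "connecting" placeholder here.
//     }
//   }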
*/ - - (void)rtpReceiver : (RTC_OBJC_TYPE(RTCRtpReceiver) *) - rtpReceiver didReceiveFirstPacketForMediaType - : (RTCRtpMediaType)mediaType; + - (void)rtpReceiver + : (RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver didReceiveFirstPacketForMediaType + : (RTC_OBJC_TYPE(RTCRtpMediaType))mediaType; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver.mm b/sdk/objc/api/peerconnection/RTCRtpReceiver.mm index 78de7c0a42..2100918937 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver.mm @@ -29,7 +29,7 @@ void RtpReceiverDelegateAdapter::OnFirstPacketReceived( webrtc::MediaType media_type) { - RTCRtpMediaType packet_media_type = + RTC_OBJC_TYPE(RTCRtpMediaType) packet_media_type = [RTC_OBJC_TYPE(RTCRtpReceiver) mediaTypeForNativeMediaType:media_type]; RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = receiver_; [receiver.delegate rtpReceiver:receiver @@ -139,48 +139,48 @@ - (void)setFrameDecryptor: return self; } -+ (RTCRtpMediaType)mediaTypeForNativeMediaType: ++ (RTC_OBJC_TYPE(RTCRtpMediaType))mediaTypeForNativeMediaType: (webrtc::MediaType)nativeMediaType { switch (nativeMediaType) { case webrtc::MediaType::AUDIO: - return RTCRtpMediaTypeAudio; + return RTC_OBJC_TYPE(RTCRtpMediaTypeAudio); case webrtc::MediaType::VIDEO: - return RTCRtpMediaTypeVideo; + return RTC_OBJC_TYPE(RTCRtpMediaTypeVideo); case webrtc::MediaType::DATA: - return RTCRtpMediaTypeData; + return RTC_OBJC_TYPE(RTCRtpMediaTypeData); case webrtc::MediaType::UNSUPPORTED: - return RTCRtpMediaTypeUnsupported; + return RTC_OBJC_TYPE(RTCRtpMediaTypeUnsupported); case webrtc::MediaType::ANY: - return RTCRtpMediaTypeAny; + return RTC_OBJC_TYPE(RTCRtpMediaTypeAny); } } -+ (webrtc::MediaType)nativeMediaTypeForMediaType:(RTCRtpMediaType)mediaType { ++ (webrtc::MediaType)nativeMediaTypeForMediaType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType { switch (mediaType) { - case RTCRtpMediaTypeAudio: + case RTC_OBJC_TYPE(RTCRtpMediaTypeAudio): return webrtc::MediaType::AUDIO; - case RTCRtpMediaTypeVideo: + case RTC_OBJC_TYPE(RTCRtpMediaTypeVideo): return webrtc::MediaType::VIDEO; - case RTCRtpMediaTypeData: + case RTC_OBJC_TYPE(RTCRtpMediaTypeData): return webrtc::MediaType::DATA; - case RTCRtpMediaTypeUnsupported: + case RTC_OBJC_TYPE(RTCRtpMediaTypeUnsupported): return webrtc::MediaType::UNSUPPORTED; - case RTCRtpMediaTypeAny: + case RTC_OBJC_TYPE(RTCRtpMediaTypeAny): return webrtc::MediaType::ANY; } } -+ (NSString *)stringForMediaType:(RTCRtpMediaType)mediaType { ++ (NSString *)stringForMediaType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType { switch (mediaType) { - case RTCRtpMediaTypeAudio: + case RTC_OBJC_TYPE(RTCRtpMediaTypeAudio): return @"AUDIO"; - case RTCRtpMediaTypeVideo: + case RTC_OBJC_TYPE(RTCRtpMediaTypeVideo): return @"VIDEO"; - case RTCRtpMediaTypeData: + case RTC_OBJC_TYPE(RTCRtpMediaTypeData): return @"DATA"; - case RTCRtpMediaTypeUnsupported: + case RTC_OBJC_TYPE(RTCRtpMediaTypeUnsupported): return @"UNSUPPORTED"; - case RTCRtpMediaTypeAny: + case RTC_OBJC_TYPE(RTCRtpMediaTypeAny): return @"ANY"; } } diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h index 10f2fa8052..5b8681906a 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h @@ -38,9 +38,9 @@ NS_ASSUME_NONNULL_BEGIN nativeRtpTransceiver NS_DESIGNATED_INITIALIZER; + (webrtc::RtpTransceiverDirection)nativeRtpTransceiverDirectionFromDirection: - 
(RTCRtpTransceiverDirection)direction; + (RTC_OBJC_TYPE(RTCRtpTransceiverDirection))direction; -+ (RTCRtpTransceiverDirection)rtpTransceiverDirectionFromNativeDirection: ++ (RTC_OBJC_TYPE(RTCRtpTransceiverDirection))rtpTransceiverDirectionFromNativeDirection: (webrtc::RtpTransceiverDirection)nativeDirection; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h index 0e543a5bab..409ba7f1c2 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h @@ -14,17 +14,19 @@ #import "RTCRtpSender.h" #import "sdk/objc/base/RTCMacros.h" +@class RTC_OBJC_TYPE(RTCRtpCodecCapability); + NS_ASSUME_NONNULL_BEGIN -extern NSString *const kRTCRtpTransceiverErrorDomain; +extern NSString *const RTC_CONSTANT_TYPE(RTCRtpTransceiverErrorDomain); /** https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverdirection */ -typedef NS_ENUM(NSInteger, RTCRtpTransceiverDirection) { - RTCRtpTransceiverDirectionSendRecv, - RTCRtpTransceiverDirectionSendOnly, - RTCRtpTransceiverDirectionRecvOnly, - RTCRtpTransceiverDirectionInactive, - RTCRtpTransceiverDirectionStopped +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCRtpTransceiverDirection)) { + RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendRecv), + RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendOnly), + RTC_OBJC_TYPE(RTCRtpTransceiverDirectionRecvOnly), + RTC_OBJC_TYPE(RTCRtpTransceiverDirectionInactive), + RTC_OBJC_TYPE(RTCRtpTransceiverDirectionStopped) }; /** Structure for initializing an RTCRtpTransceiver in a call to @@ -35,7 +37,7 @@ RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCRtpTransceiverInit) : NSObject /** Direction of the RTCRtpTransceiver. See RTCRtpTransceiver.direction. */ -@property(nonatomic) RTCRtpTransceiverDirection direction; +@property(nonatomic) RTC_OBJC_TYPE(RTCRtpTransceiverDirection) direction; /** The added RTCRtpTransceiver will be added to these streams. */ @property(nonatomic) NSArray *streamIds; @@ -70,7 +72,7 @@ RTC_OBJC_EXPORT /** Media type of the transceiver. The sender and receiver will also have * this type. */ - @property(nonatomic, readonly) RTCRtpMediaType mediaType; + @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpMediaType) mediaType; /** The mid attribute is the mid negotiated and present in the local and * remote descriptions. Before negotiation is complete, the mid value may be @@ -105,7 +107,9 @@ RTC_OBJC_EXPORT * transceiver, which will be used in calls to createOffer and createAnswer. * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction */ -@property(nonatomic, readonly) RTCRtpTransceiverDirection direction; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpTransceiverDirection) direction; + +@property(nonatomic, copy) NSArray *codecPreferences; /** It will contain all the RTP header extensions that are supported. * The direction attribute for all extensions that are mandatory to use MUST be @@ -127,7 +131,7 @@ RTC_OBJC_EXPORT * present and this method returns NO. * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-currentdirection */ -- (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut; +- (BOOL)currentDirection:(RTC_OBJC_TYPE(RTCRtpTransceiverDirection) *)currentDirectionOut; /** The stop method irreversibly stops the RTCRtpTransceiver. The sender of * this transceiver will no longer send, the receiver will no longer receive. @@ -162,8 +166,7 @@ RTC_OBJC_EXPORT * descriptions as sendrecv, sendonly, recvonly, or inactive. 
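// [Editor's sketch, not part of the patch: calling the renamed API; the
// `transceiver` variable is illustrative.]
//
//   NSError *error = nil;
//   [transceiver setDirection:RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendOnly)
//                       error:&error];
//   if (error) {
//     // error.domain == RTC_CONSTANT_TYPE(RTCRtpTransceiverErrorDomain)
//   }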
* https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction */ -- (void)setDirection:(RTCRtpTransceiverDirection)direction - error:(NSError **)error; +- (void)setDirection:(RTC_OBJC_TYPE(RTCRtpTransceiverDirection))direction error:(NSError **)error; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm index faa6b64dd4..c000e093d7 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm @@ -16,12 +16,14 @@ #import "RTCRtpParameters+Private.h" #import "RTCRtpReceiver+Private.h" #import "RTCRtpSender+Private.h" +#import "RTCRtpCodecCapability.h" +#import "RTCRtpCodecCapability+Private.h" #import "base/RTCLogging.h" #import "helpers/NSString+StdString.h" #include "api/rtp_parameters.h" -NSString *const kRTCRtpTransceiverErrorDomain = @"org.webrtc.RTCRtpTranceiver"; +NSString *const RTC_CONSTANT_TYPE(RTCRtpTransceiverErrorDomain) = @"org.webrtc.RTCRtpTranceiver"; @implementation RTC_OBJC_TYPE (RTCRtpTransceiverInit) @@ -32,7 +34,7 @@ @implementation RTC_OBJC_TYPE (RTCRtpTransceiverInit) - (instancetype)init { self = [super init]; if (self) { - _direction = RTCRtpTransceiverDirectionSendRecv; + _direction = RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendRecv); } return self; } @@ -58,7 +60,7 @@ @implementation RTC_OBJC_TYPE (RTCRtpTransceiver) { webrtc::scoped_refptr _nativeRtpTransceiver; } -- (RTCRtpMediaType)mediaType { +- (RTC_OBJC_TYPE(RTCRtpMediaType))mediaType { return [RTC_OBJC_TYPE(RTCRtpReceiver) mediaTypeForNativeMediaType:_nativeRtpTransceiver->media_type()]; } @@ -71,6 +73,20 @@ - (NSString *)mid { } } +- (NSArray *)codecPreferences { + + NSMutableArray *result = [NSMutableArray array]; + + std::vector capabilities = _nativeRtpTransceiver->codec_preferences(); + + for (auto & element : capabilities) { + RTC_OBJC_TYPE(RTCRtpCodecCapability) *object = [[RTC_OBJC_TYPE(RTCRtpCodecCapability) alloc] initWithNativeRtpCodecCapability: element]; + [result addObject: object]; + } + + return result; +} + @synthesize sender = _sender; @synthesize receiver = _receiver; @@ -78,7 +94,7 @@ - (BOOL)isStopped { return _nativeRtpTransceiver->stopped(); } -- (RTCRtpTransceiverDirection)direction { +- (RTC_OBJC_TYPE(RTCRtpTransceiverDirection))direction { return [RTC_OBJC_TYPE(RTCRtpTransceiver) rtpTransceiverDirectionFromNativeDirection:_nativeRtpTransceiver ->direction()]; @@ -116,8 +132,7 @@ - (RTCRtpTransceiverDirection)direction { return headerExtensions; } -- (void)setDirection:(RTCRtpTransceiverDirection)direction - error:(NSError **)error { +- (void)setDirection:(RTC_OBJC_TYPE(RTCRtpTransceiverDirection))direction error:(NSError **)error { webrtc::RTCError nativeError = _nativeRtpTransceiver->SetDirectionWithError( [RTC_OBJC_TYPE(RTCRtpTransceiver) nativeRtpTransceiverDirectionFromDirection:direction]); @@ -128,13 +143,13 @@ - (void)setDirection:(RTCRtpTransceiverDirection)direction [NSString stringWithCString:nativeError.message() encoding:NSUTF8StringEncoding] }; - *error = [NSError errorWithDomain:kRTCRtpTransceiverErrorDomain + *error = [NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCRtpTransceiverErrorDomain) code:static_cast(nativeError.type()) userInfo:userInfo]; } } -- (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut { +- (BOOL)currentDirection:(RTC_OBJC_TYPE(RTCRtpTransceiverDirection) *)currentDirectionOut { if (_nativeRtpTransceiver->current_direction()) { *currentDirectionOut = [RTC_OBJC_TYPE(RTCRtpTransceiver) 
rtpTransceiverDirectionFromNativeDirection:*_nativeRtpTransceiver @@ -260,34 +275,34 @@ - (instancetype)initWithFactory: } + (webrtc::RtpTransceiverDirection)nativeRtpTransceiverDirectionFromDirection: - (RTCRtpTransceiverDirection)direction { + (RTC_OBJC_TYPE(RTCRtpTransceiverDirection))direction { switch (direction) { - case RTCRtpTransceiverDirectionSendRecv: + case RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendRecv): return webrtc::RtpTransceiverDirection::kSendRecv; - case RTCRtpTransceiverDirectionSendOnly: + case RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendOnly): return webrtc::RtpTransceiverDirection::kSendOnly; - case RTCRtpTransceiverDirectionRecvOnly: + case RTC_OBJC_TYPE(RTCRtpTransceiverDirectionRecvOnly): return webrtc::RtpTransceiverDirection::kRecvOnly; - case RTCRtpTransceiverDirectionInactive: + case RTC_OBJC_TYPE(RTCRtpTransceiverDirectionInactive): return webrtc::RtpTransceiverDirection::kInactive; - case RTCRtpTransceiverDirectionStopped: + case RTC_OBJC_TYPE(RTCRtpTransceiverDirectionStopped): return webrtc::RtpTransceiverDirection::kStopped; } } -+ (RTCRtpTransceiverDirection)rtpTransceiverDirectionFromNativeDirection: - (webrtc::RtpTransceiverDirection)nativeDirection { ++ (RTC_OBJC_TYPE(RTCRtpTransceiverDirection))rtpTransceiverDirectionFromNativeDirection: + (webrtc::RtpTransceiverDirection)nativeDirection { switch (nativeDirection) { case webrtc::RtpTransceiverDirection::kSendRecv: - return RTCRtpTransceiverDirectionSendRecv; + return RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendRecv); case webrtc::RtpTransceiverDirection::kSendOnly: - return RTCRtpTransceiverDirectionSendOnly; + return RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendOnly); case webrtc::RtpTransceiverDirection::kRecvOnly: - return RTCRtpTransceiverDirectionRecvOnly; + return RTC_OBJC_TYPE(RTCRtpTransceiverDirectionRecvOnly); case webrtc::RtpTransceiverDirection::kInactive: - return RTCRtpTransceiverDirectionInactive; + return RTC_OBJC_TYPE(RTCRtpTransceiverDirectionInactive); case webrtc::RtpTransceiverDirection::kStopped: - return RTCRtpTransceiverDirectionStopped; + return RTC_OBJC_TYPE(RTCRtpTransceiverDirectionStopped); } } diff --git a/sdk/objc/api/peerconnection/RTCSSLAdapter.h b/sdk/objc/api/peerconnection/RTCSSLAdapter.h index 1c8d920745..35b7577cf6 100644 --- a/sdk/objc/api/peerconnection/RTCSSLAdapter.h +++ b/sdk/objc/api/peerconnection/RTCSSLAdapter.h @@ -16,5 +16,5 @@ * Initialize and clean up the SSL library. Failure is fatal. These call the * corresponding functions in webrtc/rtc_base/ssladapter.h. 
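// [Editor's sketch, not part of the patch: typical call sites; with the
// default empty RTC_OBJC_TYPE_PREFIX these expand to the old symbol names.]
//
//   NSAssert(RTC_OBJC_TYPE(RTCInitializeSSL)(), @"SSL init failed");
//   // ... create factories, peer connections, etc. ...
//   RTC_OBJC_TYPE(RTCCleanupSSL)();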
*/ -RTC_EXTERN BOOL RTCInitializeSSL(void); -RTC_EXTERN BOOL RTCCleanupSSL(void); +RTC_EXTERN BOOL RTC_OBJC_TYPE(RTCInitializeSSL)(void); +RTC_EXTERN BOOL RTC_OBJC_TYPE(RTCCleanupSSL)(void); diff --git a/sdk/objc/api/peerconnection/RTCSSLAdapter.mm b/sdk/objc/api/peerconnection/RTCSSLAdapter.mm index 0558a30dc7..0703cb968a 100644 --- a/sdk/objc/api/peerconnection/RTCSSLAdapter.mm +++ b/sdk/objc/api/peerconnection/RTCSSLAdapter.mm @@ -13,13 +13,13 @@ #include "rtc_base/checks.h" #include "rtc_base/ssl_adapter.h" -BOOL RTCInitializeSSL(void) { +BOOL RTC_OBJC_TYPE(RTCInitializeSSL)(void) { BOOL initialized = webrtc::InitializeSSL(); RTC_DCHECK(initialized); return initialized; } -BOOL RTCCleanupSSL(void) { +BOOL RTC_OBJC_TYPE(RTCCleanupSSL)(void) { BOOL cleanedUp = webrtc::CleanupSSL(); RTC_DCHECK(cleanedUp); return cleanedUp; diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h b/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h index 4acc059728..a53074a7fd 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h +++ b/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h @@ -33,9 +33,9 @@ NS_ASSUME_NONNULL_BEGIN - (instancetype)initWithNativeDescription: (const webrtc::SessionDescriptionInterface *)nativeDescription; -+ (std::string)stdStringForType:(RTCSdpType)type; ++ (std::string)stdStringForType:(RTC_OBJC_TYPE(RTCSdpType))type; -+ (RTCSdpType)typeForStdString:(const std::string &)string; ++ (RTC_OBJC_TYPE(RTCSdpType))typeForStdString:(const std::string &)string; @end diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription.h b/sdk/objc/api/peerconnection/RTCSessionDescription.h index 01878d525a..d30cd4c46f 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription.h +++ b/sdk/objc/api/peerconnection/RTCSessionDescription.h @@ -16,11 +16,11 @@ * Represents the session description type. This exposes the same types that are * in C++, which doesn't include the rollback type that is in the W3C spec. */ -typedef NS_ENUM(NSInteger, RTCSdpType) { - RTCSdpTypeOffer, - RTCSdpTypePrAnswer, - RTCSdpTypeAnswer, - RTCSdpTypeRollback, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCSdpType)) { + RTC_OBJC_TYPE(RTCSdpTypeOffer), + RTC_OBJC_TYPE(RTCSdpTypePrAnswer), + RTC_OBJC_TYPE(RTCSdpTypeAnswer), + RTC_OBJC_TYPE(RTCSdpTypeRollback), }; NS_ASSUME_NONNULL_BEGIN @@ -29,7 +29,7 @@ RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCSessionDescription) : NSObject /** The type of session description. */ -@property(nonatomic, readonly) RTCSdpType type; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSdpType) type; /** The SDP string representation of this session description. */ @property(nonatomic, readonly) NSString *sdp; @@ -37,12 +37,11 @@ RTC_OBJC_EXPORT - (instancetype)init NS_UNAVAILABLE; /** Initialize a session description with a type and SDP string. 
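// [Editor's sketch, not part of the patch: constructing a description with
// the renamed enum; `sdpString` is a placeholder for a real SDP blob.]
//
//   RTC_OBJC_TYPE(RTCSessionDescription) *offer =
//       [[RTC_OBJC_TYPE(RTCSessionDescription) alloc]
//           initWithType:RTC_OBJC_TYPE(RTCSdpTypeOffer)
//                    sdp:sdpString];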
*/ -- (instancetype)initWithType:(RTCSdpType)type - sdp:(NSString *)sdp NS_DESIGNATED_INITIALIZER; +- (instancetype)initWithType:(RTC_OBJC_TYPE(RTCSdpType))type sdp:(NSString *)sdp NS_DESIGNATED_INITIALIZER; -+ (NSString *)stringForType:(RTCSdpType)type; ++ (NSString *)stringForType:(RTC_OBJC_TYPE(RTCSdpType))type; -+ (RTCSdpType)typeForString:(NSString *)string; ++ (RTC_OBJC_TYPE(RTCSdpType))typeForString:(NSString *)string; @end diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription.mm b/sdk/objc/api/peerconnection/RTCSessionDescription.mm index a62f4c7f29..461129ba62 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription.mm +++ b/sdk/objc/api/peerconnection/RTCSessionDescription.mm @@ -21,17 +21,17 @@ @implementation RTC_OBJC_TYPE (RTCSessionDescription) @synthesize type = _type; @synthesize sdp = _sdp; -+ (NSString *)stringForType:(RTCSdpType)type { ++ (NSString *)stringForType:(RTC_OBJC_TYPE(RTCSdpType))type { std::string string = [[self class] stdStringForType:type]; return [NSString stringForStdString:string]; } -+ (RTCSdpType)typeForString:(NSString *)string { ++ (RTC_OBJC_TYPE(RTCSdpType))typeForString:(NSString *)string { std::string typeString = string.stdString; return [[self class] typeForStdString:typeString]; } -- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp { +- (instancetype)initWithType:(RTC_OBJC_TYPE(RTCSdpType))type sdp:(NSString *)sdp { self = [super init]; if (self) { _type = type; @@ -70,48 +70,48 @@ - (instancetype)initWithNativeDescription: NSParameterAssert(nativeDescription); std::string sdp; nativeDescription->ToString(&sdp); - RTCSdpType type = [[self class] typeForStdString:nativeDescription->type()]; + RTC_OBJC_TYPE(RTCSdpType) type = [[self class] typeForStdString:nativeDescription->type()]; return [self initWithType:type sdp:[NSString stringForStdString:sdp]]; } -+ (std::string)stdStringForType:(RTCSdpType)type { ++ (std::string)stdStringForType:(RTC_OBJC_TYPE(RTCSdpType))type { switch (type) { - case RTCSdpTypeOffer: + case RTC_OBJC_TYPE(RTCSdpTypeOffer): return webrtc::SessionDescriptionInterface::kOffer; - case RTCSdpTypePrAnswer: + case RTC_OBJC_TYPE(RTCSdpTypePrAnswer): return webrtc::SessionDescriptionInterface::kPrAnswer; - case RTCSdpTypeAnswer: + case RTC_OBJC_TYPE(RTCSdpTypeAnswer): return webrtc::SessionDescriptionInterface::kAnswer; - case RTCSdpTypeRollback: + case RTC_OBJC_TYPE(RTCSdpTypeRollback): return webrtc::SessionDescriptionInterface::kRollback; } } -+ (RTCSdpType)typeForStdString:(const std::string &)string { ++ (RTC_OBJC_TYPE(RTCSdpType))typeForStdString:(const std::string &)string { if (string == webrtc::SessionDescriptionInterface::kOffer) { - return RTCSdpTypeOffer; + return RTC_OBJC_TYPE(RTCSdpTypeOffer); } else if (string == webrtc::SessionDescriptionInterface::kPrAnswer) { - return RTCSdpTypePrAnswer; + return RTC_OBJC_TYPE(RTCSdpTypePrAnswer); } else if (string == webrtc::SessionDescriptionInterface::kAnswer) { - return RTCSdpTypeAnswer; + return RTC_OBJC_TYPE(RTCSdpTypeAnswer); } else if (string == webrtc::SessionDescriptionInterface::kRollback) { - return RTCSdpTypeRollback; + return RTC_OBJC_TYPE(RTCSdpTypeRollback); } else { RTC_DCHECK_NOTREACHED(); - return RTCSdpTypeOffer; + return RTC_OBJC_TYPE(RTCSdpTypeOffer); } } -+ (webrtc::SdpType)nativeTypeForType:(RTCSdpType)type { ++ (webrtc::SdpType)nativeTypeForType:(RTC_OBJC_TYPE(RTCSdpType))type { switch (type) { - case RTCSdpTypeOffer: + case RTC_OBJC_TYPE(RTCSdpTypeOffer): return webrtc::SdpType::kOffer; - case RTCSdpTypePrAnswer: + case 
RTC_OBJC_TYPE(RTCSdpTypePrAnswer): return webrtc::SdpType::kPrAnswer; - case RTCSdpTypeAnswer: + case RTC_OBJC_TYPE(RTCSdpTypeAnswer): return webrtc::SdpType::kAnswer; - case RTCSdpTypeRollback: + case RTC_OBJC_TYPE(RTCSdpTypeRollback): return webrtc::SdpType::kRollback; } } diff --git a/sdk/objc/api/peerconnection/RTCTracing.h b/sdk/objc/api/peerconnection/RTCTracing.h index cde11fe21d..13a5909a83 100644 --- a/sdk/objc/api/peerconnection/RTCTracing.h +++ b/sdk/objc/api/peerconnection/RTCTracing.h @@ -12,10 +12,10 @@ #import "sdk/objc/base/RTCMacros.h" -RTC_EXTERN void RTCSetupInternalTracer(void); +RTC_EXTERN void RTC_OBJC_TYPE(RTCSetupInternalTracer)(void); /** Starts capture to specified file. Must be a valid writable path. * Returns YES if capture starts. */ -RTC_EXTERN BOOL RTCStartInternalCapture(NSString* filePath); -RTC_EXTERN void RTCStopInternalCapture(void); -RTC_EXTERN void RTCShutdownInternalTracer(void); +RTC_EXTERN BOOL RTC_OBJC_TYPE(RTCStartInternalCapture)(NSString* filePath); +RTC_EXTERN void RTC_OBJC_TYPE(RTCStopInternalCapture)(void); +RTC_EXTERN void RTC_OBJC_TYPE(RTCShutdownInternalTracer)(void); diff --git a/sdk/objc/api/peerconnection/RTCTracing.mm b/sdk/objc/api/peerconnection/RTCTracing.mm index dca21a9932..1d17f01803 100644 --- a/sdk/objc/api/peerconnection/RTCTracing.mm +++ b/sdk/objc/api/peerconnection/RTCTracing.mm @@ -12,18 +12,18 @@ #include "rtc_base/event_tracer.h" -void RTCSetupInternalTracer(void) { +void RTC_OBJC_TYPE(RTCSetupInternalTracer)(void) { webrtc::tracing::SetupInternalTracer(); } -BOOL RTCStartInternalCapture(NSString *filePath) { +BOOL RTC_OBJC_TYPE(RTCStartInternalCapture)(NSString *filePath) { return webrtc::tracing::StartInternalCapture(filePath.UTF8String); } -void RTCStopInternalCapture(void) { +void RTC_OBJC_TYPE(RTCStopInternalCapture)(void) { webrtc::tracing::StopInternalCapture(); } -void RTCShutdownInternalTracer(void) { +void RTC_OBJC_TYPE(RTCShutdownInternalTracer)(void) { webrtc::tracing::ShutdownInternalTracer(); } diff --git a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm index 8a977a3447..c69bf10d47 100644 --- a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm +++ b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm @@ -14,6 +14,11 @@ #include "api/video_codecs/scalability_mode_helper.h" #import "helpers/NSString+StdString.h" +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" +#include "modules/video_coding/svc/scalability_mode_util.h" +#include "modules/video_coding/svc/create_scalability_structure.h" + @implementation RTC_OBJC_TYPE (RTCVideoCodecInfo) (Private) diff --git a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm index 44df191717..18e36f290f 100644 --- a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm +++ b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm @@ -30,7 +30,7 @@ - (instancetype)initWithNativeVideoCodec self.minBitrate = videoCodec->minBitrate; self.maxFramerate = videoCodec->maxFramerate; self.qpMax = videoCodec->qpMax; - self.mode = (RTCVideoCodecMode)videoCodec->mode; + self.mode = (RTC_OBJC_TYPE(RTCVideoCodecMode))videoCodec->mode; } } return self; diff --git a/sdk/objc/api/peerconnection/RTCVideoSource+Private.h b/sdk/objc/api/peerconnection/RTCVideoSource+Private.h index 0898abd67c..45856ae24e 100644 --- a/sdk/objc/api/peerconnection/RTCVideoSource+Private.h +++ 
b/sdk/objc/api/peerconnection/RTCVideoSource+Private.h @@ -38,7 +38,7 @@ NS_ASSUME_NONNULL_BEGIN initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource: (webrtc::scoped_refptr)nativeMediaSource - type:(RTCMediaSourceType)type NS_UNAVAILABLE; + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type NS_UNAVAILABLE; - (instancetype)initWithFactory: (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.mm b/sdk/objc/api/peerconnection/RTCVideoSource.mm index 0486d00bff..9c57d2664d 100644 --- a/sdk/objc/api/peerconnection/RTCVideoSource.mm +++ b/sdk/objc/api/peerconnection/RTCVideoSource.mm @@ -36,19 +36,17 @@ @implementation RTC_OBJC_TYPE (RTCVideoSource) { RTC_DCHECK(factory); RTC_DCHECK(nativeVideoSource); self = [super initWithFactory:factory - nativeMediaSource:nativeVideoSource - type:RTCMediaSourceTypeVideo]; + nativeMediaSource:nativeVideoSource + type:RTC_OBJC_TYPE(RTCMediaSourceTypeVideo)]; if (self) { _nativeVideoSource = nativeVideoSource; } return self; } -- (instancetype) - initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - nativeMediaSource: - (webrtc::scoped_refptr)nativeMediaSource - type:(RTCMediaSourceType)type { +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + nativeMediaSource:(webrtc::scoped_refptr)nativeMediaSource + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type { RTC_DCHECK_NOTREACHED(); return nil; } diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.h b/sdk/objc/api/peerconnection/RTCVideoTrack.h index 47e98d8fd6..0c78cfd8eb 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.h +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.h @@ -25,6 +25,9 @@ RTC_OBJC_EXPORT /** The video source for this video track. */ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCVideoSource) *source; +/** The receive state, if this is a remote video track. */ +@property(nonatomic, assign) BOOL shouldReceive; + - (instancetype)init NS_UNAVAILABLE; /** Register a renderer that will render all frames received on this track. 
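// [Editor's sketch, not part of the patch: pausing reception on a remote
// track via the new property; `remoteTrack` and `myRenderer` are illustrative.]
//
//   [remoteTrack addRenderer:myRenderer];
//   remoteTrack.shouldReceive = NO;   // stop requesting/decoding media
//   remoteTrack.shouldReceive = YES;  // resume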
*/ diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm index 5411a22dfd..2f04295d14 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm @@ -32,25 +32,21 @@ - (instancetype)initWithFactory: NSParameterAssert(trackId.length); std::string nativeId = [NSString stdStringForString:trackId]; webrtc::scoped_refptr track = - factory.nativeFactory->CreateVideoTrack(source.nativeVideoSource, - nativeId); - self = [self initWithFactory:factory - nativeTrack:track - type:RTCMediaStreamTrackTypeVideo]; + factory.nativeFactory->CreateVideoTrack(source.nativeVideoSource, nativeId); + self = [self initWithFactory:factory nativeTrack:track type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo)]; if (self) { _source = source; } return self; } -- (instancetype) - initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - nativeTrack:(webrtc::scoped_refptr) - nativeMediaTrack - type:(RTCMediaStreamTrackType)type { +- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + nativeTrack: + (webrtc::scoped_refptr)nativeMediaTrack + type:(RTC_OBJC_TYPE(RTCMediaStreamTrackType))type { NSParameterAssert(factory); NSParameterAssert(nativeMediaTrack); - NSParameterAssert(type == RTCMediaStreamTrackTypeVideo); + NSParameterAssert(type == RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo)); self = [super initWithFactory:factory nativeTrack:nativeMediaTrack type:type]; if (self) { _adapters = [NSMutableArray array]; @@ -60,7 +56,7 @@ - (instancetype)initWithFactory: } - (void)dealloc { - for (RTCVideoRendererAdapter *adapter in _adapters) { + for (RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter in _adapters) { self.nativeVideoTrack->RemoveSink(adapter.nativeVideoRenderer); } } @@ -78,6 +74,14 @@ - (void)dealloc { return _source; } +- (BOOL)shouldReceive { + return self.nativeVideoTrack->should_receive(); +} + +- (void)setShouldReceive:(BOOL)shouldReceive { + self.nativeVideoTrack->set_should_receive(shouldReceive); +} + - (void)addRenderer:(id)renderer { if (!_workerThread->IsCurrent()) { _workerThread->BlockingCall( @@ -86,18 +90,17 @@ - (void)addRenderer:(id)renderer { } // Make sure we don't have this renderer yet. - for (RTCVideoRendererAdapter *adapter in _adapters) { + for (RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter in _adapters) { if (adapter.videoRenderer == renderer) { RTC_LOG(LS_INFO) << "|renderer| is already attached to this track"; return; } } // Create a wrapper that provides a native pointer for us. 
- RTCVideoRendererAdapter *adapter = - [[RTCVideoRendererAdapter alloc] initWithNativeRenderer:renderer]; + RTC_OBJC_TYPE(RTCVideoRendererAdapter) *adapter = + [[RTC_OBJC_TYPE(RTCVideoRendererAdapter) alloc] initWithNativeRenderer:renderer]; [_adapters addObject:adapter]; - self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer, - webrtc::VideoSinkWants()); + self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer, webrtc::VideoSinkWants()); } - (void)removeRenderer:(id)renderer { @@ -108,7 +111,7 @@ - (void)removeRenderer:(id)renderer { } __block NSUInteger indexToRemove = NSNotFound; [_adapters enumerateObjectsUsingBlock:^( - RTCVideoRendererAdapter *adapter, NSUInteger idx, BOOL *stop) { + RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter, NSUInteger idx, BOOL * stop) { if (adapter.videoRenderer == renderer) { indexToRemove = idx; *stop = YES; @@ -119,8 +122,7 @@ - (void)removeRenderer:(id)renderer { "been previously added"; return; } - RTCVideoRendererAdapter *adapterToRemove = - [_adapters objectAtIndex:indexToRemove]; + RTC_OBJC_TYPE(RTCVideoRendererAdapter) *adapterToRemove = [_adapters objectAtIndex:indexToRemove]; self.nativeVideoTrack->RemoveSink(adapterToRemove.nativeVideoRenderer); [_adapters removeObjectAtIndex:indexToRemove]; } diff --git a/sdk/objc/api/video_codec/RTCVideoCodecConstants.h b/sdk/objc/api/video_codec/RTCVideoCodecConstants.h index fd64ea5d81..090a9a1316 100644 --- a/sdk/objc/api/video_codec/RTCVideoCodecConstants.h +++ b/sdk/objc/api/video_codec/RTCVideoCodecConstants.h @@ -12,6 +12,6 @@ #import "sdk/objc/base/RTCMacros.h" -RTC_EXTERN NSString* const kRTCVideoCodecVp8Name; -RTC_EXTERN NSString* const kRTCVideoCodecVp9Name; -RTC_EXTERN NSString* const kRTCVideoCodecAv1Name; +RTC_EXTERN NSString* const RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name); +RTC_EXTERN NSString* const RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name); +RTC_EXTERN NSString* const RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name); diff --git a/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm b/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm index 31bec3e178..605606b564 100644 --- a/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm +++ b/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm @@ -13,6 +13,6 @@ #include "media/base/media_constants.h" -NSString *const kRTCVideoCodecVp8Name = @(webrtc::kVp8CodecName); -NSString *const kRTCVideoCodecVp9Name = @(webrtc::kVp9CodecName); -NSString *const kRTCVideoCodecAv1Name = @(webrtc::kAv1CodecName); +NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name) = @(webrtc::kVp8CodecName); +NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name) = @(webrtc::kVp9CodecName); +NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name) = @(webrtc::kAv1CodecName); diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h index 9fd07ffd93..4d591b1bbd 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h +++ b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h @@ -29,4 +29,6 @@ RTC_OBJC_EXPORT + (bool)isSupported; ++ (nonnull NSArray *)scalabilityModes; + @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm index cca4da4a8b..de2acb1593 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm @@ -19,7 +19,10 @@ #include "api/video_codecs/scalability_mode.h" #include "modules/video_coding/codecs/av1/av1_svc_config.h" +#include "absl/container/inlined_vector.h" +#include 
"api/video_codecs/sdp_video_format.h" #include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" +#include "modules/video_coding/svc/create_scalability_structure.h" @interface RTC_OBJC_TYPE (RTCVideoEncoderAV1Builder) : RTC_OBJC_TYPE(RTCNativeVideoEncoder) @@ -57,4 +60,14 @@ + (bool)isSupported { return true; } + + (NSArray *)scalabilityModes { + NSMutableArray *scalabilityModes = [NSMutableArray array]; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + [scalabilityModes addObject:[NSString stringForAbslStringView:webrtc::ScalabilityModeToString(scalability_mode)]]; + } + } + return scalabilityModes; + } + @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h new file mode 100644 index 0000000000..4f1b55c713 --- /dev/null +++ b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h @@ -0,0 +1,13 @@ +#import "RTCMacros.h" +#import "RTCVideoEncoder.h" +#import "RTCVideoEncoderFactory.h" +#import "RTCVideoCodecInfo.h" + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCVideoEncoderSimulcast) : NSObject + ++ (id)simulcastEncoderWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo; + +@end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm new file mode 100644 index 0000000000..0efc96f6d9 --- /dev/null +++ b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm @@ -0,0 +1,65 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoEncoderSimulcast.h" +#import "RTCNativeVideoEncoder.h" +#import "RTCNativeVideoEncoderBuilder+Native.h" +#import "api/peerconnection/RTCVideoCodecInfo+Private.h" +#include "api/transport/field_trial_based_config.h" + +#include "native/api/video_encoder_factory.h" +#include "media/engine/simulcast_encoder_adapter.h" + +@interface RTC_OBJC_TYPE (RTCVideoEncoderSimulcastBuilder) + : RTC_OBJC_TYPE(RTCNativeVideoEncoder) { + + id _primary; + id _fallback; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *_videoCodecInfo; +} + +- (id)initWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo; + +@end + +@implementation RTC_OBJC_TYPE (RTCVideoEncoderSimulcastBuilder) + +- (std::unique_ptr)build:(const webrtc::Environment&)env { + auto nativePrimary = webrtc::ObjCToNativeVideoEncoderFactory(_primary); + auto nativeFallback = webrtc::ObjCToNativeVideoEncoderFactory(_fallback); + auto nativeFormat = [_videoCodecInfo nativeSdpVideoFormat]; + return std::make_unique( + env, + nativePrimary.release(), + nativeFallback.release(), + std::move(nativeFormat)); +} + +- (id)initWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo { + self = [super init]; + if (self) { + self->_primary = primary; + self->_fallback = fallback; + self->_videoCodecInfo = videoCodecInfo; + } + return self; +} + +@end + +@implementation RTC_OBJC_TYPE (RTCVideoEncoderSimulcast) + ++ (id)simulcastEncoderWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo { + return [[RTC_OBJC_TYPE(RTCVideoEncoderSimulcastBuilder) alloc] + initWithPrimary:primary + fallback:fallback + videoCodecInfo:videoCodecInfo]; +} + +@end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h index 6fde07c4e9..ac1252de06 100644 
--- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h @@ -29,4 +29,6 @@ + (bool)isSupported; ++ (nonnull NSArray<NSString *> *)scalabilityModes; + @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm index 68b54f4314..958e4c63f3 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm @@ -17,8 +17,12 @@ #import "helpers/NSString+StdString.h" #import "sdk/objc/base/RTCMacros.h" +#import "helpers/NSString+StdString.h" +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" #include "api/video_codecs/scalability_mode.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/svc/create_scalability_structure.h" @interface RTC_OBJC_TYPE (RTCVideoEncoderVP9Builder) : RTC_OBJC_TYPE(RTCNativeVideoEncoder) <RTC_OBJC_TYPE(RTCNativeVideoEncoderBuilder)> @@ -62,5 +66,13 @@ + (bool)isSupported { return false; #endif } - + + (NSArray<NSString *> *)scalabilityModes { + NSMutableArray<NSString *> *scalabilityModes = [NSMutableArray array]; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + [scalabilityModes addObject:[NSString stringForAbslStringView:webrtc::ScalabilityModeToString(scalability_mode)]]; + } + } + return scalabilityModes; + } @end diff --git a/sdk/objc/base/RTCAudioRenderer.h b/sdk/objc/base/RTCAudioRenderer.h new file mode 100644 index 0000000000..73bad7d39c --- /dev/null +++ b/sdk/objc/base/RTCAudioRenderer.h @@ -0,0 +1,35 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import <AVFoundation/AVFoundation.h> +#import <CoreMedia/CoreMedia.h> +#import <Foundation/Foundation.h> + +#if TARGET_OS_IPHONE +#import <UIKit/UIKit.h> +#endif + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RTCAudioRenderer) <NSObject> + +- (void)renderPCMBuffer: (AVAudioPCMBuffer *)pcmBuffer NS_SWIFT_NAME(render(pcmBuffer:)); + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/base/RTCEncodedImage.h b/sdk/objc/base/RTCEncodedImage.h index d4dfd99f85..d61b529ee4 100644 --- a/sdk/objc/base/RTCEncodedImage.h +++ b/sdk/objc/base/RTCEncodedImage.h @@ -16,17 +16,17 @@ NS_ASSUME_NONNULL_BEGIN /** Represents an encoded frame's type. 
*/ -typedef NS_ENUM(NSUInteger, RTCFrameType) { - RTCFrameTypeEmptyFrame = 0, - RTCFrameTypeAudioFrameSpeech = 1, - RTCFrameTypeAudioFrameCN = 2, - RTCFrameTypeVideoFrameKey = 3, - RTCFrameTypeVideoFrameDelta = 4, +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCFrameType)) { + RTC_OBJC_TYPE(RTCFrameTypeEmptyFrame) = 0, + RTC_OBJC_TYPE(RTCFrameTypeAudioFrameSpeech) = 1, + RTC_OBJC_TYPE(RTCFrameTypeAudioFrameCN) = 2, + RTC_OBJC_TYPE(RTCFrameTypeVideoFrameKey) = 3, + RTC_OBJC_TYPE(RTCFrameTypeVideoFrameDelta) = 4, }; -typedef NS_ENUM(NSUInteger, RTCVideoContentType) { - RTCVideoContentTypeUnspecified, - RTCVideoContentTypeScreenshare, +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCVideoContentType)) { + RTC_OBJC_TYPE(RTCVideoContentTypeUnspecified), + RTC_OBJC_TYPE(RTCVideoContentTypeScreenshare), }; /** Represents an encoded frame. Corresponds to webrtc::EncodedImage. */ @@ -42,10 +42,10 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) uint8_t flags; @property(nonatomic, assign) int64_t encodeStartMs; @property(nonatomic, assign) int64_t encodeFinishMs; -@property(nonatomic, assign) RTCFrameType frameType; -@property(nonatomic, assign) RTCVideoRotation rotation; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCFrameType) frameType; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCVideoRotation) rotation; @property(nonatomic, strong) NSNumber *qp; -@property(nonatomic, assign) RTCVideoContentType contentType; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCVideoContentType) contentType; @end diff --git a/sdk/objc/base/RTCLogging.h b/sdk/objc/base/RTCLogging.h index 26a75fb284..0bf74c740a 100644 --- a/sdk/objc/base/RTCLogging.h +++ b/sdk/objc/base/RTCLogging.h @@ -13,51 +13,47 @@ #import "sdk/objc/base/RTCMacros.h" // Subset of webrtc::LoggingSeverity. -typedef NS_ENUM(NSInteger, RTCLoggingSeverity) { - RTCLoggingSeverityVerbose, - RTCLoggingSeverityInfo, - RTCLoggingSeverityWarning, - RTCLoggingSeverityError, - RTCLoggingSeverityNone, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCLoggingSeverity)) { + RTC_OBJC_TYPE(RTCLoggingSeverityVerbose), + RTC_OBJC_TYPE(RTCLoggingSeverityInfo), + RTC_OBJC_TYPE(RTCLoggingSeverityWarning), + RTC_OBJC_TYPE(RTCLoggingSeverityError), + RTC_OBJC_TYPE(RTCLoggingSeverityNone), }; // Wrapper for C++ RTC_LOG(sev) macros. // Logs the log string to the webrtc logstream for the given severity. -RTC_EXTERN void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string); +RTC_EXTERN void RTC_OBJC_TYPE(RTCLogEx)(RTC_OBJC_TYPE(RTCLoggingSeverity) severity, NSString* log_string); // Wrapper for webrtc::LogMessage::LogToDebug. // Sets the minimum severity to be logged to console. -RTC_EXTERN void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity); +RTC_EXTERN void RTC_OBJC_TYPE(RTCSetMinDebugLogLevel)(RTC_OBJC_TYPE(RTCLoggingSeverity) severity); // Returns the filename with the path prefix removed. -RTC_EXTERN NSString* RTCFileName(const char* filePath); +RTC_EXTERN NSString* RTC_OBJC_TYPE(RTCFileName)(const char* filePath); // Some convenience macros. -#define RTCLogString(format, ...) \ - [NSString stringWithFormat:@"(%@:%d %s): " format, \ - RTCFileName(__FILE__), \ - __LINE__, \ - __FUNCTION__, \ +#define RTCLogString(format, ...) \ + [NSString stringWithFormat:@"(%@:%d %s): " format, \ + RTC_OBJC_TYPE(RTCFileName)(__FILE__), \ + __LINE__, \ + __FUNCTION__, \ ##__VA_ARGS__] #define RTCLogFormat(severity, format, ...) 
\ do { \ NSString* log_string = RTCLogString(format, ##__VA_ARGS__); \ - RTCLogEx(severity, log_string); \ + RTC_OBJC_TYPE(RTCLogEx)(severity, log_string); \ } while (false) -#define RTCLogVerbose(format, ...) \ - RTCLogFormat(RTCLoggingSeverityVerbose, format, ##__VA_ARGS__) +#define RTCLogVerbose(format, ...) RTCLogFormat(RTC_OBJC_TYPE(RTCLoggingSeverityVerbose), format, ##__VA_ARGS__) -#define RTCLogInfo(format, ...) \ - RTCLogFormat(RTCLoggingSeverityInfo, format, ##__VA_ARGS__) +#define RTCLogInfo(format, ...) RTCLogFormat(RTC_OBJC_TYPE(RTCLoggingSeverityInfo), format, ##__VA_ARGS__) -#define RTCLogWarning(format, ...) \ - RTCLogFormat(RTCLoggingSeverityWarning, format, ##__VA_ARGS__) +#define RTCLogWarning(format, ...) RTCLogFormat(RTC_OBJC_TYPE(RTCLoggingSeverityWarning), format, ##__VA_ARGS__) -#define RTCLogError(format, ...) \ - RTCLogFormat(RTCLoggingSeverityError, format, ##__VA_ARGS__) +#define RTCLogError(format, ...) RTCLogFormat(RTC_OBJC_TYPE(RTCLoggingSeverityError), format, ##__VA_ARGS__) #if !defined(NDEBUG) #define RTCLogDebug(format, ...) RTCLogInfo(format, ##__VA_ARGS__) diff --git a/sdk/objc/base/RTCLogging.mm b/sdk/objc/base/RTCLogging.mm index 41f7f82b96..f451f01822 100644 --- a/sdk/objc/base/RTCLogging.mm +++ b/sdk/objc/base/RTCLogging.mm @@ -12,34 +12,33 @@ #include "rtc_base/logging.h" -webrtc::LoggingSeverity RTCGetNativeLoggingSeverity( - RTCLoggingSeverity severity) { +webrtc::LoggingSeverity RTC_OBJC_TYPE(RTCGetNativeLoggingSeverity)(RTC_OBJC_TYPE(RTCLoggingSeverity) severity) { switch (severity) { - case RTCLoggingSeverityVerbose: + case RTC_OBJC_TYPE(RTCLoggingSeverityVerbose): return webrtc::LS_VERBOSE; - case RTCLoggingSeverityInfo: + case RTC_OBJC_TYPE(RTCLoggingSeverityInfo): return webrtc::LS_INFO; - case RTCLoggingSeverityWarning: + case RTC_OBJC_TYPE(RTCLoggingSeverityWarning): return webrtc::LS_WARNING; - case RTCLoggingSeverityError: + case RTC_OBJC_TYPE(RTCLoggingSeverityError): return webrtc::LS_ERROR; - case RTCLoggingSeverityNone: + case RTC_OBJC_TYPE(RTCLoggingSeverityNone): return webrtc::LS_NONE; } } -void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string) { +void RTC_OBJC_TYPE(RTCLogEx)(RTC_OBJC_TYPE(RTCLoggingSeverity) severity, NSString* log_string) { if (log_string.length) { const char* utf8_string = log_string.UTF8String; - RTC_LOG_V(RTCGetNativeLoggingSeverity(severity)) << utf8_string; + RTC_LOG_V(RTC_OBJC_TYPE(RTCGetNativeLoggingSeverity)(severity)) << utf8_string; } } -void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity) { - webrtc::LogMessage::LogToDebug(RTCGetNativeLoggingSeverity(severity)); +void RTC_OBJC_TYPE(RTCSetMinDebugLogLevel)(RTC_OBJC_TYPE(RTCLoggingSeverity) severity) { + webrtc::LogMessage::LogToDebug(RTC_OBJC_TYPE(RTCGetNativeLoggingSeverity)(severity)); } -NSString* RTCFileName(const char* file_path) { +NSString* RTC_OBJC_TYPE(RTCFileName)(const char* file_path) { NSString* ns_file_path = [[NSString alloc] initWithBytesNoCopy:const_cast<char*>(file_path) length:strlen(file_path) diff --git a/sdk/objc/base/RTCMacros.h b/sdk/objc/base/RTCMacros.h index cb943b4bee..09418b86ac 100644 --- a/sdk/objc/base/RTCMacros.h +++ b/sdk/objc/base/RTCMacros.h @@ -44,6 +44,10 @@ #define RTC_OBJC_TYPE_PREFIX #endif +#ifndef RTC_CONSTANT_TYPE_PREFIX +#define RTC_CONSTANT_TYPE_PREFIX k +#endif + // RTC_OBJC_TYPE // // Macro used internally to declare API types. 
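// [Editor's note, not part of the patch: with the default prefixes above,
// the two macros expand as follows.]
//
//   RTC_OBJC_TYPE(RTCVideoFrame)            -> RTCVideoFrame
//   RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name) -> kRTCVideoCodecVp8Name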
Declaring an API type without @@ -52,6 +56,8 @@ #define RTC_OBJC_TYPE(type_name) \ RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name) +#define RTC_CONSTANT_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_CONSTANT_TYPE_PREFIX, type_name) + #if defined(__cplusplus) #define RTC_EXTERN extern "C" RTC_OBJC_EXPORT #else diff --git a/sdk/objc/base/RTCVideoCodecInfo.h b/sdk/objc/base/RTCVideoCodecInfo.h index 0a9d4bf215..ad8e5acdfd 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.h +++ b/sdk/objc/base/RTCVideoCodecInfo.h @@ -28,8 +28,8 @@ RTC_OBJC_EXPORT parameters; - (instancetype)initWithName:(NSString *)name - parameters:(NSDictionary<NSString *, NSString *> *)parameters - scalabilityModes:(NSArray<NSString *> *)scalabilityModes + parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters + scalabilityModes:(nullable NSArray<NSString *> *)scalabilityModes NS_DESIGNATED_INITIALIZER; - (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; diff --git a/sdk/objc/base/RTCVideoCodecInfo.m b/sdk/objc/base/RTCVideoCodecInfo.m index f3c0673591..370845f24b 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.m +++ b/sdk/objc/base/RTCVideoCodecInfo.m @@ -28,13 +28,13 @@ - (instancetype)initWithName:(NSString *)name } - (instancetype)initWithName:(NSString *)name - parameters:(NSDictionary<NSString *, NSString *> *)parameters - scalabilityModes:(NSArray<NSString *> *)scalabilityModes { + parameters:(nullable NSDictionary<NSString *, NSString *> *)parameters + scalabilityModes:(nullable NSArray<NSString *> *)scalabilityModes { self = [super init]; if (self) { _name = name; - _parameters = parameters; - _scalabilityModes = scalabilityModes; + _parameters = (parameters ? parameters : @{}); + _scalabilityModes = (scalabilityModes ? scalabilityModes : @[]); } return self; diff --git a/sdk/objc/base/RTCVideoEncoderSettings.h b/sdk/objc/base/RTCVideoEncoderSettings.h index d3552e78db..3482b9e31f 100644 --- a/sdk/objc/base/RTCVideoEncoderSettings.h +++ b/sdk/objc/base/RTCVideoEncoderSettings.h @@ -14,9 +14,9 @@ NS_ASSUME_NONNULL_BEGIN -typedef NS_ENUM(NSUInteger, RTCVideoCodecMode) { - RTCVideoCodecModeRealtimeVideo, - RTCVideoCodecModeScreensharing, +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCVideoCodecMode)) { + RTC_OBJC_TYPE(RTCVideoCodecModeRealtimeVideo), + RTC_OBJC_TYPE(RTCVideoCodecModeScreensharing), }; /** Settings for encoder. Corresponds to webrtc::VideoCodec. */ @@ -35,7 +35,7 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) uint32_t maxFramerate; @property(nonatomic, assign) unsigned int qpMax; -@property(nonatomic, assign) RTCVideoCodecMode mode; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCVideoCodecMode) mode; @end diff --git a/sdk/objc/base/RTCVideoFrame.h b/sdk/objc/base/RTCVideoFrame.h index 0f87dd8f6d..86ccb105f3 100644 --- a/sdk/objc/base/RTCVideoFrame.h +++ b/sdk/objc/base/RTCVideoFrame.h @@ -15,11 +15,11 @@ NS_ASSUME_NONNULL_BEGIN -typedef NS_ENUM(NSInteger, RTCVideoRotation) { - RTCVideoRotation_0 = 0, - RTCVideoRotation_90 = 90, - RTCVideoRotation_180 = 180, - RTCVideoRotation_270 = 270, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCVideoRotation)) { + RTC_OBJC_TYPE(RTCVideoRotation_0) = 0, + RTC_OBJC_TYPE(RTCVideoRotation_90) = 90, + RTC_OBJC_TYPE(RTCVideoRotation_180) = 180, + RTC_OBJC_TYPE(RTCVideoRotation_270) = 270, }; @protocol RTC_OBJC_TYPE @@ -34,7 +34,7 @@ RTC_OBJC_EXPORT /** Height without rotation applied. */ @property(nonatomic, readonly) int height; -@property(nonatomic, readonly) RTCVideoRotation rotation; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCVideoRotation) rotation; /** Timestamp in nanoseconds. 
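// [Editor's sketch, not part of the patch: constructing a frame with the
// renamed rotation enum; `i420Buffer` and `nanos` are illustrative.]
//
//   RTC_OBJC_TYPE(RTCVideoFrame) *frame =
//       [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:i420Buffer
//                                                   rotation:RTC_OBJC_TYPE(RTCVideoRotation_0)
//                                                timeStampNs:nanos];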
*/ @property(nonatomic, readonly) int64_t timeStampNs; @@ -47,11 +47,33 @@ RTC_OBJC_EXPORT - (instancetype)init NS_UNAVAILABLE; - (instancetype)new NS_UNAVAILABLE; +/** Initialize an RTCVideoFrame from a pixel buffer, rotation, and timestamp. + * Deprecated - initialize with a RTCCVPixelBuffer instead + */ +- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation + timeStampNs:(int64_t)timeStampNs + DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead"); + +/** Initialize an RTCVideoFrame from a pixel buffer combined with cropping and + * scaling. Cropping will be applied first on the pixel buffer, followed by + * scaling to the final resolution of scaledWidth x scaledHeight. + */ +- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer + scaledWidth:(int)scaledWidth + scaledHeight:(int)scaledHeight + cropWidth:(int)cropWidth + cropHeight:(int)cropHeight + cropX:(int)cropX + cropY:(int)cropY + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation + timeStampNs:(int64_t)timeStampNs + DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead"); + /** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp. */ -- (instancetype)initWithBuffer: - (id)frameBuffer - rotation:(RTCVideoRotation)rotation +- (instancetype)initWithBuffer:(id)frameBuffer + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation timeStampNs:(int64_t)timeStampNs; /** Return a frame that is guaranteed to be I420, i.e. it is possible to access diff --git a/sdk/objc/base/RTCVideoFrame.mm b/sdk/objc/base/RTCVideoFrame.mm index cf73ca5df8..23b9f3bcdd 100644 --- a/sdk/objc/base/RTCVideoFrame.mm +++ b/sdk/objc/base/RTCVideoFrame.mm @@ -14,7 +14,7 @@ #import "RTCVideoFrameBuffer.h" @implementation RTC_OBJC_TYPE (RTCVideoFrame) { - RTCVideoRotation _rotation; + RTC_OBJC_TYPE(RTCVideoRotation) _rotation; int64_t _timeStampNs; } @@ -29,7 +29,7 @@ - (int)height { return _buffer.height; } -- (RTCVideoRotation)rotation { +- (RTC_OBJC_TYPE(RTCVideoRotation) )rotation { return _rotation; } @@ -43,8 +43,28 @@ - (int64_t)timeStampNs { timeStampNs:_timeStampNs]; } +- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation + timeStampNs:(int64_t)timeStampNs { + // Deprecated. + return nil; +} + +- (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer + scaledWidth:(int)scaledWidth + scaledHeight:(int)scaledHeight + cropWidth:(int)cropWidth + cropHeight:(int)cropHeight + cropX:(int)cropX + cropY:(int)cropY + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation + timeStampNs:(int64_t)timeStampNs { + // Deprecated. + return nil; +} + - (instancetype)initWithBuffer:(id)buffer - rotation:(RTCVideoRotation)rotation + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation timeStampNs:(int64_t)timeStampNs { self = [super init]; if (self) { diff --git a/sdk/objc/components/audio/RTCAudioBuffer+Private.h b/sdk/objc/components/audio/RTCAudioBuffer+Private.h new file mode 100644 index 0000000000..effd8bb429 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioBuffer+Private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "RTCAudioBuffer.h"
+
+#include "modules/audio_processing/audio_buffer.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE (RTCAudioBuffer)()
+
+- (instancetype)initWithNativeType:(webrtc::AudioBuffer *)audioBuffer;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/components/audio/RTCAudioBuffer.h b/sdk/objc/components/audio/RTCAudioBuffer.h
new file mode 100644
index 0000000000..8bbd068657
--- /dev/null
+++ b/sdk/objc/components/audio/RTCAudioBuffer.h
@@ -0,0 +1,38 @@
+/*
+ * Copyright 2023 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCAudioBuffer) : NSObject
+
+@property(nonatomic, readonly) size_t channels;
+@property(nonatomic, readonly) size_t frames;
+@property(nonatomic, readonly) size_t framesPerBand;
+@property(nonatomic, readonly) size_t bands;
+
+// Returns a pointer to the raw sample buffer for `channel`; valid indices
+// range from 0 to `frames` - 1.
+- (float* _Nonnull)rawBufferForChannel:(size_t)channel;
+
+// TODO: More convenience methods...
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/components/audio/RTCAudioBuffer.mm b/sdk/objc/components/audio/RTCAudioBuffer.mm
new file mode 100644
index 0000000000..2d98ee691e
--- /dev/null
+++ b/sdk/objc/components/audio/RTCAudioBuffer.mm
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2023 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#import "RTCAudioBuffer.h" + +#include "modules/audio_processing/audio_buffer.h" + +@implementation RTC_OBJC_TYPE (RTCAudioBuffer) { + // Raw + webrtc::AudioBuffer *_audioBuffer; +} + +- (size_t)channels { + return _audioBuffer->num_channels(); +} + +- (size_t)frames { + return _audioBuffer->num_frames(); +} + +- (size_t)framesPerBand { + return _audioBuffer->num_frames_per_band(); +} + +- (size_t)bands { + return _audioBuffer->num_bands(); +} + +- (float *)rawBufferForChannel:(size_t)channel { + return _audioBuffer->channels()[channel]; +} + +#pragma mark - Private + +- (instancetype)initWithNativeType:(webrtc::AudioBuffer *)audioBuffer { + self = [super init]; + if (self) { + _audioBuffer = audioBuffer; + } + return self; +} + +@end diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h new file mode 100644 index 0000000000..9995b58abb --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h @@ -0,0 +1,43 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioCustomProcessingAdapter.h" +#import "RTCAudioCustomProcessingDelegate.h" +#import "RTCMacros.h" + +#include "modules/audio_processing/include/audio_processing.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) () + +// Thread safe set/get with os_unfair_lock. +@property(nonatomic, weak, nullable) id + audioCustomProcessingDelegate; + +// Direct read access without lock. +@property(nonatomic, readonly, weak, nullable) id + rawAudioCustomProcessingDelegate; + +@property(nonatomic, readonly) std::unique_ptr + nativeAudioCustomProcessingModule; + +- (instancetype)initWithDelegate: + (nullable id)audioCustomProcessingDelegate; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h new file mode 100644 index 0000000000..3230c19323 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h @@ -0,0 +1,28 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) : NSObject + +- (instancetype)init NS_UNAVAILABLE; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm new file mode 100644 index 0000000000..85b7b82609 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm @@ -0,0 +1,140 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import + +#import "RTCAudioBuffer+Private.h" +#import "RTCAudioCustomProcessingAdapter+Private.h" +#import "RTCAudioCustomProcessingAdapter.h" + +#include "rtc_base/logging.h" + +namespace webrtc { + +class AudioCustomProcessingAdapter : public webrtc::CustomProcessing { + public: + bool is_initialized_; + int sample_rate_hz_; + int num_channels_; + + AudioCustomProcessingAdapter(RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *adapter, os_unfair_lock *lock) { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter init"; + + adapter_ = adapter; + lock_ = lock; + is_initialized_ = false; + sample_rate_hz_ = 0; + num_channels_ = 0; + } + + ~AudioCustomProcessingAdapter() { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter dealloc"; + + os_unfair_lock_lock(lock_); + id delegate = adapter_.rawAudioCustomProcessingDelegate; + [delegate audioProcessingRelease]; + os_unfair_lock_unlock(lock_); + } + + void Initialize(int sample_rate_hz, int num_channels) override { + os_unfair_lock_lock(lock_); + id delegate = adapter_.rawAudioCustomProcessingDelegate; + [delegate audioProcessingInitializeWithSampleRate:sample_rate_hz channels:num_channels]; + is_initialized_ = true; + sample_rate_hz_ = sample_rate_hz; + num_channels_ = num_channels; + os_unfair_lock_unlock(lock_); + } + + void Process(AudioBuffer *audio_buffer) override { + bool is_locked = os_unfair_lock_trylock(lock_); + if (!is_locked) { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter Process " + "already locked, skipping..."; + + return; + } + id delegate = adapter_.rawAudioCustomProcessingDelegate; + if (delegate != nil) { + RTC_OBJC_TYPE(RTCAudioBuffer) *audioBuffer = [[RTC_OBJC_TYPE(RTCAudioBuffer) alloc] initWithNativeType:audio_buffer]; + [delegate audioProcessingProcess:audioBuffer]; + } + os_unfair_lock_unlock(lock_); + } + + std::string ToString() const override { return "AudioCustomProcessingAdapter"; } + + private: + __weak RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *adapter_; + os_unfair_lock *lock_; +}; +} // namespace webrtc + +@implementation RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) { + webrtc::AudioCustomProcessingAdapter *_adapter; + os_unfair_lock _lock; +} + +@synthesize rawAudioCustomProcessingDelegate = _rawAudioCustomProcessingDelegate; + +- (instancetype)initWithDelegate: + (nullable id)audioCustomProcessingDelegate { + 
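+  // The adapter shares one os_unfair_lock with its native webrtc::CustomProcessing
+  // wrapper so that swapping the delegate below cannot race Process() on the
+  // audio thread.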
self = [super init];
+  if (self) {
+    _lock = OS_UNFAIR_LOCK_INIT;
+    _rawAudioCustomProcessingDelegate = audioCustomProcessingDelegate;
+    _adapter = new webrtc::AudioCustomProcessingAdapter(self, &_lock);
+    RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter init";
+  }
+
+  return self;
+}
+
+- (void)dealloc {
+  RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter dealloc";
+}
+
+#pragma mark - Getter & Setter for audioCustomProcessingDelegate
+
+- (nullable id<RTC_OBJC_TYPE(RTCAudioCustomProcessingDelegate)>)audioCustomProcessingDelegate {
+  os_unfair_lock_lock(&_lock);
+  id<RTC_OBJC_TYPE(RTCAudioCustomProcessingDelegate)> delegate = _rawAudioCustomProcessingDelegate;
+  os_unfair_lock_unlock(&_lock);
+  return delegate;
+}
+
+- (void)setAudioCustomProcessingDelegate:
+    (nullable id<RTC_OBJC_TYPE(RTCAudioCustomProcessingDelegate)>)delegate {
+  os_unfair_lock_lock(&_lock);
+  if (_rawAudioCustomProcessingDelegate != nil && _adapter->is_initialized_) {
+    [_rawAudioCustomProcessingDelegate audioProcessingRelease];
+  }
+  _rawAudioCustomProcessingDelegate = delegate;
+  if (_adapter->is_initialized_) {
+    [_rawAudioCustomProcessingDelegate
+        audioProcessingInitializeWithSampleRate:_adapter->sample_rate_hz_
+                                       channels:_adapter->num_channels_];
+  }
+  os_unfair_lock_unlock(&_lock);
+}
+
+#pragma mark - Private
+
+- (std::unique_ptr<webrtc::CustomProcessing>)nativeAudioCustomProcessingModule {
+  return std::unique_ptr<webrtc::CustomProcessing>(_adapter);
+}
+
+@end
diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h b/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h
new file mode 100644
index 0000000000..a8e4981fbc
--- /dev/null
+++ b/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h
@@ -0,0 +1,52 @@
+/*
+ * Copyright 2023 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCAudioBuffer);
+
+RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCAudioCustomProcessingDelegate)
+
+/**
+ * (Re-)initialize the audio processor.
+ * This method can be invoked multiple times.
+ */
+- (void)audioProcessingInitializeWithSampleRate:(size_t)sampleRateHz
+                                       channels:(size_t)channels
+    NS_SWIFT_NAME(audioProcessingInitialize(sampleRate:channels:));
+
+/**
+ * Process (read or write) the audio buffer.
+ * RTCAudioBuffer is a simple wrapper for webrtc::AudioBuffer and the valid scope is only inside
+ * this method. Do not retain it.
+ */
+- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer
+    NS_SWIFT_NAME(audioProcessingProcess(audioBuffer:));
+
+// TODO:
+// virtual void SetRuntimeSetting(AudioProcessing::RuntimeSetting setting);
+
+/**
+ * Suggests releasing resources allocated by the audio processor.
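+ * This is also invoked from the adapter's destructor, and when the delegate is
+ * replaced after audio processing has already been initialized.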
+ */ +- (void)audioProcessingRelease; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h b/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h new file mode 100644 index 0000000000..3eacaa653d --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h @@ -0,0 +1,31 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioProcessingConfig.h" + +#include "modules/audio_processing/include/audio_processing.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCAudioProcessingConfig)() + +- (instancetype)initWithNativeAudioProcessingConfig:(webrtc::AudioProcessing::Config)config; + +@property(nonatomic, readonly) webrtc::AudioProcessing::Config nativeAudioProcessingConfig; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig.h b/sdk/objc/components/audio/RTCAudioProcessingConfig.h new file mode 100644 index 0000000000..7e4e666608 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig.h @@ -0,0 +1,37 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCAudioProcessingConfig) : NSObject + +@property(nonatomic, assign) BOOL isEchoCancellationEnabled; +@property(nonatomic, assign) BOOL isEchoCancellationMobileMode; + +@property(nonatomic, assign) BOOL isNoiseSuppressionEnabled; +@property(nonatomic, assign) BOOL isHighpassFilterEnabled; + +@property(nonatomic, assign) BOOL isAutoGainControl1Enabled; +@property(nonatomic, assign) BOOL isAutoGainControl2Enabled; + + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig.mm b/sdk/objc/components/audio/RTCAudioProcessingConfig.mm new file mode 100644 index 0000000000..4cdc80cb22 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig.mm @@ -0,0 +1,99 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioProcessingConfig.h" + +#include "modules/audio_processing/include/audio_processing.h" + +@implementation RTC_OBJC_TYPE (RTCAudioProcessingConfig) { + webrtc::AudioProcessing::Config _config; +} + +// config.echo_canceller.enabled + +- (BOOL)isEchoCancellationEnabled { + return _config.echo_canceller.enabled; +} + +- (void)setIsEchoCancellationEnabled:(BOOL)value { + _config.echo_canceller.enabled = value; +} + +// config.echo_canceller.mobile_mode + +- (BOOL)isEchoCancellationMobileMode { + return _config.echo_canceller.mobile_mode; +} + +- (void)setIsEchoCancellationMobileMode:(BOOL)value { + _config.echo_canceller.mobile_mode = value; +} + +// config.noise_suppression.enabled + +- (BOOL)isNoiseSuppressionEnabled { + return _config.noise_suppression.enabled; +} + +- (void)setIsNoiseSuppressionEnabled:(BOOL)value { + _config.noise_suppression.enabled = value; +} + +// config.high_pass_filter.enabled + +- (BOOL)isHighpassFilterEnabled { + return _config.high_pass_filter.enabled; +} + +- (void)setIsHighpassFilterEnabled:(BOOL)value { + _config.high_pass_filter.enabled = value; +} + +// config.gain_controller1.enabled + +- (BOOL)isAutoGainControl1Enabled { + return _config.gain_controller1.enabled; +} + +- (void)setIsAutoGainControl1Enabled:(BOOL)value { + _config.gain_controller1.enabled = value; +} + +// config.gain_controller2.enabled + +- (BOOL)isAutoGainControl2Enabled { + return _config.gain_controller2.enabled; +} + +- (void)setIsAutoGainControl2Enabled:(BOOL)value { + _config.gain_controller2.enabled = value; +} + +#pragma mark - Private + +- (instancetype)initWithNativeAudioProcessingConfig:(webrtc::AudioProcessing::Config)config { + self = [super init]; + if (self) { + _config = config; + } + return self; +} + +- (webrtc::AudioProcessing::Config)nativeAudioProcessingConfig { + return _config; +} + +@end diff --git a/sdk/objc/components/audio/RTCAudioProcessingModule.h b/sdk/objc/components/audio/RTCAudioProcessingModule.h new file mode 100644 index 0000000000..af8024ddcb --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingModule.h @@ -0,0 +1,31 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCAudioProcessingConfig); + +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCAudioProcessingModule) + +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCAudioProcessingConfig) * config; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm index fd9054ff89..f346981a30 100644 --- a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm +++ b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm @@ -70,6 +70,33 @@ - (BOOL)setConfiguration: } } + if (self.mode != configuration.mode) { + NSError *modeError = nil; + if (![self setMode:configuration.mode error:&modeError]) { + RTCLogError(@"Failed to set mode to %@: %@", + self.mode, + modeError.localizedDescription); + error = modeError; + } else { + RTCLog(@"Set mode to: %@", configuration.mode); + } + } + + // Sometimes category options don't stick after setting mode. + if (self.categoryOptions != configuration.categoryOptions) { + NSError *categoryError = nil; + if (![self setCategory:configuration.category + withOptions:configuration.categoryOptions + error:&categoryError]) { + RTCLogError(@"Failed to set category options: %@", + categoryError.localizedDescription); + error = categoryError; + } else { + RTCLog(@"Set category options to: %ld", + (long)configuration.categoryOptions); + } + } + if (self.preferredSampleRate != configuration.sampleRate) { NSError *sampleRateError = nil; if (![self setPreferredSampleRate:configuration.sampleRate diff --git a/sdk/objc/components/audio/RTCAudioSession.h b/sdk/objc/components/audio/RTCAudioSession.h index 00a825f0a1..9ea6965c72 100644 --- a/sdk/objc/components/audio/RTCAudioSession.h +++ b/sdk/objc/components/audio/RTCAudioSession.h @@ -15,11 +15,11 @@ NS_ASSUME_NONNULL_BEGIN -extern NSString *const kRTCAudioSessionErrorDomain; +extern NSString *const RTC_CONSTANT_TYPE(RTCAudioSessionErrorDomain); /** Method that requires lock was called without lock. */ -extern NSInteger const kRTCAudioSessionErrorLockRequired; +extern NSInteger const RTC_CONSTANT_TYPE(RTCAudioSessionErrorLockRequired); /** Unknown configuration error occurred. */ -extern NSInteger const kRTCAudioSessionErrorConfiguration; +extern NSInteger const RTC_CONSTANT_TYPE(RTCAudioSessionErrorConfiguration); @class RTC_OBJC_TYPE(RTCAudioSession); @class RTC_OBJC_TYPE(RTCAudioSessionConfiguration); diff --git a/sdk/objc/components/audio/RTCAudioSession.mm b/sdk/objc/components/audio/RTCAudioSession.mm index a759c35b2c..7cf38f17eb 100644 --- a/sdk/objc/components/audio/RTCAudioSession.mm +++ b/sdk/objc/components/audio/RTCAudioSession.mm @@ -26,11 +26,10 @@ #error ABSL_HAVE_THREAD_LOCAL should be defined for MacOS / iOS Targets. 
#endif -NSString *const kRTCAudioSessionErrorDomain = - @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)"; -NSInteger const kRTCAudioSessionErrorLockRequired = -1; -NSInteger const kRTCAudioSessionErrorConfiguration = -2; -NSString *const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; +NSString *const RTC_CONSTANT_TYPE(RTCAudioSessionErrorDomain) = @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)"; +NSInteger const RTC_CONSTANT_TYPE(RTCAudioSessionErrorLockRequired) = -1; +NSInteger const RTC_CONSTANT_TYPE(RTCAudioSessionErrorConfiguration) = -2; +NSString * const RTC_CONSTANT_TYPE(RTCAudioSessionOutputVolumeSelector) = @"outputVolume"; namespace { // Since webrtc::Mutex is not a reentrant lock and cannot check if the mutex is @@ -114,11 +113,10 @@ - (instancetype)initWithAudioSession:(id)audioSession { selector:@selector(handleApplicationDidBecomeActive:) name:UIApplicationDidBecomeActiveNotification object:nil]; - [_session - addObserver:self - forKeyPath:kRTCAudioSessionOutputVolumeSelector - options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld - context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class]; + [_session addObserver:self + forKeyPath:RTC_CONSTANT_TYPE(RTCAudioSessionOutputVolumeSelector) + options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld + context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class]; RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): init.", self); } @@ -127,10 +125,9 @@ - (instancetype)initWithAudioSession:(id)audioSession { - (void)dealloc { [[NSNotificationCenter defaultCenter] removeObserver:self]; - [_session - removeObserver:self - forKeyPath:kRTCAudioSessionOutputVolumeSelector - context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class]; + [_session removeObserver:self + forKeyPath:RTC_CONSTANT_TYPE(RTCAudioSessionOutputVolumeSelector) + context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class]; RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): dealloc.", self); } @@ -568,8 +565,7 @@ - (void)handleRouteChangeNotification:(NSNotification *)notification { RTCLog(@"Audio route changed: OldDeviceUnavailable"); break; case AVAudioSessionRouteChangeReasonCategoryChange: - RTCLog(@"Audio route changed: CategoryChange to :%@", - self.session.category); + RTCLog(@"Audio route changed: CategoryChange to :%@", self.session.category); break; case AVAudioSessionRouteChangeReasonOverride: RTCLog(@"Audio route changed: Override"); @@ -642,14 +638,11 @@ - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { #pragma mark - Private + (NSError *)lockError { - NSDictionary *userInfo = @{ - NSLocalizedDescriptionKey : - @"Must call lockForConfiguration before calling this method." - }; - NSError *error = - [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain - code:kRTCAudioSessionErrorLockRequired - userInfo:userInfo]; + NSDictionary *userInfo = + @{NSLocalizedDescriptionKey : @"Must call lockForConfiguration before calling this method."}; + NSError *error = [[NSError alloc] initWithDomain:RTC_CONSTANT_TYPE(RTCAudioSessionErrorDomain) + code:RTC_CONSTANT_TYPE(RTCAudioSessionErrorLockRequired) + userInfo:userInfo]; return error; } @@ -763,6 +756,7 @@ - (BOOL)configureWebRTCSession:(NSError **)outError { return NO; } +#if !TARGET_OS_TV // Ensure that the device currently supports audio input. // TODO(tkchin): Figure out if this is really necessary. if (!self.inputAvailable) { @@ -773,6 +767,7 @@ - (BOOL)configureWebRTCSession:(NSError **)outError { } return NO; } +#endif // It can happen (e.g. 
in combination with BT devices) that the attempt to set
 // the preferred sample rate for WebRTC (48kHz) fails. If so, make a new
@@ -816,8 +811,8 @@ - (NSError *)configurationErrorWithDescription:(NSString *)description {
   NSDictionary *userInfo = @{
     NSLocalizedDescriptionKey : description,
   };
-  return [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain
-                                    code:kRTCAudioSessionErrorConfiguration
+  return [[NSError alloc] initWithDomain:RTC_CONSTANT_TYPE(RTCAudioSessionErrorDomain)
+                                    code:RTC_CONSTANT_TYPE(RTCAudioSessionErrorConfiguration)
                                 userInfo:userInfo];
 }
@@ -906,10 +901,9 @@ - (void)notifyAudioUnitStartFailedWithError:(OSStatus)error {
     SEL sel = @selector(audioSession:audioUnitStartFailedWithError:);
     if ([delegate respondsToSelector:sel]) {
       [delegate audioSession:self
-          audioUnitStartFailedWithError:
-              [NSError errorWithDomain:kRTCAudioSessionErrorDomain
-                                  code:error
-                              userInfo:nil]];
+          audioUnitStartFailedWithError:[NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCAudioSessionErrorDomain)
+                                                            code:error
+                                                        userInfo:nil]];
     }
   }
 }
diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.h b/sdk/objc/components/audio/RTCAudioSessionConfiguration.h
index 622f6f006a..169e91a266 100644
--- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.h
+++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.h
@@ -15,9 +15,9 @@
 
 NS_ASSUME_NONNULL_BEGIN
 
-RTC_EXTERN const int kRTCAudioSessionPreferredNumberOfChannels;
-RTC_EXTERN const double kRTCAudioSessionHighPerformanceSampleRate;
-RTC_EXTERN const double kRTCAudioSessionHighPerformanceIOBufferDuration;
+RTC_EXTERN const int RTC_CONSTANT_TYPE(RTCAudioSessionPreferredNumberOfChannels);
+RTC_EXTERN const double RTC_CONSTANT_TYPE(RTCAudioSessionHighPerformanceSampleRate);
+RTC_EXTERN const double RTC_CONSTANT_TYPE(RTCAudioSessionHighPerformanceIOBufferDuration);
 
 // Struct to hold configuration values.
 RTC_OBJC_EXPORT
diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m
index 74de6b5ce2..36e4a0c68f 100644
--- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m
+++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m
@@ -22,13 +22,13 @@
 // audio unit. Hence, we will not hit a RTC_CHECK in
 // VerifyAudioParametersForActiveAudioSession() for a mismatch between the
 // preferred number of channels and the actual number of channels.
-const int kRTCAudioSessionPreferredNumberOfChannels = 1;
+const int RTC_CONSTANT_TYPE(RTCAudioSessionPreferredNumberOfChannels) = 1;
 
 // Preferred hardware sample rate (unit is in Hertz). The client sample rate
 // will be set to this value as well to avoid resampling in the audio unit's
 // format converter. Note that some devices, e.g. BT headsets, only support
 // 8000Hz as native sample rate.
-const double kRTCAudioSessionHighPerformanceSampleRate = 48000.0;
+const double RTC_CONSTANT_TYPE(RTCAudioSessionHighPerformanceSampleRate) = 48000.0;
 
 // Use a hardware I/O buffer size (unit is in seconds) that matches the 10ms
 // size used by WebRTC. The exact actual size will differ between devices.
@@ -38,7 +38,7 @@
 // buffers used by WebRTC. It is beneficial for the performance if the native
 // size is as an even multiple of 10ms as possible since it results in "clean"
 // callback sequence without bursts of callbacks back to back.
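+// For reference: 0.02 s at 48 kHz is 960 samples per hardware callback,
+// i.e. two 10 ms (480-sample) WebRTC blocks per callback.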
-const double kRTCAudioSessionHighPerformanceIOBufferDuration = 0.02; +const double RTC_CONSTANT_TYPE(RTCAudioSessionHighPerformanceIOBufferDuration) = 0.02; static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil; @@ -55,27 +55,29 @@ @implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration) - (instancetype)init { self = [super init]; if (self) { + // Use AVAudioSession values for default + AVAudioSession *session = [AVAudioSession sharedInstance]; // Use a category which supports simultaneous recording and playback. - // By default, using this category implies that our app’s audio is + // By default, using this category implies that our app's audio is // nonmixable, hence activating the session will interrupt any other // audio sessions which are also nonmixable. - _category = AVAudioSessionCategoryPlayAndRecord; - _categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth; + _category = session.category; + _categoryOptions = session.categoryOptions; // Specify mode for two-way voice communication (e.g. VoIP). - _mode = AVAudioSessionModeVoiceChat; + _mode = session.mode; // Use best sample rate and buffer duration if the CPU has more than one // core. - _sampleRate = kRTCAudioSessionHighPerformanceSampleRate; - _ioBufferDuration = kRTCAudioSessionHighPerformanceIOBufferDuration; + _sampleRate = RTC_CONSTANT_TYPE(RTCAudioSessionHighPerformanceSampleRate); + _ioBufferDuration = RTC_CONSTANT_TYPE(RTCAudioSessionHighPerformanceIOBufferDuration); // We try to use mono in both directions to save resources and format // conversions in the audio unit. Some devices does only support stereo; // e.g. wired headset on iPhone 6. // TODO(henrika): add support for stereo if needed. - _inputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels; - _outputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels; + _inputNumberOfChannels = RTC_CONSTANT_TYPE(RTCAudioSessionPreferredNumberOfChannels); + _outputNumberOfChannels = RTC_CONSTANT_TYPE(RTCAudioSessionPreferredNumberOfChannels); } return self; } diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h new file mode 100644 index 0000000000..9298db2986 --- /dev/null +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCDefaultAudioProcessingModule.h" + +#include "modules/audio_processing/include/audio_processing.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule)() + +@property(nonatomic, readonly) webrtc::scoped_refptr nativeAudioProcessingModule; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h new file mode 100644 index 0000000000..81a1e7e11e --- /dev/null +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h @@ -0,0 +1,46 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCAudioProcessingModule.h" +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCAudioProcessingConfig); +@protocol RTC_OBJC_TYPE (RTCAudioCustomProcessingDelegate); + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule) : NSObject + +- (instancetype)initWithConfig: (nullable RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config + capturePostProcessingDelegate: (nullable id)capturePostProcessingDelegate + renderPreProcessingDelegate: (nullable id)renderPreProcessingDelegate + NS_SWIFT_NAME(init(config:capturePostProcessingDelegate:renderPreProcessingDelegate:)) NS_DESIGNATED_INITIALIZER; + +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCAudioProcessingConfig) * config; + +// Dynamically update delegates at runtime + +@property(nonatomic, weak, nullable) id + capturePostProcessingDelegate; +@property(nonatomic, weak, nullable) id + renderPreProcessingDelegate; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm new file mode 100644 index 0000000000..9d9c78345a --- /dev/null +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm @@ -0,0 +1,105 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#import "RTCDefaultAudioProcessingModule.h"
+#import "RTCAudioCustomProcessingAdapter+Private.h"
+#import "RTCAudioProcessingConfig+Private.h"
+
+#include "api/environment/environment_factory.h"
+#include "api/scoped_refptr.h"
+#include "api/audio/builtin_audio_processing_builder.h"
+#include "modules/audio_processing/include/audio_processing.h"
+
+@implementation RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule) {
+  webrtc::scoped_refptr<webrtc::AudioProcessing> _nativeAudioProcessingModule;
+  // Custom processing adapters...
+  RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *_capturePostProcessingAdapter;
+  RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *_renderPreProcessingAdapter;
+}
+
+- (instancetype)init {
+  return [self initWithConfig:nil
+      capturePostProcessingDelegate:nil
+        renderPreProcessingDelegate:nil];
+}
+
+- (instancetype)initWithConfig:(nullable RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config
+    capturePostProcessingDelegate:(nullable id<RTC_OBJC_TYPE(RTCAudioCustomProcessingDelegate)>)
+                                      capturePostProcessingDelegate
+      renderPreProcessingDelegate:(nullable id<RTC_OBJC_TYPE(RTCAudioCustomProcessingDelegate)>)
+                                      renderPreProcessingDelegate {
+  self = [super init];
+  if (self) {
+    webrtc::BuiltinAudioProcessingBuilder builder = webrtc::BuiltinAudioProcessingBuilder();
+
+    // TODO: Custom Config...
+
+    if (config != nil) {
+      builder.SetConfig(config.nativeAudioProcessingConfig);
+    }
+
+    _capturePostProcessingAdapter = [[RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) alloc]
+        initWithDelegate:capturePostProcessingDelegate];
+    builder.SetCapturePostProcessing(
+        _capturePostProcessingAdapter.nativeAudioCustomProcessingModule);
+
+    _renderPreProcessingAdapter = [[RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) alloc]
+        initWithDelegate:renderPreProcessingDelegate];
+    builder.SetRenderPreProcessing(_renderPreProcessingAdapter.nativeAudioCustomProcessingModule);
+
+    _nativeAudioProcessingModule = builder.Build(webrtc::CreateEnvironment());
+  }
+  return self;
+}
+
+#pragma mark - Getters & Setters for delegates
+
+- (nullable id<RTC_OBJC_TYPE(RTCAudioCustomProcessingDelegate)>)capturePostProcessingDelegate {
+  return _capturePostProcessingAdapter.audioCustomProcessingDelegate;
+}
+
+- (void)setCapturePostProcessingDelegate:
+    (nullable id<RTC_OBJC_TYPE(RTCAudioCustomProcessingDelegate)>)delegate {
+  _capturePostProcessingAdapter.audioCustomProcessingDelegate = delegate;
+}
+
+- (nullable id<RTC_OBJC_TYPE(RTCAudioCustomProcessingDelegate)>)renderPreProcessingDelegate {
+  return _renderPreProcessingAdapter.audioCustomProcessingDelegate;
+}
+
+- (void)setRenderPreProcessingDelegate:
+    (nullable id<RTC_OBJC_TYPE(RTCAudioCustomProcessingDelegate)>)delegate {
+  _renderPreProcessingAdapter.audioCustomProcessingDelegate = delegate;
+}
+
+#pragma mark - RTCAudioProcessingModule protocol
+
+- (RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config {
+  webrtc::AudioProcessing::Config nativeConfig = _nativeAudioProcessingModule->GetConfig();
+  return [[RTC_OBJC_TYPE(RTCAudioProcessingConfig) alloc]
+      initWithNativeAudioProcessingConfig:nativeConfig];
+}
+
+- (void)setConfig:(RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config {
+  _nativeAudioProcessingModule->ApplyConfig(config.nativeAudioProcessingConfig);
+}
+
+#pragma mark - Private
+
+- (webrtc::scoped_refptr<webrtc::AudioProcessing>)nativeAudioProcessingModule {
+  return _nativeAudioProcessingModule;
+}
+
+@end
diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h
index b296aedfdb..515d665dcf 100644
--- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h
+++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h
@@ -19,8 +19,7 @@ class AudioSessionObserver;
 
 /** Adapter that forwards RTCAudioSessionDelegate calls to the appropriate
 * methods on the AudioSessionObserver.
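 * The observer is stored as a raw pointer, so it is expected to outlive
 * this adapter.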
*/ -@interface RTCNativeAudioSessionDelegateAdapter - : NSObject +@interface RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm index cf408f60d5..1284ec3b70 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm @@ -14,7 +14,7 @@ #import "base/RTCLogging.h" -@implementation RTCNativeAudioSessionDelegateAdapter { +@implementation RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) { webrtc::AudioSessionObserver *_observer; } diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h index 5185fa866e..6318f075a6 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h @@ -27,10 +27,22 @@ RTC_OBJC_EXPORT // Returns list of available capture devices that support video capture. + (NSArray *)captureDevices; + // Returns list of formats that are supported by this class for this device. + (NSArray *)supportedFormatsForDevice: (AVCaptureDevice *)device; +#if !TARGET_OS_VISION ++ (CGFloat)defaultZoomFactorForDeviceType:(AVCaptureDeviceType)deviceType; +#endif + +- (instancetype)initWithDelegate: + (nullable __weak id)delegate; + +- (instancetype)initWithDelegate: + (nullable __weak id)delegate + captureSession:(AVCaptureSession *)captureSession; + // Returns the most efficient supported output pixel format for this capturer. - (FourCharCode)preferredOutputPixelFormat; diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index ff55bd7f19..b6d71e75e0 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -9,13 +9,21 @@ */ #import +#import #import "RTCCameraVideoCapturer.h" #import "base/RTCLogging.h" #import "base/RTCVideoFrameBuffer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" -#if TARGET_OS_IPHONE +// AVCaptureMultiCamSession iOS 13.0+, iPadOS 13.0+, Mac Catalyst 14.0+, tvOS 17.0+ +#define TARGET_MULTICAM_CAPABLE (TARGET_OS_IPHONE && !TARGET_OS_VISION) + +// iOS 2.0+, iPadOS 2.0+, Mac Catalyst 13.0+ +#define TARGET_WATCH_DEVICE_ROTATION \ + (TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION && !TARGET_OS_TV) + +#if TARGET_WATCH_DEVICE_ROTATION #import "helpers/UIDevice+RTCDevice.h" #endif @@ -41,13 +49,25 @@ @implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) { AVCaptureSession *_captureSession; FourCharCode _preferredOutputPixelFormat; FourCharCode _outputPixelFormat; - RTCVideoRotation _rotation; -#if TARGET_OS_IPHONE - UIDeviceOrientation _orientation; + RTC_OBJC_TYPE(RTCVideoRotation) _rotation; + +#if TARGET_WATCH_DEVICE_ROTATION + UIInterfaceOrientation _orientation; BOOL _generatingOrientationNotifications; #endif + +#if TARGET_MULTICAM_CAPABLE + AVCaptureConnection *_captureConnection; +#endif } +#if TARGET_MULTICAM_CAPABLE +// Shared multi-camera session across capturers. 
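+// The shared session is created lazily in createCaptureSession, and
+// startRunning/stopRunning reference-count it under the lock below so the
+// session starts with the first capturer and stops with the last one.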
+static AVCaptureMultiCamSession *_sharedMultiCamSession = nil; +static os_unfair_lock _sharedMultiCamSessionLock = OS_UNFAIR_LOCK_INIT; +static NSUInteger _sharedMultiCamSessionCount = 0; +#endif + @synthesize frameQueue = _frameQueue; @synthesize captureSession = _captureSession; @synthesize currentDevice = _currentDevice; @@ -56,19 +76,14 @@ @implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) { @synthesize willBeRunning = _willBeRunning; - (instancetype)init { - return [self initWithDelegate:nil - captureSession:[[AVCaptureSession alloc] init]]; + return [self initWithDelegate:nil captureSession:[self createCaptureSession]]; } -- (instancetype)initWithDelegate: - (__weak id)delegate { - return [self initWithDelegate:delegate - captureSession:[[AVCaptureSession alloc] init]]; +- (instancetype)initWithDelegate:(__weak id)delegate { + return [self initWithDelegate:delegate captureSession:[self createCaptureSession]]; } -// This initializer is used for testing. -- (instancetype)initWithDelegate: - (__weak id)delegate +- (instancetype)initWithDelegate:(__weak id)delegate captureSession:(AVCaptureSession *)captureSession { self = [super initWithDelegate:delegate]; if (self) { @@ -79,10 +94,12 @@ - (instancetype)initWithDelegate: if (![self setupCaptureSession:captureSession]) { return nil; } + NSNotificationCenter *center = [NSNotificationCenter defaultCenter]; -#if TARGET_OS_IPHONE - _orientation = UIDeviceOrientationPortrait; - _rotation = RTCVideoRotation_90; + +#if TARGET_WATCH_DEVICE_ROTATION + _orientation = UIInterfaceOrientationPortrait; + _rotation = RTC_OBJC_TYPE(RTCVideoRotation_90); [center addObserver:self selector:@selector(deviceOrientationDidChange:) name:UIDeviceOrientationDidChangeNotification @@ -100,6 +117,7 @@ - (instancetype)initWithDelegate: name:UIApplicationDidBecomeActiveNotification object:[UIApplication sharedApplication]]; #endif + [center addObserver:self selector:@selector(handleCaptureSessionRuntimeError:) name:AVCaptureSessionRuntimeErrorNotification @@ -113,6 +131,7 @@ - (instancetype)initWithDelegate: name:AVCaptureSessionDidStopRunningNotification object:_captureSession]; } + return self; } @@ -125,6 +144,10 @@ - (void)dealloc { } + (NSArray *)captureDevices { +#if TARGET_OS_VISION + AVCaptureDevice *device = AVCaptureDevice.systemPreferredCamera; + return device ? 
@[ device ] : @[]; +#else AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera @@ -132,6 +155,7 @@ - (void)dealloc { mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionUnspecified]; return session.devices; +#endif } + (NSArray *)supportedFormatsForDevice: @@ -141,6 +165,28 @@ - (void)dealloc { return device.formats; } +#if !TARGET_OS_VISION ++ (CGFloat)defaultZoomFactorForDeviceType:(AVCaptureDeviceType)deviceType { + // AVCaptureDeviceTypeBuiltInTripleCamera, Virtual, switchOver: [2, 6], default: 2 + // AVCaptureDeviceTypeBuiltInDualCamera, Virtual, switchOver: [3], default: 1 + // AVCaptureDeviceTypeBuiltInDualWideCamera, Virtual, switchOver: [2], default: 2 + // AVCaptureDeviceTypeBuiltInWideAngleCamera, Physical, General purpose use + // AVCaptureDeviceTypeBuiltInTelephotoCamera, Physical + // AVCaptureDeviceTypeBuiltInUltraWideCamera, Physical +#if TARGET_OS_IOS || TARGET_OS_TV + if (@available(iOS 13.0, tvOS 17.0, *)) { + if ([deviceType isEqualToString:AVCaptureDeviceTypeBuiltInTripleCamera] || + [deviceType isEqualToString:AVCaptureDeviceTypeBuiltInDualWideCamera]) + // For AVCaptureDeviceTypeBuiltInTripleCamera and AVCaptureDeviceTypeBuiltInDualWideCamera, + // it will switch over from ultra-wide to wide on 2.0, so to prefer wide by default. + return 2.0; + } +#endif + + return 1.0; +} +#endif + - (FourCharCode)preferredOutputPixelFormat { return _preferredOutputPixelFormat; } @@ -165,19 +211,21 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device completionHandler { _willBeRunning = YES; [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession) block:^{ RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps); -#if TARGET_OS_IPHONE +#if TARGET_WATCH_DEVICE_ROTATION dispatch_async(dispatch_get_main_queue(), ^{ if (!self->_generatingOrientationNotifications) { [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; self->_generatingOrientationNotifications = YES; } + // Must be called on main + [self updateOrientation]; }); #endif @@ -185,8 +233,7 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device NSError *error = nil; if (![self.currentDevice lockForConfiguration:&error]) { - RTCLogError(@"Failed to lock device %@. Error: %@", - self.currentDevice, + RTCLogError(@"Failed to lock device %@. 
Error: %@", self.currentDevice, error.userInfo); if (completionHandler) { completionHandler(error); @@ -195,12 +242,13 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device return; } [self reconfigureCaptureSessionInput]; - [self updateOrientation]; [self updateDeviceCaptureFormat:format fps:fps]; [self updateVideoDataOutputPixelFormat:format]; - [self.captureSession startRunning]; + [self updateZoomFactor]; [self.currentDevice unlockForConfiguration]; - self.isRunning = YES; + + [self startRunning]; + if (completionHandler) { completionHandler(nil); } @@ -211,7 +259,7 @@ - (void)stopCaptureWithCompletionHandler: (nullable void (^)(void))completionHandler { _willBeRunning = NO; [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession) block:^{ RTCLogInfo("Stop"); self.currentDevice = nil; @@ -221,7 +269,22 @@ - (void)stopCaptureWithCompletionHandler: } [self.captureSession stopRunning]; -#if TARGET_OS_IPHONE +#if TARGET_MULTICAM_CAPABLE + [self.captureSession removeConnection:self->_captureConnection]; + self->_captureConnection = nil; +#endif + + for (AVCaptureDeviceInput *oldInput in [self.captureSession.inputs copy]) { + // Remove any old input with same device. + if ([self->_currentDevice isEqual:oldInput.device]) { + [self.captureSession removeInput:oldInput]; + } + } + self.currentDevice = nil; + + [self stopRunning]; + +#if TARGET_WATCH_DEVICE_ROTATION dispatch_async(dispatch_get_main_queue(), ^{ if (self->_generatingOrientationNotifications) { [[UIDevice currentDevice] @@ -230,7 +293,7 @@ - (void)stopCaptureWithCompletionHandler: } }); #endif - self.isRunning = NO; + if (completionHandler) { completionHandler(); } @@ -239,10 +302,10 @@ - (void)stopCaptureWithCompletionHandler: #pragma mark iOS notifications -#if TARGET_OS_IPHONE +#if TARGET_WATCH_DEVICE_ROTATION - (void)deviceOrientationDidChange:(NSNotification *)notification { [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession) block:^{ [self updateOrientation]; }]; @@ -267,7 +330,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput return; } -#if TARGET_OS_IPHONE +#if TARGET_WATCH_DEVICE_ROTATION // Default to portrait orientation on iPhone. BOOL usingFrontCamera = NO; // Check the image's EXIF for the camera the image came from as the image @@ -285,27 +348,25 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput AVCaptureDevicePositionFront == deviceInput.device.position; } switch (_orientation) { - case UIDeviceOrientationPortrait: - _rotation = RTCVideoRotation_90; + case UIInterfaceOrientationPortrait: + _rotation = RTC_OBJC_TYPE(RTCVideoRotation_90); break; - case UIDeviceOrientationPortraitUpsideDown: - _rotation = RTCVideoRotation_270; + case UIInterfaceOrientationPortraitUpsideDown: + _rotation = RTC_OBJC_TYPE(RTCVideoRotation_270); break; - case UIDeviceOrientationLandscapeLeft: - _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; + case UIInterfaceOrientationLandscapeLeft: + _rotation = usingFrontCamera ? RTC_OBJC_TYPE(RTCVideoRotation_0) : RTC_OBJC_TYPE(RTCVideoRotation_180); break; - case UIDeviceOrientationLandscapeRight: - _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180; + case UIInterfaceOrientationLandscapeRight: + _rotation = usingFrontCamera ? 
RTC_OBJC_TYPE(RTCVideoRotation_180) : RTC_OBJC_TYPE(RTCVideoRotation_0); break; - case UIDeviceOrientationFaceUp: - case UIDeviceOrientationFaceDown: - case UIDeviceOrientationUnknown: - // Ignore. + case UIInterfaceOrientationUnknown: + _rotation = RTC_OBJC_TYPE(RTCVideoRotation_0); break; } #else // No rotation on Mac. - _rotation = RTCVideoRotation_0; + _rotation = RTC_OBJC_TYPE(RTCVideoRotation_0); #endif RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = @@ -323,7 +384,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput - (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST CFStringRef droppedReason = CMGetAttachment( sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil); #else @@ -338,7 +399,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput - (void)handleCaptureSessionInterruption:(NSNotification *)notification { NSString *reasonString = nil; -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey]; if (reason) { @@ -371,9 +432,9 @@ - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { RTCLogError(@"Capture session runtime error: %@", error); [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession) block:^{ -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST if (error.code == AVErrorMediaServicesWereReset) { [self handleNonFatalError]; } else { @@ -389,7 +450,7 @@ - (void)handleCaptureSessionDidStartRunning:(NSNotification *)notification { RTCLog(@"Capture session started."); [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession) block:^{ // If we successfully restarted after an unknown // error, allow future retries on fatal errors. 
@@ -403,7 +464,7 @@ - (void)handleCaptureSessionDidStopRunning:(NSNotification *)notification { - (void)handleFatalError { [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession) block:^{ if (!self.hasRetriedOnFatalError) { RTCLogWarning( @@ -418,7 +479,7 @@ - (void)handleFatalError { - (void)handleNonFatalError { [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession) block:^{ RTCLog(@"Restarting capture session after error."); if (self.isRunning) { @@ -427,13 +488,13 @@ - (void)handleNonFatalError { }]; } -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST #pragma mark - UIApplication notifications - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession) block:^{ if (self.isRunning && !self.captureSession.isRunning) { RTCLog(@"Restarting capture session on active."); @@ -442,15 +503,74 @@ - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { }]; } -#endif // TARGET_OS_IPHONE +#endif #pragma mark - Private +- (AVCaptureSession *)createCaptureSession { +#if TARGET_MULTICAM_CAPABLE + if (AVCaptureMultiCamSession.isMultiCamSupported) { + // AVCaptureMultiCamSession exists and device supports multi-cam. + if (_sharedMultiCamSession == nil) { + _sharedMultiCamSession = [[AVCaptureMultiCamSession alloc] init]; + } + return _sharedMultiCamSession; + } else { + // AVCaptureMultiCamSession exists but device doesn't support multi-cam. + return [[AVCaptureSession alloc] init]; + } +#else + // AVCaptureMultiCamSession doesn't exist with this platform, use AVCaptureSession. 
+ return [[AVCaptureSession alloc] init]; +#endif +} + +- (BOOL)isUsingSelfCreatedMultiCamSession { +#if TARGET_MULTICAM_CAPABLE + return _sharedMultiCamSession != nil && _sharedMultiCamSession == _captureSession; +#else + return NO; +#endif +} + +- (void)startRunning { + BOOL shouldStartRunning = YES; +#if TARGET_MULTICAM_CAPABLE + if ([self isUsingSelfCreatedMultiCamSession]) { + os_unfair_lock_lock(&_sharedMultiCamSessionLock); + shouldStartRunning = _sharedMultiCamSessionCount == 0; + _sharedMultiCamSessionCount += 1; + os_unfair_lock_unlock(&_sharedMultiCamSessionLock); + } +#endif + if (shouldStartRunning) { + [_captureSession startRunning]; + } + self.isRunning = YES; +} + +- (void)stopRunning { + BOOL shouldStopRunning = YES; +#if TARGET_MULTICAM_CAPABLE + if ([self isUsingSelfCreatedMultiCamSession]) { + os_unfair_lock_lock(&_sharedMultiCamSessionLock); + if (_sharedMultiCamSessionCount > 0) { + _sharedMultiCamSessionCount -= 1; + shouldStopRunning = _sharedMultiCamSessionCount == 0; + } + os_unfair_lock_unlock(&_sharedMultiCamSessionLock); + } +#endif + if (shouldStopRunning) { + [_captureSession stopRunning]; + } + self.isRunning = NO; +} + - (dispatch_queue_t)frameQueue { if (!_frameQueue) { _frameQueue = RTCDispatchQueueCreateWithTarget( - "org.webrtc.cameravideocapturer.video", - DISPATCH_QUEUE_SERIAL, + "org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); } return _frameQueue; @@ -459,7 +579,7 @@ - (dispatch_queue_t)frameQueue { - (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession { NSAssert(_captureSession == nil, @"Setup capture session called twice."); _captureSession = captureSession; -#if defined(WEBRTC_IOS) +#if defined(WEBRTC_IOS) && !TARGET_OS_VISION _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority; _captureSession.usesApplicationAudioSession = NO; #endif @@ -469,7 +589,12 @@ - (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession { RTCLogError(@"Video data output unsupported."); return NO; } + +#if TARGET_MULTICAM_CAPABLE + [_captureSession addOutputWithNoConnections:_videoDataOutput]; +#else [_captureSession addOutput:_videoDataOutput]; +#endif return YES; } @@ -527,11 +652,13 @@ - (void)updateVideoDataOutputPixelFormat:(AVCaptureDeviceFormat *)format { - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger)fps { NSAssert([RTC_OBJC_TYPE(RTCDispatcher) - isOnQueueForType:RTCDispatcherTypeCaptureSession], + isOnQueueForType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession)], @"updateDeviceCaptureFormat must be called on the capture queue."); @try { _currentDevice.activeFormat = format; - _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); + if (![NSStringFromClass([_currentDevice class]) isEqualToString:@"AVCaptureDALDevice"]) { + _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); + } } @catch (NSException *exception) { RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo); @@ -539,14 +666,24 @@ - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format } } +- (void)updateZoomFactor { + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession)], + @"updateZoomFactor must be called on the capture queue."); + +#if (TARGET_OS_IOS || TARGET_OS_TV) && !TARGET_OS_VISION + CGFloat videoZoomFactor = [[self class] defaultZoomFactorForDeviceType:_currentDevice.deviceType]; + [_currentDevice setVideoZoomFactor:videoZoomFactor]; +#endif +} + - 
(void)reconfigureCaptureSessionInput { NSAssert( [RTC_OBJC_TYPE(RTCDispatcher) - isOnQueueForType:RTCDispatcherTypeCaptureSession], + isOnQueueForType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession)], @"reconfigureCaptureSessionInput must be called on the capture queue."); NSError *error = nil; - AVCaptureDeviceInput *input = - [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error]; + AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:_currentDevice + error:&error]; if (!input) { RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription); @@ -554,23 +691,47 @@ - (void)reconfigureCaptureSessionInput { } [_captureSession beginConfiguration]; for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) { - [_captureSession removeInput:oldInput]; + // Remove any old input with same device. + if ([_currentDevice isEqual:oldInput.device]) { + [_captureSession removeInput:oldInput]; + } } + if ([_captureSession canAddInput:input]) { +#if TARGET_MULTICAM_CAPABLE + [_captureSession addInputWithNoConnections:input]; + + AVCaptureInputPort *videoPort = input.ports.firstObject; + _captureConnection = [AVCaptureConnection connectionWithInputPorts:@[ videoPort ] + output:_videoDataOutput]; + + [_captureSession addConnection:_captureConnection]; +#else [_captureSession addInput:input]; +#endif } else { RTCLogError(@"Cannot add camera as an input to the session."); } + [_captureSession commitConfiguration]; } +#if TARGET_WATCH_DEVICE_ROTATION - (void)updateOrientation { - NSAssert([RTC_OBJC_TYPE(RTCDispatcher) - isOnQueueForType:RTCDispatcherTypeCaptureSession], - @"updateOrientation must be called on the capture queue."); -#if TARGET_OS_IPHONE - _orientation = [UIDevice currentDevice].orientation; -#endif + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTC_OBJC_TYPE(RTCDispatcherTypeMain)], + @"Retrieving device orientation must be called on the main queue."); + + // Must be called on the main queue. + UIWindowScene *windowScene = + (UIWindowScene *)[UIApplication sharedApplication].connectedScenes.anyObject; + UIInterfaceOrientation newOrientation = windowScene.interfaceOrientation; + + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession) + block:^{ + // Must be called on the capture queue + self->_orientation = newOrientation; + }]; } +#endif @end diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h new file mode 100644 index 0000000000..7118161d18 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h @@ -0,0 +1,48 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#import "RTCDesktopCapturer.h"
+
+#include "sdk/objc/native/src/objc_desktop_capture.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE(RTCDesktopCapturerPrivateDelegate) <NSObject>
+- (void)didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+- (void)didSourceCaptureStart;
+- (void)didSourceCapturePaused;
+- (void)didSourceCaptureStop;
+- (void)didSourceCaptureError;
+@end
+
+@interface RTC_OBJC_TYPE(RTCDesktopCapturer) ()
+
+@property(nonatomic, readonly) std::shared_ptr<webrtc::ObjCDesktopCapturer> nativeCapturer;
+
+- (void)didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame;
+
+- (void)didSourceCaptureStart;
+
+- (void)didSourceCapturePaused;
+
+- (void)didSourceCaptureStop;
+
+- (void)didSourceCaptureError;
+
+@end
+
+NS_ASSUME_NONNULL_END
\ No newline at end of file
diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.h b/sdk/objc/components/capturer/RTCDesktopCapturer.h
new file mode 100644
index 0000000000..48c76310a4
--- /dev/null
+++ b/sdk/objc/components/capturer/RTCDesktopCapturer.h
@@ -0,0 +1,60 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+#import <AppKit/AppKit.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoCapturer.h"
+#import "RTCDesktopSource.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@class RTC_OBJC_TYPE(RTCDesktopCapturer);
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE(RTCDesktopCapturerDelegate) <NSObject>
+- (void)didSourceCaptureStart:(RTC_OBJC_TYPE(RTCDesktopCapturer) *)capturer;
+
+- (void)didSourceCapturePaused:(RTC_OBJC_TYPE(RTCDesktopCapturer) *)capturer;
+
+- (void)didSourceCaptureStop:(RTC_OBJC_TYPE(RTCDesktopCapturer) *)capturer;
+
+- (void)didSourceCaptureError:(RTC_OBJC_TYPE(RTCDesktopCapturer) *)capturer;
+@end
+
+RTC_OBJC_EXPORT
+// Screen capturer that implements RTCVideoCapturer. Delivers frames to a
+// RTCVideoCapturerDelegate (usually RTCVideoSource).
+@interface RTC_OBJC_TYPE (RTCDesktopCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer)
+
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCDesktopSource) *source;
+
+- (instancetype)initWithSource:(RTC_OBJC_TYPE(RTCDesktopSource) *)source
+                      delegate:(__weak id<RTC_OBJC_TYPE(RTCDesktopCapturerDelegate)>)delegate
+               captureDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)captureDelegate;
+
+- (instancetype)initWithDefaultScreen:(__weak id<RTC_OBJC_TYPE(RTCDesktopCapturerDelegate)>)delegate
+                      captureDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)captureDelegate;
+
+- (void)startCapture;
+
+- (void)startCaptureWithFPS:(NSInteger)fps;
+
+- (void)stopCapture;
+
+- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.mm b/sdk/objc/components/capturer/RTCDesktopCapturer.mm
new file mode 100644
index 0000000000..fc9244850e
--- /dev/null
+++ b/sdk/objc/components/capturer/RTCDesktopCapturer.mm
@@ -0,0 +1,104 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "base/RTCLogging.h"
+#import "base/RTCVideoFrameBuffer.h"
+
+#import "components/video_frame_buffer/RTCCVPixelBuffer.h"
+
+#import "RTCDesktopCapturer.h"
+#import "RTCDesktopCapturer+Private.h"
+#import "RTCDesktopSource+Private.h"
+
+@implementation RTC_OBJC_TYPE (RTCDesktopCapturer) {
+  __weak id<RTC_OBJC_TYPE(RTCDesktopCapturerDelegate)> _delegate;
+}
+
+@synthesize nativeCapturer = _nativeCapturer;
+@synthesize source = _source;
+
+- (instancetype)initWithSource:(RTC_OBJC_TYPE(RTCDesktopSource) *)source
+                      delegate:(__weak id<RTC_OBJC_TYPE(RTCDesktopCapturerDelegate)>)delegate
+               captureDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)captureDelegate {
+  if (self = [super initWithDelegate:captureDelegate]) {
+    webrtc::DesktopType captureType = webrtc::kScreen;
+    if (source.sourceType == RTC_OBJC_TYPE(RTCDesktopSourceTypeWindow)) {
+      captureType = webrtc::kWindow;
+    }
+    _nativeCapturer = std::make_shared<webrtc::ObjCDesktopCapturer>(
+        captureType, source.nativeMediaSource->id(), self);
+    _source = source;
+    _delegate = delegate;
+  }
+  return self;
+}
+
+- (instancetype)initWithDefaultScreen:(__weak id<RTC_OBJC_TYPE(RTCDesktopCapturerDelegate)>)delegate
+                      captureDelegate:(__weak id<RTC_OBJC_TYPE(RTCVideoCapturerDelegate)>)captureDelegate {
+  if (self = [super initWithDelegate:captureDelegate]) {
+    _nativeCapturer = std::make_unique<webrtc::ObjCDesktopCapturer>(webrtc::kScreen, -1, self);
+    _source = nil;
+    _delegate = delegate;
+  }
+  return self;
+}
+
+- (void)dealloc {
+  _nativeCapturer->Stop();
+  _nativeCapturer = nullptr;
+}
+
+- (void)startCapture {
+  [self didSourceCaptureStart];
+  _nativeCapturer->Start(30);
+}
+
+- (void)startCaptureWithFPS:(NSInteger)fps {
+  _nativeCapturer->Start(fps);
+}
+
+- (void)didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame {
+  [self.delegate capturer:self didCaptureVideoFrame:frame];
+}
+
+- (void)stopCapture {
+  _nativeCapturer->Stop();
+}
+
+- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler {
+  [self stopCapture];
+  if (completionHandler != nil) {
+    completionHandler();
+  }
+}
+
+- (void)didSourceCaptureStart {
+  [_delegate didSourceCaptureStart:self];
+}
+
+- (void)didSourceCapturePaused {
+  [_delegate didSourceCapturePaused:self];
+}
+
+- (void)didSourceCaptureStop {
+  [_delegate didSourceCaptureStop:self];
+}
+
+- (void)didSourceCaptureError {
+  [_delegate didSourceCaptureError:self];
+}
+
+@end
diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h b/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h
new file mode 100644
index 0000000000..eb1e76ddbb
--- /dev/null
+++ b/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "RTCDesktopMediaList.h"
+
+namespace webrtc {
+class ObjCDesktopMediaList;
+class MediaSource;
+}  // namespace webrtc
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE(RTCDesktopMediaList) ()
+
+@property(nonatomic, readonly) std::shared_ptr<webrtc::ObjCDesktopMediaList> nativeMediaList;
+
+- (void)mediaSourceAdded:(webrtc::MediaSource *)source;
+
+- (void)mediaSourceRemoved:(webrtc::MediaSource *)source;
+
+- (void)mediaSourceNameChanged:(webrtc::MediaSource *)source;
+
+- (void)mediaSourceThumbnailChanged:(webrtc::MediaSource *)source;
+
+@end
+
+NS_ASSUME_NONNULL_END
\ No newline at end of file
diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList.h b/sdk/objc/components/capturer/RTCDesktopMediaList.h
new file mode 100644
index 0000000000..30360d2d24
--- /dev/null
+++ b/sdk/objc/components/capturer/RTCDesktopMediaList.h
@@ -0,0 +1,51 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+#import <AppKit/AppKit.h>
+
+#import "RTCMacros.h"
+#import "RTCDesktopSource.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+RTC_OBJC_EXPORT
+@protocol RTC_OBJC_TYPE
+(RTCDesktopMediaListDelegate) <NSObject>
+
+- (void)didDesktopSourceAdded:(RTC_OBJC_TYPE(RTCDesktopSource) *)source;
+
+- (void)didDesktopSourceRemoved:(RTC_OBJC_TYPE(RTCDesktopSource) *)source;
+
+- (void)didDesktopSourceNameChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *)source;
+
+- (void)didDesktopSourceThumbnailChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *)source;
+@end
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDesktopMediaList) : NSObject
+
+- (instancetype)initWithType:(RTC_OBJC_TYPE(RTCDesktopSourceType))type
+                    delegate:(__weak id<RTC_OBJC_TYPE(RTCDesktopMediaListDelegate)>)delegate;
+
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCDesktopSourceType) sourceType;
+
+- (int32_t)UpdateSourceList:(BOOL)forceReload updateAllThumbnails:(BOOL)updateThumbnail;
+
+- (NSArray<RTC_OBJC_TYPE(RTCDesktopSource) *> *)getSources;
+
+@end
+
+NS_ASSUME_NONNULL_END
diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList.mm b/sdk/objc/components/capturer/RTCDesktopMediaList.mm
new file mode 100644
index 0000000000..c274ad7825
--- /dev/null
+++ b/sdk/objc/components/capturer/RTCDesktopMediaList.mm
@@ -0,0 +1,99 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import "RTCDesktopMediaList.h"
+
+#import "RTCDesktopSource+Private.h"
+#import "RTCDesktopMediaList+Private.h"
+
+@implementation RTC_OBJC_TYPE(RTCDesktopMediaList) {
+  RTC_OBJC_TYPE(RTCDesktopSourceType) _sourceType;
+  NSMutableArray<RTC_OBJC_TYPE(RTCDesktopSource) *> *_sources;
+  __weak id<RTC_OBJC_TYPE(RTCDesktopMediaListDelegate)> _delegate;
+}
+
+@synthesize sourceType = _sourceType;
+@synthesize nativeMediaList = _nativeMediaList;
+
+- (instancetype)initWithType:(RTC_OBJC_TYPE(RTCDesktopSourceType))type
+                    delegate:(__weak id<RTC_OBJC_TYPE(RTCDesktopMediaListDelegate)>)delegate {
+  if (self = [super init]) {
+    webrtc::DesktopType captureType = webrtc::kScreen;
+    if (type == RTC_OBJC_TYPE(RTCDesktopSourceTypeWindow)) {
+      captureType = webrtc::kWindow;
+    }
+    _nativeMediaList = std::make_shared<webrtc::ObjCDesktopMediaList>(captureType, self);
+    _sourceType = type;
+    _delegate = delegate;
+  }
+  return self;
+}
+
+- (int32_t)UpdateSourceList:(BOOL)forceReload updateAllThumbnails:(BOOL)updateThumbnail {
+  return _nativeMediaList->UpdateSourceList(forceReload, updateThumbnail);
+}
+
+- (NSArray<RTC_OBJC_TYPE(RTCDesktopSource) *> *)getSources {
+  _sources = [NSMutableArray array];
+  int sourceCount = _nativeMediaList->GetSourceCount();
+  for (int i = 0; i < sourceCount; i++) {
+    webrtc::MediaSource *mediaSource = _nativeMediaList->GetSource(i);
+    [_sources addObject:[[RTC_OBJC_TYPE(RTCDesktopSource) alloc] initWithNativeSource:mediaSource
+                                                                           sourceType:_sourceType]];
+  }
+  return _sources;
+}
+
+- (void)mediaSourceAdded:(webrtc::MediaSource *)source {
+  RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource =
+      [[RTC_OBJC_TYPE(RTCDesktopSource) alloc] initWithNativeSource:source sourceType:_sourceType];
+  [_sources addObject:desktopSource];
+  [_delegate didDesktopSourceAdded:desktopSource];
+}
+
+- (void)mediaSourceRemoved:(webrtc::MediaSource *)source {
+  RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source];
+  if (desktopSource != nil) {
+    [_sources removeObject:desktopSource];
+    [_delegate didDesktopSourceRemoved:desktopSource];
+  }
+}
+
+- (void)mediaSourceNameChanged:(webrtc::MediaSource *)source {
+  RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source];
+  if (desktopSource != nil) {
+    [desktopSource setName:source->name().c_str()];
+    [_delegate didDesktopSourceNameChanged:desktopSource];
+  }
+}
+
+- (void)mediaSourceThumbnailChanged:(webrtc::MediaSource *)source {
+  RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source];
+  if (desktopSource != nil) {
+    [desktopSource setThumbnail:source->thumbnail()];
+    [_delegate didDesktopSourceThumbnailChanged:desktopSource];
+  }
+}
+
+- (RTC_OBJC_TYPE(RTCDesktopSource) *)getSourceById:(webrtc::MediaSource *)source {
+  NSEnumerator *enumerator = [_sources objectEnumerator];
+  RTC_OBJC_TYPE(RTCDesktopSource) *object;
+  while ((object = enumerator.nextObject) != nil) {
+    if (object.nativeMediaSource == source) {
+      return object;
+    }
+  }
+  return nil;
+}
+
+@end
\ No newline at end of file
diff --git a/sdk/objc/components/capturer/RTCDesktopSource+Private.h b/sdk/objc/components/capturer/RTCDesktopSource+Private.h
new file mode 100644
index 0000000000..787ba8bb65
--- /dev/null
+++ b/sdk/objc/components/capturer/RTCDesktopSource+Private.h
@@ -0,0 +1,37 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#import <Foundation/Foundation.h>
+
+#import "RTCDesktopSource.h"
+
+#include "sdk/objc/native/src/objc_desktop_media_list.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+@interface RTC_OBJC_TYPE(RTCDesktopSource) ()
+
+- (instancetype)initWithNativeSource:(webrtc::MediaSource *)nativeSource
+                          sourceType:(RTC_OBJC_TYPE(RTCDesktopSourceType))sourceType;
+
+@property(nonatomic, readonly) webrtc::MediaSource *nativeMediaSource;
+
+- (void)setName:(const char *)name;
+
+- (void)setThumbnail:(std::vector<unsigned char>)thumbnail;
+
+@end
+
+NS_ASSUME_NONNULL_END
\ No newline at end of file
diff --git a/sdk/objc/components/capturer/RTCDesktopSource.h b/sdk/objc/components/capturer/RTCDesktopSource.h
new file mode 100644
index 0000000000..bad716e19d
--- /dev/null
+++ b/sdk/objc/components/capturer/RTCDesktopSource.h
@@ -0,0 +1,40 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#import <Foundation/Foundation.h>
+#import <AppKit/AppKit.h>
+#import <CoreGraphics/CoreGraphics.h>
+
+#import "RTCMacros.h"
+
+typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCDesktopSourceType)) {
+  RTC_OBJC_TYPE(RTCDesktopSourceTypeScreen),
+  RTC_OBJC_TYPE(RTCDesktopSourceTypeWindow),
+};
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCDesktopSource) : NSObject
+
+@property(nonatomic, readonly) NSString *sourceId;
+
+@property(nonatomic, readonly) NSString *name;
+
+@property(nonatomic, readonly) NSImage *thumbnail;
+
+@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCDesktopSourceType) sourceType;
+
+- (NSImage *)UpdateThumbnail;
+
+@end
\ No newline at end of file
diff --git a/sdk/objc/components/capturer/RTCDesktopSource.mm b/sdk/objc/components/capturer/RTCDesktopSource.mm
new file mode 100644
index 0000000000..0b0c2a4021
--- /dev/null
+++ b/sdk/objc/components/capturer/RTCDesktopSource.mm
@@ -0,0 +1,68 @@
+/*
+ * Copyright 2022 LiveKit
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCDesktopSource.h"
+#import "RTCDesktopSource+Private.h"
+
+@implementation RTC_OBJC_TYPE(RTCDesktopSource) {
+  NSString *_sourceId;
+  NSString *_name;
+  NSImage *_thumbnail;
+  RTC_OBJC_TYPE(RTCDesktopSourceType) _sourceType;
+}
+
+@synthesize sourceId = _sourceId;
+@synthesize name = _name;
+@synthesize thumbnail = _thumbnail;
+@synthesize sourceType = _sourceType;
+@synthesize nativeMediaSource = _nativeMediaSource;
+
+- (instancetype)initWithNativeSource:(webrtc::MediaSource *)nativeSource
+                          sourceType:(RTC_OBJC_TYPE(RTCDesktopSourceType))sourceType {
+  if (self = [super init]) {
+    _nativeMediaSource = nativeSource;
+    _sourceId = [NSString stringWithUTF8String:std::to_string(nativeSource->id()).c_str()];
+    _name = [NSString stringWithUTF8String:nativeSource->name().c_str()];
+    _thumbnail = [self createThumbnailFromNativeSource:nativeSource->thumbnail()];
+    _sourceType = sourceType;
+  }
+  return self;
+}
+
+- (NSImage *)createThumbnailFromNativeSource:(std::vector<unsigned char>)thumbnail {
+  NSData *data = [[NSData alloc] initWithBytes:thumbnail.data() length:thumbnail.size()];
+  NSImage *image = [[NSImage alloc] initWithData:data];
+  return image;
+}
+
+- (NSImage *)UpdateThumbnail {
+  if (_nativeMediaSource->UpdateThumbnail()) {
+    _thumbnail = [self createThumbnailFromNativeSource:_nativeMediaSource->thumbnail()];
+  }
+  return _thumbnail;
+}
+
+- (void)setName:(const char *)name {
+  _name = [NSString stringWithUTF8String:name];
+}
+
+- (void)setThumbnail:(std::vector<unsigned char>)thumbnail {
+  _thumbnail = [self createThumbnailFromNativeSource:thumbnail];
+}
+
+@end
diff --git a/sdk/objc/components/capturer/RTCFileVideoCapturer.m b/sdk/objc/components/capturer/RTCFileVideoCapturer.m
index 913e54e715..950b3054fc 100644
--- a/sdk/objc/components/capturer/RTCFileVideoCapturer.m
+++ b/sdk/objc/components/capturer/RTCFileVideoCapturer.m
@@ -15,18 +15,18 @@
 #import "components/video_frame_buffer/RTCCVPixelBuffer.h"
 #include "rtc_base/system/gcd_helpers.h"
 
-NSString *const kRTCFileVideoCapturerErrorDomain =
+NSString *const RTC_CONSTANT_TYPE(RTCFileVideoCapturerErrorDomain) =
     @"org.webrtc.RTC_OBJC_TYPE(RTCFileVideoCapturer)";
 
-typedef NS_ENUM(NSInteger, RTCFileVideoCapturerErrorCode) {
-  RTCFileVideoCapturerErrorCode_CapturerRunning = 2000,
-  RTCFileVideoCapturerErrorCode_FileNotFound
+typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCFileVideoCapturerErrorCode)) {
+  RTC_OBJC_TYPE(RTCFileVideoCapturerErrorCode_CapturerRunning) = 2000,
+  RTC_OBJC_TYPE(RTCFileVideoCapturerErrorCode_FileNotFound)
 };
 
-typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) {
-  RTCFileVideoCapturerStatusNotInitialized,
-  RTCFileVideoCapturerStatusStarted,
-  RTCFileVideoCapturerStatusStopped
+typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCFileVideoCapturerStatus)) {
+  RTC_OBJC_TYPE(RTCFileVideoCapturerStatusNotInitialized),
+  RTC_OBJC_TYPE(RTCFileVideoCapturerStatusStarted),
+  RTC_OBJC_TYPE(RTCFileVideoCapturerStatusStopped)
 };
 
 @interface RTC_OBJC_TYPE (RTCFileVideoCapturer)
@@ -37,7 +37,7 @@ @interface RTC_OBJC_TYPE (RTCFileVideoCapturer)
 @implementation RTC_OBJC_TYPE (RTCFileVideoCapturer) {
   AVAssetReader *_reader;
   AVAssetReaderTrackOutput *_outTrack;
-  RTCFileVideoCapturerStatus _status;
+  RTC_OBJC_TYPE(RTCFileVideoCapturerStatus) _status;
   dispatch_queue_t _frameQueue;
 }
 
@@ -46,33 +46,29 @@ @implementation RTC_OBJC_TYPE (RTCFileVideoCapturer) {
 - (void)startCapturingFromFileNamed:(NSString *)nameOfFile
                             onError:(RTCFileVideoCapturerErrorBlock)errorBlock {
-  if (_status == RTCFileVideoCapturerStatusStarted) {
+  if (_status
== RTC_OBJC_TYPE(RTCFileVideoCapturerStatusStarted)) { NSError *error = - [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain - code:RTCFileVideoCapturerErrorCode_CapturerRunning - userInfo:@{ - NSUnderlyingErrorKey : @"Capturer has been started." - }]; + [NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCFileVideoCapturerErrorDomain) + code:RTC_OBJC_TYPE(RTCFileVideoCapturerErrorCode_CapturerRunning) + userInfo:@{NSUnderlyingErrorKey : @"Capturer has been started."}]; errorBlock(error); return; } else { - _status = RTCFileVideoCapturerStatusStarted; + _status = RTC_OBJC_TYPE(RTCFileVideoCapturerStatusStarted); } - dispatch_async( - dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ - NSString *pathForFile = [self pathForFileName:nameOfFile]; - if (!pathForFile) { - NSString *errorString = [NSString - stringWithFormat:@"File %@ not found in bundle", nameOfFile]; - NSError *error = [NSError - errorWithDomain:kRTCFileVideoCapturerErrorDomain - code:RTCFileVideoCapturerErrorCode_FileNotFound - userInfo:@{NSUnderlyingErrorKey : errorString}]; - errorBlock(error); - return; - } + dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ + NSString *pathForFile = [self pathForFileName:nameOfFile]; + if (!pathForFile) { + NSString *errorString = + [NSString stringWithFormat:@"File %@ not found in bundle", nameOfFile]; + NSError *error = [NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCFileVideoCapturerErrorDomain) + code:RTC_OBJC_TYPE(RTCFileVideoCapturerErrorCode_FileNotFound) + userInfo:@{NSUnderlyingErrorKey : errorString}]; + errorBlock(error); + return; + } self.lastPresentationTime = CMTimeMake(0, 0); @@ -107,7 +103,7 @@ - (void)setupReaderOnError:(RTCFileVideoCapturerErrorBlock)errorBlock { [self readNextBuffer]; } - (void)stopCapture { - _status = RTCFileVideoCapturerStatusStopped; + _status = RTC_OBJC_TYPE(RTCFileVideoCapturerStatusStopped); RTCLog(@"File capturer stopped."); } @@ -135,7 +131,7 @@ - (dispatch_queue_t)frameQueue { } - (void)readNextBuffer { - if (_status == RTCFileVideoCapturerStatusStopped) { + if (_status == RTC_OBJC_TYPE(RTCFileVideoCapturerStatusStopped)) { [_reader cancelReading]; _reader = nil; return; diff --git a/sdk/objc/components/network/RTCNetworkMonitor+Private.h b/sdk/objc/components/network/RTCNetworkMonitor+Private.h index b5c786be18..f3761f7ba3 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor+Private.h +++ b/sdk/objc/components/network/RTCNetworkMonitor+Private.h @@ -9,16 +9,18 @@ */ #import "RTCNetworkMonitor.h" +#import "RTCMacros.h" #include "sdk/objc/native/src/network_monitor_observer.h" -@interface RTCNetworkMonitor () +@interface RTC_OBJC_TYPE (RTCNetworkMonitor) +() -/** `observer` is a raw pointer and should be kept alive - * for this object's lifetime. - */ -- (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer - NS_DESIGNATED_INITIALIZER; + /** `observer` is a raw pointer and should be kept alive + * for this object's lifetime. + */ + - (instancetype)initWithObserver + : (webrtc::NetworkMonitorObserver *)observer NS_DESIGNATED_INITIALIZER; /** Stops the receiver from posting updates to `observer`. 
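 * A teardown sketch (illustrative only; `monitor` is a placeholder for an
 * instance created via initWithObserver:): because the observer is a raw,
 * unretained pointer, stop the monitor before the observer is destroyed.
 *
 *   [monitor stop];
 *   monitor = nil;  // only now is it safe to free the observer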
*/ - (void)stop; diff --git a/sdk/objc/components/network/RTCNetworkMonitor.h b/sdk/objc/components/network/RTCNetworkMonitor.h index 21d22f5463..4b0cb4baf0 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.h +++ b/sdk/objc/components/network/RTCNetworkMonitor.h @@ -10,12 +10,14 @@ #import +#import "RTCMacros.h" + NS_ASSUME_NONNULL_BEGIN /** Listens for NWPathMonitor updates and forwards the results to a C++ * observer. */ -@interface RTCNetworkMonitor : NSObject +@interface RTC_OBJC_TYPE (RTCNetworkMonitor): NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/network/RTCNetworkMonitor.mm b/sdk/objc/components/network/RTCNetworkMonitor.mm index e3f312362f..8dbc53551e 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.mm +++ b/sdk/objc/components/network/RTCNetworkMonitor.mm @@ -47,7 +47,7 @@ } // namespace -@implementation RTCNetworkMonitor { +@implementation RTC_OBJC_TYPE (RTCNetworkMonitor) { webrtc::NetworkMonitorObserver *_observer; nw_path_monitor_t _pathMonitor; dispatch_queue_t _monitorQueue; @@ -65,9 +65,9 @@ - (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer { return nil; } RTCLog(@"NW path monitor created."); - __weak RTCNetworkMonitor *weakSelf = self; + __weak RTC_OBJC_TYPE(RTCNetworkMonitor) *weakSelf = self; nw_path_monitor_set_update_handler(_pathMonitor, ^(nw_path_t path) { - RTCNetworkMonitor *strongSelf = weakSelf; + RTC_OBJC_TYPE(RTCNetworkMonitor) *strongSelf = weakSelf; if (strongSelf == nil) { return; } @@ -103,8 +103,7 @@ - (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer { }); nw_path_monitor_set_queue( _pathMonitor, - [RTC_OBJC_TYPE(RTCDispatcher) - dispatchQueueForType:RTCDispatcherTypeNetworkMonitor]); + [RTC_OBJC_TYPE(RTCDispatcher) dispatchQueueForType:RTC_OBJC_TYPE(RTCDispatcherTypeNetworkMonitor)]); nw_path_monitor_start(_pathMonitor); } } diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h index e5987fe22a..c4e2724042 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h @@ -13,5 +13,5 @@ #import "RTCMTLRenderer.h" NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLI420Renderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE(RTCMTLI420Renderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm index c085b9260f..246bceb439 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm @@ -70,7 +70,7 @@ fragment half4 fragmentColorConversion( return half4(out); }); -@implementation RTCMTLI420Renderer { +@implementation RTC_OBJC_TYPE(RTCMTLI420Renderer) { // Textures. id _yTexture; id _uTexture; diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m deleted file mode 100644 index 168045c09d..0000000000 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m +++ /dev/null @@ -1,123 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. 
All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. - */ - -#import "RTCMTLNSVideoView.h" - -#import -#import - -#import "base/RTCVideoFrame.h" - -#import "RTCMTLI420Renderer.h" - -@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) -() @property(nonatomic) id renderer; -@property(nonatomic, strong) MTKView *metalView; -@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; -@end - -@implementation RTC_OBJC_TYPE (RTCMTLNSVideoView) { - id _renderer; -} - -@synthesize delegate = _delegate; -@synthesize renderer = _renderer; -@synthesize metalView = _metalView; -@synthesize videoFrame = _videoFrame; - -- (instancetype)initWithFrame:(CGRect)frameRect { - self = [super initWithFrame:frameRect]; - if (self) { - [self configure]; - } - return self; -} - -- (instancetype)initWithCoder:(NSCoder *)aCoder { - self = [super initWithCoder:aCoder]; - if (self) { - [self configure]; - } - return self; -} - -#pragma mark - Private - -+ (BOOL)isMetalAvailable { - return [MTLCopyAllDevices() count] > 0; -} - -- (void)configure { - if ([[self class] isMetalAvailable]) { - _metalView = [[MTKView alloc] initWithFrame:self.bounds]; - [self addSubview:_metalView]; - _metalView.layerContentsPlacement = - NSViewLayerContentsPlacementScaleProportionallyToFit; - _metalView.translatesAutoresizingMaskIntoConstraints = NO; - _metalView.framebufferOnly = YES; - _metalView.delegate = self; - - _renderer = [[RTCMTLI420Renderer alloc] init]; - if (![(RTCMTLI420Renderer *)_renderer addRenderingDestination:_metalView]) { - _renderer = nil; - }; - } -} - -- (void)updateConstraints { - NSDictionary *views = NSDictionaryOfVariableBindings(_metalView); - - NSArray *constraintsHorizontal = - [NSLayoutConstraint constraintsWithVisualFormat:@"H:|-0-[_metalView]-0-|" - options:0 - metrics:nil - views:views]; - [self addConstraints:constraintsHorizontal]; - - NSArray *constraintsVertical = - [NSLayoutConstraint constraintsWithVisualFormat:@"V:|-0-[_metalView]-0-|" - options:0 - metrics:nil - views:views]; - [self addConstraints:constraintsVertical]; - [super updateConstraints]; -} - -#pragma mark - MTKViewDelegate methods -- (void)drawInMTKView:(nonnull MTKView *)view { - if (self.videoFrame == nil) { - return; - } - if (view == self.metalView) { - [_renderer drawFrame:self.videoFrame]; - } -} - -- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size { -} - -#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) - -- (void)setSize:(CGSize)size { - _metalView.drawableSize = size; - dispatch_async(dispatch_get_main_queue(), ^{ - [self.delegate videoView:self didChangeVideoSize:size]; - }); - [_metalView draw]; -} - -- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { - if (frame == nil) { - return; - } - self.videoFrame = [frame newI420VideoFrame]; -} - -@end diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h index 866b7ea17e..125612a269 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h @@ -13,6 +13,6 @@ #import "RTCMTLRenderer.h" NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLNV12Renderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE(RTCMTLNV12Renderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm index 2849e0fef0..68ebb88979 100644 --- 
a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm @@ -63,7 +63,7 @@ fragment half4 fragmentColorConversion( return half4(out); }); -@implementation RTCMTLNV12Renderer { +@implementation RTC_OBJC_TYPE(RTCMTLNV12Renderer) { // Textures. CVMetalTextureCacheRef _textureCache; id _yTexture; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h index 9db422cd22..5e355a8504 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h @@ -11,12 +11,13 @@ #import #import "RTCMTLRenderer.h" +#import "RTCMacros.h" /** @abstract RGB/BGR renderer. * @discussion This renderer handles both kCVPixelFormatType_32BGRA and * kCVPixelFormatType_32ARGB. */ NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLRGBRenderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE (RTCMTLRGBRenderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm index df59c6fdfd..71a8270a5c 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm @@ -57,7 +57,7 @@ fragment half4 fragmentColorConversion( return out; }); -@implementation RTCMTLRGBRenderer { +@implementation RTC_OBJC_TYPE (RTCMTLRGBRenderer) { // Textures. CVMetalTextureCacheRef _textureCache; id _texture; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h index 8e99fc4614..43540dce25 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCMTLRenderer (Private) +@interface RTC_OBJC_TYPE(RTCMTLRenderer) (Private) - (nullable id)currentMetalDevice; - (NSString *)shaderSource; - (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h index 84aa3fe7fc..f84a6f0af3 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h @@ -21,7 +21,7 @@ NS_ASSUME_NONNULL_BEGIN /** * Protocol defining ability to render RTCVideoFrame in Metal enabled views. */ -@protocol RTCMTLRenderer +@protocol RTC_OBJC_TYPE(RTCMTLRenderer) /** * Method to be implemented to perform actual rendering of the provided frame. @@ -37,7 +37,7 @@ NS_ASSUME_NONNULL_BEGIN * for performing cleanups. */ -#if TARGET_OS_IOS +#if TARGET_OS_IOS || TARGET_OS_TV - (BOOL)addRenderingDestination:(__kindof UIView *)view; #else - (BOOL)addRenderingDestination:(__kindof NSView *)view; @@ -49,7 +49,7 @@ NS_ASSUME_NONNULL_BEGIN * Implementation of RTCMTLRenderer protocol. */ NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLRenderer : NSObject +@interface RTC_OBJC_TYPE(RTCMTLRenderer) : NSObject /** @abstract A wrapped RTCVideoRotation, or nil. 
@discussion When not nil, the rotation of the actual frame is ignored when diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm index 5323633415..81b774057d 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm @@ -36,7 +36,7 @@ static inline void getCubeVertexData(int cropX, int cropHeight, size_t frameWidth, size_t frameHeight, - RTCVideoRotation rotation, + RTC_OBJC_TYPE(RTCVideoRotation) rotation, float *buffer) { // The computed values are the adjusted texture coordinates, in [0..1]. // For the left and top, 0.0 means no cropping and e.g. 0.2 means we're @@ -52,80 +52,32 @@ static inline void getCubeVertexData(int cropX, // cropping and rotation into account. The first two columns are view // coordinates, the last two are texture coordinates. switch (rotation) { - case RTCVideoRotation_0: { - float values[16] = {-1.0, - -1.0, - cropLeft, - cropBottom, - 1.0, - -1.0, - cropRight, - cropBottom, - -1.0, - 1.0, - cropLeft, - cropTop, - 1.0, - 1.0, - cropRight, - cropTop}; + case RTC_OBJC_TYPE(RTCVideoRotation_0): { + float values[16] = {-1.0, -1.0, cropLeft, cropBottom, + 1.0, -1.0, cropRight, cropBottom, + -1.0, 1.0, cropLeft, cropTop, + 1.0, 1.0, cropRight, cropTop}; memcpy(buffer, &values, sizeof(values)); } break; - case RTCVideoRotation_90: { - float values[16] = {-1.0, - -1.0, - cropRight, - cropBottom, - 1.0, - -1.0, - cropRight, - cropTop, - -1.0, - 1.0, - cropLeft, - cropBottom, - 1.0, - 1.0, - cropLeft, - cropTop}; + case RTC_OBJC_TYPE(RTCVideoRotation_90): { + float values[16] = {-1.0, -1.0, cropRight, cropBottom, + 1.0, -1.0, cropRight, cropTop, + -1.0, 1.0, cropLeft, cropBottom, + 1.0, 1.0, cropLeft, cropTop}; memcpy(buffer, &values, sizeof(values)); } break; - case RTCVideoRotation_180: { - float values[16] = {-1.0, - -1.0, - cropRight, - cropTop, - 1.0, - -1.0, - cropLeft, - cropTop, - -1.0, - 1.0, - cropRight, - cropBottom, - 1.0, - 1.0, - cropLeft, - cropBottom}; + case RTC_OBJC_TYPE(RTCVideoRotation_180): { + float values[16] = {-1.0, -1.0, cropRight, cropTop, + 1.0, -1.0, cropLeft, cropTop, + -1.0, 1.0, cropRight, cropBottom, + 1.0, 1.0, cropLeft, cropBottom}; memcpy(buffer, &values, sizeof(values)); } break; - case RTCVideoRotation_270: { - float values[16] = {-1.0, - -1.0, - cropLeft, - cropTop, - 1.0, - -1.0, - cropLeft, - cropBottom, - -1.0, - 1.0, - cropRight, - cropTop, - 1.0, - 1.0, - cropRight, - cropBottom}; + case RTC_OBJC_TYPE(RTCVideoRotation_270): { + float values[16] = {-1.0, -1.0, cropLeft, cropTop, + 1.0, -1.0, cropLeft, cropBottom, + -1.0, 1.0, cropRight, cropTop, + 1.0, 1.0, cropRight, cropBottom}; memcpy(buffer, &values, sizeof(values)); } break; } @@ -136,7 +88,7 @@ static inline void getCubeVertexData(int cropX, // In future we might use triple buffering method if it improves performance. static const NSInteger kMaxInflightBuffers = 1; -@implementation RTCMTLRenderer { +@implementation RTC_OBJC_TYPE(RTCMTLRenderer) { __kindof MTKView *_view; // Controller. @@ -159,7 +111,7 @@ @implementation RTCMTLRenderer { int _oldCropHeight; int _oldCropX; int _oldCropY; - RTCVideoRotation _oldRotation; + RTC_OBJC_TYPE(RTCVideoRotation) _oldRotation; } @synthesize rotationOverride = _rotationOverride; @@ -226,7 +178,7 @@ - (void)getWidth:(int *)width - (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { // Apply rotation override if set. 
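// A caller-side sketch of how the override gets boxed (illustrative only;
// `videoView` is a placeholder): rotationOverride is an NSValue so it can be
// nil when unset, matching the getValue:size: read just below.
//
//   RTC_OBJC_TYPE(RTCVideoRotation) rotation = RTC_OBJC_TYPE(RTCVideoRotation_90);
//   videoView.rotationOverride =
//       [NSValue valueWithBytes:&rotation
//                      objCType:@encode(RTC_OBJC_TYPE(RTCVideoRotation))];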
- RTCVideoRotation rotation; + RTC_OBJC_TYPE(RTCVideoRotation) rotation; NSValue *rotationOverride = self.rotationOverride; if (rotationOverride) { #if defined(__IPHONE_11_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \ diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h index b9c029f830..df45366472 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h @@ -10,6 +10,11 @@ #import +#if TARGET_OS_OSX +#import +#endif + +#import "RTCMacros.h" #import "RTCVideoFrame.h" #import "RTCVideoRenderer.h" #import "sdk/objc/base/RTCMacros.h" @@ -22,14 +27,26 @@ NS_ASSUME_NONNULL_BEGIN * It has id property that renders video frames in the view's * bounds using Metal. */ +#if TARGET_OS_IPHONE NS_CLASS_AVAILABLE_IOS(9) +#elif TARGET_OS_OSX +NS_AVAILABLE_MAC(10.11) +#endif RTC_OBJC_EXPORT -@interface RTC_OBJC_TYPE (RTCMTLVideoView) : UIView +@interface RTC_OBJC_TYPE (RTCMTLVideoView) : + +#if TARGET_OS_IPHONE + UIView +#elif TARGET_OS_OSX + NSView +#endif @property(nonatomic, weak) id delegate; +#if TARGET_OS_IPHONE @property(nonatomic) UIViewContentMode videoContentMode; +#endif /** @abstract Enables/disables rendering. */ @@ -39,6 +56,8 @@ RTC_OBJC_EXPORT */ @property(nonatomic, nullable) NSValue* rotationOverride; ++ (BOOL)isMetalAvailable; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m index 7b1755dc6c..f998e2a09f 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m @@ -22,6 +22,8 @@ #import "RTCMTLNV12Renderer.h" #import "RTCMTLRGBRenderer.h" +#import "RTCMTLRenderer+Private.h" + // To avoid unreconized symbol linker errors, we're taking advantage of the objc // runtime. Linking errors occur when compiling for architectures that don't // support Metal. 
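// For context, a sketch of the runtime-lookup pattern the comment above
// describes (illustrative, not part of the patch): resolving the class by
// name keeps Metal symbols out of the static link on architectures without
// Metal, returning nil at runtime instead of failing at link time.
//
//   Class rendererClass = NSClassFromString(@"RTCMTLNV12Renderer");
//   id renderer = [[rendererClass alloc] init];  // nil when Metal is absent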
@@ -30,10 +32,10 @@ #define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer") #define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer") -@interface RTC_OBJC_TYPE (RTCMTLVideoView) -() @property(nonatomic) RTCMTLI420Renderer *rendererI420; -@property(nonatomic) RTCMTLNV12Renderer *rendererNV12; -@property(nonatomic) RTCMTLRGBRenderer *rendererRGB; +@interface RTC_OBJC_TYPE (RTCMTLVideoView) () +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLI420Renderer) *rendererI420; +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLNV12Renderer) * rendererNV12; +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLRGBRenderer) * rendererRGB; @property(nonatomic) MTKView *metalView; @property(atomic) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; @property(nonatomic) CGSize videoFrameSize; @@ -52,6 +54,14 @@ @implementation RTC_OBJC_TYPE (RTCMTLVideoView) @synthesize lastFrameTimeNs = _lastFrameTimeNs; @synthesize rotationOverride = _rotationOverride; ++ (BOOL)isMetalAvailable { +#if TARGET_OS_IPHONE + return MTLCreateSystemDefaultDevice() != nil; +#elif TARGET_OS_OSX + return [MTLCopyAllDevices() count] > 0; +#endif +} + - (instancetype)initWithFrame:(CGRect)frameRect { self = [super initWithFrame:frameRect]; if (self) { @@ -76,6 +86,7 @@ - (void)setEnabled:(BOOL)enabled { self.metalView.paused = !enabled; } +#if TARGET_OS_IPHONE - (UIViewContentMode)videoContentMode { return self.metalView.contentMode; } @@ -83,27 +94,24 @@ - (UIViewContentMode)videoContentMode { - (void)setVideoContentMode:(UIViewContentMode)mode { self.metalView.contentMode = mode; } +#endif #pragma mark - Private -+ (BOOL)isMetalAvailable { - return MTLCreateSystemDefaultDevice() != nil; -} - + (MTKView *)createMetalView:(CGRect)frame { - return [[MTKViewClass alloc] initWithFrame:frame]; + return [[MTKView alloc] initWithFrame:frame]; } -+ (RTCMTLNV12Renderer *)createNV12Renderer { - return [[RTCMTLNV12RendererClass alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLNV12Renderer) *)createNV12Renderer { + return [[RTC_OBJC_TYPE(RTCMTLNV12Renderer) alloc] init]; } -+ (RTCMTLI420Renderer *)createI420Renderer { - return [[RTCMTLI420RendererClass alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLI420Renderer) *)createI420Renderer { + return [[RTC_OBJC_TYPE(RTCMTLI420Renderer) alloc] init]; } -+ (RTCMTLRGBRenderer *)createRGBRenderer { - return [[RTCMTLRGBRenderer alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLRGBRenderer) *)createRGBRenderer { + return [[RTC_OBJC_TYPE(RTCMTLRGBRenderer) alloc] init]; } - (void)configure { @@ -112,25 +120,42 @@ - (void)configure { self.metalView = [RTC_OBJC_TYPE(RTCMTLVideoView) createMetalView:self.bounds]; self.metalView.delegate = self; +#if TARGET_OS_IPHONE self.metalView.contentMode = UIViewContentModeScaleAspectFill; +#elif TARGET_OS_OSX + self.metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit; +#endif + [self addSubview:self.metalView]; self.videoFrameSize = CGSizeZero; } +#if TARGET_OS_IPHONE - (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { [super setMultipleTouchEnabled:multipleTouchEnabled]; self.metalView.multipleTouchEnabled = multipleTouchEnabled; } +#endif + +- (CGFloat)currentScaleFactor { + CGFloat scale = 1.0; +#if TARGET_OS_IPHONE + scale = [UIScreen mainScreen].scale; +#elif TARGET_OS_OSX + scale = [NSScreen mainScreen].backingScaleFactor; +#endif + return MAX(scale, 1.0); +} -- (void)layoutSubviews { - [super layoutSubviews]; - +- (void)performLayout { CGRect bounds = self.bounds; self.metalView.frame = bounds; if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) { 
self.metalView.drawableSize = [self drawableSize]; } else { - self.metalView.drawableSize = bounds.size; + // Apply scale factor for default size as well (when videoFrameSize is zero) + CGFloat scale = [self currentScaleFactor]; + self.metalView.drawableSize = CGSizeMake(bounds.size.width * scale, bounds.size.height * scale); } } @@ -150,7 +175,7 @@ - (void)drawInMTKView:(nonnull MTKView *)view { return; } - RTCMTLRenderer *renderer; + RTC_OBJC_TYPE(RTCMTLRenderer) * renderer; if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = @@ -209,10 +234,10 @@ - (void)setRotationOverride:(NSValue *)rotationOverride { [self setNeedsLayout]; } -- (RTCVideoRotation)frameRotation { +- (RTC_OBJC_TYPE(RTCVideoRotation) )videoRotation { if (self.rotationOverride) { - RTCVideoRotation rotation; - if (@available(iOS 11, *)) { + RTC_OBJC_TYPE(RTCVideoRotation) rotation; + if (@available(iOS 11, macos 10.13, *)) { [self.rotationOverride getValue:&rotation size:sizeof(rotation)]; } else { [self.rotationOverride getValue:&rotation]; @@ -226,18 +251,23 @@ - (RTCVideoRotation)frameRotation { - (CGSize)drawableSize { // Flip width/height if the rotations are not the same. CGSize videoFrameSize = self.videoFrameSize; - RTCVideoRotation frameRotation = [self frameRotation]; + RTC_OBJC_TYPE(RTCVideoRotation) videoRotation = [self videoRotation]; - BOOL useLandscape = (frameRotation == RTCVideoRotation_0) || - (frameRotation == RTCVideoRotation_180); - BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) || - (self.videoFrame.rotation == RTCVideoRotation_180); + BOOL useLandscape = + (videoRotation == RTC_OBJC_TYPE(RTCVideoRotation_0)) || (videoRotation == RTC_OBJC_TYPE(RTCVideoRotation_180)); + BOOL sizeIsLandscape = (self.videoFrame.rotation == RTC_OBJC_TYPE(RTCVideoRotation_0)) || + (self.videoFrame.rotation == RTC_OBJC_TYPE(RTCVideoRotation_180)); + CGSize size; if (useLandscape == sizeIsLandscape) { - return videoFrameSize; + size = videoFrameSize; } else { - return CGSizeMake(videoFrameSize.height, videoFrameSize.width); + size = CGSizeMake(videoFrameSize.height, videoFrameSize.width); } + + // Apply scale factor for retina displays + CGFloat scale = [self currentScaleFactor]; + return CGSizeMake(size.width * scale, size.height * scale); } #pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) @@ -265,7 +295,34 @@ - (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { RTCLogInfo(@"Incoming frame is nil. Exiting render callback."); return; } - self.videoFrame = frame; + + // Workaround to support RTCCVPixelBuffer rendering. + // RTCMTLRGBRenderer seems to be broken at the moment. + BOOL useI420 = NO; + if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer); + useI420 = pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB; + } + self.videoFrame = useI420 ? 
[frame newI420VideoFrame] : frame; +} + +#pragma mark - Cross platform + +#if TARGET_OS_IPHONE +- (void)layoutSubviews { + [super layoutSubviews]; + [self performLayout]; +} +#elif TARGET_OS_OSX +- (void)layout { + [super layout]; + [self performLayout]; +} + +- (void)setNeedsLayout { + self.needsLayout = YES; } +#endif @end diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h index bd58cbbe4c..eca0c67338 100644 --- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h +++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN * render the video in a rectangle without any color or geometric * transformations. */ -@interface RTCDefaultShader : NSObject +@interface RTC_OBJC_TYPE(RTCDefaultShader) : NSObject @end diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm b/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm index a6f5388b41..1a9c24dc6a 100644 --- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm +++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm @@ -58,7 +58,7 @@ " 1.0);\n" " }\n"; -@implementation RTCDefaultShader { +@implementation RTC_OBJC_TYPE(RTCDefaultShader) { GLuint _vertexBuffer; GLuint _vertexArray; // Store current rotation and only upload new vertex data when rotation diff --git a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h index b78501e9e6..1c5b64fdfc 100644 --- a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h +++ b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h @@ -10,11 +10,13 @@ #import +#import "RTCMacros.h" + // RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen // refreshes, which should be 30fps. We wrap the display link in order to avoid // a retain cycle since CADisplayLink takes a strong reference onto its target. // The timer is paused by default. 
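// Usage sketch (illustrative; `MyRenderer` and `render` are placeholders):
// client code lives in the handler block, not in the CADisplayLink target,
// so the display link's strong reference stays inside the wrapper.
//
//   __weak MyRenderer *weakSelf = self;
//   RTC_OBJC_TYPE(RTCDisplayLinkTimer) *timer =
//       [[RTC_OBJC_TYPE(RTCDisplayLinkTimer) alloc] initWithTimerHandler:^{
//         [weakSelf render];
//       }];
//   timer.isPaused = NO;  // the timer starts paused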
-@interface RTCDisplayLinkTimer : NSObject +@interface RTC_OBJC_TYPE (RTCDisplayLinkTimer): NSObject @property(nonatomic) BOOL isPaused; diff --git a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m index fe1ec904eb..0e784917dd 100644 --- a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m +++ b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m @@ -12,7 +12,7 @@ #import -@implementation RTCDisplayLinkTimer { +@implementation RTC_OBJC_TYPE (RTCDisplayLinkTimer) { CADisplayLink *_displayLink; void (^_timerHandler)(void); } @@ -22,17 +22,15 @@ - (instancetype)initWithTimerHandler:(void (^)(void))timerHandler { self = [super init]; if (self) { _timerHandler = timerHandler; - _displayLink = - [CADisplayLink displayLinkWithTarget:self - selector:@selector(displayLinkDidFire:)]; + _displayLink = [CADisplayLink displayLinkWithTarget:self + selector:@selector(displayLinkDidFire:)]; _displayLink.paused = YES; #if __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0 _displayLink.preferredFramesPerSecond = 30; #else [_displayLink setFrameInterval:2]; #endif - [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] - forMode:NSRunLoopCommonModes]; + [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes]; } return self; } diff --git a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m index b6044ecb87..9a934a8c20 100644 --- a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m +++ b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m @@ -42,14 +42,14 @@ @interface RTC_OBJC_TYPE (RTCEAGLVideoView) @end @implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { - RTCDisplayLinkTimer *_timer; + RTC_OBJC_TYPE(RTCDisplayLinkTimer) * _timer; EAGLContext *_glContext; // This flag should only be set and read on the main thread (e.g. by // setNeedsDisplay) BOOL _isDirty; id _shader; - RTCNV12TextureCache *_nv12TextureCache; - RTCI420TextureCache *_i420TextureCache; + RTC_OBJC_TYPE(RTCNV12TextureCache) *_nv12TextureCache; + RTC_OBJC_TYPE(RTCI420TextureCache) *_i420TextureCache; // As timestamps should be unique between frames, will store last // drawn frame timestamp instead of the whole frame to reduce memory usage. int64_t _lastDrawnFrameTimeStampNs; @@ -61,11 +61,11 @@ @implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { @synthesize rotationOverride = _rotationOverride; - (instancetype)initWithFrame:(CGRect)frame { - return [self initWithFrame:frame shader:[[RTCDefaultShader alloc] init]]; + return [self initWithFrame:frame shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithCoder:(NSCoder *)aDecoder { - return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]]; + return [self initWithCoder:aDecoder shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithFrame:(CGRect)frame @@ -117,8 +117,7 @@ - (BOOL)configure { // Listen to application state in order to clean up OpenGL before app goes // away. - NSNotificationCenter *notificationCenter = - [NSNotificationCenter defaultCenter]; + NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter]; [notificationCenter addObserver:self selector:@selector(willResignActive) name:UIApplicationWillResignActiveNotification @@ -132,7 +131,7 @@ - (BOOL)configure { // using a refresh rate proportional to screen refresh frequency. This // occurs on the main thread. 
__weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self; - _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{ + _timer = [[RTC_OBJC_TYPE(RTCDisplayLinkTimer) alloc] initWithTimerHandler:^{ RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf; [strongSelf displayLinkTimerDidFire]; }]; @@ -150,8 +149,7 @@ - (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { - (void)dealloc { [[NSNotificationCenter defaultCenter] removeObserver:self]; - UIApplicationState appState = - [UIApplication sharedApplication].applicationState; + UIApplicationState appState = [UIApplication sharedApplication].applicationState; if (appState == UIApplicationStateActive) { [self teardownGL]; } @@ -199,8 +197,7 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { glClear(GL_COLOR_BUFFER_BIT); if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { if (!_nv12TextureCache) { - _nv12TextureCache = - [[RTCNV12TextureCache alloc] initWithContext:_glContext]; + _nv12TextureCache = [[RTC_OBJC_TYPE(RTCNV12TextureCache) alloc] initWithContext:_glContext]; } if (_nv12TextureCache) { [_nv12TextureCache uploadFrameToTextures:frame]; @@ -215,8 +212,7 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { } } else { if (!_i420TextureCache) { - _i420TextureCache = - [[RTCI420TextureCache alloc] initWithContext:_glContext]; + _i420TextureCache = [[RTC_OBJC_TYPE(RTCI420TextureCache) alloc] initWithContext:_glContext]; } [_i420TextureCache uploadFrameToTextures:frame]; [_shader applyShadingForFrameWithWidth:frame.width diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h index 526308165f..7a9ba638ee 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h @@ -11,7 +11,7 @@ #import "RTCOpenGLDefines.h" #import "base/RTCVideoFrame.h" -@interface RTCI420TextureCache : NSObject +@interface RTC_OBJC_TYPE(RTCI420TextureCache) : NSObject @property(nonatomic, readonly) GLuint yTexture; @property(nonatomic, readonly) GLuint uTexture; diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm index c0d1bc647f..5de46ccbe7 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm @@ -24,7 +24,7 @@ static const GLsizei kNumTexturesPerSet = 3; static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets; -@implementation RTCI420TextureCache { +@implementation RTC_OBJC_TYPE(RTCI420TextureCache) { BOOL _hasUnpackRowLength; GLint _currentTextureSet; // Handles for OpenGL constructs. diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m new file mode 100644 index 0000000000..97957faf24 --- /dev/null +++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m @@ -0,0 +1,199 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import + +#if !TARGET_OS_IPHONE + +#import "RTCNSGLVideoView.h" + +#import +#import +#import + +#import "RTCDefaultShader.h" +#import "RTCI420TextureCache.h" +#import "base/RTCLogging.h" +#import "base/RTCVideoFrame.h" + +@interface RTC_OBJC_TYPE (RTCNSGLVideoView) +() + // `videoFrame` is set when we receive a frame from a worker thread and is read + // from the display link callback so atomicity is required. + @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * + videoFrame; +@property(atomic, strong) RTC_OBJC_TYPE(RTCI420TextureCache) *i420TextureCache; + +- (void)drawFrame; +@end + +static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink, + const CVTimeStamp *now, + const CVTimeStamp *outputTime, + CVOptionFlags flagsIn, + CVOptionFlags *flagsOut, + void *displayLinkContext) { + RTC_OBJC_TYPE(RTCNSGLVideoView) *view = + (__bridge RTC_OBJC_TYPE(RTCNSGLVideoView) *)displayLinkContext; + [view drawFrame]; + return kCVReturnSuccess; +} + +@implementation RTC_OBJC_TYPE (RTCNSGLVideoView) { + CVDisplayLinkRef _displayLink; + RTC_OBJC_TYPE(RTCVideoFrame) * _lastDrawnFrame; + id _shader; +} + +@synthesize delegate = _delegate; +@synthesize videoFrame = _videoFrame; +@synthesize i420TextureCache = _i420TextureCache; + +- (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format { + return [self initWithFrame:frame pixelFormat:format shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; +} + +- (instancetype)initWithFrame:(NSRect)frame + pixelFormat:(NSOpenGLPixelFormat *)format + shader:(id)shader { + if (self = [super initWithFrame:frame pixelFormat:format]) { + _shader = shader; + } + return self; +} + +- (void)dealloc { + [self teardownDisplayLink]; +} + +- (void)drawRect:(NSRect)rect { + [self drawFrame]; +} + +- (void)reshape { + [super reshape]; + NSRect frame = [self frame]; + [self ensureGLContext]; + CGLLockContext([[self openGLContext] CGLContextObj]); + glViewport(0, 0, frame.size.width, frame.size.height); + CGLUnlockContext([[self openGLContext] CGLContextObj]); +} + +- (void)lockFocus { + NSOpenGLContext *context = [self openGLContext]; + [super lockFocus]; + if ([context view] != self) { + [context setView:self]; + } + [context makeCurrentContext]; +} + +- (void)prepareOpenGL { + [super prepareOpenGL]; + [self ensureGLContext]; + glDisable(GL_DITHER); + [self setupDisplayLink]; +} + +- (void)clearGLContext { + [self ensureGLContext]; + self.i420TextureCache = nil; + [super clearGLContext]; +} + +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) + +// These methods may be called on non-main thread. +- (void)setSize:(CGSize)size { + dispatch_async(dispatch_get_main_queue(), ^{ + [self.delegate videoView:self didChangeVideoSize:size]; + }); +} + +- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + self.videoFrame = frame; +} + +#pragma mark - Private + +- (void)drawFrame { + RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame; + if (!frame || frame == _lastDrawnFrame) { + return; + } + // This method may be called from CVDisplayLink callback which isn't on the + // main thread so we have to lock the GL context before drawing. + NSOpenGLContext *context = [self openGLContext]; + CGLLockContext([context CGLContextObj]); + + [self ensureGLContext]; + glClear(GL_COLOR_BUFFER_BIT); + + // Rendering native CVPixelBuffer is not supported on OS X. + // TODO(magjed): Add support for NV12 texture cache on OS X. 
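+  // Note: the conversion below forces every frame into an I420 buffer because
+  // this macOS view only has an I420 texture cache; the CVOpenGLESTextureCache
+  // that backs the NV12 fast path on iOS does not exist on OS X.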
+ frame = [frame newI420VideoFrame]; + if (!self.i420TextureCache) { + self.i420TextureCache = [[RTC_OBJC_TYPE(RTCI420TextureCache) alloc] initWithContext:context]; + } + RTC_OBJC_TYPE(RTCI420TextureCache) *i420TextureCache = self.i420TextureCache; + if (i420TextureCache) { + [i420TextureCache uploadFrameToTextures:frame]; + [_shader applyShadingForFrameWithWidth:frame.width + height:frame.height + rotation:frame.rotation + yPlane:i420TextureCache.yTexture + uPlane:i420TextureCache.uTexture + vPlane:i420TextureCache.vTexture]; + [context flushBuffer]; + _lastDrawnFrame = frame; + } + CGLUnlockContext([context CGLContextObj]); +} + +- (void)setupDisplayLink { + if (_displayLink) { + return; + } + // Synchronize buffer swaps with vertical refresh rate. + GLint swapInt = 1; + [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval]; + + // Create display link. + CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink); + CVDisplayLinkSetOutputCallback(_displayLink, + &OnDisplayLinkFired, + (__bridge void *)self); + // Set the display link for the current renderer. + CGLContextObj cglContext = [[self openGLContext] CGLContextObj]; + CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj]; + CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext( + _displayLink, cglContext, cglPixelFormat); + CVDisplayLinkStart(_displayLink); +} + +- (void)teardownDisplayLink { + if (!_displayLink) { + return; + } + CVDisplayLinkRelease(_displayLink); + _displayLink = NULL; +} + +- (void)ensureGLContext { + NSOpenGLContext* context = [self openGLContext]; + NSAssert(context, @"context shouldn't be nil"); + if ([NSOpenGLContext currentContext] != context) { + [context makeCurrentContext]; + } +} + +@end + +#endif // !TARGET_OS_IPHONE diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h index 9dddb37b08..b1a48feddd 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCNV12TextureCache : NSObject +@interface RTC_OBJC_TYPE(RTCNV12TextureCache) : NSObject @property(nonatomic, readonly) GLuint yTexture; @property(nonatomic, readonly) GLuint uvTexture; diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m index 0909f5d136..48aeec0ac3 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m @@ -14,7 +14,7 @@ #import "base/RTCVideoFrameBuffer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" -@implementation RTCNV12TextureCache { +@implementation RTC_OBJC_TYPE(RTCNV12TextureCache) { CVOpenGLESTextureCacheRef _textureCache; CVOpenGLESTextureRef _yTextureRef; CVOpenGLESTextureRef _uvTextureRef; diff --git a/sdk/objc/components/renderer/opengl/RTCShader.h b/sdk/objc/components/renderer/opengl/RTCShader.h index d1b91fb643..4f4fa3d9c5 100644 --- a/sdk/objc/components/renderer/opengl/RTCShader.h +++ b/sdk/objc/components/renderer/opengl/RTCShader.h @@ -10,12 +10,12 @@ #import "base/RTCVideoFrame.h" -RTC_EXTERN const char kRTCVertexShaderSource[]; +RTC_EXTERN const char RTC_CONSTANT_TYPE(RTCVertexShaderSource)[]; -RTC_EXTERN GLuint RTCCreateShader(GLenum type, const GLchar* source); -RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader); +RTC_EXTERN GLuint RTC_OBJC_TYPE(RTCCreateShader)(GLenum 
type, const GLchar* source); +RTC_EXTERN GLuint RTC_OBJC_TYPE(RTCCreateProgram)(GLuint vertexShader, GLuint fragmentShader); RTC_EXTERN GLuint -RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]); -RTC_EXTERN BOOL RTCCreateVertexBuffer(GLuint* vertexBuffer, +RTC_OBJC_TYPE(RTCCreateProgramFromFragmentSource)(const char fragmentShaderSource[]); +RTC_EXTERN BOOL RTC_OBJC_TYPE(RTCCreateVertexBuffer)(GLuint* vertexBuffer, GLuint* vertexArray); -RTC_EXTERN void RTCSetVertexData(RTCVideoRotation rotation); +RTC_EXTERN void RTC_OBJC_TYPE(RTCSetVertexData)(RTCVideoRotation rotation); diff --git a/sdk/objc/components/renderer/opengl/RTCShader.mm b/sdk/objc/components/renderer/opengl/RTCShader.mm index 4284ceee1c..cb0dcd0690 100644 --- a/sdk/objc/components/renderer/opengl/RTCShader.mm +++ b/sdk/objc/components/renderer/opengl/RTCShader.mm @@ -22,7 +22,7 @@ #include "rtc_base/logging.h" // Vertex shader doesn't do anything except pass coordinates through. -const char kRTCVertexShaderSource[] = SHADER_VERSION VERTEX_SHADER_IN +const char RTC_CONSTANT_TYPE(RTCVertexShaderSource)[] = SHADER_VERSION VERTEX_SHADER_IN " vec2 position;\n" VERTEX_SHADER_IN " vec2 texcoord;\n" VERTEX_SHADER_OUT " vec2 v_texcoord;\n" "void main() {\n" @@ -32,7 +32,7 @@ // Compiles a shader of the given `type` with GLSL source `source` and returns // the shader handle or 0 on error. -GLuint RTCCreateShader(GLenum type, const GLchar *source) { +GLuint RTC_OBJC_TYPE(RTCCreateShader)(GLenum type, const GLchar *source) { GLuint shader = glCreateShader(type); if (!shader) { return 0; @@ -59,7 +59,7 @@ GLuint RTCCreateShader(GLenum type, const GLchar *source) { // Links a shader program with the given vertex and fragment shaders and // returns the program handle or 0 on error. -GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader) { +GLuint RTC_OBJC_TYPE(RTCCreateProgram)(GLuint vertexShader, GLuint fragmentShader) { if (vertexShader == 0 || fragmentShader == 0) { return 0; } @@ -81,9 +81,8 @@ GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader) { // Creates and links a shader program with the given fragment shader source and // a plain vertex shader. Returns the program handle or 0 on error. -GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) { - GLuint vertexShader = - RTCCreateShader(GL_VERTEX_SHADER, kRTCVertexShaderSource); +GLuint RTC_OBJC_TYPE(RTCCreateProgramFromFragmentSource)(const char fragmentShaderSource[]) { + GLuint vertexShader = RTCCreateShader(GL_VERTEX_SHADER, RTC_CONSTANT_TYPE(RTCVertexShaderSource)); RTC_CHECK(vertexShader) << "failed to create vertex shader"; GLuint fragmentShader = RTCCreateShader(GL_FRAGMENT_SHADER, fragmentShaderSource); @@ -126,7 +125,7 @@ GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) { return program; } -BOOL RTCCreateVertexBuffer(GLuint *vertexBuffer, GLuint *vertexArray) { +BOOL RTC_OBJC_TYPE(RTCCreateVertexBuffer)(GLuint *vertexBuffer, GLuint *vertexArray) { glGenBuffers(1, vertexBuffer); if (*vertexBuffer == 0) { glDeleteVertexArrays(1, vertexArray); @@ -138,7 +137,7 @@ BOOL RTCCreateVertexBuffer(GLuint *vertexBuffer, GLuint *vertexArray) { } // Set vertex data to the currently bound vertex buffer. -void RTCSetVertexData(RTCVideoRotation rotation) { +void RTC_OBJC_TYPE(RTCSetVertexData)(RTCVideoRotation rotation) { // When modelview and projection matrices are identity (default) the world is // contained in the square around origin with unit size 2. 
Drawing to these // coordinates is equivalent to drawing to the entire screen. The texture is @@ -156,16 +155,16 @@ void RTCSetVertexData(RTCVideoRotation rotation) { // Rotate the UV coordinates. int rotation_offset; switch (rotation) { - case RTCVideoRotation_0: + case RTC_OBJC_TYPE(RTCVideoRotation_0): rotation_offset = 0; break; - case RTCVideoRotation_90: + case RTC_OBJC_TYPE(RTCVideoRotation_90): rotation_offset = 1; break; - case RTCVideoRotation_180: + case RTC_OBJC_TYPE(RTCVideoRotation_180): rotation_offset = 2; break; - case RTCVideoRotation_270: + case RTC_OBJC_TYPE(RTCVideoRotation_270): rotation_offset = 3; break; } diff --git a/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h b/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h index 1499bec686..2251c4edff 100644 --- a/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h +++ b/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h @@ -23,14 +23,14 @@ RTC_OBJC_EXPORT (RTCVideoViewShading) /** Callback for I420 frames. Each plane is given as a texture. */ - - (void)applyShadingForFrameWithWidth : (int)width height - : (int)height rotation : (RTCVideoRotation)rotation yPlane - : (GLuint)yPlane uPlane : (GLuint)uPlane vPlane : (GLuint)vPlane; + - (void)applyShadingForFrameWithWidth : (int)width height : (int)height rotation + : (RTC_OBJC_TYPE(RTCVideoRotation))rotation yPlane : (GLuint)yPlane uPlane : (GLuint)uPlane vPlane + : (GLuint)vPlane; /** Callback for NV12 frames. Each plane is given as a texture. */ - (void)applyShadingForFrameWithWidth:(int)width height:(int)height - rotation:(RTCVideoRotation)rotation + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation yPlane:(GLuint)yPlane uvPlane:(GLuint)uvPlane; diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h index 8f58667335..f1c1d03e6c 100644 --- a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h +++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h @@ -14,15 +14,14 @@ #import "sdk/objc/base/RTCMacros.h" /** Class for H264 specific config. 
*/ -typedef NS_ENUM(NSUInteger, RTCH264PacketizationMode) { - RTCH264PacketizationModeNonInterleaved = - 0, // Mode 1 - STAP-A, FU-A is allowed - RTCH264PacketizationModeSingleNalUnit // Mode 0 - only single NALU allowed +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCH264PacketizationMode)) { + RTC_OBJC_TYPE(RTCH264PacketizationModeNonInterleaved) = 0, // Mode 1 - STAP-A, FU-A is allowed + RTC_OBJC_TYPE(RTCH264PacketizationModeSingleNalUnit) // Mode 0 - only single NALU allowed }; RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) : NSObject -@property(nonatomic, assign) RTCH264PacketizationMode packetizationMode; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCH264PacketizationMode) packetizationMode; @end diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m index f08953ba55..9c1943565a 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m @@ -25,27 +25,25 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) - (NSArray *)supportedCodecs { NSDictionary *constrainedHighParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedHigh), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] - initWithName:kRTCVideoCodecH264Name - parameters:constrainedHighParams]; + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name) + parameters:constrainedHighParams]; NSDictionary *constrainedBaselineParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedBaseline), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] - initWithName:kRTCVideoCodecH264Name - parameters:constrainedBaselineParams]; + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name) + parameters:constrainedBaselineParams]; - RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo) - alloc] initWithName:kRTCVideoCodecVp8Name]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name)]; NSMutableArray *result = [@[ constrainedHighInfo, @@ -54,31 +52,29 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) ] mutableCopy]; if ([RTC_OBJC_TYPE(RTCVideoDecoderVP9) isSupported]) { - [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] - initWithName:kRTCVideoCodecVp9Name]]; + [result + addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name)]]; } #if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY) - [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] - initWithName:kRTCVideoCodecAv1Name]]; + [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name)]]; #endif return result; } -- (id)createDecoder: - (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { - if ([info.name isEqualToString:kRTCVideoCodecH264Name]) { +- (id)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { + if 
([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name)]) { return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init]; - } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) { + } else if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name)]) { return [RTC_OBJC_TYPE(RTCVideoDecoderVP8) vp8Decoder]; - } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name] && + } else if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name)] && [RTC_OBJC_TYPE(RTCVideoDecoderVP9) isSupported]) { return [RTC_OBJC_TYPE(RTCVideoDecoderVP9) vp9Decoder]; } #if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY) - if ([info.name isEqualToString:kRTCVideoCodecAv1Name]) { + if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name)]) { return [RTC_OBJC_TYPE(RTCVideoDecoderAV1) av1Decoder]; } #endif diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m index 53edf81d7a..df593f2311 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m @@ -27,27 +27,25 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) + (NSArray *)supportedCodecs { NSDictionary *constrainedHighParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedHigh), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] - initWithName:kRTCVideoCodecH264Name - parameters:constrainedHighParams]; + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name) + parameters:constrainedHighParams]; NSDictionary *constrainedBaselineParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedBaseline), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] - initWithName:kRTCVideoCodecH264Name - parameters:constrainedBaselineParams]; + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name) + parameters:constrainedBaselineParams]; - RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = [[RTC_OBJC_TYPE(RTCVideoCodecInfo) - alloc] initWithName:kRTCVideoCodecVp8Name]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name)]; NSMutableArray *result = [@[ constrainedHighInfo, @@ -56,31 +54,31 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) ] mutableCopy]; if ([RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) { - [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] - initWithName:kRTCVideoCodecVp9Name]]; + [result + addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name) parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderVP9) scalabilityModes]]]; } #if defined(RTC_USE_LIBAOM_AV1_ENCODER) - [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] - initWithName:kRTCVideoCodecAv1Name]]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *av1Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name) parameters:nil 
scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderAV1) scalabilityModes]]; + [result addObject:av1Info]; #endif return result; } -- (id)createEncoder: - (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { - if ([info.name isEqualToString:kRTCVideoCodecH264Name]) { +- (id)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { + if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name)]) { return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info]; - } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) { + } else if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name)]) { return [RTC_OBJC_TYPE(RTCVideoEncoderVP8) vp8Encoder]; - } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name] && + } else if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name)] && [RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) { return [RTC_OBJC_TYPE(RTCVideoEncoderVP9) vp9Encoder]; } #if defined(RTC_USE_LIBAOM_AV1_ENCODER) - if ([info.name isEqualToString:kRTCVideoCodecAv1Name]) { + if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name)]) { return [RTC_OBJC_TYPE(RTCVideoEncoderAV1) av1Encoder]; } #endif diff --git a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h index 3817b246c7..5b026d3587 100644 --- a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h +++ b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h @@ -12,50 +12,49 @@ #import "sdk/objc/base/RTCMacros.h" -RTC_EXTERN NSString *const kRTCVideoCodecH264Name; -RTC_EXTERN NSString *const kRTCLevel31ConstrainedHigh; -RTC_EXTERN NSString *const kRTCLevel31ConstrainedBaseline; -RTC_EXTERN NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedHigh; -RTC_EXTERN NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedBaseline; +RTC_OBJC_EXPORT extern NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecH264Name); +RTC_OBJC_EXPORT extern NSString *const RTC_CONSTANT_TYPE(RTCLevel31ConstrainedHigh); +RTC_OBJC_EXPORT extern NSString *const RTC_CONSTANT_TYPE(RTCLevel31ConstrainedBaseline); +RTC_OBJC_EXPORT extern NSString *const RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedHigh); +RTC_OBJC_EXPORT extern NSString *const RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedBaseline); /** H264 Profiles and levels. 
*/ -typedef NS_ENUM(NSUInteger, RTCH264Profile) { - RTCH264ProfileConstrainedBaseline, - RTCH264ProfileBaseline, - RTCH264ProfileMain, - RTCH264ProfileConstrainedHigh, - RTCH264ProfileHigh, +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCH264Profile)) { + RTC_OBJC_TYPE(RTCH264ProfileConstrainedBaseline), + RTC_OBJC_TYPE(RTCH264ProfileBaseline), + RTC_OBJC_TYPE(RTCH264ProfileMain), + RTC_OBJC_TYPE(RTCH264ProfileConstrainedHigh), + RTC_OBJC_TYPE(RTCH264ProfileHigh), }; -typedef NS_ENUM(NSUInteger, RTCH264Level) { - RTCH264Level1_b = 0, - RTCH264Level1 = 10, - RTCH264Level1_1 = 11, - RTCH264Level1_2 = 12, - RTCH264Level1_3 = 13, - RTCH264Level2 = 20, - RTCH264Level2_1 = 21, - RTCH264Level2_2 = 22, - RTCH264Level3 = 30, - RTCH264Level3_1 = 31, - RTCH264Level3_2 = 32, - RTCH264Level4 = 40, - RTCH264Level4_1 = 41, - RTCH264Level4_2 = 42, - RTCH264Level5 = 50, - RTCH264Level5_1 = 51, - RTCH264Level5_2 = 52 +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCH264Level)) { + RTC_OBJC_TYPE(RTCH264Level1_b) = 0, + RTC_OBJC_TYPE(RTCH264Level1) = 10, + RTC_OBJC_TYPE(RTCH264Level1_1) = 11, + RTC_OBJC_TYPE(RTCH264Level1_2) = 12, + RTC_OBJC_TYPE(RTCH264Level1_3) = 13, + RTC_OBJC_TYPE(RTCH264Level2) = 20, + RTC_OBJC_TYPE(RTCH264Level2_1) = 21, + RTC_OBJC_TYPE(RTCH264Level2_2) = 22, + RTC_OBJC_TYPE(RTCH264Level3) = 30, + RTC_OBJC_TYPE(RTCH264Level3_1) = 31, + RTC_OBJC_TYPE(RTCH264Level3_2) = 32, + RTC_OBJC_TYPE(RTCH264Level4) = 40, + RTC_OBJC_TYPE(RTCH264Level4_1) = 41, + RTC_OBJC_TYPE(RTCH264Level4_2) = 42, + RTC_OBJC_TYPE(RTCH264Level5) = 50, + RTC_OBJC_TYPE(RTCH264Level5_1) = 51, + RTC_OBJC_TYPE(RTCH264Level5_2) = 52 }; RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) : NSObject -@property(nonatomic, readonly) RTCH264Profile profile; -@property(nonatomic, readonly) RTCH264Level level; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCH264Profile) profile; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCH264Level) level; @property(nonatomic, readonly) NSString *hexString; - (instancetype)initWithHexString:(NSString *)hexString; -- (instancetype)initWithProfile:(RTCH264Profile)profile - level:(RTCH264Level)level; +- (instancetype)initWithProfile:(RTC_OBJC_TYPE(RTCH264Profile))profile level:(RTC_OBJC_TYPE(RTCH264Level))level; @end diff --git a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm index 2dc66d1514..45f7126cd5 100644 --- a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm +++ b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm @@ -26,12 +26,12 @@ } // namespace -NSString *const kRTCVideoCodecH264Name = @(webrtc::kH264CodecName); -NSString *const kRTCLevel31ConstrainedHigh = @"640c1f"; -NSString *const kRTCLevel31ConstrainedBaseline = @"42e01f"; -NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedHigh = +NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecH264Name) = @(webrtc::kH264CodecName); +NSString *const RTC_CONSTANT_TYPE(RTCLevel31ConstrainedHigh) = @"640c1f"; +NSString *const RTC_CONSTANT_TYPE(RTCLevel31ConstrainedBaseline) = @"42e01f"; +NSString *const RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedHigh) = MaxSupportedProfileLevelConstrainedHigh(); -NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedBaseline = +NSString *const RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedBaseline) = MaxSupportedProfileLevelConstrainedBaseline(); namespace { @@ -60,7 +60,7 @@ return profile; } #endif - return kRTCLevel31ConstrainedBaseline; + return 
RTC_CONSTANT_TYPE(RTCLevel31ConstrainedBaseline); } NSString *MaxSupportedProfileLevelConstrainedHigh() { @@ -71,7 +71,7 @@ return profile; } #endif - return kRTCLevel31ConstrainedHigh; + return RTC_CONSTANT_TYPE(RTCLevel31ConstrainedHigh); } } // namespace @@ -79,8 +79,8 @@ @interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) () - @property(nonatomic, assign) RTCH264Profile profile; -@property(nonatomic, assign) RTCH264Level level; + @property(nonatomic, assign) RTC_OBJC_TYPE(RTCH264Profile) profile; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCH264Level) level; @property(nonatomic, strong) NSString *hexString; @end @@ -100,15 +100,14 @@ - (instancetype)initWithHexString:(NSString *)hexString { webrtc::ParseH264ProfileLevelId( [hexString cStringUsingEncoding:NSUTF8StringEncoding]); if (profile_level_id.has_value()) { - self.profile = static_cast(profile_level_id->profile); - self.level = static_cast(profile_level_id->level); + self.profile = static_cast(profile_level_id->profile); + self.level = static_cast(profile_level_id->level); } } return self; } -- (instancetype)initWithProfile:(RTCH264Profile)profile - level:(RTCH264Level)level { +- (instancetype)initWithProfile:(RTC_OBJC_TYPE(RTCH264Profile))profile level:(RTC_OBJC_TYPE(RTCH264Level))level { self = [super init]; if (self) { self.profile = profile; diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m index 97412f8481..b6d127e0c9 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m +++ b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m @@ -18,10 +18,10 @@ @implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) - (NSArray *)supportedCodecs { NSMutableArray *codecs = [NSMutableArray array]; - NSString *codecName = kRTCVideoCodecH264Name; + NSString *codecName = RTC_CONSTANT_TYPE(RTCVideoCodecH264Name); NSDictionary *constrainedHighParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedHigh), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; @@ -32,7 +32,7 @@ @implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) [codecs addObject:constrainedHighInfo]; NSDictionary *constrainedBaselineParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedBaseline), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm b/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm index 89802d19d4..21e2b805b0 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm @@ -66,7 +66,7 @@ void decompressionOutputCallback(void *decoderRef, RTC_OBJC_TYPE( RTCVideoFrame) *decodedFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:frameBuffer - rotation:RTCVideoRotation_0 + rotation:RTC_OBJC_TYPE(RTCVideoRotation_0) timeStampNs:CMTimeGetSeconds(timestamp) * webrtc::kNumNanosecsPerSec]; decodedFrame.timeStamp = decodeParams->timestamp; decodeParams->callback(decodedFrame); @@ -220,7 +220,7 @@ - (int)resetDecompressionSession { NSDictionary *attributes = @{ #if defined(WEBRTC_IOS) && (TARGET_OS_MACCATALYST || TARGET_OS_SIMULATOR) (NSString *)kCVPixelBufferMetalCompatibilityKey : @(YES), -#elif defined(WEBRTC_IOS) +#elif defined(WEBRTC_IOS) && 
!defined(TARGET_OS_VISION) (NSString *)kCVPixelBufferOpenGLESCompatibilityKey : @(YES), #elif defined(WEBRTC_MAC) && !defined(WEBRTC_ARCH_ARM64) (NSString *)kCVPixelBufferOpenGLCompatibilityKey : @(YES), diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m index 11f98f0113..d89179c438 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m @@ -18,10 +18,10 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) - (NSArray *)supportedCodecs { NSMutableArray *codecs = [NSMutableArray array]; - NSString *codecName = kRTCVideoCodecH264Name; + NSString *codecName = RTC_CONSTANT_TYPE(RTCVideoCodecH264Name); NSDictionary *constrainedHighParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedHigh), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; @@ -32,7 +32,7 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) [codecs addObject:constrainedHighInfo]; NSDictionary *constrainedBaselineParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedBaseline), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h new file mode 100644 index 0000000000..4070af22e4 --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h @@ -0,0 +1,16 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoEncoderFactory.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) : NSObject + +- (instancetype)initWithPrimary:(id)primary + fallback:(id)fallback; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm new file mode 100644 index 0000000000..e9c9c5bde3 --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm @@ -0,0 +1,64 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoCodecInfo.h" +#import "RTCVideoEncoderFactorySimulcast.h" +#import "api/video_codec/RTCVideoEncoderSimulcast.h" +#import "api/peerconnection/RTCVideoCodecInfo+Private.h" + +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" +#include "modules/video_coding/codecs/av1/av1_svc_config.h" +#include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "media/base/media_constants.h" + +@interface RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) () + +@property id primary; +@property id fallback; + +@end + + +@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) + +@synthesize primary = _primary; +@synthesize fallback = _fallback; + +- (instancetype)initWithPrimary:(id)primary + fallback:(id)fallback { + self = [super init]; + if (self) { + _primary = primary; + _fallback = fallback; + } + return self; +} + +- (nullable id)createEncoder: (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { + return [RTC_OBJC_TYPE(RTCVideoEncoderSimulcast) simulcastEncoderWithPrimary: _primary fallback: _fallback videoCodecInfo: 
info]; +} + +- (NSArray *)supportedCodecs { + NSArray *supportedCodecs = [[_primary supportedCodecs] arrayByAddingObjectsFromArray: [_fallback supportedCodecs]]; + + NSMutableArray *addingCodecs = [[NSMutableArray alloc] init]; + + for (const webrtc::SdpVideoFormat& format : webrtc::SupportedVP9Codecs(true)) { + RTC_OBJC_TYPE(RTCVideoCodecInfo) *codec = [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat: format]; + [addingCodecs addObject: codec]; + } + + auto av1Format = webrtc::SdpVideoFormat( + cricket::kAv1CodecName, webrtc::CodecParameterMap(), + webrtc::LibaomAv1EncoderSupportedScalabilityModes()); + RTC_OBJC_TYPE(RTCVideoCodecInfo) *av1Codec = [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat: av1Format]; + [addingCodecs addObject: av1Codec]; + + return [supportedCodecs arrayByAddingObjectsFromArray: addingCodecs]; +} + + +@end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm index d1a10dc9b6..f7771532ba 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm @@ -44,10 +44,9 @@ @interface RTC_OBJC_TYPE (RTCVideoEncoderH264) - (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags sampleBuffer : (CMSampleBufferRef)sampleBuffer codecSpecificInfo - : (id)codecSpecificInfo width - : (int32_t)width height : (int32_t)height renderTimeMs - : (int64_t)renderTimeMs timestamp : (uint32_t)timestamp rotation - : (RTCVideoRotation)rotation; + : (id)codecSpecificInfo width : (int32_t)width height + : (int32_t)height renderTimeMs : (int64_t)renderTimeMs timestamp : (uint32_t)timestamp rotation + : (RTC_OBJC_TYPE(RTCVideoRotation))rotation; @end @@ -56,14 +55,42 @@ - (void)frameWasEncoded : (OSStatus)status flags // The ratio between kVTCompressionPropertyKey_DataRateLimits and // kVTCompressionPropertyKey_AverageBitRate. The data rate limit is set higher // than the average bit rate to avoid undershooting the target. -const float kLimitToAverageBitRateFactor = 1.5f; +const float kLimitToAverageBitRateFactor = 10.0f; // These thresholds deviate from the default h264 QP thresholds, as they // have been found to work better on devices that support VideoToolbox const int kLowH264QpThreshold = 28; const int kHighH264QpThreshold = 39; +const int kBitsPerByte = 8; const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCVideoEncodeMode)) { + RTC_OBJC_TYPE(RTCVideoEncodeModeVariable) = 0, + RTC_OBJC_TYPE(RTCVideoEncodeModeConstant) = 1, +}; + +NSArray *CreateRateLimitArray(uint32_t computedBitrateBps, RTC_OBJC_TYPE(RTCVideoEncodeMode) mode) { + switch (mode) { + case RTC_OBJC_TYPE(RTCVideoEncodeModeVariable): { + // 5 seconds should be an okay interval for VBR to enforce the long-term + // limit. + float avgInterval = 5.0; + uint32_t avgBytesPerSecond = computedBitrateBps / kBitsPerByte * avgInterval; + // And the peak bitrate is measured per-second in a way similar to CBR. + float peakInterval = 1.0; + uint32_t peakBytesPerSecond = + computedBitrateBps * kLimitToAverageBitRateFactor / kBitsPerByte; + return @[ @(peakBytesPerSecond), @(peakInterval), @(avgBytesPerSecond), @(avgInterval) ]; + } + case RTC_OBJC_TYPE(RTCVideoEncodeModeConstant): { + // CBR should be enforced with granularity of a second.
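+      // Worked example (illustrative): with computedBitrateBps = 1,000,000,
+      // the VBR case above yields avgBytesPerSecond = 1,000,000 / 8 * 5 =
+      // 625,000 bytes per 5 s window and peakBytesPerSecond =
+      // 1,000,000 * 10 / 8 = 1,250,000 bytes per 1 s window, while this CBR
+      // case yields 1,000,000 / 8 = 125,000 bytes per 1 s window.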
+ float targetInterval = 1.0; + int32_t targetBitrate = computedBitrateBps / kBitsPerByte; + return @[ @(targetBitrate), @(targetInterval) ]; + } + } +} + // Struct that we pass to the encoder per frame to encode. We receive it again // in the encoder callback. struct RTCFrameEncodeParams { @@ -73,13 +100,8 @@ - (void)frameWasEncoded : (OSStatus)status flags int32_t h, int64_t rtms, uint32_t ts, - RTCVideoRotation r) - : encoder(e), - width(w), - height(h), - render_time_ms(rtms), - timestamp(ts), - rotation(r) { + RTC_OBJC_TYPE(RTCVideoRotation) r) + : encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts), rotation(r) { if (csi) { codecSpecificInfo = csi; } else { @@ -94,7 +116,7 @@ - (void)frameWasEncoded : (OSStatus)status flags int32_t height; int64_t render_time_ms; uint32_t timestamp; - RTCVideoRotation rotation; + RTC_OBJC_TYPE(RTCVideoRotation) rotation; }; // We receive I420Frames as input, but we need to feed CVPixelBuffers into the @@ -196,10 +218,13 @@ void compressionOutputCallback(void *encoder, // no specific VideoToolbox profile for the specified level, AutoLevel will be // returned. The user must initialize the encoder with a resolution and // framerate conforming to the selected H264 level regardless. -CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id) { +CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id, bool screenSharing) { switch (profile_level_id.profile) { case webrtc::H264Profile::kProfileConstrainedBaseline: case webrtc::H264Profile::kProfileBaseline: + if (screenSharing) { + return kVTProfileLevel_H264_Baseline_AutoLevel; + } switch (profile_level_id.level) { case webrtc::H264Level::kLevel3: return kVTProfileLevel_H264_Baseline_3_0; @@ -337,21 +362,28 @@ NSUInteger GetMaxSampleRate( @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { RTC_OBJC_TYPE(RTCVideoCodecInfo) * _codecInfo; - std::unique_ptr _bitrateAdjuster; uint32_t _targetBitrateBps; + uint32_t _targetFrameRate; uint32_t _encoderBitrateBps; uint32_t _encoderFrameRate; uint32_t _maxAllowedFrameRate; - RTCH264PacketizationMode _packetizationMode; + RTC_OBJC_TYPE(RTCH264PacketizationMode) _packetizationMode; std::optional _profile_level_id; RTCVideoEncoderCallback _callback; int32_t _width; int32_t _height; VTCompressionSessionRef _compressionSession; - RTCVideoCodecMode _mode; + CVPixelBufferPoolRef _pixelBufferPool; + RTC_OBJC_TYPE(RTCVideoCodecMode) _codecMode; + unsigned int _maxQP; + unsigned int _minBitrate; + unsigned int _maxBitrate; + RTC_OBJC_TYPE(RTCVideoEncodeMode) _encodeMode; webrtc::H264BitstreamParser _h264BitstreamParser; std::vector _frameScaleBuffer; + + CMTime _previousPresentationTimeStamp; } // .5 is set as a mininum to prevent overcompensating for large temporary @@ -366,14 +398,15 @@ - (instancetype)initWithCodecInfo: self = [super init]; if (self) { _codecInfo = codecInfo; - _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95)); - _packetizationMode = RTCH264PacketizationModeNonInterleaved; - _profile_level_id = webrtc::ParseSdpForH264ProfileLevelId( - [codecInfo nativeSdpVideoFormat].parameters); + _packetizationMode = RTC_OBJC_TYPE(RTCH264PacketizationModeNonInterleaved); + _profile_level_id = + webrtc::ParseSdpForH264ProfileLevelId([codecInfo nativeSdpVideoFormat].parameters); + _previousPresentationTimeStamp = kCMTimeZero; RTC_DCHECK(_profile_level_id); RTC_LOG(LS_INFO) << "Using profile " - << CFStringToString(ExtractProfile(*_profile_level_id)); - RTC_CHECK([codecInfo.name 
isEqualToString:kRTCVideoCodecH264Name]); + << CFStringToString(ExtractProfile( + *_profile_level_id, _codecMode == RTC_OBJC_TYPE(RTCVideoCodecModeScreensharing))); + RTC_CHECK([codecInfo.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name)]); } return self; } @@ -386,11 +419,16 @@ - (NSInteger)startEncodeWithSettings: (RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings numberOfCores:(int)numberOfCores { RTC_DCHECK(settings); - RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name]); + RTC_DCHECK([settings.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name)]); _width = settings.width; _height = settings.height; - _mode = settings.mode; + _codecMode = settings.mode; + _maxQP = settings.qpMax; + + _encodeMode = RTC_OBJC_TYPE(RTCVideoEncodeModeVariable); // Always variable mode for now + _minBitrate = settings.minBitrate * 1000; // minBitrate is in kbps. + _maxBitrate = settings.maxBitrate * 1000; // maxBitrate is in kbps. uint32_t aligned_width = (((_width + 15) >> 4) << 4); uint32_t aligned_height = (((_height + 15) >> 4) << 4); @@ -398,15 +436,19 @@ - (NSInteger)startEncodeWithSettings: GetMaxSampleRate(*_profile_level_id) / (aligned_width * aligned_height)); // We can only set average bitrate on the HW encoder. - _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps. - _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps); - _encoderFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate); - if (settings.maxFramerate > _maxAllowedFrameRate && - _maxAllowedFrameRate > 0) { - RTC_LOG(LS_WARNING) << "Initial encoder frame rate setting " - << settings.maxFramerate << " is larger than the " - << "maximal allowed frame rate " << _maxAllowedFrameRate - << "."; + if (_encodeMode == RTC_OBJC_TYPE(RTCVideoEncodeModeConstant)) { + _targetBitrateBps = _maxBitrate; + } else { + _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps. + } + + _targetFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate); + _encoderBitrateBps = 0; + _encoderFrameRate = 0; + if (settings.maxFramerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) { + RTC_LOG(LS_WARNING) << "Initial encoder frame rate setting " << settings.maxFramerate + << " is larger than the " + << "maximal allowed frame rate " << _maxAllowedFrameRate << "."; } // TODO(tkchin): Try setting payload size via @@ -422,8 +464,15 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame if (!_callback || !_compressionSession) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } - BOOL isKeyframeRequired = NO; + CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000); + if (CMTimeCompare(presentationTimeStamp, _previousPresentationTimeStamp) == 0) { + // Same PTS + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + _previousPresentationTimeStamp = presentationTimeStamp; + + BOOL isKeyframeRequired = NO; // Get a pixel buffer from the pool and copy frame data over. 
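+  // Note: the duplicate-PTS guard above builds its CMTime with a timescale of
+  // 1000, i.e. millisecond precision: a frame with timeStampNs = 33,000,000
+  // becomes CMTimeMake(33, 1000) = 0.033 s, and a later frame that maps to
+  // the same millisecond is dropped with WEBRTC_VIDEO_CODEC_NO_OUTPUT rather
+  // than re-encoded.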
if ([self resetCompressionSessionIfNeededWithFrame:frame]) { isKeyframeRequired = YES; @@ -450,9 +499,8 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame int dstWidth = CVPixelBufferGetWidth(pixelBuffer); int dstHeight = CVPixelBufferGetHeight(pixelBuffer); if ([rtcPixelBuffer requiresScalingToWidth:dstWidth height:dstHeight]) { - int size = - [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth - height:dstHeight]; + int size = [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth + height:dstHeight]; _frameScaleBuffer.resize(size); } else { _frameScaleBuffer.clear(); @@ -486,15 +534,13 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame // Check if we need a keyframe. if (!isKeyframeRequired && frameTypes) { for (NSNumber *frameType in frameTypes) { - if ((RTCFrameType)frameType.intValue == RTCFrameTypeVideoFrameKey) { + if ((RTC_OBJC_TYPE(RTCFrameType))frameType.intValue == RTC_OBJC_TYPE(RTCFrameTypeVideoFrameKey)) { isKeyframeRequired = YES; break; } } } - CMTime presentationTimeStamp = - CMTimeMake(frame.timeStampNs / webrtc::kNumNanosecsPerMillisec, 1000); CFDictionaryRef frameProperties = nullptr; if (isKeyframeRequired) { CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame}; @@ -513,9 +559,8 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame frame.rotation)); encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode; - // Update the bitrate if needed. - [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() - frameRate:_encoderFrameRate]; + // Update encoder bitrate or frameRate if needed. + [self updateEncoderBitrateAndFrameRate]; OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession, pixelBuffer, @@ -558,17 +603,21 @@ - (void)setCallback:(RTCVideoEncoderCallback)callback { } - (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate { - _targetBitrateBps = 1000 * bitrateKbit; - _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps); + // set target bitrate bps + _targetBitrateBps = bitrateKbit * 1000; + + RTC_LOG(LS_INFO) << "setBitrateKBit: " << bitrateKbit << " targetBps: " << _targetBitrateBps + << " frameRate: " << framerate; + if (framerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) { RTC_LOG(LS_WARNING) << "Encoder frame rate setting " << framerate << " is larger than the " << "maximal allowed frame rate " << _maxAllowedFrameRate << "."; } - framerate = MIN(framerate, _maxAllowedFrameRate); - [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() - frameRate:framerate]; + + _targetFrameRate = MIN(framerate, _maxAllowedFrameRate); + return WEBRTC_VIDEO_CODEC_OK; } @@ -622,8 +671,23 @@ - (BOOL)resetCompressionSessionIfNeededWithFrame: CVPixelBufferPoolRef pixelBufferPool = VTCompressionSessionGetPixelBufferPool(_compressionSession); if (!pixelBufferPool) { - // If we have a compression session but can't acquire the pixel buffer - // pool, we're in an invalid state and should reset. 
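+    // Note: the replacement logic below resets the session outright when no
+    // pixel buffer pool can be acquired, then inspects the pool's
+    // kCVPixelBufferPixelFormatTypeKey attribute (either a single NSNumber or
+    // an NSArray of them) and forces a reset when the incoming frame's pixel
+    // format is not among the formats the session was created with.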
+ [self resetCompressionSessionWithPixelFormat:framePixelFormat]; + return YES; + } + + NSDictionary *poolAttributes = + (__bridge NSDictionary *)CVPixelBufferPoolGetPixelBufferAttributes(pixelBufferPool); + id pixelFormats = + [poolAttributes objectForKey:(__bridge NSString *)kCVPixelBufferPixelFormatTypeKey]; + NSArray *compressionSessionPixelFormats = nil; + if ([pixelFormats isKindOfClass:[NSArray class]]) { + compressionSessionPixelFormats = (NSArray *)pixelFormats; + } else if ([pixelFormats isKindOfClass:[NSNumber class]]) { + compressionSessionPixelFormats = @[ (NSNumber *)pixelFormats ]; + } + + if (![compressionSessionPixelFormats + containsObject:[NSNumber numberWithLong:framePixelFormat]]) { resetCompressionSession = YES; } else { NSDictionary *poolAttributes = @@ -663,7 +727,7 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { NSDictionary *sourceAttributes = @{ #if defined(WEBRTC_IOS) && (TARGET_OS_MACCATALYST || TARGET_OS_SIMULATOR) (NSString *)kCVPixelBufferMetalCompatibilityKey : @(YES), -#elif defined(WEBRTC_IOS) +#elif defined(WEBRTC_IOS) && !defined(TARGET_OS_VISION) (NSString *)kCVPixelBufferOpenGLESCompatibilityKey : @(YES), #elif defined(WEBRTC_MAC) && !defined(WEBRTC_ARCH_ARM64) (NSString *)kCVPixelBufferOpenGLCompatibilityKey : @(YES), @@ -672,15 +736,19 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { (NSString *)kCVPixelBufferPixelFormatTypeKey : @(framePixelFormat), }; - NSDictionary *encoder_specs; + NSMutableDictionary *encoder_specs; #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) // Currently hw accl is supported above 360p on mac, below 360p // the compression session will be created with hw accl disabled. - encoder_specs = @{ - (NSString *) - kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder : @(YES), - }; - + encoder_specs = [@{ + (NSString *)kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder : @(YES), + } mutableCopy]; + // Enable low-latency video encoding + if (@available(iOS 14.5, macOS 11.3, *)) { + [encoder_specs addEntriesFromDictionary:@{ + (NSString *)kVTVideoEncoderSpecification_EnableLowLatencyRateControl : @(YES), + }]; + } #endif OSStatus status = VTCompressionSessionCreate( nullptr, // use default allocator @@ -718,15 +786,31 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { - (void)configureCompressionSession { RTC_DCHECK(_compressionSession); + SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true); + // Sacrifice quality for encoding speed when necessary + if (@available(iOS 14.0, macOS 11.0, *)) { + SetVTSessionProperty( + _compressionSession, kVTCompressionPropertyKey_PrioritizeEncodingSpeedOverQuality, true); + } + // Set maximum QP for screen sharing mode, range must be within 1 to 51 + // https://developer.apple.com/documentation/videotoolbox/kvtcompressionpropertykey_maxallowedframeqp + if (@available(iOS 15.0, macOS 12.0, *)) { + // Only enable for screen sharing and let VideoToolbox do the optimizing as much as possible.
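+  // Note: for this property the allowed QP range is 1 (finest quantization)
+  // to 51 (coarsest); capping screen-share frames at kHighH264QpThreshold
+  // (39) bounds the worst-case quality drop while still letting the rate
+  // controller pick any smaller QP.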
+ if (_codecMode == RTC_OBJC_TYPE(RTCVideoCodecModeScreensharing)) { + RTC_LOG(LS_INFO) << "Configuring VideoToolbox to use maxQP: " << kHighH264QpThreshold + << " mode: " << _codecMode; + SetVTSessionProperty( + _compressionSession, kVTCompressionPropertyKey_MaxAllowedFrameQP, kHighH264QpThreshold); + } + } SetVTSessionProperty( - _compressionSession, kVTCompressionPropertyKey_RealTime, true); - SetVTSessionProperty(_compressionSession, - kVTCompressionPropertyKey_ProfileLevel, - ExtractProfile(*_profile_level_id)); - SetVTSessionProperty(_compressionSession, - kVTCompressionPropertyKey_AllowFrameReordering, - false); - [self setEncoderBitrateBps:_targetBitrateBps frameRate:_encoderFrameRate]; + _compressionSession, + kVTCompressionPropertyKey_ProfileLevel, + ExtractProfile(*_profile_level_id, _codecMode == RTC_OBJC_TYPE(RTCVideoCodecModeScreensharing))); + SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false); + + // [self updateEncoderBitrateAndFrameRate]; + // TODO(tkchin): Look at entropy mode and colorspace matrices. // TODO(tkchin): Investigate to see if there's any way to make this work. // May need it to interop with Android. Currently this call just fails. @@ -756,59 +840,59 @@ - (NSString *)implementationName { return @"VideoToolbox"; } -- (void)setBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate { - if (_encoderBitrateBps != bitrateBps || _encoderFrameRate != frameRate) { - [self setEncoderBitrateBps:bitrateBps frameRate:frameRate]; +- (void)updateEncoderBitrateAndFrameRate { + // If no compression session simply return + if (!_compressionSession) { + return; } -} + // Initial status + OSStatus status = noErr; -- (void)setEncoderBitrateBps:(uint32_t)bitrateBps - frameRate:(uint32_t)frameRate { - if (_compressionSession) { - SetVTSessionProperty(_compressionSession, - kVTCompressionPropertyKey_AverageBitRate, - bitrateBps); - - // With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate - // detection. - if (_maxAllowedFrameRate > 0) { - SetVTSessionProperty(_compressionSession, - kVTCompressionPropertyKey_ExpectedFrameRate, - frameRate); - } + uint32_t computedBitrateBps = _targetBitrateBps; - // TODO(tkchin): Add a helper method to set array value. - int64_t dataLimitBytesPerSecondValue = - static_cast(bitrateBps * kLimitToAverageBitRateFactor / 8); - CFNumberRef bytesPerSecond = CFNumberCreate(kCFAllocatorDefault, - kCFNumberSInt64Type, - &dataLimitBytesPerSecondValue); - int64_t oneSecondValue = 1; - CFNumberRef oneSecond = CFNumberCreate( - kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue); - const void *nums[2] = {bytesPerSecond, oneSecond}; - CFArrayRef dataRateLimits = - CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks); - OSStatus status = - VTSessionSetProperty(_compressionSession, - kVTCompressionPropertyKey_DataRateLimits, - dataRateLimits); - if (bytesPerSecond) { - CFRelease(bytesPerSecond); - } - if (oneSecond) { - CFRelease(oneSecond); + // With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate detection. + uint32_t computedFrameRate = _maxAllowedFrameRate > 0 ? 
_targetFrameRate : 0; + + // Set frame rate + if (computedFrameRate != _encoderFrameRate) { + status = VTSessionSetProperty(_compressionSession, + kVTCompressionPropertyKey_ExpectedFrameRate, + (__bridge CFTypeRef) @(computedFrameRate)); + // Ensure the frame rate was set successfully + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to set frame rate: " << computedFrameRate + << " error: " << status; + } else { + RTC_LOG(LS_INFO) << "Did update encoder frame rate: " << computedFrameRate; } + _encoderFrameRate = computedFrameRate; + } + + // Set bitrate + if (computedBitrateBps != _encoderBitrateBps) { + status = VTSessionSetProperty(_compressionSession, + kVTCompressionPropertyKey_AverageBitRate, + (__bridge CFTypeRef) @(computedBitrateBps)); + + // Ensure the bitrate was set successfully + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to update encoder bitrate: " << computedBitrateBps + << " error: " << status; + } else { + RTC_LOG(LS_INFO) << "Did update encoder bitrate: " << computedBitrateBps; } + + status = VTSessionSetProperty( + _compressionSession, + kVTCompressionPropertyKey_DataRateLimits, + (__bridge CFArrayRef)CreateRateLimitArray(computedBitrateBps, _encodeMode)); + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to update encoder data rate limits"; + } else { + RTC_LOG(LS_INFO) << "Did update encoder data rate limits"; } + _encoderBitrateBps = computedBitrateBps; } } @@ -821,7 +905,7 @@ - (void)frameWasEncoded:(OSStatus)status height:(int32_t)height renderTimeMs:(int64_t)renderTimeMs timestamp:(uint32_t)timestamp - rotation:(RTCVideoRotation)rotation { + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation { RTCVideoEncoderCallback callback = _callback; if (!callback) { return; @@ -868,14 +952,13 @@ - (void)frameWasEncoded:(OSStatus)status }]; frame.encodedWidth = width; frame.encodedHeight = height; - frame.frameType = - isKeyframe ? RTCFrameTypeVideoFrameKey : RTCFrameTypeVideoFrameDelta; + frame.frameType = isKeyframe ? RTC_OBJC_TYPE(RTCFrameTypeVideoFrameKey) : RTC_OBJC_TYPE(RTCFrameTypeVideoFrameDelta); frame.captureTimeMs = renderTimeMs; frame.timeStamp = timestamp; frame.rotation = rotation; - frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? - RTCVideoContentTypeScreenshare : - RTCVideoContentTypeUnspecified; + frame.contentType = (_codecMode == RTC_OBJC_TYPE(RTCVideoCodecModeScreensharing)) ?
+ RTC_OBJC_TYPE(RTCVideoContentTypeScreenshare) : + RTC_OBJC_TYPE(RTCVideoContentTypeUnspecified); frame.flags = webrtc::VideoSendTiming::kInvalid; _h264BitstreamParser.ParseBitstream(*buffer); @@ -886,7 +969,6 @@ - (void)frameWasEncoded:(OSStatus)status RTC_LOG(LS_ERROR) << "Encode callback failed"; return; } - _bitrateAdjuster->Update(frame.buffer.length); } - (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings { @@ -896,3 +978,4 @@ - (void)frameWasEncoded:(OSStatus)status } @end + diff --git a/sdk/objc/helpers/RTCCameraPreviewView.m b/sdk/objc/helpers/RTCCameraPreviewView.m index 92c3cd2635..f23f80105d 100644 --- a/sdk/objc/helpers/RTCCameraPreviewView.m +++ b/sdk/objc/helpers/RTCCameraPreviewView.m @@ -26,7 +26,9 @@ + (Class)layerClass { - (instancetype)initWithFrame:(CGRect)aRect { self = [super initWithFrame:aRect]; if (self) { +#if !TARGET_OS_TV [self addOrientationObserver]; +#endif } return self; } @@ -34,14 +36,18 @@ - (instancetype)initWithFrame:(CGRect)aRect { - (instancetype)initWithCoder:(NSCoder *)aDecoder { self = [super initWithCoder:aDecoder]; if (self) { +#if !TARGET_OS_TV [self addOrientationObserver]; +#endif } return self; } +#if !TARGET_OS_TV - (void)dealloc { [self removeOrientationObserver]; } +#endif - (void)setCaptureSession:(AVCaptureSession *)captureSession { if (_captureSession == captureSession) { @@ -49,25 +55,27 @@ - (void)setCaptureSession:(AVCaptureSession *)captureSession { } _captureSession = captureSession; [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeMain + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeMain) block:^{ AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer]; [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession) block:^{ previewLayer.session = captureSession; +#if !TARGET_OS_TV [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType: - RTCDispatcherTypeMain + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeMain) block:^{ [self setCorrectVideoOrientation]; }]; +#endif }]; }]; } +#if !TARGET_OS_TV - (void)layoutSubviews { [super layoutSubviews]; @@ -88,17 +96,13 @@ - (void)setCorrectVideoOrientation { if (previewLayer.connection.isVideoOrientationSupported) { // Set the video orientation based on device orientation. if (deviceOrientation == UIDeviceOrientationPortraitUpsideDown) { - previewLayer.connection.videoOrientation = - AVCaptureVideoOrientationPortraitUpsideDown; + previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown; } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { - previewLayer.connection.videoOrientation = - AVCaptureVideoOrientationLandscapeRight; + previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight; } else if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { - previewLayer.connection.videoOrientation = - AVCaptureVideoOrientationLandscapeLeft; + previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft; } else if (deviceOrientation == UIDeviceOrientationPortrait) { - previewLayer.connection.videoOrientation = - AVCaptureVideoOrientationPortrait; + previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait; } // If device orientation switches to FaceUp or FaceDown, don't change video // orientation. 
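// Note: the #if !TARGET_OS_TV guards in this file compile out UIDevice
// orientation handling on tvOS, which has no device-orientation
// notifications. TARGET_OS_TV comes from <TargetConditionals.h> and is always
// defined, as either 0 or 1, so testing its value is the correct form.
// Illustrative sketch of the pattern (hypothetical selector name):
//
//   #import <TargetConditionals.h>
//   #if !TARGET_OS_TV
//   [[NSNotificationCenter defaultCenter]
//       addObserver:self
//          selector:@selector(orientationDidChange:)
//              name:UIDeviceOrientationDidChangeNotification
//            object:nil];
//   #endif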
@@ -122,6 +126,8 @@ - (void)removeOrientationObserver { object:nil]; } +#endif + - (AVCaptureVideoPreviewLayer *)previewLayer { return (AVCaptureVideoPreviewLayer *)self.layer; } diff --git a/sdk/objc/helpers/RTCDispatcher+Private.h b/sdk/objc/helpers/RTCDispatcher+Private.h index da286c6467..e587e11c0a 100644 --- a/sdk/objc/helpers/RTCDispatcher+Private.h +++ b/sdk/objc/helpers/RTCDispatcher+Private.h @@ -13,7 +13,6 @@ @interface RTC_OBJC_TYPE (RTCDispatcher) () - + (dispatch_queue_t)dispatchQueueForType - : (RTCDispatcherQueueType)dispatchType; + + (dispatch_queue_t)dispatchQueueForType : (RTC_OBJC_TYPE(RTCDispatcherQueueType))dispatchType; @end diff --git a/sdk/objc/helpers/RTCDispatcher.h b/sdk/objc/helpers/RTCDispatcher.h index f6afe24243..ebffc54088 100644 --- a/sdk/objc/helpers/RTCDispatcher.h +++ b/sdk/objc/helpers/RTCDispatcher.h @@ -12,16 +12,16 @@ #import "sdk/objc/base/RTCMacros.h" -typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) { +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCDispatcherQueueType)) { // Main dispatcher queue. - RTCDispatcherTypeMain, + RTC_OBJC_TYPE(RTCDispatcherTypeMain), // Used for starting/stopping AVCaptureSession, and assigning // capture session to AVCaptureVideoPreviewLayer. - RTCDispatcherTypeCaptureSession, + RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession), // Used for operations on AVAudioSession. - RTCDispatcherTypeAudioSession, + RTC_OBJC_TYPE(RTCDispatcherTypeAudioSession), // Used for operations on NWPathMonitor. - RTCDispatcherTypeNetworkMonitor, + RTC_OBJC_TYPE(RTCDispatcherTypeNetworkMonitor), }; /** Dispatcher that asynchronously dispatches blocks to a specific @@ -36,12 +36,11 @@ RTC_OBJC_EXPORT * @param dispatchType The queue type to dispatch on. * @param block The block to dispatch asynchronously. */ -+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType - block:(dispatch_block_t)block; ++ (void)dispatchAsyncOnType:(RTC_OBJC_TYPE(RTCDispatcherQueueType))dispatchType block:(dispatch_block_t)block; /** Returns YES if run on queue for the dispatchType otherwise NO. * Useful for asserting that a method is run on a correct queue. 
*/ -+ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType; ++ (BOOL)isOnQueueForType:(RTC_OBJC_TYPE(RTCDispatcherQueueType))dispatchType; @end diff --git a/sdk/objc/helpers/RTCDispatcher.m b/sdk/objc/helpers/RTCDispatcher.m index 00f292e869..6f52568cd7 100644 --- a/sdk/objc/helpers/RTCDispatcher.m +++ b/sdk/objc/helpers/RTCDispatcher.m @@ -28,13 +28,13 @@ + (void)initialize { }); } -+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType ++ (void)dispatchAsyncOnType:(RTC_OBJC_TYPE(RTCDispatcherQueueType))dispatchType block:(dispatch_block_t)block { dispatch_queue_t queue = [self dispatchQueueForType:dispatchType]; dispatch_async(queue, block); } -+ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType { ++ (BOOL)isOnQueueForType:(RTC_OBJC_TYPE(RTCDispatcherQueueType))dispatchType { dispatch_queue_t targetQueue = [self dispatchQueueForType:dispatchType]; const char* targetLabel = dispatch_queue_get_label(targetQueue); const char* currentLabel = @@ -49,15 +49,15 @@ + (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType { #pragma mark - Private -+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType { ++ (dispatch_queue_t)dispatchQueueForType:(RTC_OBJC_TYPE(RTCDispatcherQueueType))dispatchType { switch (dispatchType) { - case RTCDispatcherTypeMain: + case RTC_OBJC_TYPE(RTCDispatcherTypeMain): return dispatch_get_main_queue(); - case RTCDispatcherTypeCaptureSession: + case RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession): return kCaptureSessionQueue; - case RTCDispatcherTypeAudioSession: + case RTC_OBJC_TYPE(RTCDispatcherTypeAudioSession): return kAudioSessionQueue; - case RTCDispatcherTypeNetworkMonitor: + case RTC_OBJC_TYPE(RTCDispatcherTypeNetworkMonitor): return kNetworkMonitorQueue; } } diff --git a/sdk/objc/helpers/RTCYUVHelper.h b/sdk/objc/helpers/RTCYUVHelper.h new file mode 100644 index 0000000000..354be73729 --- /dev/null +++ b/sdk/objc/helpers/RTCYUVHelper.h @@ -0,0 +1,118 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */
+
+#import <Foundation/Foundation.h>
+
+#import "RTCMacros.h"
+#import "RTCVideoFrame.h"
+
+RTC_OBJC_EXPORT
+@interface RTC_OBJC_TYPE (RTCYUVHelper) : NSObject
+
+- (instancetype)init NS_UNAVAILABLE;
+
++ (void)I420Rotate:(const uint8_t*)srcY
+        srcStrideY:(int)srcStrideY
+              srcU:(const uint8_t*)srcU
+        srcStrideU:(int)srcStrideU
+              srcV:(const uint8_t*)srcV
+        srcStrideV:(int)srcStrideV
+              dstY:(uint8_t*)dstY
+        dstStrideY:(int)dstStrideY
+              dstU:(uint8_t*)dstU
+        dstStrideU:(int)dstStrideU
+              dstV:(uint8_t*)dstV
+        dstStrideV:(int)dstStrideV
+             width:(int)width
+            height:(int)height
+              mode:(RTC_OBJC_TYPE(RTCVideoRotation))mode;
+
++ (int)I420ToNV12:(const uint8_t*)srcY
+       srcStrideY:(int)srcStrideY
+             srcU:(const uint8_t*)srcU
+       srcStrideU:(int)srcStrideU
+             srcV:(const uint8_t*)srcV
+       srcStrideV:(int)srcStrideV
+             dstY:(uint8_t*)dstY
+       dstStrideY:(int)dstStrideY
+            dstUV:(uint8_t*)dstUV
+      dstStrideUV:(int)dstStrideUV
+            width:(int)width
+           height:(int)height;
+
++ (int)I420ToNV21:(const uint8_t*)srcY
+       srcStrideY:(int)srcStrideY
+             srcU:(const uint8_t*)srcU
+       srcStrideU:(int)srcStrideU
+             srcV:(const uint8_t*)srcV
+       srcStrideV:(int)srcStrideV
+             dstY:(uint8_t*)dstY
+       dstStrideY:(int)dstStrideY
+            dstUV:(uint8_t*)dstUV
+      dstStrideUV:(int)dstStrideUV
+            width:(int)width
+           height:(int)height;
+
++ (int)I420ToARGB:(const uint8_t*)srcY
+       srcStrideY:(int)srcStrideY
+             srcU:(const uint8_t*)srcU
+       srcStrideU:(int)srcStrideU
+             srcV:(const uint8_t*)srcV
+       srcStrideV:(int)srcStrideV
+          dstARGB:(uint8_t*)dstARGB
+    dstStrideARGB:(int)dstStrideARGB
+            width:(int)width
+           height:(int)height;
+
++ (int)I420ToBGRA:(const uint8_t*)srcY
+       srcStrideY:(int)srcStrideY
+             srcU:(const uint8_t*)srcU
+       srcStrideU:(int)srcStrideU
+             srcV:(const uint8_t*)srcV
+       srcStrideV:(int)srcStrideV
+          dstBGRA:(uint8_t*)dstBGRA
+    dstStrideBGRA:(int)dstStrideBGRA
+            width:(int)width
+           height:(int)height;
+
++ (int)I420ToABGR:(const uint8_t*)srcY
+       srcStrideY:(int)srcStrideY
+             srcU:(const uint8_t*)srcU
+       srcStrideU:(int)srcStrideU
+             srcV:(const uint8_t*)srcV
+       srcStrideV:(int)srcStrideV
+          dstABGR:(uint8_t*)dstABGR
+    dstStrideABGR:(int)dstStrideABGR
+            width:(int)width
+           height:(int)height;
+
++ (int)I420ToRGBA:(const uint8_t*)srcY
+       srcStrideY:(int)srcStrideY
+             srcU:(const uint8_t*)srcU
+       srcStrideU:(int)srcStrideU
+             srcV:(const uint8_t*)srcV
+       srcStrideV:(int)srcStrideV
+          dstRGBA:(uint8_t*)dstRGBA
+    dstStrideRGBA:(int)dstStrideRGBA
+            width:(int)width
+           height:(int)height;
+
++ (int)I420ToRGB24:(const uint8_t*)srcY
+        srcStrideY:(int)srcStrideY
+              srcU:(const uint8_t*)srcU
+        srcStrideU:(int)srcStrideU
+              srcV:(const uint8_t*)srcV
+        srcStrideV:(int)srcStrideV
+          dstRGB24:(uint8_t*)dstRGB24
+    dstStrideRGB24:(int)dstStrideRGB24
+             width:(int)width
+            height:(int)height;
+
+@end
diff --git a/sdk/objc/helpers/RTCYUVHelper.mm b/sdk/objc/helpers/RTCYUVHelper.mm
new file mode 100644
index 0000000000..8c52677c8f
--- /dev/null
+++ b/sdk/objc/helpers/RTCYUVHelper.mm
@@ -0,0 +1,179 @@
+/*
+ * Copyright 2016 The WebRTC project authors. All Rights Reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */ + +#import "RTCYUVHelper.h" + +#include "third_party/libyuv/include/libyuv.h" + +@implementation RTC_OBJC_TYPE (RTCYUVHelper) + ++ (void)I420Rotate:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstU:(uint8_t*)dstU + dstStrideU:(int)dstStrideU + dstV:(uint8_t*)dstV + dstStrideV:(int)dstStrideV + width:(int)width + height:(int)height + mode:(RTC_OBJC_TYPE(RTCVideoRotation))mode { + libyuv::I420Rotate(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstU, + dstStrideU, + dstV, + dstStrideV, + width, + height, + (libyuv::RotationMode)mode); +} + ++ (int)I420ToNV12:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + height:(int)height { + return libyuv::I420ToNV12(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstUV, + dstStrideUV, + width, + height); +} + ++ (int)I420ToNV21:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + height:(int)height { + return libyuv::I420ToNV21(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstUV, + dstStrideUV, + width, + height); +} + ++ (int)I420ToARGB:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstARGB:(uint8_t*)dstARGB + dstStrideARGB:(int)dstStrideARGB + width:(int)width + height:(int)height { + return libyuv::I420ToARGB( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstARGB, dstStrideARGB, width, height); +} + ++ (int)I420ToBGRA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstBGRA:(uint8_t*)dstBGRA + dstStrideBGRA:(int)dstStrideBGRA + width:(int)width + height:(int)height { + return libyuv::I420ToBGRA( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstBGRA, dstStrideBGRA, width, height); +} + ++ (int)I420ToABGR:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstABGR:(uint8_t*)dstABGR + dstStrideABGR:(int)dstStrideABGR + width:(int)width + height:(int)height { + return libyuv::I420ToABGR( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstABGR, dstStrideABGR, width, height); +} + ++ (int)I420ToRGBA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGBA:(uint8_t*)dstRGBA + dstStrideRGBA:(int)dstStrideRGBA + width:(int)width + height:(int)height { + return libyuv::I420ToRGBA( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstRGBA, dstStrideRGBA, width, height); +} + ++ (int)I420ToRGB24:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + 
srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGB24:(uint8_t*)dstRGB24 + dstStrideRGB24:(int)dstStrideRGB24 + width:(int)width + height:(int)height { + return libyuv::I420ToRGB24(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstRGB24, + dstStrideRGB24, + width, + height); +} + +@end diff --git a/sdk/objc/native/api/audio_device_module.h b/sdk/objc/native/api/audio_device_module.h index 34f99e85c8..0ee04d4bf6 100644 --- a/sdk/objc/native/api/audio_device_module.h +++ b/sdk/objc/native/api/audio_device_module.h @@ -26,6 +26,7 @@ namespace webrtc { webrtc::scoped_refptr CreateAudioDeviceModule( bool bypass_voice_processing = false); +#if defined(WEBRTC_IOS) // If `muted_speech_event_handler` is exist, audio unit will catch speech // activity while muted. webrtc::scoped_refptr CreateMutedDetectAudioDeviceModule( @@ -40,6 +41,7 @@ webrtc::scoped_refptr CreateMutedDetectAudioDeviceModule( AudioDeviceModule::MutedSpeechEventHandler muted_speech_event_handler, ADMErrorHandler error_handler, bool bypass_voice_processing = false); +#endif } // namespace webrtc diff --git a/sdk/objc/native/api/audio_device_module.mm b/sdk/objc/native/api/audio_device_module.mm index 898886b592..06c21c1261 100644 --- a/sdk/objc/native/api/audio_device_module.mm +++ b/sdk/objc/native/api/audio_device_module.mm @@ -13,7 +13,11 @@ #include "api/make_ref_counted.h" #include "rtc_base/logging.h" +#if defined(WEBRTC_IOS) #include "sdk/objc/native/src/audio/audio_device_module_ios.h" +#endif + +#include "modules/audio_device/include/audio_device.h" namespace webrtc { diff --git a/sdk/objc/native/api/video_capturer.mm b/sdk/objc/native/api/video_capturer.mm index ca4937f46e..88a4733a6a 100644 --- a/sdk/objc/native/api/video_capturer.mm +++ b/sdk/objc/native/api/video_capturer.mm @@ -21,7 +21,7 @@ objc_video_capturer, webrtc::Thread *signaling_thread, webrtc::Thread *worker_thread) { - RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init]; + RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) *adapter = [[RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) alloc] init]; webrtc::scoped_refptr objc_video_track_source = webrtc::make_ref_counted(adapter); webrtc::scoped_refptr video_source = diff --git a/sdk/objc/native/src/audio/audio_device_ios.h b/sdk/objc/native/src/audio/audio_device_ios.h index cc70ee7f2f..77c5a1a122 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.h +++ b/sdk/objc/native/src/audio/audio_device_ios.h @@ -25,7 +25,7 @@ #include "sdk/objc/base/RTCMacros.h" #include "voice_processing_audio_unit.h" -RTC_FWD_DECL_OBJC_CLASS(RTCNativeAudioSessionDelegateAdapter); +RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter)); namespace webrtc { @@ -184,6 +184,8 @@ class AudioDeviceIOS : public AudioDeviceGeneric, void HandlePlayoutGlitchDetected(uint64_t glitch_duration_ms); void HandleOutputVolumeChange(); + bool RestartAudioUnit(bool enable_input); + // Uses current `playout_parameters_` and `record_parameters_` to inform the // audio device buffer (ADB) about our internal audio parameters. void UpdateAudioDeviceBuffer(); @@ -212,7 +214,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric, // Activates our audio session, creates and initializes the voice-processing // audio unit and verifies that we got the preferred native audio parameters. - bool InitPlayOrRecord(); + bool InitPlayOrRecord(bool enable_input); // Closes and deletes the voice-processing I/O unit. 
void ShutdownPlayOrRecord(); @@ -282,24 +284,24 @@ class AudioDeviceIOS : public AudioDeviceGeneric, // will be changed dynamically to account for this behavior. webrtc::BufferT record_audio_buffer_; + bool recording_is_initialized_; + // Set to 1 when recording is active and 0 otherwise. std::atomic recording_; + bool playout_is_initialized_; + // Set to 1 when playout is active and 0 otherwise. std::atomic playing_; // Set to true after successful call to Init(), false otherwise. bool initialized_ RTC_GUARDED_BY(thread_); - // Set to true after successful call to InitRecording() or InitPlayout(), - // false otherwise. - bool audio_is_initialized_; - // Set to true if audio session is interrupted, false otherwise. bool is_interrupted_; // Audio interruption observer instance. - RTCNativeAudioSessionDelegateAdapter* audio_session_observer_ + RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter)* audio_session_observer_ RTC_GUARDED_BY(thread_); // Set to true if we've activated the audio session. diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index 25c1e02022..079645c595 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -105,10 +105,11 @@ static void LogDeviceInfo() { disregard_next_render_error_(false), audio_device_buffer_(nullptr), audio_unit_(nullptr), + recording_is_initialized_(false), recording_(0), + playout_is_initialized_(false), playing_(0), initialized_(false), - audio_is_initialized_(false), is_interrupted_(false), has_configured_session_(false), num_detected_playout_glitches_(0), @@ -126,8 +127,7 @@ static void LogDeviceInfo() { io_thread_checker_.Detach(); thread_ = webrtc::Thread::Current(); - audio_session_observer_ = - [[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:this]; + audio_session_observer_ = [[RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) alloc] initWithObserver:this]; mach_timebase_info_data_t tinfo; mach_timebase_info(&tinfo); machTickUnitsToNanoseconds_ = (double)tinfo.numer / tinfo.denom; @@ -197,49 +197,60 @@ static void LogDeviceInfo() { LOGI() << "InitPlayout"; RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(initialized_); - RTC_DCHECK(!audio_is_initialized_); + RTC_DCHECK(!playout_is_initialized_); RTC_DCHECK(!playing_.load()); - if (!audio_is_initialized_) { - if (!InitPlayOrRecord()) { + if (!recording_is_initialized_) { + // recording not initialized yet, init with no input + if (!InitPlayOrRecord(false)) { RTC_LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitPlayout!"; return -1; } } + + playout_is_initialized_ = true; + return 0; } bool AudioDeviceIOS::PlayoutIsInitialized() const { RTC_DCHECK_RUN_ON(thread_); - return audio_is_initialized_; + return playout_is_initialized_; } bool AudioDeviceIOS::RecordingIsInitialized() const { RTC_DCHECK_RUN_ON(thread_); - return audio_is_initialized_; + return recording_is_initialized_; } int32_t AudioDeviceIOS::InitRecording() { LOGI() << "InitRecording"; RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(initialized_); - RTC_DCHECK(!audio_is_initialized_); + RTC_DCHECK(!recording_is_initialized_); RTC_DCHECK(!recording_.load()); - if (!audio_is_initialized_) { - if (!InitPlayOrRecord()) { + if (!playout_is_initialized_) { + // playout not initialized yet, init with input + if (!InitPlayOrRecord(true)) { RTC_LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitRecording!"; return -1; } + } else { + // playout already initialized, restart audio unit with input + RestartAudioUnit(true); } + + 
recording_is_initialized_ = true; + return 0; } int32_t AudioDeviceIOS::StartPlayout() { LOGI() << "StartPlayout"; RTC_DCHECK_RUN_ON(thread_); - RTC_DCHECK(audio_is_initialized_); + RTC_DCHECK(playout_is_initialized_); RTC_DCHECK(!playing_.load()); RTC_DCHECK(audio_unit_); - if (!audio_is_initialized_) { + if (!playout_is_initialized_) { return -1; } if (fine_audio_buffer_) { @@ -268,13 +279,16 @@ static void LogDeviceInfo() { int32_t AudioDeviceIOS::StopPlayout() { LOGI() << "StopPlayout"; RTC_DCHECK_RUN_ON(thread_); - if (!audio_is_initialized_ || !playing_.load()) { + if (!playout_is_initialized_ || !playing_.load()) { return 0; } if (!recording_.load()) { ShutdownPlayOrRecord(); + + recording_is_initialized_ = false; } playing_.store(0, std::memory_order_release); + playout_is_initialized_ = false; // Derive average number of calls to OnGetPlayoutData() between detected // audio glitches and add the result to a histogram. @@ -301,10 +315,10 @@ static void LogDeviceInfo() { int32_t AudioDeviceIOS::StartRecording() { LOGI() << "StartRecording"; RTC_DCHECK_RUN_ON(thread_); - RTC_DCHECK(audio_is_initialized_); + RTC_DCHECK(recording_is_initialized_); RTC_DCHECK(!recording_.load()); RTC_DCHECK(audio_unit_); - if (!audio_is_initialized_) { + if (!recording_is_initialized_) { return -1; } if (fine_audio_buffer_) { @@ -330,13 +344,19 @@ static void LogDeviceInfo() { int32_t AudioDeviceIOS::StopRecording() { LOGI() << "StopRecording"; RTC_DCHECK_RUN_ON(thread_); - if (!audio_is_initialized_ || !recording_.load()) { + if (!recording_is_initialized_ || !recording_.load()) { return 0; } if (!playing_.load()) { ShutdownPlayOrRecord(); + + playout_is_initialized_ = false; + } else if (playout_is_initialized_) { + // restart audio unit with no input + RestartAudioUnit(false); } recording_.store(0, std::memory_order_release); + recording_is_initialized_ = false; return 0; } @@ -496,7 +516,7 @@ static void LogDeviceInfo() { // Exclude extreme delta values since they do most likely not correspond // to a real glitch. Instead, the most probable cause is that a headset // has been plugged in or out. There are more direct ways to detect - // audio device changes (see HandleValidRouteChange()) but experiments + // audio device changes (see ValidRouteChange()) but experiments // show that using it leads to more complex implementations. // TODO(henrika): more tests might be needed to come up with an even // better upper limit. @@ -678,7 +698,7 @@ static void LogDeviceInfo() { SetupAudioBuffersForActiveAudioSession(); // Initialize the audio unit again with the new sample rate. - if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) { + if (!audio_unit_->Initialize(playout_parameters_.sample_rate(), recording_is_initialized_)) { RTCLogError(@"Failed to initialize the audio unit with sample rate: %d", playout_parameters_.sample_rate()); return; @@ -737,6 +757,46 @@ static void LogDeviceInfo() { last_output_volume_change_time_ = webrtc::TimeMillis(); } +bool AudioDeviceIOS::RestartAudioUnit(bool enable_input) { + RTC_DCHECK_RUN_ON(&io_thread_checker_); + + LOGI() << "RestartAudioUnit"; + + // If we don't have an audio unit yet, or the audio unit is uninitialized, + // there is no work to do. 
+ if (!audio_unit_ || audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) { + return false; + } + + bool restart_audio_unit = false; + if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) { + audio_unit_->Stop(); + PrepareForNewStart(); + restart_audio_unit = true; + } + + if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) { + audio_unit_->Uninitialize(); + } + + // Initialize the audio unit again with the same sample rate. + const double sample_rate = playout_parameters_.sample_rate(); + + if (!audio_unit_->Initialize(sample_rate, enable_input)) { + RTCLogError(@"Failed to initialize the audio unit with sample rate: %f", sample_rate); + return false; + } + + // Restart the audio unit if it was already running. + if (restart_audio_unit && !audio_unit_->Start()) { + RTCLogError(@"Failed to start audio unit with sample rate: %f", sample_rate); + return false; + } + + LOGI() << "Successfully enabled audio unit for recording."; + return true; +} + void AudioDeviceIOS::UpdateAudioDeviceBuffer() { LOGI() << "UpdateAudioDevicebuffer"; // AttachAudioBuffer() is called at construction by the main class but check @@ -815,8 +875,8 @@ static void LogDeviceInfo() { bool AudioDeviceIOS::CreateAudioUnit() { RTC_DCHECK(!audio_unit_); - RTC_DCHECK(!audio_is_initialized_); - if (audio_unit_ || audio_is_initialized_) { + RTC_DCHECK(!playout_is_initialized_ && !recording_is_initialized_); + if (audio_unit_ || playout_is_initialized_ || recording_is_initialized_) { return false; } BOOL detect_mute_speech_ = (muted_speech_event_handler_ != 0); @@ -843,7 +903,7 @@ static void LogDeviceInfo() { // If we're not initialized we don't need to do anything. Audio unit will // be initialized on initialization. - if (!audio_is_initialized_) return; + if (!playout_is_initialized_ && !recording_is_initialized_) return; // If we're initialized, we must have an audio unit. RTC_DCHECK(audio_unit_); @@ -882,7 +942,7 @@ static void LogDeviceInfo() { RTCLog(@"Initializing audio unit for UpdateAudioUnit"); ConfigureAudioSession(); SetupAudioBuffersForActiveAudioSession(); - if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) { + if (!audio_unit_->Initialize(playout_parameters_.sample_rate(), recording_is_initialized_)) { RTCLogError(@"Failed to initialize audio unit."); return; } @@ -976,7 +1036,7 @@ static void LogDeviceInfo() { RTCLog(@"Unconfigured audio session."); } -bool AudioDeviceIOS::InitPlayOrRecord() { +bool AudioDeviceIOS::InitPlayOrRecord(bool enable_input) { LOGI() << "InitPlayOrRecord"; RTC_DCHECK_RUN_ON(thread_); @@ -1014,12 +1074,11 @@ static void LogDeviceInfo() { return false; } SetupAudioBuffersForActiveAudioSession(); - audio_unit_->Initialize(playout_parameters_.sample_rate()); + audio_unit_->Initialize(playout_parameters_.sample_rate(), enable_input); } // Release the lock. [session unlockForConfiguration]; - audio_is_initialized_ = true; return true; } @@ -1046,7 +1105,8 @@ static void LogDeviceInfo() { // session, hence we deactivate as last action. 
UnconfigureAudioSession(); - audio_is_initialized_ = false; + playout_is_initialized_ = false; + recording_is_initialized_ = false; } void AudioDeviceIOS::PrepareForNewStart() { diff --git a/sdk/objc/native/src/audio/audio_device_module_ios.h b/sdk/objc/native/src/audio/audio_device_module_ios.h index 394e1ff9bd..642adc2ada 100644 --- a/sdk/objc/native/src/audio/audio_device_module_ios.h +++ b/sdk/objc/native/src/audio/audio_device_module_ios.h @@ -135,6 +135,9 @@ class AudioDeviceModuleIOS : public AudioDeviceModule { int GetPlayoutAudioParameters(AudioParameters* params) const override; int GetRecordAudioParameters(AudioParameters* params) const override; #endif // WEBRTC_IOS + + int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const override; + private: void ReportError(ADMError error) const; const bool bypass_voice_processing_; diff --git a/sdk/objc/native/src/audio/audio_device_module_ios.mm b/sdk/objc/native/src/audio/audio_device_module_ios.mm index 3b338f2399..ca19ad6de2 100644 --- a/sdk/objc/native/src/audio/audio_device_module_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_module_ios.mm @@ -779,5 +779,11 @@ return r; } #endif // WEBRTC_IOS -} // namespace ios_adm -} // namespace webrtc + + int32_t AudioDeviceModuleIOS::SetAudioDeviceSink(AudioDeviceSink* sink) const { + // not implemented + RTC_LOG(LS_WARNING) << __FUNCTION__ << "(" << sink << ") Not implemented"; + return -1; + } +} +} diff --git a/sdk/objc/native/src/audio/voice_processing_audio_unit.h b/sdk/objc/native/src/audio/voice_processing_audio_unit.h index 99586a94ed..fe3c87096e 100644 --- a/sdk/objc/native/src/audio/voice_processing_audio_unit.h +++ b/sdk/objc/native/src/audio/voice_processing_audio_unit.h @@ -81,7 +81,7 @@ class VoiceProcessingAudioUnit { VoiceProcessingAudioUnit::State GetState() const; // Initializes the underlying audio unit with the given sample rate. - bool Initialize(Float64 sample_rate); + bool Initialize(Float64 sample_rate, bool enable_input); // Starts the underlying audio unit. OSStatus Start(); diff --git a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm index 066f3b161c..238693de99 100644 --- a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm +++ b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm @@ -212,7 +212,7 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { return state_; } -bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) { +bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate, bool enable_input) { RTC_DCHECK_GE(state_, kUninitialized); RTCLog(@"Initializing audio unit with sample rate: %f", sample_rate); @@ -223,6 +223,19 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { LogStreamDescription(format); #endif + UInt32 _enable_input = enable_input ? 1 : 0; + RTCLog(@"Initializing AudioUnit, _enable_input=%d", (int) _enable_input); + result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Input, kInputBus, &_enable_input, + sizeof(_enable_input)); + if (result != noErr) { + DisposeAudioUnit(); + RTCLogError(@"Failed to enable input on input scope of input element. " + "Error=%ld.", + (long)result); + return false; + } + // Set the format on the output scope of the input element/bus. 
result = AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat, @@ -531,7 +544,7 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { // - linear PCM => noncompressed audio data format with one frame per packet // - no need to specify interleaving since only mono is supported AudioStreamBasicDescription format; - RTC_DCHECK_EQ(1, kRTCAudioSessionPreferredNumberOfChannels); + RTC_DCHECK_EQ(1, RTC_CONSTANT_TYPE(RTCAudioSessionPreferredNumberOfChannels)); format.mSampleRate = sample_rate; format.mFormatID = kAudioFormatLinearPCM; format.mFormatFlags = @@ -539,7 +552,7 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { format.mBytesPerPacket = kBytesPerSample; format.mFramesPerPacket = 1; // uncompressed. format.mBytesPerFrame = kBytesPerSample; - format.mChannelsPerFrame = kRTCAudioSessionPreferredNumberOfChannels; + format.mChannelsPerFrame = RTC_CONSTANT_TYPE(RTCAudioSessionPreferredNumberOfChannels); format.mBitsPerChannel = 8 * kBytesPerSample; return format; } diff --git a/sdk/objc/native/src/objc_audio_device.h b/sdk/objc/native/src/objc_audio_device.h index 27eb0af88c..658d77281b 100644 --- a/sdk/objc/native/src/objc_audio_device.h +++ b/sdk/objc/native/src/objc_audio_device.h @@ -19,7 +19,7 @@ #include "modules/audio_device/audio_device_buffer.h" #include "rtc_base/thread.h" -@class ObjCAudioDeviceDelegate; +@class RTC_OBJC_TYPE(RTCObjCAudioDeviceDelegate); namespace webrtc { @@ -277,7 +277,7 @@ class ObjCAudioDeviceModule : public AudioDeviceModule { webrtc::BufferT record_audio_buffer_; // Delegate object provided to RTCAudioDevice during initialization - ObjCAudioDeviceDelegate* audio_device_delegate_; + RTC_OBJC_TYPE(RTCObjCAudioDeviceDelegate)* audio_device_delegate_; }; } // namespace objc_adm diff --git a/sdk/objc/native/src/objc_audio_device.mm b/sdk/objc/native/src/objc_audio_device.mm index da595ba059..d71a757bc9 100644 --- a/sdk/objc/native/src/objc_audio_device.mm +++ b/sdk/objc/native/src/objc_audio_device.mm @@ -87,9 +87,8 @@ if (![audio_device_ isInitialized]) { if (audio_device_delegate_ == nil) { - audio_device_delegate_ = [[ObjCAudioDeviceDelegate alloc] - initWithAudioDeviceModule:webrtc::scoped_refptr< - ObjCAudioDeviceModule>(this) + audio_device_delegate_ = [[RTC_OBJC_TYPE(RTCObjCAudioDeviceDelegate) alloc] + initWithAudioDeviceModule:webrtc::scoped_refptr(this) audioDeviceThread:thread_]; } diff --git a/sdk/objc/native/src/objc_audio_device_delegate.h b/sdk/objc/native/src/objc_audio_device_delegate.h index b823971e85..5a3a451fe3 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.h +++ b/sdk/objc/native/src/objc_audio_device_delegate.h @@ -22,8 +22,7 @@ class ObjCAudioDeviceModule; } // namespace objc_adm } // namespace webrtc -@interface ObjCAudioDeviceDelegate - : NSObject +@interface RTC_OBJC_TYPE(RTCObjCAudioDeviceDelegate) : NSObject - (instancetype) initWithAudioDeviceModule: diff --git a/sdk/objc/native/src/objc_audio_device_delegate.mm b/sdk/objc/native/src/objc_audio_device_delegate.mm index df8555a957..ef091eaf5d 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.mm +++ b/sdk/objc/native/src/objc_audio_device_delegate.mm @@ -58,7 +58,7 @@ } // namespace -@implementation ObjCAudioDeviceDelegate { +@implementation RTC_OBJC_TYPE(RTCObjCAudioDeviceDelegate) { webrtc::scoped_refptr impl_; } diff --git a/sdk/objc/native/src/objc_desktop_capture.h b/sdk/objc/native/src/objc_desktop_capture.h new file mode 100644 index 0000000000..b9ca6c9759 --- /dev/null +++ 
b/sdk/objc/native/src/objc_desktop_capture.h @@ -0,0 +1,70 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_CAPTURE_H_ +#define SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_CAPTURE_H_ + +#import "base/RTCMacros.h" + +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_and_cursor_composer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "rtc_base/thread.h" + +@protocol RTC_OBJC_TYPE +(RTCDesktopCapturerPrivateDelegate); + +namespace webrtc { + +enum DesktopType { kScreen, kWindow }; + +class ObjCDesktopCapturer : public DesktopCapturer::Callback { + public: + enum CaptureState { CS_RUNNING, CS_STOPPED, CS_FAILED}; + + public: + ObjCDesktopCapturer(DesktopType type, + webrtc::DesktopCapturer::SourceId source_id, + id delegate); + virtual ~ObjCDesktopCapturer(); + + virtual CaptureState Start(uint32_t fps); + + virtual void Stop(); + + virtual bool IsRunning(); + + protected: + virtual void OnCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) override; + private: + void CaptureFrame(); + webrtc::DesktopCaptureOptions options_; + std::unique_ptr capturer_; + std::unique_ptr thread_; + CaptureState capture_state_ = CS_STOPPED; + DesktopType type_; + webrtc::DesktopCapturer::SourceId source_id_; + id delegate_; + uint32_t capture_delay_ = 1000; // 1s + webrtc::DesktopCapturer::Result result_ = webrtc::DesktopCapturer::Result::SUCCESS; +}; + +} // namespace webrtc + +#endif // SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_CAPTURE_H_ diff --git a/sdk/objc/native/src/objc_desktop_capture.mm b/sdk/objc/native/src/objc_desktop_capture.mm new file mode 100644 index 0000000000..ca43f6afd3 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_capture.mm @@ -0,0 +1,205 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "api/sequence_checker.h" +#include "rtc_base/checks.h" +#include "sdk/objc/native/src/objc_desktop_capture.h" +#include "sdk/objc/native/src/objc_video_frame.h" +#include "third_party/libyuv/include/libyuv.h" + +#import "components/capturer/RTCDesktopCapturer+Private.h" +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +namespace webrtc { + +enum { kCaptureDelay = 33, kCaptureMessageId = 1000 }; + +ObjCDesktopCapturer::ObjCDesktopCapturer(DesktopType type, + webrtc::DesktopCapturer::SourceId source_id, + id delegate) + : thread_(rtc::Thread::Create()), source_id_(source_id), delegate_(delegate) { + RTC_DCHECK(thread_); + type_ = type; + thread_->Start(); + options_ = webrtc::DesktopCaptureOptions::CreateDefault(); + options_.set_detect_updated_region(true); + options_.set_allow_iosurface(true); + thread_->BlockingCall([this, type] { + if (type == kScreen) { + capturer_ = std::make_unique( + webrtc::DesktopCapturer::CreateScreenCapturer(options_), options_); + } else { + capturer_ = std::make_unique( + webrtc::DesktopCapturer::CreateWindowCapturer(options_), options_); + } + }); +} + +ObjCDesktopCapturer::~ObjCDesktopCapturer() { + thread_->BlockingCall([this] { + capturer_.reset(); + }); +} + +ObjCDesktopCapturer::CaptureState ObjCDesktopCapturer::Start(uint32_t fps) { + if(capture_state_ == CS_RUNNING) { + return capture_state_; + } + + if(fps == 0) { + capture_state_ = CS_FAILED; + return capture_state_; + } + + if (fps >= 60) { + capture_delay_ = uint32_t(1000.0 / 60.0); + } else { + capture_delay_ = uint32_t(1000.0 / fps); + } + + if (source_id_ != -1) { + if (!capturer_->SelectSource(source_id_)) { + capture_state_ = CS_FAILED; + return capture_state_; + } + if (type_ == kWindow) { + if (!capturer_->FocusOnSelectedSource()) { + capture_state_ = CS_FAILED; + return capture_state_; + } + } + } + + thread_->BlockingCall([this] { + capturer_->Start(this); + }); + capture_state_ = CS_RUNNING; + + thread_->PostTask([this] { + CaptureFrame(); + }); + + [delegate_ didSourceCaptureStart]; + return capture_state_; +} + +void ObjCDesktopCapturer::Stop() { + [delegate_ didSourceCaptureStop]; + capture_state_ = CS_STOPPED; +} + +bool ObjCDesktopCapturer::IsRunning() { + return capture_state_ == CS_RUNNING; +} + +void ObjCDesktopCapturer::OnCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) { + if (result != result_) { + if (result == webrtc::DesktopCapturer::Result::ERROR_PERMANENT) { + [delegate_ didSourceCaptureError]; + capture_state_ = CS_FAILED; + return; + } + + if (result == webrtc::DesktopCapturer::Result::ERROR_TEMPORARY) { + result_ = result; + [delegate_ didSourceCapturePaused]; + return; + } + + if (result == webrtc::DesktopCapturer::Result::SUCCESS) { + result_ = result; + [delegate_ didSourceCaptureStart]; + } + } + + if (result == webrtc::DesktopCapturer::Result::ERROR_TEMPORARY) { + return; + } + + int width = frame->size().width(); + int height = frame->size().height(); + int real_width = width; + + if (type_ == kWindow) { + int multiple = 0; +#if defined(WEBRTC_ARCH_X86_FAMILY) + multiple = 16; +#elif defined(WEBRTC_ARCH_ARM64) + multiple = 32; +#endif + // A multiple of $multiple must be used as the width of the src frame, + // and the right black border needs to be cropped during conversion. 
+ if (multiple != 0 && (width % multiple) != 0) { + width = (width / multiple + 1) * multiple; + } + } + + CVPixelBufferRef pixelBuffer = NULL; + + NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVReturn res = CVPixelBufferCreate(kCFAllocatorDefault, + width, + height, + kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), + &pixelBuffer); + CVPixelBufferLockBaseAddress(pixelBuffer, 0); + uint8_t *pxdata = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer); + libyuv::ConvertToARGB(reinterpret_cast(frame->data()), + real_width * height * 4, + reinterpret_cast(pxdata), + width * 4, + 0, + 0, + width, + height, + real_width, + height, + libyuv::kRotate0, + libyuv::FOURCC_ARGB); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + if (res != kCVReturnSuccess) { + NSLog(@"Unable to create cvpixelbuffer %d", res); + return; + } + + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; + NSTimeInterval timeStampSeconds = CACurrentMediaTime(); + int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC); + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer + rotation:RTC_OBJC_TYPE(RTCVideoRotation_0) + timeStampNs:timeStampNs]; + CVPixelBufferRelease(pixelBuffer); + [delegate_ didCaptureVideoFrame:videoFrame]; +} + +void ObjCDesktopCapturer::CaptureFrame() { + RTC_DCHECK_RUN_ON(thread_.get()); + if (capture_state_ == CS_RUNNING) { + capturer_->CaptureFrame(); + thread_->PostDelayedHighPrecisionTask( + [this]() { + CaptureFrame(); + }, + TimeDelta::Millis(capture_delay_)); + } +} + +} // namespace webrtc diff --git a/sdk/objc/native/src/objc_desktop_media_list.h b/sdk/objc/native/src/objc_desktop_media_list.h new file mode 100644 index 0000000000..ecb2d27221 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_media_list.h @@ -0,0 +1,111 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_MEDIA_LIST_H_ +#define SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_MEDIA_LIST_H_ + +#import "base/RTCMacros.h" + +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "rtc_base/thread.h" + +#include "objc_desktop_capture.h" + +#import "components/capturer/RTCDesktopMediaList+Private.h" + +namespace webrtc { + +class MediaSource { + public: + MediaSource( ObjCDesktopMediaList *mediaList, DesktopCapturer::Source src, DesktopType type) + : source(src), mediaList_(mediaList), type_(type) {} + virtual ~MediaSource() {} + + DesktopCapturer::Source source; + + // source id + DesktopCapturer::SourceId id() const { return source.id; } + + // source name + std::string name() const { return source.title; } + + // Returns the thumbnail of the source, jpeg format. + std::vector thumbnail() const { return thumbnail_; } + + + + DesktopType type() const { return type_; } + + bool UpdateThumbnail(); + + void SaveCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame); + + private: + std::vector thumbnail_; + ObjCDesktopMediaList *mediaList_; + DesktopType type_; +}; + +class ObjCDesktopMediaList { + public: + enum CaptureState { CS_RUNNING, CS_STOPPED, CS_FAILED}; + public: + ObjCDesktopMediaList(DesktopType type, RTC_OBJC_TYPE(RTCDesktopMediaList)* objcMediaList); + + virtual ~ObjCDesktopMediaList(); + + virtual int32_t UpdateSourceList(bool force_reload = false, bool get_thumbnail = true); + + virtual int GetSourceCount() const; + + virtual MediaSource* GetSource(int index); + + virtual bool GetThumbnail(MediaSource *source, bool notify); + + private: + class CallbackProxy : public DesktopCapturer::Callback { + public: + CallbackProxy(){} + void SetCallback(std::function frame)> on_capture_result) { + on_capture_result_ = on_capture_result; + } + private: + void OnCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) override { + if(on_capture_result_) on_capture_result_(result, std::move(frame)); + } + std::function frame)> on_capture_result_ = nullptr; + }; + private: + std::unique_ptr callback_; + webrtc::DesktopCaptureOptions options_; + std::unique_ptr capturer_; + std::unique_ptr thread_; + std::vector> sources_; + RTC_OBJC_TYPE(RTCDesktopMediaList)* objcMediaList_; + DesktopType type_; +}; + +} // namespace webrtc + +#endif // SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_MEDIA_LIST_H_ diff --git a/sdk/objc/native/src/objc_desktop_media_list.mm b/sdk/objc/native/src/objc_desktop_media_list.mm new file mode 100644 index 0000000000..cb783737a2 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_media_list.mm @@ -0,0 +1,252 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "sdk/objc/native/src/objc_desktop_media_list.h" +#include "rtc_base/checks.h" +#include "sdk/objc/native/src/objc_video_frame.h" +#include "third_party/libyuv/include/libyuv.h" + +extern "C" { +#if defined(USE_SYSTEM_LIBJPEG) +#include +#else +// Include directory supplied by gn +#include "jpeglib.h" // NOLINT +#endif +} + +#include +#include + +#import + +namespace webrtc { + +ObjCDesktopMediaList::ObjCDesktopMediaList(DesktopType type, + RTC_OBJC_TYPE(RTCDesktopMediaList) * objcMediaList) + : thread_(rtc::Thread::Create()), objcMediaList_(objcMediaList), type_(type) { + RTC_DCHECK(thread_); + thread_->Start(); + options_ = webrtc::DesktopCaptureOptions::CreateDefault(); + options_.set_detect_updated_region(true); + options_.set_allow_iosurface(true); + + callback_ = std::make_unique(); + + thread_->BlockingCall([this, type] { + if (type == kScreen) { + capturer_ = webrtc::DesktopCapturer::CreateScreenCapturer(options_); + } else { + capturer_ = webrtc::DesktopCapturer::CreateWindowCapturer(options_); + } + capturer_->Start(callback_.get()); + }); +} + +ObjCDesktopMediaList::~ObjCDesktopMediaList() { + thread_->BlockingCall([this] { + capturer_.reset(); + }); +} + +int32_t ObjCDesktopMediaList::UpdateSourceList(bool force_reload, bool get_thumbnail) { + if (force_reload) { + for (auto source : sources_) { + [objcMediaList_ mediaSourceRemoved:source.get()]; + } + sources_.clear(); + } + + webrtc::DesktopCapturer::SourceList new_sources; + + thread_->BlockingCall([this, &new_sources] { + capturer_->GetSourceList(&new_sources); + }); + + typedef std::set SourceSet; + SourceSet new_source_set; + for (size_t i = 0; i < new_sources.size(); ++i) { + if (type_ == kScreen && new_sources[i].title.length() == 0) { + new_sources[i].title = std::string("Screen " + std::to_string(i + 1)); + } + new_source_set.insert(new_sources[i].id); + } + // Iterate through the old sources to find the removed sources. + for (size_t i = 0; i < sources_.size(); ++i) { + if (new_source_set.find(sources_[i]->id()) == new_source_set.end()) { + [objcMediaList_ mediaSourceRemoved:(*(sources_.begin() + i)).get()]; + sources_.erase(sources_.begin() + i); + --i; + } + } + // Iterate through the new sources to find the added sources. + if (new_sources.size() > sources_.size()) { + SourceSet old_source_set; + for (size_t i = 0; i < sources_.size(); ++i) { + old_source_set.insert(sources_[i]->id()); + } + for (size_t i = 0; i < new_sources.size(); ++i) { + if (old_source_set.find(new_sources[i].id) == old_source_set.end()) { + MediaSource *source = new MediaSource(this, new_sources[i], type_); + sources_.insert(sources_.begin() + i, std::shared_ptr(source)); + [objcMediaList_ mediaSourceAdded:source]; + GetThumbnail(source, true); + } + } + } + + RTC_DCHECK_EQ(new_sources.size(), sources_.size()); + + // Find the moved/changed sources. + size_t pos = 0; + while (pos < sources_.size()) { + if (!(sources_[pos]->id() == new_sources[pos].id)) { + // Find the source that should be moved to |pos|, starting from |pos + 1| + // of |sources_|, because entries before |pos| should have been sorted. + size_t old_pos = pos + 1; + for (; old_pos < sources_.size(); ++old_pos) { + if (sources_[old_pos]->id() == new_sources[pos].id) break; + } + RTC_DCHECK(sources_[old_pos]->id() == new_sources[pos].id); + + // Move the source from |old_pos| to |pos|. 
+ auto temp = sources_[old_pos]; + sources_.erase(sources_.begin() + old_pos); + sources_.insert(sources_.begin() + pos, temp); + //[objcMediaList_ mediaSourceMoved:old_pos newIndex:pos]; + } + + if (sources_[pos]->source.title != new_sources[pos].title) { + sources_[pos]->source.title = new_sources[pos].title; + [objcMediaList_ mediaSourceNameChanged:sources_[pos].get()]; + } + ++pos; + } + + if (get_thumbnail) { + for (auto source : sources_) { + GetThumbnail(source.get(), true); + } + } + return sources_.size(); +} + +bool ObjCDesktopMediaList::GetThumbnail(MediaSource *source, bool notify) { + thread_->PostTask([this, source, notify] { + if(capturer_->SelectSource(source->id())){ + callback_->SetCallback([&](webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) { + auto old_thumbnail = source->thumbnail(); + source->SaveCaptureResult(result, std::move(frame)); + if(old_thumbnail.size() != source->thumbnail().size() && notify) { + [objcMediaList_ mediaSourceThumbnailChanged:source]; + } + }); + capturer_->CaptureFrame(); + } + }); + + return true; +} + +int ObjCDesktopMediaList::GetSourceCount() const { + return sources_.size(); +} + +MediaSource *ObjCDesktopMediaList::GetSource(int index) { + return sources_[index].get(); +} + +bool MediaSource::UpdateThumbnail() { + return mediaList_->GetThumbnail(this, true); +} + +void MediaSource::SaveCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) { + if (result != webrtc::DesktopCapturer::Result::SUCCESS) { + return; + } + int width = frame->size().width(); + int height = frame->size().height(); + int real_width = width; + + if (type_ == kWindow) { + int multiple = 0; +#if defined(WEBRTC_ARCH_X86_FAMILY) + multiple = 16; +#elif defined(WEBRTC_ARCH_ARM64) + multiple = 32; +#endif + // A multiple of $multiple must be used as the width of the src frame, + // and the right black border needs to be cropped during conversion. 
+ if (multiple != 0 && (width % multiple) != 0) { + width = (width / multiple + 1) * multiple; + } + } + + CVPixelBufferRef pixelBuffer = NULL; + + NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVReturn res = CVPixelBufferCreate(kCFAllocatorDefault, + width, + height, + kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), + &pixelBuffer); + CVPixelBufferLockBaseAddress(pixelBuffer, 0); + uint8_t *pxdata = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer); + libyuv::ConvertToARGB(reinterpret_cast(frame->data()), + real_width * height * 4, + reinterpret_cast(pxdata), + width * 4, + 0, + 0, + width, + height, + real_width, + height, + libyuv::kRotate0, + libyuv::FOURCC_ARGB); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + if (res != kCVReturnSuccess) { + NSLog(@"Unable to create cvpixelbuffer %d", res); + return; + } + + CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer]; + CGRect outputSize = CGRectMake(0, 0, width, height); + + CIContext *tempContext = [CIContext contextWithOptions:nil]; + CGImageRef cgImage = [tempContext createCGImage:ciImage fromRect:outputSize]; + NSData *imageData; + NSBitmapImageRep *newRep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage]; + [newRep setSize:NSSizeToCGSize(outputSize.size)]; + imageData = [newRep representationUsingType:NSBitmapImageFileTypeJPEG + properties:@{ + NSImageCompressionFactor : @1.0f + }]; + + thumbnail_.resize(imageData.length); + const void *_Nullable rawData = [imageData bytes]; + char *src = (char *)rawData; + std::copy(src, src + imageData.length, thumbnail_.begin()); + + CGImageRelease(cgImage); + CVPixelBufferRelease(pixelBuffer); +} + +} // namespace webrtc diff --git a/sdk/objc/native/src/objc_network_monitor.h b/sdk/objc/native/src/objc_network_monitor.h index 2ebccd41dd..a3b9fb411a 100644 --- a/sdk/objc/native/src/objc_network_monitor.h +++ b/sdk/objc/native/src/objc_network_monitor.h @@ -59,7 +59,7 @@ class ObjCNetworkMonitor : public webrtc::NetworkMonitorInterface, std::map adapter_type_by_name_ RTC_GUARDED_BY(thread_); webrtc::scoped_refptr safety_flag_; - RTCNetworkMonitor* network_monitor_ = nil; + RTC_OBJC_TYPE(RTCNetworkMonitor) * network_monitor_ = nil; }; } // namespace webrtc diff --git a/sdk/objc/native/src/objc_network_monitor.mm b/sdk/objc/native/src/objc_network_monitor.mm index a7ac555ede..b48bfc6d93 100644 --- a/sdk/objc/native/src/objc_network_monitor.mm +++ b/sdk/objc/native/src/objc_network_monitor.mm @@ -40,7 +40,7 @@ thread_ = webrtc::Thread::Current(); RTC_DCHECK_RUN_ON(thread_); safety_flag_->SetAlive(); - network_monitor_ = [[RTCNetworkMonitor alloc] initWithObserver:this]; + network_monitor_ = [[RTC_OBJC_TYPE(RTCNetworkMonitor) alloc] initWithObserver:this]; if (network_monitor_ == nil) { RTC_LOG(LS_WARNING) << "Failed to create RTCNetworkMonitor; not available on this OS?"; diff --git a/sdk/objc/native/src/objc_video_encoder_factory.mm b/sdk/objc/native/src/objc_video_encoder_factory.mm index 5d46915b22..1febb40d62 100644 --- a/sdk/objc/native/src/objc_video_encoder_factory.mm +++ b/sdk/objc/native/src/objc_video_encoder_factory.mm @@ -85,7 +85,7 @@ int32_t Encode(const VideoFrame &frame, const std::vector *frame_types) override { NSMutableArray *rtcFrameTypes = [NSMutableArray array]; for (size_t i = 0; i < frame_types->size(); ++i) { - [rtcFrameTypes addObject:@(RTCFrameType(frame_types->at(i)))]; + [rtcFrameTypes addObject:@(RTC_OBJC_TYPE(RTCFrameType)(frame_types->at(i)))]; } return [encoder_ 
encode:ToObjCVideoFrame(frame) diff --git a/sdk/objc/native/src/objc_video_frame.mm b/sdk/objc/native/src/objc_video_frame.mm index f982831bd8..94cf5a4c88 100644 --- a/sdk/objc/native/src/objc_video_frame.mm +++ b/sdk/objc/native/src/objc_video_frame.mm @@ -19,7 +19,7 @@ RTC_OBJC_TYPE( RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer()) - rotation:RTCVideoRotation(frame.rotation()) + rotation:RTC_OBJC_TYPE(RTCVideoRotation)(frame.rotation()) timeStampNs:frame.timestamp_us() * webrtc::kNumNanosecsPerMicrosec]; videoFrame.timeStamp = frame.rtp_timestamp(); diff --git a/sdk/objc/native/src/objc_video_track_source.h b/sdk/objc/native/src/objc_video_track_source.h index 56547ed2a7..c6daeb5408 100644 --- a/sdk/objc/native/src/objc_video_track_source.h +++ b/sdk/objc/native/src/objc_video_track_source.h @@ -19,8 +19,7 @@ RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame)); -@interface RTCObjCVideoSourceAdapter - : NSObject +@interface RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) : NSObject @end namespace webrtc { @@ -29,7 +28,7 @@ class ObjCVideoTrackSource : public webrtc::AdaptedVideoTrackSource { public: ObjCVideoTrackSource(); explicit ObjCVideoTrackSource(bool is_screencast); - explicit ObjCVideoTrackSource(RTCObjCVideoSourceAdapter* adapter); + explicit ObjCVideoTrackSource(RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter)* adapter); bool is_screencast() const override; @@ -51,7 +50,7 @@ class ObjCVideoTrackSource : public webrtc::AdaptedVideoTrackSource { webrtc::VideoBroadcaster broadcaster_; webrtc::TimestampAligner timestamp_aligner_; - RTCObjCVideoSourceAdapter* adapter_; + RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter)* adapter_; bool is_screencast_; }; diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm index 3885d34147..164a01ccb1 100644 --- a/sdk/objc/native/src/objc_video_track_source.mm +++ b/sdk/objc/native/src/objc_video_track_source.mm @@ -17,11 +17,11 @@ #include "api/video/i420_buffer.h" #include "sdk/objc/native/src/objc_frame_buffer.h" -@interface RTCObjCVideoSourceAdapter () +@interface RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) () @property(nonatomic) webrtc::ObjCVideoTrackSource *objCVideoTrackSource; @end -@implementation RTCObjCVideoSourceAdapter +@implementation RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) @synthesize objCVideoTrackSource = _objCVideoTrackSource; @@ -40,8 +40,7 @@ - (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer : AdaptedVideoTrackSource(/* required resolution alignment */ 2), is_screencast_(is_screencast) {} -ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) - : adapter_(adapter) { +ObjCVideoTrackSource::ObjCVideoTrackSource(RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) *adapter) : adapter_(adapter) { adapter_.objCVideoTrackSource = this; } diff --git a/sdk/objc/unittests/RTCAudioSessionTest.mm b/sdk/objc/unittests/RTCAudioSessionTest.mm index 03f7b6942b..98137196c2 100644 --- a/sdk/objc/unittests/RTCAudioSessionTest.mm +++ b/sdk/objc/unittests/RTCAudioSessionTest.mm @@ -318,8 +318,8 @@ - (void)DISABLED_testConfigureWebRTCSessionWithoutLocking { withOptions:0 error:&error]; EXPECT_TRUE(error != nil); - EXPECT_EQ(error.domain, kRTCAudioSessionErrorDomain); - EXPECT_EQ(error.code, kRTCAudioSessionErrorLockRequired); + EXPECT_EQ(error.domain, RTC_CONSTANT_TYPE(RTCAudioSessionErrorDomain)); + EXPECT_EQ(error.code, RTC_CONSTANT_TYPE(RTCAudioSessionErrorLockRequired)); waitCleanup.Set(); thread->Stop(); 
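A pattern repeated across the sdk/objc hunks above (RTCDispatcherQueueType, RTCNetworkMonitor, RTCObjCVideoSourceAdapter, the kRTCMediaStreamTrackKind* and kRTCAudioSessionError* constants) is wrapping previously bare names in RTC_OBJC_TYPE or RTC_CONSTANT_TYPE. In upstream sdk/objc/base/RTCMacros.h, RTC_OBJC_TYPE is a token-pasting macro that prepends a build-time prefix, so every Objective-C symbol can be renamed at once and two differently built copies of WebRTC can coexist in one binary without Objective-C class collisions. A simplified sketch of the scheme; RTC_CONSTANT_TYPE is this fork's variant and is assumed, not confirmed, to paste the same way:

// Simplified reconstruction of the RTCMacros.h-style prefixing. With
// -DRTC_OBJC_TYPE_PREFIX=LK, RTC_OBJC_TYPE(RTCVideoFrame) expands to
// LKRTCVideoFrame; with no prefix defined it stays RTCVideoFrame.
#ifndef RTC_OBJC_TYPE_PREFIX
#define RTC_OBJC_TYPE_PREFIX
#endif

#define RTC_OBJC_TYPE_CONCAT_HELPER(a, b) a##b
#define RTC_OBJC_TYPE_CONCAT(a, b) RTC_OBJC_TYPE_CONCAT_HELPER(a, b)
#define RTC_OBJC_TYPE(type_name) RTC_OBJC_TYPE_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name)

// Assumption: RTC_CONSTANT_TYPE applies the same pasting to constants such as
// kRTCMediaStreamTrackKindVideo so k-constants and enum values rename in step
// with the classes that use them.

This is why the patch touches enum declarations and call sites alike: any name that escapes the macro keeps its unprefixed symbol and defeats the renaming.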
diff --git a/sdk/objc/unittests/RTCMTLVideoView_xctest.m b/sdk/objc/unittests/RTCMTLVideoView_xctest.m index fc140a4662..437f094046 100644 --- a/sdk/objc/unittests/RTCMTLVideoView_xctest.m +++ b/sdk/objc/unittests/RTCMTLVideoView_xctest.m @@ -32,8 +32,8 @@ @interface RTC_OBJC_TYPE (RTCMTLVideoView) + (BOOL)isMetalAvailable; + (UIView *)createMetalView:(CGRect)frame; -+ (id)createNV12Renderer; -+ (id)createI420Renderer; ++ (id)createNV12Renderer; ++ (id)createI420Renderer; - (void)drawInMTKView:(id)view; @end @@ -95,7 +95,7 @@ - (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer { } - (id)rendererMockWithSuccessfulSetup:(BOOL)success { - id rendererMock = OCMClassMock([RTCMTLRenderer class]); + id rendererMock = OCMClassMock([RTC_OBJC_TYPE(RTCMTLRenderer) class]); OCMStub([rendererMock addRenderingDestination:[OCMArg any]]) .andReturn(success); return rendererMock; diff --git a/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m b/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m index c5e39a6a30..1b2d6e3d0b 100644 --- a/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m +++ b/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m @@ -199,11 +199,9 @@ - (void)testRTCRtpSenderLifetime { @autoreleasepool { factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; - peerConnection = [factory peerConnectionWithConfiguration:config - constraints:constraints - delegate:nil]; - sender = [peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo - streamId:@"stream"]; + peerConnection = + [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; + sender = [peerConnection senderWithKind:RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo) streamId:@"stream"]; XCTAssertNotNil(sender); [peerConnection close]; peerConnection = nil; @@ -234,15 +232,11 @@ - (void)testRTCRtpReceiverLifetime { @autoreleasepool { factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; - pc1 = [factory peerConnectionWithConfiguration:config - constraints:constraints - delegate:nil]; - [pc1 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"]; + pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; + [pc1 senderWithKind:RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio) streamId:@"stream"]; - pc2 = [factory peerConnectionWithConfiguration:config - constraints:constraints - delegate:nil]; - [pc2 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"]; + pc2 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; + [pc2 senderWithKind:RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio) streamId:@"stream"]; NSTimeInterval negotiationTimeout = 15; XCTAssertTrue([self negotiatePeerConnection:pc1 @@ -343,11 +337,11 @@ - (void)testRollback { RTC_OBJC_TYPE(RTCConfiguration) *config = [[RTC_OBJC_TYPE(RTCConfiguration) alloc] init]; config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; - RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [[RTC_OBJC_TYPE( - RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{ - kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue - } - optionalConstraints:nil]; + RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = + [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{ + RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveAudio) : RTC_CONSTANT_TYPE(RTCMediaConstraintsValueTrue) + } + optionalConstraints:nil]; __block RTC_OBJC_TYPE(RTCPeerConnectionFactory) * factory; __block RTC_OBJC_TYPE(RTCPeerConnection) * pc1; @@ -415,7 
@@ -415,7 +409,7 @@ - (void)testSenderCapabilities {
                                             decoderFactory:decoder];
 
   RTC_OBJC_TYPE(RTCRtpCapabilities) *capabilities =
-      [factory rtpSenderCapabilitiesForKind:kRTCMediaStreamTrackKindVideo];
+      [factory rtpSenderCapabilitiesForKind:RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)];
   NSMutableArray *codecNames = [NSMutableArray new];
   for (RTC_OBJC_TYPE(RTCRtpCodecCapability) * codec in capabilities.codecs) {
     [codecNames addObject:codec.name];
@@ -447,7 +441,7 @@ - (void)testReceiverCapabilities {
                                             decoderFactory:decoder];
 
   RTC_OBJC_TYPE(RTCRtpCapabilities) *capabilities =
-      [factory rtpReceiverCapabilitiesForKind:kRTCMediaStreamTrackKindVideo];
+      [factory rtpReceiverCapabilitiesForKind:RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)];
   NSMutableArray *codecNames = [NSMutableArray new];
   for (RTC_OBJC_TYPE(RTCRtpCodecCapability) * codec in capabilities.codecs) {
     [codecNames addObject:codec.name];
@@ -497,7 +491,7 @@ - (void)testSetCodecPreferences {
   XCTAssertNotNil(tranceiver);
 
   RTC_OBJC_TYPE(RTCRtpCapabilities) *capabilities =
-      [factory rtpReceiverCapabilitiesForKind:kRTCMediaStreamTrackKindVideo];
+      [factory rtpReceiverCapabilitiesForKind:RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)];
 
   RTC_OBJC_TYPE(RTCRtpCodecCapability) * targetCodec;
   for (RTC_OBJC_TYPE(RTCRtpCodecCapability) * codec in capabilities.codecs) {
@@ -702,9 +696,8 @@ - (bool)negotiatePeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc1
   __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC1 = pc1;
   __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC2 = pc2;
   RTC_OBJC_TYPE(RTCMediaConstraints) *sdpConstraints =
-      [[RTC_OBJC_TYPE(RTCMediaConstraints)
-          alloc] initWithMandatoryConstraints:@{
-        kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue
+      [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{
+        RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveAudio) : RTC_CONSTANT_TYPE(RTCMediaConstraintsValueTrue)
       }
                                                            optionalConstraints:nil];
diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc
index 86239444f7..5ddaf24f66 100644
--- a/video/video_stream_encoder.cc
+++ b/video/video_stream_encoder.cc
@@ -70,6 +70,7 @@
 #include "modules/video_coding/codecs/interface/common_constants.h"
 #include "modules/video_coding/include/video_codec_initializer.h"
 #include "modules/video_coding/include/video_codec_interface.h"
+#include "modules/video_coding/include/video_error_codes.h"
 #include "modules/video_coding/svc/scalability_mode_util.h"
 #include "modules/video_coding/svc/svc_rate_allocator.h"
 #include "rtc_base/checks.h"
@@ -2099,9 +2100,15 @@
   was_encode_called_since_last_initialization_ = true;
 
   if (encode_status < 0) {
-    RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: "
-                      << encoder_config_.video_format.ToString();
-    RequestEncoderSwitch();
+    if (encode_status == WEBRTC_VIDEO_CODEC_ENCODER_FAILURE) {
+      RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: "
+                        << encoder_config_.video_format.ToString();
+      RequestEncoderSwitch();
+    } else {
Error code: " + << encode_status; + } + return; } diff --git a/webrtc.gni b/webrtc.gni index 2a407632d6..be03d29e02 100644 --- a/webrtc.gni +++ b/webrtc.gni @@ -1058,7 +1058,6 @@ if (is_mac || is_ios) { umbrella_header_path = "$target_gen_dir/$output_name.framework/WebRTC/$output_name.h" modulemap_path = "$target_gen_dir/Modules/module.modulemap" - privacy_manifest_path = "$target_gen_dir/$target_name/PrivacyInfo.xcprivacy" action_foreach("create_bracket_include_headers_$target_name") { script = "//tools_webrtc/apple/copy_framework_header.py" @@ -1099,7 +1098,6 @@ if (is_mac || is_ios) { deps += [ ":copy_framework_headers_$this_target_name", ":copy_modulemap_$this_target_name", - ":copy_privacy_manifest_$this_target_name", ":copy_umbrella_header_$this_target_name", ":create_bracket_include_headers_$this_target_name", ":modulemap_$this_target_name", @@ -1122,7 +1120,6 @@ if (is_mac || is_ios) { ":create_bracket_include_headers_$this_target_name") deps += [ - ":copy_privacy_manifest_$this_target_name", ":copy_umbrella_header_$this_target_name", ":create_bracket_include_headers_$this_target_name", ] @@ -1132,13 +1129,8 @@ if (is_mac || is_ios) { if (is_mac || target_environment == "catalyst") { # Catalyst frameworks use the same layout as regular Mac frameworks. headers_dir = "Versions/A/Headers" - - # The path to the privacy manifest file differs between Mac and iOS. - # https://developer.apple.com/documentation/bundleresources/privacy_manifest_files/adding_a_privacy_manifest_to_your_app_or_third-party_sdk - privacy_manifest_out_path = "Versions/A/Resources/PrivacyInfo.xcprivacy" } else { headers_dir = "Headers" - privacy_manifest_out_path = "PrivacyInfo.xcprivacy" } bundle_data("copy_framework_headers_$this_target_name") { @@ -1188,25 +1180,6 @@ if (is_mac || is_ios) { deps = [ ":umbrella_header_$target_name" ] } - - action("create_privacy_manifest_$target_name") { - script = "//tools_webrtc/apple/generate_privacy_manifest.py" - - args = [ - "--output", - rebase_path(privacy_manifest_path), - ] - - outputs = [ privacy_manifest_path ] - } - - copy("copy_privacy_manifest_$target_name") { - sources = [ privacy_manifest_path ] - outputs = - [ "$root_out_dir/$output_name.framework/$privacy_manifest_out_path" ] - - deps = [ ":create_privacy_manifest_$target_name" ] - } } }