From 89e26b72494b97b1a50e5c960147550896ed14af Mon Sep 17 00:00:00 2001 From: Philipp Hancke Date: Wed, 17 Apr 2024 08:50:15 -0700 Subject: [PATCH 01/49] [m125] webrtc stats: fix video remote-outbound-rtp timestamp which had a 70 year offset (i.e. 2094 instead of 2024) which broke the webrtc-internal stats graphs. A similar adjustment is done for audio in audio/channel_receive.cc BUG=webrtc:12529,chromium:336222282 (cherry picked from commit 77313800c72fa1e33c30e952800e4157e9ad44a4) Change-Id: I0ce43cc8b451185bc056cf9e54757ef22d006c99 Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/347780 Reviewed-by: Florent Castelli Commit-Queue: Philipp Hancke Reviewed-by: Harald Alvestrand Cr-Original-Commit-Position: refs/heads/main@{#42114} Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/348702 Commit-Queue: Harald Alvestrand Cr-Commit-Position: refs/branch-heads/6422@{#1} Cr-Branched-From: b831eb816ef847d09d446ef4168e36b13af163f8-refs/heads/main@{#42072} --- video/video_receive_stream2.cc | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/video/video_receive_stream2.cc b/video/video_receive_stream2.cc index 6f63f760c0..f3a7c329a6 100644 --- a/video/video_receive_stream2.cc +++ b/video/video_receive_stream2.cc @@ -573,9 +573,10 @@ VideoReceiveStreamInterface::Stats VideoReceiveStream2::GetStats() const { rtp_video_stream_receiver_.GetSenderReportStats(); if (rtcp_sr_stats) { stats.last_sender_report_timestamp_ms = - rtcp_sr_stats->last_arrival_timestamp.ToMs(); + rtcp_sr_stats->last_arrival_timestamp.ToMs() - + rtc::kNtpJan1970Millisecs; stats.last_sender_report_remote_timestamp_ms = - rtcp_sr_stats->last_remote_timestamp.ToMs(); + rtcp_sr_stats->last_remote_timestamp.ToMs() - rtc::kNtpJan1970Millisecs; stats.sender_reports_packets_sent = rtcp_sr_stats->packets_sent; stats.sender_reports_bytes_sent = rtcp_sr_stats->bytes_sent; stats.sender_reports_reports_count = rtcp_sr_stats->reports_count; From 
8505a9838ea91c66c96c173d30cd66f9dbcc7548 Mon Sep 17 00:00:00 2001 From: Per Kjellander Date: Tue, 16 Apr 2024 09:08:29 +0000 Subject: [PATCH 02/49] Revert "Ignore allocated bitrate during initial exponential BWE." MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit This reverts commit 33cc83595ae7dd144c57c614fb62d286d9d7bf68. Reason for revert: Perf bots showed that this cl cause a change in metrics. It looks like it is for the better, but we want this to be behind a field trial. Original change's description: > Ignore allocated bitrate during initial exponential BWE. > > The reason why we want to do this is because audio can allocate a needed bitrate before video when starting a call, which may lead to a race between the first probe result and updating the allocated bitrate. > That is the, initial probe will try to probe up to the max configured bitrate. > > ProbeController::SetFirstProbeToMaxBitrate will allow the first probe to > continue up to the max configured bitrate, regardless of of the max > allocated bitrate. 
> > Bug: webrtc:14928 > Change-Id: I6e0ae90e21a78466527f3464951e6033dc846470 > Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/346760 > Reviewed-by: Diep Bui > Commit-Queue: Per Kjellander > Reviewed-by: Erik Språng > Reviewed-by: Per Kjellander > Cr-Commit-Position: refs/heads/main@{#42049} (cherry picked from commit 501c4f37bfee47b26999ee291c5355ad64554df7) Bug: chromium:335337923,webrtc:14928 Change-Id: I56ba58560b6857b6069552c02df822691f7af64d Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/347622 Bot-Commit: rubber-stamper@appspot.gserviceaccount.com Commit-Queue: Per Kjellander Reviewed-by: Diep Bui Owners-Override: Per Kjellander Cr-Original-Commit-Position: refs/heads/main@{#42081} Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/348722 Reviewed-by: Erik Språng Cr-Commit-Position: refs/branch-heads/6422@{#2} Cr-Branched-From: b831eb816ef847d09d446ef4168e36b13af163f8-refs/heads/main@{#42072} --- api/transport/network_types.h | 4 -- call/rtp_transport_controller_send.cc | 8 +-- .../goog_cc/goog_cc_network_control.cc | 4 -- .../goog_cc/probe_controller.cc | 39 +++--------- .../goog_cc/probe_controller.h | 6 -- .../goog_cc/probe_controller_unittest.cc | 59 ------------------- 6 files changed, 11 insertions(+), 109 deletions(-) diff --git a/api/transport/network_types.h b/api/transport/network_types.h index 46470d6b5d..258a3a4350 100644 --- a/api/transport/network_types.h +++ b/api/transport/network_types.h @@ -46,10 +46,6 @@ struct StreamsConfig { ~StreamsConfig(); Timestamp at_time = Timestamp::PlusInfinity(); absl::optional requests_alr_probing; - // If `initial_probe_to_max_bitrate` is set to true, the first probe - // may probe up to the max configured bitrate and can ignore - // max_total_allocated_bitrate. - absl::optional initial_probe_to_max_bitrate; absl::optional pacing_factor; // TODO(srte): Use BitrateAllocationLimits here. 
diff --git a/call/rtp_transport_controller_send.cc b/call/rtp_transport_controller_send.cc index a997593492..a10d75d2e0 100644 --- a/call/rtp_transport_controller_send.cc +++ b/call/rtp_transport_controller_send.cc @@ -262,11 +262,6 @@ void RtpTransportControllerSend::ReconfigureBandwidthEstimation( RTC_DCHECK_RUN_ON(&sequence_checker_); bwe_settings_ = settings; - bool allow_probe_without_media = bwe_settings_.allow_probe_without_media && - packet_router_.SupportsRtxPayloadPadding(); - streams_config_.initial_probe_to_max_bitrate = allow_probe_without_media; - pacer_.SetAllowProbeWithoutMediaPacket(allow_probe_without_media); - if (controller_) { // Recreate the controller and handler. control_handler_ = nullptr; @@ -280,6 +275,9 @@ void RtpTransportControllerSend::ReconfigureBandwidthEstimation( UpdateNetworkAvailability(); } } + pacer_.SetAllowProbeWithoutMediaPacket( + bwe_settings_.allow_probe_without_media && + packet_router_.SupportsRtxPayloadPadding()); } void RtpTransportControllerSend::RegisterTargetTransferRateObserver( diff --git a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc index 815520ace2..d8a0ce9d64 100644 --- a/modules/congestion_controller/goog_cc/goog_cc_network_control.cc +++ b/modules/congestion_controller/goog_cc/goog_cc_network_control.cc @@ -218,10 +218,6 @@ NetworkControlUpdate GoogCcNetworkController::OnProcessInterval( probe_controller_->EnablePeriodicAlrProbing( *initial_config_->stream_based_config.requests_alr_probing); } - if (initial_config_->stream_based_config.initial_probe_to_max_bitrate) { - probe_controller_->SetFirstProbeToMaxBitrate( - *initial_config_->stream_based_config.initial_probe_to_max_bitrate); - } absl::optional total_bitrate = initial_config_->stream_based_config.max_total_allocated_bitrate; if (total_bitrate) { diff --git a/modules/congestion_controller/goog_cc/probe_controller.cc 
b/modules/congestion_controller/goog_cc/probe_controller.cc index f9848b563b..3fc8677e87 100644 --- a/modules/congestion_controller/goog_cc/probe_controller.cc +++ b/modules/congestion_controller/goog_cc/probe_controller.cc @@ -272,22 +272,6 @@ std::vector ProbeController::OnNetworkAvailability( return std::vector(); } -void ProbeController::UpdateState(State new_state) { - switch (new_state) { - case State::kInit: - state_ = State::kInit; - break; - case State::kWaitingForProbingResult: - state_ = State::kWaitingForProbingResult; - break; - case State::kProbingComplete: - state_ = State::kProbingComplete; - waiting_for_initial_probe_result_ = false; - min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); - break; - } -} - std::vector ProbeController::InitiateExponentialProbing( Timestamp at_time) { RTC_DCHECK(network_available_); @@ -303,8 +287,6 @@ std::vector ProbeController::InitiateExponentialProbing( probes.push_back(config_.second_exponential_probe_scale.Value() * start_bitrate_); } - waiting_for_initial_probe_result_ = true; - return InitiateProbing(at_time, probes, true); } @@ -325,7 +307,6 @@ std::vector ProbeController::SetEstimatedBitrate( if (config_.abort_further_probe_if_max_lower_than_current && (bitrate > max_bitrate_ || (!max_total_allocated_bitrate_.IsZero() && - !(waiting_for_initial_probe_result_ && first_probe_to_max_bitrate_) && bitrate > 2 * max_total_allocated_bitrate_))) { // No need to continue probing. 
min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); @@ -354,11 +335,6 @@ void ProbeController::EnablePeriodicAlrProbing(bool enable) { enable_periodic_alr_probing_ = enable; } -void ProbeController::SetFirstProbeToMaxBitrate( - bool first_probe_to_max_bitrate) { - first_probe_to_max_bitrate_ = first_probe_to_max_bitrate; -} - void ProbeController::SetAlrStartTimeMs( absl::optional alr_start_time_ms) { if (alr_start_time_ms) { @@ -415,7 +391,6 @@ void ProbeController::SetNetworkStateEstimate( void ProbeController::Reset(Timestamp at_time) { bandwidth_limited_cause_ = BandwidthLimitedCause::kDelayBasedLimited; state_ = State::kInit; - waiting_for_initial_probe_result_ = false; min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); time_last_probing_initiated_ = Timestamp::Zero(); estimated_bitrate_ = DataRate::Zero(); @@ -477,7 +452,8 @@ std::vector ProbeController::Process(Timestamp at_time) { kMaxWaitingTimeForProbingResult) { if (state_ == State::kWaitingForProbingResult) { RTC_LOG(LS_INFO) << "kWaitingForProbingResult: timeout"; - UpdateState(State::kProbingComplete); + state_ = State::kProbingComplete; + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); } } if (estimated_bitrate_.IsZero() || state_ != State::kProbingComplete) { @@ -504,14 +480,14 @@ std::vector ProbeController::InitiateProbing( : std::min(max_total_allocated_bitrate_, max_bitrate_); if (std::min(network_estimate, estimated_bitrate_) > config_.skip_if_estimate_larger_than_fraction_of_max * max_probe_rate) { - UpdateState(State::kProbingComplete); + state_ = State::kProbingComplete; + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); return {}; } } DataRate max_probe_bitrate = max_bitrate_; - if (max_total_allocated_bitrate_ > DataRate::Zero() && - !waiting_for_initial_probe_result_) { + if (max_total_allocated_bitrate_ > DataRate::Zero()) { // If a max allocated bitrate has been configured, allow probing up to 2x // that rate. 
This allows some overhead to account for bursty streams, // which otherwise would have to ramp up when the overshoot is already in @@ -579,14 +555,15 @@ std::vector ProbeController::InitiateProbing( } time_last_probing_initiated_ = now; if (probe_further) { - UpdateState(State::kWaitingForProbingResult); + state_ = State::kWaitingForProbingResult; // Dont expect probe results to be larger than a fraction of the actual // probe rate. min_bitrate_to_probe_further_ = std::min(estimate_capped_bitrate, (*(bitrates_to_probe.end() - 1))) * config_.further_probe_threshold; } else { - UpdateState(State::kProbingComplete); + state_ = State::kProbingComplete; + min_bitrate_to_probe_further_ = DataRate::PlusInfinity(); } return pending_probes; } diff --git a/modules/congestion_controller/goog_cc/probe_controller.h b/modules/congestion_controller/goog_cc/probe_controller.h index ec078adbc1..cec6157851 100644 --- a/modules/congestion_controller/goog_cc/probe_controller.h +++ b/modules/congestion_controller/goog_cc/probe_controller.h @@ -121,9 +121,6 @@ class ProbeController { Timestamp at_time); void EnablePeriodicAlrProbing(bool enable); - // The first initial probe ignores allocated bitrate constraints and probe up - // to max configured bitrate configured via SetBitrates. 
- void SetFirstProbeToMaxBitrate(bool first_probe_to_max_bitrate); void SetAlrStartTimeMs(absl::optional alr_start_time); void SetAlrEndedTimeMs(int64_t alr_end_time); @@ -151,7 +148,6 @@ class ProbeController { kProbingComplete, }; - void UpdateState(State new_state); ABSL_MUST_USE_RESULT std::vector InitiateExponentialProbing(Timestamp at_time); ABSL_MUST_USE_RESULT std::vector InitiateProbing( @@ -162,8 +158,6 @@ class ProbeController { bool TimeForNetworkStateProbe(Timestamp at_time) const; bool network_available_; - bool waiting_for_initial_probe_result_ = false; - bool first_probe_to_max_bitrate_ = false; BandwidthLimitedCause bandwidth_limited_cause_ = BandwidthLimitedCause::kDelayBasedLimited; State state_; diff --git a/modules/congestion_controller/goog_cc/probe_controller_unittest.cc b/modules/congestion_controller/goog_cc/probe_controller_unittest.cc index fb988d4c18..aa62c476d5 100644 --- a/modules/congestion_controller/goog_cc/probe_controller_unittest.cc +++ b/modules/congestion_controller/goog_cc/probe_controller_unittest.cc @@ -327,32 +327,6 @@ TEST(ProbeControllerTest, TestExponentialProbing) { EXPECT_EQ(probes[0].target_data_rate.bps(), 2 * 1800); } -TEST(ProbeControllerTest, ExponentialProbingStopIfMaxBitrateLow) { - ProbeControllerFixture fixture( - "WebRTC-Bwe-ProbingConfiguration/abort_further:true/"); - std::unique_ptr probe_controller = - fixture.CreateController(); - ASSERT_THAT( - probe_controller->OnNetworkAvailability({.network_available = true}), - IsEmpty()); - auto probes = probe_controller->SetBitrates( - kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); - ASSERT_THAT(probes, SizeIs(Gt(0))); - - // Repeated probe normally is sent when estimated bitrate climbs above - // 0.7 * 6 * kStartBitrate = 1260. But since max bitrate is low, expect - // exponential probing to stop. 
- probes = probe_controller->SetBitrates(kMinBitrate, kStartBitrate, - /*max_bitrate=*/kStartBitrate, - fixture.CurrentTime()); - EXPECT_THAT(probes, IsEmpty()); - - probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1800), BandwidthLimitedCause::kDelayBasedLimited, - fixture.CurrentTime()); - EXPECT_THAT(probes, IsEmpty()); -} - TEST(ProbeControllerTest, ExponentialProbingStopIfMaxAllocatedBitrateLow) { ProbeControllerFixture fixture( "WebRTC-Bwe-ProbingConfiguration/abort_further:true/"); @@ -378,39 +352,6 @@ TEST(ProbeControllerTest, ExponentialProbingStopIfMaxAllocatedBitrateLow) { EXPECT_THAT(probes, IsEmpty()); } -TEST(ProbeControllerTest, - InitialProbingIgnoreLowMaxAllocatedbitrateIfSetFirstProbeToMaxBitrate) { - ProbeControllerFixture fixture( - "WebRTC-Bwe-ProbingConfiguration/abort_further:true/"); - std::unique_ptr probe_controller = - fixture.CreateController(); - ASSERT_THAT( - probe_controller->OnNetworkAvailability({.network_available = true}), - IsEmpty()); - auto probes = probe_controller->SetBitrates( - kMinBitrate, kStartBitrate, kMaxBitrate, fixture.CurrentTime()); - ASSERT_THAT(probes, SizeIs(Gt(0))); - probe_controller->SetFirstProbeToMaxBitrate(true); - - // Repeated probe is sent when estimated bitrate climbs above - // 0.7 * 6 * kStartBitrate = 1260. During the initial probe, we ignore the - // allocation limit and probe up to the max. 
- probes = probe_controller->OnMaxTotalAllocatedBitrate(kStartBitrate, - fixture.CurrentTime()); - EXPECT_THAT(probes, IsEmpty()); - - probes = probe_controller->SetEstimatedBitrate( - DataRate::BitsPerSec(1800), BandwidthLimitedCause::kDelayBasedLimited, - fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 1u); - EXPECT_EQ(probes[0].target_data_rate.bps(), 2 * 1800); - - probes = probe_controller->SetEstimatedBitrate( - probes[0].target_data_rate, BandwidthLimitedCause::kDelayBasedLimited, - fixture.CurrentTime()); - EXPECT_EQ(probes.size(), 1u); -} - TEST(ProbeControllerTest, TestExponentialProbingTimeout) { ProbeControllerFixture fixture; std::unique_ptr probe_controller = From 46226b524cf2445bbb34e9c0ccfbcfd3bee7e4a9 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Wed, 12 Jun 2024 15:37:39 +0800 Subject: [PATCH 03/49] Update to m125. (#119) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Use M125 as the latest version and migrate historical patches to m125 Patches Group: ## 1. Update README.md https://github.com/webrtc-sdk/webrtc/pull/119/commits/b6c65fc7f0e81ed305e5578467b7198833665352 * Add Apache-2.0 license and some note to README.md. (#9) * Updated readme detailing changes from original (#42) * Adding membrane framework (#51) * Updated readme (#83) ## 2. Audio Device Optimization https://github.com/webrtc-sdk/webrtc/pull/119/commits/7454824ea6481cae2f70ad58fb1e1eb5d3ecdc88 * allow listen-only mode in AudioUnit, adjust when category changes (https://github.com/webrtc-sdk/webrtc/pull/2) * release mic when category changes (https://github.com/webrtc-sdk/webrtc/pull/5) * Change defaults to iOS defaults (https://github.com/webrtc-sdk/webrtc/pull/7) * Sync audio session config (https://github.com/webrtc-sdk/webrtc/pull/8) * feat: support bypass voice processing for iOS. 
(https://github.com/webrtc-sdk/webrtc/pull/15) * Remove MacBookPro audio pan right code (https://github.com/webrtc-sdk/webrtc/pull/22) * fix: Fix can't open mic alone when built-in AEC is enabled. (https://github.com/webrtc-sdk/webrtc/pull/29) * feat: add audio device changes detect for windows. (https://github.com/webrtc-sdk/webrtc/pull/41) * fix Linux compile (https://github.com/webrtc-sdk/webrtc/pull/47) * AudioUnit: Don't rely on category switch for mic indicator to turn off (https://github.com/webrtc-sdk/webrtc/pull/52) * Stop recording on mute (turn off mic indicator) (https://github.com/webrtc-sdk/webrtc/pull/55) * Cherry pick audio selection from m97 release (https://github.com/webrtc-sdk/webrtc/pull/35) * [Mac] Allow audio device selection (https://github.com/webrtc-sdk/webrtc/pull/21) * RTCAudioDeviceModule.outputDevice / inputDevice getter and setter (https://github.com/webrtc-sdk/webrtc/pull/80) * Allow custom audio processing by exposing AudioProcessingModule (https://github.com/webrtc-sdk/webrtc/pull/85) * Expose audio sample buffers for Android (https://github.com/webrtc-sdk/webrtc/pull/89) * feat: add external audio processor for android. (https://github.com/webrtc-sdk/webrtc/pull/103) * android: make audio output attributes modifiable (https://github.com/webrtc-sdk/webrtc/pull/118) * Fix external audio processor sample rate calculation (https://github.com/webrtc-sdk/webrtc/pull/108) * Expose remote audio sample buffers on RTCAudioTrack (https://github.com/webrtc-sdk/webrtc/pull/84) * Fix memory leak when creating audio CMSampleBuffer https://github.com/webrtc-sdk/webrtc/pull/86 ## 3. Simulcast/SVC support for iOS/Android. https://github.com/webrtc-sdk/webrtc/pull/119/commits/b0b9fe93b1b4f0f8cdd80919a05f999124ff8bce - Simulcast support for iOS SDK (#4) - Support for simulcast in Android SDK (#3) - include simulcast headers for mac also (#10) - Fix simulcast using hardware encoder on Android (#48) - Add scalabilityMode support for AV1/VP9. 
(#90) ## 4. Android improvements. https://github.com/webrtc-sdk/webrtc/pull/119/commits/9aaaab5a20fec2c84616ca709ac9c1aae6de9225 - Start/Stop receiving stream method for VideoTrack (#25) - Properly remove observer upon deconstruction (#26) - feat: Expose setCodecPreferences/getCapabilities for android. (#61) - fix: add WrappedVideoDecoderFactory.java. (#74) ## 5. Darwin improvements https://github.com/webrtc-sdk/webrtc/pull/119/commits/a13ea178e46ee2ab222f1553c9164b6556e1abd8 - [Mac/iOS] feat: Add RTCYUVHelper for darwin. (#28) - Cross-platform `RTCMTLVideoView` for both iOS / macOS (#40) - rotationOverride should not be assign (#44) - [ObjC] Expose properties / methods required for AV1 codec support (#60) - Workaround: Render PixelBuffer in RTCMTLVideoView (#58) - Improve iOS/macOS H264 encoder (#70) - fix: fix video encoder not resuming correctly upon foregrounding (#75). - add PrivacyInfo.xcprivacy to darwin frameworks. (#112) - Add NSPrivacyCollectedDataTypes key to xcprivacy file (#114) - Thread-safe `RTCInitFieldTrialDictionary` (#116) - Set RTCCameraVideoCapturer initial zoom factor (#121) - Unlock configuration before starting capture session (#122) ## 6. Desktop Capture for macOS. https://github.com/webrtc-sdk/webrtc/pull/119/commits/841d78f0631aff91590c1493909c4f3fccd8fafe - [Mac] feat: Support screen capture for macOS. (#24) (#36) - fix: Get thumbnails asynchronously. (#37) - fix: Use CVPixelBuffer to build DesktopCapture Frame, fix the crash caused by non-CVPixelBuffer frame in RTCVideoEncoderH264 that cannot be cropped. (#63) - Fix the crash when setting the fps of the virtual camera. (#62) ## 7. Frame Cryptor Support. https://github.com/webrtc-sdk/webrtc/pull/119/commits/fc08745d9c5218aed45829942a3590c97c19c76a - feat: Frame Cryptor (aes gcm/cbc). (#54) - feat: key ratchet/derive. (#66) - fix: skip invalid key when decryption failed. (#81) - Improve e2ee, add setSharedKey to KeyProvider. (#88) - add failure tolerance for framecryptor. 
(#91) - fix h264 freeze. (#93) - Fix/send frame cryptor events from signaling thread (#95) - more improvements for E2EE. (#96) - remove too verbose logs (#107) - Add key ring size to keyProviderOptions. (#109) ## 8. Other improvements. https://github.com/webrtc-sdk/webrtc/pull/119/commits/eed6c8a1bff6c0e16a850be07597e59ed8a68d7e - Added yuv_helper (#57) - ABGRToI420, ARGBToI420 & ARGBToRGB24 (#65) - more yuv wrappers (#87) - Fix naming for yuv helper (#113) - Fix missing `RTC_OBJC_TYPE` macros (#100) --------- Co-authored-by: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Co-authored-by: David Zhao Co-authored-by: davidliu Co-authored-by: Angelika Serwa Co-authored-by: Théo Monnom --- .gitignore | 6 + NOTICE | 26 + README.md | 63 +- api/BUILD.gn | 1 + api/crypto/BUILD.gn | 18 + api/crypto/frame_crypto_transformer.cc | 727 ++++++++++++++++++ api/crypto/frame_crypto_transformer.h | 482 ++++++++++++ api/frame_transformer_interface.h | 2 + api/media_stream_interface.cc | 4 + api/media_stream_interface.h | 2 + api/peer_connection_interface.h | 9 + api/video/BUILD.gn | 13 + api/video/yuv_helper.cc | 416 ++++++++++ api/video/yuv_helper.h | 316 ++++++++ api/video_codecs/video_encoder_factory.h | 7 +- audio/audio_send_stream.cc | 5 + audio/audio_send_stream.h | 1 + audio/audio_state.cc | 64 +- audio/audio_state.h | 5 + audio/channel_receive.cc | 4 +- audio/channel_send.cc | 4 +- audio/channel_send.h | 2 + call/audio_send_stream.h | 1 + call/audio_state.h | 3 + media/base/media_channel.h | 2 + media/engine/webrtc_video_engine.cc | 18 + media/engine/webrtc_video_engine.h | 3 +- media/engine/webrtc_voice_engine.h | 3 +- .../audio_device_data_observer.cc | 4 + modules/audio_device/audio_device_generic.h | 4 + modules/audio_device/audio_device_impl.cc | 42 +- modules/audio_device/audio_device_impl.h | 10 +- modules/audio_device/include/audio_device.h | 23 +- modules/audio_device/mac/audio_device_mac.cc | 293 ++++--- modules/audio_device/mac/audio_device_mac.h | 14 
+- .../audio_device/win/audio_device_core_win.cc | 70 ++ .../audio_device/win/audio_device_core_win.h | 33 + .../mac/screen_capturer_mac.mm | 11 +- ...sender_video_frame_transformer_delegate.cc | 2 + ...eam_receiver_frame_transformer_delegate.cc | 4 + pc/audio_rtp_receiver.cc | 2 +- pc/media_stream_track_proxy.h | 2 + pc/peer_connection.cc | 9 +- pc/video_rtp_receiver.cc | 41 +- pc/video_rtp_receiver.h | 11 +- pc/video_track.cc | 13 + pc/video_track.h | 4 + sdk/BUILD.gn | 129 +++- sdk/android/BUILD.gn | 42 + sdk/android/api/org/webrtc/AudioTrack.java | 48 ++ .../api/org/webrtc/AudioTrackSink.java | 27 + .../ExternalAudioProcessingFactory.java | 144 ++++ sdk/android/api/org/webrtc/FrameCryptor.java | 108 +++ .../api/org/webrtc/FrameCryptorAlgorithm.java | 22 + .../api/org/webrtc/FrameCryptorFactory.java | 44 ++ .../org/webrtc/FrameCryptorKeyProvider.java | 93 +++ .../webrtc/HardwareVideoEncoderFactory.java | 4 +- .../api/org/webrtc/LibaomAv1Encoder.java | 7 + .../api/org/webrtc/LibvpxVp9Encoder.java | 7 + .../api/org/webrtc/PeerConnection.java | 17 + .../api/org/webrtc/PeerConnectionFactory.java | 1 + sdk/android/api/org/webrtc/RtpParameters.java | 13 +- .../api/org/webrtc/SimulcastVideoEncoder.java | 28 + .../webrtc/SimulcastVideoEncoderFactory.java | 43 ++ .../api/org/webrtc/VideoCodecInfo.java | 13 +- sdk/android/api/org/webrtc/VideoTrack.java | 20 + .../webrtc/WrappedVideoDecoderFactory.java | 75 ++ .../webrtc/audio/JavaAudioDeviceModule.java | 20 +- ...ndroidVideoDecoderInstrumentationTest.java | 2 +- .../DefaultVideoEncoderFactoryTest.java | 4 +- .../SoftwareVideoDecoderFactoryTest.java | 3 +- .../SoftwareVideoEncoderFactoryTest.java | 3 +- .../org/webrtc/CodecsWrapperTestHelper.java | 3 +- .../src/java/org/webrtc/H264Utils.java | 5 +- .../java/org/webrtc/HardwareVideoEncoder.java | 15 +- .../webrtc/MediaCodecVideoDecoderFactory.java | 4 +- .../org/webrtc/audio/WebRtcAudioTrack.java | 23 +- sdk/android/src/jni/libaom_av1_encoder.cc | 12 + 
sdk/android/src/jni/pc/audio_sink.cc | 39 + sdk/android/src/jni/pc/audio_sink.h | 41 + sdk/android/src/jni/pc/audio_track.cc | 26 + .../pc/external_audio_processing_factory.cc | 143 ++++ .../pc/external_audio_processing_factory.h | 68 ++ .../pc/external_audio_processing_interface.h | 33 + .../src/jni/pc/external_audio_processor.cc | 72 ++ .../src/jni/pc/external_audio_processor.h | 57 ++ sdk/android/src/jni/pc/frame_cryptor.cc | 202 +++++ sdk/android/src/jni/pc/frame_cryptor.h | 49 ++ .../src/jni/pc/frame_cryptor_key_provider.cc | 123 +++ .../src/jni/pc/frame_cryptor_key_provider.h | 35 + sdk/android/src/jni/pc/peer_connection.cc | 3 + .../src/jni/pc/peer_connection_factory.cc | 1 + sdk/android/src/jni/pc/rtp_parameters.cc | 6 + .../src/jni/simulcast_video_encoder.cc | 35 + sdk/android/src/jni/simulcast_video_encoder.h | 22 + sdk/android/src/jni/video_codec_info.cc | 26 +- sdk/android/src/jni/video_track.cc | 11 + sdk/android/src/jni/vp9_codec.cc | 16 + sdk/objc/PrivacyInfo.xcprivacy | 28 + .../api/RTCVideoRendererAdapter+Private.h | 2 +- sdk/objc/api/RTCVideoRendererAdapter.h | 4 +- sdk/objc/api/RTCVideoRendererAdapter.mm | 19 +- .../RTCAudioDeviceModule+Private.h | 31 + .../api/peerconnection/RTCAudioDeviceModule.h | 56 ++ .../peerconnection/RTCAudioDeviceModule.mm | 294 +++++++ .../peerconnection/RTCAudioTrack+Private.h | 10 +- sdk/objc/api/peerconnection/RTCAudioTrack.h | 8 + sdk/objc/api/peerconnection/RTCAudioTrack.mm | 218 +++++- .../api/peerconnection/RTCConfiguration.h | 11 + .../api/peerconnection/RTCConfiguration.mm | 3 + .../peerconnection/RTCEncodedImage+Private.mm | 10 +- sdk/objc/api/peerconnection/RTCFieldTrials.mm | 31 +- .../peerconnection/RTCFrameCryptor+Private.h | 45 ++ sdk/objc/api/peerconnection/RTCFrameCryptor.h | 78 ++ .../api/peerconnection/RTCFrameCryptor.mm | 224 ++++++ .../RTCFrameCryptorKeyProvider+Private.h | 31 + .../RTCFrameCryptorKeyProvider.h | 62 ++ .../RTCFrameCryptorKeyProvider.mm | 124 +++ 
.../api/peerconnection/RTCIODevice+Private.h | 28 + sdk/objc/api/peerconnection/RTCIODevice.h | 41 + sdk/objc/api/peerconnection/RTCIODevice.mm | 71 ++ .../RTCPeerConnectionFactory+Native.h | 6 +- .../peerconnection/RTCPeerConnectionFactory.h | 17 + .../RTCPeerConnectionFactory.mm | 100 ++- ...nnectionFactoryBuilder+DefaultComponents.h | 4 +- ...nectionFactoryBuilder+DefaultComponents.mm | 6 +- .../RTCPeerConnectionFactoryBuilder.h | 4 +- .../RTCPeerConnectionFactoryBuilder.mm | 9 +- .../peerconnection/RTCRtpCodecParameters.h | 1 + .../peerconnection/RTCRtpCodecParameters.mm | 1 + .../peerconnection/RTCRtpEncodingParameters.h | 4 + .../RTCRtpEncodingParameters.mm | 7 + .../api/peerconnection/RTCRtpTransceiver.h | 4 + .../api/peerconnection/RTCRtpTransceiver.mm | 16 + .../RTCVideoCodecInfo+Private.mm | 17 +- sdk/objc/api/peerconnection/RTCVideoTrack.h | 3 + sdk/objc/api/peerconnection/RTCVideoTrack.mm | 27 +- sdk/objc/api/video_codec/RTCVideoEncoderAV1.h | 2 + .../api/video_codec/RTCVideoEncoderAV1.mm | 14 + .../video_codec/RTCVideoEncoderSimulcast.h | 13 + .../video_codec/RTCVideoEncoderSimulcast.mm | 64 ++ sdk/objc/api/video_codec/RTCVideoEncoderVP9.h | 2 + .../api/video_codec/RTCVideoEncoderVP9.mm | 15 +- sdk/objc/base/RTCAudioRenderer.h | 32 + sdk/objc/base/RTCVideoCodecInfo.h | 6 + sdk/objc/base/RTCVideoCodecInfo.m | 17 +- .../components/audio/RTCAudioBuffer+Private.h | 29 + sdk/objc/components/audio/RTCAudioBuffer.h | 38 + sdk/objc/components/audio/RTCAudioBuffer.mm | 55 ++ .../RTCAudioCustomProcessingAdapter+Private.h | 43 ++ .../audio/RTCAudioCustomProcessingAdapter.h | 28 + .../audio/RTCAudioCustomProcessingAdapter.mm | 139 ++++ .../audio/RTCAudioCustomProcessingDelegate.h | 52 ++ .../audio/RTCAudioProcessingConfig+Private.h | 29 + .../audio/RTCAudioProcessingConfig.h | 31 + .../audio/RTCAudioProcessingConfig.mm | 51 ++ .../audio/RTCAudioProcessingModule.h | 33 + .../audio/RTCAudioSession+Configuration.mm | 27 + sdk/objc/components/audio/RTCAudioSession.mm 
| 3 +- .../audio/RTCAudioSessionConfiguration.m | 8 +- .../RTCDefaultAudioProcessingModule+Private.h | 29 + .../audio/RTCDefaultAudioProcessingModule.h | 46 ++ .../audio/RTCDefaultAudioProcessingModule.mm | 96 +++ .../RTCNativeAudioSessionDelegateAdapter.h | 2 +- .../RTCNativeAudioSessionDelegateAdapter.mm | 2 +- .../capturer/RTCCameraVideoCapturer.h | 2 + .../capturer/RTCCameraVideoCapturer.m | 97 ++- .../capturer/RTCDesktopCapturer+Private.h | 49 ++ .../components/capturer/RTCDesktopCapturer.h | 61 ++ .../components/capturer/RTCDesktopCapturer.mm | 104 +++ .../capturer/RTCDesktopMediaList+Private.h | 40 + .../components/capturer/RTCDesktopMediaList.h | 51 ++ .../capturer/RTCDesktopMediaList.mm | 99 +++ .../capturer/RTCDesktopSource+Private.h | 37 + .../components/capturer/RTCDesktopSource.h | 40 + .../components/capturer/RTCDesktopSource.mm | 68 ++ .../network/RTCNetworkMonitor+Private.h | 14 +- .../components/network/RTCNetworkMonitor.h | 4 +- .../components/network/RTCNetworkMonitor.mm | 6 +- .../renderer/metal/RTCMTLI420Renderer.h | 2 +- .../renderer/metal/RTCMTLI420Renderer.mm | 2 +- .../renderer/metal/RTCMTLNSVideoView.m | 122 --- .../renderer/metal/RTCMTLNV12Renderer.h | 2 +- .../renderer/metal/RTCMTLNV12Renderer.mm | 2 +- .../renderer/metal/RTCMTLRGBRenderer.h | 3 +- .../renderer/metal/RTCMTLRGBRenderer.mm | 30 +- .../renderer/metal/RTCMTLRenderer+Private.h | 2 +- .../renderer/metal/RTCMTLRenderer.h | 4 +- .../renderer/metal/RTCMTLRenderer.mm | 2 +- .../renderer/metal/RTCMTLVideoView.h | 20 +- .../renderer/metal/RTCMTLVideoView.m | 99 ++- .../renderer/opengl/RTCDefaultShader.h | 2 +- .../renderer/opengl/RTCDefaultShader.mm | 2 +- .../renderer/opengl/RTCDisplayLinkTimer.h | 4 +- .../renderer/opengl/RTCDisplayLinkTimer.m | 10 +- .../renderer/opengl/RTCEAGLVideoView.m | 36 +- .../renderer/opengl/RTCI420TextureCache.h | 2 +- .../renderer/opengl/RTCI420TextureCache.mm | 2 +- .../renderer/opengl/RTCNSGLVideoView.m | 199 +++++ 
.../renderer/opengl/RTCNV12TextureCache.h | 2 +- .../renderer/opengl/RTCNV12TextureCache.m | 2 +- .../RTCDefaultVideoEncoderFactory.m | 6 +- .../RTCVideoEncoderFactorySimulcast.h | 16 + .../RTCVideoEncoderFactorySimulcast.mm | 63 ++ .../video_codec/RTCVideoEncoderH264.mm | 230 ++++-- sdk/objc/helpers/RTCYUVHelper.h | 118 +++ sdk/objc/helpers/RTCYUVHelper.mm | 179 +++++ sdk/objc/native/api/audio_device_module.mm | 4 + sdk/objc/native/api/video_capturer.mm | 2 +- sdk/objc/native/src/audio/audio_device_ios.h | 16 +- sdk/objc/native/src/audio/audio_device_ios.mm | 105 ++- .../src/audio/audio_device_module_ios.h | 3 + .../src/audio/audio_device_module_ios.mm | 6 + .../src/audio/voice_processing_audio_unit.h | 2 +- .../src/audio/voice_processing_audio_unit.mm | 28 +- sdk/objc/native/src/objc_audio_device.h | 4 +- sdk/objc/native/src/objc_audio_device.mm | 2 +- .../native/src/objc_audio_device_delegate.h | 2 +- .../native/src/objc_audio_device_delegate.mm | 2 +- sdk/objc/native/src/objc_desktop_capture.h | 70 ++ sdk/objc/native/src/objc_desktop_capture.mm | 205 +++++ sdk/objc/native/src/objc_desktop_media_list.h | 111 +++ .../native/src/objc_desktop_media_list.mm | 252 ++++++ sdk/objc/native/src/objc_network_monitor.h | 2 +- sdk/objc/native/src/objc_network_monitor.mm | 2 +- sdk/objc/native/src/objc_video_track_source.h | 6 +- .../native/src/objc_video_track_source.mm | 6 +- sdk/objc/unittests/RTCMTLVideoView_xctest.m | 6 +- .../RTCPeerConnectionFactoryBuilderTest.mm | 4 +- video/video_stream_encoder.cc | 12 +- 230 files changed, 9350 insertions(+), 684 deletions(-) create mode 100644 NOTICE create mode 100644 api/crypto/frame_crypto_transformer.cc create mode 100644 api/crypto/frame_crypto_transformer.h create mode 100644 api/video/yuv_helper.cc create mode 100644 api/video/yuv_helper.h create mode 100644 sdk/android/api/org/webrtc/AudioTrackSink.java create mode 100644 sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java create mode 100644 
sdk/android/api/org/webrtc/FrameCryptor.java create mode 100644 sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java create mode 100644 sdk/android/api/org/webrtc/FrameCryptorFactory.java create mode 100644 sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java create mode 100644 sdk/android/api/org/webrtc/SimulcastVideoEncoder.java create mode 100644 sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java create mode 100644 sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java create mode 100644 sdk/android/src/jni/pc/audio_sink.cc create mode 100644 sdk/android/src/jni/pc/audio_sink.h create mode 100644 sdk/android/src/jni/pc/external_audio_processing_factory.cc create mode 100644 sdk/android/src/jni/pc/external_audio_processing_factory.h create mode 100644 sdk/android/src/jni/pc/external_audio_processing_interface.h create mode 100644 sdk/android/src/jni/pc/external_audio_processor.cc create mode 100644 sdk/android/src/jni/pc/external_audio_processor.h create mode 100644 sdk/android/src/jni/pc/frame_cryptor.cc create mode 100644 sdk/android/src/jni/pc/frame_cryptor.h create mode 100644 sdk/android/src/jni/pc/frame_cryptor_key_provider.cc create mode 100644 sdk/android/src/jni/pc/frame_cryptor_key_provider.h create mode 100644 sdk/android/src/jni/simulcast_video_encoder.cc create mode 100644 sdk/android/src/jni/simulcast_video_encoder.h create mode 100644 sdk/objc/PrivacyInfo.xcprivacy create mode 100644 sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h create mode 100644 sdk/objc/api/peerconnection/RTCAudioDeviceModule.h create mode 100644 sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm create mode 100644 sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h create mode 100644 sdk/objc/api/peerconnection/RTCFrameCryptor.h create mode 100644 sdk/objc/api/peerconnection/RTCFrameCryptor.mm create mode 100644 sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h create mode 100644 
sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h create mode 100644 sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm create mode 100644 sdk/objc/api/peerconnection/RTCIODevice+Private.h create mode 100644 sdk/objc/api/peerconnection/RTCIODevice.h create mode 100644 sdk/objc/api/peerconnection/RTCIODevice.mm create mode 100644 sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h create mode 100644 sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm create mode 100644 sdk/objc/base/RTCAudioRenderer.h create mode 100644 sdk/objc/components/audio/RTCAudioBuffer+Private.h create mode 100644 sdk/objc/components/audio/RTCAudioBuffer.h create mode 100644 sdk/objc/components/audio/RTCAudioBuffer.mm create mode 100644 sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h create mode 100644 sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h create mode 100644 sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm create mode 100644 sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h create mode 100644 sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h create mode 100644 sdk/objc/components/audio/RTCAudioProcessingConfig.h create mode 100644 sdk/objc/components/audio/RTCAudioProcessingConfig.mm create mode 100644 sdk/objc/components/audio/RTCAudioProcessingModule.h create mode 100644 sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h create mode 100644 sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h create mode 100644 sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm create mode 100644 sdk/objc/components/capturer/RTCDesktopCapturer+Private.h create mode 100644 sdk/objc/components/capturer/RTCDesktopCapturer.h create mode 100644 sdk/objc/components/capturer/RTCDesktopCapturer.mm create mode 100644 sdk/objc/components/capturer/RTCDesktopMediaList+Private.h create mode 100644 sdk/objc/components/capturer/RTCDesktopMediaList.h create mode 100644 
sdk/objc/components/capturer/RTCDesktopMediaList.mm create mode 100644 sdk/objc/components/capturer/RTCDesktopSource+Private.h create mode 100644 sdk/objc/components/capturer/RTCDesktopSource.h create mode 100644 sdk/objc/components/capturer/RTCDesktopSource.mm delete mode 100644 sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m create mode 100644 sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m create mode 100644 sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h create mode 100644 sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm create mode 100644 sdk/objc/helpers/RTCYUVHelper.h create mode 100644 sdk/objc/helpers/RTCYUVHelper.mm create mode 100644 sdk/objc/native/src/objc_desktop_capture.h create mode 100644 sdk/objc/native/src/objc_desktop_capture.mm create mode 100644 sdk/objc/native/src/objc_desktop_media_list.h create mode 100644 sdk/objc/native/src/objc_desktop_media_list.mm diff --git a/.gitignore b/.gitignore index 8202b82014..2a4acfebb7 100644 --- a/.gitignore +++ b/.gitignore @@ -72,3 +72,9 @@ /xcodebuild /.vscode !webrtc/* +/tmp.patch +/out-release +/out-debug +/node_modules +/libwebrtc +/args.txt diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000000..3972578ec4 --- /dev/null +++ b/NOTICE @@ -0,0 +1,26 @@ +################################################################################### + +The following modifications follow Apache License 2.0 from shiguredo. + +https://github.com/webrtc-sdk/webrtc/commit/dfec53e93a0a1cb93f444caf50f844ec0068c7b7 +https://github.com/webrtc-sdk/webrtc/commit/403b4678543c5d4ac77bd1ea5753c02637b3bb89 +https://github.com/webrtc-sdk/webrtc/commit/77d5d685a90fb4bded17835ae72ec6671b26d696 + +Apache License 2.0 + +Copyright 2019-2021, Wandbox LLC (Original Author) +Copyright 2019-2021, Shiguredo Inc. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. 
+You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + +##################################################################################### \ No newline at end of file diff --git a/README.md b/README.md index 32b9f16bad..1c862c4ca4 100644 --- a/README.md +++ b/README.md @@ -1,32 +1,47 @@ -**WebRTC is a free, open software project** that provides browsers and mobile -applications with Real-Time Communications (RTC) capabilities via simple APIs. -The WebRTC components have been optimized to best serve this purpose. +# WebRTC-SDK -**Our mission:** To enable rich, high-quality RTC applications to be -developed for the browser, mobile platforms, and IoT devices, and allow them -all to communicate via a common set of protocols. +This repository contains a fork of WebRTC from Google with various improvements. -The WebRTC initiative is a project supported by Google, Mozilla and Opera, -amongst others. +## Main changes -### Development +### All -See [here][native-dev] for instructions on how to get started -developing with the native code. +- Dynamically acquire decoder to mitigate decoder limitations [#25](https://github.com/webrtc-sdk/webrtc/pull/25) +- Support for video simulcast with hardware & software encoders [patch](https://github.com/webrtc-sdk/webrtc/commit/ee030264e2274a2c90548a99b448782049e48fb4) +- Frame cryptor support (for end-to-end encryption) [patch](https://github.com/webrtc-sdk/webrtc/commit/3a2c008529a15fecde5f979a6ebb75c05463d45e) -[Authoritative list](native-api.md) of directories that contain the -native API header files. 
+### Android -### More info +- WrappedVideoDecoderFactory [#74](https://github.com/webrtc-sdk/webrtc/pull/74) - * Official web site: http://www.webrtc.org - * Master source code repo: https://webrtc.googlesource.com/src - * Samples and reference apps: https://github.com/webrtc - * Mailing list: http://groups.google.com/group/discuss-webrtc - * Continuous build: https://ci.chromium.org/p/webrtc/g/ci/console - * [Coding style guide](g3doc/style-guide.md) - * [Code of conduct](CODE_OF_CONDUCT.md) - * [Reporting bugs](docs/bug-reporting.md) - * [Documentation](g3doc/sitemap.md) +### iOS / Mac -[native-dev]: https://webrtc.googlesource.com/src/+/main/docs/native-code/ +- Sane audio handling [patch](https://github.com/webrtc-sdk/webrtc/commit/272127d457ab48e36241e82549870405864851f6) + - Do not acquire microphone/permissions unless actively publishing audio + - Ability to bypass voice processing on iOS + - Remove hardcoded limitation of outputting only to right speaker on MacBook Pro +- Desktop capture for Mac [patch](https://github.com/webrtc-sdk/webrtc/commit/8e832d1163644ab504412c9b8f3ba8510d9890d6) + +### Windows + +- Fixed unable to acquire Mic when built-in AEC is enabled [#29](https://github.com/webrtc-sdk/webrtc/pull/29) + +## LICENSE + +- [Google WebRTC](https://chromium.googlesource.com/external/webrtc.git) is licensed under [BSD license](/LICENSE). + +- Contains patches from [shiguredo-webrtc-build](https://github.com/shiguredo-webrtc-build), licensed under [Apache 2.0](/NOTICE). + +- Contains changes from LiveKit, licensed under Apache 2.0. + +## Who is using this project + +- [flutter-webrtc](https://github.com/flutter-webrtc/flutter-webrtc) + +- [LiveKit](https://github.com/livekit) + +- [Membrane Framework](https://github.com/membraneframework/membrane_rtc_engine) + +- [Louper](https://louper.io) + +Are you using WebRTC SDK in your framework or app? Feel free to open a PR and add yourself! 
diff --git a/api/BUILD.gn b/api/BUILD.gn index c6ef19ac54..1731202a19 100644 --- a/api/BUILD.gn +++ b/api/BUILD.gn @@ -368,6 +368,7 @@ rtc_library("libjingle_peerconnection_api") { "video:encoded_image", "video:video_bitrate_allocator_factory", "video:video_frame", + "video:yuv_helper", "video:video_rtp_headers", "video_codecs:video_codecs_api", diff --git a/api/crypto/BUILD.gn b/api/crypto/BUILD.gn index 8d041ea059..9249b7716a 100644 --- a/api/crypto/BUILD.gn +++ b/api/crypto/BUILD.gn @@ -16,6 +16,24 @@ group("crypto") { ] } +rtc_library("frame_crypto_transformer") { + visibility = [ "*" ] + sources = [ + "frame_crypto_transformer.cc", + "frame_crypto_transformer.h", + ] + + deps = [ + "//api:frame_transformer_interface", + ] + + if (rtc_build_ssl) { + deps += [ "//third_party/boringssl" ] + } else { + configs += [ ":external_ssl_library" ] + } +} + rtc_library("options") { visibility = [ "*" ] sources = [ diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc new file mode 100644 index 0000000000..06ec888170 --- /dev/null +++ b/api/crypto/frame_crypto_transformer.cc @@ -0,0 +1,727 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "frame_crypto_transformer.h" + +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include "absl/container/inlined_vector.h" +#include "absl/types/optional.h" +#include "absl/types/variant.h" +#include "api/array_view.h" +#include "common_video/h264/h264_common.h" +#include "modules/rtp_rtcp/source/rtp_format_h264.h" +#include "rtc_base/byte_buffer.h" +#include "rtc_base/logging.h" + +enum class EncryptOrDecrypt { kEncrypt = 0, kDecrypt }; + +#define Success 0 +#define ErrorUnexpected -1 +#define OperationError -2 +#define ErrorDataTooSmall -3 +#define ErrorInvalidAesGcmTagLength -4 + +webrtc::VideoCodecType get_video_codec_type( + webrtc::TransformableFrameInterface* frame) { + auto videoFrame = + static_cast(frame); + return videoFrame->header().codec; +} + +webrtc::H264PacketizationMode get_h264_packetization_mode( + webrtc::TransformableFrameInterface* frame) { + auto video_frame = + static_cast(frame); + const auto& h264_header = absl::get( + video_frame->header().video_type_header); + return h264_header.packetization_mode; +} + +const EVP_AEAD* GetAesGcmAlgorithmFromKeySize(size_t key_size_bytes) { + switch (key_size_bytes) { + case 16: + return EVP_aead_aes_128_gcm(); + case 32: + return EVP_aead_aes_256_gcm(); + default: + return nullptr; + } +} + +const EVP_CIPHER* GetAesCbcAlgorithmFromKeySize(size_t key_size_bytes) { + switch (key_size_bytes) { + case 16: + return EVP_aes_128_cbc(); + case 32: + return EVP_aes_256_cbc(); + default: + return nullptr; + } +} + +inline bool FrameIsH264(webrtc::TransformableFrameInterface* frame, + webrtc::FrameCryptorTransformer::MediaType type) { + switch (type) { + case webrtc::FrameCryptorTransformer::MediaType::kVideoFrame: { + auto videoFrame = + static_cast(frame); + return videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecH264; + } + default: + return false; + } +} + +inline bool NeedsRbspUnescaping(const uint8_t* frameData, size_t 
frameSize) { + for (size_t i = 0; i < frameSize - 3; ++i) { + if (frameData[i] == 0 && frameData[i + 1] == 0 && frameData[i + 2] == 3) + return true; + } + return false; +} + +std::string to_uint8_list(const uint8_t* data, int len) { + std::stringstream ss; + ss << "["; + for (int i = 0; i < len; i++) { + ss << static_cast(data[i]) << ","; + } + ss << "]"; + return ss.str(); +} + +std::string to_hex(const uint8_t* data, int len) { + std::stringstream ss; + ss << std::uppercase << std::hex << std::setfill('0'); + for (int i = 0; i < len; i++) { + ss << std::setw(2) << static_cast(data[i]); + } + return ss.str(); +} + +uint8_t get_unencrypted_bytes(webrtc::TransformableFrameInterface* frame, + webrtc::FrameCryptorTransformer::MediaType type) { + uint8_t unencrypted_bytes = 0; + switch (type) { + case webrtc::FrameCryptorTransformer::MediaType::kAudioFrame: + unencrypted_bytes = 1; + break; + case webrtc::FrameCryptorTransformer::MediaType::kVideoFrame: { + auto videoFrame = + static_cast(frame); + if (videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecAV1) { + unencrypted_bytes = 0; + } else if (videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecVP8) { + unencrypted_bytes = videoFrame->IsKeyFrame() ? 
10 : 3; + } else if (videoFrame->header().codec == + webrtc::VideoCodecType::kVideoCodecH264) { + rtc::ArrayView date_in = frame->GetData(); + std::vector nalu_indices = + webrtc::H264::FindNaluIndices(date_in.data(), date_in.size()); + + int idx = 0; + for (const auto& index : nalu_indices) { + const uint8_t* slice = date_in.data() + index.payload_start_offset; + webrtc::H264::NaluType nalu_type = + webrtc::H264::ParseNaluType(slice[0]); + switch (nalu_type) { + case webrtc::H264::NaluType::kIdr: + case webrtc::H264::NaluType::kSlice: + unencrypted_bytes = index.payload_start_offset + 2; + RTC_LOG(LS_INFO) + << "NonParameterSetNalu::payload_size: " << index.payload_size + << ", nalu_type " << nalu_type << ", NaluIndex [" << idx++ + << "] offset: " << index.payload_start_offset; + return unencrypted_bytes; + default: + break; + } + } + } + break; + } + default: + break; + } + return unencrypted_bytes; +} + +int DerivePBKDF2KeyFromRawKey(const std::vector raw_key, + const std::vector& salt, + unsigned int optional_length_bits, + std::vector* derived_key) { + size_t key_size_bytes = optional_length_bits / 8; + derived_key->resize(key_size_bytes); + + if (PKCS5_PBKDF2_HMAC((const char*)raw_key.data(), raw_key.size(), + salt.data(), salt.size(), 100000, EVP_sha256(), + key_size_bytes, derived_key->data()) != 1) { + RTC_LOG(LS_ERROR) << "Failed to derive AES key from password."; + return ErrorUnexpected; + } + + RTC_LOG(LS_INFO) << "raw_key " + << to_uint8_list(raw_key.data(), raw_key.size()) << " len " + << raw_key.size() << " slat << " + << to_uint8_list(salt.data(), salt.size()) << " len " + << salt.size() << "\n derived_key " + << to_uint8_list(derived_key->data(), derived_key->size()) + << " len " << derived_key->size(); + + return Success; +} + +int AesGcmEncryptDecrypt(EncryptOrDecrypt mode, + const std::vector raw_key, + const rtc::ArrayView data, + unsigned int tag_length_bytes, + rtc::ArrayView iv, + rtc::ArrayView additional_data, + const EVP_AEAD* aead_alg, 
+ std::vector* buffer) { + bssl::ScopedEVP_AEAD_CTX ctx; + + if (!aead_alg) { + RTC_LOG(LS_ERROR) << "Invalid AES-GCM key size."; + return ErrorUnexpected; + } + + if (!EVP_AEAD_CTX_init(ctx.get(), aead_alg, raw_key.data(), raw_key.size(), + tag_length_bytes, nullptr)) { + RTC_LOG(LS_ERROR) << "Failed to initialize AES-GCM context."; + return OperationError; + } + + size_t len; + int ok; + + if (mode == EncryptOrDecrypt::kDecrypt) { + if (data.size() < tag_length_bytes) { + RTC_LOG(LS_ERROR) << "Data too small for AES-GCM tag."; + return ErrorDataTooSmall; + } + + buffer->resize(data.size() - tag_length_bytes); + + ok = EVP_AEAD_CTX_open(ctx.get(), buffer->data(), &len, buffer->size(), + iv.data(), iv.size(), data.data(), data.size(), + additional_data.data(), additional_data.size()); + } else { + buffer->resize(data.size() + EVP_AEAD_max_overhead(aead_alg)); + + ok = EVP_AEAD_CTX_seal(ctx.get(), buffer->data(), &len, buffer->size(), + iv.data(), iv.size(), data.data(), data.size(), + additional_data.data(), additional_data.size()); + } + + if (!ok) { + RTC_LOG(LS_WARNING) << "Failed to perform AES-GCM operation."; + return OperationError; + } + + buffer->resize(len); + + return Success; +} + +int AesCbcEncryptDecrypt(EncryptOrDecrypt mode, + const std::vector& raw_key, + rtc::ArrayView iv, + const rtc::ArrayView input, + std::vector* output) { + const EVP_CIPHER* cipher = GetAesCbcAlgorithmFromKeySize(raw_key.size()); + RTC_DCHECK(cipher); // Already handled in Init(); + RTC_DCHECK_EQ(EVP_CIPHER_iv_length(cipher), iv.size()); + RTC_DCHECK_EQ(EVP_CIPHER_key_length(cipher), raw_key.size()); + + bssl::ScopedEVP_CIPHER_CTX ctx; + if (!EVP_CipherInit_ex(ctx.get(), cipher, nullptr, + reinterpret_cast(raw_key.data()), + iv.data(), + mode == EncryptOrDecrypt::kEncrypt ? 1 : 0)) { + return OperationError; + } + + // Encrypting needs a block size of space to allow for any padding. + output->resize(input.size() + + (mode == EncryptOrDecrypt::kEncrypt ? 
iv.size() : 0)); + int out_len; + if (!EVP_CipherUpdate(ctx.get(), output->data(), &out_len, input.data(), + input.size())) + return OperationError; + + // Write out the final block plus padding (if any) to the end of the data + // just written. + int tail_len; + if (!EVP_CipherFinal_ex(ctx.get(), output->data() + out_len, &tail_len)) + return OperationError; + + out_len += tail_len; + RTC_CHECK_LE(out_len, static_cast(output->size())); + return Success; +} + +int AesEncryptDecrypt(EncryptOrDecrypt mode, + webrtc::FrameCryptorTransformer::Algorithm algorithm, + const std::vector& raw_key, + rtc::ArrayView iv, + rtc::ArrayView additional_data, + const rtc::ArrayView data, + std::vector* buffer) { + switch (algorithm) { + case webrtc::FrameCryptorTransformer::Algorithm::kAesGcm: { + unsigned int tag_length_bits = 128; + return AesGcmEncryptDecrypt( + mode, raw_key, data, tag_length_bits / 8, iv, additional_data, + GetAesGcmAlgorithmFromKeySize(raw_key.size()), buffer); + } + case webrtc::FrameCryptorTransformer::Algorithm::kAesCbc: + return AesCbcEncryptDecrypt(mode, raw_key, iv, data, buffer); + } +} +namespace webrtc { + +FrameCryptorTransformer::FrameCryptorTransformer( + rtc::Thread* signaling_thread, + const std::string participant_id, + MediaType type, + Algorithm algorithm, + rtc::scoped_refptr key_provider) + : signaling_thread_(signaling_thread), + thread_(rtc::Thread::Create()), + participant_id_(participant_id), + type_(type), + algorithm_(algorithm), + key_provider_(key_provider) { + RTC_DCHECK(key_provider_ != nullptr); + thread_->SetName("FrameCryptorTransformer", this); + thread_->Start(); +} + +FrameCryptorTransformer::~FrameCryptorTransformer() { + thread_->Stop(); +} + +void FrameCryptorTransformer::Transform( + std::unique_ptr frame) { + webrtc::MutexLock lock(&sink_mutex_); + if (sink_callback_ == nullptr && sink_callbacks_.size() == 0) { + RTC_LOG(LS_WARNING) + << "FrameCryptorTransformer::Transform sink_callback_ is NULL"; + return; + } + + // 
do encrypt or decrypt here... + switch (frame->GetDirection()) { + case webrtc::TransformableFrameInterface::Direction::kSender: + RTC_DCHECK(thread_ != nullptr); + thread_->PostTask([frame = std::move(frame), this]() mutable { + encryptFrame(std::move(frame)); + }); + break; + case webrtc::TransformableFrameInterface::Direction::kReceiver: + RTC_DCHECK(thread_ != nullptr); + thread_->PostTask([frame = std::move(frame), this]() mutable { + decryptFrame(std::move(frame)); + }); + break; + case webrtc::TransformableFrameInterface::Direction::kUnknown: + // do nothing + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::Transform() kUnknown"; + break; + } +} + +void FrameCryptorTransformer::encryptFrame( + std::unique_ptr frame) { + bool enabled_cryption = false; + rtc::scoped_refptr sink_callback = nullptr; + { + webrtc::MutexLock lock(&mutex_); + enabled_cryption = enabled_cryption_; + if (type_ == webrtc::FrameCryptorTransformer::MediaType::kAudioFrame) { + sink_callback = sink_callback_; + } else { + sink_callback = sink_callbacks_[frame->GetSsrc()]; + } + } + + if (sink_callback == nullptr) { + RTC_LOG(LS_WARNING) + << "FrameCryptorTransformer::encryptFrame() sink_callback is NULL"; + if (last_enc_error_ != FrameCryptionState::kInternalError) { + last_enc_error_ = FrameCryptionState::kInternalError; + onFrameCryptionStateChanged(last_enc_error_); + } + return; + } + + rtc::ArrayView date_in = frame->GetData(); + if (date_in.size() == 0 || !enabled_cryption) { + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::encryptFrame() " + "date_in.size() == 0 || enabled_cryption == false"; + if(key_provider_->options().discard_frame_when_cryptor_not_ready) { + return; + } + sink_callback->OnTransformedFrame(std::move(frame)); + return; + } + + auto key_handler = key_provider_->options().shared_key + ? 
key_provider_->GetSharedKey(participant_id_) + : key_provider_->GetKey(participant_id_); + + if (key_handler == nullptr || key_handler->GetKeySet(key_index_) == nullptr) { + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::encryptFrame() no keys, or " + "key_index[" + << key_index_ << "] out of range for participant " + << participant_id_; + if (last_enc_error_ != FrameCryptionState::kMissingKey) { + last_enc_error_ = FrameCryptionState::kMissingKey; + onFrameCryptionStateChanged(last_enc_error_); + } + return; + } + + auto key_set = key_handler->GetKeySet(key_index_); + uint8_t unencrypted_bytes = get_unencrypted_bytes(frame.get(), type_); + + rtc::Buffer frame_header(unencrypted_bytes); + for (size_t i = 0; i < unencrypted_bytes; i++) { + frame_header[i] = date_in[i]; + } + + rtc::Buffer frame_trailer(2); + frame_trailer[0] = getIvSize(); + frame_trailer[1] = key_index_; + rtc::Buffer iv = makeIv(frame->GetSsrc(), frame->GetTimestamp()); + + rtc::Buffer payload(date_in.size() - unencrypted_bytes); + for (size_t i = unencrypted_bytes; i < date_in.size(); i++) { + payload[i - unencrypted_bytes] = date_in[i]; + } + + std::vector buffer; + if (AesEncryptDecrypt(EncryptOrDecrypt::kEncrypt, algorithm_, + key_set->encryption_key, iv, frame_header, payload, + &buffer) == Success) { + rtc::Buffer encrypted_payload(buffer.data(), buffer.size()); + rtc::Buffer tag(encrypted_payload.data() + encrypted_payload.size() - 16, + 16); + rtc::Buffer data_without_header; + data_without_header.AppendData(encrypted_payload); + data_without_header.AppendData(iv); + data_without_header.AppendData(frame_trailer); + + rtc::Buffer data_out; + data_out.AppendData(frame_header); + + if (FrameIsH264(frame.get(), type_)) { + H264::WriteRbsp(data_without_header.data(), data_without_header.size(), + &data_out); + } else { + data_out.AppendData(data_without_header); + RTC_CHECK_EQ(data_out.size(), frame_header.size() + + encrypted_payload.size() + iv.size() + + frame_trailer.size()); + } + + 
frame->SetData(data_out); + + if (last_enc_error_ != FrameCryptionState::kOk) { + last_enc_error_ = FrameCryptionState::kOk; + onFrameCryptionStateChanged(last_enc_error_); + } + sink_callback->OnTransformedFrame(std::move(frame)); + } else { + if (last_enc_error_ != FrameCryptionState::kEncryptionFailed) { + last_enc_error_ = FrameCryptionState::kEncryptionFailed; + onFrameCryptionStateChanged(last_enc_error_); + } + RTC_LOG(LS_ERROR) << "FrameCryptorTransformer::encryptFrame() failed"; + } +} + +void FrameCryptorTransformer::decryptFrame( + std::unique_ptr frame) { + bool enabled_cryption = false; + rtc::scoped_refptr sink_callback = nullptr; + { + webrtc::MutexLock lock(&mutex_); + enabled_cryption = enabled_cryption_; + if (type_ == webrtc::FrameCryptorTransformer::MediaType::kAudioFrame) { + sink_callback = sink_callback_; + } else { + sink_callback = sink_callbacks_[frame->GetSsrc()]; + } + } + + if (sink_callback == nullptr) { + RTC_LOG(LS_WARNING) + << "FrameCryptorTransformer::decryptFrame() sink_callback is NULL"; + if (last_dec_error_ != FrameCryptionState::kInternalError) { + last_dec_error_ = FrameCryptionState::kInternalError; + onFrameCryptionStateChanged(last_dec_error_); + } + return; + } + + rtc::ArrayView date_in = frame->GetData(); + + if (date_in.size() == 0 || !enabled_cryption) { + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() " + "date_in.size() == 0 || enabled_cryption == false"; + if(key_provider_->options().discard_frame_when_cryptor_not_ready) { + return; + } + + sink_callback->OnTransformedFrame(std::move(frame)); + return; + } + + auto uncrypted_magic_bytes = key_provider_->options().uncrypted_magic_bytes; + if (uncrypted_magic_bytes.size() > 0 && + date_in.size() >= uncrypted_magic_bytes.size()) { + auto tmp = date_in.subview(date_in.size() - (uncrypted_magic_bytes.size()), + uncrypted_magic_bytes.size()); + auto data = std::vector(tmp.begin(), tmp.end()); + if (uncrypted_magic_bytes == data) { + 
RTC_CHECK_EQ(tmp.size(), uncrypted_magic_bytes.size()); + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::uncrypted_magic_bytes( tmp " + << to_hex(tmp.data(), tmp.size()) << ", magic bytes " + << to_hex(uncrypted_magic_bytes.data(), + uncrypted_magic_bytes.size()) + << ")"; + + // magic bytes detected, this is a non-encrypted frame, skip frame + // decryption. + rtc::Buffer data_out; + data_out.AppendData( + date_in.subview(0, date_in.size() - uncrypted_magic_bytes.size())); + frame->SetData(data_out); + sink_callback->OnTransformedFrame(std::move(frame)); + return; + } + } + + uint8_t unencrypted_bytes = get_unencrypted_bytes(frame.get(), type_); + + rtc::Buffer frame_header(unencrypted_bytes); + for (size_t i = 0; i < unencrypted_bytes; i++) { + frame_header[i] = date_in[i]; + } + + rtc::Buffer frame_trailer(2); + frame_trailer[0] = date_in[date_in.size() - 2]; + frame_trailer[1] = date_in[date_in.size() - 1]; + uint8_t ivLength = frame_trailer[0]; + uint8_t key_index = frame_trailer[1]; + + if (ivLength != getIvSize()) { + RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() ivLength[" + << static_cast(ivLength) << "] != getIvSize()[" + << static_cast(getIvSize()) << "]"; + if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { + last_dec_error_ = FrameCryptionState::kDecryptionFailed; + onFrameCryptionStateChanged(last_dec_error_); + } + return; + } + + auto key_handler = key_provider_->options().shared_key + ? 
key_provider_->GetSharedKey(participant_id_) + : key_provider_->GetKey(participant_id_); + + if (0 > key_index || key_index >= key_provider_->options().key_ring_size || key_handler == nullptr || + key_handler->GetKeySet(key_index) == nullptr) { + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() no keys, or " + "key_index[" + << key_index << "] out of range for participant " + << participant_id_; + if (last_dec_error_ != FrameCryptionState::kMissingKey) { + last_dec_error_ = FrameCryptionState::kMissingKey; + onFrameCryptionStateChanged(last_dec_error_); + } + return; + } + + if (last_dec_error_ == kDecryptionFailed && !key_handler->HasValidKey()) { + // if decryption failed and we have an invalid key, + // please try to decrypt with the next new key + return; + } + + auto key_set = key_handler->GetKeySet(key_index); + + rtc::Buffer iv = rtc::Buffer(ivLength); + for (size_t i = 0; i < ivLength; i++) { + iv[i] = date_in[date_in.size() - 2 - ivLength + i]; + } + + rtc::Buffer encrypted_buffer(date_in.size() - unencrypted_bytes); + for (size_t i = unencrypted_bytes; i < date_in.size(); i++) { + encrypted_buffer[i - unencrypted_bytes] = date_in[i]; + } + + if (FrameIsH264(frame.get(), type_) && + NeedsRbspUnescaping(encrypted_buffer.data(), encrypted_buffer.size())) { + encrypted_buffer.SetData( + H264::ParseRbsp(encrypted_buffer.data(), encrypted_buffer.size())); + } + + rtc::Buffer encrypted_payload(encrypted_buffer.size() - ivLength - 2); + for (size_t i = 0; i < encrypted_payload.size(); i++) { + encrypted_payload[i] = encrypted_buffer[i]; + } + + rtc::Buffer tag(encrypted_payload.data() + encrypted_payload.size() - 16, 16); + std::vector buffer; + + int ratchet_count = 0; + auto initialKeyMaterial = key_set->material; + bool decryption_success = false; + if (AesEncryptDecrypt(EncryptOrDecrypt::kDecrypt, algorithm_, + key_set->encryption_key, iv, frame_header, + encrypted_payload, &buffer) == Success) { + decryption_success = true; + } else { + 
RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() failed"; + rtc::scoped_refptr ratcheted_key_set; + auto currentKeyMaterial = key_set->material; + if (key_provider_->options().ratchet_window_size > 0) { + while (ratchet_count < key_provider_->options().ratchet_window_size) { + ratchet_count++; + + RTC_LOG(LS_INFO) << "ratcheting key attempt " << ratchet_count << " of " + << key_provider_->options().ratchet_window_size; + + auto new_material = key_handler->RatchetKeyMaterial(currentKeyMaterial); + ratcheted_key_set = key_handler->DeriveKeys( + new_material, key_provider_->options().ratchet_salt, 128); + + if (AesEncryptDecrypt(EncryptOrDecrypt::kDecrypt, algorithm_, + ratcheted_key_set->encryption_key, iv, + frame_header, encrypted_payload, + &buffer) == Success) { + RTC_LOG(LS_INFO) << "FrameCryptorTransformer::decryptFrame() " + "ratcheted to key_index=" + << static_cast(key_index); + decryption_success = true; + // success, so we set the new key + key_handler->SetKeyFromMaterial(new_material, key_index); + key_handler->SetHasValidKey(); + if (last_dec_error_ != FrameCryptionState::kKeyRatcheted) { + last_dec_error_ = FrameCryptionState::kKeyRatcheted; + onFrameCryptionStateChanged(last_dec_error_); + } + break; + } + // for the next ratchet attempt + currentKeyMaterial = new_material; + } + + /* Since the key it is first send and only afterwards actually used for + encrypting, there were situations when the decrypting failed due to the + fact that the received frame was not encrypted yet and ratcheting, of + course, did not solve the problem. So if we fail RATCHET_WINDOW_SIZE + times, we come back to the initial key. 
+ */ + if (!decryption_success || + ratchet_count >= key_provider_->options().ratchet_window_size) { + key_handler->SetKeyFromMaterial(initialKeyMaterial, key_index); + } + } + } + + if (!decryption_success) { + if (key_handler->DecryptionFailure()) { + if (last_dec_error_ != FrameCryptionState::kDecryptionFailed) { + last_dec_error_ = FrameCryptionState::kDecryptionFailed; + onFrameCryptionStateChanged(last_dec_error_); + } + } + return; + } + + rtc::Buffer payload(buffer.data(), buffer.size()); + rtc::Buffer data_out; + data_out.AppendData(frame_header); + data_out.AppendData(payload); + frame->SetData(data_out); + + if (last_dec_error_ != FrameCryptionState::kOk) { + last_dec_error_ = FrameCryptionState::kOk; + onFrameCryptionStateChanged(last_dec_error_); + } + sink_callback->OnTransformedFrame(std::move(frame)); +} + +void FrameCryptorTransformer::onFrameCryptionStateChanged( + FrameCryptionState state) { + webrtc::MutexLock lock(&mutex_); + if (observer_) { + RTC_DCHECK(signaling_thread_ != nullptr); + signaling_thread_->PostTask([observer = observer_, state = state, + participant_id = participant_id_]() mutable { + observer->OnFrameCryptionStateChanged(participant_id, state); + }); + } +} + +rtc::Buffer FrameCryptorTransformer::makeIv(uint32_t ssrc, uint32_t timestamp) { + uint32_t send_count = 0; + if (send_counts_.find(ssrc) == send_counts_.end()) { + srand((unsigned)time(NULL)); + send_counts_[ssrc] = floor(rand() * 0xFFFF); + } else { + send_count = send_counts_[ssrc]; + } + rtc::ByteBufferWriter buf; + buf.WriteUInt32(ssrc); + buf.WriteUInt32(timestamp); + buf.WriteUInt32(timestamp - (send_count % 0xFFFF)); + send_counts_[ssrc] = send_count + 1; + + RTC_CHECK_EQ(buf.Length(), getIvSize()); + + return rtc::Buffer(buf.Data(), buf.Length()); +} + +uint8_t FrameCryptorTransformer::getIvSize() { + switch (algorithm_) { + case Algorithm::kAesGcm: + return 12; + case Algorithm::kAesCbc: + return 16; + default: + return 0; + } +} + +} // namespace webrtc diff 
--git a/api/crypto/frame_crypto_transformer.h b/api/crypto/frame_crypto_transformer.h new file mode 100644 index 0000000000..9689ec1593 --- /dev/null +++ b/api/crypto/frame_crypto_transformer.h @@ -0,0 +1,482 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef WEBRTC_FRAME_CRYPTOR_TRANSFORMER_H_ +#define WEBRTC_FRAME_CRYPTOR_TRANSFORMER_H_ + +#include + +#include "api/frame_transformer_interface.h" +#include "api/task_queue/pending_task_safety_flag.h" +#include "api/task_queue/task_queue_base.h" +#include "rtc_base/buffer.h" +#include "rtc_base/synchronization/mutex.h" +#include "rtc_base/system/rtc_export.h" +#include "rtc_base/thread.h" + +int DerivePBKDF2KeyFromRawKey(const std::vector raw_key, + const std::vector& salt, + unsigned int optional_length_bits, + std::vector* derived_key); + +namespace webrtc { + +const size_t DEFAULT_KEYRING_SIZE = 16; +const size_t MAX_KEYRING_SIZE = 255; + +class ParticipantKeyHandler; + +struct KeyProviderOptions { + bool shared_key; + std::vector ratchet_salt; + std::vector uncrypted_magic_bytes; + int ratchet_window_size; + int failure_tolerance; + // key ring size should be between 1 and 255 + int key_ring_size; + bool discard_frame_when_cryptor_not_ready; + KeyProviderOptions() + : shared_key(false), + ratchet_window_size(0), + failure_tolerance(-1), + key_ring_size(DEFAULT_KEYRING_SIZE), + discard_frame_when_cryptor_not_ready(false) {} + 
KeyProviderOptions(KeyProviderOptions& copy) + : shared_key(copy.shared_key), + ratchet_salt(copy.ratchet_salt), + uncrypted_magic_bytes(copy.uncrypted_magic_bytes), + ratchet_window_size(copy.ratchet_window_size), + failure_tolerance(copy.failure_tolerance), + key_ring_size(copy.key_ring_size) {} +}; + +class KeyProvider : public rtc::RefCountInterface { + public: + virtual bool SetSharedKey(int key_index, std::vector key) = 0; + + virtual const rtc::scoped_refptr GetSharedKey( + const std::string participant_id) = 0; + + virtual const std::vector RatchetSharedKey(int key_index) = 0; + + virtual const std::vector ExportSharedKey(int key_index) const = 0; + + virtual bool SetKey(const std::string participant_id, + int key_index, + std::vector key) = 0; + + virtual const rtc::scoped_refptr GetKey( + const std::string participant_id) const = 0; + + virtual const std::vector RatchetKey( + const std::string participant_id, + int key_index) = 0; + + virtual const std::vector ExportKey(const std::string participant_id, + int key_index) const = 0; + + virtual void SetSifTrailer(const std::vector trailer) = 0; + + virtual KeyProviderOptions& options() = 0; + + protected: + virtual ~KeyProvider() {} +}; + +class ParticipantKeyHandler : public rtc::RefCountInterface { + public: + struct KeySet : public rtc::RefCountInterface { + std::vector material; + std::vector encryption_key; + KeySet(std::vector material, std::vector encryptionKey) + : material(material), encryption_key(encryptionKey) {} + }; + + public: + ParticipantKeyHandler(KeyProvider* key_provider) + : key_provider_(key_provider) { + int key_ring_size = key_provider_->options().key_ring_size; + if(key_ring_size <= 0) { + key_ring_size = DEFAULT_KEYRING_SIZE; + } else if (key_ring_size > (int)MAX_KEYRING_SIZE) { + // Keyring size needs to be between 1 and 255 + key_ring_size = MAX_KEYRING_SIZE; + } + crypto_key_ring_.resize(key_ring_size); + } + + virtual ~ParticipantKeyHandler() = default; + + rtc::scoped_refptr 
Clone() { + auto clone = rtc::make_ref_counted(key_provider_); + clone->crypto_key_ring_ = crypto_key_ring_; + clone->current_key_index_ = current_key_index_; + clone->has_valid_key_ = has_valid_key_; + return clone; + } + + virtual std::vector RatchetKey(int key_index) { + auto key_set = GetKeySet(key_index); + if (!key_set) { + return std::vector(); + } + auto current_material = key_set->material; + std::vector new_material; + if (DerivePBKDF2KeyFromRawKey(current_material, + key_provider_->options().ratchet_salt, 256, + &new_material) != 0) { + return std::vector(); + } + SetKeyFromMaterial(new_material, + key_index != -1 ? key_index : current_key_index_); + SetHasValidKey(); + return new_material; + } + + virtual rtc::scoped_refptr GetKeySet(int key_index) { + webrtc::MutexLock lock(&mutex_); + return crypto_key_ring_[key_index != -1 ? key_index : current_key_index_]; + } + + virtual void SetKey(std::vector password, int key_index) { + SetKeyFromMaterial(password, key_index); + SetHasValidKey(); + } + + std::vector RatchetKeyMaterial( + std::vector current_material) { + std::vector new_material; + if (DerivePBKDF2KeyFromRawKey(current_material, + key_provider_->options().ratchet_salt, 256, + &new_material) != 0) { + return std::vector(); + } + return new_material; + } + + rtc::scoped_refptr DeriveKeys(std::vector password, + std::vector ratchet_salt, + unsigned int optional_length_bits) { + std::vector derived_key; + if (DerivePBKDF2KeyFromRawKey(password, ratchet_salt, optional_length_bits, + &derived_key) == 0) { + return rtc::make_ref_counted(password, derived_key); + } + return nullptr; + } + + bool HasValidKey() { + webrtc::MutexLock lock(&mutex_); + return has_valid_key_; + } + + void SetHasValidKey() { + webrtc::MutexLock lock(&mutex_); + decryption_failure_count_ = 0; + has_valid_key_ = true; + } + + void SetKeyFromMaterial(std::vector password, int key_index) { + webrtc::MutexLock lock(&mutex_); + if (key_index >= 0) { + current_key_index_ = key_index 
% crypto_key_ring_.size(); + } + crypto_key_ring_[current_key_index_] = + DeriveKeys(password, key_provider_->options().ratchet_salt, 128); + } + + bool DecryptionFailure() { + webrtc::MutexLock lock(&mutex_); + if (key_provider_->options().failure_tolerance < 0) { + return false; + } + decryption_failure_count_ += 1; + + if (decryption_failure_count_ > + key_provider_->options().failure_tolerance) { + has_valid_key_ = false; + return true; + } + return false; + } + + private: + bool has_valid_key_ = false; + int decryption_failure_count_ = 0; + mutable webrtc::Mutex mutex_; + int current_key_index_ = 0; + KeyProvider* key_provider_; + std::vector> crypto_key_ring_; +}; + +class DefaultKeyProviderImpl : public KeyProvider { + public: + DefaultKeyProviderImpl(KeyProviderOptions options) : options_(options) {} + ~DefaultKeyProviderImpl() override = default; + + /// Set the shared key. + bool SetSharedKey(int key_index, std::vector key) override { + webrtc::MutexLock lock(&mutex_); + if (options_.shared_key) { + if (keys_.find("shared") == keys_.end()) { + keys_["shared"] = rtc::make_ref_counted(this); + } + + auto key_handler = keys_["shared"]; + key_handler->SetKey(key, key_index); + + for (auto& key_pair : keys_) { + if (key_pair.first != "shared") { + key_pair.second->SetKey(key, key_index); + } + } + return true; + } + return false; + } + + const std::vector RatchetSharedKey(int key_index) override { + webrtc::MutexLock lock(&mutex_); + auto it = keys_.find("shared"); + if (it == keys_.end()) { + return std::vector(); + } + auto new_key = it->second->RatchetKey(key_index); + if (options_.shared_key) { + for (auto& key_pair : keys_) { + if (key_pair.first != "shared") { + key_pair.second->SetKey(new_key, key_index); + } + } + } + return new_key; + } + + const std::vector ExportSharedKey(int key_index) const override { + webrtc::MutexLock lock(&mutex_); + auto it = keys_.find("shared"); + if (it == keys_.end()) { + return std::vector(); + } + auto key_set = 
it->second->GetKeySet(key_index); + if (key_set) { + return key_set->material; + } + return std::vector(); + } + + const rtc::scoped_refptr GetSharedKey( + const std::string participant_id) override { + webrtc::MutexLock lock(&mutex_); + if (options_.shared_key && keys_.find("shared") != keys_.end()) { + auto shared_key_handler = keys_["shared"]; + if (keys_.find(participant_id) != keys_.end()) { + return keys_[participant_id]; + } else { + auto key_handler_clone = shared_key_handler->Clone(); + keys_[participant_id] = key_handler_clone; + return key_handler_clone; + } + } + return nullptr; + } + + /// Set the key at the given index. + bool SetKey(const std::string participant_id, + int index, + std::vector key) override { + webrtc::MutexLock lock(&mutex_); + + if (keys_.find(participant_id) == keys_.end()) { + keys_[participant_id] = + rtc::make_ref_counted(this); + } + + auto key_handler = keys_[participant_id]; + key_handler->SetKey(key, index); + return true; + } + + const rtc::scoped_refptr GetKey( + const std::string participant_id) const override { + webrtc::MutexLock lock(&mutex_); + + if (keys_.find(participant_id) == keys_.end()) { + return nullptr; + } + + return keys_.find(participant_id)->second; + } + + const std::vector RatchetKey(const std::string participant_id, + int key_index) override { + auto key_handler = GetKey(participant_id); + if (key_handler) { + return key_handler->RatchetKey(key_index); + } + return std::vector(); + } + + const std::vector ExportKey(const std::string participant_id, + int key_index) const override { + auto key_handler = GetKey(participant_id); + if (key_handler) { + auto key_set = key_handler->GetKeySet(key_index); + if (key_set) { + return key_set->material; + } + } + return std::vector(); + } + + void SetSifTrailer(const std::vector trailer) override { + webrtc::MutexLock lock(&mutex_); + options_.uncrypted_magic_bytes = trailer; + } + + KeyProviderOptions& options() override { return options_; } + + private: + 
mutable webrtc::Mutex mutex_; + KeyProviderOptions options_; + std::unordered_map> + keys_; +}; + +enum FrameCryptionState { + kNew = 0, + kOk, + kEncryptionFailed, + kDecryptionFailed, + kMissingKey, + kKeyRatcheted, + kInternalError, +}; + +class FrameCryptorTransformerObserver : public rtc::RefCountInterface { + public: + virtual void OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState error) = 0; + + protected: + virtual ~FrameCryptorTransformerObserver() {} +}; + +class RTC_EXPORT FrameCryptorTransformer + : public rtc::RefCountedObject { + public: + enum class MediaType { + kAudioFrame = 0, + kVideoFrame, + }; + + enum class Algorithm { + kAesGcm = 0, + kAesCbc, + }; + + explicit FrameCryptorTransformer( + rtc::Thread* signaling_thread, + const std::string participant_id, + MediaType type, + Algorithm algorithm, + rtc::scoped_refptr key_provider); + ~FrameCryptorTransformer(); + virtual void RegisterFrameCryptorTransformerObserver( + rtc::scoped_refptr observer) { + webrtc::MutexLock lock(&mutex_); + observer_ = observer; + } + + virtual void UnRegisterFrameCryptorTransformerObserver() { + webrtc::MutexLock lock(&mutex_); + observer_ = nullptr; + } + + virtual void SetKeyIndex(int index) { + webrtc::MutexLock lock(&mutex_); + key_index_ = index; + } + + virtual int key_index() const { return key_index_; } + + virtual void SetEnabled(bool enabled) { + webrtc::MutexLock lock(&mutex_); + enabled_cryption_ = enabled; + } + virtual bool enabled() const { + webrtc::MutexLock lock(&mutex_); + return enabled_cryption_; + } + virtual const std::string participant_id() const { return participant_id_; } + + protected: + virtual void RegisterTransformedFrameCallback( + rtc::scoped_refptr callback) override { + webrtc::MutexLock lock(&sink_mutex_); + sink_callback_ = callback; + } + virtual void UnregisterTransformedFrameCallback() override { + webrtc::MutexLock lock(&sink_mutex_); + sink_callback_ = nullptr; + } + virtual void 
RegisterTransformedFrameSinkCallback( + rtc::scoped_refptr callback, + uint32_t ssrc) override { + webrtc::MutexLock lock(&sink_mutex_); + sink_callbacks_[ssrc] = callback; + } + virtual void UnregisterTransformedFrameSinkCallback(uint32_t ssrc) override { + webrtc::MutexLock lock(&sink_mutex_); + auto it = sink_callbacks_.find(ssrc); + if (it != sink_callbacks_.end()) { + sink_callbacks_.erase(it); + } + } + + virtual void Transform( + std::unique_ptr frame) override; + + private: + void encryptFrame(std::unique_ptr frame); + void decryptFrame(std::unique_ptr frame); + void onFrameCryptionStateChanged(FrameCryptionState error); + rtc::Buffer makeIv(uint32_t ssrc, uint32_t timestamp); + uint8_t getIvSize(); + + private: + TaskQueueBase* const signaling_thread_; + std::unique_ptr thread_; + std::string participant_id_; + mutable webrtc::Mutex mutex_; + mutable webrtc::Mutex sink_mutex_; + bool enabled_cryption_ RTC_GUARDED_BY(mutex_) = false; + MediaType type_; + Algorithm algorithm_; + rtc::scoped_refptr sink_callback_; + std::map> + sink_callbacks_; + int key_index_ = 0; + std::map send_counts_; + rtc::scoped_refptr key_provider_; + rtc::scoped_refptr observer_; + FrameCryptionState last_enc_error_ = FrameCryptionState::kNew; + FrameCryptionState last_dec_error_ = FrameCryptionState::kNew; +}; + +} // namespace webrtc + +#endif // WEBRTC_FRAME_CRYPTOR_TRANSFORMER_H_ diff --git a/api/frame_transformer_interface.h b/api/frame_transformer_interface.h index 102ac33334..3640cbccc1 100644 --- a/api/frame_transformer_interface.h +++ b/api/frame_transformer_interface.h @@ -64,6 +64,8 @@ class TransformableVideoFrameInterface : public TransformableFrameInterface { virtual VideoFrameMetadata Metadata() const = 0; virtual void SetMetadata(const VideoFrameMetadata&) = 0; + + virtual const RTPVideoHeader& header () const = 0; }; // Extends the TransformableFrameInterface to expose audio-specific information. 
diff --git a/api/media_stream_interface.cc b/api/media_stream_interface.cc index 6b0a6a9297..d8a79896f3 100644 --- a/api/media_stream_interface.cc +++ b/api/media_stream_interface.cc @@ -19,6 +19,10 @@ const char* const MediaStreamTrackInterface::kVideoKind = const char* const MediaStreamTrackInterface::kAudioKind = cricket::kMediaTypeAudio; +bool VideoTrackInterface::should_receive() const { + return true; +} + VideoTrackInterface::ContentHint VideoTrackInterface::content_hint() const { return ContentHint::kNone; } diff --git a/api/media_stream_interface.h b/api/media_stream_interface.h index 4f9bffac85..86682918fb 100644 --- a/api/media_stream_interface.h +++ b/api/media_stream_interface.h @@ -188,6 +188,8 @@ class RTC_EXPORT VideoTrackInterface virtual VideoTrackSourceInterface* GetSource() const = 0; + virtual void set_should_receive(bool should_receive) {} + virtual bool should_receive() const; virtual ContentHint content_hint() const; virtual void set_content_hint(ContentHint hint) {} diff --git a/api/peer_connection_interface.h b/api/peer_connection_interface.h index ca6baacd0d..60e071fef4 100644 --- a/api/peer_connection_interface.h +++ b/api/peer_connection_interface.h @@ -680,6 +680,15 @@ class RTC_EXPORT PeerConnectionInterface : public webrtc::RefCountInterface { // The burst interval of the pacer, see TaskQueuePacedSender constructor. absl::optional pacer_burst_interval; + // When this flag is set, ports not bound to any specific network interface + // will be used, in addition to normal ports bound to the enumerated + // interfaces. Without this flag, these "any address" ports would only be + // used when network enumeration fails or is disabled. But under certain + // conditions, these ports may succeed where others fail, so they may allow + // the application to work in a wider variety of environments, at the expense + // of having to allocate additional candidates. 
+ bool enable_any_address_ports = false; + // // Don't forget to update operator== if adding something. // diff --git a/api/video/BUILD.gn b/api/video/BUILD.gn index 5ec689c096..60cd07c512 100644 --- a/api/video/BUILD.gn +++ b/api/video/BUILD.gn @@ -81,6 +81,19 @@ rtc_library("video_frame") { absl_deps = [ "//third_party/abseil-cpp/absl/types:optional" ] } +rtc_library("yuv_helper") { + visibility = [ "*" ] + sources = [ + "yuv_helper.cc", + "yuv_helper.h", + ] + + deps = [ + "../../rtc_base/system:rtc_export", + "//third_party/libyuv", + ] +} + if (is_android) { java_cpp_enum("video_frame_enums") { sources = [ "video_frame_buffer.h" ] diff --git a/api/video/yuv_helper.cc b/api/video/yuv_helper.cc new file mode 100644 index 0000000000..eab9126183 --- /dev/null +++ b/api/video/yuv_helper.cc @@ -0,0 +1,416 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "yuv_helper.h" + +#include "libyuv/convert.h" +#include "libyuv/convert_argb.h" +#include "libyuv/convert_from_argb.h" +#include "libyuv/row.h" +#include "third_party/libyuv/include/libyuv.h" +#include "video_rotation.h" + +namespace webrtc { + +int I420Rotate(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + VideoRotation mode) { + return libyuv::I420Rotate(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height, + static_cast(mode)); +} + +int I420ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return libyuv::I420ToNV12(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_uv, + dst_stride_uv, width, height); +} + +int I420ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::I420ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, width, + height); +} + +int I420ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height) { + return libyuv::I420ToBGRA(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_bgra, dst_stride_bgra, width, + height); +} + +int I420ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int 
src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I420ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I420ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height) { + return libyuv::I420ToRGBA(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgba, dst_stride_rgba, width, + height); +} + +int I420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return libyuv::I420ToRGB24(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_rgb24, dst_stride_rgb24, width, + height); +} + +int I420Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + libyuv::FilterMode filtering) { + return libyuv::I420Scale(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, src_width, src_height, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, + dst_stride_v, dst_width, dst_height, filtering); +} + +int ARGBToI420(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::ARGBToI420(src_argb, src_stride_argb, dst_y, dst_stride_y, + dst_u, dst_stride_u, dst_v, dst_stride_v, width, + height); +} + +int ABGRToI420(const uint8_t* src_abgr, + int 
src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::ABGRToI420(src_abgr, src_stride_abgr, dst_y, dst_stride_y, + dst_u, dst_stride_u, dst_v, dst_stride_v, width, + height); +} + +int ARGBToRGB24(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height) { + return libyuv::ARGBToRGB24(src_argb, src_stride_argb, dst_rgb24, + dst_stride_rgb24, width, height); +} + +int NV12ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::NV12ToI420(src_y, src_stride_y, src_uv, src_stride_uv, dst_y, + dst_stride_y, dst_u, dst_stride_u, dst_v, + dst_stride_v, width, height); +} + +int I444ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::I444ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height); +} + +int I422ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::I422ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height); +} + +int I010ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + 
const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height) { + return libyuv::I010ToI420(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_y, dst_stride_y, dst_u, + dst_stride_u, dst_v, dst_stride_v, width, height); +} + +int NV12ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::NV12ToARGB(src_y, src_stride_y, src_uv, src_stride_uv, + dst_argb, dst_stride_argb, width, height); +} + +int NV12ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::NV12ToABGR(src_y, src_stride_y, src_uv, src_stride_uv, + dst_abgr, dst_stride_abgr, width, height); +} + +int I444ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I444ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I444ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I444ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I422ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::I422ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, 
dst_argb, dst_stride_argb, width, + height); +} + +int I422ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I422ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int I010ToARGB(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height) { + return libyuv::I010ToARGB(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_argb, dst_stride_argb, width, + height); +} + +int I010ToABGR(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height) { + return libyuv::I010ToABGR(src_y, src_stride_y, src_u, src_stride_u, src_v, + src_stride_v, dst_abgr, dst_stride_abgr, width, + height); +} + +int ABGRToNV12(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return libyuv::ABGRToNV12(src_abgr, src_stride_abgr, dst_y, dst_stride_y, + dst_uv, dst_stride_uv, width, height); +} + +int ARGBToNV12(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height) { + return libyuv::ARGBToNV12(src_argb, src_stride_argb, dst_y, dst_stride_y, + dst_uv, dst_stride_uv, width, height); +} + +} // namespace webrtc diff --git a/api/video/yuv_helper.h b/api/video/yuv_helper.h new file mode 100644 index 0000000000..5e86fb378b --- /dev/null +++ b/api/video/yuv_helper.h @@ -0,0 +1,316 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "libyuv/convert.h" +#include "rtc_base/system/rtc_export.h" +#include "stdint.h" +#include "third_party/libyuv/include/libyuv.h" +#include "video_rotation.h" + +namespace webrtc { + +RTC_EXPORT int I420Rotate(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height, + VideoRotation mode); + +RTC_EXPORT int I420ToNV12(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +RTC_EXPORT int I420ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int I420ToBGRA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_bgra, + int dst_stride_bgra, + int width, + int height); + +RTC_EXPORT int I420ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int 
height); + +RTC_EXPORT int I420ToRGBA(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgba, + int dst_stride_rgba, + int width, + int height); + +RTC_EXPORT int I420ToRGB24(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +RTC_EXPORT int I420Scale(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + int src_width, + int src_height, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int dst_width, + int dst_height, + libyuv::FilterMode filtering); + +RTC_EXPORT int ARGBToI420(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int ABGRToI420(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int ARGBToRGB24(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_rgb24, + int dst_stride_rgb24, + int width, + int height); + +RTC_EXPORT int NV12ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int I444ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); 
+ +RTC_EXPORT int I422ToI420(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int I010ToI420(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_u, + int dst_stride_u, + uint8_t* dst_v, + int dst_stride_v, + int width, + int height); + +RTC_EXPORT int NV12ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int NV12ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_uv, + int src_stride_uv, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I444ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I444ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I422ToARGB(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int I422ToABGR(const uint8_t* src_y, + int src_stride_y, + const uint8_t* src_u, + int src_stride_u, + const uint8_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int I010ToARGB(const uint16_t* src_y, + int src_stride_y, + const uint16_t* 
src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_argb, + int dst_stride_argb, + int width, + int height); + +RTC_EXPORT int I010ToABGR(const uint16_t* src_y, + int src_stride_y, + const uint16_t* src_u, + int src_stride_u, + const uint16_t* src_v, + int src_stride_v, + uint8_t* dst_abgr, + int dst_stride_abgr, + int width, + int height); + +RTC_EXPORT int ABGRToNV12(const uint8_t* src_abgr, + int src_stride_abgr, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +RTC_EXPORT int ARGBToNV12(const uint8_t* src_argb, + int src_stride_argb, + uint8_t* dst_y, + int dst_stride_y, + uint8_t* dst_uv, + int dst_stride_uv, + int width, + int height); + +} // namespace webrtc diff --git a/api/video_codecs/video_encoder_factory.h b/api/video_codecs/video_encoder_factory.h index d6e7ff93c5..ddc4eb7cb8 100644 --- a/api/video_codecs/video_encoder_factory.h +++ b/api/video_codecs/video_encoder_factory.h @@ -85,13 +85,8 @@ class VideoEncoderFactory { virtual CodecSupport QueryCodecSupport( const SdpVideoFormat& format, absl::optional scalability_mode) const { - // Default implementation, query for supported formats and check if the - // specified format is supported. Returns false if scalability_mode is - // specified. 
CodecSupport codec_support; - if (!scalability_mode) { - codec_support.is_supported = format.IsCodecInList(GetSupportedFormats()); - } + codec_support.is_supported = format.IsCodecInList(GetSupportedFormats()); return codec_support; } diff --git a/audio/audio_send_stream.cc b/audio/audio_send_stream.cc index 59b0ea5b5e..0804bc8286 100644 --- a/audio/audio_send_stream.cc +++ b/audio/audio_send_stream.cc @@ -415,6 +415,11 @@ void AudioSendStream::SetMuted(bool muted) { channel_send_->SetInputMute(muted); } +bool AudioSendStream::GetMuted() { + RTC_DCHECK_RUN_ON(&worker_thread_checker_); + return channel_send_->InputMute(); +} + webrtc::AudioSendStream::Stats AudioSendStream::GetStats() const { return GetStats(true); } diff --git a/audio/audio_send_stream.h b/audio/audio_send_stream.h index a37c8fd452..b7c265fa30 100644 --- a/audio/audio_send_stream.h +++ b/audio/audio_send_stream.h @@ -94,6 +94,7 @@ class AudioSendStream final : public webrtc::AudioSendStream, int payload_frequency, int event, int duration_ms) override; + bool GetMuted() override; void SetMuted(bool muted) override; webrtc::AudioSendStream::Stats GetStats() const override; webrtc::AudioSendStream::Stats GetStats( diff --git a/audio/audio_state.cc b/audio/audio_state.cc index 6f20e7b128..3a21d9b3a9 100644 --- a/audio/audio_state.cc +++ b/audio/audio_state.cc @@ -98,14 +98,26 @@ void AudioState::AddSendingStream(webrtc::AudioSendStream* stream, UpdateAudioTransportWithSendingStreams(); // Make sure recording is initialized; start recording if enabled. - auto* adm = config_.audio_device_module.get(); - if (!adm->Recording()) { - if (adm->InitRecording() == 0) { - if (recording_enabled_) { - adm->StartRecording(); + if (ShouldRecord()) { + auto* adm = config_.audio_device_module.get(); + if (!adm->Recording()) { + if (adm->InitRecording() == 0) { + if (recording_enabled_) { + + // TODO: Verify if the following windows only logic is still required. 
+#if defined(WEBRTC_WIN) + if (adm->BuiltInAECIsAvailable() && !adm->Playing()) { + if (!adm->PlayoutIsInitialized()) { + adm->InitPlayout(); + } + adm->StartPlayout(); + } +#endif + adm->StartRecording(); + } + } else { + RTC_DLOG_F(LS_ERROR) << "Failed to initialize recording."; } - } else { - RTC_DLOG_F(LS_ERROR) << "Failed to initialize recording."; } } } @@ -115,7 +127,8 @@ void AudioState::RemoveSendingStream(webrtc::AudioSendStream* stream) { auto count = sending_streams_.erase(stream); RTC_DCHECK_EQ(1, count); UpdateAudioTransportWithSendingStreams(); - if (sending_streams_.empty()) { + + if (!ShouldRecord()) { config_.audio_device_module->StopRecording(); } } @@ -143,7 +156,7 @@ void AudioState::SetRecording(bool enabled) { if (recording_enabled_ != enabled) { recording_enabled_ = enabled; if (enabled) { - if (!sending_streams_.empty()) { + if (ShouldRecord()) { config_.audio_device_module->StartRecording(); } } else { @@ -203,6 +216,39 @@ void AudioState::UpdateNullAudioPollerState() { null_audio_poller_.Stop(); } } + +void AudioState::OnMuteStreamChanged() { + + auto* adm = config_.audio_device_module.get(); + bool should_record = ShouldRecord(); + + if (should_record && !adm->Recording()) { + if (adm->InitRecording() == 0) { + adm->StartRecording(); + } + } else if (!should_record && adm->Recording()) { + adm->StopRecording(); + } +} + +bool AudioState::ShouldRecord() { + // no streams to send + if (sending_streams_.empty()) { + return false; + } + + int stream_count = sending_streams_.size(); + + int muted_count = 0; + for (const auto& kv : sending_streams_) { + if (kv.first->GetMuted()) { + muted_count++; + } + } + + return muted_count != stream_count; +} + } // namespace internal rtc::scoped_refptr AudioState::Create( diff --git a/audio/audio_state.h b/audio/audio_state.h index 88aaaa3697..f21cca771e 100644 --- a/audio/audio_state.h +++ b/audio/audio_state.h @@ -47,6 +47,8 @@ class AudioState : public webrtc::AudioState { void 
SetStereoChannelSwapping(bool enable) override; + void OnMuteStreamChanged() override; + AudioDeviceModule* audio_device_module() { RTC_DCHECK(config_.audio_device_module); return config_.audio_device_module.get(); @@ -64,6 +66,9 @@ class AudioState : public webrtc::AudioState { void UpdateAudioTransportWithSendingStreams(); void UpdateNullAudioPollerState() RTC_RUN_ON(&thread_checker_); + // Returns true when at least 1 stream exists and all streams are not muted. + bool ShouldRecord(); + SequenceChecker thread_checker_; SequenceChecker process_thread_checker_{SequenceChecker::kDetached}; const webrtc::AudioState::Config config_; diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc index 17cf859ed8..d94ac6a70c 100644 --- a/audio/channel_receive.cc +++ b/audio/channel_receive.cc @@ -362,7 +362,6 @@ void ChannelReceive::OnReceivedPayloadData( void ChannelReceive::InitFrameTransformerDelegate( rtc::scoped_refptr frame_transformer) { RTC_DCHECK(frame_transformer); - RTC_DCHECK(!frame_transformer_delegate_); RTC_DCHECK(worker_thread_->IsCurrent()); // Pass a callback to ChannelReceive::OnReceivedPayloadData, to be called by @@ -922,6 +921,9 @@ void ChannelReceive::SetDepacketizerToDecoderFrameTransformer( RTC_DCHECK_NOTREACHED() << "Not setting the transformer?"; return; } + if(frame_transformer_delegate_) { + frame_transformer_delegate_->Reset(); + } if (frame_transformer_delegate_) { // Depending on when the channel is created, the transformer might be set // twice. Don't replace the delegate if it was already initialized. diff --git a/audio/channel_send.cc b/audio/channel_send.cc index 4a2700177b..bc474ee33d 100644 --- a/audio/channel_send.cc +++ b/audio/channel_send.cc @@ -100,6 +100,8 @@ class ChannelSend : public ChannelSendInterface, // Muting, Volume and Level. void SetInputMute(bool enable) override; + bool InputMute() const override; + // Stats. 
ANAStats GetANAStatistics() const override; @@ -163,8 +165,6 @@ class ChannelSend : public ChannelSendInterface, size_t payloadSize, int64_t absolute_capture_timestamp_ms) override; - bool InputMute() const; - int32_t SendRtpAudio(AudioFrameType frameType, uint8_t payloadType, uint32_t rtp_timestamp_without_offset, diff --git a/audio/channel_send.h b/audio/channel_send.h index b6a6a37bf5..c16be987b4 100644 --- a/audio/channel_send.h +++ b/audio/channel_send.h @@ -83,6 +83,8 @@ class ChannelSendInterface { virtual bool SendTelephoneEventOutband(int event, int duration_ms) = 0; virtual void OnBitrateAllocation(BitrateAllocationUpdate update) = 0; virtual int GetTargetBitrate() const = 0; + + virtual bool InputMute() const = 0; virtual void SetInputMute(bool muted) = 0; virtual void ProcessAndEncodeAudio( diff --git a/call/audio_send_stream.h b/call/audio_send_stream.h index 9c2fad652f..7e73ab2ce6 100644 --- a/call/audio_send_stream.h +++ b/call/audio_send_stream.h @@ -190,6 +190,7 @@ class AudioSendStream : public AudioSender { int event, int duration_ms) = 0; + virtual bool GetMuted() = 0; virtual void SetMuted(bool muted) = 0; virtual Stats GetStats() const = 0; diff --git a/call/audio_state.h b/call/audio_state.h index 79fb5cf981..85f04758dd 100644 --- a/call/audio_state.h +++ b/call/audio_state.h @@ -59,6 +59,9 @@ class AudioState : public rtc::RefCountInterface { virtual void SetStereoChannelSwapping(bool enable) = 0; + // Notify the AudioState that a stream updated it's mute state. 
+ virtual void OnMuteStreamChanged() = 0; + static rtc::scoped_refptr Create( const AudioState::Config& config); diff --git a/media/base/media_channel.h b/media/base/media_channel.h index 124f68c6ff..739254c5c6 100644 --- a/media/base/media_channel.h +++ b/media/base/media_channel.h @@ -1002,6 +1002,8 @@ class VideoMediaReceiveChannelInterface : public MediaReceiveChannelInterface { webrtc::RtcpMode rtcp_mode, absl::optional rtx_time) = 0; virtual bool AddDefaultRecvStreamForTesting(const StreamParams& sp) = 0; + virtual void StartReceive(uint32_t ssrc) {} + virtual void StopReceive(uint32_t ssrc) {} }; } // namespace cricket diff --git a/media/engine/webrtc_video_engine.cc b/media/engine/webrtc_video_engine.cc index d4dccdf18f..7178f9d561 100644 --- a/media/engine/webrtc_video_engine.cc +++ b/media/engine/webrtc_video_engine.cc @@ -3500,6 +3500,24 @@ void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream::SetReceiverParameters( } } +void WebRtcVideoReceiveChannel::StartReceive(uint32_t ssrc) { + RTC_DCHECK_RUN_ON(&thread_checker_); + WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); + if(!stream) { + return; + } + stream->StartReceiveStream(); +} + +void WebRtcVideoReceiveChannel::StopReceive(uint32_t ssrc) { + RTC_DCHECK_RUN_ON(&thread_checker_); + WebRtcVideoReceiveStream* stream = FindReceiveStream(ssrc); + if(!stream) { + return; + } + stream->StopReceiveStream(); +} + void WebRtcVideoReceiveChannel::WebRtcVideoReceiveStream:: RecreateReceiveStream() { RTC_DCHECK_RUN_ON(&thread_checker_); diff --git a/media/engine/webrtc_video_engine.h b/media/engine/webrtc_video_engine.h index d370a5445b..279884da12 100644 --- a/media/engine/webrtc_video_engine.h +++ b/media/engine/webrtc_video_engine.h @@ -632,7 +632,8 @@ class WebRtcVideoReceiveChannel : public MediaChannelUtil, bool nack_enabled, webrtc::RtcpMode rtcp_mode, absl::optional rtx_time) override; - + void StartReceive(uint32_t ssrc) override; + void StopReceive(uint32_t ssrc) override; private: 
class WebRtcVideoReceiveStream; struct ChangedReceiverParameters { diff --git a/media/engine/webrtc_voice_engine.h b/media/engine/webrtc_voice_engine.h index b28b9652bb..9d18ef6302 100644 --- a/media/engine/webrtc_voice_engine.h +++ b/media/engine/webrtc_voice_engine.h @@ -132,6 +132,8 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { absl::optional GetAudioDeviceStats() override; + // Moved to public so WebRtcVoiceMediaChannel can access it. + webrtc::AudioState* audio_state(); private: // Every option that is "set" will be applied. Every option not "set" will be @@ -145,7 +147,6 @@ class WebRtcVoiceEngine final : public VoiceEngineInterface { webrtc::AudioDeviceModule* adm(); webrtc::AudioProcessing* apm() const; - webrtc::AudioState* audio_state(); std::vector CollectCodecs( const std::vector& specs) const; diff --git a/modules/audio_device/audio_device_data_observer.cc b/modules/audio_device/audio_device_data_observer.cc index 0524830327..88a8301c4f 100644 --- a/modules/audio_device/audio_device_data_observer.cc +++ b/modules/audio_device/audio_device_data_observer.cc @@ -307,6 +307,10 @@ class ADMWrapper : public AudioDeviceModule, public AudioTransport { } #endif // WEBRTC_IOS + int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const override { + return impl_->SetAudioDeviceSink(sink); + } + protected: rtc::scoped_refptr impl_; AudioDeviceDataObserver* legacy_observer_ = nullptr; diff --git a/modules/audio_device/audio_device_generic.h b/modules/audio_device/audio_device_generic.h index 41e24eb3b0..0585129de4 100644 --- a/modules/audio_device/audio_device_generic.h +++ b/modules/audio_device/audio_device_generic.h @@ -135,6 +135,10 @@ class AudioDeviceGeneric { virtual int GetRecordAudioParameters(AudioParameters* params) const; #endif // WEBRTC_IOS + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink) { return -1; } + virtual int32_t GetPlayoutDevice() const { return -1; } + virtual int32_t GetRecordingDevice() const { return -1; } + 
virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) = 0; virtual ~AudioDeviceGeneric() {} diff --git a/modules/audio_device/audio_device_impl.cc b/modules/audio_device/audio_device_impl.cc index 80ed928933..0cd86d7cdb 100644 --- a/modules/audio_device/audio_device_impl.cc +++ b/modules/audio_device/audio_device_impl.cc @@ -63,15 +63,17 @@ namespace webrtc { rtc::scoped_refptr AudioDeviceModule::Create( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory) { + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing) { RTC_DLOG(LS_INFO) << __FUNCTION__; - return AudioDeviceModule::CreateForTest(audio_layer, task_queue_factory); + return AudioDeviceModule::CreateForTest(audio_layer, task_queue_factory, bypass_voice_processing); } // static rtc::scoped_refptr AudioDeviceModule::CreateForTest( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory) { + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing) { RTC_DLOG(LS_INFO) << __FUNCTION__; // The "AudioDeviceModule::kWindowsCoreAudio2" audio layer has its own @@ -92,7 +94,7 @@ rtc::scoped_refptr AudioDeviceModule::CreateForTest( // Create the generic reference counted (platform independent) implementation. auto audio_device = rtc::make_ref_counted( - audio_layer, task_queue_factory); + audio_layer, task_queue_factory, bypass_voice_processing); // Ensure that the current platform is supported. 
if (audio_device->CheckPlatform() == -1) { @@ -115,8 +117,13 @@ rtc::scoped_refptr AudioDeviceModule::CreateForTest( AudioDeviceModuleImpl::AudioDeviceModuleImpl( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory) - : audio_layer_(audio_layer), audio_device_buffer_(task_queue_factory) { + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing) + : audio_layer_(audio_layer), +#if defined(WEBRTC_IOS) + bypass_voice_processing_(bypass_voice_processing), +#endif + audio_device_buffer_(task_queue_factory) { RTC_DLOG(LS_INFO) << __FUNCTION__; } @@ -240,7 +247,7 @@ int32_t AudioDeviceModuleImpl::CreatePlatformSpecificObjects() { #if defined(WEBRTC_IOS) if (audio_layer == kPlatformDefaultAudio) { audio_device_.reset( - new ios_adm::AudioDeviceIOS(/*bypass_voice_processing=*/false)); + new ios_adm::AudioDeviceIOS(/*bypass_voice_processing=*/bypass_voice_processing_)); RTC_LOG(LS_INFO) << "iPhone Audio APIs will be utilized."; } // END #if defined(WEBRTC_IOS) @@ -895,6 +902,27 @@ int AudioDeviceModuleImpl::GetRecordAudioParameters( } #endif // WEBRTC_IOS +int32_t AudioDeviceModuleImpl::SetAudioDeviceSink(AudioDeviceSink* sink) const { + RTC_LOG(LS_INFO) << __FUNCTION__ << "(" << sink << ")"; + int32_t ok = audio_device_->SetAudioDeviceSink(sink); + RTC_LOG(LS_INFO) << "output: " << ok; + return ok; +} + +int32_t AudioDeviceModuleImpl::GetPlayoutDevice() const { + RTC_LOG(LS_INFO) << __FUNCTION__; + int32_t r = audio_device_->GetPlayoutDevice(); + RTC_LOG(LS_INFO) << "output: " << r; + return r; +} + +int32_t AudioDeviceModuleImpl::GetRecordingDevice() const { + RTC_LOG(LS_INFO) << __FUNCTION__; + int32_t r = audio_device_->GetRecordingDevice(); + RTC_LOG(LS_INFO) << "output: " << r; + return r; +} + AudioDeviceModuleImpl::PlatformType AudioDeviceModuleImpl::Platform() const { RTC_LOG(LS_INFO) << __FUNCTION__; return platform_type_; diff --git a/modules/audio_device/audio_device_impl.h b/modules/audio_device/audio_device_impl.h index 
46d91a46c8..fd9b62c65b 100644 --- a/modules/audio_device/audio_device_impl.h +++ b/modules/audio_device/audio_device_impl.h @@ -47,7 +47,8 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { int32_t AttachAudioBuffer(); AudioDeviceModuleImpl(AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory); + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing = false); // If `create_detached` is true, created ADM can be used on another thread // compared to the one on which it was created. It's useful for testing. AudioDeviceModuleImpl(AudioLayer audio_layer, @@ -155,6 +156,10 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { int GetRecordAudioParameters(AudioParameters* params) const override; #endif // WEBRTC_IOS + int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const override; + int32_t GetPlayoutDevice() const override; + int32_t GetRecordingDevice() const override; + AudioDeviceBuffer* GetAudioDeviceBuffer() { return &audio_device_buffer_; } int RestartPlayoutInternally() override { return -1; } @@ -169,6 +174,9 @@ class AudioDeviceModuleImpl : public AudioDeviceModuleForTest { AudioLayer audio_layer_; PlatformType platform_type_ = kPlatformNotSupported; bool initialized_ = false; +#if defined(WEBRTC_IOS) + bool bypass_voice_processing_; +#endif AudioDeviceBuffer audio_device_buffer_; std::unique_ptr audio_device_; }; diff --git a/modules/audio_device/include/audio_device.h b/modules/audio_device/include/audio_device.h index 47d2aecfa7..58019cc24f 100644 --- a/modules/audio_device/include/audio_device.h +++ b/modules/audio_device/include/audio_device.h @@ -12,16 +12,25 @@ #define MODULES_AUDIO_DEVICE_INCLUDE_AUDIO_DEVICE_H_ #include "absl/types/optional.h" -#include "api/ref_count.h" #include "api/scoped_refptr.h" #include "api/task_queue/task_queue_factory.h" #include "modules/audio_device/include/audio_device_defines.h" +#include "rtc_base/ref_count.h" namespace webrtc { class AudioDeviceModuleForTest; -class 
AudioDeviceModule : public webrtc::RefCountInterface { +// Sink for callbacks related to a audio device. +class AudioDeviceSink { + public: + virtual ~AudioDeviceSink() = default; + + // input/output devices updated or default device changed + virtual void OnDevicesUpdated() = 0; +}; + +class AudioDeviceModule : public rtc::RefCountInterface { public: enum AudioLayer { kPlatformDefaultAudio = 0, @@ -56,12 +65,14 @@ class AudioDeviceModule : public webrtc::RefCountInterface { // Creates a default ADM for usage in production code. static rtc::scoped_refptr Create( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory); + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing = false); // Creates an ADM with support for extra test methods. Don't use this factory // in production code. static rtc::scoped_refptr CreateForTest( AudioLayer audio_layer, - TaskQueueFactory* task_queue_factory); + TaskQueueFactory* task_queue_factory, + bool bypass_voice_processing = false); // Retrieve the currently utilized audio layer virtual int32_t ActiveAudioLayer(AudioLayer* audioLayer) const = 0; @@ -171,6 +182,10 @@ class AudioDeviceModule : public webrtc::RefCountInterface { virtual int GetRecordAudioParameters(AudioParameters* params) const = 0; #endif // WEBRTC_IOS + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const { return -1; } + virtual int32_t GetPlayoutDevice() const { return -1; } + virtual int32_t GetRecordingDevice() const { return -1; } + protected: ~AudioDeviceModule() override {} }; diff --git a/modules/audio_device/mac/audio_device_mac.cc b/modules/audio_device/mac/audio_device_mac.cc index ed7b0e4669..0a371737b3 100644 --- a/modules/audio_device/mac/audio_device_mac.cc +++ b/modules/audio_device/mac/audio_device_mac.cc @@ -119,8 +119,6 @@ AudioDeviceMac::AudioDeviceMac() _twoDevices(true), _doStop(false), _doStopRec(false), - _macBookPro(false), - _macBookProPanRight(false), _captureLatencyUs(0), _renderLatencyUs(0), 
_captureDelayUs(0), @@ -277,8 +275,11 @@ AudioDeviceGeneric::InitStatus AudioDeviceMac::Init() { // but now must be explicitly specified. HAL would otherwise try to use the // main thread to issue notifications. AudioObjectPropertyAddress propertyAddress = { - kAudioHardwarePropertyRunLoop, kAudioObjectPropertyScopeGlobal, - kAudioObjectPropertyElementMaster}; + kAudioHardwarePropertyRunLoop, + kAudioObjectPropertyScopeGlobal, + kAudioObjectPropertyElementMaster + }; + CFRunLoopRef runLoop = NULL; UInt32 size = sizeof(CFRunLoopRef); int aoerr = AudioObjectSetPropertyData( @@ -294,22 +295,15 @@ AudioDeviceGeneric::InitStatus AudioDeviceMac::Init() { WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener( kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); - // Determine if this is a MacBook Pro - _macBookPro = false; - _macBookProPanRight = false; - char buf[128]; - size_t length = sizeof(buf); - memset(buf, 0, length); + // Listen for default output device change. + propertyAddress.mSelector = kAudioHardwarePropertyDefaultOutputDevice; + WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); - int intErr = sysctlbyname("hw.model", buf, &length, NULL, 0); - if (intErr != 0) { - RTC_LOG(LS_ERROR) << "Error in sysctlbyname(): " << err; - } else { - RTC_LOG(LS_VERBOSE) << "Hardware model: " << buf; - if (strncmp(buf, "MacBookPro", 10) == 0) { - _macBookPro = true; - } - } + // Listen for default input device change. + propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice; + WEBRTC_CA_LOG_ERR(AudioObjectAddPropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); _initialized = true; @@ -337,9 +331,21 @@ int32_t AudioDeviceMac::Terminate() { OSStatus err = noErr; int retVal = 0; + // Remove listeners for global scope. 
AudioObjectPropertyAddress propertyAddress = { - kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, - kAudioObjectPropertyElementMaster}; + kAudioHardwarePropertyDevices, // selector + kAudioObjectPropertyScopeGlobal, // scope + kAudioObjectPropertyElementMaster // element + }; + + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); + + propertyAddress.mSelector = kAudioHardwarePropertyDefaultOutputDevice; + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( + kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); + + propertyAddress.mSelector = kAudioHardwarePropertyDefaultInputDevice; WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( kAudioObjectSystemObject, &propertyAddress, &objectListenerProc, this)); @@ -789,6 +795,14 @@ int16_t AudioDeviceMac::PlayoutDevices() { MaxNumberDevices); } +int32_t AudioDeviceMac::GetPlayoutDevice() const { + if (_outputDeviceIsSpecified) { + return _outputDeviceIndex; + } + + return 0; +} + int32_t AudioDeviceMac::SetPlayoutDevice(uint16_t index) { MutexLock lock(&mutex_); @@ -830,13 +844,11 @@ int32_t AudioDeviceMac::PlayoutDeviceName(uint16_t index, } memset(name, 0, kAdmMaxDeviceNameSize); - - if (guid != NULL) { - memset(guid, 0, kAdmMaxGuidSize); - } + memset(guid, 0, kAdmMaxGuidSize); return GetDeviceName(kAudioDevicePropertyScopeOutput, index, - rtc::ArrayView(name, kAdmMaxDeviceNameSize)); + rtc::ArrayView(name, kAdmMaxDeviceNameSize), + rtc::ArrayView(guid, kAdmMaxGuidSize)); } int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index, @@ -855,7 +867,8 @@ int32_t AudioDeviceMac::RecordingDeviceName(uint16_t index, } return GetDeviceName(kAudioDevicePropertyScopeInput, index, - rtc::ArrayView(name, kAdmMaxDeviceNameSize)); + rtc::ArrayView(name, kAdmMaxDeviceNameSize), + rtc::ArrayView(guid, kAdmMaxGuidSize)); } int16_t AudioDeviceMac::RecordingDevices() { @@ -864,6 +877,14 @@ int16_t 
AudioDeviceMac::RecordingDevices() { MaxNumberDevices); } +int32_t AudioDeviceMac::GetRecordingDevice() const { + if (_inputDeviceIsSpecified) { + return _inputDeviceIndex; + } + + return 0; +} + int32_t AudioDeviceMac::SetRecordingDevice(uint16_t index) { if (_recIsInitialized) { return -1; @@ -979,34 +1000,8 @@ int32_t AudioDeviceMac::InitPlayout() { _renderDeviceIsAlive = 1; _doStop = false; - // The internal microphone of a MacBook Pro is located under the left speaker - // grille. When the internal speakers are in use, we want to fully stereo - // pan to the right. AudioObjectPropertyAddress propertyAddress = { kAudioDevicePropertyDataSource, kAudioDevicePropertyScopeOutput, 0}; - if (_macBookPro) { - _macBookProPanRight = false; - Boolean hasProperty = - AudioObjectHasProperty(_outputDeviceID, &propertyAddress); - if (hasProperty) { - UInt32 dataSource = 0; - size = sizeof(dataSource); - WEBRTC_CA_LOG_WARN(AudioObjectGetPropertyData( - _outputDeviceID, &propertyAddress, 0, NULL, &size, &dataSource)); - - if (dataSource == 'ispk') { - _macBookProPanRight = true; - RTC_LOG(LS_VERBOSE) - << "MacBook Pro using internal speakers; stereo panning right"; - } else { - RTC_LOG(LS_VERBOSE) << "MacBook Pro not using internal speakers"; - } - - // Add a listener to determine if the status changes. - WEBRTC_CA_LOG_WARN(AudioObjectAddPropertyListener( - _outputDeviceID, &propertyAddress, &objectListenerProc, this)); - } - } // Get current stream description propertyAddress.mSelector = kAudioDevicePropertyStreamFormat; @@ -1297,7 +1292,7 @@ int32_t AudioDeviceMac::StartRecording() { while (CaptureWorkerThread()) { } }, - "CaptureWorkerThread", + "Audio_CaptureWorkerThread", rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); OSStatus err = noErr; @@ -1390,7 +1385,11 @@ int32_t AudioDeviceMac::StopRecording() { // Remove listeners. 
AudioObjectPropertyAddress propertyAddress = { - kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeInput, 0}; + kAudioDevicePropertyStreamFormat, // selector + kAudioDevicePropertyScopeInput, // scope + 0, // element + }; + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( _inputDeviceID, &propertyAddress, &objectListenerProc, this)); @@ -1434,7 +1433,7 @@ int32_t AudioDeviceMac::StartPlayout() { while (RenderWorkerThread()) { } }, - "RenderWorkerThread", + "Audio_RenderWorkerThread", rtc::ThreadAttributes().SetPriority(rtc::ThreadPriority::kRealtime)); if (_twoDevices || !_recording) { @@ -1503,7 +1502,11 @@ int32_t AudioDeviceMac::StopPlayout() { // Remove listeners. AudioObjectPropertyAddress propertyAddress = { - kAudioDevicePropertyStreamFormat, kAudioDevicePropertyScopeOutput, 0}; + kAudioDevicePropertyStreamFormat, // selector + kAudioDevicePropertyScopeOutput, // scope + 0, // element + }; + WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( _outputDeviceID, &propertyAddress, &objectListenerProc, this)); @@ -1511,16 +1514,6 @@ int32_t AudioDeviceMac::StopPlayout() { WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( _outputDeviceID, &propertyAddress, &objectListenerProc, this)); - if (_macBookPro) { - Boolean hasProperty = - AudioObjectHasProperty(_outputDeviceID, &propertyAddress); - if (hasProperty) { - propertyAddress.mSelector = kAudioDevicePropertyDataSource; - WEBRTC_CA_LOG_WARN(AudioObjectRemovePropertyListener( - _outputDeviceID, &propertyAddress, &objectListenerProc, this)); - } - } - _playIsInitialized = false; _playing = false; @@ -1548,8 +1541,11 @@ int32_t AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope, OSStatus err = noErr; AudioObjectPropertyAddress propertyAddress = { - kAudioHardwarePropertyDevices, kAudioObjectPropertyScopeGlobal, - kAudioObjectPropertyElementMaster}; + kAudioHardwarePropertyDevices, + kAudioObjectPropertyScopeGlobal, + kAudioObjectPropertyElementMaster, + }; + UInt32 size = 0; 
WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyDataSize( kAudioObjectSystemObject, &propertyAddress, 0, NULL, &size)); @@ -1648,7 +1644,8 @@ int32_t AudioDeviceMac::GetNumberDevices(const AudioObjectPropertyScope scope, int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, const uint16_t index, - rtc::ArrayView name) { + rtc::ArrayView name, + rtc::ArrayView guid) { OSStatus err = noErr; AudioDeviceID deviceIds[MaxNumberDevices]; @@ -1685,10 +1682,9 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, isDefaultDevice = true; } } - AudioObjectPropertyAddress propertyAddress = {kAudioDevicePropertyDeviceName, scope, 0}; - + rtc::SimpleStringBuilder guid_ss(guid); if (isDefaultDevice) { std::array devName; UInt32 len = devName.size(); @@ -1698,6 +1694,7 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, rtc::SimpleStringBuilder ss(name); ss.AppendFormat("default (%s)", devName.data()); + guid_ss << "default"; } else { if (index < numberDevices) { usedID = deviceIds[index]; @@ -1705,7 +1702,7 @@ int32_t AudioDeviceMac::GetDeviceName(const AudioObjectPropertyScope scope, usedID = index; } UInt32 len = name.size(); - + guid_ss << std::to_string(deviceIds[index]); WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData( usedID, &propertyAddress, 0, NULL, &len, name.data())); } @@ -1916,6 +1913,66 @@ OSStatus AudioDeviceMac::implObjectListenerProc( HandleDataSourceChange(objectId, addresses[i]); } else if (addresses[i].mSelector == kAudioDeviceProcessorOverload) { HandleProcessorOverload(addresses[i]); + } else if (addresses[i].mSelector == kAudioHardwarePropertyDefaultOutputDevice) { + RTC_LOG(LS_VERBOSE) << "kAudioHardwarePropertyDefaultOutputDevice"; + // default audio output device changed + HandleDefaultOutputDeviceChange(); + } else if (addresses[i].mSelector == kAudioHardwarePropertyDefaultInputDevice) { + RTC_LOG(LS_VERBOSE) << "kAudioHardwarePropertyDefaultInputDevice"; + // default audio input 
device changed + HandleDefaultInputDeviceChange(); + } + } + + return 0; +} + +int32_t AudioDeviceMac::HandleDefaultOutputDeviceChange() { + + if (SpeakerIsInitialized()) { + RTC_LOG(LS_WARNING) << "Default audio output device has changed"; + int32_t renderDeviceIsAlive = _renderDeviceIsAlive; + bool wasPlaying = _playing && renderDeviceIsAlive == 1; + + if (wasPlaying && _outputDeviceIsSpecified && _outputDeviceIndex == 0) { + + StopPlayout(); + + // default is already selected _outputDeviceIndex(0) + // re-init and start playout + InitPlayout(); + StartPlayout(); + } + + // Notify default output device updated + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); + } + } + + return 0; +} + +int32_t AudioDeviceMac::HandleDefaultInputDeviceChange() { + + if (MicrophoneIsInitialized()) { + RTC_LOG(LS_WARNING) << "Default audio input device has changed"; + int32_t captureDeviceIsAlive = _captureDeviceIsAlive; + bool wasRecording = _recording && captureDeviceIsAlive == 1; + + if (wasRecording && _inputDeviceIsSpecified && _inputDeviceIndex == 0) { + + StopRecording(); + + // default is already selected _inputDeviceIndex(0) + // re-init and start recording + InitRecording(); + StartRecording(); + } + + // Notify default input device updated + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); } } @@ -1938,9 +1995,29 @@ int32_t AudioDeviceMac::HandleDeviceChange() { &size, &deviceIsAlive); if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) { - RTC_LOG(LS_WARNING) << "Capture device is not alive (probably removed)"; - _captureDeviceIsAlive = 0; - _mixerManager.CloseMicrophone(); + RTC_LOG(LS_WARNING) << "Audio input device is not alive (probably removed) deviceID: " << _inputDeviceID; + //AtomicSet32(&_captureDeviceIsAlive, 0); + + // Logic to switch to default device (if exists) + // when the current device is not alive anymore + int32_t captureDeviceIsAlive = _captureDeviceIsAlive; + bool 
wasRecording = _recording && captureDeviceIsAlive == 1; + + StopRecording(); + + // was playing & default device exists + if (wasRecording && SetRecordingDevice(0) == 0) { + InitRecording(); + StartRecording(); + } else { + _mixerManager.CloseMicrophone(); + } + + // Notify input device removed + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); + } + } else if (err != noErr) { logCAMsg(rtc::LS_ERROR, "Error in AudioDeviceGetProperty()", (const char*)&err); @@ -1957,9 +2034,29 @@ int32_t AudioDeviceMac::HandleDeviceChange() { &size, &deviceIsAlive); if (err == kAudioHardwareBadDeviceError || deviceIsAlive == 0) { - RTC_LOG(LS_WARNING) << "Render device is not alive (probably removed)"; - _renderDeviceIsAlive = 0; - _mixerManager.CloseSpeaker(); + RTC_LOG(LS_WARNING) << "Audio output device is not alive (probably removed) deviceID: " << _outputDeviceID; + // AtomicSet32(&_renderDeviceIsAlive, 0); // StopPlayout() does this + + // Logic to switch to default device (if exists) + // when the current device is not alive anymore + int32_t renderDeviceIsAlive = _renderDeviceIsAlive; + bool wasPlaying = _playing && renderDeviceIsAlive == 1; + + StopPlayout(); + + // was playing & default device exists + if (wasPlaying && SetPlayoutDevice(0) == 0) { + InitPlayout(); + StartPlayout(); + } else { + _mixerManager.CloseSpeaker(); + } + + // Notify output device removed + if (audio_device_module_sink_) { + audio_device_module_sink_->OnDevicesUpdated(); + } + } else if (err != noErr) { logCAMsg(rtc::LS_ERROR, "Error in AudioDeviceGetProperty()", (const char*)&err); @@ -2061,28 +2158,10 @@ int32_t AudioDeviceMac::HandleStreamFormatChange( int32_t AudioDeviceMac::HandleDataSourceChange( const AudioObjectID objectId, const AudioObjectPropertyAddress propertyAddress) { - OSStatus err = noErr; - - if (_macBookPro && - propertyAddress.mScope == kAudioDevicePropertyScopeOutput) { - RTC_LOG(LS_VERBOSE) << "Data source changed"; - - _macBookProPanRight = 
false; - UInt32 dataSource = 0; - UInt32 size = sizeof(UInt32); - WEBRTC_CA_RETURN_ON_ERR(AudioObjectGetPropertyData( - objectId, &propertyAddress, 0, NULL, &size, &dataSource)); - if (dataSource == 'ispk') { - _macBookProPanRight = true; - RTC_LOG(LS_VERBOSE) - << "MacBook Pro using internal speakers; stereo panning right"; - } else { - RTC_LOG(LS_VERBOSE) << "MacBook Pro not using internal speakers"; - } - } return 0; } + int32_t AudioDeviceMac::HandleProcessorOverload( const AudioObjectPropertyAddress propertyAddress) { // TODO(xians): we probably want to notify the user in some way of the @@ -2400,24 +2479,6 @@ bool AudioDeviceMac::RenderWorkerThread() { uint32_t nOutSamples = nSamples * _outDesiredFormat.mChannelsPerFrame; SInt16* pPlayBuffer = (SInt16*)&playBuffer; - if (_macBookProPanRight && (_playChannels == 2)) { - // Mix entirely into the right channel and zero the left channel. - SInt32 sampleInt32 = 0; - for (uint32_t sampleIdx = 0; sampleIdx < nOutSamples; sampleIdx += 2) { - sampleInt32 = pPlayBuffer[sampleIdx]; - sampleInt32 += pPlayBuffer[sampleIdx + 1]; - sampleInt32 /= 2; - - if (sampleInt32 > 32767) { - sampleInt32 = 32767; - } else if (sampleInt32 < -32768) { - sampleInt32 = -32768; - } - - pPlayBuffer[sampleIdx] = 0; - pPlayBuffer[sampleIdx + 1] = static_cast(sampleInt32); - } - } PaUtil_WriteRingBuffer(_paRenderBuffer, pPlayBuffer, nOutSamples); diff --git a/modules/audio_device/mac/audio_device_mac.h b/modules/audio_device/mac/audio_device_mac.h index bb06395d03..6cb5482a84 100644 --- a/modules/audio_device/mac/audio_device_mac.h +++ b/modules/audio_device/mac/audio_device_mac.h @@ -154,6 +154,13 @@ class AudioDeviceMac : public AudioDeviceGeneric { virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer) RTC_LOCKS_EXCLUDED(mutex_); + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink) RTC_LOCKS_EXCLUDED(mutex_) { + audio_device_module_sink_ = sink; + return 0; + } + virtual int32_t GetPlayoutDevice() const; + virtual int32_t 
GetRecordingDevice() const; + private: int32_t InitSpeakerLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); int32_t InitMicrophoneLocked() RTC_EXCLUSIVE_LOCKS_REQUIRED(mutex_); @@ -180,7 +187,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { int32_t GetDeviceName(AudioObjectPropertyScope scope, uint16_t index, - rtc::ArrayView name); + rtc::ArrayView name, + rtc::ArrayView guid); int32_t InitDevice(uint16_t userDeviceIndex, AudioDeviceID& deviceId, @@ -201,6 +209,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { const AudioObjectPropertyAddress addresses[]); int32_t HandleDeviceChange(); + int32_t HandleDefaultOutputDeviceChange(); + int32_t HandleDefaultInputDeviceChange(); int32_t HandleStreamFormatChange(AudioObjectID objectId, AudioObjectPropertyAddress propertyAddress); @@ -343,6 +353,8 @@ class AudioDeviceMac : public AudioDeviceGeneric { // Typing detection // 0x5c is key "9", after that comes function keys. bool prev_key_state_[0x5d]; + + AudioDeviceSink *audio_device_module_sink_ = nullptr; }; } // namespace webrtc diff --git a/modules/audio_device/win/audio_device_core_win.cc b/modules/audio_device/win/audio_device_core_win.cc index aa8b6a9ebe..c1c2b32a9b 100644 --- a/modules/audio_device/win/audio_device_core_win.cc +++ b/modules/audio_device/win/audio_device_core_win.cc @@ -461,6 +461,8 @@ AudioDeviceWindowsCore::AudioDeviceWindowsCore() _playChannelsPrioList[0] = 2; // stereo is prio 1 _playChannelsPrioList[1] = 1; // mono is prio 2 + _deviceStateListener = new DeviceStateListener(); + HRESULT hr; // We know that this API will work since it has already been verified in @@ -474,6 +476,8 @@ AudioDeviceWindowsCore::AudioDeviceWindowsCore() reinterpret_cast(&_ptrEnumerator)); RTC_DCHECK(_ptrEnumerator); + _ptrEnumerator->RegisterEndpointNotificationCallback(_deviceStateListener); + // DMO initialization for built-in WASAPI AEC. 
{ IMediaObject* ptrDMO = NULL; @@ -499,6 +503,8 @@ AudioDeviceWindowsCore::~AudioDeviceWindowsCore() { Terminate(); + _ptrEnumerator->UnregisterEndpointNotificationCallback(_deviceStateListener); + // The IMMDeviceEnumerator is created during construction. Must release // it here and not in Terminate() since we don't recreate it in Init(). SAFE_RELEASE(_ptrEnumerator); @@ -535,6 +541,11 @@ AudioDeviceWindowsCore::~AudioDeviceWindowsCore() { _hShutdownCaptureEvent = NULL; } + if(NULL != _deviceStateListener) { + delete _deviceStateListener; + _deviceStateListener = NULL; + } + if (_avrtLibrary) { BOOL freeOK = FreeLibrary(_avrtLibrary); if (!freeOK) { @@ -3894,6 +3905,65 @@ int32_t AudioDeviceWindowsCore::_GetDeviceID(IMMDevice* pDevice, return 0; } +int32_t AudioDeviceWindowsCore::SetAudioDeviceSink(AudioDeviceSink* sink) { + _deviceStateListener->SetAudioDeviceSink(sink); + return 0; +} + +void AudioDeviceWindowsCore::DeviceStateListener::SetAudioDeviceSink(AudioDeviceSink *sink) { + callback_ = sink; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDeviceStateChanged(LPCWSTR pwstrDeviceId, DWORD dwNewState) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDeviceStateChanged => " << pwstrDeviceId << ", NewState => " << dwNewState; + if(callback_) callback_->OnDevicesUpdated(); + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDeviceAdded(LPCWSTR pwstrDeviceId) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDeviceAdded => " << pwstrDeviceId; + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDeviceRemoved(LPCWSTR pwstrDeviceId) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDeviceRemoved => " << pwstrDeviceId; + return S_OK; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnDefaultDeviceChanged(EDataFlow flow, ERole role, LPCWSTR pwstrDefaultDeviceId) { + RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnDefaultDeviceChanged => " << pwstrDefaultDeviceId; + return S_OK; +} + 
+HRESULT AudioDeviceWindowsCore::DeviceStateListener::OnPropertyValueChanged(LPCWSTR pwstrDeviceId, const PROPERTYKEY key) { + //RTC_DLOG(LS_INFO) << "AudioDeviceWindowsCore::OnPropertyValueChanged => " << pwstrDeviceId; + return S_OK; +} + +ULONG AudioDeviceWindowsCore::DeviceStateListener::AddRef() { + ULONG new_ref = InterlockedIncrement(&ref_count_); + // RTC_DLOG(LS_INFO) << "__AddRef => " << new_ref; + return new_ref; +} + +ULONG AudioDeviceWindowsCore::DeviceStateListener::Release() { + ULONG new_ref = InterlockedDecrement(&ref_count_); + // RTC_DLOG(LS_INFO) << "__Release => " << new_ref; + return new_ref; +} + +HRESULT AudioDeviceWindowsCore::DeviceStateListener::QueryInterface(REFIID iid, void** object) { + if (object == nullptr) { + return E_POINTER; + } + if (iid == IID_IUnknown || iid == __uuidof(IMMNotificationClient)) { + *object = static_cast(this); + return S_OK; + } + *object = nullptr; + return E_NOINTERFACE; +} + // ---------------------------------------------------------------------------- // _GetDefaultDevice // ---------------------------------------------------------------------------- diff --git a/modules/audio_device/win/audio_device_core_win.h b/modules/audio_device/win/audio_device_core_win.h index 380effb449..10b6a92b7f 100644 --- a/modules/audio_device/win/audio_device_core_win.h +++ b/modules/audio_device/win/audio_device_core_win.h @@ -22,6 +22,8 @@ #include #include // IMediaObject #include // MMDevice +#include +#include #include "api/scoped_refptr.h" #include "modules/audio_device/audio_device_generic.h" @@ -50,6 +52,34 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { AudioDeviceWindowsCore(); ~AudioDeviceWindowsCore(); + class DeviceStateListener : public IMMNotificationClient { + public: + virtual ~DeviceStateListener() = default; + HRESULT __stdcall OnDeviceStateChanged(LPCWSTR pwstrDeviceId, + DWORD dwNewState) override; + HRESULT __stdcall OnDeviceAdded(LPCWSTR pwstrDeviceId) override; + + HRESULT __stdcall 
OnDeviceRemoved(LPCWSTR pwstrDeviceId) override; + + HRESULT + __stdcall OnDefaultDeviceChanged(EDataFlow flow, + ERole role, + LPCWSTR pwstrDefaultDeviceId) override; + + HRESULT __stdcall OnPropertyValueChanged(LPCWSTR pwstrDeviceId, + const PROPERTYKEY key) override; + // IUnknown (required by IMMNotificationClient). + ULONG __stdcall AddRef() override; + ULONG __stdcall Release() override; + HRESULT __stdcall QueryInterface(REFIID iid, void** object) override; + + void SetAudioDeviceSink(AudioDeviceSink *sink); + + private: + LONG ref_count_ = 1; + AudioDeviceSink *callback_ = nullptr; + }; + static bool CoreAudioIsSupported(); // Retrieve the currently utilized audio layer @@ -150,6 +180,8 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { virtual int32_t EnableBuiltInAEC(bool enable); + virtual int32_t SetAudioDeviceSink(AudioDeviceSink* sink); + public: virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer); @@ -237,6 +269,7 @@ class AudioDeviceWindowsCore : public AudioDeviceGeneric { IAudioEndpointVolume* _ptrCaptureVolume; ISimpleAudioVolume* _ptrRenderSimpleVolume; + DeviceStateListener *_deviceStateListener = nullptr; // DirectX Media Object (DMO) for the built-in AEC. rtc::scoped_refptr _dmo; rtc::scoped_refptr _mediaBuffer; diff --git a/modules/desktop_capture/mac/screen_capturer_mac.mm b/modules/desktop_capture/mac/screen_capturer_mac.mm index 60089fd0f2..aa05ceb9aa 100644 --- a/modules/desktop_capture/mac/screen_capturer_mac.mm +++ b/modules/desktop_capture/mac/screen_capturer_mac.mm @@ -216,16 +216,7 @@ DesktopRect GetExcludedWindowPixelBounds(CGWindowID window, float dip_to_pixel_s ScreenConfigurationChanged(); } - // When screen is zoomed in/out, OSX only updates the part of Rects currently - // displayed on screen, with relative location to current top-left on screen. - // This will cause problems when we copy the dirty regions to the captured - // image. So we invalidate the whole screen to copy all the screen contents. 
- // With CGI method, the zooming will be ignored and the whole screen contents - // will be captured as before. - // With IOSurface method, the zoomed screen contents will be captured. - if (UAZoomEnabled()) { - helper_.InvalidateScreen(screen_pixel_bounds_.size()); - } + helper_.InvalidateScreen(screen_pixel_bounds_.size()); DesktopRegion region; helper_.TakeInvalidRegion(®ion); diff --git a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc index 2bb71941f9..106adc0d39 100644 --- a/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc +++ b/modules/rtp_rtcp/source/rtp_sender_video_frame_transformer_delegate.cc @@ -92,6 +92,8 @@ class TransformableVideoSenderFrame : public TransformableVideoFrameInterface { csrcs_ = metadata.GetCsrcs(); } + const RTPVideoHeader& header() const override { return header_; } + const RTPVideoHeader& GetHeader() const { return header_; } uint8_t GetPayloadType() const override { return payload_type_; } absl::optional GetCodecType() const { return codec_type_; } diff --git a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc index fbd10c4c7b..4f24f4842f 100644 --- a/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc +++ b/modules/rtp_rtcp/source/rtp_video_stream_receiver_frame_transformer_delegate.cc @@ -72,6 +72,10 @@ class TransformableVideoReceiverFrame frame_->SetHeaderFromMetadata(new_metadata); } + const RTPVideoHeader& header () const override { + return frame_->GetRtpVideoHeader(); + } + std::unique_ptr ExtractFrame() && { return std::move(frame_); } diff --git a/pc/audio_rtp_receiver.cc b/pc/audio_rtp_receiver.cc index 6e7ca6d0b5..ae879ddb1b 100644 --- a/pc/audio_rtp_receiver.cc +++ b/pc/audio_rtp_receiver.cc @@ -298,7 +298,7 @@ void AudioRtpReceiver::Reconfigure(bool 
track_enabled) { media_channel_->SetFrameDecryptor(*signaled_ssrc_, frame_decryptor_); } - if (frame_transformer_) { + if (frame_transformer_ && track_enabled) { media_channel_->SetDepacketizerToDecoderFrameTransformer( signaled_ssrc_.value_or(0), frame_transformer_); } diff --git a/pc/media_stream_track_proxy.h b/pc/media_stream_track_proxy.h index 2af3aedb22..fab23d17ec 100644 --- a/pc/media_stream_track_proxy.h +++ b/pc/media_stream_track_proxy.h @@ -55,6 +55,8 @@ PROXY_SECONDARY_METHOD2(void, PROXY_SECONDARY_METHOD1(void, RemoveSink, rtc::VideoSinkInterface*) PROXY_SECONDARY_METHOD0(void, RequestRefreshFrame) BYPASS_PROXY_CONSTMETHOD0(VideoTrackSourceInterface*, GetSource) +PROXY_CONSTMETHOD0(bool, should_receive) +PROXY_METHOD1(void, set_should_receive, bool) PROXY_METHOD1(void, RegisterObserver, ObserverInterface*) PROXY_METHOD1(void, UnregisterObserver, ObserverInterface*) diff --git a/pc/peer_connection.cc b/pc/peer_connection.cc index 444a0bf618..950b71254e 100644 --- a/pc/peer_connection.cc +++ b/pc/peer_connection.cc @@ -445,6 +445,7 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( std::vector vpn_list; PortAllocatorConfig port_allocator_config; absl::optional pacer_burst_interval; + bool enable_any_address_ports; }; static_assert(sizeof(stuff_being_tested_for_equality) == sizeof(*this), "Did you add something to RTCConfiguration and forget to " @@ -506,7 +507,8 @@ bool PeerConnectionInterface::RTCConfiguration::operator==( port_allocator_config.min_port == o.port_allocator_config.min_port && port_allocator_config.max_port == o.port_allocator_config.max_port && port_allocator_config.flags == o.port_allocator_config.flags && - pacer_burst_interval == o.pacer_burst_interval; + pacer_burst_interval == o.pacer_burst_interval && + enable_any_address_ports == o.enable_any_address_ports; } bool PeerConnectionInterface::RTCConfiguration::operator!=( @@ -2148,6 +2150,11 @@ PeerConnection::InitializePortAllocator_n( RTC_LOG(LS_INFO) << "Disable 
candidates on link-local network interfaces."; } + if (configuration.enable_any_address_ports) { + port_allocator_flags |= cricket::PORTALLOCATOR_ENABLE_ANY_ADDRESS_PORTS; + RTC_LOG(LS_INFO) << "Enable gathering on any address ports."; + } + port_allocator_->set_flags(port_allocator_flags); // No step delay is used while allocating ports. port_allocator_->set_step_delay(cricket::kMinimumStepDelay); diff --git a/pc/video_rtp_receiver.cc b/pc/video_rtp_receiver.cc index 4432982027..1ed13b9088 100644 --- a/pc/video_rtp_receiver.cc +++ b/pc/video_rtp_receiver.cc @@ -41,15 +41,20 @@ VideoRtpReceiver::VideoRtpReceiver( rtc::Thread::Current(), worker_thread, VideoTrack::Create(receiver_id, source_, worker_thread))), - attachment_id_(GenerateUniqueId()) { + cached_track_should_receive_(track_->should_receive()), + attachment_id_(GenerateUniqueId()), + worker_thread_safety_(PendingTaskSafetyFlag::CreateDetachedInactive()) { RTC_DCHECK(worker_thread_); SetStreams(streams); + track_->RegisterObserver(this); RTC_DCHECK_EQ(source_->state(), MediaSourceInterface::kInitializing); } VideoRtpReceiver::~VideoRtpReceiver() { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); RTC_DCHECK(!media_channel_); + + track_->UnregisterObserver(this); } std::vector VideoRtpReceiver::stream_ids() const { @@ -114,6 +119,39 @@ void VideoRtpReceiver::Stop() { track_->internal()->set_ended(); } +void VideoRtpReceiver::OnChanged() { + RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + if (cached_track_should_receive_ != track_->should_receive()) { + cached_track_should_receive_ = track_->should_receive(); + worker_thread_->PostTask( + [this, receive = cached_track_should_receive_]() { + RTC_DCHECK_RUN_ON(worker_thread_); + if(receive) { + StartMediaChannel(); + } else { + StopMediaChannel(); + } + }); + } +} + +void VideoRtpReceiver::StartMediaChannel() { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_) { + return; + } + media_channel_->StartReceive(signaled_ssrc_.value_or(0)); + 
OnGenerateKeyFrame(); +} + +void VideoRtpReceiver::StopMediaChannel() { + RTC_DCHECK_RUN_ON(worker_thread_); + if (!media_channel_) { + return; + } + media_channel_->StopReceive(signaled_ssrc_.value_or(0)); +} + void VideoRtpReceiver::RestartMediaChannel(absl::optional ssrc) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); MediaSourceInterface::SourceState state = source_->state(); @@ -209,6 +247,7 @@ void VideoRtpReceiver::set_transport( void VideoRtpReceiver::SetStreams( const std::vector>& streams) { RTC_DCHECK_RUN_ON(&signaling_thread_checker_); + // Remove remote track from any streams that are going away. for (const auto& existing_stream : streams_) { bool removed = true; diff --git a/pc/video_rtp_receiver.h b/pc/video_rtp_receiver.h index ef88016052..491efe2931 100644 --- a/pc/video_rtp_receiver.h +++ b/pc/video_rtp_receiver.h @@ -42,7 +42,8 @@ namespace webrtc { -class VideoRtpReceiver : public RtpReceiverInternal { +class VideoRtpReceiver : public RtpReceiverInternal, + public ObserverInterface { public: // An SSRC of 0 will create a receiver that will match the first SSRC it // sees. Must be called on signaling thread. 
@@ -60,6 +61,9 @@ class VideoRtpReceiver : public RtpReceiverInternal { rtc::scoped_refptr video_track() const { return track_; } + // ObserverInterface implementation + void OnChanged() override; + // RtpReceiverInterface implementation rtc::scoped_refptr track() const override { return track_; @@ -115,6 +119,8 @@ class VideoRtpReceiver : public RtpReceiverInternal { cricket::MediaReceiveChannelInterface* media_channel); private: + void StartMediaChannel(); + void StopMediaChannel(); void RestartMediaChannel(absl::optional ssrc) RTC_RUN_ON(&signaling_thread_checker_); void RestartMediaChannel_w(absl::optional ssrc, @@ -162,6 +168,8 @@ class VideoRtpReceiver : public RtpReceiverInternal { RTC_GUARDED_BY(&signaling_thread_checker_) = nullptr; bool received_first_packet_ RTC_GUARDED_BY(&signaling_thread_checker_) = false; + + bool cached_track_should_receive_ RTC_GUARDED_BY(&signaling_thread_checker_); const int attachment_id_; rtc::scoped_refptr frame_decryptor_ RTC_GUARDED_BY(worker_thread_); @@ -177,6 +185,7 @@ class VideoRtpReceiver : public RtpReceiverInternal { // or switched. 
bool saved_generate_keyframe_ RTC_GUARDED_BY(worker_thread_) = false; bool saved_encoded_sink_enabled_ RTC_GUARDED_BY(worker_thread_) = false; + const rtc::scoped_refptr worker_thread_safety_; }; } // namespace webrtc diff --git a/pc/video_track.cc b/pc/video_track.cc index 0bf8687af3..8922cdaf1f 100644 --- a/pc/video_track.cc +++ b/pc/video_track.cc @@ -76,6 +76,19 @@ VideoTrackSourceInterface* VideoTrack::GetSourceInternal() const { return video_source_->internal(); } +void VideoTrack::set_should_receive(bool receive) { + RTC_DCHECK_RUN_ON(&signaling_thread_); + if (should_receive_ == receive) + return; + should_receive_ = receive; + Notifier::FireOnChanged(); +} + +bool VideoTrack::should_receive() const { + RTC_DCHECK_RUN_ON(&signaling_thread_); + return should_receive_; +} + VideoTrackInterface::ContentHint VideoTrack::content_hint() const { RTC_DCHECK_RUN_ON(&signaling_thread_); return content_hint_; diff --git a/pc/video_track.h b/pc/video_track.h index e504182c82..521692787b 100644 --- a/pc/video_track.h +++ b/pc/video_track.h @@ -48,6 +48,9 @@ class VideoTrack : public MediaStreamTrack, void RequestRefreshFrame() override; VideoTrackSourceInterface* GetSource() const override; + void set_should_receive(bool should_receive) override; + bool should_receive() const override; + ContentHint content_hint() const override; void set_content_hint(ContentHint hint) override; bool set_enabled(bool enable) override; @@ -81,6 +84,7 @@ class VideoTrack : public MediaStreamTrack, // be queried without blocking on the worker thread by callers that don't // use an api proxy to call the `enabled()` method. 
bool enabled_w_ RTC_GUARDED_BY(worker_thread_) = true; + bool should_receive_ RTC_GUARDED_BY(signaling_thread_) = true; }; } // namespace webrtc diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index d31673eb7a..a6c20de1f1 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -119,6 +119,7 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrame.mm", "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", + "objc/base/RTCAudioRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", ] @@ -143,12 +144,15 @@ if (is_ios || is_mac) { "objc/helpers/RTCDispatcher+Private.h", "objc/helpers/RTCDispatcher.h", "objc/helpers/RTCDispatcher.m", + "objc/helpers/RTCYUVHelper.h", + "objc/helpers/RTCYUVHelper.mm", "objc/helpers/scoped_cftyperef.h", ] deps = [ ":base_objc", "../rtc_base:checks", + "//third_party/libyuv", ] absl_deps = [ "//third_party/abseil-cpp/absl/strings" ] @@ -407,7 +411,12 @@ if (is_ios || is_mac) { "objc/components/network/RTCNetworkMonitor.mm", ] - configs += [ ":used_from_extension" ] + configs += [ + "..:common_objc", + ":used_from_extension", + ] + + public_configs = [ ":common_config_objc" ] frameworks = [ "Network.framework" ] @@ -616,17 +625,13 @@ if (is_ios || is_mac) { "Metal.framework", "MetalKit.framework", ] - if (is_ios) { + if (is_ios || is_mac) { sources += [ "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLVideoView.m", ] } if (is_mac) { - sources += [ - "objc/components/renderer/metal/RTCMTLNSVideoView.h", - "objc/components/renderer/metal/RTCMTLNSVideoView.m", - ] frameworks += [ "AppKit.framework" ] } deps = [ @@ -682,6 +687,43 @@ if (is_ios || is_mac) { "../rtc_base/system:gcd_helpers", ] } + + rtc_library("desktopcapture_objc") { + visibility = [ "*" ] + sources = [ + "objc/components/capturer/RTCDesktopCapturer+Private.h", + "objc/components/capturer/RTCDesktopCapturer.h", + "objc/components/capturer/RTCDesktopCapturer.mm", + "objc/components/capturer/RTCDesktopSource+Private.h", + 
"objc/components/capturer/RTCDesktopSource.h", + "objc/components/capturer/RTCDesktopSource.mm", + "objc/components/capturer/RTCDesktopMediaList+Private.h", + "objc/components/capturer/RTCDesktopMediaList.h", + "objc/components/capturer/RTCDesktopMediaList.mm", + "objc/native/src/objc_desktop_capture.h", + "objc/native/src/objc_desktop_capture.mm", + "objc/native/src/objc_desktop_media_list.h", + "objc/native/src/objc_desktop_media_list.mm", + ] + frameworks = [ + "AppKit.framework", + ] + + configs += [ "..:common_objc" ] + + public_configs = [ ":common_config_objc" ] + + deps = [ + ":base_objc", + ":helpers_objc", + ":videoframebuffer_objc", + "../rtc_base/system:gcd_helpers", + "../modules/desktop_capture", + ] + if(is_mac) { + deps += [ "//third_party:jpeg", ] + } + } rtc_library("videocodec_objc") { visibility = [ "*" ] @@ -722,6 +764,7 @@ if (is_ios || is_mac) { ] deps = [ + ":simulcast", ":base_objc", ":native_video", ":videocodec_objc", @@ -821,6 +864,22 @@ if (is_ios || is_mac) { ] } + rtc_library("simulcast") { + sources = [ + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.mm", + ] + + deps = [ + ":base_objc", + ":wrapped_native_codec_objc", + "../media:rtc_media_base", + "../media:rtc_simulcast_encoder_adapter", + ] + } + rtc_library("mediaconstraints_objc") { configs += [ "..:no_global_constructors" ] sources = [ @@ -922,6 +981,11 @@ if (is_ios || is_mac) { ] configs += [ "..:no_global_constructors" ] sources = [ + "objc/api/peerconnection/RTCAudioDeviceModule.h", + "objc/api/peerconnection/RTCAudioDeviceModule+Private.h", + "objc/api/peerconnection/RTCAudioDeviceModule.mm", + "objc/api/peerconnection/RTCIODevice.h", + "objc/api/peerconnection/RTCIODevice.mm", "objc/api/peerconnection/RTCAudioSource+Private.h", "objc/api/peerconnection/RTCAudioSource.h", 
"objc/api/peerconnection/RTCAudioSource.mm", @@ -947,6 +1011,12 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCDtmfSender.mm", "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCFieldTrials.mm", + "objc/api/peerconnection/RTCFrameCryptor+Private.h", + "objc/api/peerconnection/RTCFrameCryptor.h", + "objc/api/peerconnection/RTCFrameCryptor.mm", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm", "objc/api/peerconnection/RTCIceCandidate+Private.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidate.mm", @@ -1034,6 +1104,20 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCVideoTrack+Private.h", "objc/api/peerconnection/RTCVideoTrack.h", "objc/api/peerconnection/RTCVideoTrack.mm", + "objc/components/audio/RTCAudioBuffer.h", + "objc/components/audio/RTCAudioBuffer.mm", + "objc/components/audio/RTCAudioBuffer+Private.h", + "objc/components/audio/RTCAudioCustomProcessingAdapter.h", + "objc/components/audio/RTCAudioCustomProcessingAdapter.mm", + "objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h", + "objc/components/audio/RTCAudioCustomProcessingDelegate.h", + "objc/components/audio/RTCAudioProcessingConfig.h", + "objc/components/audio/RTCAudioProcessingConfig.mm", + "objc/components/audio/RTCAudioProcessingConfig+Private.h", + "objc/components/audio/RTCAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.mm", + "objc/components/audio/RTCDefaultAudioProcessingModule+Private.h", ] configs += [ @@ -1057,6 +1141,7 @@ if (is_ios || is_mac) { ":videorendereradapter_objc", ":videosource_objc", ":videotoolbox_objc", + "../api/crypto:frame_crypto_transformer", "../api:dtmf_sender_interface", "../api:enable_media", "../api:libjingle_peerconnection_api", @@ -1250,6 +1335,13 @@ if 
(is_ios || is_mac) { } } + bundle_data("darwin_privacy_info") { + sources = [ + "objc/PrivacyInfo.xcprivacy", + ] + outputs = [ "{{bundle_resources_dir}}/{{source_file_part}}" ] + } + if (is_ios) { apple_framework_bundle_with_umbrella_header("framework_objc") { info_plist = "objc/Info.plist" @@ -1275,6 +1367,7 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrame.h", "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", + "objc/base/RTCAudioRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", "objc/components/audio/RTCAudioDevice.h", "objc/components/audio/RTCAudioSession.h", @@ -1296,12 +1389,17 @@ if (is_ios || is_mac) { "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCCameraPreviewView.h", "objc/helpers/RTCDispatcher.h", + "objc/helpers/RTCYUVHelper.h", "objc/helpers/UIDevice+RTCDevice.h", + "objc/api/peerconnection/RTCAudioDeviceModule.h", + "objc/api/peerconnection/RTCIODevice.h", "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioTrack.h", "objc/api/peerconnection/RTCConfiguration.h", "objc/api/peerconnection/RTCDataChannel.h", "objc/api/peerconnection/RTCDataChannelConfiguration.h", + "objc/api/peerconnection/RTCFrameCryptor.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidateErrorEvent.h", @@ -1345,6 +1443,9 @@ if (is_ios || is_mac) { "objc/api/video_codec/RTCVideoEncoderAV1.h", "objc/api/video_frame_buffer/RTCNativeI420Buffer.h", "objc/api/video_frame_buffer/RTCNativeMutableI420Buffer.h", + # Added for Simulcast support + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", ] if (!build_with_chromium) { @@ -1374,6 +1475,7 @@ if (is_ios || is_mac) { ":videocapture_objc", ":videocodec_objc", ":videotoolbox_objc", + ":darwin_privacy_info", ] if (!build_with_chromium) { deps += [ @@ -1411,6 +1513,8 @@ if 
(is_ios || is_mac) { output_name = "WebRTC" sources = [ + "objc/api/peerconnection/RTCAudioDeviceModule.h", + "objc/api/peerconnection/RTCIODevice.h", "objc/api/peerconnection/RTCAudioSource.h", "objc/api/peerconnection/RTCAudioTrack.h", "objc/api/peerconnection/RTCCertificate.h", @@ -1419,6 +1523,8 @@ if (is_ios || is_mac) { "objc/api/peerconnection/RTCDataChannel.h", "objc/api/peerconnection/RTCDataChannelConfiguration.h", "objc/api/peerconnection/RTCDtmfSender.h", + "objc/api/peerconnection/RTCFrameCryptor.h", + "objc/api/peerconnection/RTCFrameCryptorKeyProvider.h", "objc/api/peerconnection/RTCFieldTrials.h", "objc/api/peerconnection/RTCIceCandidate.h", "objc/api/peerconnection/RTCIceCandidateErrorEvent.h", @@ -1477,9 +1583,14 @@ if (is_ios || is_mac) { "objc/base/RTCVideoFrame.h", "objc/base/RTCVideoFrameBuffer.h", "objc/base/RTCVideoRenderer.h", + "objc/base/RTCAudioRenderer.h", "objc/base/RTCYUVPlanarBuffer.h", "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCFileVideoCapturer.h", + "objc/components/capturer/RTCDesktopCapturer.h", + "objc/components/capturer/RTCDesktopSource.h", + "objc/components/capturer/RTCDesktopMediaList.h", + "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLNSVideoView.h", "objc/components/renderer/opengl/RTCVideoViewShading.h", "objc/components/video_codec/RTCCodecSpecificInfoH264.h", @@ -1492,6 +1603,10 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoEncoderH264.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", "objc/helpers/RTCDispatcher.h", + "objc/helpers/RTCYUVHelper.h", + # Added for Simulcast support + "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", + "objc/api/video_codec/RTCVideoEncoderSimulcast.h", ] if (!build_with_chromium) { sources += [ @@ -1507,8 +1622,10 @@ if (is_ios || is_mac) { ":native_video", ":peerconnectionfactory_base_objc", ":videocapture_objc", + ":desktopcapture_objc", ":videocodec_objc", 
":videotoolbox_objc", + ":darwin_privacy_info", ] if (!build_with_chromium) { deps += [ diff --git a/sdk/android/BUILD.gn b/sdk/android/BUILD.gn index 8858d21b60..7c05290942 100644 --- a/sdk/android/BUILD.gn +++ b/sdk/android/BUILD.gn @@ -261,12 +261,18 @@ if (is_android) { "api/org/webrtc/AudioProcessingFactory.java", "api/org/webrtc/AudioSource.java", "api/org/webrtc/AudioTrack.java", + "api/org/webrtc/AudioTrackSink.java", "api/org/webrtc/CallSessionFileRotatingLogSink.java", "api/org/webrtc/CandidatePairChangeEvent.java", "api/org/webrtc/CryptoOptions.java", "api/org/webrtc/DataChannel.java", "api/org/webrtc/DtmfSender.java", + "api/org/webrtc/ExternalAudioProcessingFactory.java", "api/org/webrtc/FecControllerFactoryFactoryInterface.java", + "api/org/webrtc/FrameCryptor.java", + "api/org/webrtc/FrameCryptorAlgorithm.java", + "api/org/webrtc/FrameCryptorFactory.java", + "api/org/webrtc/FrameCryptorKeyProvider.java", "api/org/webrtc/FrameDecryptor.java", "api/org/webrtc/FrameEncryptor.java", "api/org/webrtc/IceCandidate.java", @@ -365,6 +371,7 @@ if (is_android) { sources = [ "api/org/webrtc/DefaultVideoDecoderFactory.java", "api/org/webrtc/DefaultVideoEncoderFactory.java", + "api/org/webrtc/WrappedVideoDecoderFactory.java", ] deps = [ @@ -528,6 +535,8 @@ if (is_android) { sources = [ "api/org/webrtc/SoftwareVideoDecoderFactory.java", "api/org/webrtc/SoftwareVideoEncoderFactory.java", + "api/org/webrtc/SimulcastVideoEncoder.java", + "api/org/webrtc/SimulcastVideoEncoderFactory.java", ] deps = [ @@ -717,6 +726,8 @@ if (current_os == "linux" || is_android) { "src/jni/pc/add_ice_candidate_observer.cc", "src/jni/pc/add_ice_candidate_observer.h", "src/jni/pc/android_network_monitor.h", + "src/jni/pc/audio_sink.cc", + "src/jni/pc/audio_sink.h", "src/jni/pc/audio_track.cc", "src/jni/pc/call_session_file_rotating_log_sink.cc", "src/jni/pc/crypto_options.cc", @@ -724,6 +735,15 @@ if (current_os == "linux" || is_android) { "src/jni/pc/data_channel.cc", 
"src/jni/pc/data_channel.h", "src/jni/pc/dtmf_sender.cc", + "src/jni/pc/external_audio_processing_factory.cc", + "src/jni/pc/external_audio_processing_factory.h", + "src/jni/pc/external_audio_processing_interface.h", + "src/jni/pc/external_audio_processor.cc", + "src/jni/pc/external_audio_processor.h", + "src/jni/pc/frame_cryptor.cc", + "src/jni/pc/frame_cryptor.h", + "src/jni/pc/frame_cryptor_key_provider.cc", + "src/jni/pc/frame_cryptor_key_provider.h", "src/jni/pc/ice_candidate.cc", "src/jni/pc/ice_candidate.h", "src/jni/pc/media_constraints.cc", @@ -782,6 +802,7 @@ if (current_os == "linux" || is_android) { "../../api:rtp_parameters", "../../api:rtp_sender_interface", "../../api:turn_customizer", + "../../api/crypto:frame_crypto_transformer", "../../api/crypto:options", "../../api/rtc_event_log:rtc_event_log_factory", "../../api/task_queue:default_task_queue_factory", @@ -899,6 +920,21 @@ if (current_os == "linux" || is_android) { ] } + rtc_library("simulcast_jni") { + visibility = [ "*" ] + allow_poison = [ "software_video_codecs" ] + sources = [ + "src/jni/simulcast_video_encoder.cc", + "src/jni/simulcast_video_encoder.h" + ] + deps = [ + ":base_jni", + ":video_jni", + ":native_api_codecs", + "../../media:rtc_simulcast_encoder_adapter" + ] + } + rtc_library("swcodecs_jni") { visibility = [ "*" ] allow_poison = [ "software_video_codecs" ] @@ -912,6 +948,7 @@ if (current_os == "linux" || is_android) { ":libvpx_vp8_jni", ":libvpx_vp9_jni", ":native_api_jni", + ":simulcast_jni", ":video_jni", "../../api/environment", "../../api/video_codecs:builtin_video_decoder_factory", @@ -1396,11 +1433,16 @@ if (current_os == "linux" || is_android) { sources = [ "api/org/webrtc/AddIceObserver.java", "api/org/webrtc/AudioTrack.java", + "api/org/webrtc/AudioTrackSink.java", "api/org/webrtc/CallSessionFileRotatingLogSink.java", "api/org/webrtc/CandidatePairChangeEvent.java", "api/org/webrtc/CryptoOptions.java", "api/org/webrtc/DataChannel.java", "api/org/webrtc/DtmfSender.java", 
+ "api/org/webrtc/ExternalAudioProcessingFactory.java", + "api/org/webrtc/FrameCryptor.java", + "api/org/webrtc/FrameCryptorFactory.java", + "api/org/webrtc/FrameCryptorKeyProvider.java", "api/org/webrtc/IceCandidate.java", "api/org/webrtc/IceCandidateErrorEvent.java", "api/org/webrtc/MediaConstraints.java", diff --git a/sdk/android/api/org/webrtc/AudioTrack.java b/sdk/android/api/org/webrtc/AudioTrack.java index ca745db634..b30e46cebc 100644 --- a/sdk/android/api/org/webrtc/AudioTrack.java +++ b/sdk/android/api/org/webrtc/AudioTrack.java @@ -10,8 +10,12 @@ package org.webrtc; +import java.util.IdentityHashMap; + /** Java wrapper for a C++ AudioTrackInterface */ public class AudioTrack extends MediaStreamTrack { + private final IdentityHashMap<AudioTrackSink, Long> sinks = new IdentityHashMap<>(); + public AudioTrack(long nativeTrack) { super(nativeTrack); } @@ -23,10 +27,54 @@ public void setVolume(double volume) { nativeSetVolume(getNativeAudioTrack(), volume); } + /** + * Adds an AudioTrackSink to the track. This callback is only + * called for remote audio tracks. + * + * Repeated addSink calls will not add the sink multiple times. + */ + public void addSink(AudioTrackSink sink) { + if (sink == null) { + throw new IllegalArgumentException("The AudioTrackSink is not allowed to be null"); + } + if (!sinks.containsKey(sink)) { + final long nativeSink = nativeWrapSink(sink); + sinks.put(sink, nativeSink); + nativeAddSink(getNativeMediaStreamTrack(), nativeSink); + } + } + + /** + * Removes an AudioTrackSink from the track. + * + * If the AudioTrackSink was not attached to the track, this is a no-op.
+ */ + public void removeSink(AudioTrackSink sink) { + final Long nativeSink = sinks.remove(sink); + if (nativeSink != null) { + nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink); + nativeFreeSink(nativeSink); + } + } + + @Override + public void dispose() { + for (long nativeSink : sinks.values()) { + nativeRemoveSink(getNativeMediaStreamTrack(), nativeSink); + nativeFreeSink(nativeSink); + } + sinks.clear(); + super.dispose(); + } + /** Returns a pointer to webrtc::AudioTrackInterface. */ long getNativeAudioTrack() { return getNativeMediaStreamTrack(); } private static native void nativeSetVolume(long track, double volume); + private static native void nativeAddSink(long track, long nativeSink); + private static native void nativeRemoveSink(long track, long nativeSink); + private static native long nativeWrapSink(AudioTrackSink sink); + private static native void nativeFreeSink(long sink); } diff --git a/sdk/android/api/org/webrtc/AudioTrackSink.java b/sdk/android/api/org/webrtc/AudioTrackSink.java new file mode 100644 index 0000000000..eca390f82c --- /dev/null +++ b/sdk/android/api/org/webrtc/AudioTrackSink.java @@ -0,0 +1,27 @@ +/* + * Copyright 2023 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +/** + * Java version of rtc::AudioTrackSinkInterface. + */ +public interface AudioTrackSink { + /** + * Implementations should copy the audio data into a local copy if they wish + * to use the data after this function returns. 
+ */ + @CalledByNative + void onData(ByteBuffer audioData, int bitsPerSample, int sampleRate, + int numberOfChannels, int numberOfFrames, + long absoluteCaptureTimestampMs); +} diff --git a/sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java b/sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java new file mode 100644 index 0000000000..7425d2af57 --- /dev/null +++ b/sdk/android/api/org/webrtc/ExternalAudioProcessingFactory.java @@ -0,0 +1,144 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +import java.nio.ByteBuffer; + +import androidx.annotation.Nullable; +import org.webrtc.AudioProcessingFactory; + + +public class ExternalAudioProcessingFactory implements AudioProcessingFactory { + + /** + * Interface for external audio processing. + */ + public static interface AudioProcessing { + /** + * Called when the processor should be initialized with a new sample rate and + * number of channels. + */ + @CalledByNative("AudioProcessing") + void initialize(int sampleRateHz, int numChannels); + /** Called when the processor should be reset with a new sample rate. */ + @CalledByNative("AudioProcessing") + void reset(int newRate); + /** + * Processes the given capture or render signal. NOTE: `buffer.data` will be + * freed once this function returns so callers who want to use the data + * asynchronously must make sure to copy it first. 
+ */ + @CalledByNative("AudioProcessing") + void process(int numBands, int numFrames, ByteBuffer buffer); + } + + private long apmPtr; + private long capturePostProcessingPtr; + private long renderPreProcessingPtr; + + public ExternalAudioProcessingFactory() { + apmPtr = nativeGetDefaultApm(); + capturePostProcessingPtr = 0; + renderPreProcessingPtr = 0; + } + + @Override + public long createNative() { + if(apmPtr == 0) { + apmPtr = nativeGetDefaultApm(); + } + return apmPtr; + } + + /** + * Sets the capture post processing module. + * This module is applied to the audio signal after capture and before sending + * to the audio encoder. + */ + public void setCapturePostProcessing(@Nullable AudioProcessing processing) { + checkExternalAudioProcessorExists(); + long newPtr = nativeSetCapturePostProcessing(processing); + if (capturePostProcessingPtr != 0) { + JniCommon.nativeReleaseRef(capturePostProcessingPtr); + capturePostProcessingPtr = 0; + } + capturePostProcessingPtr = newPtr; + } + + /** + * Sets the render pre processing module. + * This module is applied to the audio signal after receiving from the audio + * decoder and before rendering. + */ + public void setRenderPreProcessing(@Nullable AudioProcessing processing) { + checkExternalAudioProcessorExists(); + long newPtr = nativeSetRenderPreProcessing(processing); + if (renderPreProcessingPtr != 0) { + JniCommon.nativeReleaseRef(renderPreProcessingPtr); + renderPreProcessingPtr = 0; + } + renderPreProcessingPtr = newPtr; + } + + /** + * Sets the bypass flag for the capture post processing module. + * If true, the registered audio processing will be bypassed. + */ + public void setBypassFlagForCapturePost( boolean bypass) { + checkExternalAudioProcessorExists(); + nativeSetBypassFlagForCapturePost(bypass); + } + + /** + * Sets the bypass flag for the render pre processing module. + * If true, the registered audio processing will be bypassed. 
+ */ + public void setBypassFlagForRenderPre( boolean bypass) { + checkExternalAudioProcessorExists(); + nativeSetBypassFlagForRenderPre(bypass); + } + + /** + * Destroys the ExternalAudioProcessor. + */ + public void destroy() { + checkExternalAudioProcessorExists(); + if (renderPreProcessingPtr != 0) { + JniCommon.nativeReleaseRef(renderPreProcessingPtr); + renderPreProcessingPtr = 0; + } + if (capturePostProcessingPtr != 0) { + JniCommon.nativeReleaseRef(capturePostProcessingPtr); + capturePostProcessingPtr = 0; + } + nativeDestroy(); + apmPtr = 0; + } + + private void checkExternalAudioProcessorExists() { + if (apmPtr == 0) { + throw new IllegalStateException("ExternalAudioProcessor has been disposed."); + } + } + + private static native long nativeGetDefaultApm(); + private static native long nativeSetCapturePostProcessing(AudioProcessing processing); + private static native long nativeSetRenderPreProcessing(AudioProcessing processing); + private static native void nativeSetBypassFlagForCapturePost(boolean bypass); + private static native void nativeSetBypassFlagForRenderPre(boolean bypass); + private static native void nativeDestroy(); +} diff --git a/sdk/android/api/org/webrtc/FrameCryptor.java b/sdk/android/api/org/webrtc/FrameCryptor.java new file mode 100644 index 0000000000..d633e05005 --- /dev/null +++ b/sdk/android/api/org/webrtc/FrameCryptor.java @@ -0,0 +1,108 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.webrtc; + +import androidx.annotation.Nullable; + +public class FrameCryptor { + public enum FrameCryptionState { + NEW, + OK, + ENCRYPTIONFAILED, + DECRYPTIONFAILED, + MISSINGKEY, + KEYRATCHETED, + INTERNALERROR; + + @CalledByNative("FrameCryptionState") + static FrameCryptionState fromNativeIndex(int nativeIndex) { + return values()[nativeIndex]; + } + } + + public static interface Observer { + @CalledByNative("Observer") + void onFrameCryptionStateChanged(String participantId, FrameCryptionState newState); + } + + private long nativeFrameCryptor; + private long observerPtr; + + public long getNativeFrameCryptor() { + return nativeFrameCryptor; + } + + @CalledByNative + public FrameCryptor(long nativeFrameCryptor) { + this.nativeFrameCryptor = nativeFrameCryptor; + this.observerPtr = 0; + } + + public void setEnabled(boolean enabled) { + checkFrameCryptorExists(); + nativeSetEnabled(nativeFrameCryptor, enabled); + } + + public boolean isEnabled() { + checkFrameCryptorExists(); + return nativeIsEnabled(nativeFrameCryptor); + } + + public int getKeyIndex() { + checkFrameCryptorExists(); + return nativeGetKeyIndex(nativeFrameCryptor); + } + + public void setKeyIndex(int index) { + checkFrameCryptorExists(); + nativeSetKeyIndex(nativeFrameCryptor, index); + } + + public void dispose() { + checkFrameCryptorExists(); + nativeUnSetObserver(nativeFrameCryptor); + JniCommon.nativeReleaseRef(nativeFrameCryptor); + nativeFrameCryptor = 0; + if (observerPtr != 0) { + JniCommon.nativeReleaseRef(observerPtr); + observerPtr = 0; + } + } + + public void setObserver(@Nullable Observer observer) { + checkFrameCryptorExists(); + long newPtr = nativeSetObserver(nativeFrameCryptor, observer); + if (observerPtr != 0) { + JniCommon.nativeReleaseRef(observerPtr); + observerPtr = 0; + } + observerPtr = newPtr; + } + + private void checkFrameCryptorExists() { + if (nativeFrameCryptor == 0) { + throw new IllegalStateException("FrameCryptor has been disposed."); + } + }
+ + private static native void nativeSetEnabled(long frameCryptorPointer, boolean enabled); + private static native boolean nativeIsEnabled(long frameCryptorPointer); + private static native void nativeSetKeyIndex(long frameCryptorPointer, int index); + private static native int nativeGetKeyIndex(long frameCryptorPointer); + private static native long nativeSetObserver(long frameCryptorPointer, Observer observer); + private static native void nativeUnSetObserver(long frameCryptorPointer); +} diff --git a/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java b/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java new file mode 100644 index 0000000000..d0d4dc8374 --- /dev/null +++ b/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java @@ -0,0 +1,22 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +public enum FrameCryptorAlgorithm { + AES_GCM, + AES_CBC, +} \ No newline at end of file diff --git a/sdk/android/api/org/webrtc/FrameCryptorFactory.java b/sdk/android/api/org/webrtc/FrameCryptorFactory.java new file mode 100644 index 0000000000..865a4b78bb --- /dev/null +++ b/sdk/android/api/org/webrtc/FrameCryptorFactory.java @@ -0,0 +1,44 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.webrtc; + +public class FrameCryptorFactory { + public static FrameCryptorKeyProvider createFrameCryptorKeyProvider( + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance, int keyRingSize, boolean discardFrameWhenCryptorNotReady) { + return nativeCreateFrameCryptorKeyProvider(sharedKey, ratchetSalt, ratchetWindowSize, uncryptedMagicBytes, failureTolerance, keyRingSize, discardFrameWhenCryptorNotReady); + } + + public static FrameCryptor createFrameCryptorForRtpSender(PeerConnectionFactory factory, RtpSender rtpSender, + String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { + return nativeCreateFrameCryptorForRtpSender(factory.getNativeOwnedFactoryAndThreads(),rtpSender.getNativeRtpSender(), participantId, + algorithm.ordinal(), keyProvider.getNativeKeyProvider()); + } + + public static FrameCryptor createFrameCryptorForRtpReceiver(PeerConnectionFactory factory, RtpReceiver rtpReceiver, + String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { + return nativeCreateFrameCryptorForRtpReceiver(factory.getNativeOwnedFactoryAndThreads(), rtpReceiver.getNativeRtpReceiver(), participantId, + algorithm.ordinal(), keyProvider.getNativeKeyProvider()); + } + + private static native FrameCryptor nativeCreateFrameCryptorForRtpSender(long factory, + long rtpSender, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); + private static native FrameCryptor 
nativeCreateFrameCryptorForRtpReceiver(long factory, + long rtpReceiver, String participantId, int algorithm, long nativeFrameCryptorKeyProvider); + + private static native FrameCryptorKeyProvider nativeCreateFrameCryptorKeyProvider( + boolean sharedKey, byte[] ratchetSalt, int ratchetWindowSize, byte[] uncryptedMagicBytes, int failureTolerance, int keyRingSize, boolean discardFrameWhenCryptorNotReady); +} diff --git a/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java b/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java new file mode 100644 index 0000000000..6ab0cdddf5 --- /dev/null +++ b/sdk/android/api/org/webrtc/FrameCryptorKeyProvider.java @@ -0,0 +1,93 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.webrtc; + +import java.util.ArrayList; + +public class FrameCryptorKeyProvider { + private long nativeKeyProvider; + + @CalledByNative + public FrameCryptorKeyProvider(long nativeKeyProvider) { + this.nativeKeyProvider = nativeKeyProvider; + } + + public long getNativeKeyProvider() { + return nativeKeyProvider; + } + + public boolean setSharedKey(int index, byte[] key) { + checkKeyProviderExists(); + return nativeSetSharedKey(nativeKeyProvider,index, key); + } + + public byte[] ratchetSharedKey(int index) { + checkKeyProviderExists(); + return nativeRatchetSharedKey(nativeKeyProvider, index); + } + + public byte[] exportSharedKey(int index) { + checkKeyProviderExists(); + return nativeExportSharedKey(nativeKeyProvider, index); + } + + public boolean setKey(String participantId, int index, byte[] key) { + checkKeyProviderExists(); + return nativeSetKey(nativeKeyProvider, participantId, index, key); + } + + public byte[] ratchetKey(String participantId, int index) { + checkKeyProviderExists(); + return nativeRatchetKey(nativeKeyProvider, participantId, index); + } + + public byte[] exportKey(String participantId, int index) { + checkKeyProviderExists(); + return nativeExportKey(nativeKeyProvider, participantId, index); + } + + public void setSifTrailer(byte[] sifTrailer) { + checkKeyProviderExists(); + nativeSetSifTrailer(nativeKeyProvider, sifTrailer); + } + + public void dispose() { + checkKeyProviderExists(); + JniCommon.nativeReleaseRef(nativeKeyProvider); + nativeKeyProvider = 0; + } + + private void checkKeyProviderExists() { + if (nativeKeyProvider == 0) { + throw new IllegalStateException("FrameCryptorKeyProvider has been disposed."); + } + } + private static native boolean nativeSetSharedKey( + long keyProviderPointer, int index, byte[] key); + private static native byte[] nativeRatchetSharedKey( + long keyProviderPointer, int index); + private static native byte[] nativeExportSharedKey( + long keyProviderPointer, int index); + private 
static native boolean nativeSetKey( + long keyProviderPointer, String participantId, int index, byte[] key); + private static native byte[] nativeRatchetKey( + long keyProviderPointer, String participantId, int index); + private static native byte[] nativeExportKey( + long keyProviderPointer, String participantId, int index); + private static native void nativeSetSifTrailer( + long keyProviderPointer, byte[] sifTrailer); +} \ No newline at end of file diff --git a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java index d43fc27fa0..05c39a4b38 100644 --- a/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java +++ b/sdk/android/api/org/webrtc/HardwareVideoEncoderFactory.java @@ -143,11 +143,11 @@ public VideoCodecInfo[] getSupportedCodecs() { // supported by the decoder. if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true), new ArrayList<>())); } supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false), new ArrayList<>())); } } diff --git a/sdk/android/api/org/webrtc/LibaomAv1Encoder.java b/sdk/android/api/org/webrtc/LibaomAv1Encoder.java index 340586ef81..9e80541969 100644 --- a/sdk/android/api/org/webrtc/LibaomAv1Encoder.java +++ b/sdk/android/api/org/webrtc/LibaomAv1Encoder.java @@ -9,6 +9,7 @@ */ package org.webrtc; +import java.util.List; public class LibaomAv1Encoder extends WrappedNativeVideoEncoder { @Override @@ -22,4 +23,10 @@ public long createNative(long webrtcEnvRef) { public boolean isHardwareEncoder() { return false; } + + static List scalabilityModes() { + return nativeGetSupportedScalabilityModes(); + } + + static native 
List nativeGetSupportedScalabilityModes(); } diff --git a/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java b/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java index 71bcd1374b..c864ac2794 100644 --- a/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java +++ b/sdk/android/api/org/webrtc/LibvpxVp9Encoder.java @@ -9,6 +9,7 @@ */ package org.webrtc; +import java.util.List; public class LibvpxVp9Encoder extends WrappedNativeVideoEncoder { @Override @@ -24,4 +25,10 @@ public boolean isHardwareEncoder() { } static native boolean nativeIsSupported(); + + static List scalabilityModes() { + return nativeGetSupportedScalabilityModes(); + } + + static native List nativeGetSupportedScalabilityModes(); } diff --git a/sdk/android/api/org/webrtc/PeerConnection.java b/sdk/android/api/org/webrtc/PeerConnection.java index d530bc2c0c..576e38c9e5 100644 --- a/sdk/android/api/org/webrtc/PeerConnection.java +++ b/sdk/android/api/org/webrtc/PeerConnection.java @@ -566,6 +566,17 @@ public static class RTCConfiguration { * See: https://www.chromestatus.com/feature/6269234631933952 */ public boolean offerExtmapAllowMixed; + + /** + * When this flag is set, ports not bound to any specific network interface + * will be used, in addition to normal ports bound to the enumerated + * interfaces. Without this flag, these "any address" ports would only be + * used when network enumeration fails or is disabled. But under certain + * conditions, these ports may succeed where others fail, so they may allow + * the application to work in a wider variety of environments, at the expense + * of having to allocate additional candidates. + */ + public boolean enableIceGatheringOnAnyAddressPorts; // TODO(deadbeef): Instead of duplicating the defaults here, we should do // something to pick up the defaults from C++. 
The Objective-C equivalent @@ -608,6 +619,7 @@ public RTCConfiguration(List iceServers) { turnLoggingId = null; enableImplicitRollback = false; offerExtmapAllowMixed = true; + enableIceGatheringOnAnyAddressPorts = false; } @CalledByNative("RTCConfiguration") @@ -816,6 +828,11 @@ boolean getEnableImplicitRollback() { boolean getOfferExtmapAllowMixed() { return offerExtmapAllowMixed; } + + @CalledByNative("RTCConfiguration") + boolean getEnableIceGatheringOnAnyAddressPorts() { + return enableIceGatheringOnAnyAddressPorts; + } }; private final List localStreams = new ArrayList<>(); diff --git a/sdk/android/api/org/webrtc/PeerConnectionFactory.java b/sdk/android/api/org/webrtc/PeerConnectionFactory.java index c46718fdd6..762ad3b678 100644 --- a/sdk/android/api/org/webrtc/PeerConnectionFactory.java +++ b/sdk/android/api/org/webrtc/PeerConnectionFactory.java @@ -20,6 +20,7 @@ import org.webrtc.RtpCapabilities; import org.webrtc.audio.AudioDeviceModule; import org.webrtc.audio.JavaAudioDeviceModule; +import org.webrtc.RtpCapabilities; /** * Java wrapper for a C++ PeerConnectionFactoryInterface. Main entry point to diff --git a/sdk/android/api/org/webrtc/RtpParameters.java b/sdk/android/api/org/webrtc/RtpParameters.java index 9ca8311610..4e3f106785 100644 --- a/sdk/android/api/org/webrtc/RtpParameters.java +++ b/sdk/android/api/org/webrtc/RtpParameters.java @@ -76,6 +76,8 @@ public static class Encoding { // If non-null, scale the width and height down by this factor for video. If null, // implementation default scaling factor will be used. @Nullable public Double scaleResolutionDownBy; + // Scalability modes are used to represent simulcast and SVC layers. + @Nullable public String scalabilityMode; // SSRC to be used by this encoding. // Can't be changed between getParameters/setParameters. 
public Long ssrc; @@ -93,8 +95,8 @@ public Encoding(String rid, boolean active, Double scaleResolutionDownBy) { @CalledByNative("Encoding") Encoding(String rid, boolean active, double bitratePriority, @Priority int networkPriority, Integer maxBitrateBps, Integer minBitrateBps, Integer maxFramerate, - Integer numTemporalLayers, Double scaleResolutionDownBy, Long ssrc, - boolean adaptiveAudioPacketTime) { + Integer numTemporalLayers, Double scaleResolutionDownBy, String scalabilityMode, + Long ssrc, boolean adaptiveAudioPacketTime) { this.rid = rid; this.active = active; this.bitratePriority = bitratePriority; @@ -104,6 +106,7 @@ public Encoding(String rid, boolean active, Double scaleResolutionDownBy) { this.maxFramerate = maxFramerate; this.numTemporalLayers = numTemporalLayers; this.scaleResolutionDownBy = scaleResolutionDownBy; + this.scalabilityMode = scalabilityMode; this.ssrc = ssrc; this.adaptiveAudioPacketTime = adaptiveAudioPacketTime; } @@ -160,6 +163,12 @@ Double getScaleResolutionDownBy() { return scaleResolutionDownBy; } + @Nullable + @CalledByNative("Encoding") + String getScalabilityMode() { + return scalabilityMode; + } + @CalledByNative("Encoding") Long getSsrc() { return ssrc; diff --git a/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java b/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java new file mode 100644 index 0000000000..306cbe57d8 --- /dev/null +++ b/sdk/android/api/org/webrtc/SimulcastVideoEncoder.java @@ -0,0 +1,28 @@ +package org.webrtc; + +public class SimulcastVideoEncoder extends WrappedNativeVideoEncoder { + + static native long nativeCreateEncoder(long webrtcEnvRef, VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info); + + VideoEncoderFactory primary; + VideoEncoderFactory fallback; + VideoCodecInfo info; + + public SimulcastVideoEncoder(VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info) { + this.primary = primary; + this.fallback = fallback; + this.info = info; + } + + 
@Override + public long createNative(long webrtcEnvRef) { + return nativeCreateEncoder(webrtcEnvRef, primary, fallback, info); + } + + @Override + public boolean isHardwareEncoder() { + return false; + } + +} + diff --git a/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java b/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java new file mode 100644 index 0000000000..97b4f32087 --- /dev/null +++ b/sdk/android/api/org/webrtc/SimulcastVideoEncoderFactory.java @@ -0,0 +1,43 @@ +/* + * Copyright 2017 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +package org.webrtc; + +import androidx.annotation.Nullable; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Arrays; + +public class SimulcastVideoEncoderFactory implements VideoEncoderFactory { + + VideoEncoderFactory primary; + VideoEncoderFactory fallback; + + public SimulcastVideoEncoderFactory(VideoEncoderFactory primary, VideoEncoderFactory fallback) { + this.primary = primary; + this.fallback = fallback; + } + + @Nullable + @Override + public VideoEncoder createEncoder(VideoCodecInfo info) { + return new SimulcastVideoEncoder(primary, fallback, info); + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + List<VideoCodecInfo> codecs = new ArrayList<>(); + codecs.addAll(Arrays.asList(primary.getSupportedCodecs())); + codecs.addAll(Arrays.asList(fallback.getSupportedCodecs())); + return codecs.toArray(new VideoCodecInfo[codecs.size()]); + } + +} diff --git a/sdk/android/api/org/webrtc/VideoCodecInfo.java b/sdk/android/api/org/webrtc/VideoCodecInfo.java index 4f97cf74cf..e0f5153d47 100644 ---
a/sdk/android/api/org/webrtc/VideoCodecInfo.java +++ b/sdk/android/api/org/webrtc/VideoCodecInfo.java @@ -14,6 +14,8 @@ import java.util.Arrays; import java.util.Locale; import java.util.Map; +import java.util.List; +import java.util.ArrayList; /** * Represent a video codec as encoded in SDP. @@ -34,13 +36,16 @@ public class VideoCodecInfo { public final String name; public final Map params; + public final List scalabilityModes; + @Deprecated public final int payload; @CalledByNative - public VideoCodecInfo(String name, Map params) { + public VideoCodecInfo(String name, Map params, List scalabilityModes) { this.payload = 0; this.name = name; this.params = params; + this.scalabilityModes = scalabilityModes; } @Deprecated @@ -48,6 +53,7 @@ public VideoCodecInfo(int payload, String name, Map params) { this.payload = payload; this.name = name; this.params = params; + this.scalabilityModes = new ArrayList<>(); } @Override @@ -83,4 +89,9 @@ String getName() { Map getParams() { return params; } + + @CalledByNative + List getScalabilityModes() { + return scalabilityModes; + } } diff --git a/sdk/android/api/org/webrtc/VideoTrack.java b/sdk/android/api/org/webrtc/VideoTrack.java index 512e46c26e..1791592b56 100644 --- a/sdk/android/api/org/webrtc/VideoTrack.java +++ b/sdk/android/api/org/webrtc/VideoTrack.java @@ -54,6 +54,24 @@ public void removeSink(VideoSink sink) { } } + /** + * For a remote video track, starts/stops receiving the video stream. + * + * If this is a local video track, this is a no-op. + */ + public void setShouldReceive(boolean shouldReceive){ + nativeSetShouldReceive(getNativeMediaStreamTrack(), shouldReceive); + } + + /** + * The current receive status for a remote video track. + * + * This has no meaning for a local video track. 
+ */ + public boolean shouldReceive(){ + return nativeGetShouldReceive(getNativeMediaStreamTrack()); + } + @Override public void dispose() { for (long nativeSink : sinks.values()) { @@ -73,4 +91,6 @@ public long getNativeVideoTrack() { private static native void nativeRemoveSink(long track, long nativeSink); private static native long nativeWrapSink(VideoSink sink); private static native void nativeFreeSink(long sink); + private static native void nativeSetShouldReceive(long track, boolean shouldReceive); + private static native boolean nativeGetShouldReceive(long track); } diff --git a/sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java b/sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java new file mode 100644 index 0000000000..a7acd37289 --- /dev/null +++ b/sdk/android/api/org/webrtc/WrappedVideoDecoderFactory.java @@ -0,0 +1,75 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.webrtc; + +import android.media.MediaCodecInfo; +import androidx.annotation.Nullable; + +import java.util.Arrays; +import java.util.LinkedHashSet; + +public class WrappedVideoDecoderFactory implements VideoDecoderFactory { + public WrappedVideoDecoderFactory(@Nullable EglBase.Context eglContext) { + this.hardwareVideoDecoderFactory = new HardwareVideoDecoderFactory(eglContext); + this.platformSoftwareVideoDecoderFactory = new PlatformSoftwareVideoDecoderFactory(eglContext); + } + + private final VideoDecoderFactory hardwareVideoDecoderFactory; + private final VideoDecoderFactory hardwareVideoDecoderFactoryWithoutEglContext = new HardwareVideoDecoderFactory(null) ; + private final VideoDecoderFactory softwareVideoDecoderFactory = new SoftwareVideoDecoderFactory(); + @Nullable + private final VideoDecoderFactory platformSoftwareVideoDecoderFactory; + + @Override + public VideoDecoder createDecoder(VideoCodecInfo codecType) { + VideoDecoder softwareDecoder = this.softwareVideoDecoderFactory.createDecoder(codecType); + VideoDecoder hardwareDecoder = this.hardwareVideoDecoderFactory.createDecoder(codecType); + if (softwareDecoder == null && this.platformSoftwareVideoDecoderFactory != null) { + softwareDecoder = this.platformSoftwareVideoDecoderFactory.createDecoder(codecType); + } + + if(hardwareDecoder != null && disableSurfaceTextureFrame(hardwareDecoder.getImplementationName())) { + hardwareDecoder.release(); + hardwareDecoder = this.hardwareVideoDecoderFactoryWithoutEglContext.createDecoder(codecType); + } + + if (hardwareDecoder != null && softwareDecoder != null) { + return new VideoDecoderFallback(softwareDecoder, hardwareDecoder); + } else { + return hardwareDecoder != null ? 
hardwareDecoder : softwareDecoder; + } + } + + private boolean disableSurfaceTextureFrame(String name) { + if (name.startsWith("OMX.qcom.") || name.startsWith("OMX.hisi.")) { + return true; + } + return false; + } + + @Override + public VideoCodecInfo[] getSupportedCodecs() { + LinkedHashSet supportedCodecInfos = new LinkedHashSet(); + supportedCodecInfos.addAll(Arrays.asList(this.softwareVideoDecoderFactory.getSupportedCodecs())); + supportedCodecInfos.addAll(Arrays.asList(this.hardwareVideoDecoderFactory.getSupportedCodecs())); + if (this.platformSoftwareVideoDecoderFactory != null) { + supportedCodecInfos.addAll(Arrays.asList(this.platformSoftwareVideoDecoderFactory.getSupportedCodecs())); + } + + return (VideoCodecInfo[])supportedCodecInfos.toArray(new VideoCodecInfo[supportedCodecInfos.size()]); + } +} diff --git a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java index b118843ea0..8ef45f727b 100644 --- a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java @@ -42,6 +42,7 @@ public static class Builder { private AudioTrackErrorCallback audioTrackErrorCallback; private AudioRecordErrorCallback audioRecordErrorCallback; private SamplesReadyCallback samplesReadyCallback; + private PlaybackSamplesReadyCallback playbackSamplesReadyCallback; private AudioTrackStateCallback audioTrackStateCallback; private AudioRecordStateCallback audioRecordStateCallback; private boolean useHardwareAcousticEchoCanceler = isBuiltInAcousticEchoCancelerSupported(); @@ -140,6 +141,14 @@ public Builder setSamplesReadyCallback(SamplesReadyCallback samplesReadyCallback return this; } + /** + * Set a callback to listen to the audio output passed to the AudioTrack. 
+ */ + public Builder setPlaybackSamplesReadyCallback(PlaybackSamplesReadyCallback playbackSamplesReadyCallback) { + this.playbackSamplesReadyCallback = playbackSamplesReadyCallback; + return this; + } + /** * Set a callback to retrieve information from the AudioTrack on when audio starts and stop. */ @@ -258,7 +267,7 @@ public JavaAudioDeviceModule createAudioDeviceModule() { samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback, - audioTrackStateCallback, useLowLatency, enableVolumeLogger); + audioTrackStateCallback, playbackSamplesReadyCallback, useLowLatency, enableVolumeLogger); return new JavaAudioDeviceModule(context, audioManager, audioInput, audioOutput, inputSampleRate, outputSampleRate, useStereoInput, useStereoOutput); } @@ -325,6 +334,11 @@ public static interface SamplesReadyCallback { void onWebRtcAudioRecordSamplesReady(AudioSamples samples); } + /** Called when new audio samples are ready. This should only be set for debug purposes */ + public static interface PlaybackSamplesReadyCallback { + void onWebRtcAudioTrackSamplesReady(AudioSamples samples); + } + /* AudioTrack */ // Audio playout/track error handler functions. 
public enum AudioTrackStartErrorCode { @@ -362,8 +376,8 @@ public static boolean isBuiltInNoiseSuppressorSupported() { private final Context context; private final AudioManager audioManager; - private final WebRtcAudioRecord audioInput; - private final WebRtcAudioTrack audioOutput; + public final WebRtcAudioRecord audioInput; + public final WebRtcAudioTrack audioOutput; private final int inputSampleRate; private final int outputSampleRate; private final boolean useStereoInput; diff --git a/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java b/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java index 6f448124e8..d9fadabfd9 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/AndroidVideoDecoderInstrumentationTest.java @@ -48,7 +48,7 @@ public AndroidVideoDecoderInstrumentationTest(String codecName, boolean useEglCo if (codecName.equals("H264")) { this.codecType = H264Utils.DEFAULT_H264_BASELINE_PROFILE_CODEC; } else { - this.codecType = new VideoCodecInfo(codecName, new HashMap<>()); + this.codecType = new VideoCodecInfo(codecName, new HashMap<>(), new ArrayList<>()); } this.useEglContext = useEglContext; } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java index fe608c794e..1a9dd5fc38 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/DefaultVideoEncoderFactoryTest.java @@ -47,7 +47,7 @@ public void setUp() { @SmallTest @Test public void getSupportedCodecs_hwVp8SameParamsAsSwVp8_oneVp8() { - VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap<>()); + VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap<>(), new 
ArrayList<>()); VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(hwVp8Encoder); DefaultVideoEncoderFactory defFactory = new DefaultVideoEncoderFactory(hwFactory); VideoCodecInfo[] supportedCodecs = defFactory.getSupportedCodecs(); @@ -62,7 +62,7 @@ public void getSupportedCodecs_hwVp8SameParamsAsSwVp8_oneVp8() { public void getSupportedCodecs_hwVp8WithDifferentParams_twoVp8() { VideoCodecInfo hwVp8Encoder = new VideoCodecInfo("VP8", new HashMap() { { put("param", "value"); } - }); + }, new ArrayList<>()); VideoEncoderFactory hwFactory = new CustomHardwareVideoEncoderFactory(hwVp8Encoder); DefaultVideoEncoderFactory defFactory = new DefaultVideoEncoderFactory(hwFactory); VideoCodecInfo[] supportedCodecs = defFactory.getSupportedCodecs(); diff --git a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java index 8a5d9788ee..8be15624da 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoDecoderFactoryTest.java @@ -14,6 +14,7 @@ import androidx.annotation.Nullable; import androidx.test.filters.SmallTest; +import java.util.ArrayList; import java.util.HashMap; import org.junit.Before; import org.junit.Test; @@ -55,7 +56,7 @@ public void createDecoder_supportedCodec_returnsNotNull() { @Test public void createDecoder_unsupportedCodec_returnsNull() { VideoDecoderFactory factory = new SoftwareVideoDecoderFactory(); - VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap()); + VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap(), new ArrayList<>()); VideoDecoder decoder = factory.createDecoder(codec); assertThat(decoder).isNull(); } diff --git a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java 
b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java index 696b423cde..0fa4c4cc17 100644 --- a/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java +++ b/sdk/android/instrumentationtests/src/org/webrtc/SoftwareVideoEncoderFactoryTest.java @@ -14,6 +14,7 @@ import androidx.annotation.Nullable; import androidx.test.filters.SmallTest; +import java.util.ArrayList; import java.util.HashMap; import org.junit.Before; import org.junit.Test; @@ -52,7 +53,7 @@ public void createEncoder_supportedCodec_returnsNotNull() { @Test public void createEncoder_unsupportedCodec_returnsNull() { VideoEncoderFactory factory = new SoftwareVideoEncoderFactory(); - VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap()); + VideoCodecInfo codec = new VideoCodecInfo("unsupported", new HashMap(), new ArrayList<>()); VideoEncoder encoder = factory.createEncoder(codec); assertThat(encoder).isNull(); } diff --git a/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java b/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java index 70151d3b78..72c5c64191 100644 --- a/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java +++ b/sdk/android/native_unittests/org/webrtc/CodecsWrapperTestHelper.java @@ -12,6 +12,7 @@ import java.util.HashMap; import java.util.Map; +import java.util.ArrayList; public class CodecsWrapperTestHelper { @CalledByNative @@ -20,7 +21,7 @@ public static VideoCodecInfo createTestVideoCodecInfo() { params.put( VideoCodecInfo.H264_FMTP_PROFILE_LEVEL_ID, VideoCodecInfo.H264_CONSTRAINED_BASELINE_3_1); - VideoCodecInfo codec_info = new VideoCodecInfo("H264", params); + VideoCodecInfo codec_info = new VideoCodecInfo("H264", params, new ArrayList<>()); return codec_info; } diff --git a/sdk/android/src/java/org/webrtc/H264Utils.java b/sdk/android/src/java/org/webrtc/H264Utils.java index abb79c6582..4bf292ee12 100644 --- 
a/sdk/android/src/java/org/webrtc/H264Utils.java +++ b/sdk/android/src/java/org/webrtc/H264Utils.java @@ -12,6 +12,7 @@ import java.util.Map; import java.util.HashMap; +import java.util.ArrayList; /** Container for static helper functions related to dealing with H264 codecs. */ class H264Utils { @@ -38,9 +39,9 @@ public static Map getDefaultH264Params(boolean isHighProfile) { } public static VideoCodecInfo DEFAULT_H264_BASELINE_PROFILE_CODEC = - new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false)); + new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ false), new ArrayList<>()); public static VideoCodecInfo DEFAULT_H264_HIGH_PROFILE_CODEC = - new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true)); + new VideoCodecInfo("H264", getDefaultH264Params(/* isHighProfile= */ true), new ArrayList<>()); public static boolean isSameH264Profile( Map params1, Map params2) { diff --git a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java index 4512d9a049..71ef14b2ba 100644 --- a/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java +++ b/sdk/android/src/java/org/webrtc/HardwareVideoEncoder.java @@ -45,8 +45,8 @@ class HardwareVideoEncoder implements VideoEncoder { private static final int MEDIA_CODEC_RELEASE_TIMEOUT_MS = 5000; private static final int DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US = 100000; - // Size of the input frames should be multiple of 16 for the H/W encoder. - private static final int REQUIRED_RESOLUTION_ALIGNMENT = 16; + // Size of the input frames should be multiple of 2 for the H/W encoder. 
+ private static final int REQUIRED_RESOLUTION_ALIGNMENT = 2; /** * Keeps track of the number of output buffers that have been passed down the pipeline and not yet @@ -210,6 +210,11 @@ public VideoCodecStatus initEncode(Settings settings, Callback callback) { this.callback = callback; automaticResizeOn = settings.automaticResizeOn; + if (settings.width % REQUIRED_RESOLUTION_ALIGNMENT != 0 + || settings.height % REQUIRED_RESOLUTION_ALIGNMENT != 0) { + Logging.e(TAG, "MediaCodec requires 2x2 alignment."); + return VideoCodecStatus.ERR_SIZE; + } this.width = settings.width; this.height = settings.height; useSurfaceMode = canUseSurface(); @@ -533,6 +538,12 @@ private VideoCodecStatus resetCodec(int newWidth, int newHeight, boolean newUseS if (status != VideoCodecStatus.OK) { return status; } + + if (newWidth % REQUIRED_RESOLUTION_ALIGNMENT != 0 + || newHeight % REQUIRED_RESOLUTION_ALIGNMENT != 0) { + Logging.e(TAG, "MediaCodec requires 2x2 alignment."); + return VideoCodecStatus.ERR_SIZE; + } width = newWidth; height = newHeight; useSurfaceMode = newUseSurfaceMode; diff --git a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java index 9a73bc49ff..d5b892279c 100644 --- a/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java +++ b/sdk/android/src/java/org/webrtc/MediaCodecVideoDecoderFactory.java @@ -72,11 +72,11 @@ public VideoCodecInfo[] getSupportedCodecs() { String name = type.name(); if (type == VideoCodecMimeType.H264 && isH264HighProfileSupported(codec)) { supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ true), new ArrayList<>())); } supportedCodecInfos.add(new VideoCodecInfo( - name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false))); + name, MediaCodecUtils.getCodecProperties(type, /* highProfile= */ false), new 
ArrayList<>())); } } diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java index 2b34e34013..25d10e4f61 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -20,12 +20,14 @@ import android.os.Process; import androidx.annotation.Nullable; import java.nio.ByteBuffer; +import java.util.Arrays; import org.webrtc.CalledByNative; import org.webrtc.Logging; import org.webrtc.ThreadUtils; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStartErrorCode; import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback; +import org.webrtc.audio.JavaAudioDeviceModule.PlaybackSamplesReadyCallback; import org.webrtc.audio.LowLatencyAudioBufferManager; class WebRtcAudioTrack { @@ -62,7 +64,7 @@ class WebRtcAudioTrack { private ByteBuffer byteBuffer; - private @Nullable final AudioAttributes audioAttributes; + public @Nullable AudioAttributes audioAttributes; private @Nullable AudioTrack audioTrack; private @Nullable AudioTrackThread audioThread; private final VolumeLogger volumeLogger; @@ -76,6 +78,7 @@ class WebRtcAudioTrack { private final @Nullable AudioTrackErrorCallback errorCallback; private final @Nullable AudioTrackStateCallback stateCallback; + private final @Nullable PlaybackSamplesReadyCallback audioSamplesReadyCallback; /** * Audio thread which keeps calling AudioTrack.write() to stream audio. @@ -129,6 +132,17 @@ public void run() { reportWebRtcAudioTrackError("AudioTrack.write failed: " + bytesWritten); } } + + if (audioSamplesReadyCallback != null && keepAlive) { + // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily + // at index 0. 
+ byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), + sizeInBytes + byteBuffer.arrayOffset()); + audioSamplesReadyCallback.onWebRtcAudioTrackSamplesReady( + new JavaAudioDeviceModule.AudioSamples(audioTrack.getAudioFormat(), + audioTrack.getChannelCount(), audioTrack.getSampleRate(), data)); + } + if (useLowLatency) { bufferManager.maybeAdjustBufferSize(audioTrack); } @@ -154,13 +168,13 @@ public void stopThread() { @CalledByNative WebRtcAudioTrack(Context context, AudioManager audioManager) { this(context, audioManager, null /* audioAttributes */, null /* errorCallback */, - null /* stateCallback */, false /* useLowLatency */, true /* enableVolumeLogger */); + null /* stateCallback */, null /* audioSamplesReadyCallback */, false /* useLowLatency */, true /* enableVolumeLogger */); } WebRtcAudioTrack(Context context, AudioManager audioManager, @Nullable AudioAttributes audioAttributes, @Nullable AudioTrackErrorCallback errorCallback, - @Nullable AudioTrackStateCallback stateCallback, boolean useLowLatency, - boolean enableVolumeLogger) { + @Nullable AudioTrackStateCallback stateCallback, @Nullable PlaybackSamplesReadyCallback audioSamplesReadyCallback, + boolean useLowLatency, boolean enableVolumeLogger) { threadChecker.detachThread(); this.context = context; this.audioManager = audioManager; @@ -168,6 +182,7 @@ public void stopThread() { this.errorCallback = errorCallback; this.stateCallback = stateCallback; this.volumeLogger = enableVolumeLogger ? 
new VolumeLogger(audioManager) : null; + this.audioSamplesReadyCallback = audioSamplesReadyCallback; this.useLowLatency = useLowLatency; Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); } diff --git a/sdk/android/src/jni/libaom_av1_encoder.cc b/sdk/android/src/jni/libaom_av1_encoder.cc index 9ed4abf5d6..17f7655c53 100644 --- a/sdk/android/src/jni/libaom_av1_encoder.cc +++ b/sdk/android/src/jni/libaom_av1_encoder.cc @@ -16,6 +16,9 @@ #include "sdk/android/generated_libaom_av1_encoder_jni/LibaomAv1Encoder_jni.h" #include "sdk/android/src/jni/jni_helpers.h" +#include +#include + namespace webrtc { namespace jni { @@ -26,5 +29,14 @@ jlong JNI_LibaomAv1Encoder_Create(JNIEnv* jni, jlong j_webrtc_env_ref) { .release()); } +static webrtc::ScopedJavaLocalRef JNI_LibaomAv1Encoder_GetSupportedScalabilityModes(JNIEnv* jni) { + std::vector modes; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + modes.push_back(std::string(webrtc::ScalabilityModeToString(scalability_mode))); + } + } + return NativeToJavaStringArray(jni, modes); +} } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/pc/audio_sink.cc b/sdk/android/src/jni/pc/audio_sink.cc new file mode 100644 index 0000000000..5bd88c75f6 --- /dev/null +++ b/sdk/android/src/jni/pc/audio_sink.cc @@ -0,0 +1,39 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#include "sdk/android/src/jni/pc/audio_sink.h" + +#include "sdk/android/generated_peerconnection_jni/AudioTrackSink_jni.h" + +namespace webrtc { +namespace jni { + +AudioTrackSinkWrapper::AudioTrackSinkWrapper(JNIEnv* jni, const JavaRef& j_sink) + : j_sink_(jni, j_sink) {} + +AudioTrackSinkWrapper::~AudioTrackSinkWrapper() {} + +void AudioTrackSinkWrapper::OnData( + const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) { + JNIEnv* jni = AttachCurrentThreadIfNeeded(); + int length = (bits_per_sample / 8) * number_of_channels * number_of_frames; + ScopedJavaLocalRef audio_buffer = + NewDirectByteBuffer(jni, (void *) audio_data, length); + Java_AudioTrackSink_onData(jni, j_sink_, + audio_buffer, bits_per_sample, sample_rate, (int) number_of_channels, (int) number_of_frames, (absolute_capture_timestamp_ms ? absolute_capture_timestamp_ms.value() : 0)); +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/audio_sink.h b/sdk/android/src/jni/pc/audio_sink.h new file mode 100644 index 0000000000..809f460e0c --- /dev/null +++ b/sdk/android/src/jni/pc/audio_sink.h @@ -0,0 +1,41 @@ +/* + * Copyright 2018 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#ifndef SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ +#define SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ + +#include + +#include "api/media_stream_interface.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +class AudioTrackSinkWrapper : public webrtc::AudioTrackSinkInterface { + public: + AudioTrackSinkWrapper(JNIEnv* jni, const JavaRef& j_sink); + ~AudioTrackSinkWrapper() override; + + private: + void OnData(const void* audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) override; + + const ScopedJavaGlobalRef j_sink_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ diff --git a/sdk/android/src/jni/pc/audio_track.cc b/sdk/android/src/jni/pc/audio_track.cc index 36ed43f1d4..df2d605893 100644 --- a/sdk/android/src/jni/pc/audio_track.cc +++ b/sdk/android/src/jni/pc/audio_track.cc @@ -9,6 +9,8 @@ */ #include "api/media_stream_interface.h" +#include "sdk/android/src/jni/pc/audio_sink.h" + #include "sdk/android/generated_peerconnection_jni/AudioTrack_jni.h" namespace webrtc { @@ -20,5 +22,29 @@ static void JNI_AudioTrack_SetVolume(JNIEnv*, jlong j_p, jdouble volume) { source->SetVolume(volume); } +static void JNI_AudioTrack_AddSink(JNIEnv* jni, + jlong j_native_track, + jlong j_native_sink) { + reinterpret_cast(j_native_track) + ->AddSink(reinterpret_cast(j_native_sink)); +} + +static void JNI_AudioTrack_RemoveSink(JNIEnv* jni, + jlong j_native_track, + jlong j_native_sink) { + reinterpret_cast(j_native_track) + ->RemoveSink(reinterpret_cast(j_native_sink)); +} + +static jlong JNI_AudioTrack_WrapSink(JNIEnv* jni, + const JavaParamRef& sink) { + return jlongFromPointer(new AudioTrackSinkWrapper(jni, sink)); +} + +static void JNI_AudioTrack_FreeSink(JNIEnv* jni, jlong j_native_sink) { + delete reinterpret_cast(j_native_sink); +} + + } // namespace jni } // namespace webrtc 
diff --git a/sdk/android/src/jni/pc/external_audio_processing_factory.cc b/sdk/android/src/jni/pc/external_audio_processing_factory.cc new file mode 100644 index 0000000000..3d7ee7a4d9 --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processing_factory.cc @@ -0,0 +1,143 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "sdk/android/src/jni/pc/external_audio_processing_factory.h" + +#include +#include + +#include "api/make_ref_counted.h" +#include "rtc_base/ref_counted_object.h" +#include "sdk/android/generated_peerconnection_jni/ExternalAudioProcessingFactory_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/external_audio_processor.h" + +namespace webrtc { +namespace jni { + +ExternalAudioProcessingJni::ExternalAudioProcessingJni( + JNIEnv* jni, + const JavaRef& j_processing) + : j_processing_global_(jni, j_processing) {} +ExternalAudioProcessingJni::~ExternalAudioProcessingJni() {} +void ExternalAudioProcessingJni::Initialize(int sample_rate_hz, + int num_channels) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_AudioProcessing_initialize(env, j_processing_global_, sample_rate_hz, + num_channels); +} + +void ExternalAudioProcessingJni::Reset(int new_rate) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_AudioProcessing_reset(env, 
j_processing_global_, new_rate); +} + +void ExternalAudioProcessingJni::Process(int num_bands, int num_frames, int buffer_size, float* buffer) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + ScopedJavaLocalRef audio_buffer = + NewDirectByteBuffer(env, (void*)buffer, buffer_size * sizeof(float)); + Java_AudioProcessing_process(env, j_processing_global_, num_bands, num_frames, audio_buffer); +} + +ExternalAudioProcessingFactory::ExternalAudioProcessingFactory() { + capture_post_processor_ = new ExternalAudioProcessor(); + std::unique_ptr capture_post_processor( + capture_post_processor_); + + render_pre_processor_ = new ExternalAudioProcessor(); + std::unique_ptr render_pre_processor( + render_pre_processor_); + + apm_ = webrtc::AudioProcessingBuilder() + .SetCapturePostProcessing(std::move(capture_post_processor)) + .SetRenderPreProcessing(std::move(render_pre_processor)) + .Create(); + + webrtc::AudioProcessing::Config config; + apm_->ApplyConfig(config); +} + +static ExternalAudioProcessingFactory* default_processor_ptr; + +static jlong JNI_ExternalAudioProcessingFactory_GetDefaultApm(JNIEnv* env) { + if (!default_processor_ptr) { + auto default_processor = rtc::make_ref_counted(); + default_processor_ptr = default_processor.release(); + } + return webrtc::jni::jlongFromPointer(default_processor_ptr->apm().get()); +} + +static jlong JNI_ExternalAudioProcessingFactory_SetCapturePostProcessing( + JNIEnv* env, + const JavaParamRef& j_processing) { + if (!default_processor_ptr) { + return 0; + } + auto processing = + rtc::make_ref_counted(env, j_processing); + processing->AddRef(); + default_processor_ptr->capture_post_processor()->SetExternalAudioProcessing( + processing.get()); + return jlongFromPointer(processing.get()); +} + +static jlong JNI_ExternalAudioProcessingFactory_SetRenderPreProcessing( + JNIEnv* env, + const JavaParamRef& j_processing) { + if (!default_processor_ptr) { + return 0; + } + auto processing = + rtc::make_ref_counted(env, j_processing); + 
processing->AddRef(); + default_processor_ptr->render_pre_processor()->SetExternalAudioProcessing( + processing.get()); + return jlongFromPointer(processing.get()); +} + +static void JNI_ExternalAudioProcessingFactory_SetBypassFlagForCapturePost( + JNIEnv* env, + jboolean bypass) { + if (!default_processor_ptr) { + return; + } + default_processor_ptr->capture_post_processor()->SetBypassFlag(bypass); +} + +static void JNI_ExternalAudioProcessingFactory_SetBypassFlagForRenderPre( + JNIEnv* env, + jboolean bypass) { + if (!default_processor_ptr) { + return; + } + default_processor_ptr->render_pre_processor()->SetBypassFlag(bypass); +} + +static void JNI_ExternalAudioProcessingFactory_Destroy(JNIEnv* env) { + if (!default_processor_ptr) { + return; + } + default_processor_ptr->render_pre_processor()->SetExternalAudioProcessing( + nullptr); + default_processor_ptr->capture_post_processor()->SetExternalAudioProcessing( + nullptr); + delete default_processor_ptr; +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/external_audio_processing_factory.h b/sdk/android/src/jni/pc/external_audio_processing_factory.h new file mode 100644 index 0000000000..5dfebe81fc --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processing_factory.h @@ -0,0 +1,68 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define WEBRTC_APM_DEBUG_DUMP 0 + +#include "rtc_base/ref_counted_object.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" +#include "sdk/android/src/jni/pc/external_audio_processor.h" +#include "sdk/android/src/jni/pc/external_audio_processing_interface.h" + +namespace webrtc { +namespace jni { + +class ExternalAudioProcessingJni + : public webrtc::ExternalAudioProcessingInterface, + public rtc::RefCountInterface { + public: + ExternalAudioProcessingJni(JNIEnv* jni, const JavaRef& j_processing); + ~ExternalAudioProcessingJni(); + + protected: + virtual void Initialize(int sample_rate_hz, int num_channels) override; + virtual void Reset(int new_rate) override; + virtual void Process(int num_bans, int num_frames, int buffer_size, float* buffer) override; + + private: + const ScopedJavaGlobalRef j_processing_global_; + const ScopedJavaGlobalRef j_processing_; +}; + +class ExternalAudioProcessingFactory : public rtc::RefCountInterface { + public: + ExternalAudioProcessingFactory(); + virtual ~ExternalAudioProcessingFactory() = default; + + ExternalAudioProcessor* capture_post_processor() { + return capture_post_processor_; + } + + ExternalAudioProcessor* render_pre_processor() { + return render_pre_processor_; + } + + rtc::scoped_refptr apm() { return apm_; } + + private: + rtc::scoped_refptr apm_; + ExternalAudioProcessor* capture_post_processor_; + ExternalAudioProcessor* render_pre_processor_; +}; + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/external_audio_processing_interface.h b/sdk/android/src/jni/pc/external_audio_processing_interface.h new file mode 100644 index 0000000000..1202be106b --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processing_interface.h @@ -0,0 +1,33 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#ifndef API_ANDROID_JNI_EXTERNALAUDIOPROCESSORINTERFACE_H_ +#define API_ANDROID_JNI_EXTERNALAUDIOPROCESSORINTERFACE_H_ + +namespace webrtc { + +class ExternalAudioProcessingInterface { + public: + virtual void Initialize(int sample_rate_hz, int num_channels) = 0; + virtual void Reset(int new_rate) = 0; + virtual void Process(int num_bands, int num_frames, int buffer_size, float* buffer) = 0; + + protected: + virtual ~ExternalAudioProcessingInterface() = default; +}; + +} // namespace webrtc + +#endif // API_ANDROID_JNI_EXTERNALAUDIOPROCESSORINTERFACE_H_ diff --git a/sdk/android/src/jni/pc/external_audio_processor.cc b/sdk/android/src/jni/pc/external_audio_processor.cc new file mode 100644 index 0000000000..274982d6d4 --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processor.cc @@ -0,0 +1,72 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "sdk/android/src/jni/pc/external_audio_processor.h" + +namespace webrtc { + +void ExternalAudioProcessor::SetExternalAudioProcessing( + ExternalAudioProcessingInterface* processor) { + webrtc::MutexLock lock(&mutex_); + external_processor_ = processor; + if (initialized_) { + external_processor_->Initialize(sample_rate_hz_, num_channels_); + } +} + +void ExternalAudioProcessor::SetBypassFlag(bool bypass) { + webrtc::MutexLock lock(&mutex_); + bypass_flag_ = bypass; +} + +void ExternalAudioProcessor::Initialize(int sample_rate_hz, int num_channels) { + webrtc::MutexLock lock(&mutex_); + sample_rate_hz_ = sample_rate_hz; + num_channels_ = num_channels; + if (external_processor_) { + external_processor_->Initialize(sample_rate_hz, num_channels); + } + initialized_ = true; +} + +void ExternalAudioProcessor::Process(webrtc::AudioBuffer* audio) { + webrtc::MutexLock lock(&mutex_); + if (!external_processor_ || bypass_flag_ || !initialized_) { + return; + } + + size_t num_frames = audio->num_frames(); + size_t num_bands =audio->num_bands(); + + // 1 buffer = 10ms of frames + int rate = num_frames * 100; + + if (rate != sample_rate_hz_) { + external_processor_->Reset(rate); + sample_rate_hz_ = rate; + } + + external_processor_->Process(num_bands, num_frames, kNsFrameSize * num_bands, audio->channels()[0]); +} + +std::string ExternalAudioProcessor::ToString() const { + return "ExternalAudioProcessor"; +} + +void ExternalAudioProcessor::SetRuntimeSetting( + webrtc::AudioProcessing::RuntimeSetting setting) {} + +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/external_audio_processor.h b/sdk/android/src/jni/pc/external_audio_processor.h new file mode 100644 index 0000000000..1dc31809fc --- /dev/null +++ b/sdk/android/src/jni/pc/external_audio_processor.h @@ -0,0 +1,57 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_EXTERNAL_AUDIO_PROCESSOR_H_ +#define SDK_ANDROID_SRC_JNI_PC_EXTERNAL_AUDIO_PROCESSOR_H_ + +#define WEBRTC_APM_DEBUG_DUMP 0 + +#include "modules/audio_processing/audio_buffer.h" +#include "modules/audio_processing/audio_processing_impl.h" +#include "modules/audio_processing/include/audio_processing.h" +#include "sdk/android/src/jni/pc/external_audio_processing_interface.h" + +namespace webrtc { + +class ExternalAudioProcessor : public webrtc::CustomProcessing { + public: + ExternalAudioProcessor() = default; + ~ExternalAudioProcessor() override = default; + + void SetExternalAudioProcessing( + ExternalAudioProcessingInterface* processor); + + void SetBypassFlag(bool bypass); + + private: + void Initialize(int sample_rate_hz, int num_channels) override; + void Process(webrtc::AudioBuffer* audio) override; + std::string ToString() const override; + void SetRuntimeSetting( + webrtc::AudioProcessing::RuntimeSetting setting) override; + + private: + mutable webrtc::Mutex mutex_; + ExternalAudioProcessingInterface* external_processor_ = nullptr; + bool bypass_flag_ = false; + bool initialized_ = false; + int sample_rate_hz_ = 0; + int num_channels_ = 0; +}; + +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_EXTERNAL_AUDIO_PROCESSOR_H_ diff --git a/sdk/android/src/jni/pc/frame_cryptor.cc b/sdk/android/src/jni/pc/frame_cryptor.cc new file mode 100644 index 0000000000..af2fd8f2b0 --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor.cc @@ -0,0 +1,202 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache
License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include "sdk/android/src/jni/pc/frame_cryptor.h" + +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" +#include "rtc_base/ref_counted_object.h" +#include "sdk/android/generated_peerconnection_jni/FrameCryptorFactory_jni.h" +#include "sdk/android/generated_peerconnection_jni/FrameCryptor_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/pc/frame_cryptor_key_provider.h" +#include "sdk/android/src/jni/pc/owned_factory_and_threads.h" + +namespace webrtc { +namespace jni { + +FrameCryptorObserverJni::FrameCryptorObserverJni( + JNIEnv* jni, + const JavaRef& j_observer) + : j_observer_global_(jni, j_observer) {} + +FrameCryptorObserverJni::~FrameCryptorObserverJni() {} + +void FrameCryptorObserverJni::OnFrameCryptionStateChanged( + const std::string participant_id, + FrameCryptionState new_state) { + JNIEnv* env = AttachCurrentThreadIfNeeded(); + Java_Observer_onFrameCryptionStateChanged( + env, j_observer_global_, NativeToJavaString(env, participant_id), + Java_FrameCryptionState_fromNativeIndex(env, new_state)); +} + +ScopedJavaLocalRef NativeToJavaFrameCryptor( + JNIEnv* env, + rtc::scoped_refptr cryptor) { + if (!cryptor) + return nullptr; + // Sender is now owned by the Java object, and will be freed from + // FrameCryptor.dispose(). 
+ return Java_FrameCryptor_Constructor(env, + jlongFromPointer(cryptor.release())); +} + +static void JNI_FrameCryptor_SetEnabled(JNIEnv* jni, + jlong j_frame_cryptor_pointer, + jboolean j_enabled) { + reinterpret_cast(j_frame_cryptor_pointer) + ->SetEnabled(j_enabled); +} + +static jboolean JNI_FrameCryptor_IsEnabled(JNIEnv* jni, + jlong j_frame_cryptor_pointer) { + return reinterpret_cast(j_frame_cryptor_pointer) + ->enabled(); +} + +static void JNI_FrameCryptor_SetKeyIndex(JNIEnv* jni, + jlong j_frame_cryptor_pointer, + jint j_index) { + reinterpret_cast(j_frame_cryptor_pointer) + ->SetKeyIndex(j_index); +} + +static jint JNI_FrameCryptor_GetKeyIndex(JNIEnv* jni, + jlong j_frame_cryptor_pointer) { + return reinterpret_cast(j_frame_cryptor_pointer) + ->key_index(); +} + +static jlong JNI_FrameCryptor_SetObserver( + JNIEnv* jni, + jlong j_frame_cryptor_pointer, + const JavaParamRef& j_observer) { + auto observer = + rtc::make_ref_counted(jni, j_observer); + observer->AddRef(); + reinterpret_cast(j_frame_cryptor_pointer) + ->RegisterFrameCryptorTransformerObserver(observer); + return jlongFromPointer(observer.get()); +} + +static void JNI_FrameCryptor_UnSetObserver(JNIEnv* jni, + jlong j_frame_cryptor_pointer) { + reinterpret_cast(j_frame_cryptor_pointer) + ->UnRegisterFrameCryptorTransformerObserver(); +} + +webrtc::FrameCryptorTransformer::Algorithm AlgorithmFromIndex(int index) { + switch (index) { + case 0: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + case 1: + return webrtc::FrameCryptorTransformer::Algorithm::kAesCbc; + default: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + } +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorFactory_CreateFrameCryptorForRtpReceiver( + JNIEnv* env, + jlong native_factory, + jlong j_rtp_receiver_pointer, + const base::android::JavaParamRef& participantId, + jint j_algorithm_index, + jlong j_key_provider) { + OwnedFactoryAndThreads* factory = + reinterpret_cast(native_factory); 
+ auto keyProvider = + reinterpret_cast(j_key_provider); + auto participant_id = JavaToStdString(env, participantId); + auto rtpReceiver = + reinterpret_cast(j_rtp_receiver_pointer); + auto mediaType = + rtpReceiver->track()->kind() == "audio" + ? webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + auto frame_crypto_transformer = + rtc::scoped_refptr( + new webrtc::FrameCryptorTransformer(factory->signaling_thread(), + participant_id, mediaType, AlgorithmFromIndex(j_algorithm_index), + rtc::scoped_refptr(keyProvider))); + + rtpReceiver->SetDepacketizerToDecoderFrameTransformer( + frame_crypto_transformer); + frame_crypto_transformer->SetEnabled(false); + + return NativeToJavaFrameCryptor(env, frame_crypto_transformer); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorFactory_CreateFrameCryptorForRtpSender( + JNIEnv* env, + jlong native_factory, + jlong j_rtp_sender_pointer, + const base::android::JavaParamRef& participantId, + jint j_algorithm_index, + jlong j_key_provider) { + OwnedFactoryAndThreads* factory = + reinterpret_cast(native_factory); + auto keyProvider = + reinterpret_cast(j_key_provider); + auto rtpSender = reinterpret_cast(j_rtp_sender_pointer); + auto participant_id = JavaToStdString(env, participantId); + auto mediaType = + rtpSender->track()->kind() == "audio" + ? 
webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + auto frame_crypto_transformer = + rtc::scoped_refptr( + new webrtc::FrameCryptorTransformer(factory->signaling_thread(), + participant_id, mediaType, AlgorithmFromIndex(j_algorithm_index), + rtc::scoped_refptr(keyProvider))); + + rtpSender->SetEncoderToPacketizerFrameTransformer(frame_crypto_transformer); + frame_crypto_transformer->SetEnabled(false); + + return NativeToJavaFrameCryptor(env, frame_crypto_transformer); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorFactory_CreateFrameCryptorKeyProvider( + JNIEnv* env, + jboolean j_shared, + const base::android::JavaParamRef& j_ratchetSalt, + jint j_ratchetWindowSize, + const base::android::JavaParamRef& j_uncryptedMagicBytes, + jint j_failureTolerance, + jint j_keyRingSize, + jboolean j_discardFrameWhenCryptorNotReady) { + auto ratchetSalt = JavaToNativeByteArray(env, j_ratchetSalt); + KeyProviderOptions options; + options.ratchet_salt = + std::vector(ratchetSalt.begin(), ratchetSalt.end()); + options.ratchet_window_size = j_ratchetWindowSize; + auto uncryptedMagicBytes = JavaToNativeByteArray(env, j_uncryptedMagicBytes); + options.uncrypted_magic_bytes = + std::vector(uncryptedMagicBytes.begin(), uncryptedMagicBytes.end()); + options.shared_key = j_shared; + options.failure_tolerance = j_failureTolerance; + options.key_ring_size = j_keyRingSize; + options.discard_frame_when_cryptor_not_ready = j_discardFrameWhenCryptorNotReady; + return NativeToJavaFrameCryptorKeyProvider( + env, rtc::make_ref_counted(options)); +} + +} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/frame_cryptor.h b/sdk/android/src/jni/pc/frame_cryptor.h new file mode 100644 index 0000000000..dd0788d212 --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor.h @@ -0,0 +1,49 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * 
you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_H_ +#define SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_H_ + +#include + +#include "api/crypto/frame_crypto_transformer.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef NativeToJavaFrameCryptor( + JNIEnv* env, + rtc::scoped_refptr cryptor); + +class FrameCryptorObserverJni : public FrameCryptorTransformerObserver { + public: + FrameCryptorObserverJni(JNIEnv* jni, const JavaRef& j_observer); + ~FrameCryptorObserverJni() override; + + protected: + void OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState state) override; + + private: + const ScopedJavaGlobalRef j_observer_global_; + const ScopedJavaGlobalRef j_observer_; +}; + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_H_ diff --git a/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc new file mode 100644 index 0000000000..e41d16ed91 --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc @@ -0,0 +1,123 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#include "sdk/android/src/jni/pc/frame_cryptor_key_provider.h" + +#include "sdk/android/generated_peerconnection_jni/FrameCryptorKeyProvider_jni.h" +#include "sdk/android/native_api/jni/java_types.h" +#include "sdk/android/src/jni/jni_helpers.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef NativeToJavaFrameCryptorKeyProvider( + JNIEnv* env, + rtc::scoped_refptr key_provider) { + if (!key_provider) + return nullptr; + // Sender is now owned by the Java object, and will be freed from + // FrameCryptorKeyProvider.dispose(). + return Java_FrameCryptorKeyProvider_Constructor( + env, jlongFromPointer(key_provider.release())); +} + +static jboolean JNI_FrameCryptorKeyProvider_SetSharedKey( + JNIEnv* jni, + jlong j_key_provider, + jint j_index, + const base::android::JavaParamRef& j_key) { + auto key = JavaToNativeByteArray(jni, j_key); + return reinterpret_cast(j_key_provider) + ->SetSharedKey(j_index,std::vector(key.begin(), key.end())); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorKeyProvider_RatchetSharedKey( + JNIEnv* env, + jlong keyProviderPointer, + jint j_index) { + auto key_provider = + reinterpret_cast(keyProviderPointer); + auto newKey = key_provider->RatchetSharedKey(j_index); + std::vector int8tKey = + std::vector(newKey.begin(), newKey.end()); + return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorKeyProvider_ExportSharedKey( + JNIEnv* env, + jlong keyProviderPointer, + jint j_index) { + auto key_provider = + 
reinterpret_cast(keyProviderPointer); + auto key = key_provider->ExportSharedKey(j_index); + std::vector int8tKey = std::vector(key.begin(), key.end()); + return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); +} + +static jboolean JNI_FrameCryptorKeyProvider_SetKey( + JNIEnv* jni, + jlong j_key_provider, + const base::android::JavaParamRef& participantId, + jint j_index, + const base::android::JavaParamRef& j_key) { + auto key = JavaToNativeByteArray(jni, j_key); + auto participant_id = JavaToStdString(jni, participantId); + return reinterpret_cast(j_key_provider) + ->SetKey(participant_id, j_index, + std::vector(key.begin(), key.end())); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorKeyProvider_RatchetKey( + JNIEnv* env, + jlong keyProviderPointer, + const base::android::JavaParamRef& participantId, + jint j_index) { + auto participant_id = JavaToStdString(env, participantId); + auto key_provider = + reinterpret_cast(keyProviderPointer); + auto newKey = key_provider->RatchetKey(participant_id, j_index); + std::vector int8tKey = + std::vector(newKey.begin(), newKey.end()); + return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); +} + +static base::android::ScopedJavaLocalRef +JNI_FrameCryptorKeyProvider_ExportKey( + JNIEnv* env, + jlong keyProviderPointer, + const base::android::JavaParamRef& participantId, + jint j_index) { + auto participant_id = JavaToStdString(env, participantId); + auto key_provider = + reinterpret_cast(keyProviderPointer); + auto key = key_provider->ExportKey(participant_id, j_index); + std::vector int8tKey = std::vector(key.begin(), key.end()); + return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); +} + +static void JNI_FrameCryptorKeyProvider_SetSifTrailer( + JNIEnv* jni, + jlong j_key_provider, + const base::android::JavaParamRef& j_trailer) { + auto trailer = JavaToNativeByteArray(jni, j_trailer); + reinterpret_cast(j_key_provider) + ->SetSifTrailer(std::vector(trailer.begin(), trailer.end())); +} + 
+} // namespace jni +} // namespace webrtc diff --git a/sdk/android/src/jni/pc/frame_cryptor_key_provider.h b/sdk/android/src/jni/pc/frame_cryptor_key_provider.h new file mode 100644 index 0000000000..8832a83035 --- /dev/null +++ b/sdk/android/src/jni/pc/frame_cryptor_key_provider.h @@ -0,0 +1,35 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_KEY_PROVIDER_H_ +#define SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_KEY_PROVIDER_H_ + +#include + +#include "api/crypto/frame_crypto_transformer.h" +#include "sdk/android/native_api/jni/scoped_java_ref.h" + +namespace webrtc { +namespace jni { + +ScopedJavaLocalRef NativeToJavaFrameCryptorKeyProvider( + JNIEnv* env, + rtc::scoped_refptr cryptor); + +} // namespace jni +} // namespace webrtc + +#endif // SDK_ANDROID_SRC_JNI_PC_FRAME_CRYPTOR_KEY_PROVIDER_H_ diff --git a/sdk/android/src/jni/pc/peer_connection.cc b/sdk/android/src/jni/pc/peer_connection.cc index e5fb11e3b4..deab006bec 100644 --- a/sdk/android/src/jni/pc/peer_connection.cc +++ b/sdk/android/src/jni/pc/peer_connection.cc @@ -272,6 +272,9 @@ void JavaToNativeRTCConfiguration( rtc_config->enable_implicit_rollback = Java_RTCConfiguration_getEnableImplicitRollback(jni, j_rtc_config); + rtc_config->enable_any_address_ports = + Java_RTCConfiguration_getEnableIceGatheringOnAnyAddressPorts(jni, j_rtc_config); + ScopedJavaLocalRef j_turn_logging_id = 
Java_RTCConfiguration_getTurnLoggingId(jni, j_rtc_config); if (!IsNull(jni, j_turn_logging_id)) { diff --git a/sdk/android/src/jni/pc/peer_connection_factory.cc b/sdk/android/src/jni/pc/peer_connection_factory.cc index 9a21e10ede..c323113061 100644 --- a/sdk/android/src/jni/pc/peer_connection_factory.cc +++ b/sdk/android/src/jni/pc/peer_connection_factory.cc @@ -32,6 +32,7 @@ #include "sdk/android/src/jni/logging/log_sink.h" #include "sdk/android/src/jni/pc/android_network_monitor.h" #include "sdk/android/src/jni/pc/audio.h" +#include "sdk/android/src/jni/pc/rtp_capabilities.h" #include "sdk/android/src/jni/pc/ice_candidate.h" #include "sdk/android/src/jni/pc/media_stream_track.h" #include "sdk/android/src/jni/pc/owned_factory_and_threads.h" diff --git a/sdk/android/src/jni/pc/rtp_parameters.cc b/sdk/android/src/jni/pc/rtp_parameters.cc index 4bd9ee0e1d..6feb6a631b 100644 --- a/sdk/android/src/jni/pc/rtp_parameters.cc +++ b/sdk/android/src/jni/pc/rtp_parameters.cc @@ -53,6 +53,7 @@ ScopedJavaLocalRef NativeToJavaRtpEncodingParameter( NativeToJavaInteger(env, encoding.max_framerate), NativeToJavaInteger(env, encoding.num_temporal_layers), NativeToJavaDouble(env, encoding.scale_resolution_down_by), + NativeToJavaString(env, encoding.scalability_mode), encoding.ssrc ? 
NativeToJavaLong(env, *encoding.ssrc) : nullptr, encoding.adaptive_ptime); } @@ -116,6 +117,11 @@ RtpEncodingParameters JavaToNativeRtpEncodingParameters( Java_Encoding_getScaleResolutionDownBy(jni, j_encoding_parameters); encoding.scale_resolution_down_by = JavaToNativeOptionalDouble(jni, j_scale_resolution_down_by); + ScopedJavaLocalRef j_scalability_mode = + Java_Encoding_getScalabilityMode(jni, j_encoding_parameters); + if (!IsNull(jni, j_scalability_mode)) { + encoding.scalability_mode = JavaToNativeString(jni,j_scalability_mode); + } encoding.adaptive_ptime = Java_Encoding_getAdaptivePTime(jni, j_encoding_parameters); ScopedJavaLocalRef j_ssrc = diff --git a/sdk/android/src/jni/simulcast_video_encoder.cc b/sdk/android/src/jni/simulcast_video_encoder.cc new file mode 100644 index 0000000000..6874c1821b --- /dev/null +++ b/sdk/android/src/jni/simulcast_video_encoder.cc @@ -0,0 +1,35 @@ +#include + +#include "sdk/android/src/jni/jni_helpers.h" +#include "sdk/android/src/jni/video_encoder_factory_wrapper.h" +#include "sdk/android/src/jni/video_codec_info.h" +#include "sdk/android/native_api/codecs/wrapper.h" +#include "media/engine/simulcast_encoder_adapter.h" +#include "rtc_base/logging.h" + +using namespace webrtc; +using namespace webrtc::jni; + +#ifdef __cplusplus +extern "C" { +#endif + +// (VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info) +JNIEXPORT jlong JNICALL Java_org_webrtc_SimulcastVideoEncoder_nativeCreateEncoder(JNIEnv *env, jclass klass, jlong webrtcEnvRef, jobject primary, jobject fallback, jobject info) { + RTC_LOG(LS_INFO) << "Create simulcast video encoder"; + JavaParamRef info_ref(info); + SdpVideoFormat format = VideoCodecInfoToSdpVideoFormat(env, info_ref); + + // TODO: 影響は軽微だが、リークする可能性があるので将来的に修正したい + // https://github.com/shiguredo-webrtc-build/webrtc-build/pull/16#pullrequestreview-600675795 + return NativeToJavaPointer(std::make_unique( + *reinterpret_cast(webrtcEnvRef), + 
JavaToNativeVideoEncoderFactory(env, primary).release(), + JavaToNativeVideoEncoderFactory(env, fallback).release(), + format).release()); +} + + +#ifdef __cplusplus +} +#endif diff --git a/sdk/android/src/jni/simulcast_video_encoder.h b/sdk/android/src/jni/simulcast_video_encoder.h new file mode 100644 index 0000000000..d80ccde014 --- /dev/null +++ b/sdk/android/src/jni/simulcast_video_encoder.h @@ -0,0 +1,22 @@ +/* DO NOT EDIT THIS FILE - it is machine generated */ +#include +/* Header for class org_webrtc_SimulcastVideoEncoder */ + +#ifndef _Included_org_webrtc_SimulcastVideoEncoder +#define _Included_org_webrtc_SimulcastVideoEncoder +#ifdef __cplusplus +extern "C" { +#endif +/* + * Class: org_webrtc_SimulcastVideoEncoder + * Method: nativeCreateEncoder + * Signature: (Lorg/webrtc/VideoEncoderFactory;Lorg/webrtc/VideoEncoderFactory;Lorg/webrtc/VideoCodecInfo;)J + */ + +JNIEXPORT jlong JNICALL Java_org_webrtc_SimulcastVideoEncoder_nativeCreateEncoder + (JNIEnv *, jclass, jlong, jobject, jobject, jobject); + +#ifdef __cplusplus +} +#endif +#endif diff --git a/sdk/android/src/jni/video_codec_info.cc b/sdk/android/src/jni/video_codec_info.cc index a218a1d23f..a85dde67dc 100644 --- a/sdk/android/src/jni/video_codec_info.cc +++ b/sdk/android/src/jni/video_codec_info.cc @@ -13,15 +13,28 @@ #include "sdk/android/generated_video_jni/VideoCodecInfo_jni.h" #include "sdk/android/native_api/jni/java_types.h" #include "sdk/android/src/jni/jni_helpers.h" +#include "api/video_codecs/scalability_mode.h" +#include "modules/video_coding/svc/scalability_mode_util.h" namespace webrtc { namespace jni { SdpVideoFormat VideoCodecInfoToSdpVideoFormat(JNIEnv* jni, const JavaRef& j_info) { + std::vector params = + JavaToStdVectorStrings(jni, Java_VideoCodecInfo_getScalabilityModes(jni, j_info)); + absl::InlinedVector + scalability_modes; + for (auto mode : params) { + auto scalability_mode = ScalabilityModeFromString(mode); + if (scalability_mode != absl::nullopt) { + 
scalability_modes.push_back(*scalability_mode); + } + } return SdpVideoFormat( JavaToNativeString(jni, Java_VideoCodecInfo_getName(jni, j_info)), - JavaToNativeStringMap(jni, Java_VideoCodecInfo_getParams(jni, j_info))); + JavaToNativeStringMap(jni, Java_VideoCodecInfo_getParams(jni, j_info)), + scalability_modes); } ScopedJavaLocalRef SdpVideoFormatToVideoCodecInfo( @@ -29,8 +42,17 @@ ScopedJavaLocalRef SdpVideoFormatToVideoCodecInfo( const SdpVideoFormat& format) { ScopedJavaLocalRef j_params = NativeToJavaStringMap(jni, format.parameters); + webrtc::ScopedJavaLocalRef j_scalability_modes; + if (!format.scalability_modes.empty()) { + JavaListBuilder builder(jni); + for (auto mode : format.scalability_modes) { + std::string scalability_mode(ScalabilityModeToString(mode)); + builder.add(NativeToJavaString(jni, scalability_mode)); + } + j_scalability_modes = builder.java_list(); + } return Java_VideoCodecInfo_Constructor( - jni, NativeToJavaString(jni, format.name), j_params); + jni, NativeToJavaString(jni, format.name), j_params, j_scalability_modes); } } // namespace jni diff --git a/sdk/android/src/jni/video_track.cc b/sdk/android/src/jni/video_track.cc index eb343ebdb3..2078359cbc 100644 --- a/sdk/android/src/jni/video_track.cc +++ b/sdk/android/src/jni/video_track.cc @@ -44,5 +44,16 @@ static void JNI_VideoTrack_FreeSink(JNIEnv* jni, jlong j_native_sink) { delete reinterpret_cast*>(j_native_sink); } +static void JNI_VideoTrack_SetShouldReceive(JNIEnv* jni, + jlong j_native_track, + jboolean should_receive) { + reinterpret_cast(j_native_track)->set_should_receive(should_receive); +} + +static jboolean JNI_VideoTrack_GetShouldReceive(JNIEnv* jni, + jlong j_native_track) { + return reinterpret_cast(j_native_track)->should_receive(); +} + } // namespace jni } // namespace webrtc diff --git a/sdk/android/src/jni/vp9_codec.cc b/sdk/android/src/jni/vp9_codec.cc index dcce5bb5e8..9131b8306d 100644 --- a/sdk/android/src/jni/vp9_codec.cc +++ 
b/sdk/android/src/jni/vp9_codec.cc @@ -11,11 +11,18 @@ #include #include "api/environment/environment.h" +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/svc/create_scalability_structure.h" + #include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Decoder_jni.h" #include "sdk/android/generated_libvpx_vp9_jni/LibvpxVp9Encoder_jni.h" #include "sdk/android/src/jni/jni_helpers.h" +#include +#include + namespace webrtc { namespace jni { @@ -37,5 +44,14 @@ static jboolean JNI_LibvpxVp9Decoder_IsSupported(JNIEnv* jni) { return !SupportedVP9Codecs().empty(); } +static webrtc::ScopedJavaLocalRef JNI_LibvpxVp9Encoder_GetSupportedScalabilityModes(JNIEnv* jni) { + std::vector modes; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + modes.push_back(std::string(webrtc::ScalabilityModeToString(scalability_mode))); + } + } + return NativeToJavaStringArray(jni, modes); +} } // namespace jni } // namespace webrtc diff --git a/sdk/objc/PrivacyInfo.xcprivacy b/sdk/objc/PrivacyInfo.xcprivacy new file mode 100644 index 0000000000..7204a67c33 --- /dev/null +++ b/sdk/objc/PrivacyInfo.xcprivacy @@ -0,0 +1,28 @@ + + + + + NSPrivacyCollectedDataTypes + + NSPrivacyAccessedAPITypes + + + NSPrivacyAccessedAPIType + NSPrivacyAccessedAPICategoryFileTimestamp + NSPrivacyAccessedAPITypeReasons + + C617.1 + + + + NSPrivacyAccessedAPIType + NSPrivacyAccessedAPICategorySystemBootTime + NSPrivacyAccessedAPITypeReasons + + 35F9.1 + 8FFB.1 + + + + + \ No newline at end of file diff --git a/sdk/objc/api/RTCVideoRendererAdapter+Private.h b/sdk/objc/api/RTCVideoRendererAdapter+Private.h index 9b123d2d05..cac9ab665c 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter+Private.h +++ b/sdk/objc/api/RTCVideoRendererAdapter+Private.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface 
RTCVideoRendererAdapter () +@interface RTC_OBJC_TYPE(RTCVideoRendererAdapter) () /** * The Objective-C video renderer passed to this adapter during construction. diff --git a/sdk/objc/api/RTCVideoRendererAdapter.h b/sdk/objc/api/RTCVideoRendererAdapter.h index b0b6f04488..bbb8c6e71c 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter.h +++ b/sdk/objc/api/RTCVideoRendererAdapter.h @@ -10,6 +10,8 @@ #import +#import "RTCMacros.h" + NS_ASSUME_NONNULL_BEGIN /* @@ -18,7 +20,7 @@ NS_ASSUME_NONNULL_BEGIN * adapter adapts calls made to that interface to the RTCVideoRenderer supplied * during construction. */ -@interface RTCVideoRendererAdapter : NSObject +@interface RTC_OBJC_TYPE (RTCVideoRendererAdapter): NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/RTCVideoRendererAdapter.mm b/sdk/objc/api/RTCVideoRendererAdapter.mm index ef02f72f60..d992c64108 100644 --- a/sdk/objc/api/RTCVideoRendererAdapter.mm +++ b/sdk/objc/api/RTCVideoRendererAdapter.mm @@ -17,10 +17,9 @@ namespace webrtc { -class VideoRendererAdapter - : public rtc::VideoSinkInterface { +class VideoRendererAdapter : public rtc::VideoSinkInterface { public: - VideoRendererAdapter(RTCVideoRendererAdapter* adapter) { + VideoRendererAdapter(RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter) { adapter_ = adapter; size_ = CGSizeZero; } @@ -28,9 +27,9 @@ void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override { RTC_OBJC_TYPE(RTCVideoFrame)* videoFrame = NativeToObjCVideoFrame(nativeVideoFrame); - CGSize current_size = (videoFrame.rotation % 180 == 0) - ? CGSizeMake(videoFrame.width, videoFrame.height) - : CGSizeMake(videoFrame.height, videoFrame.width); + CGSize current_size = (videoFrame.rotation % 180 == 0) ? 
+ CGSizeMake(videoFrame.width, videoFrame.height) : + CGSizeMake(videoFrame.height, videoFrame.width); if (!CGSizeEqualToSize(size_, current_size)) { size_ = current_size; @@ -40,12 +39,12 @@ void OnFrame(const webrtc::VideoFrame& nativeVideoFrame) override { } private: - __weak RTCVideoRendererAdapter *adapter_; + __weak RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter_; CGSize size_; }; -} +} // namespace webrtc -@implementation RTCVideoRendererAdapter { +@implementation RTC_OBJC_TYPE (RTCVideoRendererAdapter) { std::unique_ptr _adapter; } @@ -60,7 +59,7 @@ - (instancetype)initWithNativeRenderer:(id)vide return self; } -- (rtc::VideoSinkInterface *)nativeVideoRenderer { +- (rtc::VideoSinkInterface*)nativeVideoRenderer { return _adapter.get(); } diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h b/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h new file mode 100644 index 0000000000..73c1a4e26a --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule+Private.h @@ -0,0 +1,31 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCAudioDeviceModule.h" +#import "sdk/objc/native/api/audio_device_module.h" + +#include "rtc_base/thread.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCAudioDeviceModule) () + +- (instancetype)initWithNativeModule:(rtc::scoped_refptr )module + workerThread:(rtc::Thread *)workerThread; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h new file mode 100644 index 0000000000..b02cecfd0b --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.h @@ -0,0 +1,56 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import + +#import "RTCMacros.h" +#import "RTCIODevice.h" + +NS_ASSUME_NONNULL_BEGIN + +typedef void (^RTCOnAudioDevicesDidUpdate)(); + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCAudioDeviceModule) : NSObject + +@property(nonatomic, readonly) NSArray *outputDevices; +@property(nonatomic, readonly) NSArray *inputDevices; + +@property(nonatomic, readonly) BOOL playing; +@property(nonatomic, readonly) BOOL recording; + +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCIODevice) *outputDevice; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCIODevice) *inputDevice; + +// Executes low-level API's in sequence to switch the device +// Use outputDevice / inputDevice property unless you need to know if setting the device is +// successful. 
+- (BOOL)trySetOutputDevice:(nullable RTC_OBJC_TYPE(RTCIODevice) *)device; +- (BOOL)trySetInputDevice:(nullable RTC_OBJC_TYPE(RTCIODevice) *)device; + +- (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler; + +- (BOOL)startPlayout; +- (BOOL)stopPlayout; +- (BOOL)initPlayout; +- (BOOL)startRecording; +- (BOOL)stopRecording; +- (BOOL)initRecording; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm new file mode 100644 index 0000000000..c88de392d7 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCAudioDeviceModule.mm @@ -0,0 +1,294 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#import "RTCAudioDeviceModule.h" +#import "RTCAudioDeviceModule+Private.h" +#import "RTCIODevice+Private.h" +#import "base/RTCLogging.h" + +#import "sdk/objc/native/api/audio_device_module.h" + +class AudioDeviceSink : public webrtc::AudioDeviceSink { + public: + AudioDeviceSink() {} + + void OnDevicesUpdated() override { + + RTCLogInfo(@"AudioDeviceSink OnDevicesUpdated"); + + if (callback_handler_) { + callback_handler_(); + } + } + + // private: + RTCOnAudioDevicesDidUpdate callback_handler_; +}; + +@implementation RTC_OBJC_TYPE (RTCAudioDeviceModule) { + rtc::Thread *_workerThread; + rtc::scoped_refptr _native; + AudioDeviceSink *_sink; +} + +- (instancetype)initWithNativeModule:(rtc::scoped_refptr )module + workerThread:(rtc::Thread * )workerThread { + + RTCLogInfo(@"RTCAudioDeviceModule initWithNativeModule:workerThread:"); + + self = [super init]; + _native = module; + _workerThread = workerThread; + + _sink = new AudioDeviceSink(); + + _workerThread->BlockingCall([self] { + _native->SetAudioDeviceSink(_sink); + }); + + return self; +} + +- (NSArray *)outputDevices { + + return _workerThread->BlockingCall([self] { + return [self _outputDevices]; + }); +} + +- (NSArray *)inputDevices { + return _workerThread->BlockingCall([self] { + return [self _inputDevices]; + }); +} + +- (RTC_OBJC_TYPE(RTCIODevice) *)outputDevice { + return _workerThread->BlockingCall([self] { + + NSArray *devices = [self _outputDevices]; + int16_t devicesCount = (int16_t)([devices count]); + int16_t index = _native->GetPlayoutDevice(); + + if (devicesCount == 0 || index <= -1 || index > (devicesCount - 1)) { + return (RTC_OBJC_TYPE(RTCIODevice) *)nil; + } + + return (RTC_OBJC_TYPE(RTCIODevice) *)[devices objectAtIndex:index]; + }); +} + +- (void)setOutputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { + [self trySetOutputDevice: device]; +} + +- (BOOL)trySetOutputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { + + return _workerThread->BlockingCall([self, device] { + + 
NSUInteger index = 0; + NSArray *devices = [self _outputDevices]; + + if ([devices count] == 0) { + return NO; + } + + if (device != nil) { + index = [devices indexOfObjectPassingTest:^BOOL(RTC_OBJC_TYPE(RTCIODevice) *e, NSUInteger i, BOOL *stop) { + return (*stop = [e.deviceId isEqualToString:device.deviceId]); + }]; + if (index == NSNotFound) { + return NO; + } + } + + _native->StopPlayout(); + + if (_native->SetPlayoutDevice(index) == 0 + && _native->InitPlayout() == 0 + && _native->StartPlayout() == 0) { + + return YES; + } + + return NO; + }); +} + +- (RTC_OBJC_TYPE(RTCIODevice) *)inputDevice { + + return _workerThread->BlockingCall([self] { + + NSArray *devices = [self _inputDevices]; + int16_t devicesCount = (int16_t)([devices count]); + int16_t index = _native->GetRecordingDevice(); + + if (devicesCount == 0 || index <= -1 || index > (devicesCount - 1)) { + return (RTC_OBJC_TYPE(RTCIODevice) *)nil; + } + + return (RTC_OBJC_TYPE(RTCIODevice) *)[devices objectAtIndex:index]; + }); +} + +- (void)setInputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { + [self trySetInputDevice: device]; +} + +- (BOOL)trySetInputDevice: (RTC_OBJC_TYPE(RTCIODevice) *)device { + + return _workerThread->BlockingCall([self, device] { + + NSUInteger index = 0; + NSArray *devices = [self _inputDevices]; + + if ([devices count] == 0) { + return NO; + } + + if (device != nil) { + index = [devices indexOfObjectPassingTest:^BOOL(RTC_OBJC_TYPE(RTCIODevice) *e, NSUInteger i, BOOL *stop) { + return (*stop = [e.deviceId isEqualToString:device.deviceId]); + }]; + if (index == NSNotFound) { + return NO; + } + } + + _native->StopRecording(); + + if (_native->SetRecordingDevice(index) == 0 + && _native->InitRecording() == 0 + && _native->StartRecording() == 0) { + + return YES; + } + + return NO; + }); +} + +- (BOOL)playing { + + return _workerThread->BlockingCall([self] { + return _native->Playing(); + }); +} + +- (BOOL)recording { + + return _workerThread->BlockingCall([self] { + return 
_native->Recording(); + }); +} + +#pragma mark - Low-level access + +- (BOOL)startPlayout { + + return _workerThread->BlockingCall([self] { + return _native->StartPlayout() == 0; + }); +} + +- (BOOL)stopPlayout { + + return _workerThread->BlockingCall([self] { + return _native->StopPlayout() == 0; + }); +} + +- (BOOL)initPlayout { + + return _workerThread->BlockingCall([self] { + return _native->InitPlayout() == 0; + }); +} + +- (BOOL)startRecording { + + return _workerThread->BlockingCall([self] { + return _native->StartRecording() == 0; + }); +} + +- (BOOL)stopRecording { + + return _workerThread->BlockingCall([self] { + return _native->StopRecording() == 0; + }); +} + +- (BOOL)initRecording { + + return _workerThread->BlockingCall([self] { + return _native->InitRecording() == 0; + }); +} + +- (BOOL)setDevicesUpdatedHandler: (nullable RTCOnAudioDevicesDidUpdate) handler { + _sink->callback_handler_ = handler; + return YES; +} + +#pragma mark - Private + +- (NSArray *)_outputDevices { + + char guid[webrtc::kAdmMaxGuidSize + 1] = {0}; + char name[webrtc::kAdmMaxDeviceNameSize + 1] = {0}; + + NSMutableArray *result = [NSMutableArray array]; + + int16_t count = _native->PlayoutDevices(); + + if (count > 0) { + for (int i = 0; i < count; i++) { + _native->PlayoutDeviceName(i, name, guid); + NSString *strGUID = [[NSString alloc] initWithCString:guid encoding:NSUTF8StringEncoding]; + NSString *strName = [[NSString alloc] initWithCString:name encoding:NSUTF8StringEncoding]; + RTC_OBJC_TYPE(RTCIODevice) *device = [[RTC_OBJC_TYPE(RTCIODevice) alloc] initWithType:RTCIODeviceTypeOutput deviceId:strGUID name:strName]; + [result addObject: device]; + } + } + + return result; +} + +- (NSArray *)_inputDevices { + + char guid[webrtc::kAdmMaxGuidSize + 1] = {0}; + char name[webrtc::kAdmMaxDeviceNameSize + 1] = {0}; + + NSMutableArray *result = [NSMutableArray array]; + + int16_t count = _native->RecordingDevices(); + + if (count > 0) { + for (int i = 0; i < count; i++) { + 
_native->RecordingDeviceName(i, name, guid); + NSString *strGUID = [[NSString alloc] initWithCString:guid encoding:NSUTF8StringEncoding]; + NSString *strName = [[NSString alloc] initWithCString:name encoding:NSUTF8StringEncoding]; + RTC_OBJC_TYPE(RTCIODevice) *device = [[RTC_OBJC_TYPE(RTCIODevice) alloc] initWithType:RTCIODeviceTypeInput deviceId:strGUID name:strName]; + [result addObject: device]; + } + } + + return result; +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h index 6495500484..38c0bd3b1b 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h @@ -8,6 +8,7 @@ * be found in the AUTHORS file in the root of the source tree. */ +#import #import "RTCAudioTrack.h" #include "api/media_stream_interface.h" @@ -15,17 +16,18 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTC_OBJC_TYPE (RTCAudioTrack) -() +@interface RTC_OBJC_TYPE (RTCAudioTrack) () - /** AudioTrackInterface created or passed in at construction. */ - @property(nonatomic, readonly) rtc::scoped_refptr nativeAudioTrack; +/** AudioTrackInterface created or passed in at construction. */ +@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioTrack; /** Initialize an RTCAudioTrack with an id. 
*/ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory source:(RTC_OBJC_TYPE(RTCAudioSource) *)source trackId:(NSString *)trackId; +- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.h b/sdk/objc/api/peerconnection/RTCAudioTrack.h index 95eb5d3d48..c8218ad926 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.h @@ -13,6 +13,7 @@ NS_ASSUME_NONNULL_BEGIN +@protocol RTC_OBJC_TYPE (RTCAudioRenderer); @class RTC_OBJC_TYPE(RTCAudioSource); RTC_OBJC_EXPORT @@ -23,6 +24,13 @@ RTC_OBJC_EXPORT /** The audio source for this audio track. */ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioSource) * source; +/** Register a renderer that will receive all audio CMSampleBuffers on this track. + * Does not retain. */ +- (void)addRenderer:(id)renderer; + +/** Deregister a renderer */ +- (void)removeRenderer:(id)renderer; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index 5c1736f436..d79972eae1 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -8,8 +8,12 @@ * be found in the AUTHORS file in the root of the source tree. 
*/ +#import +#import + #import "RTCAudioTrack+Private.h" +#import "RTCAudioRenderer.h" #import "RTCAudioSource+Private.h" #import "RTCMediaStreamTrack+Private.h" #import "RTCPeerConnectionFactory+Private.h" @@ -17,7 +21,170 @@ #include "rtc_base/checks.h" -@implementation RTC_OBJC_TYPE (RTCAudioTrack) +namespace webrtc { +/** + * Captures audio data and converts to CMSampleBuffers + */ +class AudioSinkConverter : public rtc::RefCountInterface, public webrtc::AudioTrackSinkInterface { + private: + os_unfair_lock *lock_; + __weak RTC_OBJC_TYPE(RTCAudioTrack) *audio_track_; + int64_t total_frames_ = 0; + bool attached_ = false; + + public: + AudioSinkConverter(RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack, os_unfair_lock *lock) { + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter init"; + audio_track_ = audioTrack; + lock_ = lock; + } + + ~AudioSinkConverter() { + // + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter dealloc"; + } + + // Must be called while locked + void TryAttach() { + if (attached_) { + // Already attached + return; + } + RTC_LOG(LS_INFO) << "RTCAudioTrack attaching sink..."; + // Reset for creating CMSampleTimingInfo correctly + audio_track_.nativeAudioTrack->AddSink(this); + total_frames_ = 0; + attached_ = true; + } + + // Must be called while locked + void TryDetach() { + if (!attached_) { + // Already detached + return; + } + RTC_LOG(LS_INFO) << "RTCAudioTrack detaching sink..."; + audio_track_.nativeAudioTrack->RemoveSink(this); + attached_ = false; + } + + void OnData(const void *audio_data, + int bits_per_sample, + int sample_rate, + size_t number_of_channels, + size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) override { + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter OnData bits_per_sample: " + << bits_per_sample << " sample_rate: " << sample_rate + << " number_of_channels: " << number_of_channels + << " number_of_frames: " << number_of_frames + << " absolute_capture_timestamp_ms: " + << 
(absolute_capture_timestamp_ms ? absolute_capture_timestamp_ms.value() : 0); + + bool is_locked = os_unfair_lock_trylock(lock_); + if (!is_locked) { + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter OnData already locked, skipping..."; + return; + } + bool is_attached = attached_; + os_unfair_lock_unlock(lock_); + + if (!is_attached) { + RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter OnData already detached, skipping..."; + return; + } + + /* + * Convert to CMSampleBuffer + */ + + if (!(number_of_channels == 1 || number_of_channels == 2)) { + NSLog(@"RTCAudioTrack: Only mono or stereo is supported currently. numberOfChannels: %zu", + number_of_channels); + return; + } + + OSStatus status; + + AudioChannelLayout acl; + bzero(&acl, sizeof(acl)); + acl.mChannelLayoutTag = + number_of_channels == 2 ? kAudioChannelLayoutTag_Stereo : kAudioChannelLayoutTag_Mono; + + AudioStreamBasicDescription sd; + sd.mSampleRate = sample_rate; + sd.mFormatID = kAudioFormatLinearPCM; + sd.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked; + sd.mFramesPerPacket = 1; + sd.mChannelsPerFrame = number_of_channels; + sd.mBitsPerChannel = bits_per_sample; /* 16 */ + sd.mBytesPerFrame = sd.mChannelsPerFrame * (sd.mBitsPerChannel / 8); + sd.mBytesPerPacket = sd.mBytesPerFrame; + + CMSampleTimingInfo timing = { + CMTimeMake(1, sample_rate), + CMTimeMake(total_frames_, sample_rate), + kCMTimeInvalid, + }; + + total_frames_ += number_of_frames; // update the total + + CMFormatDescriptionRef format = NULL; + status = CMAudioFormatDescriptionCreate( + kCFAllocatorDefault, &sd, sizeof(acl), &acl, 0, NULL, NULL, &format); + + if (status != 0) { + NSLog(@"RTCAudioTrack: Failed to create audio format description"); + return; + } + + CMSampleBufferRef buffer; + status = CMSampleBufferCreate(kCFAllocatorDefault, + NULL, + false, + NULL, + NULL, + format, + (CMItemCount)number_of_frames, + 1, + &timing, + 0, + NULL, + &buffer); + // format is no longer required + 
CFRelease(format); + + if (status != 0) { + NSLog(@"RTCAudioTrack: Failed to allocate sample buffer"); + return; + } + + AudioBufferList bufferList; + bufferList.mNumberBuffers = 1; + bufferList.mBuffers[0].mNumberChannels = sd.mChannelsPerFrame; + bufferList.mBuffers[0].mDataByteSize = (UInt32)(number_of_frames * sd.mBytesPerFrame); + bufferList.mBuffers[0].mData = (void *)audio_data; + status = CMSampleBufferSetDataBufferFromAudioBufferList( + buffer, kCFAllocatorDefault, kCFAllocatorDefault, 0, &bufferList); + if (status != 0) { + NSLog(@"RTCAudioTrack: Failed to convert audio buffer list into sample buffer"); + return; + } + + // Report back to RTCAudioTrack + [audio_track_ didCaptureSampleBuffer:buffer]; + + CFRelease(buffer); + } +}; +} // namespace webrtc + +@implementation RTC_OBJC_TYPE (RTCAudioTrack) { + rtc::scoped_refptr _audioConverter; + // Stores weak references to renderers + NSHashTable *_renderers; + os_unfair_lock _lock; +} @synthesize source = _source; @@ -43,7 +210,21 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto NSParameterAssert(factory); NSParameterAssert(nativeTrack); NSParameterAssert(type == RTCMediaStreamTrackTypeAudio); - return [super initWithFactory:factory nativeTrack:nativeTrack type:type]; + if (self = [super initWithFactory:factory nativeTrack:nativeTrack type:type]) { + RTC_LOG(LS_INFO) << "RTCAudioTrack init"; + _renderers = [NSHashTable weakObjectsHashTable]; + _audioConverter = new rtc::RefCountedObject(self, &_lock); + } + + return self; +} + +- (void)dealloc { + os_unfair_lock_lock(&_lock); + _audioConverter->TryDetach(); + os_unfair_lock_unlock(&_lock); + + RTC_LOG(LS_INFO) << "RTCAudioTrack dealloc"; } - (RTC_OBJC_TYPE(RTCAudioSource) *)source { @@ -57,6 +238,25 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto return _source; } +- (void)addRenderer:(id)renderer { + os_unfair_lock_lock(&_lock); + [_renderers addObject:renderer]; + 
_audioConverter->TryAttach(); + os_unfair_lock_unlock(&_lock); +} + +- (void)removeRenderer:(id)renderer { + os_unfair_lock_lock(&_lock); + [_renderers removeObject:renderer]; + NSUInteger renderersCount = _renderers.allObjects.count; + + if (renderersCount == 0) { + // Detach if no more renderers... + _audioConverter->TryDetach(); + } + os_unfair_lock_unlock(&_lock); +} + #pragma mark - Private - (rtc::scoped_refptr)nativeAudioTrack { @@ -64,4 +264,18 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto static_cast(self.nativeTrack.get())); } +- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer { + bool is_locked = os_unfair_lock_trylock(&_lock); + if (!is_locked) { + RTC_LOG(LS_INFO) << "RTCAudioTrack didCaptureSampleBuffer already locked, skipping..."; + return; + } + NSArray *renderers = [_renderers allObjects]; + os_unfair_lock_unlock(&_lock); + + for (id renderer in renderers) { + [renderer renderSampleBuffer:sampleBuffer]; + } +} + @end diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.h b/sdk/objc/api/peerconnection/RTCConfiguration.h index 011eaa613d..b643f32f2a 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration.h @@ -255,6 +255,17 @@ RTC_OBJC_EXPORT */ @property(nonatomic, copy, nullable) NSNumber *iceInactiveTimeout; +/** + * When this flag is set, ports not bound to any specific network interface + * will be used, in addition to normal ports bound to the enumerated + * interfaces. Without this flag, these "any address" ports would only be + * used when network enumeration fails or is disabled. But under certain + * conditions, these ports may succeed where others fail, so they may allow + * the application to work in a wider variety of environments, at the expense + * of having to allocate additional candidates. 
+ */ +@property(nonatomic, assign) BOOL enableIceGatheringOnAnyAddressPorts; + - (instancetype)init; @end diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.mm b/sdk/objc/api/peerconnection/RTCConfiguration.mm index 86ecbabf8d..70bbc5d370 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.mm +++ b/sdk/objc/api/peerconnection/RTCConfiguration.mm @@ -62,6 +62,7 @@ @implementation RTC_OBJC_TYPE (RTCConfiguration) @synthesize iceUnwritableTimeout = _iceUnwritableTimeout; @synthesize iceUnwritableMinChecks = _iceUnwritableMinChecks; @synthesize iceInactiveTimeout = _iceInactiveTimeout; +@synthesize enableIceGatheringOnAnyAddressPorts = _enableIceGatheringOnAnyAddressPorts; - (instancetype)init { // Copy defaults. @@ -156,6 +157,7 @@ - (instancetype)initWithNativeConfiguration: _iceInactiveTimeout = config.ice_inactive_timeout.has_value() ? [NSNumber numberWithInt:*config.ice_inactive_timeout] : nil; + _enableIceGatheringOnAnyAddressPorts = config.enable_any_address_ports; } return self; } @@ -303,6 +305,7 @@ - (NSString *)description { if (_iceInactiveTimeout != nil) { nativeConfig->ice_inactive_timeout = absl::optional(_iceInactiveTimeout.intValue); } + nativeConfig->enable_any_address_ports = _enableIceGatheringOnAnyAddressPorts; return nativeConfig.release(); } diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm index c8936d3ad5..ac9946e55d 100644 --- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm +++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm @@ -34,16 +34,16 @@ explicit ObjCEncodedImageBuffer(NSData *data) : data_(data) {} NSData *data_; }; -} +} // namespace // A simple wrapper around webrtc::EncodedImageBufferInterface to make it usable with associated // objects. 
-@interface RTCWrappedEncodedImageBuffer : NSObject +@interface RTC_OBJC_TYPE (RTCWrappedEncodedImageBuffer): NSObject @property(nonatomic) rtc::scoped_refptr buffer; - (instancetype)initWithEncodedImageBuffer: (rtc::scoped_refptr)buffer; @end -@implementation RTCWrappedEncodedImageBuffer +@implementation RTC_OBJC_TYPE (RTCWrappedEncodedImageBuffer) @synthesize buffer = _buffer; - (instancetype)initWithEncodedImageBuffer: (rtc::scoped_refptr)buffer { @@ -59,7 +59,7 @@ @implementation RTC_OBJC_TYPE (RTCEncodedImage) (Private) - (rtc::scoped_refptr)encodedData { - RTCWrappedEncodedImageBuffer *wrappedBuffer = + RTC_OBJC_TYPE(RTCWrappedEncodedImageBuffer) *wrappedBuffer = objc_getAssociatedObject(self, @selector(encodedData)); return wrappedBuffer.buffer; } @@ -68,7 +68,7 @@ - (void)setEncodedData:(rtc::scoped_refptr) return objc_setAssociatedObject( self, @selector(encodedData), - [[RTCWrappedEncodedImageBuffer alloc] initWithEncodedImageBuffer:buffer], + [[RTC_OBJC_TYPE(RTCWrappedEncodedImageBuffer) alloc] initWithEncodedImageBuffer:buffer], OBJC_ASSOCIATION_RETAIN_NONATOMIC); } diff --git a/sdk/objc/api/peerconnection/RTCFieldTrials.mm b/sdk/objc/api/peerconnection/RTCFieldTrials.mm index 193da9e4f7..b5a2eca8f0 100644 --- a/sdk/objc/api/peerconnection/RTCFieldTrials.mm +++ b/sdk/objc/api/peerconnection/RTCFieldTrials.mm @@ -10,47 +10,58 @@ #import "RTCFieldTrials.h" +#import #include - #import "base/RTCLogging.h" #include "system_wrappers/include/field_trial.h" NSString *const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC"; -NSString * const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised"; -NSString * const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03"; -NSString * const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; -NSString * const kRTCFieldTrialMinimizeResamplingOnMobileKey = +NSString *const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised"; +NSString *const 
kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03"; +NSString *const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; +NSString *const kRTCFieldTrialMinimizeResamplingOnMobileKey = @"WebRTC-Audio-MinimizeResamplingOnMobile"; NSString *const kRTCFieldTrialUseNWPathMonitor = @"WebRTC-Network-UseNWPathMonitor"; -NSString * const kRTCFieldTrialEnabledValue = @"Enabled"; +NSString *const kRTCFieldTrialEnabledValue = @"Enabled"; // InitFieldTrialsFromString stores the char*, so the char array must outlive // the application. static char *gFieldTrialInitString = nullptr; +static os_unfair_lock fieldTrialLock = OS_UNFAIR_LOCK_INIT; void RTCInitFieldTrialDictionary(NSDictionary *fieldTrials) { if (!fieldTrials) { RTCLogWarning(@"No fieldTrials provided."); return; } + // Assemble the keys and values into the field trial string. - // We don't perform any extra format checking. That should be done by the underlying WebRTC calls. NSMutableString *fieldTrialInitString = [NSMutableString string]; for (NSString *key in fieldTrials) { NSString *fieldTrialEntry = [NSString stringWithFormat:@"%@/%@/", key, fieldTrials[key]]; [fieldTrialInitString appendString:fieldTrialEntry]; } + size_t len = fieldTrialInitString.length + 1; + + // Locking before modifying global variable + os_unfair_lock_lock(&fieldTrialLock); if (gFieldTrialInitString != nullptr) { delete[] gFieldTrialInitString; + gFieldTrialInitString = nullptr; } + gFieldTrialInitString = new char[len]; - if (![fieldTrialInitString getCString:gFieldTrialInitString - maxLength:len - encoding:NSUTF8StringEncoding]) { + bool success = [fieldTrialInitString getCString:gFieldTrialInitString + maxLength:len + encoding:NSUTF8StringEncoding]; + if (!success) { RTCLogError(@"Failed to convert field trial string."); + os_unfair_lock_unlock(&fieldTrialLock); return; } + webrtc::field_trial::InitFieldTrialsFromString(gFieldTrialInitString); + os_unfair_lock_unlock(&fieldTrialLock); } diff --git 
a/sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h b/sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h new file mode 100644 index 0000000000..86e6fdff8c --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor+Private.h @@ -0,0 +1,45 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCFrameCryptor.h" + +#include +#include "api/crypto/frame_crypto_transformer.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCFrameCryptor) +() + + @end + +namespace webrtc { + +class RTCFrameCryptorDelegateAdapter : public FrameCryptorTransformerObserver { + public: + RTCFrameCryptorDelegateAdapter(RTC_OBJC_TYPE(RTCFrameCryptor) * frameCryptor); + ~RTCFrameCryptorDelegateAdapter() override; + + void OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState state) override; + + private: + __weak RTC_OBJC_TYPE(RTCFrameCryptor) * frame_cryptor_; +}; + +} // namespace webrtc + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.h b/sdk/objc/api/peerconnection/RTCFrameCryptor.h new file mode 100644 index 0000000000..864e55be95 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.h @@ -0,0 +1,78 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCRtpSender); +@class RTC_OBJC_TYPE(RTCRtpReceiver); +@class RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider); +@class RTC_OBJC_TYPE(RTCFrameCryptor); +@class RTC_OBJC_TYPE(RTCPeerConnectionFactory); + +typedef NS_ENUM(NSUInteger, RTCCryptorAlgorithm) { + RTCCryptorAlgorithmAesGcm = 0, + RTCCryptorAlgorithmAesCbc, +}; + +typedef NS_ENUM(NSInteger, FrameCryptionState) { + FrameCryptionStateNew = 0, + FrameCryptionStateOk, + FrameCryptionStateEncryptionFailed, + FrameCryptionStateDecryptionFailed, + FrameCryptionStateMissingKey, + FrameCryptionStateKeyRatcheted, + FrameCryptionStateInternalError, +}; + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(RTCFrameCryptorDelegate) + /** Called when the RTCFrameCryptor got errors. 
*/ + - (void)frameCryptor + : (RTC_OBJC_TYPE(RTCFrameCryptor) *)frameCryptor didStateChangeWithParticipantId + : (NSString *)participantId withState : (FrameCryptionState)stateChanged; +@end + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCFrameCryptor) : NSObject + +@property(nonatomic, assign) BOOL enabled; + +@property(nonatomic, assign) int keyIndex; + +@property(nonatomic, readonly) NSString *participantId; + +@property(nonatomic, weak, nullable) id delegate; + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender + participantId:(NSString *)participantId + algorithm:(RTCCryptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver + participantId:(NSString *)participantId + algorithm:(RTCCryptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm new file mode 100644 index 0000000000..c51b77e9c5 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm @@ -0,0 +1,224 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCFrameCryptor+Private.h" +#import "RTCFrameCryptorKeyProvider+Private.h" +#import "RTCPeerConnectionFactory+Private.h" +#import "RTCRtpReceiver+Private.h" +#import "RTCRtpSender+Private.h" + +#import +#include + +#import "base/RTCLogging.h" +#import "helpers/NSString+StdString.h" + +#include "api/crypto/frame_crypto_transformer.h" +#include "api/rtp_receiver_interface.h" +#include "api/rtp_sender_interface.h" + +namespace webrtc { + +RTCFrameCryptorDelegateAdapter::RTCFrameCryptorDelegateAdapter(RTC_OBJC_TYPE(RTCFrameCryptor) * + frameCryptor) + : frame_cryptor_(frameCryptor) {} + +RTCFrameCryptorDelegateAdapter::~RTCFrameCryptorDelegateAdapter() {} + +/* + kNew = 0, + kOk, + kEncryptionFailed, + kDecryptionFailed, + kMissingKey, + kInternalError, +*/ +void RTCFrameCryptorDelegateAdapter::OnFrameCryptionStateChanged(const std::string participant_id, + FrameCryptionState state) { + RTC_OBJC_TYPE(RTCFrameCryptor) *frameCryptor = frame_cryptor_; + if (frameCryptor.delegate) { + switch (state) { + case FrameCryptionState::kNew: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateNew]; + break; + case FrameCryptionState::kOk: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateOk]; + break; + case FrameCryptionState::kEncryptionFailed: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateEncryptionFailed]; + break; + case FrameCryptionState::kDecryptionFailed: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateDecryptionFailed]; + break; + case FrameCryptionState::kMissingKey: + [frameCryptor.delegate frameCryptor:frameCryptor 
+ didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateMissingKey]; + break; + case FrameCryptionState::kKeyRatcheted: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateKeyRatcheted]; + break; + case FrameCryptionState::kInternalError: + [frameCryptor.delegate frameCryptor:frameCryptor + didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] + withState:FrameCryptionStateInternalError]; + break; + } + } +} +} // namespace webrtc + +@implementation RTC_OBJC_TYPE (RTCFrameCryptor) { + const webrtc::RtpSenderInterface *_sender; + const webrtc::RtpReceiverInterface *_receiver; + rtc::scoped_refptr _frame_crypto_transformer; + rtc::scoped_refptr _observer; + os_unfair_lock _lock; +} + +@synthesize participantId = _participantId; +@synthesize delegate = _delegate; + +- (webrtc::FrameCryptorTransformer::Algorithm)algorithmFromEnum:(RTCCryptorAlgorithm)algorithm { + switch (algorithm) { + case RTCCryptorAlgorithmAesGcm: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + case RTCCryptorAlgorithmAesCbc: + return webrtc::FrameCryptorTransformer::Algorithm::kAesCbc; + default: + return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; + } +} + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender + participantId:(NSString *)participantId + algorithm:(RTCCryptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { + if (self = [super init]) { + _lock = OS_UNFAIR_LOCK_INIT; + + rtc::scoped_refptr nativeRtpSender = sender.nativeRtpSender; + if (nativeRtpSender == nullptr) return nil; + + rtc::scoped_refptr nativeTrack = nativeRtpSender->track(); + if (nativeTrack == nullptr) return nil; + + _observer = rtc::make_ref_counted(self); + _participantId = 
participantId; + + webrtc::FrameCryptorTransformer::MediaType mediaType = + nativeTrack->kind() == "audio" ? webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + + _frame_crypto_transformer = + rtc::scoped_refptr(new webrtc::FrameCryptorTransformer( + factory.signalingThread, [participantId stdString], mediaType, + [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); + + nativeRtpSender->SetEncoderToPacketizerFrameTransformer(_frame_crypto_transformer); + _frame_crypto_transformer->SetEnabled(false); + _frame_crypto_transformer->RegisterFrameCryptorTransformerObserver(_observer); + } + + return self; +} + +- (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver + participantId:(NSString *)participantId + algorithm:(RTCCryptorAlgorithm)algorithm + keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { + if (self = [super init]) { + _lock = OS_UNFAIR_LOCK_INIT; + + rtc::scoped_refptr nativeRtpReceiver = receiver.nativeRtpReceiver; + if (nativeRtpReceiver == nullptr) return nil; + + rtc::scoped_refptr nativeTrack = nativeRtpReceiver->track(); + if (nativeTrack == nullptr) return nil; + + _observer = rtc::make_ref_counted(self); + _participantId = participantId; + + webrtc::FrameCryptorTransformer::MediaType mediaType = + nativeTrack->kind() == "audio" ? 
webrtc::FrameCryptorTransformer::MediaType::kAudioFrame + : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + + _frame_crypto_transformer = + rtc::scoped_refptr(new webrtc::FrameCryptorTransformer( + factory.signalingThread, [participantId stdString], mediaType, + [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); + + nativeRtpReceiver->SetDepacketizerToDecoderFrameTransformer(_frame_crypto_transformer); + _frame_crypto_transformer->SetEnabled(false); + _frame_crypto_transformer->RegisterFrameCryptorTransformerObserver(_observer); + } + + return self; +} + +- (void)dealloc { + os_unfair_lock_lock(&_lock); + if (_frame_crypto_transformer != nullptr) { + _frame_crypto_transformer->UnRegisterFrameCryptorTransformerObserver(); + _frame_crypto_transformer = nullptr; + } + _observer = nullptr; + os_unfair_lock_unlock(&_lock); +} + +- (BOOL)enabled { + os_unfair_lock_lock(&_lock); + BOOL result = _frame_crypto_transformer != nullptr ? _frame_crypto_transformer->enabled() : NO; + os_unfair_lock_unlock(&_lock); + return result; +} + +- (void)setEnabled:(BOOL)enabled { + os_unfair_lock_lock(&_lock); + if (_frame_crypto_transformer != nullptr) { + _frame_crypto_transformer->SetEnabled(enabled); + } + os_unfair_lock_unlock(&_lock); +} + +- (int)keyIndex { + os_unfair_lock_lock(&_lock); + int result = _frame_crypto_transformer != nullptr ? 
_frame_crypto_transformer->key_index() : 0; + os_unfair_lock_unlock(&_lock); + return result; +} + +- (void)setKeyIndex:(int)keyIndex { + os_unfair_lock_lock(&_lock); + if (_frame_crypto_transformer != nullptr) { + _frame_crypto_transformer->SetKeyIndex(keyIndex); + } + os_unfair_lock_unlock(&_lock); +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h new file mode 100644 index 0000000000..eb7c83e2e7 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider+Private.h @@ -0,0 +1,31 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCFrameCryptorKeyProvider.h" + +#include "api/crypto/frame_crypto_transformer.h" +#include "rtc_base/ref_count.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) +() + + @property(nonatomic, readonly) rtc::scoped_refptr nativeKeyProvider; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h new file mode 100644 index 0000000000..6443b23349 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.h @@ -0,0 +1,62 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) : NSObject + +- (void)setSharedKey:(NSData *)key withIndex:(int)index; + +- (NSData *)ratchetSharedKey:(int)index; + +- (NSData *)exportSharedKey:(int)index; + +- (void)setKey:(NSData *)key withIndex:(int)index forParticipant:(NSString *)participantId; + +- (NSData *)ratchetKey:(NSString *)participantId withIndex:(int)index; + +- (NSData *)exportKey:(NSString *)participantId withIndex:(int)index; + +- (void)setSifTrailer:(NSData *)trailer; + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes; + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes + failureTolerance:(int)failureTolerance + keyRingSize:(int)keyRingSize; + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes + failureTolerance:(int)failureTolerance + keyRingSize:(int)keyRingSize + discardFrameWhenCryptorNotReady:(BOOL)discardFrameWhenCryptorNotReady; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm new file mode 100644 index 0000000000..88bebfcd9d --- 
/dev/null +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm @@ -0,0 +1,124 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCFrameCryptorKeyProvider+Private.h" + +#include +#include "api/crypto/frame_crypto_transformer.h" + +#import "base/RTCLogging.h" + +#import "helpers/NSString+StdString.h" + +@implementation RTC_OBJC_TYPE (RTCFrameCryptorKeyProvider) { + rtc::scoped_refptr _nativeKeyProvider; +} + +- (rtc::scoped_refptr)nativeKeyProvider { + return _nativeKeyProvider; +} + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(NSData *)uncryptedMagicBytes { + return [self initWithRatchetSalt:salt + ratchetWindowSize:windowSize + sharedKeyMode:sharedKey + uncryptedMagicBytes:uncryptedMagicBytes + failureTolerance:-1 + keyRingSize:webrtc::DEFAULT_KEYRING_SIZE]; +} + +- (instancetype)initWithRatchetSalt:(NSData *)salt + ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes + failureTolerance:(int)failureTolerance + keyRingSize:(int)keyRingSize { + return [self initWithRatchetSalt:salt + ratchetWindowSize:windowSize + sharedKeyMode:sharedKey + uncryptedMagicBytes:uncryptedMagicBytes + failureTolerance:failureTolerance + keyRingSize:keyRingSize + discardFrameWhenCryptorNotReady:false]; +} + +- (instancetype)initWithRatchetSalt:(NSData *)salt + 
ratchetWindowSize:(int)windowSize + sharedKeyMode:(BOOL)sharedKey + uncryptedMagicBytes:(nullable NSData *)uncryptedMagicBytes + failureTolerance:(int)failureTolerance + keyRingSize:(int)keyRingSize + discardFrameWhenCryptorNotReady:(BOOL)discardFrameWhenCryptorNotReady { + if (self = [super init]) { + webrtc::KeyProviderOptions options; + options.ratchet_salt = std::vector((const uint8_t *)salt.bytes, + ((const uint8_t *)salt.bytes) + salt.length); + options.ratchet_window_size = windowSize; + options.shared_key = sharedKey; + options.failure_tolerance = failureTolerance; + options.key_ring_size = keyRingSize; + options.discard_frame_when_cryptor_not_ready = discardFrameWhenCryptorNotReady; + if(uncryptedMagicBytes != nil) { + options.uncrypted_magic_bytes = std::vector((const uint8_t *)uncryptedMagicBytes.bytes, + ((const uint8_t *)uncryptedMagicBytes.bytes) + uncryptedMagicBytes.length); + } + _nativeKeyProvider = rtc::make_ref_counted(options); + } + return self; +} + +- (void)setKey:(NSData *)key withIndex:(int)index forParticipant:(NSString *)participantId { + _nativeKeyProvider->SetKey( + [participantId stdString], + index, + std::vector((const uint8_t *)key.bytes, ((const uint8_t *)key.bytes) + key.length)); +} + +- (void)setSharedKey:(NSData *)key withIndex:(int)index { + _nativeKeyProvider->SetSharedKey( + index, + std::vector((const uint8_t *)key.bytes, ((const uint8_t *)key.bytes) + key.length)); +} + +- (NSData *)ratchetSharedKey:(int)index { + std::vector nativeKey = _nativeKeyProvider->RatchetSharedKey(index); + return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()]; +} + +- (NSData *)exportSharedKey:(int)index { + std::vector nativeKey = _nativeKeyProvider->ExportSharedKey(index); + return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()]; +} + +- (NSData *)ratchetKey:(NSString *)participantId withIndex:(int)index { + std::vector nativeKey = _nativeKeyProvider->RatchetKey([participantId stdString], index); + return 
[NSData dataWithBytes:nativeKey.data() length:nativeKey.size()]; +} + +- (NSData *)exportKey:(NSString *)participantId withIndex:(int)index { + std::vector nativeKey = _nativeKeyProvider->ExportKey([participantId stdString], index); + return [NSData dataWithBytes:nativeKey.data() length:nativeKey.size()]; +} + +- (void)setSifTrailer:(NSData *)trailer { + _nativeKeyProvider->SetSifTrailer( + std::vector((const uint8_t *)trailer.bytes, + ((const uint8_t *)trailer.bytes) + trailer.length)); +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCIODevice+Private.h b/sdk/objc/api/peerconnection/RTCIODevice+Private.h new file mode 100644 index 0000000000..e736c993e1 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCIODevice+Private.h @@ -0,0 +1,28 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +#import "RTCIODevice.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCIODevice) () + +- (instancetype)initWithType:(RTCIODeviceType)type + deviceId:(NSString *)deviceId + name:(NSString* )name; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCIODevice.h b/sdk/objc/api/peerconnection/RTCIODevice.h new file mode 100644 index 0000000000..f44d532081 --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCIODevice.h @@ -0,0 +1,41 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +typedef NS_ENUM(NSInteger, RTCIODeviceType) { + RTCIODeviceTypeOutput, + RTCIODeviceTypeInput, +}; + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE(RTCIODevice) : NSObject + ++ (instancetype)defaultDeviceWithType: (RTCIODeviceType)type; +- (instancetype)init NS_UNAVAILABLE; + +@property(nonatomic, readonly) BOOL isDefault; +@property(nonatomic, readonly) RTCIODeviceType type; +@property(nonatomic, copy, readonly) NSString *deviceId; +@property(nonatomic, copy, readonly) NSString *name; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCIODevice.mm b/sdk/objc/api/peerconnection/RTCIODevice.mm new file mode 100644 index 0000000000..b3738f71fe --- /dev/null +++ b/sdk/objc/api/peerconnection/RTCIODevice.mm @@ -0,0 +1,71 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCIODevice.h" +#import "RTCIODevice+Private.h" + +NSString *const kDefaultDeviceId = @"default"; + +@implementation RTC_OBJC_TYPE(RTCIODevice) + +@synthesize type = _type; +@synthesize deviceId = _deviceId; +@synthesize name = _name; + ++ (instancetype)defaultDeviceWithType: (RTCIODeviceType)type { + return [[self alloc] initWithType: type + deviceId: kDefaultDeviceId + name: @""]; +} + +- (instancetype)initWithType: (RTCIODeviceType)type + deviceId: (NSString *)deviceId + name: (NSString* )name { + if (self = [super init]) { + _type = type; + _deviceId = deviceId; + _name = name; + } + return self; +} + +#pragma mark - IODevice + +- (BOOL)isDefault { + return [_deviceId isEqualToString: kDefaultDeviceId]; +} + +#pragma mark - Equatable + +- (BOOL)isEqual: (id)object { + if (self == object) { + return YES; + } + if (object == nil) { + return NO; + } + if (![object isMemberOfClass:[self class]]) { + return NO; + } + + return [_deviceId isEqualToString:((RTC_OBJC_TYPE(RTCIODevice) *)object).deviceId]; +} + +- (NSUInteger)hash { + return [_deviceId hash]; +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h index f361b9f0ea..cc45aba1ec 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory+Native.h @@ -50,7 +50,8 @@ NS_ASSUME_NONNULL_BEGIN audioDeviceModule: (nullable webrtc::AudioDeviceModule *)audioDeviceModule audioProcessingModule: - (rtc::scoped_refptr)audioProcessingModule; + (rtc::scoped_refptr)audioProcessingModule + bypassVoiceProcessing:(BOOL)bypassVoiceProcessing; - (instancetype) initWithNativeAudioEncoderFactory: @@ -65,7 +66,8 @@ NS_ASSUME_NONNULL_BEGIN audioProcessingModule: (rtc::scoped_refptr)audioProcessingModule networkControllerFactory:(std::unique_ptr) - networkControllerFactory; + networkControllerFactory + 
bypassVoiceProcessing:(BOOL)bypassVoiceProcessing; - (instancetype) initWithEncoderFactory:(nullable id)encoderFactory diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index 17777f6d5d..1e0908db52 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -24,6 +24,11 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCVideoSource); @class RTC_OBJC_TYPE(RTCVideoTrack); @class RTC_OBJC_TYPE(RTCPeerConnectionFactoryOptions); +@class RTC_OBJC_TYPE(RTCAudioDeviceModule); +@class RTC_OBJC_TYPE(RTCRtpCapabilities); + +typedef NS_ENUM(NSInteger, RTCRtpMediaType); + @protocol RTC_OBJC_TYPE (RTCPeerConnectionDelegate); @protocol RTC_OBJC_TYPE @@ -34,6 +39,8 @@ NS_ASSUME_NONNULL_BEGIN (RTCSSLCertificateVerifier); @protocol RTC_OBJC_TYPE (RTCAudioDevice); +@protocol RTC_OBJC_TYPE +(RTCAudioProcessingModule); RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCPeerConnectionFactory) : NSObject @@ -52,6 +59,16 @@ RTC_OBJC_EXPORT decoderFactory:(nullable id)decoderFactory audioDevice:(nullable id)audioDevice; +/* Initialize object with bypass voice processing */ +- (instancetype) + initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing + encoderFactory:(nullable id)encoderFactory + decoderFactory:(nullable id)decoderFactory + audioProcessingModule: + (nullable id)audioProcessingModule; + +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioDeviceModule) *audioDeviceModule; + /** * Valid kind values are kRTCMediaStreamTrackKindAudio and * kRTCMediaStreamTrackKindVideo. 
diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 64be41ae15..08200f46fd 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -15,6 +15,9 @@ #import "RTCPeerConnectionFactoryOptions+Private.h" #import "RTCRtpCapabilities+Private.h" +#import "RTCAudioDeviceModule.h" +#import "RTCAudioDeviceModule+Private.h" + #import "RTCAudioSource+Private.h" #import "RTCAudioTrack+Private.h" #import "RTCMediaConstraints+Private.h" @@ -22,6 +25,9 @@ #import "RTCPeerConnection+Private.h" #import "RTCVideoSource+Private.h" #import "RTCVideoTrack+Private.h" +#import "RTCRtpReceiver+Private.h" +#import "RTCRtpCapabilities+Private.h" +#import "RTCRtpCodecCapability+Private.h" #import "base/RTCLogging.h" #import "base/RTCVideoDecoderFactory.h" #import "base/RTCVideoEncoderFactory.h" @@ -49,6 +55,9 @@ #include "sdk/objc/native/src/objc_video_decoder_factory.h" #include "sdk/objc/native/src/objc_video_encoder_factory.h" +#import "components/audio/RTCAudioProcessingModule.h" +#import "components/audio/RTCDefaultAudioProcessingModule+Private.h" + #if defined(WEBRTC_IOS) #import "sdk/objc/native/api/audio_device_module.h" #endif @@ -57,14 +66,18 @@ @implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) { std::unique_ptr _networkThread; std::unique_ptr _workerThread; std::unique_ptr _signalingThread; + rtc::scoped_refptr _nativeAudioDeviceModule; + RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) *_defaultAudioProcessingModule; + BOOL _hasStartedAecDump; } @synthesize nativeFactory = _nativeFactory; +@synthesize audioDeviceModule = _audioDeviceModule; -- (rtc::scoped_refptr)audioDeviceModule { +- (rtc::scoped_refptr)createAudioDeviceModule:(BOOL)bypassVoiceProcessing { #if defined(WEBRTC_IOS) - return webrtc::CreateAudioDeviceModule(); + return webrtc::CreateAudioDeviceModule(bypassVoiceProcessing); #else return nullptr; #endif @@ 
-78,8 +91,9 @@ - (instancetype)init { RTCVideoEncoderFactoryH264) alloc] init]) nativeVideoDecoderFactory:webrtc::ObjCToNativeVideoDecoderFactory([[RTC_OBJC_TYPE( RTCVideoDecoderFactoryH264) alloc] init]) - audioDeviceModule:[self audioDeviceModule].get() - audioProcessingModule:nullptr]; + audioDeviceModule:[self createAudioDeviceModule:NO].get() + audioProcessingModule:nullptr + bypassVoiceProcessing:NO]; } - (instancetype) @@ -107,14 +121,66 @@ - (instancetype)init { if (audioDevice) { audio_device_module = webrtc::CreateAudioDeviceModule(audioDevice); } else { - audio_device_module = [self audioDeviceModule]; + audio_device_module = [self createAudioDeviceModule:NO]; + } + return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() + nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() + nativeVideoEncoderFactory:std::move(native_encoder_factory) + nativeVideoDecoderFactory:std::move(native_decoder_factory) + audioDeviceModule:audio_device_module.get() + audioProcessingModule:nullptr + bypassVoiceProcessing:NO]; +#endif +} + +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTCRtpMediaType)mediaType { + + webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpSenderCapabilities([RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType: mediaType]); + + return [[RTCRtpCapabilities alloc] initWithNativeRtpCapabilities: capabilities]; +} + +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesFor:(RTCRtpMediaType)mediaType { + + webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpReceiverCapabilities([RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType: mediaType]); + + return [[RTCRtpCapabilities alloc] initWithNativeRtpCapabilities: capabilities]; +} + +- (instancetype) + initWithBypassVoiceProcessing:(BOOL)bypassVoiceProcessing + encoderFactory:(nullable id)encoderFactory + decoderFactory:(nullable id)decoderFactory + audioProcessingModule: + (nullable 
id)audioProcessingModule { +#ifdef HAVE_NO_MEDIA + return [self initWithNoMedia]; +#else + std::unique_ptr native_encoder_factory; + std::unique_ptr native_decoder_factory; + if (encoderFactory) { + native_encoder_factory = webrtc::ObjCToNativeVideoEncoderFactory(encoderFactory); + } + if (decoderFactory) { + native_decoder_factory = webrtc::ObjCToNativeVideoDecoderFactory(decoderFactory); + } + rtc::scoped_refptr audio_device_module = [self createAudioDeviceModule:bypassVoiceProcessing]; + + if ([audioProcessingModule isKindOfClass:[RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) class]]) { + _defaultAudioProcessingModule = (RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) *)audioProcessingModule; + } else { + _defaultAudioProcessingModule = [[RTC_OBJC_TYPE(RTCDefaultAudioProcessingModule) alloc] init]; } + + NSLog(@"AudioProcessingModule: %@", _defaultAudioProcessingModule); + return [self initWithNativeAudioEncoderFactory:webrtc::CreateBuiltinAudioEncoderFactory() nativeAudioDecoderFactory:webrtc::CreateBuiltinAudioDecoderFactory() nativeVideoEncoderFactory:std::move(native_encoder_factory) nativeVideoDecoderFactory:std::move(native_decoder_factory) audioDeviceModule:audio_device_module.get() - audioProcessingModule:nullptr]; + audioProcessingModule:_defaultAudioProcessingModule.nativeAudioProcessingModule + bypassVoiceProcessing:bypassVoiceProcessing]; #endif } @@ -163,14 +229,16 @@ - (instancetype)initWithNativeAudioEncoderFactory: (std::unique_ptr)videoDecoderFactory audioDeviceModule:(webrtc::AudioDeviceModule *)audioDeviceModule audioProcessingModule: - (rtc::scoped_refptr)audioProcessingModule { + (rtc::scoped_refptr)audioProcessingModule + bypassVoiceProcessing:(BOOL)bypassVoiceProcessing { return [self initWithNativeAudioEncoderFactory:audioEncoderFactory nativeAudioDecoderFactory:audioDecoderFactory nativeVideoEncoderFactory:std::move(videoEncoderFactory) nativeVideoDecoderFactory:std::move(videoDecoderFactory) audioDeviceModule:audioDeviceModule 
audioProcessingModule:audioProcessingModule - networkControllerFactory:nullptr]; + networkControllerFactory:nullptr + bypassVoiceProcessing:bypassVoiceProcessing]; } - (instancetype)initWithNativeAudioEncoderFactory: (rtc::scoped_refptr)audioEncoderFactory @@ -185,7 +253,8 @@ - (instancetype)initWithNativeAudioEncoderFactory: (rtc::scoped_refptr)audioProcessingModule networkControllerFactory: (std::unique_ptr) - networkControllerFactory { + networkControllerFactory + bypassVoiceProcessing:(BOOL)bypassVoiceProcessing { if (self = [self initNative]) { webrtc::PeerConnectionFactoryDependencies dependencies; dependencies.network_thread = _networkThread.get(); @@ -197,11 +266,22 @@ - (instancetype)initWithNativeAudioEncoderFactory: dependencies.trials = std::make_unique(); dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(dependencies.trials.get()); - dependencies.adm = std::move(audioDeviceModule); + + // always create ADM on worker thread + _nativeAudioDeviceModule = _workerThread->BlockingCall([&dependencies, &bypassVoiceProcessing]() { + return webrtc::AudioDeviceModule::Create(webrtc::AudioDeviceModule::AudioLayer::kPlatformDefaultAudio, + dependencies.task_queue_factory.get(), + bypassVoiceProcessing == YES); + }); + + _audioDeviceModule = [[RTC_OBJC_TYPE(RTCAudioDeviceModule) alloc] initWithNativeModule: _nativeAudioDeviceModule + workerThread: _workerThread.get()]; + dependencies.adm = _nativeAudioDeviceModule; dependencies.audio_encoder_factory = std::move(audioEncoderFactory); dependencies.audio_decoder_factory = std::move(audioDecoderFactory); dependencies.video_encoder_factory = std::move(videoEncoderFactory); dependencies.video_decoder_factory = std::move(videoDecoderFactory); + if (audioProcessingModule) { dependencies.audio_processing = std::move(audioProcessingModule); } else { diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h 
index 070a0e74a5..4d7025bf93 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.h @@ -12,9 +12,9 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCPeerConnectionFactoryBuilder (DefaultComponents) +@interface RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) (DefaultComponents) -+ (RTCPeerConnectionFactoryBuilder *)defaultBuilder; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)defaultBuilder; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm index 522e520e12..a2f633e1a4 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder+DefaultComponents.mm @@ -22,10 +22,10 @@ #import "sdk/objc/native/api/audio_device_module.h" #endif -@implementation RTCPeerConnectionFactoryBuilder (DefaultComponents) +@implementation RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) (DefaultComponents) -+ (RTCPeerConnectionFactoryBuilder *)defaultBuilder { - RTCPeerConnectionFactoryBuilder *builder = [[RTCPeerConnectionFactoryBuilder alloc] init]; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)defaultBuilder { + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *builder = [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; auto audioEncoderFactory = webrtc::CreateBuiltinAudioEncoderFactory(); [builder setAudioEncoderFactory:audioEncoderFactory]; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h index f0b0de156a..a46839b6b3 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.h @@ -25,9 +25,9 @@ class AudioProcessing; NS_ASSUME_NONNULL_BEGIN -@interface 
RTCPeerConnectionFactoryBuilder : NSObject +@interface RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) : NSObject -+ (RTCPeerConnectionFactoryBuilder *)builder; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)builder; - (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm index 627909a0e3..4cb12b0a59 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactoryBuilder.mm @@ -18,7 +18,7 @@ #include "modules/audio_device/include/audio_device.h" #include "modules/audio_processing/include/audio_processing.h" -@implementation RTCPeerConnectionFactoryBuilder { +@implementation RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) { std::unique_ptr _videoEncoderFactory; std::unique_ptr _videoDecoderFactory; rtc::scoped_refptr _audioEncoderFactory; @@ -27,8 +27,8 @@ @implementation RTCPeerConnectionFactoryBuilder { rtc::scoped_refptr _audioProcessingModule; } -+ (RTCPeerConnectionFactoryBuilder *)builder { - return [[RTCPeerConnectionFactoryBuilder alloc] init]; ++ (RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) *)builder { + return [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; } - (RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)createPeerConnectionFactory { @@ -39,7 +39,8 @@ + (RTCPeerConnectionFactoryBuilder *)builder { nativeVideoEncoderFactory:std::move(_videoEncoderFactory) nativeVideoDecoderFactory:std::move(_videoDecoderFactory) audioDeviceModule:_audioDeviceModule.get() - audioProcessingModule:_audioProcessingModule]; + audioProcessingModule:_audioProcessingModule + bypassVoiceProcessing:NO]; } - (void)setVideoEncoderFactory:(std::unique_ptr)videoEncoderFactory { diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h index 6135223720..4d24d3ccd6 100644 --- 
a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h @@ -30,6 +30,7 @@ RTC_EXTERN const NSString *const kRTCComfortNoiseCodecName; RTC_EXTERN const NSString *const kRTCVp8CodecName; RTC_EXTERN const NSString *const kRTCVp9CodecName; RTC_EXTERN const NSString *const kRTCH264CodecName; +RTC_EXTERN const NSString *const kRTCAv1CodecName; /** Defined in https://www.w3.org/TR/webrtc/#idl-def-rtcrtpcodecparameters */ RTC_OBJC_EXPORT diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm index 6201e57b93..42a310cb79 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm @@ -32,6 +32,7 @@ const NSString * const kRTCVp8CodecName = @(cricket::kVp8CodecName); const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName); const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName); +const NSString * const kRTCAv1CodecName = @(cricket::kAv1CodecName); @implementation RTC_OBJC_TYPE (RTCRtpCodecParameters) diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h index 07f6b7a39c..af0c6993bc 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h @@ -69,6 +69,10 @@ RTC_OBJC_EXPORT https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime */ @property(nonatomic, assign) BOOL adaptiveAudioPacketTime; +/** A case-sensitive identifier of the scalability mode to be used for this stream. 
+ https://w3c.github.io/webrtc-svc/#rtcrtpencodingparameters */ +@property(nonatomic, copy, nullable) NSString *scalabilityMode; + - (instancetype)init; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm index d6087dafb0..aecb88b6f6 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm @@ -25,6 +25,7 @@ @implementation RTC_OBJC_TYPE (RTCRtpEncodingParameters) @synthesize bitratePriority = _bitratePriority; @synthesize networkPriority = _networkPriority; @synthesize adaptiveAudioPacketTime = _adaptiveAudioPacketTime; +@synthesize scalabilityMode = _scalabilityMode; - (instancetype)init { webrtc::RtpEncodingParameters nativeParameters; @@ -59,6 +60,9 @@ - (instancetype)initWithNativeParameters: if (nativeParameters.ssrc) { _ssrc = [NSNumber numberWithUnsignedLong:*nativeParameters.ssrc]; } + if (nativeParameters.scalability_mode) { + _scalabilityMode = [NSString stringWithUTF8String:nativeParameters.scalability_mode->c_str()]; + } _bitratePriority = nativeParameters.bitrate_priority; _networkPriority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters) priorityFromNativePriority:nativeParameters.network_priority]; @@ -92,6 +96,9 @@ - (instancetype)initWithNativeParameters: if (_ssrc != nil) { parameters.ssrc = absl::optional(_ssrc.unsignedLongValue); } + if (_scalabilityMode != nil) { + parameters.scalability_mode = absl::optional(std::string([_scalabilityMode UTF8String])); + } parameters.bitrate_priority = _bitratePriority; parameters.network_priority = [RTC_OBJC_TYPE(RTCRtpEncodingParameters) nativePriorityFromPriority:_networkPriority]; diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h index 1701f680a4..fca088be7e 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h @@ -14,6 +14,8 @@ #import 
"RTCRtpReceiver.h" #import "RTCRtpSender.h" +@class RTC_OBJC_TYPE(RTCRtpCodecCapability); + NS_ASSUME_NONNULL_BEGIN extern NSString *const kRTCRtpTransceiverErrorDomain; @@ -105,6 +107,8 @@ RTC_OBJC_EXPORT */ @property(nonatomic, readonly) RTCRtpTransceiverDirection direction; +@property(nonatomic, copy) NSArray *codecPreferences; + /** The currentDirection attribute indicates the current direction negotiated * for this transceiver. If this transceiver has never been represented in an * offer/answer exchange, or if the transceiver is stopped, the value is not diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm index eb0ad96738..dc51149934 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm @@ -15,6 +15,8 @@ #import "RTCRtpParameters+Private.h" #import "RTCRtpReceiver+Private.h" #import "RTCRtpSender+Private.h" +#import "RTCRtpCodecCapability.h" +#import "RTCRtpCodecCapability+Private.h" #import "base/RTCLogging.h" #import "helpers/NSString+StdString.h" @@ -68,6 +70,20 @@ - (NSString *)mid { } } +- (NSArray *)codecPreferences { + + NSMutableArray *result = [NSMutableArray array]; + + std::vector capabilities = _nativeRtpTransceiver->codec_preferences(); + + for (auto & element : capabilities) { + RTC_OBJC_TYPE(RTCRtpCodecCapability) *object = [[RTC_OBJC_TYPE(RTCRtpCodecCapability) alloc] initWithNativeRtpCodecCapability: element]; + [result addObject: object]; + } + + return result; +} + @synthesize sender = _sender; @synthesize receiver = _receiver; diff --git a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm index 2eb8d366d2..88c2add1df 100644 --- a/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm +++ b/sdk/objc/api/peerconnection/RTCVideoCodecInfo+Private.mm @@ -12,6 +12,11 @@ #import "helpers/NSString+StdString.h" +#include "absl/container/inlined_vector.h" 
+#include "api/video_codecs/sdp_video_format.h" +#include "modules/video_coding/svc/scalability_mode_util.h" +#include "modules/video_coding/svc/create_scalability_structure.h" + @implementation RTC_OBJC_TYPE (RTCVideoCodecInfo) (Private) @@ -31,8 +36,16 @@ - (instancetype)initWithNativeSdpVideoFormat : (webrtc::SdpVideoFormat)format { std::string value = [NSString stdStringForString:self.parameters[paramKey]]; parameters[key] = value; } - - return webrtc::SdpVideoFormat([NSString stdStringForString:self.name], parameters); + + absl::InlinedVector + scalability_modes; + for (NSString *scalabilityMode in self.scalabilityModes) { + auto scalability_mode = webrtc::ScalabilityModeFromString([NSString stdStringForString:scalabilityMode]); + if (scalability_mode != absl::nullopt) { + scalability_modes.push_back(*scalability_mode); + } + } + return webrtc::SdpVideoFormat([NSString stdStringForString:self.name], parameters, scalability_modes); } @end diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.h b/sdk/objc/api/peerconnection/RTCVideoTrack.h index 5382b7169f..56d25c1568 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.h +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.h @@ -25,6 +25,9 @@ RTC_OBJC_EXPORT /** The video source for this video track. */ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCVideoSource) *source; +/** The receive state, if this is a remote video track. */ +@property(nonatomic, assign) BOOL shouldReceive; + - (instancetype)init NS_UNAVAILABLE; /** Register a renderer that will render all frames received on this track. 
*/ diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm index d3296f6279..546ec80a61 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm @@ -53,7 +53,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto } - (void)dealloc { - for (RTCVideoRendererAdapter *adapter in _adapters) { + for (RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter in _adapters) { self.nativeVideoTrack->RemoveSink(adapter.nativeVideoRenderer); } } @@ -70,6 +70,14 @@ - (void)dealloc { return _source; } +- (BOOL)shouldReceive { + return self.nativeVideoTrack->should_receive(); +} + +- (void)setShouldReceive:(BOOL)shouldReceive { + self.nativeVideoTrack->set_should_receive(shouldReceive); +} + - (void)addRenderer:(id)renderer { if (!_workerThread->IsCurrent()) { _workerThread->BlockingCall([renderer, self] { [self addRenderer:renderer]; }); @@ -77,18 +85,17 @@ - (void)addRenderer:(id)renderer { } // Make sure we don't have this renderer yet. - for (RTCVideoRendererAdapter *adapter in _adapters) { + for (RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter in _adapters) { if (adapter.videoRenderer == renderer) { RTC_LOG(LS_INFO) << "|renderer| is already attached to this track"; return; } } // Create a wrapper that provides a native pointer for us. 
- RTCVideoRendererAdapter* adapter = - [[RTCVideoRendererAdapter alloc] initWithNativeRenderer:renderer]; + RTC_OBJC_TYPE(RTCVideoRendererAdapter) *adapter = + [[RTC_OBJC_TYPE(RTCVideoRendererAdapter) alloc] initWithNativeRenderer:renderer]; [_adapters addObject:adapter]; - self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer, - rtc::VideoSinkWants()); + self.nativeVideoTrack->AddOrUpdateSink(adapter.nativeVideoRenderer, rtc::VideoSinkWants()); } - (void)removeRenderer:(id)renderer { @@ -97,9 +104,8 @@ - (void)removeRenderer:(id)renderer { return; } __block NSUInteger indexToRemove = NSNotFound; - [_adapters enumerateObjectsUsingBlock:^(RTCVideoRendererAdapter *adapter, - NSUInteger idx, - BOOL *stop) { + [_adapters enumerateObjectsUsingBlock:^( + RTC_OBJC_TYPE(RTCVideoRendererAdapter) * adapter, NSUInteger idx, BOOL * stop) { if (adapter.videoRenderer == renderer) { indexToRemove = idx; *stop = YES; @@ -109,8 +115,7 @@ - (void)removeRenderer:(id)renderer { RTC_LOG(LS_INFO) << "removeRenderer called with a renderer that has not been previously added"; return; } - RTCVideoRendererAdapter *adapterToRemove = - [_adapters objectAtIndex:indexToRemove]; + RTC_OBJC_TYPE(RTCVideoRendererAdapter) *adapterToRemove = [_adapters objectAtIndex:indexToRemove]; self.nativeVideoTrack->RemoveSink(adapterToRemove.nativeVideoRenderer); [_adapters removeObjectAtIndex:indexToRemove]; } diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h index 8aa55e4bfa..a900728049 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h +++ b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.h @@ -24,4 +24,6 @@ RTC_OBJC_EXPORT + (bool)isSupported; ++ (NSArray *)scalabilityModes; + @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm index 43137b5bc8..662a3ca694 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderAV1.mm @@ 
-15,7 +15,11 @@ #import "RTCNativeVideoEncoder.h" #import "RTCNativeVideoEncoderBuilder+Native.h" #import "RTCVideoEncoderAV1.h" +#import "helpers/NSString+StdString.h" +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" #include "modules/video_coding/codecs/av1/libaom_av1_encoder.h" +#include "modules/video_coding/svc/create_scalability_structure.h" @interface RTC_OBJC_TYPE (RTCVideoEncoderAV1Builder) : RTC_OBJC_TYPE(RTCNativeVideoEncoder) @@ -39,4 +43,14 @@ + (bool)isSupported { return true; } + + (NSArray *)scalabilityModes { + NSMutableArray *scalabilityModes = [NSMutableArray array]; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + [scalabilityModes addObject:[NSString stringForAbslStringView:webrtc::ScalabilityModeToString(scalability_mode)]]; + } + } + return scalabilityModes; + } + @end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h new file mode 100644 index 0000000000..4f1b55c713 --- /dev/null +++ b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.h @@ -0,0 +1,13 @@ +#import "RTCMacros.h" +#import "RTCVideoEncoder.h" +#import "RTCVideoEncoderFactory.h" +#import "RTCVideoCodecInfo.h" + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCVideoEncoderSimulcast) : NSObject + ++ (id)simulcastEncoderWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo; + +@end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm new file mode 100644 index 0000000000..8ca988562c --- /dev/null +++ b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm @@ -0,0 +1,64 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoEncoderSimulcast.h" +#import "RTCNativeVideoEncoder.h" +#import "RTCNativeVideoEncoderBuilder+Native.h" +#import 
"api/peerconnection/RTCVideoCodecInfo+Private.h" +#include "api/transport/field_trial_based_config.h" + +#include "native/api/video_encoder_factory.h" +#include "media/engine/simulcast_encoder_adapter.h" + +@interface RTC_OBJC_TYPE (RTCVideoEncoderSimulcastBuilder) + : RTC_OBJC_TYPE(RTCNativeVideoEncoder) { + + id _primary; + id _fallback; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *_videoCodecInfo; +} + +- (id)initWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo; + +@end + +@implementation RTC_OBJC_TYPE (RTCVideoEncoderSimulcastBuilder) + +- (std::unique_ptr)build:(const webrtc::Environment&)env { + auto nativePrimary = webrtc::ObjCToNativeVideoEncoderFactory(_primary); + auto nativeFallback = webrtc::ObjCToNativeVideoEncoderFactory(_fallback); + auto nativeFormat = [_videoCodecInfo nativeSdpVideoFormat]; + return std::make_unique( + env, + nativePrimary.release(), + nativeFallback.release(), + std::move(nativeFormat)); +} + +- (id)initWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo { + if (self = [super init]) { + self->_primary = primary; + self->_fallback = fallback; + self->_videoCodecInfo = videoCodecInfo; + } + return self; +} + +@end + +@implementation RTC_OBJC_TYPE (RTCVideoEncoderSimulcast) + ++ (id)simulcastEncoderWithPrimary:(id)primary + fallback:(id)fallback + videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo { + return [[RTC_OBJC_TYPE(RTCVideoEncoderSimulcastBuilder) alloc] + initWithPrimary:primary + fallback:fallback + videoCodecInfo:videoCodecInfo]; +} + +@end diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h index f7dac6117d..adfca0f9a4 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.h @@ -24,4 +24,6 @@ RTC_OBJC_EXPORT + (bool)isSupported; ++ (NSArray *)scalabilityModes; + @end diff --git 
a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm index 1026f40092..3055017024 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderVP9.mm @@ -16,7 +16,12 @@ #import "RTCNativeVideoEncoderBuilder+Native.h" #import "RTCVideoEncoderVP9.h" +#import "helpers/NSString+StdString.h" + +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/sdp_video_format.h" #include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "modules/video_coding/svc/create_scalability_structure.h" @interface RTC_OBJC_TYPE (RTCVideoEncoderVP9Builder) : RTC_OBJC_TYPE(RTCNativeVideoEncoder) @@ -47,5 +52,13 @@ + (bool)isSupported { return false; #endif } - + + (NSArray *)scalabilityModes { + NSMutableArray *scalabilityModes = [NSMutableArray array]; + for (const auto scalability_mode : webrtc::kAllScalabilityModes) { + if (webrtc::ScalabilityStructureConfig(scalability_mode).has_value()) { + [scalabilityModes addObject:[NSString stringForAbslStringView:webrtc::ScalabilityModeToString(scalability_mode)]]; + } + } + return scalabilityModes; + } @end diff --git a/sdk/objc/base/RTCAudioRenderer.h b/sdk/objc/base/RTCAudioRenderer.h new file mode 100644 index 0000000000..3669831fca --- /dev/null +++ b/sdk/objc/base/RTCAudioRenderer.h @@ -0,0 +1,32 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import +#if TARGET_OS_IPHONE +#import +#endif + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RTCAudioRenderer) + +- (void)renderSampleBuffer: (CMSampleBufferRef)sampleBuffer NS_SWIFT_NAME(render(sampleBuffer:)); + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/base/RTCVideoCodecInfo.h b/sdk/objc/base/RTCVideoCodecInfo.h index fa28958f25..9da0c7aa81 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.h +++ b/sdk/objc/base/RTCVideoCodecInfo.h @@ -26,10 +26,16 @@ RTC_OBJC_EXPORT parameters:(nullable NSDictionary *)parameters NS_DESIGNATED_INITIALIZER; +- (instancetype)initWithName:(NSString *)name + parameters:(nullable NSDictionary *)parameters + scalabilityModes:(nullable NSArray *)scalabilityModes + NS_DESIGNATED_INITIALIZER; + - (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info; @property(nonatomic, readonly) NSString *name; @property(nonatomic, readonly) NSDictionary *parameters; +@property(nonatomic, readonly) NSArray *scalabilityModes; @end diff --git a/sdk/objc/base/RTCVideoCodecInfo.m b/sdk/objc/base/RTCVideoCodecInfo.m index ce26ae1de3..441ecde7c5 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.m +++ b/sdk/objc/base/RTCVideoCodecInfo.m @@ -14,6 +14,7 @@ @implementation RTC_OBJC_TYPE (RTCVideoCodecInfo) @synthesize name = _name; @synthesize parameters = _parameters; +@synthesize scalabilityModes = _scalabilityModes; - (instancetype)initWithName:(NSString *)name { return [self initWithName:name parameters:nil]; @@ -24,15 +25,29 @@ - (instancetype)initWithName:(NSString *)name if (self = [super init]) { _name = name; _parameters = (parameters ? parameters : @{}); + _scalabilityModes = @[]; } return self; } +- (instancetype)initWithName:(NSString *)name + parameters:(nullable NSDictionary *)parameters + scalabilityModes:(nullable NSArray *)scalabilityModes { + if (self = [super init]) { + _name = name; + _parameters = (parameters ? parameters : @{}); + _scalabilityModes = (scalabilityModes ? 
scalabilityModes : @[]); + } + + return self; + } + - (BOOL)isEqualToCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { if (!info || ![self.name isEqualToString:info.name] || - ![self.parameters isEqualToDictionary:info.parameters]) { + ![self.parameters isEqualToDictionary:info.parameters] || + ![self.scalabilityModes isEqualToArray:info.scalabilityModes]) { return NO; } return YES; diff --git a/sdk/objc/components/audio/RTCAudioBuffer+Private.h b/sdk/objc/components/audio/RTCAudioBuffer+Private.h new file mode 100644 index 0000000000..effd8bb429 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioBuffer+Private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioBuffer.h" + +#include "modules/audio_processing/audio_buffer.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCAudioBuffer)() + +- (instancetype)initWithNativeType: (webrtc::AudioBuffer *) audioBuffer; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioBuffer.h b/sdk/objc/components/audio/RTCAudioBuffer.h new file mode 100644 index 0000000000..8bbd068657 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioBuffer.h @@ -0,0 +1,38 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCAudioBuffer) : NSObject + +@property(nonatomic, readonly) size_t channels; +@property(nonatomic, readonly) size_t frames; +@property(nonatomic, readonly) size_t framesPerBand; +@property(nonatomic, readonly) size_t bands; + +// Returns pointer arrays. Index range from 0 to `frames`. +- (float* _Nonnull)rawBufferForChannel:(size_t)channel; + +// TODO: More convenience methods... + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioBuffer.mm b/sdk/objc/components/audio/RTCAudioBuffer.mm new file mode 100644 index 0000000000..e37ea344dd --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioBuffer.mm @@ -0,0 +1,55 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCAudioBuffer.h" + +#include "modules/audio_processing/audio_buffer.h" + +@implementation RTC_OBJC_TYPE (RTCAudioBuffer) { + // Raw + webrtc::AudioBuffer *_audioBuffer; +} + +- (size_t)channels { + return _audioBuffer->num_channels(); +} + +- (size_t)frames { + return _audioBuffer->num_frames(); +} + +- (size_t)framesPerBand { + return _audioBuffer->num_frames_per_band(); +} + +- (size_t)bands { + return _audioBuffer->num_bands(); +} + +- (float *)rawBufferForChannel:(size_t)channel { + return _audioBuffer->channels()[channel]; +} + +#pragma mark - Private + +- (instancetype)initWithNativeType:(webrtc::AudioBuffer *)audioBuffer { + if (self = [super init]) { + _audioBuffer = audioBuffer; + } + return self; +} + +@end diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h new file mode 100644 index 0000000000..9995b58abb --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter+Private.h @@ -0,0 +1,43 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioCustomProcessingAdapter.h" +#import "RTCAudioCustomProcessingDelegate.h" +#import "RTCMacros.h" + +#include "modules/audio_processing/include/audio_processing.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) () + +// Thread safe set/get with os_unfair_lock. 
+@property(nonatomic, weak, nullable) id + audioCustomProcessingDelegate; + +// Direct read access without lock. +@property(nonatomic, readonly, weak, nullable) id + rawAudioCustomProcessingDelegate; + +@property(nonatomic, readonly) std::unique_ptr + nativeAudioCustomProcessingModule; + +- (instancetype)initWithDelegate: + (nullable id)audioCustomProcessingDelegate; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h new file mode 100644 index 0000000000..3230c19323 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.h @@ -0,0 +1,28 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) : NSObject + +- (instancetype)init NS_UNAVAILABLE; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm new file mode 100644 index 0000000000..c0f297c786 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm @@ -0,0 +1,139 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import + +#import "RTCAudioBuffer+Private.h" +#import "RTCAudioCustomProcessingAdapter+Private.h" +#import "RTCAudioCustomProcessingAdapter.h" + +#include "rtc_base/logging.h" + +namespace webrtc { + +class AudioCustomProcessingAdapter : public webrtc::CustomProcessing { + public: + bool is_initialized_; + int sample_rate_hz_; + int num_channels_; + + AudioCustomProcessingAdapter(RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *adapter, os_unfair_lock *lock) { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter init"; + + adapter_ = adapter; + lock_ = lock; + is_initialized_ = false; + sample_rate_hz_ = 0; + num_channels_ = 0; + } + + ~AudioCustomProcessingAdapter() { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter dealloc"; + + os_unfair_lock_lock(lock_); + id delegate = adapter_.rawAudioCustomProcessingDelegate; + [delegate audioProcessingRelease]; + os_unfair_lock_unlock(lock_); + } + + void Initialize(int sample_rate_hz, int num_channels) override { + os_unfair_lock_lock(lock_); + id delegate = adapter_.rawAudioCustomProcessingDelegate; + [delegate audioProcessingInitializeWithSampleRate:sample_rate_hz channels:num_channels]; + is_initialized_ = true; + sample_rate_hz_ = sample_rate_hz; + num_channels_ = num_channels; + os_unfair_lock_unlock(lock_); + } + + void Process(AudioBuffer *audio_buffer) override { + bool is_locked = os_unfair_lock_trylock(lock_); + if (!is_locked) { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter.AudioCustomProcessingAdapter 
Process " + "already locked, skipping..."; + + return; + } + id delegate = adapter_.rawAudioCustomProcessingDelegate; + if (delegate != nil) { + RTC_OBJC_TYPE(RTCAudioBuffer) *audioBuffer = [[RTC_OBJC_TYPE(RTCAudioBuffer) alloc] initWithNativeType:audio_buffer]; + [delegate audioProcessingProcess:audioBuffer]; + } + os_unfair_lock_unlock(lock_); + } + + std::string ToString() const override { return "AudioCustomProcessingAdapter"; } + + private: + __weak RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *adapter_; + os_unfair_lock *lock_; +}; +} // namespace webrtc + +@implementation RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) { + webrtc::AudioCustomProcessingAdapter *_adapter; + os_unfair_lock _lock; +} + +@synthesize rawAudioCustomProcessingDelegate = _rawAudioCustomProcessingDelegate; + +- (instancetype)initWithDelegate: + (nullable id)audioCustomProcessingDelegate { + if (self = [super init]) { + _lock = OS_UNFAIR_LOCK_INIT; + _rawAudioCustomProcessingDelegate = audioCustomProcessingDelegate; + _adapter = new webrtc::AudioCustomProcessingAdapter(self, &_lock); + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter init"; + } + + return self; +} + +- (void)dealloc { + RTC_LOG(LS_INFO) << "RTCAudioCustomProcessingAdapter dealloc"; +} + +#pragma mark - Getter & Setter for audioCustomProcessingDelegate + +- (nullable id)audioCustomProcessingDelegate { + os_unfair_lock_lock(&_lock); + id delegate = _rawAudioCustomProcessingDelegate; + os_unfair_lock_unlock(&_lock); + return delegate; +} + +- (void)setAudioCustomProcessingDelegate:(nullable id)delegate { + os_unfair_lock_lock(&_lock); + if (_rawAudioCustomProcessingDelegate != nil && _adapter->is_initialized_) { + [_rawAudioCustomProcessingDelegate audioProcessingRelease]; + } + _rawAudioCustomProcessingDelegate = delegate; + if (_adapter->is_initialized_) { + [_rawAudioCustomProcessingDelegate + audioProcessingInitializeWithSampleRate:_adapter->sample_rate_hz_ + channels:_adapter->num_channels_]; + } + 
os_unfair_lock_unlock(&_lock); +} + +#pragma mark - Private + +- (std::unique_ptr)nativeAudioCustomProcessingModule { + return std::unique_ptr(_adapter); +} + +@end diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h b/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h new file mode 100644 index 0000000000..a8e4981fbc --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingDelegate.h @@ -0,0 +1,52 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCAudioBuffer); + +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCAudioCustomProcessingDelegate) + +/** +* (Re-)initialize the audio processor. +* This method can be invoked multiple times. +*/ +- (void)audioProcessingInitializeWithSampleRate : (size_t)sampleRateHz channels +: (size_t)channels NS_SWIFT_NAME(audioProcessingInitialize(sampleRate:channels:)); + +/** + * Process (read or write) the audio buffer. + * RTCAudioBuffer is a simple wrapper for webrtc::AudioBuffer and the valid scope is only inside + * this method. Do not retain it. + */ +- (void)audioProcessingProcess:(RTC_OBJC_TYPE(RTCAudioBuffer) *)audioBuffer + NS_SWIFT_NAME(audioProcessingProcess(audioBuffer:)); + +// TOOD: +// virtual void SetRuntimeSetting(AudioProcessing::RuntimeSetting setting); + +/** + * Suggests releasing resources allocated by the audio processor. 
+ */ +- (void)audioProcessingRelease; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h b/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h new file mode 100644 index 0000000000..ed565ee0aa --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioProcessingConfig.h" + +#include "modules/audio_processing/include/audio_processing.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCAudioProcessingConfig)() + +@property(nonatomic, readonly) webrtc::AudioProcessing::Config nativeAudioProcessingConfig; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig.h b/sdk/objc/components/audio/RTCAudioProcessingConfig.h new file mode 100644 index 0000000000..3c7dce45f1 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig.h @@ -0,0 +1,31 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCAudioProcessingConfig) : NSObject + +@property(nonatomic, assign) BOOL echoCancellerEnabled; +@property(nonatomic, assign) BOOL echoCancellerMobileMode; + + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig.mm b/sdk/objc/components/audio/RTCAudioProcessingConfig.mm new file mode 100644 index 0000000000..ca40f16e17 --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig.mm @@ -0,0 +1,51 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCAudioProcessingConfig.h" + +#include "modules/audio_processing/include/audio_processing.h" + +@implementation RTC_OBJC_TYPE (RTCAudioProcessingConfig) { + webrtc::AudioProcessing::Config _config; +} + +// config.echo_canceller.enabled + +- (BOOL)echoCancellerEnabled { + return _config.echo_canceller.enabled; +} + +- (void)setEchoCancellerEnabled:(BOOL)value { + _config.echo_canceller.enabled = value; +} + +// config.echo_canceller.mobile_mode + +- (BOOL)echoCancellerMobileMode { + return _config.echo_canceller.mobile_mode; +} + +- (void)setEchoCancellerMobileMode:(BOOL)value { + _config.echo_canceller.mobile_mode = value; +} + +#pragma mark - Private + +- (webrtc::AudioProcessing::Config)nativeAudioProcessingConfig { + return _config; +} + +@end diff --git a/sdk/objc/components/audio/RTCAudioProcessingModule.h b/sdk/objc/components/audio/RTCAudioProcessingModule.h new file mode 100644 index 0000000000..9b05c4800e --- /dev/null +++ b/sdk/objc/components/audio/RTCAudioProcessingModule.h @@ -0,0 +1,33 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCAudioProcessingConfig); + +RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCAudioProcessingModule) + +- (void)applyConfig: (RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config; + +// TODO: Implement... 
+ +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm index 4bf0a542e1..f7b45b5850 100644 --- a/sdk/objc/components/audio/RTCAudioSession+Configuration.mm +++ b/sdk/objc/components/audio/RTCAudioSession+Configuration.mm @@ -63,6 +63,33 @@ - (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *)configur } } + if (self.mode != configuration.mode) { + NSError *modeError = nil; + if (![self setMode:configuration.mode error:&modeError]) { + RTCLogError(@"Failed to set mode to %@: %@", + self.mode, + modeError.localizedDescription); + error = modeError; + } else { + RTCLog(@"Set mode to: %@", configuration.mode); + } + } + + // Sometimes category options don't stick after setting mode. + if (self.categoryOptions != configuration.categoryOptions) { + NSError *categoryError = nil; + if (![self setCategory:configuration.category + withOptions:configuration.categoryOptions + error:&categoryError]) { + RTCLogError(@"Failed to set category options: %@", + categoryError.localizedDescription); + error = categoryError; + } else { + RTCLog(@"Set category options to: %ld", + (long)configuration.categoryOptions); + } + } + if (self.preferredSampleRate != configuration.sampleRate) { NSError *sampleRateError = nil; if (![self setPreferredSampleRate:configuration.sampleRate diff --git a/sdk/objc/components/audio/RTCAudioSession.mm b/sdk/objc/components/audio/RTCAudioSession.mm index 641d2ed8c7..083d4737c4 100644 --- a/sdk/objc/components/audio/RTCAudioSession.mm +++ b/sdk/objc/components/audio/RTCAudioSession.mm @@ -552,8 +552,7 @@ - (void)handleRouteChangeNotification:(NSNotification *)notification { RTCLog(@"Audio route changed: OldDeviceUnavailable"); break; case AVAudioSessionRouteChangeReasonCategoryChange: - RTCLog(@"Audio route changed: CategoryChange to :%@", - self.session.category); + RTCLog(@"Audio route changed: CategoryChange to :%@", 
self.session.category); break; case AVAudioSessionRouteChangeReasonOverride: RTCLog(@"Audio route changed: Override"); diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m index 71b0c0cb3a..76e17e7afb 100644 --- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m @@ -54,15 +54,17 @@ @implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration) - (instancetype)init { if (self = [super init]) { + // Use AVAudioSession values for default + AVAudioSession *session = [AVAudioSession sharedInstance]; // Use a category which supports simultaneous recording and playback. // By default, using this category implies that our app’s audio is // nonmixable, hence activating the session will interrupt any other // audio sessions which are also nonmixable. - _category = AVAudioSessionCategoryPlayAndRecord; - _categoryOptions = AVAudioSessionCategoryOptionAllowBluetooth; + _category = session.category; + _categoryOptions = session.categoryOptions; // Specify mode for two-way voice communication (e.g. VoIP). - _mode = AVAudioSessionModeVoiceChat; + _mode = session.mode; // Use best sample rate and buffer duration if the CPU has more than one // core. diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h new file mode 100644 index 0000000000..4f8551e372 --- /dev/null +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule+Private.h @@ -0,0 +1,29 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCDefaultAudioProcessingModule.h" + +#include "modules/audio_processing/include/audio_processing.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule)() + +@property(nonatomic, readonly) rtc::scoped_refptr nativeAudioProcessingModule; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h new file mode 100644 index 0000000000..2047b3f797 --- /dev/null +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h @@ -0,0 +1,46 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import + +#import "RTCAudioProcessingModule.h" +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCAudioProcessingConfig); +@protocol RTC_OBJC_TYPE (RTCAudioCustomProcessingDelegate); + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule) : NSObject + +- (instancetype)initWithConfig: (nullable RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config + capturePostProcessingDelegate: (nullable id)capturePostProcessingDelegate + renderPreProcessingDelegate: (nullable id)renderPreProcessingDelegate + NS_SWIFT_NAME(init(config:capturePostProcessingDelegate:renderPreProcessingDelegate:)) NS_DESIGNATED_INITIALIZER; + +- (void)applyConfig:(RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config; + +// Dynamically update delegates at runtime + +@property(nonatomic, weak, nullable) id + capturePostProcessingDelegate; +@property(nonatomic, weak, nullable) id + renderPreProcessingDelegate; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm new file mode 100644 index 0000000000..2f592cefa4 --- /dev/null +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm @@ -0,0 +1,96 @@ +/* + * Copyright 2023 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCDefaultAudioProcessingModule.h" +#import "RTCAudioCustomProcessingAdapter+Private.h" +#import "RTCAudioProcessingConfig+Private.h" + +#include "modules/audio_processing/include/audio_processing.h" + +@implementation RTC_OBJC_TYPE (RTCDefaultAudioProcessingModule) { + rtc::scoped_refptr _nativeAudioProcessingModule; + // Custom processing adapters... + RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *_capturePostProcessingAdapter; + RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) *_renderPreProcessingAdapter; +} + +- (instancetype)init { + return [self initWithConfig:nil + capturePostProcessingDelegate:nil + renderPreProcessingDelegate:nil]; +} + +- (instancetype)initWithConfig:(nullable RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config + capturePostProcessingDelegate: + (nullable id)capturePostProcessingDelegate + renderPreProcessingDelegate:(nullable id) + renderPreProcessingDelegate { + if (self = [super init]) { + webrtc::AudioProcessingBuilder builder = webrtc::AudioProcessingBuilder(); + + // TODO: Custom Config... 
+ + if (config != nil) { + builder.SetConfig(config.nativeAudioProcessingConfig); + } + + _capturePostProcessingAdapter = + [[RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) alloc] initWithDelegate:capturePostProcessingDelegate]; + builder.SetCapturePostProcessing( + _capturePostProcessingAdapter.nativeAudioCustomProcessingModule); + + _renderPreProcessingAdapter = + [[RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) alloc] initWithDelegate:renderPreProcessingDelegate]; + builder.SetRenderPreProcessing(_renderPreProcessingAdapter.nativeAudioCustomProcessingModule); + + _nativeAudioProcessingModule = builder.Create(); + } + return self; +} + +#pragma mark - Getter & Setters for delegates + +- (nullable id)capturePostProcessingDelegate { + return _capturePostProcessingAdapter.audioCustomProcessingDelegate; +} + +- (void)setCapturePostProcessingDelegate: + (nullable id)delegate { + _capturePostProcessingAdapter.audioCustomProcessingDelegate = delegate; +} + +- (nullable id)renderPreProcessingDelegate { + return _renderPreProcessingAdapter.audioCustomProcessingDelegate; +} + +- (void)setRenderPreProcessingDelegate: + (nullable id)delegate { + _renderPreProcessingAdapter.audioCustomProcessingDelegate = delegate; +} + +#pragma mark - RTCAudioProcessingModule protocol + +- (void)applyConfig:(RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config { + _nativeAudioProcessingModule->ApplyConfig(config.nativeAudioProcessingConfig); +} + +#pragma mark - Private + +- (rtc::scoped_refptr)nativeAudioProcessingModule { + return _nativeAudioProcessingModule; +} + +@end diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h index 6a75f01479..1799c11415 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.h @@ -19,7 +19,7 @@ class AudioSessionObserver; /** Adapter that forwards RTCAudioSessionDelegate calls to 
the appropriate * methods on the AudioSessionObserver. */ -@interface RTCNativeAudioSessionDelegateAdapter : NSObject +@interface RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) : NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm index daddf314a4..f652ad1e5f 100644 --- a/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm +++ b/sdk/objc/components/audio/RTCNativeAudioSessionDelegateAdapter.mm @@ -14,7 +14,7 @@ #import "base/RTCLogging.h" -@implementation RTCNativeAudioSessionDelegateAdapter { +@implementation RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) { webrtc::AudioSessionObserver *_observer; } diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h index 370bfa70f0..74f0b39925 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h @@ -30,6 +30,8 @@ NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.") // Returns list of formats that are supported by this class for this device. + (NSArray *)supportedFormatsForDevice:(AVCaptureDevice *)device; ++ (CGFloat)defaultZoomFactorForDeviceType:(AVCaptureDeviceType)deviceType; + // Returns the most efficient supported output pixel format for this capturer. 
- (FourCharCode)preferredOutputPixelFormat; diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index e7c47b4e99..26075aeca4 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -42,7 +42,7 @@ @implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) { FourCharCode _outputPixelFormat; RTCVideoRotation _rotation; #if TARGET_OS_IPHONE - UIDeviceOrientation _orientation; + UIInterfaceOrientation _orientation; BOOL _generatingOrientationNotifications; #endif } @@ -75,7 +75,7 @@ - (instancetype)initWithDelegate:(__weak id_generatingOrientationNotifications = YES; } + // Must be called on main + [self updateOrientation]; }); #endif @@ -168,8 +190,7 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device NSError *error = nil; if (![self.currentDevice lockForConfiguration:&error]) { - RTCLogError(@"Failed to lock device %@. Error: %@", - self.currentDevice, + RTCLogError(@"Failed to lock device %@. 
Error: %@", self.currentDevice, error.userInfo); if (completionHandler) { completionHandler(error); @@ -178,11 +199,12 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device return; } [self reconfigureCaptureSessionInput]; - [self updateOrientation]; [self updateDeviceCaptureFormat:format fps:fps]; [self updateVideoDataOutputPixelFormat:format]; - [self.captureSession startRunning]; + [self updateZoomFactor]; [self.currentDevice unlockForConfiguration]; + + [self.captureSession startRunning]; self.isRunning = YES; if (completionHandler) { completionHandler(nil); @@ -221,10 +243,7 @@ - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHand #if TARGET_OS_IPHONE - (void)deviceOrientationDidChange:(NSNotification *)notification { - [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession - block:^{ - [self updateOrientation]; - }]; + [self updateOrientation]; } #endif @@ -260,22 +279,20 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput usingFrontCamera = AVCaptureDevicePositionFront == deviceInput.device.position; } switch (_orientation) { - case UIDeviceOrientationPortrait: + case UIInterfaceOrientationPortrait: _rotation = RTCVideoRotation_90; break; - case UIDeviceOrientationPortraitUpsideDown: + case UIInterfaceOrientationPortraitUpsideDown: _rotation = RTCVideoRotation_270; break; - case UIDeviceOrientationLandscapeLeft: - _rotation = usingFrontCamera ? RTCVideoRotation_180 : RTCVideoRotation_0; - break; - case UIDeviceOrientationLandscapeRight: + case UIInterfaceOrientationLandscapeLeft: _rotation = usingFrontCamera ? RTCVideoRotation_0 : RTCVideoRotation_180; break; - case UIDeviceOrientationFaceUp: - case UIDeviceOrientationFaceDown: - case UIDeviceOrientationUnknown: - // Ignore. + case UIInterfaceOrientationLandscapeRight: + _rotation = usingFrontCamera ? 
RTCVideoRotation_180 : RTCVideoRotation_0; + break; + case UIInterfaceOrientationUnknown: + _rotation = RTCVideoRotation_0; break; } #else @@ -286,7 +303,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * - kNanosecondsPerSecond; + kNanosecondsPerSecond; RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer rotation:_rotation @@ -416,8 +433,7 @@ - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { - (dispatch_queue_t)frameQueue { if (!_frameQueue) { _frameQueue = RTCDispatchQueueCreateWithTarget( - "org.webrtc.cameravideocapturer.video", - DISPATCH_QUEUE_SERIAL, + "org.webrtc.cameravideocapturer.video", DISPATCH_QUEUE_SERIAL, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0)); } return _frameQueue; @@ -490,19 +506,31 @@ - (void)updateDeviceCaptureFormat:(AVCaptureDeviceFormat *)format fps:(NSInteger @"updateDeviceCaptureFormat must be called on the capture queue."); @try { _currentDevice.activeFormat = format; - _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); + if (![NSStringFromClass([_currentDevice class]) isEqualToString:@"AVCaptureDALDevice"]) { + _currentDevice.activeVideoMinFrameDuration = CMTimeMake(1, fps); + } } @catch (NSException *exception) { RTCLogError(@"Failed to set active format!\n User info:%@", exception.userInfo); return; } } +- (void)updateZoomFactor { + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], + @"updateZoomFactor must be called on the capture queue."); + +#if TARGET_OS_IOS || TARGET_OS_TV + CGFloat videoZoomFactor = [[self class] defaultZoomFactorForDeviceType:_currentDevice.deviceType]; + [_currentDevice setVideoZoomFactor:videoZoomFactor]; +#endif +} + - 
(void)reconfigureCaptureSessionInput { NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], @"reconfigureCaptureSessionInput must be called on the capture queue."); NSError *error = nil; - AVCaptureDeviceInput *input = - [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice error:&error]; + AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice + error:&error]; if (!input) { RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription); return; @@ -519,12 +547,19 @@ - (void)reconfigureCaptureSessionInput { [_captureSession commitConfiguration]; } -- (void)updateOrientation { - NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], - @"updateOrientation must be called on the capture queue."); #if TARGET_OS_IPHONE - _orientation = [UIDevice currentDevice].orientation; -#endif +- (void)updateOrientation { + NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeMain], + @"statusBarOrientation must be called on the main queue."); + // statusBarOrientation must be called on the main queue + UIInterfaceOrientation newOrientation = [UIApplication sharedApplication].statusBarOrientation; + + [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + block:^{ + // Must be called on the capture queue + self->_orientation = newOrientation; + }]; } +#endif @end diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h new file mode 100644 index 0000000000..30aed69d29 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h @@ -0,0 +1,49 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCDesktopCapturer.h" + +#include "sdk/objc/native/src/objc_desktop_capture.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(DesktopCapturerDelegate) +-(void)didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *) frame; +-(void)didSourceCaptureStart; +-(void)didSourceCapturePaused; +-(void)didSourceCaptureStop; +-(void)didSourceCaptureError; +@end + +@interface RTC_OBJC_TYPE(RTCDesktopCapturer) () + +@property(nonatomic, readonly)std::shared_ptr nativeCapturer; + +- (void)didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame; + +-(void)didSourceCaptureStart; + +-(void)didSourceCapturePaused; + +-(void)didSourceCaptureStop; + +-(void)didSourceCaptureError; + +@end + +NS_ASSUME_NONNULL_END \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.h b/sdk/objc/components/capturer/RTCDesktopCapturer.h new file mode 100644 index 0000000000..b63912acf0 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopCapturer.h @@ -0,0 +1,61 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import +#import + +#import "RTCMacros.h" +#import "RTCVideoCapturer.h" +#import "RTCDesktopSource.h" + +NS_ASSUME_NONNULL_BEGIN + +@class RTC_OBJC_TYPE(RTCDesktopCapturer); + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(RTCDesktopCapturerDelegate) +-(void)didSourceCaptureStart:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; + +-(void)didSourceCapturePaused:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; + +-(void)didSourceCaptureStop:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; + +-(void)didSourceCaptureError:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; +@end + +RTC_OBJC_EXPORT +// Screen capture that implements RTCVideoCapturer. Delivers frames to a +// RTCVideoCapturerDelegate (usually RTCVideoSource). +@interface RTC_OBJC_TYPE (RTCDesktopCapturer) : RTC_OBJC_TYPE(RTCVideoCapturer) + +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCDesktopSource) *source; + +- (instancetype)initWithSource:(RTC_OBJC_TYPE(RTCDesktopSource) *)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate; + +- (instancetype)initWithDefaultScreen:(__weak id)delegate captureDelegate:(__weak id)captureDelegate; + +- (void)startCapture; + +- (void)startCaptureWithFPS:(NSInteger)fps; + +- (void)stopCapture; + +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.mm b/sdk/objc/components/capturer/RTCDesktopCapturer.mm new file mode 100644 index 0000000000..a1948684d3 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopCapturer.mm @@ -0,0 +1,104 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "base/RTCLogging.h" +#import "base/RTCVideoFrameBuffer.h" + +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +#import "RTCDesktopCapturer.h" +#import "RTCDesktopCapturer+Private.h" +#import "RTCDesktopSource+Private.h" + +@implementation RTC_OBJC_TYPE (RTCDesktopCapturer) { + __weak id _delegate; +} + +@synthesize nativeCapturer = _nativeCapturer; +@synthesize source = _source; + +- (instancetype)initWithSource:(RTC_OBJC_TYPE(RTCDesktopSource) *)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate { + if (self = [super initWithDelegate:captureDelegate]) { + webrtc::DesktopType captureType = webrtc::kScreen; + if(source.sourceType == RTCDesktopSourceTypeWindow) { + captureType = webrtc::kWindow; + } + _nativeCapturer = std::make_shared(captureType, source.nativeMediaSource->id(), self); + _source = source; + _delegate = delegate; + } + return self; +} + +- (instancetype)initWithDefaultScreen:(__weak id)delegate captureDelegate:(__weak id)captureDelegate { + if (self = [super initWithDelegate:captureDelegate]) { + _nativeCapturer = std::make_unique(webrtc::kScreen, -1, self); + _source = nil; + _delegate = delegate; + } + return self; +} + + +-(void)dealloc { + _nativeCapturer->Stop(); + _nativeCapturer = nullptr; +} + +- (void)startCapture { + [self didSourceCaptureStart]; + _nativeCapturer->Start(30); +} + +- (void)startCaptureWithFPS:(NSInteger)fps { + _nativeCapturer->Start(fps); +} + +- (void)didCaptureVideoFrame + : (RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + [self.delegate 
capturer:self didCaptureVideoFrame:frame]; +} + +- (void)stopCapture { + _nativeCapturer->Stop(); +} + +- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHandler { + [self stopCapture]; + if(completionHandler != nil) { + completionHandler(); + } +} + +-(void)didSourceCaptureStart { + [_delegate didSourceCaptureStart:self]; +} + +-(void)didSourceCapturePaused { + [_delegate didSourceCapturePaused:self]; +} + +-(void)didSourceCaptureStop { + [_delegate didSourceCaptureStop:self]; +} + +-(void)didSourceCaptureError { + [_delegate didSourceCaptureError:self]; +} + +@end diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h b/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h new file mode 100644 index 0000000000..eb1e76ddbb --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopMediaList+Private.h @@ -0,0 +1,40 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCDesktopMediaList.h" + +namespace webrtc { + class ObjCDesktopMediaList; + class MediaSource; +} + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCDesktopMediaList) () + +@property(nonatomic, readonly)std::shared_ptr nativeMediaList; + +-(void)mediaSourceAdded:(webrtc::MediaSource *) source; + +-(void)mediaSourceRemoved:(webrtc::MediaSource *) source; + +-(void)mediaSourceNameChanged:(webrtc::MediaSource *) source; + +-(void)mediaSourceThumbnailChanged:(webrtc::MediaSource *) source; + +@end + +NS_ASSUME_NONNULL_END \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList.h b/sdk/objc/components/capturer/RTCDesktopMediaList.h new file mode 100644 index 0000000000..fafeaf5e0d --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopMediaList.h @@ -0,0 +1,51 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import +#import + +#import "RTCMacros.h" +#import "RTCDesktopSource.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@protocol RTC_OBJC_TYPE +(RTCDesktopMediaListDelegate) + +- (void)didDesktopSourceAdded:(RTC_OBJC_TYPE(RTCDesktopSource) *) source; + +- (void)didDesktopSourceRemoved:(RTC_OBJC_TYPE(RTCDesktopSource) *) source; + +- (void)didDesktopSourceNameChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *) source; + +- (void)didDesktopSourceThumbnailChanged:(RTC_OBJC_TYPE(RTCDesktopSource) *) source; +@end + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCDesktopMediaList) : NSObject + +-(instancetype)initWithType:(RTCDesktopSourceType)type delegate:(__weak id)delegate; + +@property(nonatomic, readonly) RTCDesktopSourceType sourceType; + +- (int32_t)UpdateSourceList:(BOOL)forceReload updateAllThumbnails:(BOOL)updateThumbnail; + +- (NSArray*) getSources; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList.mm b/sdk/objc/components/capturer/RTCDesktopMediaList.mm new file mode 100644 index 0000000000..2bd6c1da0e --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopMediaList.mm @@ -0,0 +1,99 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import "RTCDesktopMediaList.h" + +#import "RTCDesktopSource+Private.h" +#import "RTCDesktopMediaList+Private.h" + +@implementation RTC_OBJC_TYPE(RTCDesktopMediaList) { + RTCDesktopSourceType _sourceType; + NSMutableArray* _sources; + __weak id _delegate; +} + +@synthesize sourceType = _sourceType; +@synthesize nativeMediaList = _nativeMediaList; + +- (instancetype)initWithType:(RTCDesktopSourceType)type delegate:(__weak id)delegate{ + if (self = [super init]) { + webrtc::DesktopType captureType = webrtc::kScreen; + if(type == RTCDesktopSourceTypeWindow) { + captureType = webrtc::kWindow; + } + _nativeMediaList = std::make_shared(captureType, self); + _sourceType = type; + _delegate = delegate; + } + return self; +} + +- (int32_t)UpdateSourceList:(BOOL)forceReload updateAllThumbnails:(BOOL)updateThumbnail { + return _nativeMediaList->UpdateSourceList(forceReload, updateThumbnail); +} + +-(NSArray*) getSources { + _sources = [NSMutableArray array]; + int sourceCount = _nativeMediaList->GetSourceCount(); + for (int i = 0; i < sourceCount; i++) { + webrtc::MediaSource *mediaSource = _nativeMediaList->GetSource(i); + [_sources addObject:[[RTC_OBJC_TYPE(RTCDesktopSource) alloc] initWithNativeSource:mediaSource sourceType:_sourceType]]; + } + return _sources; +} + +-(void)mediaSourceAdded:(webrtc::MediaSource *) source { + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [[RTC_OBJC_TYPE(RTCDesktopSource) alloc] initWithNativeSource:source sourceType:_sourceType]; + [_sources addObject:desktopSource]; + [_delegate didDesktopSourceAdded:desktopSource]; +} + +-(void)mediaSourceRemoved:(webrtc::MediaSource *) source { + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source]; + if(desktopSource != nil) { + [_sources removeObject:desktopSource]; + [_delegate didDesktopSourceRemoved:desktopSource]; + } +} + +-(void)mediaSourceNameChanged:(webrtc::MediaSource *) source { + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source]; 
+ if(desktopSource != nil) { + [desktopSource setName:source->name().c_str()]; + [_delegate didDesktopSourceNameChanged:desktopSource]; + } +} + +-(void)mediaSourceThumbnailChanged:(webrtc::MediaSource *) source { + RTC_OBJC_TYPE(RTCDesktopSource) *desktopSource = [self getSourceById:source]; + if(desktopSource != nil) { + [desktopSource setThumbnail:source->thumbnail()]; + [_delegate didDesktopSourceThumbnailChanged:desktopSource]; + } +} + +-(RTC_OBJC_TYPE(RTCDesktopSource) *)getSourceById:(webrtc::MediaSource *) source { + NSEnumerator *enumerator = [_sources objectEnumerator]; + RTC_OBJC_TYPE(RTCDesktopSource) *object; + while ((object = enumerator.nextObject) != nil) { + if(object.nativeMediaSource == source) { + return object; + } + } + return nil; +} + +@end \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopSource+Private.h b/sdk/objc/components/capturer/RTCDesktopSource+Private.h new file mode 100644 index 0000000000..3f4c4ef25f --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopSource+Private.h @@ -0,0 +1,37 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#import + +#import "RTCDesktopSource.h" + +#include "sdk/objc/native/src/objc_desktop_media_list.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCDesktopSource) () + +- (instancetype)initWithNativeSource:(webrtc::MediaSource*) nativeSource + sourceType:(RTCDesktopSourceType) sourceType; + +@property(nonatomic, readonly)webrtc::MediaSource* nativeMediaSource; + +-(void) setName:(const char *) name; + +-(void) setThumbnail:(std::vector) thumbnail; + +@end + +NS_ASSUME_NONNULL_END \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopSource.h b/sdk/objc/components/capturer/RTCDesktopSource.h new file mode 100644 index 0000000000..82da458ce6 --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopSource.h @@ -0,0 +1,40 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ +#import +#import +#import + +#import "RTCMacros.h" + +typedef NS_ENUM(NSInteger, RTCDesktopSourceType) { + RTCDesktopSourceTypeScreen, + RTCDesktopSourceTypeWindow, +}; + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCDesktopSource) : NSObject + +@property(nonatomic, readonly) NSString *sourceId; + +@property(nonatomic, readonly) NSString *name; + +@property(nonatomic, readonly) NSImage *thumbnail; + +@property(nonatomic, readonly) RTCDesktopSourceType sourceType; + +-( NSImage *)UpdateThumbnail; + +@end \ No newline at end of file diff --git a/sdk/objc/components/capturer/RTCDesktopSource.mm b/sdk/objc/components/capturer/RTCDesktopSource.mm new file mode 100644 index 0000000000..e1bdc6893a --- /dev/null +++ b/sdk/objc/components/capturer/RTCDesktopSource.mm @@ -0,0 +1,68 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import + +#import "RTCDesktopSource.h" +#import "RTCDesktopSource+Private.h" + +@implementation RTC_OBJC_TYPE(RTCDesktopSource) { + NSString *_sourceId; + NSString *_name; + NSImage *_thumbnail; + RTCDesktopSourceType _sourceType; +} + +@synthesize sourceId = _sourceId; +@synthesize name = _name; +@synthesize thumbnail = _thumbnail; +@synthesize sourceType = _sourceType; +@synthesize nativeMediaSource = _nativeMediaSource; + +- (instancetype)initWithNativeSource:(webrtc::MediaSource*)nativeSource + sourceType:(RTCDesktopSourceType) sourceType { + if (self = [super init]) { + _nativeMediaSource = nativeSource; + _sourceId = [NSString stringWithUTF8String:std::to_string(nativeSource->id()).c_str()]; + _name = [NSString stringWithUTF8String:nativeSource->name().c_str()]; + _thumbnail = [self createThumbnailFromNativeSource:nativeSource->thumbnail()]; + _sourceType = sourceType; + } + return self; +} + +-(NSImage*)createThumbnailFromNativeSource:(std::vector)thumbnail { + NSData* data = [[NSData alloc] initWithBytes:thumbnail.data() length:thumbnail.size()]; + NSImage *image = [[NSImage alloc] initWithData:data]; + return image; +} + +-( NSImage *)UpdateThumbnail { + if(_nativeMediaSource->UpdateThumbnail()) { + _thumbnail = [self createThumbnailFromNativeSource:_nativeMediaSource->thumbnail()]; + } + return _thumbnail; +} + +-(void)setName:(const char *) name { + _name = [NSString stringWithUTF8String:name]; +} + +-(void)setThumbnail:(std::vector) thumbnail { + _thumbnail = [self createThumbnailFromNativeSource:thumbnail]; +} + +@end diff --git a/sdk/objc/components/network/RTCNetworkMonitor+Private.h b/sdk/objc/components/network/RTCNetworkMonitor+Private.h index b5c786be18..f3761f7ba3 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor+Private.h +++ b/sdk/objc/components/network/RTCNetworkMonitor+Private.h @@ -9,16 +9,18 @@ */ #import "RTCNetworkMonitor.h" +#import "RTCMacros.h" #include "sdk/objc/native/src/network_monitor_observer.h" -@interface 
RTCNetworkMonitor () +@interface RTC_OBJC_TYPE (RTCNetworkMonitor) +() -/** `observer` is a raw pointer and should be kept alive - * for this object's lifetime. - */ -- (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer - NS_DESIGNATED_INITIALIZER; + /** `observer` is a raw pointer and should be kept alive + * for this object's lifetime. + */ + - (instancetype)initWithObserver + : (webrtc::NetworkMonitorObserver *)observer NS_DESIGNATED_INITIALIZER; /** Stops the receiver from posting updates to `observer`. */ - (void)stop; diff --git a/sdk/objc/components/network/RTCNetworkMonitor.h b/sdk/objc/components/network/RTCNetworkMonitor.h index 21d22f5463..4b0cb4baf0 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.h +++ b/sdk/objc/components/network/RTCNetworkMonitor.h @@ -10,12 +10,14 @@ #import +#import "RTCMacros.h" + NS_ASSUME_NONNULL_BEGIN /** Listens for NWPathMonitor updates and forwards the results to a C++ * observer. */ -@interface RTCNetworkMonitor : NSObject +@interface RTC_OBJC_TYPE (RTCNetworkMonitor): NSObject - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/components/network/RTCNetworkMonitor.mm b/sdk/objc/components/network/RTCNetworkMonitor.mm index 7e75b2b4c0..2e42ab5290 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.mm +++ b/sdk/objc/components/network/RTCNetworkMonitor.mm @@ -46,7 +46,7 @@ } // namespace -@implementation RTCNetworkMonitor { +@implementation RTC_OBJC_TYPE (RTCNetworkMonitor) { webrtc::NetworkMonitorObserver *_observer; nw_path_monitor_t _pathMonitor; dispatch_queue_t _monitorQueue; @@ -63,12 +63,12 @@ - (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer { return nil; } RTCLog(@"NW path monitor created."); - __weak RTCNetworkMonitor *weakSelf = self; + __weak RTC_OBJC_TYPE(RTCNetworkMonitor) *weakSelf = self; nw_path_monitor_set_update_handler(_pathMonitor, ^(nw_path_t path) { if (weakSelf == nil) { return; } - RTCNetworkMonitor *strongSelf = weakSelf; + 
RTC_OBJC_TYPE(RTCNetworkMonitor) *strongSelf = weakSelf; RTCLog(@"NW path monitor: updated."); nw_path_status_t status = nw_path_get_status(path); if (status == nw_path_status_invalid) { diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h index e5987fe22a..c4e2724042 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.h @@ -13,5 +13,5 @@ #import "RTCMTLRenderer.h" NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLI420Renderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE(RTCMTLI420Renderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm index eba8800240..d7852ad958 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLI420Renderer.mm @@ -70,7 +70,7 @@ fragment half4 fragmentColorConversion( return half4(out); }); -@implementation RTCMTLI420Renderer { +@implementation RTC_OBJC_TYPE(RTCMTLI420Renderer) { // Textures. id _yTexture; id _uTexture; diff --git a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m deleted file mode 100644 index 625fb1caa7..0000000000 --- a/sdk/objc/components/renderer/metal/RTCMTLNSVideoView.m +++ /dev/null @@ -1,122 +0,0 @@ -/* - * Copyright 2017 The WebRTC project authors. All Rights Reserved. - * - * Use of this source code is governed by a BSD-style license - * that can be found in the LICENSE file in the root of the source - * tree. An additional intellectual property rights grant can be found - * in the file PATENTS. All contributing project authors may - * be found in the AUTHORS file in the root of the source tree. 
- */ - -#import "RTCMTLNSVideoView.h" - -#import -#import - -#import "base/RTCVideoFrame.h" - -#import "RTCMTLI420Renderer.h" - -@interface RTC_OBJC_TYPE (RTCMTLNSVideoView) -() @property(nonatomic) id renderer; -@property(nonatomic, strong) MTKView *metalView; -@property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; -@end - -@implementation RTC_OBJC_TYPE (RTCMTLNSVideoView) { - id _renderer; -} - -@synthesize delegate = _delegate; -@synthesize renderer = _renderer; -@synthesize metalView = _metalView; -@synthesize videoFrame = _videoFrame; - -- (instancetype)initWithFrame:(CGRect)frameRect { - self = [super initWithFrame:frameRect]; - if (self) { - [self configure]; - } - return self; -} - -- (instancetype)initWithCoder:(NSCoder *)aCoder { - self = [super initWithCoder:aCoder]; - if (self) { - [self configure]; - } - return self; -} - -#pragma mark - Private - -+ (BOOL)isMetalAvailable { - return [MTLCopyAllDevices() count] > 0; -} - -- (void)configure { - if ([[self class] isMetalAvailable]) { - _metalView = [[MTKView alloc] initWithFrame:self.bounds]; - [self addSubview:_metalView]; - _metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit; - _metalView.translatesAutoresizingMaskIntoConstraints = NO; - _metalView.framebufferOnly = YES; - _metalView.delegate = self; - - _renderer = [[RTCMTLI420Renderer alloc] init]; - if (![(RTCMTLI420Renderer *)_renderer addRenderingDestination:_metalView]) { - _renderer = nil; - }; - } -} - -- (void)updateConstraints { - NSDictionary *views = NSDictionaryOfVariableBindings(_metalView); - - NSArray *constraintsHorizontal = - [NSLayoutConstraint constraintsWithVisualFormat:@"H:|-0-[_metalView]-0-|" - options:0 - metrics:nil - views:views]; - [self addConstraints:constraintsHorizontal]; - - NSArray *constraintsVertical = - [NSLayoutConstraint constraintsWithVisualFormat:@"V:|-0-[_metalView]-0-|" - options:0 - metrics:nil - views:views]; - [self addConstraints:constraintsVertical]; - 
[super updateConstraints]; -} - -#pragma mark - MTKViewDelegate methods -- (void)drawInMTKView:(nonnull MTKView *)view { - if (self.videoFrame == nil) { - return; - } - if (view == self.metalView) { - [_renderer drawFrame:self.videoFrame]; - } -} - -- (void)mtkView:(MTKView *)view drawableSizeWillChange:(CGSize)size { -} - -#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) - -- (void)setSize:(CGSize)size { - _metalView.drawableSize = size; - dispatch_async(dispatch_get_main_queue(), ^{ - [self.delegate videoView:self didChangeVideoSize:size]; - }); - [_metalView draw]; -} - -- (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { - if (frame == nil) { - return; - } - self.videoFrame = [frame newI420VideoFrame]; -} - -@end diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h index 866b7ea17e..125612a269 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.h @@ -13,6 +13,6 @@ #import "RTCMTLRenderer.h" NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLNV12Renderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE(RTCMTLNV12Renderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm index 7b037c6dbc..c4000b1b1d 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLNV12Renderer.mm @@ -60,7 +60,7 @@ fragment half4 fragmentColorConversion( return half4(out); }); -@implementation RTCMTLNV12Renderer { +@implementation RTC_OBJC_TYPE(RTCMTLNV12Renderer) { // Textures. 
CVMetalTextureCacheRef _textureCache; id _yTexture; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h index 9db422cd22..5e355a8504 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.h @@ -11,12 +11,13 @@ #import #import "RTCMTLRenderer.h" +#import "RTCMacros.h" /** @abstract RGB/BGR renderer. * @discussion This renderer handles both kCVPixelFormatType_32BGRA and * kCVPixelFormatType_32ARGB. */ NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLRGBRenderer : RTCMTLRenderer +@interface RTC_OBJC_TYPE (RTCMTLRGBRenderer): RTC_OBJC_TYPE(RTCMTLRenderer) @end diff --git a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm index e5dc4ef80a..6ca4a4000d 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRGBRenderer.mm @@ -30,12 +30,12 @@ } Vertex; typedef struct { - float4 position[[position]]; + float4 position [[position]]; float2 texcoord; } VertexIO; - vertex VertexIO vertexPassthrough(constant Vertex *verticies[[buffer(0)]], - uint vid[[vertex_id]]) { + vertex VertexIO vertexPassthrough(constant Vertex * verticies [[buffer(0)]], + uint vid [[vertex_id]]) { VertexIO out; constant Vertex &v = verticies[vid]; out.position = float4(float2(v.position), 0.0, 1.0); @@ -43,9 +43,9 @@ vertex VertexIO vertexPassthrough(constant Vertex *verticies[[buffer(0)]], return out; } - fragment half4 fragmentColorConversion(VertexIO in[[stage_in]], - texture2d texture[[texture(0)]], - constant bool &isARGB[[buffer(0)]]) { + fragment half4 fragmentColorConversion(VertexIO in [[stage_in]], + texture2d texture [[texture(0)]], + constant bool &isARGB [[buffer(0)]]) { constexpr sampler s(address::clamp_to_edge, filter::linear); half4 out = texture.sample(s, in.texcoord); @@ -56,7 +56,7 @@ fragment half4 
fragmentColorConversion(VertexIO in[[stage_in]], return out; }); -@implementation RTCMTLRGBRenderer { +@implementation RTC_OBJC_TYPE (RTCMTLRGBRenderer) { // Textures. CVMetalTextureCacheRef _textureCache; id _texture; @@ -73,8 +73,8 @@ - (BOOL)addRenderingDestination:(__kindof MTKView *)view { } - (BOOL)initializeTextureCache { - CVReturn status = CVMetalTextureCacheCreate(kCFAllocatorDefault, nil, [self currentMetalDevice], - nil, &_textureCache); + CVReturn status = CVMetalTextureCacheCreate( + kCFAllocatorDefault, nil, [self currentMetalDevice], nil, &_textureCache); if (status != kCVReturnSuccess) { RTCLogError(@"Metal: Failed to initialize metal texture cache. Return status is %d", status); return NO; @@ -130,9 +130,15 @@ - (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { return NO; } - CVReturn result = CVMetalTextureCacheCreateTextureFromImage( - kCFAllocatorDefault, _textureCache, pixelBuffer, nil, mtlPixelFormat, - width, height, 0, &textureOut); + CVReturn result = CVMetalTextureCacheCreateTextureFromImage(kCFAllocatorDefault, + _textureCache, + pixelBuffer, + nil, + mtlPixelFormat, + width, + height, + 0, + &textureOut); if (result == kCVReturnSuccess) { gpuTexture = CVMetalTextureGetTexture(textureOut); } diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h index 916d4d4430..f6a82db56a 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer+Private.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCMTLRenderer (Private) +@interface RTC_OBJC_TYPE(RTCMTLRenderer) (Private) - (nullable id)currentMetalDevice; - (NSString *)shaderSource; - (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame; diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h index aa31545973..6bbca3d985 100644 --- 
a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h @@ -21,7 +21,7 @@ NS_ASSUME_NONNULL_BEGIN /** * Protocol defining ability to render RTCVideoFrame in Metal enabled views. */ -@protocol RTCMTLRenderer +@protocol RTC_OBJC_TYPE(RTCMTLRenderer) /** * Method to be implemented to perform actual rendering of the provided frame. @@ -49,7 +49,7 @@ NS_ASSUME_NONNULL_BEGIN * Implementation of RTCMTLRenderer protocol. */ NS_AVAILABLE(10_11, 9_0) -@interface RTCMTLRenderer : NSObject +@interface RTC_OBJC_TYPE(RTCMTLRenderer) : NSObject /** @abstract A wrapped RTCVideoRotation, or nil. @discussion When not nil, the rotation of the actual frame is ignored when rendering. diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm index 410590a7b1..ca3fcc3e51 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm @@ -87,7 +87,7 @@ static inline void getCubeVertexData(int cropX, // In future we might use triple buffering method if it improves performance. static const NSInteger kMaxInflightBuffers = 1; -@implementation RTCMTLRenderer { +@implementation RTC_OBJC_TYPE(RTCMTLRenderer) { __kindof MTKView *_view; // Controller. diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h index 3320d12076..bed02ffa92 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.h +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.h @@ -10,6 +10,10 @@ #import +#if TARGET_OS_OSX +#import +#endif + #import "RTCMacros.h" #import "RTCVideoFrame.h" #import "RTCVideoRenderer.h" @@ -22,14 +26,26 @@ NS_ASSUME_NONNULL_BEGIN * It has id property that renders video frames in the view's * bounds using Metal. 
*/ +#if TARGET_OS_IPHONE NS_CLASS_AVAILABLE_IOS(9) +#elif TARGET_OS_OSX +NS_AVAILABLE_MAC(10.11) +#endif RTC_OBJC_EXPORT -@interface RTC_OBJC_TYPE (RTCMTLVideoView) : UIView +@interface RTC_OBJC_TYPE (RTCMTLVideoView) : + +#if TARGET_OS_IPHONE + UIView +#elif TARGET_OS_OSX + NSView +#endif @property(nonatomic, weak) id delegate; +#if TARGET_OS_IPHONE @property(nonatomic) UIViewContentMode videoContentMode; +#endif /** @abstract Enables/disables rendering. */ @@ -39,6 +55,8 @@ RTC_OBJC_EXPORT */ @property(nonatomic, nullable) NSValue* rotationOverride; ++ (BOOL)isMetalAvailable; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m index c5d9e4385f..d4d98a0bf4 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m @@ -22,17 +22,12 @@ #import "RTCMTLNV12Renderer.h" #import "RTCMTLRGBRenderer.h" -// To avoid unreconized symbol linker errors, we're taking advantage of the objc runtime. -// Linking errors occur when compiling for architectures that don't support Metal. 
-#define MTKViewClass NSClassFromString(@"MTKView") -#define RTCMTLNV12RendererClass NSClassFromString(@"RTCMTLNV12Renderer") -#define RTCMTLI420RendererClass NSClassFromString(@"RTCMTLI420Renderer") -#define RTCMTLRGBRendererClass NSClassFromString(@"RTCMTLRGBRenderer") - -@interface RTC_OBJC_TYPE (RTCMTLVideoView) -() @property(nonatomic) RTCMTLI420Renderer *rendererI420; -@property(nonatomic) RTCMTLNV12Renderer *rendererNV12; -@property(nonatomic) RTCMTLRGBRenderer *rendererRGB; +#import "RTCMTLRenderer+Private.h" + +@interface RTC_OBJC_TYPE (RTCMTLVideoView) () +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLI420Renderer) *rendererI420; +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLNV12Renderer) * rendererNV12; +@property(nonatomic) RTC_OBJC_TYPE(RTCMTLRGBRenderer) * rendererRGB; @property(nonatomic) MTKView *metalView; @property(atomic) RTC_OBJC_TYPE(RTCVideoFrame) * videoFrame; @property(nonatomic) CGSize videoFrameSize; @@ -51,6 +46,14 @@ @implementation RTC_OBJC_TYPE (RTCMTLVideoView) @synthesize lastFrameTimeNs = _lastFrameTimeNs; @synthesize rotationOverride = _rotationOverride; ++ (BOOL)isMetalAvailable { +#if TARGET_OS_IPHONE + return MTLCreateSystemDefaultDevice() != nil; +#elif TARGET_OS_OSX + return [MTLCopyAllDevices() count] > 0; +#endif +} + - (instancetype)initWithFrame:(CGRect)frameRect { self = [super initWithFrame:frameRect]; if (self) { @@ -75,6 +78,7 @@ - (void)setEnabled:(BOOL)enabled { self.metalView.paused = !enabled; } +#if TARGET_OS_IPHONE - (UIViewContentMode)videoContentMode { return self.metalView.contentMode; } @@ -82,27 +86,24 @@ - (UIViewContentMode)videoContentMode { - (void)setVideoContentMode:(UIViewContentMode)mode { self.metalView.contentMode = mode; } +#endif #pragma mark - Private -+ (BOOL)isMetalAvailable { - return MTLCreateSystemDefaultDevice() != nil; -} - + (MTKView *)createMetalView:(CGRect)frame { - return [[MTKViewClass alloc] initWithFrame:frame]; + return [[MTKView alloc] initWithFrame:frame]; } -+ (RTCMTLNV12Renderer 
*)createNV12Renderer { - return [[RTCMTLNV12RendererClass alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLNV12Renderer) *)createNV12Renderer { + return [[RTC_OBJC_TYPE(RTCMTLNV12Renderer) alloc] init]; } -+ (RTCMTLI420Renderer *)createI420Renderer { - return [[RTCMTLI420RendererClass alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLI420Renderer) *)createI420Renderer { + return [[RTC_OBJC_TYPE(RTCMTLI420Renderer) alloc] init]; } -+ (RTCMTLRGBRenderer *)createRGBRenderer { - return [[RTCMTLRGBRenderer alloc] init]; ++ (RTC_OBJC_TYPE(RTCMTLRGBRenderer) *)createRGBRenderer { + return [[RTC_OBJC_TYPE(RTCMTLRGBRenderer) alloc] init]; } - (void)configure { @@ -111,19 +112,24 @@ - (void)configure { self.metalView = [RTC_OBJC_TYPE(RTCMTLVideoView) createMetalView:self.bounds]; self.metalView.delegate = self; +#if TARGET_OS_IPHONE self.metalView.contentMode = UIViewContentModeScaleAspectFill; +#elif TARGET_OS_OSX + self.metalView.layerContentsPlacement = NSViewLayerContentsPlacementScaleProportionallyToFit; +#endif + [self addSubview:self.metalView]; self.videoFrameSize = CGSizeZero; } +#if TARGET_OS_IPHONE - (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { - [super setMultipleTouchEnabled:multipleTouchEnabled]; - self.metalView.multipleTouchEnabled = multipleTouchEnabled; + [super setMultipleTouchEnabled:multipleTouchEnabled]; + self.metalView.multipleTouchEnabled = multipleTouchEnabled; } +#endif -- (void)layoutSubviews { - [super layoutSubviews]; - +- (void)performLayout { CGRect bounds = self.bounds; self.metalView.frame = bounds; if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) { @@ -148,7 +154,7 @@ - (void)drawInMTKView:(nonnull MTKView *)view { return; } - RTCMTLRenderer *renderer; + RTC_OBJC_TYPE(RTCMTLRenderer) * renderer; if ([videoFrame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)videoFrame.buffer; const OSType pixelFormat = 
CVPixelBufferGetPixelFormatType(buffer.pixelBuffer); @@ -203,10 +209,10 @@ - (void)setRotationOverride:(NSValue *)rotationOverride { [self setNeedsLayout]; } -- (RTCVideoRotation)frameRotation { +- (RTCVideoRotation)videoRotation { if (self.rotationOverride) { RTCVideoRotation rotation; - if (@available(iOS 11, *)) { + if (@available(iOS 11, macos 10.13, *)) { [self.rotationOverride getValue:&rotation size:sizeof(rotation)]; } else { [self.rotationOverride getValue:&rotation]; @@ -220,10 +226,10 @@ - (RTCVideoRotation)frameRotation { - (CGSize)drawableSize { // Flip width/height if the rotations are not the same. CGSize videoFrameSize = self.videoFrameSize; - RTCVideoRotation frameRotation = [self frameRotation]; + RTCVideoRotation videoRotation = [self videoRotation]; BOOL useLandscape = - (frameRotation == RTCVideoRotation_0) || (frameRotation == RTCVideoRotation_180); + (videoRotation == RTCVideoRotation_0) || (videoRotation == RTCVideoRotation_180); BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) || (self.videoFrame.rotation == RTCVideoRotation_180); @@ -259,7 +265,34 @@ - (void)renderFrame:(nullable RTC_OBJC_TYPE(RTCVideoFrame) *)frame { RTCLogInfo(@"Incoming frame is nil. Exiting render callback."); return; } - self.videoFrame = frame; + + // Workaround to support RTCCVPixelBuffer rendering. + // RTCMTLRGBRenderer seems to be broken at the moment. + BOOL useI420 = NO; + if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { + RTC_OBJC_TYPE(RTCCVPixelBuffer) *buffer = (RTC_OBJC_TYPE(RTCCVPixelBuffer) *)frame.buffer; + const OSType pixelFormat = CVPixelBufferGetPixelFormatType(buffer.pixelBuffer); + useI420 = pixelFormat == kCVPixelFormatType_32BGRA || pixelFormat == kCVPixelFormatType_32ARGB; + } + self.videoFrame = useI420 ? 
[frame newI420VideoFrame] : frame; +} + +#pragma mark - Cross platform + +#if TARGET_OS_IPHONE +- (void)layoutSubviews { + [super layoutSubviews]; + [self performLayout]; +} +#elif TARGET_OS_OSX +- (void)layout { + [super layout]; + [self performLayout]; +} + +- (void)setNeedsLayout { + self.needsLayout = YES; } +#endif @end diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h index 71a073ab21..b00cf8047d 100644 --- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.h +++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN * and RTCEAGLVideoView if no external shader is specified. This shader will render * the video in a rectangle without any color or geometric transformations. */ -@interface RTCDefaultShader : NSObject +@interface RTC_OBJC_TYPE(RTCDefaultShader) : NSObject @end diff --git a/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm b/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm index 9d686f625c..7f9373dc3a 100644 --- a/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm +++ b/sdk/objc/components/renderer/opengl/RTCDefaultShader.mm @@ -65,7 +65,7 @@ " 1.0);\n" " }\n"; -@implementation RTCDefaultShader { +@implementation RTC_OBJC_TYPE(RTCDefaultShader) { GLuint _vertexBuffer; GLuint _vertexArray; // Store current rotation and only upload new vertex data when rotation changes. diff --git a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h index b78501e9e6..1c5b64fdfc 100644 --- a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h +++ b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.h @@ -10,11 +10,13 @@ #import +#import "RTCMacros.h" + // RTCDisplayLinkTimer wraps a CADisplayLink and is set to fire every two screen // refreshes, which should be 30fps. 
We wrap the display link in order to avoid // a retain cycle since CADisplayLink takes a strong reference onto its target. // The timer is paused by default. -@interface RTCDisplayLinkTimer : NSObject +@interface RTC_OBJC_TYPE (RTCDisplayLinkTimer): NSObject @property(nonatomic) BOOL isPaused; diff --git a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m index 906bb898d6..f4cf03304d 100644 --- a/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m +++ b/sdk/objc/components/renderer/opengl/RTCDisplayLinkTimer.m @@ -12,7 +12,7 @@ #import -@implementation RTCDisplayLinkTimer { +@implementation RTC_OBJC_TYPE (RTCDisplayLinkTimer) { CADisplayLink *_displayLink; void (^_timerHandler)(void); } @@ -21,17 +21,15 @@ - (instancetype)initWithTimerHandler:(void (^)(void))timerHandler { NSParameterAssert(timerHandler); if (self = [super init]) { _timerHandler = timerHandler; - _displayLink = - [CADisplayLink displayLinkWithTarget:self - selector:@selector(displayLinkDidFire:)]; + _displayLink = [CADisplayLink displayLinkWithTarget:self + selector:@selector(displayLinkDidFire:)]; _displayLink.paused = YES; #if __IPHONE_OS_VERSION_MIN_REQUIRED >= __IPHONE_10_0 _displayLink.preferredFramesPerSecond = 30; #else [_displayLink setFrameInterval:2]; #endif - [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] - forMode:NSRunLoopCommonModes]; + [_displayLink addToRunLoop:[NSRunLoop currentRunLoop] forMode:NSRunLoopCommonModes]; } return self; } diff --git a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m index 89e62d2ce7..0a00494d2d 100644 --- a/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m +++ b/sdk/objc/components/renderer/opengl/RTCEAGLVideoView.m @@ -42,14 +42,14 @@ @interface RTC_OBJC_TYPE (RTCEAGLVideoView) @end @implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { - RTCDisplayLinkTimer *_timer; + RTC_OBJC_TYPE(RTCDisplayLinkTimer) * 
_timer; EAGLContext *_glContext; // This flag should only be set and read on the main thread (e.g. by // setNeedsDisplay) BOOL _isDirty; id _shader; - RTCNV12TextureCache *_nv12TextureCache; - RTCI420TextureCache *_i420TextureCache; + RTC_OBJC_TYPE(RTCNV12TextureCache) *_nv12TextureCache; + RTC_OBJC_TYPE(RTCI420TextureCache) *_i420TextureCache; // As timestamps should be unique between frames, will store last // drawn frame timestamp instead of the whole frame to reduce memory usage. int64_t _lastDrawnFrameTimeStampNs; @@ -61,11 +61,11 @@ @implementation RTC_OBJC_TYPE (RTCEAGLVideoView) { @synthesize rotationOverride = _rotationOverride; - (instancetype)initWithFrame:(CGRect)frame { - return [self initWithFrame:frame shader:[[RTCDefaultShader alloc] init]]; + return [self initWithFrame:frame shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithCoder:(NSCoder *)aDecoder { - return [self initWithCoder:aDecoder shader:[[RTCDefaultShader alloc] init]]; + return [self initWithCoder:aDecoder shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; } - (instancetype)initWithFrame:(CGRect)frame shader:(id)shader { @@ -90,8 +90,7 @@ - (instancetype)initWithCoder:(NSCoder *)aDecoder } - (BOOL)configure { - EAGLContext *glContext = - [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3]; + EAGLContext *glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES3]; if (!glContext) { glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2]; } @@ -102,8 +101,7 @@ - (BOOL)configure { _glContext = glContext; // GLKView manages a framebuffer for us. 
- _glkView = [[GLKView alloc] initWithFrame:CGRectZero - context:_glContext]; + _glkView = [[GLKView alloc] initWithFrame:CGRectZero context:_glContext]; _glkView.drawableColorFormat = GLKViewDrawableColorFormatRGBA8888; _glkView.drawableDepthFormat = GLKViewDrawableDepthFormatNone; _glkView.drawableStencilFormat = GLKViewDrawableStencilFormatNone; @@ -115,8 +113,7 @@ - (BOOL)configure { // Listen to application state in order to clean up OpenGL before app goes // away. - NSNotificationCenter *notificationCenter = - [NSNotificationCenter defaultCenter]; + NSNotificationCenter *notificationCenter = [NSNotificationCenter defaultCenter]; [notificationCenter addObserver:self selector:@selector(willResignActive) name:UIApplicationWillResignActiveNotification @@ -130,7 +127,7 @@ - (BOOL)configure { // using a refresh rate proportional to screen refresh frequency. This // occurs on the main thread. __weak RTC_OBJC_TYPE(RTCEAGLVideoView) *weakSelf = self; - _timer = [[RTCDisplayLinkTimer alloc] initWithTimerHandler:^{ + _timer = [[RTC_OBJC_TYPE(RTCDisplayLinkTimer) alloc] initWithTimerHandler:^{ RTC_OBJC_TYPE(RTCEAGLVideoView) *strongSelf = weakSelf; [strongSelf displayLinkTimerDidFire]; }]; @@ -141,14 +138,13 @@ - (BOOL)configure { } - (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { - [super setMultipleTouchEnabled:multipleTouchEnabled]; - _glkView.multipleTouchEnabled = multipleTouchEnabled; + [super setMultipleTouchEnabled:multipleTouchEnabled]; + _glkView.multipleTouchEnabled = multipleTouchEnabled; } - (void)dealloc { [[NSNotificationCenter defaultCenter] removeObserver:self]; - UIApplicationState appState = - [UIApplication sharedApplication].applicationState; + UIApplicationState appState = [UIApplication sharedApplication].applicationState; if (appState == UIApplicationStateActive) { [self teardownGL]; } @@ -189,14 +185,14 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { return; } RTCVideoRotation rotation = frame.rotation; - 
if(_rotationOverride != nil) { - [_rotationOverride getValue: &rotation]; + if (_rotationOverride != nil) { + [_rotationOverride getValue:&rotation]; } [self ensureGLContext]; glClear(GL_COLOR_BUFFER_BIT); if ([frame.buffer isKindOfClass:[RTC_OBJC_TYPE(RTCCVPixelBuffer) class]]) { if (!_nv12TextureCache) { - _nv12TextureCache = [[RTCNV12TextureCache alloc] initWithContext:_glContext]; + _nv12TextureCache = [[RTC_OBJC_TYPE(RTCNV12TextureCache) alloc] initWithContext:_glContext]; } if (_nv12TextureCache) { [_nv12TextureCache uploadFrameToTextures:frame]; @@ -211,7 +207,7 @@ - (void)glkView:(GLKView *)view drawInRect:(CGRect)rect { } } else { if (!_i420TextureCache) { - _i420TextureCache = [[RTCI420TextureCache alloc] initWithContext:_glContext]; + _i420TextureCache = [[RTC_OBJC_TYPE(RTCI420TextureCache) alloc] initWithContext:_glContext]; } [_i420TextureCache uploadFrameToTextures:frame]; [_shader applyShadingForFrameWithWidth:frame.width diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h index 9fdcc5a695..2c2319d043 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.h @@ -11,7 +11,7 @@ #import "RTCOpenGLDefines.h" #import "base/RTCVideoFrame.h" -@interface RTCI420TextureCache : NSObject +@interface RTC_OBJC_TYPE(RTCI420TextureCache) : NSObject @property(nonatomic, readonly) GLuint yTexture; @property(nonatomic, readonly) GLuint uTexture; diff --git a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm index a91e927cb4..1e1c2bd189 100644 --- a/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm +++ b/sdk/objc/components/renderer/opengl/RTCI420TextureCache.mm @@ -24,7 +24,7 @@ static const GLsizei kNumTexturesPerSet = 3; static const GLsizei kNumTextures = kNumTexturesPerSet * kNumTextureSets; -@implementation RTCI420TextureCache { 
+@implementation RTC_OBJC_TYPE(RTCI420TextureCache) { BOOL _hasUnpackRowLength; GLint _currentTextureSet; // Handles for OpenGL constructs. diff --git a/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m new file mode 100644 index 0000000000..97957faf24 --- /dev/null +++ b/sdk/objc/components/renderer/opengl/RTCNSGLVideoView.m @@ -0,0 +1,199 @@ +/* + * Copyright 2015 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. + */ + +#import + +#if !TARGET_OS_IPHONE + +#import "RTCNSGLVideoView.h" + +#import +#import +#import + +#import "RTCDefaultShader.h" +#import "RTCI420TextureCache.h" +#import "base/RTCLogging.h" +#import "base/RTCVideoFrame.h" + +@interface RTC_OBJC_TYPE (RTCNSGLVideoView) +() + // `videoFrame` is set when we receive a frame from a worker thread and is read + // from the display link callback so atomicity is required. 
+ @property(atomic, strong) RTC_OBJC_TYPE(RTCVideoFrame) * + videoFrame; +@property(atomic, strong) RTC_OBJC_TYPE(RTCI420TextureCache) *i420TextureCache; + +- (void)drawFrame; +@end + +static CVReturn OnDisplayLinkFired(CVDisplayLinkRef displayLink, + const CVTimeStamp *now, + const CVTimeStamp *outputTime, + CVOptionFlags flagsIn, + CVOptionFlags *flagsOut, + void *displayLinkContext) { + RTC_OBJC_TYPE(RTCNSGLVideoView) *view = + (__bridge RTC_OBJC_TYPE(RTCNSGLVideoView) *)displayLinkContext; + [view drawFrame]; + return kCVReturnSuccess; +} + +@implementation RTC_OBJC_TYPE (RTCNSGLVideoView) { + CVDisplayLinkRef _displayLink; + RTC_OBJC_TYPE(RTCVideoFrame) * _lastDrawnFrame; + id _shader; +} + +@synthesize delegate = _delegate; +@synthesize videoFrame = _videoFrame; +@synthesize i420TextureCache = _i420TextureCache; + +- (instancetype)initWithFrame:(NSRect)frame pixelFormat:(NSOpenGLPixelFormat *)format { + return [self initWithFrame:frame pixelFormat:format shader:[[RTC_OBJC_TYPE(RTCDefaultShader) alloc] init]]; +} + +- (instancetype)initWithFrame:(NSRect)frame + pixelFormat:(NSOpenGLPixelFormat *)format + shader:(id)shader { + if (self = [super initWithFrame:frame pixelFormat:format]) { + _shader = shader; + } + return self; +} + +- (void)dealloc { + [self teardownDisplayLink]; +} + +- (void)drawRect:(NSRect)rect { + [self drawFrame]; +} + +- (void)reshape { + [super reshape]; + NSRect frame = [self frame]; + [self ensureGLContext]; + CGLLockContext([[self openGLContext] CGLContextObj]); + glViewport(0, 0, frame.size.width, frame.size.height); + CGLUnlockContext([[self openGLContext] CGLContextObj]); +} + +- (void)lockFocus { + NSOpenGLContext *context = [self openGLContext]; + [super lockFocus]; + if ([context view] != self) { + [context setView:self]; + } + [context makeCurrentContext]; +} + +- (void)prepareOpenGL { + [super prepareOpenGL]; + [self ensureGLContext]; + glDisable(GL_DITHER); + [self setupDisplayLink]; +} + +- (void)clearGLContext { + [self 
ensureGLContext]; + self.i420TextureCache = nil; + [super clearGLContext]; +} + +#pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) + +// These methods may be called on non-main thread. +- (void)setSize:(CGSize)size { + dispatch_async(dispatch_get_main_queue(), ^{ + [self.delegate videoView:self didChangeVideoSize:size]; + }); +} + +- (void)renderFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame { + self.videoFrame = frame; +} + +#pragma mark - Private + +- (void)drawFrame { + RTC_OBJC_TYPE(RTCVideoFrame) *frame = self.videoFrame; + if (!frame || frame == _lastDrawnFrame) { + return; + } + // This method may be called from CVDisplayLink callback which isn't on the + // main thread so we have to lock the GL context before drawing. + NSOpenGLContext *context = [self openGLContext]; + CGLLockContext([context CGLContextObj]); + + [self ensureGLContext]; + glClear(GL_COLOR_BUFFER_BIT); + + // Rendering native CVPixelBuffer is not supported on OS X. + // TODO(magjed): Add support for NV12 texture cache on OS X. + frame = [frame newI420VideoFrame]; + if (!self.i420TextureCache) { + self.i420TextureCache = [[RTC_OBJC_TYPE(RTCI420TextureCache) alloc] initWithContext:context]; + } + RTC_OBJC_TYPE(RTCI420TextureCache) *i420TextureCache = self.i420TextureCache; + if (i420TextureCache) { + [i420TextureCache uploadFrameToTextures:frame]; + [_shader applyShadingForFrameWithWidth:frame.width + height:frame.height + rotation:frame.rotation + yPlane:i420TextureCache.yTexture + uPlane:i420TextureCache.uTexture + vPlane:i420TextureCache.vTexture]; + [context flushBuffer]; + _lastDrawnFrame = frame; + } + CGLUnlockContext([context CGLContextObj]); +} + +- (void)setupDisplayLink { + if (_displayLink) { + return; + } + // Synchronize buffer swaps with vertical refresh rate. + GLint swapInt = 1; + [[self openGLContext] setValues:&swapInt forParameter:NSOpenGLCPSwapInterval]; + + // Create display link. 
+ CVDisplayLinkCreateWithActiveCGDisplays(&_displayLink); + CVDisplayLinkSetOutputCallback(_displayLink, + &OnDisplayLinkFired, + (__bridge void *)self); + // Set the display link for the current renderer. + CGLContextObj cglContext = [[self openGLContext] CGLContextObj]; + CGLPixelFormatObj cglPixelFormat = [[self pixelFormat] CGLPixelFormatObj]; + CVDisplayLinkSetCurrentCGDisplayFromOpenGLContext( + _displayLink, cglContext, cglPixelFormat); + CVDisplayLinkStart(_displayLink); +} + +- (void)teardownDisplayLink { + if (!_displayLink) { + return; + } + CVDisplayLinkRelease(_displayLink); + _displayLink = NULL; +} + +- (void)ensureGLContext { + NSOpenGLContext* context = [self openGLContext]; + NSAssert(context, @"context shouldn't be nil"); + if ([NSOpenGLContext currentContext] != context) { + [context makeCurrentContext]; + } +} + +@end + +#endif // !TARGET_OS_IPHONE diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h index f202b836b5..420490b1ab 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.h @@ -16,7 +16,7 @@ NS_ASSUME_NONNULL_BEGIN -@interface RTCNV12TextureCache : NSObject +@interface RTC_OBJC_TYPE(RTCNV12TextureCache) : NSObject @property(nonatomic, readonly) GLuint yTexture; @property(nonatomic, readonly) GLuint uvTexture; diff --git a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m index a520ac45b4..096767be55 100644 --- a/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m +++ b/sdk/objc/components/renderer/opengl/RTCNV12TextureCache.m @@ -14,7 +14,7 @@ #import "base/RTCVideoFrameBuffer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" -@implementation RTCNV12TextureCache { +@implementation RTC_OBJC_TYPE(RTCNV12TextureCache) { CVOpenGLESTextureCacheRef _textureCache; CVOpenGLESTextureRef _yTextureRef; 
CVOpenGLESTextureRef _uvTextureRef; diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m index 8de55bde4a..3cc92382e6 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m @@ -55,11 +55,13 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) if ([RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) { [result - addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]]; + addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderVP9) scalabilityModes]]]; } #if defined(RTC_USE_LIBAOM_AV1_ENCODER) - [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name]]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *av1Info = + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderAV1) scalabilityModes]]; + [result addObject:av1Info]; #endif return result; diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h new file mode 100644 index 0000000000..4070af22e4 --- /dev/null +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h @@ -0,0 +1,16 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoEncoderFactory.h" + +NS_ASSUME_NONNULL_BEGIN + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) : NSObject + +- (instancetype)initWithPrimary:(id)primary + fallback:(id)fallback; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm new file mode 100644 index 0000000000..2af8a63500 --- /dev/null +++ 
b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm @@ -0,0 +1,63 @@ +#import + +#import "RTCMacros.h" +#import "RTCVideoCodecInfo.h" +#import "RTCVideoEncoderFactorySimulcast.h" +#import "api/video_codec/RTCVideoEncoderSimulcast.h" +#import "api/peerconnection/RTCVideoCodecInfo+Private.h" + +#include "absl/container/inlined_vector.h" +#include "api/video_codecs/video_codec.h" +#include "api/video_codecs/sdp_video_format.h" +#include "api/video_codecs/video_codec.h" +#include "modules/video_coding/codecs/av1/av1_svc_config.h" +#include "modules/video_coding/codecs/vp9/include/vp9.h" +#include "media/base/media_constants.h" + +@interface RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) () + +@property id primary; +@property id fallback; + +@end + + +@implementation RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) + +@synthesize primary = _primary; +@synthesize fallback = _fallback; + +- (instancetype)initWithPrimary:(id)primary + fallback:(id)fallback { + if (self = [super init]) { + _primary = primary; + _fallback = fallback; + } + return self; +} + +- (nullable id)createEncoder: (RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { + return [RTC_OBJC_TYPE(RTCVideoEncoderSimulcast) simulcastEncoderWithPrimary: _primary fallback: _fallback videoCodecInfo: info]; +} + +- (NSArray *)supportedCodecs { + NSArray *supportedCodecs = [[_primary supportedCodecs] arrayByAddingObjectsFromArray: [_fallback supportedCodecs]]; + + NSMutableArray *addingCodecs = [[NSMutableArray alloc] init]; + + for (const webrtc::SdpVideoFormat& format : webrtc::SupportedVP9Codecs(true)) { + RTCVideoCodecInfo *codec = [[RTCVideoCodecInfo alloc] initWithNativeSdpVideoFormat: format]; + [addingCodecs addObject: codec]; + } + + auto av1Format = webrtc::SdpVideoFormat( + cricket::kAv1CodecName, webrtc::CodecParameterMap(), + webrtc::LibaomAv1EncoderSupportedScalabilityModes()); + RTCVideoCodecInfo *av1Codec = [[RTCVideoCodecInfo alloc] initWithNativeSdpVideoFormat: av1Format]; + [addingCodecs 
addObject: av1Codec]; + + return [supportedCodecs arrayByAddingObjectsFromArray: addingCodecs]; +} + + +@end diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm index 2160d79ae5..d3dd33aef6 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm @@ -54,14 +54,42 @@ - (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags // The ratio between kVTCompressionPropertyKey_DataRateLimits and // kVTCompressionPropertyKey_AverageBitRate. The data rate limit is set higher // than the average bit rate to avoid undershooting the target. -const float kLimitToAverageBitRateFactor = 1.5f; +const float kLimitToAverageBitRateFactor = 10.0f; // These thresholds deviate from the default h264 QP thresholds, as they // have been found to work better on devices that support VideoToolbox const int kLowH264QpThreshold = 28; const int kHighH264QpThreshold = 39; +const int kBitsPerByte = 8; const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; +typedef NS_ENUM(NSInteger, RTCVideoEncodeMode) { + Variable = 0, + Constant = 1, +}; + +NSArray *CreateRateLimitArray(uint32_t computedBitrateBps, RTCVideoEncodeMode mode) { + switch (mode) { + case Variable: { + // 5 seconds should be an okay interval for VBR to enforce the long-term + // limit. + float avgInterval = 5.0; + uint32_t avgBytesPerSecond = computedBitrateBps / kBitsPerByte * avgInterval; + // And the peak bitrate is measured per-second in a way similar to CBR. + float peakInterval = 1.0; + uint32_t peakBytesPerSecond = + computedBitrateBps * kLimitToAverageBitRateFactor / kBitsPerByte; + return @[ @(peakBytesPerSecond), @(peakInterval), @(avgBytesPerSecond), @(avgInterval) ]; + } + case Constant: { + // CBR should be enforced with granularity of a second.
+ float targetInterval = 1.0; + int32_t targetBitrate = computedBitrateBps / kBitsPerByte; + return @[ @(targetBitrate), @(targetInterval) ]; + } + } +} + // Struct that we pass to the encoder per frame to encode. We receive it again // in the encoder callback. struct RTCFrameEncodeParams { @@ -180,10 +208,13 @@ void compressionOutputCallback(void *encoder, // no specific VideoToolbox profile for the specified level, AutoLevel will be // returned. The user must initialize the encoder with a resolution and // framerate conforming to the selected H264 level regardless. -CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id) { +CFStringRef ExtractProfile(const webrtc::H264ProfileLevelId &profile_level_id, bool screenSharing) { switch (profile_level_id.profile) { case webrtc::H264Profile::kProfileConstrainedBaseline: case webrtc::H264Profile::kProfileBaseline: + if (screenSharing) { + return kVTProfileLevel_H264_Baseline_AutoLevel; + } switch (profile_level_id.level) { case webrtc::H264Level::kLevel3: return kVTProfileLevel_H264_Baseline_3_0; @@ -319,8 +350,8 @@ NSUInteger GetMaxSampleRate(const webrtc::H264ProfileLevelId &profile_level_id) @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { RTC_OBJC_TYPE(RTCVideoCodecInfo) * _codecInfo; - std::unique_ptr _bitrateAdjuster; uint32_t _targetBitrateBps; + uint32_t _targetFrameRate; uint32_t _encoderBitrateBps; uint32_t _encoderFrameRate; uint32_t _maxAllowedFrameRate; @@ -330,10 +361,17 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { int32_t _width; int32_t _height; VTCompressionSessionRef _compressionSession; - RTCVideoCodecMode _mode; + CVPixelBufferPoolRef _pixelBufferPool; + RTCVideoCodecMode _codecMode; + unsigned int _maxQP; + unsigned int _minBitrate; + unsigned int _maxBitrate; + RTCVideoEncodeMode _encodeMode; webrtc::H264BitstreamParser _h264BitstreamParser; std::vector _frameScaleBuffer; + + CMTime _previousPresentationTimeStamp; } // .5 is set as a mininum to prevent 
overcompensating for large temporary @@ -346,12 +384,14 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { - (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo { if (self = [super init]) { _codecInfo = codecInfo; - _bitrateAdjuster.reset(new webrtc::BitrateAdjuster(.5, .95)); _packetizationMode = RTCH264PacketizationModeNonInterleaved; _profile_level_id = webrtc::ParseSdpForH264ProfileLevelId([codecInfo nativeSdpVideoFormat].parameters); + _previousPresentationTimeStamp = kCMTimeZero; RTC_DCHECK(_profile_level_id); - RTC_LOG(LS_INFO) << "Using profile " << CFStringToString(ExtractProfile(*_profile_level_id)); + RTC_LOG(LS_INFO) << "Using profile " + << CFStringToString(ExtractProfile( + *_profile_level_id, _codecMode == RTCVideoCodecModeScreensharing)); RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]); } return self; @@ -368,7 +408,12 @@ - (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)s _width = settings.width; _height = settings.height; - _mode = settings.mode; + _codecMode = settings.mode; + _maxQP = settings.qpMax; + + _encodeMode = Variable; // Always variable mode for now + _minBitrate = settings.minBitrate * 1000; // minBitrate is in kbps. + _maxBitrate = settings.maxBitrate * 1000; // maxBitrate is in kbps. uint32_t aligned_width = (((_width + 15) >> 4) << 4); uint32_t aligned_height = (((_height + 15) >> 4) << 4); @@ -376,9 +421,15 @@ - (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)s (aligned_width * aligned_height)); // We can only set average bitrate on the HW encoder. - _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps. - _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps); - _encoderFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate); + if (_encodeMode == Constant) { + _targetBitrateBps = _maxBitrate; + } else { + _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps. 
+ } + + _targetFrameRate = MIN(settings.maxFramerate, _maxAllowedFrameRate); + _encoderBitrateBps = 0; + _encoderFrameRate = 0; if (settings.maxFramerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) { RTC_LOG(LS_WARNING) << "Initial encoder frame rate setting " << settings.maxFramerate << " is larger than the " @@ -397,8 +448,15 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame if (!_callback || !_compressionSession) { return WEBRTC_VIDEO_CODEC_UNINITIALIZED; } - BOOL isKeyframeRequired = NO; + CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000); + if (CMTimeCompare(presentationTimeStamp, _previousPresentationTimeStamp) == 0) { + // Same PTS + return WEBRTC_VIDEO_CODEC_NO_OUTPUT; + } + _previousPresentationTimeStamp = presentationTimeStamp; + + BOOL isKeyframeRequired = NO; // Get a pixel buffer from the pool and copy frame data over. if ([self resetCompressionSessionIfNeededWithFrame:frame]) { isKeyframeRequired = YES; @@ -425,8 +483,8 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame int dstWidth = CVPixelBufferGetWidth(pixelBuffer); int dstHeight = CVPixelBufferGetHeight(pixelBuffer); if ([rtcPixelBuffer requiresScalingToWidth:dstWidth height:dstHeight]) { - int size = - [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth height:dstHeight]; + int size = [rtcPixelBuffer bufferSizeForCroppingAndScalingToWidth:dstWidth + height:dstHeight]; _frameScaleBuffer.resize(size); } else { _frameScaleBuffer.clear(); @@ -466,7 +524,6 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame } } - CMTime presentationTimeStamp = CMTimeMake(frame.timeStampNs / rtc::kNumNanosecsPerMillisec, 1000); CFDictionaryRef frameProperties = nullptr; if (isKeyframeRequired) { CFTypeRef keys[] = {kVTEncodeFrameOptionKey_ForceKeyFrame}; @@ -484,8 +541,8 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame frame.rotation)); encodeParams->codecSpecificInfo.packetizationMode = _packetizationMode; - 
// Update the bitrate if needed. - [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:_encoderFrameRate]; + // Update encoder bitrate or frameRate if needed. + [self updateEncoderBitrateAndFrameRate]; OSStatus status = VTCompressionSessionEncodeFrame(_compressionSession, pixelBuffer, @@ -526,14 +583,19 @@ - (void)setCallback:(RTCVideoEncoderCallback)callback { } - (int)setBitrate:(uint32_t)bitrateKbit framerate:(uint32_t)framerate { - _targetBitrateBps = 1000 * bitrateKbit; - _bitrateAdjuster->SetTargetBitrateBps(_targetBitrateBps); + // set target bitrate bps + _targetBitrateBps = bitrateKbit * 1000; + + RTC_LOG(LS_INFO) << "setBitrateKBit: " << bitrateKbit << " targetBps: " << _targetBitrateBps + << " frameRate: " << framerate; + if (framerate > _maxAllowedFrameRate && _maxAllowedFrameRate > 0) { RTC_LOG(LS_WARNING) << "Encoder frame rate setting " << framerate << " is larger than the " << "maximal allowed frame rate " << _maxAllowedFrameRate << "."; } - framerate = MIN(framerate, _maxAllowedFrameRate); - [self setBitrateBps:_bitrateAdjuster->GetAdjustedBitrateBps() frameRate:framerate]; + + _targetFrameRate = MIN(framerate, _maxAllowedFrameRate); + return WEBRTC_VIDEO_CODEC_OK; } @@ -585,7 +647,8 @@ - (BOOL)resetCompressionSessionIfNeededWithFrame:(RTC_OBJC_TYPE(RTCVideoFrame) * CVPixelBufferPoolRef pixelBufferPool = VTCompressionSessionGetPixelBufferPool(_compressionSession); if (!pixelBufferPool) { - return NO; + [self resetCompressionSessionWithPixelFormat:framePixelFormat]; + return YES; } NSDictionary *poolAttributes = @@ -631,14 +694,19 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { (NSString *)kCVPixelBufferPixelFormatTypeKey : @(framePixelFormat), }; - NSDictionary *encoder_specs; + NSMutableDictionary *encoder_specs; #if defined(WEBRTC_MAC) && !defined(WEBRTC_IOS) // Currently hw accl is supported above 360p on mac, below 360p // the compression session will be created with hw accl disabled. 
- encoder_specs = @{ + (NSString *)kVTVideoEncoderSpecification_EnableHardwareAcceleratedVideoEncoder : @(YES), - }; - + } mutableCopy]; + // Enable low-latency video encoding + if (@available(iOS 14.5, macOS 11.3, *)) { + [encoder_specs addEntriesFromDictionary:@{ + (NSString *)kVTVideoEncoderSpecification_EnableLowLatencyRateControl : @(YES), + }]; + } #endif OSStatus status = VTCompressionSessionCreate( nullptr, // use default allocator @@ -675,11 +743,30 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { - (void)configureCompressionSession { RTC_DCHECK(_compressionSession); SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_RealTime, true); - SetVTSessionProperty(_compressionSession, - kVTCompressionPropertyKey_ProfileLevel, - ExtractProfile(*_profile_level_id)); + // Prioritize encoding speed over quality when necessary + if (@available(iOS 14.0, macOS 11.0, *)) { + SetVTSessionProperty( + _compressionSession, kVTCompressionPropertyKey_PrioritizeEncodingSpeedOverQuality, true); + } + // Set maximum QP for screen sharing mode, range must be within 1 to 51 + // https://developer.apple.com/documentation/videotoolbox/kvtcompressionpropertykey_maxallowedframeqp + if (@available(iOS 15.0, macOS 12.0, *)) { + // Only enable for screen sharing and let VideoToolbox do the optimizing as much as possible.
+ if (_codecMode == RTCVideoCodecModeScreensharing) { + RTC_LOG(LS_INFO) << "Configuring VideoToolbox to use maxQP: " << kHighH264QpThreshold + << " mode: " << _codecMode; + SetVTSessionProperty( + _compressionSession, kVTCompressionPropertyKey_MaxAllowedFrameQP, kHighH264QpThreshold); + } + } + SetVTSessionProperty( + _compressionSession, + kVTCompressionPropertyKey_ProfileLevel, + ExtractProfile(*_profile_level_id, _codecMode == RTCVideoCodecModeScreensharing)); SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false); - [self setEncoderBitrateBps:_targetBitrateBps frameRate:_encoderFrameRate]; + + // [self updateEncoderBitrateAndFrameRate]; + // TODO(tkchin): Look at entropy mode and colorspace matrices. // TODO(tkchin): Investigate to see if there's any way to make this work. // May need it to interop with Android. Currently this call just fails. @@ -706,49 +793,59 @@ - (NSString *)implementationName { return @"VideoToolbox"; } -- (void)setBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate { - if (_encoderBitrateBps != bitrateBps || _encoderFrameRate != frameRate) { - [self setEncoderBitrateBps:bitrateBps frameRate:frameRate]; +- (void)updateEncoderBitrateAndFrameRate { + // If no compression session simply return + if (!_compressionSession) { + return; } -} + // Initial status + OSStatus status = noErr; -- (void)setEncoderBitrateBps:(uint32_t)bitrateBps frameRate:(uint32_t)frameRate { - if (_compressionSession) { - SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AverageBitRate, bitrateBps); + uint32_t computedBitrateBps = _targetBitrateBps; - // With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate detection. - if (_maxAllowedFrameRate > 0) { - SetVTSessionProperty( - _compressionSession, kVTCompressionPropertyKey_ExpectedFrameRate, frameRate); - } + // With zero `_maxAllowedFrameRate`, we fall back to automatic frame rate detection. 
+ uint32_t computedFrameRate = _maxAllowedFrameRate > 0 ? _targetFrameRate : 0; - // TODO(tkchin): Add a helper method to set array value. - int64_t dataLimitBytesPerSecondValue = - static_cast(bitrateBps * kLimitToAverageBitRateFactor / 8); - CFNumberRef bytesPerSecond = - CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &dataLimitBytesPerSecondValue); - int64_t oneSecondValue = 1; - CFNumberRef oneSecond = - CFNumberCreate(kCFAllocatorDefault, kCFNumberSInt64Type, &oneSecondValue); - const void *nums[2] = {bytesPerSecond, oneSecond}; - CFArrayRef dataRateLimits = CFArrayCreate(nullptr, nums, 2, &kCFTypeArrayCallBacks); - OSStatus status = VTSessionSetProperty( - _compressionSession, kVTCompressionPropertyKey_DataRateLimits, dataRateLimits); - if (bytesPerSecond) { - CFRelease(bytesPerSecond); - } - if (oneSecond) { - CFRelease(oneSecond); + // Set frame rate + if (computedFrameRate != _encoderFrameRate) { + status = VTSessionSetProperty(_compressionSession, + kVTCompressionPropertyKey_ExpectedFrameRate, + (__bridge CFTypeRef) @(computedFrameRate)); + // Ensure the frame rate was set successfully + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to set frame rate: " << computedFrameRate + << " error: " << status; + } else { + RTC_LOG(LS_INFO) << "Did update encoder frame rate: " << computedFrameRate; } - if (dataRateLimits) { - CFRelease(dataRateLimits); + _encoderFrameRate = computedFrameRate; + } + + // Set bitrate + if (computedBitrateBps != _encoderBitrateBps) { + status = VTSessionSetProperty(_compressionSession, + kVTCompressionPropertyKey_AverageBitRate, + (__bridge CFTypeRef) @(computedBitrateBps)); + + // Ensure the bitrate was set successfully + if (status != noErr) { + RTC_LOG(LS_ERROR) << "Failed to update encoder bitrate: " << computedBitrateBps + << "error: " << status; + } else { + RTC_LOG(LS_INFO) << "Did update encoder bitrate: " << computedBitrateBps; } + + status = VTSessionSetProperty( + _compressionSession, +
kVTCompressionPropertyKey_DataRateLimits, + (__bridge CFArrayRef)CreateRateLimitArray(computedBitrateBps, _encodeMode)); if (status != noErr) { - RTC_LOG(LS_ERROR) << "Failed to set data rate limit with code: " << status; + RTC_LOG(LS_ERROR) << "Failed to update encoder data rate limits"; + } else { + RTC_LOG(LS_INFO) << "Did update encoder data rate limits"; } - _encoderBitrateBps = bitrateBps; - _encoderFrameRate = frameRate; + _encoderBitrateBps = computedBitrateBps; } } @@ -804,8 +901,9 @@ - (void)frameWasEncoded:(OSStatus)status frame.captureTimeMs = renderTimeMs; frame.timeStamp = timestamp; frame.rotation = rotation; - frame.contentType = (_mode == RTCVideoCodecModeScreensharing) ? RTCVideoContentTypeScreenshare : - RTCVideoContentTypeUnspecified; + frame.contentType = (_codecMode == RTCVideoCodecModeScreensharing) ? + RTCVideoContentTypeScreenshare : + RTCVideoContentTypeUnspecified; frame.flags = webrtc::VideoSendTiming::kInvalid; _h264BitstreamParser.ParseBitstream(*buffer); @@ -816,7 +914,6 @@ - (void)frameWasEncoded:(OSStatus)status RTC_LOG(LS_ERROR) << "Encode callback failed"; return; } - _bitrateAdjuster->Update(frame.buffer.length); } - (nullable RTC_OBJC_TYPE(RTCVideoEncoderQpThresholds) *)scalingSettings { @@ -826,3 +923,4 @@ - (void)frameWasEncoded:(OSStatus)status } @end + diff --git a/sdk/objc/helpers/RTCYUVHelper.h b/sdk/objc/helpers/RTCYUVHelper.h new file mode 100644 index 0000000000..ec8ce48355 --- /dev/null +++ b/sdk/objc/helpers/RTCYUVHelper.h @@ -0,0 +1,118 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import + +#import "RTCMacros.h" +#import "RTCVideoFrame.h" + +RTC_OBJC_EXPORT +@interface RTC_OBJC_TYPE (RTCYUVHelper) : NSObject + +- (instancetype)init NS_UNAVAILABLE; + ++ (void)I420Rotate:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstU:(uint8_t*)dstU + dstStrideU:(int)dstStrideU + dstV:(uint8_t*)dstV + dstStrideV:(int)dstStrideV + width:(int)width + height:(int)height + mode:(RTCVideoRotation)mode; + ++ (int)I420ToNV12:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + height:(int)height; + ++ (int)I420ToNV21:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + height:(int)height; + ++ (int)I420ToARGB:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstARGB:(uint8_t*)dstARGB + dstStrideARGB:(int)dstStrideARGB + width:(int)width + height:(int)height; + ++ (int)I420ToBGRA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstBGRA:(uint8_t*)dstBGRA + dstStrideBGRA:(int)dstStrideBGRA + width:(int)width + height:(int)height; + ++ (int)I420ToABGR:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + 
srcStrideV:(int)srcStrideV + dstABGR:(uint8_t*)dstABGR + dstStrideABGR:(int)dstStrideABGR + width:(int)width + height:(int)height; + ++ (int)I420ToRGBA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGBA:(uint8_t*)dstRGBA + dstStrideRGBA:(int)dstStrideRGBA + width:(int)width + height:(int)height; + ++ (int)I420ToRGB24:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGB24:(uint8_t*)dstRGB24 + dstStrideRGB24:(int)dstStrideRGB24 + width:(int)width + height:(int)height; + +@end diff --git a/sdk/objc/helpers/RTCYUVHelper.mm b/sdk/objc/helpers/RTCYUVHelper.mm new file mode 100644 index 0000000000..4a39d469da --- /dev/null +++ b/sdk/objc/helpers/RTCYUVHelper.mm @@ -0,0 +1,179 @@ +/* + * Copyright 2016 The WebRTC project authors. All Rights Reserved. + * + * Use of this source code is governed by a BSD-style license + * that can be found in the LICENSE file in the root of the source + * tree. An additional intellectual property rights grant can be found + * in the file PATENTS. All contributing project authors may + * be found in the AUTHORS file in the root of the source tree. 
+ */ + +#import "RTCYUVHelper.h" + +#include "third_party/libyuv/include/libyuv.h" + +@implementation RTC_OBJC_TYPE (RTCYUVHelper) + ++ (void)I420Rotate:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstU:(uint8_t*)dstU + dstStrideU:(int)dstStrideU + dstV:(uint8_t*)dstV + dstStrideV:(int)dstStrideV + width:(int)width + height:(int)height + mode:(RTCVideoRotation)mode { + libyuv::I420Rotate(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstU, + dstStrideU, + dstV, + dstStrideV, + width, + height, + (libyuv::RotationMode)mode); +} + ++ (int)I420ToNV12:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + height:(int)height { + return libyuv::I420ToNV12(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstUV, + dstStrideUV, + width, + height); +} + ++ (int)I420ToNV21:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstY:(uint8_t*)dstY + dstStrideY:(int)dstStrideY + dstUV:(uint8_t*)dstUV + dstStrideUV:(int)dstStrideUV + width:(int)width + height:(int)height { + return libyuv::I420ToNV21(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstY, + dstStrideY, + dstUV, + dstStrideUV, + width, + height); +} + ++ (int)I420ToARGB:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstARGB:(uint8_t*)dstARGB + dstStrideARGB:(int)dstStrideARGB + 
width:(int)width + height:(int)height { + return libyuv::I420ToARGB( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstARGB, dstStrideARGB, width, height); +} + ++ (int)I420ToBGRA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstBGRA:(uint8_t*)dstBGRA + dstStrideBGRA:(int)dstStrideBGRA + width:(int)width + height:(int)height { + return libyuv::I420ToBGRA( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstBGRA, dstStrideBGRA, width, height); +} + ++ (int)I420ToABGR:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstABGR:(uint8_t*)dstABGR + dstStrideABGR:(int)dstStrideABGR + width:(int)width + height:(int)height { + return libyuv::I420ToABGR( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstABGR, dstStrideABGR, width, height); +} + ++ (int)I420ToRGBA:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGBA:(uint8_t*)dstRGBA + dstStrideRGBA:(int)dstStrideRGBA + width:(int)width + height:(int)height { + return libyuv::I420ToRGBA( + srcY, srcStrideY, srcU, srcStrideU, srcV, srcStrideV, dstRGBA, dstStrideRGBA, width, height); +} + ++ (int)I420ToRGB24:(const uint8_t*)srcY + srcStrideY:(int)srcStrideY + srcU:(const uint8_t*)srcU + srcStrideU:(int)srcStrideU + srcV:(const uint8_t*)srcV + srcStrideV:(int)srcStrideV + dstRGB24:(uint8_t*)dstRGB24 + dstStrideRGB24:(int)dstStrideRGB24 + width:(int)width + height:(int)height { + return libyuv::I420ToRGB24(srcY, + srcStrideY, + srcU, + srcStrideU, + srcV, + srcStrideV, + dstRGB24, + dstStrideRGB24, + width, + height); +} + +@end diff --git a/sdk/objc/native/api/audio_device_module.mm b/sdk/objc/native/api/audio_device_module.mm index 
4e7b681e69..ada25bd9ee 100644 --- a/sdk/objc/native/api/audio_device_module.mm +++ b/sdk/objc/native/api/audio_device_module.mm @@ -13,7 +13,11 @@ #include "api/make_ref_counted.h" #include "rtc_base/logging.h" +#if defined(WEBRTC_IOS) #include "sdk/objc/native/src/audio/audio_device_module_ios.h" +#endif + +#include "modules/audio_device/include/audio_device.h" namespace webrtc { diff --git a/sdk/objc/native/api/video_capturer.mm b/sdk/objc/native/api/video_capturer.mm index a7260ab802..d5cf6fd563 100644 --- a/sdk/objc/native/api/video_capturer.mm +++ b/sdk/objc/native/api/video_capturer.mm @@ -20,7 +20,7 @@ RTC_OBJC_TYPE(RTCVideoCapturer) * objc_video_capturer, rtc::Thread *signaling_thread, rtc::Thread *worker_thread) { - RTCObjCVideoSourceAdapter *adapter = [[RTCObjCVideoSourceAdapter alloc] init]; + RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) *adapter = [[RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) alloc] init]; rtc::scoped_refptr objc_video_track_source = rtc::make_ref_counted(adapter); rtc::scoped_refptr video_source = diff --git a/sdk/objc/native/src/audio/audio_device_ios.h b/sdk/objc/native/src/audio/audio_device_ios.h index 4ef4d0b5df..506487a1c2 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.h +++ b/sdk/objc/native/src/audio/audio_device_ios.h @@ -25,7 +25,7 @@ #include "sdk/objc/base/RTCMacros.h" #include "voice_processing_audio_unit.h" -RTC_FWD_DECL_OBJC_CLASS(RTCNativeAudioSessionDelegateAdapter); +RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter)); namespace webrtc { @@ -172,6 +172,8 @@ class AudioDeviceIOS : public AudioDeviceGeneric, void HandlePlayoutGlitchDetected(); void HandleOutputVolumeChange(); + bool RestartAudioUnit(bool enable_input); + // Uses current `playout_parameters_` and `record_parameters_` to inform the // audio device buffer (ADB) about our internal audio parameters. 
void UpdateAudioDeviceBuffer(); @@ -200,7 +202,7 @@ class AudioDeviceIOS : public AudioDeviceGeneric, // Activates our audio session, creates and initializes the voice-processing // audio unit and verifies that we got the preferred native audio parameters. - bool InitPlayOrRecord(); + bool InitPlayOrRecord(bool enable_input); // Closes and deletes the voice-processing I/O unit. void ShutdownPlayOrRecord(); @@ -260,24 +262,24 @@ class AudioDeviceIOS : public AudioDeviceGeneric, // will be changed dynamically to account for this behavior. rtc::BufferT record_audio_buffer_; + bool recording_is_initialized_; + // Set to 1 when recording is active and 0 otherwise. std::atomic recording_; + bool playout_is_initialized_; + // Set to 1 when playout is active and 0 otherwise. std::atomic playing_; // Set to true after successful call to Init(), false otherwise. bool initialized_ RTC_GUARDED_BY(thread_); - // Set to true after successful call to InitRecording() or InitPlayout(), - // false otherwise. - bool audio_is_initialized_; - // Set to true if audio session is interrupted, false otherwise. bool is_interrupted_; // Audio interruption observer instance. - RTCNativeAudioSessionDelegateAdapter* audio_session_observer_ + RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter)* audio_session_observer_ RTC_GUARDED_BY(thread_); // Set to true if we've activated the audio session. 
diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index 78420ec232..660edf7439 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -95,10 +95,11 @@ static void LogDeviceInfo() { : bypass_voice_processing_(bypass_voice_processing), audio_device_buffer_(nullptr), audio_unit_(nullptr), + recording_is_initialized_(false), recording_(0), + playout_is_initialized_(false), playing_(0), initialized_(false), - audio_is_initialized_(false), is_interrupted_(false), has_configured_session_(false), num_detected_playout_glitches_(0), @@ -110,7 +111,7 @@ static void LogDeviceInfo() { io_thread_checker_.Detach(); thread_ = rtc::Thread::Current(); - audio_session_observer_ = [[RTCNativeAudioSessionDelegateAdapter alloc] initWithObserver:this]; + audio_session_observer_ = [[RTC_OBJC_TYPE(RTCNativeAudioSessionDelegateAdapter) alloc] initWithObserver:this]; mach_timebase_info_data_t tinfo; mach_timebase_info(&tinfo); machTickUnitsToNanoseconds_ = (double)tinfo.numer / tinfo.denom; @@ -180,48 +181,57 @@ static void LogDeviceInfo() { LOGI() << "InitPlayout"; RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(initialized_); - RTC_DCHECK(!audio_is_initialized_); + RTC_DCHECK(!playout_is_initialized_); RTC_DCHECK(!playing_.load()); - if (!audio_is_initialized_) { - if (!InitPlayOrRecord()) { + if (!recording_is_initialized_) { + // recording not initialized yet, init with no input + if (!InitPlayOrRecord(false)) { RTC_LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitPlayout!"; return -1; } } - audio_is_initialized_ = true; + + playout_is_initialized_ = true; + return 0; } bool AudioDeviceIOS::PlayoutIsInitialized() const { RTC_DCHECK_RUN_ON(thread_); - return audio_is_initialized_; + return playout_is_initialized_; } bool AudioDeviceIOS::RecordingIsInitialized() const { RTC_DCHECK_RUN_ON(thread_); - return audio_is_initialized_; + return recording_is_initialized_; } int32_t 
AudioDeviceIOS::InitRecording() { LOGI() << "InitRecording"; RTC_DCHECK_RUN_ON(thread_); RTC_DCHECK(initialized_); - RTC_DCHECK(!audio_is_initialized_); + RTC_DCHECK(!recording_is_initialized_); RTC_DCHECK(!recording_.load()); - if (!audio_is_initialized_) { - if (!InitPlayOrRecord()) { + if (!playout_is_initialized_) { + // playout not initialized yet, init with input + if (!InitPlayOrRecord(true)) { RTC_LOG_F(LS_ERROR) << "InitPlayOrRecord failed for InitRecording!"; return -1; } + } else { + // playout already initialized, restart audio unit with input + RestartAudioUnit(true); } - audio_is_initialized_ = true; + + recording_is_initialized_ = true; + return 0; } int32_t AudioDeviceIOS::StartPlayout() { LOGI() << "StartPlayout"; RTC_DCHECK_RUN_ON(thread_); - RTC_DCHECK(audio_is_initialized_); + RTC_DCHECK(playout_is_initialized_); RTC_DCHECK(!playing_.load()); RTC_DCHECK(audio_unit_); if (fine_audio_buffer_) { @@ -246,14 +256,16 @@ static void LogDeviceInfo() { int32_t AudioDeviceIOS::StopPlayout() { LOGI() << "StopPlayout"; RTC_DCHECK_RUN_ON(thread_); - if (!audio_is_initialized_ || !playing_.load()) { + if (!playout_is_initialized_ || !playing_.load()) { return 0; } if (!recording_.load()) { ShutdownPlayOrRecord(); - audio_is_initialized_ = false; + + recording_is_initialized_ = false; } playing_.store(0, std::memory_order_release); + playout_is_initialized_ = false; // Derive average number of calls to OnGetPlayoutData() between detected // audio glitches and add the result to a histogram. 
@@ -277,7 +289,7 @@ static void LogDeviceInfo() { int32_t AudioDeviceIOS::StartRecording() { LOGI() << "StartRecording"; RTC_DCHECK_RUN_ON(thread_); - RTC_DCHECK(audio_is_initialized_); + RTC_DCHECK(recording_is_initialized_); RTC_DCHECK(!recording_.load()); RTC_DCHECK(audio_unit_); if (fine_audio_buffer_) { @@ -300,14 +312,19 @@ static void LogDeviceInfo() { int32_t AudioDeviceIOS::StopRecording() { LOGI() << "StopRecording"; RTC_DCHECK_RUN_ON(thread_); - if (!audio_is_initialized_ || !recording_.load()) { + if (!recording_is_initialized_ || !recording_.load()) { return 0; } if (!playing_.load()) { ShutdownPlayOrRecord(); - audio_is_initialized_ = false; + + playout_is_initialized_ = false; + } else if (playout_is_initialized_) { + // restart audio unit with no input + RestartAudioUnit(false); } recording_.store(0, std::memory_order_release); + recording_is_initialized_ = false; return 0; } @@ -455,7 +472,7 @@ static void LogDeviceInfo() { // Exclude extreme delta values since they do most likely not correspond // to a real glitch. Instead, the most probable cause is that a headset // has been plugged in or out. There are more direct ways to detect - // audio device changes (see HandleValidRouteChange()) but experiments + // audio device changes (see ValidRouteChange()) but experiments // show that using it leads to more complex implementations. // TODO(henrika): more tests might be needed to come up with an even // better upper limit. @@ -589,7 +606,7 @@ static void LogDeviceInfo() { SetupAudioBuffersForActiveAudioSession(); // Initialize the audio unit again with the new sample rate. 
- if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) { + if (!audio_unit_->Initialize(playout_parameters_.sample_rate(), recording_is_initialized_)) { RTCLogError(@"Failed to initialize the audio unit with sample rate: %d", playout_parameters_.sample_rate()); return; @@ -643,6 +660,46 @@ static void LogDeviceInfo() { last_output_volume_change_time_ = rtc::TimeMillis(); } +bool AudioDeviceIOS::RestartAudioUnit(bool enable_input) { + RTC_DCHECK_RUN_ON(&io_thread_checker_); + + LOGI() << "RestartAudioUnit"; + + // If we don't have an audio unit yet, or the audio unit is uninitialized, + // there is no work to do. + if (!audio_unit_ || audio_unit_->GetState() < VoiceProcessingAudioUnit::kInitialized) { + return false; + } + + bool restart_audio_unit = false; + if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kStarted) { + audio_unit_->Stop(); + PrepareForNewStart(); + restart_audio_unit = true; + } + + if (audio_unit_->GetState() == VoiceProcessingAudioUnit::kInitialized) { + audio_unit_->Uninitialize(); + } + + // Initialize the audio unit again with the same sample rate. + const double sample_rate = playout_parameters_.sample_rate(); + + if (!audio_unit_->Initialize(sample_rate, enable_input)) { + RTCLogError(@"Failed to initialize the audio unit with sample rate: %f", sample_rate); + return false; + } + + // Restart the audio unit if it was already running. + if (restart_audio_unit && !audio_unit_->Start()) { + RTCLogError(@"Failed to start audio unit with sample rate: %f", sample_rate); + return false; + } + + LOGI() << "Successfully enabled audio unit for recording."; + return true; +} + void AudioDeviceIOS::UpdateAudioDeviceBuffer() { LOGI() << "UpdateAudioDevicebuffer"; // AttachAudioBuffer() is called at construction by the main class but check @@ -736,7 +793,7 @@ static void LogDeviceInfo() { // If we're not initialized we don't need to do anything. Audio unit will // be initialized on initialization. 
- if (!audio_is_initialized_) return; + if (!playout_is_initialized_ && !recording_is_initialized_) return; // If we're initialized, we must have an audio unit. RTC_DCHECK(audio_unit_); @@ -774,7 +831,7 @@ static void LogDeviceInfo() { RTCLog(@"Initializing audio unit for UpdateAudioUnit"); ConfigureAudioSession(); SetupAudioBuffersForActiveAudioSession(); - if (!audio_unit_->Initialize(playout_parameters_.sample_rate())) { + if (!audio_unit_->Initialize(playout_parameters_.sample_rate(), recording_is_initialized_)) { RTCLogError(@"Failed to initialize audio unit."); return; } @@ -864,7 +921,7 @@ static void LogDeviceInfo() { RTCLog(@"Unconfigured audio session."); } -bool AudioDeviceIOS::InitPlayOrRecord() { +bool AudioDeviceIOS::InitPlayOrRecord(bool enable_input) { LOGI() << "InitPlayOrRecord"; RTC_DCHECK_RUN_ON(thread_); @@ -900,7 +957,7 @@ static void LogDeviceInfo() { return false; } SetupAudioBuffersForActiveAudioSession(); - audio_unit_->Initialize(playout_parameters_.sample_rate()); + audio_unit_->Initialize(playout_parameters_.sample_rate(), enable_input); } // Release the lock. 
diff --git a/sdk/objc/native/src/audio/audio_device_module_ios.h b/sdk/objc/native/src/audio/audio_device_module_ios.h index 189d7e6c9c..2f9b95a0a8 100644 --- a/sdk/objc/native/src/audio/audio_device_module_ios.h +++ b/sdk/objc/native/src/audio/audio_device_module_ios.h @@ -129,6 +129,9 @@ class AudioDeviceModuleIOS : public AudioDeviceModule { int GetPlayoutAudioParameters(AudioParameters* params) const override; int GetRecordAudioParameters(AudioParameters* params) const override; #endif // WEBRTC_IOS + + int32_t SetAudioDeviceSink(AudioDeviceSink* sink) const override; + private: const bool bypass_voice_processing_; bool initialized_ = false; diff --git a/sdk/objc/native/src/audio/audio_device_module_ios.mm b/sdk/objc/native/src/audio/audio_device_module_ios.mm index 5effef3abd..5f93a06ee8 100644 --- a/sdk/objc/native/src/audio/audio_device_module_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_module_ios.mm @@ -665,5 +665,11 @@ return r; } #endif // WEBRTC_IOS + + int32_t AudioDeviceModuleIOS::SetAudioDeviceSink(AudioDeviceSink* sink) const { + // not implemented + RTC_LOG(LS_WARNING) << __FUNCTION__ << "(" << sink << ") Not implemented"; + return -1; + } } } diff --git a/sdk/objc/native/src/audio/voice_processing_audio_unit.h b/sdk/objc/native/src/audio/voice_processing_audio_unit.h index ed9dd98568..b474cda104 100644 --- a/sdk/objc/native/src/audio/voice_processing_audio_unit.h +++ b/sdk/objc/native/src/audio/voice_processing_audio_unit.h @@ -75,7 +75,7 @@ class VoiceProcessingAudioUnit { VoiceProcessingAudioUnit::State GetState() const; // Initializes the underlying audio unit with the given sample rate. - bool Initialize(Float64 sample_rate); + bool Initialize(Float64 sample_rate, bool enable_input); // Starts the underlying audio unit. 
OSStatus Start(); diff --git a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm index 3905b6857a..b3daacb334 100644 --- a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm +++ b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm @@ -111,19 +111,6 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { return false; } - // Enable input on the input scope of the input element. - UInt32 enable_input = 1; - result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, - kAudioUnitScope_Input, kInputBus, &enable_input, - sizeof(enable_input)); - if (result != noErr) { - DisposeAudioUnit(); - RTCLogError(@"Failed to enable input on input scope of input element. " - "Error=%ld.", - (long)result); - return false; - } - // Enable output on the output scope of the output element. UInt32 enable_output = 1; result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, @@ -193,7 +180,7 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { return state_; } -bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate) { +bool VoiceProcessingAudioUnit::Initialize(Float64 sample_rate, bool enable_input) { RTC_DCHECK_GE(state_, kUninitialized); RTCLog(@"Initializing audio unit with sample rate: %f", sample_rate); @@ -204,6 +191,19 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { LogStreamDescription(format); #endif + UInt32 _enable_input = enable_input ? 1 : 0; + RTCLog(@"Initializing AudioUnit, _enable_input=%d", (int) _enable_input); + result = AudioUnitSetProperty(vpio_unit_, kAudioOutputUnitProperty_EnableIO, + kAudioUnitScope_Input, kInputBus, &_enable_input, + sizeof(_enable_input)); + if (result != noErr) { + DisposeAudioUnit(); + RTCLogError(@"Failed to enable input on input scope of input element. 
" + "Error=%ld.", + (long)result); + return false; + } + // Set the format on the output scope of the input element/bus. result = AudioUnitSetProperty(vpio_unit_, kAudioUnitProperty_StreamFormat, diff --git a/sdk/objc/native/src/objc_audio_device.h b/sdk/objc/native/src/objc_audio_device.h index fcfe7a6e8b..88f6f19f99 100644 --- a/sdk/objc/native/src/objc_audio_device.h +++ b/sdk/objc/native/src/objc_audio_device.h @@ -19,7 +19,7 @@ #include "modules/audio_device/include/audio_device.h" #include "rtc_base/thread.h" -@class ObjCAudioDeviceDelegate; +@class RTC_OBJC_TYPE(ObjCAudioDeviceDelegate); namespace webrtc { @@ -267,7 +267,7 @@ class ObjCAudioDeviceModule : public AudioDeviceModule { rtc::BufferT record_audio_buffer_; // Delegate object provided to RTCAudioDevice during initialization - ObjCAudioDeviceDelegate* audio_device_delegate_; + RTC_OBJC_TYPE(ObjCAudioDeviceDelegate)* audio_device_delegate_; }; } // namespace objc_adm diff --git a/sdk/objc/native/src/objc_audio_device.mm b/sdk/objc/native/src/objc_audio_device.mm index d629fae20f..5fb72d8a5c 100644 --- a/sdk/objc/native/src/objc_audio_device.mm +++ b/sdk/objc/native/src/objc_audio_device.mm @@ -77,7 +77,7 @@ if (![audio_device_ isInitialized]) { if (audio_device_delegate_ == nil) { - audio_device_delegate_ = [[ObjCAudioDeviceDelegate alloc] + audio_device_delegate_ = [[RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) alloc] initWithAudioDeviceModule:rtc::scoped_refptr(this) audioDeviceThread:thread_]; } diff --git a/sdk/objc/native/src/objc_audio_device_delegate.h b/sdk/objc/native/src/objc_audio_device_delegate.h index 3af079dad9..0b546f269c 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.h +++ b/sdk/objc/native/src/objc_audio_device_delegate.h @@ -22,7 +22,7 @@ class ObjCAudioDeviceModule; } // namespace objc_adm } // namespace webrtc -@interface ObjCAudioDeviceDelegate : NSObject +@interface RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) : NSObject - (instancetype)initWithAudioDeviceModule: 
(rtc::scoped_refptr)audioDeviceModule diff --git a/sdk/objc/native/src/objc_audio_device_delegate.mm b/sdk/objc/native/src/objc_audio_device_delegate.mm index 156d6326a4..f4c8cfb71a 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.mm +++ b/sdk/objc/native/src/objc_audio_device_delegate.mm @@ -55,7 +55,7 @@ } // namespace -@implementation ObjCAudioDeviceDelegate { +@implementation RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) { rtc::scoped_refptr impl_; } diff --git a/sdk/objc/native/src/objc_desktop_capture.h b/sdk/objc/native/src/objc_desktop_capture.h new file mode 100644 index 0000000000..a781457220 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_capture.h @@ -0,0 +1,70 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_CAPTURE_H_ +#define SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_CAPTURE_H_ + +#import "base/RTCMacros.h" + +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_and_cursor_composer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "rtc_base/thread.h" + +@protocol RTC_OBJC_TYPE +(DesktopCapturerDelegate); + +namespace webrtc { + +enum DesktopType { kScreen, kWindow }; + +class ObjCDesktopCapturer : public DesktopCapturer::Callback { + public: + enum CaptureState { CS_RUNNING, CS_STOPPED, CS_FAILED}; + + public: + ObjCDesktopCapturer(DesktopType type, + webrtc::DesktopCapturer::SourceId source_id, + id delegate); + virtual ~ObjCDesktopCapturer(); + + virtual CaptureState Start(uint32_t fps); + + virtual void Stop(); + + virtual bool IsRunning(); + + protected: + virtual void OnCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) override; + private: + void CaptureFrame(); + webrtc::DesktopCaptureOptions options_; + std::unique_ptr capturer_; + std::unique_ptr thread_; + CaptureState capture_state_ = CS_STOPPED; + DesktopType type_; + webrtc::DesktopCapturer::SourceId source_id_; + id delegate_; + uint32_t capture_delay_ = 1000; // 1s + webrtc::DesktopCapturer::Result result_ = webrtc::DesktopCapturer::Result::SUCCESS; +}; + +} // namespace webrtc + +#endif // SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_CAPTURE_H_ diff --git a/sdk/objc/native/src/objc_desktop_capture.mm b/sdk/objc/native/src/objc_desktop_capture.mm new file mode 100644 index 0000000000..7aba3e5612 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_capture.mm @@ -0,0 +1,205 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "api/sequence_checker.h" +#include "rtc_base/checks.h" +#include "sdk/objc/native/src/objc_desktop_capture.h" +#include "sdk/objc/native/src/objc_video_frame.h" +#include "third_party/libyuv/include/libyuv.h" + +#import "components/capturer/RTCDesktopCapturer+Private.h" +#import "components/video_frame_buffer/RTCCVPixelBuffer.h" + +namespace webrtc { + +enum { kCaptureDelay = 33, kCaptureMessageId = 1000 }; + +ObjCDesktopCapturer::ObjCDesktopCapturer(DesktopType type, + webrtc::DesktopCapturer::SourceId source_id, + id delegate) + : thread_(rtc::Thread::Create()), source_id_(source_id), delegate_(delegate) { + RTC_DCHECK(thread_); + type_ = type; + thread_->Start(); + options_ = webrtc::DesktopCaptureOptions::CreateDefault(); + options_.set_detect_updated_region(true); + options_.set_allow_iosurface(true); + thread_->BlockingCall([this, type] { + if (type == kScreen) { + capturer_ = std::make_unique( + webrtc::DesktopCapturer::CreateScreenCapturer(options_), options_); + } else { + capturer_ = std::make_unique( + webrtc::DesktopCapturer::CreateWindowCapturer(options_), options_); + } + }); +} + +ObjCDesktopCapturer::~ObjCDesktopCapturer() { + thread_->BlockingCall([this] { + capturer_.reset(); + }); +} + +ObjCDesktopCapturer::CaptureState ObjCDesktopCapturer::Start(uint32_t fps) { + if(capture_state_ == CS_RUNNING) { + return capture_state_; + } + + if(fps == 0) { + capture_state_ = CS_FAILED; + return capture_state_; + } + + if (fps >= 60) { + capture_delay_ = uint32_t(1000.0 / 60.0); + } else { + capture_delay_ = uint32_t(1000.0 
/ fps); + } + + if (source_id_ != -1) { + if (!capturer_->SelectSource(source_id_)) { + capture_state_ = CS_FAILED; + return capture_state_; + } + if (type_ == kWindow) { + if (!capturer_->FocusOnSelectedSource()) { + capture_state_ = CS_FAILED; + return capture_state_; + } + } + } + + thread_->BlockingCall([this] { + capturer_->Start(this); + }); + capture_state_ = CS_RUNNING; + + thread_->PostTask([this] { + CaptureFrame(); + }); + + [delegate_ didSourceCaptureStart]; + return capture_state_; +} + +void ObjCDesktopCapturer::Stop() { + [delegate_ didSourceCaptureStop]; + capture_state_ = CS_STOPPED; +} + +bool ObjCDesktopCapturer::IsRunning() { + return capture_state_ == CS_RUNNING; +} + +void ObjCDesktopCapturer::OnCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) { + if (result != result_) { + if (result == webrtc::DesktopCapturer::Result::ERROR_PERMANENT) { + [delegate_ didSourceCaptureError]; + capture_state_ = CS_FAILED; + return; + } + + if (result == webrtc::DesktopCapturer::Result::ERROR_TEMPORARY) { + result_ = result; + [delegate_ didSourceCapturePaused]; + return; + } + + if (result == webrtc::DesktopCapturer::Result::SUCCESS) { + result_ = result; + [delegate_ didSourceCaptureStart]; + } + } + + if (result == webrtc::DesktopCapturer::Result::ERROR_TEMPORARY) { + return; + } + + int width = frame->size().width(); + int height = frame->size().height(); + int real_width = width; + + if (type_ == kWindow) { + int multiple = 0; +#if defined(WEBRTC_ARCH_X86_FAMILY) + multiple = 16; +#elif defined(WEBRTC_ARCH_ARM64) + multiple = 32; +#endif + // A multiple of $multiple must be used as the width of the src frame, + // and the right black border needs to be cropped during conversion. 
+ if (multiple != 0 && (width % multiple) != 0) { + width = (width / multiple + 1) * multiple; + } + } + + CVPixelBufferRef pixelBuffer = NULL; + + NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVReturn res = CVPixelBufferCreate(kCFAllocatorDefault, + width, + height, + kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), + &pixelBuffer); + CVPixelBufferLockBaseAddress(pixelBuffer, 0); + uint8_t *pxdata = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer); + libyuv::ConvertToARGB(reinterpret_cast(frame->data()), + real_width * height * 4, + reinterpret_cast(pxdata), + width * 4, + 0, + 0, + width, + height, + real_width, + height, + libyuv::kRotate0, + libyuv::FOURCC_ARGB); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + if (res != kCVReturnSuccess) { + NSLog(@"Unable to create cvpixelbuffer %d", res); + return; + } + + RTC_OBJC_TYPE(RTCCVPixelBuffer) *rtcPixelBuffer = + [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:pixelBuffer]; + NSTimeInterval timeStampSeconds = CACurrentMediaTime(); + int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC); + RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = + [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer + rotation:RTCVideoRotation_0 + timeStampNs:timeStampNs]; + CVPixelBufferRelease(pixelBuffer); + [delegate_ didCaptureVideoFrame:videoFrame]; +} + +void ObjCDesktopCapturer::CaptureFrame() { + RTC_DCHECK_RUN_ON(thread_.get()); + if (capture_state_ == CS_RUNNING) { + capturer_->CaptureFrame(); + thread_->PostDelayedHighPrecisionTask( + [this]() { + CaptureFrame(); + }, + TimeDelta::Millis(capture_delay_)); + } +} + +} // namespace webrtc diff --git a/sdk/objc/native/src/objc_desktop_media_list.h b/sdk/objc/native/src/objc_desktop_media_list.h new file mode 100644 index 0000000000..ecb2d27221 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_media_list.h @@ -0,0 +1,111 @@ +/* + * Copyright 2022 LiveKit + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_MEDIA_LIST_H_ +#define SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_MEDIA_LIST_H_ + +#import "base/RTCMacros.h" + +#include "api/video/i420_buffer.h" +#include "api/video/video_frame.h" +#include "modules/desktop_capture/desktop_capture_options.h" +#include "modules/desktop_capture/desktop_capturer.h" +#include "modules/desktop_capture/desktop_frame.h" +#include "rtc_base/thread.h" + +#include "objc_desktop_capture.h" + +#import "components/capturer/RTCDesktopMediaList+Private.h" + +namespace webrtc { + +class MediaSource { + public: + MediaSource( ObjCDesktopMediaList *mediaList, DesktopCapturer::Source src, DesktopType type) + : source(src), mediaList_(mediaList), type_(type) {} + virtual ~MediaSource() {} + + DesktopCapturer::Source source; + + // source id + DesktopCapturer::SourceId id() const { return source.id; } + + // source name + std::string name() const { return source.title; } + + // Returns the thumbnail of the source, jpeg format. 
+ std::vector thumbnail() const { return thumbnail_; } + + + + DesktopType type() const { return type_; } + + bool UpdateThumbnail(); + + void SaveCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame); + + private: + std::vector thumbnail_; + ObjCDesktopMediaList *mediaList_; + DesktopType type_; +}; + +class ObjCDesktopMediaList { + public: + enum CaptureState { CS_RUNNING, CS_STOPPED, CS_FAILED}; + public: + ObjCDesktopMediaList(DesktopType type, RTC_OBJC_TYPE(RTCDesktopMediaList)* objcMediaList); + + virtual ~ObjCDesktopMediaList(); + + virtual int32_t UpdateSourceList(bool force_reload = false, bool get_thumbnail = true); + + virtual int GetSourceCount() const; + + virtual MediaSource* GetSource(int index); + + virtual bool GetThumbnail(MediaSource *source, bool notify); + + private: + class CallbackProxy : public DesktopCapturer::Callback { + public: + CallbackProxy(){} + void SetCallback(std::function frame)> on_capture_result) { + on_capture_result_ = on_capture_result; + } + private: + void OnCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) override { + if(on_capture_result_) on_capture_result_(result, std::move(frame)); + } + std::function frame)> on_capture_result_ = nullptr; + }; + private: + std::unique_ptr callback_; + webrtc::DesktopCaptureOptions options_; + std::unique_ptr capturer_; + std::unique_ptr thread_; + std::vector> sources_; + RTC_OBJC_TYPE(RTCDesktopMediaList)* objcMediaList_; + DesktopType type_; +}; + +} // namespace webrtc + +#endif // SDK_OBJC_NATIVE_SRC_OBJC_DESKTOP_MEDIA_LIST_H_ diff --git a/sdk/objc/native/src/objc_desktop_media_list.mm b/sdk/objc/native/src/objc_desktop_media_list.mm new file mode 100644 index 0000000000..cb783737a2 --- /dev/null +++ b/sdk/objc/native/src/objc_desktop_media_list.mm @@ -0,0 +1,252 @@ +/* + * Copyright 2022 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "sdk/objc/native/src/objc_desktop_media_list.h" +#include "rtc_base/checks.h" +#include "sdk/objc/native/src/objc_video_frame.h" +#include "third_party/libyuv/include/libyuv.h" + +extern "C" { +#if defined(USE_SYSTEM_LIBJPEG) +#include +#else +// Include directory supplied by gn +#include "jpeglib.h" // NOLINT +#endif +} + +#include +#include + +#import + +namespace webrtc { + +ObjCDesktopMediaList::ObjCDesktopMediaList(DesktopType type, + RTC_OBJC_TYPE(RTCDesktopMediaList) * objcMediaList) + : thread_(rtc::Thread::Create()), objcMediaList_(objcMediaList), type_(type) { + RTC_DCHECK(thread_); + thread_->Start(); + options_ = webrtc::DesktopCaptureOptions::CreateDefault(); + options_.set_detect_updated_region(true); + options_.set_allow_iosurface(true); + + callback_ = std::make_unique(); + + thread_->BlockingCall([this, type] { + if (type == kScreen) { + capturer_ = webrtc::DesktopCapturer::CreateScreenCapturer(options_); + } else { + capturer_ = webrtc::DesktopCapturer::CreateWindowCapturer(options_); + } + capturer_->Start(callback_.get()); + }); +} + +ObjCDesktopMediaList::~ObjCDesktopMediaList() { + thread_->BlockingCall([this] { + capturer_.reset(); + }); +} + +int32_t ObjCDesktopMediaList::UpdateSourceList(bool force_reload, bool get_thumbnail) { + if (force_reload) { + for (auto source : sources_) { + [objcMediaList_ mediaSourceRemoved:source.get()]; + } + sources_.clear(); + } + + webrtc::DesktopCapturer::SourceList new_sources; + + thread_->BlockingCall([this, &new_sources] { + 
capturer_->GetSourceList(&new_sources); + }); + + typedef std::set SourceSet; + SourceSet new_source_set; + for (size_t i = 0; i < new_sources.size(); ++i) { + if (type_ == kScreen && new_sources[i].title.length() == 0) { + new_sources[i].title = std::string("Screen " + std::to_string(i + 1)); + } + new_source_set.insert(new_sources[i].id); + } + // Iterate through the old sources to find the removed sources. + for (size_t i = 0; i < sources_.size(); ++i) { + if (new_source_set.find(sources_[i]->id()) == new_source_set.end()) { + [objcMediaList_ mediaSourceRemoved:(*(sources_.begin() + i)).get()]; + sources_.erase(sources_.begin() + i); + --i; + } + } + // Iterate through the new sources to find the added sources. + if (new_sources.size() > sources_.size()) { + SourceSet old_source_set; + for (size_t i = 0; i < sources_.size(); ++i) { + old_source_set.insert(sources_[i]->id()); + } + for (size_t i = 0; i < new_sources.size(); ++i) { + if (old_source_set.find(new_sources[i].id) == old_source_set.end()) { + MediaSource *source = new MediaSource(this, new_sources[i], type_); + sources_.insert(sources_.begin() + i, std::shared_ptr(source)); + [objcMediaList_ mediaSourceAdded:source]; + GetThumbnail(source, true); + } + } + } + + RTC_DCHECK_EQ(new_sources.size(), sources_.size()); + + // Find the moved/changed sources. + size_t pos = 0; + while (pos < sources_.size()) { + if (!(sources_[pos]->id() == new_sources[pos].id)) { + // Find the source that should be moved to |pos|, starting from |pos + 1| + // of |sources_|, because entries before |pos| should have been sorted. + size_t old_pos = pos + 1; + for (; old_pos < sources_.size(); ++old_pos) { + if (sources_[old_pos]->id() == new_sources[pos].id) break; + } + RTC_DCHECK(sources_[old_pos]->id() == new_sources[pos].id); + + // Move the source from |old_pos| to |pos|. 
+ auto temp = sources_[old_pos]; + sources_.erase(sources_.begin() + old_pos); + sources_.insert(sources_.begin() + pos, temp); + //[objcMediaList_ mediaSourceMoved:old_pos newIndex:pos]; + } + + if (sources_[pos]->source.title != new_sources[pos].title) { + sources_[pos]->source.title = new_sources[pos].title; + [objcMediaList_ mediaSourceNameChanged:sources_[pos].get()]; + } + ++pos; + } + + if (get_thumbnail) { + for (auto source : sources_) { + GetThumbnail(source.get(), true); + } + } + return sources_.size(); +} + +bool ObjCDesktopMediaList::GetThumbnail(MediaSource *source, bool notify) { + thread_->PostTask([this, source, notify] { + if(capturer_->SelectSource(source->id())){ + callback_->SetCallback([&](webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) { + auto old_thumbnail = source->thumbnail(); + source->SaveCaptureResult(result, std::move(frame)); + if(old_thumbnail.size() != source->thumbnail().size() && notify) { + [objcMediaList_ mediaSourceThumbnailChanged:source]; + } + }); + capturer_->CaptureFrame(); + } + }); + + return true; +} + +int ObjCDesktopMediaList::GetSourceCount() const { + return sources_.size(); +} + +MediaSource *ObjCDesktopMediaList::GetSource(int index) { + return sources_[index].get(); +} + +bool MediaSource::UpdateThumbnail() { + return mediaList_->GetThumbnail(this, true); +} + +void MediaSource::SaveCaptureResult(webrtc::DesktopCapturer::Result result, + std::unique_ptr frame) { + if (result != webrtc::DesktopCapturer::Result::SUCCESS) { + return; + } + int width = frame->size().width(); + int height = frame->size().height(); + int real_width = width; + + if (type_ == kWindow) { + int multiple = 0; +#if defined(WEBRTC_ARCH_X86_FAMILY) + multiple = 16; +#elif defined(WEBRTC_ARCH_ARM64) + multiple = 32; +#endif + // A multiple of $multiple must be used as the width of the src frame, + // and the right black border needs to be cropped during conversion. 
+ if (multiple != 0 && (width % multiple) != 0) { + width = (width / multiple + 1) * multiple; + } + } + + CVPixelBufferRef pixelBuffer = NULL; + + NSDictionary *pixelAttributes = @{(NSString *)kCVPixelBufferIOSurfacePropertiesKey : @{}}; + CVReturn res = CVPixelBufferCreate(kCFAllocatorDefault, + width, + height, + kCVPixelFormatType_32BGRA, + (__bridge CFDictionaryRef)(pixelAttributes), + &pixelBuffer); + CVPixelBufferLockBaseAddress(pixelBuffer, 0); + uint8_t *pxdata = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer); + libyuv::ConvertToARGB(reinterpret_cast(frame->data()), + real_width * height * 4, + reinterpret_cast(pxdata), + width * 4, + 0, + 0, + width, + height, + real_width, + height, + libyuv::kRotate0, + libyuv::FOURCC_ARGB); + CVPixelBufferUnlockBaseAddress(pixelBuffer, 0); + + if (res != kCVReturnSuccess) { + NSLog(@"Unable to create cvpixelbuffer %d", res); + return; + } + + CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer]; + CGRect outputSize = CGRectMake(0, 0, width, height); + + CIContext *tempContext = [CIContext contextWithOptions:nil]; + CGImageRef cgImage = [tempContext createCGImage:ciImage fromRect:outputSize]; + NSData *imageData; + NSBitmapImageRep *newRep = [[NSBitmapImageRep alloc] initWithCGImage:cgImage]; + [newRep setSize:NSSizeToCGSize(outputSize.size)]; + imageData = [newRep representationUsingType:NSBitmapImageFileTypeJPEG + properties:@{ + NSImageCompressionFactor : @1.0f + }]; + + thumbnail_.resize(imageData.length); + const void *_Nullable rawData = [imageData bytes]; + char *src = (char *)rawData; + std::copy(src, src + imageData.length, thumbnail_.begin()); + + CGImageRelease(cgImage); + CVPixelBufferRelease(pixelBuffer); +} + +} // namespace webrtc diff --git a/sdk/objc/native/src/objc_network_monitor.h b/sdk/objc/native/src/objc_network_monitor.h index 709e9dfbe5..c5440d587b 100644 --- a/sdk/objc/native/src/objc_network_monitor.h +++ b/sdk/objc/native/src/objc_network_monitor.h @@ -59,7 +59,7 @@ class 
ObjCNetworkMonitor : public rtc::NetworkMonitorInterface, std::map adapter_type_by_name_ RTC_GUARDED_BY(thread_); rtc::scoped_refptr safety_flag_; - RTCNetworkMonitor* network_monitor_ = nil; + RTC_OBJC_TYPE(RTCNetworkMonitor) * network_monitor_ = nil; }; } // namespace webrtc diff --git a/sdk/objc/native/src/objc_network_monitor.mm b/sdk/objc/native/src/objc_network_monitor.mm index 535548c64c..e0785e6d0b 100644 --- a/sdk/objc/native/src/objc_network_monitor.mm +++ b/sdk/objc/native/src/objc_network_monitor.mm @@ -39,7 +39,7 @@ thread_ = rtc::Thread::Current(); RTC_DCHECK_RUN_ON(thread_); safety_flag_->SetAlive(); - network_monitor_ = [[RTCNetworkMonitor alloc] initWithObserver:this]; + network_monitor_ = [[RTC_OBJC_TYPE(RTCNetworkMonitor) alloc] initWithObserver:this]; if (network_monitor_ == nil) { RTC_LOG(LS_WARNING) << "Failed to create RTCNetworkMonitor; not available on this OS?"; } diff --git a/sdk/objc/native/src/objc_video_track_source.h b/sdk/objc/native/src/objc_video_track_source.h index 19a3d6db43..5fe39baade 100644 --- a/sdk/objc/native/src/objc_video_track_source.h +++ b/sdk/objc/native/src/objc_video_track_source.h @@ -19,7 +19,7 @@ RTC_FWD_DECL_OBJC_CLASS(RTC_OBJC_TYPE(RTCVideoFrame)); -@interface RTCObjCVideoSourceAdapter : NSObject +@interface RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) : NSObject @end namespace webrtc { @@ -28,7 +28,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource { public: ObjCVideoTrackSource(); explicit ObjCVideoTrackSource(bool is_screencast); - explicit ObjCVideoTrackSource(RTCObjCVideoSourceAdapter* adapter); + explicit ObjCVideoTrackSource(RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter)* adapter); bool is_screencast() const override; @@ -50,7 +50,7 @@ class ObjCVideoTrackSource : public rtc::AdaptedVideoTrackSource { rtc::VideoBroadcaster broadcaster_; rtc::TimestampAligner timestamp_aligner_; - RTCObjCVideoSourceAdapter* adapter_; + RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter)* adapter_; bool is_screencast_; }; 
diff --git a/sdk/objc/native/src/objc_video_track_source.mm b/sdk/objc/native/src/objc_video_track_source.mm index 7937e90505..401db1d111 100644 --- a/sdk/objc/native/src/objc_video_track_source.mm +++ b/sdk/objc/native/src/objc_video_track_source.mm @@ -17,11 +17,11 @@ #include "api/video/i420_buffer.h" #include "sdk/objc/native/src/objc_frame_buffer.h" -@interface RTCObjCVideoSourceAdapter () +@interface RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) () @property(nonatomic) webrtc::ObjCVideoTrackSource *objCVideoTrackSource; @end -@implementation RTCObjCVideoSourceAdapter +@implementation RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) @synthesize objCVideoTrackSource = _objCVideoTrackSource; @@ -40,7 +40,7 @@ - (void)capturer:(RTC_OBJC_TYPE(RTCVideoCapturer) *)capturer : AdaptedVideoTrackSource(/* required resolution alignment */ 2), is_screencast_(is_screencast) {} -ObjCVideoTrackSource::ObjCVideoTrackSource(RTCObjCVideoSourceAdapter *adapter) : adapter_(adapter) { +ObjCVideoTrackSource::ObjCVideoTrackSource(RTC_OBJC_TYPE(RTCObjCVideoSourceAdapter) *adapter) : adapter_(adapter) { adapter_.objCVideoTrackSource = this; } diff --git a/sdk/objc/unittests/RTCMTLVideoView_xctest.m b/sdk/objc/unittests/RTCMTLVideoView_xctest.m index 587a6b588f..84d73586e2 100644 --- a/sdk/objc/unittests/RTCMTLVideoView_xctest.m +++ b/sdk/objc/unittests/RTCMTLVideoView_xctest.m @@ -32,8 +32,8 @@ @interface RTC_OBJC_TYPE (RTCMTLVideoView) + (BOOL)isMetalAvailable; + (UIView *)createMetalView:(CGRect)frame; -+ (id)createNV12Renderer; -+ (id)createI420Renderer; ++ (id)createNV12Renderer; ++ (id)createI420Renderer; - (void)drawInMTKView:(id)view; @end @@ -91,7 +91,7 @@ - (id)frameMockWithCVPixelBuffer:(BOOL)hasCVPixelBuffer { } - (id)rendererMockWithSuccessfulSetup:(BOOL)success { - id rendererMock = OCMClassMock([RTCMTLRenderer class]); + id rendererMock = OCMClassMock([RTC_OBJC_TYPE(RTCMTLRenderer) class]); OCMStub([rendererMock addRenderingDestination:[OCMArg any]]).andReturn(success); return 
rendererMock; } diff --git a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm index 5ba5a52a53..c4dda5aef1 100644 --- a/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm +++ b/sdk/objc/unittests/RTCPeerConnectionFactoryBuilderTest.mm @@ -46,7 +46,7 @@ - (void)testBuilder { nativeVideoDecoderFactory:nullptr audioDeviceModule:nullptr audioProcessingModule:nullptr]); - RTCPeerConnectionFactoryBuilder* builder = [[RTCPeerConnectionFactoryBuilder alloc] init]; + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder)* builder = [[RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) alloc] init]; RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); @@ -63,7 +63,7 @@ - (void)testDefaultComponentsBuilder { nativeVideoDecoderFactory:nullptr audioDeviceModule:nullptr audioProcessingModule:nullptr]); - RTCPeerConnectionFactoryBuilder* builder = [RTCPeerConnectionFactoryBuilder defaultBuilder]; + RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder)* builder = [RTC_OBJC_TYPE(RTCPeerConnectionFactoryBuilder) defaultBuilder]; RTC_OBJC_TYPE(RTCPeerConnectionFactory)* peerConnectionFactory = [builder createPeerConnectionFactory]; EXPECT_TRUE(peerConnectionFactory != nil); diff --git a/video/video_stream_encoder.cc b/video/video_stream_encoder.cc index 5ed5110c96..dc0d34e720 100644 --- a/video/video_stream_encoder.cc +++ b/video/video_stream_encoder.cc @@ -2030,9 +2030,15 @@ void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame, was_encode_called_since_last_initialization_ = true; if (encode_status < 0) { - RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: " - << encoder_config_.video_format.ToString(); - RequestEncoderSwitch(); + if (encode_status == WEBRTC_VIDEO_CODEC_ENCODER_FAILURE) { + RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: " + << encoder_config_.video_format.ToString(); + 
RequestEncoderSwitch(); + } else { + RTC_LOG(LS_ERROR) << "Failed to encode frame. Error code: " + << encode_status; + } + return; } From 7e8c15c71ce27ce5ec4c15a81ea78ff3c72a62db Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Fri, 14 Jun 2024 16:58:13 +0900 Subject: [PATCH 04/49] Fix missing RTC_OBJC_TYPE macros --- sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm | 4 ++-- .../video_codec/RTCVideoEncoderFactorySimulcast.mm | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 08200f46fd..c06f4b36fe 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -137,14 +137,14 @@ - (instancetype)init { webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpSenderCapabilities([RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType: mediaType]); - return [[RTCRtpCapabilities alloc] initWithNativeRtpCapabilities: capabilities]; + return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeRtpCapabilities:capabilities]; } - (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesFor:(RTCRtpMediaType)mediaType { webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpReceiverCapabilities([RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType: mediaType]); - return [[RTCRtpCapabilities alloc] initWithNativeRtpCapabilities: capabilities]; + return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeRtpCapabilities:capabilities]; } - (instancetype) diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm index 2af8a63500..66590d0d94 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm @@ 
-43,17 +43,17 @@ - (instancetype)initWithPrimary:(id)prima - (NSArray *)supportedCodecs { NSArray *supportedCodecs = [[_primary supportedCodecs] arrayByAddingObjectsFromArray: [_fallback supportedCodecs]]; - NSMutableArray *addingCodecs = [[NSMutableArray alloc] init]; + NSMutableArray *addingCodecs = [[NSMutableArray alloc] init]; for (const webrtc::SdpVideoFormat& format : webrtc::SupportedVP9Codecs(true)) { - RTCVideoCodecInfo *codec = [[RTCVideoCodecInfo alloc] initWithNativeSdpVideoFormat: format]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *codec = [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat: format]; [addingCodecs addObject: codec]; } auto av1Format = webrtc::SdpVideoFormat( cricket::kAv1CodecName, webrtc::CodecParameterMap(), webrtc::LibaomAv1EncoderSupportedScalabilityModes()); - RTCVideoCodecInfo *av1Codec = [[RTCVideoCodecInfo alloc] initWithNativeSdpVideoFormat: av1Format]; + RTC_OBJC_TYPE(RTCVideoCodecInfo) *av1Codec = [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithNativeSdpVideoFormat: av1Format]; [addingCodecs addObject: av1Codec]; return [supportedCodecs arrayByAddingObjectsFromArray: addingCodecs]; From c600b00f4c42fd7bd455bf00b0f5680cbe85b10c Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Fri, 14 Jun 2024 18:10:54 +0900 Subject: [PATCH 05/49] Fix missing headers and Metal linking --- sdk/BUILD.gn | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index a6c20de1f1..9afb869380 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -1446,6 +1446,10 @@ if (is_ios || is_mac) { # Added for Simulcast support "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", "objc/api/video_codec/RTCVideoEncoderSimulcast.h", + "objc/components/audio/RTCAudioBuffer.h", + "objc/components/audio/RTCAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.h", + "objc/components/audio/RTCAudioCustomProcessingDelegate.h", ] if 
(!build_with_chromium) { @@ -1607,6 +1611,10 @@ if (is_ios || is_mac) { # Added for Simulcast support "objc/components/video_codec/RTCVideoEncoderFactorySimulcast.h", "objc/api/video_codec/RTCVideoEncoderSimulcast.h", + "objc/components/audio/RTCAudioBuffer.h", + "objc/components/audio/RTCAudioProcessingModule.h", + "objc/components/audio/RTCDefaultAudioProcessingModule.h", + "objc/components/audio/RTCAudioCustomProcessingDelegate.h", ] if (!build_with_chromium) { sources += [ @@ -1618,6 +1626,7 @@ if (is_ios || is_mac) { deps = [ ":base_objc", ":default_codec_factory_objc", + ":metal_objc", ":native_api", ":native_video", ":peerconnectionfactory_base_objc", From 7534c15d2ab370b0bb6825b47f81cd8f97d63d98 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Fri, 21 Jun 2024 04:49:24 +0900 Subject: [PATCH 06/49] Fix Mac Catalyst `RTCCameraVideoCapturer` rotation (#126) --- .../capturer/RTCCameraVideoCapturer.m | 24 +++++++++---------- 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index 26075aeca4..2095cb6006 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -15,7 +15,7 @@ #import "base/RTCVideoFrameBuffer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST #import "helpers/UIDevice+RTCDevice.h" #endif @@ -41,7 +41,7 @@ @implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) { FourCharCode _preferredOutputPixelFormat; FourCharCode _outputPixelFormat; RTCVideoRotation _rotation; -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST UIInterfaceOrientation _orientation; BOOL _generatingOrientationNotifications; #endif @@ -74,7 +74,7 @@ - (instancetype)initWithDelegate:(__weak id_generatingOrientationNotifications) { [[UIDevice 
currentDevice] beginGeneratingDeviceOrientationNotifications]; @@ -224,7 +224,7 @@ - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHand } [self.captureSession stopRunning]; -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST dispatch_async(dispatch_get_main_queue(), ^{ if (self->_generatingOrientationNotifications) { [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; @@ -241,7 +241,7 @@ - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHand #pragma mark iOS notifications -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST - (void)deviceOrientationDidChange:(NSNotification *)notification { [self updateOrientation]; } @@ -264,7 +264,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput return; } -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST // Default to portrait orientation on iPhone. BOOL usingFrontCamera = NO; // Check the image's EXIF for the camera the image came from as the image could have been @@ -314,7 +314,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput - (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection { -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST CFStringRef droppedReason = CMGetAttachment(sampleBuffer, kCMSampleBufferAttachmentKey_DroppedFrameReason, nil); #else @@ -328,7 +328,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput - (void)handleCaptureSessionInterruption:(NSNotification *)notification { NSString *reasonString = nil; -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST NSNumber *reason = notification.userInfo[AVCaptureSessionInterruptionReasonKey]; if (reason) { switch (reason.intValue) { @@ -360,7 +360,7 @@ - (void)handleCaptureSessionRuntimeError:(NSNotification *)notification { [RTC_OBJC_TYPE(RTCDispatcher) 
dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST if (error.code == AVErrorMediaServicesWereReset) { [self handleNonFatalError]; } else { @@ -411,7 +411,7 @@ - (void)handleNonFatalError { }]; } -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST #pragma mark - UIApplication notifications @@ -547,7 +547,7 @@ - (void)reconfigureCaptureSessionInput { [_captureSession commitConfiguration]; } -#if TARGET_OS_IPHONE +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST - (void)updateOrientation { NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeMain], @"statusBarOrientation must be called on the main queue."); From 432a28b10332bac4b1c952002c2ed91f4bbf7d58 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Fri, 21 Jun 2024 04:52:01 +0900 Subject: [PATCH 07/49] Fix set frame transformer (#125) --- audio/channel_receive.cc | 35 +++++++++++-------- .../api/peerconnection/RTCFrameCryptor.mm | 28 ++++++++++----- 2 files changed, 40 insertions(+), 23 deletions(-) diff --git a/audio/channel_receive.cc b/audio/channel_receive.cc index d94ac6a70c..892591f485 100644 --- a/audio/channel_receive.cc +++ b/audio/channel_receive.cc @@ -917,24 +917,29 @@ void ChannelReceive::SetAssociatedSendChannel( void ChannelReceive::SetDepacketizerToDecoderFrameTransformer( rtc::scoped_refptr frame_transformer) { RTC_DCHECK_RUN_ON(&worker_thread_checker_); - if (!frame_transformer) { - RTC_DCHECK_NOTREACHED() << "Not setting the transformer?"; - return; - } - if(frame_transformer_delegate_) { + + // Check if a reset is needed + if (frame_transformer_delegate_ && + frame_transformer_delegate_->FrameTransformer() != frame_transformer) { frame_transformer_delegate_->Reset(); - } - if (frame_transformer_delegate_) { - // Depending on when the channel is created, the transformer might be set - // twice. 
Don't replace the delegate if it was already initialized. - // TODO(crbug.com/webrtc/15674): Prevent multiple calls during - // reconfiguration. - RTC_CHECK_EQ(frame_transformer_delegate_->FrameTransformer(), - frame_transformer); - return; + frame_transformer_delegate_ = nullptr; + RTC_DLOG(LS_INFO) << "Frame transformer delegate has been reset."; } - InitFrameTransformerDelegate(std::move(frame_transformer)); + // Initialize the delegate if needed + if (frame_transformer_delegate_ && + frame_transformer_delegate_->FrameTransformer() == frame_transformer) { + RTC_DLOG(LS_INFO) + << "Frame transformer is already set to the provided transformer."; + } else { + if (!frame_transformer) { + RTC_DCHECK_NOTREACHED() << "Attempted to set a null frame transformer."; + } else { + RTC_DLOG(LS_INFO) << "Initializing frame transformer delegate with the " + "new frame transformer."; + InitFrameTransformerDelegate(std::move(frame_transformer)); + } + } } void ChannelReceive::SetFrameDecryptor( diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm index c51b77e9c5..7e6cd699d7 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm @@ -127,21 +127,27 @@ - (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory rtc::scoped_refptr nativeTrack = nativeRtpSender->track(); if (nativeTrack == nullptr) return nil; - _observer = rtc::make_ref_counted(self); - _participantId = participantId; - webrtc::FrameCryptorTransformer::MediaType mediaType = nativeTrack->kind() == "audio" ? 
webrtc::FrameCryptorTransformer::MediaType::kAudioFrame : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + os_unfair_lock_lock(&_lock); + _observer = rtc::make_ref_counted(self); + _participantId = participantId; + _frame_crypto_transformer = rtc::scoped_refptr(new webrtc::FrameCryptorTransformer( factory.signalingThread, [participantId stdString], mediaType, [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); - nativeRtpSender->SetEncoderToPacketizerFrameTransformer(_frame_crypto_transformer); + factory.workerThread->BlockingCall([self, nativeRtpSender] { + // Must be called on Worker thread + nativeRtpSender->SetEncoderToPacketizerFrameTransformer(_frame_crypto_transformer); + }); + _frame_crypto_transformer->SetEnabled(false); _frame_crypto_transformer->RegisterFrameCryptorTransformerObserver(_observer); + os_unfair_lock_unlock(&_lock); } return self; @@ -161,21 +167,27 @@ - (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory rtc::scoped_refptr nativeTrack = nativeRtpReceiver->track(); if (nativeTrack == nullptr) return nil; - _observer = rtc::make_ref_counted(self); - _participantId = participantId; - webrtc::FrameCryptorTransformer::MediaType mediaType = nativeTrack->kind() == "audio" ? 
webrtc::FrameCryptorTransformer::MediaType::kAudioFrame : webrtc::FrameCryptorTransformer::MediaType::kVideoFrame; + os_unfair_lock_lock(&_lock); + _observer = rtc::make_ref_counted(self); + _participantId = participantId; + _frame_crypto_transformer = rtc::scoped_refptr(new webrtc::FrameCryptorTransformer( factory.signalingThread, [participantId stdString], mediaType, [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); - nativeRtpReceiver->SetDepacketizerToDecoderFrameTransformer(_frame_crypto_transformer); + factory.workerThread->BlockingCall([self, nativeRtpReceiver] { + // Must be called on Worker thread + nativeRtpReceiver->SetDepacketizerToDecoderFrameTransformer(_frame_crypto_transformer); + }); + _frame_crypto_transformer->SetEnabled(false); _frame_crypto_transformer->RegisterFrameCryptorTransformerObserver(_observer); + os_unfair_lock_unlock(&_lock); } return self; From a5b6625514d38417488b7ec84adf5c55135bd915 Mon Sep 17 00:00:00 2001 From: davidliu Date: Tue, 9 Jul 2024 12:31:06 +0900 Subject: [PATCH 08/49] Fix webrtc_voice_engine not notifying mute change (#128) Looks like this line was missed during the m125 update. https://github.com/webrtc-sdk/webrtc/commit/272127d457ab48e36241e82549870405864851f6#diff-56f5e0c459b287281ef3b0431d3f4129e8e4be4c6955d845bcb22210f08b7ba5R2289 Adding it back in so that mic is properly released when muted. 
--- audio/audio_state.cc | 8 +++++++- media/engine/webrtc_voice_engine.cc | 3 +++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/audio/audio_state.cc b/audio/audio_state.cc index 3a21d9b3a9..c715bc4cb9 100644 --- a/audio/audio_state.cc +++ b/audio/audio_state.cc @@ -128,7 +128,9 @@ void AudioState::RemoveSendingStream(webrtc::AudioSendStream* stream) { RTC_DCHECK_EQ(1, count); UpdateAudioTransportWithSendingStreams(); - if (!ShouldRecord()) { + bool should_record = ShouldRecord(); + RTC_LOG(LS_INFO) << "RemoveSendingStream: should_record = " << should_record; + if (!should_record) { config_.audio_device_module->StopRecording(); } } @@ -222,6 +224,7 @@ void AudioState::OnMuteStreamChanged() { auto* adm = config_.audio_device_module.get(); bool should_record = ShouldRecord(); + RTC_LOG(LS_INFO) << "OnMuteStreamChanged: should_record = " << should_record; if (should_record && !adm->Recording()) { if (adm->InitRecording() == 0) { adm->StartRecording(); @@ -232,8 +235,10 @@ void AudioState::OnMuteStreamChanged() { } bool AudioState::ShouldRecord() { + RTC_LOG(LS_INFO) << "ShouldRecord"; // no streams to send if (sending_streams_.empty()) { + RTC_LOG(LS_INFO) << "ShouldRecord: send stream = empty"; return false; } @@ -246,6 +251,7 @@ bool AudioState::ShouldRecord() { } } + RTC_LOG(LS_INFO) << "ShouldRecord: " << muted_count << " muted, " << stream_count << " sending"; return muted_count != stream_count; } diff --git a/media/engine/webrtc_voice_engine.cc b/media/engine/webrtc_voice_engine.cc index 23a1b1927c..d627381f59 100644 --- a/media/engine/webrtc_voice_engine.cc +++ b/media/engine/webrtc_voice_engine.cc @@ -1698,6 +1698,9 @@ bool WebRtcVoiceSendChannel::MuteStream(uint32_t ssrc, bool muted) { ap->set_output_will_be_muted(all_muted); } + // Notfy the AudioState that the mute state has updated. 
+ engine_->audio_state()->OnMuteStreamChanged(); + return true; } From 7ddfc43fcd2e4ba0548258a76fe88d49b34588c5 Mon Sep 17 00:00:00 2001 From: davidliu Date: Tue, 9 Jul 2024 12:31:37 +0900 Subject: [PATCH 09/49] android: Allow for skipping checking the audio playstate if needed (#129) Pausing/stopping the audio track can lead to a race condition against the AudioTrackThread due to this assert. Normally this is fine since directly pausing/stopping isn't possible, but user is using reflection to workaround another audio issue (muted participants still have a sending audio stream which keeps the audio alive, affecting global sound if in the background). Not a full fix, as would like to manually control the audio track directly (needs a bigger fix to handle proper synchronization before allowing public access), but this will work through reflection (user takes responsibility for usage). --- .../src/java/org/webrtc/audio/WebRtcAudioTrack.java | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java index 25d10e4f61..e4499e694a 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioTrack.java @@ -80,6 +80,8 @@ class WebRtcAudioTrack { private final @Nullable AudioTrackStateCallback stateCallback; private final @Nullable PlaybackSamplesReadyCallback audioSamplesReadyCallback; + private boolean checkPlayState = true; + /** * Audio thread which keeps calling AudioTrack.write() to stream audio. 
* Data is periodically acquired from the native WebRTC layer using the @@ -99,7 +101,10 @@ public AudioTrackThread(String name) { public void run() { Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); Logging.d(TAG, "AudioTrackThread" + WebRtcAudioUtils.getThreadInfo()); - assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING); + + if (checkPlayState) { + assertTrue(audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING); + } // Audio playout has started and the client is informed about it. doAudioTrackStateCallback(AUDIO_TRACK_START); From 0ef336a595a09e198f2cb4b4df37d6490ad6550d Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Tue, 16 Jul 2024 15:17:59 +0800 Subject: [PATCH 10/49] Allow to pass in capture session to RTCCameraVideoCapturer (#132) Expose initializers to pass in capture session to RTCCameraVideoCapturer so we can use AVCaptureMultiCamSession etc to capture front and back simultaneously for iOS. --- sdk/objc/components/capturer/RTCCameraVideoCapturer.h | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h index 74f0b39925..aa0469a297 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h @@ -32,6 +32,13 @@ NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.") + (CGFloat)defaultZoomFactorForDeviceType:(AVCaptureDeviceType)deviceType; +- (instancetype)initWithDelegate: + (nullable __weak id)delegate; + +- (instancetype)initWithDelegate: + (nullable __weak id)delegate + captureSession:(AVCaptureSession *)captureSession; + // Returns the most efficient supported output pixel format for this capturer. 
- (FourCharCode)preferredOutputPixelFormat; From 57787823c95701490b8b27f39f10ba4098b0f3d3 Mon Sep 17 00:00:00 2001 From: davidliu Date: Thu, 18 Jul 2024 15:27:23 +0900 Subject: [PATCH 11/49] Fix NetworkMonitor race condition when dispatching native observers (#135) There is a race condition in NetworkMonitor where native observers may be removed concurrently with a notification being dispatched, leading to a dangling pointer dereference (trying to dispatch an observer that was already removed and destroyed), and from there a crash with access violation. By ensuring dispatching to native observers is done within the synchronization lock that guards additions/removals of native observers protects against this race condition. Since native observers callbacks are posted to the networking thread in the C++ side anyway, there should be no risk of deadlock/starvation due to long-running observers. Bug: webrtc:15837 Change-Id: Id2b788f102dbd25de76ceed434c4cd68aa9a569e Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/338643 Reviewed-by: Taylor Brandstetter Commit-Queue: Harald Alvestrand Reviewed-by: Harald Alvestrand Cr-Commit-Position: refs/heads/main@{#42256} Co-authored-by: Guy Hershenbaum --- .../api/org/webrtc/NetworkMonitor.java | 38 +++++++++---------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/sdk/android/api/org/webrtc/NetworkMonitor.java b/sdk/android/api/org/webrtc/NetworkMonitor.java index 0bc461df18..ca7e4d5656 100644 --- a/sdk/android/api/org/webrtc/NetworkMonitor.java +++ b/sdk/android/api/org/webrtc/NetworkMonitor.java @@ -227,10 +227,13 @@ private void updateCurrentConnectionType(NetworkChangeDetector.ConnectionType ne /** Alerts all observers of a connection change. 
*/ private void notifyObserversOfConnectionTypeChange( NetworkChangeDetector.ConnectionType newConnectionType) { - List nativeObservers = getNativeNetworkObserversSync(); - for (Long nativeObserver : nativeObservers) { - nativeNotifyConnectionTypeChanged(nativeObserver); + + synchronized (nativeNetworkObservers) { + for (Long nativeObserver : nativeNetworkObservers) { + nativeNotifyConnectionTypeChanged(nativeObserver); + } } + // This avoids calling external methods while locking on an object. List javaObservers; synchronized (networkObservers) { @@ -243,25 +246,28 @@ private void notifyObserversOfConnectionTypeChange( private void notifyObserversOfNetworkConnect( NetworkChangeDetector.NetworkInformation networkInfo) { - List nativeObservers = getNativeNetworkObserversSync(); - for (Long nativeObserver : nativeObservers) { - nativeNotifyOfNetworkConnect(nativeObserver, networkInfo); + synchronized (nativeNetworkObservers) { + for (Long nativeObserver : nativeNetworkObservers) { + nativeNotifyOfNetworkConnect(nativeObserver, networkInfo); + } } } private void notifyObserversOfNetworkDisconnect(long networkHandle) { - List nativeObservers = getNativeNetworkObserversSync(); - for (Long nativeObserver : nativeObservers) { - nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle); + synchronized (nativeNetworkObservers) { + for (Long nativeObserver : nativeNetworkObservers) { + nativeNotifyOfNetworkDisconnect(nativeObserver, networkHandle); + } } } private void notifyObserversOfNetworkPreference( List types, int preference) { - List nativeObservers = getNativeNetworkObserversSync(); - for (NetworkChangeDetector.ConnectionType type : types) { - for (Long nativeObserver : nativeObservers) { - nativeNotifyOfNetworkPreference(nativeObserver, type, preference); + synchronized(nativeNetworkObservers) { + for (NetworkChangeDetector.ConnectionType type : types) { + for (Long nativeObserver : nativeNetworkObservers) { + nativeNotifyOfNetworkPreference(nativeObserver, 
type, preference); + } } } } @@ -282,12 +288,6 @@ private void updateObserverActiveNetworkList(long nativeObserver) { nativeNotifyOfActiveNetworkList(nativeObserver, networkInfos); } - private List getNativeNetworkObserversSync() { - synchronized (nativeNetworkObservers) { - return new ArrayList<>(nativeNetworkObservers); - } - } - /** * Adds an observer for any connection type changes. * From 6bb47f56de3b28e84eedd98a703c16b4d9a5b814 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Mon, 12 Aug 2024 14:35:18 +0800 Subject: [PATCH 12/49] Support for Vision Pro (#131) TODO: - [x] fix compile for RTCCameraVideoCapturer - [ ] fix RTCMTLRenderer ? --------- Co-authored-by: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> --- DEPS | 2 +- sdk/BUILD.gn | 55 +++++++++++++------ .../capturer/RTCCameraVideoCapturer.h | 3 + .../capturer/RTCCameraVideoCapturer.m | 27 +++++---- .../video_codec/RTCVideoDecoderH264.mm | 2 +- .../video_codec/RTCVideoEncoderH264.mm | 2 +- 6 files changed, 62 insertions(+), 29 deletions(-) diff --git a/DEPS b/DEPS index a619f3e53c..39f4533a34 100644 --- a/DEPS +++ b/DEPS @@ -54,7 +54,7 @@ deps = { 'src/base': 'https://chromium.googlesource.com/chromium/src/base@738cf0c976fd3d07c5f1853f050594c5295300d8', 'src/build': - 'https://chromium.googlesource.com/chromium/src/build@cab574b350bc82dc3e7a1f634fedeb3079bf9e9d', + 'https://github.com/webrtc-sdk/build@1367989663b6cd7873ec606d0ed1f7878dd3ea7d', 'src/buildtools': 'https://chromium.googlesource.com/chromium/src/buildtools@5eb927f0a922dfacf10cfa84ee76f39dcf2a7311', # Gradle 6.6.1. Used for testing Android Studio project generation for WebRTC. 
diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index 9afb869380..01dc112abb 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -172,11 +172,15 @@ if (is_ios || is_mac) { if (is_ios) { sources += [ - "objc/helpers/RTCCameraPreviewView.h", - "objc/helpers/RTCCameraPreviewView.m", "objc/helpers/UIDevice+RTCDevice.h", "objc/helpers/UIDevice+RTCDevice.mm", ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + sources += [ + "objc/helpers/RTCCameraPreviewView.h", + "objc/helpers/RTCCameraPreviewView.m", + ] + } frameworks += [ "UIKit.framework" ] } } @@ -274,11 +278,14 @@ if (is_ios || is_mac) { deps = [ ":base_objc", ":helpers_objc", - ":metal_objc", ":opengl_objc", ":videocapture_objc", ":videoframebuffer_objc", ] + + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + deps += [ ":metal_objc" ] + } } rtc_library("audio_device") { @@ -625,7 +632,7 @@ if (is_ios || is_mac) { "Metal.framework", "MetalKit.framework", ] - if (is_ios || is_mac) { + if ((is_ios || is_mac) && !(target_environment == "xrsimulator" || target_environment == "xrdevice")) { sources += [ "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLVideoView.m", @@ -667,9 +674,13 @@ if (is_ios || is_mac) { sources = [ "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCCameraVideoCapturer.m", - "objc/components/capturer/RTCFileVideoCapturer.h", - "objc/components/capturer/RTCFileVideoCapturer.m", ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + sources += [ + "objc/components/capturer/RTCFileVideoCapturer.h", + "objc/components/capturer/RTCFileVideoCapturer.m", + ] + } frameworks = [ "AVFoundation.framework", "CoreVideo.framework", @@ -687,7 +698,7 @@ if (is_ios || is_mac) { "../rtc_base/system:gcd_helpers", ] } - + rtc_library("desktopcapture_objc") { visibility = [ "*" ] sources = [ @@ -705,7 +716,7 @@ if (is_ios || is_mac) { 
"objc/native/src/objc_desktop_media_list.h", "objc/native/src/objc_desktop_media_list.mm", ] - frameworks = [ + frameworks = [ "AppKit.framework", ] @@ -720,7 +731,7 @@ if (is_ios || is_mac) { "../rtc_base/system:gcd_helpers", "../modules/desktop_capture", ] - if(is_mac) { + if (is_mac) { deps += [ "//third_party:jpeg", ] } } @@ -879,7 +890,7 @@ if (is_ios || is_mac) { "../media:rtc_simulcast_encoder_adapter", ] } - + rtc_library("mediaconstraints_objc") { configs += [ "..:no_global_constructors" ] sources = [ @@ -1242,7 +1253,6 @@ if (is_ios || is_mac) { ":framework_objc", ":helpers_objc", ":mediaconstraints_objc", - ":metal_objc", ":native_api", ":native_api_audio_device_module", ":native_video", @@ -1275,7 +1285,11 @@ if (is_ios || is_mac) { "../system_wrappers", "//third_party/libyuv", ] - + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + deps += [ + ":metal_objc", + ] + } if (rtc_ios_use_opengl_rendering) { deps += [ ":opengl_objc" ] } @@ -1375,7 +1389,6 @@ if (is_ios || is_mac) { "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCFileVideoCapturer.h", "objc/components/network/RTCNetworkMonitor.h", - "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/opengl/RTCEAGLVideoView.h", "objc/components/renderer/opengl/RTCVideoViewShading.h", "objc/components/video_codec/RTCCodecSpecificInfoH264.h", @@ -1387,7 +1400,6 @@ if (is_ios || is_mac) { "objc/components/video_codec/RTCVideoEncoderFactoryH264.h", "objc/components/video_codec/RTCVideoEncoderH264.h", "objc/components/video_frame_buffer/RTCCVPixelBuffer.h", - "objc/helpers/RTCCameraPreviewView.h", "objc/helpers/RTCDispatcher.h", "objc/helpers/RTCYUVHelper.h", "objc/helpers/UIDevice+RTCDevice.h", @@ -1452,6 +1464,13 @@ if (is_ios || is_mac) { "objc/components/audio/RTCAudioCustomProcessingDelegate.h", ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + common_objc_headers += [ + 
"objc/helpers/RTCCameraPreviewView.h", + "objc/components/renderer/metal/RTCMTLVideoView.h", + ] + } + if (!build_with_chromium) { common_objc_headers += [ "objc/api/logging/RTCCallbackLogger.h", @@ -1472,7 +1491,6 @@ if (is_ios || is_mac) { ":audio_objc", ":base_objc", ":default_codec_factory_objc", - ":metal_objc", ":native_api", ":native_video", ":peerconnectionfactory_base_objc", @@ -1481,6 +1499,9 @@ if (is_ios || is_mac) { ":videotoolbox_objc", ":darwin_privacy_info", ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + deps += [ ":metal_objc" ] + } if (!build_with_chromium) { deps += [ ":callback_logger_objc", @@ -1626,7 +1647,6 @@ if (is_ios || is_mac) { deps = [ ":base_objc", ":default_codec_factory_objc", - ":metal_objc", ":native_api", ":native_video", ":peerconnectionfactory_base_objc", @@ -1636,6 +1656,9 @@ if (is_ios || is_mac) { ":videotoolbox_objc", ":darwin_privacy_info", ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + deps += [ ":metal_objc" ] + } if (!build_with_chromium) { deps += [ ":callback_logger_objc", diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h index aa0469a297..212109c556 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.h +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.h @@ -27,10 +27,13 @@ NS_EXTENSION_UNAVAILABLE_IOS("Camera not available in app extensions.") // Returns list of available capture devices that support video capture. + (NSArray *)captureDevices; + // Returns list of formats that are supported by this class for this device. 
+ (NSArray *)supportedFormatsForDevice:(AVCaptureDevice *)device; +#if !TARGET_OS_VISION + (CGFloat)defaultZoomFactorForDeviceType:(AVCaptureDeviceType)deviceType; +#endif - (instancetype)initWithDelegate: (nullable __weak id)delegate; diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index 2095cb6006..c431bcfac8 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -74,7 +74,7 @@ - (instancetype)initWithDelegate:(__weak id *)captureDevices { +#if TARGET_OS_VISION + // Simply return an empty array. + return [NSArray array]; +#else AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ] mediaType:AVMediaTypeVideo position:AVCaptureDevicePositionUnspecified]; return session.devices; +#endif } + (NSArray *)supportedFormatsForDevice:(AVCaptureDevice *)device { @@ -131,6 +136,7 @@ - (void)dealloc { return device.formats; } +#if !TARGET_OS_VISION + (CGFloat)defaultZoomFactorForDeviceType:(AVCaptureDeviceType)deviceType { // AVCaptureDeviceTypeBuiltInTripleCamera, Virtual, switchOver: [2, 6], default: 2 // AVCaptureDeviceTypeBuiltInDualCamera, Virtual, switchOver: [3], default: 1 @@ -150,6 +156,7 @@ + (CGFloat)defaultZoomFactorForDeviceType:(AVCaptureDeviceType)deviceType { return 1.0; } +#endif - (FourCharCode)preferredOutputPixelFormat { return _preferredOutputPixelFormat; @@ -175,7 +182,7 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device block:^{ RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps); -#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION dispatch_async(dispatch_get_main_queue(), ^{ if (!self->_generatingOrientationNotifications) { [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; @@ -224,7 +231,7 @@ 
- (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHand } [self.captureSession stopRunning]; -#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION dispatch_async(dispatch_get_main_queue(), ^{ if (self->_generatingOrientationNotifications) { [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; @@ -241,7 +248,7 @@ - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHand #pragma mark iOS notifications -#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION - (void)deviceOrientationDidChange:(NSNotification *)notification { [self updateOrientation]; } @@ -264,7 +271,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput return; } -#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION // Default to portrait orientation on iPhone. BOOL usingFrontCamera = NO; // Check the image's EXIF for the camera the image came from as the image could have been @@ -442,7 +449,7 @@ - (dispatch_queue_t)frameQueue { - (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession { NSAssert(_captureSession == nil, @"Setup capture session called twice."); _captureSession = captureSession; -#if defined(WEBRTC_IOS) +#if defined(WEBRTC_IOS) && !TARGET_OS_VISION _captureSession.sessionPreset = AVCaptureSessionPresetInputPriority; _captureSession.usesApplicationAudioSession = NO; #endif @@ -519,7 +526,7 @@ - (void)updateZoomFactor { NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], @"updateZoomFactor must be called on the capture queue."); -#if TARGET_OS_IOS || TARGET_OS_TV +#if (TARGET_OS_IOS || TARGET_OS_TV) && !TARGET_OS_VISION CGFloat videoZoomFactor = [[self class] defaultZoomFactorForDeviceType:_currentDevice.deviceType]; [_currentDevice setVideoZoomFactor:videoZoomFactor]; #endif @@ -529,8 +536,8 
@@ - (void)reconfigureCaptureSessionInput { NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeCaptureSession], @"reconfigureCaptureSessionInput must be called on the capture queue."); NSError *error = nil; - AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:_currentDevice - error:&error]; + AVCaptureDeviceInput *input = [[AVCaptureDeviceInput alloc] initWithDevice:_currentDevice + error:&error]; if (!input) { RTCLogError(@"Failed to create front camera input: %@", error.localizedDescription); return; @@ -547,7 +554,7 @@ - (void)reconfigureCaptureSessionInput { [_captureSession commitConfiguration]; } -#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST +#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION - (void)updateOrientation { NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeMain], @"statusBarOrientation must be called on the main queue."); diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm b/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm index 6708b26c89..563758e3c9 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm @@ -206,7 +206,7 @@ - (int)resetDecompressionSession { NSDictionary *attributes = @{ #if defined(WEBRTC_IOS) && (TARGET_OS_MACCATALYST || TARGET_OS_SIMULATOR) (NSString *)kCVPixelBufferMetalCompatibilityKey : @(YES), -#elif defined(WEBRTC_IOS) +#elif defined(WEBRTC_IOS) && !defined(TARGET_OS_VISION) (NSString *)kCVPixelBufferOpenGLESCompatibilityKey : @(YES), #elif defined(WEBRTC_MAC) && !defined(WEBRTC_ARCH_ARM64) (NSString *)kCVPixelBufferOpenGLCompatibilityKey : @(YES), diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm index d3dd33aef6..0683696fbc 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm @@ -685,7 
+685,7 @@ - (int)resetCompressionSessionWithPixelFormat:(OSType)framePixelFormat { NSDictionary *sourceAttributes = @{ #if defined(WEBRTC_IOS) && (TARGET_OS_MACCATALYST || TARGET_OS_SIMULATOR) (NSString *)kCVPixelBufferMetalCompatibilityKey : @(YES), -#elif defined(WEBRTC_IOS) +#elif defined(WEBRTC_IOS) && !defined(TARGET_OS_VISION) (NSString *)kCVPixelBufferOpenGLESCompatibilityKey : @(YES), #elif defined(WEBRTC_MAC) && !defined(WEBRTC_ARCH_ARM64) (NSString *)kCVPixelBufferOpenGLCompatibilityKey : @(YES), From c852b0efe3256d3249b87991d749eea80fc46044 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Tue, 20 Aug 2024 23:52:10 +0900 Subject: [PATCH 13/49] Multicam support (#137) TODO: - [x] Return `.systemPreferredCamera` for devices (visionOS only). - [x] Use `AVCaptureMultiCamSession` only if `isMultiCamSupported` is true. - [x] Silence statusBarOrientation warning. --------- Co-authored-by: duanweiwei1982@gmail.com --- .../capturer/RTCCameraVideoCapturer.m | 164 +++++++++++++++--- 1 file changed, 140 insertions(+), 24 deletions(-) diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index c431bcfac8..09b3e0a476 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -9,13 +9,21 @@ */ #import +#import #import "RTCCameraVideoCapturer.h" #import "base/RTCLogging.h" #import "base/RTCVideoFrameBuffer.h" #import "components/video_frame_buffer/RTCCVPixelBuffer.h" -#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST +// AVCaptureMultiCamSession iOS 13.0+, iPadOS 13.0+, Mac Catalyst 14.0+, tvOS 17.0+ +#define TARGET_MULTICAM_CAPABLE (TARGET_OS_IPHONE && !TARGET_OS_VISION) + +// iOS 2.0+, iPadOS 2.0+, Mac Catalyst 13.0+ +#define TARGET_WATCH_DEVICE_ROTATION \ + (TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION) + +#if TARGET_WATCH_DEVICE_ROTATION #import 
"helpers/UIDevice+RTCDevice.h" #endif @@ -41,12 +49,24 @@ @implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) { FourCharCode _preferredOutputPixelFormat; FourCharCode _outputPixelFormat; RTCVideoRotation _rotation; -#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST + +#if TARGET_WATCH_DEVICE_ROTATION UIInterfaceOrientation _orientation; BOOL _generatingOrientationNotifications; #endif + +#if TARGET_MULTICAM_CAPABLE + AVCaptureConnection *_captureConnection; +#endif } +#if TARGET_MULTICAM_CAPABLE +// Shared multi-camera session across capturers. +static AVCaptureMultiCamSession *_sharedMultiCamSession = nil; +static os_unfair_lock _sharedMultiCamSessionLock = OS_UNFAIR_LOCK_INIT; +static NSUInteger _sharedMultiCamSessionCount = 0; +#endif + @synthesize frameQueue = _frameQueue; @synthesize captureSession = _captureSession; @synthesize currentDevice = _currentDevice; @@ -55,14 +75,13 @@ @implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) { @synthesize willBeRunning = _willBeRunning; - (instancetype)init { - return [self initWithDelegate:nil captureSession:[[AVCaptureSession alloc] init]]; + return [self initWithDelegate:nil captureSession:[self createCaptureSession]]; } - (instancetype)initWithDelegate:(__weak id)delegate { - return [self initWithDelegate:delegate captureSession:[[AVCaptureSession alloc] init]]; + return [self initWithDelegate:delegate captureSession:[self createCaptureSession]]; } -// This initializer is used for testing. - (instancetype)initWithDelegate:(__weak id)delegate captureSession:(AVCaptureSession *)captureSession { if (self = [super initWithDelegate:delegate]) { @@ -73,8 +92,10 @@ - (instancetype)initWithDelegate:(__weak id *)captureDevices { #if TARGET_OS_VISION - // Simply return an empty array. - return [NSArray array]; + AVCaptureDevice *device = AVCaptureDevice.systemPreferredCamera; + return device ? 
@[ device ] : @[]; #else AVCaptureDeviceDiscoverySession *session = [AVCaptureDeviceDiscoverySession discoverySessionWithDeviceTypes:@[ AVCaptureDeviceTypeBuiltInWideAngleCamera ] @@ -182,7 +205,7 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device block:^{ RTCLogInfo("startCaptureWithDevice %@ @ %ld fps", format, (long)fps); -#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION +#if TARGET_WATCH_DEVICE_ROTATION dispatch_async(dispatch_get_main_queue(), ^{ if (!self->_generatingOrientationNotifications) { [[UIDevice currentDevice] beginGeneratingDeviceOrientationNotifications]; @@ -211,8 +234,8 @@ - (void)startCaptureWithDevice:(AVCaptureDevice *)device [self updateZoomFactor]; [self.currentDevice unlockForConfiguration]; - [self.captureSession startRunning]; - self.isRunning = YES; + [self startRunning]; + if (completionHandler) { completionHandler(nil); } @@ -225,13 +248,23 @@ - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHand dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ RTCLogInfo("Stop"); - self.currentDevice = nil; + +#if TARGET_MULTICAM_CAPABLE + [self.captureSession removeConnection:self->_captureConnection]; + self->_captureConnection = nil; +#endif + for (AVCaptureDeviceInput *oldInput in [self.captureSession.inputs copy]) { - [self.captureSession removeInput:oldInput]; + // Remove any old input with same device. 
+ if ([self->_currentDevice isEqual:oldInput.device]) { + [self.captureSession removeInput:oldInput]; + } } - [self.captureSession stopRunning]; + self.currentDevice = nil; + + [self stopRunning]; -#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION +#if TARGET_WATCH_DEVICE_ROTATION dispatch_async(dispatch_get_main_queue(), ^{ if (self->_generatingOrientationNotifications) { [[UIDevice currentDevice] endGeneratingDeviceOrientationNotifications]; @@ -239,7 +272,7 @@ - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHand } }); #endif - self.isRunning = NO; + if (completionHandler) { completionHandler(); } @@ -248,7 +281,7 @@ - (void)stopCaptureWithCompletionHandler:(nullable void (^)(void))completionHand #pragma mark iOS notifications -#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION +#if TARGET_WATCH_DEVICE_ROTATION - (void)deviceOrientationDidChange:(NSNotification *)notification { [self updateOrientation]; } @@ -271,7 +304,7 @@ - (void)captureOutput:(AVCaptureOutput *)captureOutput return; } -#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION +#if TARGET_WATCH_DEVICE_ROTATION // Default to portrait orientation on iPhone. BOOL usingFrontCamera = NO; // Check the image's EXIF for the camera the image came from as the image could have been @@ -433,10 +466,70 @@ - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { }]; } -#endif // TARGET_OS_IPHONE +#endif #pragma mark - Private +- (AVCaptureSession *)createCaptureSession { +#if TARGET_MULTICAM_CAPABLE + if (AVCaptureMultiCamSession.isMultiCamSupported) { + // AVCaptureMultiCamSession exists and device supports multi-cam. + if (_sharedMultiCamSession == nil) { + _sharedMultiCamSession = [[AVCaptureMultiCamSession alloc] init]; + } + return _sharedMultiCamSession; + } else { + // AVCaptureMultiCamSession exists but device doesn't support multi-cam. 
+ return [[AVCaptureSession alloc] init]; + } +#else + // AVCaptureMultiCamSession doesn't exist with this platform, use AVCaptureSession. + return [[AVCaptureSession alloc] init]; +#endif +} + +- (BOOL)isUsingSelfCreatedMultiCamSession { +#if TARGET_MULTICAM_CAPABLE + return _sharedMultiCamSession != nil && _sharedMultiCamSession == _captureSession; +#else + return NO; +#endif +} + +- (void)startRunning { + BOOL shouldStartRunning = YES; +#if TARGET_MULTICAM_CAPABLE + if ([self isUsingSelfCreatedMultiCamSession]) { + os_unfair_lock_lock(&_sharedMultiCamSessionLock); + shouldStartRunning = _sharedMultiCamSessionCount == 0; + _sharedMultiCamSessionCount += 1; + os_unfair_lock_unlock(&_sharedMultiCamSessionLock); + } +#endif + if (shouldStartRunning) { + [_captureSession startRunning]; + } + self.isRunning = YES; +} + +- (void)stopRunning { + BOOL shouldStopRunning = YES; +#if TARGET_MULTICAM_CAPABLE + if ([self isUsingSelfCreatedMultiCamSession]) { + os_unfair_lock_lock(&_sharedMultiCamSessionLock); + if (_sharedMultiCamSessionCount > 0) { + _sharedMultiCamSessionCount -= 1; + shouldStopRunning = _sharedMultiCamSessionCount == 0; + } + os_unfair_lock_unlock(&_sharedMultiCamSessionLock); + } +#endif + if (shouldStopRunning) { + [_captureSession stopRunning]; + } + self.isRunning = NO; +} + - (dispatch_queue_t)frameQueue { if (!_frameQueue) { _frameQueue = RTCDispatchQueueCreateWithTarget( @@ -459,7 +552,12 @@ - (BOOL)setupCaptureSession:(AVCaptureSession *)captureSession { RTCLogError(@"Video data output unsupported."); return NO; } + +#if TARGET_MULTICAM_CAPABLE + [_captureSession addOutputWithNoConnections:_videoDataOutput]; +#else [_captureSession addOutput:_videoDataOutput]; +#endif return YES; } @@ -544,22 +642,40 @@ - (void)reconfigureCaptureSessionInput { } [_captureSession beginConfiguration]; for (AVCaptureDeviceInput *oldInput in [_captureSession.inputs copy]) { - [_captureSession removeInput:oldInput]; + // Remove any old input with same device. 
+ if ([_currentDevice isEqual:oldInput.device]) { + [_captureSession removeInput:oldInput]; + } } + if ([_captureSession canAddInput:input]) { +#if TARGET_MULTICAM_CAPABLE + [_captureSession addInputWithNoConnections:input]; + + AVCaptureInputPort *videoPort = input.ports.firstObject; + _captureConnection = [AVCaptureConnection connectionWithInputPorts:@[ videoPort ] + output:_videoDataOutput]; + + [_captureSession addConnection:_captureConnection]; +#else [_captureSession addInput:input]; +#endif } else { RTCLogError(@"Cannot add camera as an input to the session."); } + [_captureSession commitConfiguration]; } -#if TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION +#if TARGET_WATCH_DEVICE_ROTATION - (void)updateOrientation { NSAssert([RTC_OBJC_TYPE(RTCDispatcher) isOnQueueForType:RTCDispatcherTypeMain], - @"statusBarOrientation must be called on the main queue."); - // statusBarOrientation must be called on the main queue - UIInterfaceOrientation newOrientation = [UIApplication sharedApplication].statusBarOrientation; + @"Retrieving device orientation must be called on the main queue."); + + // Must be called on the main queue. 
+ UIWindowScene *windowScene = + (UIWindowScene *)[UIApplication sharedApplication].connectedScenes.anyObject; + UIInterfaceOrientation newOrientation = windowScene.interfaceOrientation; [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ From 158cde8c66c61dd2bb968d9b0f23ea9af3d86784 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Thu, 22 Aug 2024 22:17:58 +0900 Subject: [PATCH 14/49] tvOS support (#139) 17.0+ only atm --------- Co-authored-by: cloudwebrtc --- DEPS | 2 +- sdk/objc/components/audio/RTCAudioSession.mm | 2 ++ .../capturer/RTCCameraVideoCapturer.m | 2 +- .../renderer/metal/RTCMTLRenderer.h | 2 +- sdk/objc/helpers/RTCCameraPreviewView.m | 33 +++++++++++-------- 5 files changed, 25 insertions(+), 16 deletions(-) diff --git a/DEPS b/DEPS index 39f4533a34..ab365900fc 100644 --- a/DEPS +++ b/DEPS @@ -54,7 +54,7 @@ deps = { 'src/base': 'https://chromium.googlesource.com/chromium/src/base@738cf0c976fd3d07c5f1853f050594c5295300d8', 'src/build': - 'https://github.com/webrtc-sdk/build@1367989663b6cd7873ec606d0ed1f7878dd3ea7d', + 'https://github.com/webrtc-sdk/build@6978bac6466311e4bee4c7a9fd395faa939e0fcd', 'src/buildtools': 'https://chromium.googlesource.com/chromium/src/buildtools@5eb927f0a922dfacf10cfa84ee76f39dcf2a7311', # Gradle 6.6.1. Used for testing Android Studio project generation for WebRTC. diff --git a/sdk/objc/components/audio/RTCAudioSession.mm b/sdk/objc/components/audio/RTCAudioSession.mm index 083d4737c4..11d1a1c337 100644 --- a/sdk/objc/components/audio/RTCAudioSession.mm +++ b/sdk/objc/components/audio/RTCAudioSession.mm @@ -739,6 +739,7 @@ - (BOOL)configureWebRTCSession:(NSError **)outError { return NO; } +#if !TARGET_OS_TV // Ensure that the device currently supports audio input. // TODO(tkchin): Figure out if this is really necessary. 
if (!self.inputAvailable) { @@ -749,6 +750,7 @@ - (BOOL)configureWebRTCSession:(NSError **)outError { } return NO; } +#endif // It can happen (e.g. in combination with BT devices) that the attempt to set // the preferred sample rate for WebRTC (48kHz) fails. If so, make a new diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index 09b3e0a476..65198b6298 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -21,7 +21,7 @@ // iOS 2.0+, iPadOS 2.0+, Mac Catalyst 13.0+ #define TARGET_WATCH_DEVICE_ROTATION \ - (TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION) + (TARGET_OS_IPHONE && !TARGET_OS_MACCATALYST && !TARGET_OS_VISION && !TARGET_OS_TV) #if TARGET_WATCH_DEVICE_ROTATION #import "helpers/UIDevice+RTCDevice.h" diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h index 6bbca3d985..8988fb189b 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.h +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.h @@ -37,7 +37,7 @@ NS_ASSUME_NONNULL_BEGIN * cleanups. 
*/ -#if TARGET_OS_IOS +#if TARGET_OS_IOS || TARGET_OS_TV - (BOOL)addRenderingDestination:(__kindof UIView *)view; #else - (BOOL)addRenderingDestination:(__kindof NSView *)view; diff --git a/sdk/objc/helpers/RTCCameraPreviewView.m b/sdk/objc/helpers/RTCCameraPreviewView.m index 7fb1831ed0..db8a265efd 100644 --- a/sdk/objc/helpers/RTCCameraPreviewView.m +++ b/sdk/objc/helpers/RTCCameraPreviewView.m @@ -26,22 +26,28 @@ + (Class)layerClass { - (instancetype)initWithFrame:(CGRect)aRect { self = [super initWithFrame:aRect]; if (self) { +#if !TARGET_OS_TV [self addOrientationObserver]; +#endif } return self; } -- (instancetype)initWithCoder:(NSCoder*)aDecoder { +- (instancetype)initWithCoder:(NSCoder *)aDecoder { self = [super initWithCoder:aDecoder]; if (self) { +#if !TARGET_OS_TV [self addOrientationObserver]; +#endif } return self; } +#if !TARGET_OS_TV - (void)dealloc { [self removeOrientationObserver]; } +#endif - (void)setCaptureSession:(AVCaptureSession *)captureSession { if (_captureSession == captureSession) { @@ -56,15 +62,18 @@ - (void)setCaptureSession:(AVCaptureSession *)captureSession { dispatchAsyncOnType:RTCDispatcherTypeCaptureSession block:^{ previewLayer.session = captureSession; +#if !TARGET_OS_TV [RTC_OBJC_TYPE(RTCDispatcher) dispatchAsyncOnType:RTCDispatcherTypeMain block:^{ [self setCorrectVideoOrientation]; }]; +#endif }]; }]; } +#if !TARGET_OS_TV - (void)layoutSubviews { [super layoutSubviews]; @@ -72,7 +81,7 @@ - (void)layoutSubviews { [self setCorrectVideoOrientation]; } --(void)orientationChanged:(NSNotification *)notification { +- (void)orientationChanged:(NSNotification *)notification { [self setCorrectVideoOrientation]; } @@ -85,17 +94,13 @@ - (void)setCorrectVideoOrientation { if (previewLayer.connection.isVideoOrientationSupported) { // Set the video orientation based on device orientation. 
if (deviceOrientation == UIDeviceOrientationPortraitUpsideDown) { - previewLayer.connection.videoOrientation = - AVCaptureVideoOrientationPortraitUpsideDown; + previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortraitUpsideDown; } else if (deviceOrientation == UIDeviceOrientationLandscapeRight) { - previewLayer.connection.videoOrientation = - AVCaptureVideoOrientationLandscapeRight; + previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeRight; } else if (deviceOrientation == UIDeviceOrientationLandscapeLeft) { - previewLayer.connection.videoOrientation = - AVCaptureVideoOrientationLandscapeLeft; + previewLayer.connection.videoOrientation = AVCaptureVideoOrientationLandscapeLeft; } else if (deviceOrientation == UIDeviceOrientationPortrait) { - previewLayer.connection.videoOrientation = - AVCaptureVideoOrientationPortrait; + previewLayer.connection.videoOrientation = AVCaptureVideoOrientationPortrait; } // If device orientation switches to FaceUp or FaceDown, don't change video orientation. 
} @@ -105,9 +110,9 @@ - (void)setCorrectVideoOrientation { - (void)addOrientationObserver { [[NSNotificationCenter defaultCenter] addObserver:self - selector:@selector(orientationChanged:) - name:UIDeviceOrientationDidChangeNotification - object:nil]; + selector:@selector(orientationChanged:) + name:UIDeviceOrientationDidChangeNotification + object:nil]; } - (void)removeOrientationObserver { @@ -116,6 +121,8 @@ - (void)removeOrientationObserver { object:nil]; } +#endif + - (AVCaptureVideoPreviewLayer *)previewLayer { return (AVCaptureVideoPreviewLayer *)self.layer; } From 14db92e4131185f5d80023cfe60e3fd5c15a897b Mon Sep 17 00:00:00 2001 From: davidliu Date: Mon, 2 Sep 2024 13:15:38 +0900 Subject: [PATCH 15/49] Add isDisposed to MediaStreamTrack (#140) --- sdk/android/api/org/webrtc/MediaStreamTrack.java | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/sdk/android/api/org/webrtc/MediaStreamTrack.java b/sdk/android/api/org/webrtc/MediaStreamTrack.java index 2e4c3e18f7..fa1c3d3b71 100644 --- a/sdk/android/api/org/webrtc/MediaStreamTrack.java +++ b/sdk/android/api/org/webrtc/MediaStreamTrack.java @@ -121,6 +121,10 @@ private void checkMediaStreamTrackExists() { } } + public boolean isDisposed() { + return nativeTrack == 0; + } + private static native String nativeGetId(long track); private static native String nativeGetKind(long track); private static native boolean nativeGetEnabled(long track); From cdc3bba5aa38910a55428b919ba45aceac1ad9ad Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Sun, 15 Sep 2024 07:03:46 +0800 Subject: [PATCH 16/49] chore: handle invalid cipher from key size. 
(#142) --- api/crypto/frame_crypto_transformer.cc | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc index 06ec888170..8c26080cb7 100644 --- a/api/crypto/frame_crypto_transformer.cc +++ b/api/crypto/frame_crypto_transformer.cc @@ -256,7 +256,10 @@ int AesCbcEncryptDecrypt(EncryptOrDecrypt mode, const rtc::ArrayView input, std::vector* output) { const EVP_CIPHER* cipher = GetAesCbcAlgorithmFromKeySize(raw_key.size()); - RTC_DCHECK(cipher); // Already handled in Init(); + if (!cipher) { + RTC_LOG(LS_ERROR) << "Invalid AES-CBC key size."; + return ErrorUnexpected; + } RTC_DCHECK_EQ(EVP_CIPHER_iv_length(cipher), iv.size()); RTC_DCHECK_EQ(EVP_CIPHER_key_length(cipher), raw_key.size()); @@ -297,9 +300,13 @@ int AesEncryptDecrypt(EncryptOrDecrypt mode, switch (algorithm) { case webrtc::FrameCryptorTransformer::Algorithm::kAesGcm: { unsigned int tag_length_bits = 128; + const EVP_AEAD* cipher = GetAesGcmAlgorithmFromKeySize(raw_key.size()); + if (!cipher) { + RTC_LOG(LS_ERROR) << "Invalid AES-GCM key size."; + return ErrorUnexpected; + } return AesGcmEncryptDecrypt( - mode, raw_key, data, tag_length_bits / 8, iv, additional_data, - GetAesGcmAlgorithmFromKeySize(raw_key.size()), buffer); + mode, raw_key, data, tag_length_bits / 8, iv, additional_data, cipher, buffer); } case webrtc::FrameCryptorTransformer::Algorithm::kAesCbc: return AesCbcEncryptDecrypt(mode, raw_key, iv, data, buffer); From 3c17c967193054f62a0508dc7da211aaf530a3f3 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Mon, 23 Sep 2024 18:39:25 +0900 Subject: [PATCH 17/49] Allow software AEC for Simulator (#143) ~Allow to use "googEchoCancellation" constraint for software AEC. 
For devices "googEchoCancellation" should be false to use VoiceProcessingIO.~ --- media/engine/webrtc_voice_engine.cc | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/media/engine/webrtc_voice_engine.cc b/media/engine/webrtc_voice_engine.cc index d627381f59..1c6c31020d 100644 --- a/media/engine/webrtc_voice_engine.cc +++ b/media/engine/webrtc_voice_engine.cc @@ -483,7 +483,7 @@ void WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { // Use desktop AEC by default, when not using hardware AEC. bool use_mobile_software_aec = false; -#if defined(WEBRTC_IOS) +#if defined(WEBRTC_IOS) && !TARGET_OS_SIMULATOR if (options.ios_force_software_aec_HACK && *options.ios_force_software_aec_HACK) { // EC may be forced on for a device known to have non-functioning platform @@ -501,7 +501,7 @@ void WebRtcVoiceEngine::ApplyOptions(const AudioOptions& options_in) { #endif // Set and adjust gain control options. -#if defined(WEBRTC_IOS) +#if defined(WEBRTC_IOS) && !TARGET_OS_SIMULATOR // On iOS, VPIO provides built-in AGC. options.auto_gain_control = false; RTC_LOG(LS_INFO) << "Always disable AGC on iOS. 
Use built-in instead."; From 7662c43c04689088b8d41818a7027ec6ed6cfc0b Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Tue, 24 Sep 2024 03:13:35 +0900 Subject: [PATCH 18/49] Fix AudioRenderer crash & expose AVAudioPCMBuffer (#144) --- sdk/BUILD.gn | 21 ++ .../api/RTCAudioRendererAdapter+Private.h | 36 +++ sdk/objc/api/RTCAudioRendererAdapter.h | 29 ++ sdk/objc/api/RTCAudioRendererAdapter.mm | 107 ++++++++ .../peerconnection/RTCAudioTrack+Private.h | 2 - sdk/objc/api/peerconnection/RTCAudioTrack.h | 5 +- sdk/objc/api/peerconnection/RTCAudioTrack.mm | 252 ++++-------------- sdk/objc/base/RTCAudioRenderer.h | 5 +- 8 files changed, 256 insertions(+), 201 deletions(-) create mode 100644 sdk/objc/api/RTCAudioRendererAdapter+Private.h create mode 100644 sdk/objc/api/RTCAudioRendererAdapter.h create mode 100644 sdk/objc/api/RTCAudioRendererAdapter.mm diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index 01dc112abb..33bf72df8e 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -939,6 +939,25 @@ if (is_ios || is_mac) { ] } + rtc_library("audiorendereradapter_objc") { + visibility = [ "*" ] + sources = [ + "objc/api/RTCAudioRendererAdapter+Private.h", + "objc/api/RTCAudioRendererAdapter.h", + "objc/api/RTCAudioRendererAdapter.mm", + ] + + configs += [ "..:common_objc" ] + public_configs = [ ":common_config_objc" ] + + deps = [ + ":base_objc", + ":native_api", + "../api:libjingle_peerconnection_api", + "../api:media_stream_interface", + ] + } + rtc_library("mediasource_objc") { sources = [ "objc/api/peerconnection/RTCMediaSource+Private.h", @@ -1150,6 +1169,7 @@ if (is_ios || is_mac) { ":objc_audio_device_module", ":videoframebuffer_objc", ":videorendereradapter_objc", + ":audiorendereradapter_objc", ":videosource_objc", ":videotoolbox_objc", "../api/crypto:frame_crypto_transformer", @@ -1895,6 +1915,7 @@ if (is_ios || is_mac) { "CoreMedia.framework", "CoreVideo.framework", "VideoToolbox.framework", + "Accelerate.framework", ] } } diff 
--git a/sdk/objc/api/RTCAudioRendererAdapter+Private.h b/sdk/objc/api/RTCAudioRendererAdapter+Private.h new file mode 100644 index 0000000000..8a914138de --- /dev/null +++ b/sdk/objc/api/RTCAudioRendererAdapter+Private.h @@ -0,0 +1,36 @@ +/* + * Copyright 2024 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import "RTCAudioRendererAdapter.h" + +#import "base/RTCAudioRenderer.h" + +#include "api/media_stream_interface.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE(RTCAudioRendererAdapter) () + +@property(nonatomic, readonly) id audioRenderer; + +@property(nonatomic, readonly) webrtc::AudioTrackSinkInterface *nativeAudioRenderer; + +- (instancetype)initWithNativeRenderer:(id)audioRenderer + NS_DESIGNATED_INITIALIZER; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/RTCAudioRendererAdapter.h b/sdk/objc/api/RTCAudioRendererAdapter.h new file mode 100644 index 0000000000..5753257182 --- /dev/null +++ b/sdk/objc/api/RTCAudioRendererAdapter.h @@ -0,0 +1,29 @@ +/* + * Copyright 2024 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#import + +#import "RTCMacros.h" + +NS_ASSUME_NONNULL_BEGIN + +@interface RTC_OBJC_TYPE (RTCAudioRendererAdapter): NSObject + +- (instancetype)init NS_UNAVAILABLE; + +@end + +NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/RTCAudioRendererAdapter.mm b/sdk/objc/api/RTCAudioRendererAdapter.mm new file mode 100644 index 0000000000..67c05792ad --- /dev/null +++ b/sdk/objc/api/RTCAudioRendererAdapter.mm @@ -0,0 +1,107 @@ +/* + * Copyright 2024 LiveKit + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#import +#import "RTCAudioRendererAdapter+Private.h" + +#include + +namespace webrtc { + +class AudioRendererAdapter : public webrtc::AudioTrackSinkInterface { + public: + AudioRendererAdapter(RTC_OBJC_TYPE(RTCAudioRendererAdapter) * adapter) { adapter_ = adapter; } + + private: + __weak RTC_OBJC_TYPE(RTCAudioRendererAdapter) * adapter_; + + void OnData(const void *audio_data, int bits_per_sample, int sample_rate, + size_t number_of_channels, size_t number_of_frames, + absl::optional absolute_capture_timestamp_ms) override { + OSStatus status; + AudioChannelLayout acl = {}; + acl.mChannelLayoutTag = + (number_of_channels == 2) ? kAudioChannelLayoutTag_Stereo : kAudioChannelLayoutTag_Mono; + + AudioStreamBasicDescription sd = { + .mSampleRate = static_cast(sample_rate), + .mFormatID = kAudioFormatLinearPCM, + .mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked, + .mBytesPerPacket = static_cast(number_of_channels * 4), + .mFramesPerPacket = 1, + .mBytesPerFrame = static_cast(number_of_channels * 4), + .mChannelsPerFrame = static_cast(number_of_channels), + .mBitsPerChannel = 32, + .mReserved = 0}; + + CMFormatDescriptionRef formatDescription = nullptr; + status = CMAudioFormatDescriptionCreate(kCFAllocatorDefault, &sd, sizeof(acl), &acl, 0, NULL, + NULL, &formatDescription); + if (status != noErr) { + NSLog(@"RTCAudioTrack: Failed to create audio formatDescription description. 
Error: %d", + (int)status); + return; + } + + AVAudioFormat *format = + [[AVAudioFormat alloc] initWithCMAudioFormatDescription:formatDescription]; + CFRelease(formatDescription); + + AVAudioFrameCount frameCount = static_cast(number_of_frames); + AVAudioPCMBuffer *pcmBuffer = [[AVAudioPCMBuffer alloc] initWithPCMFormat:format + frameCapacity:frameCount]; + if (!pcmBuffer) { + NSLog(@"Failed to create AVAudioPCMBuffer"); + return; + } + + pcmBuffer.frameLength = frameCount; + const int16_t *inputData = static_cast(audio_data); + const float scale = 1.0f / 32768.0f; + + dispatch_apply(number_of_channels, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), + ^(size_t channel) { + vDSP_vflt16(inputData + channel * number_of_frames, 1, + pcmBuffer.floatChannelData[channel], 1, frameCount); + vDSP_vsmul(pcmBuffer.floatChannelData[channel], 1, &scale, + pcmBuffer.floatChannelData[channel], 1, frameCount); + }); + + [adapter_.audioRenderer renderPCMBuffer:pcmBuffer]; + } +}; +} // namespace webrtc + +@implementation RTC_OBJC_TYPE (RTCAudioRendererAdapter) { + std::unique_ptr _adapter; +} + +@synthesize audioRenderer = _audioRenderer; + +- (instancetype)initWithNativeRenderer:(id)audioRenderer { + NSParameterAssert(audioRenderer); + if (self = [super init]) { + _audioRenderer = audioRenderer; + _adapter.reset(new webrtc::AudioRendererAdapter(self)); + } + return self; +} + +- (webrtc::AudioTrackSinkInterface *)nativeAudioRenderer { + return _adapter.get(); +} + +@end diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h index 38c0bd3b1b..731bc1d56d 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack+Private.h @@ -26,8 +26,6 @@ NS_ASSUME_NONNULL_BEGIN source:(RTC_OBJC_TYPE(RTCAudioSource) *)source trackId:(NSString *)trackId; -- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer; - @end NS_ASSUME_NONNULL_END diff --git 
a/sdk/objc/api/peerconnection/RTCAudioTrack.h b/sdk/objc/api/peerconnection/RTCAudioTrack.h index c8218ad926..6505686043 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.h +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.h @@ -24,13 +24,12 @@ RTC_OBJC_EXPORT /** The audio source for this audio track. */ @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCAudioSource) * source; -/** Register a renderer that will receive all audio CMSampleBuffers on this track. - * Does not retain. */ - (void)addRenderer:(id)renderer; -/** Deregister a renderer */ - (void)removeRenderer:(id)renderer; +- (void)removeAllRenderers; + @end NS_ASSUME_NONNULL_END diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index d79972eae1..dcdf9d93e8 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -17,173 +17,14 @@ #import "RTCAudioSource+Private.h" #import "RTCMediaStreamTrack+Private.h" #import "RTCPeerConnectionFactory+Private.h" +#import "api/RTCAudioRendererAdapter+Private.h" #import "helpers/NSString+StdString.h" #include "rtc_base/checks.h" -namespace webrtc { -/** - * Captures audio data and converts to CMSampleBuffers - */ -class AudioSinkConverter : public rtc::RefCountInterface, public webrtc::AudioTrackSinkInterface { - private: - os_unfair_lock *lock_; - __weak RTC_OBJC_TYPE(RTCAudioTrack) *audio_track_; - int64_t total_frames_ = 0; - bool attached_ = false; - - public: - AudioSinkConverter(RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack, os_unfair_lock *lock) { - RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter init"; - audio_track_ = audioTrack; - lock_ = lock; - } - - ~AudioSinkConverter() { - // - RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter dealloc"; - } - - // Must be called while locked - void TryAttach() { - if (attached_) { - // Already attached - return; - } - RTC_LOG(LS_INFO) << "RTCAudioTrack attaching sink..."; - // Reset for creating 
CMSampleTimingInfo correctly - audio_track_.nativeAudioTrack->AddSink(this); - total_frames_ = 0; - attached_ = true; - } - - // Must be called while locked - void TryDetach() { - if (!attached_) { - // Already detached - return; - } - RTC_LOG(LS_INFO) << "RTCAudioTrack detaching sink..."; - audio_track_.nativeAudioTrack->RemoveSink(this); - attached_ = false; - } - - void OnData(const void *audio_data, - int bits_per_sample, - int sample_rate, - size_t number_of_channels, - size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms) override { - RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter OnData bits_per_sample: " - << bits_per_sample << " sample_rate: " << sample_rate - << " number_of_channels: " << number_of_channels - << " number_of_frames: " << number_of_frames - << " absolute_capture_timestamp_ms: " - << (absolute_capture_timestamp_ms ? absolute_capture_timestamp_ms.value() : 0); - - bool is_locked = os_unfair_lock_trylock(lock_); - if (!is_locked) { - RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter OnData already locked, skipping..."; - return; - } - bool is_attached = attached_; - os_unfair_lock_unlock(lock_); - - if (!is_attached) { - RTC_LOG(LS_INFO) << "RTCAudioTrack.AudioSinkConverter OnData already detached, skipping..."; - return; - } - - /* - * Convert to CMSampleBuffer - */ - - if (!(number_of_channels == 1 || number_of_channels == 2)) { - NSLog(@"RTCAudioTrack: Only mono or stereo is supported currently. numberOfChannels: %zu", - number_of_channels); - return; - } - - OSStatus status; - - AudioChannelLayout acl; - bzero(&acl, sizeof(acl)); - acl.mChannelLayoutTag = - number_of_channels == 2 ? 
kAudioChannelLayoutTag_Stereo : kAudioChannelLayoutTag_Mono; - - AudioStreamBasicDescription sd; - sd.mSampleRate = sample_rate; - sd.mFormatID = kAudioFormatLinearPCM; - sd.mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked; - sd.mFramesPerPacket = 1; - sd.mChannelsPerFrame = number_of_channels; - sd.mBitsPerChannel = bits_per_sample; /* 16 */ - sd.mBytesPerFrame = sd.mChannelsPerFrame * (sd.mBitsPerChannel / 8); - sd.mBytesPerPacket = sd.mBytesPerFrame; - - CMSampleTimingInfo timing = { - CMTimeMake(1, sample_rate), - CMTimeMake(total_frames_, sample_rate), - kCMTimeInvalid, - }; - - total_frames_ += number_of_frames; // update the total - - CMFormatDescriptionRef format = NULL; - status = CMAudioFormatDescriptionCreate( - kCFAllocatorDefault, &sd, sizeof(acl), &acl, 0, NULL, NULL, &format); - - if (status != 0) { - NSLog(@"RTCAudioTrack: Failed to create audio format description"); - return; - } - - CMSampleBufferRef buffer; - status = CMSampleBufferCreate(kCFAllocatorDefault, - NULL, - false, - NULL, - NULL, - format, - (CMItemCount)number_of_frames, - 1, - &timing, - 0, - NULL, - &buffer); - // format is no longer required - CFRelease(format); - - if (status != 0) { - NSLog(@"RTCAudioTrack: Failed to allocate sample buffer"); - return; - } - - AudioBufferList bufferList; - bufferList.mNumberBuffers = 1; - bufferList.mBuffers[0].mNumberChannels = sd.mChannelsPerFrame; - bufferList.mBuffers[0].mDataByteSize = (UInt32)(number_of_frames * sd.mBytesPerFrame); - bufferList.mBuffers[0].mData = (void *)audio_data; - status = CMSampleBufferSetDataBufferFromAudioBufferList( - buffer, kCFAllocatorDefault, kCFAllocatorDefault, 0, &bufferList); - if (status != 0) { - NSLog(@"RTCAudioTrack: Failed to convert audio buffer list into sample buffer"); - return; - } - - // Report back to RTCAudioTrack - [audio_track_ didCaptureSampleBuffer:buffer]; - - CFRelease(buffer); - } -}; -} // namespace webrtc - @implementation RTC_OBJC_TYPE (RTCAudioTrack) 
{ - rtc::scoped_refptr _audioConverter; - // Stores weak references to renderers - NSHashTable *_renderers; - os_unfair_lock _lock; + rtc::Thread *_signalingThread; + NSMutableArray *_adapters; } @synthesize source = _source; @@ -201,6 +42,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto if (self = [self initWithFactory:factory nativeTrack:track type:RTCMediaStreamTrackTypeAudio]) { _source = source; } + return self; } @@ -211,20 +53,15 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto NSParameterAssert(nativeTrack); NSParameterAssert(type == RTCMediaStreamTrackTypeAudio); if (self = [super initWithFactory:factory nativeTrack:nativeTrack type:type]) { - RTC_LOG(LS_INFO) << "RTCAudioTrack init"; - _renderers = [NSHashTable weakObjectsHashTable]; - _audioConverter = new rtc::RefCountedObject(self, &_lock); + _adapters = [NSMutableArray array]; + _signalingThread = factory.signalingThread; } return self; } - (void)dealloc { - os_unfair_lock_lock(&_lock); - _audioConverter->TryDetach(); - os_unfair_lock_unlock(&_lock); - - RTC_LOG(LS_INFO) << "RTCAudioTrack dealloc"; + [self removeAllRenderers]; } - (RTC_OBJC_TYPE(RTCAudioSource) *)source { @@ -239,22 +76,61 @@ - (void)dealloc { } - (void)addRenderer:(id)renderer { - os_unfair_lock_lock(&_lock); - [_renderers addObject:renderer]; - _audioConverter->TryAttach(); - os_unfair_lock_unlock(&_lock); + if (!_signalingThread->IsCurrent()) { + _signalingThread->BlockingCall([renderer, self] { [self addRenderer:renderer]; }); + return; + } + + // Make sure we don't have this renderer yet. + for (RTC_OBJC_TYPE(RTCAudioRendererAdapter) * adapter in _adapters) { + if (adapter.audioRenderer == renderer) { + RTC_LOG(LS_INFO) << "|renderer| is already attached to this track"; + return; + } + } + // Create a wrapper that provides a native pointer for us. 
+ RTC_OBJC_TYPE(RTCAudioRendererAdapter) *adapter = + [[RTC_OBJC_TYPE(RTCAudioRendererAdapter) alloc] initWithNativeRenderer:renderer]; + [_adapters addObject:adapter]; + self.nativeAudioTrack->AddSink(adapter.nativeAudioRenderer); } - (void)removeRenderer:(id)renderer { - os_unfair_lock_lock(&_lock); - [_renderers removeObject:renderer]; - NSUInteger renderersCount = _renderers.allObjects.count; + if (!_signalingThread->IsCurrent()) { + _signalingThread->BlockingCall([renderer, self] { [self removeRenderer:renderer]; }); + return; + } + __block NSUInteger indexToRemove = NSNotFound; + [_adapters enumerateObjectsUsingBlock:^(RTC_OBJC_TYPE(RTCAudioRendererAdapter) * adapter, + NSUInteger idx, BOOL * stop) { + if (adapter.audioRenderer == renderer) { + indexToRemove = idx; + *stop = YES; + } + }]; + if (indexToRemove == NSNotFound) { + RTC_LOG(LS_INFO) << "removeRenderer called with a renderer that has not been previously added"; + return; + } + RTC_OBJC_TYPE(RTCAudioRendererAdapter) *adapterToRemove = [_adapters objectAtIndex:indexToRemove]; + self.nativeAudioTrack->RemoveSink(adapterToRemove.nativeAudioRenderer); + [_adapters removeObjectAtIndex:indexToRemove]; +} - if (renderersCount == 0) { - // Detach if no more renderers... - _audioConverter->TryDetach(); +- (void)removeAllRenderers { + // Ensure the method is executed on the signaling thread. + if (!_signalingThread->IsCurrent()) { + _signalingThread->BlockingCall([self] { [self removeAllRenderers]; }); + return; } - os_unfair_lock_unlock(&_lock); + + // Iterate over all adapters and remove each one from the native audio track. + for (RTC_OBJC_TYPE(RTCAudioRendererAdapter) * adapter in _adapters) { + self.nativeAudioTrack->RemoveSink(adapter.nativeAudioRenderer); + } + + // Clear the adapters array after all sinks have been removed. 
+ [_adapters removeAllObjects]; } #pragma mark - Private @@ -264,18 +140,4 @@ - (void)removeRenderer:(id)renderer { static_cast(self.nativeTrack.get())); } -- (void)didCaptureSampleBuffer:(CMSampleBufferRef)sampleBuffer { - bool is_locked = os_unfair_lock_trylock(&_lock); - if (!is_locked) { - RTC_LOG(LS_INFO) << "RTCAudioTrack didCaptureSampleBuffer already locked, skipping..."; - return; - } - NSArray *renderers = [_renderers allObjects]; - os_unfair_lock_unlock(&_lock); - - for (id renderer in renderers) { - [renderer renderSampleBuffer:sampleBuffer]; - } -} - @end diff --git a/sdk/objc/base/RTCAudioRenderer.h b/sdk/objc/base/RTCAudioRenderer.h index 3669831fca..73bad7d39c 100644 --- a/sdk/objc/base/RTCAudioRenderer.h +++ b/sdk/objc/base/RTCAudioRenderer.h @@ -15,6 +15,9 @@ */ #import +#import +#import + #if TARGET_OS_IPHONE #import #endif @@ -25,7 +28,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE(RTCAudioRenderer) -- (void)renderSampleBuffer: (CMSampleBufferRef)sampleBuffer NS_SWIFT_NAME(render(sampleBuffer:)); +- (void)renderPCMBuffer: (AVAudioPCMBuffer *)pcmBuffer NS_SWIFT_NAME(render(pcmBuffer:)); @end From d84b36ee440af5da9083ea3bfbaf851f2f457e50 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Fri, 4 Oct 2024 20:49:42 +0800 Subject: [PATCH 19/49] fix: Fix bug for bypass voice processing. 
(#147) --- .../peerconnection/RTCPeerConnectionFactory.mm | 18 +++++++++++------- 1 file changed, 11 insertions(+), 7 deletions(-) diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index c06f4b36fe..72ac70a781 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -238,7 +238,7 @@ - (instancetype)initWithNativeAudioEncoderFactory: audioDeviceModule:audioDeviceModule audioProcessingModule:audioProcessingModule networkControllerFactory:nullptr - bypassVoiceProcessing:NO]; + bypassVoiceProcessing:bypassVoiceProcessing]; } - (instancetype)initWithNativeAudioEncoderFactory: (rtc::scoped_refptr)audioEncoderFactory @@ -267,12 +267,16 @@ - (instancetype)initWithNativeAudioEncoderFactory: dependencies.task_queue_factory = webrtc::CreateDefaultTaskQueueFactory(dependencies.trials.get()); - // always create ADM on worker thread - _nativeAudioDeviceModule = _workerThread->BlockingCall([&dependencies, &bypassVoiceProcessing]() { - return webrtc::AudioDeviceModule::Create(webrtc::AudioDeviceModule::AudioLayer::kPlatformDefaultAudio, - dependencies.task_queue_factory.get(), - bypassVoiceProcessing == YES); - }); + if(audioDeviceModule) { + _nativeAudioDeviceModule = std::move(audioDeviceModule); + } else { + // always create ADM on worker thread + _nativeAudioDeviceModule = _workerThread->BlockingCall([&dependencies, &bypassVoiceProcessing]() { + return webrtc::AudioDeviceModule::Create(webrtc::AudioDeviceModule::AudioLayer::kPlatformDefaultAudio, + dependencies.task_queue_factory.get(), + bypassVoiceProcessing == YES); + }); + } _audioDeviceModule = [[RTC_OBJC_TYPE(RTCAudioDeviceModule) alloc] initWithNativeModule: _nativeAudioDeviceModule workerThread: _workerThread.get()]; From 0ae5688d4d490274ea47fee3f87f622ef3f13113 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Mon, 7 Oct 2024 22:00:18 +0800 Subject: [PATCH 20/49] 
chore: remove aes cbc for framecryptor. (#145) --- api/crypto/frame_crypto_transformer.cc | 47 ++----------------- .../api/org/webrtc/FrameCryptorAlgorithm.java | 1 - sdk/android/src/jni/pc/frame_cryptor.cc | 2 - sdk/objc/api/peerconnection/RTCFrameCryptor.h | 1 - .../api/peerconnection/RTCFrameCryptor.mm | 2 - 5 files changed, 3 insertions(+), 50 deletions(-) diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc index 8c26080cb7..5f6ea55a60 100644 --- a/api/crypto/frame_crypto_transformer.cc +++ b/api/crypto/frame_crypto_transformer.cc @@ -250,46 +250,6 @@ int AesGcmEncryptDecrypt(EncryptOrDecrypt mode, return Success; } -int AesCbcEncryptDecrypt(EncryptOrDecrypt mode, - const std::vector& raw_key, - rtc::ArrayView iv, - const rtc::ArrayView input, - std::vector* output) { - const EVP_CIPHER* cipher = GetAesCbcAlgorithmFromKeySize(raw_key.size()); - if (!cipher) { - RTC_LOG(LS_ERROR) << "Invalid AES-CBC key size."; - return ErrorUnexpected; - } - RTC_DCHECK_EQ(EVP_CIPHER_iv_length(cipher), iv.size()); - RTC_DCHECK_EQ(EVP_CIPHER_key_length(cipher), raw_key.size()); - - bssl::ScopedEVP_CIPHER_CTX ctx; - if (!EVP_CipherInit_ex(ctx.get(), cipher, nullptr, - reinterpret_cast(raw_key.data()), - iv.data(), - mode == EncryptOrDecrypt::kEncrypt ? 1 : 0)) { - return OperationError; - } - - // Encrypting needs a block size of space to allow for any padding. - output->resize(input.size() + - (mode == EncryptOrDecrypt::kEncrypt ? iv.size() : 0)); - int out_len; - if (!EVP_CipherUpdate(ctx.get(), output->data(), &out_len, input.data(), - input.size())) - return OperationError; - - // Write out the final block plus padding (if any) to the end of the data - // just written. 
- int tail_len; - if (!EVP_CipherFinal_ex(ctx.get(), output->data() + out_len, &tail_len)) - return OperationError; - - out_len += tail_len; - RTC_CHECK_LE(out_len, static_cast(output->size())); - return Success; -} - int AesEncryptDecrypt(EncryptOrDecrypt mode, webrtc::FrameCryptorTransformer::Algorithm algorithm, const std::vector& raw_key, @@ -308,8 +268,9 @@ int AesEncryptDecrypt(EncryptOrDecrypt mode, return AesGcmEncryptDecrypt( mode, raw_key, data, tag_length_bits / 8, iv, additional_data, cipher, buffer); } - case webrtc::FrameCryptorTransformer::Algorithm::kAesCbc: - return AesCbcEncryptDecrypt(mode, raw_key, iv, data, buffer); + default: + RTC_LOG(LS_ERROR) << "Unsupported algorithm."; + return ErrorUnexpected; } } namespace webrtc { @@ -724,8 +685,6 @@ uint8_t FrameCryptorTransformer::getIvSize() { switch (algorithm_) { case Algorithm::kAesGcm: return 12; - case Algorithm::kAesCbc: - return 16; default: return 0; } diff --git a/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java b/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java index d0d4dc8374..121656cc99 100644 --- a/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java +++ b/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java @@ -18,5 +18,4 @@ public enum FrameCryptorAlgorithm { AES_GCM, - AES_CBC, } \ No newline at end of file diff --git a/sdk/android/src/jni/pc/frame_cryptor.cc b/sdk/android/src/jni/pc/frame_cryptor.cc index af2fd8f2b0..33b938f7f4 100644 --- a/sdk/android/src/jni/pc/frame_cryptor.cc +++ b/sdk/android/src/jni/pc/frame_cryptor.cc @@ -103,8 +103,6 @@ webrtc::FrameCryptorTransformer::Algorithm AlgorithmFromIndex(int index) { switch (index) { case 0: return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; - case 1: - return webrtc::FrameCryptorTransformer::Algorithm::kAesCbc; default: return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; } diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.h b/sdk/objc/api/peerconnection/RTCFrameCryptor.h index 
864e55be95..43719f35d8 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptor.h +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.h @@ -28,7 +28,6 @@ NS_ASSUME_NONNULL_BEGIN typedef NS_ENUM(NSUInteger, RTCCryptorAlgorithm) { RTCCryptorAlgorithmAesGcm = 0, - RTCCryptorAlgorithmAesCbc, }; typedef NS_ENUM(NSInteger, FrameCryptionState) { diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm index 7e6cd699d7..41893b086c 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm @@ -106,8 +106,6 @@ @implementation RTC_OBJC_TYPE (RTCFrameCryptor) { switch (algorithm) { case RTCCryptorAlgorithmAesGcm: return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; - case RTCCryptorAlgorithmAesCbc: - return webrtc::FrameCryptorTransformer::Algorithm::kAesCbc; default: return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; } From c38ce7f37c6a7ada209b7143660180bb6e6a0fbf Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Sat, 19 Oct 2024 15:51:27 +0900 Subject: [PATCH 21/49] Change audio renderer output format (#149) Instead of converting to Float, output original Int data without conversion. Output the raw format and convert when required. 
--- sdk/objc/api/RTCAudioRendererAdapter.mm | 35 ++++++++++++++++--------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/sdk/objc/api/RTCAudioRendererAdapter.mm b/sdk/objc/api/RTCAudioRendererAdapter.mm index 67c05792ad..20b4a651b9 100644 --- a/sdk/objc/api/RTCAudioRendererAdapter.mm +++ b/sdk/objc/api/RTCAudioRendererAdapter.mm @@ -39,12 +39,12 @@ void OnData(const void *audio_data, int bits_per_sample, int sample_rate, AudioStreamBasicDescription sd = { .mSampleRate = static_cast(sample_rate), .mFormatID = kAudioFormatLinearPCM, - .mFormatFlags = kAudioFormatFlagIsFloat | kAudioFormatFlagIsPacked, - .mBytesPerPacket = static_cast(number_of_channels * 4), + .mFormatFlags = kLinearPCMFormatFlagIsSignedInteger | kLinearPCMFormatFlagIsPacked, + .mBytesPerPacket = static_cast(number_of_channels * 2), .mFramesPerPacket = 1, - .mBytesPerFrame = static_cast(number_of_channels * 4), + .mBytesPerFrame = static_cast(number_of_channels * 2), .mChannelsPerFrame = static_cast(number_of_channels), - .mBitsPerChannel = 32, + .mBitsPerChannel = 16, .mReserved = 0}; CMFormatDescriptionRef formatDescription = nullptr; @@ -69,16 +69,25 @@ void OnData(const void *audio_data, int bits_per_sample, int sample_rate, } pcmBuffer.frameLength = frameCount; + + // Handle both mono and stereo const int16_t *inputData = static_cast(audio_data); - const float scale = 1.0f / 32768.0f; - - dispatch_apply(number_of_channels, dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0), - ^(size_t channel) { - vDSP_vflt16(inputData + channel * number_of_frames, 1, - pcmBuffer.floatChannelData[channel], 1, frameCount); - vDSP_vsmul(pcmBuffer.floatChannelData[channel], 1, &scale, - pcmBuffer.floatChannelData[channel], 1, frameCount); - }); + if (number_of_channels == 1) { + // Mono: straight copy + memcpy(pcmBuffer.int16ChannelData[0], inputData, number_of_frames * sizeof(int16_t)); + } else if (number_of_channels == 2) { + // Stereo: manual deinterleave + int16_t *leftChannel = 
pcmBuffer.int16ChannelData[0]; + int16_t *rightChannel = pcmBuffer.int16ChannelData[1]; + + for (size_t i = 0; i < number_of_frames; i++) { + leftChannel[i] = inputData[i * 2]; + rightChannel[i] = inputData[i * 2 + 1]; + } + } else { + NSLog(@"Unsupported number of channels: %zu", number_of_channels); + return; + } [adapter_.audioRenderer renderPCMBuffer:pcmBuffer]; } From cd6792e5cacf5bf85bdb344bc83a66fca04e36bb Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Sat, 19 Oct 2024 16:54:56 +0900 Subject: [PATCH 22/49] Fixed issue with missing network interfaces on iOS (#151) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Related issue: https://github.com/webrtc-sdk/webrtc/issues/148 Cherry-pick : https://webrtc.googlesource.com/src/+/fea60ef8e72fb17b4f8a5363aff7e63ab8027b4f Fixed issue with network interfaces due to a missing return value in the "nw_path_enumerate_interfaces(...)" block. Exposed in iOS 18, RTCNetworkMonitor::initWithObserver will only enumerate the first interface, instead of all device interfaces Bug: webrtc:359245764 Change-Id: Ifb9f28c33306c0096476a4afb0cdb4d734e87b2c Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/359541 Auto-Submit: Corby Commit-Queue: Jonas Oreland Reviewed-by: Kári Helgason Reviewed-by: Jonas Oreland Cr-Commit-Position: refs/heads/main@{#42818} Co-authored-by: Corby Hoback --- AUTHORS | 1 + sdk/objc/components/network/RTCNetworkMonitor.mm | 1 + 2 files changed, 2 insertions(+) diff --git a/AUTHORS b/AUTHORS index c0c40930d6..16467a57aa 100644 --- a/AUTHORS +++ b/AUTHORS @@ -33,6 +33,7 @@ Christophe Dumez Chris Tserng Cody Barnes Colin Plumb +Corby Hoback Cyril Lashkevich CZ Theng Danail Kirov diff --git a/sdk/objc/components/network/RTCNetworkMonitor.mm b/sdk/objc/components/network/RTCNetworkMonitor.mm index 2e42ab5290..a3685e0556 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.mm +++ 
b/sdk/objc/components/network/RTCNetworkMonitor.mm @@ -89,6 +89,7 @@ - (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer { RTCLog(@"NW path monitor available interface: %s", name); rtc::AdapterType adapterType = AdapterTypeFromInterfaceType(interfaceType); map->insert(std::pair(name, adapterType)); + return true; }); @synchronized(strongSelf) { webrtc::NetworkMonitorObserver *observer = strongSelf->_observer; From 543121ba1cd47780e92d48546b880333265b37b5 Mon Sep 17 00:00:00 2001 From: davidliu Date: Wed, 30 Oct 2024 20:33:46 -0700 Subject: [PATCH 23/49] Custom audio input for Android (#154) --- .../webrtc/audio/JavaAudioDeviceModule.java | 27 +- .../org/webrtc/audio/WebRtcAudioRecord.java | 326 ++++++++++++------ 2 files changed, 247 insertions(+), 106 deletions(-) diff --git a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java index 8ef45f727b..00b8ce8e14 100644 --- a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java @@ -16,6 +16,7 @@ import android.media.AudioManager; import android.os.Build; import androidx.annotation.RequiresApi; +import java.nio.ByteBuffer; import java.util.concurrent.ScheduledExecutorService; import org.webrtc.JniCommon; import org.webrtc.Logging; @@ -43,6 +44,7 @@ public static class Builder { private AudioRecordErrorCallback audioRecordErrorCallback; private SamplesReadyCallback samplesReadyCallback; private PlaybackSamplesReadyCallback playbackSamplesReadyCallback; + private AudioBufferCallback audioBufferCallback; private AudioTrackStateCallback audioTrackStateCallback; private AudioRecordStateCallback audioRecordStateCallback; private boolean useHardwareAcousticEchoCanceler = isBuiltInAcousticEchoCancelerSupported(); @@ -141,6 +143,14 @@ public Builder setSamplesReadyCallback(SamplesReadyCallback samplesReadyCallback return this; } + /** + * Set a callback to listen for 
buffer requests from the AudioRecord. + */ + public Builder setAudioBufferCallback(AudioBufferCallback audioBufferCallback) { + this.audioBufferCallback = audioBufferCallback; + return this; + } + /** * Set a callback to listen to the audio output passed to the AudioTrack. */ @@ -264,7 +274,8 @@ public JavaAudioDeviceModule createAudioDeviceModule() { } final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager, audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback, - samplesReadyCallback, useHardwareAcousticEchoCanceler, useHardwareNoiseSuppressor); + samplesReadyCallback, audioBufferCallback, useHardwareAcousticEchoCanceler, + useHardwareNoiseSuppressor); final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback, audioTrackStateCallback, playbackSamplesReadyCallback, useLowLatency, enableVolumeLogger); @@ -358,6 +369,16 @@ public static interface AudioTrackStateCallback { void onWebRtcAudioTrackStop(); } + public static interface AudioBufferCallback { + /** + * Called when new audio samples are ready. + * @param buffer the buffer of audio bytes. Changes to this buffer will be published on the audio track. + * @param captureTimeNs the capture timestamp of the original audio data. + * @return the capture timestamp in nanoseconds. Return 0 if not available. + */ + long onBuffer(ByteBuffer buffer, int audioFormat, int channelCount, int sampleRate, int bytesRead, long captureTimeNs); + } + /** * Returns true if the device supports built-in HW AEC, and the UUID is approved (some UUIDs can * be excluded). 
@@ -432,6 +453,10 @@ public void setMicrophoneMute(boolean mute) { audioInput.setMicrophoneMute(mute); } + public void setAudioRecordEnabled(boolean enable) { + audioInput.setUseAudioRecord(enable); + } + @Override public boolean setNoiseSuppressorEnabled(boolean enabled) { Logging.d(TAG, "setNoiseSuppressorEnabled: " + enabled); diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java index cfb651f6cd..7ae525ee06 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java @@ -39,6 +39,7 @@ import org.webrtc.CalledByNative; import org.webrtc.Logging; import org.webrtc.ThreadUtils; +import org.webrtc.audio.JavaAudioDeviceModule.AudioBufferCallback; import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback; import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStartErrorCode; import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback; @@ -83,6 +84,8 @@ class WebRtcAudioRecord { private final AudioManager audioManager; private final int audioSource; private final int audioFormat; + private int channelCount; + private int sampleRate; private long nativeAudioRecord; @@ -90,7 +93,10 @@ class WebRtcAudioRecord { private @Nullable ByteBuffer byteBuffer; - private @Nullable AudioRecord audioRecord; + private final Object audioRecordStateLock = new Object(); + private boolean useAudioRecord = true; + private volatile @Nullable AudioRecord audioRecord; + private @Nullable AudioRecordThread audioThread; private @Nullable AudioDeviceInfo preferredDevice; @@ -105,6 +111,7 @@ class WebRtcAudioRecord { private final @Nullable AudioRecordErrorCallback errorCallback; private final @Nullable AudioRecordStateCallback stateCallback; private final @Nullable SamplesReadyCallback audioSamplesReadyCallback; + private final @Nullable AudioBufferCallback audioBufferCallback; private final boolean 
isAcousticEchoCancelerSupported; private final boolean isNoiseSuppressorSupported; @@ -125,7 +132,9 @@ public AudioRecordThread(String name) { public void run() { Process.setThreadPriority(Process.THREAD_PRIORITY_URGENT_AUDIO); Logging.d(TAG, "AudioRecordThread" + WebRtcAudioUtils.getThreadInfo()); - assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING); + if (audioRecord != null) { + assertTrue(audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING); + } // Audio recording has started and the client is informed about it. doAudioRecordStateCallback(AUDIO_RECORD_START); @@ -136,52 +145,111 @@ public void run() { audioTimestamp = new AudioTimestamp(); } while (keepAlive) { - int bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity()); - if (bytesRead == byteBuffer.capacity()) { - if (microphoneMute) { - byteBuffer.clear(); - byteBuffer.put(emptyBytes); + long captureTimeNs = 0; + AudioRecord audioRecord; + + synchronized (audioRecordStateLock) { + audioRecord = WebRtcAudioRecord.this.audioRecord; + } + + if (audioRecord == null && useAudioRecord) { + boolean result = initAudioRecord(); + + if (!result) { + // Failed audio record init, don't try again. 
+ useAudioRecord = false; + } else { + synchronized (audioRecordStateLock) { + audioRecord = WebRtcAudioRecord.this.audioRecord; + } + + assertTrue(audioRecord != null); + try { + audioRecord.startRecording(); + } catch (IllegalStateException e) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, + "AudioRecord.startRecording failed: " + e.getMessage()); + audioRecord = null; + useAudioRecord = false; + } + if (useAudioRecord && audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, + "AudioRecord.startRecording failed - incorrect state: " + + audioRecord.getRecordingState()); + audioRecord = null; + useAudioRecord = false; + } } - // It's possible we've been shut down during the read, and stopRecording() tried and - // failed to join this thread. To be a bit safer, try to avoid calling any native methods - // in case they've been unregistered after stopRecording() returned. 
- if (keepAlive) { - long captureTimeNs = 0; - if (Build.VERSION.SDK_INT >= 24) { - if (audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC) - == AudioRecord.SUCCESS) { - captureTimeNs = audioTimestamp.nanoTime; + } + + if (audioRecord != null && !useAudioRecord) { + audioRecord = null; + releaseAudioResources(); + } + + int bytesRead = 0; + if (audioRecord != null) { + bytesRead = audioRecord.read(byteBuffer, byteBuffer.capacity()); + if (bytesRead == byteBuffer.capacity()) { + if (microphoneMute) { + byteBuffer.clear(); + byteBuffer.put(emptyBytes); + } + + if (keepAlive) { + if (Build.VERSION.SDK_INT >= 24) { + if (audioRecord.getTimestamp(audioTimestamp, AudioTimestamp.TIMEBASE_MONOTONIC) + == AudioRecord.SUCCESS) { + captureTimeNs = audioTimestamp.nanoTime; + } } } - nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs); - } - if (audioSamplesReadyCallback != null) { - // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily - // at index 0. - byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), - byteBuffer.capacity() + byteBuffer.arrayOffset()); - audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady( - new JavaAudioDeviceModule.AudioSamples(audioRecord.getAudioFormat(), - audioRecord.getChannelCount(), audioRecord.getSampleRate(), data)); + } else { + String errorMessage = "AudioRecord.read failed: " + bytesRead; + Logging.e(TAG, errorMessage); + + if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) { + keepAlive = false; + reportWebRtcAudioRecordError(errorMessage); + } + // AudioRecord is primary driver in this path, so try again if possible. 
+ continue; } } else { - String errorMessage = "AudioRecord.read failed: " + bytesRead; - Logging.e(TAG, errorMessage); - if (bytesRead == AudioRecord.ERROR_INVALID_OPERATION) { - keepAlive = false; - reportWebRtcAudioRecordError(errorMessage); - } + byteBuffer.clear(); + byteBuffer.put(emptyBytes); + } + + if (keepAlive && audioBufferCallback != null) { + captureTimeNs = audioBufferCallback.onBuffer(byteBuffer, audioFormat, + channelCount, sampleRate, bytesRead, captureTimeNs); + bytesRead = byteBuffer.capacity(); + } + + // It's possible we've been shut down during the read, and stopRecording() tried and + // failed to join this thread. To be a bit safer, try to avoid calling any native methods + // in case they've been unregistered after stopRecording() returned. + if (keepAlive) { + nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs); + } + if (audioSamplesReadyCallback != null) { + // Copy the entire byte buffer array. The start of the byteBuffer is not necessarily + // at index 0. + byte[] data = Arrays.copyOfRange(byteBuffer.array(), byteBuffer.arrayOffset(), + byteBuffer.capacity() + byteBuffer.arrayOffset()); + audioSamplesReadyCallback.onWebRtcAudioRecordSamplesReady( + new JavaAudioDeviceModule.AudioSamples(audioFormat, channelCount, sampleRate, data)); } } try { if (audioRecord != null) { audioRecord.stop(); - doAudioRecordStateCallback(AUDIO_RECORD_STOP); } } catch (IllegalStateException e) { Logging.e(TAG, "AudioRecord.stop failed: " + e.getMessage()); } + doAudioRecordStateCallback(AUDIO_RECORD_STOP); } // Stops the inner thread loop and also calls AudioRecord.stop(). 
@@ -196,7 +264,8 @@ public void stopThread() { WebRtcAudioRecord(Context context, AudioManager audioManager) { this(context, newDefaultScheduler() /* scheduler */, audioManager, DEFAULT_AUDIO_SOURCE, DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */, - null /* audioSamplesReadyCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(), + null /* audioSamplesReadyCallback */, null /* audioBufferCallback */, + WebRtcAudioEffects.isAcousticEchoCancelerSupported(), WebRtcAudioEffects.isNoiseSuppressorSupported()); } @@ -205,6 +274,7 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, @Nullable AudioRecordErrorCallback errorCallback, @Nullable AudioRecordStateCallback stateCallback, @Nullable SamplesReadyCallback audioSamplesReadyCallback, + @Nullable AudioBufferCallback audioBufferCallback, boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) { if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) { throw new IllegalArgumentException("HW AEC not supported"); @@ -220,6 +290,7 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, this.errorCallback = errorCallback; this.stateCallback = stateCallback; this.audioSamplesReadyCallback = audioSamplesReadyCallback; + this.audioBufferCallback = audioBufferCallback; this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported; this.isNoiseSuppressorSupported = isNoiseSuppressorSupported; Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); @@ -273,6 +344,11 @@ private boolean enableBuiltInNS(boolean enable) { return effects.setNS(enable); } + public void setUseAudioRecord(boolean enable) { + Logging.d(TAG, "setUseAudioRecord(" + enable + ")"); + this.useAudioRecord = enable; + } + @CalledByNative private int initRecording(int sampleRate, int channels) { Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")"); @@ -280,6 +356,8 @@ 
private int initRecording(int sampleRate, int channels) { reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording."); return -1; } + this.sampleRate = sampleRate; + this.channelCount = channels; final int bytesPerFrame = channels * getBytesPerSample(audioFormat); final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND; byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer); @@ -294,53 +372,13 @@ private int initRecording(int sampleRate, int channels) { // the native class cache the address to the memory once. nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer); - // Get the minimum buffer size required for the successful creation of - // an AudioRecord object, in byte units. - // Note that this size doesn't guarantee a smooth recording under load. - final int channelConfig = channelCountToConfiguration(channels); - int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat); - if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) { - reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize); - return -1; - } - Logging.d(TAG, "AudioRecord.getMinBufferSize: " + minBufferSize); - - // Use a larger buffer size than the minimum required when creating the - // AudioRecord instance to ensure smooth recording under load. It has been - // verified that it does not increase the actual recording latency. - int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity()); - Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes); - try { - if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { - // Use the AudioRecord.Builder class on Android M (23) and above. - // Throws IllegalArgumentException. 
- audioRecord = createAudioRecordOnMOrHigher( - audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); - audioSourceMatchesRecordingSessionRef.set(null); - if (preferredDevice != null) { - setPreferredDevice(preferredDevice); - } - } else { - // Use the old AudioRecord constructor for API levels below 23. - // Throws UnsupportedOperationException. - audioRecord = createAudioRecordOnLowerThanM( - audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); - audioSourceMatchesRecordingSessionRef.set(null); + if(useAudioRecord) { + boolean result = initAudioRecord(); + if (!result) { + return -1; } - } catch (IllegalArgumentException | UnsupportedOperationException e) { - // Report of exception message is sufficient. Example: "Cannot create AudioRecord". - reportWebRtcAudioRecordInitError(e.getMessage()); - releaseAudioResources(); - return -1; - } - if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { - reportWebRtcAudioRecordInitError("Creation or initialization of audio recorder failed."); - releaseAudioResources(); - return -1; } - effects.enable(audioRecord.getAudioSessionId()); - logMainParameters(); - logMainParametersExtended(); + // Check number of active recording sessions. Should be zero but we have seen conflict cases // and adding a log for it can help us figure out details about conflicting sessions. 
final int numActiveRecordingSessions = @@ -354,6 +392,70 @@ private int initRecording(int sampleRate, int channels) { return framesPerBuffer; } + private boolean initAudioRecord() { + if (sampleRate == 0 || channelCount == 0) { + Logging.w(TAG, "initAudioRecord called before initRecord!"); + return false; + } + + synchronized (audioRecordStateLock) { + if (audioRecord != null) { + reportWebRtcAudioRecordInitError("InitAudioRecord called twice without StopRecording."); + return false; + } + // Get the minimum buffer size required for the successful creation of + // an AudioRecord object, in byte units. + // Note that this size doesn't guarantee a smooth recording under load. + final int channelConfig = channelCountToConfiguration(channelCount); + int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat); + if (minBufferSize == AudioRecord.ERROR || minBufferSize == AudioRecord.ERROR_BAD_VALUE) { + reportWebRtcAudioRecordInitError("AudioRecord.getMinBufferSize failed: " + minBufferSize); + return false; + } + Logging.d(TAG, "AudioRecord.getMinBufferSize: " + minBufferSize); + + // Use a larger buffer size than the minimum required when creating the + // AudioRecord instance to ensure smooth recording under load. It has been + // verified that it does not increase the actual recording latency. + int bufferSizeInBytes = Math.max(BUFFER_SIZE_FACTOR * minBufferSize, byteBuffer.capacity()); + Logging.d(TAG, "bufferSizeInBytes: " + bufferSizeInBytes); + + try { + if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { + // Use the AudioRecord.Builder class on Android M (23) and above. + // Throws IllegalArgumentException. + audioRecord = createAudioRecordOnMOrHigher( + audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); + if (preferredDevice != null) { + setPreferredDevice(preferredDevice); + } + } else { + // Use the old AudioRecord constructor for API levels below 23. 
+ // Throws UnsupportedOperationException. + audioRecord = createAudioRecordOnLowerThanM( + audioSource, sampleRate, channelConfig, audioFormat, bufferSizeInBytes); + audioSourceMatchesRecordingSessionRef.set(null); + } + } catch (IllegalArgumentException | UnsupportedOperationException e) { + // Report of exception message is sufficient. Example: "Cannot create AudioRecord". + reportWebRtcAudioRecordInitError(e.getMessage()); + releaseAudioResources(); + return false; + } + if (audioRecord == null || audioRecord.getState() != AudioRecord.STATE_INITIALIZED) { + reportWebRtcAudioRecordInitError("Creation or initialization of audio recorder failed."); + releaseAudioResources(); + return false; + } + + effects.enable(audioRecord.getAudioSessionId()); + + logMainParameters(); + logMainParametersExtended(); + } + return true; + } /** * Prefer a specific {@link AudioDeviceInfo} device for recording. Calling after recording starts * is valid but may cause a temporary interruption if the audio routing changes. 
@@ -374,20 +476,24 @@ void setPreferredDevice(@Nullable AudioDeviceInfo preferredDevice) { @CalledByNative private boolean startRecording() { Logging.d(TAG, "startRecording"); - assertTrue(audioRecord != null); assertTrue(audioThread == null); - try { - audioRecord.startRecording(); - } catch (IllegalStateException e) { - reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, - "AudioRecord.startRecording failed: " + e.getMessage()); - return false; - } - if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { - reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, - "AudioRecord.startRecording failed - incorrect state: " - + audioRecord.getRecordingState()); - return false; + synchronized (audioRecordStateLock) { + if (useAudioRecord) { + assertTrue(audioRecord != null); + try { + audioRecord.startRecording(); + } catch (IllegalStateException e) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, + "AudioRecord.startRecording failed: " + e.getMessage()); + return false; + } + if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, + "AudioRecord.startRecording failed - incorrect state: " + + audioRecord.getRecordingState()); + return false; + } + } } audioThread = new AudioRecordThread("AudioRecordJavaThread"); audioThread.start(); @@ -412,7 +518,6 @@ private boolean stopRecording() { WebRtcAudioUtils.logAudioState(TAG, context, audioManager); } audioThread = null; - effects.release(); releaseAudioResources(); return true; } @@ -439,20 +544,28 @@ private static AudioRecord createAudioRecordOnLowerThanM( } private void logMainParameters() { - Logging.d(TAG, - "AudioRecord: " - + "session ID: " + audioRecord.getAudioSessionId() + ", " - + "channels: " + audioRecord.getChannelCount() + ", " - + "sample rate: " + 
audioRecord.getSampleRate()); + synchronized(audioRecordStateLock) { + if(audioRecord != null) { + Logging.d(TAG, + "AudioRecord: " + + "session ID: " + audioRecord.getAudioSessionId() + ", " + + "channels: " + audioRecord.getChannelCount() + ", " + + "sample rate: " + audioRecord.getSampleRate()); + } + } } @TargetApi(Build.VERSION_CODES.M) private void logMainParametersExtended() { if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.M) { - Logging.d(TAG, - "AudioRecord: " - // The frame count of the native AudioRecord buffer. - + "buffer size in frames: " + audioRecord.getBufferSizeInFrames()); + synchronized(audioRecordStateLock) { + if(audioRecord != null) { + Logging.d(TAG, + "AudioRecord: " + // The frame count of the native AudioRecord buffer. + + "buffer size in frames: " + audioRecord.getBufferSizeInFrames()); + } + } } } @@ -526,9 +639,12 @@ public boolean setNoiseSuppressorEnabled(boolean enabled) { // Releases the native AudioRecord resources. private void releaseAudioResources() { Logging.d(TAG, "releaseAudioResources"); - if (audioRecord != null) { - audioRecord.release(); - audioRecord = null; + synchronized (audioRecordStateLock) { + effects.release(); + if (audioRecord != null) { + audioRecord.release(); + audioRecord = null; + } } audioSourceMatchesRecordingSessionRef.set(null); } From b99fd2c270361aea2d458e61ac4a4cd2443bdbf6 Mon Sep 17 00:00:00 2001 From: Michael Sloan Date: Mon, 2 Dec 2024 18:59:22 -0700 Subject: [PATCH 24/49] Use `rtc::ToString` instead of `std::to_string` in `SocketAddress::PortAsString()` (#156) Justification for this change is that `std::to_string` should be avoided as it uses the user's locale and calls to it get serialized, which is bad for concurrency. My actual motivation for this is quite bizarre. Before this change, with Zed's use of the LiveKit Rust SDK, I was getting connection strings that were not valid utf-8, instead having a port of `3\u0000\u0000\u001c\u0000`. 
I have not figured out how that could happen or why this change fixes it. --- rtc_base/socket_address.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rtc_base/socket_address.cc b/rtc_base/socket_address.cc index 8601fc9040..4fc19d5831 100644 --- a/rtc_base/socket_address.cc +++ b/rtc_base/socket_address.cc @@ -162,7 +162,7 @@ std::string SocketAddress::HostAsSensitiveURIString() const { } std::string SocketAddress::PortAsString() const { - return std::to_string(port_); + return rtc::ToString(port_); } std::string SocketAddress::ToString() const { From f5243e3f6033ccd8e93916520f6a5cd5ecde5f81 Mon Sep 17 00:00:00 2001 From: davidliu Date: Thu, 16 Jan 2025 16:01:33 +0900 Subject: [PATCH 25/49] Fix deadlock when creating a frame cryptor on iOS (#157) Co-authored-by: cloudwebrtc --- sdk/objc/api/peerconnection/RTCFrameCryptor.mm | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm index 41893b086c..7ea3ef5c43 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm @@ -138,8 +138,8 @@ - (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory factory.signalingThread, [participantId stdString], mediaType, [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); - factory.workerThread->BlockingCall([self, nativeRtpSender] { - // Must be called on Worker thread + factory.signalingThread->BlockingCall([self, nativeRtpSender] { + // Must be called on signal thread nativeRtpSender->SetEncoderToPacketizerFrameTransformer(_frame_crypto_transformer); }); @@ -178,8 +178,8 @@ - (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory factory.signalingThread, [participantId stdString], mediaType, [self algorithmFromEnum:algorithm], keyProvider.nativeKeyProvider)); - factory.workerThread->BlockingCall([self, nativeRtpReceiver] { - // 
Must be called on Worker thread + factory.signalingThread->BlockingCall([self, nativeRtpReceiver] { + // Must be called on signal thread nativeRtpReceiver->SetDepacketizerToDecoderFrameTransformer(_frame_crypto_transformer); }); From 844bafa06d0b9088fd7fa4244832abf8e70a1d3d Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Fri, 24 Jan 2025 14:15:09 +0900 Subject: [PATCH 26/49] Expose apm config (#163) --- sdk/BUILD.gn | 2 + .../audio/RTCAudioProcessingConfig+Private.h | 2 + .../audio/RTCAudioProcessingConfig.h | 10 +++- .../audio/RTCAudioProcessingConfig.mm | 55 +++++++++++++++++-- .../audio/RTCAudioProcessingModule.h | 4 +- .../audio/RTCDefaultAudioProcessingModule.h | 2 +- .../audio/RTCDefaultAudioProcessingModule.mm | 7 ++- 7 files changed, 71 insertions(+), 11 deletions(-) diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index 33bf72df8e..57aeb69110 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -1482,6 +1482,7 @@ if (is_ios || is_mac) { "objc/components/audio/RTCAudioProcessingModule.h", "objc/components/audio/RTCDefaultAudioProcessingModule.h", "objc/components/audio/RTCAudioCustomProcessingDelegate.h", + "objc/components/audio/RTCAudioProcessingConfig.h", ] if (target_environment != "xrdevice" && target_environment != "xrsimulator") { @@ -1656,6 +1657,7 @@ if (is_ios || is_mac) { "objc/components/audio/RTCAudioProcessingModule.h", "objc/components/audio/RTCDefaultAudioProcessingModule.h", "objc/components/audio/RTCAudioCustomProcessingDelegate.h", + "objc/components/audio/RTCAudioProcessingConfig.h", ] if (!build_with_chromium) { sources += [ diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h b/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h index ed565ee0aa..3eacaa653d 100644 --- a/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig+Private.h @@ -22,6 +22,8 @@ NS_ASSUME_NONNULL_BEGIN @interface RTC_OBJC_TYPE 
(RTCAudioProcessingConfig)() +- (instancetype)initWithNativeAudioProcessingConfig:(webrtc::AudioProcessing::Config)config; + @property(nonatomic, readonly) webrtc::AudioProcessing::Config nativeAudioProcessingConfig; @end diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig.h b/sdk/objc/components/audio/RTCAudioProcessingConfig.h index 3c7dce45f1..7e4e666608 100644 --- a/sdk/objc/components/audio/RTCAudioProcessingConfig.h +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig.h @@ -22,8 +22,14 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCAudioProcessingConfig) : NSObject -@property(nonatomic, assign) BOOL echoCancellerEnabled; -@property(nonatomic, assign) BOOL echoCancellerMobileMode; +@property(nonatomic, assign) BOOL isEchoCancellationEnabled; +@property(nonatomic, assign) BOOL isEchoCancellationMobileMode; + +@property(nonatomic, assign) BOOL isNoiseSuppressionEnabled; +@property(nonatomic, assign) BOOL isHighpassFilterEnabled; + +@property(nonatomic, assign) BOOL isAutoGainControl1Enabled; +@property(nonatomic, assign) BOOL isAutoGainControl2Enabled; @end diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig.mm b/sdk/objc/components/audio/RTCAudioProcessingConfig.mm index ca40f16e17..d98a98204e 100644 --- a/sdk/objc/components/audio/RTCAudioProcessingConfig.mm +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig.mm @@ -24,26 +24,73 @@ @implementation RTC_OBJC_TYPE (RTCAudioProcessingConfig) { // config.echo_canceller.enabled -- (BOOL)echoCancellerEnabled { +- (BOOL)isEchoCancellationEnabled { return _config.echo_canceller.enabled; } -- (void)setEchoCancellerEnabled:(BOOL)value { +- (void)setIsEchoCancellationEnabled:(BOOL)value { _config.echo_canceller.enabled = value; } // config.echo_canceller.mobile_mode -- (BOOL)echoCancellerMobileMode { +- (BOOL)isEchoCancellationMobileMode { return _config.echo_canceller.mobile_mode; } -- (void)setEchoCancellerMobileMode:(BOOL)value { +- 
(void)setIsEchoCancellationMobileMode:(BOOL)value { _config.echo_canceller.mobile_mode = value; } +// config.noise_suppression.enabled + +- (BOOL)isNoiseSuppressionEnabled { + return _config.noise_suppression.enabled; +} + +- (void)setIsNoiseSuppressionEnabled:(BOOL)value { + _config.noise_suppression.enabled = value; +} + +// config.high_pass_filter.enabled + +- (BOOL)isHighpassFilterEnabled { + return _config.high_pass_filter.enabled; +} + +- (void)setIsHighpassFilterEnabled:(BOOL)value { + _config.high_pass_filter.enabled = value; +} + +// config.gain_controller1.enabled + +- (BOOL)isAutoGainControl1Enabled { + return _config.gain_controller1.enabled; +} + +- (void)setIsAutoGainControl1Enabled:(BOOL)value { + _config.gain_controller1.enabled = value; +} + +// config.gain_controller2.enabled + +- (BOOL)isAutoGainControl2Enabled { + return _config.gain_controller2.enabled; +} + +- (void)setIsAutoGainControl2Enabled:(BOOL)value { + _config.gain_controller2.enabled = value; +} + #pragma mark - Private +- (instancetype)initWithNativeAudioProcessingConfig:(webrtc::AudioProcessing::Config)config { + if (self = [super init]) { + _config = config; + } + return self; +} + - (webrtc::AudioProcessing::Config)nativeAudioProcessingConfig { return _config; } diff --git a/sdk/objc/components/audio/RTCAudioProcessingModule.h b/sdk/objc/components/audio/RTCAudioProcessingModule.h index 9b05c4800e..af8024ddcb 100644 --- a/sdk/objc/components/audio/RTCAudioProcessingModule.h +++ b/sdk/objc/components/audio/RTCAudioProcessingModule.h @@ -24,9 +24,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT @protocol RTC_OBJC_TYPE (RTCAudioProcessingModule) -- (void)applyConfig: (RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config; - -// TODO: Implement... 
+@property(nonatomic, assign) RTC_OBJC_TYPE(RTCAudioProcessingConfig) * config; @end diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h index 2047b3f797..81a1e7e11e 100644 --- a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.h @@ -32,7 +32,7 @@ RTC_OBJC_EXPORT renderPreProcessingDelegate: (nullable id)renderPreProcessingDelegate NS_SWIFT_NAME(init(config:capturePostProcessingDelegate:renderPreProcessingDelegate:)) NS_DESIGNATED_INITIALIZER; -- (void)applyConfig:(RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCAudioProcessingConfig) * config; // Dynamically update delegates at runtime diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm index 2f592cefa4..825fa67cd6 100644 --- a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm @@ -83,7 +83,12 @@ - (void)setRenderPreProcessingDelegate: #pragma mark - RTCAudioProcessingModule protocol -- (void)applyConfig:(RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config { +- (RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config { + webrtc::AudioProcessing::Config nativeConfig = _nativeAudioProcessingModule->GetConfig(); + return [[RTC_OBJC_TYPE(RTCAudioProcessingConfig) alloc] initWithNativeAudioProcessingConfig: nativeConfig]; +} + +- (void)setConfig:(RTC_OBJC_TYPE(RTCAudioProcessingConfig) *)config { _nativeAudioProcessingModule->ApplyConfig(config.nativeAudioProcessingConfig); } From c4b376ac375710de19c1ba20157dba9b065930b0 Mon Sep 17 00:00:00 2001 From: davidliu Date: Mon, 17 Mar 2025 22:54:11 +0900 Subject: [PATCH 27/49] Android audio prewarm (#164) --- .../webrtc/audio/JavaAudioDeviceModule.java | 17 +- .../org/webrtc/audio/WebRtcAudioRecord.java | 218 
++++++++++++++---- 2 files changed, 195 insertions(+), 40 deletions(-) diff --git a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java index 00b8ce8e14..1ca195b7b0 100644 --- a/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java +++ b/sdk/android/api/org/webrtc/audio/JavaAudioDeviceModule.java @@ -275,7 +275,7 @@ public JavaAudioDeviceModule createAudioDeviceModule() { final WebRtcAudioRecord audioInput = new WebRtcAudioRecord(context, executor, audioManager, audioSource, audioFormat, audioRecordErrorCallback, audioRecordStateCallback, samplesReadyCallback, audioBufferCallback, useHardwareAcousticEchoCanceler, - useHardwareNoiseSuppressor); + useHardwareNoiseSuppressor, inputSampleRate, useStereoInput ? 2 : 1); final WebRtcAudioTrack audioOutput = new WebRtcAudioTrack(context, audioManager, audioAttributes, audioTrackErrorCallback, audioTrackStateCallback, playbackSamplesReadyCallback, useLowLatency, enableVolumeLogger); @@ -457,6 +457,21 @@ public void setAudioRecordEnabled(boolean enable) { audioInput.setUseAudioRecord(enable); } + public void prewarmRecording(){ + audioInput.initRecordingIfNeeded(); + audioInput.prewarmRecordingIfNeeded(); + } + + public void requestStartRecording() { + audioInput.initRecordingIfNeeded(); + audioInput.startRecordingIfNeeded(); + } + + public void requestStopRecording() { + audioInput.initRecordingIfNeeded(); + audioInput.stopRecordingIfNeeded(); + } + @Override public boolean setNoiseSuppressorEnabled(boolean enabled) { Logging.d(TAG, "setNoiseSuppressorEnabled: " + enabled); diff --git a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java index 7ae525ee06..d05c2d0c10 100644 --- a/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java +++ b/sdk/android/src/java/org/webrtc/audio/WebRtcAudioRecord.java @@ -34,6 +34,7 @@ import java.util.concurrent.ScheduledFuture; import 
java.util.concurrent.ThreadFactory; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; import org.webrtc.CalledByNative; @@ -87,6 +88,9 @@ class WebRtcAudioRecord { private int channelCount; private int sampleRate; + private int expectedChannelCount; + private int expectedSampleRate; + private long nativeAudioRecord; private final WebRtcAudioEffects effects = new WebRtcAudioEffects(); @@ -97,6 +101,7 @@ class WebRtcAudioRecord { private boolean useAudioRecord = true; private volatile @Nullable AudioRecord audioRecord; + private final Object audioThreadStateLock = new Object(); private @Nullable AudioRecordThread audioThread; private @Nullable AudioDeviceInfo preferredDevice; @@ -147,9 +152,10 @@ public void run() { while (keepAlive) { long captureTimeNs = 0; AudioRecord audioRecord; - + boolean shouldReportData; synchronized (audioRecordStateLock) { audioRecord = WebRtcAudioRecord.this.audioRecord; + shouldReportData = nativeCalledInitRecording.get(); } if (audioRecord == null && useAudioRecord) { @@ -229,7 +235,7 @@ public void run() { // It's possible we've been shut down during the read, and stopRecording() tried and // failed to join this thread. To be a bit safer, try to avoid calling any native methods // in case they've been unregistered after stopRecording() returned. 
- if (keepAlive) { + if (keepAlive && shouldReportData) { nativeDataIsRecorded(nativeAudioRecord, bytesRead, captureTimeNs); } if (audioSamplesReadyCallback != null) { @@ -266,7 +272,10 @@ public void stopThread() { DEFAULT_AUDIO_FORMAT, null /* errorCallback */, null /* stateCallback */, null /* audioSamplesReadyCallback */, null /* audioBufferCallback */, WebRtcAudioEffects.isAcousticEchoCancelerSupported(), - WebRtcAudioEffects.isNoiseSuppressorSupported()); + WebRtcAudioEffects.isNoiseSuppressorSupported(), + WebRtcAudioManager.getSampleRate(audioManager) /* expectedSampleRate */, + 1 /* expectedChannelCount */ + ); } public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, @@ -275,7 +284,9 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, @Nullable AudioRecordStateCallback stateCallback, @Nullable SamplesReadyCallback audioSamplesReadyCallback, @Nullable AudioBufferCallback audioBufferCallback, - boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported) { + boolean isAcousticEchoCancelerSupported, boolean isNoiseSuppressorSupported, + int expectedSampleRate, int expectedChannelCount + ) { if (isAcousticEchoCancelerSupported && !WebRtcAudioEffects.isAcousticEchoCancelerSupported()) { throw new IllegalArgumentException("HW AEC not supported"); } @@ -293,6 +304,8 @@ public WebRtcAudioRecord(Context context, ScheduledExecutorService scheduler, this.audioBufferCallback = audioBufferCallback; this.isAcousticEchoCancelerSupported = isAcousticEchoCancelerSupported; this.isNoiseSuppressorSupported = isNoiseSuppressorSupported; + this.expectedSampleRate = expectedSampleRate; + this.expectedChannelCount = expectedChannelCount; Logging.d(TAG, "ctor" + WebRtcAudioUtils.getThreadInfo()); } @@ -349,17 +362,55 @@ public void setUseAudioRecord(boolean enable) { this.useAudioRecord = enable; } + /** + * Allows clients to init recording manually. + * + * @return true if recording was initialized correctly. 
+ */ + public boolean initRecordingIfNeeded() { + synchronized (audioRecordStateLock) { + if (audioRecord == null){ + return initRecordingImpl(expectedSampleRate, expectedChannelCount, false) >= 0; + } + } + return true; + } + @CalledByNative private int initRecording(int sampleRate, int channels) { Logging.d(TAG, "initRecording(sampleRate=" + sampleRate + ", channels=" + channels + ")"); + + synchronized (audioRecordStateLock) { + if (!nativeCalledInitRecording.compareAndSet(false, true)) { + reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording."); + return -1; + } + + if (audioRecord == null){ + return initRecordingImpl(sampleRate, channels, true); + } + + // initRecording was already called previously by client. + // Handle required steps for native libwebrtc. + final int framesPerBuffer = getFramesPerBuffer(sampleRate); + if (byteBuffer == null) { + throw new IllegalStateException("initRecording: byteBuffer is null!"); + } + nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer); + return framesPerBuffer; + } + } + + private int initRecordingImpl(int sampleRate, int channels, boolean nativeCall) { + Logging.d(TAG, "initRecordingImpl(sampleRate=" + sampleRate + ", channels=" + channels + ")"); if (audioRecord != null) { reportWebRtcAudioRecordInitError("InitRecording called twice without StopRecording."); return -1; } this.sampleRate = sampleRate; this.channelCount = channels; - final int bytesPerFrame = channels * getBytesPerSample(audioFormat); - final int framesPerBuffer = sampleRate / BUFFERS_PER_SECOND; + final int bytesPerFrame = getBytesPerFrame(channels, this.audioFormat); + final int framesPerBuffer = getFramesPerBuffer(sampleRate); byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * framesPerBuffer); if (!(byteBuffer.hasArray())) { reportWebRtcAudioRecordInitError("ByteBuffer does not have backing array."); @@ -370,7 +421,10 @@ private int initRecording(int sampleRate, int channels) { // Rather than passing the 
ByteBuffer with every callback (requiring // the potentially expensive GetDirectBufferAddress) we simply have the // the native class cache the address to the memory once. - nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer); + // Caching can only be done on the native thread. + if (nativeCall) { + nativeCacheDirectBufferAddress(nativeAudioRecord, byteBuffer); + } if(useAudioRecord) { boolean result = initAudioRecord(); @@ -473,53 +527,131 @@ void setPreferredDevice(@Nullable AudioDeviceInfo preferredDevice) { } } + public boolean prewarmRecordingIfNeeded() { + if(audioThread == null) { + synchronized(audioRecordStateLock) { + synchronized (audioThreadStateLock) { + if (audioThread == null) { + return startRecordingImpl(); + } + } + } + } + return true; + } + + public boolean startRecordingIfNeeded() { + clientCalledStartRecording.set(true); + if(audioThread == null) { + synchronized(audioRecordStateLock) { + synchronized (audioThreadStateLock) { + if (audioThread == null) { + return startRecordingImpl(); + } + } + } + } + return true; + } + @CalledByNative private boolean startRecording() { + if (!nativeCalledStartRecording.compareAndSet(false, true)) { + throw new IllegalStateException("startRecording called twice without stopRecording"); + } + if (audioThread == null) { + synchronized(audioRecordStateLock) { + synchronized (audioThreadStateLock) { + if (audioThread == null) { + return startRecordingImpl(); + } + } + } + } + return true; + } + + private boolean startRecordingImpl() { Logging.d(TAG, "startRecording"); - assertTrue(audioThread == null); synchronized (audioRecordStateLock) { - if (useAudioRecord) { - assertTrue(audioRecord != null); - try { - audioRecord.startRecording(); - } catch (IllegalStateException e) { - reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, - "AudioRecord.startRecording failed: " + e.getMessage()); - return false; - } - if (audioRecord.getRecordingState() != 
AudioRecord.RECORDSTATE_RECORDING) { - reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, - "AudioRecord.startRecording failed - incorrect state: " - + audioRecord.getRecordingState()); - return false; + synchronized (audioThreadStateLock) { + assertTrue(audioThread == null); + // Disabling useAudioRecord allows for "recordingless" recording, + // where we emit audio buffers to be mixed in by client. + if (useAudioRecord) { + assertTrue(audioRecord != null); + try { + audioRecord.startRecording(); + } catch (IllegalStateException e) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_EXCEPTION, + "AudioRecord.startRecording failed: " + e.getMessage()); + return false; + } + if (audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) { + reportWebRtcAudioRecordStartError(AudioRecordStartErrorCode.AUDIO_RECORD_START_STATE_MISMATCH, + "AudioRecord.startRecording failed - incorrect state: " + + audioRecord.getRecordingState()); + return false; + } } + audioThread = new AudioRecordThread("AudioRecordJavaThread"); + audioThread.start(); + scheduleLogRecordingConfigurationsTask(audioRecord); + return true; + } + } + } + + private AtomicBoolean clientCalledStartRecording = new AtomicBoolean(false); + private AtomicBoolean nativeCalledInitRecording = new AtomicBoolean(false); + private AtomicBoolean nativeCalledStartRecording = new AtomicBoolean(false); + + public boolean stopRecordingIfNeeded() { + Logging.d(TAG, "stopRecordingIfNeeded"); + synchronized(audioRecordStateLock) { + clientCalledStartRecording.set(false); + if(audioThread != null) { + return stopRecordingIfNeededImpl(); } } - audioThread = new AudioRecordThread("AudioRecordJavaThread"); - audioThread.start(); - scheduleLogRecordingConfigurationsTask(audioRecord); return true; } @CalledByNative private boolean stopRecording() { Logging.d(TAG, "stopRecording"); - assertTrue(audioThread != null); - if (future != null) { - if 
(!future.isDone()) { - // Might be needed if the client calls startRecording(), stopRecording() back-to-back. - future.cancel(true /* mayInterruptIfRunning */); - } - future = null; + synchronized(audioRecordStateLock) { + nativeCalledStartRecording.set(false); + nativeCalledInitRecording.set(false); + return stopRecordingIfNeededImpl(); } - audioThread.stopThread(); - if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { - Logging.e(TAG, "Join of AudioRecordJavaThread timed out"); - WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + } + + private boolean stopRecordingIfNeededImpl() { + synchronized(audioRecordStateLock) { + if(clientCalledStartRecording.get() || nativeCalledStartRecording.get()) { + // Someone has still requested recording, ignore stop request. + return true; + } + + Logging.d(TAG, "stopping recording"); + assertTrue(audioThread != null); + if (future != null) { + if (!future.isDone()) { + // Might be needed if the client calls startRecording(), stopRecording() back-to-back. 
+ future.cancel(true /* mayInterruptIfRunning */); + } + future = null; + } + audioThread.stopThread(); + if (!ThreadUtils.joinUninterruptibly(audioThread, AUDIO_RECORD_THREAD_JOIN_TIMEOUT_MS)) { + Logging.e(TAG, "Join of AudioRecordJavaThread timed out"); + WebRtcAudioUtils.logAudioState(TAG, context, audioManager); + } + audioThread = null; + releaseAudioResources(); + return true; } - audioThread = null; - releaseAudioResources(); - return true; } @TargetApi(Build.VERSION_CODES.M) @@ -708,6 +840,14 @@ private static int getBytesPerSample(int audioFormat) { } } + private static int getBytesPerFrame(int channels, int audioFormat) { + return channels * getBytesPerSample(audioFormat); + } + + private static int getFramesPerBuffer(int sampleRate) { + return sampleRate / BUFFERS_PER_SECOND; + } + // Use an ExecutorService to schedule a task after a given delay where the task consists of // checking (by logging) the current status of active recording sessions. private void scheduleLogRecordingConfigurationsTask(AudioRecord audioRecord) { From 1d5d3b80ec10f9102ac55a2a020248159bb48557 Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Wed, 19 Mar 2025 10:43:18 +0900 Subject: [PATCH 28/49] Metal renderer scale patch (#165) --- .../renderer/metal/RTCMTLVideoView.m | 23 ++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m index d4d98a0bf4..8c08587dc2 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m @@ -129,13 +129,25 @@ - (void)setMultipleTouchEnabled:(BOOL)multipleTouchEnabled { } #endif +- (CGFloat)currentScaleFactor { + CGFloat scale = 1.0; +#if TARGET_OS_IPHONE + scale = [UIScreen mainScreen].scale; +#elif TARGET_OS_OSX + scale = [NSScreen mainScreen].backingScaleFactor; +#endif + return MAX(scale, 1.0); +} + - 
(void)performLayout { CGRect bounds = self.bounds; self.metalView.frame = bounds; if (!CGSizeEqualToSize(self.videoFrameSize, CGSizeZero)) { self.metalView.drawableSize = [self drawableSize]; } else { - self.metalView.drawableSize = bounds.size; + // Apply scale factor for default size as well (when videoFrameSize is zero) + CGFloat scale = [self currentScaleFactor]; + self.metalView.drawableSize = CGSizeMake(bounds.size.width * scale, bounds.size.height * scale); } } @@ -233,11 +245,16 @@ - (CGSize)drawableSize { BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) || (self.videoFrame.rotation == RTCVideoRotation_180); + CGSize size; if (useLandscape == sizeIsLandscape) { - return videoFrameSize; + size = videoFrameSize; } else { - return CGSizeMake(videoFrameSize.height, videoFrameSize.width); + size = CGSizeMake(videoFrameSize.height, videoFrameSize.width); } + + // Apply scale factor for retina displays + CGFloat scale = [self currentScaleFactor]; + return CGSizeMake(size.width * scale, size.height * scale); } #pragma mark - RTC_OBJC_TYPE(RTCVideoRenderer) From 7ec4c03bff7f7ce117dc9100f081d031d946d995 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Mon, 24 Mar 2025 19:34:23 +0800 Subject: [PATCH 29/49] fix typo in framecryptor. (#167) --- api/crypto/frame_crypto_transformer.cc | 46 +++++++++++++------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/api/crypto/frame_crypto_transformer.cc b/api/crypto/frame_crypto_transformer.cc index 5f6ea55a60..722549563e 100644 --- a/api/crypto/frame_crypto_transformer.cc +++ b/api/crypto/frame_crypto_transformer.cc @@ -141,13 +141,13 @@ uint8_t get_unencrypted_bytes(webrtc::TransformableFrameInterface* frame, unencrypted_bytes = videoFrame->IsKeyFrame() ? 
10 : 3; } else if (videoFrame->header().codec == webrtc::VideoCodecType::kVideoCodecH264) { - rtc::ArrayView date_in = frame->GetData(); + rtc::ArrayView data_in = frame->GetData(); std::vector nalu_indices = - webrtc::H264::FindNaluIndices(date_in.data(), date_in.size()); + webrtc::H264::FindNaluIndices(data_in.data(), data_in.size()); int idx = 0; for (const auto& index : nalu_indices) { - const uint8_t* slice = date_in.data() + index.payload_start_offset; + const uint8_t* slice = data_in.data() + index.payload_start_offset; webrtc::H264::NaluType nalu_type = webrtc::H264::ParseNaluType(slice[0]); switch (nalu_type) { @@ -350,10 +350,10 @@ void FrameCryptorTransformer::encryptFrame( return; } - rtc::ArrayView date_in = frame->GetData(); - if (date_in.size() == 0 || !enabled_cryption) { + rtc::ArrayView data_in = frame->GetData(); + if (data_in.size() == 0 || !enabled_cryption) { RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::encryptFrame() " - "date_in.size() == 0 || enabled_cryption == false"; + "data_in.size() == 0 || enabled_cryption == false"; if(key_provider_->options().discard_frame_when_cryptor_not_ready) { return; } @@ -382,7 +382,7 @@ void FrameCryptorTransformer::encryptFrame( rtc::Buffer frame_header(unencrypted_bytes); for (size_t i = 0; i < unencrypted_bytes; i++) { - frame_header[i] = date_in[i]; + frame_header[i] = data_in[i]; } rtc::Buffer frame_trailer(2); @@ -390,9 +390,9 @@ void FrameCryptorTransformer::encryptFrame( frame_trailer[1] = key_index_; rtc::Buffer iv = makeIv(frame->GetSsrc(), frame->GetTimestamp()); - rtc::Buffer payload(date_in.size() - unencrypted_bytes); - for (size_t i = unencrypted_bytes; i < date_in.size(); i++) { - payload[i - unencrypted_bytes] = date_in[i]; + rtc::Buffer payload(data_in.size() - unencrypted_bytes); + for (size_t i = unencrypted_bytes; i < data_in.size(); i++) { + payload[i - unencrypted_bytes] = data_in[i]; } std::vector buffer; @@ -460,11 +460,11 @@ void FrameCryptorTransformer::decryptFrame( return; } 
- rtc::ArrayView date_in = frame->GetData(); + rtc::ArrayView data_in = frame->GetData(); - if (date_in.size() == 0 || !enabled_cryption) { + if (data_in.size() == 0 || !enabled_cryption) { RTC_LOG(LS_WARNING) << "FrameCryptorTransformer::decryptFrame() " - "date_in.size() == 0 || enabled_cryption == false"; + "data_in.size() == 0 || enabled_cryption == false"; if(key_provider_->options().discard_frame_when_cryptor_not_ready) { return; } @@ -475,8 +475,8 @@ void FrameCryptorTransformer::decryptFrame( auto uncrypted_magic_bytes = key_provider_->options().uncrypted_magic_bytes; if (uncrypted_magic_bytes.size() > 0 && - date_in.size() >= uncrypted_magic_bytes.size()) { - auto tmp = date_in.subview(date_in.size() - (uncrypted_magic_bytes.size()), + data_in.size() >= uncrypted_magic_bytes.size()) { + auto tmp = data_in.subview(data_in.size() - (uncrypted_magic_bytes.size()), uncrypted_magic_bytes.size()); auto data = std::vector(tmp.begin(), tmp.end()); if (uncrypted_magic_bytes == data) { @@ -491,7 +491,7 @@ void FrameCryptorTransformer::decryptFrame( // decryption. 
rtc::Buffer data_out; data_out.AppendData( - date_in.subview(0, date_in.size() - uncrypted_magic_bytes.size())); + data_in.subview(0, data_in.size() - uncrypted_magic_bytes.size())); frame->SetData(data_out); sink_callback->OnTransformedFrame(std::move(frame)); return; @@ -502,12 +502,12 @@ void FrameCryptorTransformer::decryptFrame( rtc::Buffer frame_header(unencrypted_bytes); for (size_t i = 0; i < unencrypted_bytes; i++) { - frame_header[i] = date_in[i]; + frame_header[i] = data_in[i]; } rtc::Buffer frame_trailer(2); - frame_trailer[0] = date_in[date_in.size() - 2]; - frame_trailer[1] = date_in[date_in.size() - 1]; + frame_trailer[0] = data_in[data_in.size() - 2]; + frame_trailer[1] = data_in[data_in.size() - 1]; uint8_t ivLength = frame_trailer[0]; uint8_t key_index = frame_trailer[1]; @@ -549,12 +549,12 @@ void FrameCryptorTransformer::decryptFrame( rtc::Buffer iv = rtc::Buffer(ivLength); for (size_t i = 0; i < ivLength; i++) { - iv[i] = date_in[date_in.size() - 2 - ivLength + i]; + iv[i] = data_in[data_in.size() - 2 - ivLength + i]; } - rtc::Buffer encrypted_buffer(date_in.size() - unencrypted_bytes); - for (size_t i = unencrypted_bytes; i < date_in.size(); i++) { - encrypted_buffer[i - unencrypted_bytes] = date_in[i]; + rtc::Buffer encrypted_buffer(data_in.size() - unencrypted_bytes); + for (size_t i = unencrypted_bytes; i < data_in.size(); i++) { + encrypted_buffer[i - unencrypted_bytes] = data_in[i]; } if (FrameIsH264(frame.get(), type_) && From 96cfb37aa1626324caa9b3c66d8bc8e91a5e687b Mon Sep 17 00:00:00 2001 From: Hiroshi Horie <548776+hiroshihorie@users.noreply.github.com> Date: Fri, 30 May 2025 17:02:17 +0900 Subject: [PATCH 30/49] Prefix enums with `RTC_OBJC_TYPE` macro (#171) Continued from https://github.com/webrtc-sdk/webrtc/pull/100 also prefix enums. I think this shouldn't break compiling. 
--- sdk/objc/api/logging/RTCCallbackLogger.h | 4 +- sdk/objc/api/logging/RTCCallbackLogger.mm | 26 +-- .../peerconnection/RTCAudioSource+Private.h | 2 +- sdk/objc/api/peerconnection/RTCAudioSource.mm | 4 +- sdk/objc/api/peerconnection/RTCAudioTrack.mm | 6 +- .../peerconnection/RTCConfiguration+Private.h | 38 +-- .../api/peerconnection/RTCConfiguration.h | 69 +++--- .../api/peerconnection/RTCConfiguration.mm | 158 +++++++------ .../peerconnection/RTCDataChannel+Private.h | 10 +- sdk/objc/api/peerconnection/RTCDataChannel.h | 12 +- sdk/objc/api/peerconnection/RTCDataChannel.mm | 32 +-- .../peerconnection/RTCEncodedImage+Private.mm | 10 +- sdk/objc/api/peerconnection/RTCFieldTrials.h | 16 +- sdk/objc/api/peerconnection/RTCFieldTrials.mm | 16 +- sdk/objc/api/peerconnection/RTCFileLogger.h | 22 +- sdk/objc/api/peerconnection/RTCFileLogger.mm | 28 +-- sdk/objc/api/peerconnection/RTCFrameCryptor.h | 26 +-- .../api/peerconnection/RTCFrameCryptor.mm | 22 +- .../api/peerconnection/RTCIODevice+Private.h | 2 +- sdk/objc/api/peerconnection/RTCIODevice.h | 10 +- sdk/objc/api/peerconnection/RTCIODevice.mm | 4 +- sdk/objc/api/peerconnection/RTCIceServer.h | 16 +- sdk/objc/api/peerconnection/RTCIceServer.mm | 28 +-- .../api/peerconnection/RTCMediaConstraints.h | 14 +- .../api/peerconnection/RTCMediaConstraints.mm | 16 +- .../peerconnection/RTCMediaSource+Private.h | 14 +- sdk/objc/api/peerconnection/RTCMediaSource.h | 12 +- sdk/objc/api/peerconnection/RTCMediaSource.mm | 36 +-- sdk/objc/api/peerconnection/RTCMediaStream.mm | 4 +- .../RTCMediaStreamTrack+Private.h | 14 +- .../api/peerconnection/RTCMediaStreamTrack.h | 12 +- .../api/peerconnection/RTCMediaStreamTrack.mm | 38 +-- .../RTCPeerConnection+Private.h | 26 +-- .../peerconnection/RTCPeerConnection+Stats.mm | 2 +- .../api/peerconnection/RTCPeerConnection.h | 90 ++++---- .../api/peerconnection/RTCPeerConnection.mm | 218 +++++++++--------- .../peerconnection/RTCPeerConnectionFactory.h | 2 +- .../RTCPeerConnectionFactory.mm | 8 
+- .../peerconnection/RTCRtpCodecCapability.mm | 8 +- .../peerconnection/RTCRtpCodecParameters.h | 34 +-- .../peerconnection/RTCRtpCodecParameters.mm | 40 ++-- .../peerconnection/RTCRtpEncodingParameters.h | 12 +- .../RTCRtpEncodingParameters.mm | 20 +- .../api/peerconnection/RTCRtpParameters.h | 10 +- .../api/peerconnection/RTCRtpParameters.mm | 20 +- .../peerconnection/RTCRtpReceiver+Private.h | 6 +- sdk/objc/api/peerconnection/RTCRtpReceiver.h | 12 +- sdk/objc/api/peerconnection/RTCRtpReceiver.mm | 32 +-- .../RTCRtpTransceiver+Private.h | 4 +- .../api/peerconnection/RTCRtpTransceiver.h | 24 +- .../api/peerconnection/RTCRtpTransceiver.mm | 38 +-- sdk/objc/api/peerconnection/RTCSSLAdapter.h | 4 +- sdk/objc/api/peerconnection/RTCSSLAdapter.mm | 4 +- .../RTCSessionDescription+Private.h | 4 +- .../peerconnection/RTCSessionDescription.h | 18 +- .../peerconnection/RTCSessionDescription.mm | 30 +-- sdk/objc/api/peerconnection/RTCTracing.h | 8 +- sdk/objc/api/peerconnection/RTCTracing.mm | 8 +- .../RTCVideoEncoderSettings+Private.mm | 2 +- .../peerconnection/RTCVideoSource+Private.h | 2 +- sdk/objc/api/peerconnection/RTCVideoSource.mm | 4 +- sdk/objc/api/peerconnection/RTCVideoTrack.mm | 6 +- .../api/video_codec/RTCVideoCodecConstants.h | 6 +- .../api/video_codec/RTCVideoCodecConstants.mm | 6 +- sdk/objc/base/RTCEncodedImage.h | 24 +- sdk/objc/base/RTCLogging.h | 38 +-- sdk/objc/base/RTCLogging.mm | 22 +- sdk/objc/base/RTCMacros.h | 6 + sdk/objc/base/RTCVideoEncoderSettings.h | 8 +- sdk/objc/base/RTCVideoFrame.h | 18 +- sdk/objc/base/RTCVideoFrame.mm | 10 +- sdk/objc/components/audio/RTCAudioSession.h | 6 +- sdk/objc/components/audio/RTCAudioSession.mm | 22 +- .../audio/RTCAudioSessionConfiguration.h | 6 +- .../audio/RTCAudioSessionConfiguration.m | 14 +- .../capturer/RTCCameraVideoCapturer.m | 40 ++-- .../capturer/RTCDesktopCapturer+Private.h | 3 +- .../components/capturer/RTCDesktopCapturer.h | 3 +- .../components/capturer/RTCDesktopCapturer.mm | 2 +- 
.../components/capturer/RTCDesktopMediaList.h | 4 +- .../capturer/RTCDesktopMediaList.mm | 6 +- .../capturer/RTCDesktopSource+Private.h | 2 +- .../components/capturer/RTCDesktopSource.h | 8 +- .../components/capturer/RTCDesktopSource.mm | 4 +- .../capturer/RTCFileVideoCapturer.m | 34 +-- .../components/network/RTCNetworkMonitor.mm | 2 +- .../renderer/metal/RTCMTLRenderer.mm | 14 +- .../renderer/metal/RTCMTLVideoView.m | 12 +- .../components/renderer/opengl/RTCShader.h | 12 +- .../components/renderer/opengl/RTCShader.mm | 22 +- .../renderer/opengl/RTCVideoViewShading.h | 4 +- .../video_codec/RTCCodecSpecificInfoH264.h | 8 +- .../RTCDefaultVideoDecoderFactory.m | 22 +- .../RTCDefaultVideoEncoderFactory.m | 22 +- .../video_codec/RTCH264ProfileLevelId.h | 64 ++--- .../video_codec/RTCH264ProfileLevelId.mm | 24 +- .../video_codec/RTCVideoDecoderFactoryH264.m | 6 +- .../video_codec/RTCVideoDecoderH264.mm | 2 +- .../video_codec/RTCVideoEncoderFactoryH264.m | 6 +- .../video_codec/RTCVideoEncoderH264.mm | 52 ++--- sdk/objc/helpers/RTCCameraPreviewView.m | 6 +- sdk/objc/helpers/RTCDispatcher+Private.h | 2 +- sdk/objc/helpers/RTCDispatcher.h | 14 +- sdk/objc/helpers/RTCDispatcher.m | 14 +- sdk/objc/helpers/RTCYUVHelper.h | 2 +- sdk/objc/helpers/RTCYUVHelper.mm | 2 +- .../src/audio/voice_processing_audio_unit.mm | 4 +- sdk/objc/native/src/objc_audio_device.h | 4 +- sdk/objc/native/src/objc_audio_device.mm | 2 +- .../native/src/objc_audio_device_delegate.h | 2 +- .../native/src/objc_audio_device_delegate.mm | 2 +- sdk/objc/native/src/objc_desktop_capture.h | 8 +- sdk/objc/native/src/objc_desktop_capture.mm | 4 +- .../native/src/objc_video_encoder_factory.mm | 2 +- sdk/objc/native/src/objc_video_frame.mm | 2 +- sdk/objc/unittests/RTCAudioSessionTest.mm | 4 +- .../RTCPeerConnectionFactory_xctest.m | 16 +- 117 files changed, 1036 insertions(+), 1035 deletions(-) diff --git a/sdk/objc/api/logging/RTCCallbackLogger.h b/sdk/objc/api/logging/RTCCallbackLogger.h index 
1d178b6d49..81d02bae52 100644 --- a/sdk/objc/api/logging/RTCCallbackLogger.h +++ b/sdk/objc/api/logging/RTCCallbackLogger.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN typedef void (^RTCCallbackLoggerMessageHandler)(NSString *message); typedef void (^RTCCallbackLoggerMessageAndSeverityHandler)(NSString *message, - RTCLoggingSeverity severity); + RTC_OBJC_TYPE(RTCLoggingSeverity) severity); // This class intercepts WebRTC logs and forwards them to a registered block. // This class is not threadsafe. @@ -25,7 +25,7 @@ RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCCallbackLogger) : NSObject // The severity level to capture. The default is kRTCLoggingSeverityInfo. -@property(nonatomic, assign) RTCLoggingSeverity severity; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCLoggingSeverity) severity; // The callback handler will be called on the same thread that does the // logging, so if the logging callback can be slow it may be a good idea diff --git a/sdk/objc/api/logging/RTCCallbackLogger.mm b/sdk/objc/api/logging/RTCCallbackLogger.mm index ba6fe1b1cc..82560af93e 100644 --- a/sdk/objc/api/logging/RTCCallbackLogger.mm +++ b/sdk/objc/api/logging/RTCCallbackLogger.mm @@ -53,24 +53,24 @@ void OnLogMessage(const std::string& message, rtc::LoggingSeverity severity) ove void OnLogMessage(absl::string_view message, rtc::LoggingSeverity severity) override { if (callback_handler_) { - RTCLoggingSeverity loggingSeverity = NativeSeverityToObjcSeverity(severity); + RTC_OBJC_TYPE(RTCLoggingSeverity) loggingSeverity = NativeSeverityToObjcSeverity(severity); callback_handler_([NSString stringForAbslStringView:message], loggingSeverity); } } private: - static RTCLoggingSeverity NativeSeverityToObjcSeverity(rtc::LoggingSeverity severity) { + static RTC_OBJC_TYPE(RTCLoggingSeverity) NativeSeverityToObjcSeverity(rtc::LoggingSeverity severity) { switch (severity) { case rtc::LS_VERBOSE: - return RTCLoggingSeverityVerbose; + return RTC_OBJC_TYPE(RTCLoggingSeverityVerbose); case rtc::LS_INFO: - 
return RTCLoggingSeverityInfo; + return RTC_OBJC_TYPE(RTCLoggingSeverityInfo); case rtc::LS_WARNING: - return RTCLoggingSeverityWarning; + return RTC_OBJC_TYPE(RTCLoggingSeverityWarning); case rtc::LS_ERROR: - return RTCLoggingSeverityError; + return RTC_OBJC_TYPE(RTCLoggingSeverityError); case rtc::LS_NONE: - return RTCLoggingSeverityNone; + return RTC_OBJC_TYPE(RTCLoggingSeverityNone); } } @@ -89,7 +89,7 @@ @implementation RTC_OBJC_TYPE (RTCCallbackLogger) { - (instancetype)init { self = [super init]; if (self != nil) { - _severity = RTCLoggingSeverityInfo; + _severity = RTC_OBJC_TYPE(RTCLoggingSeverityInfo); } return self; } @@ -135,15 +135,15 @@ - (void)stop { - (rtc::LoggingSeverity)rtcSeverity { switch (_severity) { - case RTCLoggingSeverityVerbose: + case RTC_OBJC_TYPE(RTCLoggingSeverityVerbose): return rtc::LS_VERBOSE; - case RTCLoggingSeverityInfo: + case RTC_OBJC_TYPE(RTCLoggingSeverityInfo): return rtc::LS_INFO; - case RTCLoggingSeverityWarning: + case RTC_OBJC_TYPE(RTCLoggingSeverityWarning): return rtc::LS_WARNING; - case RTCLoggingSeverityError: + case RTC_OBJC_TYPE(RTCLoggingSeverityError): return rtc::LS_ERROR; - case RTCLoggingSeverityNone: + case RTC_OBJC_TYPE(RTCLoggingSeverityNone): return rtc::LS_NONE; } } diff --git a/sdk/objc/api/peerconnection/RTCAudioSource+Private.h b/sdk/objc/api/peerconnection/RTCAudioSource+Private.h index 2c333f9d73..33edca7c65 100644 --- a/sdk/objc/api/peerconnection/RTCAudioSource+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioSource+Private.h @@ -29,6 +29,6 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource - type:(RTCMediaSourceType)type NS_UNAVAILABLE; + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type NS_UNAVAILABLE; @end diff --git a/sdk/objc/api/peerconnection/RTCAudioSource.mm b/sdk/objc/api/peerconnection/RTCAudioSource.mm index 1541045099..44da0c928d 100644 --- a/sdk/objc/api/peerconnection/RTCAudioSource.mm +++ 
b/sdk/objc/api/peerconnection/RTCAudioSource.mm @@ -26,7 +26,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto if (self = [super initWithFactory:factory nativeMediaSource:nativeAudioSource - type:RTCMediaSourceTypeAudio]) { + type:RTC_OBJC_TYPE(RTCMediaSourceTypeAudio)]) { _nativeAudioSource = nativeAudioSource; } return self; @@ -34,7 +34,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource - type:(RTCMediaSourceType)type { + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type { RTC_DCHECK_NOTREACHED(); return nil; } diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index dcdf9d93e8..ce7641302a 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -39,7 +39,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto std::string nativeId = [NSString stdStringForString:trackId]; rtc::scoped_refptr track = factory.nativeFactory->CreateAudioTrack(nativeId, source.nativeAudioSource.get()); - if (self = [self initWithFactory:factory nativeTrack:track type:RTCMediaStreamTrackTypeAudio]) { + if (self = [self initWithFactory:factory nativeTrack:track type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio)]) { _source = source; } @@ -48,10 +48,10 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack - type:(RTCMediaStreamTrackType)type { + type:(RTC_OBJC_TYPE(RTCMediaStreamTrackType))type { NSParameterAssert(factory); NSParameterAssert(nativeTrack); - NSParameterAssert(type == RTCMediaStreamTrackTypeAudio); + NSParameterAssert(type == RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio)); if (self = 
[super initWithFactory:factory nativeTrack:nativeTrack type:type]) { _adapters = [NSMutableArray array]; _signalingThread = factory.signalingThread; diff --git a/sdk/objc/api/peerconnection/RTCConfiguration+Private.h b/sdk/objc/api/peerconnection/RTCConfiguration+Private.h index 6ad780acdc..68d735b715 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration+Private.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration+Private.h @@ -18,52 +18,52 @@ NS_ASSUME_NONNULL_BEGIN () + (webrtc::PeerConnectionInterface::IceTransportsType)nativeTransportsTypeForTransportPolicy - : (RTCIceTransportPolicy)policy; + : (RTC_OBJC_TYPE(RTCIceTransportPolicy))policy; -+ (RTCIceTransportPolicy)transportPolicyForTransportsType: ++ (RTC_OBJC_TYPE(RTCIceTransportPolicy))transportPolicyForTransportsType: (webrtc::PeerConnectionInterface::IceTransportsType)nativeType; -+ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy; ++ (NSString *)stringForTransportPolicy:(RTC_OBJC_TYPE(RTCIceTransportPolicy))policy; + (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy: - (RTCBundlePolicy)policy; + (RTC_OBJC_TYPE(RTCBundlePolicy))policy; -+ (RTCBundlePolicy)bundlePolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCBundlePolicy))bundlePolicyForNativePolicy: (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy; -+ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy; ++ (NSString *)stringForBundlePolicy:(RTC_OBJC_TYPE(RTCBundlePolicy))policy; + (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy: - (RTCRtcpMuxPolicy)policy; + (RTC_OBJC_TYPE(RTCRtcpMuxPolicy))policy; -+ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCRtcpMuxPolicy))rtcpMuxPolicyForNativePolicy: (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy; -+ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy; ++ (NSString *)stringForRtcpMuxPolicy:(RTC_OBJC_TYPE(RTCRtcpMuxPolicy))policy; + 
(webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeTcpCandidatePolicyForPolicy: - (RTCTcpCandidatePolicy)policy; + (RTC_OBJC_TYPE(RTCTcpCandidatePolicy))policy; -+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCTcpCandidatePolicy))tcpCandidatePolicyForNativePolicy: (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy; -+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy; ++ (NSString *)stringForTcpCandidatePolicy:(RTC_OBJC_TYPE(RTCTcpCandidatePolicy))policy; + (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativeCandidateNetworkPolicyForPolicy: - (RTCCandidateNetworkPolicy)policy; + (RTC_OBJC_TYPE(RTCCandidateNetworkPolicy))policy; -+ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCCandidateNetworkPolicy))candidateNetworkPolicyForNativePolicy: (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy; -+ (NSString *)stringForCandidateNetworkPolicy:(RTCCandidateNetworkPolicy)policy; ++ (NSString *)stringForCandidateNetworkPolicy:(RTC_OBJC_TYPE(RTCCandidateNetworkPolicy))policy; -+ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTCEncryptionKeyType)keyType; ++ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTC_OBJC_TYPE(RTCEncryptionKeyType))keyType; -+ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTCSdpSemantics)sdpSemantics; ++ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTC_OBJC_TYPE(RTCSdpSemantics))sdpSemantics; -+ (RTCSdpSemantics)sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics; ++ (RTC_OBJC_TYPE(RTCSdpSemantics))sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics; -+ (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics; ++ (NSString *)stringForSdpSemantics:(RTC_OBJC_TYPE(RTCSdpSemantics))sdpSemantics; /** * RTCConfiguration struct representation of this RTCConfiguration. 
diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.h b/sdk/objc/api/peerconnection/RTCConfiguration.h index b643f32f2a..6478d4f704 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.h +++ b/sdk/objc/api/peerconnection/RTCConfiguration.h @@ -20,52 +20,55 @@ * Represents the ice transport policy. This exposes the same states in C++, * which include one more state than what exists in the W3C spec. */ -typedef NS_ENUM(NSInteger, RTCIceTransportPolicy) { - RTCIceTransportPolicyNone, - RTCIceTransportPolicyRelay, - RTCIceTransportPolicyNoHost, - RTCIceTransportPolicyAll +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCIceTransportPolicy)) { + RTC_OBJC_TYPE(RTCIceTransportPolicyNone), + RTC_OBJC_TYPE(RTCIceTransportPolicyRelay), + RTC_OBJC_TYPE(RTCIceTransportPolicyNoHost), + RTC_OBJC_TYPE(RTCIceTransportPolicyAll) }; /** Represents the bundle policy. */ -typedef NS_ENUM(NSInteger, RTCBundlePolicy) { - RTCBundlePolicyBalanced, - RTCBundlePolicyMaxCompat, - RTCBundlePolicyMaxBundle +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCBundlePolicy)) { + RTC_OBJC_TYPE(RTCBundlePolicyBalanced), + RTC_OBJC_TYPE(RTCBundlePolicyMaxCompat), + RTC_OBJC_TYPE(RTCBundlePolicyMaxBundle) }; /** Represents the rtcp mux policy. */ -typedef NS_ENUM(NSInteger, RTCRtcpMuxPolicy) { RTCRtcpMuxPolicyNegotiate, RTCRtcpMuxPolicyRequire }; +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCRtcpMuxPolicy)) { + RTC_OBJC_TYPE(RTCRtcpMuxPolicyNegotiate), + RTC_OBJC_TYPE(RTCRtcpMuxPolicyRequire) +}; /** Represents the tcp candidate policy. */ -typedef NS_ENUM(NSInteger, RTCTcpCandidatePolicy) { - RTCTcpCandidatePolicyEnabled, - RTCTcpCandidatePolicyDisabled +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCTcpCandidatePolicy)) { + RTC_OBJC_TYPE(RTCTcpCandidatePolicyEnabled), + RTC_OBJC_TYPE(RTCTcpCandidatePolicyDisabled) }; /** Represents the candidate network policy. 
*/ -typedef NS_ENUM(NSInteger, RTCCandidateNetworkPolicy) { - RTCCandidateNetworkPolicyAll, - RTCCandidateNetworkPolicyLowCost +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCCandidateNetworkPolicy)) { + RTC_OBJC_TYPE(RTCCandidateNetworkPolicyAll), + RTC_OBJC_TYPE(RTCCandidateNetworkPolicyLowCost) }; /** Represents the continual gathering policy. */ -typedef NS_ENUM(NSInteger, RTCContinualGatheringPolicy) { - RTCContinualGatheringPolicyGatherOnce, - RTCContinualGatheringPolicyGatherContinually +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCContinualGatheringPolicy)) { + RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherOnce), + RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherContinually) }; /** Represents the encryption key type. */ -typedef NS_ENUM(NSInteger, RTCEncryptionKeyType) { - RTCEncryptionKeyTypeRSA, - RTCEncryptionKeyTypeECDSA, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCEncryptionKeyType)) { + RTC_OBJC_TYPE(RTCEncryptionKeyTypeRSA), + RTC_OBJC_TYPE(RTCEncryptionKeyTypeECDSA), }; /** Represents the chosen SDP semantics for the RTCPeerConnection. */ -typedef NS_ENUM(NSInteger, RTCSdpSemantics) { +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCSdpSemantics)) { // TODO(https://crbug.com/webrtc/13528): Remove support for Plan B. - RTCSdpSemanticsPlanB, - RTCSdpSemanticsUnifiedPlan, + RTC_OBJC_TYPE(RTCSdpSemanticsPlanB), + RTC_OBJC_TYPE(RTCSdpSemanticsUnifiedPlan), }; NS_ASSUME_NONNULL_BEGIN @@ -86,16 +89,16 @@ RTC_OBJC_EXPORT /** Which candidates the ICE agent is allowed to use. The W3C calls it * `iceTransportPolicy`, while in C++ it is called `type`. */ -@property(nonatomic, assign) RTCIceTransportPolicy iceTransportPolicy; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCIceTransportPolicy) iceTransportPolicy; /** The media-bundling policy to use when gathering ICE candidates. 
*/ -@property(nonatomic, assign) RTCBundlePolicy bundlePolicy; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCBundlePolicy) bundlePolicy; /** The rtcp-mux policy to use when gathering ICE candidates. */ -@property(nonatomic, assign) RTCRtcpMuxPolicy rtcpMuxPolicy; -@property(nonatomic, assign) RTCTcpCandidatePolicy tcpCandidatePolicy; -@property(nonatomic, assign) RTCCandidateNetworkPolicy candidateNetworkPolicy; -@property(nonatomic, assign) RTCContinualGatheringPolicy continualGatheringPolicy; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCRtcpMuxPolicy) rtcpMuxPolicy; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCTcpCandidatePolicy) tcpCandidatePolicy; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCCandidateNetworkPolicy) candidateNetworkPolicy; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCContinualGatheringPolicy) continualGatheringPolicy; /** If set to YES, don't gather IPv6 ICE candidates on Wi-Fi. * Only intended to be used on specific devices. Certain phones disable IPv6 @@ -125,7 +128,7 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) int iceBackupCandidatePairPingInterval; /** Key type used to generate SSL identity. Default is ECDSA. */ -@property(nonatomic, assign) RTCEncryptionKeyType keyType; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCEncryptionKeyType) keyType; /** ICE candidate pool size as defined in JSEP. Default is 0. */ @property(nonatomic, assign) int iceCandidatePoolSize; @@ -176,7 +179,7 @@ RTC_OBJC_EXPORT * the section. This will also cause RTCPeerConnection to ignore all but the * first m= section of the same media type. */ -@property(nonatomic, assign) RTCSdpSemantics sdpSemantics; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCSdpSemantics) sdpSemantics; /** Actively reset the SRTP parameters when the DTLS transports underneath are * changed after offer/answer negotiation. 
This is only intended to be a diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.mm b/sdk/objc/api/peerconnection/RTCConfiguration.mm index 70bbc5d370..1bf4157124 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.mm +++ b/sdk/objc/api/peerconnection/RTCConfiguration.mm @@ -111,7 +111,7 @@ - (instancetype)initWithNativeConfiguration: _iceConnectionReceivingTimeout = config.ice_connection_receiving_timeout; _iceBackupCandidatePairPingInterval = config.ice_backup_candidate_pair_ping_interval; - _keyType = RTCEncryptionKeyTypeECDSA; + _keyType = RTC_OBJC_TYPE(RTCEncryptionKeyTypeECDSA); _iceCandidatePoolSize = config.ice_candidate_pool_size; _shouldPruneTurnPorts = config.prune_turn_ports; _shouldPresumeWritableWhenFullyRelayed = @@ -310,233 +310,231 @@ - (NSString *)description { } + (webrtc::PeerConnectionInterface::IceTransportsType) - nativeTransportsTypeForTransportPolicy:(RTCIceTransportPolicy)policy { + nativeTransportsTypeForTransportPolicy:(RTC_OBJC_TYPE(RTCIceTransportPolicy))policy { switch (policy) { - case RTCIceTransportPolicyNone: + case RTC_OBJC_TYPE(RTCIceTransportPolicyNone): return webrtc::PeerConnectionInterface::kNone; - case RTCIceTransportPolicyRelay: + case RTC_OBJC_TYPE(RTCIceTransportPolicyRelay): return webrtc::PeerConnectionInterface::kRelay; - case RTCIceTransportPolicyNoHost: + case RTC_OBJC_TYPE(RTCIceTransportPolicyNoHost): return webrtc::PeerConnectionInterface::kNoHost; - case RTCIceTransportPolicyAll: + case RTC_OBJC_TYPE(RTCIceTransportPolicyAll): return webrtc::PeerConnectionInterface::kAll; } } -+ (RTCIceTransportPolicy)transportPolicyForTransportsType: ++ (RTC_OBJC_TYPE(RTCIceTransportPolicy))transportPolicyForTransportsType: (webrtc::PeerConnectionInterface::IceTransportsType)nativeType { switch (nativeType) { case webrtc::PeerConnectionInterface::kNone: - return RTCIceTransportPolicyNone; + return RTC_OBJC_TYPE(RTCIceTransportPolicyNone); case webrtc::PeerConnectionInterface::kRelay: - return 
RTCIceTransportPolicyRelay; + return RTC_OBJC_TYPE(RTCIceTransportPolicyRelay); case webrtc::PeerConnectionInterface::kNoHost: - return RTCIceTransportPolicyNoHost; + return RTC_OBJC_TYPE(RTCIceTransportPolicyNoHost); case webrtc::PeerConnectionInterface::kAll: - return RTCIceTransportPolicyAll; + return RTC_OBJC_TYPE(RTCIceTransportPolicyAll); } } -+ (NSString *)stringForTransportPolicy:(RTCIceTransportPolicy)policy { ++ (NSString *)stringForTransportPolicy:(RTC_OBJC_TYPE(RTCIceTransportPolicy))policy { switch (policy) { - case RTCIceTransportPolicyNone: + case RTC_OBJC_TYPE(RTCIceTransportPolicyNone): return @"NONE"; - case RTCIceTransportPolicyRelay: + case RTC_OBJC_TYPE(RTCIceTransportPolicyRelay): return @"RELAY"; - case RTCIceTransportPolicyNoHost: + case RTC_OBJC_TYPE(RTCIceTransportPolicyNoHost): return @"NO_HOST"; - case RTCIceTransportPolicyAll: + case RTC_OBJC_TYPE(RTCIceTransportPolicyAll): return @"ALL"; } } + (webrtc::PeerConnectionInterface::BundlePolicy)nativeBundlePolicyForPolicy: - (RTCBundlePolicy)policy { + (RTC_OBJC_TYPE(RTCBundlePolicy))policy { switch (policy) { - case RTCBundlePolicyBalanced: + case RTC_OBJC_TYPE(RTCBundlePolicyBalanced): return webrtc::PeerConnectionInterface::kBundlePolicyBalanced; - case RTCBundlePolicyMaxCompat: + case RTC_OBJC_TYPE(RTCBundlePolicyMaxCompat): return webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat; - case RTCBundlePolicyMaxBundle: + case RTC_OBJC_TYPE(RTCBundlePolicyMaxBundle): return webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle; } } -+ (RTCBundlePolicy)bundlePolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCBundlePolicy))bundlePolicyForNativePolicy: (webrtc::PeerConnectionInterface::BundlePolicy)nativePolicy { switch (nativePolicy) { case webrtc::PeerConnectionInterface::kBundlePolicyBalanced: - return RTCBundlePolicyBalanced; + return RTC_OBJC_TYPE(RTCBundlePolicyBalanced); case webrtc::PeerConnectionInterface::kBundlePolicyMaxCompat: - return RTCBundlePolicyMaxCompat; + return 
RTC_OBJC_TYPE(RTCBundlePolicyMaxCompat); case webrtc::PeerConnectionInterface::kBundlePolicyMaxBundle: - return RTCBundlePolicyMaxBundle; + return RTC_OBJC_TYPE(RTCBundlePolicyMaxBundle); } } -+ (NSString *)stringForBundlePolicy:(RTCBundlePolicy)policy { ++ (NSString *)stringForBundlePolicy:(RTC_OBJC_TYPE(RTCBundlePolicy))policy { switch (policy) { - case RTCBundlePolicyBalanced: + case RTC_OBJC_TYPE(RTCBundlePolicyBalanced): return @"BALANCED"; - case RTCBundlePolicyMaxCompat: + case RTC_OBJC_TYPE(RTCBundlePolicyMaxCompat): return @"MAX_COMPAT"; - case RTCBundlePolicyMaxBundle: + case RTC_OBJC_TYPE(RTCBundlePolicyMaxBundle): return @"MAX_BUNDLE"; } } + (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativeRtcpMuxPolicyForPolicy: - (RTCRtcpMuxPolicy)policy { + (RTC_OBJC_TYPE(RTCRtcpMuxPolicy))policy { switch (policy) { - case RTCRtcpMuxPolicyNegotiate: + case RTC_OBJC_TYPE(RTCRtcpMuxPolicyNegotiate): return webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate; - case RTCRtcpMuxPolicyRequire: + case RTC_OBJC_TYPE(RTCRtcpMuxPolicyRequire): return webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire; } } -+ (RTCRtcpMuxPolicy)rtcpMuxPolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCRtcpMuxPolicy))rtcpMuxPolicyForNativePolicy: (webrtc::PeerConnectionInterface::RtcpMuxPolicy)nativePolicy { switch (nativePolicy) { case webrtc::PeerConnectionInterface::kRtcpMuxPolicyNegotiate: - return RTCRtcpMuxPolicyNegotiate; + return RTC_OBJC_TYPE(RTCRtcpMuxPolicyNegotiate); case webrtc::PeerConnectionInterface::kRtcpMuxPolicyRequire: - return RTCRtcpMuxPolicyRequire; + return RTC_OBJC_TYPE(RTCRtcpMuxPolicyRequire); } } -+ (NSString *)stringForRtcpMuxPolicy:(RTCRtcpMuxPolicy)policy { ++ (NSString *)stringForRtcpMuxPolicy:(RTC_OBJC_TYPE(RTCRtcpMuxPolicy))policy { switch (policy) { - case RTCRtcpMuxPolicyNegotiate: + case RTC_OBJC_TYPE(RTCRtcpMuxPolicyNegotiate): return @"NEGOTIATE"; - case RTCRtcpMuxPolicyRequire: + case RTC_OBJC_TYPE(RTCRtcpMuxPolicyRequire): return @"REQUIRE"; } } -+ 
(webrtc::PeerConnectionInterface::TcpCandidatePolicy) - nativeTcpCandidatePolicyForPolicy:(RTCTcpCandidatePolicy)policy { ++ (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativeTcpCandidatePolicyForPolicy: + (RTC_OBJC_TYPE(RTCTcpCandidatePolicy))policy { switch (policy) { - case RTCTcpCandidatePolicyEnabled: + case RTC_OBJC_TYPE(RTCTcpCandidatePolicyEnabled): return webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled; - case RTCTcpCandidatePolicyDisabled: + case RTC_OBJC_TYPE(RTCTcpCandidatePolicyDisabled): return webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled; } } -+ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy) - nativeCandidateNetworkPolicyForPolicy:(RTCCandidateNetworkPolicy)policy { ++ (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativeCandidateNetworkPolicyForPolicy: + (RTC_OBJC_TYPE(RTCCandidateNetworkPolicy))policy { switch (policy) { - case RTCCandidateNetworkPolicyAll: + case RTC_OBJC_TYPE(RTCCandidateNetworkPolicyAll): return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll; - case RTCCandidateNetworkPolicyLowCost: + case RTC_OBJC_TYPE(RTCCandidateNetworkPolicyLowCost): return webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost; } } -+ (RTCTcpCandidatePolicy)tcpCandidatePolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCTcpCandidatePolicy))tcpCandidatePolicyForNativePolicy: (webrtc::PeerConnectionInterface::TcpCandidatePolicy)nativePolicy { switch (nativePolicy) { case webrtc::PeerConnectionInterface::kTcpCandidatePolicyEnabled: - return RTCTcpCandidatePolicyEnabled; + return RTC_OBJC_TYPE(RTCTcpCandidatePolicyEnabled); case webrtc::PeerConnectionInterface::kTcpCandidatePolicyDisabled: - return RTCTcpCandidatePolicyDisabled; + return RTC_OBJC_TYPE(RTCTcpCandidatePolicyDisabled); } } -+ (NSString *)stringForTcpCandidatePolicy:(RTCTcpCandidatePolicy)policy { ++ (NSString *)stringForTcpCandidatePolicy:(RTC_OBJC_TYPE(RTCTcpCandidatePolicy))policy { switch (policy) { - case 
RTCTcpCandidatePolicyEnabled: + case RTC_OBJC_TYPE(RTCTcpCandidatePolicyEnabled): return @"TCP_ENABLED"; - case RTCTcpCandidatePolicyDisabled: + case RTC_OBJC_TYPE(RTCTcpCandidatePolicyDisabled): return @"TCP_DISABLED"; } } -+ (RTCCandidateNetworkPolicy)candidateNetworkPolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCCandidateNetworkPolicy))candidateNetworkPolicyForNativePolicy: (webrtc::PeerConnectionInterface::CandidateNetworkPolicy)nativePolicy { switch (nativePolicy) { case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyAll: - return RTCCandidateNetworkPolicyAll; + return RTC_OBJC_TYPE(RTCCandidateNetworkPolicyAll); case webrtc::PeerConnectionInterface::kCandidateNetworkPolicyLowCost: - return RTCCandidateNetworkPolicyLowCost; + return RTC_OBJC_TYPE(RTCCandidateNetworkPolicyLowCost); } } -+ (NSString *)stringForCandidateNetworkPolicy: - (RTCCandidateNetworkPolicy)policy { ++ (NSString *)stringForCandidateNetworkPolicy:(RTC_OBJC_TYPE(RTCCandidateNetworkPolicy))policy { switch (policy) { - case RTCCandidateNetworkPolicyAll: + case RTC_OBJC_TYPE(RTCCandidateNetworkPolicyAll): return @"CANDIDATE_ALL_NETWORKS"; - case RTCCandidateNetworkPolicyLowCost: + case RTC_OBJC_TYPE(RTCCandidateNetworkPolicyLowCost): return @"CANDIDATE_LOW_COST_NETWORKS"; } } + (webrtc::PeerConnectionInterface::ContinualGatheringPolicy) nativeContinualGatheringPolicyForPolicy: - (RTCContinualGatheringPolicy)policy { + (RTC_OBJC_TYPE(RTCContinualGatheringPolicy))policy { switch (policy) { - case RTCContinualGatheringPolicyGatherOnce: + case RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherOnce): return webrtc::PeerConnectionInterface::GATHER_ONCE; - case RTCContinualGatheringPolicyGatherContinually: + case RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherContinually): return webrtc::PeerConnectionInterface::GATHER_CONTINUALLY; } } -+ (RTCContinualGatheringPolicy)continualGatheringPolicyForNativePolicy: ++ (RTC_OBJC_TYPE(RTCContinualGatheringPolicy))continualGatheringPolicyForNativePolicy: 
(webrtc::PeerConnectionInterface::ContinualGatheringPolicy)nativePolicy { switch (nativePolicy) { case webrtc::PeerConnectionInterface::GATHER_ONCE: - return RTCContinualGatheringPolicyGatherOnce; + return RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherOnce); case webrtc::PeerConnectionInterface::GATHER_CONTINUALLY: - return RTCContinualGatheringPolicyGatherContinually; + return RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherContinually); } } + (NSString *)stringForContinualGatheringPolicy: - (RTCContinualGatheringPolicy)policy { + (RTC_OBJC_TYPE(RTCContinualGatheringPolicy))policy { switch (policy) { - case RTCContinualGatheringPolicyGatherOnce: + case RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherOnce): return @"GATHER_ONCE"; - case RTCContinualGatheringPolicyGatherContinually: + case RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherContinually): return @"GATHER_CONTINUALLY"; } } -+ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType: - (RTCEncryptionKeyType)keyType { ++ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTC_OBJC_TYPE(RTCEncryptionKeyType))keyType { switch (keyType) { - case RTCEncryptionKeyTypeRSA: + case RTC_OBJC_TYPE(RTCEncryptionKeyTypeRSA): return rtc::KT_RSA; - case RTCEncryptionKeyTypeECDSA: + case RTC_OBJC_TYPE(RTCEncryptionKeyTypeECDSA): return rtc::KT_ECDSA; } } -+ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTCSdpSemantics)sdpSemantics { ++ (webrtc::SdpSemantics)nativeSdpSemanticsForSdpSemantics:(RTC_OBJC_TYPE(RTCSdpSemantics))sdpSemantics { switch (sdpSemantics) { - case RTCSdpSemanticsPlanB: + case RTC_OBJC_TYPE(RTCSdpSemanticsPlanB): return webrtc::SdpSemantics::kPlanB_DEPRECATED; - case RTCSdpSemanticsUnifiedPlan: + case RTC_OBJC_TYPE(RTCSdpSemanticsUnifiedPlan): return webrtc::SdpSemantics::kUnifiedPlan; } } -+ (RTCSdpSemantics)sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics { ++ (RTC_OBJC_TYPE(RTCSdpSemantics))sdpSemanticsForNativeSdpSemantics:(webrtc::SdpSemantics)sdpSemantics { switch (sdpSemantics) { 
case webrtc::SdpSemantics::kPlanB_DEPRECATED: - return RTCSdpSemanticsPlanB; + return RTC_OBJC_TYPE(RTCSdpSemanticsPlanB); case webrtc::SdpSemantics::kUnifiedPlan: - return RTCSdpSemanticsUnifiedPlan; + return RTC_OBJC_TYPE(RTCSdpSemanticsUnifiedPlan); } } -+ (NSString *)stringForSdpSemantics:(RTCSdpSemantics)sdpSemantics { ++ (NSString *)stringForSdpSemantics:(RTC_OBJC_TYPE(RTCSdpSemantics))sdpSemantics { switch (sdpSemantics) { - case RTCSdpSemanticsPlanB: + case RTC_OBJC_TYPE(RTCSdpSemanticsPlanB): return @"PLAN_B"; - case RTCSdpSemanticsUnifiedPlan: + case RTC_OBJC_TYPE(RTCSdpSemanticsUnifiedPlan): return @"UNIFIED_PLAN"; } } diff --git a/sdk/objc/api/peerconnection/RTCDataChannel+Private.h b/sdk/objc/api/peerconnection/RTCDataChannel+Private.h index d903b0c002..84eb092e6e 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannel+Private.h +++ b/sdk/objc/api/peerconnection/RTCDataChannel+Private.h @@ -17,7 +17,7 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -@interface RTC_OBJC_TYPE (RTCDataBuffer) +@interface RTC_OBJC_TYPE(RTCDataBuffer) () /** @@ -31,7 +31,7 @@ NS_ASSUME_NONNULL_BEGIN @end -@interface RTC_OBJC_TYPE (RTCDataChannel) +@interface RTC_OBJC_TYPE(RTCDataChannel) () /** Initialize an RTCDataChannel from a native DataChannelInterface. 
*/ @@ -40,12 +40,12 @@ NS_ASSUME_NONNULL_BEGIN : (rtc::scoped_refptr)nativeDataChannel NS_DESIGNATED_INITIALIZER; + (webrtc::DataChannelInterface::DataState)nativeDataChannelStateForState: - (RTCDataChannelState)state; + (RTC_OBJC_TYPE(RTCDataChannelState))state; -+ (RTCDataChannelState)dataChannelStateForNativeState: ++ (RTC_OBJC_TYPE(RTCDataChannelState))dataChannelStateForNativeState: (webrtc::DataChannelInterface::DataState)nativeState; -+ (NSString *)stringForState:(RTCDataChannelState)state; ++ (NSString *)stringForState:(RTC_OBJC_TYPE(RTCDataChannelState))state; @end diff --git a/sdk/objc/api/peerconnection/RTCDataChannel.h b/sdk/objc/api/peerconnection/RTCDataChannel.h index 89eb58bc3f..788fd1cfd4 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannel.h +++ b/sdk/objc/api/peerconnection/RTCDataChannel.h @@ -54,11 +54,11 @@ RTC_OBJC_EXPORT @end /** Represents the state of the data channel. */ -typedef NS_ENUM(NSInteger, RTCDataChannelState) { - RTCDataChannelStateConnecting, - RTCDataChannelStateOpen, - RTCDataChannelStateClosing, - RTCDataChannelStateClosed, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCDataChannelState)) { + RTC_OBJC_TYPE(RTCDataChannelStateConnecting), + RTC_OBJC_TYPE(RTCDataChannelStateOpen), + RTC_OBJC_TYPE(RTCDataChannelStateClosing), + RTC_OBJC_TYPE(RTCDataChannelStateClosed), }; RTC_OBJC_EXPORT @@ -108,7 +108,7 @@ RTC_OBJC_EXPORT @property(nonatomic, readonly) int channelId; /** The state of the data channel. 
*/ -@property(nonatomic, readonly) RTCDataChannelState readyState; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCDataChannelState) readyState; /** * The number of bytes of application data that have been queued using diff --git a/sdk/objc/api/peerconnection/RTCDataChannel.mm b/sdk/objc/api/peerconnection/RTCDataChannel.mm index 4a79cefdb4..de47ebbe6a 100644 --- a/sdk/objc/api/peerconnection/RTCDataChannel.mm +++ b/sdk/objc/api/peerconnection/RTCDataChannel.mm @@ -137,7 +137,7 @@ - (int)channelId { return _nativeDataChannel->id(); } -- (RTCDataChannelState)readyState { +- (RTC_OBJC_TYPE(RTCDataChannelState))readyState { return [[self class] dataChannelStateForNativeState: _nativeDataChannel->state()]; } @@ -177,42 +177,42 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto } + (webrtc::DataChannelInterface::DataState) - nativeDataChannelStateForState:(RTCDataChannelState)state { + nativeDataChannelStateForState:(RTC_OBJC_TYPE(RTCDataChannelState))state { switch (state) { - case RTCDataChannelStateConnecting: + case RTC_OBJC_TYPE(RTCDataChannelStateConnecting): return webrtc::DataChannelInterface::DataState::kConnecting; - case RTCDataChannelStateOpen: + case RTC_OBJC_TYPE(RTCDataChannelStateOpen): return webrtc::DataChannelInterface::DataState::kOpen; - case RTCDataChannelStateClosing: + case RTC_OBJC_TYPE(RTCDataChannelStateClosing): return webrtc::DataChannelInterface::DataState::kClosing; - case RTCDataChannelStateClosed: + case RTC_OBJC_TYPE(RTCDataChannelStateClosed): return webrtc::DataChannelInterface::DataState::kClosed; } } -+ (RTCDataChannelState)dataChannelStateForNativeState: ++ (RTC_OBJC_TYPE(RTCDataChannelState))dataChannelStateForNativeState: (webrtc::DataChannelInterface::DataState)nativeState { switch (nativeState) { case webrtc::DataChannelInterface::DataState::kConnecting: - return RTCDataChannelStateConnecting; + return RTC_OBJC_TYPE(RTCDataChannelStateConnecting); case 
webrtc::DataChannelInterface::DataState::kOpen: - return RTCDataChannelStateOpen; + return RTC_OBJC_TYPE(RTCDataChannelStateOpen); case webrtc::DataChannelInterface::DataState::kClosing: - return RTCDataChannelStateClosing; + return RTC_OBJC_TYPE(RTCDataChannelStateClosing); case webrtc::DataChannelInterface::DataState::kClosed: - return RTCDataChannelStateClosed; + return RTC_OBJC_TYPE(RTCDataChannelStateClosed); } } -+ (NSString *)stringForState:(RTCDataChannelState)state { ++ (NSString *)stringForState:(RTC_OBJC_TYPE(RTCDataChannelState))state { switch (state) { - case RTCDataChannelStateConnecting: + case RTC_OBJC_TYPE(RTCDataChannelStateConnecting): return @"Connecting"; - case RTCDataChannelStateOpen: + case RTC_OBJC_TYPE(RTCDataChannelStateOpen): return @"Open"; - case RTCDataChannelStateClosing: + case RTC_OBJC_TYPE(RTCDataChannelStateClosing): return @"Closing"; - case RTCDataChannelStateClosed: + case RTC_OBJC_TYPE(RTCDataChannelStateClosed): return @"Closed"; } } diff --git a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm index ac9946e55d..243b66df88 100644 --- a/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm +++ b/sdk/objc/api/peerconnection/RTCEncodedImage+Private.mm @@ -89,12 +89,12 @@ - (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encoded self.flags = encodedImage.timing_.flags; self.encodeStartMs = encodedImage.timing_.encode_start_ms; self.encodeFinishMs = encodedImage.timing_.encode_finish_ms; - self.frameType = static_cast(encodedImage._frameType); - self.rotation = static_cast(encodedImage.rotation_); + self.frameType = static_cast(encodedImage._frameType); + self.rotation = static_cast(encodedImage.rotation_); self.qp = @(encodedImage.qp_); self.contentType = (encodedImage.content_type_ == webrtc::VideoContentType::SCREENSHARE) ? 
- RTCVideoContentTypeScreenshare : - RTCVideoContentTypeUnspecified; + RTC_OBJC_TYPE(RTCVideoContentTypeScreenshare) : + RTC_OBJC_TYPE(RTCVideoContentTypeUnspecified); } return self; @@ -120,7 +120,7 @@ - (instancetype)initWithNativeEncodedImage:(const webrtc::EncodedImage &)encoded encodedImage._frameType = webrtc::VideoFrameType(self.frameType); encodedImage.rotation_ = webrtc::VideoRotation(self.rotation); encodedImage.qp_ = self.qp ? self.qp.intValue : -1; - encodedImage.content_type_ = (self.contentType == RTCVideoContentTypeScreenshare) ? + encodedImage.content_type_ = (self.contentType == RTC_OBJC_TYPE(RTCVideoContentTypeScreenshare)) ? webrtc::VideoContentType::SCREENSHARE : webrtc::VideoContentType::UNSPECIFIED; diff --git a/sdk/objc/api/peerconnection/RTCFieldTrials.h b/sdk/objc/api/peerconnection/RTCFieldTrials.h index 2c00e11721..0f51487d66 100644 --- a/sdk/objc/api/peerconnection/RTCFieldTrials.h +++ b/sdk/objc/api/peerconnection/RTCFieldTrials.h @@ -13,18 +13,18 @@ #import "RTCMacros.h" /** The only valid value for the following if set is kRTCFieldTrialEnabledValue. 
*/ -RTC_EXTERN NSString *const kRTCFieldTrialAudioForceABWENoTWCCKey; -RTC_EXTERN NSString *const kRTCFieldTrialFlexFec03AdvertisedKey; -RTC_EXTERN NSString *const kRTCFieldTrialFlexFec03Key; -RTC_EXTERN NSString *const kRTCFieldTrialH264HighProfileKey; -RTC_EXTERN NSString *const kRTCFieldTrialMinimizeResamplingOnMobileKey; -RTC_EXTERN NSString *const kRTCFieldTrialUseNWPathMonitor; +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialAudioForceABWENoTWCCKey); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialFlexFec03AdvertisedKey); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialFlexFec03Key); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialH264HighProfileKey); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialMinimizeResamplingOnMobileKey); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialUseNWPathMonitor); /** The valid value for field trials above. */ -RTC_EXTERN NSString *const kRTCFieldTrialEnabledValue; +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialEnabledValue); /** Initialize field trials using a dictionary mapping field trial keys to their * values. See above for valid keys and values. Must be called before any other * call into WebRTC. 
See: webrtc/system_wrappers/include/field_trial.h */ -RTC_EXTERN void RTCInitFieldTrialDictionary(NSDictionary *fieldTrials); +RTC_EXTERN void RTC_OBJC_TYPE(RTCInitFieldTrialDictionary)(NSDictionary *fieldTrials); diff --git a/sdk/objc/api/peerconnection/RTCFieldTrials.mm b/sdk/objc/api/peerconnection/RTCFieldTrials.mm index b5a2eca8f0..310bb9bda4 100644 --- a/sdk/objc/api/peerconnection/RTCFieldTrials.mm +++ b/sdk/objc/api/peerconnection/RTCFieldTrials.mm @@ -16,21 +16,21 @@ #include "system_wrappers/include/field_trial.h" -NSString *const kRTCFieldTrialAudioForceABWENoTWCCKey = @"WebRTC-Audio-ABWENoTWCC"; -NSString *const kRTCFieldTrialFlexFec03AdvertisedKey = @"WebRTC-FlexFEC-03-Advertised"; -NSString *const kRTCFieldTrialFlexFec03Key = @"WebRTC-FlexFEC-03"; -NSString *const kRTCFieldTrialH264HighProfileKey = @"WebRTC-H264HighProfile"; -NSString *const kRTCFieldTrialMinimizeResamplingOnMobileKey = +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialAudioForceABWENoTWCCKey) = @"WebRTC-Audio-ABWENoTWCC"; +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialFlexFec03AdvertisedKey) = @"WebRTC-FlexFEC-03-Advertised"; +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialFlexFec03Key) = @"WebRTC-FlexFEC-03"; +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialH264HighProfileKey) = @"WebRTC-H264HighProfile"; +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialMinimizeResamplingOnMobileKey) = @"WebRTC-Audio-MinimizeResamplingOnMobile"; -NSString *const kRTCFieldTrialUseNWPathMonitor = @"WebRTC-Network-UseNWPathMonitor"; -NSString *const kRTCFieldTrialEnabledValue = @"Enabled"; +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialUseNWPathMonitor) = @"WebRTC-Network-UseNWPathMonitor"; +NSString *const RTC_CONSTANT_TYPE(RTCFieldTrialEnabledValue) = @"Enabled"; // InitFieldTrialsFromString stores the char*, so the char array must outlive // the application. 
static char *gFieldTrialInitString = nullptr; static os_unfair_lock fieldTrialLock = OS_UNFAIR_LOCK_INIT; -void RTCInitFieldTrialDictionary(NSDictionary *fieldTrials) { +void RTC_OBJC_TYPE(RTCInitFieldTrialDictionary)(NSDictionary *fieldTrials) { if (!fieldTrials) { RTCLogWarning(@"No fieldTrials provided."); return; diff --git a/sdk/objc/api/peerconnection/RTCFileLogger.h b/sdk/objc/api/peerconnection/RTCFileLogger.h index cb397c9633..6e11607e05 100644 --- a/sdk/objc/api/peerconnection/RTCFileLogger.h +++ b/sdk/objc/api/peerconnection/RTCFileLogger.h @@ -12,16 +12,16 @@ #import "RTCMacros.h" -typedef NS_ENUM(NSUInteger, RTCFileLoggerSeverity) { - RTCFileLoggerSeverityVerbose, - RTCFileLoggerSeverityInfo, - RTCFileLoggerSeverityWarning, - RTCFileLoggerSeverityError +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCFileLoggerSeverity)) { + RTC_OBJC_TYPE(RTCFileLoggerSeverityVerbose), + RTC_OBJC_TYPE(RTCFileLoggerSeverityInfo), + RTC_OBJC_TYPE(RTCFileLoggerSeverityWarning), + RTC_OBJC_TYPE(RTCFileLoggerSeverityError) }; -typedef NS_ENUM(NSUInteger, RTCFileLoggerRotationType) { - RTCFileLoggerTypeCall, - RTCFileLoggerTypeApp, +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCFileLoggerRotationType)) { + RTC_OBJC_TYPE(RTCFileLoggerTypeCall), + RTC_OBJC_TYPE(RTCFileLoggerTypeApp), }; NS_ASSUME_NONNULL_BEGIN @@ -37,11 +37,11 @@ RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCFileLogger) : NSObject // The severity level to capture. The default is kRTCFileLoggerSeverityInfo. -@property(nonatomic, assign) RTCFileLoggerSeverity severity; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCFileLoggerSeverity) severity; // The rotation type for this file logger. The default is // kRTCFileLoggerTypeCall. -@property(nonatomic, readonly) RTCFileLoggerRotationType rotationType; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCFileLoggerRotationType) rotationType; // Disables buffering disk writes. Should be set before `start`. Buffering // is enabled by default for performance. 
@@ -56,7 +56,7 @@ RTC_OBJC_EXPORT - (instancetype)initWithDirPath:(NSString *)dirPath maxFileSize:(NSUInteger)maxFileSize - rotationType:(RTCFileLoggerRotationType)rotationType NS_DESIGNATED_INITIALIZER; + rotationType:(RTC_OBJC_TYPE(RTCFileLoggerRotationType))rotationType NS_DESIGNATED_INITIALIZER; // Starts writing WebRTC logs to disk if not already started. Overwrites any // existing file(s). diff --git a/sdk/objc/api/peerconnection/RTCFileLogger.mm b/sdk/objc/api/peerconnection/RTCFileLogger.mm index 9562245611..e1e1e20d0d 100644 --- a/sdk/objc/api/peerconnection/RTCFileLogger.mm +++ b/sdk/objc/api/peerconnection/RTCFileLogger.mm @@ -19,7 +19,7 @@ NSString *const kDefaultLogDirName = @"webrtc_logs"; NSUInteger const kDefaultMaxFileSize = 10 * 1024 * 1024; // 10MB. -const char *kRTCFileLoggerRotatingLogPrefix = "rotating_log"; +const char * RTC_CONSTANT_TYPE(RTCFileLoggerRotatingLogPrefix) = "rotating_log"; @implementation RTC_OBJC_TYPE (RTCFileLogger) { BOOL _hasStarted; @@ -46,12 +46,12 @@ - (instancetype)initWithDirPath:(NSString *)dirPath maxFileSize:(NSUInteger)maxFileSize { return [self initWithDirPath:dirPath maxFileSize:maxFileSize - rotationType:RTCFileLoggerTypeCall]; + rotationType:RTC_OBJC_TYPE(RTCFileLoggerTypeCall)]; } - (instancetype)initWithDirPath:(NSString *)dirPath maxFileSize:(NSUInteger)maxFileSize - rotationType:(RTCFileLoggerRotationType)rotationType { + rotationType:(RTC_OBJC_TYPE(RTCFileLoggerRotationType))rotationType { NSParameterAssert(dirPath.length); NSParameterAssert(maxFileSize); if (self = [super init]) { @@ -73,7 +73,7 @@ - (instancetype)initWithDirPath:(NSString *)dirPath } _dirPath = dirPath; _maxFileSize = maxFileSize; - _severity = RTCFileLoggerSeverityInfo; + _severity = RTC_OBJC_TYPE(RTCFileLoggerSeverityInfo); } return self; } @@ -87,14 +87,14 @@ - (void)start { return; } switch (_rotationType) { - case RTCFileLoggerTypeApp: + case RTC_OBJC_TYPE(RTCFileLoggerTypeApp): _logSink.reset( new 
rtc::FileRotatingLogSink(_dirPath.UTF8String, - kRTCFileLoggerRotatingLogPrefix, + RTC_CONSTANT_TYPE(RTCFileLoggerRotatingLogPrefix), _maxFileSize, _maxFileSize / 10)); break; - case RTCFileLoggerTypeCall: + case RTC_OBJC_TYPE(RTCFileLoggerTypeCall): _logSink.reset( new rtc::CallSessionFileRotatingLogSink(_dirPath.UTF8String, _maxFileSize)); @@ -131,11 +131,11 @@ - (nullable NSData *)logData { NSMutableData* logData = [NSMutableData data]; std::unique_ptr stream; switch(_rotationType) { - case RTCFileLoggerTypeApp: + case RTC_OBJC_TYPE(RTCFileLoggerTypeApp): stream = std::make_unique(_dirPath.UTF8String, - kRTCFileLoggerRotatingLogPrefix); + RTC_CONSTANT_TYPE(RTCFileLoggerRotatingLogPrefix)); break; - case RTCFileLoggerTypeCall: + case RTC_OBJC_TYPE(RTCFileLoggerTypeCall): stream = std::make_unique(_dirPath.UTF8String); break; } @@ -156,13 +156,13 @@ - (nullable NSData *)logData { - (rtc::LoggingSeverity)rtcSeverity { switch (_severity) { - case RTCFileLoggerSeverityVerbose: + case RTC_OBJC_TYPE(RTCFileLoggerSeverityVerbose): return rtc::LS_VERBOSE; - case RTCFileLoggerSeverityInfo: + case RTC_OBJC_TYPE(RTCFileLoggerSeverityInfo): return rtc::LS_INFO; - case RTCFileLoggerSeverityWarning: + case RTC_OBJC_TYPE(RTCFileLoggerSeverityWarning): return rtc::LS_WARNING; - case RTCFileLoggerSeverityError: + case RTC_OBJC_TYPE(RTCFileLoggerSeverityError): return rtc::LS_ERROR; } } diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.h b/sdk/objc/api/peerconnection/RTCFrameCryptor.h index 43719f35d8..b37ededa31 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptor.h +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.h @@ -26,18 +26,18 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCFrameCryptor); @class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -typedef NS_ENUM(NSUInteger, RTCCryptorAlgorithm) { - RTCCryptorAlgorithmAesGcm = 0, +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCCryptorAlgorithm)) { + RTC_OBJC_TYPE(RTCCryptorAlgorithmAesGcm) = 0, }; -typedef 
NS_ENUM(NSInteger, FrameCryptionState) { - FrameCryptionStateNew = 0, - FrameCryptionStateOk, - FrameCryptionStateEncryptionFailed, - FrameCryptionStateDecryptionFailed, - FrameCryptionStateMissingKey, - FrameCryptionStateKeyRatcheted, - FrameCryptionStateInternalError, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCFrameCryptorState)) { + RTC_OBJC_TYPE(RTCFrameCryptorStateNew) = 0, + RTC_OBJC_TYPE(RTCFrameCryptorStateOk), + RTC_OBJC_TYPE(RTCFrameCryptorStateEncryptionFailed), + RTC_OBJC_TYPE(RTCFrameCryptorStateDecryptionFailed), + RTC_OBJC_TYPE(RTCFrameCryptorStateMissingKey), + RTC_OBJC_TYPE(RTCFrameCryptorStateKeyRatcheted), + RTC_OBJC_TYPE(RTCFrameCryptorStateInternalError), }; RTC_OBJC_EXPORT @@ -46,7 +46,7 @@ RTC_OBJC_EXPORT /** Called when the RTCFrameCryptor got errors. */ - (void)frameCryptor : (RTC_OBJC_TYPE(RTCFrameCryptor) *)frameCryptor didStateChangeWithParticipantId - : (NSString *)participantId withState : (FrameCryptionState)stateChanged; + : (NSString *)participantId withState : (RTC_OBJC_TYPE(RTCFrameCryptorState))stateChanged; @end RTC_OBJC_EXPORT @@ -63,13 +63,13 @@ RTC_OBJC_EXPORT - (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender participantId:(NSString *)participantId - algorithm:(RTCCryptorAlgorithm)algorithm + algorithm:(RTC_OBJC_TYPE(RTCCryptorAlgorithm))algorithm keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; - (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver participantId:(NSString *)participantId - algorithm:(RTCCryptorAlgorithm)algorithm + algorithm:(RTC_OBJC_TYPE(RTCCryptorAlgorithm))algorithm keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider; @end diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm index 7ea3ef5c43..06d95fecd0 100644 --- 
a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm @@ -54,37 +54,37 @@ case FrameCryptionState::kNew: [frameCryptor.delegate frameCryptor:frameCryptor didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] - withState:FrameCryptionStateNew]; + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateNew)]; break; case FrameCryptionState::kOk: [frameCryptor.delegate frameCryptor:frameCryptor didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] - withState:FrameCryptionStateOk]; + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateOk)]; break; case FrameCryptionState::kEncryptionFailed: [frameCryptor.delegate frameCryptor:frameCryptor didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] - withState:FrameCryptionStateEncryptionFailed]; + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateEncryptionFailed)]; break; case FrameCryptionState::kDecryptionFailed: [frameCryptor.delegate frameCryptor:frameCryptor didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] - withState:FrameCryptionStateDecryptionFailed]; + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateDecryptionFailed)]; break; case FrameCryptionState::kMissingKey: [frameCryptor.delegate frameCryptor:frameCryptor didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] - withState:FrameCryptionStateMissingKey]; + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateMissingKey)]; break; case FrameCryptionState::kKeyRatcheted: [frameCryptor.delegate frameCryptor:frameCryptor didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] - withState:FrameCryptionStateKeyRatcheted]; + withState:RTC_OBJC_TYPE(RTCFrameCryptorStateKeyRatcheted)]; break; case FrameCryptionState::kInternalError: [frameCryptor.delegate frameCryptor:frameCryptor didStateChangeWithParticipantId:[NSString stringForStdString:participant_id] - withState:FrameCryptionStateInternalError]; + 
withState:RTC_OBJC_TYPE(RTCFrameCryptorStateInternalError)]; break; } } @@ -102,9 +102,9 @@ @implementation RTC_OBJC_TYPE (RTCFrameCryptor) { @synthesize participantId = _participantId; @synthesize delegate = _delegate; -- (webrtc::FrameCryptorTransformer::Algorithm)algorithmFromEnum:(RTCCryptorAlgorithm)algorithm { +- (webrtc::FrameCryptorTransformer::Algorithm)algorithmFromEnum:(RTC_OBJC_TYPE(RTCCryptorAlgorithm))algorithm { switch (algorithm) { - case RTCCryptorAlgorithmAesGcm: + case RTC_OBJC_TYPE(RTCCryptorAlgorithmAesGcm): return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; default: return webrtc::FrameCryptorTransformer::Algorithm::kAesGcm; @@ -114,7 +114,7 @@ @implementation RTC_OBJC_TYPE (RTCFrameCryptor) { - (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory rtpSender:(RTC_OBJC_TYPE(RTCRtpSender) *)sender participantId:(NSString *)participantId - algorithm:(RTCCryptorAlgorithm)algorithm + algorithm:(RTC_OBJC_TYPE(RTCCryptorAlgorithm))algorithm keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { if (self = [super init]) { _lock = OS_UNFAIR_LOCK_INIT; @@ -154,7 +154,7 @@ - (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory - (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory rtpReceiver:(RTC_OBJC_TYPE(RTCRtpReceiver) *)receiver participantId:(NSString *)participantId - algorithm:(RTCCryptorAlgorithm)algorithm + algorithm:(RTC_OBJC_TYPE(RTCCryptorAlgorithm))algorithm keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { if (self = [super init]) { _lock = OS_UNFAIR_LOCK_INIT; diff --git a/sdk/objc/api/peerconnection/RTCIODevice+Private.h b/sdk/objc/api/peerconnection/RTCIODevice+Private.h index e736c993e1..54d19a4626 100644 --- a/sdk/objc/api/peerconnection/RTCIODevice+Private.h +++ b/sdk/objc/api/peerconnection/RTCIODevice+Private.h @@ -19,7 +19,7 @@ NS_ASSUME_NONNULL_BEGIN @interface RTC_OBJC_TYPE(RTCIODevice) 
() -- (instancetype)initWithType:(RTCIODeviceType)type +- (instancetype)initWithType:(RTC_OBJC_TYPE(RTCIODeviceType))type deviceId:(NSString *)deviceId name:(NSString* )name; diff --git a/sdk/objc/api/peerconnection/RTCIODevice.h b/sdk/objc/api/peerconnection/RTCIODevice.h index f44d532081..e84e55d95e 100644 --- a/sdk/objc/api/peerconnection/RTCIODevice.h +++ b/sdk/objc/api/peerconnection/RTCIODevice.h @@ -20,19 +20,19 @@ NS_ASSUME_NONNULL_BEGIN -typedef NS_ENUM(NSInteger, RTCIODeviceType) { - RTCIODeviceTypeOutput, - RTCIODeviceTypeInput, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCIODeviceType)) { + RTC_OBJC_TYPE(RTCIODeviceTypeOutput), + RTC_OBJC_TYPE(RTCIODeviceTypeInput), }; RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE(RTCIODevice) : NSObject -+ (instancetype)defaultDeviceWithType: (RTCIODeviceType)type; ++ (instancetype)defaultDeviceWithType: (RTC_OBJC_TYPE(RTCIODeviceType))type; - (instancetype)init NS_UNAVAILABLE; @property(nonatomic, readonly) BOOL isDefault; -@property(nonatomic, readonly) RTCIODeviceType type; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCIODeviceType) type; @property(nonatomic, copy, readonly) NSString *deviceId; @property(nonatomic, copy, readonly) NSString *name; diff --git a/sdk/objc/api/peerconnection/RTCIODevice.mm b/sdk/objc/api/peerconnection/RTCIODevice.mm index b3738f71fe..e496b22612 100644 --- a/sdk/objc/api/peerconnection/RTCIODevice.mm +++ b/sdk/objc/api/peerconnection/RTCIODevice.mm @@ -25,13 +25,13 @@ @implementation RTC_OBJC_TYPE(RTCIODevice) @synthesize deviceId = _deviceId; @synthesize name = _name; -+ (instancetype)defaultDeviceWithType: (RTCIODeviceType)type { ++ (instancetype)defaultDeviceWithType: (RTC_OBJC_TYPE(RTCIODeviceType))type { return [[self alloc] initWithType: type deviceId: kDefaultDeviceId name: @""]; } -- (instancetype)initWithType: (RTCIODeviceType)type +- (instancetype)initWithType: (RTC_OBJC_TYPE(RTCIODeviceType))type deviceId: (NSString *)deviceId name: (NSString* )name { if (self = [super init]) { 
diff --git a/sdk/objc/api/peerconnection/RTCIceServer.h b/sdk/objc/api/peerconnection/RTCIceServer.h index 7ddcbc1a1f..da7a6009f5 100644 --- a/sdk/objc/api/peerconnection/RTCIceServer.h +++ b/sdk/objc/api/peerconnection/RTCIceServer.h @@ -12,9 +12,9 @@ #import "RTCMacros.h" -typedef NS_ENUM(NSUInteger, RTCTlsCertPolicy) { - RTCTlsCertPolicySecure, - RTCTlsCertPolicyInsecureNoCheck +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCTlsCertPolicy)) { + RTC_OBJC_TYPE(RTCTlsCertPolicySecure), + RTC_OBJC_TYPE(RTCTlsCertPolicyInsecureNoCheck) }; NS_ASSUME_NONNULL_BEGIN @@ -34,7 +34,7 @@ RTC_OBJC_EXPORT /** * TLS certificate policy to use if this RTCIceServer object is a TURN server. */ -@property(nonatomic, readonly) RTCTlsCertPolicy tlsCertPolicy; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCTlsCertPolicy) tlsCertPolicy; /** If the URIs in `urls` only contain IP addresses, this field can be used @@ -72,7 +72,7 @@ RTC_OBJC_EXPORT - (instancetype)initWithURLStrings:(NSArray *)urlStrings username:(nullable NSString *)username credential:(nullable NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy; + tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy; /** * Initialize an RTCIceServer with its associated URLs, optional username, @@ -81,7 +81,7 @@ RTC_OBJC_EXPORT - (instancetype)initWithURLStrings:(NSArray *)urlStrings username:(nullable NSString *)username credential:(nullable NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy + tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy hostname:(nullable NSString *)hostname; /** @@ -91,7 +91,7 @@ RTC_OBJC_EXPORT - (instancetype)initWithURLStrings:(NSArray *)urlStrings username:(nullable NSString *)username credential:(nullable NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy + tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy hostname:(nullable NSString *)hostname tlsAlpnProtocols:(NSArray *)tlsAlpnProtocols; @@ -103,7 +103,7 @@ RTC_OBJC_EXPORT 
- (instancetype)initWithURLStrings:(NSArray *)urlStrings username:(nullable NSString *)username credential:(nullable NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy + tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy hostname:(nullable NSString *)hostname tlsAlpnProtocols:(nullable NSArray *)tlsAlpnProtocols tlsEllipticCurves:(nullable NSArray *)tlsEllipticCurves diff --git a/sdk/objc/api/peerconnection/RTCIceServer.mm b/sdk/objc/api/peerconnection/RTCIceServer.mm index 19a0a7e9e8..0e88d6ea29 100644 --- a/sdk/objc/api/peerconnection/RTCIceServer.mm +++ b/sdk/objc/api/peerconnection/RTCIceServer.mm @@ -12,7 +12,7 @@ #import "helpers/NSString+StdString.h" -@implementation RTC_OBJC_TYPE (RTCIceServer) +@implementation RTC_OBJC_TYPE(RTCIceServer) @synthesize urlStrings = _urlStrings; @synthesize username = _username; @@ -34,13 +34,13 @@ - (instancetype)initWithURLStrings:(NSArray *)urlStrings return [self initWithURLStrings:urlStrings username:username credential:credential - tlsCertPolicy:RTCTlsCertPolicySecure]; + tlsCertPolicy:RTC_OBJC_TYPE(RTCTlsCertPolicySecure)]; } - (instancetype)initWithURLStrings:(NSArray *)urlStrings username:(NSString *)username credential:(NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy { + tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy { return [self initWithURLStrings:urlStrings username:username credential:credential @@ -51,7 +51,7 @@ - (instancetype)initWithURLStrings:(NSArray *)urlStrings - (instancetype)initWithURLStrings:(NSArray *)urlStrings username:(NSString *)username credential:(NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy + tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy hostname:(NSString *)hostname { return [self initWithURLStrings:urlStrings username:username @@ -64,7 +64,7 @@ - (instancetype)initWithURLStrings:(NSArray *)urlStrings - (instancetype)initWithURLStrings:(NSArray *)urlStrings username:(NSString *)username 
credential:(NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy + tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy hostname:(NSString *)hostname tlsAlpnProtocols:(NSArray *)tlsAlpnProtocols { return [self initWithURLStrings:urlStrings @@ -79,7 +79,7 @@ - (instancetype)initWithURLStrings:(NSArray *)urlStrings - (instancetype)initWithURLStrings:(NSArray *)urlStrings username:(NSString *)username credential:(NSString *)credential - tlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy + tlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy hostname:(NSString *)hostname tlsAlpnProtocols:(NSArray *)tlsAlpnProtocols tlsEllipticCurves:(NSArray *)tlsEllipticCurves { @@ -109,11 +109,11 @@ - (NSString *)description { #pragma mark - Private -- (NSString *)stringForTlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy { +- (NSString *)stringForTlsCertPolicy:(RTC_OBJC_TYPE(RTCTlsCertPolicy))tlsCertPolicy { switch (tlsCertPolicy) { - case RTCTlsCertPolicySecure: + case RTC_OBJC_TYPE(RTCTlsCertPolicySecure): return @"RTCTlsCertPolicySecure"; - case RTCTlsCertPolicyInsecureNoCheck: + case RTC_OBJC_TYPE(RTCTlsCertPolicyInsecureNoCheck): return @"RTCTlsCertPolicyInsecureNoCheck"; } } @@ -140,11 +140,11 @@ - (NSString *)stringForTlsCertPolicy:(RTCTlsCertPolicy)tlsCertPolicy { }]; switch (_tlsCertPolicy) { - case RTCTlsCertPolicySecure: + case RTC_OBJC_TYPE(RTCTlsCertPolicySecure): iceServer.tls_cert_policy = webrtc::PeerConnectionInterface::kTlsCertPolicySecure; break; - case RTCTlsCertPolicyInsecureNoCheck: + case RTC_OBJC_TYPE(RTCTlsCertPolicyInsecureNoCheck): iceServer.tls_cert_policy = webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck; break; @@ -172,14 +172,14 @@ - (instancetype)initWithNativeServer: for (auto const &curve : nativeServer.tls_elliptic_curves) { [tlsEllipticCurves addObject:[NSString stringForStdString:curve]]; } - RTCTlsCertPolicy tlsCertPolicy; + RTC_OBJC_TYPE(RTCTlsCertPolicy) tlsCertPolicy; switch 
(nativeServer.tls_cert_policy) { case webrtc::PeerConnectionInterface::kTlsCertPolicySecure: - tlsCertPolicy = RTCTlsCertPolicySecure; + tlsCertPolicy = RTC_OBJC_TYPE(RTCTlsCertPolicySecure); break; case webrtc::PeerConnectionInterface::kTlsCertPolicyInsecureNoCheck: - tlsCertPolicy = RTCTlsCertPolicyInsecureNoCheck; + tlsCertPolicy = RTC_OBJC_TYPE(RTCTlsCertPolicyInsecureNoCheck); break; } diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints.h b/sdk/objc/api/peerconnection/RTCMediaConstraints.h index c5baf20c1d..af986ccd8f 100644 --- a/sdk/objc/api/peerconnection/RTCMediaConstraints.h +++ b/sdk/objc/api/peerconnection/RTCMediaConstraints.h @@ -18,17 +18,17 @@ NS_ASSUME_NONNULL_BEGIN /** The value for this key should be a base64 encoded string containing * the data from the serialized configuration proto. */ -RTC_EXTERN NSString *const kRTCMediaConstraintsAudioNetworkAdaptorConfig; +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsAudioNetworkAdaptorConfig); /** Constraint keys for generating offers and answers. */ -RTC_EXTERN NSString *const kRTCMediaConstraintsIceRestart; -RTC_EXTERN NSString *const kRTCMediaConstraintsOfferToReceiveAudio; -RTC_EXTERN NSString *const kRTCMediaConstraintsOfferToReceiveVideo; -RTC_EXTERN NSString *const kRTCMediaConstraintsVoiceActivityDetection; +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsIceRestart); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveAudio); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveVideo); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsVoiceActivityDetection); /** Constraint values for Boolean parameters. 
*/ -RTC_EXTERN NSString *const kRTCMediaConstraintsValueTrue; -RTC_EXTERN NSString *const kRTCMediaConstraintsValueFalse; +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsValueTrue); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsValueFalse); RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCMediaConstraints) : NSObject diff --git a/sdk/objc/api/peerconnection/RTCMediaConstraints.mm b/sdk/objc/api/peerconnection/RTCMediaConstraints.mm index 0f46e4b8fe..42fb1f548d 100644 --- a/sdk/objc/api/peerconnection/RTCMediaConstraints.mm +++ b/sdk/objc/api/peerconnection/RTCMediaConstraints.mm @@ -14,19 +14,19 @@ #include -NSString *const kRTCMediaConstraintsAudioNetworkAdaptorConfig = +NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsAudioNetworkAdaptorConfig) = @(webrtc::MediaConstraints::kAudioNetworkAdaptorConfig); -NSString *const kRTCMediaConstraintsIceRestart = @(webrtc::MediaConstraints::kIceRestart); -NSString *const kRTCMediaConstraintsOfferToReceiveAudio = +NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsIceRestart) = @(webrtc::MediaConstraints::kIceRestart); +NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveAudio) = @(webrtc::MediaConstraints::kOfferToReceiveAudio); -NSString *const kRTCMediaConstraintsOfferToReceiveVideo = +NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveVideo) = @(webrtc::MediaConstraints::kOfferToReceiveVideo); -NSString *const kRTCMediaConstraintsVoiceActivityDetection = +NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsVoiceActivityDetection) = @(webrtc::MediaConstraints::kVoiceActivityDetection); -NSString *const kRTCMediaConstraintsValueTrue = @(webrtc::MediaConstraints::kValueTrue); -NSString *const kRTCMediaConstraintsValueFalse = @(webrtc::MediaConstraints::kValueFalse); +NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsValueTrue) = @(webrtc::MediaConstraints::kValueTrue); +NSString *const RTC_CONSTANT_TYPE(RTCMediaConstraintsValueFalse) = 
@(webrtc::MediaConstraints::kValueFalse); @implementation RTC_OBJC_TYPE (RTCMediaConstraints) { NSDictionary *_mandatory; @@ -73,7 +73,7 @@ - (NSString *)description { NSString *value = [constraints objectForKey:key]; NSAssert([value isKindOfClass:[NSString class]], @"%@ is not an NSString.", value); - if ([kRTCMediaConstraintsAudioNetworkAdaptorConfig isEqualToString:key]) { + if ([RTC_CONSTANT_TYPE(RTCMediaConstraintsAudioNetworkAdaptorConfig) isEqualToString:key]) { // This value is base64 encoded. NSData *charData = [[NSData alloc] initWithBase64EncodedString:value options:0]; std::string configValue = diff --git a/sdk/objc/api/peerconnection/RTCMediaSource+Private.h b/sdk/objc/api/peerconnection/RTCMediaSource+Private.h index edda892e50..fb50d82778 100644 --- a/sdk/objc/api/peerconnection/RTCMediaSource+Private.h +++ b/sdk/objc/api/peerconnection/RTCMediaSource+Private.h @@ -16,9 +16,9 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCPeerConnectionFactory); -typedef NS_ENUM(NSInteger, RTCMediaSourceType) { - RTCMediaSourceTypeAudio, - RTCMediaSourceTypeVideo, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCMediaSourceType)) { + RTC_OBJC_TYPE(RTCMediaSourceTypeAudio), + RTC_OBJC_TYPE(RTCMediaSourceTypeVideo), }; @interface RTC_OBJC_TYPE (RTCMediaSource) @@ -29,13 +29,13 @@ typedef NS_ENUM(NSInteger, RTCMediaSourceType) { - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource - type:(RTCMediaSourceType)type NS_DESIGNATED_INITIALIZER; + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type NS_DESIGNATED_INITIALIZER; -+ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:(RTCSourceState)state; ++ (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState:(RTC_OBJC_TYPE(RTCSourceState))state; -+ (RTCSourceState)sourceStateForNativeState:(webrtc::MediaSourceInterface::SourceState)nativeState; ++ 
(RTC_OBJC_TYPE(RTCSourceState))sourceStateForNativeState:(webrtc::MediaSourceInterface::SourceState)nativeState; -+ (NSString *)stringForState:(RTCSourceState)state; ++ (NSString *)stringForState:(RTC_OBJC_TYPE(RTCSourceState))state; @end diff --git a/sdk/objc/api/peerconnection/RTCMediaSource.h b/sdk/objc/api/peerconnection/RTCMediaSource.h index ba19c2a352..f1fb4d1539 100644 --- a/sdk/objc/api/peerconnection/RTCMediaSource.h +++ b/sdk/objc/api/peerconnection/RTCMediaSource.h @@ -12,11 +12,11 @@ #import "RTCMacros.h" -typedef NS_ENUM(NSInteger, RTCSourceState) { - RTCSourceStateInitializing, - RTCSourceStateLive, - RTCSourceStateEnded, - RTCSourceStateMuted, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCSourceState)) { + RTC_OBJC_TYPE(RTCSourceStateInitializing), + RTC_OBJC_TYPE(RTCSourceStateLive), + RTC_OBJC_TYPE(RTCSourceStateEnded), + RTC_OBJC_TYPE(RTCSourceStateMuted), }; NS_ASSUME_NONNULL_BEGIN @@ -25,7 +25,7 @@ RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCMediaSource) : NSObject /** The current state of the RTCMediaSource. 
*/ -@property(nonatomic, readonly) RTCSourceState state; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSourceState) state; - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCMediaSource.mm b/sdk/objc/api/peerconnection/RTCMediaSource.mm index 61472a782a..10840438f8 100644 --- a/sdk/objc/api/peerconnection/RTCMediaSource.mm +++ b/sdk/objc/api/peerconnection/RTCMediaSource.mm @@ -14,14 +14,14 @@ @implementation RTC_OBJC_TYPE (RTCMediaSource) { RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; - RTCMediaSourceType _type; + RTC_OBJC_TYPE(RTCMediaSourceType) _type; } @synthesize nativeMediaSource = _nativeMediaSource; - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource - type:(RTCMediaSourceType)type { + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type { RTC_DCHECK(factory); RTC_DCHECK(nativeMediaSource); if (self = [super init]) { @@ -32,49 +32,49 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto return self; } -- (RTCSourceState)state { +- (RTC_OBJC_TYPE(RTCSourceState))state { return [[self class] sourceStateForNativeState:_nativeMediaSource->state()]; } #pragma mark - Private + (webrtc::MediaSourceInterface::SourceState)nativeSourceStateForState: - (RTCSourceState)state { + (RTC_OBJC_TYPE(RTCSourceState))state { switch (state) { - case RTCSourceStateInitializing: + case RTC_OBJC_TYPE(RTCSourceStateInitializing): return webrtc::MediaSourceInterface::kInitializing; - case RTCSourceStateLive: + case RTC_OBJC_TYPE(RTCSourceStateLive): return webrtc::MediaSourceInterface::kLive; - case RTCSourceStateEnded: + case RTC_OBJC_TYPE(RTCSourceStateEnded): return webrtc::MediaSourceInterface::kEnded; - case RTCSourceStateMuted: + case RTC_OBJC_TYPE(RTCSourceStateMuted): return webrtc::MediaSourceInterface::kMuted; } } -+ (RTCSourceState)sourceStateForNativeState: ++ (RTC_OBJC_TYPE(RTCSourceState))sourceStateForNativeState: 
(webrtc::MediaSourceInterface::SourceState)nativeState { switch (nativeState) { case webrtc::MediaSourceInterface::kInitializing: - return RTCSourceStateInitializing; + return RTC_OBJC_TYPE(RTCSourceStateInitializing); case webrtc::MediaSourceInterface::kLive: - return RTCSourceStateLive; + return RTC_OBJC_TYPE(RTCSourceStateLive); case webrtc::MediaSourceInterface::kEnded: - return RTCSourceStateEnded; + return RTC_OBJC_TYPE(RTCSourceStateEnded); case webrtc::MediaSourceInterface::kMuted: - return RTCSourceStateMuted; + return RTC_OBJC_TYPE(RTCSourceStateMuted); } } -+ (NSString *)stringForState:(RTCSourceState)state { ++ (NSString *)stringForState:(RTC_OBJC_TYPE(RTCSourceState))state { switch (state) { - case RTCSourceStateInitializing: + case RTC_OBJC_TYPE(RTCSourceStateInitializing): return @"Initializing"; - case RTCSourceStateLive: + case RTC_OBJC_TYPE(RTCSourceStateLive): return @"Live"; - case RTCSourceStateEnded: + case RTC_OBJC_TYPE(RTCSourceStateEnded): return @"Ended"; - case RTCSourceStateMuted: + case RTC_OBJC_TYPE(RTCSourceStateMuted): return @"Muted"; } } diff --git a/sdk/objc/api/peerconnection/RTCMediaStream.mm b/sdk/objc/api/peerconnection/RTCMediaStream.mm index 0018dd6945..8375a9c879 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStream.mm +++ b/sdk/objc/api/peerconnection/RTCMediaStream.mm @@ -132,7 +132,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto _nativeMediaStream = nativeMediaStream; for (auto &track : audioTracks) { - RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeAudio; + RTC_OBJC_TYPE(RTCMediaStreamTrackType) type = RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio); RTC_OBJC_TYPE(RTCAudioTrack) *audioTrack = [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:_factory nativeTrack:track @@ -141,7 +141,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto } for (auto &track : videoTracks) { - RTCMediaStreamTrackType type = RTCMediaStreamTrackTypeVideo; + 
RTC_OBJC_TYPE(RTCMediaStreamTrackType) type = RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo); RTC_OBJC_TYPE(RTCVideoTrack) *videoTrack = [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:_factory nativeTrack:track diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h b/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h index df45c79f44..f8c2261fcd 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h +++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack+Private.h @@ -12,9 +12,9 @@ #include "api/media_stream_interface.h" -typedef NS_ENUM(NSInteger, RTCMediaStreamTrackType) { - RTCMediaStreamTrackTypeAudio, - RTCMediaStreamTrackTypeVideo, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCMediaStreamTrackType)) { + RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio), + RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo), }; NS_ASSUME_NONNULL_BEGIN @@ -38,7 +38,7 @@ NS_ASSUME_NONNULL_BEGIN */ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack - type:(RTCMediaStreamTrackType)type NS_DESIGNATED_INITIALIZER; + type:(RTC_OBJC_TYPE(RTCMediaStreamTrackType))type NS_DESIGNATED_INITIALIZER; - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack; @@ -46,12 +46,12 @@ NS_ASSUME_NONNULL_BEGIN - (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track; + (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState: - (RTCMediaStreamTrackState)state; + (RTC_OBJC_TYPE(RTCMediaStreamTrackState))state; -+ (RTCMediaStreamTrackState)trackStateForNativeState: ++ (RTC_OBJC_TYPE(RTCMediaStreamTrackState))trackStateForNativeState: (webrtc::MediaStreamTrackInterface::TrackState)nativeState; -+ (NSString *)stringForState:(RTCMediaStreamTrackState)state; ++ (NSString *)stringForState:(RTC_OBJC_TYPE(RTCMediaStreamTrackState))state; + (RTC_OBJC_TYPE(RTCMediaStreamTrack) *) 
mediaTrackForNativeTrack:(rtc::scoped_refptr)nativeTrack diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h index 2200122ccd..8a2df6e8a3 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h +++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.h @@ -15,15 +15,15 @@ /** * Represents the state of the track. This exposes the same states in C++. */ -typedef NS_ENUM(NSInteger, RTCMediaStreamTrackState) { - RTCMediaStreamTrackStateLive, - RTCMediaStreamTrackStateEnded +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCMediaStreamTrackState)) { + RTC_OBJC_TYPE(RTCMediaStreamTrackStateLive), + RTC_OBJC_TYPE(RTCMediaStreamTrackStateEnded) }; NS_ASSUME_NONNULL_BEGIN -RTC_EXTERN NSString *const kRTCMediaStreamTrackKindAudio; -RTC_EXTERN NSString *const kRTCMediaStreamTrackKindVideo; +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio); +RTC_EXTERN NSString *const RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo); RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCMediaStreamTrack) : NSObject @@ -41,7 +41,7 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) BOOL isEnabled; /** The state of the track. 
*/ -@property(nonatomic, readonly) RTCMediaStreamTrackState readyState; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCMediaStreamTrackState) readyState; - (instancetype)init NS_UNAVAILABLE; diff --git a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm index f1e128ca60..c10e5a0dac 100644 --- a/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm +++ b/sdk/objc/api/peerconnection/RTCMediaStreamTrack.mm @@ -14,15 +14,15 @@ #import "helpers/NSString+StdString.h" -NSString * const kRTCMediaStreamTrackKindAudio = +NSString * const RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio) = @(webrtc::MediaStreamTrackInterface::kAudioKind); -NSString * const kRTCMediaStreamTrackKindVideo = +NSString * const RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo) = @(webrtc::MediaStreamTrackInterface::kVideoKind); -@implementation RTC_OBJC_TYPE (RTCMediaStreamTrack) { +@implementation RTC_OBJC_TYPE(RTCMediaStreamTrack) { RTC_OBJC_TYPE(RTCPeerConnectionFactory) * _factory; rtc::scoped_refptr _nativeTrack; - RTCMediaStreamTrackType _type; + RTC_OBJC_TYPE(RTCMediaStreamTrackType) _type; } - (NSString *)kind { @@ -41,7 +41,7 @@ - (void)setIsEnabled:(BOOL)isEnabled { _nativeTrack->set_enabled(isEnabled); } -- (RTCMediaStreamTrackState)readyState { +- (RTC_OBJC_TYPE(RTCMediaStreamTrackState))readyState { return [[self class] trackStateForNativeState:_nativeTrack->state()]; } @@ -78,7 +78,7 @@ - (NSUInteger)hash { - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack:(rtc::scoped_refptr)nativeTrack - type:(RTCMediaStreamTrackType)type { + type:(RTC_OBJC_TYPE(RTCMediaStreamTrackType))type { NSParameterAssert(nativeTrack); NSParameterAssert(factory); if (self = [super init]) { @@ -94,11 +94,11 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto NSParameterAssert(nativeTrack); if (nativeTrack->kind() == std::string(webrtc::MediaStreamTrackInterface::kAudioKind)) { - return 
[self initWithFactory:factory nativeTrack:nativeTrack type:RTCMediaStreamTrackTypeAudio]; + return [self initWithFactory:factory nativeTrack:nativeTrack type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio)]; } if (nativeTrack->kind() == std::string(webrtc::MediaStreamTrackInterface::kVideoKind)) { - return [self initWithFactory:factory nativeTrack:nativeTrack type:RTCMediaStreamTrackTypeVideo]; + return [self initWithFactory:factory nativeTrack:nativeTrack type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo)]; } return nil; } @@ -111,30 +111,30 @@ - (BOOL)isEqualToTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)track { } + (webrtc::MediaStreamTrackInterface::TrackState)nativeTrackStateForState: - (RTCMediaStreamTrackState)state { + (RTC_OBJC_TYPE(RTCMediaStreamTrackState))state { switch (state) { - case RTCMediaStreamTrackStateLive: + case RTC_OBJC_TYPE(RTCMediaStreamTrackStateLive): return webrtc::MediaStreamTrackInterface::kLive; - case RTCMediaStreamTrackStateEnded: + case RTC_OBJC_TYPE(RTCMediaStreamTrackStateEnded): return webrtc::MediaStreamTrackInterface::kEnded; } } -+ (RTCMediaStreamTrackState)trackStateForNativeState: ++ (RTC_OBJC_TYPE(RTCMediaStreamTrackState))trackStateForNativeState: (webrtc::MediaStreamTrackInterface::TrackState)nativeState { switch (nativeState) { case webrtc::MediaStreamTrackInterface::kLive: - return RTCMediaStreamTrackStateLive; + return RTC_OBJC_TYPE(RTCMediaStreamTrackStateLive); case webrtc::MediaStreamTrackInterface::kEnded: - return RTCMediaStreamTrackStateEnded; + return RTC_OBJC_TYPE(RTCMediaStreamTrackStateEnded); } } -+ (NSString *)stringForState:(RTCMediaStreamTrackState)state { ++ (NSString *)stringForState:(RTC_OBJC_TYPE(RTCMediaStreamTrackState))state { switch (state) { - case RTCMediaStreamTrackStateLive: + case RTC_OBJC_TYPE(RTCMediaStreamTrackStateLive): return @"Live"; - case RTCMediaStreamTrackStateEnded: + case RTC_OBJC_TYPE(RTCMediaStreamTrackStateEnded): return @"Ended"; } } @@ -147,11 +147,11 @@ + (NSString 
*)stringForState:(RTCMediaStreamTrackState)state { if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kAudioKind) { return [[RTC_OBJC_TYPE(RTCAudioTrack) alloc] initWithFactory:factory nativeTrack:nativeTrack - type:RTCMediaStreamTrackTypeAudio]; + type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio)]; } else if (nativeTrack->kind() == webrtc::MediaStreamTrackInterface::kVideoKind) { return [[RTC_OBJC_TYPE(RTCVideoTrack) alloc] initWithFactory:factory nativeTrack:nativeTrack - type:RTCMediaStreamTrackTypeVideo]; + type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo)]; } else { return [[RTC_OBJC_TYPE(RTCMediaStreamTrack) alloc] initWithFactory:factory nativeTrack:nativeTrack]; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h b/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h index 9714f504ac..6f764d8e09 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Private.h @@ -104,39 +104,39 @@ class PeerConnectionDelegateAdapter : public PeerConnectionObserver { NS_DESIGNATED_INITIALIZER; + (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState: - (RTCSignalingState)state; + (RTC_OBJC_TYPE(RTCSignalingState))state; -+ (RTCSignalingState)signalingStateForNativeState: ++ (RTC_OBJC_TYPE(RTCSignalingState))signalingStateForNativeState: (webrtc::PeerConnectionInterface::SignalingState)nativeState; -+ (NSString *)stringForSignalingState:(RTCSignalingState)state; ++ (NSString *)stringForSignalingState:(RTC_OBJC_TYPE(RTCSignalingState))state; + (webrtc::PeerConnectionInterface::IceConnectionState)nativeIceConnectionStateForState: - (RTCIceConnectionState)state; + (RTC_OBJC_TYPE(RTCIceConnectionState))state; + (webrtc::PeerConnectionInterface::PeerConnectionState)nativeConnectionStateForState: - (RTCPeerConnectionState)state; + (RTC_OBJC_TYPE(RTCPeerConnectionState))state; -+ (RTCIceConnectionState)iceConnectionStateForNativeState: ++ 
(RTC_OBJC_TYPE(RTCIceConnectionState))iceConnectionStateForNativeState: (webrtc::PeerConnectionInterface::IceConnectionState)nativeState; -+ (RTCPeerConnectionState)connectionStateForNativeState: ++ (RTC_OBJC_TYPE(RTCPeerConnectionState))connectionStateForNativeState: (webrtc::PeerConnectionInterface::PeerConnectionState)nativeState; -+ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state; ++ (NSString *)stringForIceConnectionState:(RTC_OBJC_TYPE(RTCIceConnectionState))state; -+ (NSString *)stringForConnectionState:(RTCPeerConnectionState)state; ++ (NSString *)stringForConnectionState:(RTC_OBJC_TYPE(RTCPeerConnectionState))state; + (webrtc::PeerConnectionInterface::IceGatheringState)nativeIceGatheringStateForState: - (RTCIceGatheringState)state; + (RTC_OBJC_TYPE(RTCIceGatheringState))state; -+ (RTCIceGatheringState)iceGatheringStateForNativeState: ++ (RTC_OBJC_TYPE(RTCIceGatheringState))iceGatheringStateForNativeState: (webrtc::PeerConnectionInterface::IceGatheringState)nativeState; -+ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state; ++ (NSString *)stringForIceGatheringState:(RTC_OBJC_TYPE(RTCIceGatheringState))state; + (webrtc::PeerConnectionInterface::StatsOutputLevel)nativeStatsOutputLevelForLevel: - (RTCStatsOutputLevel)level; + (RTC_OBJC_TYPE(RTCStatsOutputLevel))level; @end diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm index f8d38143f3..a6f19b7025 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnection+Stats.mm @@ -88,7 +88,7 @@ - (void)statisticsWithCompletionHandler:(RTCStatisticsCompletionHandler)completi } - (void)statsForTrack:(RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack - statsOutputLevel:(RTCStatsOutputLevel)statsOutputLevel + statsOutputLevel:(RTC_OBJC_TYPE(RTCStatsOutputLevel))statsOutputLevel completionHandler: (void (^)(NSArray *stats))completionHandler { 
rtc::scoped_refptr observer = diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.h b/sdk/objc/api/peerconnection/RTCPeerConnection.h index 466e053492..2820128635 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnection.h @@ -29,57 +29,57 @@ @class RTC_OBJC_TYPE(RTCStatisticsReport); @class RTC_OBJC_TYPE(RTCLegacyStatsReport); -typedef NS_ENUM(NSInteger, RTCRtpMediaType); +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCRtpMediaType)); NS_ASSUME_NONNULL_BEGIN -extern NSString *const kRTCPeerConnectionErrorDomain; -extern int const kRTCSessionDescriptionErrorCode; +extern NSString *const RTC_CONSTANT_TYPE(RTCPeerConnectionErrorDomain); +extern int const RTC_CONSTANT_TYPE(RTCSessionDescriptionErrorCode); /** Represents the signaling state of the peer connection. */ -typedef NS_ENUM(NSInteger, RTCSignalingState) { - RTCSignalingStateStable, - RTCSignalingStateHaveLocalOffer, - RTCSignalingStateHaveLocalPrAnswer, - RTCSignalingStateHaveRemoteOffer, - RTCSignalingStateHaveRemotePrAnswer, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCSignalingState)) { + RTC_OBJC_TYPE(RTCSignalingStateStable), + RTC_OBJC_TYPE(RTCSignalingStateHaveLocalOffer), + RTC_OBJC_TYPE(RTCSignalingStateHaveLocalPrAnswer), + RTC_OBJC_TYPE(RTCSignalingStateHaveRemoteOffer), + RTC_OBJC_TYPE(RTCSignalingStateHaveRemotePrAnswer), // Not an actual state, represents the total number of states. - RTCSignalingStateClosed, + RTC_OBJC_TYPE(RTCSignalingStateClosed), }; /** Represents the ice connection state of the peer connection. 
*/ -typedef NS_ENUM(NSInteger, RTCIceConnectionState) { - RTCIceConnectionStateNew, - RTCIceConnectionStateChecking, - RTCIceConnectionStateConnected, - RTCIceConnectionStateCompleted, - RTCIceConnectionStateFailed, - RTCIceConnectionStateDisconnected, - RTCIceConnectionStateClosed, - RTCIceConnectionStateCount, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCIceConnectionState)) { + RTC_OBJC_TYPE(RTCIceConnectionStateNew), + RTC_OBJC_TYPE(RTCIceConnectionStateChecking), + RTC_OBJC_TYPE(RTCIceConnectionStateConnected), + RTC_OBJC_TYPE(RTCIceConnectionStateCompleted), + RTC_OBJC_TYPE(RTCIceConnectionStateFailed), + RTC_OBJC_TYPE(RTCIceConnectionStateDisconnected), + RTC_OBJC_TYPE(RTCIceConnectionStateClosed), + RTC_OBJC_TYPE(RTCIceConnectionStateCount), }; /** Represents the combined ice+dtls connection state of the peer connection. */ -typedef NS_ENUM(NSInteger, RTCPeerConnectionState) { - RTCPeerConnectionStateNew, - RTCPeerConnectionStateConnecting, - RTCPeerConnectionStateConnected, - RTCPeerConnectionStateDisconnected, - RTCPeerConnectionStateFailed, - RTCPeerConnectionStateClosed, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCPeerConnectionState)) { + RTC_OBJC_TYPE(RTCPeerConnectionStateNew), + RTC_OBJC_TYPE(RTCPeerConnectionStateConnecting), + RTC_OBJC_TYPE(RTCPeerConnectionStateConnected), + RTC_OBJC_TYPE(RTCPeerConnectionStateDisconnected), + RTC_OBJC_TYPE(RTCPeerConnectionStateFailed), + RTC_OBJC_TYPE(RTCPeerConnectionStateClosed), }; /** Represents the ice gathering state of the peer connection. */ -typedef NS_ENUM(NSInteger, RTCIceGatheringState) { - RTCIceGatheringStateNew, - RTCIceGatheringStateGathering, - RTCIceGatheringStateComplete, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCIceGatheringState)) { + RTC_OBJC_TYPE(RTCIceGatheringStateNew), + RTC_OBJC_TYPE(RTCIceGatheringStateGathering), + RTC_OBJC_TYPE(RTCIceGatheringStateComplete), }; /** Represents the stats output level. 
*/ -typedef NS_ENUM(NSInteger, RTCStatsOutputLevel) { - RTCStatsOutputLevelStandard, - RTCStatsOutputLevelDebug, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCStatsOutputLevel)) { + RTC_OBJC_TYPE(RTCStatsOutputLevelStandard), + RTC_OBJC_TYPE(RTCStatsOutputLevelDebug), }; typedef void (^RTCCreateSessionDescriptionCompletionHandler)( @@ -96,7 +96,7 @@ RTC_OBJC_EXPORT /** Called when the SignalingState changed. */ - (void)peerConnection : (RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didChangeSignalingState - : (RTCSignalingState)stateChanged; + : (RTC_OBJC_TYPE(RTCSignalingState))stateChanged; /** Called when media is received on a new stream from remote peer. */ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection @@ -113,11 +113,11 @@ RTC_OBJC_EXPORT /** Called any time the IceConnectionState changes. */ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection - didChangeIceConnectionState:(RTCIceConnectionState)newState; + didChangeIceConnectionState:(RTC_OBJC_TYPE(RTCIceConnectionState))newState; /** Called any time the IceGatheringState changes. */ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection - didChangeIceGatheringState:(RTCIceGatheringState)newState; + didChangeIceGatheringState:(RTC_OBJC_TYPE(RTCIceGatheringState))newState; /** New ice candidate has been found. */ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection @@ -139,11 +139,11 @@ RTC_OBJC_EXPORT /** Called any time the IceConnectionState changes following standardized * transition. */ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection - didChangeStandardizedIceConnectionState:(RTCIceConnectionState)newState; + didChangeStandardizedIceConnectionState:(RTC_OBJC_TYPE(RTCIceConnectionState))newState; /** Called any time the PeerConnectionState changes. 
*/ - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection - didChangeConnectionState:(RTCPeerConnectionState)newState; + didChangeConnectionState:(RTC_OBJC_TYPE(RTCPeerConnectionState))newState; - (void)peerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)peerConnection didStartReceivingOnTransceiver:(RTC_OBJC_TYPE(RTCRtpTransceiver) *)transceiver; @@ -183,10 +183,10 @@ RTC_OBJC_EXPORT @property(nonatomic, readonly) NSArray *localStreams; @property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * localDescription; @property(nonatomic, readonly, nullable) RTC_OBJC_TYPE(RTCSessionDescription) * remoteDescription; -@property(nonatomic, readonly) RTCSignalingState signalingState; -@property(nonatomic, readonly) RTCIceConnectionState iceConnectionState; -@property(nonatomic, readonly) RTCPeerConnectionState connectionState; -@property(nonatomic, readonly) RTCIceGatheringState iceGatheringState; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSignalingState) signalingState; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCIceConnectionState) iceConnectionState; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCPeerConnectionState) connectionState; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCIceGatheringState) iceGatheringState; @property(nonatomic, readonly, copy) RTC_OBJC_TYPE(RTCConfiguration) * configuration; /** Gets all RTCRtpSenders associated with this peer connection. @@ -295,9 +295,9 @@ RTC_OBJC_EXPORT /** Adds a transceiver with the given kind. Can either be RTCRtpMediaTypeAudio * or RTCRtpMediaTypeVideo. 
*/ -- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType; +- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType; - (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *) - addTransceiverOfType:(RTCRtpMediaType)mediaType + addTransceiverOfType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init; /** Tells the PeerConnection that ICE should be restarted. This triggers a need @@ -374,7 +374,7 @@ typedef void (^RTCStatisticsCompletionHandler)(RTC_OBJC_TYPE(RTCStatisticsReport */ - (void)statsForTrack : (nullable RTC_OBJC_TYPE(RTCMediaStreamTrack) *)mediaStreamTrack statsOutputLevel - : (RTCStatsOutputLevel)statsOutputLevel completionHandler + : (RTC_OBJC_TYPE(RTCStatsOutputLevel))statsOutputLevel completionHandler : (nullable void (^)(NSArray *stats))completionHandler; /** Gather statistic through the v2 statistics API. */ diff --git a/sdk/objc/api/peerconnection/RTCPeerConnection.mm b/sdk/objc/api/peerconnection/RTCPeerConnection.mm index e55c8a4a3e..0fdb3df45e 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnection.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnection.mm @@ -36,8 +36,8 @@ #include "rtc_base/numerics/safe_conversions.h" #include "sdk/objc/native/api/ssl_certificate_verifier.h" -NSString *const kRTCPeerConnectionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCPeerConnection)"; -int const kRTCPeerConnnectionSessionDescriptionError = -1; +NSString *const RTC_CONSTANT_TYPE(RTCPeerConnectionErrorDomain) = @"org.webrtc.RTC_OBJC_TYPE(RTCPeerConnection)"; +int const RTC_CONSTANT_TYPE(RTCPeerConnnectionSessionDescriptionError) = -1; namespace { @@ -64,8 +64,8 @@ void OnCompelete(webrtc::RTCError error) { } else { // TODO(hta): Add handling of error.type() NSString *str = [NSString stringForStdString:error.message()]; - NSError *err = [NSError errorWithDomain:kRTCPeerConnectionErrorDomain - 
code:kRTCPeerConnnectionSessionDescriptionError + NSError *err = [NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCPeerConnectionErrorDomain) + code:RTC_CONSTANT_TYPE(RTCPeerConnnectionSessionDescriptionError) userInfo:@{NSLocalizedDescriptionKey : str}]; completion_handler_(err); } @@ -103,8 +103,8 @@ void OnFailure(RTCError error) override { // TODO(hta): Add handling of error.type() NSString *str = [NSString stringForStdString:error.message()]; NSError* err = - [NSError errorWithDomain:kRTCPeerConnectionErrorDomain - code:kRTCPeerConnnectionSessionDescriptionError + [NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCPeerConnectionErrorDomain) + code:RTC_CONSTANT_TYPE(RTCPeerConnnectionSessionDescriptionError) userInfo:@{ NSLocalizedDescriptionKey : str }]; completion_handler_(nil, err); completion_handler_ = nil; @@ -126,7 +126,7 @@ void OnFailure(RTCError error) override { void PeerConnectionDelegateAdapter::OnSignalingChange( PeerConnectionInterface::SignalingState new_state) { - RTCSignalingState state = + RTC_OBJC_TYPE(RTCSignalingState) state = [[RTC_OBJC_TYPE(RTCPeerConnection) class] signalingStateForNativeState:new_state]; RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; [peer_connection.delegate peerConnection:peer_connection @@ -184,7 +184,7 @@ void OnFailure(RTCError error) override { void PeerConnectionDelegateAdapter::OnIceConnectionChange( PeerConnectionInterface::IceConnectionState new_state) { - RTCIceConnectionState state = + RTC_OBJC_TYPE(RTCIceConnectionState) state = [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state]; [peer_connection_.delegate peerConnection:peer_connection_ didChangeIceConnectionState:state]; } @@ -193,7 +193,7 @@ void OnFailure(RTCError error) override { PeerConnectionInterface::IceConnectionState new_state) { if ([peer_connection_.delegate respondsToSelector:@selector(peerConnection:didChangeStandardizedIceConnectionState:)]) { - RTCIceConnectionState state = + 
RTC_OBJC_TYPE(RTCIceConnectionState) state = [RTC_OBJC_TYPE(RTCPeerConnection) iceConnectionStateForNativeState:new_state]; [peer_connection_.delegate peerConnection:peer_connection_ didChangeStandardizedIceConnectionState:state]; @@ -204,7 +204,7 @@ void OnFailure(RTCError error) override { PeerConnectionInterface::PeerConnectionState new_state) { if ([peer_connection_.delegate respondsToSelector:@selector(peerConnection:didChangeConnectionState:)]) { - RTCPeerConnectionState state = + RTC_OBJC_TYPE(RTCPeerConnectionState) state = [RTC_OBJC_TYPE(RTCPeerConnection) connectionStateForNativeState:new_state]; [peer_connection_.delegate peerConnection:peer_connection_ didChangeConnectionState:state]; } @@ -212,7 +212,7 @@ void OnFailure(RTCError error) override { void PeerConnectionDelegateAdapter::OnIceGatheringChange( PeerConnectionInterface::IceGatheringState new_state) { - RTCIceGatheringState state = + RTC_OBJC_TYPE(RTCIceGatheringState) state = [[RTC_OBJC_TYPE(RTCPeerConnection) class] iceGatheringStateForNativeState:new_state]; RTC_OBJC_TYPE(RTCPeerConnection) *peer_connection = peer_connection_; [peer_connection.delegate peerConnection:peer_connection @@ -409,23 +409,20 @@ - (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory }); } -- (RTCSignalingState)signalingState { - return [[self class] - signalingStateForNativeState:_peerConnection->signaling_state()]; +- (RTC_OBJC_TYPE(RTCSignalingState))signalingState { + return [[self class] signalingStateForNativeState:self.nativePeerConnection->signaling_state()]; } -- (RTCIceConnectionState)iceConnectionState { - return [[self class] iceConnectionStateForNativeState: - _peerConnection->ice_connection_state()]; +- (RTC_OBJC_TYPE(RTCIceConnectionState))iceConnectionState { + return [[self class] iceConnectionStateForNativeState:self.nativePeerConnection->ice_connection_state()]; } -- (RTCPeerConnectionState)connectionState { - return [[self class] 
connectionStateForNativeState:_peerConnection->peer_connection_state()]; +- (RTC_OBJC_TYPE(RTCPeerConnectionState))connectionState { + return [[self class] connectionStateForNativeState:self.nativePeerConnection->peer_connection_state()]; } -- (RTCIceGatheringState)iceGatheringState { - return [[self class] iceGatheringStateForNativeState: - _peerConnection->ice_gathering_state()]; +- (RTC_OBJC_TYPE(RTCIceGatheringState))iceGatheringState { + return [[self class] iceGatheringStateForNativeState:self.nativePeerConnection->ice_gathering_state()]; } - (BOOL)setConfiguration:(RTC_OBJC_TYPE(RTCConfiguration) *)configuration { @@ -463,7 +460,7 @@ - (void)addIceCandidate:(RTC_OBJC_TYPE(RTCIceCandidate) *)candidate completionHandler(nil); } else { NSString *str = [NSString stringForStdString:error.message()]; - NSError *err = [NSError errorWithDomain:kRTCPeerConnectionErrorDomain + NSError *err = [NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCPeerConnectionErrorDomain) code:static_cast(error.type()) userInfo:@{NSLocalizedDescriptionKey : str}]; completionHandler(err); @@ -544,13 +541,12 @@ - (BOOL)removeTrack:(RTC_OBJC_TYPE(RTCRtpSender) *)sender { nativeRtpTransceiver:nativeTransceiverOrError.MoveValue()]; } -- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTCRtpMediaType)mediaType { - return [self addTransceiverOfType:mediaType - init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]]; +- (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *)addTransceiverOfType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType { + return [self addTransceiverOfType:mediaType init:[[RTC_OBJC_TYPE(RTCRtpTransceiverInit) alloc] init]]; } - (nullable RTC_OBJC_TYPE(RTCRtpTransceiver) *) - addTransceiverOfType:(RTCRtpMediaType)mediaType + addTransceiverOfType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType init:(RTC_OBJC_TYPE(RTCRtpTransceiverInit) *)init { webrtc::RTCErrorOr> nativeTransceiverOrError = _peerConnection->AddTransceiver( @@ -712,217 +708,217 @@ - (void)stopRtcEventLog { #pragma 
mark - Private + (webrtc::PeerConnectionInterface::SignalingState)nativeSignalingStateForState: - (RTCSignalingState)state { + (RTC_OBJC_TYPE(RTCSignalingState))state { switch (state) { - case RTCSignalingStateStable: + case RTC_OBJC_TYPE(RTCSignalingStateStable): return webrtc::PeerConnectionInterface::kStable; - case RTCSignalingStateHaveLocalOffer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveLocalOffer): return webrtc::PeerConnectionInterface::kHaveLocalOffer; - case RTCSignalingStateHaveLocalPrAnswer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveLocalPrAnswer): return webrtc::PeerConnectionInterface::kHaveLocalPrAnswer; - case RTCSignalingStateHaveRemoteOffer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveRemoteOffer): return webrtc::PeerConnectionInterface::kHaveRemoteOffer; - case RTCSignalingStateHaveRemotePrAnswer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveRemotePrAnswer): return webrtc::PeerConnectionInterface::kHaveRemotePrAnswer; - case RTCSignalingStateClosed: + case RTC_OBJC_TYPE(RTCSignalingStateClosed): return webrtc::PeerConnectionInterface::kClosed; } } -+ (RTCSignalingState)signalingStateForNativeState: ++ (RTC_OBJC_TYPE(RTCSignalingState))signalingStateForNativeState: (webrtc::PeerConnectionInterface::SignalingState)nativeState { switch (nativeState) { case webrtc::PeerConnectionInterface::kStable: - return RTCSignalingStateStable; + return RTC_OBJC_TYPE(RTCSignalingStateStable); case webrtc::PeerConnectionInterface::kHaveLocalOffer: - return RTCSignalingStateHaveLocalOffer; + return RTC_OBJC_TYPE(RTCSignalingStateHaveLocalOffer); case webrtc::PeerConnectionInterface::kHaveLocalPrAnswer: - return RTCSignalingStateHaveLocalPrAnswer; + return RTC_OBJC_TYPE(RTCSignalingStateHaveLocalPrAnswer); case webrtc::PeerConnectionInterface::kHaveRemoteOffer: - return RTCSignalingStateHaveRemoteOffer; + return RTC_OBJC_TYPE(RTCSignalingStateHaveRemoteOffer); case webrtc::PeerConnectionInterface::kHaveRemotePrAnswer: - return RTCSignalingStateHaveRemotePrAnswer; + 
return RTC_OBJC_TYPE(RTCSignalingStateHaveRemotePrAnswer); case webrtc::PeerConnectionInterface::kClosed: - return RTCSignalingStateClosed; + return RTC_OBJC_TYPE(RTCSignalingStateClosed); } } -+ (NSString *)stringForSignalingState:(RTCSignalingState)state { ++ (NSString *)stringForSignalingState:(RTC_OBJC_TYPE(RTCSignalingState))state { switch (state) { - case RTCSignalingStateStable: + case RTC_OBJC_TYPE(RTCSignalingStateStable): return @"STABLE"; - case RTCSignalingStateHaveLocalOffer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveLocalOffer): return @"HAVE_LOCAL_OFFER"; - case RTCSignalingStateHaveLocalPrAnswer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveLocalPrAnswer): return @"HAVE_LOCAL_PRANSWER"; - case RTCSignalingStateHaveRemoteOffer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveRemoteOffer): return @"HAVE_REMOTE_OFFER"; - case RTCSignalingStateHaveRemotePrAnswer: + case RTC_OBJC_TYPE(RTCSignalingStateHaveRemotePrAnswer): return @"HAVE_REMOTE_PRANSWER"; - case RTCSignalingStateClosed: + case RTC_OBJC_TYPE(RTCSignalingStateClosed): return @"CLOSED"; } } + (webrtc::PeerConnectionInterface::PeerConnectionState)nativeConnectionStateForState: - (RTCPeerConnectionState)state { + (RTC_OBJC_TYPE(RTCPeerConnectionState))state { switch (state) { - case RTCPeerConnectionStateNew: + case RTC_OBJC_TYPE(RTCPeerConnectionStateNew): return webrtc::PeerConnectionInterface::PeerConnectionState::kNew; - case RTCPeerConnectionStateConnecting: + case RTC_OBJC_TYPE(RTCPeerConnectionStateConnecting): return webrtc::PeerConnectionInterface::PeerConnectionState::kConnecting; - case RTCPeerConnectionStateConnected: + case RTC_OBJC_TYPE(RTCPeerConnectionStateConnected): return webrtc::PeerConnectionInterface::PeerConnectionState::kConnected; - case RTCPeerConnectionStateFailed: + case RTC_OBJC_TYPE(RTCPeerConnectionStateFailed): return webrtc::PeerConnectionInterface::PeerConnectionState::kFailed; - case RTCPeerConnectionStateDisconnected: + case 
RTC_OBJC_TYPE(RTCPeerConnectionStateDisconnected): return webrtc::PeerConnectionInterface::PeerConnectionState::kDisconnected; - case RTCPeerConnectionStateClosed: + case RTC_OBJC_TYPE(RTCPeerConnectionStateClosed): return webrtc::PeerConnectionInterface::PeerConnectionState::kClosed; } } -+ (RTCPeerConnectionState)connectionStateForNativeState: ++ (RTC_OBJC_TYPE(RTCPeerConnectionState))connectionStateForNativeState: (webrtc::PeerConnectionInterface::PeerConnectionState)nativeState { switch (nativeState) { case webrtc::PeerConnectionInterface::PeerConnectionState::kNew: - return RTCPeerConnectionStateNew; + return RTC_OBJC_TYPE(RTCPeerConnectionStateNew); case webrtc::PeerConnectionInterface::PeerConnectionState::kConnecting: - return RTCPeerConnectionStateConnecting; + return RTC_OBJC_TYPE(RTCPeerConnectionStateConnecting); case webrtc::PeerConnectionInterface::PeerConnectionState::kConnected: - return RTCPeerConnectionStateConnected; + return RTC_OBJC_TYPE(RTCPeerConnectionStateConnected); case webrtc::PeerConnectionInterface::PeerConnectionState::kFailed: - return RTCPeerConnectionStateFailed; + return RTC_OBJC_TYPE(RTCPeerConnectionStateFailed); case webrtc::PeerConnectionInterface::PeerConnectionState::kDisconnected: - return RTCPeerConnectionStateDisconnected; + return RTC_OBJC_TYPE(RTCPeerConnectionStateDisconnected); case webrtc::PeerConnectionInterface::PeerConnectionState::kClosed: - return RTCPeerConnectionStateClosed; + return RTC_OBJC_TYPE(RTCPeerConnectionStateClosed); } } -+ (NSString *)stringForConnectionState:(RTCPeerConnectionState)state { ++ (NSString *)stringForConnectionState:(RTC_OBJC_TYPE(RTCPeerConnectionState))state { switch (state) { - case RTCPeerConnectionStateNew: + case RTC_OBJC_TYPE(RTCPeerConnectionStateNew): return @"NEW"; - case RTCPeerConnectionStateConnecting: + case RTC_OBJC_TYPE(RTCPeerConnectionStateConnecting): return @"CONNECTING"; - case RTCPeerConnectionStateConnected: + case RTC_OBJC_TYPE(RTCPeerConnectionStateConnected): 
return @"CONNECTED"; - case RTCPeerConnectionStateFailed: + case RTC_OBJC_TYPE(RTCPeerConnectionStateFailed): return @"FAILED"; - case RTCPeerConnectionStateDisconnected: + case RTC_OBJC_TYPE(RTCPeerConnectionStateDisconnected): return @"DISCONNECTED"; - case RTCPeerConnectionStateClosed: + case RTC_OBJC_TYPE(RTCPeerConnectionStateClosed): return @"CLOSED"; } } + (webrtc::PeerConnectionInterface::IceConnectionState) - nativeIceConnectionStateForState:(RTCIceConnectionState)state { + nativeIceConnectionStateForState:(RTC_OBJC_TYPE(RTCIceConnectionState))state { switch (state) { - case RTCIceConnectionStateNew: + case RTC_OBJC_TYPE(RTCIceConnectionStateNew): return webrtc::PeerConnectionInterface::kIceConnectionNew; - case RTCIceConnectionStateChecking: + case RTC_OBJC_TYPE(RTCIceConnectionStateChecking): return webrtc::PeerConnectionInterface::kIceConnectionChecking; - case RTCIceConnectionStateConnected: + case RTC_OBJC_TYPE(RTCIceConnectionStateConnected): return webrtc::PeerConnectionInterface::kIceConnectionConnected; - case RTCIceConnectionStateCompleted: + case RTC_OBJC_TYPE(RTCIceConnectionStateCompleted): return webrtc::PeerConnectionInterface::kIceConnectionCompleted; - case RTCIceConnectionStateFailed: + case RTC_OBJC_TYPE(RTCIceConnectionStateFailed): return webrtc::PeerConnectionInterface::kIceConnectionFailed; - case RTCIceConnectionStateDisconnected: + case RTC_OBJC_TYPE(RTCIceConnectionStateDisconnected): return webrtc::PeerConnectionInterface::kIceConnectionDisconnected; - case RTCIceConnectionStateClosed: + case RTC_OBJC_TYPE(RTCIceConnectionStateClosed): return webrtc::PeerConnectionInterface::kIceConnectionClosed; - case RTCIceConnectionStateCount: + case RTC_OBJC_TYPE(RTCIceConnectionStateCount): return webrtc::PeerConnectionInterface::kIceConnectionMax; } } -+ (RTCIceConnectionState)iceConnectionStateForNativeState: ++ (RTC_OBJC_TYPE(RTCIceConnectionState))iceConnectionStateForNativeState: 
(webrtc::PeerConnectionInterface::IceConnectionState)nativeState { switch (nativeState) { case webrtc::PeerConnectionInterface::kIceConnectionNew: - return RTCIceConnectionStateNew; + return RTC_OBJC_TYPE(RTCIceConnectionStateNew); case webrtc::PeerConnectionInterface::kIceConnectionChecking: - return RTCIceConnectionStateChecking; + return RTC_OBJC_TYPE(RTCIceConnectionStateChecking); case webrtc::PeerConnectionInterface::kIceConnectionConnected: - return RTCIceConnectionStateConnected; + return RTC_OBJC_TYPE(RTCIceConnectionStateConnected); case webrtc::PeerConnectionInterface::kIceConnectionCompleted: - return RTCIceConnectionStateCompleted; + return RTC_OBJC_TYPE(RTCIceConnectionStateCompleted); case webrtc::PeerConnectionInterface::kIceConnectionFailed: - return RTCIceConnectionStateFailed; + return RTC_OBJC_TYPE(RTCIceConnectionStateFailed); case webrtc::PeerConnectionInterface::kIceConnectionDisconnected: - return RTCIceConnectionStateDisconnected; + return RTC_OBJC_TYPE(RTCIceConnectionStateDisconnected); case webrtc::PeerConnectionInterface::kIceConnectionClosed: - return RTCIceConnectionStateClosed; + return RTC_OBJC_TYPE(RTCIceConnectionStateClosed); case webrtc::PeerConnectionInterface::kIceConnectionMax: - return RTCIceConnectionStateCount; + return RTC_OBJC_TYPE(RTCIceConnectionStateCount); } } -+ (NSString *)stringForIceConnectionState:(RTCIceConnectionState)state { ++ (NSString *)stringForIceConnectionState:(RTC_OBJC_TYPE(RTCIceConnectionState))state { switch (state) { - case RTCIceConnectionStateNew: + case RTC_OBJC_TYPE(RTCIceConnectionStateNew): return @"NEW"; - case RTCIceConnectionStateChecking: + case RTC_OBJC_TYPE(RTCIceConnectionStateChecking): return @"CHECKING"; - case RTCIceConnectionStateConnected: + case RTC_OBJC_TYPE(RTCIceConnectionStateConnected): return @"CONNECTED"; - case RTCIceConnectionStateCompleted: + case RTC_OBJC_TYPE(RTCIceConnectionStateCompleted): return @"COMPLETED"; - case RTCIceConnectionStateFailed: + case 
RTC_OBJC_TYPE(RTCIceConnectionStateFailed): return @"FAILED"; - case RTCIceConnectionStateDisconnected: + case RTC_OBJC_TYPE(RTCIceConnectionStateDisconnected): return @"DISCONNECTED"; - case RTCIceConnectionStateClosed: + case RTC_OBJC_TYPE(RTCIceConnectionStateClosed): return @"CLOSED"; - case RTCIceConnectionStateCount: + case RTC_OBJC_TYPE(RTCIceConnectionStateCount): return @"COUNT"; } } + (webrtc::PeerConnectionInterface::IceGatheringState) - nativeIceGatheringStateForState:(RTCIceGatheringState)state { + nativeIceGatheringStateForState:(RTC_OBJC_TYPE(RTCIceGatheringState))state { switch (state) { - case RTCIceGatheringStateNew: + case RTC_OBJC_TYPE(RTCIceGatheringStateNew): return webrtc::PeerConnectionInterface::kIceGatheringNew; - case RTCIceGatheringStateGathering: + case RTC_OBJC_TYPE(RTCIceGatheringStateGathering): return webrtc::PeerConnectionInterface::kIceGatheringGathering; - case RTCIceGatheringStateComplete: + case RTC_OBJC_TYPE(RTCIceGatheringStateComplete): return webrtc::PeerConnectionInterface::kIceGatheringComplete; } } -+ (RTCIceGatheringState)iceGatheringStateForNativeState: ++ (RTC_OBJC_TYPE(RTCIceGatheringState))iceGatheringStateForNativeState: (webrtc::PeerConnectionInterface::IceGatheringState)nativeState { switch (nativeState) { case webrtc::PeerConnectionInterface::kIceGatheringNew: - return RTCIceGatheringStateNew; + return RTC_OBJC_TYPE(RTCIceGatheringStateNew); case webrtc::PeerConnectionInterface::kIceGatheringGathering: - return RTCIceGatheringStateGathering; + return RTC_OBJC_TYPE(RTCIceGatheringStateGathering); case webrtc::PeerConnectionInterface::kIceGatheringComplete: - return RTCIceGatheringStateComplete; + return RTC_OBJC_TYPE(RTCIceGatheringStateComplete); } } -+ (NSString *)stringForIceGatheringState:(RTCIceGatheringState)state { ++ (NSString *)stringForIceGatheringState:(RTC_OBJC_TYPE(RTCIceGatheringState))state { switch (state) { - case RTCIceGatheringStateNew: + case RTC_OBJC_TYPE(RTCIceGatheringStateNew): return 
@"NEW"; - case RTCIceGatheringStateGathering: + case RTC_OBJC_TYPE(RTCIceGatheringStateGathering): return @"GATHERING"; - case RTCIceGatheringStateComplete: + case RTC_OBJC_TYPE(RTCIceGatheringStateComplete): return @"COMPLETE"; } } + (webrtc::PeerConnectionInterface::StatsOutputLevel) - nativeStatsOutputLevelForLevel:(RTCStatsOutputLevel)level { + nativeStatsOutputLevelForLevel:(RTC_OBJC_TYPE(RTCStatsOutputLevel))level { switch (level) { - case RTCStatsOutputLevelStandard: + case RTC_OBJC_TYPE(RTCStatsOutputLevelStandard): return webrtc::PeerConnectionInterface::kStatsOutputLevelStandard; - case RTCStatsOutputLevelDebug: + case RTC_OBJC_TYPE(RTCStatsOutputLevelDebug): return webrtc::PeerConnectionInterface::kStatsOutputLevelDebug; } } diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h index 1e0908db52..e8926ccfe5 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.h @@ -27,7 +27,7 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCAudioDeviceModule); @class RTC_OBJC_TYPE(RTCRtpCapabilities); -typedef NS_ENUM(NSInteger, RTCRtpMediaType); +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCRtpMediaType)); @protocol RTC_OBJC_TYPE (RTCPeerConnectionDelegate); diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 72ac70a781..16d3db3a9a 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -133,14 +133,14 @@ - (instancetype)init { #endif } -- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTCRtpMediaType)mediaType { +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpSenderCapabilitiesFor:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType { webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpSenderCapabilities([RTC_OBJC_TYPE(RTCRtpReceiver) 
nativeMediaTypeForMediaType: mediaType]); return [[RTC_OBJC_TYPE(RTCRtpCapabilities) alloc] initWithNativeRtpCapabilities:capabilities]; } -- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesFor:(RTCRtpMediaType)mediaType { +- (RTC_OBJC_TYPE(RTCRtpCapabilities) *)rtpReceiverCapabilitiesFor:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType { webrtc::RtpCapabilities capabilities = _nativeFactory->GetRtpReceiverCapabilities([RTC_OBJC_TYPE(RTCRtpReceiver) nativeMediaTypeForMediaType: mediaType]); @@ -441,9 +441,9 @@ - (void)stopAecDump { #pragma mark - Private + (cricket::MediaType)mediaTypeForKind:(NSString *)kind { - if (kind == kRTCMediaStreamTrackKindAudio) { + if (kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio)) { return cricket::MEDIA_TYPE_AUDIO; - } else if (kind == kRTCMediaStreamTrackKindVideo) { + } else if (kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)) { return cricket::MEDIA_TYPE_VIDEO; } else { RTC_DCHECK_NOTREACHED(); diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm index 2dc1e5dc4b..d3940a085b 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm @@ -41,10 +41,10 @@ - (instancetype)initWithNativeRtpCodecCapability: _name = [NSString stringForStdString:nativeRtpCodecCapability.name]; switch (nativeRtpCodecCapability.kind) { case cricket::MEDIA_TYPE_AUDIO: - _kind = kRTCMediaStreamTrackKindAudio; + _kind = RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio); break; case cricket::MEDIA_TYPE_VIDEO: - _kind = kRTCMediaStreamTrackKindVideo; + _kind = RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo); break; case cricket::MEDIA_TYPE_DATA: RTC_DCHECK_NOTREACHED(); @@ -92,9 +92,9 @@ - (NSString *)description { rtpCodecCapability.name = [NSString stdStringForString:_name]; // NSString pointer comparison is safe here since "kind" is readonly and only // populated above. 
- if (_kind == kRTCMediaStreamTrackKindAudio) { + if (_kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio)) { rtpCodecCapability.kind = cricket::MEDIA_TYPE_AUDIO; - } else if (_kind == kRTCMediaStreamTrackKindVideo) { + } else if (_kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)) { rtpCodecCapability.kind = cricket::MEDIA_TYPE_VIDEO; } else { RTC_DCHECK_NOTREACHED(); diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h index 4d24d3ccd6..86d9d6b88a 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.h @@ -14,23 +14,23 @@ NS_ASSUME_NONNULL_BEGIN -RTC_EXTERN const NSString *const kRTCRtxCodecName; -RTC_EXTERN const NSString *const kRTCRedCodecName; -RTC_EXTERN const NSString *const kRTCUlpfecCodecName; -RTC_EXTERN const NSString *const kRTCFlexfecCodecName; -RTC_EXTERN const NSString *const kRTCOpusCodecName; -RTC_EXTERN const NSString *const kRTCIsacCodecName; -RTC_EXTERN const NSString *const kRTCL16CodecName; -RTC_EXTERN const NSString *const kRTCG722CodecName; -RTC_EXTERN const NSString *const kRTCIlbcCodecName; -RTC_EXTERN const NSString *const kRTCPcmuCodecName; -RTC_EXTERN const NSString *const kRTCPcmaCodecName; -RTC_EXTERN const NSString *const kRTCDtmfCodecName; -RTC_EXTERN const NSString *const kRTCComfortNoiseCodecName; -RTC_EXTERN const NSString *const kRTCVp8CodecName; -RTC_EXTERN const NSString *const kRTCVp9CodecName; -RTC_EXTERN const NSString *const kRTCH264CodecName; -RTC_EXTERN const NSString *const kRTCAv1CodecName; +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCRtxCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCRedCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCUlpfecCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCFlexfecCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCOpusCodecName); +RTC_EXTERN const NSString *const 
RTC_CONSTANT_TYPE(RTCIsacCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCL16CodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCG722CodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCIlbcCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCPcmuCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCPcmaCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCDtmfCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCComfortNoiseCodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCVp8CodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCVp9CodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCH264CodecName); +RTC_EXTERN const NSString *const RTC_CONSTANT_TYPE(RTCAv1CodecName); /** Defined in https://www.w3.org/TR/webrtc/#idl-def-rtcrtpcodecparameters */ RTC_OBJC_EXPORT diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm index 42a310cb79..bbeb84a488 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecParameters.mm @@ -16,23 +16,23 @@ #include "media/base/media_constants.h" #include "rtc_base/checks.h" -const NSString * const kRTCRtxCodecName = @(cricket::kRtxCodecName); -const NSString * const kRTCRedCodecName = @(cricket::kRedCodecName); -const NSString * const kRTCUlpfecCodecName = @(cricket::kUlpfecCodecName); -const NSString * const kRTCFlexfecCodecName = @(cricket::kFlexfecCodecName); -const NSString * const kRTCOpusCodecName = @(cricket::kOpusCodecName); -const NSString * const kRTCL16CodecName = @(cricket::kL16CodecName); -const NSString * const kRTCG722CodecName = @(cricket::kG722CodecName); -const NSString * const kRTCIlbcCodecName = @(cricket::kIlbcCodecName); -const NSString * const kRTCPcmuCodecName = @(cricket::kPcmuCodecName); -const NSString * const kRTCPcmaCodecName = 
@(cricket::kPcmaCodecName); -const NSString * const kRTCDtmfCodecName = @(cricket::kDtmfCodecName); -const NSString * const kRTCComfortNoiseCodecName = +const NSString * const RTC_CONSTANT_TYPE(RTCRtxCodecName) = @(cricket::kRtxCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCRedCodecName) = @(cricket::kRedCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCUlpfecCodecName) = @(cricket::kUlpfecCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCFlexfecCodecName) = @(cricket::kFlexfecCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCOpusCodecName) = @(cricket::kOpusCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCL16CodecName) = @(cricket::kL16CodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCG722CodecName) = @(cricket::kG722CodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCIlbcCodecName) = @(cricket::kIlbcCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCPcmuCodecName) = @(cricket::kPcmuCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCPcmaCodecName) = @(cricket::kPcmaCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCDtmfCodecName) = @(cricket::kDtmfCodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCComfortNoiseCodecName) = @(cricket::kComfortNoiseCodecName); -const NSString * const kRTCVp8CodecName = @(cricket::kVp8CodecName); -const NSString * const kRTCVp9CodecName = @(cricket::kVp9CodecName); -const NSString * const kRTCH264CodecName = @(cricket::kH264CodecName); -const NSString * const kRTCAv1CodecName = @(cricket::kAv1CodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCVp8CodecName) = @(cricket::kVp8CodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCVp9CodecName) = @(cricket::kVp9CodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCH264CodecName) = @(cricket::kH264CodecName); +const NSString * const RTC_CONSTANT_TYPE(RTCAv1CodecName) = @(cricket::kAv1CodecName); @implementation RTC_OBJC_TYPE (RTCRtpCodecParameters) @@ -55,10 +55,10 @@ - 
(instancetype)initWithNativeParameters: _name = [NSString stringForStdString:nativeParameters.name]; switch (nativeParameters.kind) { case cricket::MEDIA_TYPE_AUDIO: - _kind = kRTCMediaStreamTrackKindAudio; + _kind = RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio); break; case cricket::MEDIA_TYPE_VIDEO: - _kind = kRTCMediaStreamTrackKindVideo; + _kind = RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo); break; case cricket::MEDIA_TYPE_DATA: RTC_DCHECK_NOTREACHED(); @@ -89,9 +89,9 @@ - (instancetype)initWithNativeParameters: parameters.name = [NSString stdStringForString:_name]; // NSString pointer comparison is safe here since "kind" is readonly and only // populated above. - if (_kind == kRTCMediaStreamTrackKindAudio) { + if (_kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio)) { parameters.kind = cricket::MEDIA_TYPE_AUDIO; - } else if (_kind == kRTCMediaStreamTrackKindVideo) { + } else if (_kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)) { parameters.kind = cricket::MEDIA_TYPE_VIDEO; } else { RTC_DCHECK_NOTREACHED(); diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h index af0c6993bc..e47e04de80 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.h @@ -15,11 +15,11 @@ NS_ASSUME_NONNULL_BEGIN /** Corresponds to webrtc::Priority. */ -typedef NS_ENUM(NSInteger, RTCPriority) { - RTCPriorityVeryLow, - RTCPriorityLow, - RTCPriorityMedium, - RTCPriorityHigh +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCPriority)) { + RTC_OBJC_TYPE(RTCPriorityVeryLow), + RTC_OBJC_TYPE(RTCPriorityLow), + RTC_OBJC_TYPE(RTCPriorityMedium), + RTC_OBJC_TYPE(RTCPriorityHigh) }; RTC_OBJC_EXPORT @@ -63,7 +63,7 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) double bitratePriority; /** The relative DiffServ Code Point priority. 
*/ -@property(nonatomic, assign) RTCPriority networkPriority; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCPriority) networkPriority; /** Allow dynamic frame length changes for audio: https://w3c.github.io/webrtc-extensions/#dom-rtcrtpencodingparameters-adaptiveptime */ diff --git a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm index aecb88b6f6..ea95a83ccd 100644 --- a/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpEncodingParameters.mm @@ -106,29 +106,29 @@ - (instancetype)initWithNativeParameters: return parameters; } -+ (webrtc::Priority)nativePriorityFromPriority:(RTCPriority)networkPriority { ++ (webrtc::Priority)nativePriorityFromPriority:(RTC_OBJC_TYPE(RTCPriority))networkPriority { switch (networkPriority) { - case RTCPriorityVeryLow: + case RTC_OBJC_TYPE(RTCPriorityVeryLow): return webrtc::Priority::kVeryLow; - case RTCPriorityLow: + case RTC_OBJC_TYPE(RTCPriorityLow): return webrtc::Priority::kLow; - case RTCPriorityMedium: + case RTC_OBJC_TYPE(RTCPriorityMedium): return webrtc::Priority::kMedium; - case RTCPriorityHigh: + case RTC_OBJC_TYPE(RTCPriorityHigh): return webrtc::Priority::kHigh; } } -+ (RTCPriority)priorityFromNativePriority:(webrtc::Priority)nativePriority { ++ (RTC_OBJC_TYPE(RTCPriority))priorityFromNativePriority:(webrtc::Priority)nativePriority { switch (nativePriority) { case webrtc::Priority::kVeryLow: - return RTCPriorityVeryLow; + return RTC_OBJC_TYPE(RTCPriorityVeryLow); case webrtc::Priority::kLow: - return RTCPriorityLow; + return RTC_OBJC_TYPE(RTCPriorityLow); case webrtc::Priority::kMedium: - return RTCPriorityMedium; + return RTC_OBJC_TYPE(RTCPriorityMedium); case webrtc::Priority::kHigh: - return RTCPriorityHigh; + return RTC_OBJC_TYPE(RTCPriorityHigh); } } diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters.h b/sdk/objc/api/peerconnection/RTCRtpParameters.h index 3d71c55ab9..b09c75f2d1 100644 --- 
a/sdk/objc/api/peerconnection/RTCRtpParameters.h +++ b/sdk/objc/api/peerconnection/RTCRtpParameters.h @@ -19,11 +19,11 @@ NS_ASSUME_NONNULL_BEGIN /** Corresponds to webrtc::DegradationPreference. */ -typedef NS_ENUM(NSInteger, RTCDegradationPreference) { - RTCDegradationPreferenceDisabled, - RTCDegradationPreferenceMaintainFramerate, - RTCDegradationPreferenceMaintainResolution, - RTCDegradationPreferenceBalanced +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCDegradationPreference)) { + RTC_OBJC_TYPE(RTCDegradationPreferenceDisabled), + RTC_OBJC_TYPE(RTCDegradationPreferenceMaintainFramerate), + RTC_OBJC_TYPE(RTCDegradationPreferenceMaintainResolution), + RTC_OBJC_TYPE(RTCDegradationPreferenceBalanced) }; RTC_OBJC_EXPORT diff --git a/sdk/objc/api/peerconnection/RTCRtpParameters.mm b/sdk/objc/api/peerconnection/RTCRtpParameters.mm index 2baf0ecd80..3db58b657f 100644 --- a/sdk/objc/api/peerconnection/RTCRtpParameters.mm +++ b/sdk/objc/api/peerconnection/RTCRtpParameters.mm @@ -80,22 +80,22 @@ - (instancetype)initWithNativeParameters: } if (_degradationPreference) { parameters.degradation_preference = [RTC_OBJC_TYPE(RTCRtpParameters) - nativeDegradationPreferenceFromDegradationPreference:(RTCDegradationPreference) + nativeDegradationPreferenceFromDegradationPreference:(RTC_OBJC_TYPE(RTCDegradationPreference)) _degradationPreference.intValue]; } return parameters; } + (webrtc::DegradationPreference)nativeDegradationPreferenceFromDegradationPreference: - (RTCDegradationPreference)degradationPreference { + (RTC_OBJC_TYPE(RTCDegradationPreference))degradationPreference { switch (degradationPreference) { - case RTCDegradationPreferenceDisabled: + case RTC_OBJC_TYPE(RTCDegradationPreferenceDisabled): return webrtc::DegradationPreference::DISABLED; - case RTCDegradationPreferenceMaintainFramerate: + case RTC_OBJC_TYPE(RTCDegradationPreferenceMaintainFramerate): return webrtc::DegradationPreference::MAINTAIN_FRAMERATE; - case RTCDegradationPreferenceMaintainResolution: + case 
RTC_OBJC_TYPE(RTCDegradationPreferenceMaintainResolution): return webrtc::DegradationPreference::MAINTAIN_RESOLUTION; - case RTCDegradationPreferenceBalanced: + case RTC_OBJC_TYPE(RTCDegradationPreferenceBalanced): return webrtc::DegradationPreference::BALANCED; } } @@ -108,13 +108,13 @@ + (NSNumber *)degradationPreferenceFromNativeDegradationPreference: switch (*nativeDegradationPreference) { case webrtc::DegradationPreference::DISABLED: - return @(RTCDegradationPreferenceDisabled); + return @(RTC_OBJC_TYPE(RTCDegradationPreferenceDisabled)); case webrtc::DegradationPreference::MAINTAIN_FRAMERATE: - return @(RTCDegradationPreferenceMaintainFramerate); + return @(RTC_OBJC_TYPE(RTCDegradationPreferenceMaintainFramerate)); case webrtc::DegradationPreference::MAINTAIN_RESOLUTION: - return @(RTCDegradationPreferenceMaintainResolution); + return @(RTC_OBJC_TYPE(RTCDegradationPreferenceMaintainResolution)); case webrtc::DegradationPreference::BALANCED: - return @(RTCDegradationPreferenceBalanced); + return @(RTC_OBJC_TYPE(RTCDegradationPreferenceBalanced)); } } diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h b/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h index eccbcbc3a0..218055b2b2 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver+Private.h @@ -41,11 +41,11 @@ class RtpReceiverDelegateAdapter : public RtpReceiverObserverInterface { nativeRtpReceiver:(rtc::scoped_refptr)nativeRtpReceiver NS_DESIGNATED_INITIALIZER; -+ (RTCRtpMediaType)mediaTypeForNativeMediaType:(cricket::MediaType)nativeMediaType; ++ (RTC_OBJC_TYPE(RTCRtpMediaType))mediaTypeForNativeMediaType:(cricket::MediaType)nativeMediaType; -+ (cricket::MediaType)nativeMediaTypeForMediaType:(RTCRtpMediaType)mediaType; ++ (cricket::MediaType)nativeMediaTypeForMediaType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType; -+ (NSString *)stringForMediaType:(RTCRtpMediaType)mediaType; ++ (NSString 
*)stringForMediaType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver.h b/sdk/objc/api/peerconnection/RTCRtpReceiver.h index 1e407fd71b..2c32b82d95 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver.h +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver.h @@ -17,11 +17,11 @@ NS_ASSUME_NONNULL_BEGIN /** Represents the media type of the RtpReceiver. */ -typedef NS_ENUM(NSInteger, RTCRtpMediaType) { - RTCRtpMediaTypeAudio, - RTCRtpMediaTypeVideo, - RTCRtpMediaTypeData, - RTCRtpMediaTypeUnsupported, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCRtpMediaType)) { + RTC_OBJC_TYPE(RTCRtpMediaTypeAudio), + RTC_OBJC_TYPE(RTCRtpMediaTypeVideo), + RTC_OBJC_TYPE(RTCRtpMediaTypeData), + RTC_OBJC_TYPE(RTCRtpMediaTypeUnsupported), }; @class RTC_OBJC_TYPE(RTCRtpReceiver); @@ -44,7 +44,7 @@ RTC_OBJC_EXPORT */ - (void)rtpReceiver : (RTC_OBJC_TYPE(RTCRtpReceiver) *)rtpReceiver didReceiveFirstPacketForMediaType - : (RTCRtpMediaType)mediaType; + : (RTC_OBJC_TYPE(RTCRtpMediaType))mediaType; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpReceiver.mm b/sdk/objc/api/peerconnection/RTCRtpReceiver.mm index 60af86ac1b..9b269c8336 100644 --- a/sdk/objc/api/peerconnection/RTCRtpReceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpReceiver.mm @@ -27,7 +27,7 @@ void RtpReceiverDelegateAdapter::OnFirstPacketReceived( cricket::MediaType media_type) { - RTCRtpMediaType packet_media_type = + RTC_OBJC_TYPE(RTCRtpMediaType) packet_media_type = [RTC_OBJC_TYPE(RTCRtpReceiver) mediaTypeForNativeMediaType:media_type]; RTC_OBJC_TYPE(RTCRtpReceiver) *receiver = receiver_; [receiver.delegate rtpReceiver:receiver didReceiveFirstPacketForMediaType:packet_media_type]; @@ -116,42 +116,42 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto return self; } -+ (RTCRtpMediaType)mediaTypeForNativeMediaType: ++ (RTC_OBJC_TYPE(RTCRtpMediaType))mediaTypeForNativeMediaType: (cricket::MediaType)nativeMediaType { switch 
(nativeMediaType) { case cricket::MEDIA_TYPE_AUDIO: - return RTCRtpMediaTypeAudio; + return RTC_OBJC_TYPE(RTCRtpMediaTypeAudio); case cricket::MEDIA_TYPE_VIDEO: - return RTCRtpMediaTypeVideo; + return RTC_OBJC_TYPE(RTCRtpMediaTypeVideo); case cricket::MEDIA_TYPE_DATA: - return RTCRtpMediaTypeData; + return RTC_OBJC_TYPE(RTCRtpMediaTypeData); case cricket::MEDIA_TYPE_UNSUPPORTED: - return RTCRtpMediaTypeUnsupported; + return RTC_OBJC_TYPE(RTCRtpMediaTypeUnsupported); } } -+ (cricket::MediaType)nativeMediaTypeForMediaType:(RTCRtpMediaType)mediaType { ++ (cricket::MediaType)nativeMediaTypeForMediaType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType { switch (mediaType) { - case RTCRtpMediaTypeAudio: + case RTC_OBJC_TYPE(RTCRtpMediaTypeAudio): return cricket::MEDIA_TYPE_AUDIO; - case RTCRtpMediaTypeVideo: + case RTC_OBJC_TYPE(RTCRtpMediaTypeVideo): return cricket::MEDIA_TYPE_VIDEO; - case RTCRtpMediaTypeData: + case RTC_OBJC_TYPE(RTCRtpMediaTypeData): return cricket::MEDIA_TYPE_DATA; - case RTCRtpMediaTypeUnsupported: + case RTC_OBJC_TYPE(RTCRtpMediaTypeUnsupported): return cricket::MEDIA_TYPE_UNSUPPORTED; } } -+ (NSString *)stringForMediaType:(RTCRtpMediaType)mediaType { ++ (NSString *)stringForMediaType:(RTC_OBJC_TYPE(RTCRtpMediaType))mediaType { switch (mediaType) { - case RTCRtpMediaTypeAudio: + case RTC_OBJC_TYPE(RTCRtpMediaTypeAudio): return @"AUDIO"; - case RTCRtpMediaTypeVideo: + case RTC_OBJC_TYPE(RTCRtpMediaTypeVideo): return @"VIDEO"; - case RTCRtpMediaTypeData: + case RTC_OBJC_TYPE(RTCRtpMediaTypeData): return @"DATA"; - case RTCRtpMediaTypeUnsupported: + case RTC_OBJC_TYPE(RTCRtpMediaTypeUnsupported): return @"UNSUPPORTED"; } } diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h index 868cbd80fe..d106765f6d 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver+Private.h @@ -36,9 +36,9 @@ NS_ASSUME_NONNULL_BEGIN 
NS_DESIGNATED_INITIALIZER; + (webrtc::RtpTransceiverDirection)nativeRtpTransceiverDirectionFromDirection: - (RTCRtpTransceiverDirection)direction; + (RTC_OBJC_TYPE(RTCRtpTransceiverDirection))direction; -+ (RTCRtpTransceiverDirection)rtpTransceiverDirectionFromNativeDirection: ++ (RTC_OBJC_TYPE(RTCRtpTransceiverDirection))rtpTransceiverDirectionFromNativeDirection: (webrtc::RtpTransceiverDirection)nativeDirection; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h index fca088be7e..16ca42d63b 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.h +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.h @@ -18,15 +18,15 @@ NS_ASSUME_NONNULL_BEGIN -extern NSString *const kRTCRtpTransceiverErrorDomain; +extern NSString *const RTC_CONSTANT_TYPE(RTCRtpTransceiverErrorDomain); /** https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiverdirection */ -typedef NS_ENUM(NSInteger, RTCRtpTransceiverDirection) { - RTCRtpTransceiverDirectionSendRecv, - RTCRtpTransceiverDirectionSendOnly, - RTCRtpTransceiverDirectionRecvOnly, - RTCRtpTransceiverDirectionInactive, - RTCRtpTransceiverDirectionStopped +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCRtpTransceiverDirection)) { + RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendRecv), + RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendOnly), + RTC_OBJC_TYPE(RTCRtpTransceiverDirectionRecvOnly), + RTC_OBJC_TYPE(RTCRtpTransceiverDirectionInactive), + RTC_OBJC_TYPE(RTCRtpTransceiverDirectionStopped) }; /** Structure for initializing an RTCRtpTransceiver in a call to @@ -37,7 +37,7 @@ RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCRtpTransceiverInit) : NSObject /** Direction of the RTCRtpTransceiver. See RTCRtpTransceiver.direction. */ -@property(nonatomic) RTCRtpTransceiverDirection direction; +@property(nonatomic) RTC_OBJC_TYPE(RTCRtpTransceiverDirection) direction; /** The added RTCRtpTransceiver will be added to these streams. 
*/ @property(nonatomic) NSArray *streamIds; @@ -70,7 +70,7 @@ RTC_OBJC_EXPORT /** Media type of the transceiver. The sender and receiver will also have this * type. */ - @property(nonatomic, readonly) RTCRtpMediaType mediaType; + @property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpMediaType) mediaType; /** The mid attribute is the mid negotiated and present in the local and * remote descriptions. Before negotiation is complete, the mid value may be @@ -105,7 +105,7 @@ RTC_OBJC_EXPORT * transceiver, which will be used in calls to createOffer and createAnswer. * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction */ -@property(nonatomic, readonly) RTCRtpTransceiverDirection direction; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCRtpTransceiverDirection) direction; @property(nonatomic, copy) NSArray *codecPreferences; @@ -115,7 +115,7 @@ RTC_OBJC_EXPORT * present and this method returns NO. * https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-currentdirection */ -- (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut; +- (BOOL)currentDirection:(RTC_OBJC_TYPE(RTCRtpTransceiverDirection) *)currentDirectionOut; /** The stop method irreversibly stops the RTCRtpTransceiver. The sender of * this transceiver will no longer send, the receiver will no longer receive. @@ -134,7 +134,7 @@ RTC_OBJC_EXPORT * descriptions as sendrecv, sendonly, recvonly, or inactive. 
* https://w3c.github.io/webrtc-pc/#dom-rtcrtptransceiver-direction */ -- (void)setDirection:(RTCRtpTransceiverDirection)direction error:(NSError **)error; +- (void)setDirection:(RTC_OBJC_TYPE(RTCRtpTransceiverDirection))direction error:(NSError **)error; @end diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm index dc51149934..16fccf3690 100644 --- a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm @@ -22,7 +22,7 @@ #include "api/rtp_parameters.h" -NSString *const kRTCRtpTransceiverErrorDomain = @"org.webrtc.RTCRtpTranceiver"; +NSString *const RTC_CONSTANT_TYPE(RTCRtpTransceiverErrorDomain) = @"org.webrtc.RTCRtpTranceiver"; @implementation RTC_OBJC_TYPE (RTCRtpTransceiverInit) @@ -32,7 +32,7 @@ @implementation RTC_OBJC_TYPE (RTCRtpTransceiverInit) - (instancetype)init { if (self = [super init]) { - _direction = RTCRtpTransceiverDirectionSendRecv; + _direction = RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendRecv); } return self; } @@ -57,7 +57,7 @@ @implementation RTC_OBJC_TYPE (RTCRtpTransceiver) { rtc::scoped_refptr _nativeRtpTransceiver; } -- (RTCRtpMediaType)mediaType { +- (RTC_OBJC_TYPE(RTCRtpMediaType))mediaType { return [RTC_OBJC_TYPE(RTCRtpReceiver) mediaTypeForNativeMediaType:_nativeRtpTransceiver->media_type()]; } @@ -91,17 +91,17 @@ - (BOOL)isStopped { return _nativeRtpTransceiver->stopped(); } -- (RTCRtpTransceiverDirection)direction { +- (RTC_OBJC_TYPE(RTCRtpTransceiverDirection))direction { return [RTC_OBJC_TYPE(RTCRtpTransceiver) rtpTransceiverDirectionFromNativeDirection:_nativeRtpTransceiver->direction()]; } -- (void)setDirection:(RTCRtpTransceiverDirection)direction error:(NSError **)error { +- (void)setDirection:(RTC_OBJC_TYPE(RTCRtpTransceiverDirection))direction error:(NSError **)error { webrtc::RTCError nativeError = _nativeRtpTransceiver->SetDirectionWithError( [RTC_OBJC_TYPE(RTCRtpTransceiver) 
nativeRtpTransceiverDirectionFromDirection:direction]); if (!nativeError.ok() && error) { - *error = [NSError errorWithDomain:kRTCRtpTransceiverErrorDomain + *error = [NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCRtpTransceiverErrorDomain) code:static_cast(nativeError.type()) userInfo:@{ @"message" : [NSString stringWithCString:nativeError.message() @@ -110,7 +110,7 @@ - (void)setDirection:(RTCRtpTransceiverDirection)direction error:(NSError **)err } } -- (BOOL)currentDirection:(RTCRtpTransceiverDirection *)currentDirectionOut { +- (BOOL)currentDirection:(RTC_OBJC_TYPE(RTCRtpTransceiverDirection) *)currentDirectionOut { if (_nativeRtpTransceiver->current_direction()) { *currentDirectionOut = [RTC_OBJC_TYPE(RTCRtpTransceiver) rtpTransceiverDirectionFromNativeDirection:*_nativeRtpTransceiver->current_direction()]; @@ -183,34 +183,34 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto } + (webrtc::RtpTransceiverDirection)nativeRtpTransceiverDirectionFromDirection: - (RTCRtpTransceiverDirection)direction { + (RTC_OBJC_TYPE(RTCRtpTransceiverDirection))direction { switch (direction) { - case RTCRtpTransceiverDirectionSendRecv: + case RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendRecv): return webrtc::RtpTransceiverDirection::kSendRecv; - case RTCRtpTransceiverDirectionSendOnly: + case RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendOnly): return webrtc::RtpTransceiverDirection::kSendOnly; - case RTCRtpTransceiverDirectionRecvOnly: + case RTC_OBJC_TYPE(RTCRtpTransceiverDirectionRecvOnly): return webrtc::RtpTransceiverDirection::kRecvOnly; - case RTCRtpTransceiverDirectionInactive: + case RTC_OBJC_TYPE(RTCRtpTransceiverDirectionInactive): return webrtc::RtpTransceiverDirection::kInactive; - case RTCRtpTransceiverDirectionStopped: + case RTC_OBJC_TYPE(RTCRtpTransceiverDirectionStopped): return webrtc::RtpTransceiverDirection::kStopped; } } -+ (RTCRtpTransceiverDirection)rtpTransceiverDirectionFromNativeDirection: ++ 
(RTC_OBJC_TYPE(RTCRtpTransceiverDirection))rtpTransceiverDirectionFromNativeDirection: (webrtc::RtpTransceiverDirection)nativeDirection { switch (nativeDirection) { case webrtc::RtpTransceiverDirection::kSendRecv: - return RTCRtpTransceiverDirectionSendRecv; + return RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendRecv); case webrtc::RtpTransceiverDirection::kSendOnly: - return RTCRtpTransceiverDirectionSendOnly; + return RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendOnly); case webrtc::RtpTransceiverDirection::kRecvOnly: - return RTCRtpTransceiverDirectionRecvOnly; + return RTC_OBJC_TYPE(RTCRtpTransceiverDirectionRecvOnly); case webrtc::RtpTransceiverDirection::kInactive: - return RTCRtpTransceiverDirectionInactive; + return RTC_OBJC_TYPE(RTCRtpTransceiverDirectionInactive); case webrtc::RtpTransceiverDirection::kStopped: - return RTCRtpTransceiverDirectionStopped; + return RTC_OBJC_TYPE(RTCRtpTransceiverDirectionStopped); } } diff --git a/sdk/objc/api/peerconnection/RTCSSLAdapter.h b/sdk/objc/api/peerconnection/RTCSSLAdapter.h index f68bc5e9e3..05a2311336 100644 --- a/sdk/objc/api/peerconnection/RTCSSLAdapter.h +++ b/sdk/objc/api/peerconnection/RTCSSLAdapter.h @@ -16,5 +16,5 @@ * Initialize and clean up the SSL library. Failure is fatal. These call the * corresponding functions in webrtc/rtc_base/ssladapter.h. 
*/ -RTC_EXTERN BOOL RTCInitializeSSL(void); -RTC_EXTERN BOOL RTCCleanupSSL(void); +RTC_EXTERN BOOL RTC_OBJC_TYPE(RTCInitializeSSL)(void); +RTC_EXTERN BOOL RTC_OBJC_TYPE(RTCCleanupSSL)(void); diff --git a/sdk/objc/api/peerconnection/RTCSSLAdapter.mm b/sdk/objc/api/peerconnection/RTCSSLAdapter.mm index 430249577b..c56cff260a 100644 --- a/sdk/objc/api/peerconnection/RTCSSLAdapter.mm +++ b/sdk/objc/api/peerconnection/RTCSSLAdapter.mm @@ -13,13 +13,13 @@ #include "rtc_base/checks.h" #include "rtc_base/ssl_adapter.h" -BOOL RTCInitializeSSL(void) { +BOOL RTC_OBJC_TYPE(RTCInitializeSSL)(void) { BOOL initialized = rtc::InitializeSSL(); RTC_DCHECK(initialized); return initialized; } -BOOL RTCCleanupSSL(void) { +BOOL RTC_OBJC_TYPE(RTCCleanupSSL)(void) { BOOL cleanedUp = rtc::CleanupSSL(); RTC_DCHECK(cleanedUp); return cleanedUp; diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h b/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h index d01c04b0b5..258c92ebc7 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h +++ b/sdk/objc/api/peerconnection/RTCSessionDescription+Private.h @@ -33,9 +33,9 @@ NS_ASSUME_NONNULL_BEGIN - (instancetype)initWithNativeDescription: (const webrtc::SessionDescriptionInterface *)nativeDescription; -+ (std::string)stdStringForType:(RTCSdpType)type; ++ (std::string)stdStringForType:(RTC_OBJC_TYPE(RTCSdpType))type; -+ (RTCSdpType)typeForStdString:(const std::string &)string; ++ (RTC_OBJC_TYPE(RTCSdpType))typeForStdString:(const std::string &)string; @end diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription.h b/sdk/objc/api/peerconnection/RTCSessionDescription.h index 8a9479d5cf..f322cc38fe 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription.h +++ b/sdk/objc/api/peerconnection/RTCSessionDescription.h @@ -16,11 +16,11 @@ * Represents the session description type. This exposes the same types that are * in C++, which doesn't include the rollback type that is in the W3C spec. 
*/ -typedef NS_ENUM(NSInteger, RTCSdpType) { - RTCSdpTypeOffer, - RTCSdpTypePrAnswer, - RTCSdpTypeAnswer, - RTCSdpTypeRollback, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCSdpType)) { + RTC_OBJC_TYPE(RTCSdpTypeOffer), + RTC_OBJC_TYPE(RTCSdpTypePrAnswer), + RTC_OBJC_TYPE(RTCSdpTypeAnswer), + RTC_OBJC_TYPE(RTCSdpTypeRollback), }; NS_ASSUME_NONNULL_BEGIN @@ -29,7 +29,7 @@ RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCSessionDescription) : NSObject /** The type of session description. */ -@property(nonatomic, readonly) RTCSdpType type; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCSdpType) type; /** The SDP string representation of this session description. */ @property(nonatomic, readonly) NSString *sdp; @@ -37,11 +37,11 @@ RTC_OBJC_EXPORT - (instancetype)init NS_UNAVAILABLE; /** Initialize a session description with a type and SDP string. */ -- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp NS_DESIGNATED_INITIALIZER; +- (instancetype)initWithType:(RTC_OBJC_TYPE(RTCSdpType))type sdp:(NSString *)sdp NS_DESIGNATED_INITIALIZER; -+ (NSString *)stringForType:(RTCSdpType)type; ++ (NSString *)stringForType:(RTC_OBJC_TYPE(RTCSdpType))type; -+ (RTCSdpType)typeForString:(NSString *)string; ++ (RTC_OBJC_TYPE(RTCSdpType))typeForString:(NSString *)string; @end diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription.mm b/sdk/objc/api/peerconnection/RTCSessionDescription.mm index 539c90b14c..7e84c4eabe 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription.mm +++ b/sdk/objc/api/peerconnection/RTCSessionDescription.mm @@ -20,17 +20,17 @@ @implementation RTC_OBJC_TYPE (RTCSessionDescription) @synthesize type = _type; @synthesize sdp = _sdp; -+ (NSString *)stringForType:(RTCSdpType)type { ++ (NSString *)stringForType:(RTC_OBJC_TYPE(RTCSdpType))type { std::string string = [[self class] stdStringForType:type]; return [NSString stringForStdString:string]; } -+ (RTCSdpType)typeForString:(NSString *)string { ++ 
(RTC_OBJC_TYPE(RTCSdpType))typeForString:(NSString *)string { std::string typeString = string.stdString; return [[self class] typeForStdString:typeString]; } -- (instancetype)initWithType:(RTCSdpType)type sdp:(NSString *)sdp { +- (instancetype)initWithType:(RTC_OBJC_TYPE(RTCSdpType))type sdp:(NSString *)sdp { if (self = [super init]) { _type = type; _sdp = [sdp copy]; @@ -66,37 +66,37 @@ - (instancetype)initWithNativeDescription: NSParameterAssert(nativeDescription); std::string sdp; nativeDescription->ToString(&sdp); - RTCSdpType type = [[self class] typeForStdString:nativeDescription->type()]; + RTC_OBJC_TYPE(RTCSdpType) type = [[self class] typeForStdString:nativeDescription->type()]; return [self initWithType:type sdp:[NSString stringForStdString:sdp]]; } -+ (std::string)stdStringForType:(RTCSdpType)type { ++ (std::string)stdStringForType:(RTC_OBJC_TYPE(RTCSdpType))type { switch (type) { - case RTCSdpTypeOffer: + case RTC_OBJC_TYPE(RTCSdpTypeOffer): return webrtc::SessionDescriptionInterface::kOffer; - case RTCSdpTypePrAnswer: + case RTC_OBJC_TYPE(RTCSdpTypePrAnswer): return webrtc::SessionDescriptionInterface::kPrAnswer; - case RTCSdpTypeAnswer: + case RTC_OBJC_TYPE(RTCSdpTypeAnswer): return webrtc::SessionDescriptionInterface::kAnswer; - case RTCSdpTypeRollback: + case RTC_OBJC_TYPE(RTCSdpTypeRollback): return webrtc::SessionDescriptionInterface::kRollback; } } -+ (RTCSdpType)typeForStdString:(const std::string &)string { ++ (RTC_OBJC_TYPE(RTCSdpType))typeForStdString:(const std::string &)string { if (string == webrtc::SessionDescriptionInterface::kOffer) { - return RTCSdpTypeOffer; + return RTC_OBJC_TYPE(RTCSdpTypeOffer); } else if (string == webrtc::SessionDescriptionInterface::kPrAnswer) { - return RTCSdpTypePrAnswer; + return RTC_OBJC_TYPE(RTCSdpTypePrAnswer); } else if (string == webrtc::SessionDescriptionInterface::kAnswer) { - return RTCSdpTypeAnswer; + return RTC_OBJC_TYPE(RTCSdpTypeAnswer); } else if (string == 
webrtc::SessionDescriptionInterface::kRollback) { - return RTCSdpTypeRollback; + return RTC_OBJC_TYPE(RTCSdpTypeRollback); } else { RTC_DCHECK_NOTREACHED(); - return RTCSdpTypeOffer; + return RTC_OBJC_TYPE(RTCSdpTypeOffer); } } diff --git a/sdk/objc/api/peerconnection/RTCTracing.h b/sdk/objc/api/peerconnection/RTCTracing.h index 5c66e5a63a..a4d8f762eb 100644 --- a/sdk/objc/api/peerconnection/RTCTracing.h +++ b/sdk/objc/api/peerconnection/RTCTracing.h @@ -12,10 +12,10 @@ #import "RTCMacros.h" -RTC_EXTERN void RTCSetupInternalTracer(void); +RTC_EXTERN void RTC_OBJC_TYPE(RTCSetupInternalTracer)(void); /** Starts capture to specified file. Must be a valid writable path. * Returns YES if capture starts. */ -RTC_EXTERN BOOL RTCStartInternalCapture(NSString* filePath); -RTC_EXTERN void RTCStopInternalCapture(void); -RTC_EXTERN void RTCShutdownInternalTracer(void); +RTC_EXTERN BOOL RTC_OBJC_TYPE(RTCStartInternalCapture)(NSString* filePath); +RTC_EXTERN void RTC_OBJC_TYPE(RTCStopInternalCapture)(void); +RTC_EXTERN void RTC_OBJC_TYPE(RTCShutdownInternalTracer)(void); diff --git a/sdk/objc/api/peerconnection/RTCTracing.mm b/sdk/objc/api/peerconnection/RTCTracing.mm index 72f9f4da13..3a93589c88 100644 --- a/sdk/objc/api/peerconnection/RTCTracing.mm +++ b/sdk/objc/api/peerconnection/RTCTracing.mm @@ -12,18 +12,18 @@ #include "rtc_base/event_tracer.h" -void RTCSetupInternalTracer(void) { +void RTC_OBJC_TYPE(RTCSetupInternalTracer)(void) { rtc::tracing::SetupInternalTracer(); } -BOOL RTCStartInternalCapture(NSString *filePath) { +BOOL RTC_OBJC_TYPE(RTCStartInternalCapture)(NSString *filePath) { return rtc::tracing::StartInternalCapture(filePath.UTF8String); } -void RTCStopInternalCapture(void) { +void RTC_OBJC_TYPE(RTCStopInternalCapture)(void) { rtc::tracing::StopInternalCapture(); } -void RTCShutdownInternalTracer(void) { +void RTC_OBJC_TYPE(RTCShutdownInternalTracer)(void) { rtc::tracing::ShutdownInternalTracer(); } diff --git 
a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm index dec3a61090..95c9f83c32 100644 --- a/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm +++ b/sdk/objc/api/peerconnection/RTCVideoEncoderSettings+Private.mm @@ -28,7 +28,7 @@ - (instancetype)initWithNativeVideoCodec : (const webrtc::VideoCodec *)videoCode self.minBitrate = videoCodec->minBitrate; self.maxFramerate = videoCodec->maxFramerate; self.qpMax = videoCodec->qpMax; - self.mode = (RTCVideoCodecMode)videoCodec->mode; + self.mode = (RTC_OBJC_TYPE(RTCVideoCodecMode))videoCodec->mode; } } diff --git a/sdk/objc/api/peerconnection/RTCVideoSource+Private.h b/sdk/objc/api/peerconnection/RTCVideoSource+Private.h index 8e475dd21e..00f54bd476 100644 --- a/sdk/objc/api/peerconnection/RTCVideoSource+Private.h +++ b/sdk/objc/api/peerconnection/RTCVideoSource+Private.h @@ -35,7 +35,7 @@ NS_ASSUME_NONNULL_BEGIN - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource - type:(RTCMediaSourceType)type NS_UNAVAILABLE; + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type NS_UNAVAILABLE; - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory signalingThread:(rtc::Thread *)signalingThread diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.mm b/sdk/objc/api/peerconnection/RTCVideoSource.mm index 486ca93771..5c9f801a56 100644 --- a/sdk/objc/api/peerconnection/RTCVideoSource.mm +++ b/sdk/objc/api/peerconnection/RTCVideoSource.mm @@ -35,7 +35,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto RTC_DCHECK(nativeVideoSource); if (self = [super initWithFactory:factory nativeMediaSource:nativeVideoSource - type:RTCMediaSourceTypeVideo]) { + type:RTC_OBJC_TYPE(RTCMediaSourceTypeVideo)]) { _nativeVideoSource = nativeVideoSource; } return self; @@ -43,7 +43,7 @@ - 
(instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource - type:(RTCMediaSourceType)type { + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type { RTC_DCHECK_NOTREACHED(); return nil; } diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm index 546ec80a61..6f0629a40d 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm @@ -32,7 +32,7 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto std::string nativeId = [NSString stdStringForString:trackId]; rtc::scoped_refptr track = factory.nativeFactory->CreateVideoTrack(source.nativeVideoSource, nativeId); - if (self = [self initWithFactory:factory nativeTrack:track type:RTCMediaStreamTrackTypeVideo]) { + if (self = [self initWithFactory:factory nativeTrack:track type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo)]) { _source = source; } return self; @@ -41,10 +41,10 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory nativeTrack: (rtc::scoped_refptr)nativeMediaTrack - type:(RTCMediaStreamTrackType)type { + type:(RTC_OBJC_TYPE(RTCMediaStreamTrackType))type { NSParameterAssert(factory); NSParameterAssert(nativeMediaTrack); - NSParameterAssert(type == RTCMediaStreamTrackTypeVideo); + NSParameterAssert(type == RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo)); if (self = [super initWithFactory:factory nativeTrack:nativeMediaTrack type:type]) { _adapters = [NSMutableArray array]; _workerThread = factory.workerThread; diff --git a/sdk/objc/api/video_codec/RTCVideoCodecConstants.h b/sdk/objc/api/video_codec/RTCVideoCodecConstants.h index 8b17a75aef..b7252eaebf 100644 --- a/sdk/objc/api/video_codec/RTCVideoCodecConstants.h +++ 
b/sdk/objc/api/video_codec/RTCVideoCodecConstants.h @@ -12,6 +12,6 @@ #import "RTCMacros.h" -RTC_EXTERN NSString* const kRTCVideoCodecVp8Name; -RTC_EXTERN NSString* const kRTCVideoCodecVp9Name; -RTC_EXTERN NSString* const kRTCVideoCodecAv1Name; +RTC_EXTERN NSString* const RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name); +RTC_EXTERN NSString* const RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name); +RTC_EXTERN NSString* const RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name); diff --git a/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm b/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm index 1ab236a2c2..1bec295174 100644 --- a/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm +++ b/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm @@ -13,6 +13,6 @@ #include "media/base/media_constants.h" -NSString *const kRTCVideoCodecVp8Name = @(cricket::kVp8CodecName); -NSString *const kRTCVideoCodecVp9Name = @(cricket::kVp9CodecName); -NSString *const kRTCVideoCodecAv1Name = @(cricket::kAv1CodecName); +NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name) = @(cricket::kVp8CodecName); +NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name) = @(cricket::kVp9CodecName); +NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name) = @(cricket::kAv1CodecName); diff --git a/sdk/objc/base/RTCEncodedImage.h b/sdk/objc/base/RTCEncodedImage.h index 28529e5906..161ffa1532 100644 --- a/sdk/objc/base/RTCEncodedImage.h +++ b/sdk/objc/base/RTCEncodedImage.h @@ -16,17 +16,17 @@ NS_ASSUME_NONNULL_BEGIN /** Represents an encoded frame's type. 
*/ -typedef NS_ENUM(NSUInteger, RTCFrameType) { - RTCFrameTypeEmptyFrame = 0, - RTCFrameTypeAudioFrameSpeech = 1, - RTCFrameTypeAudioFrameCN = 2, - RTCFrameTypeVideoFrameKey = 3, - RTCFrameTypeVideoFrameDelta = 4, +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCFrameType)) { + RTC_OBJC_TYPE(RTCFrameTypeEmptyFrame) = 0, + RTC_OBJC_TYPE(RTCFrameTypeAudioFrameSpeech) = 1, + RTC_OBJC_TYPE(RTCFrameTypeAudioFrameCN) = 2, + RTC_OBJC_TYPE(RTCFrameTypeVideoFrameKey) = 3, + RTC_OBJC_TYPE(RTCFrameTypeVideoFrameDelta) = 4, }; -typedef NS_ENUM(NSUInteger, RTCVideoContentType) { - RTCVideoContentTypeUnspecified, - RTCVideoContentTypeScreenshare, +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCVideoContentType)) { + RTC_OBJC_TYPE(RTCVideoContentTypeUnspecified), + RTC_OBJC_TYPE(RTCVideoContentTypeScreenshare), }; /** Represents an encoded frame. Corresponds to webrtc::EncodedImage. */ @@ -42,10 +42,10 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) uint8_t flags; @property(nonatomic, assign) int64_t encodeStartMs; @property(nonatomic, assign) int64_t encodeFinishMs; -@property(nonatomic, assign) RTCFrameType frameType; -@property(nonatomic, assign) RTCVideoRotation rotation; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCFrameType) frameType; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCVideoRotation) rotation; @property(nonatomic, strong) NSNumber *qp; -@property(nonatomic, assign) RTCVideoContentType contentType; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCVideoContentType) contentType; @end diff --git a/sdk/objc/base/RTCLogging.h b/sdk/objc/base/RTCLogging.h index 0fa6a91b69..94c8e78474 100644 --- a/sdk/objc/base/RTCLogging.h +++ b/sdk/objc/base/RTCLogging.h @@ -13,47 +13,47 @@ #import "RTCMacros.h" // Subset of rtc::LoggingSeverity. 
-typedef NS_ENUM(NSInteger, RTCLoggingSeverity) { - RTCLoggingSeverityVerbose, - RTCLoggingSeverityInfo, - RTCLoggingSeverityWarning, - RTCLoggingSeverityError, - RTCLoggingSeverityNone, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCLoggingSeverity)) { + RTC_OBJC_TYPE(RTCLoggingSeverityVerbose), + RTC_OBJC_TYPE(RTCLoggingSeverityInfo), + RTC_OBJC_TYPE(RTCLoggingSeverityWarning), + RTC_OBJC_TYPE(RTCLoggingSeverityError), + RTC_OBJC_TYPE(RTCLoggingSeverityNone), }; // Wrapper for C++ RTC_LOG(sev) macros. // Logs the log string to the webrtc logstream for the given severity. -RTC_EXTERN void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string); +RTC_EXTERN void RTC_OBJC_TYPE(RTCLogEx)(RTC_OBJC_TYPE(RTCLoggingSeverity) severity, NSString* log_string); // Wrapper for rtc::LogMessage::LogToDebug. // Sets the minimum severity to be logged to console. -RTC_EXTERN void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity); +RTC_EXTERN void RTC_OBJC_TYPE(RTCSetMinDebugLogLevel)(RTC_OBJC_TYPE(RTCLoggingSeverity) severity); // Returns the filename with the path prefix removed. -RTC_EXTERN NSString* RTCFileName(const char* filePath); +RTC_EXTERN NSString* RTC_OBJC_TYPE(RTCFileName)(const char* filePath); // Some convenience macros. -#define RTCLogString(format, ...) \ - [NSString stringWithFormat:@"(%@:%d %s): " format, \ - RTCFileName(__FILE__), \ - __LINE__, \ - __FUNCTION__, \ +#define RTCLogString(format, ...) \ + [NSString stringWithFormat:@"(%@:%d %s): " format, \ + RTC_OBJC_TYPE(RTCFileName)(__FILE__), \ + __LINE__, \ + __FUNCTION__, \ ##__VA_ARGS__] #define RTCLogFormat(severity, format, ...) \ do { \ NSString* log_string = RTCLogString(format, ##__VA_ARGS__); \ - RTCLogEx(severity, log_string); \ + RTC_OBJC_TYPE(RTCLogEx)(severity, log_string); \ } while (false) -#define RTCLogVerbose(format, ...) RTCLogFormat(RTCLoggingSeverityVerbose, format, ##__VA_ARGS__) +#define RTCLogVerbose(format, ...) 
RTCLogFormat(RTC_OBJC_TYPE(RTCLoggingSeverityVerbose), format, ##__VA_ARGS__) -#define RTCLogInfo(format, ...) RTCLogFormat(RTCLoggingSeverityInfo, format, ##__VA_ARGS__) +#define RTCLogInfo(format, ...) RTCLogFormat(RTC_OBJC_TYPE(RTCLoggingSeverityInfo), format, ##__VA_ARGS__) -#define RTCLogWarning(format, ...) RTCLogFormat(RTCLoggingSeverityWarning, format, ##__VA_ARGS__) +#define RTCLogWarning(format, ...) RTCLogFormat(RTC_OBJC_TYPE(RTCLoggingSeverityWarning), format, ##__VA_ARGS__) -#define RTCLogError(format, ...) RTCLogFormat(RTCLoggingSeverityError, format, ##__VA_ARGS__) +#define RTCLogError(format, ...) RTCLogFormat(RTC_OBJC_TYPE(RTCLoggingSeverityError), format, ##__VA_ARGS__) #if !defined(NDEBUG) #define RTCLogDebug(format, ...) RTCLogInfo(format, ##__VA_ARGS__) diff --git a/sdk/objc/base/RTCLogging.mm b/sdk/objc/base/RTCLogging.mm index e8dae02efb..6722f091df 100644 --- a/sdk/objc/base/RTCLogging.mm +++ b/sdk/objc/base/RTCLogging.mm @@ -12,33 +12,33 @@ #include "rtc_base/logging.h" -rtc::LoggingSeverity RTCGetNativeLoggingSeverity(RTCLoggingSeverity severity) { +rtc::LoggingSeverity RTC_OBJC_TYPE(RTCGetNativeLoggingSeverity)(RTC_OBJC_TYPE(RTCLoggingSeverity) severity) { switch (severity) { - case RTCLoggingSeverityVerbose: + case RTC_OBJC_TYPE(RTCLoggingSeverityVerbose): return rtc::LS_VERBOSE; - case RTCLoggingSeverityInfo: + case RTC_OBJC_TYPE(RTCLoggingSeverityInfo): return rtc::LS_INFO; - case RTCLoggingSeverityWarning: + case RTC_OBJC_TYPE(RTCLoggingSeverityWarning): return rtc::LS_WARNING; - case RTCLoggingSeverityError: + case RTC_OBJC_TYPE(RTCLoggingSeverityError): return rtc::LS_ERROR; - case RTCLoggingSeverityNone: + case RTC_OBJC_TYPE(RTCLoggingSeverityNone): return rtc::LS_NONE; } } -void RTCLogEx(RTCLoggingSeverity severity, NSString* log_string) { +void RTC_OBJC_TYPE(RTCLogEx)(RTC_OBJC_TYPE(RTCLoggingSeverity) severity, NSString* log_string) { if (log_string.length) { const char* utf8_string = log_string.UTF8String; - 
RTC_LOG_V(RTCGetNativeLoggingSeverity(severity)) << utf8_string; + RTC_LOG_V(RTC_OBJC_TYPE(RTCGetNativeLoggingSeverity)(severity)) << utf8_string; } } -void RTCSetMinDebugLogLevel(RTCLoggingSeverity severity) { - rtc::LogMessage::LogToDebug(RTCGetNativeLoggingSeverity(severity)); +void RTC_OBJC_TYPE(RTCSetMinDebugLogLevel)(RTC_OBJC_TYPE(RTCLoggingSeverity) severity) { + rtc::LogMessage::LogToDebug(RTC_OBJC_TYPE(RTCGetNativeLoggingSeverity)(severity)); } -NSString* RTCFileName(const char* file_path) { +NSString* RTC_OBJC_TYPE(RTCFileName)(const char* file_path) { NSString* ns_file_path = [[NSString alloc] initWithBytesNoCopy:const_cast(file_path) length:strlen(file_path) diff --git a/sdk/objc/base/RTCMacros.h b/sdk/objc/base/RTCMacros.h index 114ced0ea6..439675c4db 100644 --- a/sdk/objc/base/RTCMacros.h +++ b/sdk/objc/base/RTCMacros.h @@ -41,6 +41,10 @@ #define RTC_OBJC_TYPE_PREFIX #endif +#ifndef RTC_CONSTANT_TYPE_PREFIX +#define RTC_CONSTANT_TYPE_PREFIX k +#endif + // RCT_OBJC_TYPE // // Macro used internally to declare API types. Declaring an API type without @@ -48,6 +52,8 @@ // that will be affected by the configurable RTC_OBJC_TYPE_PREFIX. #define RTC_OBJC_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_OBJC_TYPE_PREFIX, type_name) +#define RTC_CONSTANT_TYPE(type_name) RTC_SYMBOL_CONCAT(RTC_CONSTANT_TYPE_PREFIX, type_name) + #if defined(__cplusplus) #define RTC_EXTERN extern "C" RTC_OBJC_EXPORT #else diff --git a/sdk/objc/base/RTCVideoEncoderSettings.h b/sdk/objc/base/RTCVideoEncoderSettings.h index ae792eab71..0f6eb20409 100644 --- a/sdk/objc/base/RTCVideoEncoderSettings.h +++ b/sdk/objc/base/RTCVideoEncoderSettings.h @@ -14,9 +14,9 @@ NS_ASSUME_NONNULL_BEGIN -typedef NS_ENUM(NSUInteger, RTCVideoCodecMode) { - RTCVideoCodecModeRealtimeVideo, - RTCVideoCodecModeScreensharing, +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCVideoCodecMode)) { + RTC_OBJC_TYPE(RTCVideoCodecModeRealtimeVideo), + RTC_OBJC_TYPE(RTCVideoCodecModeScreensharing), }; /** Settings for encoder. 
Corresponds to webrtc::VideoCodec. */ @@ -35,7 +35,7 @@ RTC_OBJC_EXPORT @property(nonatomic, assign) uint32_t maxFramerate; @property(nonatomic, assign) unsigned int qpMax; -@property(nonatomic, assign) RTCVideoCodecMode mode; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCVideoCodecMode) mode; @end diff --git a/sdk/objc/base/RTCVideoFrame.h b/sdk/objc/base/RTCVideoFrame.h index edf074b682..512c78e883 100644 --- a/sdk/objc/base/RTCVideoFrame.h +++ b/sdk/objc/base/RTCVideoFrame.h @@ -15,11 +15,11 @@ NS_ASSUME_NONNULL_BEGIN -typedef NS_ENUM(NSInteger, RTCVideoRotation) { - RTCVideoRotation_0 = 0, - RTCVideoRotation_90 = 90, - RTCVideoRotation_180 = 180, - RTCVideoRotation_270 = 270, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCVideoRotation)) { + RTC_OBJC_TYPE(RTCVideoRotation_0) = 0, + RTC_OBJC_TYPE(RTCVideoRotation_90) = 90, + RTC_OBJC_TYPE(RTCVideoRotation_180) = 180, + RTC_OBJC_TYPE(RTCVideoRotation_270) = 270, }; @protocol RTC_OBJC_TYPE @@ -34,7 +34,7 @@ RTC_OBJC_EXPORT /** Height without rotation applied. */ @property(nonatomic, readonly) int height; -@property(nonatomic, readonly) RTCVideoRotation rotation; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCVideoRotation) rotation; /** Timestamp in nanoseconds. */ @property(nonatomic, readonly) int64_t timeStampNs; @@ -51,7 +51,7 @@ RTC_OBJC_EXPORT * Deprecated - initialize with a RTCCVPixelBuffer instead */ - (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer - rotation:(RTCVideoRotation)rotation + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation timeStampNs:(int64_t)timeStampNs DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead"); @@ -66,14 +66,14 @@ RTC_OBJC_EXPORT cropHeight:(int)cropHeight cropX:(int)cropX cropY:(int)cropY - rotation:(RTCVideoRotation)rotation + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation timeStampNs:(int64_t)timeStampNs DEPRECATED_MSG_ATTRIBUTE("use initWithBuffer instead"); /** Initialize an RTCVideoFrame from a frame buffer, rotation, and timestamp. 
*/ - (instancetype)initWithBuffer:(id)frameBuffer - rotation:(RTCVideoRotation)rotation + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation timeStampNs:(int64_t)timeStampNs; /** Return a frame that is guaranteed to be I420, i.e. it is possible to access diff --git a/sdk/objc/base/RTCVideoFrame.mm b/sdk/objc/base/RTCVideoFrame.mm index e162238d73..cb4975b4d8 100644 --- a/sdk/objc/base/RTCVideoFrame.mm +++ b/sdk/objc/base/RTCVideoFrame.mm @@ -14,7 +14,7 @@ #import "RTCVideoFrameBuffer.h" @implementation RTC_OBJC_TYPE (RTCVideoFrame) { - RTCVideoRotation _rotation; + RTC_OBJC_TYPE(RTCVideoRotation) _rotation; int64_t _timeStampNs; } @@ -29,7 +29,7 @@ - (int)height { return _buffer.height; } -- (RTCVideoRotation)rotation { +- (RTC_OBJC_TYPE(RTCVideoRotation) )rotation { return _rotation; } @@ -44,7 +44,7 @@ - (int64_t)timeStampNs { } - (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer - rotation:(RTCVideoRotation)rotation + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation timeStampNs:(int64_t)timeStampNs { // Deprecated. return nil; @@ -57,14 +57,14 @@ - (instancetype)initWithPixelBuffer:(CVPixelBufferRef)pixelBuffer cropHeight:(int)cropHeight cropX:(int)cropX cropY:(int)cropY - rotation:(RTCVideoRotation)rotation + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation timeStampNs:(int64_t)timeStampNs { // Deprecated. 
return nil; } - (instancetype)initWithBuffer:(id)buffer - rotation:(RTCVideoRotation)rotation + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation timeStampNs:(int64_t)timeStampNs { if (self = [super init]) { _buffer = buffer; diff --git a/sdk/objc/components/audio/RTCAudioSession.h b/sdk/objc/components/audio/RTCAudioSession.h index 2730664858..c8836b90bd 100644 --- a/sdk/objc/components/audio/RTCAudioSession.h +++ b/sdk/objc/components/audio/RTCAudioSession.h @@ -15,11 +15,11 @@ NS_ASSUME_NONNULL_BEGIN -extern NSString *const kRTCAudioSessionErrorDomain; +extern NSString *const RTC_CONSTANT_TYPE(RTCAudioSessionErrorDomain); /** Method that requires lock was called without lock. */ -extern NSInteger const kRTCAudioSessionErrorLockRequired; +extern NSInteger const RTC_CONSTANT_TYPE(RTCAudioSessionErrorLockRequired); /** Unknown configuration error occurred. */ -extern NSInteger const kRTCAudioSessionErrorConfiguration; +extern NSInteger const RTC_CONSTANT_TYPE(RTCAudioSessionErrorConfiguration); @class RTC_OBJC_TYPE(RTCAudioSession); @class RTC_OBJC_TYPE(RTCAudioSessionConfiguration); diff --git a/sdk/objc/components/audio/RTCAudioSession.mm b/sdk/objc/components/audio/RTCAudioSession.mm index 11d1a1c337..7d8ec7f824 100644 --- a/sdk/objc/components/audio/RTCAudioSession.mm +++ b/sdk/objc/components/audio/RTCAudioSession.mm @@ -26,10 +26,10 @@ #error ABSL_HAVE_THREAD_LOCAL should be defined for MacOS / iOS Targets. 
#endif -NSString *const kRTCAudioSessionErrorDomain = @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)"; -NSInteger const kRTCAudioSessionErrorLockRequired = -1; -NSInteger const kRTCAudioSessionErrorConfiguration = -2; -NSString * const kRTCAudioSessionOutputVolumeSelector = @"outputVolume"; +NSString *const RTC_CONSTANT_TYPE(RTCAudioSessionErrorDomain) = @"org.webrtc.RTC_OBJC_TYPE(RTCAudioSession)"; +NSInteger const RTC_CONSTANT_TYPE(RTCAudioSessionErrorLockRequired) = -1; +NSInteger const RTC_CONSTANT_TYPE(RTCAudioSessionErrorConfiguration) = -2; +NSString * const RTC_CONSTANT_TYPE(RTCAudioSessionOutputVolumeSelector) = @"outputVolume"; namespace { // Since webrtc::Mutex is not a reentrant lock and cannot check if the mutex is locked, @@ -110,7 +110,7 @@ - (instancetype)initWithAudioSession:(id)audioSession { name:UIApplicationDidBecomeActiveNotification object:nil]; [_session addObserver:self - forKeyPath:kRTCAudioSessionOutputVolumeSelector + forKeyPath:RTC_CONSTANT_TYPE(RTCAudioSessionOutputVolumeSelector) options:NSKeyValueObservingOptionNew | NSKeyValueObservingOptionOld context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class]; @@ -122,7 +122,7 @@ - (instancetype)initWithAudioSession:(id)audioSession { - (void)dealloc { [[NSNotificationCenter defaultCenter] removeObserver:self]; [_session removeObserver:self - forKeyPath:kRTCAudioSessionOutputVolumeSelector + forKeyPath:RTC_CONSTANT_TYPE(RTCAudioSessionOutputVolumeSelector) context:(__bridge void *)RTC_OBJC_TYPE(RTCAudioSession).class]; RTCLog(@"RTC_OBJC_TYPE(RTCAudioSession) (%p): dealloc.", self); } @@ -623,8 +623,8 @@ - (void)handleApplicationDidBecomeActive:(NSNotification *)notification { + (NSError *)lockError { NSDictionary *userInfo = @{NSLocalizedDescriptionKey : @"Must call lockForConfiguration before calling this method."}; - NSError *error = [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain - code:kRTCAudioSessionErrorLockRequired + NSError *error = [[NSError alloc] 
initWithDomain:RTC_CONSTANT_TYPE(RTCAudioSessionErrorDomain) + code:RTC_CONSTANT_TYPE(RTCAudioSessionErrorLockRequired) userInfo:userInfo]; return error; } @@ -794,8 +794,8 @@ - (NSError *)configurationErrorWithDescription:(NSString *)description { NSDictionary* userInfo = @{ NSLocalizedDescriptionKey: description, }; - return [[NSError alloc] initWithDomain:kRTCAudioSessionErrorDomain - code:kRTCAudioSessionErrorConfiguration + return [[NSError alloc] initWithDomain:RTC_CONSTANT_TYPE(RTCAudioSessionErrorDomain) + code:RTC_CONSTANT_TYPE(RTCAudioSessionErrorConfiguration) userInfo:userInfo]; } @@ -881,7 +881,7 @@ - (void)notifyAudioUnitStartFailedWithError:(OSStatus)error { SEL sel = @selector(audioSession:audioUnitStartFailedWithError:); if ([delegate respondsToSelector:sel]) { [delegate audioSession:self - audioUnitStartFailedWithError:[NSError errorWithDomain:kRTCAudioSessionErrorDomain + audioUnitStartFailedWithError:[NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCAudioSessionErrorDomain) code:error userInfo:nil]]; } diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.h b/sdk/objc/components/audio/RTCAudioSessionConfiguration.h index 6c6f808a32..1bfc30ea5f 100644 --- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.h +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.h @@ -15,9 +15,9 @@ NS_ASSUME_NONNULL_BEGIN -RTC_EXTERN const int kRTCAudioSessionPreferredNumberOfChannels; -RTC_EXTERN const double kRTCAudioSessionHighPerformanceSampleRate; -RTC_EXTERN const double kRTCAudioSessionHighPerformanceIOBufferDuration; +RTC_EXTERN const int RTC_CONSTANT_TYPE(RTCAudioSessionPreferredNumberOfChannels); +RTC_EXTERN const double RTC_CONSTANT_TYPE(RTCAudioSessionHighPerformanceSampleRate); +RTC_EXTERN const double RTC_CONSTANT_TYPE(RTCAudioSessionHighPerformanceIOBufferDuration); // Struct to hold configuration values. 
RTC_OBJC_EXPORT diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m index 76e17e7afb..e44675f12d 100644 --- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m @@ -22,13 +22,13 @@ // audio unit. Hence, we will not hit a RTC_CHECK in // VerifyAudioParametersForActiveAudioSession() for a mismatch between the // preferred number of channels and the actual number of channels. -const int kRTCAudioSessionPreferredNumberOfChannels = 1; +const int RTC_CONSTANT_TYPE(RTCAudioSessionPreferredNumberOfChannels) = 1; // Preferred hardware sample rate (unit is in Hertz). The client sample rate // will be set to this value as well to avoid resampling the the audio unit's // format converter. Note that, some devices, e.g. BT headsets, only supports // 8000Hz as native sample rate. -const double kRTCAudioSessionHighPerformanceSampleRate = 48000.0; +const double RTC_CONSTANT_TYPE(RTCAudioSessionHighPerformanceSampleRate) = 48000.0; // Use a hardware I/O buffer size (unit is in seconds) that matches the 10ms // size used by WebRTC. The exact actual size will differ between devices. @@ -38,7 +38,7 @@ // buffers used by WebRTC. It is beneficial for the performance if the native // size is as an even multiple of 10ms as possible since it results in "clean" // callback sequence without bursts of callbacks back to back. -const double kRTCAudioSessionHighPerformanceIOBufferDuration = 0.02; +const double RTC_CONSTANT_TYPE(RTCAudioSessionHighPerformanceIOBufferDuration) = 0.02; static RTC_OBJC_TYPE(RTCAudioSessionConfiguration) *gWebRTCConfiguration = nil; @@ -68,15 +68,15 @@ - (instancetype)init { // Use best sample rate and buffer duration if the CPU has more than one // core. 
- _sampleRate = kRTCAudioSessionHighPerformanceSampleRate; - _ioBufferDuration = kRTCAudioSessionHighPerformanceIOBufferDuration; + _sampleRate = RTC_CONSTANT_TYPE(RTCAudioSessionHighPerformanceSampleRate); + _ioBufferDuration = RTC_CONSTANT_TYPE(RTCAudioSessionHighPerformanceIOBufferDuration); // We try to use mono in both directions to save resources and format // conversions in the audio unit. Some devices does only support stereo; // e.g. wired headset on iPhone 6. // TODO(henrika): add support for stereo if needed. - _inputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels; - _outputNumberOfChannels = kRTCAudioSessionPreferredNumberOfChannels; + _inputNumberOfChannels = RTC_CONSTANT_TYPE(RTCAudioSessionPreferredNumberOfChannels); + _outputNumberOfChannels = RTC_CONSTANT_TYPE(RTCAudioSessionPreferredNumberOfChannels); } return self; } diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index 65198b6298..510aec2df7 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -48,7 +48,7 @@ @implementation RTC_OBJC_TYPE (RTCCameraVideoCapturer) { AVCaptureSession *_captureSession; FourCharCode _preferredOutputPixelFormat; FourCharCode _outputPixelFormat; - RTCVideoRotation _rotation; + RTC_OBJC_TYPE(RTCVideoRotation) _rotation; #if TARGET_WATCH_DEVICE_ROTATION UIInterfaceOrientation _orientation; @@ -97,7 +97,7 @@ - (instancetype)initWithDelegate:(__weak id_orientation = newOrientation; diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h index 30aed69d29..7118161d18 100644 --- a/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h +++ b/sdk/objc/components/capturer/RTCDesktopCapturer+Private.h @@ -21,8 +21,7 @@ NS_ASSUME_NONNULL_BEGIN RTC_OBJC_EXPORT -@protocol RTC_OBJC_TYPE -(DesktopCapturerDelegate) +@protocol 
RTC_OBJC_TYPE(RTCDesktopCapturerPrivateDelegate) -(void)didCaptureVideoFrame:(RTC_OBJC_TYPE(RTCVideoFrame) *) frame; -(void)didSourceCaptureStart; -(void)didSourceCapturePaused; diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.h b/sdk/objc/components/capturer/RTCDesktopCapturer.h index b63912acf0..48c76310a4 100644 --- a/sdk/objc/components/capturer/RTCDesktopCapturer.h +++ b/sdk/objc/components/capturer/RTCDesktopCapturer.h @@ -26,8 +26,7 @@ NS_ASSUME_NONNULL_BEGIN @class RTC_OBJC_TYPE(RTCDesktopCapturer); RTC_OBJC_EXPORT -@protocol RTC_OBJC_TYPE -(RTCDesktopCapturerDelegate) +@protocol RTC_OBJC_TYPE(RTCDesktopCapturerDelegate) -(void)didSourceCaptureStart:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; -(void)didSourceCapturePaused:(RTC_OBJC_TYPE(RTCDesktopCapturer) *) capturer; diff --git a/sdk/objc/components/capturer/RTCDesktopCapturer.mm b/sdk/objc/components/capturer/RTCDesktopCapturer.mm index a1948684d3..fc9244850e 100644 --- a/sdk/objc/components/capturer/RTCDesktopCapturer.mm +++ b/sdk/objc/components/capturer/RTCDesktopCapturer.mm @@ -35,7 +35,7 @@ @implementation RTC_OBJC_TYPE (RTCDesktopCapturer) { - (instancetype)initWithSource:(RTC_OBJC_TYPE(RTCDesktopSource) *)source delegate:(__weak id)delegate captureDelegate:(__weak id)captureDelegate { if (self = [super initWithDelegate:captureDelegate]) { webrtc::DesktopType captureType = webrtc::kScreen; - if(source.sourceType == RTCDesktopSourceTypeWindow) { + if(source.sourceType == RTC_OBJC_TYPE(RTCDesktopSourceTypeWindow)) { captureType = webrtc::kWindow; } _nativeCapturer = std::make_shared(captureType, source.nativeMediaSource->id(), self); diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList.h b/sdk/objc/components/capturer/RTCDesktopMediaList.h index fafeaf5e0d..30360d2d24 100644 --- a/sdk/objc/components/capturer/RTCDesktopMediaList.h +++ b/sdk/objc/components/capturer/RTCDesktopMediaList.h @@ -38,9 +38,9 @@ RTC_OBJC_EXPORT RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE 
(RTCDesktopMediaList) : NSObject --(instancetype)initWithType:(RTCDesktopSourceType)type delegate:(__weak id)delegate; +-(instancetype)initWithType:(RTC_OBJC_TYPE(RTCDesktopSourceType))type delegate:(__weak id)delegate; -@property(nonatomic, readonly) RTCDesktopSourceType sourceType; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCDesktopSourceType) sourceType; - (int32_t)UpdateSourceList:(BOOL)forceReload updateAllThumbnails:(BOOL)updateThumbnail; diff --git a/sdk/objc/components/capturer/RTCDesktopMediaList.mm b/sdk/objc/components/capturer/RTCDesktopMediaList.mm index 2bd6c1da0e..c274ad7825 100644 --- a/sdk/objc/components/capturer/RTCDesktopMediaList.mm +++ b/sdk/objc/components/capturer/RTCDesktopMediaList.mm @@ -20,7 +20,7 @@ #import "RTCDesktopMediaList+Private.h" @implementation RTC_OBJC_TYPE(RTCDesktopMediaList) { - RTCDesktopSourceType _sourceType; + RTC_OBJC_TYPE(RTCDesktopSourceType) _sourceType; NSMutableArray* _sources; __weak id _delegate; } @@ -28,10 +28,10 @@ @implementation RTC_OBJC_TYPE(RTCDesktopMediaList) { @synthesize sourceType = _sourceType; @synthesize nativeMediaList = _nativeMediaList; -- (instancetype)initWithType:(RTCDesktopSourceType)type delegate:(__weak id)delegate{ +- (instancetype)initWithType:(RTC_OBJC_TYPE(RTCDesktopSourceType))type delegate:(__weak id)delegate{ if (self = [super init]) { webrtc::DesktopType captureType = webrtc::kScreen; - if(type == RTCDesktopSourceTypeWindow) { + if(type == RTC_OBJC_TYPE(RTCDesktopSourceTypeWindow)) { captureType = webrtc::kWindow; } _nativeMediaList = std::make_shared(captureType, self); diff --git a/sdk/objc/components/capturer/RTCDesktopSource+Private.h b/sdk/objc/components/capturer/RTCDesktopSource+Private.h index 3f4c4ef25f..787ba8bb65 100644 --- a/sdk/objc/components/capturer/RTCDesktopSource+Private.h +++ b/sdk/objc/components/capturer/RTCDesktopSource+Private.h @@ -24,7 +24,7 @@ NS_ASSUME_NONNULL_BEGIN @interface RTC_OBJC_TYPE(RTCDesktopSource) () - 
(instancetype)initWithNativeSource:(webrtc::MediaSource*) nativeSource - sourceType:(RTCDesktopSourceType) sourceType; + sourceType:(RTC_OBJC_TYPE(RTCDesktopSourceType)) sourceType; @property(nonatomic, readonly)webrtc::MediaSource* nativeMediaSource; diff --git a/sdk/objc/components/capturer/RTCDesktopSource.h b/sdk/objc/components/capturer/RTCDesktopSource.h index 82da458ce6..bad716e19d 100644 --- a/sdk/objc/components/capturer/RTCDesktopSource.h +++ b/sdk/objc/components/capturer/RTCDesktopSource.h @@ -19,9 +19,9 @@ #import "RTCMacros.h" -typedef NS_ENUM(NSInteger, RTCDesktopSourceType) { - RTCDesktopSourceTypeScreen, - RTCDesktopSourceTypeWindow, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCDesktopSourceType)) { + RTC_OBJC_TYPE(RTCDesktopSourceTypeScreen), + RTC_OBJC_TYPE(RTCDesktopSourceTypeWindow), }; RTC_OBJC_EXPORT @@ -33,7 +33,7 @@ RTC_OBJC_EXPORT @property(nonatomic, readonly) NSImage *thumbnail; -@property(nonatomic, readonly) RTCDesktopSourceType sourceType; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCDesktopSourceType) sourceType; -( NSImage *)UpdateThumbnail; diff --git a/sdk/objc/components/capturer/RTCDesktopSource.mm b/sdk/objc/components/capturer/RTCDesktopSource.mm index e1bdc6893a..0b0c2a4021 100644 --- a/sdk/objc/components/capturer/RTCDesktopSource.mm +++ b/sdk/objc/components/capturer/RTCDesktopSource.mm @@ -23,7 +23,7 @@ @implementation RTC_OBJC_TYPE(RTCDesktopSource) { NSString *_sourceId; NSString *_name; NSImage *_thumbnail; - RTCDesktopSourceType _sourceType; + RTC_OBJC_TYPE(RTCDesktopSourceType) _sourceType; } @synthesize sourceId = _sourceId; @@ -33,7 +33,7 @@ @implementation RTC_OBJC_TYPE(RTCDesktopSource) { @synthesize nativeMediaSource = _nativeMediaSource; - (instancetype)initWithNativeSource:(webrtc::MediaSource*)nativeSource - sourceType:(RTCDesktopSourceType) sourceType { + sourceType:(RTC_OBJC_TYPE(RTCDesktopSourceType)) sourceType { if (self = [super init]) { _nativeMediaSource = nativeSource; _sourceId = [NSString 
stringWithUTF8String:std::to_string(nativeSource->id()).c_str()]; diff --git a/sdk/objc/components/capturer/RTCFileVideoCapturer.m b/sdk/objc/components/capturer/RTCFileVideoCapturer.m index bcf1506259..d5089e3e1f 100644 --- a/sdk/objc/components/capturer/RTCFileVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCFileVideoCapturer.m @@ -15,18 +15,18 @@ #import "components/video_frame_buffer/RTCCVPixelBuffer.h" #include "rtc_base/system/gcd_helpers.h" -NSString *const kRTCFileVideoCapturerErrorDomain = +NSString *const RTC_CONSTANT_TYPE(RTCFileVideoCapturerErrorDomain) = @"org.webrtc.RTC_OBJC_TYPE(RTCFileVideoCapturer)"; -typedef NS_ENUM(NSInteger, RTCFileVideoCapturerErrorCode) { - RTCFileVideoCapturerErrorCode_CapturerRunning = 2000, - RTCFileVideoCapturerErrorCode_FileNotFound +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCFileVideoCapturerErrorCode)) { + RTC_OBJC_TYPE(RTCFileVideoCapturerErrorCode_CapturerRunning) = 2000, + RTC_OBJC_TYPE(RTCFileVideoCapturerErrorCode_FileNotFound) }; -typedef NS_ENUM(NSInteger, RTCFileVideoCapturerStatus) { - RTCFileVideoCapturerStatusNotInitialized, - RTCFileVideoCapturerStatusStarted, - RTCFileVideoCapturerStatusStopped +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCFileVideoCapturerStatus)) { + RTC_OBJC_TYPE(RTCFileVideoCapturerStatusNotInitialized), + RTC_OBJC_TYPE(RTCFileVideoCapturerStatusStarted), + RTC_OBJC_TYPE(RTCFileVideoCapturerStatusStopped) }; @interface RTC_OBJC_TYPE (RTCFileVideoCapturer) @@ -37,7 +37,7 @@ @interface RTC_OBJC_TYPE (RTCFileVideoCapturer) @implementation RTC_OBJC_TYPE (RTCFileVideoCapturer) { AVAssetReader *_reader; AVAssetReaderTrackOutput *_outTrack; - RTCFileVideoCapturerStatus _status; + RTC_OBJC_TYPE(RTCFileVideoCapturerStatus) _status; dispatch_queue_t _frameQueue; } @@ -46,16 +46,16 @@ @implementation RTC_OBJC_TYPE (RTCFileVideoCapturer) { - (void)startCapturingFromFileNamed:(NSString *)nameOfFile onError:(RTCFileVideoCapturerErrorBlock)errorBlock { - if (_status == 
RTCFileVideoCapturerStatusStarted) { + if (_status == RTC_OBJC_TYPE(RTCFileVideoCapturerStatusStarted)) { NSError *error = - [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain - code:RTCFileVideoCapturerErrorCode_CapturerRunning + [NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCFileVideoCapturerErrorDomain) + code:RTC_OBJC_TYPE(RTCFileVideoCapturerErrorCode_CapturerRunning) userInfo:@{NSUnderlyingErrorKey : @"Capturer has been started."}]; errorBlock(error); return; } else { - _status = RTCFileVideoCapturerStatusStarted; + _status = RTC_OBJC_TYPE(RTCFileVideoCapturerStatusStarted); } dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{ @@ -63,8 +63,8 @@ - (void)startCapturingFromFileNamed:(NSString *)nameOfFile if (!pathForFile) { NSString *errorString = [NSString stringWithFormat:@"File %@ not found in bundle", nameOfFile]; - NSError *error = [NSError errorWithDomain:kRTCFileVideoCapturerErrorDomain - code:RTCFileVideoCapturerErrorCode_FileNotFound + NSError *error = [NSError errorWithDomain:RTC_CONSTANT_TYPE(RTCFileVideoCapturerErrorDomain) + code:RTC_OBJC_TYPE(RTCFileVideoCapturerErrorCode_FileNotFound) userInfo:@{NSUnderlyingErrorKey : errorString}]; errorBlock(error); return; @@ -101,7 +101,7 @@ - (void)setupReaderOnError:(RTCFileVideoCapturerErrorBlock)errorBlock { [self readNextBuffer]; } - (void)stopCapture { - _status = RTCFileVideoCapturerStatusStopped; + _status = RTC_OBJC_TYPE(RTCFileVideoCapturerStatusStopped); RTCLog(@"File capturer stopped."); } @@ -129,7 +129,7 @@ - (dispatch_queue_t)frameQueue { } - (void)readNextBuffer { - if (_status == RTCFileVideoCapturerStatusStopped) { + if (_status == RTC_OBJC_TYPE(RTCFileVideoCapturerStatusStopped)) { [_reader cancelReading]; _reader = nil; return; diff --git a/sdk/objc/components/network/RTCNetworkMonitor.mm b/sdk/objc/components/network/RTCNetworkMonitor.mm index a3685e0556..fb27835469 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.mm +++ 
b/sdk/objc/components/network/RTCNetworkMonitor.mm @@ -101,7 +101,7 @@ - (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer { }); nw_path_monitor_set_queue( _pathMonitor, - [RTC_OBJC_TYPE(RTCDispatcher) dispatchQueueForType:RTCDispatcherTypeNetworkMonitor]); + [RTC_OBJC_TYPE(RTCDispatcher) dispatchQueueForType:RTC_OBJC_TYPE(RTCDispatcherTypeNetworkMonitor)]); nw_path_monitor_start(_pathMonitor); } } diff --git a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm index ca3fcc3e51..9bbf230862 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm +++ b/sdk/objc/components/renderer/metal/RTCMTLRenderer.mm @@ -36,7 +36,7 @@ static inline void getCubeVertexData(int cropX, int cropHeight, size_t frameWidth, size_t frameHeight, - RTCVideoRotation rotation, + RTC_OBJC_TYPE(RTCVideoRotation) rotation, float *buffer) { // The computed values are the adjusted texture coordinates, in [0..1]. // For the left and top, 0.0 means no cropping and e.g. 0.2 means we're skipping 20% of the @@ -51,28 +51,28 @@ static inline void getCubeVertexData(int cropX, // These arrays map the view coordinates to texture coordinates, taking cropping and rotation // into account. The first two columns are view coordinates, the last two are texture coordinates. 
switch (rotation) { - case RTCVideoRotation_0: { + case RTC_OBJC_TYPE(RTCVideoRotation_0): { float values[16] = {-1.0, -1.0, cropLeft, cropBottom, 1.0, -1.0, cropRight, cropBottom, -1.0, 1.0, cropLeft, cropTop, 1.0, 1.0, cropRight, cropTop}; memcpy(buffer, &values, sizeof(values)); } break; - case RTCVideoRotation_90: { + case RTC_OBJC_TYPE(RTCVideoRotation_90): { float values[16] = {-1.0, -1.0, cropRight, cropBottom, 1.0, -1.0, cropRight, cropTop, -1.0, 1.0, cropLeft, cropBottom, 1.0, 1.0, cropLeft, cropTop}; memcpy(buffer, &values, sizeof(values)); } break; - case RTCVideoRotation_180: { + case RTC_OBJC_TYPE(RTCVideoRotation_180): { float values[16] = {-1.0, -1.0, cropRight, cropTop, 1.0, -1.0, cropLeft, cropTop, -1.0, 1.0, cropRight, cropBottom, 1.0, 1.0, cropLeft, cropBottom}; memcpy(buffer, &values, sizeof(values)); } break; - case RTCVideoRotation_270: { + case RTC_OBJC_TYPE(RTCVideoRotation_270): { float values[16] = {-1.0, -1.0, cropLeft, cropTop, 1.0, -1.0, cropLeft, cropBottom, -1.0, 1.0, cropRight, cropTop, @@ -109,7 +109,7 @@ @implementation RTC_OBJC_TYPE(RTCMTLRenderer) { int _oldCropHeight; int _oldCropX; int _oldCropY; - RTCVideoRotation _oldRotation; + RTC_OBJC_TYPE(RTCVideoRotation) _oldRotation; } @synthesize rotationOverride = _rotationOverride; @@ -173,7 +173,7 @@ - (void)getWidth:(int *)width - (BOOL)setupTexturesForFrame:(nonnull RTC_OBJC_TYPE(RTCVideoFrame) *)frame { // Apply rotation override if set. 
- RTCVideoRotation rotation; + RTC_OBJC_TYPE(RTCVideoRotation) rotation; NSValue *rotationOverride = self.rotationOverride; if (rotationOverride) { #if defined(__IPHONE_11_0) && defined(__IPHONE_OS_VERSION_MAX_ALLOWED) && \ diff --git a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m index 8c08587dc2..14b26c989e 100644 --- a/sdk/objc/components/renderer/metal/RTCMTLVideoView.m +++ b/sdk/objc/components/renderer/metal/RTCMTLVideoView.m @@ -221,9 +221,9 @@ - (void)setRotationOverride:(NSValue *)rotationOverride { [self setNeedsLayout]; } -- (RTCVideoRotation)videoRotation { +- (RTC_OBJC_TYPE(RTCVideoRotation) )videoRotation { if (self.rotationOverride) { - RTCVideoRotation rotation; + RTC_OBJC_TYPE(RTCVideoRotation) rotation; if (@available(iOS 11, macos 10.13, *)) { [self.rotationOverride getValue:&rotation size:sizeof(rotation)]; } else { @@ -238,12 +238,12 @@ - (RTCVideoRotation)videoRotation { - (CGSize)drawableSize { // Flip width/height if the rotations are not the same. 
CGSize videoFrameSize = self.videoFrameSize; - RTCVideoRotation videoRotation = [self videoRotation]; + RTC_OBJC_TYPE(RTCVideoRotation) videoRotation = [self videoRotation]; BOOL useLandscape = - (videoRotation == RTCVideoRotation_0) || (videoRotation == RTCVideoRotation_180); - BOOL sizeIsLandscape = (self.videoFrame.rotation == RTCVideoRotation_0) || - (self.videoFrame.rotation == RTCVideoRotation_180); + (videoRotation == RTC_OBJC_TYPE(RTCVideoRotation_0)) || (videoRotation == RTC_OBJC_TYPE(RTCVideoRotation_180)); + BOOL sizeIsLandscape = (self.videoFrame.rotation == RTC_OBJC_TYPE(RTCVideoRotation_0)) || + (self.videoFrame.rotation == RTC_OBJC_TYPE(RTCVideoRotation_180)); CGSize size; if (useLandscape == sizeIsLandscape) { diff --git a/sdk/objc/components/renderer/opengl/RTCShader.h b/sdk/objc/components/renderer/opengl/RTCShader.h index d1b91fb643..4f4fa3d9c5 100644 --- a/sdk/objc/components/renderer/opengl/RTCShader.h +++ b/sdk/objc/components/renderer/opengl/RTCShader.h @@ -10,12 +10,12 @@ #import "base/RTCVideoFrame.h" -RTC_EXTERN const char kRTCVertexShaderSource[]; +RTC_EXTERN const char RTC_CONSTANT_TYPE(RTCVertexShaderSource)[]; -RTC_EXTERN GLuint RTCCreateShader(GLenum type, const GLchar* source); -RTC_EXTERN GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader); +RTC_EXTERN GLuint RTC_OBJC_TYPE(RTCCreateShader)(GLenum type, const GLchar* source); +RTC_EXTERN GLuint RTC_OBJC_TYPE(RTCCreateProgram)(GLuint vertexShader, GLuint fragmentShader); RTC_EXTERN GLuint -RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]); -RTC_EXTERN BOOL RTCCreateVertexBuffer(GLuint* vertexBuffer, +RTC_OBJC_TYPE(RTCCreateProgramFromFragmentSource)(const char fragmentShaderSource[]); +RTC_EXTERN BOOL RTC_OBJC_TYPE(RTCCreateVertexBuffer)(GLuint* vertexBuffer, GLuint* vertexArray); -RTC_EXTERN void RTCSetVertexData(RTCVideoRotation rotation); +RTC_EXTERN void RTC_OBJC_TYPE(RTCSetVertexData)(RTCVideoRotation rotation); diff --git 
a/sdk/objc/components/renderer/opengl/RTCShader.mm b/sdk/objc/components/renderer/opengl/RTCShader.mm index 25f6eee34e..85d2bb37b6 100644 --- a/sdk/objc/components/renderer/opengl/RTCShader.mm +++ b/sdk/objc/components/renderer/opengl/RTCShader.mm @@ -22,7 +22,7 @@ #include "rtc_base/logging.h" // Vertex shader doesn't do anything except pass coordinates through. -const char kRTCVertexShaderSource[] = +const char RTC_CONSTANT_TYPE(RTCVertexShaderSource)[] = SHADER_VERSION VERTEX_SHADER_IN " vec2 position;\n" VERTEX_SHADER_IN " vec2 texcoord;\n" @@ -34,7 +34,7 @@ // Compiles a shader of the given `type` with GLSL source `source` and returns // the shader handle or 0 on error. -GLuint RTCCreateShader(GLenum type, const GLchar *source) { +GLuint RTC_OBJC_TYPE(RTCCreateShader)(GLenum type, const GLchar *source) { GLuint shader = glCreateShader(type); if (!shader) { return 0; @@ -61,7 +61,7 @@ GLuint RTCCreateShader(GLenum type, const GLchar *source) { // Links a shader program with the given vertex and fragment shaders and // returns the program handle or 0 on error. -GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader) { +GLuint RTC_OBJC_TYPE(RTCCreateProgram)(GLuint vertexShader, GLuint fragmentShader) { if (vertexShader == 0 || fragmentShader == 0) { return 0; } @@ -83,8 +83,8 @@ GLuint RTCCreateProgram(GLuint vertexShader, GLuint fragmentShader) { // Creates and links a shader program with the given fragment shader source and // a plain vertex shader. Returns the program handle or 0 on error. 
-GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) { - GLuint vertexShader = RTCCreateShader(GL_VERTEX_SHADER, kRTCVertexShaderSource); +GLuint RTC_OBJC_TYPE(RTCCreateProgramFromFragmentSource)(const char fragmentShaderSource[]) { + GLuint vertexShader = RTCCreateShader(GL_VERTEX_SHADER, RTC_CONSTANT_TYPE(RTCVertexShaderSource)); RTC_CHECK(vertexShader) << "failed to create vertex shader"; GLuint fragmentShader = RTCCreateShader(GL_FRAGMENT_SHADER, fragmentShaderSource); @@ -120,7 +120,7 @@ GLuint RTCCreateProgramFromFragmentSource(const char fragmentShaderSource[]) { return program; } -BOOL RTCCreateVertexBuffer(GLuint *vertexBuffer, GLuint *vertexArray) { +BOOL RTC_OBJC_TYPE(RTCCreateVertexBuffer)(GLuint *vertexBuffer, GLuint *vertexArray) { glGenBuffers(1, vertexBuffer); if (*vertexBuffer == 0) { glDeleteVertexArrays(1, vertexArray); @@ -132,7 +132,7 @@ BOOL RTCCreateVertexBuffer(GLuint *vertexBuffer, GLuint *vertexArray) { } // Set vertex data to the currently bound vertex buffer. -void RTCSetVertexData(RTCVideoRotation rotation) { +void RTC_OBJC_TYPE(RTCSetVertexData)(RTCVideoRotation rotation) { // When modelview and projection matrices are identity (default) the world is // contained in the square around origin with unit size 2. Drawing to these // coordinates is equivalent to drawing to the entire screen. The texture is @@ -150,16 +150,16 @@ void RTCSetVertexData(RTCVideoRotation rotation) { // Rotate the UV coordinates. 
int rotation_offset; switch (rotation) { - case RTCVideoRotation_0: + case RTC_OBJC_TYPE(RTCVideoRotation_0): rotation_offset = 0; break; - case RTCVideoRotation_90: + case RTC_OBJC_TYPE(RTCVideoRotation_90): rotation_offset = 1; break; - case RTCVideoRotation_180: + case RTC_OBJC_TYPE(RTCVideoRotation_180): rotation_offset = 2; break; - case RTCVideoRotation_270: + case RTC_OBJC_TYPE(RTCVideoRotation_270): rotation_offset = 3; break; } diff --git a/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h b/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h index 9df30a8fa0..e41d228b43 100644 --- a/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h +++ b/sdk/objc/components/renderer/opengl/RTCVideoViewShading.h @@ -24,13 +24,13 @@ RTC_OBJC_EXPORT /** Callback for I420 frames. Each plane is given as a texture. */ - (void)applyShadingForFrameWithWidth : (int)width height : (int)height rotation - : (RTCVideoRotation)rotation yPlane : (GLuint)yPlane uPlane : (GLuint)uPlane vPlane + : (RTC_OBJC_TYPE(RTCVideoRotation))rotation yPlane : (GLuint)yPlane uPlane : (GLuint)uPlane vPlane : (GLuint)vPlane; /** Callback for NV12 frames. Each plane is given as a texture. */ - (void)applyShadingForFrameWithWidth:(int)width height:(int)height - rotation:(RTCVideoRotation)rotation + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation yPlane:(GLuint)yPlane uvPlane:(GLuint)uvPlane; diff --git a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h index ae3003a115..9541e75edc 100644 --- a/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h +++ b/sdk/objc/components/video_codec/RTCCodecSpecificInfoH264.h @@ -14,14 +14,14 @@ #import "RTCMacros.h" /** Class for H264 specific config. 
*/ -typedef NS_ENUM(NSUInteger, RTCH264PacketizationMode) { - RTCH264PacketizationModeNonInterleaved = 0, // Mode 1 - STAP-A, FU-A is allowed - RTCH264PacketizationModeSingleNalUnit // Mode 0 - only single NALU allowed +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCH264PacketizationMode)) { + RTC_OBJC_TYPE(RTCH264PacketizationModeNonInterleaved) = 0, // Mode 1 - STAP-A, FU-A is allowed + RTC_OBJC_TYPE(RTCH264PacketizationModeSingleNalUnit) // Mode 0 - only single NALU allowed }; RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCCodecSpecificInfoH264) : NSObject -@property(nonatomic, assign) RTCH264PacketizationMode packetizationMode; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCH264PacketizationMode) packetizationMode; @end diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m index 6e3baa8750..9c1943565a 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoDecoderFactory.m @@ -25,25 +25,25 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) - (NSArray *)supportedCodecs { NSDictionary *constrainedHighParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedHigh), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name) parameters:constrainedHighParams]; NSDictionary *constrainedBaselineParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedBaseline), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; 
RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name) parameters:constrainedBaselineParams]; RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name]; + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name)]; NSMutableArray *result = [@[ constrainedHighInfo, @@ -53,28 +53,28 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoDecoderFactory) if ([RTC_OBJC_TYPE(RTCVideoDecoderVP9) isSupported]) { [result - addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name]]; + addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name)]]; } #if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY) - [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name]]; + [result addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name)]]; #endif return result; } - (id)createDecoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { - if ([info.name isEqualToString:kRTCVideoCodecH264Name]) { + if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name)]) { return [[RTC_OBJC_TYPE(RTCVideoDecoderH264) alloc] init]; - } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) { + } else if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name)]) { return [RTC_OBJC_TYPE(RTCVideoDecoderVP8) vp8Decoder]; - } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name] && + } else if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name)] && [RTC_OBJC_TYPE(RTCVideoDecoderVP9) isSupported]) { return [RTC_OBJC_TYPE(RTCVideoDecoderVP9) vp9Decoder]; } #if defined(RTC_DAV1D_IN_INTERNAL_DECODER_FACTORY) - if ([info.name 
isEqualToString:kRTCVideoCodecAv1Name]) { + if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name)]) { return [RTC_OBJC_TYPE(RTCVideoDecoderAV1) av1Decoder]; } #endif diff --git a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m index 3cc92382e6..50db62b610 100644 --- a/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m +++ b/sdk/objc/components/video_codec/RTCDefaultVideoEncoderFactory.m @@ -27,25 +27,25 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) + (NSArray *)supportedCodecs { NSDictionary *constrainedHighParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedHigh), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedHighInfo = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name) parameters:constrainedHighParams]; NSDictionary *constrainedBaselineParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedBaseline), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; RTC_OBJC_TYPE(RTCVideoCodecInfo) *constrainedBaselineInfo = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecH264Name + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name) parameters:constrainedBaselineParams]; RTC_OBJC_TYPE(RTCVideoCodecInfo) *vp8Info = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp8Name]; + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name)]; NSMutableArray *result = [@[ constrainedHighInfo, @@ 
-55,12 +55,12 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) if ([RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) { [result - addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecVp9Name parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderVP9) scalabilityModes]]]; + addObject:[[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name) parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderVP9) scalabilityModes]]]; } #if defined(RTC_USE_LIBAOM_AV1_ENCODER) RTC_OBJC_TYPE(RTCVideoCodecInfo) *av1Info = - [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:kRTCVideoCodecAv1Name parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderAV1) scalabilityModes]]; + [[RTC_OBJC_TYPE(RTCVideoCodecInfo) alloc] initWithName:RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name) parameters:nil scalabilityModes:[RTC_OBJC_TYPE(RTCVideoEncoderAV1) scalabilityModes]]; [result addObject:av1Info]; #endif @@ -68,17 +68,17 @@ @implementation RTC_OBJC_TYPE (RTCDefaultVideoEncoderFactory) } - (id)createEncoder:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)info { - if ([info.name isEqualToString:kRTCVideoCodecH264Name]) { + if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name)]) { return [[RTC_OBJC_TYPE(RTCVideoEncoderH264) alloc] initWithCodecInfo:info]; - } else if ([info.name isEqualToString:kRTCVideoCodecVp8Name]) { + } else if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name)]) { return [RTC_OBJC_TYPE(RTCVideoEncoderVP8) vp8Encoder]; - } else if ([info.name isEqualToString:kRTCVideoCodecVp9Name] && + } else if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name)] && [RTC_OBJC_TYPE(RTCVideoEncoderVP9) isSupported]) { return [RTC_OBJC_TYPE(RTCVideoEncoderVP9) vp9Encoder]; } #if defined(RTC_USE_LIBAOM_AV1_ENCODER) - if ([info.name isEqualToString:kRTCVideoCodecAv1Name]) { + if ([info.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name)]) { return 
[RTC_OBJC_TYPE(RTCVideoEncoderAV1) av1Encoder]; } #endif diff --git a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h index dac7bb5610..727f3b1ffb 100644 --- a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h +++ b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.h @@ -12,49 +12,49 @@ #import "RTCMacros.h" -RTC_OBJC_EXPORT extern NSString *const kRTCVideoCodecH264Name; -RTC_OBJC_EXPORT extern NSString *const kRTCLevel31ConstrainedHigh; -RTC_OBJC_EXPORT extern NSString *const kRTCLevel31ConstrainedBaseline; -RTC_OBJC_EXPORT extern NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedHigh; -RTC_OBJC_EXPORT extern NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedBaseline; +RTC_OBJC_EXPORT extern NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecH264Name); +RTC_OBJC_EXPORT extern NSString *const RTC_CONSTANT_TYPE(RTCLevel31ConstrainedHigh); +RTC_OBJC_EXPORT extern NSString *const RTC_CONSTANT_TYPE(RTCLevel31ConstrainedBaseline); +RTC_OBJC_EXPORT extern NSString *const RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedHigh); +RTC_OBJC_EXPORT extern NSString *const RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedBaseline); /** H264 Profiles and levels. 
*/ -typedef NS_ENUM(NSUInteger, RTCH264Profile) { - RTCH264ProfileConstrainedBaseline, - RTCH264ProfileBaseline, - RTCH264ProfileMain, - RTCH264ProfileConstrainedHigh, - RTCH264ProfileHigh, +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCH264Profile)) { + RTC_OBJC_TYPE(RTCH264ProfileConstrainedBaseline), + RTC_OBJC_TYPE(RTCH264ProfileBaseline), + RTC_OBJC_TYPE(RTCH264ProfileMain), + RTC_OBJC_TYPE(RTCH264ProfileConstrainedHigh), + RTC_OBJC_TYPE(RTCH264ProfileHigh), }; -typedef NS_ENUM(NSUInteger, RTCH264Level) { - RTCH264Level1_b = 0, - RTCH264Level1 = 10, - RTCH264Level1_1 = 11, - RTCH264Level1_2 = 12, - RTCH264Level1_3 = 13, - RTCH264Level2 = 20, - RTCH264Level2_1 = 21, - RTCH264Level2_2 = 22, - RTCH264Level3 = 30, - RTCH264Level3_1 = 31, - RTCH264Level3_2 = 32, - RTCH264Level4 = 40, - RTCH264Level4_1 = 41, - RTCH264Level4_2 = 42, - RTCH264Level5 = 50, - RTCH264Level5_1 = 51, - RTCH264Level5_2 = 52 +typedef NS_ENUM(NSUInteger, RTC_OBJC_TYPE(RTCH264Level)) { + RTC_OBJC_TYPE(RTCH264Level1_b) = 0, + RTC_OBJC_TYPE(RTCH264Level1) = 10, + RTC_OBJC_TYPE(RTCH264Level1_1) = 11, + RTC_OBJC_TYPE(RTCH264Level1_2) = 12, + RTC_OBJC_TYPE(RTCH264Level1_3) = 13, + RTC_OBJC_TYPE(RTCH264Level2) = 20, + RTC_OBJC_TYPE(RTCH264Level2_1) = 21, + RTC_OBJC_TYPE(RTCH264Level2_2) = 22, + RTC_OBJC_TYPE(RTCH264Level3) = 30, + RTC_OBJC_TYPE(RTCH264Level3_1) = 31, + RTC_OBJC_TYPE(RTCH264Level3_2) = 32, + RTC_OBJC_TYPE(RTCH264Level4) = 40, + RTC_OBJC_TYPE(RTCH264Level4_1) = 41, + RTC_OBJC_TYPE(RTCH264Level4_2) = 42, + RTC_OBJC_TYPE(RTCH264Level5) = 50, + RTC_OBJC_TYPE(RTCH264Level5_1) = 51, + RTC_OBJC_TYPE(RTCH264Level5_2) = 52 }; RTC_OBJC_EXPORT @interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) : NSObject -@property(nonatomic, readonly) RTCH264Profile profile; -@property(nonatomic, readonly) RTCH264Level level; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCH264Profile) profile; +@property(nonatomic, readonly) RTC_OBJC_TYPE(RTCH264Level) level; @property(nonatomic, readonly) NSString *hexString; 
- (instancetype)initWithHexString:(NSString *)hexString; -- (instancetype)initWithProfile:(RTCH264Profile)profile level:(RTCH264Level)level; +- (instancetype)initWithProfile:(RTC_OBJC_TYPE(RTCH264Profile))profile level:(RTC_OBJC_TYPE(RTCH264Level))level; @end diff --git a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm index f0ef3ec232..cfcde61f13 100644 --- a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm +++ b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm @@ -26,12 +26,12 @@ } // namespace -NSString *const kRTCVideoCodecH264Name = @(cricket::kH264CodecName); -NSString *const kRTCLevel31ConstrainedHigh = @"640c1f"; -NSString *const kRTCLevel31ConstrainedBaseline = @"42e01f"; -NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedHigh = +NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecH264Name) = @(cricket::kH264CodecName); +NSString *const RTC_CONSTANT_TYPE(RTCLevel31ConstrainedHigh) = @"640c1f"; +NSString *const RTC_CONSTANT_TYPE(RTCLevel31ConstrainedBaseline) = @"42e01f"; +NSString *const RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedHigh) = MaxSupportedProfileLevelConstrainedHigh(); -NSString *const kRTCMaxSupportedH264ProfileLevelConstrainedBaseline = +NSString *const RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedBaseline) = MaxSupportedProfileLevelConstrainedBaseline(); namespace { @@ -59,7 +59,7 @@ return profile; } #endif - return kRTCLevel31ConstrainedBaseline; + return RTC_CONSTANT_TYPE(RTCLevel31ConstrainedBaseline); } NSString *MaxSupportedProfileLevelConstrainedHigh() { @@ -69,7 +69,7 @@ return profile; } #endif - return kRTCLevel31ConstrainedHigh; + return RTC_CONSTANT_TYPE(RTCLevel31ConstrainedHigh); } } // namespace @@ -77,8 +77,8 @@ @interface RTC_OBJC_TYPE (RTCH264ProfileLevelId) () - @property(nonatomic, assign) RTCH264Profile profile; -@property(nonatomic, assign) RTCH264Level level; + @property(nonatomic, assign) 
RTC_OBJC_TYPE(RTCH264Profile) profile; +@property(nonatomic, assign) RTC_OBJC_TYPE(RTCH264Level) level; @property(nonatomic, strong) NSString *hexString; @end @@ -96,14 +96,14 @@ - (instancetype)initWithHexString:(NSString *)hexString { absl::optional profile_level_id = webrtc::ParseH264ProfileLevelId([hexString cStringUsingEncoding:NSUTF8StringEncoding]); if (profile_level_id.has_value()) { - self.profile = static_cast(profile_level_id->profile); - self.level = static_cast(profile_level_id->level); + self.profile = static_cast(profile_level_id->profile); + self.level = static_cast(profile_level_id->level); } } return self; } -- (instancetype)initWithProfile:(RTCH264Profile)profile level:(RTCH264Level)level { +- (instancetype)initWithProfile:(RTC_OBJC_TYPE(RTCH264Profile))profile level:(RTC_OBJC_TYPE(RTCH264Level))level { if (self = [super init]) { self.profile = profile; self.level = level; diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m index bdae19d687..3cab153494 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m +++ b/sdk/objc/components/video_codec/RTCVideoDecoderFactoryH264.m @@ -17,10 +17,10 @@ @implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) - (NSArray *)supportedCodecs { NSMutableArray *codecs = [NSMutableArray array]; - NSString *codecName = kRTCVideoCodecH264Name; + NSString *codecName = RTC_CONSTANT_TYPE(RTCVideoCodecH264Name); NSDictionary *constrainedHighParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedHigh), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; @@ -30,7 +30,7 @@ @implementation RTC_OBJC_TYPE (RTCVideoDecoderFactoryH264) [codecs addObject:constrainedHighInfo]; NSDictionary *constrainedBaselineParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, + 
@"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedBaseline), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; diff --git a/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm b/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm index 563758e3c9..4e977c968f 100644 --- a/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoDecoderH264.mm @@ -64,7 +64,7 @@ void decompressionOutputCallback(void *decoderRef, [[RTC_OBJC_TYPE(RTCCVPixelBuffer) alloc] initWithPixelBuffer:imageBuffer]; RTC_OBJC_TYPE(RTCVideoFrame) *decodedFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:frameBuffer - rotation:RTCVideoRotation_0 + rotation:RTC_OBJC_TYPE(RTCVideoRotation_0) timeStampNs:CMTimeGetSeconds(timestamp) * rtc::kNumNanosecsPerSec]; decodedFrame.timeStamp = decodeParams->timestamp; decodeParams->callback(decodedFrame); diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m index 9843849307..cf55c252cb 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactoryH264.m @@ -17,10 +17,10 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) - (NSArray *)supportedCodecs { NSMutableArray *codecs = [NSMutableArray array]; - NSString *codecName = kRTCVideoCodecH264Name; + NSString *codecName = RTC_CONSTANT_TYPE(RTCVideoCodecH264Name); NSDictionary *constrainedHighParams = @{ - @"profile-level-id" : kRTCMaxSupportedH264ProfileLevelConstrainedHigh, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedHigh), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; @@ -30,7 +30,7 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderFactoryH264) [codecs addObject:constrainedHighInfo]; NSDictionary *constrainedBaselineParams = @{ - @"profile-level-id" : 
kRTCMaxSupportedH264ProfileLevelConstrainedBaseline, + @"profile-level-id" : RTC_CONSTANT_TYPE(RTCMaxSupportedH264ProfileLevelConstrainedBaseline), @"level-asymmetry-allowed" : @"1", @"packetization-mode" : @"1", }; diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm index 0683696fbc..cb837a43c0 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderH264.mm @@ -45,7 +45,7 @@ - (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags : (CMSampleBufferRef)sampleBuffer codecSpecificInfo : (id)codecSpecificInfo width : (int32_t)width height : (int32_t)height renderTimeMs : (int64_t)renderTimeMs timestamp : (uint32_t)timestamp rotation - : (RTCVideoRotation)rotation; + : (RTC_OBJC_TYPE(RTCVideoRotation))rotation; @end @@ -63,14 +63,14 @@ - (void)frameWasEncoded : (OSStatus)status flags : (VTEncodeInfoFlags)infoFlags const OSType kNV12PixelFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange; -typedef NS_ENUM(NSInteger, RTCVideoEncodeMode) { - Variable = 0, - Constant = 1, +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCVideoEncodeMode)) { + RTC_OBJC_TYPE(RTCVideoEncodeModeVariable) = 0, + RTC_OBJC_TYPE(RTCVideoEncodeModeConstant) = 1, }; -NSArray *CreateRateLimitArray(uint32_t computedBitrateBps, RTCVideoEncodeMode mode) { +NSArray *CreateRateLimitArray(uint32_t computedBitrateBps, RTC_OBJC_TYPE(RTCVideoEncodeMode) mode) { switch (mode) { - case Variable: { + case RTC_OBJC_TYPE(RTCVideoEncodeModeVariable): { // 5 seconds should be an okay interval for VBR to enforce the long-term // limit. 
float avgInterval = 5.0; @@ -81,7 +81,7 @@ typedef NS_ENUM(NSInteger, RTCVideoEncodeMode) { computedBitrateBps * kLimitToAverageBitRateFactor / kBitsPerByte; return @[ @(peakBytesPerSecond), @(peakInterval), @(avgBytesPerSecond), @(avgInterval) ]; } - case Constant: { + case RTC_OBJC_TYPE(RTCVideoEncodeModeConstant): { // CBR should be enforces with granularity of a second. float targetInterval = 1.0; int32_t targetBitrate = computedBitrateBps / kBitsPerByte; @@ -99,7 +99,7 @@ typedef NS_ENUM(NSInteger, RTCVideoEncodeMode) { int32_t h, int64_t rtms, uint32_t ts, - RTCVideoRotation r) + RTC_OBJC_TYPE(RTCVideoRotation) r) : encoder(e), width(w), height(h), render_time_ms(rtms), timestamp(ts), rotation(r) { if (csi) { codecSpecificInfo = csi; @@ -114,7 +114,7 @@ typedef NS_ENUM(NSInteger, RTCVideoEncodeMode) { int32_t height; int64_t render_time_ms; uint32_t timestamp; - RTCVideoRotation rotation; + RTC_OBJC_TYPE(RTCVideoRotation) rotation; }; // We receive I420Frames as input, but we need to feed CVPixelBuffers into the @@ -355,18 +355,18 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { uint32_t _encoderBitrateBps; uint32_t _encoderFrameRate; uint32_t _maxAllowedFrameRate; - RTCH264PacketizationMode _packetizationMode; + RTC_OBJC_TYPE(RTCH264PacketizationMode) _packetizationMode; absl::optional _profile_level_id; RTCVideoEncoderCallback _callback; int32_t _width; int32_t _height; VTCompressionSessionRef _compressionSession; CVPixelBufferPoolRef _pixelBufferPool; - RTCVideoCodecMode _codecMode; + RTC_OBJC_TYPE(RTCVideoCodecMode) _codecMode; unsigned int _maxQP; unsigned int _minBitrate; unsigned int _maxBitrate; - RTCVideoEncodeMode _encodeMode; + RTC_OBJC_TYPE(RTCVideoEncodeMode) _encodeMode; webrtc::H264BitstreamParser _h264BitstreamParser; std::vector _frameScaleBuffer; @@ -384,15 +384,15 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderH264) { - (instancetype)initWithCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)codecInfo { if (self = [super init]) { 
_codecInfo = codecInfo; - _packetizationMode = RTCH264PacketizationModeNonInterleaved; + _packetizationMode = RTC_OBJC_TYPE(RTCH264PacketizationModeNonInterleaved); _profile_level_id = webrtc::ParseSdpForH264ProfileLevelId([codecInfo nativeSdpVideoFormat].parameters); _previousPresentationTimeStamp = kCMTimeZero; RTC_DCHECK(_profile_level_id); RTC_LOG(LS_INFO) << "Using profile " << CFStringToString(ExtractProfile( - *_profile_level_id, _codecMode == RTCVideoCodecModeScreensharing)); - RTC_CHECK([codecInfo.name isEqualToString:kRTCVideoCodecH264Name]); + *_profile_level_id, _codecMode == RTC_OBJC_TYPE(RTCVideoCodecModeScreensharing))); + RTC_CHECK([codecInfo.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name)]); } return self; } @@ -404,14 +404,14 @@ - (void)dealloc { - (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)settings numberOfCores:(int)numberOfCores { RTC_DCHECK(settings); - RTC_DCHECK([settings.name isEqualToString:kRTCVideoCodecH264Name]); + RTC_DCHECK([settings.name isEqualToString:RTC_CONSTANT_TYPE(RTCVideoCodecH264Name)]); _width = settings.width; _height = settings.height; _codecMode = settings.mode; _maxQP = settings.qpMax; - _encodeMode = Variable; // Always variable mode for now + _encodeMode = RTC_OBJC_TYPE(RTCVideoEncodeModeVariable); // Always variable mode for now _minBitrate = settings.minBitrate * 1000; // minBitrate is in kbps. _maxBitrate = settings.maxBitrate * 1000; // maxBitrate is in kbps. @@ -421,7 +421,7 @@ - (NSInteger)startEncodeWithSettings:(RTC_OBJC_TYPE(RTCVideoEncoderSettings) *)s (aligned_width * aligned_height)); // We can only set average bitrate on the HW encoder. - if (_encodeMode == Constant) { + if (_encodeMode == RTC_OBJC_TYPE(RTCVideoEncodeModeConstant)) { _targetBitrateBps = _maxBitrate; } else { _targetBitrateBps = settings.startBitrate * 1000; // startBitrate is in kbps. 
@@ -517,7 +517,7 @@ - (NSInteger)encode:(RTC_OBJC_TYPE(RTCVideoFrame) *)frame // Check if we need a keyframe. if (!isKeyframeRequired && frameTypes) { for (NSNumber *frameType in frameTypes) { - if ((RTCFrameType)frameType.intValue == RTCFrameTypeVideoFrameKey) { + if ((RTC_OBJC_TYPE(RTCFrameType))frameType.intValue == RTC_OBJC_TYPE(RTCFrameTypeVideoFrameKey)) { isKeyframeRequired = YES; break; } @@ -752,7 +752,7 @@ - (void)configureCompressionSession { // https://developer.apple.com/documentation/videotoolbox/kvtcompressionpropertykey_maxallowedframeqp if (@available(iOS 15.0, macOS 12.0, *)) { // Only enable for screen sharing and let VideoToolbox do the optimizing as much as possible. - if (_codecMode == RTCVideoCodecModeScreensharing) { + if (_codecMode == RTC_OBJC_TYPE(RTCVideoCodecModeScreensharing)) { RTC_LOG(LS_INFO) << "Configuring VideoToolbox to use maxQP: " << kHighH264QpThreshold << " mode: " << _codecMode; SetVTSessionProperty( @@ -762,7 +762,7 @@ - (void)configureCompressionSession { SetVTSessionProperty( _compressionSession, kVTCompressionPropertyKey_ProfileLevel, - ExtractProfile(*_profile_level_id, _codecMode == RTCVideoCodecModeScreensharing)); + ExtractProfile(*_profile_level_id, _codecMode == RTC_OBJC_TYPE(RTCVideoCodecModeScreensharing))); SetVTSessionProperty(_compressionSession, kVTCompressionPropertyKey_AllowFrameReordering, false); // [self updateEncoderBitrateAndFrameRate]; @@ -857,7 +857,7 @@ - (void)frameWasEncoded:(OSStatus)status height:(int32_t)height renderTimeMs:(int64_t)renderTimeMs timestamp:(uint32_t)timestamp - rotation:(RTCVideoRotation)rotation { + rotation:(RTC_OBJC_TYPE(RTCVideoRotation))rotation { RTCVideoEncoderCallback callback = _callback; if (!callback) { return; @@ -897,13 +897,13 @@ - (void)frameWasEncoded:(OSStatus)status }]; frame.encodedWidth = width; frame.encodedHeight = height; - frame.frameType = isKeyframe ? RTCFrameTypeVideoFrameKey : RTCFrameTypeVideoFrameDelta; + frame.frameType = isKeyframe ? 
RTC_OBJC_TYPE(RTCFrameTypeVideoFrameKey) : RTC_OBJC_TYPE(RTCFrameTypeVideoFrameDelta); frame.captureTimeMs = renderTimeMs; frame.timeStamp = timestamp; frame.rotation = rotation; - frame.contentType = (_codecMode == RTCVideoCodecModeScreensharing) ? - RTCVideoContentTypeScreenshare : - RTCVideoContentTypeUnspecified; + frame.contentType = (_codecMode == RTC_OBJC_TYPE(RTCVideoCodecModeScreensharing)) ? + RTC_OBJC_TYPE(RTCVideoContentTypeScreenshare) : + RTC_OBJC_TYPE(RTCVideoContentTypeUnspecified); frame.flags = webrtc::VideoSendTiming::kInvalid; _h264BitstreamParser.ParseBitstream(*buffer); diff --git a/sdk/objc/helpers/RTCCameraPreviewView.m b/sdk/objc/helpers/RTCCameraPreviewView.m index db8a265efd..44ef49e530 100644 --- a/sdk/objc/helpers/RTCCameraPreviewView.m +++ b/sdk/objc/helpers/RTCCameraPreviewView.m @@ -55,16 +55,16 @@ - (void)setCaptureSession:(AVCaptureSession *)captureSession { } _captureSession = captureSession; [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeMain + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeMain) block:^{ AVCaptureVideoPreviewLayer *previewLayer = [self previewLayer]; [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession) block:^{ previewLayer.session = captureSession; #if !TARGET_OS_TV [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeMain + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeMain) block:^{ [self setCorrectVideoOrientation]; }]; diff --git a/sdk/objc/helpers/RTCDispatcher+Private.h b/sdk/objc/helpers/RTCDispatcher+Private.h index 195c651790..e587e11c0a 100644 --- a/sdk/objc/helpers/RTCDispatcher+Private.h +++ b/sdk/objc/helpers/RTCDispatcher+Private.h @@ -13,6 +13,6 @@ @interface RTC_OBJC_TYPE (RTCDispatcher) () - + (dispatch_queue_t)dispatchQueueForType : (RTCDispatcherQueueType)dispatchType; + + (dispatch_queue_t)dispatchQueueForType : 
(RTC_OBJC_TYPE(RTCDispatcherQueueType))dispatchType; @end diff --git a/sdk/objc/helpers/RTCDispatcher.h b/sdk/objc/helpers/RTCDispatcher.h index e148af6dea..144ca606e0 100644 --- a/sdk/objc/helpers/RTCDispatcher.h +++ b/sdk/objc/helpers/RTCDispatcher.h @@ -12,16 +12,16 @@ #import "RTCMacros.h" -typedef NS_ENUM(NSInteger, RTCDispatcherQueueType) { +typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCDispatcherQueueType)) { // Main dispatcher queue. - RTCDispatcherTypeMain, + RTC_OBJC_TYPE(RTCDispatcherTypeMain), // Used for starting/stopping AVCaptureSession, and assigning // capture session to AVCaptureVideoPreviewLayer. - RTCDispatcherTypeCaptureSession, + RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession), // Used for operations on AVAudioSession. - RTCDispatcherTypeAudioSession, + RTC_OBJC_TYPE(RTCDispatcherTypeAudioSession), // Used for operations on NWPathMonitor. - RTCDispatcherTypeNetworkMonitor, + RTC_OBJC_TYPE(RTCDispatcherTypeNetworkMonitor), }; /** Dispatcher that asynchronously dispatches blocks to a specific @@ -36,11 +36,11 @@ RTC_OBJC_EXPORT * @param dispatchType The queue type to dispatch on. * @param block The block to dispatch asynchronously. */ -+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType block:(dispatch_block_t)block; ++ (void)dispatchAsyncOnType:(RTC_OBJC_TYPE(RTCDispatcherQueueType))dispatchType block:(dispatch_block_t)block; /** Returns YES if run on queue for the dispatchType otherwise NO. * Useful for asserting that a method is run on a correct queue. 
*/ -+ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType; ++ (BOOL)isOnQueueForType:(RTC_OBJC_TYPE(RTCDispatcherQueueType))dispatchType; @end diff --git a/sdk/objc/helpers/RTCDispatcher.m b/sdk/objc/helpers/RTCDispatcher.m index 4df19bc297..7cfacc9faa 100644 --- a/sdk/objc/helpers/RTCDispatcher.m +++ b/sdk/objc/helpers/RTCDispatcher.m @@ -30,13 +30,13 @@ + (void)initialize { }); } -+ (void)dispatchAsyncOnType:(RTCDispatcherQueueType)dispatchType ++ (void)dispatchAsyncOnType:(RTC_OBJC_TYPE(RTCDispatcherQueueType))dispatchType block:(dispatch_block_t)block { dispatch_queue_t queue = [self dispatchQueueForType:dispatchType]; dispatch_async(queue, block); } -+ (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType { ++ (BOOL)isOnQueueForType:(RTC_OBJC_TYPE(RTCDispatcherQueueType))dispatchType { dispatch_queue_t targetQueue = [self dispatchQueueForType:dispatchType]; const char* targetLabel = dispatch_queue_get_label(targetQueue); const char* currentLabel = dispatch_queue_get_label(DISPATCH_CURRENT_QUEUE_LABEL); @@ -49,15 +49,15 @@ + (BOOL)isOnQueueForType:(RTCDispatcherQueueType)dispatchType { #pragma mark - Private -+ (dispatch_queue_t)dispatchQueueForType:(RTCDispatcherQueueType)dispatchType { ++ (dispatch_queue_t)dispatchQueueForType:(RTC_OBJC_TYPE(RTCDispatcherQueueType))dispatchType { switch (dispatchType) { - case RTCDispatcherTypeMain: + case RTC_OBJC_TYPE(RTCDispatcherTypeMain): return dispatch_get_main_queue(); - case RTCDispatcherTypeCaptureSession: + case RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession): return kCaptureSessionQueue; - case RTCDispatcherTypeAudioSession: + case RTC_OBJC_TYPE(RTCDispatcherTypeAudioSession): return kAudioSessionQueue; - case RTCDispatcherTypeNetworkMonitor: + case RTC_OBJC_TYPE(RTCDispatcherTypeNetworkMonitor): return kNetworkMonitorQueue; } } diff --git a/sdk/objc/helpers/RTCYUVHelper.h b/sdk/objc/helpers/RTCYUVHelper.h index ec8ce48355..354be73729 100644 --- a/sdk/objc/helpers/RTCYUVHelper.h +++ 
b/sdk/objc/helpers/RTCYUVHelper.h @@ -32,7 +32,7 @@ RTC_OBJC_EXPORT dstStrideV:(int)dstStrideV width:(int)width height:(int)height - mode:(RTCVideoRotation)mode; + mode:(RTC_OBJC_TYPE(RTCVideoRotation))mode; + (int)I420ToNV12:(const uint8_t*)srcY srcStrideY:(int)srcStrideY diff --git a/sdk/objc/helpers/RTCYUVHelper.mm b/sdk/objc/helpers/RTCYUVHelper.mm index 4a39d469da..8c52677c8f 100644 --- a/sdk/objc/helpers/RTCYUVHelper.mm +++ b/sdk/objc/helpers/RTCYUVHelper.mm @@ -28,7 +28,7 @@ + (void)I420Rotate:(const uint8_t*)srcY dstStrideV:(int)dstStrideV width:(int)width height:(int)height - mode:(RTCVideoRotation)mode { + mode:(RTC_OBJC_TYPE(RTCVideoRotation))mode { libyuv::I420Rotate(srcY, srcStrideY, srcU, diff --git a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm index b3daacb334..c16545d6e1 100644 --- a/sdk/objc/native/src/audio/voice_processing_audio_unit.mm +++ b/sdk/objc/native/src/audio/voice_processing_audio_unit.mm @@ -447,7 +447,7 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { // - linear PCM => noncompressed audio data format with one frame per packet // - no need to specify interleaving since only mono is supported AudioStreamBasicDescription format; - RTC_DCHECK_EQ(1, kRTCAudioSessionPreferredNumberOfChannels); + RTC_DCHECK_EQ(1, RTC_CONSTANT_TYPE(RTCAudioSessionPreferredNumberOfChannels)); format.mSampleRate = sample_rate; format.mFormatID = kAudioFormatLinearPCM; format.mFormatFlags = @@ -455,7 +455,7 @@ static OSStatus GetAGCState(AudioUnit audio_unit, UInt32* enabled) { format.mBytesPerPacket = kBytesPerSample; format.mFramesPerPacket = 1; // uncompressed. 
format.mBytesPerFrame = kBytesPerSample; - format.mChannelsPerFrame = kRTCAudioSessionPreferredNumberOfChannels; + format.mChannelsPerFrame = RTC_CONSTANT_TYPE(RTCAudioSessionPreferredNumberOfChannels); format.mBitsPerChannel = 8 * kBytesPerSample; return format; } diff --git a/sdk/objc/native/src/objc_audio_device.h b/sdk/objc/native/src/objc_audio_device.h index 88f6f19f99..7bd02960f9 100644 --- a/sdk/objc/native/src/objc_audio_device.h +++ b/sdk/objc/native/src/objc_audio_device.h @@ -19,7 +19,7 @@ #include "modules/audio_device/include/audio_device.h" #include "rtc_base/thread.h" -@class RTC_OBJC_TYPE(ObjCAudioDeviceDelegate); +@class RTC_OBJC_TYPE(RTCObjCAudioDeviceDelegate); namespace webrtc { @@ -267,7 +267,7 @@ class ObjCAudioDeviceModule : public AudioDeviceModule { rtc::BufferT record_audio_buffer_; // Delegate object provided to RTCAudioDevice during initialization - RTC_OBJC_TYPE(ObjCAudioDeviceDelegate)* audio_device_delegate_; + RTC_OBJC_TYPE(RTCObjCAudioDeviceDelegate)* audio_device_delegate_; }; } // namespace objc_adm diff --git a/sdk/objc/native/src/objc_audio_device.mm b/sdk/objc/native/src/objc_audio_device.mm index 5fb72d8a5c..7fa33c5dfd 100644 --- a/sdk/objc/native/src/objc_audio_device.mm +++ b/sdk/objc/native/src/objc_audio_device.mm @@ -77,7 +77,7 @@ if (![audio_device_ isInitialized]) { if (audio_device_delegate_ == nil) { - audio_device_delegate_ = [[RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) alloc] + audio_device_delegate_ = [[RTC_OBJC_TYPE(RTCObjCAudioDeviceDelegate) alloc] initWithAudioDeviceModule:rtc::scoped_refptr(this) audioDeviceThread:thread_]; } diff --git a/sdk/objc/native/src/objc_audio_device_delegate.h b/sdk/objc/native/src/objc_audio_device_delegate.h index 0b546f269c..8cf9c0c80a 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.h +++ b/sdk/objc/native/src/objc_audio_device_delegate.h @@ -22,7 +22,7 @@ class ObjCAudioDeviceModule; } // namespace objc_adm } // namespace webrtc -@interface 
RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) : NSObject +@interface RTC_OBJC_TYPE(RTCObjCAudioDeviceDelegate) : NSObject - (instancetype)initWithAudioDeviceModule: (rtc::scoped_refptr)audioDeviceModule diff --git a/sdk/objc/native/src/objc_audio_device_delegate.mm b/sdk/objc/native/src/objc_audio_device_delegate.mm index f4c8cfb71a..0c839cc76b 100644 --- a/sdk/objc/native/src/objc_audio_device_delegate.mm +++ b/sdk/objc/native/src/objc_audio_device_delegate.mm @@ -55,7 +55,7 @@ } // namespace -@implementation RTC_OBJC_TYPE(ObjCAudioDeviceDelegate) { +@implementation RTC_OBJC_TYPE(RTCObjCAudioDeviceDelegate) { rtc::scoped_refptr impl_; } diff --git a/sdk/objc/native/src/objc_desktop_capture.h b/sdk/objc/native/src/objc_desktop_capture.h index a781457220..b9ca6c9759 100644 --- a/sdk/objc/native/src/objc_desktop_capture.h +++ b/sdk/objc/native/src/objc_desktop_capture.h @@ -27,7 +27,7 @@ #include "rtc_base/thread.h" @protocol RTC_OBJC_TYPE -(DesktopCapturerDelegate); +(RTCDesktopCapturerPrivateDelegate); namespace webrtc { @@ -39,8 +39,8 @@ class ObjCDesktopCapturer : public DesktopCapturer::Callback { public: ObjCDesktopCapturer(DesktopType type, - webrtc::DesktopCapturer::SourceId source_id, - id delegate); + webrtc::DesktopCapturer::SourceId source_id, + id delegate); virtual ~ObjCDesktopCapturer(); virtual CaptureState Start(uint32_t fps); @@ -60,7 +60,7 @@ class ObjCDesktopCapturer : public DesktopCapturer::Callback { CaptureState capture_state_ = CS_STOPPED; DesktopType type_; webrtc::DesktopCapturer::SourceId source_id_; - id delegate_; + id delegate_; uint32_t capture_delay_ = 1000; // 1s webrtc::DesktopCapturer::Result result_ = webrtc::DesktopCapturer::Result::SUCCESS; }; diff --git a/sdk/objc/native/src/objc_desktop_capture.mm b/sdk/objc/native/src/objc_desktop_capture.mm index 7aba3e5612..ca43f6afd3 100644 --- a/sdk/objc/native/src/objc_desktop_capture.mm +++ b/sdk/objc/native/src/objc_desktop_capture.mm @@ -29,7 +29,7 @@ 
ObjCDesktopCapturer::ObjCDesktopCapturer(DesktopType type, webrtc::DesktopCapturer::SourceId source_id, - id delegate) + id delegate) : thread_(rtc::Thread::Create()), source_id_(source_id), delegate_(delegate) { RTC_DCHECK(thread_); type_ = type; @@ -184,7 +184,7 @@ int64_t timeStampNs = lroundf(timeStampSeconds * NSEC_PER_SEC); RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:rtcPixelBuffer - rotation:RTCVideoRotation_0 + rotation:RTC_OBJC_TYPE(RTCVideoRotation_0) timeStampNs:timeStampNs]; CVPixelBufferRelease(pixelBuffer); [delegate_ didCaptureVideoFrame:videoFrame]; diff --git a/sdk/objc/native/src/objc_video_encoder_factory.mm b/sdk/objc/native/src/objc_video_encoder_factory.mm index 1085cb8cb4..d5aeee95e8 100644 --- a/sdk/objc/native/src/objc_video_encoder_factory.mm +++ b/sdk/objc/native/src/objc_video_encoder_factory.mm @@ -76,7 +76,7 @@ int32_t Encode(const VideoFrame &frame, const std::vector *frame_types) override { NSMutableArray *rtcFrameTypes = [NSMutableArray array]; for (size_t i = 0; i < frame_types->size(); ++i) { - [rtcFrameTypes addObject:@(RTCFrameType(frame_types->at(i)))]; + [rtcFrameTypes addObject:@(RTC_OBJC_TYPE(RTCFrameType)(frame_types->at(i)))]; } return [encoder_ encode:ToObjCVideoFrame(frame) diff --git a/sdk/objc/native/src/objc_video_frame.mm b/sdk/objc/native/src/objc_video_frame.mm index ff07dc8552..935f89055d 100644 --- a/sdk/objc/native/src/objc_video_frame.mm +++ b/sdk/objc/native/src/objc_video_frame.mm @@ -18,7 +18,7 @@ RTC_OBJC_TYPE(RTCVideoFrame) * ToObjCVideoFrame(const VideoFrame &frame) { RTC_OBJC_TYPE(RTCVideoFrame) *videoFrame = [[RTC_OBJC_TYPE(RTCVideoFrame) alloc] initWithBuffer:ToObjCVideoFrameBuffer(frame.video_frame_buffer()) - rotation:RTCVideoRotation(frame.rotation()) + rotation:RTC_OBJC_TYPE(RTCVideoRotation)(frame.rotation()) timeStampNs:frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec]; videoFrame.timeStamp = frame.rtp_timestamp(); diff --git 
a/sdk/objc/unittests/RTCAudioSessionTest.mm b/sdk/objc/unittests/RTCAudioSessionTest.mm index d7cfc9ed04..3efcec6dad 100644 --- a/sdk/objc/unittests/RTCAudioSessionTest.mm +++ b/sdk/objc/unittests/RTCAudioSessionTest.mm @@ -291,8 +291,8 @@ - (void)DISABLED_testConfigureWebRTCSessionWithoutLocking { waitLock.Wait(timeout); [audioSession setCategory:AVAudioSessionCategoryPlayAndRecord withOptions:0 error:&error]; EXPECT_TRUE(error != nil); - EXPECT_EQ(error.domain, kRTCAudioSessionErrorDomain); - EXPECT_EQ(error.code, kRTCAudioSessionErrorLockRequired); + EXPECT_EQ(error.domain, RTC_CONSTANT_TYPE(RTCAudioSessionErrorDomain)); + EXPECT_EQ(error.code, RTC_CONSTANT_TYPE(RTCAudioSessionErrorLockRequired)); waitCleanup.Set(); thread->Stop(); diff --git a/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m b/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m index 7d42b6bdbb..88ffeaac13 100644 --- a/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m +++ b/sdk/objc/unittests/RTCPeerConnectionFactory_xctest.m @@ -186,7 +186,7 @@ - (void)testRTCRtpSenderLifetime { factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; peerConnection = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; - sender = [peerConnection senderWithKind:kRTCMediaStreamTrackKindVideo streamId:@"stream"]; + sender = [peerConnection senderWithKind:RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo) streamId:@"stream"]; XCTAssertNotNil(sender); [peerConnection close]; peerConnection = nil; @@ -216,10 +216,10 @@ - (void)testRTCRtpReceiverLifetime { @autoreleasepool { factory = [[RTC_OBJC_TYPE(RTCPeerConnectionFactory) alloc] init]; pc1 = [factory peerConnectionWithConfiguration:config constraints:constraints delegate:nil]; - [pc1 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"]; + [pc1 senderWithKind:RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio) streamId:@"stream"]; pc2 = [factory peerConnectionWithConfiguration:config constraints:constraints 
delegate:nil]; - [pc2 senderWithKind:kRTCMediaStreamTrackKindAudio streamId:@"stream"]; + [pc2 senderWithKind:RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio) streamId:@"stream"]; NSTimeInterval negotiationTimeout = 15; XCTAssertTrue([self negotiatePeerConnection:pc1 @@ -320,7 +320,7 @@ - (void)testRollback { config.sdpSemantics = RTCSdpSemanticsUnifiedPlan; RTC_OBJC_TYPE(RTCMediaConstraints) *constraints = [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{ - kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue + RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveAudio) : RTC_CONSTANT_TYPE(RTCMediaConstraintsValueTrue) } optionalConstraints:nil]; @@ -381,7 +381,7 @@ - (void)testSenderCapabilities { decoderFactory:decoder]; RTC_OBJC_TYPE(RTCRtpCapabilities) *capabilities = - [factory rtpSenderCapabilitiesForKind:kRTCMediaStreamTrackKindVideo]; + [factory rtpSenderCapabilitiesForKind:RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)]; NSMutableArray *codecNames = [NSMutableArray new]; for (RTC_OBJC_TYPE(RTCRtpCodecCapability) * codec in capabilities.codecs) { [codecNames addObject:codec.name]; @@ -410,7 +410,7 @@ - (void)testReceiverCapabilities { decoderFactory:decoder]; RTC_OBJC_TYPE(RTCRtpCapabilities) *capabilities = - [factory rtpReceiverCapabilitiesForKind:kRTCMediaStreamTrackKindVideo]; + [factory rtpReceiverCapabilitiesForKind:RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)]; NSMutableArray *codecNames = [NSMutableArray new]; for (RTC_OBJC_TYPE(RTCRtpCodecCapability) * codec in capabilities.codecs) { [codecNames addObject:codec.name]; @@ -454,7 +454,7 @@ - (void)testSetCodecPreferences { XCTAssertNotNil(tranceiver); RTC_OBJC_TYPE(RTCRtpCapabilities) *capabilities = - [factory rtpReceiverCapabilitiesForKind:kRTCMediaStreamTrackKindVideo]; + [factory rtpReceiverCapabilitiesForKind:RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)]; RTC_OBJC_TYPE(RTCRtpCodecCapability) * targetCodec; for 
(RTC_OBJC_TYPE(RTCRtpCodecCapability) * codec in capabilities.codecs) { @@ -513,7 +513,7 @@ - (bool)negotiatePeerConnection:(RTC_OBJC_TYPE(RTCPeerConnection) *)pc1 __weak RTC_OBJC_TYPE(RTCPeerConnection) *weakPC2 = pc2; RTC_OBJC_TYPE(RTCMediaConstraints) *sdpConstraints = [[RTC_OBJC_TYPE(RTCMediaConstraints) alloc] initWithMandatoryConstraints:@{ - kRTCMediaConstraintsOfferToReceiveAudio : kRTCMediaConstraintsValueTrue + RTC_CONSTANT_TYPE(RTCMediaConstraintsOfferToReceiveAudio) : RTC_CONSTANT_TYPE(RTCMediaConstraintsValueTrue) } optionalConstraints:nil]; From ed96590e9dc0d35c783d0abb4ae5b4aeaf032145 Mon Sep 17 00:00:00 2001 From: CloudWebRTC Date: Tue, 3 Jun 2025 11:54:17 +0800 Subject: [PATCH 31/49] Bump version for boringssl to fix compiler error. (#172) https://boringssl.googlesource.com/boringssl/+/12391e648d3c6f04a718721568cc89208b780654 --- DEPS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DEPS b/DEPS index ab365900fc..52a9371457 100644 --- a/DEPS +++ b/DEPS @@ -187,7 +187,7 @@ deps = { }, 'src/third_party/boringssl/src': - 'https://boringssl.googlesource.com/boringssl.git@f94f3ed3965ea033001fb9ae006084eee408b861', + 'https://boringssl.googlesource.com/boringssl.git@12391e648d3c6f04a718721568cc89208b780654', 'src/third_party/breakpad/breakpad': 'https://chromium.googlesource.com/breakpad/breakpad.git@76788faa4ef163081f82273bfca7fae8a734b971', 'src/third_party/catapult': From c6c6561757802e649a7348d77625c5eb4a5e6489 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Fri, 13 Jun 2025 13:47:28 +0200 Subject: [PATCH 32/49] gn: bypass problematic visionOS check and privacy info --- sdk/BUILD.gn | 52 +++++++++++++++++++--------------------------------- webrtc.gni | 2 +- 2 files changed, 20 insertions(+), 34 deletions(-) diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index bfb8545480..f3641149c2 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -174,12 +174,10 @@ if (is_ios || 
is_mac) { "objc/helpers/UIDevice+RTCDevice.h", "objc/helpers/UIDevice+RTCDevice.mm", ] - if (target_environment != "xrdevice" && target_environment != "xrsimulator") { - sources += [ - "objc/helpers/RTCCameraPreviewView.h", - "objc/helpers/RTCCameraPreviewView.m", - ] - } + sources += [ + "objc/helpers/RTCCameraPreviewView.h", + "objc/helpers/RTCCameraPreviewView.m", + ] frameworks += [ "UIKit.framework" ] } } @@ -282,9 +280,7 @@ if (is_ios || is_mac) { ":videoframebuffer_objc", ] - if (target_environment != "xrdevice" && target_environment != "xrsimulator") { - deps += [ ":metal_objc" ] - } + deps += [ ":metal_objc" ] } rtc_library("audio_device") { @@ -638,7 +634,7 @@ if (is_ios || is_mac) { "Metal.framework", "MetalKit.framework", ] - if ((is_ios || is_mac) && !(target_environment == "xrsimulator" || target_environment == "xrdevice")) { + if (is_ios || is_mac) { sources += [ "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLVideoView.m", @@ -681,12 +677,10 @@ if (is_ios || is_mac) { "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCCameraVideoCapturer.m", ] - if (target_environment != "xrdevice" && target_environment != "xrsimulator") { - sources += [ - "objc/components/capturer/RTCFileVideoCapturer.h", - "objc/components/capturer/RTCFileVideoCapturer.m", - ] - } + sources += [ + "objc/components/capturer/RTCFileVideoCapturer.h", + "objc/components/capturer/RTCFileVideoCapturer.m", + ] frameworks = [ "AVFoundation.framework", "CoreVideo.framework", @@ -1335,11 +1329,9 @@ if (is_ios || is_mac) { "../test:wait_until", "//third_party/libyuv", ] - if (target_environment != "xrdevice" && target_environment != "xrsimulator") { - deps += [ - ":metal_objc", - ] - } + deps += [ + ":metal_objc", + ] if (rtc_ios_use_opengl_rendering) { deps += [ ":opengl_objc" ] } @@ -1516,12 +1508,10 @@ if (is_ios || is_mac) { "objc/components/audio/RTCAudioProcessingConfig.h", ] - if (target_environment != "xrdevice" 
&& target_environment != "xrsimulator") { - common_objc_headers += [ - "objc/helpers/RTCCameraPreviewView.h", - "objc/components/renderer/metal/RTCMTLVideoView.h", - ] - } + common_objc_headers += [ + "objc/helpers/RTCCameraPreviewView.h", + "objc/components/renderer/metal/RTCMTLVideoView.h", + ] if (!build_with_chromium) { common_objc_headers += [ @@ -1551,9 +1541,7 @@ if (is_ios || is_mac) { ":videotoolbox_objc", ":darwin_privacy_info", ] - if (target_environment != "xrdevice" && target_environment != "xrsimulator") { - deps += [ ":metal_objc" ] - } + deps += [ ":metal_objc" ] if (!build_with_chromium) { deps += [ ":callback_logger_objc", @@ -1711,9 +1699,7 @@ if (is_ios || is_mac) { ":videotoolbox_objc", ":darwin_privacy_info", ] - if (target_environment != "xrdevice" && target_environment != "xrsimulator") { - deps += [ ":metal_objc" ] - } + deps += [ ":metal_objc" ] if (!build_with_chromium) { deps += [ ":callback_logger_objc", diff --git a/webrtc.gni b/webrtc.gni index 2a407632d6..d3c3d58b8f 100644 --- a/webrtc.gni +++ b/webrtc.gni @@ -1203,7 +1203,7 @@ if (is_mac || is_ios) { copy("copy_privacy_manifest_$target_name") { sources = [ privacy_manifest_path ] outputs = - [ "$root_out_dir/$output_name.framework/$privacy_manifest_out_path" ] + [ "$root_out_dir/$privacy_manifest_out_path" ] deps = [ ":create_privacy_manifest_$target_name" ] } From 054ae1cf2bdad3f40a4bded4d647b7c4b67810b4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Fri, 13 Jun 2025 14:32:33 +0200 Subject: [PATCH 33/49] iOS: fix audio init flags --- api/crypto/frame_crypto_transformer.h | 2 +- .../components/capturer/RTCCameraVideoCapturer.m | 2 +- sdk/objc/components/network/RTCNetworkMonitor.mm | 2 +- sdk/objc/native/src/audio/audio_device_ios.mm | 12 ++++++------ 4 files changed, 9 insertions(+), 9 deletions(-) diff --git a/api/crypto/frame_crypto_transformer.h b/api/crypto/frame_crypto_transformer.h index 
91eee11919..af530e9c5c 100644 --- a/api/crypto/frame_crypto_transformer.h +++ b/api/crypto/frame_crypto_transformer.h @@ -367,7 +367,7 @@ enum FrameCryptionState { kInternalError, }; -class FrameCryptorTransformerObserver : public rtc::RefCountInterface { +class FrameCryptorTransformerObserver : public webrtc::RefCountInterface { public: virtual void OnFrameCryptionStateChanged(const std::string participant_id, FrameCryptionState error) = 0; diff --git a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m index b5941d441a..b6d71e75e0 100644 --- a/sdk/objc/components/capturer/RTCCameraVideoCapturer.m +++ b/sdk/objc/components/capturer/RTCCameraVideoCapturer.m @@ -305,7 +305,7 @@ - (void)stopCaptureWithCompletionHandler: #if TARGET_WATCH_DEVICE_ROTATION - (void)deviceOrientationDidChange:(NSNotification *)notification { [RTC_OBJC_TYPE(RTCDispatcher) - dispatchAsyncOnType:RTCDispatcherTypeCaptureSession + dispatchAsyncOnType:RTC_OBJC_TYPE(RTCDispatcherTypeCaptureSession) block:^{ [self updateOrientation]; }]; diff --git a/sdk/objc/components/network/RTCNetworkMonitor.mm b/sdk/objc/components/network/RTCNetworkMonitor.mm index 46a9cb4ab2..8dbc53551e 100644 --- a/sdk/objc/components/network/RTCNetworkMonitor.mm +++ b/sdk/objc/components/network/RTCNetworkMonitor.mm @@ -67,7 +67,7 @@ - (instancetype)initWithObserver:(webrtc::NetworkMonitorObserver *)observer { RTCLog(@"NW path monitor created."); __weak RTC_OBJC_TYPE(RTCNetworkMonitor) *weakSelf = self; nw_path_monitor_set_update_handler(_pathMonitor, ^(nw_path_t path) { - RTCNetworkMonitor *strongSelf = weakSelf; + RTC_OBJC_TYPE(RTCNetworkMonitor) *strongSelf = weakSelf; if (strongSelf == nil) { return; } diff --git a/sdk/objc/native/src/audio/audio_device_ios.mm b/sdk/objc/native/src/audio/audio_device_ios.mm index 73a8033841..079645c595 100644 --- a/sdk/objc/native/src/audio/audio_device_ios.mm +++ b/sdk/objc/native/src/audio/audio_device_ios.mm @@ -250,7 
+250,7 @@ static void LogDeviceInfo() { RTC_DCHECK(playout_is_initialized_); RTC_DCHECK(!playing_.load()); RTC_DCHECK(audio_unit_); - if (!audio_is_initialized_) { + if (!playout_is_initialized_) { return -1; } if (fine_audio_buffer_) { @@ -318,7 +318,7 @@ static void LogDeviceInfo() { RTC_DCHECK(recording_is_initialized_); RTC_DCHECK(!recording_.load()); RTC_DCHECK(audio_unit_); - if (!audio_is_initialized_) { + if (!recording_is_initialized_) { return -1; } if (fine_audio_buffer_) { @@ -875,8 +875,8 @@ static void LogDeviceInfo() { bool AudioDeviceIOS::CreateAudioUnit() { RTC_DCHECK(!audio_unit_); - RTC_DCHECK(!audio_is_initialized_); - if (audio_unit_ || audio_is_initialized_) { + RTC_DCHECK(!playout_is_initialized_ && !recording_is_initialized_); + if (audio_unit_ || playout_is_initialized_ || recording_is_initialized_) { return false; } BOOL detect_mute_speech_ = (muted_speech_event_handler_ != 0); @@ -1079,7 +1079,6 @@ static void LogDeviceInfo() { // Release the lock. [session unlockForConfiguration]; - audio_is_initialized_ = true; return true; } @@ -1106,7 +1105,8 @@ static void LogDeviceInfo() { // session, hence we deactivate as last action. 
UnconfigureAudioSession(); - audio_is_initialized_ = false; + playout_is_initialized_ = false; + recording_is_initialized_ = false; } void AudioDeviceIOS::PrepareForNewStart() { From ff6612458e1f60b8dc58669b648d96863dad6da6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Mon, 16 Jun 2025 11:16:51 +0200 Subject: [PATCH 34/49] tvOS, xrOS: update build repo --- DEPS | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/DEPS b/DEPS index 61027df618..11faaee948 100644 --- a/DEPS +++ b/DEPS @@ -75,7 +75,7 @@ deps = { 'src/base': 'https://chromium.googlesource.com/chromium/src/base@86c814633cf284bc8057a539bc722e2a672afe2f', 'src/build': - 'https://chromium.googlesource.com/chromium/src/build@88030b320338e0706b6b93336c4b35e6bbaf467e', + 'https://github.com/webrtc-sdk/build@3d944ec41a599db36084fda275465d24091dec95', 'src/buildtools': 'https://chromium.googlesource.com/chromium/src/buildtools@0f32cb9025766951122d4ed19aba87a94ded3f43', # Gradle 6.6.1. Used for testing Android Studio project generation for WebRTC. From f571d7ef3e6544b6864e6e6c8297bbfb42a6af4e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Mon, 16 Jun 2025 11:17:24 +0200 Subject: [PATCH 35/49] Apple: build script --- build_xcframework_dynamic_livekit.sh | 334 +++++++++++++++++++++++++++ 1 file changed, 334 insertions(+) create mode 100755 build_xcframework_dynamic_livekit.sh diff --git a/build_xcframework_dynamic_livekit.sh b/build_xcframework_dynamic_livekit.sh new file mode 100755 index 0000000000..9dfb37262c --- /dev/null +++ b/build_xcframework_dynamic_livekit.sh @@ -0,0 +1,334 @@ +#!/bin/sh +if [ ! 
-n "$1" ]; then + echo "Usage: $0 'debug' | 'release'" + exit 0 +fi + +MODE=$1 +OUT_DIR=./out-$MODE +DEBUG="false" +if [ "$MODE" == "debug" ]; then + DEBUG="true" +fi + +echo "build_xcframework_dynamic_livekit.sh: MODE=$MODE, DEBUG=$DEBUG" + +# gn gen $OUT_DIR/tvOS-arm64-device --args=" +# rtc_objc_prefix = \"LK\" +# treat_warnings_as_errors = false +# target_os = \"ios\" +# ios_enable_code_signing = false +# is_component_build = false +# target_environment = \"appletv\" +# target_cpu = \"arm64\" +# ios_deployment_target = \"17.0\" +# use_goma = false +# rtc_enable_symbol_export = true +# rtc_libvpx_build_vp9 = true +# rtc_include_tests = false +# rtc_build_examples = false +# rtc_use_h264 = false +# rtc_enable_protobuf = false +# enable_libaom = true +# rtc_include_dav1d_in_internal_decoder_factory = true +# use_rtti = true +# is_debug = $DEBUG +# enable_dsyms = $DEBUG +# enable_stripping = true" --ide=xcode + +# ninja -C $OUT_DIR/tvOS-arm64-device ios_framework_bundle -j 10 + +# gn gen $OUT_DIR/tvOS-arm64-simulator --args=" +# rtc_objc_prefix = \"LK\" +# treat_warnings_as_errors = false +# target_os = \"ios\" +# ios_enable_code_signing = false +# is_component_build = false +# target_environment = \"appletvsimulator\" +# target_cpu = \"arm64\" +# ios_deployment_target = \"17.0\" +# use_goma = false +# rtc_enable_symbol_export = true +# rtc_libvpx_build_vp9 = true +# rtc_include_tests = false +# rtc_build_examples = false +# rtc_use_h264 = false +# rtc_enable_protobuf = false +# enable_libaom = true +# rtc_include_dav1d_in_internal_decoder_factory = true +# use_rtti = true +# is_debug = $DEBUG +# enable_dsyms = $DEBUG +# enable_stripping = true" --ide=xcode + +# ninja -C $OUT_DIR/tvOS-arm64-simulator ios_framework_bundle -j 10 + +# gn gen $OUT_DIR/xrOS-arm64-device --args=" +# rtc_objc_prefix = \"LK\" +# treat_warnings_as_errors = false +# target_os = \"ios\" +# ios_enable_code_signing = false +# is_component_build = false +# target_environment = \"xrdevice\" +# 
target_cpu = \"arm64\" +# ios_deployment_target = \"1.1.0\" +# use_goma = false +# rtc_enable_symbol_export = true +# rtc_libvpx_build_vp9 = true +# rtc_include_tests = false +# rtc_build_examples = false +# rtc_use_h264 = false +# rtc_enable_protobuf = false +# enable_libaom = true +# rtc_include_dav1d_in_internal_decoder_factory = true +# use_rtti = true +# is_debug = $DEBUG +# enable_dsyms = $DEBUG +# enable_stripping = true" --ide=xcode + +# ninja -C $OUT_DIR/xrOS-arm64-device ios_framework_bundle -j 10 + +# gn gen $OUT_DIR/xrOS-arm64-simulator --args=" +# rtc_objc_prefix = \"LK\" +# treat_warnings_as_errors = false +# target_os = \"ios\" +# ios_enable_code_signing = false +# is_component_build = false +# target_environment = \"xrsimulator\" +# target_cpu = \"arm64\" +# ios_deployment_target = \"1.1.0\" +# use_goma = false +# rtc_enable_symbol_export = true +# rtc_libvpx_build_vp9 = true +# rtc_include_tests = false +# rtc_build_examples = false +# rtc_use_h264 = false +# rtc_enable_protobuf = false +# enable_libaom = true +# rtc_include_dav1d_in_internal_decoder_factory = true +# use_rtti = true +# is_debug = $DEBUG +# enable_dsyms = $DEBUG +# enable_stripping = true" --ide=xcode + +# ninja -C $OUT_DIR/xrOS-arm64-simulator ios_framework_bundle -j 10 + +# gn gen $OUT_DIR/catalyst-arm64 --args=" +# rtc_objc_prefix = \"LK\" +# treat_warnings_as_errors = false +# target_os = \"ios\" +# ios_enable_code_signing = false +# is_component_build = false +# target_environment = \"catalyst\" +# target_cpu = \"arm64\" +# ios_deployment_target = \"14.0\" +# use_goma = false +# rtc_enable_symbol_export = true +# rtc_libvpx_build_vp9 = true +# rtc_include_tests = false +# rtc_build_examples = false +# rtc_use_h264 = false +# rtc_enable_protobuf = false +# enable_libaom = true +# rtc_include_dav1d_in_internal_decoder_factory = true +# use_rtti = true +# is_debug = $DEBUG +# enable_dsyms = $DEBUG +# enable_stripping = true" --ide=xcode + +# ninja -C $OUT_DIR/catalyst-arm64 
ios_framework_bundle -j 10 + +# gn gen $OUT_DIR/catalyst-x64 --args=" +# rtc_objc_prefix = \"LK\" +# treat_warnings_as_errors = false +# target_os = \"ios\" +# ios_enable_code_signing = false +# is_component_build = false +# target_environment = \"catalyst\" +# target_cpu = \"x64\" +# ios_deployment_target = \"14.0\" +# use_goma = false +# rtc_enable_symbol_export = true +# rtc_libvpx_build_vp9 = true +# rtc_include_tests = false +# rtc_build_examples = false +# rtc_use_h264 = false +# rtc_enable_protobuf = false +# enable_libaom = true +# rtc_include_dav1d_in_internal_decoder_factory = true +# use_rtti = true +# is_debug = $DEBUG +# enable_dsyms = $DEBUG +# enable_stripping = true" --ide=xcode + +# ninja -C $OUT_DIR/catalyst-x64 ios_framework_bundle -j 10 + +# gn gen $OUT_DIR/iOS-arm64-device --args=" +# rtc_objc_prefix = \"LK\" +# treat_warnings_as_errors = false +# target_os = \"ios\" +# ios_enable_code_signing = false +# is_component_build = false +# target_environment = \"device\" +# target_cpu = \"arm64\" +# ios_deployment_target = \"13.0\" +# use_goma = false +# rtc_enable_symbol_export = true +# rtc_libvpx_build_vp9 = true +# rtc_include_tests = false +# rtc_build_examples = false +# rtc_use_h264 = false +# rtc_enable_protobuf = false +# enable_libaom = true +# rtc_include_dav1d_in_internal_decoder_factory = true +# use_rtti = true +# is_debug = $DEBUG +# enable_dsyms = $DEBUG +# enable_stripping = true" --ide=xcode + +# ninja -C $OUT_DIR/iOS-arm64-device ios_framework_bundle -j 10 + +# gn gen $OUT_DIR/iOS-x64-simulator --args=" +# rtc_objc_prefix = \"LK\" +# treat_warnings_as_errors = false +# target_os = \"ios\" +# ios_enable_code_signing = false +# is_component_build = false +# target_environment = \"simulator\" +# target_cpu = \"x64\" +# ios_deployment_target = \"13.0\" +# rtc_libvpx_build_vp9 = true +# use_goma = false +# rtc_enable_symbol_export = true +# rtc_include_tests = false +# rtc_build_examples = false +# rtc_use_h264 = false +# 
rtc_enable_protobuf = false +# enable_libaom = true +# rtc_include_dav1d_in_internal_decoder_factory = true +# use_rtti = true +# is_debug = $DEBUG +# enable_dsyms = $DEBUG +# enable_stripping = true" --ide=xcode + +# ninja -C $OUT_DIR/iOS-x64-simulator ios_framework_bundle -j 10 + +# gn gen $OUT_DIR/iOS-arm64-simulator --args=" +# rtc_objc_prefix = \"LK\" +# treat_warnings_as_errors = false +# target_os = \"ios\" +# ios_enable_code_signing = false +# is_component_build = false +# target_environment = \"simulator\" +# target_cpu = \"arm64\" +# ios_deployment_target = \"13.0\" +# use_goma = false +# rtc_enable_symbol_export = true +# rtc_libvpx_build_vp9 = true +# rtc_include_tests = false +# rtc_build_examples = false +# rtc_use_h264 = false +# rtc_enable_protobuf = false +# enable_libaom = true +# rtc_include_dav1d_in_internal_decoder_factory = true +# use_rtti = true +# is_debug = $DEBUG +# enable_dsyms = $DEBUG +# enable_stripping = true" --ide=xcode + +# ninja -C $OUT_DIR/iOS-arm64-simulator ios_framework_bundle -j 10 + +# gn gen $OUT_DIR/macOS-x64 --args=" +# rtc_objc_prefix = \"LK\" +# treat_warnings_as_errors = false +# target_os=\"mac\" +# target_cpu=\"x64\" +# mac_deployment_target=\"10.15\" +# is_component_build = false +# target_cpu = \"x64\" +# use_goma = false +# rtc_enable_symbol_export = true +# rtc_libvpx_build_vp9 = true +# rtc_include_tests = false +# rtc_build_examples = false +# rtc_use_h264 = false +# rtc_enable_protobuf = false +# enable_libaom = true +# rtc_include_dav1d_in_internal_decoder_factory = true +# use_rtti = true +# is_debug = $DEBUG +# enable_dsyms = $DEBUG" --ide=xcode + +# ninja -C $OUT_DIR/macOS-x64 mac_framework_bundle -j 10 + +gn gen $OUT_DIR/macOS-arm64 --args=" + use_custom_libcxx = false + use_explicit_libcxx_modules = false + rtc_objc_prefix = \"LK\" + treat_warnings_as_errors = false + target_os=\"mac\" + target_cpu=\"arm64\" + mac_deployment_target=\"10.15\" + is_component_build = false + use_goma = false + 
rtc_enable_symbol_export = true + rtc_libvpx_build_vp9 = true + rtc_include_tests = false + rtc_build_examples = false + rtc_use_h264 = false + rtc_enable_protobuf = false + enable_libaom = true + enable_freetype = false + enable_rust = false + rtc_include_dav1d_in_internal_decoder_factory = true + use_rtti = true + is_debug = $DEBUG + enable_dsyms = $DEBUG" --ide=xcode + +ninja -C $OUT_DIR/macOS-arm64 mac_framework_bundle -j 10 + +# rm -rf $OUT_DIR/*-lib $OUT_DIR/LiveKitWebRTC.* + +# mkdir -p $OUT_DIR/macOS-lib +# cp -R $OUT_DIR/macOS-x64/LiveKitWebRTC.framework $OUT_DIR/macOS-lib/LiveKitWebRTC.framework +# lipo -create -output $OUT_DIR/macOS-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/macOS-arm64/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/macOS-x64/LiveKitWebRTC.framework/LiveKitWebRTC + +# mkdir -p $OUT_DIR/catalyst-lib +# cp -R $OUT_DIR/catalyst-arm64/LiveKitWebRTC.framework $OUT_DIR/catalyst-lib/LiveKitWebRTC.framework +# lipo -create -output $OUT_DIR/catalyst-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/catalyst-arm64/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/catalyst-x64/LiveKitWebRTC.framework/LiveKitWebRTC + +# mkdir -p $OUT_DIR/iOS-device-lib +# cp -R $OUT_DIR/iOS-arm64-device/LiveKitWebRTC.framework $OUT_DIR/iOS-device-lib/LiveKitWebRTC.framework +# lipo -create -output $OUT_DIR/iOS-device-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/iOS-arm64-device/LiveKitWebRTC.framework/LiveKitWebRTC + +# mkdir -p $OUT_DIR/iOS-simulator-lib +# cp -R $OUT_DIR/iOS-arm64-simulator/LiveKitWebRTC.framework $OUT_DIR/iOS-simulator-lib/LiveKitWebRTC.framework +# lipo -create -output $OUT_DIR/iOS-simulator-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/iOS-arm64-simulator/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/iOS-x64-simulator/LiveKitWebRTC.framework/LiveKitWebRTC + +# xcodebuild -create-xcframework \ +# -framework $OUT_DIR/iOS-device-lib/LiveKitWebRTC.framework \ +# -framework $OUT_DIR/iOS-simulator-lib/LiveKitWebRTC.framework \ +# 
-framework $OUT_DIR/xrOS-arm64-device/LiveKitWebRTC.framework \ +# -framework $OUT_DIR/xrOS-arm64-simulator/LiveKitWebRTC.framework \ +# -framework $OUT_DIR/tvOS-arm64-device/LiveKitWebRTC.framework \ +# -framework $OUT_DIR/tvOS-arm64-simulator/LiveKitWebRTC.framework \ +# -framework $OUT_DIR/catalyst-lib/LiveKitWebRTC.framework \ +# -framework $OUT_DIR/macOS-lib/LiveKitWebRTC.framework \ +# -output $OUT_DIR/LiveKitWebRTC.xcframework + +# cp ./src/LICENSE $OUT_DIR/LiveKitWebRTC.xcframework/ + +# cd $OUT_DIR/LiveKitWebRTC.xcframework/macos-arm64_x86_64/LiveKitWebRTC.framework/ +# mv LiveKitWebRTC Versions/A/LiveKitWebRTC +# ln -s Versions/Current/LiveKitWebRTC LiveKitWebRTC +# cd ../../../../ + +# cd $OUT_DIR/LiveKitWebRTC.xcframework/ios-arm64_x86_64-maccatalyst/LiveKitWebRTC.framework/ +# mv LiveKitWebRTC Versions/A/LiveKitWebRTC +# ln -s Versions/Current/LiveKitWebRTC LiveKitWebRTC +# cd ../../../ +# zip --symlinks -9 -r LiveKitWebRTC.xcframework.zip LiveKitWebRTC.xcframework + +# # hash +# shasum -a 256 LiveKitWebRTC.xcframework.zip >LiveKitWebRTC.xcframework.zip.shasum +# cat LiveKitWebRTC.xcframework.zip.shasum From c73d33f486ae6df7b2b78d9e609eb1da841f516a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Mon, 16 Jun 2025 14:20:34 +0200 Subject: [PATCH 36/49] Apple: update build repo --- DEPS | 2 +- build_xcframework_dynamic_livekit.sh | 442 +++++++++++++-------------- 2 files changed, 220 insertions(+), 224 deletions(-) diff --git a/DEPS b/DEPS index 11faaee948..e79b1c27aa 100644 --- a/DEPS +++ b/DEPS @@ -75,7 +75,7 @@ deps = { 'src/base': 'https://chromium.googlesource.com/chromium/src/base@86c814633cf284bc8057a539bc722e2a672afe2f', 'src/build': - 'https://github.com/webrtc-sdk/build@3d944ec41a599db36084fda275465d24091dec95', + 'https://github.com/webrtc-sdk/build@9af2ddd8e5ad6278165cadfa554bea6f25081dd2', 'src/buildtools': 
'https://chromium.googlesource.com/chromium/src/buildtools@0f32cb9025766951122d4ed19aba87a94ded3f43', # Gradle 6.6.1. Used for testing Android Studio project generation for WebRTC. diff --git a/build_xcframework_dynamic_livekit.sh b/build_xcframework_dynamic_livekit.sh index 9dfb37262c..a0ae3166af 100755 --- a/build_xcframework_dynamic_livekit.sh +++ b/build_xcframework_dynamic_livekit.sh @@ -13,257 +13,255 @@ fi echo "build_xcframework_dynamic_livekit.sh: MODE=$MODE, DEBUG=$DEBUG" -# gn gen $OUT_DIR/tvOS-arm64-device --args=" -# rtc_objc_prefix = \"LK\" -# treat_warnings_as_errors = false -# target_os = \"ios\" -# ios_enable_code_signing = false -# is_component_build = false -# target_environment = \"appletv\" -# target_cpu = \"arm64\" -# ios_deployment_target = \"17.0\" -# use_goma = false -# rtc_enable_symbol_export = true -# rtc_libvpx_build_vp9 = true -# rtc_include_tests = false -# rtc_build_examples = false -# rtc_use_h264 = false -# rtc_enable_protobuf = false -# enable_libaom = true -# rtc_include_dav1d_in_internal_decoder_factory = true -# use_rtti = true -# is_debug = $DEBUG -# enable_dsyms = $DEBUG -# enable_stripping = true" --ide=xcode +gn gen $OUT_DIR/tvOS-arm64-device --args=" + rtc_objc_prefix = \"LK\" + treat_warnings_as_errors = false + target_os = \"ios\" + ios_enable_code_signing = false + is_component_build = false + target_environment = \"appletv\" + target_cpu = \"arm64\" + ios_deployment_target = \"17.0\" + use_goma = false + rtc_enable_symbol_export = true + rtc_libvpx_build_vp9 = true + rtc_include_tests = false + rtc_build_examples = false + rtc_use_h264 = false + rtc_enable_protobuf = false + enable_libaom = true + rtc_include_dav1d_in_internal_decoder_factory = true + use_rtti = true + is_debug = $DEBUG + enable_dsyms = $DEBUG + enable_stripping = true" --ide=xcode # ninja -C $OUT_DIR/tvOS-arm64-device ios_framework_bundle -j 10 -# gn gen $OUT_DIR/tvOS-arm64-simulator --args=" -# rtc_objc_prefix = \"LK\" -# treat_warnings_as_errors = 
false -# target_os = \"ios\" -# ios_enable_code_signing = false -# is_component_build = false -# target_environment = \"appletvsimulator\" -# target_cpu = \"arm64\" -# ios_deployment_target = \"17.0\" -# use_goma = false -# rtc_enable_symbol_export = true -# rtc_libvpx_build_vp9 = true -# rtc_include_tests = false -# rtc_build_examples = false -# rtc_use_h264 = false -# rtc_enable_protobuf = false -# enable_libaom = true -# rtc_include_dav1d_in_internal_decoder_factory = true -# use_rtti = true -# is_debug = $DEBUG -# enable_dsyms = $DEBUG -# enable_stripping = true" --ide=xcode +gn gen $OUT_DIR/tvOS-arm64-simulator --args=" + rtc_objc_prefix = \"LK\" + treat_warnings_as_errors = false + target_os = \"ios\" + ios_enable_code_signing = false + is_component_build = false + target_environment = \"appletvsimulator\" + target_cpu = \"arm64\" + ios_deployment_target = \"17.0\" + use_goma = false + rtc_enable_symbol_export = true + rtc_libvpx_build_vp9 = true + rtc_include_tests = false + rtc_build_examples = false + rtc_use_h264 = false + rtc_enable_protobuf = false + enable_libaom = true + rtc_include_dav1d_in_internal_decoder_factory = true + use_rtti = true + is_debug = $DEBUG + enable_dsyms = $DEBUG + enable_stripping = true" --ide=xcode # ninja -C $OUT_DIR/tvOS-arm64-simulator ios_framework_bundle -j 10 -# gn gen $OUT_DIR/xrOS-arm64-device --args=" -# rtc_objc_prefix = \"LK\" -# treat_warnings_as_errors = false -# target_os = \"ios\" -# ios_enable_code_signing = false -# is_component_build = false -# target_environment = \"xrdevice\" -# target_cpu = \"arm64\" -# ios_deployment_target = \"1.1.0\" -# use_goma = false -# rtc_enable_symbol_export = true -# rtc_libvpx_build_vp9 = true -# rtc_include_tests = false -# rtc_build_examples = false -# rtc_use_h264 = false -# rtc_enable_protobuf = false -# enable_libaom = true -# rtc_include_dav1d_in_internal_decoder_factory = true -# use_rtti = true -# is_debug = $DEBUG -# enable_dsyms = $DEBUG -# enable_stripping = true" 
--ide=xcode +gn gen $OUT_DIR/xrOS-arm64-device --args=" + rtc_objc_prefix = \"LK\" + treat_warnings_as_errors = false + target_os = \"ios\" + ios_enable_code_signing = false + is_component_build = false + target_environment = \"xrdevice\" + target_cpu = \"arm64\" + ios_deployment_target = \"1.1.0\" + use_goma = false + rtc_enable_symbol_export = true + rtc_libvpx_build_vp9 = true + rtc_include_tests = false + rtc_build_examples = false + rtc_use_h264 = false + rtc_enable_protobuf = false + enable_libaom = true + rtc_include_dav1d_in_internal_decoder_factory = true + use_rtti = true + is_debug = $DEBUG + enable_dsyms = $DEBUG + enable_stripping = true" --ide=xcode # ninja -C $OUT_DIR/xrOS-arm64-device ios_framework_bundle -j 10 -# gn gen $OUT_DIR/xrOS-arm64-simulator --args=" -# rtc_objc_prefix = \"LK\" -# treat_warnings_as_errors = false -# target_os = \"ios\" -# ios_enable_code_signing = false -# is_component_build = false -# target_environment = \"xrsimulator\" -# target_cpu = \"arm64\" -# ios_deployment_target = \"1.1.0\" -# use_goma = false -# rtc_enable_symbol_export = true -# rtc_libvpx_build_vp9 = true -# rtc_include_tests = false -# rtc_build_examples = false -# rtc_use_h264 = false -# rtc_enable_protobuf = false -# enable_libaom = true -# rtc_include_dav1d_in_internal_decoder_factory = true -# use_rtti = true -# is_debug = $DEBUG -# enable_dsyms = $DEBUG -# enable_stripping = true" --ide=xcode +gn gen $OUT_DIR/xrOS-arm64-simulator --args=" + rtc_objc_prefix = \"LK\" + treat_warnings_as_errors = false + target_os = \"ios\" + ios_enable_code_signing = false + is_component_build = false + target_environment = \"xrsimulator\" + target_cpu = \"arm64\" + ios_deployment_target = \"1.1.0\" + use_goma = false + rtc_enable_symbol_export = true + rtc_libvpx_build_vp9 = true + rtc_include_tests = false + rtc_build_examples = false + rtc_use_h264 = false + rtc_enable_protobuf = false + enable_libaom = true + rtc_include_dav1d_in_internal_decoder_factory = true + 
use_rtti = true + is_debug = $DEBUG + enable_dsyms = $DEBUG + enable_stripping = true" --ide=xcode # ninja -C $OUT_DIR/xrOS-arm64-simulator ios_framework_bundle -j 10 -# gn gen $OUT_DIR/catalyst-arm64 --args=" -# rtc_objc_prefix = \"LK\" -# treat_warnings_as_errors = false -# target_os = \"ios\" -# ios_enable_code_signing = false -# is_component_build = false -# target_environment = \"catalyst\" -# target_cpu = \"arm64\" -# ios_deployment_target = \"14.0\" -# use_goma = false -# rtc_enable_symbol_export = true -# rtc_libvpx_build_vp9 = true -# rtc_include_tests = false -# rtc_build_examples = false -# rtc_use_h264 = false -# rtc_enable_protobuf = false -# enable_libaom = true -# rtc_include_dav1d_in_internal_decoder_factory = true -# use_rtti = true -# is_debug = $DEBUG -# enable_dsyms = $DEBUG -# enable_stripping = true" --ide=xcode +gn gen $OUT_DIR/catalyst-arm64 --args=" + rtc_objc_prefix = \"LK\" + treat_warnings_as_errors = false + target_os = \"ios\" + ios_enable_code_signing = false + is_component_build = false + target_environment = \"catalyst\" + target_cpu = \"arm64\" + ios_deployment_target = \"14.0\" + use_goma = false + rtc_enable_symbol_export = true + rtc_libvpx_build_vp9 = true + rtc_include_tests = false + rtc_build_examples = false + rtc_use_h264 = false + rtc_enable_protobuf = false + enable_libaom = true + rtc_include_dav1d_in_internal_decoder_factory = true + use_rtti = true + is_debug = $DEBUG + enable_dsyms = $DEBUG + enable_stripping = true" --ide=xcode # ninja -C $OUT_DIR/catalyst-arm64 ios_framework_bundle -j 10 -# gn gen $OUT_DIR/catalyst-x64 --args=" -# rtc_objc_prefix = \"LK\" -# treat_warnings_as_errors = false -# target_os = \"ios\" -# ios_enable_code_signing = false -# is_component_build = false -# target_environment = \"catalyst\" -# target_cpu = \"x64\" -# ios_deployment_target = \"14.0\" -# use_goma = false -# rtc_enable_symbol_export = true -# rtc_libvpx_build_vp9 = true -# rtc_include_tests = false -# rtc_build_examples = false 
-# rtc_use_h264 = false -# rtc_enable_protobuf = false -# enable_libaom = true -# rtc_include_dav1d_in_internal_decoder_factory = true -# use_rtti = true -# is_debug = $DEBUG -# enable_dsyms = $DEBUG -# enable_stripping = true" --ide=xcode +gn gen $OUT_DIR/catalyst-x64 --args=" + rtc_objc_prefix = \"LK\" + treat_warnings_as_errors = false + target_os = \"ios\" + ios_enable_code_signing = false + is_component_build = false + target_environment = \"catalyst\" + target_cpu = \"x64\" + ios_deployment_target = \"14.0\" + use_goma = false + rtc_enable_symbol_export = true + rtc_libvpx_build_vp9 = true + rtc_include_tests = false + rtc_build_examples = false + rtc_use_h264 = false + rtc_enable_protobuf = false + enable_libaom = true + rtc_include_dav1d_in_internal_decoder_factory = true + use_rtti = true + is_debug = $DEBUG + enable_dsyms = $DEBUG + enable_stripping = true" --ide=xcode # ninja -C $OUT_DIR/catalyst-x64 ios_framework_bundle -j 10 -# gn gen $OUT_DIR/iOS-arm64-device --args=" -# rtc_objc_prefix = \"LK\" -# treat_warnings_as_errors = false -# target_os = \"ios\" -# ios_enable_code_signing = false -# is_component_build = false -# target_environment = \"device\" -# target_cpu = \"arm64\" -# ios_deployment_target = \"13.0\" -# use_goma = false -# rtc_enable_symbol_export = true -# rtc_libvpx_build_vp9 = true -# rtc_include_tests = false -# rtc_build_examples = false -# rtc_use_h264 = false -# rtc_enable_protobuf = false -# enable_libaom = true -# rtc_include_dav1d_in_internal_decoder_factory = true -# use_rtti = true -# is_debug = $DEBUG -# enable_dsyms = $DEBUG -# enable_stripping = true" --ide=xcode +gn gen $OUT_DIR/iOS-arm64-device --args=" + rtc_objc_prefix = \"LK\" + treat_warnings_as_errors = false + target_os = \"ios\" + ios_enable_code_signing = false + is_component_build = false + target_environment = \"device\" + target_cpu = \"arm64\" + ios_deployment_target = \"13.0\" + use_goma = false + rtc_enable_symbol_export = true + rtc_libvpx_build_vp9 = true + 
rtc_include_tests = false + rtc_build_examples = false + rtc_use_h264 = false + rtc_enable_protobuf = false + enable_libaom = true + rtc_include_dav1d_in_internal_decoder_factory = true + use_rtti = true + is_debug = $DEBUG + enable_dsyms = $DEBUG + enable_stripping = true" --ide=xcode # ninja -C $OUT_DIR/iOS-arm64-device ios_framework_bundle -j 10 -# gn gen $OUT_DIR/iOS-x64-simulator --args=" -# rtc_objc_prefix = \"LK\" -# treat_warnings_as_errors = false -# target_os = \"ios\" -# ios_enable_code_signing = false -# is_component_build = false -# target_environment = \"simulator\" -# target_cpu = \"x64\" -# ios_deployment_target = \"13.0\" -# rtc_libvpx_build_vp9 = true -# use_goma = false -# rtc_enable_symbol_export = true -# rtc_include_tests = false -# rtc_build_examples = false -# rtc_use_h264 = false -# rtc_enable_protobuf = false -# enable_libaom = true -# rtc_include_dav1d_in_internal_decoder_factory = true -# use_rtti = true -# is_debug = $DEBUG -# enable_dsyms = $DEBUG -# enable_stripping = true" --ide=xcode +gn gen $OUT_DIR/iOS-x64-simulator --args=" + rtc_objc_prefix = \"LK\" + treat_warnings_as_errors = false + target_os = \"ios\" + ios_enable_code_signing = false + is_component_build = false + target_environment = \"simulator\" + target_cpu = \"x64\" + ios_deployment_target = \"13.0\" + rtc_libvpx_build_vp9 = true + use_goma = false + rtc_enable_symbol_export = true + rtc_include_tests = false + rtc_build_examples = false + rtc_use_h264 = false + rtc_enable_protobuf = false + enable_libaom = true + rtc_include_dav1d_in_internal_decoder_factory = true + use_rtti = true + is_debug = $DEBUG + enable_dsyms = $DEBUG + enable_stripping = true" --ide=xcode # ninja -C $OUT_DIR/iOS-x64-simulator ios_framework_bundle -j 10 -# gn gen $OUT_DIR/iOS-arm64-simulator --args=" -# rtc_objc_prefix = \"LK\" -# treat_warnings_as_errors = false -# target_os = \"ios\" -# ios_enable_code_signing = false -# is_component_build = false -# target_environment = \"simulator\" -# 
target_cpu = \"arm64\" -# ios_deployment_target = \"13.0\" -# use_goma = false -# rtc_enable_symbol_export = true -# rtc_libvpx_build_vp9 = true -# rtc_include_tests = false -# rtc_build_examples = false -# rtc_use_h264 = false -# rtc_enable_protobuf = false -# enable_libaom = true -# rtc_include_dav1d_in_internal_decoder_factory = true -# use_rtti = true -# is_debug = $DEBUG -# enable_dsyms = $DEBUG -# enable_stripping = true" --ide=xcode +gn gen $OUT_DIR/iOS-arm64-simulator --args=" + rtc_objc_prefix = \"LK\" + treat_warnings_as_errors = false + target_os = \"ios\" + ios_enable_code_signing = false + is_component_build = false + target_environment = \"simulator\" + target_cpu = \"x64\" + ios_deployment_target = \"13.0\" + rtc_libvpx_build_vp9 = true + use_goma = false + rtc_enable_symbol_export = true + rtc_include_tests = false + rtc_build_examples = false + rtc_use_h264 = false + rtc_enable_protobuf = false + enable_libaom = true + rtc_include_dav1d_in_internal_decoder_factory = true + use_rtti = true + is_debug = $DEBUG + enable_dsyms = $DEBUG + enable_stripping = true" --ide=xcode # ninja -C $OUT_DIR/iOS-arm64-simulator ios_framework_bundle -j 10 -# gn gen $OUT_DIR/macOS-x64 --args=" -# rtc_objc_prefix = \"LK\" -# treat_warnings_as_errors = false -# target_os=\"mac\" -# target_cpu=\"x64\" -# mac_deployment_target=\"10.15\" -# is_component_build = false -# target_cpu = \"x64\" -# use_goma = false -# rtc_enable_symbol_export = true -# rtc_libvpx_build_vp9 = true -# rtc_include_tests = false -# rtc_build_examples = false -# rtc_use_h264 = false -# rtc_enable_protobuf = false -# enable_libaom = true -# rtc_include_dav1d_in_internal_decoder_factory = true -# use_rtti = true -# is_debug = $DEBUG -# enable_dsyms = $DEBUG" --ide=xcode +gn gen $OUT_DIR/macOS-x64 --args=" + rtc_objc_prefix = \"LK\" + treat_warnings_as_errors = false + target_os=\"mac\" + target_cpu=\"x64\" + mac_deployment_target=\"10.15\" + is_component_build = false + target_cpu = \"x64\" + use_goma 
= false + rtc_enable_symbol_export = true + rtc_libvpx_build_vp9 = true + rtc_include_tests = false + rtc_build_examples = false + rtc_use_h264 = false + rtc_enable_protobuf = false + enable_libaom = true + rtc_include_dav1d_in_internal_decoder_factory = true + use_rtti = true + is_debug = $DEBUG + enable_dsyms = $DEBUG" --ide=xcode # ninja -C $OUT_DIR/macOS-x64 mac_framework_bundle -j 10 gn gen $OUT_DIR/macOS-arm64 --args=" - use_custom_libcxx = false - use_explicit_libcxx_modules = false rtc_objc_prefix = \"LK\" treat_warnings_as_errors = false target_os=\"mac\" @@ -278,14 +276,12 @@ gn gen $OUT_DIR/macOS-arm64 --args=" rtc_use_h264 = false rtc_enable_protobuf = false enable_libaom = true - enable_freetype = false - enable_rust = false rtc_include_dav1d_in_internal_decoder_factory = true use_rtti = true is_debug = $DEBUG enable_dsyms = $DEBUG" --ide=xcode -ninja -C $OUT_DIR/macOS-arm64 mac_framework_bundle -j 10 +# ninja -C $OUT_DIR/macOS-arm64 mac_framework_bundle -j 10 # rm -rf $OUT_DIR/*-lib $OUT_DIR/LiveKitWebRTC.* From af50180b45e015254c6dc9edd390d8702488fb63 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Mon, 16 Jun 2025 14:20:43 +0200 Subject: [PATCH 37/49] Revert "gn: bypass problematic visionOS check and privacy info" This reverts commit c6c6561757802e649a7348d77625c5eb4a5e6489. 
--- sdk/BUILD.gn | 52 +++++++++++++++++++++++++++++++++------------------- webrtc.gni | 2 +- 2 files changed, 34 insertions(+), 20 deletions(-) diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index f3641149c2..bfb8545480 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -174,10 +174,12 @@ if (is_ios || is_mac) { "objc/helpers/UIDevice+RTCDevice.h", "objc/helpers/UIDevice+RTCDevice.mm", ] - sources += [ - "objc/helpers/RTCCameraPreviewView.h", - "objc/helpers/RTCCameraPreviewView.m", - ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + sources += [ + "objc/helpers/RTCCameraPreviewView.h", + "objc/helpers/RTCCameraPreviewView.m", + ] + } frameworks += [ "UIKit.framework" ] } } @@ -280,7 +282,9 @@ if (is_ios || is_mac) { ":videoframebuffer_objc", ] - deps += [ ":metal_objc" ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + deps += [ ":metal_objc" ] + } } rtc_library("audio_device") { @@ -634,7 +638,7 @@ if (is_ios || is_mac) { "Metal.framework", "MetalKit.framework", ] - if (is_ios || is_mac) { + if ((is_ios || is_mac) && !(target_environment == "xrsimulator" || target_environment == "xrdevice")) { sources += [ "objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLVideoView.m", @@ -677,10 +681,12 @@ if (is_ios || is_mac) { "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCCameraVideoCapturer.m", ] - sources += [ - "objc/components/capturer/RTCFileVideoCapturer.h", - "objc/components/capturer/RTCFileVideoCapturer.m", - ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + sources += [ + "objc/components/capturer/RTCFileVideoCapturer.h", + "objc/components/capturer/RTCFileVideoCapturer.m", + ] + } frameworks = [ "AVFoundation.framework", "CoreVideo.framework", @@ -1329,9 +1335,11 @@ if (is_ios || is_mac) { "../test:wait_until", "//third_party/libyuv", ] - deps += [ - ":metal_objc", - ] + if (target_environment != 
"xrdevice" && target_environment != "xrsimulator") { + deps += [ + ":metal_objc", + ] + } if (rtc_ios_use_opengl_rendering) { deps += [ ":opengl_objc" ] } @@ -1508,10 +1516,12 @@ if (is_ios || is_mac) { "objc/components/audio/RTCAudioProcessingConfig.h", ] - common_objc_headers += [ - "objc/helpers/RTCCameraPreviewView.h", - "objc/components/renderer/metal/RTCMTLVideoView.h", - ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + common_objc_headers += [ + "objc/helpers/RTCCameraPreviewView.h", + "objc/components/renderer/metal/RTCMTLVideoView.h", + ] + } if (!build_with_chromium) { common_objc_headers += [ @@ -1541,7 +1551,9 @@ if (is_ios || is_mac) { ":videotoolbox_objc", ":darwin_privacy_info", ] - deps += [ ":metal_objc" ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + deps += [ ":metal_objc" ] + } if (!build_with_chromium) { deps += [ ":callback_logger_objc", @@ -1699,7 +1711,9 @@ if (is_ios || is_mac) { ":videotoolbox_objc", ":darwin_privacy_info", ] - deps += [ ":metal_objc" ] + if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + deps += [ ":metal_objc" ] + } if (!build_with_chromium) { deps += [ ":callback_logger_objc", diff --git a/webrtc.gni b/webrtc.gni index d3c3d58b8f..2a407632d6 100644 --- a/webrtc.gni +++ b/webrtc.gni @@ -1203,7 +1203,7 @@ if (is_mac || is_ios) { copy("copy_privacy_manifest_$target_name") { sources = [ privacy_manifest_path ] outputs = - [ "$root_out_dir/$privacy_manifest_out_path" ] + [ "$root_out_dir/$output_name.framework/$privacy_manifest_out_path" ] deps = [ ":create_privacy_manifest_$target_name" ] } From 3ff8a8c262db29f06774fff5cafa3719d48ba9ef Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Mon, 16 Jun 2025 14:36:40 +0200 Subject: [PATCH 38/49] Apple: remove duplicate (?) 
privacy file --- webrtc.gni | 27 --------------------------- 1 file changed, 27 deletions(-) diff --git a/webrtc.gni b/webrtc.gni index 2a407632d6..be03d29e02 100644 --- a/webrtc.gni +++ b/webrtc.gni @@ -1058,7 +1058,6 @@ if (is_mac || is_ios) { umbrella_header_path = "$target_gen_dir/$output_name.framework/WebRTC/$output_name.h" modulemap_path = "$target_gen_dir/Modules/module.modulemap" - privacy_manifest_path = "$target_gen_dir/$target_name/PrivacyInfo.xcprivacy" action_foreach("create_bracket_include_headers_$target_name") { script = "//tools_webrtc/apple/copy_framework_header.py" @@ -1099,7 +1098,6 @@ if (is_mac || is_ios) { deps += [ ":copy_framework_headers_$this_target_name", ":copy_modulemap_$this_target_name", - ":copy_privacy_manifest_$this_target_name", ":copy_umbrella_header_$this_target_name", ":create_bracket_include_headers_$this_target_name", ":modulemap_$this_target_name", @@ -1122,7 +1120,6 @@ if (is_mac || is_ios) { ":create_bracket_include_headers_$this_target_name") deps += [ - ":copy_privacy_manifest_$this_target_name", ":copy_umbrella_header_$this_target_name", ":create_bracket_include_headers_$this_target_name", ] @@ -1132,13 +1129,8 @@ if (is_mac || is_ios) { if (is_mac || target_environment == "catalyst") { # Catalyst frameworks use the same layout as regular Mac frameworks. headers_dir = "Versions/A/Headers" - - # The path to the privacy manifest file differs between Mac and iOS. 
- # https://developer.apple.com/documentation/bundleresources/privacy_manifest_files/adding_a_privacy_manifest_to_your_app_or_third-party_sdk - privacy_manifest_out_path = "Versions/A/Resources/PrivacyInfo.xcprivacy" } else { headers_dir = "Headers" - privacy_manifest_out_path = "PrivacyInfo.xcprivacy" } bundle_data("copy_framework_headers_$this_target_name") { @@ -1188,25 +1180,6 @@ if (is_mac || is_ios) { deps = [ ":umbrella_header_$target_name" ] } - - action("create_privacy_manifest_$target_name") { - script = "//tools_webrtc/apple/generate_privacy_manifest.py" - - args = [ - "--output", - rebase_path(privacy_manifest_path), - ] - - outputs = [ privacy_manifest_path ] - } - - copy("copy_privacy_manifest_$target_name") { - sources = [ privacy_manifest_path ] - outputs = - [ "$root_out_dir/$output_name.framework/$privacy_manifest_out_path" ] - - deps = [ ":create_privacy_manifest_$target_name" ] - } } } From 53a190f83b0c93a999e946e26c208beca74d30ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Mon, 16 Jun 2025 14:36:51 +0200 Subject: [PATCH 39/49] Apple: add missing iOS checks --- sdk/BUILD.gn | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/sdk/BUILD.gn b/sdk/BUILD.gn index bfb8545480..a710d600d6 100644 --- a/sdk/BUILD.gn +++ b/sdk/BUILD.gn @@ -282,7 +282,7 @@ if (is_ios || is_mac) { ":videoframebuffer_objc", ] - if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + if (is_ios && (target_environment != "xrdevice" && target_environment != "xrsimulator")) { deps += [ ":metal_objc" ] } } @@ -638,7 +638,7 @@ if (is_ios || is_mac) { "Metal.framework", "MetalKit.framework", ] - if ((is_ios || is_mac) && !(target_environment == "xrsimulator" || target_environment == "xrdevice")) { + if (is_mac || (is_ios && !(target_environment == "xrsimulator" || target_environment == "xrdevice"))) { sources += [ 
"objc/components/renderer/metal/RTCMTLVideoView.h", "objc/components/renderer/metal/RTCMTLVideoView.m", @@ -681,7 +681,7 @@ if (is_ios || is_mac) { "objc/components/capturer/RTCCameraVideoCapturer.h", "objc/components/capturer/RTCCameraVideoCapturer.m", ] - if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + if (is_ios && (target_environment != "xrdevice" && target_environment != "xrsimulator")) { sources += [ "objc/components/capturer/RTCFileVideoCapturer.h", "objc/components/capturer/RTCFileVideoCapturer.m", @@ -1711,7 +1711,7 @@ if (is_ios || is_mac) { ":videotoolbox_objc", ":darwin_privacy_info", ] - if (target_environment != "xrdevice" && target_environment != "xrsimulator") { + if (is_ios && (target_environment != "xrdevice" && target_environment != "xrsimulator")) { deps += [ ":metal_objc" ] } if (!build_with_chromium) { From 509ebe450e690293b864690a59c66b07c7004d21 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Mon, 16 Jun 2025 15:30:23 +0200 Subject: [PATCH 40/49] Apple: parametrize script --- build_xcframework_dynamic_livekit.sh | 384 ++++++--------------------- 1 file changed, 84 insertions(+), 300 deletions(-) diff --git a/build_xcframework_dynamic_livekit.sh b/build_xcframework_dynamic_livekit.sh index a0ae3166af..c86ff70ab4 100755 --- a/build_xcframework_dynamic_livekit.sh +++ b/build_xcframework_dynamic_livekit.sh @@ -11,168 +11,17 @@ if [ "$MODE" == "debug" ]; then DEBUG="true" fi -echo "build_xcframework_dynamic_livekit.sh: MODE=$MODE, DEBUG=$DEBUG" - -gn gen $OUT_DIR/tvOS-arm64-device --args=" - rtc_objc_prefix = \"LK\" - treat_warnings_as_errors = false - target_os = \"ios\" - ios_enable_code_signing = false - is_component_build = false - target_environment = \"appletv\" - target_cpu = \"arm64\" - ios_deployment_target = \"17.0\" - use_goma = false - rtc_enable_symbol_export = true - rtc_libvpx_build_vp9 = true - rtc_include_tests = false - 
rtc_build_examples = false - rtc_use_h264 = false - rtc_enable_protobuf = false - enable_libaom = true - rtc_include_dav1d_in_internal_decoder_factory = true - use_rtti = true - is_debug = $DEBUG - enable_dsyms = $DEBUG - enable_stripping = true" --ide=xcode - -# ninja -C $OUT_DIR/tvOS-arm64-device ios_framework_bundle -j 10 - -gn gen $OUT_DIR/tvOS-arm64-simulator --args=" - rtc_objc_prefix = \"LK\" - treat_warnings_as_errors = false - target_os = \"ios\" - ios_enable_code_signing = false - is_component_build = false - target_environment = \"appletvsimulator\" - target_cpu = \"arm64\" - ios_deployment_target = \"17.0\" - use_goma = false - rtc_enable_symbol_export = true - rtc_libvpx_build_vp9 = true - rtc_include_tests = false - rtc_build_examples = false - rtc_use_h264 = false - rtc_enable_protobuf = false - enable_libaom = true - rtc_include_dav1d_in_internal_decoder_factory = true - use_rtti = true - is_debug = $DEBUG - enable_dsyms = $DEBUG - enable_stripping = true" --ide=xcode - -# ninja -C $OUT_DIR/tvOS-arm64-simulator ios_framework_bundle -j 10 - -gn gen $OUT_DIR/xrOS-arm64-device --args=" - rtc_objc_prefix = \"LK\" - treat_warnings_as_errors = false - target_os = \"ios\" - ios_enable_code_signing = false - is_component_build = false - target_environment = \"xrdevice\" - target_cpu = \"arm64\" - ios_deployment_target = \"1.1.0\" - use_goma = false - rtc_enable_symbol_export = true - rtc_libvpx_build_vp9 = true - rtc_include_tests = false - rtc_build_examples = false - rtc_use_h264 = false - rtc_enable_protobuf = false - enable_libaom = true - rtc_include_dav1d_in_internal_decoder_factory = true - use_rtti = true - is_debug = $DEBUG - enable_dsyms = $DEBUG - enable_stripping = true" --ide=xcode - -# ninja -C $OUT_DIR/xrOS-arm64-device ios_framework_bundle -j 10 - -gn gen $OUT_DIR/xrOS-arm64-simulator --args=" - rtc_objc_prefix = \"LK\" - treat_warnings_as_errors = false - target_os = \"ios\" - ios_enable_code_signing = false - is_component_build = false - 
target_environment = \"xrsimulator\" - target_cpu = \"arm64\" - ios_deployment_target = \"1.1.0\" - use_goma = false - rtc_enable_symbol_export = true - rtc_libvpx_build_vp9 = true - rtc_include_tests = false - rtc_build_examples = false - rtc_use_h264 = false - rtc_enable_protobuf = false - enable_libaom = true - rtc_include_dav1d_in_internal_decoder_factory = true - use_rtti = true - is_debug = $DEBUG - enable_dsyms = $DEBUG - enable_stripping = true" --ide=xcode - -# ninja -C $OUT_DIR/xrOS-arm64-simulator ios_framework_bundle -j 10 +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +NC='\033[0m' -gn gen $OUT_DIR/catalyst-arm64 --args=" - rtc_objc_prefix = \"LK\" - treat_warnings_as_errors = false - target_os = \"ios\" - ios_enable_code_signing = false - is_component_build = false - target_environment = \"catalyst\" - target_cpu = \"arm64\" - ios_deployment_target = \"14.0\" - use_goma = false - rtc_enable_symbol_export = true - rtc_libvpx_build_vp9 = true - rtc_include_tests = false - rtc_build_examples = false - rtc_use_h264 = false - rtc_enable_protobuf = false - enable_libaom = true - rtc_include_dav1d_in_internal_decoder_factory = true - use_rtti = true - is_debug = $DEBUG - enable_dsyms = $DEBUG - enable_stripping = true" --ide=xcode - -# ninja -C $OUT_DIR/catalyst-arm64 ios_framework_bundle -j 10 - -gn gen $OUT_DIR/catalyst-x64 --args=" - rtc_objc_prefix = \"LK\" - treat_warnings_as_errors = false - target_os = \"ios\" - ios_enable_code_signing = false - is_component_build = false - target_environment = \"catalyst\" - target_cpu = \"x64\" - ios_deployment_target = \"14.0\" - use_goma = false - rtc_enable_symbol_export = true - rtc_libvpx_build_vp9 = true - rtc_include_tests = false - rtc_build_examples = false - rtc_use_h264 = false - rtc_enable_protobuf = false - enable_libaom = true - rtc_include_dav1d_in_internal_decoder_factory = true - use_rtti = true - is_debug = $DEBUG - enable_dsyms = $DEBUG - enable_stripping = true" --ide=xcode - -# ninja -C 
$OUT_DIR/catalyst-x64 ios_framework_bundle -j 10 +echo "build_xcframework_dynamic_livekit.sh: MODE=$MODE, DEBUG=$DEBUG" -gn gen $OUT_DIR/iOS-arm64-device --args=" +COMMON_ARGS=" rtc_objc_prefix = \"LK\" treat_warnings_as_errors = false - target_os = \"ios\" ios_enable_code_signing = false is_component_build = false - target_environment = \"device\" - target_cpu = \"arm64\" - ios_deployment_target = \"13.0\" - use_goma = false rtc_enable_symbol_export = true rtc_libvpx_build_vp9 = true rtc_include_tests = false @@ -184,147 +33,82 @@ gn gen $OUT_DIR/iOS-arm64-device --args=" use_rtti = true is_debug = $DEBUG enable_dsyms = $DEBUG - enable_stripping = true" --ide=xcode - -# ninja -C $OUT_DIR/iOS-arm64-device ios_framework_bundle -j 10 - -gn gen $OUT_DIR/iOS-x64-simulator --args=" - rtc_objc_prefix = \"LK\" - treat_warnings_as_errors = false - target_os = \"ios\" - ios_enable_code_signing = false - is_component_build = false - target_environment = \"simulator\" - target_cpu = \"x64\" - ios_deployment_target = \"13.0\" - rtc_libvpx_build_vp9 = true - use_goma = false - rtc_enable_symbol_export = true - rtc_include_tests = false - rtc_build_examples = false - rtc_use_h264 = false - rtc_enable_protobuf = false - enable_libaom = true - rtc_include_dav1d_in_internal_decoder_factory = true - use_rtti = true - is_debug = $DEBUG - enable_dsyms = $DEBUG - enable_stripping = true" --ide=xcode - -# ninja -C $OUT_DIR/iOS-x64-simulator ios_framework_bundle -j 10 - -gn gen $OUT_DIR/iOS-arm64-simulator --args=" - rtc_objc_prefix = \"LK\" - treat_warnings_as_errors = false - target_os = \"ios\" - ios_enable_code_signing = false - is_component_build = false - target_environment = \"simulator\" - target_cpu = \"x64\" - ios_deployment_target = \"13.0\" - rtc_libvpx_build_vp9 = true - use_goma = false - rtc_enable_symbol_export = true - rtc_include_tests = false - rtc_build_examples = false - rtc_use_h264 = false - rtc_enable_protobuf = false - enable_libaom = true - 
rtc_include_dav1d_in_internal_decoder_factory = true - use_rtti = true - is_debug = $DEBUG - enable_dsyms = $DEBUG - enable_stripping = true" --ide=xcode - -# ninja -C $OUT_DIR/iOS-arm64-simulator ios_framework_bundle -j 10 - -gn gen $OUT_DIR/macOS-x64 --args=" - rtc_objc_prefix = \"LK\" - treat_warnings_as_errors = false - target_os=\"mac\" - target_cpu=\"x64\" - mac_deployment_target=\"10.15\" - is_component_build = false - target_cpu = \"x64\" - use_goma = false - rtc_enable_symbol_export = true - rtc_libvpx_build_vp9 = true - rtc_include_tests = false - rtc_build_examples = false - rtc_use_h264 = false - rtc_enable_protobuf = false - enable_libaom = true - rtc_include_dav1d_in_internal_decoder_factory = true - use_rtti = true - is_debug = $DEBUG - enable_dsyms = $DEBUG" --ide=xcode - -# ninja -C $OUT_DIR/macOS-x64 mac_framework_bundle -j 10 - -gn gen $OUT_DIR/macOS-arm64 --args=" - rtc_objc_prefix = \"LK\" - treat_warnings_as_errors = false - target_os=\"mac\" - target_cpu=\"arm64\" - mac_deployment_target=\"10.15\" - is_component_build = false - use_goma = false - rtc_enable_symbol_export = true - rtc_libvpx_build_vp9 = true - rtc_include_tests = false - rtc_build_examples = false - rtc_use_h264 = false - rtc_enable_protobuf = false - enable_libaom = true - rtc_include_dav1d_in_internal_decoder_factory = true - use_rtti = true - is_debug = $DEBUG - enable_dsyms = $DEBUG" --ide=xcode - -# ninja -C $OUT_DIR/macOS-arm64 mac_framework_bundle -j 10 - -# rm -rf $OUT_DIR/*-lib $OUT_DIR/LiveKitWebRTC.* - -# mkdir -p $OUT_DIR/macOS-lib -# cp -R $OUT_DIR/macOS-x64/LiveKitWebRTC.framework $OUT_DIR/macOS-lib/LiveKitWebRTC.framework -# lipo -create -output $OUT_DIR/macOS-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/macOS-arm64/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/macOS-x64/LiveKitWebRTC.framework/LiveKitWebRTC - -# mkdir -p $OUT_DIR/catalyst-lib -# cp -R $OUT_DIR/catalyst-arm64/LiveKitWebRTC.framework $OUT_DIR/catalyst-lib/LiveKitWebRTC.framework -# lipo 
-create -output $OUT_DIR/catalyst-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/catalyst-arm64/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/catalyst-x64/LiveKitWebRTC.framework/LiveKitWebRTC - -# mkdir -p $OUT_DIR/iOS-device-lib -# cp -R $OUT_DIR/iOS-arm64-device/LiveKitWebRTC.framework $OUT_DIR/iOS-device-lib/LiveKitWebRTC.framework -# lipo -create -output $OUT_DIR/iOS-device-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/iOS-arm64-device/LiveKitWebRTC.framework/LiveKitWebRTC - -# mkdir -p $OUT_DIR/iOS-simulator-lib -# cp -R $OUT_DIR/iOS-arm64-simulator/LiveKitWebRTC.framework $OUT_DIR/iOS-simulator-lib/LiveKitWebRTC.framework -# lipo -create -output $OUT_DIR/iOS-simulator-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/iOS-arm64-simulator/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/iOS-x64-simulator/LiveKitWebRTC.framework/LiveKitWebRTC - -# xcodebuild -create-xcframework \ -# -framework $OUT_DIR/iOS-device-lib/LiveKitWebRTC.framework \ -# -framework $OUT_DIR/iOS-simulator-lib/LiveKitWebRTC.framework \ -# -framework $OUT_DIR/xrOS-arm64-device/LiveKitWebRTC.framework \ -# -framework $OUT_DIR/xrOS-arm64-simulator/LiveKitWebRTC.framework \ -# -framework $OUT_DIR/tvOS-arm64-device/LiveKitWebRTC.framework \ -# -framework $OUT_DIR/tvOS-arm64-simulator/LiveKitWebRTC.framework \ -# -framework $OUT_DIR/catalyst-lib/LiveKitWebRTC.framework \ -# -framework $OUT_DIR/macOS-lib/LiveKitWebRTC.framework \ -# -output $OUT_DIR/LiveKitWebRTC.xcframework - -# cp ./src/LICENSE $OUT_DIR/LiveKitWebRTC.xcframework/ - -# cd $OUT_DIR/LiveKitWebRTC.xcframework/macos-arm64_x86_64/LiveKitWebRTC.framework/ -# mv LiveKitWebRTC Versions/A/LiveKitWebRTC -# ln -s Versions/Current/LiveKitWebRTC LiveKitWebRTC -# cd ../../../../ - -# cd $OUT_DIR/LiveKitWebRTC.xcframework/ios-arm64_x86_64-maccatalyst/LiveKitWebRTC.framework/ -# mv LiveKitWebRTC Versions/A/LiveKitWebRTC -# ln -s Versions/Current/LiveKitWebRTC LiveKitWebRTC -# cd ../../../ -# zip --symlinks -9 -r 
LiveKitWebRTC.xcframework.zip LiveKitWebRTC.xcframework - -# # hash -# shasum -a 256 LiveKitWebRTC.xcframework.zip >LiveKitWebRTC.xcframework.zip.shasum -# cat LiveKitWebRTC.xcframework.zip.shasum + enable_stripping = true" + +PLATFORMS=( + "tvOS-arm64-device:target_os=\"ios\" target_environment=\"appletv\" target_cpu=\"arm64\" ios_deployment_target=\"17.0\"" + "tvOS-arm64-simulator:target_os=\"ios\" target_environment=\"appletvsimulator\" target_cpu=\"arm64\" ios_deployment_target=\"17.0\"" + "xrOS-arm64-device:target_os=\"ios\" target_environment=\"xrdevice\" target_cpu=\"arm64\" ios_deployment_target=\"1.1.0\"" + "xrOS-arm64-simulator:target_os=\"ios\" target_environment=\"xrsimulator\" target_cpu=\"arm64\" ios_deployment_target=\"1.1.0\"" + "catalyst-arm64:target_os=\"ios\" target_environment=\"catalyst\" target_cpu=\"arm64\" ios_deployment_target=\"14.0\"" + "catalyst-x64:target_os=\"ios\" target_environment=\"catalyst\" target_cpu=\"x64\" ios_deployment_target=\"14.0\"" + "iOS-arm64-device:target_os=\"ios\" target_environment=\"device\" target_cpu=\"arm64\" ios_deployment_target=\"13.0\"" + "iOS-x64-simulator:target_os=\"ios\" target_environment=\"simulator\" target_cpu=\"x64\" ios_deployment_target=\"13.0\"" + "iOS-arm64-simulator:target_os=\"ios\" target_environment=\"simulator\" target_cpu=\"arm64\" ios_deployment_target=\"13.0\"" + "macOS-x64:target_os=\"mac\" target_cpu=\"x64\" mac_deployment_target=\"10.15\"" + "macOS-arm64:target_os=\"mac\" target_cpu=\"arm64\" mac_deployment_target=\"10.15\"" +) + +for platform_config in "${PLATFORMS[@]}"; do + platform="${platform_config%%:*}" + config="${platform_config#*:}" + + echo "Generating configuration for $platform..." 
+ gn gen $OUT_DIR/$platform --args="$COMMON_ARGS $config" --ide=xcode + + if [[ $platform == *"macOS"* ]]; then + build_target="mac_framework_bundle" + else + build_target="ios_framework_bundle" + fi + + echo "${YELLOW}Building $platform...${NC}" + ninja -C $OUT_DIR/$platform $build_target -j 10 --quiet + echo "${GREEN}Build $platform completed${NC}" +done + +rm -rf $OUT_DIR/*-lib $OUT_DIR/LiveKitWebRTC.* + +mkdir -p $OUT_DIR/macOS-lib +cp -R $OUT_DIR/macOS-x64/LiveKitWebRTC.framework $OUT_DIR/macOS-lib/LiveKitWebRTC.framework +lipo -create -output $OUT_DIR/macOS-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/macOS-arm64/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/macOS-x64/LiveKitWebRTC.framework/LiveKitWebRTC + +mkdir -p $OUT_DIR/catalyst-lib +cp -R $OUT_DIR/catalyst-arm64/LiveKitWebRTC.framework $OUT_DIR/catalyst-lib/LiveKitWebRTC.framework +lipo -create -output $OUT_DIR/catalyst-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/catalyst-arm64/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/catalyst-x64/LiveKitWebRTC.framework/LiveKitWebRTC + +mkdir -p $OUT_DIR/iOS-device-lib +cp -R $OUT_DIR/iOS-arm64-device/LiveKitWebRTC.framework $OUT_DIR/iOS-device-lib/LiveKitWebRTC.framework +lipo -create -output $OUT_DIR/iOS-device-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/iOS-arm64-device/LiveKitWebRTC.framework/LiveKitWebRTC + +mkdir -p $OUT_DIR/iOS-simulator-lib +cp -R $OUT_DIR/iOS-arm64-simulator/LiveKitWebRTC.framework $OUT_DIR/iOS-simulator-lib/LiveKitWebRTC.framework +lipo -create -output $OUT_DIR/iOS-simulator-lib/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/iOS-arm64-simulator/LiveKitWebRTC.framework/LiveKitWebRTC $OUT_DIR/iOS-x64-simulator/LiveKitWebRTC.framework/LiveKitWebRTC + +xcodebuild -create-xcframework \ + -framework $OUT_DIR/iOS-device-lib/LiveKitWebRTC.framework \ + -framework $OUT_DIR/iOS-simulator-lib/LiveKitWebRTC.framework \ + -framework $OUT_DIR/xrOS-arm64-device/LiveKitWebRTC.framework \ + -framework 
$OUT_DIR/xrOS-arm64-simulator/LiveKitWebRTC.framework \ + -framework $OUT_DIR/tvOS-arm64-device/LiveKitWebRTC.framework \ + -framework $OUT_DIR/tvOS-arm64-simulator/LiveKitWebRTC.framework \ + -framework $OUT_DIR/catalyst-lib/LiveKitWebRTC.framework \ + -framework $OUT_DIR/macOS-lib/LiveKitWebRTC.framework \ + -output $OUT_DIR/LiveKitWebRTC.xcframework + +cp ./src/LICENSE $OUT_DIR/LiveKitWebRTC.xcframework/ + +cd $OUT_DIR/LiveKitWebRTC.xcframework/macos-arm64_x86_64/LiveKitWebRTC.framework/ +mv LiveKitWebRTC Versions/A/LiveKitWebRTC +ln -s Versions/Current/LiveKitWebRTC LiveKitWebRTC +cd ../../../../ + +cd $OUT_DIR/LiveKitWebRTC.xcframework/ios-arm64_x86_64-maccatalyst/LiveKitWebRTC.framework/ +mv LiveKitWebRTC Versions/A/LiveKitWebRTC +ln -s Versions/Current/LiveKitWebRTC LiveKitWebRTC +cd ../../../ +zip --symlinks -9 -r LiveKitWebRTC.xcframework.zip LiveKitWebRTC.xcframework + +# hash +shasum -a 256 LiveKitWebRTC.xcframework.zip >LiveKitWebRTC.xcframework.zip.shasum +cat LiveKitWebRTC.xcframework.zip.shasum From 9c77146bf5c800d6bcc5d6ecf45f9a1cbe9c9e2d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Wed, 18 Jun 2025 11:57:38 +0200 Subject: [PATCH 41/49] CR: Revert some namespaces --- api/video_codecs/video_encoder_factory.h | 3 --- sdk/android/src/jni/pc/peer_connection.cc | 2 +- sdk/objc/api/logging/RTCCallbackLogger.mm | 22 +++++++++---------- .../peerconnection/RTCAudioSource+Private.h | 8 ++++--- sdk/objc/api/peerconnection/RTCAudioSource.mm | 8 ++++--- sdk/objc/api/peerconnection/RTCAudioTrack.mm | 8 ++++--- .../api/peerconnection/RTCConfiguration.mm | 2 +- sdk/objc/api/peerconnection/RTCFileLogger.mm | 8 +++---- .../RTCPeerConnectionFactory.mm | 2 +- .../peerconnection/RTCRtpCodecCapability.mm | 8 +++---- sdk/objc/api/peerconnection/RTCSSLAdapter.mm | 4 ++-- sdk/objc/api/peerconnection/RTCVideoSource.mm | 2 +- .../api/video_codec/RTCVideoCodecConstants.mm | 6 ++--- 
sdk/objc/base/RTCLogging.h | 2 +- 14 files changed, 44 insertions(+), 41 deletions(-) diff --git a/api/video_codecs/video_encoder_factory.h b/api/video_codecs/video_encoder_factory.h index 2b20bc8db1..0e0509c0b6 100644 --- a/api/video_codecs/video_encoder_factory.h +++ b/api/video_codecs/video_encoder_factory.h @@ -85,9 +85,6 @@ class VideoEncoderFactory { virtual CodecSupport QueryCodecSupport( const SdpVideoFormat& format, std::optional scalability_mode) const { - // Default implementation, query for supported formats and check if the - // specified format is supported. Returns false if scalability_mode is - // specified. CodecSupport codec_support; codec_support.is_supported = format.IsCodecInList(GetSupportedFormats()); return codec_support; diff --git a/sdk/android/src/jni/pc/peer_connection.cc b/sdk/android/src/jni/pc/peer_connection.cc index 22428641e2..2953bd2b7c 100644 --- a/sdk/android/src/jni/pc/peer_connection.cc +++ b/sdk/android/src/jni/pc/peer_connection.cc @@ -280,7 +280,7 @@ void JavaToNativeRTCConfiguration( rtc_config->enable_any_address_ports = Java_RTCConfiguration_getEnableIceGatheringOnAnyAddressPorts(jni, j_rtc_config); - ScopedJavaLocalRef j_turn_logging_id = + jni_zero::ScopedJavaLocalRef j_turn_logging_id = Java_RTCConfiguration_getTurnLoggingId(jni, j_rtc_config); if (!IsNull(jni, j_turn_logging_id)) { rtc_config->turn_logging_id = JavaToNativeString(jni, j_turn_logging_id); diff --git a/sdk/objc/api/logging/RTCCallbackLogger.mm b/sdk/objc/api/logging/RTCCallbackLogger.mm index 1ff732d083..32554e8986 100644 --- a/sdk/objc/api/logging/RTCCallbackLogger.mm +++ b/sdk/objc/api/logging/RTCCallbackLogger.mm @@ -64,17 +64,17 @@ void OnLogMessage(absl::string_view message, } private: - static RTC_OBJC_TYPE(RTCLoggingSeverity) NativeSeverityToObjcSeverity(rtc::LoggingSeverity severity) { + static RTC_OBJC_TYPE(RTCLoggingSeverity) NativeSeverityToObjcSeverity(webrtc::LoggingSeverity severity) { switch (severity) { - case rtc::LS_VERBOSE: + case 
webrtc::LS_VERBOSE: return RTC_OBJC_TYPE(RTCLoggingSeverityVerbose); - case rtc::LS_INFO: + case webrtc::LS_INFO: return RTC_OBJC_TYPE(RTCLoggingSeverityInfo); - case rtc::LS_WARNING: + case webrtc::LS_WARNING: return RTC_OBJC_TYPE(RTCLoggingSeverityWarning); - case rtc::LS_ERROR: + case webrtc::LS_ERROR: return RTC_OBJC_TYPE(RTCLoggingSeverityError); - case rtc::LS_NONE: + case webrtc::LS_NONE: return RTC_OBJC_TYPE(RTCLoggingSeverityNone); } } @@ -141,15 +141,15 @@ - (void)stop { - (webrtc::LoggingSeverity)rtcSeverity { switch (_severity) { case RTC_OBJC_TYPE(RTCLoggingSeverityVerbose): - return rtc::LS_VERBOSE; + return webrtc::LS_VERBOSE; case RTC_OBJC_TYPE(RTCLoggingSeverityInfo): - return rtc::LS_INFO; + return webrtc::LS_INFO; case RTC_OBJC_TYPE(RTCLoggingSeverityWarning): - return rtc::LS_WARNING; + return webrtc::LS_WARNING; case RTC_OBJC_TYPE(RTCLoggingSeverityError): - return rtc::LS_ERROR; + return webrtc::LS_ERROR; case RTC_OBJC_TYPE(RTCLoggingSeverityNone): - return rtc::LS_NONE; + return webrtc::LS_NONE; } } diff --git a/sdk/objc/api/peerconnection/RTCAudioSource+Private.h b/sdk/objc/api/peerconnection/RTCAudioSource+Private.h index dba0b5d6f4..08832af4d2 100644 --- a/sdk/objc/api/peerconnection/RTCAudioSource+Private.h +++ b/sdk/objc/api/peerconnection/RTCAudioSource+Private.h @@ -30,8 +30,10 @@ (webrtc::scoped_refptr)nativeAudioSource NS_DESIGNATED_INITIALIZER; -- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource - type:(RTC_OBJC_TYPE(RTCMediaSourceType))type NS_UNAVAILABLE; +- (instancetype) + initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + nativeMediaSource: + (webrtc::scoped_refptr)nativeMediaSource + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type NS_UNAVAILABLE; @end diff --git a/sdk/objc/api/peerconnection/RTCAudioSource.mm b/sdk/objc/api/peerconnection/RTCAudioSource.mm index b66f7d052e..85331d0c0b 100644 --- 
a/sdk/objc/api/peerconnection/RTCAudioSource.mm +++ b/sdk/objc/api/peerconnection/RTCAudioSource.mm @@ -33,9 +33,11 @@ @implementation RTC_OBJC_TYPE (RTCAudioSource) { return self; } -- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource - type:(RTC_OBJC_TYPE(RTCMediaSourceType))type { +- (instancetype) + initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + nativeMediaSource: + (webrtc::scoped_refptr)nativeMediaSource + type:(RTC_OBJC_TYPE(RTCMediaSourceType))type { RTC_DCHECK_NOTREACHED(); return nil; } diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index 4c45340310..db8968750f 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -47,9 +47,11 @@ - (instancetype)initWithFactory: return self; } -- (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - nativeTrack:(rtc::scoped_refptr)nativeTrack - type:(RTC_OBJC_TYPE(RTCMediaStreamTrackType))type { +- (instancetype) + initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory + nativeTrack:(webrtc::scoped_refptr) + nativeTrack + type:(RTC_OBJC_TYPE(RTCMediaStreamTrackType))type { NSParameterAssert(factory); NSParameterAssert(nativeTrack); NSParameterAssert(type == RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio)); diff --git a/sdk/objc/api/peerconnection/RTCConfiguration.mm b/sdk/objc/api/peerconnection/RTCConfiguration.mm index 7dcdaa4388..30ab46bcc9 100644 --- a/sdk/objc/api/peerconnection/RTCConfiguration.mm +++ b/sdk/objc/api/peerconnection/RTCConfiguration.mm @@ -526,7 +526,7 @@ case RTC_OBJC_TYPE(RTCContinualGatheringPolicyGatherContinually): } } -+ (rtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTC_OBJC_TYPE(RTCEncryptionKeyType))keyType { ++ (webrtc::KeyType)nativeEncryptionKeyTypeForKeyType:(RTC_OBJC_TYPE(RTCEncryptionKeyType))keyType { switch (keyType) { case 
RTC_OBJC_TYPE(RTCEncryptionKeyTypeRSA): return rtc::KT_RSA; diff --git a/sdk/objc/api/peerconnection/RTCFileLogger.mm b/sdk/objc/api/peerconnection/RTCFileLogger.mm index 58ea2d94c4..1be4c04b13 100644 --- a/sdk/objc/api/peerconnection/RTCFileLogger.mm +++ b/sdk/objc/api/peerconnection/RTCFileLogger.mm @@ -158,13 +158,13 @@ case RTC_OBJC_TYPE(RTCFileLoggerTypeCall): - (webrtc::LoggingSeverity)rtcSeverity { switch (_severity) { case RTC_OBJC_TYPE(RTCFileLoggerSeverityVerbose): - return rtc::LS_VERBOSE; + return webrtc::LS_VERBOSE; case RTC_OBJC_TYPE(RTCFileLoggerSeverityInfo): - return rtc::LS_INFO; + return webrtc::LS_INFO; case RTC_OBJC_TYPE(RTCFileLoggerSeverityWarning): - return rtc::LS_WARNING; + return webrtc::LS_WARNING; case RTC_OBJC_TYPE(RTCFileLoggerSeverityError): - return rtc::LS_ERROR; + return webrtc::LS_ERROR; } } diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index a24a2e43c4..4de8d64857 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -77,7 +77,7 @@ @implementation RTC_OBJC_TYPE (RTCPeerConnectionFactory) { @synthesize nativeFactory = _nativeFactory; @synthesize audioDeviceModule = _audioDeviceModule; -- (rtc::scoped_refptr)createAudioDeviceModule:(BOOL)bypassVoiceProcessing { +- (webrtc::scoped_refptr)createAudioDeviceModule:(BOOL)bypassVoiceProcessing { #if defined(WEBRTC_IOS) return webrtc::CreateAudioDeviceModule(bypassVoiceProcessing); #else diff --git a/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm index bba1e3e079..5da4821dfa 100644 --- a/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm +++ b/sdk/objc/api/peerconnection/RTCRtpCodecCapability.mm @@ -41,10 +41,10 @@ - (instancetype)initWithNativeRtpCodecCapability: } _name = [NSString stringForStdString:nativeRtpCodecCapability.name]; switch 
(nativeRtpCodecCapability.kind) { - case cricket::MEDIA_TYPE_AUDIO: + case webrtc::MediaType::AUDIO: _kind = RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio); break; - case cricket::MEDIA_TYPE_VIDEO: + case webrtc::MediaType::VIDEO: _kind = RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo); break; default: @@ -96,9 +96,9 @@ - (NSString *)description { // NSString pointer comparison is safe here since "kind" is readonly and only // populated above. if (_kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindAudio)) { - rtpCodecCapability.kind = cricket::MEDIA_TYPE_AUDIO; + rtpCodecCapability.kind = webrtc::MediaType::AUDIO; } else if (_kind == RTC_CONSTANT_TYPE(RTCMediaStreamTrackKindVideo)) { - rtpCodecCapability.kind = cricket::MEDIA_TYPE_VIDEO; + rtpCodecCapability.kind = webrtc::MediaType::VIDEO; } else { RTC_DCHECK_NOTREACHED(); } diff --git a/sdk/objc/api/peerconnection/RTCSSLAdapter.mm b/sdk/objc/api/peerconnection/RTCSSLAdapter.mm index c56cff260a..0703cb968a 100644 --- a/sdk/objc/api/peerconnection/RTCSSLAdapter.mm +++ b/sdk/objc/api/peerconnection/RTCSSLAdapter.mm @@ -14,13 +14,13 @@ #include "rtc_base/ssl_adapter.h" BOOL RTC_OBJC_TYPE(RTCInitializeSSL)(void) { - BOOL initialized = rtc::InitializeSSL(); + BOOL initialized = webrtc::InitializeSSL(); RTC_DCHECK(initialized); return initialized; } BOOL RTC_OBJC_TYPE(RTCCleanupSSL)(void) { - BOOL cleanedUp = rtc::CleanupSSL(); + BOOL cleanedUp = webrtc::CleanupSSL(); RTC_DCHECK(cleanedUp); return cleanedUp; } diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.mm b/sdk/objc/api/peerconnection/RTCVideoSource.mm index cb8e495fd1..de3c1068d5 100644 --- a/sdk/objc/api/peerconnection/RTCVideoSource.mm +++ b/sdk/objc/api/peerconnection/RTCVideoSource.mm @@ -44,7 +44,7 @@ @implementation RTC_OBJC_TYPE (RTCVideoSource) { } - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)factory - nativeMediaSource:(rtc::scoped_refptr)nativeMediaSource + nativeMediaSource:(webrtc::scoped_refptr)nativeMediaSource 
type:(RTC_OBJC_TYPE(RTCMediaSourceType))type { RTC_DCHECK_NOTREACHED(); return nil; diff --git a/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm b/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm index 1bec295174..605606b564 100644 --- a/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm +++ b/sdk/objc/api/video_codec/RTCVideoCodecConstants.mm @@ -13,6 +13,6 @@ #include "media/base/media_constants.h" -NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name) = @(cricket::kVp8CodecName); -NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name) = @(cricket::kVp9CodecName); -NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name) = @(cricket::kAv1CodecName); +NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecVp8Name) = @(webrtc::kVp8CodecName); +NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecVp9Name) = @(webrtc::kVp9CodecName); +NSString *const RTC_CONSTANT_TYPE(RTCVideoCodecAv1Name) = @(webrtc::kAv1CodecName); diff --git a/sdk/objc/base/RTCLogging.h b/sdk/objc/base/RTCLogging.h index 8e3c7cb58b..0bf74c740a 100644 --- a/sdk/objc/base/RTCLogging.h +++ b/sdk/objc/base/RTCLogging.h @@ -12,7 +12,7 @@ #import "sdk/objc/base/RTCMacros.h" -// Subset of rtc::LoggingSeverity. +// Subset of webrtc::LoggingSeverity. 
typedef NS_ENUM(NSInteger, RTC_OBJC_TYPE(RTCLoggingSeverity)) { RTC_OBJC_TYPE(RTCLoggingSeverityVerbose), RTC_OBJC_TYPE(RTCLoggingSeverityInfo), From 0dc60230e899ce45953457cc002fa303cd11c151 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Wed, 18 Jun 2025 14:01:08 +0200 Subject: [PATCH 42/49] ObjC: Fix init warnings --- sdk/objc/api/RTCAudioRendererAdapter.mm | 3 ++- sdk/objc/api/peerconnection/RTCAudioSource.mm | 5 +++-- sdk/objc/api/peerconnection/RTCAudioTrack.mm | 6 ++++-- sdk/objc/api/peerconnection/RTCFrameCryptor.mm | 6 ++++-- .../api/peerconnection/RTCFrameCryptorKeyProvider.mm | 3 ++- sdk/objc/api/peerconnection/RTCIODevice.mm | 3 ++- sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm | 9 ++++++--- sdk/objc/api/peerconnection/RTCRtpTransceiver.mm | 3 ++- sdk/objc/api/peerconnection/RTCSessionDescription.mm | 3 ++- sdk/objc/api/peerconnection/RTCVideoSource.mm | 5 +++-- sdk/objc/api/peerconnection/RTCVideoTrack.mm | 6 ++++-- sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm | 3 ++- sdk/objc/base/RTCVideoCodecInfo.h | 5 ----- sdk/objc/base/RTCVideoCodecInfo.m | 4 ++-- sdk/objc/components/audio/RTCAudioBuffer.mm | 3 ++- .../components/audio/RTCAudioCustomProcessingAdapter.mm | 3 ++- sdk/objc/components/audio/RTCAudioProcessingConfig.mm | 3 ++- sdk/objc/components/audio/RTCAudioSessionConfiguration.m | 5 +++-- .../components/audio/RTCDefaultAudioProcessingModule.mm | 3 ++- sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm | 3 ++- .../video_codec/RTCVideoEncoderFactorySimulcast.mm | 5 +++-- 21 files changed, 54 insertions(+), 35 deletions(-) diff --git a/sdk/objc/api/RTCAudioRendererAdapter.mm b/sdk/objc/api/RTCAudioRendererAdapter.mm index 24d4c43c8f..8f2a92f553 100644 --- a/sdk/objc/api/RTCAudioRendererAdapter.mm +++ b/sdk/objc/api/RTCAudioRendererAdapter.mm @@ -102,7 +102,8 @@ @implementation RTC_OBJC_TYPE (RTCAudioRendererAdapter) { - 
(instancetype)initWithNativeRenderer:(id)audioRenderer { NSParameterAssert(audioRenderer); - if (self = [super init]) { + self = [super init]; + if (self) { _audioRenderer = audioRenderer; _adapter.reset(new webrtc::AudioRendererAdapter(self)); } diff --git a/sdk/objc/api/peerconnection/RTCAudioSource.mm b/sdk/objc/api/peerconnection/RTCAudioSource.mm index 85331d0c0b..458d24435e 100644 --- a/sdk/objc/api/peerconnection/RTCAudioSource.mm +++ b/sdk/objc/api/peerconnection/RTCAudioSource.mm @@ -25,9 +25,10 @@ @implementation RTC_OBJC_TYPE (RTCAudioSource) { RTC_DCHECK(factory); RTC_DCHECK(nativeAudioSource); - if (self = [super initWithFactory:factory + self = [super initWithFactory:factory nativeMediaSource:nativeAudioSource - type:RTC_OBJC_TYPE(RTCMediaSourceTypeAudio)]) { + type:RTC_OBJC_TYPE(RTCMediaSourceTypeAudio)]; + if (self) { _nativeAudioSource = nativeAudioSource; } return self; diff --git a/sdk/objc/api/peerconnection/RTCAudioTrack.mm b/sdk/objc/api/peerconnection/RTCAudioTrack.mm index db8968750f..b2d5915c09 100644 --- a/sdk/objc/api/peerconnection/RTCAudioTrack.mm +++ b/sdk/objc/api/peerconnection/RTCAudioTrack.mm @@ -40,7 +40,8 @@ - (instancetype)initWithFactory: std::string nativeId = [NSString stdStringForString:trackId]; webrtc::scoped_refptr track = factory.nativeFactory->CreateAudioTrack(nativeId, source.nativeAudioSource.get()); - if (self = [self initWithFactory:factory nativeTrack:track type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio)]) { + self = [self initWithFactory:factory nativeTrack:track type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio)]; + if (self) { _source = source; } @@ -55,7 +56,8 @@ - (instancetype)initWithFactory: NSParameterAssert(factory); NSParameterAssert(nativeTrack); NSParameterAssert(type == RTC_OBJC_TYPE(RTCMediaStreamTrackTypeAudio)); - if (self = [super initWithFactory:factory nativeTrack:nativeTrack type:type]) { + self = [super initWithFactory:factory nativeTrack:nativeTrack type:type]; + if (self) { _adapters = 
[NSMutableArray array]; _signalingThread = factory.signalingThread; } diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm index d339582a95..4a7b02497a 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptor.mm +++ b/sdk/objc/api/peerconnection/RTCFrameCryptor.mm @@ -116,7 +116,8 @@ - (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory participantId:(NSString *)participantId algorithm:(RTC_OBJC_TYPE(RTCCryptorAlgorithm))algorithm keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { - if (self = [super init]) { + self = [super init]; + if (self) { _lock = OS_UNFAIR_LOCK_INIT; webrtc::scoped_refptr nativeRtpSender = sender.nativeRtpSender; @@ -156,7 +157,8 @@ - (nullable instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory participantId:(NSString *)participantId algorithm:(RTC_OBJC_TYPE(RTCCryptorAlgorithm))algorithm keyProvider:(RTC_OBJC_TYPE(RTCFrameCryptorKeyProvider) *)keyProvider { - if (self = [super init]) { + self = [super init]; + if (self) { _lock = OS_UNFAIR_LOCK_INIT; webrtc::scoped_refptr nativeRtpReceiver = receiver.nativeRtpReceiver; diff --git a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm index 52112b5ea6..c261a3efb7 100644 --- a/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm +++ b/sdk/objc/api/peerconnection/RTCFrameCryptorKeyProvider.mm @@ -64,7 +64,8 @@ - (instancetype)initWithRatchetSalt:(NSData *)salt failureTolerance:(int)failureTolerance keyRingSize:(int)keyRingSize discardFrameWhenCryptorNotReady:(BOOL)discardFrameWhenCryptorNotReady { - if (self = [super init]) { + self = [super init]; + if (self) { webrtc::KeyProviderOptions options; options.ratchet_salt = std::vector((const uint8_t *)salt.bytes, ((const uint8_t *)salt.bytes) + salt.length); diff --git a/sdk/objc/api/peerconnection/RTCIODevice.mm b/sdk/objc/api/peerconnection/RTCIODevice.mm 
index e496b22612..21e416317f 100644 --- a/sdk/objc/api/peerconnection/RTCIODevice.mm +++ b/sdk/objc/api/peerconnection/RTCIODevice.mm @@ -34,7 +34,8 @@ + (instancetype)defaultDeviceWithType: (RTC_OBJC_TYPE(RTCIODeviceType))type { - (instancetype)initWithType: (RTC_OBJC_TYPE(RTCIODeviceType))type deviceId: (NSString *)deviceId name: (NSString* )name { - if (self = [super init]) { + self = [super init]; + if (self) { _type = type; _deviceId = deviceId; _name = name; diff --git a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm index 4de8d64857..cd9ed6ab5e 100644 --- a/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm +++ b/sdk/objc/api/peerconnection/RTCPeerConnectionFactory.mm @@ -251,7 +251,8 @@ - (instancetype)initWithNativeDependencies: } - (instancetype)initNative { - if (self = [super init]) { + self = [super init]; + if (self) { _networkThread = webrtc::Thread::CreateWithSocketServer(); _networkThread->SetName("network_thread", _networkThread.get()); BOOL result = _networkThread->Start(); @@ -271,7 +272,8 @@ - (instancetype)initNative { } - (instancetype)initWithNoMedia { - if (self = [self initNative]) { + self = [self initNative]; + if (self) { webrtc::PeerConnectionFactoryDependencies dependencies; dependencies.network_thread = _networkThread.get(); dependencies.worker_thread = _workerThread.get(); @@ -321,7 +323,8 @@ - (instancetype)initWithNativeAudioEncoderFactory: (std::unique_ptr) networkControllerFactory bypassVoiceProcessing:(BOOL)bypassVoiceProcessing { - if (self = [self initNative]) { + self = [self initNative]; + if (self) { webrtc::PeerConnectionFactoryDependencies dependencies; dependencies.network_thread = _networkThread.get(); dependencies.worker_thread = _workerThread.get(); diff --git a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm index 1f191ebcf1..c000e093d7 100644 --- 
a/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm +++ b/sdk/objc/api/peerconnection/RTCRtpTransceiver.mm @@ -32,7 +32,8 @@ @implementation RTC_OBJC_TYPE (RTCRtpTransceiverInit) @synthesize sendEncodings = _sendEncodings; - (instancetype)init { - if (self = [super init]) { + self = [super init]; + if (self) { _direction = RTC_OBJC_TYPE(RTCRtpTransceiverDirectionSendRecv); } return self; diff --git a/sdk/objc/api/peerconnection/RTCSessionDescription.mm b/sdk/objc/api/peerconnection/RTCSessionDescription.mm index 0d4008ef27..461129ba62 100644 --- a/sdk/objc/api/peerconnection/RTCSessionDescription.mm +++ b/sdk/objc/api/peerconnection/RTCSessionDescription.mm @@ -32,7 +32,8 @@ + (NSString *)stringForType:(RTC_OBJC_TYPE(RTCSdpType))type { } - (instancetype)initWithType:(RTC_OBJC_TYPE(RTCSdpType))type sdp:(NSString *)sdp { - if (self = [super init]) { + self = [super init]; + if (self) { _type = type; _sdp = [sdp copy]; } diff --git a/sdk/objc/api/peerconnection/RTCVideoSource.mm b/sdk/objc/api/peerconnection/RTCVideoSource.mm index de3c1068d5..9c57d2664d 100644 --- a/sdk/objc/api/peerconnection/RTCVideoSource.mm +++ b/sdk/objc/api/peerconnection/RTCVideoSource.mm @@ -35,9 +35,10 @@ @implementation RTC_OBJC_TYPE (RTCVideoSource) { nativeVideoSource { RTC_DCHECK(factory); RTC_DCHECK(nativeVideoSource); - if (self = [super initWithFactory:factory + self = [super initWithFactory:factory nativeMediaSource:nativeVideoSource - type:RTC_OBJC_TYPE(RTCMediaSourceTypeVideo)]) { + type:RTC_OBJC_TYPE(RTCMediaSourceTypeVideo)]; + if (self) { _nativeVideoSource = nativeVideoSource; } return self; diff --git a/sdk/objc/api/peerconnection/RTCVideoTrack.mm b/sdk/objc/api/peerconnection/RTCVideoTrack.mm index 513e1ef8be..2f04295d14 100644 --- a/sdk/objc/api/peerconnection/RTCVideoTrack.mm +++ b/sdk/objc/api/peerconnection/RTCVideoTrack.mm @@ -33,7 +33,8 @@ - (instancetype)initWithFactory: std::string nativeId = [NSString stdStringForString:trackId]; webrtc::scoped_refptr track = 
factory.nativeFactory->CreateVideoTrack(source.nativeVideoSource, nativeId); - if (self = [self initWithFactory:factory nativeTrack:track type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo)]) { + self = [self initWithFactory:factory nativeTrack:track type:RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo)]; + if (self) { _source = source; } return self; @@ -46,7 +47,8 @@ - (instancetype)initWithFactory:(RTC_OBJC_TYPE(RTCPeerConnectionFactory) *)facto NSParameterAssert(factory); NSParameterAssert(nativeMediaTrack); NSParameterAssert(type == RTC_OBJC_TYPE(RTCMediaStreamTrackTypeVideo)); - if (self = [super initWithFactory:factory nativeTrack:nativeMediaTrack type:type]) { + self = [super initWithFactory:factory nativeTrack:nativeMediaTrack type:type]; + if (self) { _adapters = [NSMutableArray array]; _workerThread = factory.workerThread; } diff --git a/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm index 8ca988562c..0efc96f6d9 100644 --- a/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm +++ b/sdk/objc/api/video_codec/RTCVideoEncoderSimulcast.mm @@ -40,7 +40,8 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderSimulcastBuilder) - (id)initWithPrimary:(id)primary fallback:(id)fallback videoCodecInfo:(RTC_OBJC_TYPE(RTCVideoCodecInfo) *)videoCodecInfo { - if (self = [super init]) { + self = [super init]; + if (self) { self->_primary = primary; self->_fallback = fallback; self->_videoCodecInfo = videoCodecInfo; diff --git a/sdk/objc/base/RTCVideoCodecInfo.h b/sdk/objc/base/RTCVideoCodecInfo.h index c141b0d82d..ad8e5acdfd 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.h +++ b/sdk/objc/base/RTCVideoCodecInfo.h @@ -27,11 +27,6 @@ RTC_OBJC_EXPORT parameters:(nullable NSDictionary *) parameters; -- (instancetype)initWithName:(NSString *)name - parameters:(NSDictionary *)parameters - scalabilityModes:(NSArray *)scalabilityModes - NS_DESIGNATED_INITIALIZER; - - (instancetype)initWithName:(NSString *)name parameters:(nullable 
NSDictionary *)parameters scalabilityModes:(nullable NSArray *)scalabilityModes diff --git a/sdk/objc/base/RTCVideoCodecInfo.m b/sdk/objc/base/RTCVideoCodecInfo.m index c2436d1c9b..370845f24b 100644 --- a/sdk/objc/base/RTCVideoCodecInfo.m +++ b/sdk/objc/base/RTCVideoCodecInfo.m @@ -28,8 +28,8 @@ - (instancetype)initWithName:(NSString *)name } - (instancetype)initWithName:(NSString *)name - parameters:(NSDictionary *)parameters - scalabilityModes:(NSArray *)scalabilityModes { + parameters:(nullable NSDictionary *)parameters + scalabilityModes:(nullable NSArray *)scalabilityModes { self = [super init]; if (self) { _name = name; diff --git a/sdk/objc/components/audio/RTCAudioBuffer.mm b/sdk/objc/components/audio/RTCAudioBuffer.mm index e37ea344dd..2d98ee691e 100644 --- a/sdk/objc/components/audio/RTCAudioBuffer.mm +++ b/sdk/objc/components/audio/RTCAudioBuffer.mm @@ -46,7 +46,8 @@ - (float *)rawBufferForChannel:(size_t)channel { #pragma mark - Private - (instancetype)initWithNativeType:(webrtc::AudioBuffer *)audioBuffer { - if (self = [super init]) { + self = [super init]; + if (self) { _audioBuffer = audioBuffer; } return self; diff --git a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm index c0f297c786..85b7b82609 100644 --- a/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm +++ b/sdk/objc/components/audio/RTCAudioCustomProcessingAdapter.mm @@ -93,7 +93,8 @@ @implementation RTC_OBJC_TYPE(RTCAudioCustomProcessingAdapter) { - (instancetype)initWithDelegate: (nullable id)audioCustomProcessingDelegate { - if (self = [super init]) { + self = [super init]; + if (self) { _lock = OS_UNFAIR_LOCK_INIT; _rawAudioCustomProcessingDelegate = audioCustomProcessingDelegate; _adapter = new webrtc::AudioCustomProcessingAdapter(self, &_lock); diff --git a/sdk/objc/components/audio/RTCAudioProcessingConfig.mm b/sdk/objc/components/audio/RTCAudioProcessingConfig.mm index d98a98204e..4cdc80cb22 
100644 --- a/sdk/objc/components/audio/RTCAudioProcessingConfig.mm +++ b/sdk/objc/components/audio/RTCAudioProcessingConfig.mm @@ -85,7 +85,8 @@ - (void)setIsAutoGainControl2Enabled:(BOOL)value { #pragma mark - Private - (instancetype)initWithNativeAudioProcessingConfig:(webrtc::AudioProcessing::Config)config { - if (self = [super init]) { + self = [super init]; + if (self) { _config = config; } return self; diff --git a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m index 0f3beb324c..36e4a0c68f 100644 --- a/sdk/objc/components/audio/RTCAudioSessionConfiguration.m +++ b/sdk/objc/components/audio/RTCAudioSessionConfiguration.m @@ -53,11 +53,12 @@ @implementation RTC_OBJC_TYPE (RTCAudioSessionConfiguration) @synthesize outputNumberOfChannels = _outputNumberOfChannels; - (instancetype)init { - if (self = [super init]) { + self = [super init]; + if (self) { // Use AVAudioSession values for default AVAudioSession *session = [AVAudioSession sharedInstance]; // Use a category which supports simultaneous recording and playback. - // By default, using this category implies that our app’s audio is + // By default, using this category implies that our app's audio is // nonmixable, hence activating the session will interrupt any other // audio sessions which are also nonmixable. 
_category = session.category; diff --git a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm index 1eda79e47a..9d9c78345a 100644 --- a/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm +++ b/sdk/objc/components/audio/RTCDefaultAudioProcessingModule.mm @@ -41,7 +41,8 @@ - (instancetype)initWithConfig:(nullable RTC_OBJC_TYPE(RTCAudioProcessingConfig) (nullable id)capturePostProcessingDelegate renderPreProcessingDelegate:(nullable id) renderPreProcessingDelegate { - if (self = [super init]) { + self = [super init]; + if (self) { webrtc::BuiltinAudioProcessingBuilder builder = webrtc::BuiltinAudioProcessingBuilder(); // TODO: Custom Config... diff --git a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm index 7aa7e93c46..45f7126cd5 100644 --- a/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm +++ b/sdk/objc/components/video_codec/RTCH264ProfileLevelId.mm @@ -108,7 +108,8 @@ - (instancetype)initWithHexString:(NSString *)hexString { } - (instancetype)initWithProfile:(RTC_OBJC_TYPE(RTCH264Profile))profile level:(RTC_OBJC_TYPE(RTCH264Level))level { - if (self = [super init]) { + self = [super init]; + if (self) { self.profile = profile; self.level = level; diff --git a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm index 66590d0d94..e9c9c5bde3 100644 --- a/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm +++ b/sdk/objc/components/video_codec/RTCVideoEncoderFactorySimulcast.mm @@ -28,8 +28,9 @@ @implementation RTC_OBJC_TYPE (RTCVideoEncoderFactorySimulcast) @synthesize fallback = _fallback; - (instancetype)initWithPrimary:(id)primary - fallback:(id)fallback { - if (self = [super init]) { + fallback:(id)fallback { + self = [super init]; + if (self) { _primary = primary; _fallback = fallback; } From 
a3ccf161c9c817f3c6e17fc2a66e1336ff353a64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Wed, 18 Jun 2025 14:10:54 +0200 Subject: [PATCH 43/49] xrOS: bump target --- build_xcframework_dynamic_livekit.sh | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/build_xcframework_dynamic_livekit.sh b/build_xcframework_dynamic_livekit.sh index c86ff70ab4..891b4ae2c0 100755 --- a/build_xcframework_dynamic_livekit.sh +++ b/build_xcframework_dynamic_livekit.sh @@ -13,6 +13,7 @@ fi GREEN='\033[0;32m' YELLOW='\033[1;33m' +RED='\033[0;31m' NC='\033[0m' echo "build_xcframework_dynamic_livekit.sh: MODE=$MODE, DEBUG=$DEBUG" @@ -38,8 +39,8 @@ COMMON_ARGS=" PLATFORMS=( "tvOS-arm64-device:target_os=\"ios\" target_environment=\"appletv\" target_cpu=\"arm64\" ios_deployment_target=\"17.0\"" "tvOS-arm64-simulator:target_os=\"ios\" target_environment=\"appletvsimulator\" target_cpu=\"arm64\" ios_deployment_target=\"17.0\"" - "xrOS-arm64-device:target_os=\"ios\" target_environment=\"xrdevice\" target_cpu=\"arm64\" ios_deployment_target=\"1.1.0\"" - "xrOS-arm64-simulator:target_os=\"ios\" target_environment=\"xrsimulator\" target_cpu=\"arm64\" ios_deployment_target=\"1.1.0\"" + "xrOS-arm64-device:target_os=\"ios\" target_environment=\"xrdevice\" target_cpu=\"arm64\" ios_deployment_target=\"2.2.0\"" + "xrOS-arm64-simulator:target_os=\"ios\" target_environment=\"xrsimulator\" target_cpu=\"arm64\" ios_deployment_target=\"2.2.0\"" "catalyst-arm64:target_os=\"ios\" target_environment=\"catalyst\" target_cpu=\"arm64\" ios_deployment_target=\"14.0\"" "catalyst-x64:target_os=\"ios\" target_environment=\"catalyst\" target_cpu=\"x64\" ios_deployment_target=\"14.0\"" "iOS-arm64-device:target_os=\"ios\" target_environment=\"device\" target_cpu=\"arm64\" ios_deployment_target=\"13.0\"" @@ -64,6 +65,10 @@ for platform_config in "${PLATFORMS[@]}"; do echo "${YELLOW}Building $platform...${NC}" ninja -C 
$OUT_DIR/$platform $build_target -j 10 --quiet + if [ $? -ne 0 ]; then + echo "${RED}Build $platform failed${NC}" + exit 1 + fi echo "${GREEN}Build $platform completed${NC}" done From 8c9a3499036948590c2fafd3a7ef6867f4a7a709 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Fri, 20 Jun 2025 10:40:49 +0200 Subject: [PATCH 44/49] Android: silence deprecation --- rtc_base/socket_address.cc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/rtc_base/socket_address.cc b/rtc_base/socket_address.cc index de7a65f96d..6511024c11 100644 --- a/rtc_base/socket_address.cc +++ b/rtc_base/socket_address.cc @@ -155,7 +155,10 @@ std::string SocketAddress::HostAsSensitiveURIString() const { } std::string SocketAddress::PortAsString() const { +#pragma clang diagnostic push +#pragma clang diagnostic ignored "-Wdeprecated-declarations" return rtc::ToString(port_); +#pragma clang diagnostic pop } std::string SocketAddress::ToString() const { From c0f0c9297813f6d4a91c3ad2d93400815a050404 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Fri, 20 Jun 2025 11:11:24 +0200 Subject: [PATCH 45/49] Android: fix optional --- sdk/android/src/jni/pc/audio_sink.cc | 2 +- sdk/android/src/jni/pc/audio_sink.h | 3 ++- sdk/android/src/jni/video_codec_info.cc | 2 +- 3 files changed, 4 insertions(+), 3 deletions(-) diff --git a/sdk/android/src/jni/pc/audio_sink.cc b/sdk/android/src/jni/pc/audio_sink.cc index 5bd88c75f6..f889dbb552 100644 --- a/sdk/android/src/jni/pc/audio_sink.cc +++ b/sdk/android/src/jni/pc/audio_sink.cc @@ -26,7 +26,7 @@ void AudioTrackSinkWrapper::OnData( int sample_rate, size_t number_of_channels, size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms) { + std::optional absolute_capture_timestamp_ms) { JNIEnv* jni = AttachCurrentThreadIfNeeded(); int length = (bits_per_sample / 8) * number_of_channels * number_of_frames; 
ScopedJavaLocalRef audio_buffer = diff --git a/sdk/android/src/jni/pc/audio_sink.h b/sdk/android/src/jni/pc/audio_sink.h index 809f460e0c..2493f93ee6 100644 --- a/sdk/android/src/jni/pc/audio_sink.h +++ b/sdk/android/src/jni/pc/audio_sink.h @@ -11,6 +11,7 @@ #ifndef SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ #define SDK_ANDROID_SRC_JNI_AUDIO_TRACK_SINK_H_ +#include #include #include "api/media_stream_interface.h" @@ -30,7 +31,7 @@ class AudioTrackSinkWrapper : public webrtc::AudioTrackSinkInterface { int sample_rate, size_t number_of_channels, size_t number_of_frames, - absl::optional absolute_capture_timestamp_ms) override; + std::optional absolute_capture_timestamp_ms) override; const ScopedJavaGlobalRef j_sink_; }; diff --git a/sdk/android/src/jni/video_codec_info.cc b/sdk/android/src/jni/video_codec_info.cc index a85dde67dc..9c032894ca 100644 --- a/sdk/android/src/jni/video_codec_info.cc +++ b/sdk/android/src/jni/video_codec_info.cc @@ -27,7 +27,7 @@ SdpVideoFormat VideoCodecInfoToSdpVideoFormat(JNIEnv* jni, scalability_modes; for (auto mode : params) { auto scalability_mode = ScalabilityModeFromString(mode); - if (scalability_mode != absl::nullopt) { + if (scalability_mode) { scalability_modes.push_back(*scalability_mode); } } From 0c0d1e82a8107b4a8142d1ff0ea72baf85b58111 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Fri, 20 Jun 2025 15:03:55 +0200 Subject: [PATCH 46/49] Android: fix more build errors --- .../pc/external_audio_processing_factory.cc | 6 ++++-- .../pc/external_audio_processing_factory.h | 4 ++-- sdk/android/src/jni/pc/frame_cryptor.cc | 14 ++++++------- .../src/jni/pc/frame_cryptor_key_provider.cc | 20 +++++++++---------- 4 files changed, 23 insertions(+), 21 deletions(-) diff --git a/sdk/android/src/jni/pc/external_audio_processing_factory.cc b/sdk/android/src/jni/pc/external_audio_processing_factory.cc index 3d7ee7a4d9..c7ec8dded6 100644 --- 
a/sdk/android/src/jni/pc/external_audio_processing_factory.cc +++ b/sdk/android/src/jni/pc/external_audio_processing_factory.cc @@ -19,6 +19,8 @@ #include #include +#include "api/audio/builtin_audio_processing_builder.h" +#include "api/environment/environment_factory.h" #include "api/make_ref_counted.h" #include "rtc_base/ref_counted_object.h" #include "sdk/android/generated_peerconnection_jni/ExternalAudioProcessingFactory_jni.h" @@ -63,10 +65,10 @@ ExternalAudioProcessingFactory::ExternalAudioProcessingFactory() { std::unique_ptr render_pre_processor( render_pre_processor_); - apm_ = webrtc::AudioProcessingBuilder() + apm_ = webrtc::BuiltinAudioProcessingBuilder() .SetCapturePostProcessing(std::move(capture_post_processor)) .SetRenderPreProcessing(std::move(render_pre_processor)) - .Create(); + .Build(CreateEnvironment()); webrtc::AudioProcessing::Config config; apm_->ApplyConfig(config); diff --git a/sdk/android/src/jni/pc/external_audio_processing_factory.h b/sdk/android/src/jni/pc/external_audio_processing_factory.h index 5dfebe81fc..bf0bc886b7 100644 --- a/sdk/android/src/jni/pc/external_audio_processing_factory.h +++ b/sdk/android/src/jni/pc/external_audio_processing_factory.h @@ -28,7 +28,7 @@ namespace jni { class ExternalAudioProcessingJni : public webrtc::ExternalAudioProcessingInterface, - public rtc::RefCountInterface { + public webrtc::RefCountInterface { public: ExternalAudioProcessingJni(JNIEnv* jni, const JavaRef& j_processing); ~ExternalAudioProcessingJni(); @@ -43,7 +43,7 @@ class ExternalAudioProcessingJni const ScopedJavaGlobalRef j_processing_; }; -class ExternalAudioProcessingFactory : public rtc::RefCountInterface { +class ExternalAudioProcessingFactory : public webrtc::RefCountInterface { public: ExternalAudioProcessingFactory(); virtual ~ExternalAudioProcessingFactory() = default; diff --git a/sdk/android/src/jni/pc/frame_cryptor.cc b/sdk/android/src/jni/pc/frame_cryptor.cc index 33b938f7f4..812814d84c 100644 --- 
a/sdk/android/src/jni/pc/frame_cryptor.cc +++ b/sdk/android/src/jni/pc/frame_cryptor.cc @@ -108,12 +108,12 @@ webrtc::FrameCryptorTransformer::Algorithm AlgorithmFromIndex(int index) { } } -static base::android::ScopedJavaLocalRef +static ScopedJavaLocalRef JNI_FrameCryptorFactory_CreateFrameCryptorForRtpReceiver( JNIEnv* env, jlong native_factory, jlong j_rtp_receiver_pointer, - const base::android::JavaParamRef& participantId, + const JavaParamRef& participantId, jint j_algorithm_index, jlong j_key_provider) { OwnedFactoryAndThreads* factory = @@ -140,12 +140,12 @@ JNI_FrameCryptorFactory_CreateFrameCryptorForRtpReceiver( return NativeToJavaFrameCryptor(env, frame_crypto_transformer); } -static base::android::ScopedJavaLocalRef +static ScopedJavaLocalRef JNI_FrameCryptorFactory_CreateFrameCryptorForRtpSender( JNIEnv* env, jlong native_factory, jlong j_rtp_sender_pointer, - const base::android::JavaParamRef& participantId, + const JavaParamRef& participantId, jint j_algorithm_index, jlong j_key_provider) { OwnedFactoryAndThreads* factory = @@ -170,13 +170,13 @@ JNI_FrameCryptorFactory_CreateFrameCryptorForRtpSender( return NativeToJavaFrameCryptor(env, frame_crypto_transformer); } -static base::android::ScopedJavaLocalRef +static ScopedJavaLocalRef JNI_FrameCryptorFactory_CreateFrameCryptorKeyProvider( JNIEnv* env, jboolean j_shared, - const base::android::JavaParamRef& j_ratchetSalt, + const JavaParamRef& j_ratchetSalt, jint j_ratchetWindowSize, - const base::android::JavaParamRef& j_uncryptedMagicBytes, + const JavaParamRef& j_uncryptedMagicBytes, jint j_failureTolerance, jint j_keyRingSize, jboolean j_discardFrameWhenCryptorNotReady) { diff --git a/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc index e41d16ed91..3456ab24b3 100644 --- a/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc +++ b/sdk/android/src/jni/pc/frame_cryptor_key_provider.cc @@ -37,13 +37,13 @@ static jboolean 
JNI_FrameCryptorKeyProvider_SetSharedKey( JNIEnv* jni, jlong j_key_provider, jint j_index, - const base::android::JavaParamRef& j_key) { + const JavaParamRef& j_key) { auto key = JavaToNativeByteArray(jni, j_key); return reinterpret_cast(j_key_provider) ->SetSharedKey(j_index,std::vector(key.begin(), key.end())); } -static base::android::ScopedJavaLocalRef +static ScopedJavaLocalRef JNI_FrameCryptorKeyProvider_RatchetSharedKey( JNIEnv* env, jlong keyProviderPointer, @@ -56,7 +56,7 @@ JNI_FrameCryptorKeyProvider_RatchetSharedKey( return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); } -static base::android::ScopedJavaLocalRef +static ScopedJavaLocalRef JNI_FrameCryptorKeyProvider_ExportSharedKey( JNIEnv* env, jlong keyProviderPointer, @@ -71,9 +71,9 @@ JNI_FrameCryptorKeyProvider_ExportSharedKey( static jboolean JNI_FrameCryptorKeyProvider_SetKey( JNIEnv* jni, jlong j_key_provider, - const base::android::JavaParamRef& participantId, + const JavaParamRef& participantId, jint j_index, - const base::android::JavaParamRef& j_key) { + const JavaParamRef& j_key) { auto key = JavaToNativeByteArray(jni, j_key); auto participant_id = JavaToStdString(jni, participantId); return reinterpret_cast(j_key_provider) @@ -81,11 +81,11 @@ static jboolean JNI_FrameCryptorKeyProvider_SetKey( std::vector(key.begin(), key.end())); } -static base::android::ScopedJavaLocalRef +static ScopedJavaLocalRef JNI_FrameCryptorKeyProvider_RatchetKey( JNIEnv* env, jlong keyProviderPointer, - const base::android::JavaParamRef& participantId, + const JavaParamRef& participantId, jint j_index) { auto participant_id = JavaToStdString(env, participantId); auto key_provider = @@ -96,11 +96,11 @@ JNI_FrameCryptorKeyProvider_RatchetKey( return NativeToJavaByteArray(env, rtc::ArrayView(int8tKey)); } -static base::android::ScopedJavaLocalRef +static ScopedJavaLocalRef JNI_FrameCryptorKeyProvider_ExportKey( JNIEnv* env, jlong keyProviderPointer, - const base::android::JavaParamRef& participantId, + const 
JavaParamRef& participantId, jint j_index) { auto participant_id = JavaToStdString(env, participantId); auto key_provider = @@ -113,7 +113,7 @@ JNI_FrameCryptorKeyProvider_ExportKey( static void JNI_FrameCryptorKeyProvider_SetSifTrailer( JNIEnv* jni, jlong j_key_provider, - const base::android::JavaParamRef& j_trailer) { + const JavaParamRef& j_trailer) { auto trailer = JavaToNativeByteArray(jni, j_trailer); reinterpret_cast(j_key_provider) ->SetSifTrailer(std::vector(trailer.begin(), trailer.end())); From d49a6e1e7232fb5ef08d0121665840b1867bae95 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Fri, 20 Jun 2025 16:31:10 +0200 Subject: [PATCH 47/49] Android: fix imports --- sdk/android/src/jni/pc/peer_connection_factory.cc | 2 -- 1 file changed, 2 deletions(-) diff --git a/sdk/android/src/jni/pc/peer_connection_factory.cc b/sdk/android/src/jni/pc/peer_connection_factory.cc index 4489ed039a..7404a950f8 100644 --- a/sdk/android/src/jni/pc/peer_connection_factory.cc +++ b/sdk/android/src/jni/pc/peer_connection_factory.cc @@ -32,8 +32,6 @@ #include "sdk/android/src/jni/jni_helpers.h" #include "sdk/android/src/jni/logging/log_sink.h" #include "sdk/android/src/jni/pc/android_network_monitor.h" -#include "sdk/android/src/jni/pc/audio.h" -#include "sdk/android/src/jni/pc/rtp_capabilities.h" #include "sdk/android/src/jni/pc/ice_candidate.h" #include "sdk/android/src/jni/pc/media_stream_track.h" #include "sdk/android/src/jni/pc/owned_factory_and_threads.h" From 93ac5eea0188249c7b314a415865df9eb1d20bea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Fri, 20 Jun 2025 17:07:35 +0200 Subject: [PATCH 48/49] Android: fix jni --- sdk/android/src/jni/simulcast_video_encoder.cc | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/sdk/android/src/jni/simulcast_video_encoder.cc b/sdk/android/src/jni/simulcast_video_encoder.cc 
index 6874c1821b..afb64271d5 100644 --- a/sdk/android/src/jni/simulcast_video_encoder.cc +++ b/sdk/android/src/jni/simulcast_video_encoder.cc @@ -17,7 +17,7 @@ extern "C" { // (VideoEncoderFactory primary, VideoEncoderFactory fallback, VideoCodecInfo info) JNIEXPORT jlong JNICALL Java_org_webrtc_SimulcastVideoEncoder_nativeCreateEncoder(JNIEnv *env, jclass klass, jlong webrtcEnvRef, jobject primary, jobject fallback, jobject info) { RTC_LOG(LS_INFO) << "Create simulcast video encoder"; - JavaParamRef info_ref(info); + JavaParamRef info_ref(env, info); SdpVideoFormat format = VideoCodecInfoToSdpVideoFormat(env, info_ref); // TODO: 影響は軽微だが、リークする可能性があるので将来的に修正したい From 123975aff11e2d44201f3a4a08a5c4caa765a256 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?B=C5=82az=CC=87ej=20Pankowski?= <86720177+pblazej@users.noreply.github.com> Date: Mon, 23 Jun 2025 10:08:42 +0200 Subject: [PATCH 49/49] Android: fix ordinal warning --- .../api/org/webrtc/FrameCryptorAlgorithm.java | 12 +++++++++++- sdk/android/api/org/webrtc/FrameCryptorFactory.java | 4 ++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java b/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java index 121656cc99..20b783e9ab 100644 --- a/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java +++ b/sdk/android/api/org/webrtc/FrameCryptorAlgorithm.java @@ -17,5 +17,15 @@ package org.webrtc; public enum FrameCryptorAlgorithm { - AES_GCM, + AES_GCM(0); + + private final int value; + + FrameCryptorAlgorithm(int value) { + this.value = value; + } + + public int getValue() { + return value; + } } \ No newline at end of file diff --git a/sdk/android/api/org/webrtc/FrameCryptorFactory.java b/sdk/android/api/org/webrtc/FrameCryptorFactory.java index 865a4b78bb..a2a165c711 100644 --- a/sdk/android/api/org/webrtc/FrameCryptorFactory.java +++ b/sdk/android/api/org/webrtc/FrameCryptorFactory.java @@ -25,13 +25,13 @@ public static FrameCryptorKeyProvider 
createFrameCryptorKeyProvider( public static FrameCryptor createFrameCryptorForRtpSender(PeerConnectionFactory factory, RtpSender rtpSender, String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { return nativeCreateFrameCryptorForRtpSender(factory.getNativeOwnedFactoryAndThreads(),rtpSender.getNativeRtpSender(), participantId, - algorithm.ordinal(), keyProvider.getNativeKeyProvider()); + algorithm.getValue(), keyProvider.getNativeKeyProvider()); } public static FrameCryptor createFrameCryptorForRtpReceiver(PeerConnectionFactory factory, RtpReceiver rtpReceiver, String participantId, FrameCryptorAlgorithm algorithm, FrameCryptorKeyProvider keyProvider) { return nativeCreateFrameCryptorForRtpReceiver(factory.getNativeOwnedFactoryAndThreads(), rtpReceiver.getNativeRtpReceiver(), participantId, - algorithm.ordinal(), keyProvider.getNativeKeyProvider()); + algorithm.getValue(), keyProvider.getNativeKeyProvider()); } private static native FrameCryptor nativeCreateFrameCryptorForRtpSender(long factory,