diff --git a/Sources/StreamVideo/WebRTC/v2/Extensions/AVFoundation/AVCaptureDevice+OutputFormat.swift b/Sources/StreamVideo/WebRTC/v2/Extensions/AVFoundation/AVCaptureDevice+OutputFormat.swift
index e44c0015c..9e0956efc 100644
--- a/Sources/StreamVideo/WebRTC/v2/Extensions/AVFoundation/AVCaptureDevice+OutputFormat.swift
+++ b/Sources/StreamVideo/WebRTC/v2/Extensions/AVFoundation/AVCaptureDevice+OutputFormat.swift
@@ -22,7 +22,7 @@ extension AVCaptureDevice {
     ///
     /// - Note: The formats are sorted by their area difference relative to the preferred dimensions
     /// before applying the selection criteria.
-    func outputFormat(
+    public func outputFormat(
         preferredDimensions: CMVideoDimensions,
         preferredFrameRate: Int
     ) -> AVCaptureDevice.Format? {
diff --git a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/CaptureDeviceProtocol.swift b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/CaptureDeviceProtocol.swift
index 6bd460149..e8fa141a1 100644
--- a/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/CaptureDeviceProtocol.swift
+++ b/Sources/StreamVideo/WebRTC/v2/PeerConnection/Protocols/CaptureDeviceProtocol.swift
@@ -5,7 +5,7 @@
 import AVFoundation
 
 /// A protocol that defines the properties and methods for a capture device.
-protocol CaptureDeviceProtocol: Sendable {
+public protocol CaptureDeviceProtocol: Sendable {
 
     /// The position of the capture device.
     var position: AVCaptureDevice.Position { get }
diff --git a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamCaptureDeviceProvider.swift b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamCaptureDeviceProvider.swift
index 45307c65d..d6c63e464 100644
--- a/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamCaptureDeviceProvider.swift
+++ b/Sources/StreamVideo/WebRTC/v2/VideoCapturing/StreamCaptureDeviceProvider.swift
@@ -6,7 +6,7 @@ import Foundation
 import StreamWebRTC
 
 /// A protocol defining methods for providing capture devices.
-protocol CaptureDeviceProviding {
+public protocol CaptureDeviceProviding {
     /// Returns a capture device for the specified AVCaptureDevice position.
     /// - Parameter position: The position of the AVCaptureDevice.
     /// - Returns: A capture device conforming to CaptureDeviceProtocol.
@@ -64,7 +64,7 @@ enum CaptureDeviceProviderKey: InjectionKey {
 /// An extension to manage injected values.
 extension InjectedValues {
     /// The capture device provider.
-    var captureDeviceProvider: CaptureDeviceProviding {
+    public var captureDeviceProvider: CaptureDeviceProviding {
         get { Self[CaptureDeviceProviderKey.self] }
         set { Self[CaptureDeviceProviderKey.self] = newValue }
     }
 }
diff --git a/Sources/StreamVideoSwiftUI/CallingViews/LobbyViewModel.swift b/Sources/StreamVideoSwiftUI/CallingViews/LobbyViewModel.swift
index 60302d77e..08d4858cb 100644
--- a/Sources/StreamVideoSwiftUI/CallingViews/LobbyViewModel.swift
+++ b/Sources/StreamVideoSwiftUI/CallingViews/LobbyViewModel.swift
@@ -11,7 +11,7 @@ import SwiftUI
 public class LobbyViewModel: ObservableObject, @unchecked Sendable {
 
     @Injected(\.callAudioRecorder) private var callAudioRecorder
 
-    private let camera: Any
+    private let camera: CameraAdapter
     private var imagesTask: Task<Void, Never>?
     private let disposableBag = DisposableBag()
@@ -25,54 +25,35 @@ public class LobbyViewModel: ObservableObject, @unchecked Sendable {
             callType: callType,
             callId: callId
         )
-        if #available(iOS 14, *) {
-            camera = Camera()
-            imagesTask = Task {
-                await handleCameraPreviews()
-            }
-        } else {
-            camera = NSObject()
-        }
+        camera = .init(cameraPosition: call.state.callSettings.cameraPosition)
         loadCurrentMembers()
         subscribeForCallJoinUpdates()
         subscribeForCallLeaveUpdates()
-    }
-
-    @available(iOS 14, *)
-    func handleCameraPreviews() async {
-        let imageStream = (camera as? Camera)?.previewStream.dropFirst()
-            .map(\.image)
-
-        guard let imageStream = imageStream else { return }
-        for await image in imageStream {
-            await MainActor.run {
-                viewfinderImage = image
-            }
-        }
+        camera
+            .$image
+            .map(\.?.image)
+            .receive(on: DispatchQueue.main)
+            .assign(to: \.viewfinderImage, onWeak: self)
+            .store(in: disposableBag)
     }
-    
+
     public func startCamera(front: Bool) {
-        if #available(iOS 14, *) {
-            if front {
-                (camera as? Camera)?.switchCaptureDevice()
-            }
-            Task {
-                await(camera as? Camera)?.start()
-            }
+        Task {
+            await self.camera.start()
         }
     }
 
     public func stopCamera() {
-        imagesTask?.cancel()
-        imagesTask = nil
-        if #available(iOS 14, *) {
-            (camera as? Camera)?.stop()
+        camera.stop()
+        Task { @MainActor in
+            viewfinderImage = nil
         }
     }
 
     public func cleanUp() {
 
         disposableBag.removeAll()
+        camera.stop()
         Task {
             await callAudioRecorder.stopRecording()
         }
@@ -84,6 +65,12 @@
         } else {
             await callAudioRecorder.stopRecording()
         }
+
+        if callSettings.videoOn {
+            startCamera(front: callSettings.cameraPosition == .front)
+        } else {
+            stopCamera()
+        }
     }
 
     // MARK: - private
diff --git a/Sources/StreamVideoSwiftUI/Utils/Camera.swift b/Sources/StreamVideoSwiftUI/Utils/Camera.swift
deleted file mode 100644
index 87dec6403..000000000
--- a/Sources/StreamVideoSwiftUI/Utils/Camera.swift
+++ /dev/null
@@ -1,317 +0,0 @@
-//
-// Copyright © 2025 Stream.io Inc. All rights reserved.
-//
-
-import AVFoundation
-import CoreImage
-import os.log
-import StreamVideo
-import UIKit
-
-@available(iOS 14.0, *)
-class Camera: NSObject, @unchecked Sendable {
-    @Injected(\.orientationAdapter) private var orientationAdapter
-
-    private lazy var captureSession = AVCaptureSession()
-    private var isCaptureSessionConfigured = false
-    private var deviceInput: AVCaptureDeviceInput?
-    private var photoOutput: AVCapturePhotoOutput?
-    private var videoOutput: AVCaptureVideoDataOutput?
-    private var sessionQueue: DispatchQueue!
-    private lazy var frameProcessingQueue: DispatchQueue = DispatchQueue(
-        label: "io.getstream.\(String(describing: type(of: self))).videoDataOutputQueue",
-        target: .global(qos: .userInteractive)
-    )
-
-    private var allCaptureDevices: [AVCaptureDevice] {
-        AVCaptureDevice.DiscoverySession(
-            deviceTypes: [
-                .builtInTrueDepthCamera,
-                .builtInDualCamera,
-                .builtInDualWideCamera,
-                .builtInWideAngleCamera,
-                .builtInDualWideCamera
-            ],
-            mediaType: .video,
-            position: .unspecified
-        ).devices
-    }
-
-    private var frontCaptureDevices: [AVCaptureDevice] {
-        allCaptureDevices
-            .filter { $0.position == .front }
-    }
-
-    private var backCaptureDevices: [AVCaptureDevice] {
-        allCaptureDevices
-            .filter { $0.position == .back }
-    }
-
-    private var captureDevices: [AVCaptureDevice] {
-        var devices = [AVCaptureDevice]()
-        #if os(macOS) || (os(iOS) && targetEnvironment(macCatalyst))
-        devices += allCaptureDevices
-        #else
-        if let backDevice = backCaptureDevices.first {
-            devices += [backDevice]
-        }
-        if let frontDevice = frontCaptureDevices.first {
-            devices += [frontDevice]
-        }
-        #endif
-        return devices
-    }
-
-    private var availableCaptureDevices: [AVCaptureDevice] {
-        captureDevices
-            .filter { $0.isConnected }
-            .filter { !$0.isSuspended }
-    }
-
-    private var captureDevice: AVCaptureDevice? {
-        didSet {
-            guard let captureDevice = captureDevice else { return }
-            log.debug("Using capture device: \(captureDevice.localizedName)")
-            sessionQueue.async {
-                self.updateSessionForCaptureDevice(captureDevice)
-            }
-        }
-    }
-
-    private var canRequestCameraAccess: Bool {
-        CallSettings(videoOn: true).videoOn
-    }
-
-    var isRunning: Bool {
-        captureSession.isRunning
-    }
-
-    var isUsingFrontCaptureDevice: Bool {
-        guard let captureDevice = captureDevice else { return false }
-        return frontCaptureDevices.contains(captureDevice)
-    }
-
-    var isUsingBackCaptureDevice: Bool {
-        guard let captureDevice = captureDevice else { return false }
-        return backCaptureDevices.contains(captureDevice)
-    }
-
-    private var addToPreviewStream: ((CIImage) -> Void)?
-
-    var isPreviewPaused = false
-
-    lazy var previewStream: AsyncStream<CIImage> = {
-        AsyncStream { continuation in
-            addToPreviewStream = { [weak self] ciImage in
-                guard let self else { return }
-                if !self.isPreviewPaused {
-                    continuation.yield(ciImage)
-                }
-            }
-        }
-    }()
-
-    override init() {
-        super.init()
-        initialize()
-    }
-
-    deinit {
-        if let deviceInput {
-            captureSession.removeInput(deviceInput)
-        }
-
-        if let videoOutput {
-            captureSession.removeOutput(videoOutput)
-        }
-    }
-
-    private func initialize() {
-        sessionQueue = DispatchQueue(label: "session queue")
-
-        guard canRequestCameraAccess else {
-            return
-        }
-        captureDevice = availableCaptureDevices.first ?? AVCaptureDevice.default(for: .video)
-    }
-
-    private func configureCaptureSession(completionHandler: (_ success: Bool) -> Void) {
-        guard canRequestCameraAccess else {
-            completionHandler(false)
-            return
-        }
-        var success = false
-
-        captureSession.beginConfiguration()
-
-        defer {
-            self.captureSession.commitConfiguration()
-            completionHandler(success)
-        }
-
-        guard
-            let captureDevice = captureDevice,
-            let deviceInput = try? AVCaptureDeviceInput(device: captureDevice)
-        else {
-            log.error("Failed to obtain video input.")
-            return
-        }
-
-        captureSession.sessionPreset = AVCaptureSession.Preset.medium
-
-        let videoOutput = AVCaptureVideoDataOutput()
-        videoOutput.setSampleBufferDelegate(self, queue: frameProcessingQueue)
-
-        guard captureSession.canAddInput(deviceInput) else {
-            log.error("Unable to add device input to capture session.")
-            return
-        }
-
-        guard captureSession.canAddOutput(videoOutput) else {
-            log.error("Unable to add video output to capture session.")
-            return
-        }
-
-        captureSession.addInput(deviceInput)
-        captureSession.addOutput(videoOutput)
-
-        self.deviceInput = deviceInput
-        self.videoOutput = videoOutput
-
-        updateVideoOutputConnection()
-
-        isCaptureSessionConfigured = true
-
-        success = true
-    }
-
-    private func checkAuthorization() async -> Bool {
-        switch AVCaptureDevice.authorizationStatus(for: .video) {
-        case .authorized:
-            log.debug("Camera access authorized.")
-            return true
-        case .notDetermined:
-            log.debug("Camera access not determined.")
-            sessionQueue.suspend()
-            let status = await AVCaptureDevice.requestAccess(for: .video)
-            sessionQueue.resume()
-            return status
-        case .denied:
-            log.debug("Camera access denied.")
-            return false
-        case .restricted:
-            log.debug("Camera library access restricted.")
-            return false
-        @unknown default:
-            return false
-        }
-    }
-
-    private func deviceInputFor(device: AVCaptureDevice?) -> AVCaptureDeviceInput? {
-        guard let validDevice = device else { return nil }
-        do {
-            return try AVCaptureDeviceInput(device: validDevice)
-        } catch {
-            log.error("Error getting capture device input: \(error.localizedDescription)")
-            return nil
-        }
-    }
-
-    private func updateSessionForCaptureDevice(_ captureDevice: AVCaptureDevice) {
-        guard isCaptureSessionConfigured else { return }
-
-        captureSession.beginConfiguration()
-        defer { captureSession.commitConfiguration() }
-
-        for input in captureSession.inputs {
-            if let deviceInput = input as? AVCaptureDeviceInput {
-                captureSession.removeInput(deviceInput)
-            }
-        }
-
-        if let deviceInput = deviceInputFor(device: captureDevice) {
-            if !captureSession.inputs.contains(deviceInput), captureSession.canAddInput(deviceInput) {
-                captureSession.addInput(deviceInput)
-            }
-        }
-
-        updateVideoOutputConnection()
-    }
-
-    private func updateVideoOutputConnection() {
-        if let videoOutput = videoOutput, let videoOutputConnection = videoOutput.connection(with: .video) {
-            if videoOutputConnection.isVideoMirroringSupported {
-                videoOutputConnection.isVideoMirrored = isUsingFrontCaptureDevice
-            }
-        }
-    }
-
-    func start() async {
-        guard canRequestCameraAccess else {
-            return
-        }
-        let authorized = await checkAuthorization()
-        guard authorized else {
-            log.error("Camera access was not authorized.")
-            return
-        }
-
-        if isCaptureSessionConfigured {
-            if !captureSession.isRunning {
-                sessionQueue.async { [self] in
-                    self.captureSession.startRunning()
-                }
-            }
-            return
-        }
-
-        sessionQueue.async { [self] in
-            self.configureCaptureSession { success in
-                guard success else { return }
-                self.captureSession.startRunning()
-            }
-        }
-    }
-
-    func stop() {
-        guard canRequestCameraAccess else {
-            return
-        }
-        if captureSession.isRunning {
-            sessionQueue.async {
-                self.captureSession.stopRunning()
-            }
-        }
-    }
-
-    func switchCaptureDevice() {
-        guard canRequestCameraAccess else {
-            return
-        }
-        if let captureDevice = captureDevice, let index = availableCaptureDevices.firstIndex(of: captureDevice) {
-            let nextIndex = (index + 1) % availableCaptureDevices.count
-            self.captureDevice = availableCaptureDevices[nextIndex]
-        } else {
-            captureDevice = AVCaptureDevice.default(for: .video)
-        }
-    }
-}
-
-@available(iOS 14.0, *)
-extension Camera: AVCaptureVideoDataOutputSampleBufferDelegate {
-
-    func captureOutput(
-        _ output: AVCaptureOutput,
-        didOutput sampleBuffer: CMSampleBuffer,
-        from connection: AVCaptureConnection
-    ) {
-        guard let pixelBuffer = sampleBuffer.imageBuffer else { return }
-
-        let currentOrientation = orientationAdapter.orientation.captureVideoOrientation
-        if connection.isVideoOrientationSupported, connection.videoOrientation != currentOrientation {
-            connection.videoOrientation = currentOrientation
-        }
-
-        addToPreviewStream?(CIImage(cvPixelBuffer: pixelBuffer))
-    }
-}
diff --git a/Sources/StreamVideoSwiftUI/Utils/CameraAdapter.swift b/Sources/StreamVideoSwiftUI/Utils/CameraAdapter.swift
new file mode 100644
index 000000000..2e38b032b
--- /dev/null
+++ b/Sources/StreamVideoSwiftUI/Utils/CameraAdapter.swift
@@ -0,0 +1,155 @@
+//
+// Copyright © 2025 Stream.io Inc. All rights reserved.
+//
+
+import AVFoundation
+import CoreImage
+import Foundation
+import StreamVideo
+
+final class CameraAdapter: NSObject, @unchecked Sendable {
+    @Injected(\.captureDeviceProvider) private var captureDeviceProvider
+    @Injected(\.orientationAdapter) private var orientationAdapter
+
+    @Published private(set) var image: CIImage?
+
+    private var cameraPosition: CameraPosition
+    private var input: AVCaptureDeviceInput?
+
+    private let captureSession = AVCaptureSession()
+    private let videoOutput = AVCaptureVideoDataOutput()
+    private let frameProcessingQueue = DispatchQueue(label: "frame.processing.queue")
+    private let accessQueue: UnfairQueue = .init()
+    private let disposableBag = DisposableBag()
+
+    init(cameraPosition: CameraPosition) {
+        self.cameraPosition = cameraPosition
+        super.init()
+
+        orientationAdapter
+            .$orientation
+            .map(\.captureVideoOrientation)
+            .sink { [weak self] in self?.didUpdate($0) }
+            .store(in: disposableBag)
+    }
+
+    deinit {
+        stop()
+    }
+
+    func start() async {
+        guard
+            await requestAccessIfRequired()
+        else {
+            return
+        }
+        configureCaptureSession()
+    }
+
+    func stop() {
+        accessQueue.sync {
+            guard
+                captureSession.isRunning
+            else {
+                return
+            }
+            captureSession.beginConfiguration()
+            image = nil
+            if let input {
+                captureSession.removeInput(input)
+            }
+            input = nil
+            captureSession.removeOutput(videoOutput)
+            videoOutput.setSampleBufferDelegate(nil, queue: nil)
+
+            captureSession.commitConfiguration()
+            captureSession.stopRunning()
+        }
+    }
+
+    func updateCameraPosition(_ cameraPosition: CameraPosition) async {
+        guard
+            cameraPosition != self.cameraPosition
+        else {
+            return
+        }
+        stop()
+        self.cameraPosition = cameraPosition
+        await start()
+    }
+
+    // MARK: - Private Helpers
+
+    private func configureCaptureSession() {
+        accessQueue.sync {
+            guard
+                !captureSession.isRunning,
+                let device = captureDeviceProvider.device(for: cameraPosition) as? AVCaptureDevice,
+                let input = try? AVCaptureDeviceInput(device: device)
+            else {
+                return
+            }
+            captureSession.beginConfiguration()
+            self.input = input
+
+            captureSession.sessionPreset = AVCaptureSession.Preset.medium
+            videoOutput.setSampleBufferDelegate(self, queue: frameProcessingQueue)
+
+            captureSession.addInput(input)
+            captureSession.addOutput(videoOutput)
+
+            if
+                let videoOutputConnection = videoOutput.connection(with: .video),
+                videoOutputConnection.isVideoMirroringSupported {
+                videoOutputConnection.isVideoMirrored = cameraPosition == .front
+            }
+
+            captureSession.commitConfiguration()
+
+            didUpdate(orientationAdapter.orientation.captureVideoOrientation)
+            captureSession.startRunning()
+        }
+    }
+
+    private func requestAccessIfRequired() async -> Bool {
+        switch AVCaptureDevice.authorizationStatus(for: .video) {
+        case .authorized:
+            return true
+        case .notDetermined:
+            let status = await AVCaptureDevice.requestAccess(for: .video)
+            return status
+        case .denied:
+            return false
+        case .restricted:
+            return false
+        @unknown default:
+            return false
+        }
+    }
+
+    private func didUpdate(_ orientation: AVCaptureVideoOrientation) {
+        guard
+            let connection = videoOutput.connection(with: .video),
+            connection.videoOrientation != orientation
+        else {
+            return
+        }
+
+        connection.videoOrientation = orientation
+    }
+}
+
+extension CameraAdapter: AVCaptureVideoDataOutputSampleBufferDelegate {
+
+    func captureOutput(
+        _ output: AVCaptureOutput,
+        didOutput sampleBuffer: CMSampleBuffer,
+        from connection: AVCaptureConnection
+    ) {
+        guard let pixelBuffer = sampleBuffer.imageBuffer else { return }
+
+        Task { @MainActor in
+            self.image = CIImage(cvPixelBuffer: pixelBuffer)
+        }
+    }
+}
diff --git a/StreamVideo.xcodeproj/project.pbxproj b/StreamVideo.xcodeproj/project.pbxproj
index d5bd5697d..96ac6f7c2 100644
--- a/StreamVideo.xcodeproj/project.pbxproj
+++ b/StreamVideo.xcodeproj/project.pbxproj
@@ -1278,7 +1278,7 @@
 		849A04DE2D198A0C004C33F2 /* StopLiveRequest.swift in Sources */ = {isa = PBXBuildFile; fileRef = 849A04DA2D198A0C004C33F2 /* StopLiveRequest.swift */; };
 		849A05122D199EE2004C33F2 /* StreamAppStateAdapter_Tests.swift in Sources */ = {isa = PBXBuildFile; fileRef = 40B48C0F2D14B901002C4EAB /* StreamAppStateAdapter_Tests.swift */; };
 		849EDA8B297AFCC80072A12D /* PreJoiningView.swift in Sources */ = {isa = PBXBuildFile; fileRef = 849EDA8A297AFCC80072A12D /* PreJoiningView.swift */; };
-		849EDA8D297AFD840072A12D /* Camera.swift in Sources */ = {isa = PBXBuildFile; fileRef = 849EDA8C297AFD840072A12D /* Camera.swift */; };
+		849EDA8D297AFD840072A12D /* CameraAdapter.swift in Sources */ = {isa = PBXBuildFile; fileRef = 849EDA8C297AFD840072A12D /* CameraAdapter.swift */; };
 		849EDA8F297AFE1C0072A12D /* LobbyViewModel.swift in Sources */ = {isa = PBXBuildFile; fileRef = 849EDA8E297AFE1C0072A12D /* LobbyViewModel.swift */; };
 		84A4DCBB2A41DC6E00B1D1BF /* AsyncAssert.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84A4DCBA2A41DC6E00B1D1BF /* AsyncAssert.swift */; };
 		84A4DDBD2A3B35030097F3E9 /* LocationFetcher.swift in Sources */ = {isa = PBXBuildFile; fileRef = 84A4DDBC2A3B35030097F3E9 /* LocationFetcher.swift */; };
@@ -2719,7 +2719,7 @@
 		849A04DA2D198A0C004C33F2 /* StopLiveRequest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StopLiveRequest.swift; sourceTree = "<group>"; };
 		849A04DB2D198A0C004C33F2 /* StopTranscriptionRequest.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = StopTranscriptionRequest.swift; sourceTree = "<group>"; };
 		849EDA8A297AFCC80072A12D /* PreJoiningView.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = PreJoiningView.swift; sourceTree = "<group>"; };
-		849EDA8C297AFD840072A12D /* Camera.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = Camera.swift; sourceTree = "<group>"; };
+		849EDA8C297AFD840072A12D /* CameraAdapter.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = CameraAdapter.swift; sourceTree = "<group>"; };
 		849EDA8E297AFE1C0072A12D /* LobbyViewModel.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LobbyViewModel.swift; sourceTree = "<group>"; };
 		84A4DCBA2A41DC6E00B1D1BF /* AsyncAssert.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = AsyncAssert.swift; sourceTree = "<group>"; };
 		84A4DDBC2A3B35030097F3E9 /* LocationFetcher.swift */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.swift; path = LocationFetcher.swift; sourceTree = "<group>"; };
@@ -6626,7 +6626,7 @@
 				846FBE8C28AAEBBC00147F6E /* SearchBar.swift */,
 				846FBE8E28AAEC5D00147F6E /* KeyboardReadable.swift */,
 				8458B703290ACF2A00F8E487 /* CallSoundsPlayer.swift */,
-				849EDA8C297AFD840072A12D /* Camera.swift */,
+				849EDA8C297AFD840072A12D /* CameraAdapter.swift */,
 				8469593D29BF214700134EA0 /* ViewExtensions.swift */,
 				846E4B0429D2D3D3003733AB /* ModifiedContent.swift */,
 				8479F83329C09EF1009ECE37 /* UserAvatar.swift */,
@@ -8569,7 +8569,7 @@
 				846FBE8D28AAEBBC00147F6E /* SearchBar.swift in Sources */,
 				40245F3E2BE2709800FCF075 /* StatelessParticipantsListButton.swift in Sources */,
 				846E4B0529D2D3D3003733AB /* ModifiedContent.swift in Sources */,
-				849EDA8D297AFD840072A12D /* Camera.swift in Sources */,
+				849EDA8D297AFD840072A12D /* CameraAdapter.swift in Sources */,
 				844299362940A17F0037232A /* ZoomableScrollView.swift in Sources */,
 				8442993A29422BEA0037232A /* BackportStateObject.swift in Sources */,
 				4072A5862DAE821C00108E8F /* PictureInPictureVideoRendererView.swift in Sources */,
diff --git a/StreamVideoSwiftUITests/Utils/ParticipantEventResetAdapter/ParticipantEventResetAdapter_Tests.swift b/StreamVideoSwiftUITests/Utils/ParticipantEventResetAdapter/ParticipantEventResetAdapter_Tests.swift
index 3bc8be191..2d4f1f2aa 100644
--- a/StreamVideoSwiftUITests/Utils/ParticipantEventResetAdapter/ParticipantEventResetAdapter_Tests.swift
+++ b/StreamVideoSwiftUITests/Utils/ParticipantEventResetAdapter/ParticipantEventResetAdapter_Tests.swift
@@ -69,7 +69,7 @@ final class ParticipantEventResetAdapter_Tests: XCTestCase, @unchecked Sendable
                 XCTAssertNotNil(self.viewModel.participantEvent)
 
                 await self.wait(for: self.interval)
-                XCTAssertNil(self.viewModel.participantEvent)
+                await self.fulfilmentInMainActor { self.viewModel.participantEvent == nil }
 
             try await group.waitForAll()
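
Usage sketch (not part of the change above): the visibility changes in AVCaptureDevice+OutputFormat.swift and StreamCaptureDeviceProvider.swift make the capture-device helpers callable from outside the StreamVideo module, the same way the new CameraAdapter uses them internally. The sketch below assumes the injected captureDeviceProvider resolves to a real AVCaptureDevice (the same `as? AVCaptureDevice` cast CameraAdapter.configureCaptureSession() performs) and that device(for:) accepts a CameraPosition as it does in CameraAdapter; the FrontCameraFormatPicker name and the 1280x720 / 30 fps targets are illustrative values only.

import AVFoundation
import StreamVideo

/// Illustrative sketch: picks a capture format close to 720p at 30 fps for the
/// front camera, using the now-public `captureDeviceProvider` injected value and
/// the now-public `outputFormat(preferredDimensions:preferredFrameRate:)` helper.
struct FrontCameraFormatPicker {

    @Injected(\.captureDeviceProvider) private var captureDeviceProvider

    func pickFormat() -> AVCaptureDevice.Format? {
        // Mirrors CameraAdapter.configureCaptureSession(): the provider hands back a
        // CaptureDeviceProtocol value that is expected to be an AVCaptureDevice.
        guard
            let device = captureDeviceProvider.device(for: CameraPosition.front) as? AVCaptureDevice
        else {
            return nil
        }

        // The preferred dimensions and frame rate below are arbitrary example values.
        return device.outputFormat(
            preferredDimensions: CMVideoDimensions(width: 1280, height: 720),
            preferredFrameRate: 30
        )
    }
}

If no suitable format exists, pickFormat() returns nil, mirroring the optional return type of outputFormat(preferredDimensions:preferredFrameRate:).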