From 84202240741a186dd31cab279320478318fd6710 Mon Sep 17 00:00:00 2001 From: Abhash Kumar Singh Date: Thu, 18 Apr 2024 13:11:45 -0700 Subject: [PATCH 01/10] feat: add no light challenge implementation (#127) * feat: add no light challenge implementation * update package.swift for CI build * Fix unit tests * Address review comments --- .../xcshareddata/swiftpm/Package.resolved | 8 +- .../HostApp/Views/ExampleLivenessView.swift | 3 +- Package.resolved | 8 +- Package.swift | 3 +- .../BlazeFace/DetectedFace.swift | 6 +- .../FaceDetectorShortRange+Model.swift | 15 +++- .../FaceDetection/FaceDetector.swift | 5 ++ .../Views/GetReadyPage/GetReadyPageView.swift | 46 ++++++++---- .../InstructionContainerView.swift | 27 +++++-- .../Liveness/FaceLivenessDetectionView.swift | 75 ++++++++++++++----- ...ViewModel+FaceDetectionResultHandler.swift | 34 ++++++--- ...ctionViewModel+VideoSegmentProcessor.swift | 4 +- .../FaceLivenessDetectionViewModel.swift | 38 ++++++++-- .../FaceLivenessViewControllerPresenter.swift | 1 + .../Views/Liveness/LivenessStateMachine.swift | 6 ++ .../Liveness/LivenessViewController.swift | 7 ++ .../Views/LoadingPage/LoadingPageView.swift | 27 +++++++ .../FaceLivenessTests/DetectedFaceTests.swift | 26 ++++++- Tests/FaceLivenessTests/LivenessTests.swift | 33 ++++++-- .../MockLivenessService.swift | 12 ++- 20 files changed, 305 insertions(+), 79 deletions(-) create mode 100644 Sources/FaceLiveness/Views/LoadingPage/LoadingPageView.swift diff --git a/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved index 9a475d8f..29d9d34d 100644 --- a/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved +++ b/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved @@ -5,8 +5,8 @@ "kind" : "remoteSourceControl", "location" : "https://github.com/aws-amplify/amplify-swift", "state" : { - "revision" : "5b603ff7cfe1b03d753ae7ff9664316e6447f0ae", - "version" : "2.46.1" + "branch" : "feat/no-light-support", + "revision" : "7c1fa2f7a766208f5af69ca8dce5fd02e6de4db6" } }, { @@ -50,8 +50,8 @@ "kind" : "remoteSourceControl", "location" : "https://github.com/stephencelis/SQLite.swift.git", "state" : { - "revision" : "a95fc6df17d108bd99210db5e8a9bac90fe984b8", - "version" : "0.15.3" + "revision" : "5f5ad81ac0d0a0f3e56e39e646e8423c617df523", + "version" : "0.13.2" } }, { diff --git a/HostApp/HostApp/Views/ExampleLivenessView.swift b/HostApp/HostApp/Views/ExampleLivenessView.swift index 5f6868b6..7ec0c011 100644 --- a/HostApp/HostApp/Views/ExampleLivenessView.swift +++ b/HostApp/HostApp/Views/ExampleLivenessView.swift @@ -22,7 +22,8 @@ struct ExampleLivenessView: View { case .liveness: FaceLivenessDetectorView( sessionID: viewModel.sessionID, - region: "us-east-1", + // TODO: Change before merging to main + region: "us-west-2", isPresented: Binding( get: { viewModel.presentationState == .liveness }, set: { _ in } diff --git a/Package.resolved b/Package.resolved index e4e6e13c..baf17888 100644 --- a/Package.resolved +++ b/Package.resolved @@ -5,8 +5,8 @@ "kind" : "remoteSourceControl", "location" : "https://github.com/aws-amplify/amplify-swift", "state" : { - "revision" : "5b603ff7cfe1b03d753ae7ff9664316e6447f0ae", - "version" : "2.46.1" + "branch" : "feat/no-light-support", + "revision" : "7c1fa2f7a766208f5af69ca8dce5fd02e6de4db6" } }, { @@ -50,8 +50,8 @@ "kind" : "remoteSourceControl", "location" : 
"https://github.com/stephencelis/SQLite.swift.git", "state" : { - "revision" : "a95fc6df17d108bd99210db5e8a9bac90fe984b8", - "version" : "0.15.3" + "revision" : "5f5ad81ac0d0a0f3e56e39e646e8423c617df523", + "version" : "0.13.2" } }, { diff --git a/Package.swift b/Package.swift index 4e2d2e80..446f12c0 100644 --- a/Package.swift +++ b/Package.swift @@ -13,7 +13,8 @@ let package = Package( targets: ["FaceLiveness"]), ], dependencies: [ - .package(url: "https://github.com/aws-amplify/amplify-swift", exact: "2.46.1") + // TODO: Change this before merge to main + .package(url: "https://github.com/aws-amplify/amplify-swift", branch: "feat/no-light-support") ], targets: [ .target( diff --git a/Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift b/Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift index d6879848..1d62b263 100644 --- a/Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift +++ b/Sources/FaceLiveness/FaceDetection/BlazeFace/DetectedFace.swift @@ -6,6 +6,7 @@ // import Foundation +@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin struct DetectedFace { var boundingBox: CGRect @@ -19,7 +20,8 @@ struct DetectedFace { let confidence: Float - func boundingBoxFromLandmarks(ovalRect: CGRect) -> CGRect { + func boundingBoxFromLandmarks(ovalRect: CGRect, + ovalMatchChallenge: FaceLivenessSession.OvalMatchChallenge) -> CGRect { let alpha = 2.0 let gamma = 1.8 let ow = (alpha * pupilDistance + gamma * faceHeight) / 2 @@ -34,7 +36,7 @@ struct DetectedFace { } let faceWidth = ow - let faceHeight = 1.618 * faceWidth + let faceHeight = ovalMatchChallenge.oval.heightWidthRatio * faceWidth let faceBoxBottom = boundingBox.maxY let faceBoxTop = faceBoxBottom - faceHeight let faceBoxLeft = min(cx - ow / 2, rightEar.x) diff --git a/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift b/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift index d9430720..100f0418 100644 --- a/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift +++ b/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift @@ -12,6 +12,7 @@ import Accelerate import CoreGraphics import CoreImage import VideoToolbox +@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin enum FaceDetectorShortRange {} @@ -33,11 +34,16 @@ extension FaceDetectorShortRange { ) } + weak var faceDetectionSessionConfiguration: FaceDetectionSessionConfigurationWrapper? weak var detectionResultHandler: FaceDetectionResultHandler? 
func setResultHandler(detectionResultHandler: FaceDetectionResultHandler) { self.detectionResultHandler = detectionResultHandler } + + func setFaceDetectionSessionConfigurationWrapper(configuration: FaceDetectionSessionConfigurationWrapper) { + self.faceDetectionSessionConfiguration = configuration + } func detectFaces(from buffer: CVPixelBuffer) { let faces = prediction(for: buffer) @@ -105,10 +111,17 @@ extension FaceDetectorShortRange { count: confidenceScoresCapacity ) ) + + let blazeFaceDetectionThreshold: Float + if let sessionConfiguration = faceDetectionSessionConfiguration?.sessionConfiguration { + blazeFaceDetectionThreshold = Float(sessionConfiguration.ovalMatchChallenge.faceDetectionThreshold) + } else { + blazeFaceDetectionThreshold = confidenceScoreThreshold + } var passingConfidenceScoresIndices = confidenceScores .enumerated() - .filter { $0.element >= confidenceScoreThreshold } + .filter { $0.element >= blazeFaceDetectionThreshold} .sorted(by: { $0.element > $1.element }) diff --git a/Sources/FaceLiveness/FaceDetection/FaceDetector.swift b/Sources/FaceLiveness/FaceDetection/FaceDetector.swift index 3801eeab..1afb90c1 100644 --- a/Sources/FaceLiveness/FaceDetection/FaceDetector.swift +++ b/Sources/FaceLiveness/FaceDetection/FaceDetector.swift @@ -6,6 +6,7 @@ // import AVFoundation +@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin protocol FaceDetector { func detectFaces(from buffer: CVPixelBuffer) @@ -16,6 +17,10 @@ protocol FaceDetectionResultHandler: AnyObject { func process(newResult: FaceDetectionResult) } +protocol FaceDetectionSessionConfigurationWrapper: AnyObject { + var sessionConfiguration: FaceLivenessSession.SessionConfiguration? { get } +} + enum FaceDetectionResult { case noFace case singleFace(DetectedFace) diff --git a/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift b/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift index 00ecb9b7..dadb2076 100644 --- a/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift +++ b/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift @@ -6,35 +6,49 @@ // import SwiftUI +@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin struct GetReadyPageView: View { let beginCheckButtonDisabled: Bool let onBegin: () -> Void - + let challenge: Challenge + init( onBegin: @escaping () -> Void, - beginCheckButtonDisabled: Bool = false + beginCheckButtonDisabled: Bool = false, + challenge: Challenge ) { self.onBegin = onBegin self.beginCheckButtonDisabled = beginCheckButtonDisabled + self.challenge = challenge } var body: some View { VStack { ZStack { CameraPreviewView() - VStack { - WarningBox( - titleText: LocalizedStrings.get_ready_photosensitivity_title, - bodyText: LocalizedStrings.get_ready_photosensitivity_description, - popoverContent: { photosensitivityWarningPopoverContent } - ) - .accessibilityElement(children: .combine) - Text(LocalizedStrings.preview_center_your_face_text) - .font(.title) - .multilineTextAlignment(.center) - Spacer() - }.padding() + switch self.challenge.type { + case .faceMovementChallenge: + VStack { + Text(LocalizedStrings.preview_center_your_face_text) + .font(.title) + .multilineTextAlignment(.center) + Spacer() + }.padding() + case . 
faceMovementAndLightChallenge: + VStack { + WarningBox( + titleText: LocalizedStrings.get_ready_photosensitivity_title, + bodyText: LocalizedStrings.get_ready_photosensitivity_description, + popoverContent: { photosensitivityWarningPopoverContent } + ) + .accessibilityElement(children: .combine) + Text(LocalizedStrings.preview_center_your_face_text) + .font(.title) + .multilineTextAlignment(.center) + Spacer() + }.padding() + } } beginCheckButton } @@ -72,6 +86,8 @@ struct GetReadyPageView: View { struct GetReadyPageView_Previews: PreviewProvider { static var previews: some View { - GetReadyPageView(onBegin: {}) + GetReadyPageView(onBegin: {}, + challenge: .init(version: "2.0.0", + type: .faceMovementAndLightChallenge)) } } diff --git a/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift b/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift index ff02a3d6..5ed45ae7 100644 --- a/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift +++ b/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift @@ -7,6 +7,7 @@ import SwiftUI import Combine +@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin struct InstructionContainerView: View { @ObservedObject var viewModel: FaceLivenessDetectionViewModel @@ -97,13 +98,29 @@ struct InstructionContainerView: View { argument: LocalizedStrings.challenge_verifying ) } - case .faceMatched: + case .completedNoLightCheck: InstructionView( - text: LocalizedStrings.challenge_instruction_hold_still, - backgroundColor: .livenessPrimaryBackground, - textColor: .livenessPrimaryLabel, - font: .title + text: LocalizedStrings.challenge_verifying, + backgroundColor: .livenessBackground ) + .onAppear { + UIAccessibility.post( + notification: .announcement, + argument: LocalizedStrings.challenge_verifying + ) + } + case .faceMatched: + if let challenge = viewModel.challenge, + case .faceMovementAndLightChallenge = challenge.type { + InstructionView( + text: LocalizedStrings.challenge_instruction_hold_still, + backgroundColor: .livenessPrimaryBackground, + textColor: .livenessPrimaryLabel, + font: .title + ) + } else { + EmptyView() + } default: EmptyView() } diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift index 81eacfe9..dd009d71 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift @@ -16,10 +16,11 @@ import Amplify public struct FaceLivenessDetectorView: View { @StateObject var viewModel: FaceLivenessDetectionViewModel @Binding var isPresented: Bool - @State var displayState: DisplayState = .awaitingCameraPermission + @State var displayState: DisplayState = .awaitingChallengeType @State var displayingCameraPermissionsNeededAlert = false let disableStartView: Bool + let facelivenessDetectorViewId: String let onCompletion: (Result) -> Void let sessionTask: Task @@ -32,6 +33,8 @@ public struct FaceLivenessDetectorView: View { isPresented: Binding, onCompletion: @escaping (Result) -> Void ) { + let viewId = UUID().uuidString + self.facelivenessDetectorViewId = viewId self.disableStartView = disableStartView self._isPresented = isPresented self.onCompletion = onCompletion @@ -41,7 +44,8 @@ public struct FaceLivenessDetectorView: View { withID: sessionID, credentialsProvider: credentialsProvider, region: region, - options: .init(), + options: .init(faceLivenessDetectorViewId: viewId, + preCheckViewEnabled: 
!disableStartView), completion: map(detectionCompletion: onCompletion) ) return session @@ -82,6 +86,8 @@ public struct FaceLivenessDetectorView: View { sessionID: sessionID ) ) + + faceDetector.setFaceDetectionSessionConfigurationWrapper(configuration: viewModel) } init( @@ -93,6 +99,8 @@ public struct FaceLivenessDetectorView: View { onCompletion: @escaping (Result) -> Void, captureSession: LivenessCaptureSession ) { + let viewId = UUID().uuidString + self.facelivenessDetectorViewId = viewId self.disableStartView = disableStartView self._isPresented = isPresented self.onCompletion = onCompletion @@ -102,7 +110,8 @@ public struct FaceLivenessDetectorView: View { withID: sessionID, credentialsProvider: credentialsProvider, region: region, - options: .init(), + options: .init(faceLivenessDetectorViewId: viewId, + preCheckViewEnabled: !disableStartView), completion: map(detectionCompletion: onCompletion) ) return session @@ -126,32 +135,44 @@ public struct FaceLivenessDetectorView: View { public var body: some View { switch displayState { - case .awaitingLivenessSession: + case .awaitingChallengeType: + LoadingPageView() + .onAppear { + Task { + do { + let session = try await sessionTask.value + viewModel.livenessService = session + viewModel.registerServiceEvents(onChallengeTypeReceived: { challenge in + self.displayState = DisplayState.awaitingLivenessSession(challenge) + }) + viewModel.initializeLivenessStream() + } catch { + throw FaceLivenessDetectionError.accessDenied + } + } + } + case .awaitingLivenessSession(let challenge): Color.clear .onAppear { Task { do { let newState = disableStartView ? DisplayState.displayingLiveness - : DisplayState.displayingGetReadyView + : DisplayState.displayingGetReadyView(challenge) guard self.displayState != newState else { return } - let session = try await sessionTask.value - viewModel.livenessService = session - viewModel.registerServiceEvents() self.displayState = newState - } catch { - throw FaceLivenessDetectionError.accessDenied } } } - case .displayingGetReadyView: + case .displayingGetReadyView(let challenge): GetReadyPageView( onBegin: { guard displayState != .displayingLiveness else { return } displayState = .displayingLiveness }, - beginCheckButtonDisabled: false + beginCheckButtonDisabled: false, + challenge: challenge ) .onAppear { DispatchQueue.main.async { @@ -217,7 +238,8 @@ public struct FaceLivenessDetectorView: View { for: .video, completionHandler: { accessGranted in guard accessGranted == true else { return } - displayState = .awaitingLivenessSession + guard let challenge = viewModel.challenge else { return } + displayState = .awaitingLivenessSession(challenge) } ) @@ -235,18 +257,37 @@ public struct FaceLivenessDetectorView: View { case .restricted, .denied: alertCameraAccessNeeded() case .authorized: - displayState = .awaitingLivenessSession + guard let challenge = viewModel.challenge else { return } + displayState = .awaitingLivenessSession(challenge) @unknown default: break } } } -enum DisplayState { - case awaitingLivenessSession - case displayingGetReadyView +enum DisplayState: Equatable { + case awaitingChallengeType + case awaitingLivenessSession(Challenge) + case displayingGetReadyView(Challenge) case displayingLiveness case awaitingCameraPermission + + static func == (lhs: DisplayState, rhs: DisplayState) -> Bool { + switch (lhs, rhs) { + case (.awaitingChallengeType, .awaitingChallengeType): + return true + case (let .awaitingLivenessSession(c1), let .awaitingLivenessSession(c2)): + return c1.type == c2.type && 
c1.version == c2.version + case (let .displayingGetReadyView(c1), let .displayingGetReadyView(c2)): + return c1.type == c2.type && c1.version == c2.version + case (.displayingLiveness, .displayingLiveness): + return true + case (.awaitingCameraPermission, .awaitingCameraPermission): + return true + default: + return false + } + } } enum InstructionState { diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift index 99e92ee2..edb96e80 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift @@ -28,14 +28,15 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler { } case .singleFace(let face): var normalizedFace = normalizeFace(face) - normalizedFace.boundingBox = normalizedFace.boundingBoxFromLandmarks(ovalRect: ovalRect) + guard let sessionConfiguration = sessionConfiguration else { return } + normalizedFace.boundingBox = normalizedFace.boundingBoxFromLandmarks(ovalRect: ovalRect, + ovalMatchChallenge: sessionConfiguration.ovalMatchChallenge) switch livenessState.state { case .pendingFacePreparedConfirmation: - if face.faceDistance <= initialFaceDistanceThreshold { + if face.faceDistance <= sessionConfiguration.ovalMatchChallenge.face.distanceThreshold { DispatchQueue.main.async { self.livenessState.awaitingRecording() - self.initializeLivenessStream() } DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { self.livenessState.beginRecording() @@ -55,7 +56,6 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler { ) }) case .recording(ovalDisplayed: true): - guard let sessionConfiguration = sessionConfiguration else { return } let instruction = faceInOvalMatching.faceMatchState( for: normalizedFace.boundingBox, in: ovalRect, @@ -64,18 +64,18 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler { handleInstruction( instruction, - colorSequences: sessionConfiguration.colorChallenge.colors + colorSequences: sessionConfiguration.colorChallenge?.colors ) case .awaitingFaceInOvalMatch: - guard let sessionConfiguration = sessionConfiguration else { return } let instruction = faceInOvalMatching.faceMatchState( for: normalizedFace.boundingBox, in: ovalRect, challengeConfig: sessionConfiguration.ovalMatchChallenge ) + handleInstruction( instruction, - colorSequences: sessionConfiguration.colorChallenge.colors + colorSequences: sessionConfiguration.colorChallenge?.colors ) default: break @@ -104,16 +104,30 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler { func handleInstruction( _ instruction: Instructor.Instruction, - colorSequences: [FaceLivenessSession.DisplayColor] + colorSequences: [FaceLivenessSession.DisplayColor]? 
) { DispatchQueue.main.async { switch instruction { case .match: self.livenessState.faceMatched() self.faceMatchedTimestamp = Date().timestampMilliseconds - DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { - self.livenessViewControllerDelegate?.displayFreshness(colorSequences: colorSequences) + + // next step after face match + switch self.challenge?.type { + case .faceMovementAndLightChallenge: + if let colorSequences = colorSequences { + DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { + self.livenessViewControllerDelegate?.displayFreshness(colorSequences: colorSequences) + } + } + case .faceMovementChallenge: + DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { + self.livenessViewControllerDelegate?.completeNoLightCheck() + } + default: + break } + let generator = UINotificationFeedbackGenerator() generator.notificationOccurred(.success) self.noFitStartTime = nil diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+VideoSegmentProcessor.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+VideoSegmentProcessor.swift index c2ed2b39..d2f88343 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+VideoSegmentProcessor.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+VideoSegmentProcessor.swift @@ -11,8 +11,8 @@ extension FaceLivenessDetectionViewModel: VideoSegmentProcessor { func process(initalSegment: Data, currentSeparableSegment: Data) { let chunk = chunk(initial: initalSegment, current: currentSeparableSegment) sendVideoEvent(data: chunk, videoEventTime: .zero) - if !hasSentFinalVideoEvent, - case .completedDisplayingFreshness = livenessState.state { + if !hasSentFinalVideoEvent && + (livenessState.state == .completedDisplayingFreshness || livenessState.state == .completedNoLightCheck) { DispatchQueue.global(qos: .default).asyncAfter(deadline: .now() + 0.9) { self.sendFinalVideoEvent() } diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift index 7f834cb2..8c6ae829 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift @@ -33,6 +33,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { var hasSentFirstVideo = false var layerRectConverted: (CGRect) -> CGRect = { $0 } var sessionConfiguration: FaceLivenessSession.SessionConfiguration? + var challenge: Challenge? var normalizeFace: (DetectedFace) -> DetectedFace = { $0 } var provideSingleFrame: ((UIImage) -> Void)? 
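A compact sketch of the post-match branching this patch introduces in handleInstruction: the freshness color sequence only exists for the light challenge, which is why colorSequences becomes optional, while the movement-only challenge completes the no-light check instead. ChallengeKind and the closure parameters below are illustrative, not the SDK's types.

enum ChallengeKind {
    case faceMovement            // no-light flow: finish without freshness colors
    case faceMovementAndLight    // original flow: display the freshness color sequence
}

func nextStepAfterFaceMatch(
    challenge: ChallengeKind,
    colorSequences: [String]?,   // nil when the server issued the no-light challenge
    displayFreshness: ([String]) -> Void,
    completeNoLightCheck: () -> Void
) {
    switch challenge {
    case .faceMovementAndLight:
        if let colorSequences = colorSequences {
            displayFreshness(colorSequences)
        }
    case .faceMovement:
        completeNoLightCheck()
    }
}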
var cameraViewRect = CGRect.zero @@ -89,7 +90,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { NotificationCenter.default.removeObserver(self) } - func registerServiceEvents() { + func registerServiceEvents(onChallengeTypeReceived: @escaping (Challenge) -> Void) { livenessService?.register(onComplete: { [weak self] reason in self?.stopRecording() @@ -112,6 +113,13 @@ class FaceLivenessDetectionViewModel: ObservableObject { }, on: .challenge ) + + livenessService?.register( + listener: { [weak self] _challenge in + self?.challenge = _challenge + onChallengeTypeReceived(_challenge) + }, + on: .challenge) } @objc func willResignActive(_ notification: Notification) { @@ -178,7 +186,11 @@ class FaceLivenessDetectionViewModel: ObservableObject { func initializeLivenessStream() { do { - try livenessService?.initializeLivenessStream( + guard let livenessSession = livenessService as? FaceLivenessSession else { + throw FaceLivenessDetectionError.unknown + } + + try livenessSession.initializeLivenessStream( withSessionID: sessionID, userAgent: UserAgentValues.standard().userAgentString ) @@ -226,6 +238,8 @@ class FaceLivenessDetectionViewModel: ObservableObject { videoStartTime: UInt64 ) { guard initialClientEvent == nil else { return } + guard let challenge else { return } + videoChunker.start() let initialFace = FaceDetection( @@ -243,7 +257,9 @@ class FaceLivenessDetectionViewModel: ObservableObject { do { try livenessService?.send( - .initialFaceDetected(event: _initialClientEvent), + .initialFaceDetected(event: _initialClientEvent, + challenge: .init(version: challenge.version, + type: challenge.type)), eventDate: { .init() } ) } catch { @@ -261,7 +277,8 @@ class FaceLivenessDetectionViewModel: ObservableObject { guard let sessionConfiguration, let initialClientEvent, - let faceMatchedTimestamp + let faceMatchedTimestamp, + let challenge else { return } let finalClientEvent = FinalClientEvent( @@ -275,7 +292,9 @@ class FaceLivenessDetectionViewModel: ObservableObject { do { try livenessService?.send( - .final(event: finalClientEvent), + .final(event: finalClientEvent, + challenge: .init(version: challenge.version, + type: challenge.type)), eventDate: { .init() } ) @@ -310,6 +329,13 @@ class FaceLivenessDetectionViewModel: ObservableObject { self.faceGuideRect = faceGuide } } + + func completeNoLightCheck(faceGuide: CGRect) { + DispatchQueue.main.async { + self.livenessState.completedNoLightCheck() + self.faceGuideRect = faceGuide + } + } func sendVideoEvent(data: Data, videoEventTime: UInt64) { guard !hasSentFinalVideoEvent else { return } @@ -362,3 +388,5 @@ class FaceLivenessDetectionViewModel: ObservableObject { return data } } + +extension FaceLivenessDetectionViewModel: FaceDetectionSessionConfigurationWrapper { } diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessViewControllerPresenter.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessViewControllerPresenter.swift index 5786620b..8fff8b9f 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessViewControllerPresenter.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessViewControllerPresenter.swift @@ -12,4 +12,5 @@ protocol FaceLivenessViewControllerPresenter: AnyObject { func drawOvalInCanvas(_ ovalRect: CGRect) func displayFreshness(colorSequences: [FaceLivenessSession.DisplayColor]) func displaySingleFrame(uiImage: UIImage) + func completeNoLightCheck() } diff --git a/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift b/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift 
index c59629c9..e61f8311 100644 --- a/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift +++ b/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift @@ -76,6 +76,10 @@ struct LivenessStateMachine { mutating func completedDisplayingFreshness() { state = .completedDisplayingFreshness } + + mutating func completedNoLightCheck() { + state = .completedNoLightCheck + } mutating func displayingFreshness() { state = .displayingFreshness @@ -95,6 +99,7 @@ struct LivenessStateMachine { enum State: Equatable { case initial + case awaitingChallengeType case pendingFacePreparedConfirmation(FaceNotPreparedReason) case recording(ovalDisplayed: Bool) case awaitingFaceInOvalMatch(FaceNotPreparedReason, Double) @@ -102,6 +107,7 @@ struct LivenessStateMachine { case initialClientInfoEventSent case displayingFreshness case completedDisplayingFreshness + case completedNoLightCheck case completed case awaitingDisconnectEvent case disconnectEventReceived diff --git a/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift b/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift index c274bde0..79b91e44 100644 --- a/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift +++ b/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift @@ -173,4 +173,11 @@ extension _LivenessViewController: FaceLivenessViewControllerPresenter { self.ovalExists = true } } + + func completeNoLightCheck() { + guard let faceGuideRect = self.faceGuideRect else { return } + self.viewModel.completeNoLightCheck( + faceGuide: faceGuideRect + ) + } } diff --git a/Sources/FaceLiveness/Views/LoadingPage/LoadingPageView.swift b/Sources/FaceLiveness/Views/LoadingPage/LoadingPageView.swift new file mode 100644 index 00000000..e02b4e79 --- /dev/null +++ b/Sources/FaceLiveness/Views/LoadingPage/LoadingPageView.swift @@ -0,0 +1,27 @@ +// +// Copyright Amazon.com Inc. or its affiliates. +// All Rights Reserved. +// +// SPDX-License-Identifier: Apache-2.0 +// + +import SwiftUI + +struct LoadingPageView: View { + + var body: some View { + VStack { + HStack(spacing: 5) { + ProgressView() + Text(LocalizedStrings.challenge_connecting) + } + + } + } +} + +struct LoadingPageView_Previews: PreviewProvider { + static var previews: some View { + LoadingPageView() + } +} diff --git a/Tests/FaceLivenessTests/DetectedFaceTests.swift b/Tests/FaceLivenessTests/DetectedFaceTests.swift index 4bee8292..6d538e33 100644 --- a/Tests/FaceLivenessTests/DetectedFaceTests.swift +++ b/Tests/FaceLivenessTests/DetectedFaceTests.swift @@ -7,7 +7,7 @@ import XCTest @testable import FaceLiveness - +@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin final class DetectedFaceTests: XCTestCase { var detectedFace: DetectedFace! 
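To make the new terminal state concrete, here is a minimal sketch of how completedNoLightCheck slots into the "send final video event" check from the VideoSegmentProcessor change earlier in this patch. The enum mirrors only the states needed for the example, and shouldSendFinalVideoEvent is an illustrative helper, not library API.

enum LivenessState: Equatable {
    case recording(ovalDisplayed: Bool)
    case faceMatched
    case displayingFreshness
    case completedDisplayingFreshness
    case completedNoLightCheck
    case completed
}

// Both challenge flavors now end recording: the light challenge through
// completedDisplayingFreshness, the no-light challenge through completedNoLightCheck.
func shouldSendFinalVideoEvent(state: LivenessState, hasSentFinalVideoEvent: Bool) -> Bool {
    guard !hasSentFinalVideoEvent else { return false }
    return state == .completedDisplayingFreshness || state == .completedNoLightCheck
}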
@@ -104,7 +104,29 @@ final class DetectedFaceTests: XCTestCase { width: 0.6240418540649166, height: 0.8144985824018897 ) - let boundingBox = detectedFace.boundingBoxFromLandmarks(ovalRect: ovalRect) + + let face = FaceLivenessSession.OvalMatchChallenge.Face( + distanceThreshold: 0.1, + distanceThresholdMax: 0.1, + distanceThresholdMin: 0.1, + iouWidthThreshold: 0.1, + iouHeightThreshold: 0.1 + ) + + let oval = FaceLivenessSession.OvalMatchChallenge.Oval(boundingBox: .init(x: 0.1, + y: 0.1, + width: 0.1, + height: 0.1), + heightWidthRatio: 1.618, + iouThreshold: 0.1, + iouWidthThreshold: 0.1, + iouHeightThreshold: 0.1, + ovalFitTimeout: 1) + + let boundingBox = detectedFace.boundingBoxFromLandmarks(ovalRect: ovalRect, + ovalMatchChallenge: .init(faceDetectionThreshold: 0.7, + face: face, + oval: oval)) XCTAssertEqual(boundingBox.origin.x, expectedBoundingBox.origin.x) XCTAssertEqual(boundingBox.origin.y, expectedBoundingBox.origin.y) XCTAssertEqual(boundingBox.width, expectedBoundingBox.width) diff --git a/Tests/FaceLivenessTests/LivenessTests.swift b/Tests/FaceLivenessTests/LivenessTests.swift index da063930..c4b95d02 100644 --- a/Tests/FaceLivenessTests/LivenessTests.swift +++ b/Tests/FaceLivenessTests/LivenessTests.swift @@ -69,6 +69,7 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase { /// Then: The end state of this flow is `.faceMatched` func testHappyPathToMatchedFace() async throws { viewModel.livenessService = self.livenessService + viewModel.challenge = Challenge(version: "2.0.0", type: .faceMovementAndLightChallenge) viewModel.livenessState.checkIsFacePrepared() XCTAssertEqual(viewModel.livenessState.state, .pendingFacePreparedConfirmation(.pendingCheck)) @@ -103,16 +104,37 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase { XCTAssertEqual(faceDetector.interactions, [ "setResultHandler(detectionResultHandler:) (FaceLivenessDetectionViewModel)" ]) - XCTAssertEqual(livenessService.interactions, [ - "initializeLivenessStream(withSessionID:userAgent:)" - ]) + XCTAssertEqual(livenessService.interactions, []) } /// Given: A `FaceLivenessDetectionViewModel` /// When: The viewModel is processes a single face result with a face distance less than the inital face distance - /// Then: The end state of this flow is `.recording(ovalDisplayed: false)` and initializeLivenessStream(withSessionID:userAgent:) is called + /// Then: The end state of this flow is `.recording(ovalDisplayed: false)` func testTransitionToRecordingState() async throws { viewModel.livenessService = self.livenessService + viewModel.challenge = Challenge(version: "2.0.0", type: .faceMovementAndLightChallenge) + + let face = FaceLivenessSession.OvalMatchChallenge.Face( + distanceThreshold: 0.32, + distanceThresholdMax: 0.1, + distanceThresholdMin: 0.1, + iouWidthThreshold: 0.1, + iouHeightThreshold: 0.1 + ) + + let oval = FaceLivenessSession.OvalMatchChallenge.Oval(boundingBox: .init(x: 0.1, + y: 0.1, + width: 0.1, + height: 0.1), + heightWidthRatio: 1.618, + iouThreshold: 0.1, + iouWidthThreshold: 0.1, + iouHeightThreshold: 0.1, + ovalFitTimeout: 1) + + viewModel.sessionConfiguration = .init(ovalMatchChallenge: .init(faceDetectionThreshold: 0.7, + face: face, + oval: oval)) viewModel.livenessState.checkIsFacePrepared() XCTAssertEqual(viewModel.livenessState.state, .pendingFacePreparedConfirmation(.pendingCheck)) @@ -136,9 +158,6 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase { XCTAssertEqual(faceDetector.interactions, [ "setResultHandler(detectionResultHandler:) 
(FaceLivenessDetectionViewModel)" ]) - XCTAssertEqual(livenessService.interactions, [ - "initializeLivenessStream(withSessionID:userAgent:)" - ]) } /// Given: A `FaceLivenessDetectionViewModel` diff --git a/Tests/FaceLivenessTests/MockLivenessService.swift b/Tests/FaceLivenessTests/MockLivenessService.swift index 2b4633d1..942f7488 100644 --- a/Tests/FaceLivenessTests/MockLivenessService.swift +++ b/Tests/FaceLivenessTests/MockLivenessService.swift @@ -18,7 +18,7 @@ class MockLivenessService { var onFinalClientEvent: (LivenessEvent, Date) -> Void = { _, _ in } var onFreshnessEvent: (LivenessEvent, Date) -> Void = { _, _ in } var onVideoEvent: (LivenessEvent, Date) -> Void = { _, _ in } - var onInitializeLivenessStream: (String, String) -> Void = { _, _ in } + var onInitializeLivenessStream: (String, String,[Challenge]?) -> Void = { _, _, _ in } var onServiceException: (FaceLivenessSessionError) -> Void = { _ in } var onCloseSocket: (URLSessionWebSocketTask.CloseCode) -> Void = { _ in } } @@ -44,10 +44,12 @@ extension MockLivenessService: LivenessService { } func initializeLivenessStream( - withSessionID sessionID: String, userAgent: String + withSessionID sessionID: String, + userAgent: String, + challenges: [Challenge] ) throws { interactions.append(#function) - onInitializeLivenessStream(sessionID, userAgent) + onInitializeLivenessStream(sessionID, userAgent, challenges) } func register( @@ -62,6 +64,10 @@ extension MockLivenessService: LivenessService { ) { interactions.append(#function) } + + func register(listener: @escaping (Challenge) -> Void, on event: LivenessEventKind.Server) { + interactions.append(#function) + } func closeSocket(with code: URLSessionWebSocketTask.CloseCode) { interactions.append(#function) From de44adf63923bfe37613eff5a7ad2fde37ea0e7b Mon Sep 17 00:00:00 2001 From: Abhash Kumar Singh Date: Mon, 6 May 2024 10:14:55 -0700 Subject: [PATCH 02/10] chore: Add attempt count changes (#137) * chore: Add attempt count changes * Fix unit tests * add unit tests * Update region for example liveness view * Update amplify-swift dependency --- .../xcshareddata/swiftpm/Package.resolved | 2 +- HostApp/HostApp/Model/LivenessResult.swift | 4 ++ .../HostApp/Views/ExampleLivenessView.swift | 3 +- .../LivenessResultContentView+Result.swift | 4 ++ .../Views/LivenessResultContentView.swift | 64 +++++++++++++------ Package.resolved | 2 +- .../Views/GetReadyPage/GetReadyPageView.swift | 35 ++++------ .../Liveness/FaceLivenessDetectionView.swift | 17 ++--- ...ViewModel+FaceDetectionResultHandler.swift | 4 +- .../FaceLivenessDetectionViewModel.swift | 24 +++++-- .../CredentialsProviderTestCase.swift | 3 +- Tests/FaceLivenessTests/LivenessTests.swift | 58 ++++++++++++++++- .../MockLivenessService.swift | 7 +- 13 files changed, 155 insertions(+), 72 deletions(-) diff --git a/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved index 29d9d34d..d18921e3 100644 --- a/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved +++ b/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved @@ -6,7 +6,7 @@ "location" : "https://github.com/aws-amplify/amplify-swift", "state" : { "branch" : "feat/no-light-support", - "revision" : "7c1fa2f7a766208f5af69ca8dce5fd02e6de4db6" + "revision" : "22e02fa21399122aac1d8b4f6ab23c242c79dae6" } }, { diff --git a/HostApp/HostApp/Model/LivenessResult.swift b/HostApp/HostApp/Model/LivenessResult.swift 
index 226bc30f..3a36f089 100644 --- a/HostApp/HostApp/Model/LivenessResult.swift +++ b/HostApp/HostApp/Model/LivenessResult.swift @@ -6,11 +6,13 @@ // import Foundation +@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin struct LivenessResult: Codable { let auditImageBytes: String? let confidenceScore: Double let isLive: Bool + let challenge: Challenge? } extension LivenessResult: CustomDebugStringConvertible { @@ -20,6 +22,8 @@ extension LivenessResult: CustomDebugStringConvertible { - confidenceScore: \(confidenceScore) - isLive: \(isLive) - auditImageBytes: \(auditImageBytes == nil ? "nil" : "") + - challengeType: \(String(describing: challenge?.type)) + - challengeVersion: \(String(describing: challenge?.version)) """ } } diff --git a/HostApp/HostApp/Views/ExampleLivenessView.swift b/HostApp/HostApp/Views/ExampleLivenessView.swift index 7ec0c011..5f6868b6 100644 --- a/HostApp/HostApp/Views/ExampleLivenessView.swift +++ b/HostApp/HostApp/Views/ExampleLivenessView.swift @@ -22,8 +22,7 @@ struct ExampleLivenessView: View { case .liveness: FaceLivenessDetectorView( sessionID: viewModel.sessionID, - // TODO: Change before merging to main - region: "us-west-2", + region: "us-east-1", isPresented: Binding( get: { viewModel.presentationState == .liveness }, set: { _ in } diff --git a/HostApp/HostApp/Views/LivenessResultContentView+Result.swift b/HostApp/HostApp/Views/LivenessResultContentView+Result.swift index 3f57982f..0b18eaab 100644 --- a/HostApp/HostApp/Views/LivenessResultContentView+Result.swift +++ b/HostApp/HostApp/Views/LivenessResultContentView+Result.swift @@ -6,6 +6,7 @@ // import SwiftUI +@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin extension LivenessResultContentView { struct Result { @@ -15,6 +16,7 @@ extension LivenessResultContentView { let valueBackgroundColor: Color let auditImage: Data? let isLive: Bool + let challenge: Challenge? 
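Because LivenessResult now carries an optional challenge, results from sessions that predate this change still decode. A self-contained sketch of that behavior, using ChallengeInfo and LivenessResultPayload as illustrative stand-ins for the host app's types:

import Foundation

struct ChallengeInfo: Codable {
    let version: String
    let type: String
}

struct LivenessResultPayload: Codable {
    let auditImageBytes: String?
    let confidenceScore: Double
    let isLive: Bool
    let challenge: ChallengeInfo?   // synthesized Codable treats a missing key as nil
}

let json = #"{"confidenceScore": 99.8, "isLive": true, "challenge": {"version": "2.0.0", "type": "FaceMovementChallenge"}}"#
let result = try? JSONDecoder().decode(LivenessResultPayload.self, from: Data(json.utf8))
print(result?.challenge?.type ?? "no challenge reported")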
init(livenessResult: LivenessResult) { guard livenessResult.confidenceScore > 0 else { @@ -24,6 +26,7 @@ extension LivenessResultContentView { valueBackgroundColor = .clear auditImage = nil isLive = false + challenge = nil return } isLive = livenessResult.isLive @@ -41,6 +44,7 @@ extension LivenessResultContentView { auditImage = livenessResult.auditImageBytes.flatMap{ Data(base64Encoded: $0) } + challenge = livenessResult.challenge } } diff --git a/HostApp/HostApp/Views/LivenessResultContentView.swift b/HostApp/HostApp/Views/LivenessResultContentView.swift index de2ecff7..51660f55 100644 --- a/HostApp/HostApp/Views/LivenessResultContentView.swift +++ b/HostApp/HostApp/Views/LivenessResultContentView.swift @@ -6,9 +6,10 @@ // import SwiftUI +@_spi(PredictionsFaceLiveness) import AWSPredictionsPlugin struct LivenessResultContentView: View { - @State var result: Result = .init(livenessResult: .init(auditImageBytes: nil, confidenceScore: -1, isLive: false)) + @State var result: Result = .init(livenessResult: .init(auditImageBytes: nil, confidenceScore: -1, isLive: false, challenge: nil)) let fetchResults: () async throws -> Result var body: some View { @@ -67,26 +68,48 @@ struct LivenessResultContentView: View { } } + func step(number: Int, text: String) -> some View { + HStack(alignment: .top) { + Text("\(number).") + Text(text) + } + } + + @ViewBuilder private func steps() -> some View { - func step(number: Int, text: String) -> some View { - HStack(alignment: .top) { - Text("\(number).") - Text(text) + switch result.challenge?.type { + case .faceMovementChallenge: + VStack( + alignment: .leading, + spacing: 8 + ) { + Text("Tips to pass the video check:") + .fontWeight(.semibold) + + Text("Remove sunglasses, mask, hat, or anything blocking your face.") + .accessibilityElement(children: .combine) + } + case .faceMovementAndLightChallenge: + VStack( + alignment: .leading, + spacing: 8 + ) { + Text("Tips to pass the video check:") + .fontWeight(.semibold) + + step(number: 1, text: "Avoid very bright lighting conditions, such as direct sunlight.") + .accessibilityElement(children: .combine) + + step(number: 2, text: "Remove sunglasses, mask, hat, or anything blocking your face.") + .accessibilityElement(children: .combine) + } + case .none: + VStack( + alignment: .leading, + spacing: 8 + ) { + EmptyView() } - } - - return VStack( - alignment: .leading, - spacing: 8 - ) { - Text("Tips to pass the video check:") - .fontWeight(.semibold) - - step(number: 1, text: "Avoid very bright lighting conditions, such as direct sunlight.") - .accessibilityElement(children: .combine) - - step(number: 2, text: "Remove sunglasses, mask, hat, or anything blocking your face.") - .accessibilityElement(children: .combine) } } } @@ -99,7 +122,8 @@ extension LivenessResultContentView { livenessResult: .init( auditImageBytes: nil, confidenceScore: 99.8329, - isLive: true + isLive: true, + challenge: nil ) ) } diff --git a/Package.resolved b/Package.resolved index baf17888..7749501d 100644 --- a/Package.resolved +++ b/Package.resolved @@ -6,7 +6,7 @@ "location" : "https://github.com/aws-amplify/amplify-swift", "state" : { "branch" : "feat/no-light-support", - "revision" : "7c1fa2f7a766208f5af69ca8dce5fd02e6de4db6" + "revision" : "22e02fa21399122aac1d8b4f6ab23c242c79dae6" } }, { diff --git a/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift b/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift index dadb2076..0c52ccff 100644 --- a/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift +++ 
b/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift @@ -27,28 +27,19 @@ struct GetReadyPageView: View { VStack { ZStack { CameraPreviewView() - switch self.challenge.type { - case .faceMovementChallenge: - VStack { - Text(LocalizedStrings.preview_center_your_face_text) - .font(.title) - .multilineTextAlignment(.center) - Spacer() - }.padding() - case . faceMovementAndLightChallenge: - VStack { - WarningBox( - titleText: LocalizedStrings.get_ready_photosensitivity_title, - bodyText: LocalizedStrings.get_ready_photosensitivity_description, - popoverContent: { photosensitivityWarningPopoverContent } - ) - .accessibilityElement(children: .combine) - Text(LocalizedStrings.preview_center_your_face_text) - .font(.title) - .multilineTextAlignment(.center) - Spacer() - }.padding() - } + VStack { + WarningBox( + titleText: LocalizedStrings.get_ready_photosensitivity_title, + bodyText: LocalizedStrings.get_ready_photosensitivity_description, + popoverContent: { photosensitivityWarningPopoverContent } + ) + .accessibilityElement(children: .combine) + .opacity(challenge.type == .faceMovementAndLightChallenge ? 1.0 : 0.0) + Text(LocalizedStrings.preview_center_your_face_text) + .font(.title) + .multilineTextAlignment(.center) + Spacer() + }.padding() } beginCheckButton } diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift index dd009d71..019b9148 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift @@ -20,7 +20,6 @@ public struct FaceLivenessDetectorView: View { @State var displayingCameraPermissionsNeededAlert = false let disableStartView: Bool - let facelivenessDetectorViewId: String let onCompletion: (Result) -> Void let sessionTask: Task @@ -32,9 +31,7 @@ public struct FaceLivenessDetectorView: View { disableStartView: Bool = false, isPresented: Binding, onCompletion: @escaping (Result) -> Void - ) { - let viewId = UUID().uuidString - self.facelivenessDetectorViewId = viewId + ) { self.disableStartView = disableStartView self._isPresented = isPresented self.onCompletion = onCompletion @@ -44,8 +41,6 @@ public struct FaceLivenessDetectorView: View { withID: sessionID, credentialsProvider: credentialsProvider, region: region, - options: .init(faceLivenessDetectorViewId: viewId, - preCheckViewEnabled: !disableStartView), completion: map(detectionCompletion: onCompletion) ) return session @@ -83,7 +78,8 @@ public struct FaceLivenessDetectorView: View { captureSession: captureSession, videoChunker: videoChunker, closeButtonAction: { onCompletion(.failure(.userCancelled)) }, - sessionID: sessionID + sessionID: sessionID, + isPreviewScreenEnabled: !disableStartView ) ) @@ -99,8 +95,6 @@ public struct FaceLivenessDetectorView: View { onCompletion: @escaping (Result) -> Void, captureSession: LivenessCaptureSession ) { - let viewId = UUID().uuidString - self.facelivenessDetectorViewId = viewId self.disableStartView = disableStartView self._isPresented = isPresented self.onCompletion = onCompletion @@ -110,8 +104,6 @@ public struct FaceLivenessDetectorView: View { withID: sessionID, credentialsProvider: credentialsProvider, region: region, - options: .init(faceLivenessDetectorViewId: viewId, - preCheckViewEnabled: !disableStartView), completion: map(detectionCompletion: onCompletion) ) return session @@ -128,7 +120,8 @@ public struct FaceLivenessDetectorView: View { captureSession: captureSession, 
videoChunker: captureSession.outputSampleBufferCapturer!.videoChunker, closeButtonAction: { onCompletion(.failure(.userCancelled)) }, - sessionID: sessionID + sessionID: sessionID, + isPreviewScreenEnabled: !disableStartView ) ) } diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift index edb96e80..0e43de2a 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift @@ -121,9 +121,7 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler { } } case .faceMovementChallenge: - DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { - self.livenessViewControllerDelegate?.completeNoLightCheck() - } + self.livenessViewControllerDelegate?.completeNoLightCheck() default: break } diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift index 8c6ae829..42e7149a 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift @@ -12,6 +12,7 @@ import AVFoundation fileprivate let videoSize: CGSize = .init(width: 480, height: 640) fileprivate let defaultNoFitTimeoutInterval: TimeInterval = 7 +fileprivate let defaultAttemptCountResetInterval: TimeInterval = 300.0 @MainActor class FaceLivenessDetectionViewModel: ObservableObject { @@ -28,6 +29,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { let faceDetector: FaceDetector let faceInOvalMatching: FaceInOvalMatching let challengeID: String = UUID().uuidString + let isPreviewScreenEnabled : Bool var colorSequences: [ColorSequence] = [] var hasSentFinalVideoEvent = false var hasSentFirstVideo = false @@ -43,6 +45,9 @@ class FaceLivenessDetectionViewModel: ObservableObject { var faceMatchedTimestamp: UInt64? var noFitStartTime: Date? + static var attemptCount: Int = 0 + static var attemptIdTimeStamp: Date = Date() + var noFitTimeoutInterval: TimeInterval { if let sessionTimeoutMilliSec = sessionConfiguration?.ovalMatchChallenge.oval.ovalFitTimeout { return TimeInterval(sessionTimeoutMilliSec/1_000) @@ -58,7 +63,8 @@ class FaceLivenessDetectionViewModel: ObservableObject { videoChunker: VideoChunker, stateMachine: LivenessStateMachine = .init(state: .initial), closeButtonAction: @escaping () -> Void, - sessionID: String + sessionID: String, + isPreviewScreenEnabled: Bool ) { self.closeButtonAction = closeButtonAction self.videoChunker = videoChunker @@ -67,6 +73,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { self.captureSession = captureSession self.faceDetector = faceDetector self.faceInOvalMatching = faceInOvalMatching + self.isPreviewScreenEnabled = isPreviewScreenEnabled self.closeButtonAction = { [weak self] in guard let self else { return } @@ -186,13 +193,20 @@ class FaceLivenessDetectionViewModel: ObservableObject { func initializeLivenessStream() { do { - guard let livenessSession = livenessService as? 
FaceLivenessSession else { - throw FaceLivenessDetectionError.unknown + if (abs(Self.attemptIdTimeStamp.timeIntervalSinceNow) > defaultAttemptCountResetInterval) { + Self.attemptCount = 1 + } else { + Self.attemptCount += 1 } + Self.attemptIdTimeStamp = Date() - try livenessSession.initializeLivenessStream( + try livenessService?.initializeLivenessStream( withSessionID: sessionID, - userAgent: UserAgentValues.standard().userAgentString + userAgent: UserAgentValues.standard().userAgentString, + challenges: FaceLivenessSession.supportedChallenges, + options: .init( + attemptCount: Self.attemptCount, + preCheckViewEnabled: isPreviewScreenEnabled) ) } catch { DispatchQueue.main.async { diff --git a/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift b/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift index 7d69251b..3c1dabbf 100644 --- a/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift +++ b/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift @@ -41,7 +41,8 @@ final class CredentialsProviderTestCase: XCTestCase { captureSession: captureSession, videoChunker: videoChunker, closeButtonAction: {}, - sessionID: UUID().uuidString + sessionID: UUID().uuidString, + isPreviewScreenEnabled: false ) self.videoChunker = videoChunker diff --git a/Tests/FaceLivenessTests/LivenessTests.swift b/Tests/FaceLivenessTests/LivenessTests.swift index c4b95d02..5603914a 100644 --- a/Tests/FaceLivenessTests/LivenessTests.swift +++ b/Tests/FaceLivenessTests/LivenessTests.swift @@ -32,7 +32,8 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase { captureSession: captureSession, videoChunker: videoChunker, closeButtonAction: {}, - sessionID: UUID().uuidString + sessionID: UUID().uuidString, + isPreviewScreenEnabled: false ) self.videoChunker = videoChunker @@ -104,7 +105,9 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase { XCTAssertEqual(faceDetector.interactions, [ "setResultHandler(detectionResultHandler:) (FaceLivenessDetectionViewModel)" ]) - XCTAssertEqual(livenessService.interactions, []) + XCTAssertEqual(livenessService.interactions, [ + "initializeLivenessStream(withSessionID:userAgent:challenges:options:)" + ]) } /// Given: A `FaceLivenessDetectionViewModel` @@ -193,4 +196,55 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase { try await Task.sleep(seconds: 1) XCTAssertEqual(self.viewModel.livenessState.state, .encounteredUnrecoverableError(.timedOut)) } + + /// Given: A `FaceLivenessDetectionViewModel` + /// When: The initializeLivenessStream() is called for the first time and then called again after 3 seconds + /// Then: The attempt count is incremented + func testAttemptCountIncrementFirstTime() async throws { + viewModel.livenessService = self.livenessService + self.viewModel.initializeLivenessStream() + XCTAssertEqual(livenessService.interactions, [ + "initializeLivenessStream(withSessionID:userAgent:challenges:options:)" + ]) + + XCTAssertEqual(FaceLivenessDetectionViewModel.attemptCount, 1) + try await Task.sleep(seconds: 3) + + self.viewModel.initializeLivenessStream() + XCTAssertEqual(livenessService.interactions, [ + "initializeLivenessStream(withSessionID:userAgent:challenges:options:)", + "initializeLivenessStream(withSessionID:userAgent:challenges:options:)" + ]) + XCTAssertEqual(FaceLivenessDetectionViewModel.attemptCount, 2) + } + + /// Given: A `FaceLivenessDetectionViewModel` + /// When: The attempt count is 4, last attempt time was < 5 minutes and initializeLivenessStream() is called + /// Then: The attempt count is 
incremented + func testAttemptCountIncrement() async throws { + viewModel.livenessService = self.livenessService + FaceLivenessDetectionViewModel.attemptCount = 4 + FaceLivenessDetectionViewModel.attemptIdTimeStamp = Date().addingTimeInterval(-180) + self.viewModel.initializeLivenessStream() + XCTAssertEqual(livenessService.interactions, [ + "initializeLivenessStream(withSessionID:userAgent:challenges:options:)" + ]) + + XCTAssertEqual(FaceLivenessDetectionViewModel.attemptCount, 5) + } + + /// Given: A `FaceLivenessDetectionViewModel` + /// When: The attempt count is 4, last attempt time was > 5 minutes and initializeLivenessStream() is called + /// Then: The attempt count is not incremented and reset to 1 + func testAttemptCountReset() async throws { + viewModel.livenessService = self.livenessService + FaceLivenessDetectionViewModel.attemptCount = 4 + FaceLivenessDetectionViewModel.attemptIdTimeStamp = Date().addingTimeInterval(-305) + self.viewModel.initializeLivenessStream() + XCTAssertEqual(livenessService.interactions, [ + "initializeLivenessStream(withSessionID:userAgent:challenges:options:)" + ]) + + XCTAssertEqual(FaceLivenessDetectionViewModel.attemptCount, 1) + } } diff --git a/Tests/FaceLivenessTests/MockLivenessService.swift b/Tests/FaceLivenessTests/MockLivenessService.swift index 942f7488..d3e43a8d 100644 --- a/Tests/FaceLivenessTests/MockLivenessService.swift +++ b/Tests/FaceLivenessTests/MockLivenessService.swift @@ -18,7 +18,7 @@ class MockLivenessService { var onFinalClientEvent: (LivenessEvent, Date) -> Void = { _, _ in } var onFreshnessEvent: (LivenessEvent, Date) -> Void = { _, _ in } var onVideoEvent: (LivenessEvent, Date) -> Void = { _, _ in } - var onInitializeLivenessStream: (String, String,[Challenge]?) -> Void = { _, _, _ in } + var onInitializeLivenessStream: (String, String,[Challenge]?,FaceLivenessSession.Options) -> Void = { _, _, _, _ in } var onServiceException: (FaceLivenessSessionError) -> Void = { _ in } var onCloseSocket: (URLSessionWebSocketTask.CloseCode) -> Void = { _ in } } @@ -46,10 +46,11 @@ extension MockLivenessService: LivenessService { func initializeLivenessStream( withSessionID sessionID: String, userAgent: String, - challenges: [Challenge] + challenges: [Challenge], + options: FaceLivenessSession.Options ) throws { interactions.append(#function) - onInitializeLivenessStream(sessionID, userAgent, challenges) + onInitializeLivenessStream(sessionID, userAgent, challenges, options) } func register( From 4c6d9c52e3b0708a34b47700da63331f8435cf76 Mon Sep 17 00:00:00 2001 From: Abhash Kumar Singh Date: Mon, 1 Jul 2024 11:01:28 -0700 Subject: [PATCH 03/10] chore: update dependencies after rebase --- Package.resolved | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Package.resolved b/Package.resolved index 7749501d..3bd3a678 100644 --- a/Package.resolved +++ b/Package.resolved @@ -6,7 +6,7 @@ "location" : "https://github.com/aws-amplify/amplify-swift", "state" : { "branch" : "feat/no-light-support", - "revision" : "22e02fa21399122aac1d8b4f6ab23c242c79dae6" + "revision" : "1a5386d8d8e8e1edf631625c7bb6e003b2c0c821" } }, { @@ -50,8 +50,8 @@ "kind" : "remoteSourceControl", "location" : "https://github.com/stephencelis/SQLite.swift.git", "state" : { - "revision" : "5f5ad81ac0d0a0f3e56e39e646e8423c617df523", - "version" : "0.13.2" + "revision" : "a95fc6df17d108bd99210db5e8a9bac90fe984b8", + "version" : "0.15.3" } }, { From cf5a3bd36628ab4a1815b32b433a372550970f5a Mon Sep 17 00:00:00 2001 From: Abhash Kumar Singh Date: Tue, 2 Jul 
2024 11:29:13 -0700 Subject: [PATCH 04/10] fix: handle error on loading view (#154) --- .../Liveness/FaceLivenessDetectionView.swift | 19 +++++++++++++++++-- 1 file changed, 17 insertions(+), 2 deletions(-) diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift index 019b9148..f803a863 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift @@ -82,8 +82,6 @@ public struct FaceLivenessDetectorView: View { isPreviewScreenEnabled: !disableStartView ) ) - - faceDetector.setFaceDetectionSessionConfigurationWrapper(configuration: viewModel) } init( @@ -142,6 +140,23 @@ public struct FaceLivenessDetectorView: View { } catch { throw FaceLivenessDetectionError.accessDenied } + + DispatchQueue.main.async { + if let faceDetector = viewModel.faceDetector as? FaceDetectorShortRange.Model { + faceDetector.setFaceDetectionSessionConfigurationWrapper(configuration: viewModel) + } + } + } + } + .onReceive(viewModel.$livenessState) { output in + switch output.state { + case .encounteredUnrecoverableError(let error): + let closeCode = error.webSocketCloseCode ?? .normalClosure + viewModel.livenessService?.closeSocket(with: closeCode) + isPresented = false + onCompletion(.failure(mapError(error))) + default: + break } } case .awaitingLivenessSession(let challenge): From 3557932292f01231982454ee00cd0c6e8bddde52 Mon Sep 17 00:00:00 2001 From: Abhash Kumar Singh Date: Thu, 22 Aug 2024 13:34:08 -0700 Subject: [PATCH 05/10] chore: update amplify package dependency after rebase --- Package.resolved | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Package.resolved b/Package.resolved index 3bd3a678..6cb527c5 100644 --- a/Package.resolved +++ b/Package.resolved @@ -6,7 +6,7 @@ "location" : "https://github.com/aws-amplify/amplify-swift", "state" : { "branch" : "feat/no-light-support", - "revision" : "1a5386d8d8e8e1edf631625c7bb6e003b2c0c821" + "revision" : "614be628cb01188e519bb0e9e4d90bd83703d139" } }, { From 16a4ded4fe7e58ac202aeece854b0408b99c484d Mon Sep 17 00:00:00 2001 From: Abhash Kumar Singh Date: Tue, 17 Jun 2025 12:25:39 -0700 Subject: [PATCH 06/10] update Package.resolved files --- .../xcshareddata/swiftpm/Package.resolved | 20 +++++++++---------- Package.resolved | 14 ++++++------- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved index d18921e3..d267f453 100644 --- a/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved +++ b/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved @@ -6,7 +6,7 @@ "location" : "https://github.com/aws-amplify/amplify-swift", "state" : { "branch" : "feat/no-light-support", - "revision" : "22e02fa21399122aac1d8b4f6ab23c242c79dae6" + "revision" : "ae1d96a5eb37ab0f2bfcc9e836a2055c88cf8a01" } }, { @@ -23,17 +23,17 @@ "kind" : "remoteSourceControl", "location" : "https://github.com/awslabs/aws-crt-swift", "state" : { - "revision" : "dd17a98750b6182edacd6e8f0c30aa289c472b22", - "version" : "0.40.0" + "revision" : "74d970dde8a0d6b2fe1d8374767ca9793088ce2c", + "version" : "0.48.0" } }, { "identity" : "aws-sdk-swift", "kind" : "remoteSourceControl", - "location" : "https://github.com/awslabs/aws-sdk-swift.git", + "location" : 
"https://github.com/awslabs/aws-sdk-swift", "state" : { - "revision" : "9ad12684f6cb9c9b60e840c051a2bba604024650", - "version" : "1.0.69" + "revision" : "104958a898543582bb01102616bf5d61ed237352", + "version" : "1.2.59" } }, { @@ -41,8 +41,8 @@ "kind" : "remoteSourceControl", "location" : "https://github.com/smithy-lang/smithy-swift", "state" : { - "revision" : "402f091374dcf72c1e7ed43af10e3ee7e634fad8", - "version" : "0.106.0" + "revision" : "755367ae4e10004f8b5a94fbfdf3f638a1f225bc", + "version" : "0.125.0" } }, { @@ -50,8 +50,8 @@ "kind" : "remoteSourceControl", "location" : "https://github.com/stephencelis/SQLite.swift.git", "state" : { - "revision" : "5f5ad81ac0d0a0f3e56e39e646e8423c617df523", - "version" : "0.13.2" + "revision" : "a95fc6df17d108bd99210db5e8a9bac90fe984b8", + "version" : "0.15.3" } }, { diff --git a/Package.resolved b/Package.resolved index 6cb527c5..d267f453 100644 --- a/Package.resolved +++ b/Package.resolved @@ -6,7 +6,7 @@ "location" : "https://github.com/aws-amplify/amplify-swift", "state" : { "branch" : "feat/no-light-support", - "revision" : "614be628cb01188e519bb0e9e4d90bd83703d139" + "revision" : "ae1d96a5eb37ab0f2bfcc9e836a2055c88cf8a01" } }, { @@ -23,8 +23,8 @@ "kind" : "remoteSourceControl", "location" : "https://github.com/awslabs/aws-crt-swift", "state" : { - "revision" : "dd17a98750b6182edacd6e8f0c30aa289c472b22", - "version" : "0.40.0" + "revision" : "74d970dde8a0d6b2fe1d8374767ca9793088ce2c", + "version" : "0.48.0" } }, { @@ -32,8 +32,8 @@ "kind" : "remoteSourceControl", "location" : "https://github.com/awslabs/aws-sdk-swift", "state" : { - "revision" : "9ad12684f6cb9c9b60e840c051a2bba604024650", - "version" : "1.0.69" + "revision" : "104958a898543582bb01102616bf5d61ed237352", + "version" : "1.2.59" } }, { @@ -41,8 +41,8 @@ "kind" : "remoteSourceControl", "location" : "https://github.com/smithy-lang/smithy-swift", "state" : { - "revision" : "402f091374dcf72c1e7ed43af10e3ee7e634fad8", - "version" : "0.106.0" + "revision" : "755367ae4e10004f8b5a94fbfdf3f638a1f225bc", + "version" : "0.125.0" } }, { From 83e9e3394e058763a9e5c13c49a5c8fb699c836b Mon Sep 17 00:00:00 2001 From: Abhash Kumar Singh Date: Fri, 20 Jun 2025 11:41:11 -0700 Subject: [PATCH 07/10] feat: add back camera support (#180) * chore: back camera support * Add code for error scenarios * update error codes and message * Add challengeOption parameter and remove error codes * Update ChallengeOptions and use camera position based on challenge type received * Add UI changes for selecting back camera in HostApp * add default parameter to ChallengeOptions init * fix formatting * fix test build --- HostApp/HostApp.xcodeproj/project.pbxproj | 10 +++ .../HostApp/Views/ExampleLivenessView.swift | 35 +++++--- .../Views/ExampleLivenessViewModel.swift | 7 +- HostApp/HostApp/Views/RootView.swift | 17 ++-- HostApp/HostApp/Views/StartSessionView.swift | 39 +++++++- .../GetReadyPage/CameraPreviewView.swift | 2 +- .../GetReadyPage/CameraPreviewViewModel.swift | 7 +- .../Views/GetReadyPage/GetReadyPageView.swift | 14 +-- .../InstructionContainerView.swift | 2 +- .../Liveness/FaceLivenessDetectionError.swift | 2 +- .../Liveness/FaceLivenessDetectionView.swift | 88 +++++++++++++------ ...ViewModel+FaceDetectionResultHandler.swift | 4 +- .../FaceLivenessDetectionViewModel.swift | 58 ++++++++---- .../Views/Liveness/LivenessStateMachine.swift | 2 +- .../CredentialsProviderTestCase.swift | 9 +- Tests/FaceLivenessTests/LivenessTests.swift | 9 +- 16 files changed, 219 insertions(+), 86 deletions(-) diff --git 
a/HostApp/HostApp.xcodeproj/project.pbxproj b/HostApp/HostApp.xcodeproj/project.pbxproj index 7d1314c5..76052c19 100644 --- a/HostApp/HostApp.xcodeproj/project.pbxproj +++ b/HostApp/HostApp.xcodeproj/project.pbxproj @@ -131,6 +131,7 @@ 9070FFBD285112B5009867D5 /* HostAppUITests */, 9070FFA1285112B4009867D5 /* Products */, 90215EED291E9FB60050F2AD /* Frameworks */, + A5A9AF5054D0FF13505B212A /* AmplifyConfig */, ); sourceTree = ""; }; @@ -213,6 +214,15 @@ path = Model; sourceTree = ""; }; + A5A9AF5054D0FF13505B212A /* AmplifyConfig */ = { + isa = PBXGroup; + children = ( + 973619242BA378690003A590 /* awsconfiguration.json */, + 973619232BA378690003A590 /* amplifyconfiguration.json */, + ); + name = AmplifyConfig; + sourceTree = ""; + }; /* End PBXGroup section */ /* Begin PBXNativeTarget section */ diff --git a/HostApp/HostApp/Views/ExampleLivenessView.swift b/HostApp/HostApp/Views/ExampleLivenessView.swift index 5f6868b6..03e4dd55 100644 --- a/HostApp/HostApp/Views/ExampleLivenessView.swift +++ b/HostApp/HostApp/Views/ExampleLivenessView.swift @@ -9,22 +9,27 @@ import SwiftUI import FaceLiveness struct ExampleLivenessView: View { - @Binding var isPresented: Bool + @Binding var containerViewState: ContainerViewState @ObservedObject var viewModel: ExampleLivenessViewModel - init(sessionID: String, isPresented: Binding) { - self.viewModel = .init(sessionID: sessionID) - self._isPresented = isPresented + init(sessionID: String, containerViewState: Binding) { + self._containerViewState = containerViewState + if case let .liveness(selectedCamera) = _containerViewState.wrappedValue { + self.viewModel = .init(sessionID: sessionID, presentationState: .liveness(selectedCamera)) + } else { + self.viewModel = .init(sessionID: sessionID) + } } var body: some View { switch viewModel.presentationState { - case .liveness: + case .liveness(let camera): FaceLivenessDetectorView( sessionID: viewModel.sessionID, region: "us-east-1", + challengeOptions: .init(faceMovementChallengeOption: FaceMovementChallengeOption(camera: camera)), isPresented: Binding( - get: { viewModel.presentationState == .liveness }, + get: { viewModel.presentationState == .liveness(camera) }, set: { _ in } ), onCompletion: { result in @@ -33,11 +38,11 @@ struct ExampleLivenessView: View { case .success: withAnimation { viewModel.presentationState = .result } case .failure(.sessionNotFound), .failure(.cameraPermissionDenied), .failure(.accessDenied): - viewModel.presentationState = .liveness - isPresented = false + viewModel.presentationState = .liveness(camera) + containerViewState = .startSession case .failure(.userCancelled): - viewModel.presentationState = .liveness - isPresented = false + viewModel.presentationState = .liveness(camera) + containerViewState = .startSession case .failure(.sessionTimedOut): viewModel.presentationState = .error(.sessionTimedOut) case .failure(.socketClosed): @@ -46,6 +51,10 @@ struct ExampleLivenessView: View { viewModel.presentationState = .error(.countdownFaceTooClose) case .failure(.invalidSignature): viewModel.presentationState = .error(.invalidSignature) + case .failure(.faceInOvalMatchExceededTimeLimitError): + viewModel.presentationState = .error(.faceInOvalMatchExceededTimeLimitError) + case .failure(.internalServer): + viewModel.presentationState = .error(.internalServer) case .failure(.cameraNotAvailable): viewModel.presentationState = .error(.cameraNotAvailable) case .failure(.validation): @@ -58,11 +67,11 @@ struct ExampleLivenessView: View { } } ) - .id(isPresented) + 
.id(containerViewState) case .result: LivenessResultView( sessionID: viewModel.sessionID, - onTryAgain: { isPresented = false }, + onTryAgain: { containerViewState = .startSession }, content: { LivenessResultContentView(fetchResults: viewModel.fetchLivenessResult) } @@ -71,7 +80,7 @@ struct ExampleLivenessView: View { case .error(let detectionError): LivenessResultView( sessionID: viewModel.sessionID, - onTryAgain: { isPresented = false }, + onTryAgain: { containerViewState = .startSession }, content: { switch detectionError { case .socketClosed: diff --git a/HostApp/HostApp/Views/ExampleLivenessViewModel.swift b/HostApp/HostApp/Views/ExampleLivenessViewModel.swift index a04571bc..7dade2fb 100644 --- a/HostApp/HostApp/Views/ExampleLivenessViewModel.swift +++ b/HostApp/HostApp/Views/ExampleLivenessViewModel.swift @@ -10,11 +10,12 @@ import FaceLiveness import Amplify class ExampleLivenessViewModel: ObservableObject { - @Published var presentationState = PresentationState.liveness + @Published var presentationState: PresentationState = .liveness(.front) let sessionID: String - init(sessionID: String) { + init(sessionID: String, presentationState: PresentationState = .liveness(.front)) { self.sessionID = sessionID + self.presentationState = presentationState } func fetchLivenessResult() async throws -> LivenessResultContentView.Result { @@ -30,6 +31,6 @@ class ExampleLivenessViewModel: ObservableObject { } enum PresentationState: Equatable { - case liveness, result, error(FaceLivenessDetectionError) + case liveness(LivenessCamera), result, error(FaceLivenessDetectionError) } } diff --git a/HostApp/HostApp/Views/RootView.swift b/HostApp/HostApp/Views/RootView.swift index 7600f1b4..59a3c815 100644 --- a/HostApp/HostApp/Views/RootView.swift +++ b/HostApp/HostApp/Views/RootView.swift @@ -6,25 +6,32 @@ // import SwiftUI +import FaceLiveness struct RootView: View { @EnvironmentObject var sceneDelegate: SceneDelegate @State var sessionID = "" - @State var isPresentingContainerView = false + @State var containerViewState = ContainerViewState.startSession var body: some View { - if isPresentingContainerView { + switch containerViewState { + case .liveness: ExampleLivenessView( sessionID: sessionID, - isPresented: $isPresentingContainerView + containerViewState: $containerViewState ) - } else { + case .startSession: StartSessionView( sessionID: $sessionID, - isPresentingContainerView: $isPresentingContainerView + containerViewState: $containerViewState ) .background(Color.dynamicColors(light: .white, dark: .secondarySystemBackground)) .edgesIgnoringSafeArea(.all) } } } + +enum ContainerViewState: Hashable { + case liveness(LivenessCamera) + case startSession +} diff --git a/HostApp/HostApp/Views/StartSessionView.swift b/HostApp/HostApp/Views/StartSessionView.swift index 42f64401..6905e9b2 100644 --- a/HostApp/HostApp/Views/StartSessionView.swift +++ b/HostApp/HostApp/Views/StartSessionView.swift @@ -12,7 +12,7 @@ struct StartSessionView: View { @EnvironmentObject var sceneDelegate: SceneDelegate @ObservedObject var viewModel = StartSessionViewModel() @Binding var sessionID: String - @Binding var isPresentingContainerView: Bool + @Binding var containerViewState: ContainerViewState @State private var showAlert = false var body: some View { @@ -26,7 +26,7 @@ struct StartSessionView: View { ) button( - text: "Create Liveness Session", + text: "Create Liveness Session (front camera)", backgroundColor: .dynamicColors( light: .hex("#047D95"), dark: .hex("#7dd6e8") @@ -35,7 +35,7 @@ struct StartSessionView: 
View { viewModel.createSession { sessionId, err in if let sessionId = sessionId { sessionID = sessionId - isPresentingContainerView = true + containerViewState = .liveness(.front) } showAlert = err != nil @@ -50,7 +50,38 @@ struct StartSessionView: View { dismissButton: .default( Text("OK"), action: { - isPresentingContainerView = false + containerViewState = .startSession + } + ) + ) + } + + button( + text: "Create Liveness Session (back camera)", + backgroundColor: .dynamicColors( + light: .hex("#047D95"), + dark: .hex("#7dd6e8") + ), + action: { + viewModel.createSession { sessionId, err in + if let sessionId = sessionId { + sessionID = sessionId + containerViewState = .liveness(.back) + } + + showAlert = err != nil + } + }, + enabled: viewModel.isSignedIn + ) + .alert(isPresented: $showAlert) { + Alert( + title: Text("Error Creating Liveness Session"), + message: Text("Unable to create a liveness session id. Please try again."), + dismissButton: .default( + Text("OK"), + action: { + containerViewState = .startSession } ) ) diff --git a/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewView.swift b/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewView.swift index 2e8530f3..19e2f483 100644 --- a/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewView.swift +++ b/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewView.swift @@ -15,7 +15,7 @@ struct CameraPreviewView: View { @StateObject var model: CameraPreviewViewModel - init(model: CameraPreviewViewModel = CameraPreviewViewModel()) { + init(model: CameraPreviewViewModel = CameraPreviewViewModel(cameraPosition: .front)) { self._model = StateObject(wrappedValue: model) } diff --git a/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewViewModel.swift b/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewViewModel.swift index b50173b0..a46dbaa8 100644 --- a/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewViewModel.swift +++ b/Sources/FaceLiveness/Views/GetReadyPage/CameraPreviewViewModel.swift @@ -16,15 +16,18 @@ class CameraPreviewViewModel: NSObject, ObservableObject { @Published var buffer: CVPixelBuffer? var previewCaptureSession: LivenessCaptureSession? + let cameraPosition: LivenessCamera - override init() { + init(cameraPosition: LivenessCamera) { + self.cameraPosition = cameraPosition + super.init() setupSubscriptions() let avCaptureDevice = AVCaptureDevice.DiscoverySession( deviceTypes: [.builtInWideAngleCamera], mediaType: .video, - position: .front + position: cameraPosition == .front ? 
.front : .back ).devices.first let outputDelegate = CameraPreviewOutputSampleBufferDelegate { [weak self] buffer in diff --git a/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift b/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift index 0c52ccff..0142ca13 100644 --- a/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift +++ b/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift @@ -12,21 +12,24 @@ struct GetReadyPageView: View { let beginCheckButtonDisabled: Bool let onBegin: () -> Void let challenge: Challenge + let cameraPosition: LivenessCamera init( onBegin: @escaping () -> Void, beginCheckButtonDisabled: Bool = false, - challenge: Challenge + challenge: Challenge, + cameraPosition: LivenessCamera ) { self.onBegin = onBegin self.beginCheckButtonDisabled = beginCheckButtonDisabled self.challenge = challenge + self.cameraPosition = cameraPosition } var body: some View { VStack { ZStack { - CameraPreviewView() + CameraPreviewView(model: CameraPreviewViewModel(cameraPosition: cameraPosition)) VStack { WarningBox( titleText: LocalizedStrings.get_ready_photosensitivity_title, @@ -77,8 +80,9 @@ struct GetReadyPageView: View { struct GetReadyPageView_Previews: PreviewProvider { static var previews: some View { - GetReadyPageView(onBegin: {}, - challenge: .init(version: "2.0.0", - type: .faceMovementAndLightChallenge)) + GetReadyPageView( + onBegin: {}, + challenge: .init(version: "2.0.0", type: .faceMovementAndLightChallenge), + cameraPosition: .front) } } diff --git a/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift b/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift index 5ed45ae7..01dcd37f 100644 --- a/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift +++ b/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift @@ -110,7 +110,7 @@ struct InstructionContainerView: View { ) } case .faceMatched: - if let challenge = viewModel.challenge, + if let challenge = viewModel.challengeReceived, case .faceMovementAndLightChallenge = challenge.type { InstructionView( text: LocalizedStrings.challenge_instruction_hold_still, diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionError.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionError.swift index e90a6f06..19ced079 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionError.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionError.swift @@ -125,7 +125,7 @@ public struct FaceLivenessDetectionError: Error, Equatable { message: "The signature on the request is invalid.", recoverySuggestion: "Ensure the device time is correct and try again." ) - + public static let cameraNotAvailable = FaceLivenessDetectionError( code: 18, message: "The camera is not available.", diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift index f803a863..8f5e1af4 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift @@ -20,6 +20,7 @@ public struct FaceLivenessDetectorView: View { @State var displayingCameraPermissionsNeededAlert = false let disableStartView: Bool + let challengeOptions: ChallengeOptions let onCompletion: (Result) -> Void let sessionTask: Task @@ -29,12 +30,14 @@ public struct FaceLivenessDetectorView: View { credentialsProvider: AWSCredentialsProvider? 
= nil, region: String, disableStartView: Bool = false, + challengeOptions: ChallengeOptions, isPresented: Binding, onCompletion: @escaping (Result) -> Void ) { self.disableStartView = disableStartView self._isPresented = isPresented self.onCompletion = onCompletion + self.challengeOptions = challengeOptions self.sessionTask = Task { let session = try await AWSPredictionsPlugin.startFaceLivenessSession( @@ -57,29 +60,15 @@ public struct FaceLivenessDetectorView: View { assetWriterInput: LivenessAVAssetWriterInput() ) - let avCpatureDevice = AVCaptureDevice.DiscoverySession( - deviceTypes: [.builtInWideAngleCamera], - mediaType: .video, - position: .front - ).devices.first - - let captureSession = LivenessCaptureSession( - captureDevice: .init(avCaptureDevice: avCpatureDevice), - outputDelegate: OutputSampleBufferCapturer( - faceDetector: faceDetector, - videoChunker: videoChunker - ) - ) - self._viewModel = StateObject( wrappedValue: .init( faceDetector: faceDetector, faceInOvalMatching: faceInOvalStateMatching, - captureSession: captureSession, videoChunker: videoChunker, closeButtonAction: { onCompletion(.failure(.userCancelled)) }, sessionID: sessionID, - isPreviewScreenEnabled: !disableStartView + isPreviewScreenEnabled: !disableStartView, + challengeOptions: challengeOptions ) ) } @@ -89,6 +78,7 @@ public struct FaceLivenessDetectorView: View { credentialsProvider: AWSCredentialsProvider? = nil, region: String, disableStartView: Bool = false, + challengeOptions: ChallengeOptions, isPresented: Binding, onCompletion: @escaping (Result) -> Void, captureSession: LivenessCaptureSession @@ -96,6 +86,7 @@ public struct FaceLivenessDetectorView: View { self.disableStartView = disableStartView self._isPresented = isPresented self.onCompletion = onCompletion + self.challengeOptions = challengeOptions self.sessionTask = Task { let session = try await AWSPredictionsPlugin.startFaceLivenessSession( @@ -115,11 +106,11 @@ public struct FaceLivenessDetectorView: View { wrappedValue: .init( faceDetector: captureSession.outputSampleBufferCapturer!.faceDetector, faceInOvalMatching: faceInOvalStateMatching, - captureSession: captureSession, videoChunker: captureSession.outputSampleBufferCapturer!.videoChunker, closeButtonAction: { onCompletion(.failure(.userCancelled)) }, sessionID: sessionID, - isPreviewScreenEnabled: !disableStartView + isPreviewScreenEnabled: !disableStartView, + challengeOptions: challengeOptions ) ) } @@ -164,23 +155,32 @@ public struct FaceLivenessDetectorView: View { .onAppear { Task { do { + let cameraPosition: LivenessCamera + switch challenge.type { + case .faceMovementAndLightChallenge: + cameraPosition = challengeOptions.faceMovementAndLightChallengeOption.camera + case .faceMovementChallenge: + cameraPosition = challengeOptions.faceMovementChallengeOption.camera + } + let newState = disableStartView ? 
DisplayState.displayingLiveness - : DisplayState.displayingGetReadyView(challenge) + : DisplayState.displayingGetReadyView(challenge, cameraPosition) guard self.displayState != newState else { return } self.displayState = newState } } } - case .displayingGetReadyView(let challenge): + case .displayingGetReadyView(let challenge, let cameraPosition): GetReadyPageView( onBegin: { guard displayState != .displayingLiveness else { return } displayState = .displayingLiveness }, beginCheckButtonDisabled: false, - challenge: challenge + challenge: challenge, + cameraPosition: cameraPosition ) .onAppear { DispatchQueue.main.async { @@ -246,7 +246,7 @@ public struct FaceLivenessDetectorView: View { for: .video, completionHandler: { accessGranted in guard accessGranted == true else { return } - guard let challenge = viewModel.challenge else { return } + guard let challenge = viewModel.challengeReceived else { return } displayState = .awaitingLivenessSession(challenge) } ) @@ -265,7 +265,7 @@ public struct FaceLivenessDetectorView: View { case .restricted, .denied: alertCameraAccessNeeded() case .authorized: - guard let challenge = viewModel.challenge else { return } + guard let challenge = viewModel.challengeReceived else { return } displayState = .awaitingLivenessSession(challenge) @unknown default: break @@ -276,7 +276,7 @@ public struct FaceLivenessDetectorView: View { enum DisplayState: Equatable { case awaitingChallengeType case awaitingLivenessSession(Challenge) - case displayingGetReadyView(Challenge) + case displayingGetReadyView(Challenge, LivenessCamera) case displayingLiveness case awaitingCameraPermission @@ -286,8 +286,8 @@ enum DisplayState: Equatable { return true case (let .awaitingLivenessSession(c1), let .awaitingLivenessSession(c2)): return c1.type == c2.type && c1.version == c2.version - case (let .displayingGetReadyView(c1), let .displayingGetReadyView(c2)): - return c1.type == c2.type && c1.version == c2.version + case (let .displayingGetReadyView(c1, position1), let .displayingGetReadyView(c2, position2)): + return c1.type == c2.type && c1.version == c2.version && position1 == position2 case (.displayingLiveness, .displayingLiveness): return true case (.awaitingCameraPermission, .awaitingCameraPermission): @@ -331,3 +331,39 @@ private func map(detectionCompletion: @escaping (Result Void let videoChunker: VideoChunker let sessionID: String @@ -35,7 +35,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { var hasSentFirstVideo = false var layerRectConverted: (CGRect) -> CGRect = { $0 } var sessionConfiguration: FaceLivenessSession.SessionConfiguration? - var challenge: Challenge? + var challengeReceived: Challenge? var normalizeFace: (DetectedFace) -> DetectedFace = { $0 } var provideSingleFrame: ((UIImage) -> Void)? var cameraViewRect = CGRect.zero @@ -44,6 +44,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { var initialClientEvent: InitialClientEvent? var faceMatchedTimestamp: UInt64? var noFitStartTime: Date? 
+ let challengeOptions: ChallengeOptions static var attemptCount: Int = 0 static var attemptIdTimeStamp: Date = Date() @@ -59,21 +60,21 @@ class FaceLivenessDetectionViewModel: ObservableObject { init( faceDetector: FaceDetector, faceInOvalMatching: FaceInOvalMatching, - captureSession: LivenessCaptureSession, videoChunker: VideoChunker, stateMachine: LivenessStateMachine = .init(state: .initial), closeButtonAction: @escaping () -> Void, sessionID: String, - isPreviewScreenEnabled: Bool + isPreviewScreenEnabled: Bool, + challengeOptions: ChallengeOptions ) { self.closeButtonAction = closeButtonAction self.videoChunker = videoChunker self.livenessState = stateMachine self.sessionID = sessionID - self.captureSession = captureSession self.faceDetector = faceDetector self.faceInOvalMatching = faceInOvalMatching self.isPreviewScreenEnabled = isPreviewScreenEnabled + self.challengeOptions = challengeOptions self.closeButtonAction = { [weak self] in guard let self else { return } @@ -123,7 +124,8 @@ class FaceLivenessDetectionViewModel: ObservableObject { livenessService?.register( listener: { [weak self] _challenge in - self?.challenge = _challenge + self?.challengeReceived = _challenge + self?.configureCaptureSession(challenge: _challenge) onChallengeTypeReceived(_challenge) }, on: .challenge) @@ -138,16 +140,16 @@ class FaceLivenessDetectionViewModel: ObservableObject { } func startSession() { - captureSession.startSession() + captureSession?.startSession() } func stopRecording() { - captureSession.stopRunning() + captureSession?.stopRunning() } func configureCamera(withinFrame frame: CGRect) -> CALayer? { do { - let avLayer = try captureSession.configureCamera(frame: frame) + let avLayer = try captureSession?.configureCamera(frame: frame) DispatchQueue.main.async { self.livenessState.checkIsFacePrepared() } @@ -203,7 +205,8 @@ class FaceLivenessDetectionViewModel: ObservableObject { try livenessService?.initializeLivenessStream( withSessionID: sessionID, userAgent: UserAgentValues.standard().userAgentString, - challenges: FaceLivenessSession.supportedChallenges, + challenges: [challengeOptions.faceMovementChallengeOption.challenge, + challengeOptions.faceMovementAndLightChallengeOption.challenge], options: .init( attemptCount: Self.attemptCount, preCheckViewEnabled: isPreviewScreenEnabled) @@ -252,7 +255,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { videoStartTime: UInt64 ) { guard initialClientEvent == nil else { return } - guard let challenge else { return } + guard let challengeReceived else { return } videoChunker.start() @@ -272,8 +275,8 @@ class FaceLivenessDetectionViewModel: ObservableObject { do { try livenessService?.send( .initialFaceDetected(event: _initialClientEvent, - challenge: .init(version: challenge.version, - type: challenge.type)), + challenge: .init(version: challengeReceived.version, + type: challengeReceived.type)), eventDate: { .init() } ) } catch { @@ -292,7 +295,7 @@ class FaceLivenessDetectionViewModel: ObservableObject { let sessionConfiguration, let initialClientEvent, let faceMatchedTimestamp, - let challenge + let challengeReceived else { return } let finalClientEvent = FinalClientEvent( @@ -307,8 +310,8 @@ class FaceLivenessDetectionViewModel: ObservableObject { do { try livenessService?.send( .final(event: finalClientEvent, - challenge: .init(version: challenge.version, - type: challenge.type)), + challenge: .init(version: challengeReceived.version, + type: challengeReceived.type)), eventDate: { .init() } ) @@ -401,6 +404,29 @@ class 
FaceLivenessDetectionViewModel: ObservableObject { } return data } + + func configureCaptureSession(challenge: Challenge) { + let cameraPosition: LivenessCamera + switch challenge.type { + case .faceMovementChallenge: + cameraPosition = challengeOptions.faceMovementChallengeOption.camera + case .faceMovementAndLightChallenge: + cameraPosition = challengeOptions.faceMovementAndLightChallengeOption.camera + } + + let avCaptureDevice = AVCaptureDevice.default( + .builtInWideAngleCamera, + for: .video, + position: cameraPosition == .front ? .front : .back) + + self.captureSession = LivenessCaptureSession( + captureDevice: .init(avCaptureDevice: avCaptureDevice), + outputDelegate: OutputSampleBufferCapturer( + faceDetector: self.faceDetector, + videoChunker: self.videoChunker + ) + ) + } } extension FaceLivenessDetectionViewModel: FaceDetectionSessionConfigurationWrapper { } diff --git a/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift b/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift index e61f8311..62e563ef 100644 --- a/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift +++ b/Sources/FaceLiveness/Views/Liveness/LivenessStateMachine.swift @@ -165,7 +165,7 @@ struct LivenessStateMachine { static let couldNotOpenStream = LivenessError(code: 5, webSocketCloseCode: .unexpectedRuntimeError) static let socketClosed = LivenessError(code: 6, webSocketCloseCode: .normalClosure) static let viewResignation = LivenessError(code: 8, webSocketCloseCode: .viewClosure) - static let cameraNotAvailable = LivenessError(code: 9, webSocketCloseCode: .missingVideoPermission) + static let cameraNotAvailable = LivenessError(code: 9, webSocketCloseCode: .unexpectedRuntimeError) static func == (lhs: LivenessError, rhs: LivenessError) -> Bool { lhs.code == rhs.code diff --git a/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift b/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift index 3c1dabbf..396ef60a 100644 --- a/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift +++ b/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift @@ -38,11 +38,12 @@ final class CredentialsProviderTestCase: XCTestCase { let viewModel = FaceLivenessDetectionViewModel( faceDetector: faceDetector, faceInOvalMatching: .init(instructor: .init()), - captureSession: captureSession, videoChunker: videoChunker, closeButtonAction: {}, sessionID: UUID().uuidString, - isPreviewScreenEnabled: false + isPreviewScreenEnabled: false, + challengeOptions: .init(faceMovementChallengeOption: .init(camera: .front), + faceMovementAndLightChallengeOption: .init()) ) self.videoChunker = videoChunker @@ -66,6 +67,8 @@ final class CredentialsProviderTestCase: XCTestCase { sessionID: UUID().uuidString, credentialsProvider: credentialsProvider, region: "us-east-1", + challengeOptions: .init(faceMovementChallengeOption: .init(camera: .front), + faceMovementAndLightChallengeOption: .init()), isPresented: .constant(true), onCompletion: { _ in } ) @@ -102,6 +105,8 @@ final class CredentialsProviderTestCase: XCTestCase { sessionID: UUID().uuidString, credentialsProvider: credentialsProvider, region: "us-east-1", + challengeOptions: .init(faceMovementChallengeOption: .init(camera: .front), + faceMovementAndLightChallengeOption: .init()), isPresented: .constant(true), onCompletion: { _ in } ) diff --git a/Tests/FaceLivenessTests/LivenessTests.swift b/Tests/FaceLivenessTests/LivenessTests.swift index 5603914a..89db7315 100644 --- a/Tests/FaceLivenessTests/LivenessTests.swift +++ 
b/Tests/FaceLivenessTests/LivenessTests.swift @@ -29,11 +29,12 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase { let viewModel = FaceLivenessDetectionViewModel( faceDetector: faceDetector, faceInOvalMatching: .init(instructor: .init()), - captureSession: captureSession, videoChunker: videoChunker, closeButtonAction: {}, sessionID: UUID().uuidString, - isPreviewScreenEnabled: false + isPreviewScreenEnabled: false, + challengeOptions: .init(faceMovementChallengeOption: .init(camera: .front), + faceMovementAndLightChallengeOption: .init()) ) self.videoChunker = videoChunker @@ -70,7 +71,7 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase { /// Then: The end state of this flow is `.faceMatched` func testHappyPathToMatchedFace() async throws { viewModel.livenessService = self.livenessService - viewModel.challenge = Challenge(version: "2.0.0", type: .faceMovementAndLightChallenge) + viewModel.challengeReceived = Challenge(version: "2.0.0", type: .faceMovementAndLightChallenge) viewModel.livenessState.checkIsFacePrepared() XCTAssertEqual(viewModel.livenessState.state, .pendingFacePreparedConfirmation(.pendingCheck)) @@ -115,7 +116,7 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase { /// Then: The end state of this flow is `.recording(ovalDisplayed: false)` func testTransitionToRecordingState() async throws { viewModel.livenessService = self.livenessService - viewModel.challenge = Challenge(version: "2.0.0", type: .faceMovementAndLightChallenge) + viewModel.challengeReceived = Challenge(version: "2.0.0", type: .faceMovementAndLightChallenge) let face = FaceLivenessSession.OvalMatchChallenge.Face( distanceThreshold: 0.32, From e6d4feae7b89049f2dc989ad43bdf53ac3e9c831 Mon Sep 17 00:00:00 2001 From: Abhash Kumar Singh Date: Mon, 23 Jun 2025 12:14:29 -0700 Subject: [PATCH 08/10] address review comments --- .../xcshareddata/swiftpm/Package.resolved | 2 +- HostApp/HostApp/Model/LivenessResult.swift | 15 +++- .../LivenessResultContentView+Result.swift | 2 +- HostApp/HostApp/Views/StartSessionView.swift | 34 +------- Package.resolved | 2 +- .../FaceDetectorShortRange+Model.swift | 9 +- .../Utilities/FinalClientEvent+Init.swift | 10 ++- .../CameraPermissionView.swift | 2 +- .../Views/GetReadyPage/GetReadyPageView.swift | 4 +- .../InstructionContainerView.swift | 2 +- .../Liveness/FaceLivenessDetectionView.swift | 83 +++++++++++-------- ...ViewModel+FaceDetectionResultHandler.swift | 30 +++++-- .../FaceLivenessDetectionViewModel.swift | 37 ++++++--- .../Liveness/LivenessViewController.swift | 5 +- .../CredentialsProviderTestCase.swift | 7 -- Tests/FaceLivenessTests/LivenessTests.swift | 13 +-- 16 files changed, 137 insertions(+), 120 deletions(-) diff --git a/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved index d267f453..692d2da3 100644 --- a/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved +++ b/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved @@ -6,7 +6,7 @@ "location" : "https://github.com/aws-amplify/amplify-swift", "state" : { "branch" : "feat/no-light-support", - "revision" : "ae1d96a5eb37ab0f2bfcc9e836a2055c88cf8a01" + "revision" : "21bc17c0438b8390edcf532e3ef51f1c25c25749" } }, { diff --git a/HostApp/HostApp/Model/LivenessResult.swift b/HostApp/HostApp/Model/LivenessResult.swift index 3a36f089..2d76773b 100644 --- a/HostApp/HostApp/Model/LivenessResult.swift 
+++ b/HostApp/HostApp/Model/LivenessResult.swift @@ -12,7 +12,7 @@ struct LivenessResult: Codable { let auditImageBytes: String? let confidenceScore: Double let isLive: Bool - let challenge: Challenge? + let challenge: Event? } extension LivenessResult: CustomDebugStringConvertible { @@ -22,8 +22,17 @@ extension LivenessResult: CustomDebugStringConvertible { - confidenceScore: \(confidenceScore) - isLive: \(isLive) - auditImageBytes: \(auditImageBytes == nil ? "nil" : "") - - challengeType: \(String(describing: challenge?.type)) - - challengeVersion: \(String(describing: challenge?.version)) + - challenge: type: \(String(describing: challenge?.type)) + " version: " + \(String(describing: challenge?.version)) """ } } + +struct Event: Codable { + let version: String + let type: ChallengeType + + enum CodingKeys: String, CodingKey { + case version = "Version" + case type = "Type" + } +} diff --git a/HostApp/HostApp/Views/LivenessResultContentView+Result.swift b/HostApp/HostApp/Views/LivenessResultContentView+Result.swift index 0b18eaab..e41759ad 100644 --- a/HostApp/HostApp/Views/LivenessResultContentView+Result.swift +++ b/HostApp/HostApp/Views/LivenessResultContentView+Result.swift @@ -16,7 +16,7 @@ extension LivenessResultContentView { let valueBackgroundColor: Color let auditImage: Data? let isLive: Bool - let challenge: Challenge? + let challenge: Event? init(livenessResult: LivenessResult) { guard livenessResult.confidenceScore > 0 else { diff --git a/HostApp/HostApp/Views/StartSessionView.swift b/HostApp/HostApp/Views/StartSessionView.swift index 6905e9b2..9e909a6b 100644 --- a/HostApp/HostApp/Views/StartSessionView.swift +++ b/HostApp/HostApp/Views/StartSessionView.swift @@ -26,7 +26,7 @@ struct StartSessionView: View { ) button( - text: "Create Liveness Session (front camera)", + text: "Create Liveness Session", backgroundColor: .dynamicColors( light: .hex("#047D95"), dark: .hex("#7dd6e8") @@ -35,6 +35,7 @@ struct StartSessionView: View { viewModel.createSession { sessionId, err in if let sessionId = sessionId { sessionID = sessionId + // modify camera preference for `FaceMovementChallenge` containerViewState = .liveness(.front) } @@ -55,37 +56,6 @@ struct StartSessionView: View { ) ) } - - button( - text: "Create Liveness Session (back camera)", - backgroundColor: .dynamicColors( - light: .hex("#047D95"), - dark: .hex("#7dd6e8") - ), - action: { - viewModel.createSession { sessionId, err in - if let sessionId = sessionId { - sessionID = sessionId - containerViewState = .liveness(.back) - } - - showAlert = err != nil - } - }, - enabled: viewModel.isSignedIn - ) - .alert(isPresented: $showAlert) { - Alert( - title: Text("Error Creating Liveness Session"), - message: Text("Unable to create a liveness session id. 
Please try again."), - dismissButton: .default( - Text("OK"), - action: { - containerViewState = .startSession - } - ) - ) - } Spacer() HStack { diff --git a/Package.resolved b/Package.resolved index d267f453..692d2da3 100644 --- a/Package.resolved +++ b/Package.resolved @@ -6,7 +6,7 @@ "location" : "https://github.com/aws-amplify/amplify-swift", "state" : { "branch" : "feat/no-light-support", - "revision" : "ae1d96a5eb37ab0f2bfcc9e836a2055c88cf8a01" + "revision" : "21bc17c0438b8390edcf532e3ef51f1c25c25749" } }, { diff --git a/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift b/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift index 100f0418..2e8a6900 100644 --- a/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift +++ b/Sources/FaceLiveness/FaceDetection/BlazeFace/FaceDetectorShortRange+Model.swift @@ -114,14 +114,19 @@ extension FaceDetectorShortRange { let blazeFaceDetectionThreshold: Float if let sessionConfiguration = faceDetectionSessionConfiguration?.sessionConfiguration { - blazeFaceDetectionThreshold = Float(sessionConfiguration.ovalMatchChallenge.faceDetectionThreshold) + switch sessionConfiguration { + case .faceMovement(let ovalMatchChallenge): + blazeFaceDetectionThreshold = Float(ovalMatchChallenge.faceDetectionThreshold) + case .faceMovementAndLight(_, let ovalMatchChallenge): + blazeFaceDetectionThreshold = Float(ovalMatchChallenge.faceDetectionThreshold) + } } else { blazeFaceDetectionThreshold = confidenceScoreThreshold } var passingConfidenceScoresIndices = confidenceScores .enumerated() - .filter { $0.element >= blazeFaceDetectionThreshold} + .filter { $0.element >= blazeFaceDetectionThreshold} .sorted(by: { $0.element > $1.element }) diff --git a/Sources/FaceLiveness/Utilities/FinalClientEvent+Init.swift b/Sources/FaceLiveness/Utilities/FinalClientEvent+Init.swift index ccdf971f..9e864ce2 100644 --- a/Sources/FaceLiveness/Utilities/FinalClientEvent+Init.swift +++ b/Sources/FaceLiveness/Utilities/FinalClientEvent+Init.swift @@ -17,8 +17,14 @@ extension FinalClientEvent { faceMatchedEnd: UInt64, videoEnd: UInt64 ) { - let normalizedBoundingBox = sessionConfiguration - .ovalMatchChallenge + let ovalMatchChallenge: FaceLivenessSession.OvalMatchChallenge + switch sessionConfiguration { + case .faceMovement(let challenge): + ovalMatchChallenge = challenge + case .faceMovementAndLight(_, let challenge): + ovalMatchChallenge = challenge + } + let normalizedBoundingBox = ovalMatchChallenge .oval.boundingBox .normalize(within: videoSize) diff --git a/Sources/FaceLiveness/Views/CameraPermission/CameraPermissionView.swift b/Sources/FaceLiveness/Views/CameraPermission/CameraPermissionView.swift index e5edbf3f..ee9f3966 100644 --- a/Sources/FaceLiveness/Views/CameraPermission/CameraPermissionView.swift +++ b/Sources/FaceLiveness/Views/CameraPermission/CameraPermissionView.swift @@ -17,7 +17,7 @@ struct CameraPermissionView: View { } var body: some View { - VStack(alignment: .leading) { + VStack(alignment: .center) { Spacer() VStack { Text(LocalizedStrings.camera_permission_change_setting_header) diff --git a/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift b/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift index 0142ca13..5f5279e9 100644 --- a/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift +++ b/Sources/FaceLiveness/Views/GetReadyPage/GetReadyPageView.swift @@ -37,7 +37,7 @@ struct GetReadyPageView: View { popoverContent: { 
photosensitivityWarningPopoverContent } ) .accessibilityElement(children: .combine) - .opacity(challenge.type == .faceMovementAndLightChallenge ? 1.0 : 0.0) + .opacity(challenge == Challenge.faceMovementAndLightChallenge("2.0.0") ? 1.0 : 0.0) Text(LocalizedStrings.preview_center_your_face_text) .font(.title) .multilineTextAlignment(.center) @@ -82,7 +82,7 @@ struct GetReadyPageView_Previews: PreviewProvider { static var previews: some View { GetReadyPageView( onBegin: {}, - challenge: .init(version: "2.0.0", type: .faceMovementAndLightChallenge), + challenge: .faceMovementAndLightChallenge("2.0.0"), cameraPosition: .front) } } diff --git a/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift b/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift index 01dcd37f..0a4e0367 100644 --- a/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift +++ b/Sources/FaceLiveness/Views/Instruction/InstructionContainerView.swift @@ -111,7 +111,7 @@ struct InstructionContainerView: View { } case .faceMatched: if let challenge = viewModel.challengeReceived, - case .faceMovementAndLightChallenge = challenge.type { + case .faceMovementAndLightChallenge = challenge { InstructionView( text: LocalizedStrings.challenge_instruction_hold_still, backgroundColor: .livenessPrimaryBackground, diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift index 8f5e1af4..8f3374d8 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionView.swift @@ -125,11 +125,32 @@ public struct FaceLivenessDetectorView: View { let session = try await sessionTask.value viewModel.livenessService = session viewModel.registerServiceEvents(onChallengeTypeReceived: { challenge in - self.displayState = DisplayState.awaitingLivenessSession(challenge) + self.displayState = DisplayState.awaitingCameraPermission(challenge) }) viewModel.initializeLivenessStream() + } catch let error as FaceLivenessDetectionError { + switch error { + case .unknown: + viewModel.livenessState.unrecoverableStateEncountered(.unknown) + case .sessionTimedOut, + .faceInOvalMatchExceededTimeLimitError, + .countdownFaceTooClose, + .countdownMultipleFaces, + .countdownNoFace: + viewModel.livenessState.unrecoverableStateEncountered(.timedOut) + case .cameraPermissionDenied: + viewModel.livenessState.unrecoverableStateEncountered(.missingVideoPermission) + case .userCancelled: + viewModel.livenessState.unrecoverableStateEncountered(.userCancelled) + case .socketClosed: + viewModel.livenessState.unrecoverableStateEncountered(.socketClosed) + case .cameraNotAvailable: + viewModel.livenessState.unrecoverableStateEncountered(.cameraNotAvailable) + default: + viewModel.livenessState.unrecoverableStateEncountered(.couldNotOpenStream) + } } catch { - throw FaceLivenessDetectionError.accessDenied + viewModel.livenessState.unrecoverableStateEncountered(.couldNotOpenStream) } DispatchQueue.main.async { @@ -150,28 +171,30 @@ public struct FaceLivenessDetectorView: View { break } } + case .awaitingCameraPermission(let challenge): + CameraPermissionView(displayingCameraPermissionsNeededAlert: $displayingCameraPermissionsNeededAlert) + .onAppear { + checkCameraPermission(for: challenge) + } case .awaitingLivenessSession(let challenge): Color.clear .onAppear { Task { - do { - let cameraPosition: LivenessCamera - switch challenge.type { - case 
.faceMovementAndLightChallenge: - cameraPosition = challengeOptions.faceMovementAndLightChallengeOption.camera - case .faceMovementChallenge: - cameraPosition = challengeOptions.faceMovementChallengeOption.camera - } - - let newState = disableStartView - ? DisplayState.displayingLiveness - : DisplayState.displayingGetReadyView(challenge, cameraPosition) - guard self.displayState != newState else { return } - self.displayState = newState + let cameraPosition: LivenessCamera + switch challenge { + case .faceMovementAndLightChallenge: + cameraPosition = challengeOptions.faceMovementAndLightChallengeOption.camera + case .faceMovementChallenge: + cameraPosition = challengeOptions.faceMovementChallengeOption.camera } + + let newState = disableStartView + ? DisplayState.displayingLiveness + : DisplayState.displayingGetReadyView(challenge, cameraPosition) + guard self.displayState != newState else { return } + self.displayState = newState } } - case .displayingGetReadyView(let challenge, let cameraPosition): GetReadyPageView( onBegin: { @@ -218,11 +241,6 @@ public struct FaceLivenessDetectorView: View { break } } - case .awaitingCameraPermission: - CameraPermissionView(displayingCameraPermissionsNeededAlert: $displayingCameraPermissionsNeededAlert) - .onAppear { - checkCameraPermission() - } } } @@ -232,7 +250,7 @@ public struct FaceLivenessDetectorView: View { return .userCancelled case .timedOut: return .faceInOvalMatchExceededTimeLimitError - case .socketClosed: + case .couldNotOpenStream, .socketClosed: return .socketClosed case .cameraNotAvailable: return .cameraNotAvailable @@ -241,31 +259,28 @@ public struct FaceLivenessDetectorView: View { } } - private func requestCameraPermission() { + private func requestCameraPermission(for challenge: Challenge) { AVCaptureDevice.requestAccess( for: .video, completionHandler: { accessGranted in guard accessGranted == true else { return } - guard let challenge = viewModel.challengeReceived else { return } displayState = .awaitingLivenessSession(challenge) } ) - } private func alertCameraAccessNeeded() { displayingCameraPermissionsNeededAlert = true } - private func checkCameraPermission() { + private func checkCameraPermission(for challenge: Challenge) { let cameraAuthorizationStatus = AVCaptureDevice.authorizationStatus(for: .video) switch cameraAuthorizationStatus { case .notDetermined: - requestCameraPermission() + requestCameraPermission(for: challenge) case .restricted, .denied: alertCameraAccessNeeded() case .authorized: - guard let challenge = viewModel.challengeReceived else { return } displayState = .awaitingLivenessSession(challenge) @unknown default: break @@ -275,19 +290,19 @@ public struct FaceLivenessDetectorView: View { enum DisplayState: Equatable { case awaitingChallengeType + case awaitingCameraPermission(Challenge) case awaitingLivenessSession(Challenge) case displayingGetReadyView(Challenge, LivenessCamera) case displayingLiveness - case awaitingCameraPermission static func == (lhs: DisplayState, rhs: DisplayState) -> Bool { switch (lhs, rhs) { case (.awaitingChallengeType, .awaitingChallengeType): return true case (let .awaitingLivenessSession(c1), let .awaitingLivenessSession(c2)): - return c1.type == c2.type && c1.version == c2.version + return c1 == c2 case (let .displayingGetReadyView(c1, position1), let .displayingGetReadyView(c2, position2)): - return c1.type == c2.type && c1.version == c2.version && position1 == position2 + return c1 == c2 && position1 == position2 case (.displayingLiveness, .displayingLiveness): return true 
case (.awaitingCameraPermission, .awaitingCameraPermission): @@ -353,7 +368,7 @@ public struct FaceMovementChallengeOption { let camera: LivenessCamera public init(camera: LivenessCamera) { - self.challenge = .init(version: "1.0.0", type: .faceMovementChallenge) + self.challenge = .faceMovementChallenge("1.0.0") self.camera = camera } } @@ -363,7 +378,7 @@ public struct FaceMovementAndLightChallengeOption { let camera: LivenessCamera public init() { - self.challenge = .init(version: "2.0.0", type: .faceMovementAndLightChallenge) + self.challenge = .faceMovementAndLightChallenge("2.0.0") self.camera = .front } } diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift index 64d32c9d..c2e3d032 100644 --- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift +++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift @@ -27,14 +27,26 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler { } } case .singleFace(let face): - var normalizedFace = normalizeFace(face) guard let sessionConfiguration = sessionConfiguration else { return } - normalizedFace.boundingBox = normalizedFace.boundingBoxFromLandmarks(ovalRect: ovalRect, - ovalMatchChallenge: sessionConfiguration.ovalMatchChallenge) + + let ovalMatchChallenge: FaceLivenessSession.OvalMatchChallenge + var colorChallenge: FaceLivenessSession.ColorChallenge? + switch sessionConfiguration { + case .faceMovement(let ovalChallenge): + ovalMatchChallenge = ovalChallenge + case .faceMovementAndLight(let colorSeqChallenge, let ovalChallenge): + colorChallenge = colorSeqChallenge + ovalMatchChallenge = ovalChallenge + } + + var normalizedFace = normalizeFace(face) + normalizedFace.boundingBox = normalizedFace.boundingBoxFromLandmarks( + ovalRect: ovalRect, + ovalMatchChallenge: ovalMatchChallenge) switch livenessState.state { case .pendingFacePreparedConfirmation: - if face.faceDistance <= sessionConfiguration.ovalMatchChallenge.face.distanceThreshold { + if face.faceDistance <= ovalMatchChallenge.face.distanceThreshold { DispatchQueue.main.async { self.livenessState.awaitingRecording() } @@ -59,23 +71,23 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler { let instruction = faceInOvalMatching.faceMatchState( for: normalizedFace.boundingBox, in: ovalRect, - challengeConfig: sessionConfiguration.ovalMatchChallenge + challengeConfig: ovalMatchChallenge ) handleInstruction( instruction, - colorSequences: sessionConfiguration.colorChallenge?.colors + colorSequences: colorChallenge?.colors ) case .awaitingFaceInOvalMatch: let instruction = faceInOvalMatching.faceMatchState( for: normalizedFace.boundingBox, in: ovalRect, - challengeConfig: sessionConfiguration.ovalMatchChallenge + challengeConfig: ovalMatchChallenge ) handleInstruction( instruction, - colorSequences: sessionConfiguration.colorChallenge?.colors + colorSequences: colorChallenge?.colors ) default: break @@ -113,7 +125,7 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler { self.faceMatchedTimestamp = Date().timestampMilliseconds // next step after face match - switch self.challengeReceived?.type { + switch self.challengeReceived{ case .faceMovementAndLightChallenge: if let colorSequences = colorSequences { DispatchQueue.main.asyncAfter(deadline: .now() + 1.0) { diff --git 
a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
index 227849d6..3fb3ab14 100644
--- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
+++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
@@ -50,11 +50,20 @@ class FaceLivenessDetectionViewModel: ObservableObject {
     static var attemptIdTimeStamp: Date = Date()
 
     var noFitTimeoutInterval: TimeInterval {
-        if let sessionTimeoutMilliSec = sessionConfiguration?.ovalMatchChallenge.oval.ovalFitTimeout {
-            return TimeInterval(sessionTimeoutMilliSec/1_000)
-        } else {
+        guard let sessionConfiguration = sessionConfiguration else {
             return defaultNoFitTimeoutInterval
         }
+
+        let ovalMatchChallenge: FaceLivenessSession.OvalMatchChallenge
+        switch sessionConfiguration{
+        case .faceMovement(let challenge):
+            ovalMatchChallenge = challenge
+        case .faceMovementAndLight(_, let challenge):
+            ovalMatchChallenge = challenge
+        }
+
+        let sessionTimeoutMilliSec = ovalMatchChallenge.oval.ovalFitTimeout
+        return TimeInterval(sessionTimeoutMilliSec/1_000)
     }
 
     init(
@@ -166,9 +175,17 @@ class FaceLivenessDetectionViewModel: ObservableObject {
 
     func drawOval(onComplete: @escaping () -> Void) {
         guard livenessState.state == .recording(ovalDisplayed: false),
-              let ovalParameters = sessionConfiguration?.ovalMatchChallenge.oval
-        else { return }
-
+              let sessionConfiguration = sessionConfiguration else { return }
+
+        let ovalMatchChallenge: FaceLivenessSession.OvalMatchChallenge
+        switch sessionConfiguration {
+        case .faceMovement(let challenge):
+            ovalMatchChallenge = challenge
+        case .faceMovementAndLight(_, let challenge):
+            ovalMatchChallenge = challenge
+        }
+
+        let ovalParameters = ovalMatchChallenge.oval
         let scaleRatio = cameraViewRect.width / videoSize.width
         let rect = CGRect(
             x: ovalParameters.boundingBox.x,
@@ -275,8 +292,7 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         do {
             try livenessService?.send(
                 .initialFaceDetected(event: _initialClientEvent,
-                                     challenge: .init(version: challengeReceived.version,
-                                                      type: challengeReceived.type)),
+                                     challenge: challengeReceived),
                 eventDate: { .init() }
             )
         } catch {
@@ -310,8 +326,7 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         do {
             try livenessService?.send(
                 .final(event: finalClientEvent,
-                       challenge: .init(version: challengeReceived.version,
-                                        type: challengeReceived.type)),
+                       challenge: challengeReceived),
                 eventDate: { .init() }
             )
 
@@ -407,7 +422,7 @@ class FaceLivenessDetectionViewModel: ObservableObject {
 
     func configureCaptureSession(challenge: Challenge) {
         let cameraPosition: LivenessCamera
-        switch challenge.type {
+        switch challenge {
         case .faceMovementChallenge:
             cameraPosition = challengeOptions.faceMovementChallengeOption.camera
         case .faceMovementAndLightChallenge:
diff --git a/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift b/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift
index 79b91e44..29f2d686 100644
--- a/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift
+++ b/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift
@@ -105,7 +105,7 @@ final class _LivenessViewController: UIViewController {
     var initialFace: FaceDetection?
     var videoStartTimeStamp: UInt64?
    var faceMatchStartTime: UInt64?
-    var faceGuideRect: CGRect?
+    var faceGuideRect: CGRect!
     var freshnessEventsComplete = false
     var videoSentCount = 0
     var hasSentFinalEvent = false
@@ -175,9 +175,8 @@ extension _LivenessViewController: FaceLivenessViewControllerPresenter {
     }
 
     func completeNoLightCheck() {
-        guard let faceGuideRect = self.faceGuideRect else { return }
         self.viewModel.completeNoLightCheck(
-            faceGuide: faceGuideRect
+            faceGuide: faceGuideRect!
         )
     }
 }
diff --git a/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift b/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift
index 396ef60a..988fcc4f 100644
--- a/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift
+++ b/Tests/FaceLivenessTests/CredentialsProviderTestCase.swift
@@ -27,13 +27,6 @@ final class CredentialsProviderTestCase: XCTestCase {
             assetWriterDelegate: VideoChunker.AssetWriterDelegate(),
             assetWriterInput: LivenessAVAssetWriterInput()
         )
-        let captureSession = LivenessCaptureSession(
-            captureDevice: .init(avCaptureDevice: nil),
-            outputDelegate: OutputSampleBufferCapturer(
-                faceDetector: faceDetector,
-                videoChunker: videoChunker
-            )
-        )
 
         let viewModel = FaceLivenessDetectionViewModel(
             faceDetector: faceDetector,
diff --git a/Tests/FaceLivenessTests/LivenessTests.swift b/Tests/FaceLivenessTests/LivenessTests.swift
index 89db7315..9180841f 100644
--- a/Tests/FaceLivenessTests/LivenessTests.swift
+++ b/Tests/FaceLivenessTests/LivenessTests.swift
@@ -18,13 +18,6 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase {
             assetWriterDelegate: VideoChunker.AssetWriterDelegate(),
             assetWriterInput: LivenessAVAssetWriterInput()
         )
-        let captureSession = LivenessCaptureSession(
-            captureDevice: .init(avCaptureDevice: nil),
-            outputDelegate: OutputSampleBufferCapturer(
-                faceDetector: faceDetector,
-                videoChunker: videoChunker
-            )
-        )
 
         let viewModel = FaceLivenessDetectionViewModel(
             faceDetector: faceDetector,
@@ -71,7 +64,7 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase {
     /// Then: The end state of this flow is `.faceMatched`
     func testHappyPathToMatchedFace() async throws {
         viewModel.livenessService = self.livenessService
-        viewModel.challengeReceived = Challenge(version: "2.0.0", type: .faceMovementAndLightChallenge)
+        viewModel.challengeReceived = .faceMovementAndLightChallenge("2.0.0")
 
         viewModel.livenessState.checkIsFacePrepared()
         XCTAssertEqual(viewModel.livenessState.state, .pendingFacePreparedConfirmation(.pendingCheck))
@@ -116,7 +109,7 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase {
     /// Then: The end state of this flow is `.recording(ovalDisplayed: false)`
     func testTransitionToRecordingState() async throws {
         viewModel.livenessService = self.livenessService
-        viewModel.challengeReceived = Challenge(version: "2.0.0", type: .faceMovementAndLightChallenge)
+        viewModel.challengeReceived = .faceMovementChallenge("1.0.0")
 
         let face = FaceLivenessSession.OvalMatchChallenge.Face(
             distanceThreshold: 0.32,
@@ -136,7 +129,7 @@ final class FaceLivenessDetectionViewModelTestCase: XCTestCase {
                                                             iouHeightThreshold: 0.1,
                                                             ovalFitTimeout: 1)
 
-        viewModel.sessionConfiguration = .init(ovalMatchChallenge: .init(faceDetectionThreshold: 0.7,
+        viewModel.sessionConfiguration = .faceMovement(.init(faceDetectionThreshold: 0.7,
                                                                          face: face,
                                                                          oval: oval))
 

From 52c89707c1e77a562c697f324e18250a03f4f081 Mon Sep 17 00:00:00 2001
From: Abhash Kumar Singh
Date: Tue, 24 Jun 2025 10:44:07 -0700
Subject: [PATCH 09/10] remove unused property

---
 .../Liveness/FaceLivenessDetectionViewModel.swift |  9 ++-------
 .../Views/Liveness/LivenessViewController.swift   | 10 ++--------
 2 files changed, 4 insertions(+), 15 deletions(-)

diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
index 3fb3ab14..ee49368b 100644
--- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
+++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
@@ -40,7 +40,6 @@ class FaceLivenessDetectionViewModel: ObservableObject {
     var provideSingleFrame: ((UIImage) -> Void)?
     var cameraViewRect = CGRect.zero
     var ovalRect = CGRect.zero
-    var faceGuideRect: CGRect!
     var initialClientEvent: InitialClientEvent?
     var faceMatchedTimestamp: UInt64?
     var noFitStartTime: Date?
@@ -303,7 +302,6 @@ class FaceLivenessDetectionViewModel: ObservableObject {
     }
 
     func sendFinalEvent(
-        targetFaceRect: CGRect,
         viewSize: CGSize,
         faceMatchedEnd: UInt64
     ) {
@@ -345,7 +343,6 @@ class FaceLivenessDetectionViewModel: ObservableObject {
 
     func sendFinalVideoEvent() {
         sendFinalEvent(
-            targetFaceRect: faceGuideRect,
             viewSize: videoSize,
             faceMatchedEnd: Date().timestampMilliseconds
         )
@@ -355,17 +352,15 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         }
     }
 
-    func handleFreshnessComplete(faceGuide: CGRect) {
+    func handleFreshnessComplete() {
         DispatchQueue.main.async {
             self.livenessState.completedDisplayingFreshness()
-            self.faceGuideRect = faceGuide
         }
     }
 
-    func completeNoLightCheck(faceGuide: CGRect) {
+    func completeNoLightCheck() {
         DispatchQueue.main.async {
             self.livenessState.completedNoLightCheck()
-            self.faceGuideRect = faceGuide
         }
     }
 
diff --git a/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift b/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift
index 29f2d686..35952c12 100644
--- a/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift
+++ b/Sources/FaceLiveness/Views/Liveness/LivenessViewController.swift
@@ -105,7 +105,6 @@ final class _LivenessViewController: UIViewController {
     var initialFace: FaceDetection?
     var videoStartTimeStamp: UInt64?
     var faceMatchStartTime: UInt64?
-    var faceGuideRect: CGRect!
     var freshnessEventsComplete = false
     var videoSentCount = 0
     var hasSentFinalEvent = false
@@ -146,9 +145,7 @@ extension _LivenessViewController: FaceLivenessViewControllerPresenter {
                 guard let self else { return }
 
                 self.freshnessView.removeFromSuperview()
-                self.viewModel.handleFreshnessComplete(
-                    faceGuide: self.faceGuideRect!
-                )
+                self.viewModel.handleFreshnessComplete()
             }
         )
     }
@@ -156,7 +153,6 @@ extension _LivenessViewController: FaceLivenessViewControllerPresenter {
     func drawOvalInCanvas(_ ovalRect: CGRect) {
         DispatchQueue.main.async {
             guard let previewLayer = self.previewLayer else { return }
-            self.faceGuideRect = ovalRect
 
             let ovalView = OvalView(
                 frame: previewLayer.frame,
@@ -175,8 +171,6 @@ extension _LivenessViewController: FaceLivenessViewControllerPresenter {
     }
 
     func completeNoLightCheck() {
-        self.viewModel.completeNoLightCheck(
-            faceGuide: faceGuideRect!
-        )
+        self.viewModel.completeNoLightCheck()
     }
 }

From a00e8fa82bf4a3a5b1d038187db22edecaa4ce33 Mon Sep 17 00:00:00 2001
From: Abhash Kumar Singh
Date: Mon, 30 Jun 2025 13:07:43 -0700
Subject: [PATCH 10/10] update amplify dependency

---
 .../project.xcworkspace/xcshareddata/swiftpm/Package.resolved | 4 ++--
 Package.resolved                                              | 4 ++--
 Package.swift                                                 | 3 +--
 3 files changed, 5 insertions(+), 6 deletions(-)

diff --git a/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved b/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
index 692d2da3..a47e606b 100644
--- a/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
+++ b/HostApp/HostApp.xcodeproj/project.xcworkspace/xcshareddata/swiftpm/Package.resolved
@@ -5,8 +5,8 @@
       "kind" : "remoteSourceControl",
       "location" : "https://github.com/aws-amplify/amplify-swift",
       "state" : {
-        "branch" : "feat/no-light-support",
-        "revision" : "21bc17c0438b8390edcf532e3ef51f1c25c25749"
+        "revision" : "7fa7abdb9daf25bdd97cc4fbcdd0d5a5cc9c4bf1",
+        "version" : "2.49.0"
       }
     },
     {
diff --git a/Package.resolved b/Package.resolved
index 692d2da3..a47e606b 100644
--- a/Package.resolved
+++ b/Package.resolved
@@ -5,8 +5,8 @@
       "kind" : "remoteSourceControl",
       "location" : "https://github.com/aws-amplify/amplify-swift",
       "state" : {
-        "branch" : "feat/no-light-support",
-        "revision" : "21bc17c0438b8390edcf532e3ef51f1c25c25749"
+        "revision" : "7fa7abdb9daf25bdd97cc4fbcdd0d5a5cc9c4bf1",
+        "version" : "2.49.0"
      }
     },
     {
diff --git a/Package.swift b/Package.swift
index 446f12c0..5eb3fae2 100644
--- a/Package.swift
+++ b/Package.swift
@@ -13,8 +13,7 @@ let package = Package(
             targets: ["FaceLiveness"]),
     ],
     dependencies: [
-        // TODO: Change this before merge to main
-        .package(url: "https://github.com/aws-amplify/amplify-swift", branch: "feat/no-light-support")
+        .package(url: "https://github.com/aws-amplify/amplify-swift", exact: "2.49.0")
     ],
     targets: [
         .target(