From ac1cd7f02547c53111988c9408cff24f0a447c18 Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Tue, 12 Nov 2024 07:37:00 +0100 Subject: [PATCH 01/18] decouple selfieviewmodel from livenesscheckmanager --- .../FaceDetector/LivenessCheckManager.swift | 20 +++++--- .../SelfieCapture/SelfieViewModelAction.swift | 5 +- .../SelfieCapture/SelfieViewModelV2.swift | 50 +++++++++++++------ 3 files changed, 48 insertions(+), 27 deletions(-) diff --git a/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift b/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift index 9805aa48..4cde8880 100644 --- a/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift +++ b/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift @@ -8,15 +8,19 @@ enum LivenessTask { case lookUp } +protocol LivenessCheckManagerDelegate: AnyObject { + func didCompleteLivenessTask() + func didCompleteLivenessChallenge() + func livenessChallengeTimeout() +} + class LivenessCheckManager: ObservableObject { /// The sequence of liveness tasks to be performed. private var livenessTaskSequence: [LivenessTask] = [] /// The index pointing to the current task in the sequence. private var currentTaskIndex: Int = 0 - /// The view model associated with the selfie capture process. - weak var selfieViewModel: SelfieViewModelV2? - /// A closure to trigger photo capture during the liveness check. - var captureImage: (() -> Void)? + + weak var delegate: LivenessCheckManagerDelegate? // MARK: Constants /// The minimum threshold for yaw (left-right head movement) @@ -88,7 +92,8 @@ class LivenessCheckManager: ObservableObject { /// Handles the timeout event for a task. private func handleTaskTimeout() { stopTaskTimer() - selfieViewModel?.perform(action: .activeLivenessTimeout) + // Capture random liveness images to complete required number: + delegate?.livenessChallengeTimeout() } /// Advances to the next task in the sequence @@ -160,12 +165,11 @@ class LivenessCheckManager: ObservableObject { /// Completes the current task and moves to the next one. /// If all tasks are completed, it signals the completion of the liveness challenge. 
private func completeCurrentTask() { - captureImage?() - captureImage?() + delegate?.didCompleteLivenessTask() if !advanceToNextTask() { // Liveness challenge complete - selfieViewModel?.perform(action: .activeLivenessCompleted) + delegate?.didCompleteLivenessChallenge() self.currentTask = nil } } diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift index 579803a9..7b9c1c15 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelAction.swift @@ -5,10 +5,7 @@ enum SelfieViewModelAction { case onViewAppear case windowSizeDetected(CGSize, EdgeInsets) - // Face Detection Actions - case activeLivenessCompleted - case activeLivenessTimeout - + // Job Submission Actions case jobProcessingDone case retryJobSubmission diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift index 8057d48f..d97dd8d5 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift @@ -15,15 +15,16 @@ public class SelfieViewModelV2: ObservableObject { // MARK: Private Properties private var faceLayoutGuideFrame = CGRect(x: 0, y: 0, width: 250, height: 350) private var elapsedGuideAnimationDelay: TimeInterval = 0 + private var currentFrameBuffer: CVPixelBuffer? var selfieImage: UIImage? - var selfieImageURL: URL? { + private var selfieImageURL: URL? { didSet { DispatchQueue.main.async { self.selfieCaptured = self.selfieImage != nil } } } - var livenessImages: [URL] = [] + private var livenessImages: [URL] = [] private var hasDetectedValidFace: Bool = false private var shouldBeginLivenessChallenge: Bool { hasDetectedValidFace && selfieImage != nil && livenessCheckManager.currentTask != nil @@ -106,7 +107,7 @@ public class SelfieViewModelV2: ObservableObject { private func initialSetup() { self.faceValidator.delegate = self self.faceDetector.resultDelegate = self - self.livenessCheckManager.selfieViewModel = self + self.livenessCheckManager.delegate = self self.faceValidator.setLayoutGuideFrame(with: faceLayoutGuideFrame) self.userInstruction = .headInFrame @@ -144,15 +145,12 @@ public class SelfieViewModelV2: ObservableObject { } private func analyzeFrame(imageBuffer: CVPixelBuffer) { + currentFrameBuffer = imageBuffer faceDetector.processImageBuffer(imageBuffer) if hasDetectedValidFace && selfieImage == nil { captureSelfieImage(imageBuffer) livenessCheckManager.initiateLivenessCheck() } - - livenessCheckManager.captureImage = { [weak self] in - self?.captureLivenessImage(imageBuffer) - } } // MARK: Actions @@ -160,13 +158,6 @@ public class SelfieViewModelV2: ObservableObject { switch action { case let .windowSizeDetected(windowRect, safeAreaInsets): handleWindowSizeChanged(toRect: windowRect, edgeInsets: safeAreaInsets) - case .activeLivenessCompleted: - self.cameraManager.pauseSession() - handleSubmission() - case .activeLivenessTimeout: - self.forcedFailure = true - self.cameraManager.pauseSession() - handleSubmission() case .onViewAppear: handleViewAppeared() case .jobProcessingDone: @@ -271,7 +262,7 @@ extension SelfieViewModelV2 { } private func handleError(_ error: Error) { - print(error.localizedDescription) + debugPrint(error.localizedDescription) } private func handleSubmission() { @@ -329,6 +320,35 @@ extension SelfieViewModelV2: FaceValidatorDelegate { } } +// MARK: 
LivenessCheckManagerDelegate Methods +extension SelfieViewModelV2: LivenessCheckManagerDelegate { + func didCompleteLivenessTask() { + // capture liveness image twice + guard let imageBuffer = currentFrameBuffer else { return } + captureLivenessImage(imageBuffer) + captureLivenessImage(imageBuffer) + } + + func didCompleteLivenessChallenge() { + self.cameraManager.pauseSession() + handleSubmission() + } + + func livenessChallengeTimeout() { + let remainingImages = numLivenessImages - livenessImages.count + let count = remainingImages > 0 ? remainingImages : 0 + for _ in 0.. Date: Tue, 12 Nov 2024 11:17:58 +0100 Subject: [PATCH 02/18] improve object references so to prevent retain cycles. --- .../SelfieSubmissionManager.swift | 22 ++++++++++--------- .../SelfieCapture/SelfieViewModelV2.swift | 21 +++++++++++------- .../View/SelfieCaptureScreenV2.swift | 4 ++-- Sources/SmileID/Classes/Util.swift | 4 ++++ 4 files changed, 31 insertions(+), 20 deletions(-) diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieSubmissionManager.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieSubmissionManager.swift index 7b7d5c7a..347a91fb 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieSubmissionManager.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieSubmissionManager.swift @@ -12,7 +12,7 @@ final class SelfieSubmissionManager { private let isEnroll: Bool private let numLivenessImages: Int private let allowNewEnroll: Bool - private var selfieImage: URL? + private var selfieImageUrl: URL? private var livenessImages: [URL] private var extraPartnerParams: [String: String] private let localMetadata: LocalMetadata @@ -26,7 +26,7 @@ final class SelfieSubmissionManager { isEnroll: Bool, numLivenessImages: Int, allowNewEnroll: Bool, - selfieImage: URL?, + selfieImageUrl: URL?, livenessImages: [URL], extraPartnerParams: [String: String], localMetadata: LocalMetadata @@ -36,7 +36,7 @@ final class SelfieSubmissionManager { self.isEnroll = isEnroll self.numLivenessImages = numLivenessImages self.allowNewEnroll = allowNewEnroll - self.selfieImage = selfieImage + self.selfieImageUrl = selfieImageUrl self.livenessImages = livenessImages self.extraPartnerParams = extraPartnerParams self.localMetadata = localMetadata @@ -83,11 +83,11 @@ final class SelfieSubmissionManager { private func validateImages(forcedFailure: Bool) throws { if forcedFailure { - guard selfieImage != nil else { + guard selfieImageUrl != nil else { throw SmileIDError.unknown("Selfie capture failed") } } else { - guard selfieImage != nil, livenessImages.count == numLivenessImages else { + guard selfieImageUrl != nil, livenessImages.count == numLivenessImages else { throw SmileIDError.unknown("Selfie capture failed") } } @@ -119,8 +119,8 @@ final class SelfieSubmissionManager { } private func prepareImagesForSubmission() throws -> (MultipartBody, [MultipartBody]) { - guard let smartSelfieImage = createMultipartBody(from: selfieImage) else { - throw SmileIDError.unknown("Failed to process selfie image") + guard let smartSelfieImage = createMultipartBody(from: selfieImageUrl) else { + throw SmileIDError.fileNotFound("Could not create multipart body for file") } let smartSelfieLivenessImages = livenessImages.compactMap { @@ -136,7 +136,9 @@ final class SelfieSubmissionManager { private func createMultipartBody(from fileURL: URL?) -> MultipartBody? { guard let fileURL = fileURL, let imageData = try? 
Data(contentsOf: fileURL) - else { return nil } + else { + return nil + } return MultipartBody( withImage: imageData, forKey: fileURL.lastPathComponent, @@ -187,7 +189,7 @@ final class SelfieSubmissionManager { try LocalStorage.moveToSubmittedJobs(jobId: self.jobId) // Update the references to the submitted selfie and liveness images - self.selfieImage = try LocalStorage.getFileByType( + self.selfieImageUrl = try LocalStorage.getFileByType( jobId: jobId, fileType: FileType.selfie, submitted: true @@ -204,7 +206,7 @@ final class SelfieSubmissionManager { do { let didMove = try LocalStorage.handleOfflineJobFailure(jobId: self.jobId, error: smileIDError) if didMove { - self.selfieImage = try LocalStorage.getFileByType(jobId: jobId, fileType: .selfie, submitted: true) + self.selfieImageUrl = try LocalStorage.getFileByType(jobId: jobId, fileType: .selfie, submitted: true) self.livenessImages = try LocalStorage.getFilesByType(jobId: jobId, fileType: .liveness, submitted: true) ?? [] } diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift index d97dd8d5..c2c75c26 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift @@ -32,6 +32,7 @@ public class SelfieViewModelV2: ObservableObject { private var shouldSubmitJob: Bool { selfieImage != nil && livenessImages.count == numLivenessImages } + private var submissionTask: Task? private var forcedFailure: Bool = false private var apiResponse: SmartSelfieResponse? private var error: Error? @@ -102,6 +103,8 @@ public class SelfieViewModelV2: ObservableObject { deinit { stopGuideAnimationDelayTimer() + submissionTask?.cancel() + submissionTask = nil } private func initialSetup() { @@ -128,7 +131,7 @@ public class SelfieViewModelV2: ObservableObject { .receive(on: DispatchQueue.main) .filter { $0 == .unauthorized } .map { _ in AlertState.cameraUnauthorized } - .sink { alert in self.unauthorizedAlert = alert } + .sink { [weak self] alert in self?.unauthorizedAlert = alert } .store(in: &subscribers) cameraManager.sampleBufferPublisher @@ -140,7 +143,9 @@ public class SelfieViewModelV2: ObservableObject { // Drop the first ~2 seconds to allow the user to settle in .dropFirst(5) .compactMap { $0 } - .sink(receiveValue: analyzeFrame) + .sink { [weak self] imageBuffer in + self?.analyzeFrame(imageBuffer: imageBuffer) + } .store(in: &subscribers) } @@ -215,7 +220,6 @@ extension SelfieViewModelV2 { private func handleWindowSizeChanged(toRect: CGSize, edgeInsets: EdgeInsets) { let topPadding: CGFloat = edgeInsets.top + 100 - print(edgeInsets.top) faceLayoutGuideFrame = CGRect( x: (toRect.width / 2) - faceLayoutGuideFrame.width / 2, y: topPadding, @@ -269,7 +273,7 @@ extension SelfieViewModelV2 { DispatchQueue.main.async { self.selfieCaptureState = .processing(.inProgress) } - Task { + submissionTask = Task { try await submitJob() } } @@ -305,7 +309,6 @@ extension SelfieViewModelV2: FaceDetectorResultDelegate { DispatchQueue.main.async { self.publishUserInstruction(.headInFrame) } - print(error.localizedDescription) } } @@ -330,8 +333,10 @@ extension SelfieViewModelV2: LivenessCheckManagerDelegate { } func didCompleteLivenessChallenge() { - self.cameraManager.pauseSession() - handleSubmission() + DispatchQueue.main.asyncAfter(deadline: .now() + 1) { + self.cameraManager.pauseSession() + self.handleSubmission() + } } func livenessChallengeTimeout() { @@ -367,7 +372,7 @@ extension SelfieViewModelV2: 
SelfieSubmissionDelegate { isEnroll: self.isEnroll, numLivenessImages: self.numLivenessImages, allowNewEnroll: self.allowNewEnroll, - selfieImage: self.selfieImageURL, + selfieImageUrl: self.selfieImageURL, livenessImages: self.livenessImages, extraPartnerParams: self.extraPartnerParams, localMetadata: self.localMetadata diff --git a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift index 3822a43c..1eb21739 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift @@ -72,10 +72,10 @@ public struct SelfieCaptureScreenV2: View { Spacer() UserInstructionsView( instruction: processingState.title, - message: getErrorSubtitle( + message: processingState == .error ? getErrorSubtitle( errorMessageRes: viewModel.errorMessageRes, errorMessage: viewModel.errorMessage - ) + ) : nil ) } SubmissionStatusView(processState: processingState) diff --git a/Sources/SmileID/Classes/Util.swift b/Sources/SmileID/Classes/Util.swift index 64563c11..bb2c23b3 100644 --- a/Sources/SmileID/Classes/Util.swift +++ b/Sources/SmileID/Classes/Util.swift @@ -79,6 +79,10 @@ func toErrorMessage(error: SmileIDError) -> (String, String?) { return (error.localizedDescription, nil) case let .httpError(_, message): return ("", message) + case let .fileNotFound(message): + return (message, nil) + case let .unknown(message): + return (message, nil) default: return ("Confirmation.FailureReason", nil) } From 64441dfbf0a330b15aaf3c6c48ad8baea88a9b17 Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Wed, 13 Nov 2024 12:45:59 +0100 Subject: [PATCH 03/18] write custom encoding function for failure reason, replace forced failure with failure reason enum. append failure reason data to multipart form request body. --- .../Networking/Models/FailureReason.swift | 14 ++++++++--- .../Classes/Networking/ServiceRunnable.swift | 16 ++++++------ .../SelfieSubmissionManager.swift | 25 ++++++++----------- .../SelfieCapture/SelfieViewModelV2.swift | 8 +++--- 4 files changed, 31 insertions(+), 32 deletions(-) diff --git a/Sources/SmileID/Classes/Networking/Models/FailureReason.swift b/Sources/SmileID/Classes/Networking/Models/FailureReason.swift index 8d901a64..f68bcd02 100644 --- a/Sources/SmileID/Classes/Networking/Models/FailureReason.swift +++ b/Sources/SmileID/Classes/Networking/Models/FailureReason.swift @@ -1,11 +1,17 @@ import Foundation -public enum FailureReason { - case activeLivenessTimedOut +public enum FailureReason: Encodable { + case mobileActiveLivenessTimeout - var key: String { + private enum CodingKeys: String, CodingKey { + case mobileActiveLivenessTimeout = "mobile_active_liveness_timed_out" + } + + public func encode(to encoder: any Encoder) throws { + var container = encoder.container(keyedBy: CodingKeys.self) switch self { - case .activeLivenessTimedOut: return "mobile_active_liveness_timed_out" + case .mobileActiveLivenessTimeout: + try container.encode(true, forKey: .mobileActiveLivenessTimeout) } } } diff --git a/Sources/SmileID/Classes/Networking/ServiceRunnable.swift b/Sources/SmileID/Classes/Networking/ServiceRunnable.swift index 16a94463..567e92f7 100644 --- a/Sources/SmileID/Classes/Networking/ServiceRunnable.swift +++ b/Sources/SmileID/Classes/Networking/ServiceRunnable.swift @@ -337,15 +337,13 @@ extension ServiceRunnable { body.append(lineBreak.data(using: .utf8)!) 
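        // Illustration of the step below (assumed behavior, for clarity): encoding
        // FailureReason.mobileActiveLivenessTimeout with a plain JSONEncoder, via the
        // custom Encodable conformance introduced in this patch, should produce the JSON
        // object {"mobile_active_liveness_timed_out":true}; that object is appended as the
        // body of the "failure_reason" multipart field, declared with Content-Type: application/json.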
// Append failure reason if available - if let failureReason { - let activeLivenessTimedOutString = "\(failureReason == .activeLivenessTimedOut)" - if let valueData = "\(activeLivenessTimedOutString)\(lineBreak)".data(using: .utf8) { - body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) - body.append( - "Content-Disposition: form-data; name=\"\(failureReason.key)\"\(lineBreak + lineBreak)".data( - using: .utf8)!) - body.append(valueData) - } + if let failureReason, + let failureReasonData = try? encoder.encode(failureReason) { + body.append("--\(boundary)\(lineBreak)".data(using: .utf8)!) + body.append("Content-Disposition: form-data; name=\"failure_reason\"\(lineBreak)".data(using: .utf8)!) + body.append("Content-Type: application/json\(lineBreak + lineBreak)".data(using: .utf8)!) + body.append(failureReasonData) + body.append(lineBreak.data(using: .utf8)!) } // Append final boundary diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieSubmissionManager.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieSubmissionManager.swift index 347a91fb..e470dbfa 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieSubmissionManager.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieSubmissionManager.swift @@ -42,10 +42,10 @@ final class SelfieSubmissionManager { self.localMetadata = localMetadata } - func submitJob(forcedFailure: Bool = false) async throws { + func submitJob(failureReason: FailureReason? = nil) async throws { do { // Validate that the necessary selfie data is present - try validateImages(forcedFailure: forcedFailure) + try validateImages() // Determine the type of job (enrollment or authentication) let jobType = determineJobType() @@ -68,7 +68,7 @@ final class SelfieSubmissionManager { authResponse: authResponse, smartSelfieImage: smartSelfieImage, smartSelfieLivenessImages: smartSelfieLivenessImages, - forcedFailure: forcedFailure + failureReason: failureReason ) // Update local storage after successful submission @@ -81,15 +81,10 @@ final class SelfieSubmissionManager { } } - private func validateImages(forcedFailure: Bool) throws { - if forcedFailure { - guard selfieImageUrl != nil else { - throw SmileIDError.unknown("Selfie capture failed") - } - } else { - guard selfieImageUrl != nil, livenessImages.count == numLivenessImages else { - throw SmileIDError.unknown("Selfie capture failed") - } + private func validateImages() throws { + guard selfieImageUrl != nil, + livenessImages.count == numLivenessImages else { + throw SmileIDError.unknown("Selfie capture failed") } } @@ -150,7 +145,7 @@ final class SelfieSubmissionManager { authResponse: AuthenticationResponse, smartSelfieImage: MultipartBody, smartSelfieLivenessImages: [MultipartBody], - forcedFailure: Bool + failureReason: FailureReason? ) async throws -> SmartSelfieResponse { if isEnroll { return try await SmileID.api @@ -164,7 +159,7 @@ final class SelfieSubmissionManager { callbackUrl: SmileID.callbackUrl, sandboxResult: nil, allowNewEnroll: allowNewEnroll, - failureReason: forcedFailure ? .activeLivenessTimedOut : nil, + failureReason: failureReason, metadata: localMetadata.metadata ) } else { @@ -178,7 +173,7 @@ final class SelfieSubmissionManager { partnerParams: extraPartnerParams, callbackUrl: SmileID.callbackUrl, sandboxResult: nil, - failureReason: forcedFailure ? 
.activeLivenessTimedOut : nil, + failureReason: failureReason, metadata: localMetadata.metadata ) } diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift index c2c75c26..c94dacc8 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift @@ -33,7 +33,7 @@ public class SelfieViewModelV2: ObservableObject { selfieImage != nil && livenessImages.count == numLivenessImages } private var submissionTask: Task? - private var forcedFailure: Bool = false + private var failureReason: FailureReason? private var apiResponse: SmartSelfieResponse? private var error: Error? @Published public var errorMessageRes: String? @@ -215,7 +215,7 @@ extension SelfieViewModelV2 { selfieImage = nil livenessImages = [] selfieCaptureState = .capturingSelfie - forcedFailure = false + failureReason = nil } private func handleWindowSizeChanged(toRect: CGSize, edgeInsets: EdgeInsets) { @@ -348,7 +348,7 @@ extension SelfieViewModelV2: LivenessCheckManagerDelegate { } } - self.forcedFailure = true + self.failureReason = .mobileActiveLivenessTimeout self.cameraManager.pauseSession() handleSubmission() } @@ -378,7 +378,7 @@ extension SelfieViewModelV2: SelfieSubmissionDelegate { localMetadata: self.localMetadata ) submissionManager.delegate = self - try await submissionManager.submitJob(forcedFailure: self.forcedFailure) + try await submissionManager.submitJob(failureReason: self.failureReason) } private func addSelfieCaptureDurationMetaData() { From ab190d31f526de022a0909ae2dac3ab7be53f1ff Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Wed, 13 Nov 2024 12:58:19 +0100 Subject: [PATCH 04/18] check that submission task is nil before assigning it. --- Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift | 1 + 1 file changed, 1 insertion(+) diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift index c94dacc8..4f713d9c 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift @@ -273,6 +273,7 @@ extension SelfieViewModelV2 { DispatchQueue.main.async { self.selfieCaptureState = .processing(.inProgress) } + guard submissionTask == nil else { return } submissionTask = Task { try await submitJob() } From 1c9fe5a3bb4710d3964398a7b504d611a6586550 Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Wed, 13 Nov 2024 13:42:12 +0100 Subject: [PATCH 05/18] remove unnecessary comment --- Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift | 1 - 1 file changed, 1 deletion(-) diff --git a/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift b/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift index 4cde8880..f858a6f2 100644 --- a/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift +++ b/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift @@ -92,7 +92,6 @@ class LivenessCheckManager: ObservableObject { /// Handles the timeout event for a task. 
private func handleTaskTimeout() { stopTaskTimer() - // Capture random liveness images to complete required number: delegate?.livenessChallengeTimeout() } From 5120a91f298489dd73c7df0a355ba42da237a8a3 Mon Sep 17 00:00:00 2001 From: JNdhlovu Date: Mon, 18 Nov 2024 12:20:31 +0200 Subject: [PATCH 06/18] feat: update changelog (#254) * feat: update changelog * chore: lint fix --- CHANGELOG.md | 3 + Example/Podfile.lock | 4 +- SmileID.podspec | 4 +- ...stratedDocumentVerificationViewModel.swift | 61 +++--- ...chestratedDocumentVerificationScreen.swift | 185 +++++++++--------- Sources/SmileID/Classes/SmileID.swift | 22 ++- 6 files changed, 155 insertions(+), 124 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cb0024d0..126fb736 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,8 @@ # Release Notes +## 10.2.17 +### Added skipApiSubmission: Whether to skip api submission to SmileID and return only captured images on SmartSelfie enrollment, SmartSelfie authentic , Document verification and Enhanced DocV + ## 10.2.16 ### Fixed * Clear images on retry or start capture with the same jobId diff --git a/Example/Podfile.lock b/Example/Podfile.lock index 2f4db733..f9273bac 100644 --- a/Example/Podfile.lock +++ b/Example/Podfile.lock @@ -12,7 +12,7 @@ PODS: - Sentry (8.36.0): - Sentry/Core (= 8.36.0) - Sentry/Core (8.36.0) - - SmileID (10.2.16): + - SmileID (10.2.17): - FingerprintJS - lottie-ios (~> 4.4.2) - ZIPFoundation (~> 0.9) @@ -51,7 +51,7 @@ SPEC CHECKSUMS: lottie-ios: fcb5e73e17ba4c983140b7d21095c834b3087418 netfox: 9d5cc727fe7576c4c7688a2504618a156b7d44b7 Sentry: f8374b5415bc38dfb5645941b3ae31230fbeae57 - SmileID: 3c6d3101c7da84fe9acc36c10d2a189192f00d13 + SmileID: 44fef36001a02aa7362368e8a3f1127c03751166 SwiftLint: 3fe909719babe5537c552ee8181c0031392be933 ZIPFoundation: b8c29ea7ae353b309bc810586181fd073cb3312c diff --git a/SmileID.podspec b/SmileID.podspec index 774f949e..9e78b83c 100644 --- a/SmileID.podspec +++ b/SmileID.podspec @@ -1,11 +1,11 @@ Pod::Spec.new do |s| s.name = 'SmileID' - s.version = '10.2.16' + s.version = '10.2.17' s.summary = 'The Official Smile Identity iOS SDK.' 
s.homepage = 'https://docs.usesmileid.com/integration-options/mobile/ios-v10-beta' s.license = { :type => 'MIT', :file => 'LICENSE' } s.author = { 'Japhet' => 'japhet@usesmileid.com', 'Juma Allan' => 'juma@usesmileid.com', 'Vansh Gandhi' => 'vansh@usesmileid.com'} - s.source = { :git => "https://github.com/smileidentity/ios.git", :tag => "v10.2.16" } + s.source = { :git => "https://github.com/smileidentity/ios.git", :tag => "v10.2.17" } s.ios.deployment_target = '13.0' s.dependency 'ZIPFoundation', '~> 0.9' s.dependency 'FingerprintJS' diff --git a/Sources/SmileID/Classes/DocumentVerification/Model/OrchestratedDocumentVerificationViewModel.swift b/Sources/SmileID/Classes/DocumentVerification/Model/OrchestratedDocumentVerificationViewModel.swift index b54ec201..75425e7a 100644 --- a/Sources/SmileID/Classes/DocumentVerification/Model/OrchestratedDocumentVerificationViewModel.swift +++ b/Sources/SmileID/Classes/DocumentVerification/Model/OrchestratedDocumentVerificationViewModel.swift @@ -8,26 +8,27 @@ enum DocumentCaptureFlow: Equatable { case processing(ProcessingState) } -internal class IOrchestratedDocumentVerificationViewModel: ObservableObject { +class IOrchestratedDocumentVerificationViewModel: ObservableObject { // Input properties - internal let userId: String - internal let jobId: String - internal let allowNewEnroll: Bool - internal let countryCode: String - internal let documentType: String? - internal let captureBothSides: Bool - internal let jobType: JobType - internal let extraPartnerParams: [String: String] + let userId: String + let jobId: String + let allowNewEnroll: Bool + let countryCode: String + let documentType: String? + let captureBothSides: Bool + let skipApiSubmission: Bool + let jobType: JobType + let extraPartnerParams: [String: String] // Other properties - internal var documentFrontFile: Data? - internal var documentBackFile: Data? - internal var selfieFile: URL? - internal var livenessFiles: [URL]? - internal var savedFiles: DocumentCaptureResultStore? - internal var stepToRetry: DocumentCaptureFlow? - internal var didSubmitJob: Bool = false - internal var error: Error? + var documentFrontFile: Data? + var documentBackFile: Data? + var selfieFile: URL? + var livenessFiles: [URL]? + var savedFiles: DocumentCaptureResultStore? + var stepToRetry: DocumentCaptureFlow? + var didSubmitJob: Bool = false + var error: Error? var localMetadata: LocalMetadata // UI properties @@ -39,13 +40,14 @@ internal class IOrchestratedDocumentVerificationViewModel: Obse @Published var errorMessage: String? @Published var step = DocumentCaptureFlow.frontDocumentCapture - internal init( + init( userId: String, jobId: String, allowNewEnroll: Bool, countryCode: String, documentType: String?, captureBothSides: Bool, + skipApiSubmission: Bool, selfieFile: URL?, jobType: JobType, extraPartnerParams: [String: String] = [:], @@ -57,6 +59,7 @@ internal class IOrchestratedDocumentVerificationViewModel: Obse self.countryCode = countryCode self.documentType = documentType self.captureBothSides = captureBothSides + self.skipApiSubmission = skipApiSubmission self.selfieFile = selfieFile self.jobType = jobType self.extraPartnerParams = extraPartnerParams @@ -177,6 +180,10 @@ internal class IOrchestratedDocumentVerificationViewModel: Obse selfie: selfieFile, livenessImages: livenessFiles ?? 
[] ) + if skipApiSubmission { + DispatchQueue.main.async { self.step = .processing(.success) } + return + } let authRequest = AuthenticationRequest( jobType: jobType, enrollment: false, @@ -197,7 +204,7 @@ internal class IOrchestratedDocumentVerificationViewModel: Obse let authResponse = try await SmileID.api.authenticate(request: authRequest) let prepUploadRequest = PrepUploadRequest( partnerParams: authResponse.partnerParams.copy(extras: self.extraPartnerParams), - allowNewEnroll: String(allowNewEnroll), // TODO: - Fix when Michael changes this to boolean + allowNewEnroll: String(allowNewEnroll), // TODO: - Fix when Michael changes this to boolean metadata: localMetadata.metadata.items, timestamp: authResponse.timestamp, signature: authResponse.signature @@ -321,14 +328,14 @@ extension IOrchestratedDocumentVerificationViewModel: SmartSelfieResultDelegate } // swiftlint:disable opening_brace -internal class OrchestratedDocumentVerificationViewModel: +class OrchestratedDocumentVerificationViewModel: IOrchestratedDocumentVerificationViewModel { override func onFinished(delegate: DocumentVerificationResultDelegate) { if let savedFiles, - let selfiePath = getRelativePath(from: selfieFile), - let documentFrontPath = getRelativePath(from: savedFiles.documentFront), - let documentBackPath = getRelativePath(from: savedFiles.documentBack) + let selfiePath = getRelativePath(from: selfieFile), + let documentFrontPath = getRelativePath(from: savedFiles.documentFront), + let documentBackPath = getRelativePath(from: savedFiles.documentBack) { delegate.didSucceed( selfie: selfiePath, @@ -347,16 +354,16 @@ internal class OrchestratedDocumentVerificationViewModel: } // swiftlint:disable opening_brace -internal class OrchestratedEnhancedDocumentVerificationViewModel: +class OrchestratedEnhancedDocumentVerificationViewModel: IOrchestratedDocumentVerificationViewModel< EnhancedDocumentVerificationResultDelegate, EnhancedDocumentVerificationJobResult > { override func onFinished(delegate: EnhancedDocumentVerificationResultDelegate) { if let savedFiles, - let selfiePath = getRelativePath(from: selfieFile), - let documentFrontPath = getRelativePath(from: savedFiles.documentFront), - let documentBackPath = getRelativePath(from: savedFiles.documentBack) + let selfiePath = getRelativePath(from: selfieFile), + let documentFrontPath = getRelativePath(from: savedFiles.documentFront), + let documentBackPath = getRelativePath(from: savedFiles.documentBack) { delegate.didSucceed( selfie: selfiePath, diff --git a/Sources/SmileID/Classes/DocumentVerification/View/OrchestratedDocumentVerificationScreen.swift b/Sources/SmileID/Classes/DocumentVerification/View/OrchestratedDocumentVerificationScreen.swift index 648b81cf..c6695876 100644 --- a/Sources/SmileID/Classes/DocumentVerification/View/OrchestratedDocumentVerificationScreen.swift +++ b/Sources/SmileID/Classes/DocumentVerification/View/OrchestratedDocumentVerificationScreen.swift @@ -14,9 +14,10 @@ struct OrchestratedDocumentVerificationScreen: View { let allowGalleryUpload: Bool let allowAgentMode: Bool let showInstructions: Bool + let skipApiSubmission: Bool let extraPartnerParams: [String: String] let onResult: DocumentVerificationResultDelegate - + var body: some View { IOrchestratedDocumentVerificationScreen( countryCode: countryCode, @@ -31,6 +32,7 @@ struct OrchestratedDocumentVerificationScreen: View { allowGalleryUpload: allowGalleryUpload, allowAgentMode: allowAgentMode, showInstructions: showInstructions, + skipApiSubmission: skipApiSubmission, 
extraPartnerParams: extraPartnerParams, onResult: onResult, viewModel: OrchestratedDocumentVerificationViewModel( @@ -40,6 +42,7 @@ struct OrchestratedDocumentVerificationScreen: View { countryCode: countryCode, documentType: documentType, captureBothSides: captureBothSides, + skipApiSubmission: skipApiSubmission, selfieFile: bypassSelfieCaptureWithFile, jobType: .documentVerification, extraPartnerParams: extraPartnerParams, @@ -63,9 +66,10 @@ struct OrchestratedEnhancedDocumentVerificationScreen: View { let allowGalleryUpload: Bool let allowAgentMode: Bool let showInstructions: Bool + let skipApiSubmission: Bool let extraPartnerParams: [String: String] let onResult: EnhancedDocumentVerificationResultDelegate - + var body: some View { IOrchestratedDocumentVerificationScreen( countryCode: countryCode, @@ -80,6 +84,7 @@ struct OrchestratedEnhancedDocumentVerificationScreen: View { allowGalleryUpload: allowGalleryUpload, allowAgentMode: allowAgentMode, showInstructions: showInstructions, + skipApiSubmission: skipApiSubmission, extraPartnerParams: extraPartnerParams, onResult: onResult, viewModel: OrchestratedEnhancedDocumentVerificationViewModel( @@ -89,6 +94,7 @@ struct OrchestratedEnhancedDocumentVerificationScreen: View { countryCode: countryCode, documentType: documentType, captureBothSides: captureBothSides, + skipApiSubmission: skipApiSubmission, selfieFile: bypassSelfieCaptureWithFile, jobType: .enhancedDocumentVerification, extraPartnerParams: extraPartnerParams, @@ -111,10 +117,11 @@ private struct IOrchestratedDocumentVerificationScreen: View { let allowGalleryUpload: Bool let allowAgentMode: Bool let showInstructions: Bool + let skipApiSubmission: Bool var extraPartnerParams: [String: String] let onResult: T @ObservedObject var viewModel: IOrchestratedDocumentVerificationViewModel - + init( countryCode: String, documentType: String?, @@ -128,6 +135,7 @@ private struct IOrchestratedDocumentVerificationScreen: View { allowGalleryUpload: Bool, allowAgentMode: Bool, showInstructions: Bool, + skipApiSubmission: Bool, extraPartnerParams: [String: String], onResult: T, viewModel: IOrchestratedDocumentVerificationViewModel @@ -144,97 +152,98 @@ private struct IOrchestratedDocumentVerificationScreen: View { self.allowGalleryUpload = allowGalleryUpload self.allowAgentMode = allowAgentMode self.showInstructions = showInstructions + self.skipApiSubmission = skipApiSubmission self.extraPartnerParams = extraPartnerParams self.onResult = onResult self.viewModel = viewModel } - + var body: some View { switch viewModel.step { - case .frontDocumentCapture: - DocumentCaptureScreen( - side: .front, - showInstructions: showInstructions, - showAttribution: showAttribution, - allowGallerySelection: allowGalleryUpload, - showSkipButton: false, - instructionsHeroImage: SmileIDResourcesHelper.DocVFrontHero, - instructionsTitleText: SmileIDResourcesHelper.localizedString( - for: "Instructions.Document.Front.Header" - ), - instructionsSubtitleText: SmileIDResourcesHelper.localizedString( - for: "Instructions.Document.Front.Callout" - ), - captureTitleText: SmileIDResourcesHelper.localizedString(for: "Action.TakePhoto"), - knownIdAspectRatio: idAspectRatio, - onConfirm: viewModel.onFrontDocumentImageConfirmed, - onError: viewModel.onError - ) - case .backDocumentCapture: - DocumentCaptureScreen( - side: .back, - showInstructions: showInstructions, - showAttribution: showAttribution, - allowGallerySelection: allowGalleryUpload, - showSkipButton: false, - instructionsHeroImage: 
SmileIDResourcesHelper.DocVBackHero, - instructionsTitleText: SmileIDResourcesHelper.localizedString( - for: "Instructions.Document.Back.Header" - ), - instructionsSubtitleText: SmileIDResourcesHelper.localizedString( - for: "Instructions.Document.Back.Callout" - ), - captureTitleText: SmileIDResourcesHelper.localizedString(for: "Action.TakePhoto"), - knownIdAspectRatio: idAspectRatio, - onConfirm: viewModel.onBackDocumentImageConfirmed, - onError: viewModel.onError, - onSkip: viewModel.onDocumentBackSkip - ) - case .selfieCapture: - OrchestratedSelfieCaptureScreen( - userId: userId, - jobId: jobId, - isEnroll: false, - allowNewEnroll: allowNewEnroll, - allowAgentMode: allowAgentMode, - showAttribution: showAttribution, - showInstructions: showInstructions, - extraPartnerParams: extraPartnerParams, - skipApiSubmission: true, - onResult: viewModel - ) - case let .processing(state): - ProcessingScreen( - processingState: state, - inProgressTitle: SmileIDResourcesHelper.localizedString( - for: "Document.Processing.Header" - ), - inProgressSubtitle: SmileIDResourcesHelper.localizedString( - for: "Document.Processing.Callout" - ), - inProgressIcon: SmileIDResourcesHelper.DocumentProcessing, - successTitle: SmileIDResourcesHelper.localizedString( - for: "Document.Complete.Header" - ), - successSubtitle: SmileIDResourcesHelper.localizedString( - for: $viewModel.errorMessageRes.wrappedValue ?? "Document.Complete.Callout" - ), - successIcon: SmileIDResourcesHelper.CheckBold, - errorTitle: SmileIDResourcesHelper.localizedString(for: "Document.Error.Header"), - errorSubtitle: getErrorSubtitle( - errorMessageRes: $viewModel.errorMessageRes.wrappedValue, - errorMessage: $viewModel.errorMessage.wrappedValue - ), - errorIcon: SmileIDResourcesHelper.Scan, - continueButtonText: SmileIDResourcesHelper.localizedString( - for: "Confirmation.Continue" - ), - onContinue: { viewModel.onFinished(delegate: onResult) }, - retryButtonText: SmileIDResourcesHelper.localizedString(for: "Confirmation.Retry"), - onRetry: viewModel.onRetry, - closeButtonText: SmileIDResourcesHelper.localizedString(for: "Confirmation.Close"), - onClose: { viewModel.onFinished(delegate: onResult) } - ) + case .frontDocumentCapture: + DocumentCaptureScreen( + side: .front, + showInstructions: showInstructions, + showAttribution: showAttribution, + allowGallerySelection: allowGalleryUpload, + showSkipButton: false, + instructionsHeroImage: SmileIDResourcesHelper.DocVFrontHero, + instructionsTitleText: SmileIDResourcesHelper.localizedString( + for: "Instructions.Document.Front.Header" + ), + instructionsSubtitleText: SmileIDResourcesHelper.localizedString( + for: "Instructions.Document.Front.Callout" + ), + captureTitleText: SmileIDResourcesHelper.localizedString(for: "Action.TakePhoto"), + knownIdAspectRatio: idAspectRatio, + onConfirm: viewModel.onFrontDocumentImageConfirmed, + onError: viewModel.onError + ) + case .backDocumentCapture: + DocumentCaptureScreen( + side: .back, + showInstructions: showInstructions, + showAttribution: showAttribution, + allowGallerySelection: allowGalleryUpload, + showSkipButton: false, + instructionsHeroImage: SmileIDResourcesHelper.DocVBackHero, + instructionsTitleText: SmileIDResourcesHelper.localizedString( + for: "Instructions.Document.Back.Header" + ), + instructionsSubtitleText: SmileIDResourcesHelper.localizedString( + for: "Instructions.Document.Back.Callout" + ), + captureTitleText: SmileIDResourcesHelper.localizedString(for: "Action.TakePhoto"), + knownIdAspectRatio: idAspectRatio, + onConfirm: 
viewModel.onBackDocumentImageConfirmed, + onError: viewModel.onError, + onSkip: viewModel.onDocumentBackSkip + ) + case .selfieCapture: + OrchestratedSelfieCaptureScreen( + userId: userId, + jobId: jobId, + isEnroll: false, + allowNewEnroll: allowNewEnroll, + allowAgentMode: allowAgentMode, + showAttribution: showAttribution, + showInstructions: showInstructions, + extraPartnerParams: extraPartnerParams, + skipApiSubmission: true, + onResult: viewModel + ) + case let .processing(state): + ProcessingScreen( + processingState: state, + inProgressTitle: SmileIDResourcesHelper.localizedString( + for: "Document.Processing.Header" + ), + inProgressSubtitle: SmileIDResourcesHelper.localizedString( + for: "Document.Processing.Callout" + ), + inProgressIcon: SmileIDResourcesHelper.DocumentProcessing, + successTitle: SmileIDResourcesHelper.localizedString( + for: "Document.Complete.Header" + ), + successSubtitle: SmileIDResourcesHelper.localizedString( + for: $viewModel.errorMessageRes.wrappedValue ?? "Document.Complete.Callout" + ), + successIcon: SmileIDResourcesHelper.CheckBold, + errorTitle: SmileIDResourcesHelper.localizedString(for: "Document.Error.Header"), + errorSubtitle: getErrorSubtitle( + errorMessageRes: $viewModel.errorMessageRes.wrappedValue, + errorMessage: $viewModel.errorMessage.wrappedValue + ), + errorIcon: SmileIDResourcesHelper.Scan, + continueButtonText: SmileIDResourcesHelper.localizedString( + for: "Confirmation.Continue" + ), + onContinue: { viewModel.onFinished(delegate: onResult) }, + retryButtonText: SmileIDResourcesHelper.localizedString(for: "Confirmation.Retry"), + onRetry: viewModel.onRetry, + closeButtonText: SmileIDResourcesHelper.localizedString(for: "Confirmation.Close"), + onClose: { viewModel.onFinished(delegate: onResult) } + ) } } } diff --git a/Sources/SmileID/Classes/SmileID.swift b/Sources/SmileID/Classes/SmileID.swift index b56618e0..1f48c71c 100644 --- a/Sources/SmileID/Classes/SmileID.swift +++ b/Sources/SmileID/Classes/SmileID.swift @@ -6,7 +6,7 @@ import UIKit public class SmileID { /// The default value for `timeoutIntervalForRequest` for URLSession default configuration. public static let defaultRequestTimeout: TimeInterval = 60 - public static let version = "10.2.16" + public static let version = "10.2.17" @Injected var injectedApi: SmileIDServiceable public static var configuration: Config { config } @@ -28,7 +28,8 @@ public class SmileID { /// A private static constant that initializes a `URLSession` with a default configuration. /// This `URLSession` is used for creating `URLSessionDataTask`s in the networking layer. - /// The session configuration sets the timeout interval for requests to the value specified by `SmileID.requestTimeout`. + /// The session configuration sets the timeout interval for requests to the value specified by + /// `SmileID.requestTimeout`. /// /// - Returns: A `URLSession` instance with the specified configuration. 
private static let urlSession: URLSession = { @@ -207,7 +208,7 @@ public class SmileID { LocalStorage.getFileByType(jobId: jobId, fileType: .selfie), LocalStorage.getFileByType(jobId: jobId, fileType: .documentFront), LocalStorage.getFileByType(jobId: jobId, fileType: .documentBack), - LocalStorage.getInfoJsonFile(jobId: jobId), + LocalStorage.getInfoJsonFile(jobId: jobId) ].compactMap { $0 } allFiles = livenessFiles + additionalFiles } catch { @@ -289,6 +290,7 @@ public class SmileID { /// - showAttribution: Whether to show the Smile ID attribution or not on the Instructions /// screen /// - showInstructions: Whether to deactivate capture screen's instructions for SmartSelfie. + /// - skipApiSubmission: Whether to skip api submission to SmileID and return only captured images /// - extraPartnerParams: Custom values specific to partners /// - delegate: Callback to be invoked when the SmartSelfie™ Enrollment is complete. public class func smartSelfieEnrollmentScreen( @@ -298,6 +300,7 @@ public class SmileID { allowAgentMode: Bool = false, showAttribution: Bool = true, showInstructions: Bool = true, + skipApiSubmission: Bool = false, extraPartnerParams: [String: String] = [:], delegate: SmartSelfieResultDelegate ) -> some View { @@ -310,7 +313,7 @@ public class SmileID { showAttribution: showAttribution, showInstructions: showInstructions, extraPartnerParams: extraPartnerParams, - skipApiSubmission: false, + skipApiSubmission: skipApiSubmission, onResult: delegate ) } @@ -333,6 +336,7 @@ public class SmileID { /// - showAttribution: Whether to show the Smile ID attribution or not on the Instructions /// screen /// - showInstructions: Whether to deactivate capture screen's instructions for SmartSelfie. + /// - skipApiSubmission: Whether to skip api submission to SmileID and return only captured images /// - extraPartnerParams: Custom values specific to partners /// - delegate: Callback to be invoked when the SmartSelfie™ Authentication is complete. public class func smartSelfieAuthenticationScreen( @@ -342,6 +346,7 @@ public class SmileID { allowAgentMode: Bool = false, showAttribution: Bool = true, showInstructions: Bool = true, + skipApiSubmission: Bool = false, extraPartnerParams: [String: String] = [:], delegate: SmartSelfieResultDelegate ) -> some View { @@ -354,7 +359,7 @@ public class SmileID { showAttribution: showAttribution, showInstructions: showInstructions, extraPartnerParams: extraPartnerParams, - skipApiSubmission: false, + skipApiSubmission: skipApiSubmission, onResult: delegate ) } @@ -384,6 +389,7 @@ public class SmileID { /// - showInstructions: Whether to deactivate capture screen's instructions for Document /// Verification (NB! 
If instructions are disabled, gallery upload won't be possible) /// - showAttribution: Whether to show the Smile ID attribution on the Instructions screen + /// - skipApiSubmission: Whether to skip api submission to SmileID and return only captured images /// - extraPartnerParams: Custom values specific to partners /// - delegate: The delegate object that receives the result of the Document Verification public class func documentVerificationScreen( @@ -399,6 +405,7 @@ public class SmileID { allowGalleryUpload: Bool = false, showInstructions: Bool = true, showAttribution: Bool = true, + skipApiSubmission: Bool = false, extraPartnerParams: [String: String] = [:], delegate: DocumentVerificationResultDelegate ) -> some View { @@ -415,6 +422,7 @@ public class SmileID { allowGalleryUpload: allowGalleryUpload, allowAgentMode: allowAgentMode, showInstructions: showInstructions, + skipApiSubmission: skipApiSubmission, extraPartnerParams: extraPartnerParams, onResult: delegate ) @@ -445,6 +453,7 @@ public class SmileID { /// - showInstructions: Whether to deactivate capture screen's instructions for Document /// Verification (NB! If instructions are disabled, gallery upload won't be possible) /// - showAttribution: Whether to show the Smile ID attribution on the Instructions screen + /// - skipApiSubmission: Whether to skip api submission to SmileID and return only captured images /// - extraPartnerParams: Custom values specific to partners /// - delegate: The delegate object that receives the result of the Document Verification public class func enhancedDocumentVerificationScreen( @@ -459,6 +468,7 @@ public class SmileID { allowAgentMode: Bool = false, allowGalleryUpload: Bool = false, showInstructions: Bool = true, + skipApiSubmission: Bool = false, showAttribution: Bool = true, extraPartnerParams: [String: String] = [:], delegate: EnhancedDocumentVerificationResultDelegate @@ -476,6 +486,7 @@ public class SmileID { allowGalleryUpload: allowGalleryUpload, allowAgentMode: allowAgentMode, showInstructions: showInstructions, + skipApiSubmission: skipApiSubmission, extraPartnerParams: extraPartnerParams, onResult: delegate ) @@ -516,6 +527,7 @@ public class SmileID { /// the front camera will be used. /// - showAttribution: Whether to show the Smile ID attribution on the Instructions screen /// - showInstructions: Whether to deactivate capture screen's instructions for SmartSelfie. + /// - skipApiSubmission: Whether to skip api submission to SmileID and return only captured images /// - extraPartnerParams: Custom values specific to partners /// - delegate: Callback to be invoked when the Biometric KYC is complete. 
public class func biometricKycScreen( From c32f0b07b71a41e31675ee75e7eb6114ebf3eaba Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Wed, 20 Nov 2024 10:25:49 +0100 Subject: [PATCH 07/18] fix wrong version set for fingerprintjs package and dependency name causing spm not to resolve (#257) --- Package.resolved | 9 +++++++++ Package.swift | 8 ++++++-- 2 files changed, 15 insertions(+), 2 deletions(-) diff --git a/Package.resolved b/Package.resolved index 1d8b42aa..713e1c05 100644 --- a/Package.resolved +++ b/Package.resolved @@ -1,5 +1,14 @@ { "pins" : [ + { + "identity" : "fingerprintjs-ios", + "kind" : "remoteSourceControl", + "location" : "https://github.com/fingerprintjs/fingerprintjs-ios", + "state" : { + "revision" : "bd93291c149e328919a9a2881575494f6ea9245f", + "version" : "1.5.0" + } + }, { "identity" : "lottie-spm", "kind" : "remoteSourceControl", diff --git a/Package.swift b/Package.swift index 7a80b14d..0768a9a2 100644 --- a/Package.swift +++ b/Package.swift @@ -15,12 +15,16 @@ let package = Package( dependencies: [ .package(url: "https://github.com/weichsel/ZIPFoundation.git", .upToNextMajor(from: "0.9.0")), .package(url: "https://github.com/airbnb/lottie-spm", from: "4.4.2"), - .package(url: "https://github.com/fingerprintjs/fingerprintjs-ios", from: "4.4.2") + .package(url: "https://github.com/fingerprintjs/fingerprintjs-ios", from: "1.5.0") ], targets: [ .target( name: "SmileID", - dependencies: ["ZIPFoundation", "FingerprintJS", .product(name: "Lottie", package: "lottie-spm")], + dependencies: [ + .product(name: "ZIPFoundation", package: "ZIPFoundation"), + .product(name: "FingerprintJS", package: "fingerprintjs-ios"), + .product(name: "Lottie", package: "lottie-spm") + ], path: "Sources/SmileID", resources: [.process("Resources")] ), From 492580cdef694291c8557e774bbec640de8a660e Mon Sep 17 00:00:00 2001 From: Davina Anthony <97633603+daviinaa@users.noreply.github.com> Date: Tue, 26 Nov 2024 18:49:08 +0100 Subject: [PATCH 08/18] added autoassign to workflow (#259) * added autoassign to workflow * added autoassign to workflow/fix --- .github/workflows/auto-author-assign.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 .github/workflows/auto-author-assign.yml diff --git a/.github/workflows/auto-author-assign.yml b/.github/workflows/auto-author-assign.yml new file mode 100644 index 00000000..7a7439bc --- /dev/null +++ b/.github/workflows/auto-author-assign.yml @@ -0,0 +1,16 @@ +# .github/workflows/auto-author-assign.yml +name: Auto Author Assign + +on: + pull_request_target: + types: [opened, reopened] + +permissions: + pull-requests: write + +jobs: + assign-author: + if: ${{ !contains(github.event.pull_request.assignees, '') }} + runs-on: ubuntu-latest + steps: + - uses: toshimaru/auto-author-assign@v2.1.1 From 7f5d0745cd5f73ad3391cf412f137868964271da Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Wed, 27 Nov 2024 08:56:14 +0100 Subject: [PATCH 09/18] pod install --- Example/Podfile.lock | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Example/Podfile.lock b/Example/Podfile.lock index f9273bac..aa431d6b 100644 --- a/Example/Podfile.lock +++ b/Example/Podfile.lock @@ -51,7 +51,7 @@ SPEC CHECKSUMS: lottie-ios: fcb5e73e17ba4c983140b7d21095c834b3087418 netfox: 9d5cc727fe7576c4c7688a2504618a156b7d44b7 Sentry: f8374b5415bc38dfb5645941b3ae31230fbeae57 - SmileID: 44fef36001a02aa7362368e8a3f1127c03751166 + SmileID: dc04628f6e1572fc6e407649bfd05f91647ed947 SwiftLint: 3fe909719babe5537c552ee8181c0031392be933 ZIPFoundation: 
b8c29ea7ae353b309bc810586181fd073cb3312c From 0b8e43e23fbecda9441c834e2139c78913c2e989 Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Wed, 27 Nov 2024 08:57:02 +0100 Subject: [PATCH 10/18] add beta tag to strict mode products. add a cancel toolbar button to all product screens. remove cancel button from liveness instructions screen. --- Example/SmileID/Home/HomeView.swift | 4 ++-- Example/SmileID/Home/ProductCell.swift | 10 ++++++++++ .../View/FaceShapedProgressIndicator.swift | 2 +- .../View/LivenessCaptureInstructionsView.swift | 11 ----------- Sources/SmileID/Classes/SmileID.swift | 5 +++-- 5 files changed, 16 insertions(+), 16 deletions(-) diff --git a/Example/SmileID/Home/HomeView.swift b/Example/SmileID/Home/HomeView.swift index 43029189..bed74ca4 100644 --- a/Example/SmileID/Home/HomeView.swift +++ b/Example/SmileID/Home/HomeView.swift @@ -54,7 +54,7 @@ struct HomeView: View { ) ProductCell( image: "smart_selfie_enroll", - name: "SmartSelfie™ Enrollment (Strict Mode)", + name: "SmartSelfie™ Enrollment (Strict Mode)\n(BETA)", onClick: { viewModel.onProductClicked() }, @@ -74,7 +74,7 @@ struct HomeView: View { ) ProductCell( image: "smart_selfie_authentication", - name: "SmartSelfie™ Authentication (Strict Mode)", + name: "SmartSelfie™ Authentication (Strict Mode)\n(BETA)", onClick: { viewModel.onProductClicked() }, diff --git a/Example/SmileID/Home/ProductCell.swift b/Example/SmileID/Home/ProductCell.swift index a25967ce..d8283b8f 100644 --- a/Example/SmileID/Home/ProductCell.swift +++ b/Example/SmileID/Home/ProductCell.swift @@ -46,6 +46,16 @@ struct ProductCell: View { content: { NavigationView { content() + .toolbar { + ToolbarItem(placement: .cancellationAction) { + Button { + isPresented = false + } label: { + Text(SmileIDResourcesHelper.localizedString(for: "Action.Cancel")) + .foregroundColor(SmileID.theme.accent) + } + } + } } .environment(\.modalMode, $isPresented) } diff --git a/Sources/SmileID/Classes/SelfieCapture/View/FaceShapedProgressIndicator.swift b/Sources/SmileID/Classes/SelfieCapture/View/FaceShapedProgressIndicator.swift index f139dda9..6c2aa9e9 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/FaceShapedProgressIndicator.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/FaceShapedProgressIndicator.swift @@ -4,7 +4,7 @@ import SwiftUI struct FaceShapedProgressIndicator: View { let progress: Double private let strokeWidth = 10 - private let faceShape = FaceShape().scale(x: 0.8, y: 0.6).offset(y: -50) + private let faceShape = FaceShape().scale(x: 0.8, y: 0.55).offset(y: -50) private let bgColor = Color.white.opacity(0.8) var body: some View { bgColor diff --git a/Sources/SmileID/Classes/SelfieCapture/View/LivenessCaptureInstructionsView.swift b/Sources/SmileID/Classes/SelfieCapture/View/LivenessCaptureInstructionsView.swift index 153c4a50..27ec1df1 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/LivenessCaptureInstructionsView.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/LivenessCaptureInstructionsView.swift @@ -2,7 +2,6 @@ import Lottie import SwiftUI public struct LivenessCaptureInstructionsView: View { - @Environment(\.modalMode) private var modalMode @State private var showSelfieCaptureView: Bool = false private let showAttribution: Bool @@ -15,16 +14,6 @@ public struct LivenessCaptureInstructionsView: View { public var body: some View { VStack { - HStack { - Button { - self.modalMode.wrappedValue = false - } label: { - Text(SmileIDResourcesHelper.localizedString(for: "Action.Cancel")) - .foregroundColor(SmileID.theme.accent) - 
} - Spacer() - } - ZStack { LottieView { try await DotLottieFile.named("instructions_no_progress", bundle: SmileIDResourcesHelper.bundle) diff --git a/Sources/SmileID/Classes/SmileID.swift b/Sources/SmileID/Classes/SmileID.swift index b1bcdb1e..07aaa38c 100644 --- a/Sources/SmileID/Classes/SmileID.swift +++ b/Sources/SmileID/Classes/SmileID.swift @@ -304,6 +304,7 @@ public class SmileID { showAttribution: Bool = true, showInstructions: Bool = true, useStrictMode: Bool = false, + skipApiSubmission: Bool = false, extraPartnerParams: [String: String] = [:], delegate: SmartSelfieResultDelegate ) -> some View { @@ -318,7 +319,7 @@ public class SmileID { showInstructions: showInstructions, useStrictMode: useStrictMode, extraPartnerParams: extraPartnerParams, - skipApiSubmission: false, + skipApiSubmission: skipApiSubmission, onResult: delegate ) } else { @@ -331,7 +332,7 @@ public class SmileID { showAttribution: showAttribution, showInstructions: showInstructions, extraPartnerParams: extraPartnerParams, - skipApiSubmission: false, + skipApiSubmission: skipApiSubmission, onResult: delegate ) } From 7bd809bbdcec75cc7acbeff83bbc2f7d34c3e111 Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Wed, 27 Nov 2024 10:28:34 +0100 Subject: [PATCH 11/18] use a different multiplier for checking face bounds for selfie and liveness capture hide liveness progress if face is not valid. --- Sources/SmileID/Classes/FaceDetector/FaceValidator.swift | 9 +++++++-- .../SelfieCapture/View/SelfieCaptureScreenV2.swift | 3 ++- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift b/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift index bee6b2ec..c2339c9f 100644 --- a/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift +++ b/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift @@ -17,7 +17,8 @@ final class FaceValidator { // MARK: Constants private let selfieQualityThreshold: Float = 0.5 private let luminanceThreshold: ClosedRange = 80...200 - private let faceBoundsMultiplier: CGFloat = 1.5 + private let selfiefaceBoundsMultiplier: CGFloat = 1.5 + private let livenessfaceBoundsMultiplier: CGFloat = 2.2 private let faceBoundsThreshold: CGFloat = 50 init() {} @@ -101,8 +102,12 @@ final class FaceValidator { } // MARK: Validation Checks - private func checkFaceSizeAndPosition(using boundingBox: CGRect, shouldCheckCentering: Bool) -> FaceBoundsState { + private func checkFaceSizeAndPosition( + using boundingBox: CGRect, + shouldCheckCentering: Bool + ) -> FaceBoundsState { let maxFaceWidth = faceLayoutGuideFrame.width - 20 + let faceBoundsMultiplier = shouldCheckCentering ? 
selfiefaceBoundsMultiplier : livenessfaceBoundsMultiplier let minFaceWidth = faceLayoutGuideFrame.width / faceBoundsMultiplier if boundingBox.width > maxFaceWidth { diff --git a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift index 1eb21739..52fc603b 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift @@ -34,7 +34,8 @@ public struct SelfieCaptureScreenV2: View { showGuideAnimation: viewModel.showGuideAnimation, guideAnimation: viewModel.userInstruction?.guideAnimation ) - if let currentLivenessTask = viewModel.livenessCheckManager.currentTask { + if let currentLivenessTask = viewModel.livenessCheckManager.currentTask, + viewModel.faceInBounds { LivenessGuidesView( currentLivenessTask: currentLivenessTask, topArcProgress: $viewModel.livenessCheckManager.lookUpProgress, From 1f890a4dc31b67ed18c49512db5f55daaba8521e Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Wed, 27 Nov 2024 10:50:59 +0100 Subject: [PATCH 12/18] bump build number and add haptic feedback to selfie capture. --- Example/SmileID.xcodeproj/project.pbxproj | 8 +++---- .../Classes/Helpers/HapticManager.swift | 23 +++++++++++++++++++ .../SelfieCapture/SelfieViewModelV2.swift | 4 ++++ 3 files changed, 31 insertions(+), 4 deletions(-) create mode 100644 Sources/SmileID/Classes/Helpers/HapticManager.swift diff --git a/Example/SmileID.xcodeproj/project.pbxproj b/Example/SmileID.xcodeproj/project.pbxproj index ebf5b21c..ef649f15 100644 --- a/Example/SmileID.xcodeproj/project.pbxproj +++ b/Example/SmileID.xcodeproj/project.pbxproj @@ -891,7 +891,7 @@ CODE_SIGN_IDENTITY = "Apple Development"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "Apple Development"; CODE_SIGN_STYLE = Manual; - CURRENT_PROJECT_VERSION = 36; + CURRENT_PROJECT_VERSION = 37; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; DEVELOPMENT_TEAM = ""; "DEVELOPMENT_TEAM[sdk=iphoneos*]" = 99P7YGX9Q6; @@ -901,7 +901,7 @@ INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.developer-tools"; IPHONEOS_DEPLOYMENT_TARGET = 14.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; - MARKETING_VERSION = 1.3.0; + MARKETING_VERSION = 1.3.1; MODULE_NAME = ExampleApp; PRODUCT_BUNDLE_IDENTIFIER = "com.smileidentity.example-ios"; PRODUCT_NAME = "Smile ID"; @@ -924,7 +924,7 @@ CODE_SIGN_IDENTITY = "Apple Distribution"; "CODE_SIGN_IDENTITY[sdk=iphoneos*]" = "iPhone Distribution"; CODE_SIGN_STYLE = Manual; - CURRENT_PROJECT_VERSION = 36; + CURRENT_PROJECT_VERSION = 37; DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym"; DEVELOPMENT_TEAM = ""; "DEVELOPMENT_TEAM[sdk=iphoneos*]" = 99P7YGX9Q6; @@ -934,7 +934,7 @@ INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.developer-tools"; IPHONEOS_DEPLOYMENT_TARGET = 14.0; LD_RUNPATH_SEARCH_PATHS = "$(inherited) @executable_path/Frameworks"; - MARKETING_VERSION = 1.3.0; + MARKETING_VERSION = 1.3.1; MODULE_NAME = ExampleApp; PRODUCT_BUNDLE_IDENTIFIER = "com.smileidentity.example-ios"; PRODUCT_NAME = "Smile ID"; diff --git a/Sources/SmileID/Classes/Helpers/HapticManager.swift b/Sources/SmileID/Classes/Helpers/HapticManager.swift new file mode 100644 index 00000000..fd1cf28b --- /dev/null +++ b/Sources/SmileID/Classes/Helpers/HapticManager.swift @@ -0,0 +1,23 @@ +import UIKit + +class HapticManager { + static let shared = HapticManager() + + private init() {} + + // MARK: Notification Feedback + + /// Triggers a notification 
haptic feedback + /// - Parameter type: The notification type (success, warning, error) + func notification(type: UINotificationFeedbackGenerator.FeedbackType) { + let generator = UINotificationFeedbackGenerator() + generator.notificationOccurred(type) + } + + // MARK: Impact Feedback + + func impact(style: UIImpactFeedbackGenerator.FeedbackStyle) { + let generator = UIImpactFeedbackGenerator(style: style) + generator.impactOccurred() + } +} diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift index 4f713d9c..b168b98c 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift @@ -154,6 +154,7 @@ public class SelfieViewModelV2: ObservableObject { faceDetector.processImageBuffer(imageBuffer) if hasDetectedValidFace && selfieImage == nil { captureSelfieImage(imageBuffer) + HapticManager.shared.notification(type: .success) livenessCheckManager.initiateLivenessCheck() } } @@ -331,6 +332,7 @@ extension SelfieViewModelV2: LivenessCheckManagerDelegate { guard let imageBuffer = currentFrameBuffer else { return } captureLivenessImage(imageBuffer) captureLivenessImage(imageBuffer) + HapticManager.shared.notification(type: .success) } func didCompleteLivenessChallenge() { @@ -409,6 +411,7 @@ extension SelfieViewModelV2: SelfieSubmissionDelegate { // MARK: SelfieJobSubmissionDelegate Methods func submissionDidSucceed(_ apiResponse: SmartSelfieResponse) { + HapticManager.shared.notification(type: .success) DispatchQueue.main.async { self.apiResponse = apiResponse self.selfieCaptureState = .processing(.success) @@ -420,6 +423,7 @@ extension SelfieViewModelV2: SelfieSubmissionDelegate { errorMessage: String?, errorMessageRes: String? ) { + HapticManager.shared.notification(type: .error) DispatchQueue.main.async { self.error = error self.errorMessage = errorMessage From 971cf50d9657b58248a27bb8ca9e19f759eb8fe9 Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Fri, 29 Nov 2024 11:11:25 +0100 Subject: [PATCH 13/18] reduce luminance threshold lowerbound --- .../SelfieCapture/SelfieViewModelV2.swift | 38 +++++++++++++++++-- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift index b168b98c..b4ed15fd 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift @@ -141,7 +141,7 @@ public class SelfieViewModelV2: ObservableObject { latest: true ) // Drop the first ~2 seconds to allow the user to settle in - .dropFirst(5) + .dropFirst(5) .compactMap { $0 } .sink { [weak self] imageBuffer in self?.analyzeFrame(imageBuffer: imageBuffer) @@ -237,17 +237,47 @@ extension SelfieViewModelV2 { pixelBuffer, height: selfieImageSize, orientation: .up - ) + ), + let uiImage = UIImage(data: imageData) else { throw SmileIDError.unknown("Error resizing selfie image") } - self.selfieImage = UIImage(data: imageData) + self.selfieImage = flipImageForPreview(uiImage) self.selfieImageURL = try LocalStorage.createSelfieFile(jobId: jobId, selfieFile: imageData) } catch { handleError(error) } } + private func flipImageForPreview(_ image: UIImage) -> UIImage? 
{ + guard let cgImage = image.cgImage else { return nil } + + let contextSize = CGSize(width: image.size.height, height: image.size.width) + UIGraphicsBeginImageContextWithOptions(contextSize, false, 1.0) + defer { UIGraphicsEndImageContext() } + guard let context = UIGraphicsGetCurrentContext() else { + return nil + } + // Apply a 90° clockwise rotation + // Translate the context to the center before rotating + // to ensure the image rotates around it center + context.translateBy(x: contextSize.width / 2, y: contextSize.height / 2) + context.rotate(by: .pi / 2) + + // Flip the context horizontally to compensate for the mirroring effect. + context.scaleBy(x: -1.0, y: 1.0) + + // Draw the image + context.draw( + cgImage, + in: CGRect( + x: -image.size.width / 2, y: -image.size.height / 2, width: image.size.width, height: image.size.height) + ) + + let correctedImage = UIGraphicsGetImageFromCurrentImageContext() + return correctedImage + } + private func captureLivenessImage(_ pixelBuffer: CVPixelBuffer) { do { guard @@ -391,7 +421,7 @@ extension SelfieViewModelV2: SelfieSubmissionDelegate { public func onFinished(callback: SmartSelfieResultDelegate) { if let selfieImageURL = selfieImageURL, - let selfiePath = getRelativePath(from: selfieImageURL), + let selfiePath = getRelativePath(from: selfieImageURL), livenessImages.count == numLivenessImages, !livenessImages.contains(where: { getRelativePath(from: $0) == nil }) { let livenessImagesPaths = livenessImages.compactMap { getRelativePath(from: $0) } From 2b40723a3b543fea60c3cd60d153bfc39e45b3ab Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Fri, 29 Nov 2024 11:11:48 +0100 Subject: [PATCH 14/18] reduce luminace threshold lower bound --- Sources/SmileID/Classes/FaceDetector/FaceValidator.swift | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift b/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift index c2339c9f..af075044 100644 --- a/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift +++ b/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift @@ -16,7 +16,7 @@ final class FaceValidator { // MARK: Constants private let selfieQualityThreshold: Float = 0.5 - private let luminanceThreshold: ClosedRange = 80...200 + private let luminanceThreshold: ClosedRange = 40...200 private let selfiefaceBoundsMultiplier: CGFloat = 1.5 private let livenessfaceBoundsMultiplier: CGFloat = 2.2 private let faceBoundsThreshold: CGFloat = 50 From 7efd7204f08539a943e98b711b139a639a64f5a9 Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Fri, 29 Nov 2024 12:19:57 +0100 Subject: [PATCH 15/18] introduce a function to flip the selfie image for preview during submission. --- .../SelfieCapture/SelfieViewModelV2.swift | 32 ++++++++++++------- 1 file changed, 20 insertions(+), 12 deletions(-) diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift index b4ed15fd..a073426b 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift @@ -252,20 +252,20 @@ extension SelfieViewModelV2 { private func flipImageForPreview(_ image: UIImage) -> UIImage? 
{ guard let cgImage = image.cgImage else { return nil } - let contextSize = CGSize(width: image.size.height, height: image.size.width) + let contextSize = CGSize(width: image.size.width, height: image.size.height) UIGraphicsBeginImageContextWithOptions(contextSize, false, 1.0) - defer { UIGraphicsEndImageContext() } + defer { + UIGraphicsEndImageContext() + } guard let context = UIGraphicsGetCurrentContext() else { return nil } - // Apply a 90° clockwise rotation + + // Apply a 180° counterclockwise rotation // Translate the context to the center before rotating - // to ensure the image rotates around it center + // to ensure the image rotates around its center context.translateBy(x: contextSize.width / 2, y: contextSize.height / 2) - context.rotate(by: .pi / 2) - - // Flip the context horizontally to compensate for the mirroring effect. - context.scaleBy(x: -1.0, y: 1.0) + context.rotate(by: -.pi) // Draw the image context.draw( @@ -274,7 +274,9 @@ extension SelfieViewModelV2 { x: -image.size.width / 2, y: -image.size.height / 2, width: image.size.width, height: image.size.height) ) + // Get the new UIImage from the context let correctedImage = UIGraphicsGetImageFromCurrentImageContext() + return correctedImage } @@ -358,10 +360,16 @@ extension SelfieViewModelV2: FaceValidatorDelegate { // MARK: LivenessCheckManagerDelegate Methods extension SelfieViewModelV2: LivenessCheckManagerDelegate { func didCompleteLivenessTask() { - // capture liveness image twice - guard let imageBuffer = currentFrameBuffer else { return } - captureLivenessImage(imageBuffer) - captureLivenessImage(imageBuffer) + // capture first frame + guard let firstFrameBuffer = currentFrameBuffer else { return } + captureLivenessImage(firstFrameBuffer) + + // capture a second frame after a slight delay + // to ensure it's a different frame + DispatchQueue.main.asyncAfter(deadline: .now() + 0.8) { [weak self] in + guard let secondFrameBuffer = self?.currentFrameBuffer else { return } + self?.captureLivenessImage(secondFrameBuffer) + } HapticManager.shared.notification(type: .success) } From ebc1c94681c03b6a7207926efdc0a01cfaf01f8f Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Fri, 29 Nov 2024 12:20:14 +0100 Subject: [PATCH 16/18] reduce head turn thresholds --- .../SmileID/Classes/FaceDetector/LivenessCheckManager.swift | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift b/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift index f858a6f2..92ccab7f 100644 --- a/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift +++ b/Sources/SmileID/Classes/FaceDetector/LivenessCheckManager.swift @@ -26,11 +26,11 @@ class LivenessCheckManager: ObservableObject { /// The minimum threshold for yaw (left-right head movement) private let minYawAngleThreshold: CGFloat = 0.15 /// The maximum threshold for yaw (left-right head movement) - private let maxYawAngleThreshold: CGFloat = 0.3 + private let maxYawAngleThreshold: CGFloat = 0.25 /// The minimum threshold for pitch (up-down head movement) private let minPitchAngleThreshold: CGFloat = 0.15 /// The maximum threshold for pitch (up-down head movement) - private let maxPitchAngleThreshold: CGFloat = 0.3 + private let maxPitchAngleThreshold: CGFloat = 0.25 /// The timeout duration for each task in seconds. 
private let taskTimeoutDuration: TimeInterval = 120 From d3d3c2629461b1373c1ec1756be389bb70ed25f8 Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Fri, 29 Nov 2024 12:42:48 +0100 Subject: [PATCH 17/18] remove brightness check from selfie capture v2 --- Example/Tests/FaceValidatorTests.swift | 30 ++++--------------- .../Classes/FaceDetector/FaceDetectorV2.swift | 29 ++---------------- .../Classes/FaceDetector/FaceValidator.swift | 12 ++------ .../SelfieCapture/SelfieViewModelV2.swift | 4 +-- 4 files changed, 10 insertions(+), 65 deletions(-) diff --git a/Example/Tests/FaceValidatorTests.swift b/Example/Tests/FaceValidatorTests.swift index a345a42f..46de33a7 100644 --- a/Example/Tests/FaceValidatorTests.swift +++ b/Example/Tests/FaceValidatorTests.swift @@ -24,8 +24,7 @@ class FaceValidatorTests: XCTestCase { func testValidateWithValidFace() { let result = performValidation( faceBoundingBox: CGRect(x: 65, y: 164, width: 190, height: 190), - selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9), - brighness: 100 + selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9) ) XCTAssertTrue(result.faceInBounds) @@ -36,8 +35,7 @@ class FaceValidatorTests: XCTestCase { func testValidateWithFaceTooSmall() { let result = performValidation( faceBoundingBox: CGRect(x: 65, y: 164, width: 100, height: 100), - selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9), - brighness: 100 + selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9) ) XCTAssertFalse(result.faceInBounds) @@ -48,8 +46,7 @@ class FaceValidatorTests: XCTestCase { func testValidateWithFaceTooLarge() { let result = performValidation( faceBoundingBox: CGRect(x: 65, y: 164, width: 250, height: 250), - selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9), - brighness: 100 + selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9) ) XCTAssertFalse(result.faceInBounds) @@ -60,8 +57,7 @@ class FaceValidatorTests: XCTestCase { func testValidWithFaceOffCentre() { let result = performValidation( faceBoundingBox: CGRect(x: 125, y: 164, width: 190, height: 190), - selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9), - brighness: 100 + selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9) ) XCTAssertFalse(result.faceInBounds) @@ -69,23 +65,10 @@ class FaceValidatorTests: XCTestCase { XCTAssertEqual(result.userInstruction, .headInFrame) } - func testValidateWithPoorBrightness() { - let result = performValidation( - faceBoundingBox: CGRect(x: 65, y: 164, width: 190, height: 190), - selfieQualityData: SelfieQualityData(failed: 0.1, passed: 0.9), - brighness: 70 - ) - - XCTAssertTrue(result.faceInBounds) - XCTAssertFalse(result.hasDetectedValidFace) - XCTAssertEqual(result.userInstruction, .goodLight) - } - func testValidateWithPoorSelfieQuality() { let result = performValidation( faceBoundingBox: CGRect(x: 65, y: 164, width: 190, height: 190), - selfieQualityData: SelfieQualityData(failed: 0.6, passed: 0.4), - brighness: 70 + selfieQualityData: SelfieQualityData(failed: 0.6, passed: 0.4) ) XCTAssertTrue(result.faceInBounds) @@ -97,7 +80,6 @@ class FaceValidatorTests: XCTestCase { let result = performValidation( faceBoundingBox: CGRect(x: 65, y: 164, width: 190, height: 190), selfieQualityData: SelfieQualityData(failed: 0.3, passed: 0.7), - brighness: 100, livenessTask: .lookLeft ) @@ -112,7 +94,6 @@ extension FaceValidatorTests { func performValidation( faceBoundingBox: CGRect, selfieQualityData: SelfieQualityData, - brighness: Int, livenessTask: LivenessTask? 
= nil ) -> FaceValidationResult { let faceGeometry = FaceGeometryData( @@ -125,7 +106,6 @@ extension FaceValidatorTests { faceValidator.validate( faceGeometry: faceGeometry, selfieQuality: selfieQualityData, - brightness: brighness, currentLivenessTask: livenessTask ) diff --git a/Sources/SmileID/Classes/FaceDetector/FaceDetectorV2.swift b/Sources/SmileID/Classes/FaceDetector/FaceDetectorV2.swift index 8a9ba94f..a07790dd 100644 --- a/Sources/SmileID/Classes/FaceDetector/FaceDetectorV2.swift +++ b/Sources/SmileID/Classes/FaceDetector/FaceDetectorV2.swift @@ -19,8 +19,7 @@ protocol FaceDetectorResultDelegate: AnyObject { _ detector: FaceDetectorV2, didDetectFace faceGeometry: FaceGeometryData, withFaceQuality faceQuality: Float, - selfieQuality: SelfieQualityData, - brightness: Int + selfieQuality: SelfieQualityData ) func faceDetector(_ detector: FaceDetectorV2, didFailWithError error: Error) } @@ -77,7 +76,6 @@ class FaceDetectorV2: NSObject { rect: faceObservation.boundingBox) ?? .zero let uiImage = UIImage(pixelBuffer: imageBuffer) - let brightness = self.calculateBrightness(uiImage) let croppedImage = try self.cropImageToFace(uiImage) let selfieQualityData = try self.selfieQualityRequest(imageBuffer: croppedImage) @@ -95,8 +93,7 @@ class FaceDetectorV2: NSObject { self, didDetectFace: faceGeometryData, withFaceQuality: faceQualityObservation.faceCaptureQuality ?? 0.0, - selfieQuality: selfieQualityData, - brightness: brightness + selfieQuality: selfieQualityData ) } else { // Fallback on earlier versions @@ -178,28 +175,6 @@ class FaceDetectorV2: NSObject { return resizedImage } - private func calculateBrightness(_ image: UIImage?) -> Int { - guard let image, let cgImage = image.cgImage, - let imageData = cgImage.dataProvider?.data, - let dataPointer = CFDataGetBytePtr(imageData) - else { - return 0 - } - - let bytesPerPixel = cgImage.bitsPerPixel / cgImage.bitsPerComponent - let dataLength = CFDataGetLength(imageData) - var result = 0.0 - for index in stride(from: 0, to: dataLength, by: bytesPerPixel) { - let red = dataPointer[index] - let green = dataPointer[index + 1] - let blue = dataPointer[index + 2] - result += 0.299 * Double(red) + 0.587 * Double(green) + 0.114 * Double(blue) - } - let pixelsCount = dataLength / bytesPerPixel - let brightness = Int(result) / pixelsCount - return brightness - } - private func faceDirection(faceObservation: VNFaceObservation) -> FaceDirection { guard let yaw = faceObservation.yaw?.doubleValue else { return .none diff --git a/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift b/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift index af075044..d33c5641 100644 --- a/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift +++ b/Sources/SmileID/Classes/FaceDetector/FaceValidator.swift @@ -30,7 +30,6 @@ final class FaceValidator { func validate( faceGeometry: FaceGeometryData, selfieQuality: SelfieQualityData, - brightness: Int, currentLivenessTask: LivenessTask? 
) { // check face bounds @@ -40,16 +39,12 @@ final class FaceValidator { ) let isAcceptableBounds = faceBoundsState == .detectedFaceAppropriateSizeAndPosition - // check brightness - let isAcceptableBrightness = luminanceThreshold.contains(brightness) - // check selfie quality let isAcceptableSelfieQuality = checkSelfieQuality(selfieQuality) // check that face is ready for capture let hasDetectedValidFace = checkValidFace( isAcceptableBounds, - isAcceptableBrightness, isAcceptableSelfieQuality ) @@ -57,7 +52,6 @@ final class FaceValidator { let userInstruction = userInstruction( from: faceBoundsState, detectedValidFace: hasDetectedValidFace, - isAcceptableBrightness: isAcceptableBrightness, isAcceptableSelfieQuality: isAcceptableSelfieQuality, livenessTask: currentLivenessTask ) @@ -73,7 +67,6 @@ final class FaceValidator { private func userInstruction( from faceBoundsState: FaceBoundsState, detectedValidFace: Bool, - isAcceptableBrightness: Bool, isAcceptableSelfieQuality: Bool, livenessTask: LivenessTask? ) -> SelfieCaptureInstruction? { @@ -95,7 +88,7 @@ final class FaceValidator { return .moveCloser } else if faceBoundsState == .detectedFaceTooLarge { return .moveBack - } else if !isAcceptableSelfieQuality || !isAcceptableBrightness { + } else if !isAcceptableSelfieQuality { return .goodLight } return nil @@ -134,9 +127,8 @@ final class FaceValidator { private func checkValidFace( _ isAcceptableBounds: Bool, - _ isAcceptableBrightness: Bool, _ isAcceptableSelfieQuality: Bool ) -> Bool { - return isAcceptableBounds && isAcceptableBrightness && isAcceptableSelfieQuality + return isAcceptableBounds && isAcceptableSelfieQuality } } diff --git a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift index a073426b..a0c24daa 100644 --- a/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/SelfieViewModelV2.swift @@ -324,14 +324,12 @@ extension SelfieViewModelV2: FaceDetectorResultDelegate { _ detector: FaceDetectorV2, didDetectFace faceGeometry: FaceGeometryData, withFaceQuality faceQuality: Float, - selfieQuality: SelfieQualityData, - brightness: Int + selfieQuality: SelfieQualityData ) { faceValidator .validate( faceGeometry: faceGeometry, selfieQuality: selfieQuality, - brightness: brightness, currentLivenessTask: self.livenessCheckManager.currentTask ) if shouldBeginLivenessChallenge { From 40ab6bcf09a64e7e6a27fbe6316bf5ca3b77a0fc Mon Sep 17 00:00:00 2001 From: Tobi Omotayo Date: Fri, 29 Nov 2024 12:43:02 +0100 Subject: [PATCH 18/18] adjust screen brightness for selfie capture screen v2 --- .../View/OrchestratedSelfieCaptureScreenV2.swift | 2 -- .../Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift | 4 ++++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedSelfieCaptureScreenV2.swift b/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedSelfieCaptureScreenV2.swift index d7871607..1193de6e 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedSelfieCaptureScreenV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/OrchestratedSelfieCaptureScreenV2.swift @@ -10,8 +10,6 @@ public struct OrchestratedSelfieCaptureScreenV2: View { public let onResult: SmartSelfieResultDelegate private let viewModel: SelfieViewModelV2 - private var originalBrightness = UIScreen.main.brightness - public init( userId: String, jobId: String, diff --git 
a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift index 52fc603b..b09400d1 100644 --- a/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift +++ b/Sources/SmileID/Classes/SelfieCapture/View/SelfieCaptureScreenV2.swift @@ -7,6 +7,8 @@ public struct SelfieCaptureScreenV2: View { private let faceShape = FaceShape() @Environment(\.modalMode) private var modalMode + private(set) var originalBrightness = UIScreen.main.brightness + public var body: some View { GeometryReader { proxy in VStack(spacing: 10) { @@ -111,10 +113,12 @@ public struct SelfieCaptureScreenV2: View { } .navigationBarHidden(true) .onAppear { + UIScreen.main.brightness = 1 viewModel.perform(action: .windowSizeDetected(proxy.size, proxy.safeAreaInsets)) viewModel.perform(action: .onViewAppear) } .onDisappear { + UIScreen.main.brightness = originalBrightness viewModel.cameraManager.pauseSession() } .alert(item: $viewModel.unauthorizedAlert) { alert in