diff --git a/Sources/FaceLiveness/AV/VideoChunker.swift b/Sources/FaceLiveness/AV/VideoChunker.swift
index 31b8adc0..326e2bc1 100644
--- a/Sources/FaceLiveness/AV/VideoChunker.swift
+++ b/Sources/FaceLiveness/AV/VideoChunker.swift
@@ -33,6 +33,7 @@ final class VideoChunker {
     }
 
     func start() {
+        guard state == .pending else { return }
         state = .writing
         assetWriter.startWriting()
         assetWriter.startSession(atSourceTime: .zero)
diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift
index 04b193de..8b8876a1 100644
--- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift
+++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel+FaceDetectionResultHandler.swift
@@ -54,11 +54,12 @@ extension FaceLivenessDetectionViewModel: FaceDetectionResultHandler {
                 }
             }
         case .recording(ovalDisplayed: false):
-            drawOval()
-            sendInitialFaceDetectedEvent(
-                initialFace: normalizedFace.boundingBox,
-                videoStartTime: Date().timestampMilliseconds
-            )
+            drawOval(onComplete: {
+                self.sendInitialFaceDetectedEvent(
+                    initialFace: normalizedFace.boundingBox,
+                    videoStartTime: Date().timestampMilliseconds
+                )
+            })
         case .recording(ovalDisplayed: true):
             guard let sessionConfiguration = sessionConfiguration else { return }
             let instruction = faceInOvalMatching.faceMatchState(
diff --git a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
index e21d261c..5b8380fa 100644
--- a/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
+++ b/Sources/FaceLiveness/Views/Liveness/FaceLivenessDetectionViewModel.swift
@@ -134,8 +134,7 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         }
     }
-
-    func drawOval() {
+    func drawOval(onComplete: @escaping () -> Void) {
         guard 
             livenessState.state == .recording(ovalDisplayed: false),
             let ovalParameters = sessionConfiguration?.ovalMatchChallenge.oval
         else { return }
@@ -158,6 +157,7 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         livenessViewControllerDelegate?.drawOvalInCanvas(normalizedOvalRect)
         DispatchQueue.main.async {
             self.livenessState.ovalDisplayed()
+            onComplete()
         }
         ovalRect = normalizedOvalRect
     }
@@ -212,6 +212,7 @@ class FaceLivenessDetectionViewModel: ObservableObject {
         initialFace: CGRect,
         videoStartTime: UInt64
     ) {
+        guard initialClientEvent == nil else { return }
         videoChunker.start()
 
         let initialFace = FaceDetection(